@ritsz
Created November 12, 2012 10:29
Using Kinect + OpenNI to update the global headPoint[] array, which OSG uses to adjust the views. A 3D Kalman filter removes noisy Kinect measurements; user selection is based on distance from the ROI = (X, Y, Z).
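As a standalone illustration of the smoothing step used below, here is a minimal sketch of the same 6-state (position + velocity) constant-velocity cv::KalmanFilter driven by synthetic noisy measurements around the ROI (0, 500, 3000). It is not part of the gist; the loop length, noise magnitudes and synthetic data are assumptions for demonstration only.

// Standalone illustration (not part of xnpro): smoothing a noisy 3D point with cv::KalmanFilter.
#include <opencv2/core/core.hpp>
#include <opencv2/video/tracking.hpp>
#include <cstdio>
#include <cstdlib>

int main()
{
    cv::KalmanFilter kf(6, 3, 0); // state: x,y,z,vx,vy,vz; measurement: x,y,z
    kf.transitionMatrix = (cv::Mat_<float>(6, 6) <<
        1,0,0,1,0,0,
        0,1,0,0,1,0,
        0,0,1,0,0,1,
        0,0,0,1,0,0,
        0,0,0,0,1,0,
        0,0,0,0,0,1); // constant-velocity model, dt = 1 frame
    cv::setIdentity(kf.measurementMatrix); // only the position is observed
    cv::setIdentity(kf.processNoiseCov, cv::Scalar::all(1e-4));
    cv::setIdentity(kf.measurementNoiseCov, cv::Scalar::all(1e-1));
    cv::setIdentity(kf.errorCovPost, cv::Scalar::all(0.1));

    cv::Mat_<float> measurement(3, 1);
    for (int i = 0; i < 50; i++)
    {
        kf.predict(); // time update
        measurement(0) = 0.0f + (rand() % 20 - 10);    // noisy, roughly static point near the ROI
        measurement(1) = 500.0f + (rand() % 20 - 10);
        measurement(2) = 3000.0f + (rand() % 20 - 10);
        cv::Mat est = kf.correct(measurement); // measurement update
        printf("%7.1f %7.1f %7.1f\n", est.at<float>(0), est.at<float>(1), est.at<float>(2));
    }
    return 0;
}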
unsigned __stdcall xnproThread(void *arg);
extern double headPoint[3];
extern bool endit;
//---------------------------------------------------------------------------
// Includes
//---------------------------------------------------------------------------
#include <stdafx.h>
#include <XnCppWrapper.h>
#include <opencv2/core/core.hpp>
#include <opencv2/video/tracking.hpp>
#include <iostream>
#include <math.h>
#include "xnpro.h"
#include <process.h>
//---------------------------------------------------------------------------
// Defines
//---------------------------------------------------------------------------
#define SAMPLE_XML_PATH "SamplesConfig.xml"
//---------------------------------------------------------------------------
// NameSpaces
//---------------------------------------------------------------------------
using namespace cv;
using namespace xn;
using namespace std;
//---------------------------------------------------------------------------
// Globals
//---------------------------------------------------------------------------
//OpenNI
Context g_Context;
ScriptNode g_scriptNode;
DepthGenerator g_DepthGenerator;
UserGenerator g_UserGenerator;
XnBool g_bNeedPose = FALSE;
XnChar g_strPose[20] = "";
//General
#define MAX_NUM_USERS 5
double headPoint[3]={0};
static bool user_track[MAX_NUM_USERS] = {false,false,false,false,false};
static bool render_flag = false; // Tells whether any user is being tracked currently
static bool lookfor = true;
static bool track = true; // Whether tracking should be done or not
double prevPoint[3] = {0};
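// Flag interplay: while lookfor is true, the per-user loop searches for the tracked user closest to the ROI;
// once one is locked, render_flag becomes true, lookfor becomes false and only that user (curr_id) drives headPoint[].
// Pressing the space bar toggles track, which pauses/resumes the output.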
//---------------------------------------------------------------------------
// Code
//---------------------------------------------------------------------------
XnBool fileExists(const char *fn)
{
/*
Check whether the file referenced by fn exists. Used to check that the *.xml config file is present.
*/
XnBool exists;
xnOSDoesFileExist(fn, &exists);
return exists;
}
//---------------------------------------------------------------------------
// CallBacks
//---------------------------------------------------------------------------
// Callback: New user was detected
void XN_CALLBACK_TYPE User_NewUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
/*
Callback generated when a new user is detected. If g_bNeedPose is true, start detecting the pose declared in g_strPose;
otherwise request calibration directly.
*/
XnUInt32 epochTime = 0;
xnOSGetEpochTime(&epochTime);
printf("\n%d New User %d\n", epochTime, nId);
// New user found
if (g_bNeedPose)
{
g_UserGenerator.GetPoseDetectionCap().StartPoseDetection(g_strPose, nId);
}
else
{
g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}
}
// Callback: An existing user was lost
void XN_CALLBACK_TYPE User_LostUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
/*
Callback for a lost user. Reset headPoint to 0 so that the default view is generated.
*/
XnUInt32 epochTime = 0;
xnOSGetEpochTime(&epochTime);
printf("\n%d Lost user %d\n", epochTime, nId);
headPoint[0]=0;
headPoint[1]=0;
headPoint[2]=0;
// Set render_flag to false as no one is being tracked
// Set lookfor flag to true as we need to look for a new user near the ROI
render_flag = false;
user_track[nId] = false;
lookfor = true;
}
void XN_CALLBACK_TYPE UserExit(xn::UserGenerator& generate, XnUserID nID, void* pCookie)
{
XnUInt32 epochTime = 0;
xnOSGetEpochTime(&epochTime);
printf("\n%d User Exited\n", epochTime);
headPoint[0]=0;
headPoint[1]=0;
headPoint[2]=0;
// Set render_flag to false as no one is being tracked
// Set lookfor flag to true as we need to look for a new user near the ROI
render_flag = false;
user_track[nID] = false;
lookfor = true;
}
// Callback: Detected a pose
void XN_CALLBACK_TYPE UserPose_PoseDetected(xn::PoseDetectionCapability& capability, const XnChar* strPose, XnUserID nId, void* pCookie)
{
/*
If pose detection was started, this callback runs when the pose is successfully detected. It stops pose detection and
starts calibration for that particular user.
*/
XnUInt32 epochTime = 0;
xnOSGetEpochTime(&epochTime);
printf("%d Pose %s detected for user %d\n", epochTime, strPose, nId);
g_UserGenerator.GetPoseDetectionCap().StopPoseDetection(nId);
g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}
// Callback: Started calibration
void XN_CALLBACK_TYPE UserCalibration_CalibrationStart(xn::SkeletonCapability& capability, XnUserID nId, void* pCookie)
{
/*
Runs when user calibration is requested, either by the User_NewUser callback or by the UserPose_PoseDetected callback.
*/
XnUInt32 epochTime = 0;
xnOSGetEpochTime(&epochTime);
//printf("%d Calibration started for user %d\n", epochTime, nId);
}
void XN_CALLBACK_TYPE UserCalibration_CalibrationComplete(xn::SkeletonCapability& capability, XnUserID nId, XnCalibrationStatus eStatus, void* pCookie)
{
/*
When calibration completes successfully, start tracking that particular user. This can be checked with
g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i]), which is done in the main loop. For every user for
which IsTracking is true, joint locations can be read.
If calibration was unsuccessful, retry pose detection / calibration.
*/
XnUInt32 epochTime = 0;
xnOSGetEpochTime(&epochTime);
if (eStatus == XN_CALIBRATION_STATUS_OK)
{
// Calibration succeeded
//printf("%d Calibration complete, start tracking user %d\n", epochTime, nId);
g_UserGenerator.GetSkeletonCap().StartTracking(nId);
}
else
{
// Calibration failed
printf("%d Calibration failed for user %d\n", epochTime, nId);
if(eStatus==XN_CALIBRATION_STATUS_MANUAL_ABORT)
{
printf("Manual abort occured, stop attempting to calibrate!");
return;
}
if (g_bNeedPose)
{
g_UserGenerator.GetPoseDetectionCap().StartPoseDetection(g_strPose, nId);
}
else
{
g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}
}
}
#define CHECK_RC(nRetVal, what) \
if (nRetVal != XN_STATUS_OK) \
printf("%s failed: %s\n", what, xnGetStatusString(nRetVal));
/*
MAIN FUNCTION
*/
unsigned __stdcall xnproThread(void *arg)
{
XnStatus nRetVal = XN_STATUS_OK;
EnumerationErrors errors;
//Setting Up The Kalman Filter
KalmanFilter Kal_Fil(6, 3, 0);
Kal_Fil.transitionMatrix = *(Mat_<float>(6, 6) << 1,0,0,1,0,0, 0,1,0,0,1,0, 0,0,1,0,0,1, 0,0,0,1,0,0, 0,0,0,0,1,0, 0,0,0,0,0,1);
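// 6-state constant-velocity model [x y z vx vy vz] with dt = 1 frame:
// position is advanced by the velocity each update (block form [ I3 I3 ; 0 I3 ]),
// and the velocity is assumed constant between frames.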
Mat_<float> measurement(3,1); measurement.setTo(Scalar(0));
const char *fn = NULL;
if (fileExists(SAMPLE_XML_PATH)) //Checking for the XML file
fn = SAMPLE_XML_PATH;
else
{
printf("Could not find '%s'. Aborting.\n" SAMPLE_XML_PATH);
}
// Context Initialization
reset: nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors); // When resetting, the program starts again from here
if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
{
XnChar strError[1024];
errors.ToString(strError, 1024);
printf("%s\n", strError);
while(1)
{
// If the Kinect is not present, the thread keeps sending (0,0,0) as output and the
// rendering software generates the default view.
headPoint[0] = 0;
headPoint[1] = 0;
headPoint[2] = 0;
}
}
else if (nRetVal != XN_STATUS_OK)
{
printf("Open failed: %s\n", xnGetStatusString(nRetVal));
}
nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator); // Generate the Depth and User Nodes
CHECK_RC(nRetVal,"No depth");
nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
if (nRetVal != XN_STATUS_OK)
{
nRetVal = g_UserGenerator.Create(g_Context);
CHECK_RC(nRetVal, "Find user generator");
}
// Declare and register callbacks.
XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected, hUserExit;
if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
{
printf("Supplied user generator doesn't support skeleton\n");
}
nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
CHECK_RC(nRetVal, "Register to user callbacks");
nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
CHECK_RC(nRetVal, "Register to calibration start");
nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
CHECK_RC(nRetVal, "Register to calibration complete");
nRetVal = g_UserGenerator.RegisterToUserExit(UserExit,NULL,hUserExit);
CHECK_RC(nRetVal, "Register to user Exit");
//Check whether Pose Calibration is required
if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
{
g_bNeedPose = TRUE;
if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
{
printf("Pose required, but not supported\n");
}
nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
CHECK_RC(nRetVal, "Register to Pose Detected");
g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
}
// Select the profile of the skeleton to be tracked
g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_UPPER);
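// The upper-body profile is enough here: only the XN_SKEL_HEAD joint is read below.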
// Start generating data.
nRetVal = g_Context.StartGeneratingAll();
CHECK_RC(nRetVal, "StartGenerating");
//User specific data
//User IDs and Joint positions
XnUserID aUsers[MAX_NUM_USERS];
XnUInt16 nUsers;
XnSkeletonJointTransformation headJoint;
if(g_bNeedPose)
{
printf("Assume calibration pose\n");
}
XnUInt32 epochTime = 0;
//Variables for User Selection Procedure
double dist,min_dist = 60000;
double X = 0;
double Y = 500;
double Z = 3000;
double Closest_X,Closest_Y,Closest_Z;
XnUInt16 user_id;
XnUInt16 curr_id;
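// OpenNI real-world coordinates are in millimetres, so the ROI (X,Y,Z) = (0,500,3000) sits roughly
// 0.5 m above and 3 m in front of the sensor. min_dist starts at a large sentinel (60000) and is
// tightened in the per-user loop; a user is only locked once it comes within 300 mm of the ROI.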
//MAIN LOOP
while (!endit)
{
if(xnOSWasKeyboardHit())
{
//If a Key was pressed
//Decide whether to track or not
char c = xnOSReadCharFromInput();
if (c == 32)
{
system("cls");
track = !track; // Tracking can be paused by pressing SpaceBar
cout<<"\nTracking Paused/Restarted\n";
}
if(c == 27)
{
system("cls");
g_Context.StopGeneratingAll(); // If Esc is pressed, stop generating and end the thread
cout<<"\nTracking Reset\n";
//goto reset;
_endthread();
}
}
g_Context.WaitOneUpdateAll(g_DepthGenerator); // Wait for Updated Data
nUsers=MAX_NUM_USERS;
g_UserGenerator.GetUsers(aUsers, nUsers); // Find all the users and their count (nUsers)
if(nUsers == 0)
{
cout<<"\n Waiting For a User \n ";
}
for(XnUInt16 i=0; i<nUsers; i++)
{
if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])==FALSE) // If the user is not being tracked, continue
continue;
g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_HEAD,headJoint); // get head joint points
double x=headJoint.position.position.X;
double y=headJoint.position.position.Y;
double z=headJoint.position.position.Z;
dist = sqrt( (x-X)*(x-X) + (y-Y)*(y-Y) + (z-Z)*(z-Z) ); // Find the distance of the head point from the ROI
if(track && dist <= min_dist && !render_flag && lookfor)
{
// If no user is being tracked or the user was lost, find the closest user again
// Entered if we are tracking (track) but not yet rendering (render_flag) and still looking (lookfor)
cout<<"\nPLEASE STAND AT THE CENTER ";
min_dist = dist;
user_id = i;
Closest_X = x;
Closest_Y = y;
Closest_Z = z;
}
if(!track)
{
// Entered when tracking is paused; emit (0,0,0) so the default view is shown
headPoint[0] = 0;
headPoint[1] = 0;
headPoint[2] = 0;
}
if(track && !lookfor && render_flag == true && user_track[curr_id] == true)
{
// If a user is already being tracked and we are no longer looking for a user in the ROI, render using that user
// First do the prediction step
Mat prediction = Kal_Fil.predict();
//Point predictPt(prediction.at<float>(0),prediction.at<float>(1));
// Then Do the Measurement
g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[curr_id],XN_SKEL_HEAD,headJoint);
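// ~10 mm dead-band: feed a new measurement to the filter only if the head moved more than 10 mm
// on some axis since the last accepted point; otherwise reuse the previous point to suppress jitter.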
if( (prevPoint[0] - headJoint.position.position.X > 10 || headJoint.position.position.X - prevPoint[0] > 10)
||(prevPoint[1] - headJoint.position.position.Y > 10 || headJoint.position.position.Y - prevPoint[1] > 10)
||(prevPoint[2] - headJoint.position.position.Z > 10 || headJoint.position.position.Z - prevPoint[2] > 10))
{
measurement(0) = headJoint.position.position.X;
measurement(1) = headJoint.position.position.Y;
measurement(2) = headJoint.position.position.Z;
prevPoint[0] = headJoint.position.position.X;
prevPoint[1] = headJoint.position.position.Y;
prevPoint[2] = headJoint.position.position.Z;
}
else
{
measurement(0) = (float)prevPoint[0];
measurement(1) = (float)prevPoint[1];
measurement(2) = (float)prevPoint[2];
}
//Then Do the Measurement Update
//Point measPt(measurement(0),measurement(1));
// The "correct" phase that is going to use the predicted value and our measurement
Mat estimated = Kal_Fil.correct(measurement);
Point3d statePt(estimated.at<float>(0),estimated.at<float>(1),estimated.at<float>(2));
headPoint[0] = statePt.x;
headPoint[1] = statePt.y;
headPoint[2] = statePt.z;
}
}
// END OF FOR LOOP
// user_id has id of the closest user
if(track && lookfor && min_dist < 300 && !render_flag)
{
// If no user is being rendered yet, we are still looking for a user near the ROI, and the closest user is
// within 300 mm of the ROI, make that closest user the current user and start using it for rendering
headPoint[0] = Closest_X;
headPoint[1] = Closest_Y;
headPoint[2] = Closest_Z;
system("cls");
printf("\nUSER %d : %g , %g , %g",user_id,headPoint[0],headPoint[1],headPoint[2]);
render_flag = !render_flag; // Start Rendering
curr_id = user_id; // Make Closest User as Current User
user_track[curr_id] = true;
lookfor = false; // Stop Looking For user
min_dist = 60000; // Make min_dist 60000 again
// When New User Detected + Tracked -> Change the Kalman Filter over to him
cout<<"\n Kalman Reinitiated";
Kal_Fil.statePre.at<float>(0) = (float)headPoint[0];
Kal_Fil.statePre.at<float>(1) = (float)headPoint[1];
Kal_Fil.statePre.at<float>(2) = (float)headPoint[2];
Kal_Fil.statePre.at<float>(3) = 0;
Kal_Fil.statePre.at<float>(4) = 0;
Kal_Fil.statePre.at<float>(5) = 0;
setIdentity(Kal_Fil.measurementMatrix);
setIdentity(Kal_Fil.processNoiseCov, Scalar::all(1e-4));
setIdentity(Kal_Fil.measurementNoiseCov, Scalar::all(1e-1));
setIdentity(Kal_Fil.errorCovPost, Scalar::all(.1));
}
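// Otherwise, if the closest candidate is more than 500 mm from the ROI and nothing is being
// rendered, keep emitting (0,0,0) so that the default view is shown.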
else if( min_dist > 500 && !render_flag)
{
headPoint[0] = 0;
headPoint[1] = 0;
headPoint[2] = 0;
}
}
if(endit)
{
g_scriptNode.Release();
g_DepthGenerator.Release();
g_UserGenerator.Release();
g_Context.Release();
}
return 0;
}
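For reference, a minimal, hypothetical host-side sketch of how a rendering application might launch xnproThread and poll headPoint[]. Only xnproThread, headPoint and endit come from the gist; main(), the frame count and the 33 ms sleep are illustrative assumptions, and a real application would feed headPoint[] into its OSG camera instead of printing it.

// Hypothetical host-side usage sketch (not part of the original gist).
#include <windows.h>
#include <process.h>
#include <cstdio>

unsigned __stdcall xnproThread(void *arg); // tracking thread from xnpro.cpp
extern double headPoint[3];                // defined in xnpro.cpp
bool endit = false;                        // xnpro.cpp declares this extern

int main()
{
    HANDLE hThread = (HANDLE)_beginthreadex(NULL, 0, xnproThread, NULL, 0, NULL);
    for (int frame = 0; frame < 600; frame++) // stand-in for the OSG render loop
    {
        // A real renderer would offset its camera by headPoint[];
        // (0,0,0) means "no user tracked", i.e. show the default view.
        printf("head: %g %g %g\n", headPoint[0], headPoint[1], headPoint[2]);
        Sleep(33);
    }
    endit = true;                          // ask the tracking thread to finish
    WaitForSingleObject(hThread, INFINITE);
    CloseHandle(hThread);
    return 0;
}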