程式人生 > kinect 學習筆記二(深度影象的利用--摳取使用者軀體)

kinect 學習筆記二(深度影象的利用--摳取使用者軀體)

今天主要把深度資料和骨骼還有視訊資料同步起來。算是上一次的三個的組合吧。期間遇到點白痴的問題整治了一下午:帶ID的資料的ID是否準確。而且差點把自己的質疑給發到部落格上來,竟然是自己程式碼的錯誤。傷心傷心。。。

直接把程式碼貼出來吧,也許有人覺得貼程式碼沒水平,嗨,給自己留個小版本說不定以後還用得著,而且自信自己程式碼寫的還算是規範,方便後來人嘛。再有一點,那個getTheContour函式畫蛇添足了,實際直接利用深度資料的ID就可以摳出任務的區域。

先上實驗結果:這是實時影象


然後是深度影象、骨骼影象和摳出的人體區域

這是程式碼,希望可以幫助到大家,當然,如果有錯誤歡迎指正:

#include <iostream>
#include <fstream>
#include  "math.h"
#include "Windows.h"    
#include "MSR_NuiApi.h"    
#include "cv.h"    
#include "highgui.h" 

using namespace std;

// Per-player flag set by getSkeletonImage() when at least one joint of that
// player was tracked in the current frame; consumed (and cleared) by main().
bool tracked[NUI_SKELETON_COUNT]={FALSE};
// Joint positions in depth-image coordinates (320x240), one row per player.
CvPoint skeletonPoint[NUI_SKELETON_COUNT][NUI_SKELETON_POSITION_COUNT]={cvPoint(0,0)};
// Joint positions mapped into colour-image coordinates (640x480).
CvPoint colorPoint[NUI_SKELETON_COUNT][NUI_SKELETON_POSITION_COUNT]={cvPoint(0,0)};

void getColorImage(HANDLE &colorEvent, HANDLE &colorStreamHandle, IplImage *colorImage);
void getDepthImage(HANDLE &depthEvent, HANDLE &depthStreamHandle, IplImage *depthImage);
void getSkeletonImage(HANDLE &skeletonEvent, IplImage *skeletonImage, IplImage *colorImage, IplImage *depthImage);
void drawSkeleton(IplImage *image, CvPoint pointSet[], int witchone);
void getTheContour(IplImage *image, int whichone, IplImage *mask);//extract each player's body silhouette

// Entry point: initialises the Kinect sensor for colour + depth(+player index)
// + skeleton streams, then pumps all three in a polling loop, showing the live
// images and the extracted player silhouette until the user presses ESC.
// Returns 0 on normal exit, or the failing HRESULT on initialisation error.
int main()
{
	IplImage *colorImage = cvCreateImage(cvSize(640, 480), 8, 3);     // RGB stream
	IplImage *depthImage = cvCreateImage(cvSize(320, 240), 8, 3);     // colour-coded depth + player index
	IplImage *skeletonImage = cvCreateImage(cvSize(320, 240), 8, 3);  // skeleton overlay
	IplImage *mask = cvCreateImage(cvSize(320, 240), 8, 3);           // extracted player silhouette

	// Manual-reset events signalled by the runtime when a new frame is ready.
	HANDLE colorEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
	HANDLE depthEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
	HANDLE skeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL );

	HANDLE colorStreamHandle = NULL;
	HANDLE depthStreamHandle = NULL;

	HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON);  
	if( hr != S_OK )  
	{  
		cout<<"NuiInitialize failed"<<endl;  
		return hr;  
	}

	hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, NULL, 4, colorEvent, &colorStreamHandle);
	if( hr != S_OK )  
	{  
		cout<<"Open the color Stream failed"<<endl;
		NuiShutdown();
		return hr;  
	}
	hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_320x240, NULL, 2, depthEvent, &depthStreamHandle);
	if( hr != S_OK )  
	{  
		cout<<"Open the depth Stream failed"<<endl;
		NuiShutdown();
		return hr;  
	}
	hr = NuiSkeletonTrackingEnable( skeletonEvent, 0 );//enable skeleton-tracking events
	if( hr != S_OK )  
	{  
		cout << "NuiSkeletonTrackingEnable failed" << endl;  
		NuiShutdown();
		return hr;  
	}

	while (1)
	{
		// NOTE(review): the original author first tried WaitForMultipleObjects
		// on all three events, but only the colour stream fired reliably, so
		// each event is polled individually instead.
		if(WaitForSingleObject(colorEvent, 0)==0)
			getColorImage(colorEvent, colorStreamHandle, colorImage);
		if(WaitForSingleObject(depthEvent, 0)==0)
			getDepthImage(depthEvent, depthStreamHandle, depthImage);
		// INFINITE wait here is deliberate: without it the loop spins before
		// skeletonEvent is ever signalled and the colour window flickers.
		if(WaitForSingleObject(skeletonEvent, INFINITE)==0)
			getSkeletonImage(skeletonEvent, skeletonImage, colorImage, depthImage);

		// Extract the silhouette of the first player tracked this frame.
		for (int i=0; i<NUI_SKELETON_COUNT; i++)
		{
			if(tracked[i] == TRUE)
			{
				cvZero(mask);
				getTheContour(depthImage, i, mask);
				tracked[i] = FALSE;
				break;
			}
		}

		cvShowImage("mask", mask);
		cvShowImage("colorImage", colorImage);
		cvShowImage("depthImage", depthImage);
		cvShowImage("skeletonImage", skeletonImage);

		if(cvWaitKey(20)==27)
			break;
	}

	// Single cleanup path: release every buffer (the original leaked `mask`)
	// and shut the sensor down.
	cvReleaseImage(&colorImage);
	cvReleaseImage(&depthImage);
	cvReleaseImage(&skeletonImage);
	cvReleaseImage(&mask);
	cvDestroyAllWindows();

	NuiShutdown();
	return 0;
}

// Fetch the next 640x480 colour frame and copy it into `colorImage` as
// 3-channel BGR. `colorEvent` is unused here (the caller already waited on
// it); `colorStreamHandle` identifies the open colour stream.
void getColorImage(HANDLE &colorEvent, HANDLE &colorStreamHandle, IplImage *colorImage)
{
	const NUI_IMAGE_FRAME *colorFrame = NULL;

	// Guard against a failed grab: the original dereferenced the frame
	// pointer unconditionally, crashing if the grab returned NULL.
	if (NuiImageStreamGetNextFrame(colorStreamHandle, 0, &colorFrame) != S_OK || colorFrame == NULL)
	{
		cout<<"捕捉視訊幀時發生錯誤"<<endl;
		return;
	}
	NuiImageBuffer *pTexture = colorFrame->pFrameTexture;  

	KINECT_LOCKED_RECT LockedRect;
	pTexture->LockRect(0, &LockedRect, NULL, 0);  

	if( LockedRect.Pitch != 0 )  
	{  
		for (int i=0; i<480; i++)  
		{  
			uchar* ptr = (uchar*)(colorImage->imageData+i*colorImage->widthStep);  
			// Each source byte is one colour component; walk the row as BYTEs.
			BYTE * pBuffer = (BYTE*)(LockedRect.pBits)+i*LockedRect.Pitch;
			for (int j=0; j<640; j++)  
			{  
				// Source pixels are 4 bytes: 0-1-2 are B,G,R; byte 3 is unused.
				ptr[3*j] = pBuffer[4*j];
				ptr[3*j+1] = pBuffer[4*j+1];  
				ptr[3*j+2] = pBuffer[4*j+2];  
			}  
		}  
	}  
	else  
	{  
		cout<<"捕捉視訊幀時發生錯誤"<<endl;  
	}  
	
	NuiImageStreamReleaseFrame( colorStreamHandle, colorFrame );  
}

// Fetch the next 320x240 depth+player-index frame and render it into
// `depthImage` as a colour-coded BGR image: each Kinect pixel packs the
// player ID in the low 3 bits and the depth value in the upper 13 bits.
// Each player ID gets a distinctive channel pattern that getTheContour()
// recognises later to cut out the player. `depthEvent` is unused here
// (the caller already waited on it).
void getDepthImage(HANDLE &depthEvent, HANDLE &depthStreamHandle, IplImage *depthImage)
{
	const NUI_IMAGE_FRAME *depthFrame = NULL;

	// Guard against a failed grab: the original dereferenced the frame
	// pointer unconditionally, crashing if the grab returned NULL.
	if (NuiImageStreamGetNextFrame(depthStreamHandle, 0, &depthFrame) != S_OK || depthFrame == NULL)
	{
		cout << "捕捉深度影象出現錯誤" << endl;
		return;
	}
	NuiImageBuffer *pTexture = depthFrame->pFrameTexture;  

	KINECT_LOCKED_RECT LockedRect;
	pTexture->LockRect(0, &LockedRect, NULL, 0);  

	RGBQUAD q;
	if( LockedRect.Pitch != 0 )
	{
		for (int i=0; i<240; i++)
		{
			uchar *ptr = (uchar*)(depthImage->imageData+i*depthImage->widthStep);
			BYTE *buffer = (BYTE*)(LockedRect.pBits)+i*LockedRect.Pitch;
			USHORT *bufferRun = (USHORT*)buffer;
			for (int j=0; j<320; j++)
			{
				int player = bufferRun[j]&7;            // low 3 bits: player index (0 = no player)
				int data = (bufferRun[j]&0xfff8) >> 3;  // upper 13 bits: raw depth value
				
				// Map depth to brightness: nearer objects become brighter.
				uchar imageData = 255-(uchar)(256*data/0x0fff);
				q.rgbBlue = q.rgbGreen = q.rgbRed = 0;

				switch(player)
				{
				case 0:  
					q.rgbRed = imageData / 2;  
					q.rgbBlue = imageData / 2;  
					q.rgbGreen = imageData / 2;  
					break;  
				case 1:  
					q.rgbRed = imageData;  
					break;  
				case 2:  
					q.rgbGreen = imageData;  
					break;  
				case 3:  
					q.rgbRed = imageData / 4;  
					q.rgbGreen = q.rgbRed*4;  //multiplying back (instead of dividing again) avoids inexact integer division,
					q.rgbBlue = q.rgbRed*4;  //so getTheContour() can match these channels exactly without missed cases
					break;  
				case 4:  
					q.rgbBlue = imageData / 4; 
					q.rgbRed = q.rgbBlue*4;  
					q.rgbGreen = q.rgbBlue*4;  
					break;  
				case 5:  
					q.rgbGreen = imageData / 4; 
					q.rgbRed = q.rgbGreen*4;  
					q.rgbBlue = q.rgbGreen*4;  
					break;  
				case 6:  
					q.rgbRed = imageData / 2;  
					q.rgbGreen = imageData / 2;  
					q.rgbBlue = q.rgbGreen*2;  
					break;  
				case 7:  
					q.rgbRed = 255 - ( imageData / 2 );  
					q.rgbGreen = 255 - ( imageData / 2 );  
					q.rgbBlue = 255 - ( imageData / 2 );
					break;  // defensive: the original fell through off the last case
				}

				ptr[3*j] = q.rgbBlue;
				ptr[3*j+1] = q.rgbGreen;
				ptr[3*j+2] = q.rgbRed;
			}
		}
	}
	else
	{
		cout << "捕捉深度影象出現錯誤" << endl;
	}

	NuiImageStreamReleaseFrame(depthStreamHandle, depthFrame);

}

// Grab the next skeleton frame, convert every tracked joint into depth-image
// coordinates (global skeletonPoint) and colour-image coordinates (global
// colorPoint), draw joints/bones onto colorImage and skeletonImage, and set
// tracked[i] so main() can extract that player's silhouette.
// NOTE(review): depthImage is currently unused (the depth overlay calls are
// commented out); skeletonEvent is unused because the caller already waited.
void getSkeletonImage(HANDLE &skeletonEvent, IplImage *skeletonImage, IplImage *colorImage, IplImage *depthImage)
{
	/*NOTE(review): the author observed different behaviour between these two
	declarations - kept here for reference:
	NUI_SKELETON_FRAME *skeletonFrame = NULL;
	NUI_SKELETON_FRAME skeletonFrame;*/

	NUI_SKELETON_FRAME skeletonFrame;
	bool bFoundSkeleton = false; 

	if(NuiSkeletonGetNextFrame( 0, &skeletonFrame ) == S_OK )  
	{  
		// Scan all six slots for at least one fully tracked skeleton.
		for( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )  
		{  
			if( skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED )
			{  
				bFoundSkeleton = true;  
				//cout << "ok" << endl;
				break;
			}  
		}  
	}
	else
	{
		cout << "沒有找到合適的骨骼幀" << endl;
		return; 
	}

	if( !bFoundSkeleton )  
	{  
		return; 
	}  

	NuiTransformSmooth(&skeletonFrame,NULL);//smooth the skeleton frame to suppress jitter

	cvZero(skeletonImage);  
	for( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )  
	{  
		// Only process skeletons that are tracked and whose shoulder-centre
		// joint has some position information.
		if( skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED &&  
			skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)  
		{  
			float fx, fy;  

			for (int j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)//convert every joint into depth-image coordinates  
			{  
				NuiTransformSkeletonToDepthImageF(skeletonFrame.SkeletonData[i].SkeletonPositions[j], &fx, &fy );  
				// fx/fy are normalised [0,1]; scale to the 320x240 depth image
				// with +0.5f for round-to-nearest.
				skeletonPoint[i][j].x = (int)(fx*320+0.5f);  
				skeletonPoint[i][j].y = (int)(fy*240+0.5f);  
			}  

			for (int j=0; j<NUI_SKELETON_POSITION_COUNT ; j++)  
			{  
				if (skeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[j] != NUI_SKELETON_POSITION_NOT_TRACKED)//a joint has three states: 1 not tracked, 2 tracked, 3 inferred from tracked joints  
				{  
					LONG colorx, colory;
					// Map the depth-image joint position into 640x480 colour
					// image coordinates.
					NuiImageGetColorPixelCoordinatesFromDepthPixel(NUI_IMAGE_RESOLUTION_640x480, 0, 
						skeletonPoint[i][j].x, skeletonPoint[i][j].y, 0,&colorx, &colory);
					colorPoint[i][j].x = int(colorx);colorPoint[i][j].y = int(colory);//save the mapped coordinates
					cvCircle(colorImage, colorPoint[i][j], 4, cvScalar(0, 255, 255), -1, 8, 0);
					//cvCircle(depthImage, skeletonPoint[i][j], 3, cvScalar(0, 255, 255), -1, 8, 0);
					cvCircle(skeletonImage, skeletonPoint[i][j], 3, cvScalar(0, 255, 255), -1, 8, 0);

					tracked[i] = TRUE;
				}
			}

			drawSkeleton(colorImage, colorPoint[i], i);
			//drawSkeleton(depthImage, skeletonPoint[i], i);
			drawSkeleton(skeletonImage, skeletonPoint[i], i);
		}
	}  

	//cvShowImage("skeletonImage", skeletonImage);  
	//cvShowImage("skeletsdfonImage", colorImage);  
	//cvWaitKey(1);  
}

void drawSkeleton(IplImage *image, CvPoint pointSet[], int witchone)
{
	CvScalar color;
	switch(witchone)//跟蹤不同的人顯示不同的顏色
	{
	case 0:
		color = cvScalar(255);
		break;
	case 1:
		color = cvScalar(0,255);
		break;
	case 2:
		color = cvScalar(0, 0, 255);
		break;
	case 3:
		color = cvScalar(255, 255, 0);
		break;
	case 4:
		color = cvScalar(255, 0, 255);
		break;
	case 5:
		color = cvScalar(0, 255, 255);
		break;
	}

	if((pointSet[NUI_SKELETON_POSITION_HEAD].x!=0 || pointSet[NUI_SKELETON_POSITION_HEAD].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_HEAD], pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_SPINE].x!=0 || pointSet[NUI_SKELETON_POSITION_SPINE].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SPINE], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_SPINE].x!=0 || pointSet[NUI_SKELETON_POSITION_SPINE].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_SPINE], pointSet[NUI_SKELETON_POSITION_HIP_CENTER], color, 2);

	//左上肢
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_LEFT], pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_ELBOW_LEFT], pointSet[NUI_SKELETON_POSITION_WRIST_LEFT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_LEFT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_HAND_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HAND_LEFT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_WRIST_LEFT], pointSet[NUI_SKELETON_POSITION_HAND_LEFT], color, 2);

	//右上肢
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_CENTER], pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_SHOULDER_RIGHT], pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_ELBOW_RIGHT], pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_HAND_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HAND_RIGHT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_WRIST_RIGHT], pointSet[NUI_SKELETON_POSITION_HAND_RIGHT], color, 2);

	//左下肢
	if((pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_HIP_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_LEFT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_HIP_CENTER], pointSet[NUI_SKELETON_POSITION_HIP_LEFT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_HIP_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_LEFT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_HIP_LEFT], pointSet[NUI_SKELETON_POSITION_KNEE_LEFT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_LEFT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_KNEE_LEFT], pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_FOOT_LEFT].x!=0 || pointSet[NUI_SKELETON_POSITION_FOOT_LEFT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_ANKLE_LEFT], pointSet[NUI_SKELETON_POSITION_FOOT_LEFT], color, 2);

	//右下肢
	if((pointSet[NUI_SKELETON_POSITION_HIP_CENTER].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_CENTER].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_HIP_CENTER], pointSet[NUI_SKELETON_POSITION_HIP_RIGHT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_HIP_RIGHT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_HIP_RIGHT], pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT],color, 2);
	if((pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_KNEE_RIGHT], pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT], color, 2);
	if((pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT].y!=0) &&
		(pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT].x!=0 || pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT].y!=0))
		cvLine(image, pointSet[NUI_SKELETON_POSITION_ANKLE_RIGHT], pointSet[NUI_SKELETON_POSITION_FOOT_RIGHT], color, 2);
}


// Reconstruct player silhouettes from the colour-coded depth image produced
// by getDepthImage(): each pixel's BGR pattern is matched against the per-ID
// encoding and the corresponding mask pixel is painted with a per-ID colour.
// NOTE(review): `whichone` is currently unused - every recognised ID is
// painted, exactly as in the original implementation.
void getTheContour(IplImage *image, int whichone, IplImage *mask)
{
	for (int row = 0; row < 240; row++)
	{
		uchar *src = (uchar*)(image->imageData + row*image->widthStep);
		uchar *dst = (uchar*)(mask->imageData + row*mask->widthStep);
		for (int col = 0; col < 320; col++)
		{
			// Promote the three channels to int so the 2x/4x relations below
			// are compared with ordinary integer arithmetic.
			const int b = src[3*col];
			const int g = src[3*col+1];
			const int r = src[3*col+2];
			uchar outB = 0, outG = 0, outR = 0;
			bool recognised = true;

			if (b == 0 && g == 0 && r == 0)            // background: stays black
			{
				outB = outG = outR = 0;
			}
			else if (b == 0 && g == 0 && r != 0)       // ID 1 -> green
			{
				outB = 0; outG = 255; outR = 0;
			}
			else if (b == 0 && g != 0 && r == 0)       // ID 2 -> red
			{
				outB = 0; outG = 0; outR = 255;
			}
			else if (b == g && b == 4*r)               // ID 3
			{
				outB = 255; outG = 255; outR = 0;
			}
			else if (4*b == g && g == r)               // ID 4
			{
				outB = 255; outG = 0; outR = 255;
			}
			else if (b == 4*g && b == r)               // ID 5
			{
				outB = 0; outG = 255; outR = 255;
			}
			else if (b == 2*g && g == r)               // ID 6
			{
				outB = 255; outG = 255; outR = 255;
			}
			else if (b == g && b == r)                 // ID 7 or ID 0 -> blue
			{
				outB = 255; outG = 0; outR = 0;
			}
			else                                       // unmatched pattern: leave mask pixel untouched
			{
				recognised = false;
				cout <<"如果輸出這段程式碼,說明有遺漏的情況,請查詢getTheContour函式" << endl;
			}

			if (recognised)
			{
				dst[3*col]   = outB;
				dst[3*col+1] = outG;
				dst[3*col+2] = outR;
			}
		}
	}
}