HumanMotion.cpp
Uploaded by: jsylhbnbhn
Upload date: 2013-11-03
Archive size: 119k
File size: 19k
Source category: OpenCV
Development platform: Visual C++

// HumanMotion.cpp: implementation of the HumanMotion class.
//
//////////////////////////////////////////////////////////////////////
#ifdef _CH_
#pragma package <opencv>
#endif
#ifndef _EiC
#include "cv.h"
#include "highgui.h"
#include <stdio.h>
#include <ctype.h>
#include <math.h>
#include "skelecton.h"
#include "skeletons.h"
#include "kalman.h"
#include "OpenGL.h"
#include "stdafx.h"
#include "HumanMotion.h"
#endif
IplImage *image = 0, *grey = 0, *prev_grey = 0,
    *pyramid = 0, *prev_pyramid = 0, *motion_track_img = 0, *swap_temp;
IplImage* background_image = 0;
int win_size = 10;
const int MAX_COUNT = 13;  // number of skeleton joints
CvPoint2D32f* points[2] = {0,0}, *swap_points;
char* status = 0;
int count = 0;
int need_to_init = 1;
int night_mode = 0;
int flags = 0;
int add_remove_pt = 0;
int pause = 0;
bool init_OK = false;
CvPoint pt;
CvSize image_sz;
OpenGL *pgl;
// five U-shaped (concave-up) joints
U_Joint ujoints[5];
int ujoints_count = 0;
// three n-shaped (concave-down) joints: two between the arms and the torso, one at the crotch
U_Joint njoints[3];
int njoints_count = 0;
const float pi = 3.1415926f;
CvMemStorage* storage = 0; // temporary storage
IplImage* abs_image = 0;
IplImage* add_abs_image = 0;
IplImage* add_abs_image2 = 0;
IplImage* dst = 0;
IplImage* src = 0;
CvSeq* contour = 0;
IplConvKernel* element = 0;
int frame_count = 0;
void on_mouse( int event, int x, int y, int flags, void *param )
{
    if( !image )
        return;
    if( image->origin )
        y = image->height - y;
    if( event == CV_EVENT_LBUTTONDOWN )
    {
        pt = cvPoint(x,y);
        add_remove_pt = 1;
    }
}
HumanMotion::HumanMotion()
{
}
HumanMotion::~HumanMotion()
{
}
int HumanMotion::Start()
{
    main_loop(1);
    return 0;
}
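// main_loop: core processing loop.
// useCAM == 1 reads an AVI file, useCAM == 2 opens the default camera.
// Per frame the loop (a) maintains a running-average background and extracts the
// body contour to auto-initialize the 13 skeleton joints (need_to_init), (b) tracks
// the joints with pyramidal Lucas-Kanade optical flow and lets the per-joint Kalman
// filters correct the positions, and (c) hands the joint set to the skelecton object
// for OpenGL rendering. The window/key handling mirrors the OpenCV lkdemo sample
// this file appears to be derived from.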
int HumanMotion::main_loop( int useCAM )
{
    skelecton *ske = new skelecton();
    skelectons *skes = new skelectons();
    kalman *bone_kalmans = new kalman[MAX_COUNT];
    CvCapture* capture = 0;

    if( useCAM == 1 )
        capture = cvCaptureFromFile("e:\\mse\\movie\\capture4.avi");
    else if( useCAM == 2 )
        capture = cvCaptureFromCAM(-1);
    if( !capture )
    {
        fprintf(stderr, "Could not initialize capturing...\n");
        return -1;
    }
    printf( "Hot keys: \n"
            "\tESC - quit the program\n"
            "\tr - auto-initialize tracking\n"
            "\tc - delete all the points\n"
            "\tn - switch the \"night\" mode on/off\n"
            "\tp - pause on/off\n"
            "\n" );
    cvNamedWindow( "LkDemo", 1 );
    cvNamedWindow( "Contour", 1 );
    cvSetMouseCallback( "LkDemo", on_mouse, 0 );
    for(;;)
    {
        IplImage* frame = 0;
        int i, k, c;
        if( pause )
            frame = image;
        else
            frame = cvQueryFrame( capture );
        if( !frame ) break;
        if( !image )
        {
            /* allocate all the buffers */
            image_sz = cvGetSize(frame);
            image = cvCreateImage( image_sz, 8, 3 );
            image->origin = frame->origin;
            motion_track_img = cvCreateImage( image_sz, 8, 3 );
            motion_track_img->origin = frame->origin;
            grey = cvCreateImage( image_sz, 8, 1 );
            prev_grey = cvCreateImage( image_sz, 8, 1 );
            pyramid = cvCreateImage( image_sz, 8, 1 );
            prev_pyramid = cvCreateImage( image_sz, 8, 1 );
            points[0] = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0]));
            points[1] = (CvPoint2D32f*)cvAlloc(MAX_COUNT*sizeof(points[0][0]));
            status = (char*)cvAlloc(MAX_COUNT);
            flags = 0;
            CvSize sz = cvSize( image->width/2 & -1, image->height/2 & -1 );
            CvSize sz2 = cvSize( sz.width/2 & -1, sz.height/2 & -1 );
            abs_image = cvCreateImage( cvGetSize(image), 8, 1 );
            add_abs_image = cvCreateImage( sz, 8, 1 );
            add_abs_image2 = cvCreateImage( sz2, 8, 1 );
            dst = cvCreateImage( image_sz, 8, 3 );
            dst->origin = frame->origin;
            background_image = cvCreateImage( image_sz, 8, 1 );
            background_image->origin = frame->origin;
            storage = cvCreateMemStorage(0);
        }
        cvCopy( frame, image, 0 );
        cvCvtColor( image, grey, CV_BGR2GRAY );
        cvZero( motion_track_img );
        if( night_mode )
            cvZero( image );

        if( need_to_init )
        {
            // analyze the contour and start automatic joint calibration
            analyze_contour(grey);
            if( init_OK )
            {
                count = MAX_COUNT;
                cvFindCornerSubPix( grey, points[1], count,
                    cvSize(win_size,win_size), cvSize(-1,-1),
                    cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03));
                for( int s = 0; s < count; s++ )
                {
                    ske->CalcLengthRatio( s+1, points[1][s] );
                    bone_kalmans[s].init_kalman( points[1][s].x, 0, points[1][s].y, 0 );
                }
            }
        }
        else if( count > 0 )
        {
            cvCalcOpticalFlowPyrLK( prev_grey, grey, prev_pyramid, pyramid,
                points[0], points[1], count, cvSize(win_size,win_size), 3, status, 0,
                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03), flags );
            flags |= CV_LKFLOW_PYR_A_READY;
            for( i = k = 0; i < count; i++ )
            {
                if( add_remove_pt )
                {
                    double dx = pt.x - points[1][i].x;
                    double dy = pt.y - points[1][i].y;
                    if( dx*dx + dy*dy <= 25 )
                    {
                        add_remove_pt = 0;
                        continue;
                    }
                }

                if( !status[i] )
                    continue;

                points[1][k++] = points[1][i];
                ske->CorrectPosition( i+1, points[1][i], bone_kalmans[i], grey );
                cvCircle( image, cvPointFrom32f(points[1][i]), 3, CV_RGB(0,255,0), -1, 8, 0 );
            }
            count = k;
        }
        //
        skelecton *pske;
        pske = skes->get_previous_skelecton();
        if( pske != NULL ) ske->set_previous_skelecton(pske);
        ske->pgl = pgl;
        ske->drawOpenGL3D(image);
        //ske->draw2D(image);
        skes->add(ske);  // to do: no need to save every frame; save once per time unit instead
        if( add_remove_pt && count < MAX_COUNT )
        {
            points[1][count++] = cvPointTo32f(pt);
            cvFindCornerSubPix( grey, points[1] + count - 1, 1,
                cvSize(win_size,win_size), cvSize(-1,-1),
                cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,20,0.03));
            add_remove_pt = 0;
            bone_kalmans[count-1].init_kalman( points[1][count-1].x, 0, points[1][count-1].y, 0 );
            ske->CalcLengthRatio( count, points[1][count-1] );
        }
        CV_SWAP( prev_grey, grey, swap_temp );
        CV_SWAP( prev_pyramid, pyramid, swap_temp );
        CV_SWAP( points[0], points[1], swap_points );
        //need_to_init = 0;
        cvShowImage( "LkDemo", image );
        //cvShowImage( "MotionTrackDemo", motion_track_img );
        c = cvWaitKey(10);
        //printf("%c,%d",c,c);
        if( c == 27 )
            break;
        switch( c )
        {
        case 'r':
            need_to_init = 1;
            break;
        case 'c':
            count = 0;
            ske->reset();
            break;
        case 'n':
            night_mode ^= 1;
            break;
        case 'p':
            pause ^= 1;
            break;
        case 'o':
            ske->ske_pause = !ske->ske_pause;
            break;
        case 'i':
            ske->view_up();
            break;
        case 'k':
            ske->view_down();
            break;
        case 'j':
            ske->view_left();
            break;
        case 'l':
            ske->view_right();
            break;
        default:
            ;
        }
        frame_count++;
    }
    cvReleaseCapture( &capture );
    cvDestroyWindow("LkDemo");
    cvDestroyWindow("Contour");
    delete ske;
    ske = NULL;
    delete skes;
    skes = NULL;
    delete []bone_kalmans;
    return 0;
}
int HumanMotion::analyze_contour(IplImage* grey_image)
{
    getContour(grey_image);
    cvShowImage( "Contour", dst );
    return 0;
}
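// getContour: foreground extraction for auto-calibration.
// The background model is a running average updated as
//   background = background * n/(n+1) + frame * 1/(n+1)   (n = frame_count),
// which cvAddWeighted computes below. The absolute difference to the current frame
// is thresholded, closed with a rectangular structuring element (dilate then erode)
// to fill holes, and the resulting silhouette contour is approximated with
// cvApproxPoly. Only when the approximation collapses to exactly 8 vertices is the
// shape treated as a usable body outline and passed on to SegmentBody()/FindBody().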
int HumanMotion::getContour( IplImage* grey )
{
    double d_count = frame_count;
    cvAddWeighted( background_image, d_count/(d_count+1),
                   grey, 1/(d_count+1),
                   0, background_image );
    cvAbsDiff( grey, background_image, abs_image );
    cvThreshold( abs_image, abs_image, 50, 255, CV_THRESH_BINARY );
    //cvPyrDown(abs_image, add_abs_image, 7);
    //cvPyrDown(add_abs_image, add_abs_image2, 7);
    //cvCanny(add_abs_image2, add_abs_image2, 20.0, 20.0*3, 3);
    int pos = 4;
    const int element_shape = CV_SHAPE_RECT;
    element = cvCreateStructuringElementEx( pos*2+1, pos*2+1, pos, pos, element_shape, 0 );
    cvDilate( abs_image, abs_image, element, 1 );
    cvErode( abs_image, abs_image, element, 1 );
    cvReleaseStructuringElement(&element);
    cvFindContours( abs_image, storage, &contour, sizeof(CvContour),
        CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) );
    cvZero( dst );
    if( contour != NULL )
    {
        //int ApproxCount = image_sz.height / 30; // estimated simplification level, to be refined
        int ApproxCount = 14;
        contour = cvApproxPoly( contour, sizeof(CvContour), storage, CV_POLY_APPROX_DP, ApproxCount, 1 );
        CvSeq* contour2 = contour;
        int N = contour->total;
        CvSeqReader reader;
        int i;
        CvPoint pt;
        CvPoint pre_pt;
        //for (; contour != NULL; contour = contour->h_next)
        //{
        cvStartReadSeq(contour, &reader);
        CV_READ_SEQ_ELEM(pt, reader);
        pre_pt = pt;
        for (i = 1; i < N+1; i++)
        {
            CV_READ_SEQ_ELEM(pt, reader);
            cvLine( dst, pt, pre_pt, CV_RGB(0,255,0), 2, CV_AA, 0 );
            pre_pt = pt;
        }
        //}
        contour = contour2;
        if( contour != NULL )
        {
            // the simplified model has exactly 8 vertices
            if( N == 8 )
            {
                // cvShowImage( "Contour", dst );
                // cvWaitKey(0);
                SegmentBody(contour);
                FindBody();
                ClearBodyJoints();
            }
        }
    }

    return 0;
}
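// SegmentBody: walk the 8-vertex contour and classify its corners.
// For each vertex a short look-ahead of up to three following edges is read; consecutive
// edge vectors longer than StandBodyLength are compared with AngleBetweenVectors(), and
// the sign of their cross product decides whether the turn is inward or outward. An
// accumulated turn near -180 degrees is stored as a U-shaped joint (head, hands, feet),
// one near +180 degrees as an n-shaped joint (armpits, crotch), via save_Points().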
int HumanMotion::SegmentBody(CvSeq* contour)
{
    const float StandBodyLength = 1.0;
    const float StandArc = 90;
    const float nStandArc = 90;
    float l1, l2 = 0;
    CvPoint pt1, pt2;
    double total_arc = 0; // accumulated inward rotation angle
    double rarc12 = 0;
    double arc12 = 0;
    bool Haslkvec = false; // is there a previously recorded vector?
    bool HasFoundedJoint = false; // has a corner joint been found?
    CvSeqReader reader;
    CvSeqReader subReader;
    int N = contour->total;
    int i;
    CvPoint pt;
    CvPoint pre_pt;
    CvPoint pre_pre_pt;
    ujoints_count = 0;
    njoints_count = 0;
    cvStartReadSeq(contour, &reader);
    for (i = 0; i < N; i++)
    {
        CV_READ_SEQ_ELEM(pt, reader);
        pre_pt = pre_pre_pt = pt;
        //printf("in sub %d,%d \n",pt.x,pt.y);

        // compute the distance between the two points
        subReader = reader;
        Haslkvec = false;
        total_arc = 0;
        for( int j = 0; j < 3; j++ )
        {
            l1 = calc_2Point_Magnitude(pt, pre_pt);
            if (l1 > StandBodyLength)
            {
                // if a previous vector was recorded, check the angle range
                if (Haslkvec)
                {
                    Vector3f vt1 = Vector3f(pt2.x-pt1.x, pt2.y-pt1.y, 0);
                    Vector3f vt2 = Vector3f(pt1.x-pt.x, pt1.y-pt.y, 0);
                    arc12 = AngleBetweenVectors(vt1, vt2);
                    rarc12 = 180*arc12/pi;
                    Vector3f vtnor = Cross(vt1, vt2); // use the cross product to tell inward from outward rotation
                    if (vtnor.z < 0) total_arc -= rarc12;
                    else total_arc += rarc12;
                    // check the angle range for a U shape (around -200 to -160)
                    if (fabs(total_arc+180) < StandArc)
                    {
                        // record the position of the U shape
                        //draw_2Points(pt,pre_pt,pt1,pt2,CV_RGB(255,255,250));
                        save_Points(pt, pre_pt, pt1, pt2, U_TYPE);
                        HasFoundedJoint = true;
                    }
                    // check the angle range for an n shape (around 150 to 210)
                    if (fabs(total_arc-180) < nStandArc)
                    {
                        // record the position of the n-shaped point
                        //draw_2Points(pt,pre_pt,pt1,pt2,CV_RGB(230,255,0));
                        save_Points(pt, pre_pt, pt1, pt2, N_TYPE);
                        HasFoundedJoint = true;
                    }
                    if (HasFoundedJoint)
                    {
                        // both vectors recorded; clear the flags
                        Haslkvec = false;
                        total_arc = 0;
                        HasFoundedJoint = false;
                        break;
                    }
                }
                else
                {
                    // record the two points
                    pt1 = pt;
                    pt2 = pre_pt;
                    Haslkvec = true;
                }
            }
            pre_pre_pt = pre_pt;
            pre_pt = pt;
            CV_READ_SEQ_ELEM(pt, subReader);
        }
    }
    return 0;
}
float HumanMotion::calc_2Point_Magnitude(CvPoint pt1, CvPoint pt2)
{
    return (float)sqrt( pow((double)(pt1.x-pt2.x), 2) + pow((double)(pt1.y-pt2.y), 2) );
}
int HumanMotion::draw_2Points(CvPoint pt1, CvPoint pt2, CvPoint pt3, CvPoint pt4, CvScalar color)
{
    cvLine( dst, pt1, pt2, color, 1, CV_AA, 0 );
    cvLine( dst, pt3, pt4, color, 1, CV_AA, 0 );
    cvLine( dst, pt1, pt4, color, 1, CV_AA, 0 );
    cvLine( dst, pt2, pt3, color, 1, CV_AA, 0 );
    return 0;
}
// Check whether the quadrilateral formed by the two parallel segments lies on the same axis,
// i.e. a square-likeness test.
// Method: connect the midpoints of the two vectors, then take the angle between that
// connecting line and the longer vector; if the angle is within 20 degrees of 90 degrees
// the shape is considered a square.
bool HumanMotion::Is_Closed_Vectors(CvPoint pt1, CvPoint pt2, CvPoint pt3, CvPoint pt4)
{
    CvPoint pt5, pt6;
    const float StandSqArc = 40;
    pt5.x = (pt1.x + pt2.x) / 2;
    pt5.y = (pt1.y + pt2.y) / 2;
    pt6.x = (pt3.x + pt4.x) / 2;
    pt6.y = (pt3.y + pt4.y) / 2;
    Vector3f vt1 = Vector3f(pt1.x-pt2.x, pt1.y-pt2.y, 0);
    Vector3f vt2 = Vector3f(pt3.x-pt4.x, pt3.y-pt4.y, 0);
    Vector3f vtc = Vector3f(pt6.x-pt5.x, pt6.y-pt5.y, 0);
    Vector3f vtmax;
    if (Magnitude(vt1) > Magnitude(vt2))
        vtmax = vt1;
    else
        vtmax = vt2;
    double arc12 = AngleBetweenVectors(vtc, vtmax);
    double rarc12 = 180*arc12/pi;
    if (fabs(fabs(rarc12)-90) < StandSqArc)
        return true;
    else
        return false;
}
// Second rejection test:
// 1. If the two vectors have very different lengths (a ratio beyond 4x) they are invalid.
// 2. If the two vectors are badly offset from each other, i.e. a diagonal of the quadrilateral
//    is more than twice as long as the shorter vector, they are also rejected.
bool HumanMotion::Is_Apar_Vectors(CvPoint pt1, CvPoint pt2, CvPoint pt3, CvPoint pt4)
{
    bool result = false;
    const float StandLenRatio = 4.0;
    const float StandCrossLenRatio = 2.0;
    Vector3f vt1 = Vector3f(pt1.x-pt2.x, pt1.y-pt2.y, 0);
    Vector3f vt2 = Vector3f(pt3.x-pt4.x, pt3.y-pt4.y, 0);
    // 1. lengths differing by more than a factor of 4 are invalid
    double lenvct1, lenvct2, ratio12;
    lenvct1 = Magnitude(vt1);
    lenvct2 = Magnitude(vt2);
    ratio12 = lenvct1/lenvct2;
    if( ratio12 < StandLenRatio && ratio12 > 1/StandLenRatio )
        result = true;
    // 2. badly offset vectors: a diagonal longer than twice the shorter vector
    if( result )
    {
        Vector3f vtc1 = Vector3f(pt1.x-pt3.x, pt1.y-pt3.y, 0); // diagonal 1
        Vector3f vtc2 = Vector3f(pt2.x-pt4.x, pt2.y-pt4.y, 0); // diagonal 2
        double c1, c2, cross_ratio;
        double vct_min, cross_vct_max;
        c1 = Magnitude(vtc1);
        c2 = Magnitude(vtc2);
        if (lenvct1 > lenvct2)
            vct_min = lenvct2;
        else
            vct_min = lenvct1;
        if (c1 > c2)
            cross_vct_max = c1;
        else
            cross_vct_max = c2;
        cross_ratio = cross_vct_max/vct_min;
        if (cross_ratio < StandCrossLenRatio)
            result = true;
        else
            result = false;
    }
    return result;
}
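// FindBody: turn the classified corners into the 13 tracked joints.
// Requires exactly five U-joints and three n-joints. The n-joints are taken as right
// armpit, crotch and left armpit; the U-joint with the largest y is taken as the head
// and the remaining ones as hands and ankles. Neck and waist are interpolated on the
// crotch-to-head line (at 75% and 30%), knees and elbows are midpoints of the adjacent
// joints, and the result is written into points[1][0..12] in the fixed joint order used
// throughout this file.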
int HumanMotion::FindBody()
{
    init_OK = false;
    if( ujoints_count == 5 && njoints_count == 3 )
    {
        CvPoint pt1, pt2, pt3, pthead;
        // right armpit
        pt1 = njoints[0].pt;
        // crotch
        pt2 = njoints[1].pt;
        // left armpit
        pt3 = njoints[2].pt;
        // find the head joint: the U-joint with the largest y
        int Head_i = 0;
        int temp = 0;
        for( int i = 0; i < 5; i++ )
        {
            if (ujoints[i].pt.y > temp)
            {
                temp = ujoints[i].pt.y;
                Head_i = i;
            }
        }
        pthead = ujoints[Head_i].pt;
        CvPoint pt_neck;
        pt_neck.x = ( pthead.x - pt2.x )*0.75 + pt2.x;
        pt_neck.y = ( pthead.y - pt2.y )*0.75 + pt2.y;
        CvPoint pt_waist;
        pt_waist.x = ( pthead.x - pt2.x )*0.3 + pt2.x;
        pt_waist.y = ( pthead.y - pt2.y )*0.3 + pt2.y;
        CvPoint pt_lhand;
        pt_lhand = ujoints[3].pt;
        CvPoint pt_rhand;
        pt_rhand = ujoints[0].pt;
        CvPoint pt_rAnkle;
        pt_rAnkle = ujoints[1].pt;
        CvPoint pt_lAnkle;
        pt_lAnkle = ujoints[2].pt;
        CvPoint pt_rknee;
        pt_rknee.x = ( pt_waist.x + pt_rAnkle.x ) / 2;
        pt_rknee.y = ( pt_waist.y + pt_rAnkle.y ) / 2;
        CvPoint pt_lknee;
        pt_lknee.x = ( pt_waist.x + pt_lAnkle.x ) / 2;
        pt_lknee.y = ( pt_waist.y + pt_lAnkle.y ) / 2;
        // shoulders: intersection of the hand-to-head line with the neck height
        CvPoint pt_lshoulder;
        pt_lshoulder.x = ( pthead.x - pt_lhand.x )*(pt_neck.y - pt_lhand.y)/(pthead.y - pt_lhand.y) + pt_lhand.x;
        pt_lshoulder.y = pt_neck.y;
        CvPoint pt_rshoulder;
        pt_rshoulder.x = ( pthead.x - pt_rhand.x )*(pt_neck.y - pt_rhand.y)/(pthead.y - pt_rhand.y) + pt_rhand.x;
        pt_rshoulder.y = pt_neck.y;
        CvPoint pt_relbow;
        pt_relbow.x = ( pt_rshoulder.x + pt_rhand.x ) / 2;
        pt_relbow.y = ( pt_rshoulder.y + pt_rhand.y ) / 2;
        CvPoint pt_lelbow;
        pt_lelbow.x = ( pt_lshoulder.x + pt_lhand.x ) / 2;
        pt_lelbow.y = ( pt_lshoulder.y + pt_lhand.y ) / 2;
        ZoomLine(pthead, pt_neck);
        ZoomLine(pt_neck, pt_waist);
        ZoomLine(pt_neck, pt_rshoulder);
        ZoomLine(pt_neck, pt_lshoulder);
        ZoomLine(pt_rshoulder, pt_relbow);
        ZoomLine(pt_lshoulder, pt_lelbow);
        ZoomLine(pt_relbow, pt_rhand);
        ZoomLine(pt_lelbow, pt_lhand);
        ZoomLine(pt_waist, pt_rAnkle);
        ZoomLine(pt_waist, pt_lAnkle);
        points[1][0] = Point_to_32f(pt_neck);
        points[1][1] = Point_to_32f(pthead);
        points[1][2] = Point_to_32f(pt_rshoulder);
        points[1][3] = Point_to_32f(pt_lshoulder);
        points[1][4] = Point_to_32f(pt_relbow);
        points[1][5] = Point_to_32f(pt_lelbow);
        points[1][6] = Point_to_32f(pt_rhand);
        points[1][7] = Point_to_32f(pt_lhand);
        points[1][8] = Point_to_32f(pt_waist);
        points[1][9] = Point_to_32f(pt_rknee);
        points[1][10] = Point_to_32f(pt_lknee);
        points[1][11] = Point_to_32f(pt_rAnkle);
        points[1][12] = Point_to_32f(pt_lAnkle);
        need_to_init = 0;
        init_OK = true;
        cvShowImage( "LkDemo", image );
        cvWaitKey(0);
    }
    return 0;
}
int HumanMotion::ClearBodyJoints()
{
    for( int i = 0; i < 5; i++ ) ujoints[i].HasSet = false;
    for( int i = 0; i < 3; i++ ) njoints[i].HasSet = false;
    ujoints_count = 0;
    njoints_count = 0;
    return 0;
}
int HumanMotion::save_Points(CvPoint pt1, CvPoint pt2, CvPoint pt3, CvPoint pt4, JointType upoint_type)
{
    if( upoint_type == U_TYPE )
    {
        ujoints_count = ujoints_count % 5;
        ujoints[ujoints_count].next_pt = pt1;
        ujoints[ujoints_count].pt = pt2;
        ujoints[ujoints_count].pre_pt = pt4;
        ujoints[ujoints_count].HasSet = true;
        ujoints_count++;
    }
    if( upoint_type == N_TYPE )
    {
        njoints_count = njoints_count % 3;
        njoints[njoints_count].next_pt = pt1;
        njoints[njoints_count].pt = pt2;
        njoints[njoints_count].pre_pt = pt4;
        njoints[njoints_count].HasSet = true;
        njoints_count++;
    }
    return 0;
}
int HumanMotion::ZoomLine(CvPoint pt1, CvPoint pt2)
{
    CvPoint pts1, pts2;
    int zoom = 1;
    pts1.x = pt1.x * zoom;
    pts1.y = pt1.y * zoom;
    pts2.x = pt2.x * zoom;
    pts2.y = pt2.y * zoom;
    cvCircle( image, pts1, 5, CV_RGB(0,233,250), 3, CV_AA, 0 );
    cvCircle( image, pts2, 5, CV_RGB(0,233,250), 3, CV_AA, 0 );
    cvLine( image, pts1, pts2, CV_RGB(240,210,0), 1, CV_AA, 0 );
    cvShowImage( "LkDemo", image );
    cvWaitKey(0);
    return 0;
}
CvPoint2D32f HumanMotion::Point_to_32f(CvPoint pt)
{
    CvPoint2D32f pt32;
    int zoom = 1;
    pt32.x = pt.x * zoom;
    pt32.y = pt.y * zoom;
    return pt32;
}
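// Usage sketch (assumed; HumanMotion.h and the skelecton/kalman/OpenGL classes are not
// part of this listing):
//
//   HumanMotion hm;
//   hm.Start();   // Start() calls main_loop(1), i.e. it reads the hard-coded AVI path
//                 // above; call main_loop(2) instead to track from the default camera.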