/*****************************************************************************
 *
 *  OpenNI 1.0 Alpha
 *  Copyright (C) 2010 PrimeSense Ltd.
 *
 *  This file is part of OpenNI.
 *
 *  OpenNI is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU Lesser General Public License as published
 *  by the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  OpenNI is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *  GNU Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public License
 *  along with OpenNI. If not, see <http://www.gnu.org/licenses/>.
 *
 *****************************************************************************/


//---------------------------------------------------------------------------
// Includes
//---------------------------------------------------------------------------
#include "cob_people_detection/SceneDrawer.h"

extern xn::UserGenerator g_UserGenerator;
extern xn::DepthGenerator g_DepthGenerator;

#define MAX_DEPTH 10000
float g_pDepthHist[MAX_DEPTH];

// Returns the smallest power of two that is greater than or equal to n (for n >= 2).
unsigned int getClosestPowerOfTwo(unsigned int n)
{
    unsigned int m = 2;
    while (m < n)
        m <<= 1;

    return m;
}

// Color palette used to distinguish the segmented users (RGB, range 0..1).
XnFloat Colors[][3] =
{
    {0, 1, 1},
    {0, 0, 1},
    {0, 1, 0},
    {1, 1, 0},
    {1, 0, 0},
    {1, .5, 0},
    {.5, 1, 0},
    {0, .5, 1},
    {.5, 0, 1},
    {1, 1, .5},
    {1, 1, 1}
};
XnUInt32 nColors = 10;


// Converts the positions of two skeleton joints of a tracked user from
// real-world to projective coordinates. The projected points are not drawn here.
void DrawLimb(XnUserID player, XnSkeletonJoint eJoint1, XnSkeletonJoint eJoint2)
{
    if (!g_UserGenerator.GetSkeletonCap().IsTracking(player))
    {
        printf("not tracked!\n");
        return;
    }

    XnSkeletonJointPosition joint1, joint2;
    g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(player, eJoint1, joint1);
    g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(player, eJoint2, joint2);

    if (joint1.fConfidence < 0.5 || joint2.fConfidence < 0.5)
    {
        return;
    }

    XnPoint3D pt[2];
    pt[0] = joint1.position;
    pt[1] = joint2.position;

    g_DepthGenerator.ConvertRealWorldToProjective(2, pt, pt);
}


// Renders the user segmentation into a BGR image (one color per labeled user,
// black background) and publishes it on the given image_transport publisher.
void PublishPeopleImage(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd, image_transport::Publisher& pub)
{
    const XnDepthPixel* pDepth = dmd.Data();
    const XnLabel* pLabels = smd.Data();
    unsigned int nValue = 0;
    XnUInt16 g_nXRes = dmd.XRes();
    XnUInt16 g_nYRes = dmd.YRes();
    cv::Mat peopleSegmentation(g_nYRes, g_nXRes, CV_8UC3);

    // Prepare the texture map
    for (unsigned int nY = 0; nY < g_nYRes; nY++)
    {
        uchar* pDestImage = peopleSegmentation.ptr(nY);
        for (unsigned int nX = 0; nX < g_nXRes; nX++)
        {
            pDestImage[0] = 0;
            pDestImage[1] = 0;
            pDestImage[2] = 0;
            if (*pLabels != 0)
            {
                nValue = *pDepth;
                XnLabel label = *pLabels;
                XnUInt32 nColorID = label % nColors;

                if (nValue != 0)
                {
                    pDestImage[0] = 255 * Colors[nColorID][0];
                    pDestImage[1] = 255 * Colors[nColorID][1];
                    pDestImage[2] = 255 * Colors[nColorID][2];
                }
            }

            pDepth++;
            pLabels++;
            pDestImage += 3;
        }
    }

    // todo: stop and start with respect to odometry: segmentation works best if the robot is standing still

    // publish
    try
    {
        IplImage img = (IplImage)peopleSegmentation;
        sensor_msgs::ImagePtr msg = sensor_msgs::CvBridge::cvToImgMsg(&img, "bgr8");
        msg->header.stamp = ros::Time::now();
        pub.publish(msg);
    }
    catch (sensor_msgs::CvBridgeException error)
    {
        ROS_ERROR("[openni_tracker] Could not convert IplImage to ROS message");
    }

    // cv_bridge::CvImage bridgeImage; did not work
    // bridgeImage.image = peopleSegmentation.clone();
    // sensor_msgs::ImagePtr msg = bridgeImage.toImageMsg();
    // pub.publish(msg);


    // display for checking the output
    // cv::namedWindow("Test");
    // imshow("Test", peopleSegmentation);
    // uchar key = cv::waitKey(10);
    // if (key == 'r')
    // {
    //     g_UserGenerator.StopGenerating();
    //     std::cout << "stop\n";
    // }
    // if (key == 'c')
    // {
    //     g_UserGenerator.StartGenerating();
    //     std::cout << "start\n";
    // }
}
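
A side note on the commented-out cv_bridge::CvImage attempt above: it likely failed because neither the encoding nor the header was filled in before toImageMsg() was called. Below is a minimal, untested sketch of how that publish path could look with the newer cv_bridge API (ROS1); the helper name publishSegmentation is made up for illustration and is not part of this file.

// Sketch only, assuming a standard ROS1 cv_bridge installation.
#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>
#include <opencv2/core/core.hpp>

// Hypothetical helper: publishes a CV_8UC3 (BGR) segmentation image.
void publishSegmentation(const cv::Mat& peopleSegmentation, image_transport::Publisher& pub)
{
    cv_bridge::CvImage bridgeImage;
    bridgeImage.header.stamp = ros::Time::now();  // timestamp the outgoing message
    bridgeImage.encoding = "bgr8";                // must match the cv::Mat layout (CV_8UC3, BGR)
    bridgeImage.image = peopleSegmentation;       // no clone() needed; toImageMsg() copies the data
    pub.publish(bridgeImage.toImageMsg());
}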