#ifndef __APPLE__
#include <GL/glu.h>
#else
#include <OpenGL/gl.h>
#endif
#include <rtm/CorbaNaming.h>
#include <hrpModel/ModelLoaderUtil.h>
#include "hrpsys/util/Project.h"
#include "hrpsys/util/VectorConvert.h"
#include "hrpsys/util/GLcamera.h"
#include "hrpsys/util/GLbody.h"
#include "hrpsys/util/GLlink.h"
#include "hrpsys/util/GLutil.h"
#include "VirtualCamera.h"
#include "RTCGLbody.h"
#include "GLscene.h"
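// Module specification: name/value pairs read by the RTC manager when the
// VirtualCamera factory is registered (see VirtualCameraInit() below).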
static const char* virtualcamera_spec[] =
{
    "implementation_id", "VirtualCamera",
    "type_name",         "VirtualCamera",
    "description",       "virtual camera component",
    "version",           HRPSYS_PACKAGE_VERSION,
    "vendor",            "AIST",
    "category",          "example",
    "activity_type",     "DataFlowComponent",
    "max_instance",      "10",
    "language",          "C++",
    "lang_type",         "compile",

    "conf.default.rangerMaxAngle", "0.25",
    "conf.default.rangerMinAngle", "-0.25",
    "conf.default.rangerAngularRes", "0.01",
    "conf.default.rangerMaxRange", "5.0",
    "conf.default.rangerMinRange", "0.5",
    "conf.default.generateRange", "1",
    "conf.default.generatePointCloud", "0",
    "conf.default.generatePointCloudStep", "1",
    "conf.default.pcFormat", "xyz",
    "conf.default.generateMovie", "0",
    "conf.default.debugLevel", "0",
    "conf.default.project", "",
    "conf.default.camera", "",

    ""
};
VirtualCamera::VirtualCamera(RTC::Manager* manager)
    : RTC::DataFlowComponentBase(manager),
      m_sceneStateIn("state", m_sceneState),
      m_basePosIn("basePos", m_basePos),
      m_baseRpyIn("baseRpy", m_baseRpy),
      m_qIn("q", m_q),
      m_imageOut("image", m_image),
      m_rangeOut("range", m_range),
      m_cloudOut("cloud", m_cloud),
      m_poseSensorOut("poseSensor", m_poseSensor),
      m_scene(&m_log),
      m_window(&m_scene, &m_log),
      m_camera(NULL),
      m_generateRange(true),
      m_generatePointCloud(false),
      m_generateMovie(false),
      m_isGeneratingMovie(false),
      m_debugLevel(0),
      dummy(0)
{
    m_scene.showFloorGrid(false);
    m_scene.showInfo(false);
}

VirtualCamera::~VirtualCamera()
{
}
RTC::ReturnCode_t VirtualCamera::onInitialize()
{
    std::cout << m_profile.instance_name << ": onInitialize()" << std::endl;

    RTC::Properties& ref = getProperties();
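    // Bind configuration variables to members; the defaults mirror the
    // conf.default.* entries in virtualcamera_spec above.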
    bindParameter("rangerMaxAngle", m_range.config.maxAngle, "0.25");
    bindParameter("rangerMinAngle", m_range.config.minAngle, "-0.25");
    bindParameter("rangerAngularRes", m_range.config.angularRes, "0.01");
    bindParameter("rangerMaxRange", m_range.config.maxRange, "5.0");
    bindParameter("rangerMinRange", m_range.config.minRange, "0.5");
    bindParameter("generateRange", m_generateRange, "1");
    bindParameter("generatePointCloud", m_generatePointCloud, "0");
    bindParameter("generatePointCloudStep", m_generatePointCloudStep, "1");
    bindParameter("pcFormat", m_pcFormat, ref["conf.default.pcFormat"].c_str());
    bindParameter("generateMovie", m_generateMovie, "0");
    bindParameter("debugLevel", m_debugLevel, "0");
    bindParameter("project", m_projectName, ref["conf.default.project"].c_str());
    bindParameter("camera", m_cameraName, ref["conf.default.camera"].c_str());
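    // Register data ports: joint/base state comes in; the rendered image,
    // 2D range scan, point cloud, and camera pose go out.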
    addInPort("state", m_sceneStateIn);
    addInPort("basePos", m_basePosIn);
    addInPort("baseRpy", m_baseRpyIn);
    addInPort("q", m_qIn);

    addOutPort("image", m_imageOut);
    addOutPort("range", m_rangeOut);
    addOutPort("cloud", m_cloudOut);
    addOutPort("poseSensor", m_poseSensorOut);
    return RTC::RTC_OK;
}
RTC::ReturnCode_t VirtualCamera::onActivated(RTC::UniqueId ec_id)
{
    std::cout << m_profile.instance_name << ": onActivated(" << ec_id << ")" << std::endl;
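    // Locate the OpenHRP ModelLoader through the CORBA naming service:
    // try localhost:2809 first; if it is not found there, re-initialize the
    // naming helper with the first name server listed in the manager's
    // corba.nameservers setting (used by getModelLoader() below).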
    RTC::Manager& rtcManager = RTC::Manager::instance();
    RTC::CorbaNaming naming(rtcManager.getORB(), "localhost:2809");
    CORBA::Object_ptr obj = naming.resolve("ModelLoader");
    if (!CORBA::is_nil(obj)){
        std::cout << "found ModelLoader on localhost:2809" << std::endl;
    }else{
        std::string nameServer = rtcManager.getConfig()["corba.nameservers"];
        std::string::size_type comPos = nameServer.find(",");
        if (comPos == std::string::npos){
            comPos = nameServer.length();
        }
        nameServer = nameServer.substr(0, comPos);
        naming.init(nameServer.c_str());
    }
    if (m_projectName == ""){
        std::cerr << "Project file is not specified." << std::endl;
        return RTC::RTC_ERROR;
    }

    Project prj;
    if (!prj.parse(m_projectName)) return RTC::RTC_ERROR;
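    // The "camera" configuration value has the form "<body name>:<camera name>".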
    std::string::size_type pos = m_cameraName.find(':', 0);
    std::string bodyName, cameraName;
    if (pos == std::string::npos){
        std::cerr << "can't find a separator in the camera name" << std::endl;
        return RTC::RTC_ERROR;
    }else{
        bodyName = m_cameraName.substr(0, pos);
        cameraName = m_cameraName.substr(pos+1);
        std::cout << "body:" << bodyName << ", camera:" << cameraName << std::endl;
    }
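    // Load every model referenced by the project file through the ModelLoader.
    // For the body that carries the virtual camera, scan its links for a
    // "Vision" sensor with the requested name and take the image size from the
    // sensor's spec values (width and height at indices 4 and 5).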
    std::vector<std::pair<std::string, OpenHRP::BodyInfo_var> > binfos;
    int w=0, h=0;
    OpenHRP::ModelLoader_var ml = hrp::getModelLoader(CosNaming::NamingContext::_duplicate(naming.getRootContext()));
    for (std::map<std::string, ModelItem>::iterator it=prj.models().begin();
         it != prj.models().end(); it++){
        OpenHRP::BodyInfo_var binfo;
        OpenHRP::ModelLoader::ModelLoadOption opt;
        opt.readImage = true;
        opt.AABBdata.length(0);
        opt.AABBtype = OpenHRP::ModelLoader::AABB_NUM;
        binfo = ml->getBodyInfoEx(it->second.url.c_str(), opt);
        if (CORBA::is_nil(binfo)){
            std::cerr << "failed to load model[" << it->second.url << "]"
                      << std::endl;
            return RTC::RTC_ERROR;
        }

        if (it->first == bodyName){
            OpenHRP::LinkInfoSequence_var lis = binfo->links();
            for (unsigned int i=0; i<lis->length(); i++){
                const OpenHRP::SensorInfoSequence& sensors = lis[i].sensors;
                for (unsigned int j=0; j<sensors.length(); j++){
                    const OpenHRP::SensorInfo& si = sensors[j];
                    std::string type(si.type);
                    std::string name(si.name);
                    if (type == "Vision" && name == cameraName){
                        w = si.specValues[4];
                        h = si.specValues[5];
                        break;
                    }
                }
            }
            if (!w || !h){
                std::cout << "invalid image size:" << w << "x" << h << std::endl;
                return RTC::RTC_ERROR;
            }
        }
        binfos.push_back(std::make_pair(it->first, binfo));
    }
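    // Create a GL window sized to the camera image, wrap each loaded model in
    // a GLbody/RTCGLbody pair, add it to the scene, and remember which body
    // carries the virtual camera.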
    m_window.init(w,h,false);
    RTCGLbody *robot=NULL;
    for (unsigned int i=0; i<binfos.size(); i++){
        GLbody *glbody = new GLbody();
        hrp::BodyPtr body(glbody);
        hrp::loadBodyFromBodyInfo(body, binfos[i].second, false, GLlinkFactory);
        loadShapeFromBodyInfo(glbody, binfos[i].second);
        body->setName(binfos[i].first);
        m_scene.WorldBase::addBody(body);
        RTCGLbody *rtcglbody = new RTCGLbody(glbody, this);
        m_bodies[binfos[i].first] = rtcglbody;
        if (binfos[i].first == bodyName){
            robot = rtcglbody;
        }
    }
    if (!robot) {
        std::cerr << "can't find a robot named " << bodyName << std::endl;
        return RTC::RTC_ERROR;
    }
    m_camera = robot->body()->findCamera(cameraName.c_str());
    if (!m_camera){
        std::cerr << "VirtualCamera: can't find camera("
                  << cameraName << ")" << std::endl;
        return RTC::RTC_ERROR;
    }
    m_scene.setCamera(m_camera);
    m_image.data.image.width = m_camera->width();
    m_image.data.image.height = m_camera->height();
    m_image.data.image.format = Img::CF_RGB;
    m_image.data.image.raw_data.length(m_image.data.image.width*m_image.data.image.height*3);

    return RTC::RTC_OK;
}
RTC::ReturnCode_t VirtualCamera::onDeactivated(RTC::UniqueId ec_id)
{
    std::cout << m_profile.instance_name << ": onDeactivated(" << ec_id << ")" << std::endl;
    return RTC::RTC_OK;
}
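// Read the rendered frame back from the GL back buffer into o_buffer one row
// at a time, flipping vertically: OpenGL's origin is the bottom-left corner
// while the image buffer is stored top-down.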
void capture(int w, int h, unsigned char *o_buffer)
{
    glReadBuffer(GL_BACK);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);
    for (int i=0; i<h; i++){
        glReadPixels(0, (h-1-i), w, 1, GL_RGB, GL_UNSIGNED_BYTE,
                     o_buffer + i*3*w);
    }
}
RTC::ReturnCode_t VirtualCamera::onExecute(RTC::UniqueId ec_id)
{
    if (m_debugLevel > 0) std::cout << m_profile.instance_name << ": onExecute(" << ec_id << ")" << std::endl;
    coil::TimeValue t1(coil::gettimeofday());
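    // Consume only the newest sample on each InPort. Scene state updates every
    // registered body; basePos/baseRpy/q update only the body that carries the
    // virtual camera.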
    if (m_sceneStateIn.isNew()){
        do{
            m_sceneStateIn.read();
        }while(m_sceneStateIn.isNew());
        for (unsigned int i=0; i<m_sceneState.states.length(); i++){
            const OpenHRP::RobotState& state = m_sceneState.states[i];
            std::string name(state.name);
            RTCGLbody *rtcglb=m_bodies[name];
            if (rtcglb){
                GLbody *body = rtcglb->body();
                body->setPosition(state.basePose.position.x,
                                  state.basePose.position.y,
                                  state.basePose.position.z);
                body->setRotation(state.basePose.orientation.r,
                                  state.basePose.orientation.p,
                                  state.basePose.orientation.y);
                body->setPosture(state.q.get_buffer());
            }
        }
    }
    GLbody *body = dynamic_cast<GLbody *>(m_camera->link()->body);
    if (m_basePosIn.isNew()){
        do{
            m_basePosIn.read();
        }while(m_basePosIn.isNew());
        body->setPosition(m_basePos.data.x, m_basePos.data.y, m_basePos.data.z);
    }
    if (m_baseRpyIn.isNew()){
        do{
            m_baseRpyIn.read();
        }while(m_baseRpyIn.isNew());
        body->setRotation(m_baseRpy.data.r, m_baseRpy.data.p, m_baseRpy.data.y);
    }
    if (m_qIn.isNew()){
        do{
            m_qIn.read();
        }while(m_qIn.isNew());
        body->setPosture(m_q.data.get_buffer());
    }

    for (std::map<std::string, RTCGLbody *>::iterator it=m_bodies.begin();
         it != m_bodies.end(); it++){
        it->second->input();
    }
    coil::TimeValue t6(coil::gettimeofday());
    m_window.draw();
    m_window.swapBuffers();
    capture(m_camera->width(), m_camera->height(), m_image.data.image.raw_data.get_buffer());
    coil::TimeValue t7(coil::gettimeofday());
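    // The camera transform is a 4x4 column-major (OpenGL style) matrix:
    // elements 12-14 hold the translation and the upper-left 3x3 block holds
    // the rotation, which is converted to roll/pitch/yaw for the pose output.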
    double *T = m_camera->getAbsTransform();
    hrp::Vector3 p;
    p[0] = T[12];
    p[1] = T[13];
    p[2] = T[14];
    hrp::Matrix33 R;
    R(0,0) = T[0]; R(0,1) = T[4]; R(0,2) = T[8];
    R(1,0) = T[1]; R(1,1) = T[5]; R(1,2) = T[9];
    R(2,0) = T[2]; R(2,1) = T[6]; R(2,2) = T[10];
    hrp::Vector3 rpy = hrp::rpyFromRot(R);
    m_poseSensor.data.position.x = p[0];
    m_poseSensor.data.position.y = p[1];
    m_poseSensor.data.position.z = p[2];
    m_poseSensor.data.orientation.r = rpy[0];
    m_poseSensor.data.orientation.p = rpy[1];
    m_poseSensor.data.orientation.y = rpy[2];
    coil::TimeValue t2(coil::gettimeofday());
    if (m_generateRange) setupRangeData();
    coil::TimeValue t3(coil::gettimeofday());
    if (m_generatePointCloud) setupPointCloud();
    coil::TimeValue t4(coil::gettimeofday());
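    // Movie generation uses the legacy OpenCV C API: frames are converted from
    // RGB to OpenCV's BGR order and appended to an AVI file (DIVX codec,
    // 10 fps) named after the component instance. The writer is released as
    // soon as generateMovie is switched off.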
    if (m_generateMovie){
        if (!m_isGeneratingMovie){
            std::string fname(m_profile.instance_name);
            fname += ".avi";
            m_videoWriter = cvCreateVideoWriter(
                fname.c_str(),
                CV_FOURCC('D','I','V','X'),
                10,
                cvSize(m_image.data.image.width, m_image.data.image.height));
            m_cvImage = cvCreateImage(
                cvSize(m_image.data.image.width, m_image.data.image.height),
                IPL_DEPTH_8U, 3);
            m_isGeneratingMovie = true;
        }
        // RGB -> BGR
        char *dst = m_cvImage->imageData;
        for (int i=0; i<m_image.data.image.width*m_image.data.image.height; i++){
            dst[i*3  ] = m_image.data.image.raw_data[i*3+2];
            dst[i*3+1] = m_image.data.image.raw_data[i*3+1];
            dst[i*3+2] = m_image.data.image.raw_data[i*3  ];
        }
        cvWriteFrame(m_videoWriter, m_cvImage);
    }else{
        if (m_isGeneratingMovie){
            cvReleaseVideoWriter(&m_videoWriter);
            cvReleaseImage(&m_cvImage);
            m_isGeneratingMovie = false;
        }
    }
    m_imageOut.write();
    if (m_generateRange) m_rangeOut.write();
    if (m_generatePointCloud) m_cloudOut.write();
    m_poseSensorOut.write();
    coil::TimeValue t5(coil::gettimeofday());
    if (m_debugLevel > 0){
        coil::TimeValue dt;
        dt = t5-t1;
        std::cout << "VirtualCamera::onExecute() : total:"
                  << dt.sec()*1e3+dt.usec()/1e3;
        dt = t7-t6;
        std::cout << ", render:"
                  << dt.sec()*1e3+dt.usec()/1e3;
        if (m_generateRange){
            dt = t3 - t2;
            std::cout << ", range2d:" << dt.sec()*1e3+dt.usec()/1e3;
        }
        if (m_generatePointCloud){
            dt = t4 - t3;
            std::cout << ", range3d:" << dt.sec()*1e3+dt.usec()/1e3;
        }
        std::cout << "[ms]" << std::endl;
    }

    return RTC::RTC_OK;
}
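// Simulate a 2D laser range finder from the horizontal scanline in the middle
// of the depth buffer. The horizontal field of view is derived from the
// camera's vertical FOV and aspect ratio. THETA(x) is the bearing of pixel
// column x; RANGE(d,th) linearizes the non-linear depth-buffer value d into a
// distance along the viewing axis and divides by cos(th) to get the distance
// along the ray. The pixel samples are then resampled onto the configured
// angular grid (minAngle..maxAngle at angularRes), interpolating between
// neighbouring rays except when the angle alpha suggests the two samples
// straddle a depth discontinuity (then the angularly closer sample is used),
// and readings outside [minRange, maxRange] are reported as maxRange.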
void VirtualCamera::setupRangeData()
{
    int w = m_camera->width();
    int h = m_camera->height();
    std::vector<float> depth(w);
    glReadPixels(0, h/2, w, 1, GL_DEPTH_COMPONENT, GL_FLOAT, &depth[0]);
    double far = m_camera->far();
    double near = m_camera->near();
    double fovx = 2*atan(w*tan(m_camera->fovy()/2)/h);
    RangerConfig &rc = m_range.config;
    double max = ((int)(fovx/2/rc.angularRes))*rc.angularRes;
    if (rc.maxAngle > max) rc.maxAngle = max;
    if (rc.minAngle < -max) rc.minAngle = -max;
    unsigned int nrange = round((rc.maxAngle - rc.minAngle)/rc.angularRes)+1;

    m_range.ranges.length(nrange);
#define THETA(x) (-atan(((x)-w/2)*2*tan(fovx/2)/w))
#define RANGE(d,th) (-far*near/((d)*(far-near)-far)/cos(th))
    double dth, alpha, th, th_old = THETA(w-1);
    double r, r_old = RANGE(depth[w-1], th_old);
    int idx = w-2;
    double angle;
    for (unsigned int i=0; i<nrange; i++){
        angle = rc.minAngle + rc.angularRes*i;
        while(idx >= 0){
            th = THETA(idx);
            r = RANGE(depth[idx], th);
            idx--;
            if (th > angle){
                dth = th - th_old;
                alpha = atan2(r*sin(dth), fabs(r_old - r*cos(dth)));
                if (r != 0 && r_old != 0 && alpha > 0.01){
                    // interpolate between the two neighbouring rays
                    m_range.ranges[i] = ((th - angle)*r_old + (angle - th_old)*r)/dth;
                }else if (th - angle < dth/2){
                    m_range.ranges[i] = r;
                }else{
                    m_range.ranges[i] = r_old;
                }
                if (m_range.ranges[i] > rc.maxRange
                    || m_range.ranges[i] < rc.minRange){
                    m_range.ranges[i] = rc.maxRange;
                }
                th_old = th;
                r_old = r;
                break;
            }
            th_old = th;
            r_old = r;
        }
#if 0
        std::cout << angle << " " << depth[idx] << " " << m_range.ranges[i] << " "
                  << m_range.ranges[i]*cos(angle) << " " << m_range.ranges[i]*sin(angle) << std::endl;
#endif
    }
}
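// Build a point cloud from the full depth buffer. zs is the focal length in
// pixels, so a pixel (j,i) with linearized depth z back-projects to
// x = -(j - w/2)*z/zs, y = -(i - h/2)*z/zs in the camera frame (z is negative
// in front of the camera, following OpenGL's convention). Depth-buffer values
// of exactly 1.0 (nothing rendered) are skipped, and each point occupies a
// 16-byte stride: x, y, z as float32 plus optional packed RGB.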
void VirtualCamera::setupPointCloud()
{
    int w = m_camera->width();
    int h = m_camera->height();
    std::vector<float> depth(w*h);
    m_cloud.width = w;
    m_cloud.height = h;
    m_cloud.type = m_pcFormat.c_str();
    bool colored = false;
    if (m_pcFormat == "xyz"){
        m_cloud.fields.length(3);
    }else if (m_pcFormat == "xyzrgb"){
        m_cloud.fields.length(6);
        colored = true;
    }else{
        std::cerr << "unknown point cloud format:[" << m_pcFormat << "]" << std::endl;
    }
    m_cloud.fields[0].name = "x";
    m_cloud.fields[0].offset = 0;
    m_cloud.fields[0].data_type = PointCloudTypes::FLOAT32;
    m_cloud.fields[0].count = 4;
    m_cloud.fields[1].name = "y";
    m_cloud.fields[1].offset = 4;
    m_cloud.fields[1].data_type = PointCloudTypes::FLOAT32;
    m_cloud.fields[1].count = 4;
    m_cloud.fields[2].name = "z";
    m_cloud.fields[2].offset = 8;
    m_cloud.fields[2].data_type = PointCloudTypes::FLOAT32;
    m_cloud.fields[2].count = 4;
    if (m_pcFormat == "xyzrgb"){
        m_cloud.fields[3].name = "r";
        m_cloud.fields[3].offset = 12;
        m_cloud.fields[3].data_type = PointCloudTypes::UINT8;
        m_cloud.fields[3].count = 1;
        m_cloud.fields[4].name = "g";
        m_cloud.fields[4].offset = 13;
        m_cloud.fields[4].data_type = PointCloudTypes::UINT8;
        m_cloud.fields[4].count = 1;
        m_cloud.fields[5].name = "b";
        m_cloud.fields[5].offset = 14;
        m_cloud.fields[5].data_type = PointCloudTypes::UINT8;
        m_cloud.fields[5].count = 1;
    }
    m_cloud.is_bigendian = false;
    m_cloud.point_step = 16;
    m_cloud.data.length(w*h*m_cloud.point_step);
    m_cloud.row_step = m_cloud.point_step*w;
    m_cloud.is_dense = true;
    double far = m_camera->far();
    double near = m_camera->near();
    double fovx = 2*atan(w*tan(m_camera->fovy()/2)/h);
    double zs = w/(2*tan(fovx/2));
    unsigned int npoints=0;
    float *ptr = (float *)m_cloud.data.get_buffer();
    unsigned char *rgb = m_image.data.image.raw_data.get_buffer();
    glReadPixels(0, 0, w, h, GL_DEPTH_COMPONENT, GL_FLOAT, &depth[0]);
    for (int i=0; i<h; i+=m_generatePointCloudStep){
        for (int j=0; j<w; j+=m_generatePointCloudStep){
            float d = depth[i*w+j];
            if (d == 1.0) {
                continue; // nothing was rendered at this pixel
            }
            ptr[2] = far*near/(d*(far-near)-far);
            ptr[0] = -(j-w/2)*ptr[2]/zs;
            ptr[1] = -(i-h/2)*ptr[2]/zs;
            if (colored){
                unsigned char *c = (unsigned char *)(ptr + 3);
                int offset = ((h-1-i)*w+j)*3;
                c[0] = rgb[offset];
                c[1] = rgb[offset+1];
                c[2] = rgb[offset+2];
            }
            ptr += 4;
            npoints++;
        }
    }
    m_cloud.data.length(npoints*m_cloud.point_step);
}
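// Factory registration entry point: builds the component profile from
// virtualcamera_spec and registers the VirtualCamera factory with the manager
// (typically invoked when this component module is loaded).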
extern "C"
{
    void VirtualCameraInit(RTC::Manager* manager)
    {
        RTC::Properties profile(virtualcamera_spec);
        manager->registerFactory(profile,
                                 RTC::Create<VirtualCamera>,
                                 RTC::Delete<VirtualCamera>);
    }
};