#include "OpenNI.h"

#include <boost/lexical_cast.hpp>
#include <boost/algorithm/string/replace.hpp>

#include "openni2_camera/openni2_device.h"
#include "openni2_camera/openni2_exception.h"
#include "openni2_camera/openni2_convert.h"
#include "openni2_camera/openni2_frame_listener.h"

#include <boost/shared_ptr.hpp>
#include <boost/make_shared.hpp>

#include <cmath>
#include <string>

namespace openni2_wrapper
{

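// Initializes the OpenNI runtime, opens the device identified by device_URI
// (or any attached device when the URI is empty), caches its DeviceInfo and
// creates one frame listener per stream. Throws OpenNI2Exception on failure.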
OpenNI2Device::OpenNI2Device(const std::string& device_URI) throw (OpenNI2Exception) :
    openni_device_(),
    ir_video_started_(false),
    color_video_started_(false),
    depth_video_started_(false),
    image_registration_activated_(false),
    use_device_time_(false)
{
  openni::Status rc = openni::OpenNI::initialize();
  if (rc != openni::STATUS_OK)
    THROW_OPENNI_EXCEPTION("Initialize failed\n%s\n", openni::OpenNI::getExtendedError());

  openni_device_ = boost::make_shared<openni::Device>();

  if (device_URI.length() > 0)
  {
    rc = openni_device_->open(device_URI.c_str());
  }
  else
  {
    rc = openni_device_->open(openni::ANY_DEVICE);
  }

  if (rc != openni::STATUS_OK)
    THROW_OPENNI_EXCEPTION("Device open failed\n%s\n", openni::OpenNI::getExtendedError());

  device_info_ = boost::make_shared<openni::DeviceInfo>();
  *device_info_ = openni_device_->getDeviceInfo();

  ir_frame_listener = boost::make_shared<OpenNI2FrameListener>();
  color_frame_listener = boost::make_shared<OpenNI2FrameListener>();
  depth_frame_listener = boost::make_shared<OpenNI2FrameListener>();
}

OpenNI2Device::~OpenNI2Device()
{
  stopAllStreams();

  shutdown();

  openni_device_->close();
}

const std::string OpenNI2Device::getUri() const
{
  return std::string(device_info_->getUri());
}

const std::string OpenNI2Device::getVendor() const
{
  return std::string(device_info_->getVendor());
}

const std::string OpenNI2Device::getName() const
{
  return std::string(device_info_->getName());
}

uint16_t OpenNI2Device::getUsbVendorId() const
{
  return device_info_->getUsbVendorId();
}

uint16_t OpenNI2Device::getUsbProductId() const
{
  return device_info_->getUsbProductId();
}

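// Builds a human-readable identifier from the device name and vendor,
// with '/', '.' and '@' characters stripped out.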
const std::string OpenNI2Device::getStringID() const
{
  std::string ID_str = getName() + "_" + getVendor();

  boost::replace_all(ID_str, "/", "");
  boost::replace_all(ID_str, ".", "");
  boost::replace_all(ID_str, "@", "");

  return ID_str;
}

bool OpenNI2Device::isValid() const
{
  return (openni_device_.get() != 0) && openni_device_->isValid();
}

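// Focal length in pixels for a given vertical output resolution, derived from
// the stream's vertical field of view via the pinhole model:
//   f = output_y_resolution / (2 * tan(vertical_fov / 2))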
float OpenNI2Device::getIRFocalLength(int output_y_resolution) const
{
  float focal_length = 0.0f;
  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  if (stream)
  {
    focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
  }

  return focal_length;
}

float OpenNI2Device::getColorFocalLength(int output_y_resolution) const
{
  float focal_length = 0.0f;
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
  }

  return focal_length;
}

float OpenNI2Device::getDepthFocalLength(int output_y_resolution) const
{
  float focal_length = 0.0f;
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream)
  {
    focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
  }

  return focal_length;
}

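// The is*VideoModeSupported() helpers refresh the cached list of supported
// modes and then scan it linearly for a matching entry.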
bool OpenNI2Device::isIRVideoModeSupported(const OpenNI2VideoMode& video_mode) const
{
  getSupportedIRVideoModes();

  bool supported = false;

  std::vector<OpenNI2VideoMode>::const_iterator it = ir_video_modes_.begin();
  std::vector<OpenNI2VideoMode>::const_iterator it_end = ir_video_modes_.end();

  while (it != it_end && !supported)
  {
    supported = (*it == video_mode);
    ++it;
  }

  return supported;
}

bool OpenNI2Device::isColorVideoModeSupported(const OpenNI2VideoMode& video_mode) const
{
  getSupportedColorVideoModes();

  bool supported = false;

  std::vector<OpenNI2VideoMode>::const_iterator it = color_video_modes_.begin();
  std::vector<OpenNI2VideoMode>::const_iterator it_end = color_video_modes_.end();

  while (it != it_end && !supported)
  {
    supported = (*it == video_mode);
    ++it;
  }

  return supported;
}

bool OpenNI2Device::isDepthVideoModeSupported(const OpenNI2VideoMode& video_mode) const
{
  getSupportedDepthVideoModes();

  bool supported = false;

  std::vector<OpenNI2VideoMode>::const_iterator it = depth_video_modes_.begin();
  std::vector<OpenNI2VideoMode>::const_iterator it_end = depth_video_modes_.end();

  while (it != it_end && !supported)
  {
    supported = (*it == video_mode);
    ++it;
  }

  return supported;
}

bool OpenNI2Device::hasIRSensor() const
{
  return openni_device_->hasSensor(openni::SENSOR_IR);
}

bool OpenNI2Device::hasColorSensor() const
{
  return openni_device_->hasSensor(openni::SENSOR_COLOR);
}

bool OpenNI2Device::hasDepthSensor() const
{
  return openni_device_->hasSensor(openni::SENSOR_DEPTH);
}

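// The start*Stream() methods lazily create the underlying video stream,
// disable mirroring, start streaming and attach the matching frame listener.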
void OpenNI2Device::startIRStream()
{
  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  if (stream)
  {
    stream->setMirroringEnabled(false);
    stream->start();
    stream->addNewFrameListener(ir_frame_listener.get());
    ir_video_started_ = true;
  }
}

void OpenNI2Device::startColorStream()
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    stream->setMirroringEnabled(false);
    stream->start();
    stream->addNewFrameListener(color_frame_listener.get());
    color_video_started_ = true;
  }
}

void OpenNI2Device::startDepthStream()
{
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream)
  {
    stream->setMirroringEnabled(false);
    stream->start();
    stream->addNewFrameListener(depth_frame_listener.get());
    depth_video_started_ = true;
  }
}

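// stopAllStreams() tears down all three streams; each stop*Stream() method
// detaches its frame listener before stopping the stream.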
void OpenNI2Device::stopAllStreams()
{
  stopIRStream();
  stopColorStream();
  stopDepthStream();
}

void OpenNI2Device::stopIRStream()
{
  if (ir_video_stream_.get() != 0)
  {
    ir_video_started_ = false;

    ir_video_stream_->removeNewFrameListener(ir_frame_listener.get());

    ir_video_stream_->stop();
  }
}

void OpenNI2Device::stopColorStream()
{
  if (color_video_stream_.get() != 0)
  {
    color_video_started_ = false;

    color_video_stream_->removeNewFrameListener(color_frame_listener.get());

    color_video_stream_->stop();
  }
}

void OpenNI2Device::stopDepthStream()
{
  if (depth_video_stream_.get() != 0)
  {
    depth_video_started_ = false;

    depth_video_stream_->removeNewFrameListener(depth_frame_listener.get());

    depth_video_stream_->stop();
  }
}

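// Destroys any streams that were created. Called from the destructor after
// stopAllStreams().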
void OpenNI2Device::shutdown()
{
  if (ir_video_stream_.get() != 0)
    ir_video_stream_->destroy();

  if (color_video_stream_.get() != 0)
    color_video_stream_->destroy();

  if (depth_video_stream_.get() != 0)
    depth_video_stream_->destroy();
}

bool OpenNI2Device::isIRStreamStarted()
{
  return ir_video_started_;
}

bool OpenNI2Device::isColorStreamStarted()
{
  return color_video_started_;
}

bool OpenNI2Device::isDepthStreamStarted()
{
  return depth_video_started_;
}

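// The getSupported*VideoModes() methods query the stream's SensorInfo and
// convert the result into OpenNI2VideoMode values, caching it in a member so
// the returned reference stays valid after the call.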
const std::vector<OpenNI2VideoMode>& OpenNI2Device::getSupportedIRVideoModes() const
{
  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  ir_video_modes_.clear();

  if (stream)
  {
    const openni::SensorInfo& sensor_info = stream->getSensorInfo();

    ir_video_modes_ = openni2_convert(sensor_info.getSupportedVideoModes());
  }

  return ir_video_modes_;
}

const std::vector<OpenNI2VideoMode>& OpenNI2Device::getSupportedColorVideoModes() const
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  color_video_modes_.clear();

  if (stream)
  {
    const openni::SensorInfo& sensor_info = stream->getSensorInfo();

    color_video_modes_ = openni2_convert(sensor_info.getSupportedVideoModes());
  }

  return color_video_modes_;
}

const std::vector<OpenNI2VideoMode>& OpenNI2Device::getSupportedDepthVideoModes() const
{
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  depth_video_modes_.clear();

  if (stream)
  {
    const openni::SensorInfo& sensor_info = stream->getSensorInfo();

    depth_video_modes_ = openni2_convert(sensor_info.getSupportedVideoModes());
  }

  return depth_video_modes_;
}

bool OpenNI2Device::isImageRegistrationModeSupported() const
{
  return openni_device_->isImageRegistrationModeSupported(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
}

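// Enables or disables hardware depth-to-color registration if the device
// supports it; otherwise the request is silently ignored.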
void OpenNI2Device::setImageRegistrationMode(bool enabled) throw (OpenNI2Exception)
{
  if (isImageRegistrationModeSupported())
  {
    image_registration_activated_ = enabled;
    if (enabled)
    {
      openni::Status rc = openni_device_->setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Enabling image registration mode failed: \n%s\n", openni::OpenNI::getExtendedError());
    }
    else
    {
      openni::Status rc = openni_device_->setImageRegistrationMode(openni::IMAGE_REGISTRATION_OFF);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Disabling image registration mode failed: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
}

void OpenNI2Device::setDepthColorSync(bool enabled) throw (OpenNI2Exception)
{
  openni::Status rc = openni_device_->setDepthColorSyncEnabled(enabled);
  if (rc != openni::STATUS_OK)
    THROW_OPENNI_EXCEPTION("Enabling depth color synchronization failed: \n%s\n", openni::OpenNI::getExtendedError());
}

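// The get*VideoMode() methods return the currently configured mode of the
// corresponding stream and throw if the stream could not be created.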
const OpenNI2VideoMode OpenNI2Device::getIRVideoMode() throw (OpenNI2Exception)
{
  OpenNI2VideoMode ret;

  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  if (stream)
  {
    openni::VideoMode video_mode = stream->getVideoMode();

    ret = openni2_convert(video_mode);
  }
  else
    THROW_OPENNI_EXCEPTION("Could not create video stream.");

  return ret;
}

const OpenNI2VideoMode OpenNI2Device::getColorVideoMode() throw (OpenNI2Exception)
{
  OpenNI2VideoMode ret;

  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::VideoMode video_mode = stream->getVideoMode();

    ret = openni2_convert(video_mode);
  }
  else
    THROW_OPENNI_EXCEPTION("Could not create video stream.");

  return ret;
}

const OpenNI2VideoMode OpenNI2Device::getDepthVideoMode() throw (OpenNI2Exception)
{
  OpenNI2VideoMode ret;

  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream)
  {
    openni::VideoMode video_mode = stream->getVideoMode();

    ret = openni2_convert(video_mode);
  }
  else
    THROW_OPENNI_EXCEPTION("Could not create video stream.");

  return ret;
}

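// The set*VideoMode() methods convert the wrapper mode to an
// openni::VideoMode and apply it to the stream, throwing on failure.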
void OpenNI2Device::setIRVideoMode(const OpenNI2VideoMode& video_mode) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  if (stream)
  {
    const openni::VideoMode videoMode = openni2_convert(video_mode);
    const openni::Status rc = stream->setVideoMode(videoMode);
    if (rc != openni::STATUS_OK)
      THROW_OPENNI_EXCEPTION("Couldn't set IR video mode: \n%s\n", openni::OpenNI::getExtendedError());
  }
}

void OpenNI2Device::setColorVideoMode(const OpenNI2VideoMode& video_mode) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    const openni::VideoMode videoMode = openni2_convert(video_mode);
    const openni::Status rc = stream->setVideoMode(videoMode);
    if (rc != openni::STATUS_OK)
      THROW_OPENNI_EXCEPTION("Couldn't set color video mode: \n%s\n", openni::OpenNI::getExtendedError());
  }
}

void OpenNI2Device::setDepthVideoMode(const OpenNI2VideoMode& video_mode) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream)
  {
    const openni::VideoMode videoMode = openni2_convert(video_mode);
    const openni::Status rc = stream->setVideoMode(videoMode);
    if (rc != openni::STATUS_OK)
      THROW_OPENNI_EXCEPTION("Couldn't set depth video mode: \n%s\n", openni::OpenNI::getExtendedError());
  }
}

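// Auto exposure and auto white balance are controlled through the color
// stream's CameraSettings; setters and getters do nothing when the stream or
// its settings are unavailable.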
void OpenNI2Device::setAutoExposure(bool enable) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
    {
      const openni::Status rc = camera_settings->setAutoExposureEnabled(enable);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't set auto exposure: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
}

void OpenNI2Device::setAutoWhiteBalance(bool enable) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
    {
      const openni::Status rc = camera_settings->setAutoWhiteBalanceEnabled(enable);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't set auto white balance: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
}

bool OpenNI2Device::getAutoExposure() const
{
  bool ret = false;

  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
      ret = camera_settings->getAutoExposureEnabled();
  }

  return ret;
}

bool OpenNI2Device::getAutoWhiteBalance() const
{
  bool ret = false;

  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
      ret = camera_settings->getAutoWhiteBalanceEnabled();
  }

  return ret;
}

void OpenNI2Device::setUseDeviceTimer(bool enable)
{
  if (ir_frame_listener)
    ir_frame_listener->setUseDeviceTimer(enable);

  if (color_frame_listener)
    color_frame_listener->setUseDeviceTimer(enable);

  if (depth_frame_listener)
    depth_frame_listener->setUseDeviceTimer(enable);
}

void OpenNI2Device::setIRFrameCallback(FrameCallbackFunction callback)
{
  ir_frame_listener->setCallback(callback);
}

void OpenNI2Device::setColorFrameCallback(FrameCallbackFunction callback)
{
  color_frame_listener->setCallback(callback);
}

void OpenNI2Device::setDepthFrameCallback(FrameCallbackFunction callback)
{
  depth_frame_listener->setCallback(callback);
}

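// The get*VideoStream() accessors create the openni::VideoStream lazily on
// first use, provided the device exposes the corresponding sensor, and throw
// if stream creation fails.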
boost::shared_ptr<openni::VideoStream> OpenNI2Device::getIRVideoStream() const throw (OpenNI2Exception)
{
  if (ir_video_stream_.get() == 0)
  {
    if (hasIRSensor())
    {
      ir_video_stream_ = boost::make_shared<openni::VideoStream>();

      const openni::Status rc = ir_video_stream_->create(*openni_device_, openni::SENSOR_IR);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't create IR video stream: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
  return ir_video_stream_;
}

boost::shared_ptr<openni::VideoStream> OpenNI2Device::getColorVideoStream() const throw (OpenNI2Exception)
{
  if (color_video_stream_.get() == 0)
  {
    if (hasColorSensor())
    {
      color_video_stream_ = boost::make_shared<openni::VideoStream>();

      const openni::Status rc = color_video_stream_->create(*openni_device_, openni::SENSOR_COLOR);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't create color video stream: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
  return color_video_stream_;
}

boost::shared_ptr<openni::VideoStream> OpenNI2Device::getDepthVideoStream() const throw (OpenNI2Exception)
{
  if (depth_video_stream_.get() == 0)
  {
    if (hasDepthSensor())
    {
      depth_video_stream_ = boost::make_shared<openni::VideoStream>();

      const openni::Status rc = depth_video_stream_->create(*openni_device_, openni::SENSOR_DEPTH);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't create depth video stream: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
  return depth_video_stream_;
}

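// Prints a summary of the device (URI, vendor, name, USB IDs) followed by the
// supported video modes of each available sensor.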
std::ostream& operator <<(std::ostream& stream, const OpenNI2Device& device)
{
  stream << "Device info (" << device.getUri() << ")" << std::endl;
  stream << " Vendor: " << device.getVendor() << std::endl;
  stream << " Name: " << device.getName() << std::endl;
  stream << " USB Vendor ID: " << device.getUsbVendorId() << std::endl;
  stream << " USB Product ID: " << device.getUsbProductId() << std::endl << std::endl;

  if (device.hasIRSensor())
  {
    stream << "IR sensor video modes:" << std::endl;
    const std::vector<OpenNI2VideoMode>& video_modes = device.getSupportedIRVideoModes();

    std::vector<OpenNI2VideoMode>::const_iterator it = video_modes.begin();
    std::vector<OpenNI2VideoMode>::const_iterator it_end = video_modes.end();
    for (; it != it_end; ++it)
      stream << " - " << *it << std::endl;
  }
  else
  {
    stream << "No IR sensor available" << std::endl;
  }

  if (device.hasColorSensor())
  {
    stream << "Color sensor video modes:" << std::endl;
    const std::vector<OpenNI2VideoMode>& video_modes = device.getSupportedColorVideoModes();

    std::vector<OpenNI2VideoMode>::const_iterator it = video_modes.begin();
    std::vector<OpenNI2VideoMode>::const_iterator it_end = video_modes.end();
    for (; it != it_end; ++it)
      stream << " - " << *it << std::endl;
  }
  else
  {
    stream << "No Color sensor available" << std::endl;
  }

  if (device.hasDepthSensor())
  {
    stream << "Depth sensor video modes:" << std::endl;
    const std::vector<OpenNI2VideoMode>& video_modes = device.getSupportedDepthVideoModes();

    std::vector<OpenNI2VideoMode>::const_iterator it = video_modes.begin();
    std::vector<OpenNI2VideoMode>::const_iterator it_end = video_modes.end();
    for (; it != it_end; ++it)
      stream << " - " << *it << std::endl;
  }
  else
  {
    stream << "No Depth sensor available" << std::endl;
  }

  return stream;
}

}