openni2_device.cpp
/*
 * Copyright (c) 2013, Willow Garage, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the Willow Garage, Inc. nor the names of its
 *       contributors may be used to endorse or promote products derived from
 *       this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 *      Author: Julius Kammerl (jkammerl@willowgarage.com)
 */

#include "OpenNI.h"
#include <PS1080.h> // For XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE property

#include <boost/lexical_cast.hpp>
#include <boost/algorithm/string/replace.hpp>

#include "openni2_camera/openni2_device.h"
#include "openni2_camera/openni2_exception.h"
#include "openni2_camera/openni2_convert.h"
#include "openni2_camera/openni2_frame_listener.h"

#include <boost/shared_ptr.hpp>
#include <boost/make_shared.hpp>

#include <string>

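// Typical usage (illustrative sketch only, not part of this file; the callback
// name below is a hypothetical placeholder for a user-supplied handler):
//
//   openni2_wrapper::OpenNI2Device device("");        // empty URI opens openni::ANY_DEVICE
//   device.setDepthFrameCallback(my_depth_callback);  // my_depth_callback: hypothetical
//   device.startDepthStream();
//   // ... process frames delivered to the callback ...
//   device.stopAllStreams();
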
namespace openni2_wrapper
{

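// Opens the device with the given URI, or the first available device when the
// URI is empty, and creates one frame listener per stream type. Throws an
// OpenNI2Exception if OpenNI initialization or opening the device fails.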
OpenNI2Device::OpenNI2Device(const std::string& device_URI) throw (OpenNI2Exception) :
    openni_device_(),
    ir_video_started_(false),
    color_video_started_(false),
    depth_video_started_(false),
    image_registration_activated_(false),
    use_device_time_(false)
{
  openni::Status rc = openni::OpenNI::initialize();
  if (rc != openni::STATUS_OK)
    THROW_OPENNI_EXCEPTION("Initialize failed\n%s\n", openni::OpenNI::getExtendedError());

  openni_device_ = boost::make_shared<openni::Device>();

  if (device_URI.length() > 0)
  {
    rc = openni_device_->open(device_URI.c_str());
  }
  else
  {
    rc = openni_device_->open(openni::ANY_DEVICE);
  }

  if (rc != openni::STATUS_OK)
    THROW_OPENNI_EXCEPTION("Device open failed\n%s\n", openni::OpenNI::getExtendedError());

  device_info_ = boost::make_shared<openni::DeviceInfo>();
  *device_info_ = openni_device_->getDeviceInfo();

  ir_frame_listener = boost::make_shared<OpenNI2FrameListener>();
  color_frame_listener = boost::make_shared<OpenNI2FrameListener>();
  depth_frame_listener = boost::make_shared<OpenNI2FrameListener>();
}

OpenNI2Device::~OpenNI2Device()
{
  stopAllStreams();

  shutdown();

  openni_device_->close();
}

const std::string OpenNI2Device::getUri() const
{
  return std::string(device_info_->getUri());
}

const std::string OpenNI2Device::getVendor() const
{
  return std::string(device_info_->getVendor());
}

const std::string OpenNI2Device::getName() const
{
  return std::string(device_info_->getName());
}

uint16_t OpenNI2Device::getUsbVendorId() const
{
  return device_info_->getUsbVendorId();
}

uint16_t OpenNI2Device::getUsbProductId() const
{
  return device_info_->getUsbProductId();
}

const std::string OpenNI2Device::getStringID() const
{
  std::string ID_str = getName() + "_" + getVendor();

  boost::replace_all(ID_str, "/", "");
  boost::replace_all(ID_str, ".", "");
  boost::replace_all(ID_str, "@", "");

  return ID_str;
}

bool OpenNI2Device::isValid() const
{
  return (openni_device_.get() != 0) && openni_device_->isValid();
}

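// The focal length (in pixels) is derived from the stream's vertical field of
// view: f = output_y_resolution / (2 * tan(vertical_fov / 2)). The same
// formula is applied to the IR, color, and depth streams below.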
float OpenNI2Device::getIRFocalLength(int output_y_resolution) const
{
  float focal_length = 0.0f;
  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  if (stream)
  {
    focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
  }

  return focal_length;
}

float OpenNI2Device::getColorFocalLength(int output_y_resolution) const
{
  float focal_length = 0.0f;
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
  }

  return focal_length;
}

float OpenNI2Device::getDepthFocalLength(int output_y_resolution) const
{
  float focal_length = 0.0f;
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream)
  {
    focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
  }

  return focal_length;
}

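// Returns the emitter/depth-camera baseline in meters. The value is read from
// the PS1080-specific XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE property
// (reported in cm) when available; otherwise a default of 7.5 cm is assumed.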
float OpenNI2Device::getBaseline() const
{
  float baseline = 0.075f;
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream && stream->isPropertySupported(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE))
  {
    double baseline_cm;
    stream->getProperty(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE, &baseline_cm); // Device specific -- from PS1080.h
    baseline = static_cast<float>(baseline_cm * 0.01);  // convert baseline from cm to meters
  }
  return baseline;
}

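// The is*VideoModeSupported() helpers refresh the cached list of modes for the
// corresponding sensor and then do a linear search for an exact match.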
bool OpenNI2Device::isIRVideoModeSupported(const OpenNI2VideoMode& video_mode) const
{
  getSupportedIRVideoModes();

  bool supported = false;

  std::vector<OpenNI2VideoMode>::const_iterator it = ir_video_modes_.begin();
  std::vector<OpenNI2VideoMode>::const_iterator it_end = ir_video_modes_.end();

  while (it != it_end && !supported)
  {
    supported = (*it == video_mode);
    ++it;
  }

  return supported;
}

bool OpenNI2Device::isColorVideoModeSupported(const OpenNI2VideoMode& video_mode) const
{
  getSupportedColorVideoModes();

  bool supported = false;

  std::vector<OpenNI2VideoMode>::const_iterator it = color_video_modes_.begin();
  std::vector<OpenNI2VideoMode>::const_iterator it_end = color_video_modes_.end();

  while (it != it_end && !supported)
  {
    supported = (*it == video_mode);
    ++it;
  }

  return supported;
}

bool OpenNI2Device::isDepthVideoModeSupported(const OpenNI2VideoMode& video_mode) const
{
  getSupportedDepthVideoModes();

  bool supported = false;

  std::vector<OpenNI2VideoMode>::const_iterator it = depth_video_modes_.begin();
  std::vector<OpenNI2VideoMode>::const_iterator it_end = depth_video_modes_.end();

  while (it != it_end && !supported)
  {
    supported = (*it == video_mode);
    ++it;
  }

  return supported;
}

bool OpenNI2Device::hasIRSensor() const
{
  return openni_device_->hasSensor(openni::SENSOR_IR);
}

bool OpenNI2Device::hasColorSensor() const
{
  return openni_device_->hasSensor(openni::SENSOR_COLOR);
}

bool OpenNI2Device::hasDepthSensor() const
{
  return openni_device_->hasSensor(openni::SENSOR_DEPTH);
}

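// The start*Stream() methods lazily create the corresponding video stream,
// disable mirroring, start streaming, and register the frame listener that
// forwards new frames to the user callback.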
void OpenNI2Device::startIRStream()
{
  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  if (stream)
  {
    stream->setMirroringEnabled(false);
    stream->start();
    stream->addNewFrameListener(ir_frame_listener.get());
    ir_video_started_ = true;
  }
}

void OpenNI2Device::startColorStream()
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    stream->setMirroringEnabled(false);
    stream->start();
    stream->addNewFrameListener(color_frame_listener.get());
    color_video_started_ = true;
  }
}

void OpenNI2Device::startDepthStream()
{
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream)
  {
    stream->setMirroringEnabled(false);
    stream->start();
    stream->addNewFrameListener(depth_frame_listener.get());
    depth_video_started_ = true;
  }
}

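// The stop*Stream() methods deregister the frame listener and stop the stream;
// the stream object itself is kept so that it can be restarted later.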
void OpenNI2Device::stopAllStreams()
{
  stopIRStream();
  stopColorStream();
  stopDepthStream();
}

void OpenNI2Device::stopIRStream()
{
  if (ir_video_stream_.get() != 0)
  {
    ir_video_started_ = false;

    ir_video_stream_->removeNewFrameListener(ir_frame_listener.get());

    ir_video_stream_->stop();
  }
}

void OpenNI2Device::stopColorStream()
{
  if (color_video_stream_.get() != 0)
  {
    color_video_started_ = false;

    color_video_stream_->removeNewFrameListener(color_frame_listener.get());

    color_video_stream_->stop();
  }
}

void OpenNI2Device::stopDepthStream()
{
  if (depth_video_stream_.get() != 0)
  {
    depth_video_started_ = false;

    depth_video_stream_->removeNewFrameListener(depth_frame_listener.get());

    depth_video_stream_->stop();
  }
}

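// shutdown() destroys the underlying OpenNI video streams. The destructor
// calls stopAllStreams(), then shutdown(), and finally closes the device.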
void OpenNI2Device::shutdown()
{
  if (ir_video_stream_.get() != 0)
    ir_video_stream_->destroy();

  if (color_video_stream_.get() != 0)
    color_video_stream_->destroy();

  if (depth_video_stream_.get() != 0)
    depth_video_stream_->destroy();
}

bool OpenNI2Device::isIRStreamStarted()
{
  return ir_video_started_;
}

bool OpenNI2Device::isColorStreamStarted()
{
  return color_video_started_;
}

bool OpenNI2Device::isDepthStreamStarted()
{
  return depth_video_started_;
}

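// The getSupported*VideoModes() methods query the stream's SensorInfo on every
// call and store the converted list in the corresponding *_video_modes_ member
// before returning a reference to it.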
const std::vector<OpenNI2VideoMode>& OpenNI2Device::getSupportedIRVideoModes() const
{
  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  ir_video_modes_.clear();

  if (stream)
  {
    const openni::SensorInfo& sensor_info = stream->getSensorInfo();

    ir_video_modes_ = openni2_convert(sensor_info.getSupportedVideoModes());
  }

  return ir_video_modes_;
}

const std::vector<OpenNI2VideoMode>& OpenNI2Device::getSupportedColorVideoModes() const
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  color_video_modes_.clear();

  if (stream)
  {
    const openni::SensorInfo& sensor_info = stream->getSensorInfo();

    color_video_modes_ = openni2_convert(sensor_info.getSupportedVideoModes());
  }

  return color_video_modes_;
}

const std::vector<OpenNI2VideoMode>& OpenNI2Device::getSupportedDepthVideoModes() const
{
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  depth_video_modes_.clear();

  if (stream)
  {
    const openni::SensorInfo& sensor_info = stream->getSensorInfo();

    depth_video_modes_ = openni2_convert(sensor_info.getSupportedVideoModes());
  }

  return depth_video_modes_;
}

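// Depth-to-color image registration aligns the depth image with the color
// image on the device when the hardware supports it.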
bool OpenNI2Device::isImageRegistrationModeSupported() const
{
  return openni_device_->isImageRegistrationModeSupported(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
}

void OpenNI2Device::setImageRegistrationMode(bool enabled) throw (OpenNI2Exception)
{
  if (isImageRegistrationModeSupported())
  {
    image_registration_activated_ = enabled;
    if (enabled)
    {
      openni::Status rc = openni_device_->setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Enabling image registration mode failed: \n%s\n", openni::OpenNI::getExtendedError());
    }
    else
    {
      openni::Status rc = openni_device_->setImageRegistrationMode(openni::IMAGE_REGISTRATION_OFF);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Disabling image registration mode failed: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
}

void OpenNI2Device::setDepthColorSync(bool enabled) throw (OpenNI2Exception)
{
  openni::Status rc = openni_device_->setDepthColorSyncEnabled(enabled);
  if (rc != openni::STATUS_OK)
    THROW_OPENNI_EXCEPTION("Setting depth-color synchronization failed: \n%s\n", openni::OpenNI::getExtendedError());
}

const OpenNI2VideoMode OpenNI2Device::getIRVideoMode() throw (OpenNI2Exception)
{
  OpenNI2VideoMode ret;

  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  if (stream)
  {
    openni::VideoMode video_mode = stream->getVideoMode();

    ret = openni2_convert(video_mode);
  }
  else
    THROW_OPENNI_EXCEPTION("Could not create video stream.");

  return ret;
}

const OpenNI2VideoMode OpenNI2Device::getColorVideoMode() throw (OpenNI2Exception)
{
  OpenNI2VideoMode ret;

  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::VideoMode video_mode = stream->getVideoMode();

    ret = openni2_convert(video_mode);
  }
  else
    THROW_OPENNI_EXCEPTION("Could not create video stream.");

  return ret;
}

const OpenNI2VideoMode OpenNI2Device::getDepthVideoMode() throw (OpenNI2Exception)
{
  OpenNI2VideoMode ret;

  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream)
  {
    openni::VideoMode video_mode = stream->getVideoMode();

    ret = openni2_convert(video_mode);
  }
  else
    THROW_OPENNI_EXCEPTION("Could not create video stream.");

  return ret;
}

void OpenNI2Device::setIRVideoMode(const OpenNI2VideoMode& video_mode) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();

  if (stream)
  {
    const openni::VideoMode videoMode = openni2_convert(video_mode);
    const openni::Status rc = stream->setVideoMode(videoMode);
    if (rc != openni::STATUS_OK)
      THROW_OPENNI_EXCEPTION("Couldn't set IR video mode: \n%s\n", openni::OpenNI::getExtendedError());
  }
}

void OpenNI2Device::setColorVideoMode(const OpenNI2VideoMode& video_mode) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    const openni::VideoMode videoMode = openni2_convert(video_mode);
    const openni::Status rc = stream->setVideoMode(videoMode);
    if (rc != openni::STATUS_OK)
      THROW_OPENNI_EXCEPTION("Couldn't set color video mode: \n%s\n", openni::OpenNI::getExtendedError());
  }
}

void OpenNI2Device::setDepthVideoMode(const OpenNI2VideoMode& video_mode) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();

  if (stream)
  {
    const openni::VideoMode videoMode = openni2_convert(video_mode);
    const openni::Status rc = stream->setVideoMode(videoMode);
    if (rc != openni::STATUS_OK)
      THROW_OPENNI_EXCEPTION("Couldn't set depth video mode: \n%s\n", openni::OpenNI::getExtendedError());
  }
}

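// The exposure and white-balance accessors operate on the color stream's
// openni::CameraSettings object, which is only available for devices that
// expose these controls.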
void OpenNI2Device::setAutoExposure(bool enable) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
    {
      const openni::Status rc = camera_settings->setAutoExposureEnabled(enable);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't set auto exposure: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
}

void OpenNI2Device::setAutoWhiteBalance(bool enable) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
    {
      const openni::Status rc = camera_settings->setAutoWhiteBalanceEnabled(enable);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't set auto white balance: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
}

void OpenNI2Device::setExposure(int exposure) throw (OpenNI2Exception)
{
  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
    {
      const openni::Status rc = camera_settings->setExposure(exposure);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't set exposure: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
}

bool OpenNI2Device::getAutoExposure() const
{
  bool ret = false;

  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
      ret = camera_settings->getAutoExposureEnabled();
  }

  return ret;
}

bool OpenNI2Device::getAutoWhiteBalance() const
{
  bool ret = false;

  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
      ret = camera_settings->getAutoWhiteBalanceEnabled();
  }

  return ret;
}

int OpenNI2Device::getExposure() const
{
  int ret = 0;

  boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();

  if (stream)
  {
    openni::CameraSettings* camera_settings = stream->getCameraSettings();
    if (camera_settings)
      ret = camera_settings->getExposure();
  }

  return ret;
}

void OpenNI2Device::setUseDeviceTimer(bool enable)
{
  if (ir_frame_listener)
    ir_frame_listener->setUseDeviceTimer(enable);

  if (color_frame_listener)
    color_frame_listener->setUseDeviceTimer(enable);

  if (depth_frame_listener)
    depth_frame_listener->setUseDeviceTimer(enable);
}

void OpenNI2Device::setIRFrameCallback(FrameCallbackFunction callback)
{
  ir_frame_listener->setCallback(callback);
}

void OpenNI2Device::setColorFrameCallback(FrameCallbackFunction callback)
{
  color_frame_listener->setCallback(callback);
}

void OpenNI2Device::setDepthFrameCallback(FrameCallbackFunction callback)
{
  depth_frame_listener->setCallback(callback);
}

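// The get*VideoStream() accessors create the underlying openni::VideoStream on
// first use, provided the device reports the corresponding sensor, and return
// the stored stream on subsequent calls.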
boost::shared_ptr<openni::VideoStream> OpenNI2Device::getIRVideoStream() const throw (OpenNI2Exception)
{
  if (ir_video_stream_.get() == 0)
  {
    if (hasIRSensor())
    {
      ir_video_stream_ = boost::make_shared<openni::VideoStream>();

      const openni::Status rc = ir_video_stream_->create(*openni_device_, openni::SENSOR_IR);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't create IR video stream: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
  return ir_video_stream_;
}

boost::shared_ptr<openni::VideoStream> OpenNI2Device::getColorVideoStream() const throw (OpenNI2Exception)
{
  if (color_video_stream_.get() == 0)
  {
    if (hasColorSensor())
    {
      color_video_stream_ = boost::make_shared<openni::VideoStream>();

      const openni::Status rc = color_video_stream_->create(*openni_device_, openni::SENSOR_COLOR);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't create color video stream: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
  return color_video_stream_;
}

boost::shared_ptr<openni::VideoStream> OpenNI2Device::getDepthVideoStream() const throw (OpenNI2Exception)
{
  if (depth_video_stream_.get() == 0)
  {
    if (hasDepthSensor())
    {
      depth_video_stream_ = boost::make_shared<openni::VideoStream>();

      const openni::Status rc = depth_video_stream_->create(*openni_device_, openni::SENSOR_DEPTH);
      if (rc != openni::STATUS_OK)
        THROW_OPENNI_EXCEPTION("Couldn't create depth video stream: \n%s\n", openni::OpenNI::getExtendedError());
    }
  }
  return depth_video_stream_;
}

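// Streams a human-readable summary of the device (URI, vendor, name, USB IDs)
// and the supported video modes of each available sensor.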
std::ostream& operator <<(std::ostream& stream, const OpenNI2Device& device)
{
  stream << "Device info (" << device.getUri() << ")" << std::endl;
  stream << "   Vendor: " << device.getVendor() << std::endl;
  stream << "   Name: " << device.getName() << std::endl;
  stream << "   USB Vendor ID: " << device.getUsbVendorId() << std::endl;
  stream << "   USB Product ID: " << device.getUsbProductId() << std::endl << std::endl;

  if (device.hasIRSensor())
  {
    stream << "IR sensor video modes:" << std::endl;
    const std::vector<OpenNI2VideoMode>& video_modes = device.getSupportedIRVideoModes();

    std::vector<OpenNI2VideoMode>::const_iterator it = video_modes.begin();
    std::vector<OpenNI2VideoMode>::const_iterator it_end = video_modes.end();
    for (; it != it_end; ++it)
      stream << "   - " << *it << std::endl;
  }
  else
  {
    stream << "No IR sensor available" << std::endl;
  }

  if (device.hasColorSensor())
  {
    stream << "Color sensor video modes:" << std::endl;
    const std::vector<OpenNI2VideoMode>& video_modes = device.getSupportedColorVideoModes();

    std::vector<OpenNI2VideoMode>::const_iterator it = video_modes.begin();
    std::vector<OpenNI2VideoMode>::const_iterator it_end = video_modes.end();
    for (; it != it_end; ++it)
      stream << "   - " << *it << std::endl;
  }
  else
  {
    stream << "No Color sensor available" << std::endl;
  }

  if (device.hasDepthSensor())
  {
    stream << "Depth sensor video modes:" << std::endl;
    const std::vector<OpenNI2VideoMode>& video_modes = device.getSupportedDepthVideoModes();

    std::vector<OpenNI2VideoMode>::const_iterator it = video_modes.begin();
    std::vector<OpenNI2VideoMode>::const_iterator it_end = video_modes.end();
    for (; it != it_end; ++it)
      stream << "   - " << *it << std::endl;
  }
  else
  {
    stream << "No Depth sensor available" << std::endl;
  }

  return stream;
}

}

