ObjectFinderStereo.cpp
// ****************************************************************************
// This file is part of the Integrating Vision Toolkit (IVT).
//
// The IVT is maintained by the Karlsruhe Institute of Technology (KIT)
// (www.kit.edu) in cooperation with the company Keyetech (www.keyetech.de).
//
// Copyright (C) 2014 Karlsruhe Institute of Technology (KIT).
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright
//    notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
//    notice, this list of conditions and the following disclaimer in the
//    documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the KIT nor the names of its contributors may be
//    used to endorse or promote products derived from this software
//    without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE KIT AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE KIT OR CONTRIBUTORS BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// ****************************************************************************
// ****************************************************************************
// Filename:  ObjectFinderStereo.cpp
// Author:    Pedram Azad
// Date:      2005
// ****************************************************************************


// ****************************************************************************
// Includes
// ****************************************************************************

#include <new> // for explicitly using correct new/delete operators on VC DSPs

#include "ObjectFinderStereo.h"

#include "ObjectFinder.h"
#include "Calibration/StereoCalibration.h"
#include "Interfaces/ObjectClassifierInterface.h"
#include "Interfaces/ObjectEntryFilterInterface.h"
#include "Image/ByteImage.h"

#include <stdio.h>
#include <math.h>


// ****************************************************************************
// Constructor / Destructor
// ****************************************************************************

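// The stereo finder owns its two 2D CObjectFinder instances. By default it also
// owns its CStereoCalibration (m_bOwnCalibration); if an external calibration
// is supplied instead, the caller keeps ownership of that object.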
CObjectFinderStereo::CObjectFinderStereo()
{
    m_pObjectFinderLeft = new CObjectFinder();
    m_pObjectFinderRight = new CObjectFinder();

    m_pStereoCalibration = new CStereoCalibration();
    m_bOwnCalibration = true;

    m_pObjectEntryFilter = 0;
}

CObjectFinderStereo::~CObjectFinderStereo()
{
    if (m_bOwnCalibration)
        delete m_pStereoCalibration;

    delete m_pObjectFinderLeft;
    delete m_pObjectFinderRight;
}


// ****************************************************************************
// Methods
// ****************************************************************************

bool CObjectFinderStereo::Init(const char *pCameraParameterFileName)
{
    if (!m_bOwnCalibration)
        m_pStereoCalibration = new CStereoCalibration();

    m_bOwnCalibration = true;

    return m_pStereoCalibration->LoadCameraParameters(pCameraParameterFileName);
}

void CObjectFinderStereo::Init(CStereoCalibration *pStereoCalibration)
{
    if (m_bOwnCalibration)
        delete m_pStereoCalibration;

    m_bOwnCalibration = false;

    m_pStereoCalibration = pStereoCalibration;
}

void CObjectFinderStereo::SetColorParameterSet(const CColorParameterSet *pColorParameterSet)
{
    m_pObjectFinderLeft->SetColorParameterSet(pColorParameterSet);
    m_pObjectFinderRight->SetColorParameterSet(pColorParameterSet);
}

void CObjectFinderStereo::SetRegionFilter(CRegionFilterInterface *pRegionFilter)
{
    m_pObjectFinderLeft->SetRegionFilter(pRegionFilter);
    m_pObjectFinderRight->SetRegionFilter(pRegionFilter);
}

CByteImage* CObjectFinderStereo::GetLeftSegmentationResult()
{
    return m_pObjectFinderLeft->GetSegmentationResult();
}

CByteImage* CObjectFinderStereo::GetRightSegmentationResult()
{
    return m_pObjectFinderRight->GetSegmentationResult();
}

void CObjectFinderStereo::PrepareImages(const CByteImage * const *ppImages, float fROIFactor, bool bCalculateHSVImage)
{
    m_pObjectFinderLeft->PrepareImages(ppImages[0], fROIFactor, bCalculateHSVImage);
    m_pObjectFinderRight->PrepareImages(ppImages[1], fROIFactor, bCalculateHSVImage);
}

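// Finalize() triangulates matching 2D regions from the left and the right
// CObjectFinder into 3D objects (see DetermineMatches() below) and then
// synchronizes the 2D object lists with the accepted matches.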
int CObjectFinderStereo::Finalize(float fMinZDistance, float fMaxZDistance, bool bInputImagesAreRectified, ObjectColor finalizeColor, float fMaxYDiff, bool bUseDistortionParameters)
{
    const int nRet = DetermineMatches(m_pObjectFinderLeft->m_objectList, m_pObjectFinderRight->m_objectList, fMinZDistance, fMaxZDistance, bInputImagesAreRectified, bUseDistortionParameters, finalizeColor, fMaxYDiff);

    // update object lists of (2D) CObjectFinder objects
    UpdateObjectFinderLists(m_pObjectFinderLeft->m_objectList, m_pObjectFinderRight->m_objectList);

    return nRet;
}

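// The FindObjects() overloads below only run color segmentation independently
// on the left and on the right image; the stereo matching itself happens later
// in Finalize().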
void CObjectFinderStereo::FindObjects(const CByteImage * const *ppImages, CByteImage **ppResultImages, ObjectColor color, int nMinPointsPerRegion, bool bShowSegmentedImage)
{
    CByteImage *pResultImageLeft = ppResultImages ? ppResultImages[0] : 0;
    CByteImage *pResultImageRight = ppResultImages ? ppResultImages[1] : 0;

    m_pObjectFinderLeft->FindObjects(ppImages[0], pResultImageLeft, color, nMinPointsPerRegion, bShowSegmentedImage);
    m_pObjectFinderRight->FindObjects(ppImages[1], pResultImageRight, color, nMinPointsPerRegion, bShowSegmentedImage);
}

void CObjectFinderStereo::FindObjects(const CByteImage * const *ppImages, CByteImage **ppResultImages, ObjectColor color, int nMinPointsPerRegion, CByteImage **ppSegmentedResultImages)
{
    CByteImage *pResultImageLeft = ppResultImages ? ppResultImages[0] : 0;
    CByteImage *pResultImageRight = ppResultImages ? ppResultImages[1] : 0;

    m_pObjectFinderLeft->FindObjects(ppImages[0], pResultImageLeft, color, nMinPointsPerRegion, ppSegmentedResultImages[0]);
    m_pObjectFinderRight->FindObjects(ppImages[1], pResultImageRight, color, nMinPointsPerRegion, ppSegmentedResultImages[1]);
}

void CObjectFinderStereo::FindObjects(const CByteImage * const *ppImages, CByteImage **ppResultImages, ObjectColor colorName, int hue, int hue_tol, int min_sat, int max_sat, int min_v, int max_v, int nMinPointsPerRegion, bool bShowSegmentedImage)
{
    CByteImage *pResultImageLeft = ppResultImages ? ppResultImages[0] : 0;
    CByteImage *pResultImageRight = ppResultImages ? ppResultImages[1] : 0;

    m_pObjectFinderLeft->FindObjects(ppImages[0], pResultImageLeft, colorName, hue, hue_tol, min_sat, max_sat, min_v, max_v, nMinPointsPerRegion, bShowSegmentedImage);
    m_pObjectFinderRight->FindObjects(ppImages[1], pResultImageRight, colorName, hue, hue_tol, min_sat, max_sat, min_v, max_v, nMinPointsPerRegion, bShowSegmentedImage);
}

void CObjectFinderStereo::FindObjectsInSegmentedImage(const CByteImage * const *ppImages, CByteImage **ppResultImages, ObjectColor color, int nMinPointsPerRegion, bool bShowSegmentedImage)
{
    CByteImage *pResultImageLeft = ppResultImages ? ppResultImages[0] : 0;
    CByteImage *pResultImageRight = ppResultImages ? ppResultImages[1] : 0;

    m_pObjectFinderLeft->FindObjectsInSegmentedImage(ppImages[0], pResultImageLeft, color, nMinPointsPerRegion, bShowSegmentedImage);
    m_pObjectFinderRight->FindObjectsInSegmentedImage(ppImages[1], pResultImageRight, color, nMinPointsPerRegion, bShowSegmentedImage);
}

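// DetermineMatches() pairs the 2D regions found in the left and right image:
// 1. Region pairs from the previous call are kept if both region IDs still
//    exist and the triangulated point lies within [fMinZDistance, fMaxZDistance].
// 2. Each remaining left region is matched against the unreserved right region
//    with the smallest y-/epipolar distance, provided the pixel-count ratio and
//    the aspect-ratio ratio exceed 0.5, color and type are compatible, the
//    distance stays below fMaxEpipolarDistance and the triangulated z is in range.
// Finally, all registered object classifiers are applied to the 3D object list.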
int CObjectFinderStereo::DetermineMatches(Object2DList &resultListLeft, Object2DList &resultListRight, float fMinZDistance, float fMaxZDistance, bool bInputImagesAreRectified, bool bUseDistortionParameters, ObjectColor finalizeColor, float fMaxEpipolarDistance)
{
    // create copy of current object list
    Object3DList oldObjectList = m_objectList;

    if (finalizeColor == eNone)
    {
        m_objectList.clear();
    }
    else
    {
        for (int i = 0; i < (int) m_objectList.size(); i++)
        {
            if (m_objectList.at(i).color == finalizeColor)
            {
                m_objectList.erase(m_objectList.begin() + i);
                i--;
            }
        }
    }

    int i, j;

    for (i = 0; i < (int) resultListLeft.size(); i++)
        resultListLeft.at(i).reserved = 0;

    for (i = 0; i < (int) resultListRight.size(); i++)
        resultListRight.at(i).reserved = 0;

    // add already existing region pairs
    for (i = 0; i < (int) oldObjectList.size(); i++)
    {
        Object3DEntry &objectEntry = oldObjectList.at(i);

        if (finalizeColor != eNone && finalizeColor != objectEntry.color)
            continue;

        const int region_left_id = objectEntry.region_id_left;
        const int region_right_id = objectEntry.region_id_right;
        int nMatchLeft = -1, nMatchRight = -1;

        for (j = 0; j < (int) resultListLeft.size(); j++)
        {
            if (resultListLeft.at(j).id == region_left_id)
            {
                nMatchLeft = j;
                break;
            }
        }

        for (j = 0; j < (int) resultListRight.size(); j++)
            if (resultListRight.at(j).id == region_right_id)
            {
                nMatchRight = j;
                break;
            }

        if (nMatchLeft != -1 && nMatchRight != -1)
        {
            // calculate 3D point
            objectEntry.region_left = resultListLeft.at(nMatchLeft).region;
            objectEntry.region_right = resultListRight.at(nMatchRight).region;
            m_pStereoCalibration->Calculate3DPoint(objectEntry.region_left.centroid, objectEntry.region_right.centroid, objectEntry.pose.translation, bInputImagesAreRectified, bUseDistortionParameters);

            Math3d::SetVec(objectEntry.world_point, objectEntry.pose.translation);

            if (objectEntry.pose.translation.z >= fMinZDistance && objectEntry.pose.translation.z <= fMaxZDistance &&
                (m_pObjectEntryFilter == 0 || m_pObjectEntryFilter->CheckEntry(objectEntry)))
            {
                resultListLeft.at(nMatchLeft).reserved = 1;
                resultListRight.at(nMatchRight).reserved = 1;
                m_objectList.push_back(objectEntry);
            }
        }
    }

    // add new region pairs
    for (i = 0; i < (int) resultListLeft.size(); i++)
    {
        Object2DEntry &entryLeft = resultListLeft.at(i);

        if (!entryLeft.reserved && (finalizeColor == eNone || entryLeft.color == finalizeColor))
        {
            // find best match in list for right image
            float best_diff = fMaxEpipolarDistance;
            int best_j = -1;

            for (j = 0; j < (int) resultListRight.size(); j++)
            {
                Object2DEntry &entryRight = resultListRight.at(j);

                if (!entryRight.reserved)
                {
                    const float ratio = entryLeft.region.nPixels < entryRight.region.nPixels ? (float) entryLeft.region.nPixels / entryRight.region.nPixels : (float) entryRight.region.nPixels / entryLeft.region.nPixels;
                    const float ratio2 = entryLeft.region.ratio < entryRight.region.ratio ? (float) entryLeft.region.ratio / entryRight.region.ratio : (float) entryRight.region.ratio / entryLeft.region.ratio;
                    const float y_diff = bInputImagesAreRectified ? fabsf(entryLeft.region.centroid.y - entryRight.region.centroid.y) : fabsf(m_pStereoCalibration->CalculateEpipolarLineInLeftImageDistance(entryLeft.region.centroid, entryRight.region.centroid));

                    Vec3d position;
                    m_pStereoCalibration->Calculate3DPoint(entryLeft.region.centroid, entryRight.region.centroid, position, bInputImagesAreRectified, bUseDistortionParameters);

                    //printf("%.2f %.2f -- %.2f %.2f -- %.2f %.2f %.2f\n", entryLeft.region.centroid.x, entryLeft.region.centroid.y, entryRight.region.centroid.x, entryRight.region.centroid.y, ratio, y_diff, position.z);

                    if (ratio > 0.5f && ratio2 > 0.5f && y_diff < fMaxEpipolarDistance &&
                        (entryLeft.type == entryRight.type || entryLeft.type == eCompactObject || entryRight.type == eCompactObject) && entryLeft.color == entryRight.color && (finalizeColor == eNone || entryLeft.color == finalizeColor) &&
                        position.z >= fMinZDistance && position.z <= fMaxZDistance && y_diff < best_diff)
                    {
                        best_diff = y_diff;
                        best_j = j;
                    }
                }
            }

            if (best_j != -1)
            {
                Object2DEntry &entryRight = resultListRight.at(best_j);
                Object3DEntry entry;

                entry.region_left = entryLeft.region;
                entry.region_right = entryRight.region;
                entry.region_id_left = entryLeft.id;
                entry.region_id_right = entryRight.id;
                entry.color = entryLeft.color;
                entry.type = entryLeft.type == eCompactObject ? entryRight.type : entryLeft.type;
                entry.sName = "CompactObject";
                entry.sOivFilePath = "";

                m_pStereoCalibration->Calculate3DPoint(entry.region_left.centroid, entry.region_right.centroid, entry.pose.translation, bInputImagesAreRectified, bUseDistortionParameters);

                Math3d::SetVec(entry.world_point, entry.pose.translation);

                if (m_pObjectEntryFilter == 0 || m_pObjectEntryFilter->CheckEntry(entry))
                {
                    entryLeft.reserved = 1;
                    entryRight.reserved = 1;
                    m_objectList.push_back(entry);
                }
            }
        }
    }

    // run object classifiers
    for (i = 0; i < (int) m_objectClassifierList.size(); i++)
        m_objectClassifierList.at(i)->Classify(m_objectList);

    return (int) m_objectList.size();
}
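
// UpdateObjectFinderLists() removes all 2D entries that were not matched to a
// 3D object and copies the object type of each 3D match back to the
// corresponding 2D entries in both lists.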

void CObjectFinderStereo::UpdateObjectFinderLists(Object2DList &resultListLeft, Object2DList &resultListRight)
{
    int i;

    // delete unmatched entries in left 2D list
    for (i = 0; i < (int) resultListLeft.size(); i++)
        if (!resultListLeft.at(i).reserved)
        {
            resultListLeft.erase(resultListLeft.begin() + i);
            i--;
        }

    // delete unmatched entries in right 2D list
    for (i = 0; i < (int) resultListRight.size(); i++)
        if (!resultListRight.at(i).reserved)
        {
            resultListRight.erase(resultListRight.begin() + i);
            i--;
        }

    // assign types to 2D lists
    for (i = 0; i < (int) m_objectList.size(); i++)
    {
        int j;

        const Object3DEntry &object = m_objectList.at(i);

        for (j = 0; j < (int) resultListLeft.size(); j++)
        {
            Object2DEntry &entry = resultListLeft.at(j);

            if (entry.color == object.color && entry.region.centroid.x == object.region_left.centroid.x && entry.region.centroid.y == object.region_left.centroid.y && entry.region.nPixels == object.region_left.nPixels)
                entry.type = object.type;
        }

        for (j = 0; j < (int) resultListRight.size(); j++)
        {
            Object2DEntry &entry = resultListRight.at(j);

            if (entry.color == object.color && entry.region.centroid.x == object.region_right.centroid.x && entry.region.centroid.y == object.region_right.centroid.y && entry.region.nPixels == object.region_right.nPixels)
                entry.type = object.type;
        }
    }
}

void CObjectFinderStereo::ClearObjectList()
{
    m_objectList.clear();
    m_pObjectFinderLeft->m_objectList.clear();
    m_pObjectFinderRight->m_objectList.clear();
}

void CObjectFinderStereo::AddObject(const Object3DEntry &entry)
{
    m_objectList.push_back(entry);

    Object2DEntry entry2D;
    entry2D.type = entry.type;
    entry2D.color = entry.color;

    entry2D.id = entry.region_id_left;
    entry2D.region = entry.region_left;
    m_pObjectFinderLeft->AddObject(entry2D);

    entry2D.id = entry.region_id_right;
    entry2D.region = entry.region_right;
    m_pObjectFinderRight->AddObject(entry2D);
}


void CObjectFinderStereo::AddObjectClassifier(CObjectClassifierInterface *pObjectClassifier)
{
    m_objectClassifierList.push_back(pObjectClassifier);
}

void CObjectFinderStereo::RemoveObjectClassifier(CObjectClassifierInterface *pObjectClassifier)
{
    for (std::vector<CObjectClassifierInterface*>::iterator it = m_objectClassifierList.begin(); it != m_objectClassifierList.end(); it++)
    {
        if (*it == pObjectClassifier)
        {
            m_objectClassifierList.erase(it);
            break;
        }
    }
}

void CObjectFinderStereo::ClearObjectClassifiers()
{
    m_objectClassifierList.clear();
}