test_blockmem_gridmap.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2017, the neonavigation authors
3  * All rights reserved.
4  *
5  * Redistribution and use in source and binary forms, with or without
6  * modification, are permitted provided that the following conditions are met:
7  *
8  * * Redistributions of source code must retain the above copyright
9  * notice, this list of conditions and the following disclaimer.
10  * * Redistributions in binary form must reproduce the above copyright
11  * notice, this list of conditions and the following disclaimer in the
12  * documentation and/or other materials provided with the distribution.
13  * * Neither the name of the copyright holder nor the names of its
14  * contributors may be used to endorse or promote products derived from
15  * this software without specific prior written permission.
16  *
17  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
18  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20  * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
21  * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
24  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
25  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
26  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
27  * POSSIBILITY OF SUCH DAMAGE.
28  */
29 
30 #ifdef NDEBUG
31 // Enable assertion to check out-of-bound access
32 #undef NDEBUG
33 #endif
34 
35 #include <cstddef>
36 #include <limits>
37 
38 #include <gtest/gtest.h>
39 
41 
42 namespace planner_cspace
43 {
// Test-only subclass that exposes the protected/internal block_bit_
// constant of BlockMemGridmap so the BlockWidth test can inspect it.
// Template parameter BLOCK_WIDTH is forwarded to the base map.
44 template <int BLOCK_WIDTH>
45 class BlockMemGridmapHelper : public BlockMemGridmap<int, 1, 1, BLOCK_WIDTH, false>
46 {
47 public:
// Returns block_bit_ — per the BlockWidth assertions below this is
// log2(BLOCK_WIDTH) (e.g. 0x10 -> 4, 0x100 -> 8).
48  size_t getBlockBit() const
49  {
50  return this->block_bit_;
51  }
52 };
// Checks that block_bit_ equals log2 of the BLOCK_WIDTH template
// parameter for several power-of-16 widths.
53 TEST(BlockmemGridmap, BlockWidth)
54 {
55  ASSERT_EQ(4u, BlockMemGridmapHelper<0x10>().getBlockBit());
56  ASSERT_EQ(8u, BlockMemGridmapHelper<0x100>().getBlockBit());
57  ASSERT_EQ(12u, BlockMemGridmapHelper<0x1000>().getBlockBit());
58  ASSERT_EQ(16u, BlockMemGridmapHelper<0x10000>().getBlockBit());
59 }
60 
// Verifies reset() + clear(): after clear(v) every cell of an s*s*s grid
// reads back v. The outer loop runs the whole sequence twice to confirm
// the map can be reset and reused.
61 TEST(BlockmemGridmap, ResetClear)
62 {
// NOTE(review): this doc export dropped original line 63 here — most
// likely the map declaration (e.g. `BlockMemGridmap<float, 3, 3, ...> gm;`)
// — TODO confirm against the repository source.
64 
65  for (int i = 0; i < 2; i++)
66  {
67  for (int s = 4; s <= 6; s += 2)
68  {
// Resize to s*s*s and fill with 0.0.
69  gm.reset(CyclicVecInt<3, 3>(s, s, s));
70  gm.clear(0.0);
71 
// NOTE(review): original line 72 is missing — presumably a
// `CyclicVecInt<3, 3> i;` declaration shadowing the outer int i, since
// i[0]..i[2] are indexed below — TODO confirm.
73  for (i[0] = 0; i[0] < s; ++i[0])
74  {
75  for (i[1] = 0; i[1] < s; ++i[1])
76  {
77  for (i[2] = 0; i[2] < s; ++i[2])
78  {
// Every cell must hold the value passed to clear().
79  ASSERT_EQ(gm[i], 0.0);
80  }
81  }
82  }
83 
// Re-clear with a different value and verify again.
84  gm.clear(3.0);
85  for (i[0] = 0; i[0] < s; ++i[0])
86  {
87  for (i[1] = 0; i[1] < s; ++i[1])
88  {
89  for (i[2] = 0; i[2] < s; ++i[2])
90  {
91  ASSERT_EQ(gm[i], 3.0);
92  }
93  }
94  }
95  }
96  }
97 }
98 
// Verifies clear_partially() and the region-overwrite overload of
// copy_partially(): only cells inside [copy_min_pos, copy_max_pos]
// (inclusive on both ends, per the <= comparisons below) are modified;
// all other cells keep their original values.
99 TEST(BlockmemGridmap, ClearAndCopyPartially)
100 {
101  const CyclicVecInt<3, 3> base_size(17, 8, 3);
// NOTE(review): original line 102 was dropped by this doc export — the
// declaration of gm_base (a BlockMemGridmap) — TODO confirm.
103  gm_base.reset(base_size);
// Fill gm_base with a unique linear index per cell so any misplaced
// copy is detectable.
104  for (CyclicVecInt<3, 3> p(0, 0, 0); p[0] < base_size[0]; ++p[0])
105  {
106  for (p[1] = 0; p[1] < base_size[1]; ++p[1])
107  {
108  for (p[2] = 0; p[2] < base_size[2]; ++p[2])
109  {
110  gm_base[p] = p[0] * base_size[1] * base_size[2] + p[1] * base_size[2] + p[2];
111  }
112  }
113  }
114 
// NOTE(review): original line 115 (declaration of gm) is missing from
// this export — TODO confirm.
116  gm = gm_base;
117 
118  const CyclicVecInt<3, 3> copy_min_pos(3, 5, 0);
119  const CyclicVecInt<3, 3> copy_max_pos(6, 7, 2);
// Clear only the boxed region to -1.
120  gm.clear_partially(-1, copy_min_pos, copy_max_pos);
121 
122  for (CyclicVecInt<3, 3> p(0, 0, 0); p[0] < base_size[0]; ++p[0])
123  {
124  for (p[1] = 0; p[1] < base_size[1]; ++p[1])
125  {
126  for (p[2] = 0; p[2] < base_size[2]; ++p[2])
127  {
// Inside the box: cleared to -1. Outside: untouched.
128  if ((copy_min_pos[0] <= p[0]) && (p[0] <= copy_max_pos[0]) &&
129  (copy_min_pos[1] <= p[1]) && (p[1] <= copy_max_pos[1]) &&
130  (copy_min_pos[2] <= p[2]) && (p[2] <= copy_max_pos[2]))
131  {
132  EXPECT_EQ(gm[p], -1) << p[0] << "," << p[1] << "," << p[2];
133  }
134  else
135  {
136  EXPECT_EQ(gm[p], gm_base[p]) << p[0] << "," << p[1] << "," << p[2];
137  }
138  }
139  }
140  }
141 
// NOTE(review): original line 142 (declaration of gm_update) is missing
// from this export — TODO confirm.
143  gm_update.reset(base_size);
// Fill gm_update with a different pattern (last term negated) so copied
// cells are distinguishable from the base pattern.
144  for (CyclicVecInt<3, 3> p(0, 0, 0); p[0] < base_size[0]; ++p[0])
145  {
146  for (p[1] = 0; p[1] < base_size[1]; ++p[1])
147  {
148  for (p[2] = 0; p[2] < base_size[2]; ++p[2])
149  {
150  gm_update[p] = p[0] * base_size[1] * base_size[2] + p[1] * base_size[2] + p[2] * -1;
151  }
152  }
153  }
154 
// Restore gm, then overwrite only the boxed region from gm_update.
155  gm = gm_base;
156  gm.copy_partially(gm_update, copy_min_pos, copy_max_pos);
157 
158  for (CyclicVecInt<3, 3> p(0, 0, 0); p[0] < base_size[0]; ++p[0])
159  {
160  for (p[1] = 0; p[1] < base_size[1]; ++p[1])
161  {
162  for (p[2] = 0; p[2] < base_size[2]; ++p[2])
163  {
164  if ((copy_min_pos[0] <= p[0]) && (p[0] <= copy_max_pos[0]) &&
165  (copy_min_pos[1] <= p[1]) && (p[1] <= copy_max_pos[1]) &&
166  (copy_min_pos[2] <= p[2]) && (p[2] <= copy_max_pos[2]))
167  {
168  ASSERT_EQ(gm[p], gm_update[p]);
169  }
170  else
171  {
172  ASSERT_EQ(gm[p], gm_base[p]);
173  }
174  }
175  }
176  }
177 }
178 
// Verifies the offset overload of copy_partially(): the source box
// [src_min, src_max] is copied into the destination starting at dst_min;
// destination cells outside [dst_min, dst_max] keep the clear() value.
// The source region maps to destination cell p via p - dst_min + src_min.
179 TEST(BlockmemGridmap, CopyPartiallyWithOffset)
180 {
181  const CyclicVecInt<3, 3> src_size(17, 8, 3);
// NOTE(review): original line 182 (declaration of gm_src) was dropped by
// this doc export — TODO confirm.
183  gm_src.reset(src_size);
// Unique linear index per source cell, so the offset mapping below can
// be verified exactly.
184  for (CyclicVecInt<3, 3> p(0, 0, 0); p[0] < src_size[0]; ++p[0])
185  {
186  for (p[1] = 0; p[1] < src_size[1]; ++p[1])
187  {
188  for (p[2] = 0; p[2] < src_size[2]; ++p[2])
189  {
190  gm_src[p] = p[0] * src_size[1] * src_size[2] + p[1] * src_size[2] + p[2];
191  }
192  }
193  }
194 
195  const CyclicVecInt<3, 3> dst_size(6, 5, 3);
// NOTE(review): original line 196 (declaration of gm_dst) is missing
// from this export — TODO confirm.
197  gm_dst.reset(dst_size);
198 
// Source box is 4x3x3; destination box is the same extent, shifted.
199  const CyclicVecInt<3, 3> src_min(5, 3, 0);
200  const CyclicVecInt<3, 3> src_max(8, 5, 2);
201  const CyclicVecInt<3, 3> dst_min(1, 0, 0);
202  const CyclicVecInt<3, 3> dst_max(4, 2, 2);
203 
// Sentinel value -1 marks cells the copy must not touch.
204  gm_dst.clear(-1);
205  gm_dst.copy_partially(dst_min, gm_src, src_min, src_max);
206 
207  for (CyclicVecInt<3, 3> p(0, 0, 0); p[0] < dst_size[0]; ++p[0])
208  {
209  for (p[1] = 0; p[1] < dst_size[1]; ++p[1])
210  {
211  for (p[2] = 0; p[2] < dst_size[2]; ++p[2])
212  {
213  if ((dst_min[0] <= p[0]) && (p[0] <= dst_max[0]) &&
214  (dst_min[1] <= p[1]) && (p[1] <= dst_max[1]) &&
215  (dst_min[2] <= p[2]) && (p[2] <= dst_max[2]))
216  {
// Copied cell: must equal the corresponding source cell.
217  ASSERT_EQ(gm_dst[p], gm_src[p - dst_min + src_min]);
218  }
219  else
220  {
// Untouched cell: still holds the sentinel.
221  ASSERT_EQ(gm_dst[p], -1);
222  }
223  }
224  }
225  }
226 }
227 
// Round-trip test: write a unique value (i2*100 + i1*10 + i0) to every
// cell of a 4x4x4 grid, then read each cell back and compare.
228 TEST(BlockmemGridmap, WriteRead)
229 {
// NOTE(review): original line 230 here — presumably the declaration of
// gm (a BlockMemGridmap) — was dropped by this doc export; TODO confirm.
231 
232  const int s = 4;
233  gm.reset(CyclicVecInt<3, 3>(s, s, s));
234  gm.clear(0.0);
235 
// NOTE(review): original line 236 (presumably `CyclicVecInt<3, 3> i;`)
// is missing — i[0]..i[2] are indexed below; TODO confirm.
237  for (i[0] = 0; i[0] < s; ++i[0])
238  {
239  for (i[1] = 0; i[1] < s; ++i[1])
240  {
241  for (i[2] = 0; i[2] < s; ++i[2])
242  {
243  gm[i] = i[2] * 100 + i[1] * 10 + i[0];
244  }
245  }
246  }
247 
248  for (i[0] = 0; i[0] < s; ++i[0])
249  {
250  for (i[1] = 0; i[1] < s; ++i[1])
251  {
252  for (i[2] = 0; i[2] < s; ++i[2])
253  {
// Each cell must return exactly the value written to it.
254  ASSERT_EQ(gm[i], i[2] * 100 + i[1] * 10 + i[0]);
255  }
256  }
257  }
258 }
259 
// Boundary test: validate() must return true exactly for indices inside
// [0, s) on every axis, and false for anything outside. Note the
// CyclicVecInt<3, 2> below — the third axis is cyclic here, unlike the
// <3, 3> maps in the other tests.
260 TEST(BlockmemGridmap, OuterBoundary)
261 {
// NOTE(review): original line 262 (declaration of gm, a BlockMemGridmap)
// was dropped by this doc export — TODO confirm.
263 
264  const int s = 0x30;
265  gm.reset(CyclicVecInt<3, 2>(s, s, s));
266  gm.clear(1.0);
267 
// NOTE(review): original line 268 (presumably `CyclicVecInt<3, 2> i;`)
// is missing — TODO confirm.
// Sweep an extra 0x10 cells beyond the map on every side.
269  const int outer = 0x10;
270  for (i[0] = -outer; i[0] < s + outer; ++i[0])
271  {
272  for (i[1] = -outer; i[1] < s + outer; ++i[1])
273  {
274  for (i[2] = -outer; i[2] < s + outer; ++i[2])
275  {
276  if (i[0] >= 0 && i[1] >= 0 && i[2] >= 0 &&
277  i[0] < s && i[1] < s && i[2] < s)
278  {
279  ASSERT_TRUE(gm.validate(i));
280  }
281  else
282  {
283  ASSERT_FALSE(gm.validate(i));
284  }
285  // Confirm at least not dead
// Deliberate out-of-range write: the test only requires that it does
// not crash — presumably tolerated by the block-aligned allocation;
// verify against blockmem_gridmap.h before relying on it.
286  gm[i] = 1.0;
287  }
288  }
289  }
290 }
291 } // namespace planner_cspace
292 
293 int main(int argc, char** argv)
294 {
295  testing::InitGoogleTest(&argc, argv);
296 
297  return RUN_ALL_TESTS();
298 }
planner_cspace
Definition: bbf.h:33
s
local grid-size variable used in the tests (the generated cross-reference to `XmlRpcServer s` is spurious)
planner_cspace::BlockMemGridmapHelper
Definition: test_blockmem_gridmap.cpp:45
planner_cspace::BlockMemGridmap::validate
bool validate(const CyclicVecInt< DIM, NONCYCLIC > &pos, const int tolerance=0) const override
Definition: blockmem_gridmap.h:267
planner_cspace::BlockMemGridmap
Definition: blockmem_gridmap.h:67
planner_cspace::BlockMemGridmapHelper::getBlockBit
size_t getBlockBit() const
Definition: test_blockmem_gridmap.cpp:48
planner_cspace::BlockMemGridmap::copy_partially
void copy_partially(const BlockMemGridmapBase< T, DIM, NONCYCLIC > &base, const CyclicVecInt< DIM, NONCYCLIC > &min, const CyclicVecInt< DIM, NONCYCLIC > &max) override
Definition: blockmem_gridmap.h:152
planner_cspace::TEST
TEST(BlockmemGridmap, BlockWidth)
Definition: test_blockmem_gridmap.cpp:53
main
int main(int argc, char **argv)
Definition: test_blockmem_gridmap.cpp:293
planner_cspace::BlockMemGridmap::reset
void reset(const CyclicVecInt< DIM, NONCYCLIC > &size) override
Definition: blockmem_gridmap.h:195
planner_cspace::BlockMemGridmap< int, 1, 1, BLOCK_WIDTH, false >::block_bit_
constexpr static size_t block_bit_
Definition: blockmem_gridmap.h:83
planner_cspace::BlockMemGridmap::clear
void clear(const T zero) override
Definition: blockmem_gridmap.h:129
planner_cspace::BlockMemGridmap::clear_partially
void clear_partially(const T zero, const CyclicVecInt< DIM, NONCYCLIC > &min, const CyclicVecInt< DIM, NONCYCLIC > &max) override
Definition: blockmem_gridmap.h:136
blockmem_gridmap.h
planner_cspace::CyclicVecBase
Definition: cyclic_vec.h:78


planner_cspace
Author(s): Atsushi Watanabe
autogenerated on Fri May 16 2025 02:15:23