trajectory_optimizer.hpp
/***************************************************************************
 * Software License Agreement (BSD License)
 * Copyright (C) 2017 by Horatiu George Todoran <todorangrg@gmail.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of the copyright holder nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
 * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 ***************************************************************************/

#ifndef TRAJECTORY_OPTIMIZER_HPP
#define TRAJECTORY_OPTIMIZER_HPP

#include <tuw_control/utils.h>
#include <eigen3/Eigen/Eigen>

#include <cmath>    // std::fabs
#include <float.h>  // FLT_MIN

namespace tuw
{
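// Convenience aliases that instantiate a given lattice type with different bundles of cost
// functionals: J alone, G together with K and JG, H alone, and all of them combined (JGH).
// TrajectoryOptimizer below relies on exactly this ordering of the combined bundle.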
template <template <class, class, class...> class TLatticeType, template <class> class TCostsJ,
          template <class> class TCostsJG, template <class> class TCostsG, template <class> class TCostsH,
          template <class> class TCostsK>
struct LatticeCostsFuncs  // placeholder name; the original identifier is missing from the listing
{
public:
  template <class TNumType, class TMapDataType>
  using FuncJ = TLatticeType<TNumType, TMapDataType, TCostsJ<TNumType> >;

public:
  template <class TNumType, class TMapDataType>
  using FuncG = TLatticeType<TNumType, TMapDataType, TCostsG<TNumType>, TCostsK<TNumType>, TCostsJG<TNumType> >;

public:
  template <class TNumType, class TMapDataType>
  using FuncH = TLatticeType<TNumType, TMapDataType, TCostsH<TNumType> >;

public:
  template <class TNumType, class TMapDataType>
  using FuncJGH = TLatticeType<TNumType, TMapDataType, TCostsJ<TNumType>, TCostsG<TNumType>, TCostsH<TNumType>,
                               TCostsK<TNumType>, TCostsJG<TNumType> >;
};

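// Element-wise comparison of two optimization-variable vectors; used below to decide whether a
// query point matches a previously cached evaluation point.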
template <class TNumType>
inline static bool isSame(const Eigen::Matrix<TNumType, -1, 1>& _optVar0,
                          const Eigen::Matrix<TNumType, -1, 1>& _optVar1)
{
  const int optVar0Size = _optVar0.size();
  if (optVar0Size != _optVar1.size())
  {
    return false;
  }
  // Note: 2 * FLT_MIN is near the smallest normalized float, so this tolerance makes the
  // comparison effectively exact up to denormal noise.
  static constexpr const double eps = 2 * FLT_MIN;
  for (int i = 0; i < optVar0Size; ++i)
  {
    if (std::fabs(_optVar0(i) - _optVar1(i)) > eps)
    {
      return false;
    }
  }
  return true;
}

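// Cache slots used by CostsEvaluatorCached: two alternating "most recent evaluation" slots
// (LAST1, LAST2) and one slot pinned to the start of an optimization iteration (ITER_START).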
enum class OptCacheType
{
  LAST1,
  LAST2,
  ITER_START,
  ENUM_SIZE
};

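// Wraps a trajectory simulator and memoizes cost (and cost-gradient) evaluations at up to
// ENUM_SIZE optimization points, so repeated solver queries at the same point skip the
// (expensive) trajectory re-simulation.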
template <class TNumType, class TTrajSim, class TMyParamType, template <class, class> class TOptVarMap>
class CostsEvaluatorCached : public TTrajSim
{
public:
  CostsEvaluatorCached() : TTrajSim(), idxCacheLast_(0), idxCacheGradLast_(0)
  {
    // Pre-allocate every cache slot: one evaluation point plus one costs / gradient buffer per
    // cost-functional type and per slot.
    for (size_t i = 0; i < asInt(OptCacheType::ENUM_SIZE); ++i)
    {
      xCacheConstr_[i] = std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >(new Eigen::Matrix<TNumType, -1, 1>);
      xCacheGradConstr_[i] = std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >(new Eigen::Matrix<TNumType, -1, 1>);
      for (size_t j = 0; j < TTrajSim::CostFuncsTypesNr; ++j)
      {
        constCache_[i][j] = std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >(new Eigen::Matrix<TNumType, -1, 1>);
        gradConstrCache_[i][j] =
            std::shared_ptr<Eigen::Matrix<TNumType, -1, -1> >(new Eigen::Matrix<TNumType, -1, -1>);
      }
    }
  }

public:
  auto& costs()
  {
    return this->costs_;
  }

public:
  auto& gradCosts()
  {
    return this->gradCostsMap_;
  }

public:
  // Returns true on a cache hit (no re-simulation), false when the costs had to be recomputed.
  bool evaluateCosts(const Eigen::Matrix<TNumType, -1, 1>& _x, const OptCacheType& _cacheType = OptCacheType::LAST1)
  {
    // cache lookup: reuse any slot whose evaluation point matches _x
    for (size_t i = 0; i < asInt(OptCacheType::ENUM_SIZE); ++i)
    {
      if (isSame(_x, *xCacheConstr_[i]))
      {
        idxEvalFunc_ = i;
        if (i < 2)
        {
          idxCacheLast_ = i;  // remember which of the two LAST slots was used last
        }
        return true;
      }
    }

    // cache miss: simulate the trajectory and store the result
    size_t idxCache = asInt(_cacheType);
    if (_cacheType != OptCacheType::ITER_START)
    {
      idxCache = idxCacheLast_ = !idxCacheLast_;  // alternate between the two LAST slots
    }
    setOptVar(_x);
    this->simulateTrajectory();
    *xCacheConstr_[idxCache] = _x;
    for (size_t j = 0; j < TTrajSim::CostFuncsTypesNr; ++j)
    {
      *constCache_[idxCache][j] = this->costs_.sub(j).data();
    }
    idxEvalFunc_ = idxCache;
    return false;
  }

public:
  // Same contract as evaluateCosts, but also simulates and caches the cost gradients.
  bool evaluateCostsWithGrad(const Eigen::Matrix<TNumType, -1, 1>& _x,
                             const OptCacheType& _cacheType = OptCacheType::LAST1)
  {
    // cache lookup on the gradient evaluation points
    for (size_t i = 0; i < asInt(OptCacheType::ENUM_SIZE); ++i)
    {
      if (isSame(_x, *xCacheGradConstr_[i]))
      {
        idxEvalFunc_ = i;
        idxEvalGrad_ = i;
        if (i < 2)
        {
          idxCacheGradLast_ = i;
        }
        return true;
      }
    }
    // cache miss: simulate with gradients and store both costs and gradients
    size_t idxCache = asInt(_cacheType);
    size_t idxCacheGrad = asInt(_cacheType);
    if (_cacheType != OptCacheType::ITER_START)
    {
      idxCache = idxCacheLast_ = !idxCacheLast_;
      idxCacheGrad = idxCacheGradLast_ = !idxCacheGradLast_;
    }
    setOptVar(_x);
    this->simulateTrajectoryWithGrad();
    *xCacheConstr_[idxCache] = *xCacheGradConstr_[idxCacheGrad] = _x;
    for (size_t j = 0; j < TTrajSim::CostFuncsTypesNr; ++j)
    {
      *constCache_[idxCache][j] = this->costs_.sub(j).data();
      *gradConstrCache_[idxCacheGrad][j] = *this->gradCostsMap_[j];
    }
    idxEvalFunc_ = idxCache;
    idxEvalGrad_ = idxCacheGrad;
    return false;
  }

public:
  const Eigen::Matrix<TNumType, -1, 1>& cachedCosts(const size_t& _i) const
  {
    return *constCache_[idxEvalFunc_][_i];
  }

public:
  const Eigen::Matrix<TNumType, -1, -1>& cachedGradCosts(const size_t& _i) const
  {
    return *gradConstrCache_[idxEvalGrad_][_i];
  }

public:
  void getOptVar(Eigen::Matrix<TNumType, -1, 1>& _optVarExt)
  {
    TOptVarMap<TNumType, TMyParamType>::getOptVar(_optVarExt, *this->stateSim()->paramStruct.get());
  }

private:
  void setOptVar(const Eigen::Matrix<TNumType, -1, 1>& _optVarExt)
  {
    TOptVarMap<TNumType, TMyParamType>::setOptVar(*this->stateSim()->paramStruct.get(), _optVarExt);
  }

private:
  std::array<std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >, asInt(OptCacheType::ENUM_SIZE)> xCacheConstr_;

private:
  std::array<std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >, asInt(OptCacheType::ENUM_SIZE)> xCacheGradConstr_;

private:
  std::array<std::array<std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >, TTrajSim::CostFuncsTypesNr>,
             asInt(OptCacheType::ENUM_SIZE)> constCache_;

private:
  std::array<std::array<std::shared_ptr<Eigen::Matrix<TNumType, -1, -1> >, TTrajSim::CostFuncsTypesNr>,
             asInt(OptCacheType::ENUM_SIZE)> gradConstrCache_;

private:
  size_t idxCacheLast_;

private:
  size_t idxCacheGradLast_;

private:
  size_t idxEvalFunc_;

private:
  size_t idxEvalGrad_;

  template <class TNumType2, class TTrajSimJGH2, class TTrajSimJ2, class TTrajSimG2, class TTrajSimH2,
            class TMyParamType2, template <class, class> class TOptVarMap2>
  friend class TrajectoryOptimizer;
};

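// Bundles one combined evaluator (trajSimJGH) with per-functional evaluators (trajSimJ, trajSimG,
// trajSimH) that share its ITER_START caches and a common parameter struct.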
template <class TNumType, class TTrajSimJGH, class TTrajSimJ, class TTrajSimG, class TTrajSimH, class TMyParamType,
          template <class, class> class TOptVarMap>
struct TrajectoryOptimizer
{
  TrajectoryOptimizer()
  {
    // The J, G and H evaluators alias the ITER_START cache slots of the combined JGH evaluator,
    // so one full evaluation at the start of an iteration serves all of them.
    static constexpr const size_t idxIterStart = asInt(OptCacheType::ITER_START);
    trajSimJ.xCacheConstr_[idxIterStart] = trajSimJGH.xCacheConstr_[idxIterStart];
    trajSimG.xCacheConstr_[idxIterStart] = trajSimJGH.xCacheConstr_[idxIterStart];
    trajSimH.xCacheConstr_[idxIterStart] = trajSimJGH.xCacheConstr_[idxIterStart];

    trajSimJ.xCacheGradConstr_[idxIterStart] = trajSimJGH.xCacheGradConstr_[idxIterStart];
    trajSimG.xCacheGradConstr_[idxIterStart] = trajSimJGH.xCacheGradConstr_[idxIterStart];
    trajSimH.xCacheGradConstr_[idxIterStart] = trajSimJGH.xCacheGradConstr_[idxIterStart];

    // The combined lattice orders its cost functionals as [J, G, H, K, JG]; remap them onto the
    // per-functional evaluators: J -> {J}, G -> {G, K, JG}, H -> {H}.
    trajSimJ.constCache_[idxIterStart][0] = trajSimJGH.constCache_[idxIterStart][0];
    trajSimG.constCache_[idxIterStart][0] = trajSimJGH.constCache_[idxIterStart][1];
    trajSimG.constCache_[idxIterStart][1] = trajSimJGH.constCache_[idxIterStart][3];  // K
    trajSimG.constCache_[idxIterStart][2] = trajSimJGH.constCache_[idxIterStart][4];  // JG
    trajSimH.constCache_[idxIterStart][0] = trajSimJGH.constCache_[idxIterStart][2];

    trajSimJ.gradConstrCache_[idxIterStart][0] = trajSimJGH.gradConstrCache_[idxIterStart][0];
    trajSimG.gradConstrCache_[idxIterStart][0] = trajSimJGH.gradConstrCache_[idxIterStart][1];
    trajSimG.gradConstrCache_[idxIterStart][1] = trajSimJGH.gradConstrCache_[idxIterStart][3];  // K
    trajSimG.gradConstrCache_[idxIterStart][2] = trajSimJGH.gradConstrCache_[idxIterStart][4];  // JG
    trajSimH.gradConstrCache_[idxIterStart][0] = trajSimJGH.gradConstrCache_[idxIterStart][2];
  }
  CostsEvaluatorCached<TNumType, TTrajSimJGH, TMyParamType, TOptVarMap> trajSimJGH;
  CostsEvaluatorCached<TNumType, TTrajSimJ, TMyParamType, TOptVarMap> trajSimJ;
  CostsEvaluatorCached<TNumType, TTrajSimG, TMyParamType, TOptVarMap> trajSimG;
  CostsEvaluatorCached<TNumType, TTrajSimH, TMyParamType, TOptVarMap> trajSimH;

  // Point all four evaluators (and this struct) at one shared parameter struct.
  void initParamStruct(std::shared_ptr<TMyParamType>& _paramStructPtr)
  {
    trajSimJGH.stateSim()->paramStruct = _paramStructPtr;
    trajSimJ.stateSim()->paramStruct = _paramStructPtr;
    trajSimG.stateSim()->paramStruct = _paramStructPtr;
    trajSimH.stateSim()->paramStruct = _paramStructPtr;
    paramStructPtr = _paramStructPtr;
  }
  std::shared_ptr<TMyParamType> paramStructPtr;
};
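
// Minimal usage sketch (hypothetical type names: SimJGH, SimJ, SimG, SimH, Params and OptVarMap
// stand in for concrete instantiations from the surrounding tuw_control framework):
//
//   TrajectoryOptimizer<double, SimJGH, SimJ, SimG, SimH, Params, OptVarMap> opt;
//   auto params = std::make_shared<Params>();
//   opt.initParamStruct(params);  // all four evaluators now share one parameter struct
//
//   Eigen::Matrix<double, -1, 1> x = ...;  // stacked optimization variables
//   opt.trajSimJGH.evaluateCostsWithGrad(x, OptCacheType::ITER_START);  // one full simulation
//   opt.trajSimJ.evaluateCosts(x);  // returns true: served from the shared ITER_START cache
//   const auto& costsJ = opt.trajSimJ.cachedCosts(0);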
}  // namespace tuw

#endif  // TRAJECTORY_OPTIMIZER_HPP