33 #ifndef TRAJECTORY_OPTIMIZER_HPP 34 #define TRAJECTORY_OPTIMIZER_HPP 37 #include <eigen3/Eigen/Eigen>
#include <cfloat>
#include <cmath>
43 template <
// Template parameter list of the enclosing trajectory-optimizer scope (the
// declaration this list belongs to is not visible in this extract).
// TLatticeType: lattice/trajectory-simulator class template taking a numeric
// type, a map-data type and a variadic pack of cost-functor types.
// TCostsJ/TCostsJG/TCostsG/TCostsH/TCostsK: cost-functor class templates, each
// parameterized on the numeric type. Presumably J = objective and G/H =
// constraint sets, with K/JG auxiliary terms — TODO confirm against the full
// header.
template <
class,
class,
class...>
class TLatticeType,
template <
class>
class TCostsJ,
44 template <
class>
class TCostsJG,
template <
class>
class TCostsG,
template <
class>
class TCostsH,
45 template <
class>
class TCostsK>
// Lattice instantiated with only the J cost functor.
49 template <
class TNumType,
class TMapDataType>
50 using FuncJ = TLatticeType<TNumType, TMapDataType, TCostsJ<TNumType> >;
// Lattice instantiated with the G, K and JG cost functors.
53 template <
class TNumType,
class TMapDataType>
54 using FuncG = TLatticeType<TNumType, TMapDataType, TCostsG<TNumType>, TCostsK<TNumType>, TCostsJG<TNumType> >;
// Lattice instantiated with only the H cost functor.
57 template <
class TNumType,
class TMapDataType>
58 using FuncH = TLatticeType<TNumType, TMapDataType, TCostsH<TNumType> >;
// Lattice instantiated with all five cost functors. The ordering <J, G, H, K,
// JG> fixes the cost-array indices 0..4 used when cache storage is shared
// between the combined evaluator and the individual J/G/H evaluators.
61 template <
class TNumType,
class TMapDataType>
62 using FuncJGH = TLatticeType<TNumType, TMapDataType, TCostsJ<TNumType>, TCostsG<TNumType>, TCostsH<TNumType>,
63 TCostsK<TNumType>, TCostsJG<TNumType> >;
66 template <
class TNumType>
67 inline static bool isSame(
const Eigen::Matrix<TNumType, -1, 1>& _optVar0,
68 const Eigen::Matrix<TNumType, -1, 1>& _optVar1)
70 const int optVar0Size = _optVar0.size();
71 if (optVar0Size != _optVar1.size())
75 static constexpr
const double eps = 2 * FLT_MIN;
76 for (
int i = 0; i < optVar0Size; ++i)
78 if (fabs(_optVar0(i) - _optVar1(i)) > eps)
// Template parameter list of CostsEvaluatorCached (the class-head line is not
// visible in this extract — see the doxygen index below): numeric type, the
// trajectory-simulator base providing CostFuncsTypesNr, the parameter-struct
// type, and the optimization-variable <-> parameter-struct mapper template.
97 template <
class TNumType,
class TTrajSim,
class TMyParamType,
template <
class,
class>
class TOptVarMap>
105 xCacheConstr_[i] = std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >(
new Eigen::Matrix<TNumType, -1, 1>);
106 xCacheGradConstr_[i] = std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >(
new Eigen::Matrix<TNumType, -1, 1>);
107 for (
size_t j = 0; j < TTrajSim::CostFuncsTypesNr; ++j)
109 constCache_[i][j] = std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >(
new Eigen::Matrix<TNumType, -1, 1>);
110 gradConstrCache_[i][j] = std::shared_ptr<Eigen::Matrix<TNumType, -1, -1> >(
new Eigen::Matrix<TNumType, -1, -1>);
// Fragment of evaluateCosts(_x, _cacheType) — surrounding lines were dropped
// by the extraction. Flow: scan the cache slots; on a hit (a cached x equals
// _x) the cached costs are reused, otherwise one slot is recomputed below.
132 if (
isSame(_x, *xCacheConstr_[i]))
// Cache miss: take the slot index encoded by _cacheType ...
144 size_t idxCache =
asInt(_cacheType);
// ... or, presumably for the default LAST1 policy, alternate between slots 0
// and 1 — TODO confirm the OptCacheType semantics against the full header.
147 idxCache = idxCacheLast_ = !idxCacheLast_;
// Store the evaluated point and the per-cost-type cost vectors in that slot.
151 *xCacheConstr_[idxCache] = _x;
152 for (
size_t j = 0; j < TTrajSim::CostFuncsTypesNr; ++j)
154 *constCache_[idxCache][j] = this->
costs_.sub(j).data();
// Remember which slot cachedCosts() should read from.
156 idxEvalFunc_ = idxCache;
// Fragment of evaluateCostsWithGrad(_x, _cacheType) — surrounding lines were
// dropped by the extraction. Same slot logic as evaluateCosts, but the
// gradient caches are filled as well.
166 if (
isSame(_x, *xCacheGradConstr_[i]))
// Gradient-cache hit: record the hit slot.
172 idxCacheGradLast_ = i;
// Cache miss: take the value- and gradient-slot indices from _cacheType ...
178 size_t idxCache =
asInt(_cacheType);
179 size_t idxCacheGrad =
asInt(_cacheType);
// ... or alternate both slot indices (presumably the LAST1 policy — confirm).
182 idxCache = idxCacheLast_ = !idxCacheLast_;
183 idxCacheGrad = idxCacheGradLast_ = !idxCacheGradLast_;
// Store the evaluated point in both the value and gradient x-caches.
187 *xCacheConstr_[idxCache] = *xCacheGradConstr_[idxCacheGrad] = _x;
188 for (
size_t j = 0; j < TTrajSim::CostFuncsTypesNr; ++j)
// Per cost-function type j: cache the cost vector and its gradient matrix.
190 *constCache_[idxCache][j] = this->
costs_.sub(j).data();
191 *gradConstrCache_[idxCacheGrad][j] = *this->
gradCostsMap_[j];
// Remember which slots cachedCosts()/cachedGradCosts() should read from.
193 idxEvalFunc_ = idxCache;
194 idxEvalGrad_ = idxCacheGrad;
// Read-only accessors into the most recently written cache slots.
// cachedCosts(_i): cost vector of cost-function type _i from the slot written
// by the last evaluateCosts/evaluateCostsWithGrad call.
199 const Eigen::Matrix<TNumType, -1, 1>&
cachedCosts(
const size_t& _i)
const 201 return *constCache_[idxEvalFunc_][_i];
// Body of cachedGradCosts(_i) (its signature was dropped by the extraction;
// per the doxygen index it returns const Eigen::Matrix<TNumType,-1,-1>&):
// gradient matrix of cost type _i from the last gradient evaluation.
207 return *gradConstrCache_[idxEvalGrad_][_i];
211 void getOptVar(Eigen::Matrix<TNumType, -1, 1>& _optVarExt)
213 TOptVarMap<TNumType, TMyParamType>::getOptVar(_optVarExt, *this->stateSim()->paramStruct.get());
217 void setOptVar(
const Eigen::Matrix<TNumType, -1, 1>& _optVarExt)
219 TOptVarMap<TNumType, TMyParamType>::setOptVar(*this->stateSim()->paramStruct.get(), _optVarExt);
// Member declarations (their tails — ", asInt(OptCacheType::ENUM_SIZE)> name_;"
// per the doxygen index — were dropped by the extraction).
// Per-slot, per-cost-type cached cost vectors.
229 std::array<std::array<std::shared_ptr<Eigen::Matrix<TNumType, -1, 1> >, TTrajSim::CostFuncsTypesNr>,
// Per-slot, per-cost-type cached gradient matrices (same layout).
233 std::array<std::array<std::shared_ptr<Eigen::Matrix<TNumType, -1, -1> >, TTrajSim::CostFuncsTypesNr>,
// Two template parameter lists whose owning declarations are not visible in
// this extract — presumably a friend declaration and the definition of the
// class aggregating the JGH/J/G/H cached evaluators (see the doxygen index
// entries trajSimJGH/trajSimJ/trajSimG/trajSimH) — TODO confirm.
248 template <
class TNumType2,
class TTrajSimJGH2,
class TTrajSimJ2,
class TTrajSimG2,
class TTrajSimH2,
249 class TMyParamType2,
template <
class,
class>
class TOptVarMap2>
253 template <
class TNumType,
class TTrajSimJGH,
class TTrajSimJ,
class TTrajSimG,
class TTrajSimH,
class TMyParamType,
254 template <
class,
class>
class TOptVarMap>
// Fragment: wire the individual J/G/H evaluators to share cache storage with
// the combined JGH evaluator, so evaluating JGH once also fills the caches
// the individual evaluators read from.
// All four evaluators alias the same x-caches for slot idxIterStart.
260 trajSimJ.xCacheConstr_[idxIterStart] = trajSimJGH.xCacheConstr_[idxIterStart];
261 trajSimG.xCacheConstr_[idxIterStart] = trajSimJGH.xCacheConstr_[idxIterStart];
262 trajSimH.xCacheConstr_[idxIterStart] = trajSimJGH.xCacheConstr_[idxIterStart];
264 trajSimJ.xCacheGradConstr_[idxIterStart] = trajSimJGH.xCacheGradConstr_[idxIterStart];
265 trajSimG.xCacheGradConstr_[idxIterStart] = trajSimJGH.xCacheGradConstr_[idxIterStart];
266 trajSimH.xCacheGradConstr_[idxIterStart] = trajSimJGH.xCacheGradConstr_[idxIterStart];
// Index map follows FuncJGH's cost ordering <J, G, H, K, JG> = indices 0..4:
// J reads JGH[0]; G reads JGH[1] (G), JGH[3] (K), JGH[4] (JG); H reads JGH[2].
268 trajSimJ.constCache_[idxIterStart][0] = trajSimJGH.constCache_[idxIterStart][0];
269 trajSimG.constCache_[idxIterStart][0] = trajSimJGH.constCache_[idxIterStart][1];
270 trajSimG.constCache_[idxIterStart][1] = trajSimJGH.constCache_[idxIterStart][3];
271 trajSimG.constCache_[idxIterStart][2] = trajSimJGH.constCache_[idxIterStart][4];
272 trajSimH.constCache_[idxIterStart][0] = trajSimJGH.constCache_[idxIterStart][2];
// Same index mapping for the gradient caches.
274 trajSimJ.gradConstrCache_[idxIterStart][0] = trajSimJGH.gradConstrCache_[idxIterStart][0];
275 trajSimG.gradConstrCache_[idxIterStart][0] = trajSimJGH.gradConstrCache_[idxIterStart][1];
276 trajSimG.gradConstrCache_[idxIterStart][1] = trajSimJGH.gradConstrCache_[idxIterStart][3];
277 trajSimG.gradConstrCache_[idxIterStart][2] = trajSimJGH.gradConstrCache_[idxIterStart][4];
278 trajSimH.gradConstrCache_[idxIterStart][0] = trajSimJGH.gradConstrCache_[idxIterStart][2];
// Fragment of initParamStruct(_paramStructPtr): hand the shared parameter
// struct to every simulator and keep a local owning copy.
286 trajSimJGH.stateSim()->paramStruct = _paramStructPtr;
287 trajSimJ.stateSim()->paramStruct = _paramStructPtr;
288 trajSimG.stateSim()->paramStruct = _paramStructPtr;
289 trajSimH.stateSim()->paramStruct = _paramStructPtr;
290 paramStructPtr = _paramStructPtr;
296 #endif // TRAJECTORY_OPTIMIZER_HPP
CostsEvaluatorCached< TNumType, TTrajSimJ, TMyParamType, TOptVarMap > trajSimJ
bool evaluateCosts(const Eigen::Matrix< TNumType,-1, 1 > &_x, const OptCacheType &_cacheType=OptCacheType::LAST1)
void initParamStruct(std::shared_ptr< TMyParamType > &_paramStructPtr)
const Eigen::Matrix< TNumType,-1,-1 > & cachedGradCosts(const size_t &_i) const
std::array< std::shared_ptr< Eigen::Map< Eigen::Matrix< TNumType,-1,-1, Eigen::RowMajor > > >, CostFuncsTypesNr > gradCostsMap_
void simulateTrajectoryWithGrad(const bool &_saveLatticeStates=false)
TLatticeType< TNumType, TMapDataType, TCostsH< TNumType > > FuncH
void setOptVar(const Eigen::Matrix< TNumType,-1, 1 > &_optVarExt)
constexpr auto asInt(Enumeration const value) -> typename std::underlying_type< Enumeration >::type
std::array< std::array< std::shared_ptr< Eigen::Matrix< TNumType,-1,-1 > >, TTrajSim::CostFuncsTypesNr >, asInt(OptCacheType::ENUM_SIZE)> gradConstrCache_
TLatticeType< TNumType, TMapDataType, TCostsJ< TNumType > > FuncJ
static bool isSame(const Eigen::Matrix< TNumType,-1, 1 > &_optVar0, const Eigen::Matrix< TNumType,-1, 1 > &_optVar1)
CostsEvaluatorCached< TNumType, TTrajSimG, TMyParamType, TOptVarMap > trajSimG
void simulateTrajectory(const bool &_saveLatticeStates=false)
std::array< std::shared_ptr< Eigen::Matrix< TNumType,-1, 1 > >, asInt(OptCacheType::ENUM_SIZE)> xCacheConstr_
CostsEvaluatorCached< TNumType, TTrajSimH, TMyParamType, TOptVarMap > trajSimH
std::shared_ptr< TMyParamType > paramStructPtr
TLatticeType< TNumType, TMapDataType, TCostsG< TNumType >, TCostsK< TNumType >, TCostsJG< TNumType > > FuncG
void getOptVar(Eigen::Matrix< TNumType,-1, 1 > &_optVarExt)
bool evaluateCostsWithGrad(const Eigen::Matrix< TNumType,-1, 1 > &_x, const OptCacheType &_cacheType=OptCacheType::LAST1)
StateMapArray< TNumType, StateMapVector< TNumType, TNumType >, CostFuncsTypesNr > costs_
const Eigen::Matrix< TNumType,-1, 1 > & cachedCosts(const size_t &_i) const
std::array< std::array< std::shared_ptr< Eigen::Matrix< TNumType,-1, 1 > >, TTrajSim::CostFuncsTypesNr >, asInt(OptCacheType::ENUM_SIZE)> constCache_
CostsEvaluatorCached< TNumType, TTrajSimJGH, TMyParamType, TOptVarMap > trajSimJGH
TLatticeType< TNumType, TMapDataType, TCostsJ< TNumType >, TCostsG< TNumType >, TCostsH< TNumType >, TCostsK< TNumType >, TCostsJG< TNumType > > FuncJGH
std::array< std::shared_ptr< Eigen::Matrix< TNumType,-1, 1 > >, asInt(OptCacheType::ENUM_SIZE)> xCacheGradConstr_