DynamicSparseMatrix.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
#define EIGEN_DYNAMIC_SPARSEMATRIX_H

namespace Eigen {

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = OuterRandomAccessPattern
  };
};
}

/** \deprecated use a SparseMatrix in an uncompressed mode
  *
  * \class DynamicSparseMatrix
  *
  * \brief A sparse matrix class designed for matrix assembly purpose
  *
  * \param _Scalar the scalar type, i.e. the type of the coefficients
  *
  * Unlike SparseMatrix, this class provides random read/write access to its coefficients.
  * Internally, the data are stored as a std::vector of compressed inner vectors.
  *
  * \see SparseMatrix
  */
template<typename _Scalar, int _Options, typename _Index>
class DynamicSparseMatrix
  : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)
    // FIXME: why are these operators already available ???
    // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, +=)
    // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, -=)
    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    using Base::operator=;
    enum {
      Options = _Options
    };

  protected:

    typedef DynamicSparseMatrix<Scalar,(Flags & ~RowMajorBit) | (IsRowMajor ? RowMajorBit : 0)> TransposedSparseMatrix;

    Index m_innerSize;
    std::vector<internal::CompressedStorage<Scalar,Index> > m_data;

  public:

    inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
    inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
    inline Index innerSize() const { return m_innerSize; }
    inline Index outerSize() const { return static_cast<Index>(m_data.size()); }
    inline Index innerNonZeros(Index j) const { return m_data[j].size(); }

    std::vector<internal::CompressedStorage<Scalar,Index> >& _data() { return m_data; }
    const std::vector<internal::CompressedStorage<Scalar,Index> >& _data() const { return m_data; }

    /** \returns the coefficient value at given position \a row, \a col.
      * This operation involves a binary search within the corresponding inner vector. */
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].at(inner);
    }

    /** \returns a reference to the coefficient at given position \a row, \a col.
      * If the coefficient does not exist yet, it is created by a sorted insertion
      * into the corresponding inner vector. */
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].atWithInsertion(inner);
    }

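    // A minimal usage sketch (not part of the original header; the include path below is
    // an assumption about how the unsupported SparseExtra module exposes this class).
    // It illustrates the difference between coeff() (read-only lookup) and coeffRef()
    // (lookup that inserts an explicit zero when the coefficient is absent):
    //
    // \code
    // #include <unsupported/Eigen/SparseExtra>
    //
    // Eigen::DynamicSparseMatrix<double> m(4,4);  // column-major by default
    // m.coeffRef(1,2) = 3.0;        // inserts coefficient (1,2) and assigns it
    // double a = m.coeff(1,2);      // 3.0, found by a binary search within column 2
    // double b = m.coeff(0,0);      // 0.0, nothing is inserted
    // m.coeffRef(0,0);              // inserts an explicit zero at (0,0) as a side effect
    // \endcode
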
    class InnerIterator;
    class ReverseInnerIterator;

    void setZero()
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].clear();
    }

    /** \returns the number of non zero coefficients */
    Index nonZeros() const
    {
      Index res = 0;
      for (Index j=0; j<outerSize(); ++j)
        res += static_cast<Index>(m_data[j].size());
      return res;
    }


    void reserve(Index reserveSize = 1000)
    {
      if (outerSize()>0)
      {
        Index reserveSizePerVector = (std::max)(reserveSize/outerSize(),Index(4));
        for (Index j=0; j<outerSize(); ++j)
        {
          m_data[j].reserve(reserveSizePerVector);
        }
      }
    }

    /** Does nothing: provided for compatibility with SparseMatrix */
    inline void startVec(Index /*outer*/) {}

    /** \returns a reference to the coefficient at position \a row, \a col assuming that
      * it is appended after the last (largest inner index) coefficient of the given inner vector.
      * \sa insert, insertBackByOuterInner */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** \sa insertBack */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
      eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
                    && "wrong sorted insertion");
      m_data[outer].append(0, inner);
      return m_data[outer].value(m_data[outer].size()-1);
    }

    inline Scalar& insert(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index startId = 0;
      Index id = static_cast<Index>(m_data[outer].size()) - 1;
      m_data[outer].resize(id+2,1);

      while ( (id >= startId) && (m_data[outer].index(id) > inner) )
      {
        m_data[outer].index(id+1) = m_data[outer].index(id);
        m_data[outer].value(id+1) = m_data[outer].value(id);
        --id;
      }
      m_data[outer].index(id+1) = inner;
      m_data[outer].value(id+1) = 0;
      return m_data[outer].value(id+1);
    }

    /** Does nothing: provided for compatibility with SparseMatrix */
    inline void finalize() {}

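    // A hedged assembly sketch (illustrative values only): insertBack() requires the
    // coefficients of each inner vector to be appended with strictly increasing inner
    // indices, while insert() accepts any position at the cost of a shifting pass.
    // startVec() and finalize() are no-ops here, kept for API compatibility with SparseMatrix.
    //
    // \code
    // Eigen::DynamicSparseMatrix<double> m(3,3);  // column-major by default
    // for (int j=0; j<3; ++j)
    // {
    //   m.startVec(j);                    // no-op, mirrors the SparseMatrix assembly API
    //   m.insertBack(j,j) = 1.0;          // rows appended in increasing order within column j
    //   if (j+1<3) m.insertBack(j+1,j) = -1.0;
    // }
    // m.finalize();                       // no-op as well
    // m.insert(0,2) = 5.0;                // out-of-order insertion is still possible
    // \endcode
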
    /** Suppresses all nonzeros which are much smaller than \a reference under the tolerance \a epsilon */
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].prune(reference,epsilon);
    }

    /** Resizes the matrix without preserving the data (the matrix is set to zero) */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      setZero();
      if (Index(m_data.size()) != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    void resizeAndKeepData(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      const Index innerSize = IsRowMajor ? cols : rows;
      if (m_innerSize>innerSize)
      {
        // remove all coefficients with innerCoord>=innerSize
        // TODO
        //std::cerr << "not implemented yet\n";
        exit(2);
      }
      if (m_data.size() != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    /** The class DynamicSparseMatrix is deprecated */
    EIGEN_DEPRECATED inline DynamicSparseMatrix()
      : m_innerSize(0), m_data(0)
    {
      eigen_assert(innerSize()==0 && outerSize()==0);
    }

    /** The class DynamicSparseMatrix is deprecated */
    EIGEN_DEPRECATED inline DynamicSparseMatrix(Index rows, Index cols)
      : m_innerSize(0)
    {
      resize(rows, cols);
    }

    /** The class DynamicSparseMatrix is deprecated */
    template<typename OtherDerived>
    EIGEN_DEPRECATED explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_innerSize(0)
    {
      Base::operator=(other.derived());
    }

    inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
      : Base(), m_innerSize(0)
    {
      *this = other.derived();
    }

    inline void swap(DynamicSparseMatrix& other)
    {
      //EIGEN_DBG_SPARSE(std::cout << "SparseMatrix:: swap\n");
      std::swap(m_innerSize, other.m_innerSize);
      //std::swap(m_outerSize, other.m_outerSize);
      m_data.swap(other.m_data);
    }

    inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.rows(), other.cols());
        m_data = other.m_data;
      }
      return *this;
    }

    /** Destructor */
    inline ~DynamicSparseMatrix() {}

  public:

    /** \deprecated
      * Sets the matrix to zero and reserves the memory for \a reserveSize nonzero coefficients. */
    EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
    {
      setZero();
      reserve(reserveSize);
    }

    /** \deprecated use insertBack() */
    EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return insertBack(outer,inner);
    }

    /** \deprecated use insert() */
    EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
    {
      return insert(row,col);
    }

    /** \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() {}

#   ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#     include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#   endif
};

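// A hedged end-to-end sketch (not from the original sources): since this class is kept for
// backward compatibility, a typical pattern is to assemble with random writes and then copy
// the result into a compressed SparseMatrix for computations. The includes and the direct
// SparseMatrix construction from a DynamicSparseMatrix are assumptions about the surrounding
// Eigen version.
//
// \code
// #include <Eigen/Sparse>
// #include <unsupported/Eigen/SparseExtra>
//
// int main()
// {
//   Eigen::DynamicSparseMatrix<double> dyn(100,100);
//   dyn.reserve(300);                        // at least 4 entries reserved per inner vector
//   for (int i=0; i<100; ++i)
//     dyn.coeffRef(i, (i*7) % 100) += 1.0;   // random-access assembly
//   Eigen::SparseMatrix<double> mat(dyn);    // copy into compressed storage
//   return mat.nonZeros() == dyn.nonZeros() ? 0 : 1;
// }
// \endcode
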
template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::InnerIterator : public SparseVector<Scalar,_Options,_Index>::InnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_Index>::InnerIterator Base;
  public:
    InnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};

template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator : public SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator Base;
  public:
    ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};

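// A short traversal sketch (hedged, illustrative only): as with other Eigen sparse types,
// the nonzeros of one outer vector (a column here, since the default is column-major) are
// visited with the InnerIterator defined above.
//
// \code
// Eigen::DynamicSparseMatrix<double> m(5,5);
// m.coeffRef(1,2) = 3.0;
// m.coeffRef(4,2) = 7.0;
// for (Eigen::DynamicSparseMatrix<double>::InnerIterator it(m, 2); it; ++it)
// {
//   // visits (1,2)=3.0 then (4,2)=7.0 via it.row(), it.col(), it.value()
// }
// \endcode
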
} // end namespace Eigen

#endif // EIGEN_DYNAMIC_SPARSEMATRIX_H