SparseCompressedBase.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H

namespace Eigen {

template<typename Derived> class SparseCompressedBase;

namespace internal {

template<typename Derived>
struct traits<SparseCompressedBase<Derived> > : traits<Derived>
{};

} // end namespace internal

/** \class SparseCompressedBase
  * \brief Common base class for sparse [compressed]-{row|column}-storage format.
  */
template<typename Derived>
class SparseCompressedBase
  : public SparseMatrixBase<Derived>
{
  public:
    typedef SparseMatrixBase<Derived> Base;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
    using Base::operator=;
    using Base::IsRowMajor;

    class InnerIterator;
    class ReverseInnerIterator;

  protected:
    typedef typename Base::IndexVector IndexVector;
    Eigen::Map<IndexVector> innerNonZeros() { return Eigen::Map<IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }
    const Eigen::Map<const IndexVector> innerNonZeros() const { return Eigen::Map<const IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }

  public:

    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const
    {
      if(Derived::IsVectorAtCompileTime && outerIndexPtr()==0)
        return derived().nonZeros();
      else if(isCompressed())
        return outerIndexPtr()[derived().outerSize()]-outerIndexPtr()[0];
      else if(derived().outerSize()==0)
        return 0;
      else
        return innerNonZeros().sum();
    }

    /** \returns a pointer to the array of values (const and non-const overloads). */
    inline const Scalar* valuePtr() const { return derived().valuePtr(); }
    inline Scalar* valuePtr() { return derived().valuePtr(); }

    /** \returns a pointer to the array of inner indices. */
    inline const StorageIndex* innerIndexPtr() const { return derived().innerIndexPtr(); }
    inline StorageIndex* innerIndexPtr() { return derived().innerIndexPtr(); }

    /** \returns a pointer to the array of the starting positions of the inner vectors. */
    inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
    inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }

    /** \returns a pointer to the array of the numbers of non zeros of the inner vectors.
      * \warning it returns the null pointer 0 in compressed mode. */
    inline const StorageIndex* innerNonZeroPtr() const { return derived().innerNonZeroPtr(); }
    inline StorageIndex* innerNonZeroPtr() { return derived().innerNonZeroPtr(); }

    /** \returns whether *this is in compressed form. */
    inline bool isCompressed() const { return innerNonZeroPtr()==0; }

    /** \returns the stored coefficients as a 1D array expression (compressed mode only). */
    const Map<const Array<Scalar,Dynamic,1> > coeffs() const { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }

    Map<Array<Scalar,Dynamic,1> > coeffs() { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }

  protected:
    /** Default constructor. Do nothing. */
    SparseCompressedBase() {}
  private:
    template<typename OtherDerived> explicit SparseCompressedBase(const SparseCompressedBase<OtherDerived>&);
};
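
A rough usage sketch (not part of this header; the matrix A, its size, and its triplet entries are made up for illustration, and only standard Eigen SparseCore calls are used): once a SparseMatrix is compressed, the arrays returned by valuePtr(), innerIndexPtr() and outerIndexPtr() can be walked directly, and coeffs() exposes the stored values as a writable 1D array view.

  #include <Eigen/SparseCore>
  #include <iostream>
  #include <vector>

  int main()
  {
    std::vector<Eigen::Triplet<double> > triplets;
    triplets.push_back(Eigen::Triplet<double>(0, 0, 1.0));
    triplets.push_back(Eigen::Triplet<double>(2, 1, 3.0));

    Eigen::SparseMatrix<double> A(3, 3);                   // column-major by default
    A.setFromTriplets(triplets.begin(), triplets.end());
    A.makeCompressed();                                    // innerNonZeroPtr() becomes 0, so isCompressed() is true

    // outerIndexPtr() holds outerSize()+1 offsets delimiting each column inside
    // the valuePtr()/innerIndexPtr() arrays, both of length nonZeros().
    for(int j = 0; j < A.outerSize(); ++j)
      for(int p = A.outerIndexPtr()[j]; p < A.outerIndexPtr()[j+1]; ++p)
        std::cout << "(" << A.innerIndexPtr()[p] << "," << j << ") = " << A.valuePtr()[p] << "\n";

    A.coeffs() *= 2.0;                                     // scale every stored value through the array view
    return 0;
  }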

template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator()
      : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0)
    {}

    InnerIterator(const InnerIterator& other)
      : m_values(other.m_values), m_indices(other.m_indices), m_outer(other.m_outer), m_id(other.m_id), m_end(other.m_end)
    {}

    InnerIterator& operator=(const InnerIterator& other)
    {
      m_values = other.m_values;
      m_indices = other.m_indices;
      const_cast<OuterType&>(m_outer).setValue(other.m_outer.value());
      m_id = other.m_id;
      m_end = other.m_end;
      return *this;
    }

    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_id = 0;
        m_end = mat.nonZeros();
      }
      else
      {
        m_id = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_end = mat.outerIndexPtr()[outer+1];
        else
          m_end = m_id + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit InnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline StorageIndex index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_id;
    Index m_end;
  private:
    // If you get here, then you're not using the right InnerIterator type, e.g.:
    //   SparseMatrix<double,RowMajor> A;
    //   SparseMatrix<double>::InnerIterator it(A,0);
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};
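
A minimal iteration sketch (not part of the header; it assumes a column-major SparseMatrix<double> named A, e.g. the one built in the example above): this is the intended way to visit the stored entries of each inner vector, and the private constructor just above exists to make the common mistake of mixing iterator and matrix types fail at compile time.

  for(int k = 0; k < A.outerSize(); ++k)
    for(Eigen::SparseMatrix<double>::InnerIterator it(A, k); it; ++it)
      std::cout << "value " << it.value() << " at (" << it.row() << "," << it.col() << ")\n";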

template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_start = 0;
        m_id = mat.nonZeros();
      }
      else
      {
        m_start = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_id = mat.outerIndexPtr()[outer+1];
        else
          m_id = m_start + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit ReverseInnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline StorageIndex index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_start;
    Index m_id;
};
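
The reverse counterpart, sketched under the same assumptions (a SparseMatrix<double> named A; the outer index k is arbitrary), walks one inner vector from its last stored entry back to the first:

  const int k = 1;
  for(Eigen::SparseMatrix<double>::ReverseInnerIterator it(A, k); it; --it)
    std::cout << it.index() << " -> " << it.value() << "\n";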

namespace internal {

template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  explicit evaluator(const Derived &mat) : m_matrix(&mat), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
  const Scalar& coeff(Index row, Index col) const
  {
    Index p = find(row,col);

    if(p==Dynamic)
      return m_zero;
    else
      return m_matrix->const_cast_derived().valuePtr()[p];
  }

  Scalar& coeffRef(Index row, Index col)
  {
    Index p = find(row,col);
    eigen_assert(p!=Dynamic && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

protected:

  // Returns the position of coefficient (row,col) within the value/index arrays,
  // or Dynamic if that coefficient is not stored.
  Index find(Index row, Index col) const
  {
    eigen_internal_assert(row>=0 && row<m_matrix->rows() && col>=0 && col<m_matrix->cols());

    const Index outer = Derived::IsRowMajor ? row : col;
    const Index inner = Derived::IsRowMajor ? col : row;

    Index start = m_matrix->outerIndexPtr()[outer];
    Index end = m_matrix->isCompressed() ? m_matrix->outerIndexPtr()[outer+1] : m_matrix->outerIndexPtr()[outer] + m_matrix->innerNonZeroPtr()[outer];
    eigen_assert(end>=start && "you are using a non finalized sparse matrix or written coefficient does not exist");
    const Index p = std::lower_bound(m_matrix->innerIndexPtr()+start, m_matrix->innerIndexPtr()+end, inner) - m_matrix->innerIndexPtr();

    return ((p<end) && (m_matrix->innerIndexPtr()[p]==inner)) ? p : Dynamic;
  }

  const Derived *m_matrix;
  const Scalar m_zero;
};
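
At the user level, the binary search done by find() is what backs random read access on a compressed matrix: looking up an absent coefficient costs one std::lower_bound over the inner indices of a single outer vector and yields zero without inserting anything. A small sketch, reusing the assumed matrix A from the first example:

  double stored = A.coeff(2, 1);   // (2,1) is a stored entry, so its value is read from valuePtr()
  double absent = A.coeff(1, 1);   // not stored: returns 0.0 and leaves A unchanged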

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H