#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H

// ...

// Compile-time traits of SparseVector (in namespace Eigen::internal): a column
// vector has a dynamic number of rows and exactly one column; a row vector is
// the transposed case.
template<typename _Scalar, int _Options, typename _StorageIndex>
struct traits<SparseVector<_Scalar, _Options, _StorageIndex> >
{
  // ...
  enum {
    // ...
    RowsAtCompileTime    = IsColVector ? Dynamic : 1,
    ColsAtCompileTime    = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    // ...
  };
};
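// Illustration (not part of the original header): with the default ColMajor
// option, SparseVector is a column vector, hence dynamic rows and one column.
//
//   static_assert(Eigen::SparseVector<double>::RowsAtCompileTime == Eigen::Dynamic, "");
//   static_assert(Eigen::SparseVector<double>::ColsAtCompileTime == 1, "");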
// Helper (in namespace Eigen::internal) selecting how a generic sparse
// expression is assigned to a SparseVector: along its single inner vector,
// entry by entry along the outer dimension, or decided at runtime when the
// shape is not known at compile time.
template< typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
                             : Src::InnerSizeAtCompileTime==1    ? SVA_Outer
                                                                 : SVA_Inner>
struct sparse_vector_assign_selector;

// ...

template<typename _Scalar, int _Options, typename _StorageIndex>
class SparseVector
  : public SparseCompressedBase<SparseVector<_Scalar, _Options, _StorageIndex> >
{
  // ...
  inline Scalar coeff(Index row, Index col) const
  {
    eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
    return coeff(IsColVector ? row : col);
  }

  // ...

  inline Scalar& coeffRef(Index row, Index col)
  {
    eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
    return coeffRef(IsColVector ? row : col);
  }
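  // Usage sketch (illustration, not part of the original header): coeff(i)
  // returns 0 for indices that are not stored, while coeffRef(i) inserts a
  // zero entry if needed, which can be costly; prefer insert()/insertBack()
  // when building a vector from scratch.
  //
  //   Eigen::SparseVector<double> v(8);
  //   v.coeffRef(3) = 1.5;      // creates the entry at index 3 if absent
  //   double x = v.coeff(5);    // x == 0.0, no entry is created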
  inline Scalar& insertBackByOuterInner(Index outer, Index inner)
  {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer==0);
    return insertBack(inner);
  }

  inline Scalar& insertBack(Index i)
  {
    m_data.append(0, i);
    return m_data.value(m_data.size()-1);
  }

  Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
  {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer==0);
    return insertBackUnordered(inner);
  }

  inline Scalar& insertBackUnordered(Index i)
  {
    m_data.append(0, i);
    return m_data.value(m_data.size()-1);
  }
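  // Usage sketch (illustration, not part of the original header): the
  // insertBack* functions append at the end of the internal storage, so the
  // ordered variants expect strictly increasing indices; reserve() avoids
  // repeated reallocations.
  //
  //   Eigen::SparseVector<double> v(10);
  //   v.reserve(3);
  //   v.insertBack(1) = 2.0;
  //   v.insertBack(4) = -1.0;
  //   v.insertBack(7) = 3.5;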
  inline Scalar& insert(Index row, Index col)
  {
    eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));

    Index inner = IsColVector ? row : col;
    Index outer = IsColVector ? col : row;
    EIGEN_ONLY_USED_FOR_DEBUG(outer);
    eigen_assert(outer==0);
    return insert(inner);
  }

  Scalar& insert(Index i)
  {
    // ...
    m_data.resize(p+2,1);

    // Shift stored entries with a larger index one slot to the right so the
    // storage stays sorted by index, then write the new entry with value 0.
    while ( (p >= startId) && (m_data.index(p) > i) )
    {
      m_data.index(p+1) = m_data.index(p);
      m_data.value(p+1) = m_data.value(p);
      --p;
    }
    m_data.index(p+1) = convert_index(i);
    m_data.value(p+1) = 0;
    return m_data.value(p+1);
  }
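  // Usage sketch (illustration, not part of the original header): unlike
  // insertBack(), insert() accepts indices in any order, at the price of
  // shifting already-stored entries, i.e. O(nnz) per insertion in the worst
  // case.
  //
  //   Eigen::SparseVector<double> v(100);
  //   v.insert(42) = 3.0;
  //   v.insert(7)  = 1.0;   // entries remain sorted by index internally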
  inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

  // ...

  void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
  {
    m_data.prune(reference, epsilon);
  }
  void resize(Index rows, Index cols)
  {
    eigen_assert((IsColVector ? cols : rows)==1 && "Outer dimension must equal 1");
    resize(IsColVector ? rows : cols);
  }

  // ...

  void conservativeResize(Index newSize)
  {
    if (newSize < m_size)
    {
      Index i = 0;
      while (i<m_data.size() && m_data.index(i)<newSize) ++i;
      m_data.resize(i);
    }
    m_size = newSize;
  }
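  // Usage sketch (illustration, not part of the original header): resize()
  // discards all stored entries, whereas conservativeResize() keeps those
  // whose index is still below the new size.
  //
  //   Eigen::SparseVector<double> v(10);
  //   v.insert(2) = 1.0;
  //   v.insert(8) = 2.0;
  //   v.conservativeResize(5);   // keeps index 2, drops index 8
  //   v.resize(5);               // empties the vector entirely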
  inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }
  template<typename OtherDerived>
  inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
    : m_size(0)
  {
    #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
      EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
    #endif
    check_template_parameters();
    *this = other.derived();
  }

  inline SparseVector(const SparseVector& other)
    : Base(other), m_size(0)
  {
    check_template_parameters();
    *this = other.derived();
  }
  inline void swap(SparseVector& other)
  {
    std::swap(m_size, other.m_size);
    m_data.swap(other.m_data);
  }

  template<int OtherOptions>
  inline void swap(SparseMatrix<Scalar,OtherOptions,StorageIndex>& other)
  {
    eigen_assert(other.outerSize()==1);
    // ...
    m_data.swap(other.m_data);
  }
  inline SparseVector& operator=(const SparseVector& other)
  {
    if (other.isRValue())
    {
      swap(other.const_cast_derived());
    }
    else
    {
      resize(other.size());
      m_data = other.m_data;
    }
    return *this;
  }

  template<typename OtherDerived>
  inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
  {
    // ...
  }
  #ifndef EIGEN_PARSED_BY_DOXYGEN
  template<typename Lhs, typename Rhs>
  inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
  {
    return Base::operator=(product);
  }
  #endif
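  // Usage sketch (illustration, not part of the original header): assigning a
  // sparse product expression to a SparseVector; depending on the Eigen
  // version this is routed through this overload or the generic assignment.
  //
  //   Eigen::SparseMatrix<double> A(5,5);
  //   Eigen::SparseVector<double> x(5), y;
  //   // ... fill A and x ...
  //   y = A * x;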
  // Deprecated fill API, kept for backward compatibility.
  EIGEN_DEPRECATED void startFill(Index reserve)
  {
    setZero();
    m_data.reserve(reserve);
  }

  EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
  {
    eigen_assert(r==0 || c==0);
    return fill(IsColVector ? r : c);
  }

  EIGEN_DEPRECATED Scalar& fill(Index i)
  {
    m_data.append(0, i);
    return m_data.value(m_data.size()-1);
  }

  EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
  {
    eigen_assert(r==0 || c==0);
    return fillrand(IsColVector ? r : c);
  }
#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
#     include EIGEN_SPARSEVECTOR_PLUGIN
#   endif

// ...

// Evaluator for SparseVector (internal machinery, in namespace Eigen::internal).
template<typename _Scalar, int _Options, typename _Index>
struct evaluator<SparseVector<_Scalar,_Options,_Index> >
  : evaluator_base<SparseVector<_Scalar,_Options,_Index> >
{
  typedef SparseVector<_Scalar,_Options,_Index> SparseVectorType;
  // ...
  enum {
    CoeffReadCost = NumTraits<_Scalar>::ReadCost,
    Flags = SparseVectorType::Flags
  };

  explicit evaluator(const SparseVectorType &mat) : m_matrix(&mat)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator SparseVectorType&() { return m_matrix->const_cast_derived(); }
  operator const SparseVectorType&() const { return *m_matrix; }

  const SparseVectorType *m_matrix;
};
template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.innerSize()==src.size());
    typedef internal::evaluator<Src> SrcEvaluatorType;
    SrcEvaluatorType srcEval(src);
    // The source is a single inner vector: iterate over its non-zeros once.
    for(typename SrcEvaluatorType::InnerIterator it(srcEval, 0); it; ++it)
      dst.insert(it.index()) = it.value();
  }
};

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.outerSize()==src.size());
    typedef internal::evaluator<Src> SrcEvaluatorType;
    SrcEvaluatorType srcEval(src);
    // The source has at most one coefficient per outer vector: visit each
    // outer index and copy its single non-zero, if any.
    for(Index i=0; i<src.size(); ++i)
    {
      typename SrcEvaluatorType::InnerIterator it(srcEval, i);
      if(it)
        dst.insert(i) = it.value();
    }
  }
};

// Shape only known at runtime: dispatch to one of the two cases above.
template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
  static void run(Dest& dst, const Src& src) {
    if(src.outerSize()==1)  sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
    else                    sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
  }
};
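// Illustration (not part of the original header), assuming A is a column-major
// Eigen::SparseMatrix<double>: both assignments below go through the selector
// above, the first along the single inner vector (SVA_Inner), the second entry
// by entry across the outer dimension (SVA_Outer).
//
//   Eigen::SparseVector<double> c = A.col(1);
//   Eigen::SparseVector<double, Eigen::RowMajor> r = A.row(1);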
#endif // EIGEN_SPARSEVECTOR_H