#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H

namespace Eigen {

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseVector<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (_Options & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
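// Editorial note (not part of the upstream header): the traits specialization
// above is what makes a SparseVector behave as a column vector by default and
// as a row vector when the RowMajor bit is set in _Options. Illustrative
// sketch of the resulting compile-time shapes:
//
//   SparseVector<double>           col;  // RowsAtCompileTime == Dynamic, ColsAtCompileTime == 1
//   SparseVector<double, RowMajor> row;  // RowsAtCompileTime == 1,       ColsAtCompileTime == Dynamic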

enum {
  SVA_RuntimeSwitch,
  SVA_Inner,
  SVA_Outer
};

template< typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
                             : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
                             : SVA_Inner>
struct sparse_vector_assign_selector;
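// Editorial note (not part of the upstream header): the three SVA_* tags above
// select how a sparse expression is copied into a SparseVector. SVA_Inner walks
// the single inner vector of the source, SVA_Outer reads one coefficient per
// outer index, and SVA_RuntimeSwitch defers the choice to run time when the
// source's vector orientation is not known at compile time. The concrete
// specializations appear at the bottom of this file.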

}

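/* SparseVector: a one-dimensional sparse container storing only the non-zero
 * coefficients as (value, index) pairs in a CompressedStorage buffer. Whether
 * it acts as a column or a row vector is controlled by the _Options template
 * parameter, as encoded in the traits specialization above.
 *
 * Usage sketch (illustrative addition, not from the upstream header):
 *
 *   Eigen::SparseVector<double> v(1000);   // column vector of logical size 1000
 *   v.reserve(3);                          // optional: pre-allocate room for 3 non-zeros
 *   v.insert(10)  = 1.0;
 *   v.insert(500) = -2.5;
 *   v.coeffRef(10) += 4.0;                 // read/write access, inserts if missing
 *   double x = v.coeff(42);                // returns 0 for coefficients that are not stored
 */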
template<typename _Scalar, int _Options, typename _Index>
class SparseVector
  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
{
    typedef SparseMatrixBase<SparseVector> SparseBase;

  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

    typedef internal::CompressedStorage<Scalar,Index> Storage;
    enum { IsColVector = internal::traits<SparseVector>::IsColVector };

    enum {
      Options = _Options
    };

    EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
    EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
    EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
    EIGEN_STRONG_INLINE Index outerSize() const { return 1; }

    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
    EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }

    EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
    EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }

    inline Storage& data() { return m_data; }
    inline const Storage& data() const { return m_data; }

    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeff(IsColVector ? row : col);
    }
    inline Scalar coeff(Index i) const
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.at(i);
    }

    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeffRef(IsColVector ? row : col);
    }

    inline Scalar& coeffRef(Index i)
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.atWithInsertion(i);
    }
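    /* Note on cost (editorial comment, not from the upstream header):
     * coeffRef(i) calls CompressedStorage::atWithInsertion, so accessing a
     * coefficient that is not stored yet inserts it, which may shift existing
     * entries and is therefore not a constant-time operation. Illustrative sketch:
     *
     *   Eigen::SparseVector<double> v(100);
     *   v.coeffRef(7) = 3.0;   // inserts index 7
     *   v.coeffRef(7) += 1.0;  // now a cheap lookup of an existing entry
     */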

  public:

    class InnerIterator;
    class ReverseInnerIterator;

    inline void setZero() { m_data.clear(); }

    inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }

    inline void startVec(Index outer)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBack(inner);
    }
    inline Scalar& insertBack(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }
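    /* Usage sketch for the low-level fill API (illustrative addition, not from
     * the upstream header): startVec/insertBack/finalize append entries without
     * any searching or shifting, so indices must be provided in increasing order.
     *
     *   Eigen::SparseVector<double> v(1000);
     *   v.reserve(3);
     *   v.startVec(0);             // a vector has a single outer index, 0
     *   v.insertBack(2)   = 1.0;
     *   v.insertBack(17)  = 2.0;
     *   v.insertBack(953) = 3.0;
     *   v.finalize();              // a no-op here, kept for API symmetry with SparseMatrix
     */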

    inline Scalar& insert(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));

      Index inner = IsColVector ? row : col;
      Index outer = IsColVector ? col : row;
      eigen_assert(outer==0);
      return insert(inner);
    }
    Scalar& insert(Index i)
    {
      eigen_assert(i>=0 && i<m_size);

      Index startId = 0;
      Index p = Index(m_data.size()) - 1;

      m_data.resize(p+2,1);

      while ( (p >= startId) && (m_data.index(p) > i) )
      {
        m_data.index(p+1) = m_data.index(p);
        m_data.value(p+1) = m_data.value(p);
        --p;
      }
      m_data.index(p+1) = i;
      m_data.value(p+1) = 0;
      return m_data.value(p+1);
    }
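    /* Usage sketch (illustrative addition, not from the upstream header):
     * insert() supports arbitrary insertion order, at the cost of shifting the
     * tail of the storage for each out-of-order index, so reserving first
     * and/or inserting in increasing order is preferable when possible.
     *
     *   Eigen::SparseVector<double> v(100);
     *   v.reserve(2);
     *   v.insert(42) = 1.0;
     *   v.insert(7)  = 2.0;   // works, but shifts the entry stored at index 42
     */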

    inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

    inline void finalize() {}

    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      m_data.prune(reference,epsilon);
    }
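    /* Usage sketch (illustrative addition, not from the upstream header):
     * prune() removes every stored coefficient whose magnitude is negligible
     * compared with the reference value, with epsilon as the relative tolerance.
     *
     *   v.coeffRef(3) = 1e-20;
     *   v.prune(1.0);          // drops entries much smaller in magnitude than 1.0
     */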

    void resize(Index rows, Index cols)
    {
      eigen_assert(rows==1 || cols==1);
      resize(IsColVector ? rows : cols);
    }

    void resize(Index newSize)
    {
      m_size = newSize;
      m_data.clear();
    }
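    /* Note (editorial comment, not from the upstream header): unlike the dense
     * counterparts, resize() always discards the stored non-zeros by clearing
     * m_data; there is no conservative resize. Illustrative sketch:
     *
     *   v.insert(3) = 1.0;
     *   v.resize(200);         // v.nonZeros() is now 0
     */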

    void resizeNonZeros(Index size) { m_data.resize(size); }

    inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }

    inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }

    inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }

    template<typename OtherDerived>
    inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
      : m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    inline SparseVector(const SparseVector& other)
      : SparseBase(other), m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    inline void swap(SparseVector& other)
    {
      std::swap(m_size, other.m_size);
      m_data.swap(other.m_data);
    }

    inline SparseVector& operator=(const SparseVector& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.size());
        m_data = other.m_data;
      }
      return *this;
    }

    template<typename OtherDerived>
    inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      SparseVector tmp(other.size());
      internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
      this->swap(tmp);
      return *this;
    }
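    /* Usage sketch (illustrative addition, not from the upstream header):
     * assignment from any sparse expression goes through the
     * sparse_vector_assign_selector dispatch defined at the end of this file,
     * building the result in a temporary and then swapping it in.
     *
     *   Eigen::SparseMatrix<double> A(100, 100);
     *   // ... fill A ...
     *   Eigen::SparseVector<double> c = A.col(3);  // copies one column of A
     */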

#ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    {
      return Base::operator=(product);
    }
#endif

    friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
    {
      for (Index i=0; i<m.nonZeros(); ++i)
        s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
      s << std::endl;
      return s;
    }

    inline ~SparseVector() {}

    Scalar sum() const;

  public:

    EIGEN_DEPRECATED void startFill(Index reserve)
    {
      setZero();
      m_data.reserve(reserve);
    }

    EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fill(IsColVector ? r : c);
    }

    EIGEN_DEPRECATED Scalar& fill(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fillrand(IsColVector ? r : c);
    }

    EIGEN_DEPRECATED Scalar& fillrand(Index i)
    {
      return insert(i);
    }

    EIGEN_DEPRECATED void endFill() {}

    EIGEN_DEPRECATED Storage& _data() { return m_data; }
    EIGEN_DEPRECATED const Storage& _data() const { return m_data; }

#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
#     include EIGEN_SPARSEVECTOR_PLUGIN
#   endif

  protected:

    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }

    Storage m_data;
    Index m_size;
};

template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    InnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {}

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline Scalar value() const { return m_data.value(m_id); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }

    inline Index index() const { return m_data.index(m_id); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_end;
};
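/* Usage sketch (illustrative addition, not from the upstream header):
 * InnerIterator visits only the stored (non-zero) coefficients, in increasing
 * index order.
 *
 *   Eigen::SparseVector<double> v(100);
 *   v.insert(7)  = 2.0;
 *   v.insert(42) = -1.0;
 *   for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it)
 *     std::cout << "v(" << it.index() << ") = " << it.value() << "\n";
 */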
00378
00379 template<typename Scalar, int _Options, typename _Index>
00380 class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
00381 {
00382 public:
00383 ReverseInnerIterator(const SparseVector& vec, Index outer=0)
00384 : m_data(vec.m_data), m_id(static_cast<Index>(m_data.size())), m_start(0)
00385 {
00386 EIGEN_UNUSED_VARIABLE(outer);
00387 eigen_assert(outer==0);
00388 }
00389
00390 ReverseInnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
00391 : m_data(data), m_id(static_cast<Index>(m_data.size())), m_start(0)
00392 {}
00393
00394 inline ReverseInnerIterator& operator--() { m_id--; return *this; }
00395
00396 inline Scalar value() const { return m_data.value(m_id-1); }
00397 inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }
00398
00399 inline Index index() const { return m_data.index(m_id-1); }
00400 inline Index row() const { return IsColVector ? index() : 0; }
00401 inline Index col() const { return IsColVector ? 0 : index(); }
00402
00403 inline operator bool() const { return (m_id > m_start); }
00404
00405 protected:
00406 const internal::CompressedStorage<Scalar,Index>& m_data;
00407 Index m_id;
00408 const Index m_start;
00409 };
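/* Usage sketch (illustrative addition, not from the upstream header):
 * ReverseInnerIterator walks the stored coefficients from the largest index
 * down to the smallest, and is advanced with operator--.
 *
 *   for (Eigen::SparseVector<double>::ReverseInnerIterator it(v); it; --it)
 *     std::cout << it.index() << " -> " << it.value() << "\n";
 */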

namespace internal {

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.innerSize()==src.size());
    for(typename Src::InnerIterator it(src, 0); it; ++it)
      dst.insert(it.index()) = it.value();
  }
};

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.outerSize()==src.size());
    for(typename Dest::Index i=0; i<src.size(); ++i)
    {
      typename Src::InnerIterator it(src, i);
      if(it)
        dst.insert(i) = it.value();
    }
  }
};

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
  static void run(Dest& dst, const Src& src) {
    if(src.outerSize()==1)  sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
    else                    sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
  }
};
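// Editorial note (not part of the upstream header): SVA_Inner copies the source
// in a single InnerIterator pass (the vector lies along the source's inner
// dimension), SVA_Outer reads at most one stored coefficient per outer index
// (the vector lies along the source's outer dimension), and SVA_RuntimeSwitch
// picks between the two at run time by inspecting outerSize().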

}

}

#endif // EIGEN_SPARSEVECTOR_H