#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H

namespace Eigen {

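/** \class SparseMatrix
  *
  * \brief A versatile sparse matrix representation.
  *
  * This class stores the matrix in the Compressed Column (resp. Row) Storage scheme: an array of
  * non zero values, an array of the corresponding inner (row) indices, and an array giving, for
  * each outer vector (column), its starting position in the two previous arrays. In the optional
  * "uncompressed" mode an extra array records the actual number of non zeros of each inner vector,
  * leaving free room for fast insertions.
  *
  * A minimal usage sketch (illustrative only; the matrix \c m and its values are arbitrary):
  * \code
  * SparseMatrix<double> m(1000,1000);  // 1000x1000 column-major sparse matrix of double
  * m.insert(3,7) = 2.5;                // create the (3,7) entry
  * m.coeffRef(3,7) += 1.0;             // read/write access, inserting the entry if needed
  * m.makeCompressed();                 // go back to the plain compressed (CCS/CRS) format
  * \endcode
  *
  * \tparam _Scalar  the scalar type, i.e. the type of the coefficients
  * \tparam _Options either \c ColMajor or \c RowMajor, the default being \c ColMajor
  * \tparam _Index   the type of the indices
  */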
namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};

template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
struct traits<Diagonal<const SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
{
  typedef SparseMatrix<_Scalar, _Options, _Index> MatrixType;
  typedef typename nested<MatrixType>::type MatrixTypeNested;
  typedef typename remove_reference<MatrixTypeNested>::type _MatrixTypeNested;

  typedef _Scalar Scalar;
  typedef Dense StorageKind;
  typedef _Index Index;
  typedef MatrixXpr XprKind;

  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = 1,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = 1,
    Flags = 0,
    CoeffReadCost = _MatrixTypeNested::CoeffReadCost*10
  };
};

} // end namespace internal

template<typename _Scalar, int _Options, typename _Index>
class SparseMatrix
  : public SparseMatrixBase<SparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseMatrix)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, -=)

    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    typedef internal::CompressedStorage<Scalar,Index> Storage;
    enum {
      Options = _Options
    };

  protected:

    // a sparse matrix with the same scalar type but the opposite storage order
    typedef SparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?ColMajor:RowMajor)> TransposedSparseMatrix;

    Index m_outerSize;
    Index m_innerSize;
    Index* m_outerIndex;
    Index* m_innerNonZeros;     // optional, null in compressed mode
    Storage m_data;

    Eigen::Map<Matrix<Index,Dynamic,1> > innerNonZeros() { return Eigen::Map<Matrix<Index,Dynamic,1> >(m_innerNonZeros, m_innerNonZeros?m_outerSize:0); }
    const Eigen::Map<const Matrix<Index,Dynamic,1> > innerNonZeros() const { return Eigen::Map<const Matrix<Index,Dynamic,1> >(m_innerNonZeros, m_innerNonZeros?m_outerSize:0); }

  public:

    /** \returns whether *this is in compressed (plain CCS/CRS) mode */
    inline bool isCompressed() const { return m_innerNonZeros==0; }

    /** \returns the number of rows of the matrix */
    inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
    /** \returns the number of columns of the matrix */
    inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }

    /** \returns the number of rows (resp. columns) of the matrix if the storage order is column major (resp. row major) */
    inline Index innerSize() const { return m_innerSize; }
    /** \returns the number of columns (resp. rows) of the matrix if the storage order is column major (resp. row major) */
    inline Index outerSize() const { return m_outerSize; }

    /** \returns a const pointer to the array of values */
    inline const Scalar* valuePtr() const { return &m_data.value(0); }
    /** \returns a non-const pointer to the array of values */
    inline Scalar* valuePtr() { return &m_data.value(0); }

    /** \returns a const pointer to the array of inner indices */
    inline const Index* innerIndexPtr() const { return &m_data.index(0); }
    /** \returns a non-const pointer to the array of inner indices */
    inline Index* innerIndexPtr() { return &m_data.index(0); }

    /** \returns a const pointer to the array of outer starting positions */
    inline const Index* outerIndexPtr() const { return m_outerIndex; }
    /** \returns a non-const pointer to the array of outer starting positions */
    inline Index* outerIndexPtr() { return m_outerIndex; }

    /** \returns a const pointer to the array of the number of non zeros per inner vector (null in compressed mode) */
    inline const Index* innerNonZeroPtr() const { return m_innerNonZeros; }
    /** \returns a non-const pointer to the array of the number of non zeros per inner vector (null in compressed mode) */
    inline Index* innerNonZeroPtr() { return m_innerNonZeros; }

    /** \internal \returns a reference to the internal storage */
    inline Storage& data() { return m_data; }
    /** \internal \returns a const reference to the internal storage */
    inline const Storage& data() const { return m_data; }
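    /* How the raw arrays relate to the matrix (illustrative note; the 3x3 matrix below is an
     * arbitrary example, not data used by this class). For a column-major matrix
     *
     *     [ 1 0 4 ]
     *     [ 0 2 0 ]
     *     [ 0 3 5 ]
     *
     * the compressed storage accessed through the pointers above is:
     *
     *   valuePtr()      : 1 2 3 4 5    // non zeros, column by column
     *   innerIndexPtr() : 0 1 2 0 2    // row index of each stored value
     *   outerIndexPtr() : 0 1 3 5      // start of each column in the two arrays above
     *                                  // (outerSize()+1 entries, the last one equals nonZeros())
     *
     * In uncompressed mode, innerNonZeroPtr()[j] gives the number of actually used entries of
     * column j, and outerIndexPtr()[j+1]-outerIndexPtr()[j]-innerNonZeroPtr()[j] entries are
     * free room reserved for future insertions.
     */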

    /** \returns the value of the matrix at position (\a row, \a col).
      * This function returns Scalar(0) if the coefficient is not stored explicitly. */
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      return m_data.atInRange(m_outerIndex[outer], end, inner);
    }

    /** \returns a non-const reference to the value of the matrix at position (\a row, \a col).
      * If the coefficient does not exist yet, it is inserted via insert(Index,Index), which
      * switches the matrix to uncompressed mode if it was not already. */
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index start = m_outerIndex[outer];
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      eigen_assert(end>=start && "you probably called coeffRef on a non finalized matrix");
      if(end<=start)
        return insert(row,col);
      const Index p = m_data.searchLowerIndex(start,end-1,inner);
      if((p<end) && (m_data.index(p)==inner))
        return m_data.value(p);
      else
        return insert(row,col);
    }

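    /** Inserts a new non zero coefficient at position (\a row, \a col) and returns a reference
      * to it, under the assumption that the coefficient is not stored yet (otherwise use
      * coeffRef(Index,Index)). If the matrix is in compressed mode, the call switches it to the
      * uncompressed mode with some room reserved in each inner vector.
      *
      * A sketch of the typical random-insertion pattern (illustrative only; the matrix \c m,
      * its size, and the filled pattern are placeholders):
      * \code
      * SparseMatrix<double> m(1000,1000);
      * m.reserve(VectorXi::Constant(1000, 6));   // ~6 expected non zeros per column
      * for(int j=0; j<1000; ++j)
      *   for(int i=j; i<1000 && i<j+3; ++i)
      *     m.insert(i,j) = i+j+1;                // each (i,j) inserted only once
      * m.makeCompressed();                       // optional: back to compressed mode
      * \endcode
      */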
    EIGEN_DONT_INLINE Scalar& insert(Index row, Index col)
    {
      if(isCompressed())
      {
        // switch to uncompressed mode and reserve 2 entries per inner vector
        reserve(VectorXi::Constant(outerSize(), 2));
      }
      return insertUncompressed(row,col);
    }

  public:

    class InnerIterator;
    class ReverseInnerIterator;

    /** Removes all non zeros but keeps the allocated memory */
    inline void setZero()
    {
      m_data.clear();
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
      if(m_innerNonZeros)
        memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(Index));
    }

    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const
    {
      if(m_innerNonZeros)
        return innerNonZeros().sum();
      return static_cast<Index>(m_data.size());
    }

    /** Preallocates \a reserveSize non zeros.
      * Precondition: the matrix must be in compressed mode. */
    inline void reserve(Index reserveSize)
    {
      eigen_assert(isCompressed() && "This function does not make sense in non compressed mode.");
      m_data.reserve(reserveSize);
    }

#ifdef EIGEN_PARSED_BY_DOXYGEN
    /** Preallocates \a reserveSizes[j] non zeros for each inner vector \a j. */
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes);
#else
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes, const typename SizesType::value_type& enableif = typename SizesType::value_type())
    {
      EIGEN_UNUSED_VARIABLE(enableif);
      reserveInnerVectors(reserveSizes);
    }
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes, const typename SizesType::Scalar& enableif =
    #if (!defined(_MSC_VER)) || (_MSC_VER>=1500) // MSVC prior to _MSC_VER 1500 does not accept 'typename' here
        typename
    #endif
        SizesType::Scalar())
    {
      EIGEN_UNUSED_VARIABLE(enableif);
      reserveInnerVectors(reserveSizes);
    }
#endif // EIGEN_PARSED_BY_DOXYGEN
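    /* The two overloads above exist only to accept both STL-like containers (which expose a
     * nested \c value_type, e.g. std::vector<int>) and Eigen dense vectors (which expose a
     * nested \c Scalar, e.g. VectorXi) as the per-inner-vector size argument; the unused
     * default parameter merely selects the viable overload. Illustrative calls (the names
     * \c m, \c stl_sizes and \c eigen_sizes are placeholders):
     * \code
     * SparseMatrix<double> m(100,100);
     * std::vector<int> stl_sizes(100, 4);                  // reserve 4 entries per column
     * m.reserve(stl_sizes);
     * VectorXi eigen_sizes = VectorXi::Constant(100, 4);
     * m.reserve(eigen_sizes);                              // equivalent, using an Eigen vector
     * \endcode
     */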
  protected:
    template<class SizesType>
    inline void reserveInnerVectors(const SizesType& reserveSizes)
    {
      if(isCompressed())
      {
        std::size_t totalReserveSize = 0;
        // turn the matrix into non-compressed mode
        m_innerNonZeros = new Index[m_outerSize];

        // temporarily use m_innerNonZeros to hold the new starting points
        Index* newOuterIndex = m_innerNonZeros;

        Index count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          count += reserveSizes[j] + (m_outerIndex[j+1]-m_outerIndex[j]);
          totalReserveSize += reserveSizes[j];
        }
        m_data.reserve(totalReserveSize);
        std::ptrdiff_t previousOuterIndex = m_outerIndex[m_outerSize];
        for(std::ptrdiff_t j=m_outerSize-1; j>=0; --j)
        {
          std::ptrdiff_t innerNNZ = previousOuterIndex - m_outerIndex[j];
          for(std::ptrdiff_t i=innerNNZ-1; i>=0; --i)
          {
            m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
            m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
          }
          previousOuterIndex = m_outerIndex[j];
          m_outerIndex[j] = newOuterIndex[j];
          m_innerNonZeros[j] = innerNNZ;
        }
        m_outerIndex[m_outerSize] = m_outerIndex[m_outerSize-1] + m_innerNonZeros[m_outerSize-1] + reserveSizes[m_outerSize-1];

        m_data.resize(m_outerIndex[m_outerSize]);
      }
      else
      {
        Index* newOuterIndex = new Index[m_outerSize+1];
        Index count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          Index alreadyReserved = (m_outerIndex[j+1]-m_outerIndex[j]) - m_innerNonZeros[j];
          Index toReserve = std::max<std::ptrdiff_t>(reserveSizes[j], alreadyReserved);
          count += toReserve + m_innerNonZeros[j];
        }
        newOuterIndex[m_outerSize] = count;

        m_data.resize(count);
        for(std::ptrdiff_t j=m_outerSize-1; j>=0; --j)
        {
          std::ptrdiff_t offset = newOuterIndex[j] - m_outerIndex[j];
          if(offset>0)
          {
            std::ptrdiff_t innerNNZ = m_innerNonZeros[j];
            for(std::ptrdiff_t i=innerNNZ-1; i>=0; --i)
            {
              m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
              m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
            }
          }
        }

        std::swap(m_outerIndex, newOuterIndex);
        delete[] newOuterIndex;
      }
    }

  public:

    /** \internal Appends the coefficient (\a row, \a col), assuming strictly increasing inner
      * indices within each outer vector, filled in increasing outer order. */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** \internal Same as insertBack, but with outer/inner indices instead of row/col. */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(size_t(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
      eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(0, inner);
      return m_data.value(p);
    }

    /** \internal Same as insertBackByOuterInner, but the inner indices need not be sorted. */
    inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(0, inner);
      return m_data.value(p);
    }

    /** \internal Must be called once for each outer vector, in increasing order, before
      * inserting its coefficients through insertBack(). */
    inline void startVec(Index outer)
    {
      eigen_assert(m_outerIndex[outer]==int(m_data.size()) && "You must call startVec for each inner vector sequentially");
      eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
      m_outerIndex[outer+1] = m_outerIndex[outer];
    }

    /** \internal Must be called once the matrix has been filled through the low-level API above;
      * it fills the remaining outer index entries. Does nothing in uncompressed mode. */
    inline void finalize()
    {
      if(isCompressed())
      {
        Index size = static_cast<Index>(m_data.size());
        Index i = m_outerSize;
        // find the last filled outer vector
        while (i>=0 && m_outerIndex[i]==0)
          --i;
        ++i;
        while (i<=m_outerSize)
        {
          m_outerIndex[i] = size;
          ++i;
        }
      }
    }
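    /* A sketch of the low-level, strictly ordered filling API above (illustrative only;
     * \c m and the loop bounds are placeholders). Coefficients must be appended with
     * increasing outer index, and with increasing inner index inside each outer vector:
     * \code
     * SparseMatrix<double> m(4,4);        // column major: outer = column, inner = row
     * m.reserve(8);                       // optional, compressed-mode reservation
     * for(int j=0; j<4; ++j)              // for each column
     * {
     *   m.startVec(j);
     *   m.insertBack(j,j) = 2.0;          // diagonal entry
     *   if(j+1<4)
     *     m.insertBack(j+1,j) = -1.0;     // sub-diagonal entry, larger row index
     * }
     * m.finalize();
     * \endcode
     */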


    /** Fills \c *this from the list of triplets defined by the iterator range \a begin - \a end;
      * duplicated triplets are summed up (see the documentation before the definition at the end
      * of this file). */
    template<typename InputIterators>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end);

    void sumupDuplicates();

    /** \internal Same as insert(Index,Index), but with outer/inner indices instead of row/col. */
    EIGEN_DONT_INLINE Scalar& insertByOuterInner(Index j, Index i)
    {
      return insert(IsRowMajor ? j : i, IsRowMajor ? i : j);
    }

    /** Turns the matrix into the \em compressed format: the free room left in each inner
      * vector by the uncompressed mode is squeezed out and m_innerNonZeros is deallocated. */
    void makeCompressed()
    {
      if(isCompressed())
        return;

      Index oldStart = m_outerIndex[1];
      m_outerIndex[1] = m_innerNonZeros[0];
      for(Index j=1; j<m_outerSize; ++j)
      {
        Index nextOldStart = m_outerIndex[j+1];
        std::ptrdiff_t offset = oldStart - m_outerIndex[j];
        if(offset>0)
        {
          for(Index k=0; k<m_innerNonZeros[j]; ++k)
          {
            m_data.index(m_outerIndex[j]+k) = m_data.index(oldStart+k);
            m_data.value(m_outerIndex[j]+k) = m_data.value(oldStart+k);
          }
        }
        m_outerIndex[j+1] = m_outerIndex[j] + m_innerNonZeros[j];
        oldStart = nextOldStart;
      }
      delete[] m_innerNonZeros;
      m_innerNonZeros = 0;
      m_data.resize(m_outerIndex[m_outerSize]);
      m_data.squeeze();
    }

    /** Suppresses all non zeros which are \em much \em smaller \em than \a reference under the tolerance \a epsilon */
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      prune(default_prunning_func(reference,epsilon));
    }

    /** Turns the matrix into compressed format and suppresses all non zeros which do not satisfy
      * the predicate \a keep, called as keep(row, col, value). */
    template<typename KeepFunc>
    void prune(const KeepFunc& keep = KeepFunc())
    {
      // in the uncompressed case, first compact the storage
      makeCompressed();

      Index k = 0;
      for(Index j=0; j<m_outerSize; ++j)
      {
        Index previousStart = m_outerIndex[j];
        m_outerIndex[j] = k;
        Index end = m_outerIndex[j+1];
        for(Index i=previousStart; i<end; ++i)
        {
          if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
          {
            m_data.value(k) = m_data.value(i);
            m_data.index(k) = m_data.index(i);
            ++k;
          }
        }
      }
      m_outerIndex[m_outerSize] = k;
      m_data.resize(k,0);
    }
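    /* An illustrative use of prune() with a custom predicate (the functor name and threshold
     * are placeholders, not part of this header): keep only the coefficients whose magnitude
     * exceeds a given threshold.
     * \code
     * struct MagnitudeAbove {
     *   double thresh;
     *   MagnitudeAbove(double t) : thresh(t) {}
     *   bool operator()(int, int, const double& value) const   // (row, col, value)
     *   { return std::abs(value) > thresh; }
     * };
     * // ...
     * SparseMatrix<double> m(100,100);
     * // ... fill m ...
     * m.prune(MagnitudeAbove(1e-12));   // drops tiny entries and compresses the storage
     * \endcode
     */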

    /** Resizes the matrix to a \a rows x \a cols matrix and initializes it to zero.
      * The current content is discarded and the matrix is left in compressed mode. */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      m_data.clear();
      if (m_outerSize != outerSize || m_outerSize==0)
      {
        delete[] m_outerIndex;
        m_outerIndex = new Index [outerSize+1];
        m_outerSize = outerSize;
      }
      if(m_innerNonZeros)
      {
        delete[] m_innerNonZeros;
        m_innerNonZeros = 0;
      }
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
    }

    /** \internal Resizes the non zeros buffer only; the outer index pointers are left unchanged. */
    void resizeNonZeros(Index size)
    {
      m_data.resize(size);
    }

    /** \returns a const expression of the diagonal coefficients */
    const Diagonal<const SparseMatrix> diagonal() const { return *this; }

    /** Default constructor yielding an empty \c 0 \c x \c 0 matrix */
    inline SparseMatrix()
      : m_outerSize(-1), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(0, 0);
    }

    /** Constructs a \a rows \c x \a cols empty matrix */
    inline SparseMatrix(Index rows, Index cols)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(rows, cols);
    }

    /** Constructs a sparse matrix from the sparse expression \a other */
    template<typename OtherDerived>
    inline SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    /** Copy constructor (it performs a deep copy) */
    inline SparseMatrix(const SparseMatrix& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    /** \brief Copy constructor with in-place evaluation */
    template<typename OtherDerived>
    SparseMatrix(const ReturnByValue<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      initAssignment(other);
      other.evalTo(*this);
    }

    /** Swaps the content of two sparse matrices of the same type.
      * This is a fast operation that simply swaps the underlying pointers and parameters. */
    inline void swap(SparseMatrix& other)
    {
      std::swap(m_outerIndex, other.m_outerIndex);
      std::swap(m_innerSize, other.m_innerSize);
      std::swap(m_outerSize, other.m_outerSize);
      std::swap(m_innerNonZeros, other.m_innerNonZeros);
      m_data.swap(other.m_data);
    }

    inline SparseMatrix& operator=(const SparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        initAssignment(other);
        if(other.isCompressed())
        {
          memcpy(m_outerIndex, other.m_outerIndex, (m_outerSize+1)*sizeof(Index));
          m_data = other.m_data;
        }
        else
        {
          Base::operator=(other);
        }
      }
      return *this;
    }

#ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseMatrix& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    { return Base::operator=(product); }

    template<typename OtherDerived>
    inline SparseMatrix& operator=(const ReturnByValue<OtherDerived>& other)
    {
      initAssignment(other);
      return Base::operator=(other.derived());
    }

    template<typename OtherDerived>
    inline SparseMatrix& operator=(const EigenBase<OtherDerived>& other)
    { return Base::operator=(other.derived()); }
#endif

    /** Assignment from an arbitrary sparse expression. If the storage orders differ, the
      * expression is evaluated into a temporary with the proper order and then swapped in. */
    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      const bool needToTranspose = (Flags & RowMajorBit) != (OtherDerived::Flags & RowMajorBit);
      if (needToTranspose)
      {
        // Two-pass algorithm:
        //   1 - count the number of coefficients per inner vector of the destination,
        //   2 - do the actual copy.
        // Since each coefficient of the rhs has to be evaluated twice, evaluate it into a
        // temporary if needed (internal::nested<OtherDerived,2>).
        typedef typename internal::nested<OtherDerived,2>::type OtherCopy;
        typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
        OtherCopy otherCopy(other.derived());

        SparseMatrix dest(other.rows(),other.cols());
        Eigen::Map<Matrix<Index, Dynamic, 1> > (dest.m_outerIndex,dest.outerSize()).setZero();

        // pass 1: count the number of coefficients per inner vector of the destination
        for (Index j=0; j<otherCopy.outerSize(); ++j)
          for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
            ++dest.m_outerIndex[it.index()];

        // turn the counts into starting positions (prefix sum)
        Index count = 0;
        VectorXi positions(dest.outerSize());
        for (Index j=0; j<dest.outerSize(); ++j)
        {
          Index tmp = dest.m_outerIndex[j];
          dest.m_outerIndex[j] = count;
          positions[j] = count;
          count += tmp;
        }
        dest.m_outerIndex[dest.outerSize()] = count;

        dest.m_data.resize(count);

        // pass 2: copy the coefficients
        for (Index j=0; j<otherCopy.outerSize(); ++j)
        {
          for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
          {
            Index pos = positions[it.index()]++;
            dest.m_data.index(pos) = j;
            dest.m_data.value(pos) = it.value();
          }
        }
        this->swap(dest);
        return *this;
      }
      else
      {
        if(other.isRValue())
          initAssignment(other.derived());
        return Base::operator=(other.derived());
      }
    }

    friend std::ostream & operator << (std::ostream & s, const SparseMatrix& m)
    {
      EIGEN_DBG_SPARSE(
        s << "Nonzero entries:\n";
        if(m.isCompressed())
          for (Index i=0; i<m.nonZeros(); ++i)
            s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
        else
          for (Index i=0; i<m.outerSize(); ++i)
          {
            int p = m.m_outerIndex[i];
            int pe = m.m_outerIndex[i]+m.m_innerNonZeros[i];
            Index k=p;
            for (; k<pe; ++k)
              s << "(" << m.m_data.value(k) << "," << m.m_data.index(k) << ") ";
            for (; k<m.m_outerIndex[i+1]; ++k)
              s << "(_,_) ";
          }
        s << std::endl;
        s << std::endl;
        s << "Outer pointers:\n";
        for (Index i=0; i<m.outerSize(); ++i)
          s << m.m_outerIndex[i] << " ";
        s << " $" << std::endl;
        if(!m.isCompressed())
        {
          s << "Inner non zeros:\n";
          for (Index i=0; i<m.outerSize(); ++i)
            s << m.m_innerNonZeros[i] << " ";
          s << " $" << std::endl;
        }
        s << std::endl;
      );
      s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
      return s;
    }

    /** Destructor */
    inline ~SparseMatrix()
    {
      delete[] m_outerIndex;
      delete[] m_innerNonZeros;
    }

#ifndef EIGEN_PARSED_BY_DOXYGEN
    Scalar sum() const;
#endif

#   ifdef EIGEN_SPARSEMATRIX_PLUGIN
#     include EIGEN_SPARSEMATRIX_PLUGIN
#   endif

  protected:

    /** \internal Resizes *this to the dimensions of \a other and makes sure it is in compressed mode. */
    template<typename Other>
    void initAssignment(const Other& other)
    {
      resize(other.rows(), other.cols());
      if(m_innerNonZeros)
      {
        delete[] m_innerNonZeros;
        m_innerNonZeros = 0;
      }
    }

    /** \internal Inserts a new element in compressed mode, shifting the stored data if necessary. */
    EIGEN_DONT_INLINE Scalar& insertCompressed(Index row, Index col)
    {
      eigen_assert(isCompressed());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index previousOuter = outer;
      if (m_outerIndex[outer+1]==0)
      {
        // we start a new inner vector: make all previously empty outer entries point to the current end
        while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
        {
          m_outerIndex[previousOuter] = static_cast<Index>(m_data.size());
          --previousOuter;
        }
        m_outerIndex[outer+1] = m_outerIndex[outer];
      }

      // handle the tricky case where the outer index array starts with a run of zeros
      // (e.g. [ 0 0 0 0 1 ...]) and the inserted coefficient is the very first one
      bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                    && (size_t(m_outerIndex[outer+1]) == m_data.size());

      size_t startId = m_outerIndex[outer];
      size_t p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];

      float reallocRatio = 1;
      if (m_data.allocatedSize()<=m_data.size())
      {
        // if there is no preallocated memory, reserve a minimum of 32 elements
        if (m_data.size()==0)
        {
          m_data.reserve(32);
        }
        else
        {
          // we need to reallocate the data: to limit the number of reallocations, use an
          // oversizing factor estimated from the current filling ratio
          float nnzEstimate = float(m_outerIndex[outer])*float(m_outerSize)/float(outer+1);
          reallocRatio = (nnzEstimate-float(m_data.size()))/float(m_data.size());
          // bound the ratio to avoid many small reallocations when the matrix is almost full,
          // and to avoid allocating far too much memory when it is almost empty
          reallocRatio = (std::min)((std::max)(reallocRatio,1.5f),8.f);
        }
      }
      m_data.resize(m_data.size()+1,reallocRatio);

      if (!isLastVec)
      {
        if (previousOuter==-1)
        {
          // the new element becomes the very first stored one:
          // rebuild the leading outer offsets and shift the whole data buffer by one
          for (Index k=0; k<=(outer+1); ++k)
            m_outerIndex[k] = 0;
          Index k=outer+1;
          while(m_outerIndex[k]==0)
            m_outerIndex[k++] = 1;
          while (k<=m_outerSize && m_outerIndex[k]!=0)
            m_outerIndex[k++]++;
          p = 0;
          --k;
          k = m_outerIndex[k]-1;
          while (k>0)
          {
            m_data.index(k) = m_data.index(k-1);
            m_data.value(k) = m_data.value(k-1);
            k--;
          }
        }
        else
        {
          // we are not appending at the very end of the buffer:
          // update the following outer offsets and shift the tail of the data by one
          Index j = outer+2;
          while (j<=m_outerSize && m_outerIndex[j]!=0)
            m_outerIndex[j++]++;
          --j;
          Index k = m_outerIndex[j]-1;
          while (k>=Index(p))
          {
            m_data.index(k) = m_data.index(k-1);
            m_data.value(k) = m_data.value(k-1);
            k--;
          }
        }
      }

      // shift the coefficients of the target inner vector to keep it sorted
      while ( (p > startId) && (m_data.index(p-1) > inner) )
      {
        m_data.index(p) = m_data.index(p-1);
        m_data.value(p) = m_data.value(p-1);
        --p;
      }

      m_data.index(p) = inner;
      return (m_data.value(p) = 0);
    }

    /** \internal A vector whose coefficients are all zero except the \a i -th one which equals \a v.
      * It is used with reserve() to grow the capacity of a single inner vector. */
    class SingletonVector
    {
        Index m_index;
        Index m_value;
      public:
        typedef Index value_type;
        SingletonVector(Index i, Index v)
          : m_index(i), m_value(v)
        {}

        Index operator[](Index i) const { return i==m_index ? m_value : 0; }
    };

    /** \internal Inserts a new element in uncompressed mode, growing the corresponding inner
      * vector if it is full. */
    EIGEN_DONT_INLINE Scalar& insertUncompressed(Index row, Index col)
    {
      eigen_assert(!isCompressed());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      std::ptrdiff_t room = m_outerIndex[outer+1] - m_outerIndex[outer];
      std::ptrdiff_t innerNNZ = m_innerNonZeros[outer];
      if(innerNNZ>=room)
      {
        // the inner vector is full: reserve more room for it (this moves the whole buffer)
        reserve(SingletonVector(outer,std::max<std::ptrdiff_t>(2,innerNNZ)));
      }

      Index startId = m_outerIndex[outer];
      Index p = startId + m_innerNonZeros[outer];
      while ( (p > startId) && (m_data.index(p-1) > inner) )
      {
        m_data.index(p) = m_data.index(p-1);
        m_data.value(p) = m_data.value(p-1);
        --p;
      }
      eigen_assert((p<=startId || m_data.index(p-1)!=inner) && "you cannot insert an element that already exist, you must call coeffRef to this end");

      m_innerNonZeros[outer]++;

      m_data.index(p) = inner;
      return (m_data.value(p) = 0);
    }

  public:
    /** \internal Appends the coefficient (\a row, \a col) at the end of its inner vector,
      * assuming enough room has been reserved; no attempt is made to keep the vector sorted. */
    inline Scalar& insertBackUncompressed(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      eigen_assert(!isCompressed());
      eigen_assert(m_innerNonZeros[outer]<=(m_outerIndex[outer+1] - m_outerIndex[outer]));

      Index p = m_outerIndex[outer] + m_innerNonZeros[outer];
      m_innerNonZeros[outer]++;
      m_data.index(p) = inner;
      return (m_data.value(p) = 0);
    }

  private:
    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
    }

    struct default_prunning_func {
      default_prunning_func(Scalar ref, RealScalar eps) : reference(ref), epsilon(eps) {}
      inline bool operator() (const Index&, const Index&, const Scalar& value) const
      {
        return !internal::isMuchSmallerThan(value, reference, epsilon);
      }
      Scalar reference;
      RealScalar epsilon;
    };
};

template<typename Scalar, int _Options, typename _Index>
class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseMatrix& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
    {
      if(mat.isCompressed())
        m_end = mat.m_outerIndex[outer+1];
      else
        m_end = m_id + mat.m_innerNonZeros[outer];
    }

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline Index index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const Index* m_indices;
    const Index m_outer;
    Index m_id;
    Index m_end;
};
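
/* Iterating over the non zeros of a SparseMatrix, one inner vector (a column for the default
 * column-major order) at a time - an illustrative sketch, the matrix \c mat being a placeholder:
 * \code
 * SparseMatrix<double> mat(rows,cols);
 * // ... fill mat ...
 * for (int k=0; k<mat.outerSize(); ++k)
 *   for (SparseMatrix<double>::InnerIterator it(mat,k); it; ++it)
 *   {
 *     it.value();   // value of the current non zero
 *     it.row();     // row index
 *     it.col();     // column index (== k for the default storage order)
 *     it.index();   // inner index, here equal to it.row()
 *   }
 * \endcode
 */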

template<typename Scalar, int _Options, typename _Index>
class SparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseMatrix& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
    {
      if(mat.isCompressed())
        m_id = mat.m_outerIndex[outer+1];
      else
        m_id = m_start + mat.m_innerNonZeros[outer];
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline Index index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const Index* m_indices;
    const Index m_outer;
    Index m_id;
    const Index m_start;
};

namespace internal {

template<typename InputIterator, typename SparseMatrixType>
void set_from_triplets(const InputIterator& begin, const InputIterator& end, SparseMatrixType& mat, int Options = 0)
{
  EIGEN_UNUSED_VARIABLE(Options);
  enum { IsRowMajor = SparseMatrixType::IsRowMajor };
  typedef typename SparseMatrixType::Scalar Scalar;
  typedef typename SparseMatrixType::Index Index;
  SparseMatrix<Scalar,IsRowMajor?ColMajor:RowMajor> trMat(mat.rows(),mat.cols());

  // pass 1: count the number of entries per outer vector of the temporary trMat
  VectorXi wi(trMat.outerSize());
  wi.setZero();
  for(InputIterator it(begin); it!=end; ++it)
    wi(IsRowMajor ? it->col() : it->row())++;

  // pass 2: insert all the elements into trMat
  trMat.reserve(wi);
  for(InputIterator it(begin); it!=end; ++it)
    trMat.insertBackUncompressed(it->row(),it->col()) = it->value();

  // pass 3: collapse duplicate entries
  trMat.sumupDuplicates();

  // pass 4: transposed copy -> implicit sorting of the inner indices
  mat = trMat;
}

} // end namespace internal


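/** Fills the matrix \c *this with the list of \em triplets defined by the iterator range
  * \a begin - \a end. A \em triplet is any object exposing \c row(), \c col() and \c value()
  * member functions, e.g. the Triplet<> class provided by the sparse module. The previous
  * content of \c *this is destroyed, duplicated triplets are summed up, and the result is
  * returned in compressed mode.
  *
  * A typical usage sketch (illustrative only; \c estimation_of_entries and the way the triplet
  * list is built are placeholders):
  * \code
  * typedef Triplet<double> T;
  * std::vector<T> tripletList;
  * tripletList.reserve(estimation_of_entries);
  * // fill tripletList with T(i,j,v_ij) entries, in any order
  * SparseMatrix<double> m(rows,cols);
  * m.setFromTriplets(tripletList.begin(), tripletList.end());
  * // m is ready to go!
  * \endcode
  *
  * \warning The triplet range is traversed twice (once to compute the nonzero pattern, once to
  * fill the values), so the iterators must allow multiple passes over the same data.
  */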
template<typename Scalar, int _Options, typename _Index>
template<typename InputIterators>
void SparseMatrix<Scalar,_Options,_Index>::setFromTriplets(const InputIterators& begin, const InputIterators& end)
{
  internal::set_from_triplets(begin, end, *this);
}

/** \internal Sums up all the duplicate entries; the matrix must be in uncompressed mode. */
template<typename Scalar, int _Options, typename _Index>
void SparseMatrix<Scalar,_Options,_Index>::sumupDuplicates()
{
  eigen_assert(!isCompressed());
  // for the current outer vector, wi(inner) records where the first entry with that inner index was stored
  VectorXi wi(innerSize());
  wi.fill(-1);
  Index count = 0;

  for(int j=0; j<outerSize(); ++j)
  {
    Index start = count;
    Index oldEnd = m_outerIndex[j]+m_innerNonZeros[j];
    for(Index k=m_outerIndex[j]; k<oldEnd; ++k)
    {
      Index i = m_data.index(k);
      if(wi(i)>=start)
      {
        // we already met this entry => accumulate it
        m_data.value(wi(i)) += m_data.value(k);
      }
      else
      {
        m_data.value(count) = m_data.value(k);
        m_data.index(count) = m_data.index(k);
        wi(i) = count;
        ++count;
      }
    }
    m_outerIndex[j] = start;
  }
  m_outerIndex[m_outerSize] = count;

  // turn the matrix into compressed mode
  delete[] m_innerNonZeros;
  m_innerNonZeros = 0;
  m_data.resize(m_outerIndex[m_outerSize]);
}

} // end namespace Eigen

#endif // EIGEN_SPARSEMATRIX_H