10 #ifndef EIGEN_SPARSEMATRIX_H    11 #define EIGEN_SPARSEMATRIX_H    42 template<
typename _Scalar, 
int _Options, 
typename _Index>
    59 template<
typename _Scalar, 
int _Options, 
typename _Index, 
int DiagIndex>
    73     ColsAtCompileTime = 1,
    75     MaxColsAtCompileTime = 1,
    80 template<
typename _Scalar, 
int _Options, 
typename _Index, 
int DiagIndex>
    82  : 
public traits<Diagonal<SparseMatrix<_Scalar, _Options, _Index>, DiagIndex> >
    91 template<
typename _Scalar, 
int _Options, 
typename _Index>
    99     using Base::isCompressed;
   100     using Base::nonZeros;
   102     using Base::operator+=;
   103     using Base::operator-=;
   112     using Base::IsRowMajor;
   132     inline Index rows()
 const { 
return IsRowMajor ? m_outerSize : m_innerSize; }
   134     inline Index cols()
 const { 
return IsRowMajor ? m_innerSize : m_outerSize; }
   178     inline Storage& 
data() { 
return m_data; }
   180     inline const Storage& 
data()
 const { 
return m_data; }
   186       eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());
   190       Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
   204       eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());
   210       Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
   211       eigen_assert(end>=start && 
"you probably called coeffRef on a non finalized matrix");
   213         return insert(row,col);
   215       if((p<end) && (m_data.
index(p)==inner))
   216         return m_data.
value(p);
   218         return insert(row,col);
   250       memset(m_outerIndex, 0, (m_outerSize+1)*
sizeof(
StorageIndex));
   252         memset(m_innerNonZeros, 0, (m_outerSize)*
sizeof(
StorageIndex));
   260       eigen_assert(isCompressed() && 
"This function does not make sense in non compressed mode.");
   264     #ifdef EIGEN_PARSED_BY_DOXYGEN   277     template<
class SizesType>
   278     inline void reserve(
const SizesType& reserveSizes);
   280     template<
class SizesType>
   281     inline void reserve(
const SizesType& reserveSizes, 
const typename SizesType::value_type& enableif =
   285         SizesType::value_type())
   288       reserveInnerVectors(reserveSizes);
   290     #endif // EIGEN_PARSED_BY_DOXYGEN   292     template<
class SizesType>
   297         Index totalReserveSize = 0;
   306         for(
Index j=0; j<m_outerSize; ++j)
   308           newOuterIndex[j] = count;
   309           count += reserveSizes[j] + (m_outerIndex[j+1]-m_outerIndex[j]);
   310           totalReserveSize += reserveSizes[j];
   312         m_data.
reserve(totalReserveSize);
   313         StorageIndex previousOuterIndex = m_outerIndex[m_outerSize];
   314         for(
Index j=m_outerSize-1; j>=0; --j)
   316           StorageIndex innerNNZ = previousOuterIndex - m_outerIndex[j];
   317           for(
Index i=innerNNZ-1; i>=0; --i)
   319             m_data.
index(newOuterIndex[j]+i) = m_data.
index(m_outerIndex[j]+i);
   320             m_data.
value(newOuterIndex[j]+i) = m_data.
value(m_outerIndex[j]+i);
   322           previousOuterIndex = m_outerIndex[j];
   323           m_outerIndex[j] = newOuterIndex[j];
   324           m_innerNonZeros[j] = innerNNZ;
   326         m_outerIndex[m_outerSize] = m_outerIndex[m_outerSize-1] + m_innerNonZeros[m_outerSize-1] + reserveSizes[m_outerSize-1];
   328         m_data.
resize(m_outerIndex[m_outerSize]);
   336         for(
Index j=0; j<m_outerSize; ++j)
   338           newOuterIndex[j] = count;
   339           StorageIndex alreadyReserved = (m_outerIndex[j+1]-m_outerIndex[j]) - m_innerNonZeros[j];
   340           StorageIndex toReserve = std::max<StorageIndex>(reserveSizes[j], alreadyReserved);
   341           count += toReserve + m_innerNonZeros[j];
   343         newOuterIndex[m_outerSize] = count;
   346         for(
Index j=m_outerSize-1; j>=0; --j)
   348           Index offset = newOuterIndex[j] - m_outerIndex[j];
   352             for(
Index i=innerNNZ-1; i>=0; --i)
   354               m_data.
index(newOuterIndex[j]+i) = m_data.
index(m_outerIndex[j]+i);
   355               m_data.
value(newOuterIndex[j]+i) = m_data.
value(m_outerIndex[j]+i);
   361         std::free(newOuterIndex);
   381       return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
   388       eigen_assert(
Index(m_outerIndex[outer+1]) == m_data.
size() && 
"Invalid ordered insertion (invalid outer index)");
   389       eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.
index(m_data.
size()-1)<inner) && 
"Invalid ordered insertion (invalid inner index)");
   390       Index p = m_outerIndex[outer+1];
   391       ++m_outerIndex[outer+1];
   393       return m_data.
value(p);
   400       Index p = m_outerIndex[outer+1];
   401       ++m_outerIndex[outer+1];
   403       return m_data.
value(p);
   410       eigen_assert(m_outerIndex[outer]==
Index(m_data.
size()) && 
"You must call startVec for each inner vector sequentially");
   411       eigen_assert(m_outerIndex[outer+1]==0 && 
"You must call startVec for each inner vector sequentially");
   412       m_outerIndex[outer+1] = m_outerIndex[outer];
   423         Index i = m_outerSize;
   425         while (i>=0 && m_outerIndex[i]==0)
   428         while (i<=m_outerSize)
   430           m_outerIndex[i] = size;
   438     template<
typename InputIterators>
   439     void setFromTriplets(
const InputIterators& begin, 
const InputIterators& end);
   441     template<
typename InputIterators,
typename DupFunctor>
   442     void setFromTriplets(
const InputIterators& begin, 
const InputIterators& end, DupFunctor dup_func);
   446     template<
typename DupFunctor>
   447     void collapseDuplicates(DupFunctor dup_func = DupFunctor());
   455       return insert(IsRowMajor ? j : i, IsRowMajor ? i : j);
   467       Index oldStart = m_outerIndex[1];
   468       m_outerIndex[1] = m_innerNonZeros[0];
   469       for(
Index j=1; j<m_outerSize; ++j)
   471         Index nextOldStart = m_outerIndex[j+1];
   472         Index offset = oldStart - m_outerIndex[j];
   475           for(
Index k=0; k<m_innerNonZeros[j]; ++k)
   477             m_data.
index(m_outerIndex[j]+k) = m_data.
index(oldStart+k);
   478             m_data.
value(m_outerIndex[j]+k) = m_data.
value(oldStart+k);
   481         m_outerIndex[j+1] = m_outerIndex[j] + m_innerNonZeros[j];
   482         oldStart = nextOldStart;
   484       std::free(m_innerNonZeros);
   486       m_data.
resize(m_outerIndex[m_outerSize]);
   493       if(m_innerNonZeros != 0)
   496       for (
Index i = 0; i < m_outerSize; i++)
   498         m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i]; 
   505       prune(default_prunning_func(reference,
epsilon));
   515     template<
typename KeepFunc>
   516     void prune(
const KeepFunc& keep = KeepFunc())
   522       for(
Index j=0; j<m_outerSize; ++j)
   524         Index previousStart = m_outerIndex[j];
   526         Index end = m_outerIndex[j+1];
   527         for(
Index i=previousStart; i<end; ++i)
   529           if(keep(IsRowMajor?j:m_data.
index(i), IsRowMajor?m_data.
index(i):j, m_data.
value(i)))
   537       m_outerIndex[m_outerSize] = k;
   552       if (this->rows() == rows && this->cols() == cols) 
return;
   555       if(rows==0 || cols==0) 
return resize(rows,cols);
   557       Index innerChange = IsRowMajor ? cols - this->cols() : rows - this->rows();
   558       Index outerChange = IsRowMajor ? rows - this->rows() : cols - this->cols();
   567         m_innerNonZeros = newInnerNonZeros;
   569         for(
Index i=m_outerSize; i<m_outerSize+outerChange; i++)          
   570           m_innerNonZeros[i] = 0;
   572       else if (innerChange < 0) 
   577         for(
Index i = 0; i < m_outerSize; i++)
   578           m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
   582       if (m_innerNonZeros && innerChange < 0)
   588           while (n > 0 && m_data.
index(start+n-1) >= newInnerSize) --n; 
   592       m_innerSize = newInnerSize;
   595       if (outerChange == 0)
   600       m_outerIndex = newOuterIndex;
   603         StorageIndex last = m_outerSize == 0 ? 0 : m_outerIndex[m_outerSize];
   604         for(
Index i=m_outerSize; i<m_outerSize+outerChange+1; i++)          
   605           m_outerIndex[i] = last; 
   607       m_outerSize += outerChange;
   619       const Index outerSize = IsRowMajor ? rows : cols;
   620       m_innerSize = IsRowMajor ? cols : rows;
   622       if (m_outerSize != outerSize || m_outerSize==0)
   624         std::free(m_outerIndex);
   628         m_outerSize = outerSize;
   632         std::free(m_innerNonZeros);
   635       memset(m_outerIndex, 0, (m_outerSize+1)*
sizeof(
StorageIndex));
   646     const ConstDiagonalReturnType 
diagonal()
 const { 
return ConstDiagonalReturnType(*
this); }
   652     DiagonalReturnType 
diagonal() { 
return DiagonalReturnType(*
this); }
   656       : m_outerSize(-1), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
   658       check_template_parameters();
   664       : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
   666       check_template_parameters();
   671     template<
typename OtherDerived>
   673       : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
   676         YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)
   677       check_template_parameters();
   683         #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN   684           EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
   691     template<
typename OtherDerived, 
unsigned int UpLo>
   693       : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
   695       check_template_parameters();
   696       Base::operator=(other);
   701       : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
   703       check_template_parameters();
   708     template<
typename OtherDerived>
   710       : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
   712       check_template_parameters();
   713       initAssignment(other);
   718     template<
typename OtherDerived>
   720       : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
   722       check_template_parameters();
   742       eigen_assert(rows() == cols() && 
"ONLY FOR SQUARED MATRICES");
   743       this->m_data.
resize(rows());
   747       std::free(m_innerNonZeros);
   756       else if(
this!=&other)
   758         #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN   759           EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
   761         initAssignment(other);
   769           Base::operator=(other);
   775 #ifndef EIGEN_PARSED_BY_DOXYGEN   776     template<
typename OtherDerived>
   778     { 
return Base::operator=(other.
derived()); }
   779 #endif // EIGEN_PARSED_BY_DOXYGEN   781     template<
typename OtherDerived>
   787         s << 
"Nonzero entries:\n";
   810         s << 
"Outer pointers:\n";
   814         s << 
" $" << std::endl;
   817           s << 
"Inner non zeros:\n";
   821           s << 
" $" << std::endl;
   825       s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
   832       std::free(m_outerIndex);
   833       std::free(m_innerNonZeros);
   839 #   ifdef EIGEN_SPARSEMATRIX_PLUGIN   840 #     include EIGEN_SPARSEMATRIX_PLUGIN   845     template<
typename Other>
   848       resize(other.rows(), other.cols());
   851         std::free(m_innerNonZeros);
   888       eigen_assert(m_innerNonZeros[outer]<=(m_outerIndex[outer+1] - m_outerIndex[outer]));
   890       Index p = m_outerIndex[outer] + m_innerNonZeros[outer]++;
   892       return (m_data.
value(p) = 0);
   904     inline bool operator() (
const Index&, 
const Index&, 
const Scalar& value)
 const   906       return !internal::isMuchSmallerThan(value, reference, 
epsilon);
   915 template<
typename InputIterator, 
typename SparseMatrixType, 
typename DupFunctor>
   916 void set_from_triplets(
const InputIterator& begin, 
const InputIterator& end, SparseMatrixType& mat, DupFunctor dup_func)
   918   enum { IsRowMajor = SparseMatrixType::IsRowMajor };
   919   typedef typename SparseMatrixType::Scalar Scalar;
   920   typedef typename SparseMatrixType::StorageIndex StorageIndex;
   926     typename SparseMatrixType::IndexVector wi(trMat.outerSize());
   928     for(InputIterator it(begin); it!=end; ++it)
   930       eigen_assert(it->row()>=0 && it->row()<mat.rows() && it->col()>=0 && it->col()<mat.cols());
   931       wi(IsRowMajor ? it->col() : it->row())++;
   936     for(InputIterator it(begin); it!=end; ++it)
   937       trMat.insertBackUncompressed(it->row(),it->col()) = it->value();
   940     trMat.collapseDuplicates(dup_func);
   987 template<
typename Scalar, 
int _Options, 
typename _Index>
   988 template<
typename InputIterators>
  1003 template<
typename Scalar, 
int _Options, 
typename _Index>
  1004 template<
typename InputIterators,
typename DupFunctor>
  1007   internal::set_from_triplets<InputIterators, SparseMatrix<Scalar,_Options,_Index>, DupFunctor>(begin, end, *
this, dup_func);
  1011 template<
typename Scalar, 
int _Options, 
typename _Index>
  1012 template<
typename DupFunctor>
  1017   IndexVector wi(innerSize());
  1021   for(
Index j=0; j<outerSize(); ++j)
  1024     Index oldEnd  = m_outerIndex[j]+m_innerNonZeros[j];
  1025     for(
Index k=m_outerIndex[j]; k<oldEnd; ++k)
  1041     m_outerIndex[j] = start;
  1043   m_outerIndex[m_outerSize] = count;
  1046   std::free(m_innerNonZeros);
  1047   m_innerNonZeros = 0;
  1048   m_data.
resize(m_outerIndex[m_outerSize]);
  1051 template<
typename Scalar, 
int _Options, 
typename _Index>
  1052 template<
typename OtherDerived>
  1056         YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)
  1058   #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN  1059     EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
  1063   if (needToTranspose)
  1065     #ifdef EIGEN_SPARSE_TRANSPOSED_COPY_PLUGIN  1066       EIGEN_SPARSE_TRANSPOSED_COPY_PLUGIN
  1075     OtherCopy otherCopy(other.
derived());
  1076     OtherCopyEval otherCopyEval(otherCopy);
  1083     for (
Index j=0; j<otherCopy.outerSize(); ++j)
  1084       for (
typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
  1085         ++dest.m_outerIndex[it.index()];
  1089     IndexVector positions(dest.outerSize());
  1090     for (
Index j=0; j<dest.outerSize(); ++j)
  1093       dest.m_outerIndex[j] = count;
  1094       positions[j] = count;
  1097     dest.m_outerIndex[dest.outerSize()] = count;
  1099     dest.m_data.resize(count);
  1103       for (
typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
  1105         Index pos = positions[it.index()]++;
  1106         dest.m_data.index(pos) = j;
  1107         dest.m_data.value(pos) = it.value();
  1117       initAssignment(other.
derived());
  1120     return Base::operator=(other.
derived());
  1124 template<
typename _Scalar, 
int _Options, 
typename _Index>
  1127   eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());
  1138         m_data.
reserve(2*m_innerSize);
  1144       memset(m_innerNonZeros, 0, (m_outerSize)*
sizeof(
StorageIndex));
  1149       for(
Index j=1; j<=m_outerSize; ++j)
  1150         m_outerIndex[j] = end;
  1157       for(
Index j=0; j<m_outerSize; ++j)
  1158         m_innerNonZeros[j] = m_outerIndex[j+1]-m_outerIndex[j];
  1167   if(m_outerIndex[outer]==data_end)
  1175     while(j>=0 && m_innerNonZeros[j]==0)
  1176       m_outerIndex[j--] = p;
  1179     ++m_innerNonZeros[outer];
  1190       for(
Index k=outer+1; k<=m_outerSize; ++k)
  1191         if(m_outerIndex[k]==data_end)
  1192           m_outerIndex[k] = new_end;
  1194     return m_data.
value(p);
  1199   if(m_outerIndex[outer+1]==data_end && m_outerIndex[outer]+m_innerNonZeros[outer]==m_data.
size())
  1204     ++m_innerNonZeros[outer];
  1215       for(
Index k=outer+1; k<=m_outerSize; ++k)
  1216         if(m_outerIndex[k]==data_end)
  1217           m_outerIndex[k] = new_end;
  1221     Index startId = m_outerIndex[outer];
  1222     Index p = m_outerIndex[outer]+m_innerNonZeros[outer]-1;
  1223     while ( (p > startId) && (m_data.
index(p-1) > inner) )
  1231     return (m_data.
value(p) = 0);
  1241   return insertUncompressed(row,col);
  1244 template<
typename _Scalar, 
int _Options, 
typename _Index>
  1252   Index room = m_outerIndex[outer+1] - m_outerIndex[outer];
  1257     reserve(SingletonVector(outer,std::max<StorageIndex>(2,innerNNZ)));
  1260   Index startId = m_outerIndex[outer];
  1261   Index p = startId + m_innerNonZeros[outer];
  1262   while ( (p > startId) && (m_data.
index(p-1) > inner) )
  1268   eigen_assert((p<=startId || m_data.
index(p-1)!=inner) && 
"you cannot insert an element that already exists, you must call coeffRef to this end");
  1270   m_innerNonZeros[outer]++;
  1272   m_data.
index(p) = inner;
  1273   return (m_data.
value(p) = 0);
  1276 template<
typename _Scalar, 
int _Options, 
typename _Index>
  1284   Index previousOuter = outer;
  1285   if (m_outerIndex[outer+1]==0)
  1288     while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
  1293     m_outerIndex[outer+1] = m_outerIndex[outer];
  1299   bool isLastVec = (!(previousOuter==-1 && m_data.
size()!=0))
  1300                 && (size_t(m_outerIndex[outer+1]) == m_data.
size());
  1302   size_t startId = m_outerIndex[outer];
  1304   size_t p = m_outerIndex[outer+1];
  1305   ++m_outerIndex[outer+1];
  1307   double reallocRatio = 1;
  1311     if (m_data.
size()==0)
  1320       double nnzEstimate = double(m_outerIndex[outer])*double(m_outerSize)/double(outer+1);
  1321       reallocRatio = (nnzEstimate-double(m_data.
size()))/double(m_data.
size());
  1332     if (previousOuter==-1)
  1336       for (
Index k=0; k<=(outer+1); ++k)
  1337         m_outerIndex[k] = 0;
  1339       while(m_outerIndex[k]==0)
  1340         m_outerIndex[k++] = 1;
  1341       while (k<=m_outerSize && m_outerIndex[k]!=0)
  1342         m_outerIndex[k++]++;
  1345       k = m_outerIndex[k]-1;
  1358       while (j<=m_outerSize && m_outerIndex[j]!=0)
  1359         m_outerIndex[j++]++;
  1362       Index k = m_outerIndex[j]-1;
  1372   while ( (p > startId) && (m_data.
index(p-1) > inner) )
  1379   m_data.
index(p) = inner;
  1380   return (m_data.
value(p) = 0);
  1385 template<
typename _Scalar, 
int _Options, 
typename _Index>
  1387   : 
evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > >
  1392   explicit evaluator(
const SparseMatrixType &mat) : Base(mat) {}
  1399 #endif // EIGEN_SPARSEMATRIX_H 
StorageIndex * outerIndexPtr()
const ConstDiagonalReturnType diagonal() const
bool isCompressed() const
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void call_assignment_no_alias(Dst &dst, const Src &src, const Func &func)
Scalar & insert(Index row, Index col)
void reserve(const SizesType &reserveSizes, const typename SizesType::value_type &enableif=typename SizesType::value_type())
#define EIGEN_STRONG_INLINE
StorageIndex & index(Index i)
Diagonal< SparseMatrix > DiagonalReturnType
void conservativeResize(Index rows, Index cols)
const unsigned int CompressedAccessBit
const Scalar * valuePtr() const
ref_selector< MatrixType >::type MatrixTypeNested
StorageIndex * m_outerIndex
Scalar & insertBackByOuterInner(Index outer, Index inner)
A versatile sparse matrix representation. 
void prune(const KeepFunc &keep=KeepFunc())
Base::ReverseInnerIterator ReverseInnerIterator
DiagonalReturnType diagonal()
std::ostream & operator<<(std::ostream &s, const Packet16uc &v)
evaluator(const SparseMatrixType &mat)
A matrix or vector expression mapping an existing array of data. 
const int InnerRandomAccessPattern
EIGEN_DEVICE_FUNC ColXpr col(Index i)
This is the const version of col(). */. 
Scalar & coeffRef(Index row, Index col)
internal::CompressedStorage< Scalar, StorageIndex > Storage
const unsigned int LvalueBit
Diagonal< const SparseMatrix > ConstDiagonalReturnType
void resizeNonZeros(Index size)
const Scalar * valuePtr() const
const StorageIndex * indexPtr() const
void startVec(Index outer)
SparseMatrix(const SparseMatrix &other)
Pseudo expression to manipulate a triangular sparse matrix as a selfadjoint matrix. 
StorageIndex * innerNonZeroPtr()
Holds information about the various numeric (i.e. scalar) types allowed by Eigen. ...
StorageIndex * m_innerNonZeros
#define EIGEN_STATIC_ASSERT(CONDITION, MSG)
EIGEN_DEVICE_FUNC IndexDest convert_index(const IndexSrc &idx)
Eigen::Index Index
The interface type of indices. 
const unsigned int RowMajorBit
EIGEN_STRONG_INLINE Scalar & insertBackUncompressed(Index row, Index col)
SparseMatrix(const DiagonalBase< OtherDerived > &other)
Copy constructor with in-place evaluation. 
Scalar & insertByOuterInner(Index j, Index i)
#define EIGEN_DONT_INLINE
void collapseDuplicates(DupFunctor dup_func=DupFunctor())
#define EIGEN_DBG_SPARSE(X)
EIGEN_DEVICE_FUNC void smart_copy(const T *start, const T *end, T *target)
internal::traits< Derived >::StorageIndex StorageIndex
void setFromTriplets(const InputIterators &begin, const InputIterators &end)
void initAssignment(const Other &other)
#define EIGEN_SPARSE_PUBLIC_INTERFACE(Derived)
Base class of any sparse matrices or sparse expressions. 
evaluator< SparseCompressedBase< SparseMatrix< _Scalar, _Options, _Index > > > Base
void prune(const Scalar &reference, const RealScalar &epsilon=NumTraits< RealScalar >::dummy_precision())
static void check_template_parameters()
SparseMatrix & operator=(const SparseMatrix &other)
SparseMatrix & operator=(const EigenBase< OtherDerived > &other)
internal::traits< Derived >::Scalar Scalar
const StorageIndex * innerNonZeroPtr() const
Scalar & insertBack(Index row, Index col)
void append(const Scalar &v, Index i)
EIGEN_DEFAULT_DENSE_INDEX_TYPE Index
The Index type as used for the API. 
void swap(SparseMatrix &other)
EIGEN_DONT_INLINE Scalar & insertUncompressed(Index row, Index col)
void resize(Index rows, Index cols)
void reserve(Index reserveSize)
EIGEN_DEVICE_FUNC void evalTo(Dest &dst) const
StorageIndex operator[](Index i) const
void resize(Index size, double reserveSizeFactor=0)
Scalar atInRange(Index start, Index end, Index key, const Scalar &defaultValue=Scalar(0)) const
SparseMatrix(Index rows, Index cols)
MappedSparseMatrix< Scalar, Flags > Map
SparseMatrix(const SparseMatrixBase< OtherDerived > &other)
SparseCompressedBase< SparseMatrix > Base
SparseMatrix(const ReturnByValue< OtherDerived > &other)
Copy constructor with in-place evaluation. 
const unsigned int NestByRefBit
const StorageIndex * innerIndexPtr() const
Base::InnerIterator InnerIterator
SparseMatrix< Scalar,(Flags &~RowMajorBit)|(IsRowMajor?RowMajorBit:0)> TransposedSparseMatrix
SingletonVector(Index i, Index v)
StorageIndex * innerIndexPtr()
int64_t max(int64_t a, const int b)
const StorageIndex * outerIndexPtr() const
Derived & const_cast_derived() const
EIGEN_DONT_INLINE Scalar & insertCompressed(Index row, Index col)
SparseMatrix< _Scalar, _Options, _Index > SparseMatrixType
General-purpose arrays with easy API for coefficient-wise operations. 
EIGEN_DEVICE_FUNC RowXpr row(Index i)
This is the const version of row(). */. 
Scalar coeff(Index row, Index col) const
const Derived & derived() const
Base::IndexVector IndexVector
SparseMatrix< _Scalar, _Options, _Index > MatrixType
Expression of a diagonal/subdiagonal/superdiagonal in a matrix. 
void reserveInnerVectors(const SizesType &reserveSizes)
Scalar & insertBackByOuterInnerUnordered(Index outer, Index inner)
EIGEN_DEVICE_FUNC void throw_std_bad_alloc()
#define eigen_internal_assert(x)
SparseMatrix(const SparseSelfAdjointView< OtherDerived, UpLo > &other)
const Storage & data() const
NumTraits< Scalar >::Real RealScalar
Common base class for sparse [compressed]-{row|column}-storage format. 
Index allocatedSize() const
EIGEN_DEVICE_FUNC Derived & derived()
remove_reference< MatrixTypeNested >::type _MatrixTypeNested
void swap(CompressedStorage &other)
Index searchLowerIndex(Index key) const
#define EIGEN_UNUSED_VARIABLE(var)
Base::ScalarVector ScalarVector
void set_from_triplets(const InputIterator &begin, const InputIterator &end, SparseMatrixType &mat, DupFunctor dup_func)
void swap(scoped_array< T > &a, scoped_array< T > &b)
default_prunning_func(const Scalar &ref, const RealScalar &eps)
EIGEN_DEVICE_FUNC const Derived & derived() const