#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H

template<typename _Scalar, int _Options, typename _Index>
template<typename _Scalar, int _Options, typename _Index, int DiagIndex>
ColsAtCompileTime = 1,
MaxColsAtCompileTime = 1,
CoeffReadCost = _MatrixTypeNested::CoeffReadCost*10
template<typename _Scalar, int _Options, typename _Index>
using Base::IsRowMajor;
inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }
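// Editor's note with a usage sketch (not part of the header): the "outer"
// dimension is the one along which inner vectors are laid out, so for the
// default column-major storage outer==columns and inner==rows, and the two
// accessors above swap their backing fields accordingly.
//
//   Eigen::SparseMatrix<double> A(3, 5);                    // column-major
//   // A.rows() == 3 == m_innerSize, A.cols() == 5 == m_outerSize
//   Eigen::SparseMatrix<double, Eigen::RowMajor> B(3, 5);   // row-major
//   // B.rows() == 3 == m_outerSize, B.cols() == 5 == m_innerSize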
inline Scalar coeff(Index row, Index col) const
{
  eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;
  Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
  return m_data.atInRange(m_outerIndex[outer], end, inner);
}
inline Scalar& coeffRef(Index row, Index col)
{
  eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  Index start = m_outerIndex[outer];
  Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
  eigen_assert(end>=start && "you probably called coeffRef on a non finalized matrix");
  // ... (p = position of 'inner' within [start,end) found by binary search)
  if((p<end) && (m_data.index(p)==inner))
    return m_data.value(p);
  // ...
}
inline Scalar& insert(Index row, Index col)
{
  eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

  if(isCompressed())
  {
    reserve(VectorXi::Constant(outerSize(), 2));
  }
  return insertUncompressed(row,col);
}
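// Usage sketch (editor's addition, not part of the header): insert() is only
// cheap when each inner vector has spare capacity, which is why the code
// above reserves room for 2 nonzeros per inner vector when none was reserved.
// A typical fill pattern, assuming about nnzPerCol nonzeros per column:
//
//   Eigen::SparseMatrix<double> A(rows, cols);
//   A.reserve(Eigen::VectorXi::Constant(cols, nnzPerCol));
//   A.insert(i, j) = value;   // (i,j) must not already exist; else use coeffRef
//   A.makeCompressed();       // optional: return to compressed storage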
class ReverseInnerIterator;
inline void setZero()
{
  m_data.clear();
  memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
  if(m_innerNonZeros)
    memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(Index));
}
inline Index nonZeros() const
{
  if(m_innerNonZeros)
    return innerNonZeros().sum();
  return static_cast<Index>(m_data.size());
}
eigen_assert(isCompressed() && "This function does not make sense in non compressed mode.");
#ifdef EIGEN_PARSED_BY_DOXYGEN
template<class SizesType>
inline void reserve(const SizesType& reserveSizes);
#else
template<class SizesType>
inline void reserve(const SizesType& reserveSizes,
                    const typename SizesType::value_type& enableif = typename SizesType::value_type())
{
  EIGEN_UNUSED_VARIABLE(enableif);
  reserveInnerVectors(reserveSizes);
}
template<class SizesType>
inline void reserve(const SizesType& reserveSizes,
                    const typename SizesType::Scalar& enableif =
#if (!defined(_MSC_VER)) || (_MSC_VER>=1500) // MSVC 2005 fails to compile with this 'typename'
                    typename
#endif
                    SizesType::Scalar())
{
  EIGEN_UNUSED_VARIABLE(enableif);
  reserveInnerVectors(reserveSizes);
}
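// Editor's note (not part of the header): the dummy 'enableif' default
// argument acts as a poor man's SFINAE so that reserve() accepts both
// STL-style containers (which expose value_type) and Eigen dense vectors
// (which expose Scalar). Either call form works:
//
//   std::vector<int> sizes(A.outerSize(), 4);
//   A.reserve(sizes);                                        // STL container
//   A.reserve(Eigen::VectorXi::Constant(A.outerSize(), 4));  // Eigen vector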
#endif // EIGEN_PARSED_BY_DOXYGEN

template<class SizesType>
inline void reserveInnerVectors(const SizesType& reserveSizes)
{
  if(isCompressed())
  {
    std::size_t totalReserveSize = 0;
    // turn the matrix into non-compressed mode
    m_innerNonZeros = static_cast<Index*>(std::malloc(m_outerSize * sizeof(Index)));

    // temporarily use m_innerNonZeros to hold the new starting points
    Index* newOuterIndex = m_innerNonZeros;

    Index count = 0;
    for(Index j=0; j<m_outerSize; ++j)
    {
      newOuterIndex[j] = count;
      count += reserveSizes[j] + (m_outerIndex[j+1]-m_outerIndex[j]);
      totalReserveSize += reserveSizes[j];
    }
    m_data.reserve(totalReserveSize);
    Index previousOuterIndex = m_outerIndex[m_outerSize];
    for(Index j=m_outerSize-1; j>=0; --j)
    {
      // move each inner vector to its new, larger slot, back to front
      Index innerNNZ = previousOuterIndex - m_outerIndex[j];
      for(Index i=innerNNZ-1; i>=0; --i)
      {
        m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
        m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
      }
      previousOuterIndex = m_outerIndex[j];
      m_outerIndex[j] = newOuterIndex[j];
      m_innerNonZeros[j] = innerNNZ;
    }
    m_outerIndex[m_outerSize] = m_outerIndex[m_outerSize-1] + m_innerNonZeros[m_outerSize-1] + reserveSizes[m_outerSize-1];

    m_data.resize(m_outerIndex[m_outerSize]);
  }
  else
  {
    Index* newOuterIndex = static_cast<Index*>(std::malloc((m_outerSize+1)*sizeof(Index)));

    Index count = 0;
    for(Index j=0; j<m_outerSize; ++j)
    {
      newOuterIndex[j] = count;
      Index alreadyReserved = (m_outerIndex[j+1]-m_outerIndex[j]) - m_innerNonZeros[j];
      Index toReserve = std::max<Index>(reserveSizes[j], alreadyReserved);
      count += toReserve + m_innerNonZeros[j];
    }
    newOuterIndex[m_outerSize] = count;

    m_data.resize(count);
    for(Index j=m_outerSize-1; j>=0; --j)
    {
      Index offset = newOuterIndex[j] - m_outerIndex[j];
      if(offset>0)
      {
        Index innerNNZ = m_innerNonZeros[j];
        for(Index i=innerNNZ-1; i>=0; --i)
        {
          m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
          m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
        }
      }
    }

    std::swap(m_outerIndex, newOuterIndex);
    std::free(newOuterIndex); // frees the old outer index array after the swap
  }
}
inline Scalar& insertBack(Index row, Index col)
{
  return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
}
inline Scalar& insertBackByOuterInner(Index outer, Index inner)
{
  eigen_assert(size_t(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
  eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
  Index p = m_outerIndex[outer+1];
  ++m_outerIndex[outer+1];
  m_data.append(0, inner);
  return m_data.value(p);
}
inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
{
  Index p = m_outerIndex[outer+1];
  ++m_outerIndex[outer+1];
  m_data.append(0, inner);
  return m_data.value(p);
}
inline void startVec(Index outer)
{
  eigen_assert(m_outerIndex[outer]==int(m_data.size()) && "You must call startVec for each inner vector sequentially");
  eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
  m_outerIndex[outer+1] = m_outerIndex[outer];
}
inline void finalize()
{
  if(isCompressed())
  {
    Index size = static_cast<Index>(m_data.size());
    Index i = m_outerSize;
    // find the last filled inner vector
    while (i>=0 && m_outerIndex[i]==0)
      --i;
    ++i;
    while (i<=m_outerSize)
    {
      m_outerIndex[i] = size;
      ++i;
    }
  }
}
template<typename InputIterators>
void setFromTriplets(const InputIterators& begin, const InputIterators& end);

void sumupDuplicates();
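// Usage sketch (editor's addition, not part of the header): the triplet
// interface is the standard way to build a sparse matrix in one shot;
// duplicate (i,j) entries are summed by sumupDuplicates().
//
//   #include <vector>
//   #include <Eigen/Sparse>
//
//   typedef Eigen::Triplet<double> T;
//   std::vector<T> triplets;
//   triplets.push_back(T(0, 0, 1.0));
//   triplets.push_back(T(1, 2, 2.0));
//   triplets.push_back(T(1, 2, 3.0));   // duplicate of (1,2): stored as 5.0
//   Eigen::SparseMatrix<double> A(3, 3);
//   A.setFromTriplets(triplets.begin(), triplets.end());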
Scalar& insertByOuterInner(Index j, Index i)
{
  return insert(IsRowMajor ? j : i, IsRowMajor ? i : j);
}
void makeCompressed()
{
  if(isCompressed())
    return;

  Index oldStart = m_outerIndex[1];
  m_outerIndex[1] = m_innerNonZeros[0];
  for(Index j=1; j<m_outerSize; ++j)
  {
    Index nextOldStart = m_outerIndex[j+1];
    Index offset = oldStart - m_outerIndex[j];
    if(offset>0)
    {
      for(Index k=0; k<m_innerNonZeros[j]; ++k)
      {
        m_data.index(m_outerIndex[j]+k) = m_data.index(oldStart+k);
        m_data.value(m_outerIndex[j]+k) = m_data.value(oldStart+k);
      }
    }
    m_outerIndex[j+1] = m_outerIndex[j] + m_innerNonZeros[j];
    oldStart = nextOldStart;
  }
  std::free(m_innerNonZeros);
  m_innerNonZeros = 0;
  m_data.resize(m_outerIndex[m_outerSize]);
}
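// Editor's note with a sketch (not part of the header): makeCompressed()
// packs the inner vectors back to back and drops m_innerNonZeros, restoring
// the plain compressed (CCS/CRS) layout that sparse solvers expect.
//
//   A.insert(1, 1) = 7.0;       // insert() switches A to uncompressed mode
//   assert(!A.isCompressed());
//   A.makeCompressed();
//   assert(A.isCompressed());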
void uncompress()
{
  if(m_innerNonZeros != 0)
    return;
  m_innerNonZeros = static_cast<Index*>(std::malloc(m_outerSize * sizeof(Index)));
  for (int i = 0; i < m_outerSize; i++)
  {
    m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
  }
}
void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
{
  prune(default_prunning_func(reference,epsilon));
}
template<typename KeepFunc>
void prune(const KeepFunc& keep = KeepFunc())
{
  makeCompressed();

  Index k = 0;
  for(Index j=0; j<m_outerSize; ++j)
  {
    Index previousStart = m_outerIndex[j];
    m_outerIndex[j] = k;
    Index end = m_outerIndex[j+1];
    for(Index i=previousStart; i<end; ++i)
    {
      if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
      {
        m_data.value(k) = m_data.value(i);
        m_data.index(k) = m_data.index(i);
        ++k;
      }
    }
  }
  m_outerIndex[m_outerSize] = k;
  m_data.resize(k,0);
}
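// Usage sketch (editor's addition, not part of the header): the functor form
// keeps entry (row, col, value) iff keep(row, col, value) returns true. For
// example, keeping only the upper triangle (KeepUpper is hypothetical):
//
//   struct KeepUpper {
//     bool operator()(int row, int col, const double&) const
//     { return row <= col; }
//   };
//   A.prune(KeepUpper());
//
//   // The scalar overload drops entries much smaller than 'reference'
//   // (relative to 'epsilon'):
//   A.prune(1.0, 1e-12);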
void conservativeResize(Index rows, Index cols)
{
  // no change
  if (this->rows() == rows && this->cols() == cols) return;

  // if one dimension is null, there is nothing to preserve
  if(rows==0 || cols==0) return resize(rows,cols);

  Index innerChange = IsRowMajor ? cols - this->cols() : rows - this->rows();
  Index outerChange = IsRowMajor ? rows - this->rows() : cols - this->cols();
  Index newInnerSize = IsRowMajor ? cols : rows;

  // deal with the inner non-zero counts
  if (m_innerNonZeros)
  {
    // grow m_innerNonZeros for the new outer vectors
    Index *newInnerNonZeros = static_cast<Index*>(std::realloc(m_innerNonZeros, (m_outerSize + outerChange) * sizeof(Index)));
    m_innerNonZeros = newInnerNonZeros;

    for(Index i=m_outerSize; i<m_outerSize+outerChange; i++)
      m_innerNonZeros[i] = 0;
  }
  else if (innerChange < 0)
  {
    // the inner size decreased: allocate a new m_innerNonZeros
    m_innerNonZeros = static_cast<Index*>(std::malloc((m_outerSize+outerChange+1) * sizeof(Index)));
    for(Index i = 0; i < m_outerSize; i++)
      m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
  }

  // trim entries that fall outside the new inner size
  if (m_innerNonZeros && innerChange < 0)
  {
    for(Index i = 0; i < m_outerSize + (std::min)(outerChange, Index(0)); i++)
    {
      Index &n = m_innerNonZeros[i];
      Index start = m_outerIndex[i];
      while (n > 0 && m_data.index(start+n-1) >= newInnerSize) --n;
    }
  }

  m_innerSize = newInnerSize;

  // re-allocate the outer index structure if needed
  if (outerChange == 0)
    return;

  Index *newOuterIndex = static_cast<Index*>(std::realloc(m_outerIndex, (m_outerSize + outerChange + 1) * sizeof(Index)));
  m_outerIndex = newOuterIndex;
  if (outerChange > 0)
  {
    Index last = m_outerSize == 0 ? 0 : m_outerIndex[m_outerSize];
    for(Index i=m_outerSize; i<m_outerSize+outerChange+1; i++)
      m_outerIndex[i] = last;
  }
  m_outerSize += outerChange;
}
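// Usage sketch (editor's addition, not part of the header): unlike resize(),
// conservativeResize() keeps the nonzeros that still fit inside the new
// bounds and trims the rest via the bookkeeping above.
//
//   Eigen::SparseMatrix<double> A(4, 4);
//   // ... fill A ...
//   A.conservativeResize(3, 5);   // entries with row index >= 3 are dropped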
void resize(Index rows, Index cols)
{
  const Index outerSize = IsRowMajor ? rows : cols;
  m_innerSize = IsRowMajor ? cols : rows;
  m_data.clear();
  if (m_outerSize != outerSize || m_outerSize==0)
  {
    std::free(m_outerIndex);
    m_outerIndex = static_cast<Index*>(std::malloc((outerSize + 1) * sizeof(Index)));
    m_outerSize = outerSize;
  }
  if(m_innerNonZeros)
  {
    std::free(m_innerNonZeros);
    m_innerNonZeros = 0;
  }
  memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
}
inline SparseMatrix()
  : m_outerSize(-1), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
{
  check_template_parameters();
  resize(0, 0);
}

inline SparseMatrix(Index rows, Index cols)
  : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
{
  check_template_parameters();
  resize(rows, cols);
}

template<typename OtherDerived>
inline SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
  : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
{
  EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
    YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)
  check_template_parameters();
  *this = other.derived();
}

template<typename OtherDerived, unsigned int UpLo>
inline SparseMatrix(const SparseSelfAdjointView<OtherDerived, UpLo>& other)
  : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
{
  check_template_parameters();
  *this = other;
}

inline SparseMatrix(const SparseMatrix& other)
  : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
{
  check_template_parameters();
  *this = other.derived();
}

// copy constructor with in-place evaluation
template<typename OtherDerived>
SparseMatrix(const ReturnByValue<OtherDerived>& other)
  : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
{
  check_template_parameters();
  initAssignment(other);
}
inline void setIdentity()
{
  eigen_assert(rows() == cols() && "ONLY FOR SQUARED MATRICES");
  this->m_data.resize(rows());
  // ... (fill the inner indices with 0..n-1 and the values with ones)
}
inline SparseMatrix& operator=(const SparseMatrix& other)
{
  if (other.isRValue())
    swap(other.const_cast_derived());
  else if(this!=&other)
  {
    initAssignment(other);
    if(other.isCompressed())
    {
      memcpy(m_outerIndex, other.m_outerIndex, (m_outerSize+1)*sizeof(Index));
      m_data = other.m_data;
    }
    else
      Base::operator=(other);
  }
  return *this;
}
#ifndef EIGEN_PARSED_BY_DOXYGEN
template<typename Lhs, typename Rhs>
inline SparseMatrix& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
{ return Base::operator=(product); }

template<typename OtherDerived>
inline SparseMatrix& operator=(const ReturnByValue<OtherDerived>& other)
{
  initAssignment(other);
  return Base::operator=(other.derived());
}

template<typename OtherDerived>
inline SparseMatrix& operator=(const EigenBase<OtherDerived>& other)
{ return Base::operator=(other.derived()); }
#endif
template<typename OtherDerived>
EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other);

friend std::ostream& operator<<(std::ostream& s, const SparseMatrix& m)
{
  EIGEN_DBG_SPARSE(
    s << "Nonzero entries:\n";
    for (Index i=0; i<m.nonZeros(); ++i)
      // ... (print the (value,index) pair at i)
    // ...
    s << "Outer pointers:\n";
    for (Index i=0; i<m.outerSize(); ++i)
      s << m.m_outerIndex[i] << " ";
    s << " $" << std::endl;
    // ... (in uncompressed mode only:)
    s << "Inner non zeros:\n";
    for (Index i=0; i<m.outerSize(); ++i)
      s << m.m_innerNonZeros[i] << " ";
    s << " $" << std::endl;
    // ...
  );
  s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
  return s;
}
inline ~SparseMatrix()
{
  std::free(m_outerIndex);
  std::free(m_innerNonZeros);
}
#ifndef EIGEN_PARSED_BY_DOXYGEN
// ...
#endif

#ifdef EIGEN_SPARSEMATRIX_PLUGIN
# include EIGEN_SPARSEMATRIX_PLUGIN
#endif

template<typename Other>
void initAssignment(const Other& other)
{
  resize(other.rows(), other.cols());
  if(m_innerNonZeros)
  {
    std::free(m_innerNonZeros);
    m_innerNonZeros = 0;
  }
}
class SingletonVector
{
    // ...
  public:
    SingletonVector(Index i, Index v)
      : m_index(i), m_value(v)
    {}

    Index operator[](Index i) const { return i==m_index ? m_value : 0; }
};
EIGEN_STRONG_INLINE Scalar& insertBackUncompressed(Index row, Index col)
{
  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  eigen_assert(!isCompressed());
  eigen_assert(m_innerNonZeros[outer]<=(m_outerIndex[outer+1] - m_outerIndex[outer]));

  Index p = m_outerIndex[outer] + m_innerNonZeros[outer]++;
  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
template<typename Scalar, int _Options, typename _Index>
class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseMatrix& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
    {
      // ... (m_end comes from m_outerIndex[outer+1], or m_innerNonZeros in uncompressed mode)
    }

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline Index index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id < m_end); }

    // ...
};
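// Usage sketch (editor's addition, not part of the header): the canonical
// loop over all nonzeros visits one inner vector at a time
// (visit() is a hypothetical callback):
//
//   for (int k = 0; k < A.outerSize(); ++k)
//     for (Eigen::SparseMatrix<double>::InnerIterator it(A, k); it; ++it)
//       visit(it.row(), it.col(), it.value());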
template<typename Scalar, int _Options, typename _Index>
class SparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseMatrix& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
    {
      // ... (m_id starts at the end of the inner vector)
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }

    // ...
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id > m_start); }

    // ...
};
template<typename InputIterator, typename SparseMatrixType>
void set_from_triplets(const InputIterator& begin, const InputIterator& end, SparseMatrixType& mat, int Options = 0)
{
  enum { IsRowMajor = SparseMatrixType::IsRowMajor };
  typedef typename SparseMatrixType::Scalar Scalar;
  // ... (trMat is a temporary matrix of the opposite storage order)

  // pass 1: count the nnz per inner vector
  VectorXi wi(trMat.outerSize());
  wi.setZero();
  for(InputIterator it(begin); it!=end; ++it)
  {
    eigen_assert(it->row()>=0 && it->row()<mat.rows() && it->col()>=0 && it->col()<mat.cols());
    wi(IsRowMajor ? it->col() : it->row())++;
  }

  // pass 2: insert all the elements into trMat
  trMat.reserve(wi);
  for(InputIterator it(begin); it!=end; ++it)
    trMat.insertBackUncompressed(it->row(),it->col()) = it->value();

  // pass 3: sum up duplicates
  trMat.sumupDuplicates();

  // pass 4: transposed copy -> implicit sorting
  mat = trMat;
}
template<typename Scalar, int _Options, typename _Index>
template<typename InputIterators>
void SparseMatrix<Scalar,_Options,_Index>::setFromTriplets(const InputIterators& begin, const InputIterators& end)
{
  internal::set_from_triplets(begin, end, *this);
}
template<typename Scalar, int _Options, typename _Index>
void SparseMatrix<Scalar,_Options,_Index>::sumupDuplicates()
{
  // wi(inner) holds the position of the first entry with that inner index
  VectorXi wi(innerSize());
  wi.fill(-1);
  Index count = 0;
  for(int j=0; j<outerSize(); ++j)
  {
    Index start  = count;
    Index oldEnd = m_outerIndex[j]+m_innerNonZeros[j];
    for(Index k=m_outerIndex[j]; k<oldEnd; ++k)
    {
      Index i = m_data.index(k);
      if(wi(i)>=start)
      {
        // duplicate entry within this inner vector => accumulate
        m_data.value(wi(i)) += m_data.value(k);
      }
      else
      {
        m_data.value(count) = m_data.value(k);
        m_data.index(count) = m_data.index(k);
        wi(i) = count;
        ++count;
      }
    }
    m_outerIndex[j] = start;
  }
  m_outerIndex[m_outerSize] = count;

  // turn the matrix into compressed form
  std::free(m_innerNonZeros);
  m_innerNonZeros = 0;
  m_data.resize(m_outerIndex[m_outerSize]);
}
template<typename Scalar, int _Options, typename _Index>
template<typename OtherDerived>
EIGEN_DONT_INLINE SparseMatrix<Scalar,_Options,_Index>& SparseMatrix<Scalar,_Options,_Index>::operator=(const SparseMatrixBase<OtherDerived>& other)
{
  EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
    YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)

  const bool needToTranspose = (Flags & RowMajorBit) != (OtherDerived::Flags & RowMajorBit);
  if (needToTranspose)
  {
    // two-pass algorithm: count the nnz per dest inner vector, then copy;
    // evaluate 'other' once since each of its coefficients is visited twice
    typedef typename internal::nested<OtherDerived,2>::type OtherCopy;
    typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
    OtherCopy otherCopy(other.derived());

    SparseMatrix dest(other.rows(),other.cols());
    // ...

    // pass 1: count
    for (Index j=0; j<otherCopy.outerSize(); ++j)
      for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
        ++dest.m_outerIndex[it.index()];

    // prefix sum
    Index count = 0;
    VectorXi positions(dest.outerSize());
    for (Index j=0; j<dest.outerSize(); ++j)
    {
      Index tmp = dest.m_outerIndex[j];
      dest.m_outerIndex[j] = count;
      positions[j] = count;
      count += tmp;
    }
    dest.m_outerIndex[dest.outerSize()] = count;
    dest.m_data.resize(count);

    // pass 2: copy
    for (Index j=0; j<otherCopy.outerSize(); ++j)
    {
      for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
      {
        Index pos = positions[it.index()]++;
        dest.m_data.index(pos) = j;
        dest.m_data.value(pos) = it.value();
      }
    }
    this->swap(dest);
    return *this;
  }
  else
  {
    if(other.isRValue())
      initAssignment(other.derived());
    // no special optimization
    return Base::operator=(other.derived());
  }
}
template<typename _Scalar, int _Options, typename _Index>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insertUncompressed(Index row, Index col)
{
  eigen_assert(!isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  Index room = m_outerIndex[outer+1] - m_outerIndex[outer];
  Index innerNNZ = m_innerNonZeros[outer];
  if(innerNNZ>=room)
  {
    // this inner vector is full: grow it (at least double its capacity)
    reserve(SingletonVector(outer,std::max<Index>(2,innerNNZ)));
  }

  Index startId = m_outerIndex[outer];
  Index p = startId + m_innerNonZeros[outer];
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }
  eigen_assert((p<=startId || m_data.index(p-1)!=inner) && "you cannot insert an element that already exist, you must call coeffRef to this end");

  m_innerNonZeros[outer]++;

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
template<typename _Scalar, int _Options, typename _Index>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_Index>::Scalar& SparseMatrix<_Scalar,_Options,_Index>::insertCompressed(Index row, Index col)
{
  eigen_assert(isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  Index previousOuter = outer;
  if (m_outerIndex[outer+1]==0)
  {
    // we start a new inner vector: set the offsets of the empty vectors on the left
    while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
    {
      m_outerIndex[previousOuter] = static_cast<Index>(m_data.size());
      --previousOuter;
    }
    m_outerIndex[outer+1] = m_outerIndex[outer];
  }

  // whether we are appending at the very end of the data buffer
  bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                && (size_t(m_outerIndex[outer+1]) == m_data.size());

  size_t startId = m_outerIndex[outer];
  size_t p = m_outerIndex[outer+1];
  ++m_outerIndex[outer+1];

  float reallocRatio = 1;
  if (m_data.allocatedSize()<=m_data.size())
  {
    if (m_data.size()==0)
    {
      m_data.reserve(32);
    }
    else
    {
      // smart realloc: estimate the final nnz from the current filling ratio;
      // float arithmetic avoids integer overflow
      float nnzEstimate = float(m_outerIndex[outer])*float(m_outerSize)/float(outer+1);
      reallocRatio = (nnzEstimate-float(m_data.size()))/float(m_data.size());
      // bound the ratio to avoid both many small reallocs and over-allocation
      reallocRatio = (std::min)((std::max)(reallocRatio,1.5f),8.f);
    }
  }
  // ...

  if (!isLastVec)
  {
    if (previousOuter==-1)
    {
      // oops, wrong guess: the matrix was in fact empty, fix the outer offsets
      for (Index k=0; k<=(outer+1); ++k)
        m_outerIndex[k] = 0;
      Index k=outer+1;
      while(m_outerIndex[k]==0)
        m_outerIndex[k++] = 1;
      while (k<=m_outerSize && m_outerIndex[k]!=0)
        m_outerIndex[k++]++;
      p = 0;
      --k;
      k = m_outerIndex[k]-1;
      // ... (shift the stored data one slot to the right)
    }
    else
    {
      // update the outer offsets of the vectors on the right...
      Index j = outer+2;
      while (j<=m_outerSize && m_outerIndex[j]!=0)
        m_outerIndex[j++]++;
      --j;
      // ...and shift their data one slot to the right
      Index k = m_outerIndex[j]-1;
      // ...
    }
  }

  // finally, move the tail of the current inner vector to keep it sorted
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
#endif // EIGEN_SPARSEMATRIX_H