#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H

// ...

template<typename _Scalar, int _Options, typename _StorageIndex>
struct traits<SparseMatrix<_Scalar, _Options, _StorageIndex> >
{
  // ...
};
template<typename _Scalar, int _Options, typename _StorageIndex, int DiagIndex>
struct traits<Diagonal<SparseMatrix<_Scalar, _Options, _StorageIndex>, DiagIndex> >
{
  // ...
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = 1,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = 1,
    // ...
  };
};

template<typename _Scalar, int _Options, typename _StorageIndex, int DiagIndex>
struct traits<Diagonal<const SparseMatrix<_Scalar, _Options, _StorageIndex>, DiagIndex> >
 : public traits<Diagonal<SparseMatrix<_Scalar, _Options, _StorageIndex>, DiagIndex> >
{
  // ...
};
template<typename _Scalar, int _Options, typename _StorageIndex>
class SparseMatrix
  : public SparseCompressedBase<SparseMatrix<_Scalar, _Options, _StorageIndex> >
{
    typedef SparseCompressedBase<SparseMatrix> Base;
    // ...
  public:
    using Base::isCompressed;
    using Base::nonZeros;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseMatrix)
    using Base::operator+=;
    using Base::operator-=;
    // ...
    using Base::IsRowMajor;
    /** \returns the number of rows of the matrix */
    inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
    /** \returns the number of columns of the matrix */
    inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }
    /** \returns the value of the matrix at position (\a row, \a col). */
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      return m_data.atInRange(m_outerIndex[outer], end, StorageIndex(inner));
    }
    /** \returns a non-const reference to the value of the matrix at position (\a row, \a col).
      * If the element does not exist, it is inserted via insert(Index,Index). */
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index start = m_outerIndex[outer];
      Index end = m_innerNonZeros ? m_outerIndex[outer] + m_innerNonZeros[outer] : m_outerIndex[outer+1];
      eigen_assert(end>=start && "you probably called coeffRef on a non finalized matrix");
      if(end<=start)
        return insert(row,col);
      const Index p = m_data.searchLowerIndex(start,end-1,inner);
      if((p<end) && (m_data.index(p)==inner))
        return m_data.value(p);
      else
        return insert(row,col);
    }
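    // Usage sketch (illustrative, not part of the header): coeffRef() performs a
    // binary search and inserts an explicit zero when the coefficient is not
    // stored yet, so it is convenient for read-or-insert access:
    //
    //   Eigen::SparseMatrix<double> A(10,10);
    //   A.coeffRef(3,4) += 2.5;    // inserts (3,4) if absent, then accumulates
    //   double v = A.coeff(3,4);   // read-only; returns 0 for absent entries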
    /** Removes all non-zeros but keeps the allocated memory. */
    inline void setZero()
    {
      m_data.clear();
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(StorageIndex));
      if(m_innerNonZeros)
        memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(StorageIndex));
    }

    /** Preallocates \a reserveSize non-zeros (compressed mode only). */
    inline void reserve(Index reserveSize)
    {
      eigen_assert(isCompressed() && "This function does not make sense in non compressed mode.");
      m_data.reserve(reserveSize);
    }
#ifdef EIGEN_PARSED_BY_DOXYGEN
    /** Preallocates \a reserveSizes[\c j] non-zeros for each column (resp. row) \c j. */
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes);
#else
    template<class SizesType>
    inline void reserve(const SizesType& reserveSizes, const typename SizesType::value_type& enableif =
        typename SizesType::value_type())
    {
      EIGEN_UNUSED_VARIABLE(enableif);
      reserveInnerVectors(reserveSizes);
    }
#endif // EIGEN_PARSED_BY_DOXYGEN

  protected:
    template<class SizesType>
    inline void reserveInnerVectors(const SizesType& reserveSizes)
    {
      if(isCompressed())
      {
        Index totalReserveSize = 0;
        // turn the matrix into non-compressed mode
        m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
        if (!m_innerNonZeros) internal::throw_std_bad_alloc();

        // temporarily use m_innerNonZeros to hold the new starting points
        StorageIndex* newOuterIndex = m_innerNonZeros;

        StorageIndex count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          count += reserveSizes[j] + (m_outerIndex[j+1]-m_outerIndex[j]);
          totalReserveSize += reserveSizes[j];
        }
        m_data.reserve(totalReserveSize);
        StorageIndex previousOuterIndex = m_outerIndex[m_outerSize];
        for(Index j=m_outerSize-1; j>=0; --j)
        {
          StorageIndex innerNNZ = previousOuterIndex - m_outerIndex[j];
          for(Index i=innerNNZ-1; i>=0; --i)
          {
            m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
            m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
          }
          previousOuterIndex = m_outerIndex[j];
          m_outerIndex[j] = newOuterIndex[j];
          m_innerNonZeros[j] = innerNNZ;
        }
        m_outerIndex[m_outerSize] = m_outerIndex[m_outerSize-1] + m_innerNonZeros[m_outerSize-1] + reserveSizes[m_outerSize-1];

        m_data.resize(m_outerIndex[m_outerSize]);
      }
      else
      {
        StorageIndex* newOuterIndex = static_cast<StorageIndex*>(std::malloc((m_outerSize+1)*sizeof(StorageIndex)));
        if (!newOuterIndex) internal::throw_std_bad_alloc();

        StorageIndex count = 0;
        for(Index j=0; j<m_outerSize; ++j)
        {
          newOuterIndex[j] = count;
          StorageIndex alreadyReserved = (m_outerIndex[j+1]-m_outerIndex[j]) - m_innerNonZeros[j];
          StorageIndex toReserve = std::max<StorageIndex>(reserveSizes[j], alreadyReserved);
          count += toReserve + m_innerNonZeros[j];
        }
        newOuterIndex[m_outerSize] = count;

        m_data.resize(count);
        for(Index j=m_outerSize-1; j>=0; --j)
        {
          Index offset = newOuterIndex[j] - m_outerIndex[j];
          if(offset>0)
          {
            StorageIndex innerNNZ = m_innerNonZeros[j];
            for(Index i=innerNNZ-1; i>=0; --i)
            {
              m_data.index(newOuterIndex[j]+i) = m_data.index(m_outerIndex[j]+i);
              m_data.value(newOuterIndex[j]+i) = m_data.value(m_outerIndex[j]+i);
            }
          }
        }

        std::swap(m_outerIndex, newOuterIndex);
        std::free(newOuterIndex);
      }
    }
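    // Usage sketch (illustrative): the SizesType overload of reserve() preallocates
    // per inner vector (per column in the default column-major layout), which is
    // the recommended preparation for direct insert() calls:
    //
    //   Eigen::SparseMatrix<double> A(rows,cols);
    //   A.reserve(Eigen::VectorXi::Constant(cols, 6));  // ~6 non-zeros per column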
  public:
    /** \internal */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }
    /** \internal */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(Index(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
      eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(Scalar(0), inner);
      return m_data.value(p);
    }

    /** \internal */
    inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(Scalar(0), inner);
      return m_data.value(p);
    }
    /** \internal */
    inline void startVec(Index outer)
    {
      eigen_assert(m_outerIndex[outer]==Index(m_data.size()) && "You must call startVec for each inner vector sequentially");
      eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
      m_outerIndex[outer+1] = m_outerIndex[outer];
    }
    /** \internal */
    inline void finalize()
    {
      if(isCompressed())
      {
        StorageIndex size = internal::convert_index<StorageIndex>(m_data.size());
        Index i = m_outerSize;
        // find the last filled inner vector
        while (i>=0 && m_outerIndex[i]==0)
          --i;
        ++i;
        while (i<=m_outerSize)
        {
          m_outerIndex[i] = size;
          ++i;
        }
      }
    }
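    // Usage sketch (illustrative): the low-level startVec()/insertBack()/finalize()
    // protocol fills a compressed matrix one inner vector at a time, in order and
    // with increasing inner indices; it is fast but unforgiving:
    //
    //   Eigen::SparseMatrix<double> A(rows,cols);  // column-major: inner index = row
    //   A.reserve(estimatedNonZeros);
    //   for(int j=0; j<cols; ++j)
    //   {
    //     A.startVec(j);
    //     A.insertBack(i,j) = value;  // rows i in increasing order within column j
    //   }
    //   A.finalize();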
    template<typename InputIterators>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end);

    template<typename InputIterators, typename DupFunctor>
    void setFromTriplets(const InputIterators& begin, const InputIterators& end, DupFunctor dup_func);

    // ...

    template<typename DupFunctor>
    void collapseDuplicates(DupFunctor dup_func = DupFunctor());
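    // Usage sketch (illustrative): setFromTriplets() is the recommended high-level
    // way to fill a sparse matrix; the triplets may be unordered and may contain
    // duplicates, which are summed by default:
    //
    //   std::vector<Eigen::Triplet<double> > triplets;
    //   triplets.push_back(Eigen::Triplet<double>(0,0,3.0));
    //   triplets.push_back(Eigen::Triplet<double>(1,2,-1.0));
    //   triplets.push_back(Eigen::Triplet<double>(0,0,1.0));  // duplicate: summed
    //   Eigen::SparseMatrix<double> A(rows,cols);
    //   A.setFromTriplets(triplets.begin(), triplets.end());  // A.coeff(0,0) == 4.0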
    /** \internal */
    Scalar& insertByOuterInner(Index j, Index i)
    {
      return insert(IsRowMajor ? j : i, IsRowMajor ? i : j);
    }
    /** Turns the matrix into the \em compressed format. */
    void makeCompressed()
    {
      if(isCompressed())
        return;

      eigen_internal_assert(m_outerIndex!=0 && m_outerSize>0);

      Index oldStart = m_outerIndex[1];
      m_outerIndex[1] = m_innerNonZeros[0];
      for(Index j=1; j<m_outerSize; ++j)
      {
        Index nextOldStart = m_outerIndex[j+1];
        Index offset = oldStart - m_outerIndex[j];
        if(offset>0)
        {
          for(Index k=0; k<m_innerNonZeros[j]; ++k)
          {
            m_data.index(m_outerIndex[j]+k) = m_data.index(oldStart+k);
            m_data.value(m_outerIndex[j]+k) = m_data.value(oldStart+k);
          }
        }
        m_outerIndex[j+1] = m_outerIndex[j] + m_innerNonZeros[j];
        oldStart = nextOldStart;
      }
      std::free(m_innerNonZeros);
      m_innerNonZeros = 0;
      m_data.resize(m_outerIndex[m_outerSize]);
      m_data.squeeze();
    }
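    // Usage sketch (illustrative): random insert() calls leave the matrix in
    // uncompressed mode; calling makeCompressed() afterwards squeezes out the
    // reserved holes and yields the plain CSC/CSR layout expected by solvers:
    //
    //   A.insert(i,j) = value;     // possibly many of these
    //   A.makeCompressed();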
    /** Turns the matrix into the uncompressed mode. */
    void uncompress()
    {
      if(m_innerNonZeros != 0)
        return;
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      for (Index i = 0; i < m_outerSize; i++)
      {
        m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
      }
    }
    /** Suppresses all non-zeros which are \em much \em smaller than \a reference under the tolerance \a epsilon. */
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      prune(default_prunning_func(reference,epsilon));
    }
    /** Turns the matrix into compressed format and suppresses all non-zeros for which \a keep(row,col,value) is false. */
    template<typename KeepFunc>
    void prune(const KeepFunc& keep = KeepFunc())
    {
      makeCompressed();

      StorageIndex k = 0;
      for(Index j=0; j<m_outerSize; ++j)
      {
        Index previousStart = m_outerIndex[j];
        m_outerIndex[j] = k;
        Index end = m_outerIndex[j+1];
        for(Index i=previousStart; i<end; ++i)
        {
          if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
          {
            m_data.value(k) = m_data.value(i);
            m_data.index(k) = m_data.index(i);
            ++k;
          }
        }
      }
      m_outerIndex[m_outerSize] = k;
      m_data.resize(k,0);
    }
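    // Usage sketch (illustrative): prune() drops stored coefficients, either those
    // negligible relative to a reference value or those rejected by a predicate
    // keep(row,col,value):
    //
    //   A.prune(1.0, 1e-12);       // drop entries much smaller than 1.0
    //   A.prune([](const Eigen::Index&, const Eigen::Index&, const double& v)
    //           { return std::abs(v) > 1e-12; });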
    /** Resizes the matrix to \a rows x \a cols, leaving old values untouched. */
    void conservativeResize(Index rows, Index cols)
    {
      // no change
      if (this->rows() == rows && this->cols() == cols) return;

      // if one dimension is null, there is nothing to preserve
      if(rows==0 || cols==0) return resize(rows,cols);

      Index innerChange = IsRowMajor ? cols - this->cols() : rows - this->rows();
      Index outerChange = IsRowMajor ? rows - this->rows() : cols - this->cols();
      StorageIndex newInnerSize = convert_index(IsRowMajor ? cols : rows);

      // deal with the inner non-zero counts
      if (m_innerNonZeros)
      {
        // resize m_innerNonZeros
        StorageIndex *newInnerNonZeros = static_cast<StorageIndex*>(std::realloc(m_innerNonZeros, (m_outerSize + outerChange) * sizeof(StorageIndex)));
        if (!newInnerNonZeros) internal::throw_std_bad_alloc();
        m_innerNonZeros = newInnerNonZeros;

        for(Index i=m_outerSize; i<m_outerSize+outerChange; i++)
          m_innerNonZeros[i] = 0;
      }
      else if (innerChange < 0)
      {
        // the inner size decreased: allocate a new m_innerNonZeros
        m_innerNonZeros = static_cast<StorageIndex*>(std::malloc((m_outerSize+outerChange+1) * sizeof(StorageIndex)));
        if (!m_innerNonZeros) internal::throw_std_bad_alloc();
        for(Index i = 0; i < m_outerSize; i++)
          m_innerNonZeros[i] = m_outerIndex[i+1] - m_outerIndex[i];
      }

      // update m_innerNonZeros when the inner size decreases
      if (m_innerNonZeros && innerChange < 0)
      {
        for(Index i = 0; i < m_outerSize + (std::min)(outerChange, Index(0)); i++)
        {
          StorageIndex &n = m_innerNonZeros[i];
          StorageIndex start = m_outerIndex[i];
          while (n > 0 && m_data.index(start+n-1) >= newInnerSize) --n;
        }
      }

      m_innerSize = newInnerSize;

      // re-allocate the outer index structure if needed
      if (outerChange == 0)
        return;

      StorageIndex *newOuterIndex = static_cast<StorageIndex*>(std::realloc(m_outerIndex, (m_outerSize + outerChange + 1) * sizeof(StorageIndex)));
      if (!newOuterIndex) internal::throw_std_bad_alloc();
      m_outerIndex = newOuterIndex;
      if (outerChange > 0)
      {
        StorageIndex last = m_outerSize == 0 ? 0 : m_outerIndex[m_outerSize];
        for(Index i=m_outerSize; i<m_outerSize+outerChange+1; i++)
          m_outerIndex[i] = last;
      }
      m_outerSize += outerChange;
    }
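    // Usage sketch (illustrative): conservativeResize() keeps the stored
    // coefficients that still fit within the new dimensions, in contrast to
    // resize(), which empties the matrix:
    //
    //   A.conservativeResize(A.rows()+1, A.cols()+1);  // old entries preserved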
    /** Resizes the matrix to \a rows x \a cols and sets it to zero. */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      m_data.clear();
      if (m_outerSize != outerSize || m_outerSize==0)
      {
        std::free(m_outerIndex);
        m_outerIndex = static_cast<StorageIndex*>(std::malloc((outerSize + 1) * sizeof(StorageIndex)));
        if (!m_outerIndex) internal::throw_std_bad_alloc();

        m_outerSize = outerSize;
      }
      if(m_innerNonZeros)
      {
        std::free(m_innerNonZeros);
        m_innerNonZeros = 0;
      }
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(StorageIndex));
    }
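    // Usage sketch (illustrative): resize() reallocates the outer index for the
    // new geometry and zeroes everything, so any previous content is lost:
    //
    //   A.resize(200, 100);        // now an empty 200 x 100 matrix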
    /** Default constructor yielding an empty 0 x 0 matrix. */
    inline SparseMatrix()
      : m_outerSize(-1), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(0, 0);
    }

    /** Constructs a \a rows x \a cols empty matrix. */
    inline SparseMatrix(Index rows, Index cols)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      resize(rows, cols);
    }

    /** Constructs a sparse matrix from the sparse expression \a other. */
    template<typename OtherDerived>
    inline SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
        YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)
      check_template_parameters();
      // ...
      #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
      #endif
      // ...
    }

    /** Constructs a sparse matrix from the sparse selfadjoint view \a other. */
    template<typename OtherDerived, unsigned int UpLo>
    inline SparseMatrix(const SparseSelfAdjointView<OtherDerived, UpLo>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      Base::operator=(other);
    }

    /** Copy constructor (performs a deep copy). */
    inline SparseMatrix(const SparseMatrix& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    /** Copy constructor with in-place evaluation. */
    template<typename OtherDerived>
    SparseMatrix(const ReturnByValue<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      initAssignment(other);
      other.evalTo(*this);
    }

    /** Copy constructor with in-place evaluation. */
    template<typename OtherDerived>
    SparseMatrix(const DiagonalBase<OtherDerived>& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0), m_innerNonZeros(0)
    {
      check_template_parameters();
      *this = other.derived();
    }
    /** Sets *this to the identity matrix. */
    inline void setIdentity()
    {
      eigen_assert(rows() == cols() && "ONLY FOR SQUARED MATRICES");
      this->m_data.resize(rows());
      Eigen::Map<IndexVector>(innerIndexPtr(), rows()).setLinSpaced(0, StorageIndex(rows()-1));
      Eigen::Map<ScalarVector>(valuePtr(), rows()).setOnes();
      Eigen::Map<IndexVector>(this->m_outerIndex, rows()+1).setLinSpaced(0, StorageIndex(rows()));
      std::free(m_innerNonZeros);
      m_innerNonZeros = 0;
    }
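    // Usage sketch (illustrative):
    //
    //   Eigen::SparseMatrix<double> I(n,n);
    //   I.setIdentity();           // n stored ones on the diagonal, compressed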
    inline SparseMatrix& operator=(const SparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else if(this!=&other)
      {
        #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
          EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        #endif
        initAssignment(other);
        if(other.isCompressed())
        {
          internal::smart_copy(other.m_outerIndex, other.m_outerIndex + m_outerSize + 1, m_outerIndex);
          m_data = other.m_data;
        }
        else
        {
          Base::operator=(other);
        }
      }
      return *this;
    }
#ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename OtherDerived>
    inline SparseMatrix& operator=(const EigenBase<OtherDerived>& other)
    { return Base::operator=(other.derived()); }
#endif // EIGEN_PARSED_BY_DOXYGEN

    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other);
    friend std::ostream & operator << (std::ostream & s, const SparseMatrix& m)
    {
      EIGEN_DBG_SPARSE(
        s << "Nonzero entries:\n";
        // ... print the (value,index) pairs ...
        s << "Outer pointers:\n";
        // ... print m.m_outerIndex ...
        s << " $" << std::endl;
        if(!m.isCompressed())
        {
          s << "Inner non zeros:\n";
          // ... print m.m_innerNonZeros ...
          s << " $" << std::endl;
        }
        s << std::endl;
      );
      s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
      return s;
    }
    /** Destructor */
    inline ~SparseMatrix()
    {
      std::free(m_outerIndex);
      std::free(m_innerNonZeros);
    }

#   ifdef EIGEN_SPARSEMATRIX_PLUGIN
#     include EIGEN_SPARSEMATRIX_PLUGIN
#   endif

  protected:
    template<typename Other>
    void initAssignment(const Other& other)
    {
      resize(other.rows(), other.cols());
      if(m_innerNonZeros)
      {
        std::free(m_innerNonZeros);
        m_innerNonZeros = 0;
      }
    }
    // ...

  public:
    /** \internal \sa insert(Index,Index) */
    EIGEN_STRONG_INLINE Scalar& insertBackUncompressed(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      eigen_assert(!isCompressed());
      eigen_assert(m_innerNonZeros[outer]<=(m_outerIndex[outer+1] - m_outerIndex[outer]));

      Index p = m_outerIndex[outer] + m_innerNonZeros[outer]++;
      m_data.index(p) = convert_index(inner);
      return (m_data.value(p) = 0);
    }
  private:
    struct default_prunning_func {
      default_prunning_func(const Scalar& ref, const RealScalar& eps) : reference(ref), epsilon(eps) {}
      inline bool operator() (const Index&, const Index&, const Scalar& value) const
      {
        return !internal::isMuchSmallerThan(value, reference, epsilon);
      }
      Scalar reference;
      RealScalar epsilon;
    };
};
template<typename InputIterator, typename SparseMatrixType, typename DupFunctor>
void set_from_triplets(const InputIterator& begin, const InputIterator& end, SparseMatrixType& mat, DupFunctor dup_func)
{
  enum { IsRowMajor = SparseMatrixType::IsRowMajor };
  typedef typename SparseMatrixType::Scalar Scalar;
  typedef typename SparseMatrixType::StorageIndex StorageIndex;
  SparseMatrix<Scalar,IsRowMajor?ColMajor:RowMajor,StorageIndex> trMat(mat.rows(),mat.cols());

  if(begin!=end)
  {
    // pass 1: count the number of non-zeros per inner vector
    typename SparseMatrixType::IndexVector wi(trMat.outerSize());
    wi.setZero();
    for(InputIterator it(begin); it!=end; ++it)
    {
      eigen_assert(it->row()>=0 && it->row()<mat.rows() && it->col()>=0 && it->col()<mat.cols());
      wi(IsRowMajor ? it->col() : it->row())++;
    }

    // pass 2: insert all the elements into trMat
    trMat.reserve(wi);
    for(InputIterator it(begin); it!=end; ++it)
      trMat.insertBackUncompressed(it->row(),it->col()) = it->value();

    // pass 3: merge duplicates
    trMat.collapseDuplicates(dup_func);
  }

  // pass 4: transposed copy -> implicit sorting
  mat = trMat;
}
/** Fills *this with the possibly-unsorted, possibly-duplicated triplet list [\a begin, \a end); duplicates are summed. */
template<typename Scalar, int _Options, typename _StorageIndex>
template<typename InputIterators>
void SparseMatrix<Scalar,_Options,_StorageIndex>::setFromTriplets(const InputIterators& begin, const InputIterators& end)
{
  internal::set_from_triplets<InputIterators, SparseMatrix<Scalar,_Options,_StorageIndex> >(begin, end, *this, internal::scalar_sum_op<Scalar,Scalar>());
}

/** Same as setFromTriplets(begin,end) but duplicates are merged with the functor \a dup_func. */
template<typename Scalar, int _Options, typename _StorageIndex>
template<typename InputIterators, typename DupFunctor>
void SparseMatrix<Scalar,_Options,_StorageIndex>::setFromTriplets(const InputIterators& begin, const InputIterators& end, DupFunctor dup_func)
{
  internal::set_from_triplets<InputIterators, SparseMatrix<Scalar,_Options,_StorageIndex>, DupFunctor>(begin, end, *this, dup_func);
}
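// Usage sketch (illustrative): the DupFunctor overload controls how duplicated
// triplets are merged; any binary functor on Scalar works, e.g. keeping the
// maximum instead of the default sum:
//
//   A.setFromTriplets(triplets.begin(), triplets.end(),
//                     [](const double& a, const double& b) { return std::max(a,b); });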
template<typename Scalar, int _Options, typename _StorageIndex>
template<typename DupFunctor>
void SparseMatrix<Scalar,_Options,_StorageIndex>::collapseDuplicates(DupFunctor dup_func)
{
  eigen_assert(!isCompressed());
  IndexVector wi(innerSize());
  wi.fill(-1);
  StorageIndex count = 0;
  // for each inner vector, wi[inner_index] holds the position of the first entry in the index/value buffers
  for(Index j=0; j<outerSize(); ++j)
  {
    StorageIndex start = count;
    Index oldEnd = m_outerIndex[j]+m_innerNonZeros[j];
    for(Index k=m_outerIndex[j]; k<oldEnd; ++k)
    {
      Index i = m_data.index(k);
      if(wi(i)>=start)
      {
        // this entry was already seen: accumulate it
        m_data.value(wi(i)) = dup_func(m_data.value(wi(i)), m_data.value(k));
      }
      else
      {
        m_data.value(count) = m_data.value(k);
        m_data.index(count) = m_data.index(k);
        wi(i) = count;
        ++count;
      }
    }
    m_outerIndex[j] = start;
  }
  m_outerIndex[m_outerSize] = count;
  // turn the matrix into compressed form
  std::free(m_innerNonZeros);
  m_innerNonZeros = 0;
  m_data.resize(m_outerIndex[m_outerSize]);
}
template<typename Scalar, int _Options, typename _StorageIndex>
template<typename OtherDerived>
EIGEN_DONT_INLINE SparseMatrix<Scalar,_Options,_StorageIndex>& SparseMatrix<Scalar,_Options,_StorageIndex>::operator=(const SparseMatrixBase<OtherDerived>& other)
{
  EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
        YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)

  #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
    EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
  #endif

  const bool needToTranspose = (Flags & RowMajorBit) != (internal::evaluator<OtherDerived>::Flags & RowMajorBit);
  if (needToTranspose)
  {
    #ifdef EIGEN_SPARSE_TRANSPOSED_COPY_PLUGIN
      EIGEN_SPARSE_TRANSPOSED_COPY_PLUGIN
    #endif
    // two-pass algorithm:
    //  1 - compute the number of coeffs per dest inner vector
    //  2 - do the actual copy/eval
    // since each coeff of the rhs has to be evaluated twice, evaluate it up front if needed
    typedef typename internal::nested_eval<OtherDerived,2,typename internal::plain_matrix_type<OtherDerived>::type >::type OtherCopy;
    typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
    typedef internal::evaluator<_OtherCopy> OtherCopyEval;
    OtherCopy otherCopy(other.derived());
    OtherCopyEval otherCopyEval(otherCopy);

    SparseMatrix dest(other.rows(),other.cols());
    Eigen::Map<IndexVector>(dest.m_outerIndex,dest.outerSize()).setZero();

    // pass 1: count the non-zeros per transposed inner vector
    for (Index j=0; j<otherCopy.outerSize(); ++j)
      for (typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
        ++dest.m_outerIndex[it.index()];

    // prefix sum
    StorageIndex count = 0;
    IndexVector positions(dest.outerSize());
    for (Index j=0; j<dest.outerSize(); ++j)
    {
      StorageIndex tmp = dest.m_outerIndex[j];
      dest.m_outerIndex[j] = count;
      positions[j] = count;
      count += tmp;
    }
    dest.m_outerIndex[dest.outerSize()] = count;
    // allocate and do the actual copy
    dest.m_data.resize(count);
    for (StorageIndex j=0; j<otherCopy.outerSize(); ++j)
    {
      for (typename OtherCopyEval::InnerIterator it(otherCopyEval, j); it; ++it)
      {
        Index pos = positions[it.index()]++;
        dest.m_data.index(pos) = j;
        dest.m_data.value(pos) = it.value();
      }
    }
    this->swap(dest);
    return *this;
  }
  else
  {
    if(other.isRValue())
    {
      initAssignment(other.derived());
    }
    // there is no special optimization
    return Base::operator=(other.derived());
  }
}
template<typename _Scalar, int _Options, typename _StorageIndex>
typename SparseMatrix<_Scalar,_Options,_StorageIndex>::Scalar& SparseMatrix<_Scalar,_Options,_StorageIndex>::insert(Index row, Index col)
{
  eigen_assert(row>=0 && row<rows() && col>=0 && col<cols());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  if(isCompressed())
  {
    if(nonZeros()==0)
    {
      // reserve space if not already done
      if(m_data.allocatedSize()==0)
        m_data.reserve(2*m_innerSize);

      // turn the matrix into non-compressed mode
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      if(!m_innerNonZeros) internal::throw_std_bad_alloc();

      memset(m_innerNonZeros, 0, (m_outerSize)*sizeof(StorageIndex));

      // pack all inner vectors to the end of the pre-allocated space
      // and allocate the entire free space to the first inner vector
      StorageIndex end = convert_index(m_data.allocatedSize());
      for(Index j=1; j<=m_outerSize; ++j)
        m_outerIndex[j] = end;
    }
    else
    {
      // turn the matrix into non-compressed mode
      m_innerNonZeros = static_cast<StorageIndex*>(std::malloc(m_outerSize * sizeof(StorageIndex)));
      if(!m_innerNonZeros) internal::throw_std_bad_alloc();
      for(Index j=0; j<m_outerSize; ++j)
        m_innerNonZeros[j] = m_outerIndex[j+1]-m_outerIndex[j];
    }
  }

  // check whether we can do a fast "push back" insertion
  Index data_end = m_data.allocatedSize();

  // first case: we are filling a new inner vector which is packed at the end
  if(m_outerIndex[outer]==data_end)
  {
    // pack previous empty inner vectors to the beginning of the free space
    // and allocate the entire free space to the current inner vector
    StorageIndex p = convert_index(m_data.size());
    Index j = outer;
    while(j>=0 && m_innerNonZeros[j]==0)
      m_outerIndex[j--] = p;

    // push back the new element
    ++m_innerNonZeros[outer];
    m_data.append(Scalar(0), inner);

    // check for reallocation
    if(data_end != m_data.allocatedSize())
    {
      // m_data has been reallocated: move the remaining inner vectors
      // back to the end of the free space
      StorageIndex new_end = convert_index(m_data.allocatedSize());
      for(Index k=outer+1; k<=m_outerSize; ++k)
        if(m_outerIndex[k]==data_end)
          m_outerIndex[k] = new_end;
    }
    return m_data.value(p);
  }

  // second case: the next inner vector is packed to the end
  // and the current inner vector ends at the used space
  if(m_outerIndex[outer+1]==data_end && m_outerIndex[outer]+m_innerNonZeros[outer]==m_data.size())
  {
    // add the new element at the end of the current inner vector
    ++m_innerNonZeros[outer];
    m_data.resize(m_data.size()+1);

    // check for reallocation
    if(data_end != m_data.allocatedSize())
    {
      StorageIndex new_end = convert_index(m_data.allocatedSize());
      for(Index k=outer+1; k<=m_outerSize; ++k)
        if(m_outerIndex[k]==data_end)
          m_outerIndex[k] = new_end;
    }

    // and insert it at the right position (sorted insertion)
    Index startId = m_outerIndex[outer];
    Index p = m_outerIndex[outer]+m_innerNonZeros[outer]-1;
    while ( (p > startId) && (m_data.index(p-1) > inner) )
    {
      m_data.index(p) = m_data.index(p-1);
      m_data.value(p) = m_data.value(p-1);
      --p;
    }

    m_data.index(p) = convert_index(inner);
    return (m_data.value(p) = 0);
  }

  if(m_data.size() != m_data.allocatedSize())
  {
    // make sure the matrix is compatible with random uncompressed insertion
    m_data.resize(m_data.allocatedSize());
    this->reserveInnerVectors(Array<StorageIndex,Dynamic,1>::Constant(m_outerSize, 2));
  }

  return insertUncompressed(row,col);
}
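// Usage sketch (illustrative): direct insertion is efficient when room has been
// reserved per inner vector beforehand; inserting an element that already exists
// is an error (use coeffRef() for read-or-insert semantics):
//
//   Eigen::SparseMatrix<double> A(rows,cols);
//   A.reserve(Eigen::VectorXi::Constant(cols, 4));
//   A.insert(i,j) = value;       // (i,j) must not be stored yet
//   A.makeCompressed();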
template<typename _Scalar, int _Options, typename _StorageIndex>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_StorageIndex>::Scalar& SparseMatrix<_Scalar,_Options,_StorageIndex>::insertUncompressed(Index row, Index col)
{
  eigen_assert(!isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const StorageIndex inner = convert_index(IsRowMajor ? col : row);

  Index room = m_outerIndex[outer+1] - m_outerIndex[outer];
  StorageIndex innerNNZ = m_innerNonZeros[outer];
  if(innerNNZ>=room)
  {
    // this inner vector is full: reallocate the whole buffer
    reserve(SingletonVector(outer,std::max<StorageIndex>(2,innerNNZ)));
  }

  Index startId = m_outerIndex[outer];
  Index p = startId + m_innerNonZeros[outer];
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }
  eigen_assert((p<=startId || m_data.index(p-1)!=inner) && "you cannot insert an element that already exists, you must call coeffRef to this end");

  m_innerNonZeros[outer]++;

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
template<typename _Scalar, int _Options, typename _StorageIndex>
EIGEN_DONT_INLINE typename SparseMatrix<_Scalar,_Options,_StorageIndex>::Scalar& SparseMatrix<_Scalar,_Options,_StorageIndex>::insertCompressed(Index row, Index col)
{
  eigen_assert(isCompressed());

  const Index outer = IsRowMajor ? row : col;
  const Index inner = IsRowMajor ? col : row;

  Index previousOuter = outer;
  if (m_outerIndex[outer+1]==0)
  {
    // we start a new inner vector
    while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
    {
      m_outerIndex[previousOuter] = convert_index(m_data.size());
      --previousOuter;
    }
    m_outerIndex[outer+1] = m_outerIndex[outer];
  }

  // handle the tricky case where the outer index array starts with several zeros
  bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                && (std::size_t(m_outerIndex[outer+1]) == m_data.size());

  std::size_t startId = m_outerIndex[outer];
  std::size_t p = m_outerIndex[outer+1];
  ++m_outerIndex[outer+1];

  double reallocRatio = 1;
  if (m_data.allocatedSize()<=m_data.size())
  {
    // with no preallocated memory, reserve a minimum of 32 elements
    if (m_data.size()==0)
    {
      m_data.reserve(32);
    }
    else
    {
      // estimate the required growth from the current filling ratio
      // (doubles avoid integer overflow)
      double nnzEstimate = double(m_outerIndex[outer])*double(m_outerSize)/double(outer+1);
      reallocRatio = (nnzEstimate-double(m_data.size()))/double(m_data.size());
      // bound the realloc ratio to limit both repeated minor reallocs
      // and over-allocation when the matrix is almost full
      reallocRatio = (std::min)((std::max)(reallocRatio,1.5),8.);
    }
  }
  m_data.resize(m_data.size()+1,reallocRatio);

  if (!isLastVec)
  {
    if (previousOuter==-1)
    {
      // wrong guess: correct the outer offsets
      for (Index k=0; k<=(outer+1); ++k)
        m_outerIndex[k] = 0;
      Index k=outer+1;
      while(m_outerIndex[k]==0)
        m_outerIndex[k++] = 1;
      while (k<=m_outerSize && m_outerIndex[k]!=0)
        m_outerIndex[k++]++;
      p = 0;
      --k;
      k = m_outerIndex[k]-1;
      while (k>0)
      {
        m_data.index(k) = m_data.index(k-1);
        m_data.value(k) = m_data.value(k-1);
        k--;
      }
    }
    else
    {
      // update the outer indices of the subsequent inner vectors...
      Index j = outer+2;
      while (j<=m_outerSize && m_outerIndex[j]!=0)
        m_outerIndex[j++]++;
      --j;
      // ...and shift the data of the last vectors
      Index k = m_outerIndex[j]-1;
      while (k>=Index(p))
      {
        m_data.index(k) = m_data.index(k-1);
        m_data.value(k) = m_data.value(k-1);
        k--;
      }
    }
  }

  // sorted insertion within the inner vector
  while ( (p > startId) && (m_data.index(p-1) > inner) )
  {
    m_data.index(p) = m_data.index(p-1);
    m_data.value(p) = m_data.value(p-1);
    --p;
  }

  m_data.index(p) = inner;
  return (m_data.value(p) = 0);
}
template<typename _Scalar, int _Options, typename _StorageIndex>
struct evaluator<SparseMatrix<_Scalar,_Options,_StorageIndex> >
  : evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_StorageIndex> > >
{
  typedef evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_StorageIndex> > > Base;
  typedef SparseMatrix<_Scalar,_Options,_StorageIndex> SparseMatrixType;
  evaluator() : Base() {}
  explicit evaluator(const SparseMatrixType &mat) : Base(mat) {}
};
#endif // EIGEN_SPARSEMATRIX_H