#ifndef EIGEN_SPARSEBLOCKMATRIX_H
#define EIGEN_SPARSEBLOCKMATRIX_H

template<typename _Scalar, int _BlockAtCompileTime=Dynamic, int _Options=ColMajor, typename _StorageIndex=int>
class BlockSparseMatrix;
namespace internal {
template<typename _Scalar, int _BlockAtCompileTime, int _Options, typename _Index>
struct traits<BlockSparseMatrix<_Scalar, _BlockAtCompileTime, _Options, _Index> >
{
  // ...
  enum {
    // ...
    BlockSize = _BlockAtCompileTime,
    // ...
  };
};
template<typename BlockSparseMatrixT>
struct traits<BlockSparseMatrixView<BlockSparseMatrixT> >
{
  // ...
};

// Functor used to sort a list of block triplets: by outer index first, then by inner index
template<typename Iterator, bool IsColMajor>
class TripletComp
{
  public:
    typedef typename Iterator::value_type Triplet;
    bool operator()(const Triplet& a, const Triplet& b)
    {
      if(IsColMajor)
        return ((a.col() == b.col() && a.row() < b.row()) || (a.col() < b.col()));
      else
        return ((a.row() == b.row() && a.col() < b.col()) || (a.row() < b.row()));
    }
};
} // end namespace internal
// Proxy to view the block sparse matrix as a regular sparse matrix
template<typename BlockSparseMatrixT>
class BlockSparseMatrixView : public SparseMatrixBase<BlockSparseMatrixT>
{
  public:
    typedef Ref<typename BlockSparseMatrixT::BlockScalar> Scalar;
    typedef Ref<typename BlockSparseMatrixT::BlockRealScalar> RealScalar;
    typedef typename BlockSparseMatrixT::Index Index;
    typedef BlockSparseMatrixT Nested;
    enum {
      Flags = BlockSparseMatrixT::Options,
      Options = BlockSparseMatrixT::Options,
      RowsAtCompileTime = BlockSparseMatrixT::RowsAtCompileTime,
      ColsAtCompileTime = BlockSparseMatrixT::ColsAtCompileTime,
      MaxColsAtCompileTime = BlockSparseMatrixT::MaxColsAtCompileTime,
      MaxRowsAtCompileTime = BlockSparseMatrixT::MaxRowsAtCompileTime
    };

    BlockSparseMatrixView(const BlockSparseMatrixT& spblockmat)
      : m_spblockmat(spblockmat)
    {}

    Index outerSize() const { return (Flags&RowMajorBit) == 1 ? this->rows() : this->cols(); }
    Index cols() const { return m_spblockmat.blockCols(); }
    Index rows() const { return m_spblockmat.blockRows(); }
    Scalar coeff(Index row, Index col) { return m_spblockmat.coeff(row, col); }
    Scalar coeffRef(Index row, Index col) { return m_spblockmat.coeffRef(row, col); }

    // Iterator over the blocks of one outer vector, reusing BlockInnerIterator
    class InnerIterator : public BlockSparseMatrixT::BlockInnerIterator
    {
      public:
        InnerIterator(const BlockSparseMatrixView& mat, Index outer)
          : BlockSparseMatrixT::BlockInnerIterator(mat.m_spblockmat, outer)
        {}
    };

  protected:
    const BlockSparseMatrixT& m_spblockmat;
};
// Proxy to view a regular dense vector as a vector of blocks
template<typename BlockSparseMatrixT, typename VectorType>
class BlockVectorView
{
  public:
    enum {
      BlockSize = BlockSparseMatrixT::BlockSize,
      ColsAtCompileTime = VectorType::ColsAtCompileTime,
      RowsAtCompileTime = VectorType::RowsAtCompileTime,
      Flags = VectorType::Flags
    };
    typedef Ref<const Matrix<typename BlockSparseMatrixT::Scalar, (RowsAtCompileTime==1)?1:BlockSize, (ColsAtCompileTime==1)?1:BlockSize> > Scalar;
    typedef typename BlockSparseMatrixT::Index Index;

    BlockVectorView(const BlockSparseMatrixT& spblockmat, const VectorType& vec)
      : m_spblockmat(spblockmat), m_vec(vec)
    {}

    inline Index size() const { return m_spblockmat.blockRows(); }

    inline Scalar coeff(Index bi) const
    {
      Index startRow = m_spblockmat.blockRowsIndex(bi);
      Index rowSize = m_spblockmat.blockRowsIndex(bi+1) - startRow;
      return m_vec.middleRows(startRow, rowSize);
    }
    inline Scalar coeff(Index bi, Index j) const
    {
      Index startRow = m_spblockmat.blockRowsIndex(bi);
      Index rowSize = m_spblockmat.blockRowsIndex(bi+1) - startRow;
      return m_vec.block(startRow, j, rowSize, 1);
    }

  protected:
    const BlockSparseMatrixT& m_spblockmat;
    const VectorType& m_vec;
};
// Proxy used as the destination of a block sparse matrix times dense vector product
template<typename BlockSparseMatrixT, typename VectorType>
class BlockVectorReturn
{
  public:
    enum {
      ColsAtCompileTime = VectorType::ColsAtCompileTime,
      RowsAtCompileTime = VectorType::RowsAtCompileTime,
      Flags = VectorType::Flags
    };
    typedef Ref<Matrix<typename VectorType::Scalar, RowsAtCompileTime, ColsAtCompileTime> > Scalar;
    typedef typename BlockSparseMatrixT::Index Index;

    BlockVectorReturn(const BlockSparseMatrixT& spblockmat, VectorType& vec)
      : m_spblockmat(spblockmat), m_vec(vec)
    {}

    inline Index size() const { return m_spblockmat.blockRows(); }

    inline Scalar coeffRef(Index bi)
    {
      Index startRow = m_spblockmat.blockRowsIndex(bi);
      Index rowSize = m_spblockmat.blockRowsIndex(bi+1) - startRow;
      return m_vec.middleRows(startRow, rowSize);
    }
    inline Scalar coeffRef(Index bi, Index j)
    {
      Index startRow = m_spblockmat.blockRowsIndex(bi);
      Index rowSize = m_spblockmat.blockRowsIndex(bi+1) - startRow;
      return m_vec.block(startRow, j, rowSize, 1);
    }

  protected:
    const BlockSparseMatrixT& m_spblockmat;
    VectorType& m_vec;
};
// Block sparse matrix times dense vector/matrix product
template<typename Lhs, typename Rhs>
class BlockSparseTimeDenseProduct;

namespace internal {
template<typename BlockSparseMatrixT, typename VecType>
struct traits<BlockSparseTimeDenseProduct<BlockSparseMatrixT, VecType> >
{
  // ...
  typedef typename BlockSparseMatrixT::Scalar Scalar;
  // ...
};
} // end namespace internal
template<typename Lhs, typename Rhs>
class BlockSparseTimeDenseProduct
  : public ProductBase<BlockSparseTimeDenseProduct<Lhs,Rhs>, Lhs, Rhs>
{
  public:
    // ...
    template<typename Dest>
    void scaleAndAddTo(Dest& dest, const typename Rhs::Scalar& alpha) const
    {
      // ... (wraps the operands in the proxies above and calls internal::sparse_time_dense_product)
    }
    // ...
};

/** \brief A versatile sparse matrix representation where each element is a block. */
template<typename _Scalar, int _BlockAtCompileTime, int _Options, typename _StorageIndex>
class BlockSparseMatrix : public SparseMatrixBase<BlockSparseMatrix<_Scalar,_BlockAtCompileTime,_Options,_StorageIndex> >
{
  public:
    // ...
    enum {
      // ...
      BlockSize = _BlockAtCompileTime,
      // ...
      IsVectorAtCompileTime = 0,
      // ...
    };
    // ...

    // Default constructor: empty matrix
    BlockSparseMatrix()
      : m_innerBSize(0), m_outerBSize(0), m_innerOffset(0), m_outerOffset(0),
        m_nonzerosblocks(0), m_values(0), m_blockPtr(0), m_indices(0),
        m_outerIndex(0), m_blockSize(BlockSize)
    {}
    /** \brief Construct and resize: brow block rows and bcol block columns */
    BlockSparseMatrix(Index brow, Index bcol)
      : m_innerBSize(IsColMajor ? brow : bcol),
        m_outerBSize(IsColMajor ? bcol : brow),
        m_innerOffset(0), m_outerOffset(0), m_nonzerosblocks(0),
        m_values(0), m_blockPtr(0), m_indices(0),
        m_outerIndex(0), m_blockSize(BlockSize)
    {}

    /** \brief Copy-constructor */
    BlockSparseMatrix(const BlockSparseMatrix& other)
      : m_innerBSize(other.m_innerBSize), m_outerBSize(other.m_outerBSize),
        m_nonzerosblocks(other.m_nonzerosblocks), m_nonzeros(other.m_nonzeros),
        m_blockPtr(0), m_blockSize(other.m_blockSize)
    {
      eigen_assert(m_blockSize == BlockSize && " CAN NOT COPY BETWEEN FIXED-SIZE AND VARIABLE-SIZE BLOCKS");
      // ... (deep copy of the internal arrays)
    }

    // ... (swap() and copy-assignment operator)

    ~BlockSparseMatrix()
    {
      delete[] m_outerIndex;
      delete[] m_innerOffset;
      delete[] m_outerOffset;
      delete[] m_indices;
      delete[] m_blockPtr;
      delete[] m_values;
    }
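    /* Usage sketch (editor illustration, not part of the original source): constructing
     * the two block layouts handled by the constructors above. The dimensions b_rows,
     * b_cols and bsize are made-up example values.
     * \code
     * int b_rows = 8, b_cols = 10, bsize = 3;
     *
     * // Block size known at compile time: every block is 3x3
     * BlockSparseMatrix<double, 3, ColMajor> bmat3(b_rows, b_cols);
     *
     * // Block size fixed but only known at runtime
     * BlockSparseMatrix<double, Dynamic, ColMajor> bmatRT(b_rows, b_cols);
     * bmatRT.setBlockSize(bsize);
     * \endcode
     */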
    /** \brief Constructor from a sparse matrix */
    template<typename MatrixType>
    inline BlockSparseMatrix(const MatrixType& spmat)
    {
      // ...
    }

    /** \brief Assignment from a sparse matrix with the same storage order
      *
      * The block layout must be described beforehand, either with setBlockSize()
      * (fixed-size blocks) or setBlockLayout() (variable-size blocks).
      */
    template<typename MatrixType>
    inline BlockSparseMatrix& operator=(const MatrixType& spmat)
    {
      eigen_assert((m_innerBSize != 0 && m_outerBSize != 0)
                   && "Trying to assign to a zero-size matrix, call resize() first");
      // ... (MatrixPatternType: a boolean sparse matrix describing the block pattern)
      MatrixPatternType blockPattern(blockRows(), blockCols());
      m_nonzeros = 0;

      // First pass: compute the number of nonzero blocks and their locations
      for(StorageIndex bj = 0; bj < m_outerBSize; ++bj)
      {
        std::vector<bool> nzblocksFlag(m_innerBSize, false); // Records the blocks already seen
        blockPattern.startVec(bj);
        for(StorageIndex j = blockOuterIndex(bj); j < blockOuterIndex(bj+1); ++j)
        {
          typename MatrixType::InnerIterator it_spmat(spmat, j);
          for(; it_spmat; ++it_spmat)
          {
            StorageIndex bi = innerToBlock(it_spmat.index()); // Index of the current nonzero block
            if(!nzblocksFlag[bi])
            {
              nzblocksFlag[bi] = true;
              blockPattern.insertBackByOuterInnerUnordered(bj, bi) = true;
              // Total number of nonzeros, including explicit zeros stored inside blocks
              m_nonzeros += blockOuterSize(bj) * blockInnerSize(bi);
            }
          }
        }
      }
      blockPattern.finalize();

      // Allocate the internal arrays from the block pattern
      setBlockStructure(blockPattern);

      // Second pass: copy the values
      for(StorageIndex nz = 0; nz < m_nonzeros; ++nz) m_values[nz] = Scalar(0);
      for(StorageIndex bj = 0; bj < m_outerBSize; ++bj)
      {
        for(StorageIndex j = blockOuterIndex(bj); j < blockOuterIndex(bj+1); ++j)
        {
          typename MatrixType::InnerIterator it_spmat(spmat, j);
          for(; it_spmat; ++it_spmat)
          {
            StorageIndex idx = 0; // Position of this block in the current outer vector
            StorageIndex bi = innerToBlock(it_spmat.index()); // Index of the current nonzero block
            // Go to the inner block where this element belongs to
            while(bi > m_indices[m_outerIndex[bj]+idx]) ++idx;
            StorageIndex idxVal; // Position of this element in the array of values
            if(m_blockSize == Dynamic)
            {
              // Offset of all the blocks before this one ...
              idxVal = m_blockPtr[m_outerIndex[bj]+idx];
              // ... plus the offset inside the current block
              idxVal += (j - blockOuterIndex(bj)) * blockOuterSize(bj) + it_spmat.index() - m_innerOffset[bi];
            }
            else
            {
              idxVal = (m_outerIndex[bj] + idx) * m_blockSize * m_blockSize;
              idxVal += (j - blockOuterIndex(bj)) * m_blockSize + (it_spmat.index() % m_blockSize);
            }
            m_values[idxVal] = it_spmat.value();
          }
        }
      }
      return *this;
    }
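    /* Usage sketch (editor illustration, not part of the original source): converting a
     * regular SparseMatrix into a block sparse matrix with runtime-fixed 2x2 blocks.
     * It assumes n is even and spmat has been filled elsewhere; both names are made up.
     * \code
     * int n = 6;
     * SparseMatrix<double> spmat(n, n);            // filled elsewhere
     * BlockSparseMatrix<double, Dynamic> bmat(n/2, n/2);
     * bmat.setBlockSize(2);                        // describe the block layout first
     * bmat = spmat;                                // then convert
     * \endcode
     */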
    /** \brief Set the nonzero block pattern of the matrix
      *
      * \param blockPattern Sparse matrix of boolean elements describing the block structure
      */
    template<typename MatrixType>
    void setBlockStructure(const MatrixType& blockPattern)
    {
      resize(blockPattern.rows(), blockPattern.cols());
      reserve(blockPattern.nonZeros());

      // Browse the block pattern and set up the various pointers
      m_outerIndex[0] = 0;
      if(m_blockSize == Dynamic) m_blockPtr[0] = 0;
      for(StorageIndex nz = 0; nz < m_nonzeros; ++nz) m_values[nz] = Scalar(0);
      for(StorageIndex bj = 0; bj < m_outerBSize; ++bj)
      {
        // Copy and sort the indices of the nonzero blocks in this outer block
        std::vector<int> nzBlockIdx;
        typename MatrixType::InnerIterator it(blockPattern, bj);
        for(; it; ++it)
          nzBlockIdx.push_back(it.index());
        std::sort(nzBlockIdx.begin(), nzBlockIdx.end());

        // Fill the block indices and, for variable-size blocks, the pointers to blocks
        for(StorageIndex idx = 0; idx < nzBlockIdx.size(); ++idx)
        {
          StorageIndex offset = m_outerIndex[bj]+idx; // offset in m_indices
          m_indices[offset] = nzBlockIdx[idx];
          if(m_blockSize == Dynamic)
            m_blockPtr[offset] = m_blockPtr[offset-1] + blockInnerSize(nzBlockIdx[idx]) * blockOuterSize(bj);
        }
        // Save the pointer to the next outer block
        m_outerIndex[bj+1] = m_outerIndex[bj] + nzBlockIdx.size();
      }
    }
    /** \brief Set the number of row and column blocks */
    inline void resize(Index brow, Index bcol)
    {
      m_innerBSize = IsColMajor ? brow : bcol;
      m_outerBSize = IsColMajor ? bcol : brow;
    }

    /** \brief Set the block size at runtime for fixed-size block layout */
    inline void setBlockSize(Index blockSize)
    {
      m_blockSize = blockSize;
    }
    /** \brief Set the row and column block layouts
      *
      * This function sets the size of each row and column block, so it should be
      * used only for variable-size blocks.
      */
    inline void setBlockLayout(const VectorXi& rowBlocks, const VectorXi& colBlocks)
    {
      const VectorXi& innerBlocks = IsColMajor ? rowBlocks : colBlocks;
      const VectorXi& outerBlocks = IsColMajor ? colBlocks : rowBlocks;
      eigen_assert(m_innerBSize == innerBlocks.size() && "CHECK THE NUMBER OF ROW OR COLUMN BLOCKS");
      eigen_assert(m_outerBSize == outerBlocks.size() && "CHECK THE NUMBER OF ROW OR COLUMN BLOCKS");
      m_outerBSize = outerBlocks.size();
      // Starting index of each block: cumulative sums of the block sizes
      m_innerOffset = new StorageIndex[m_innerBSize+1];
      m_outerOffset = new StorageIndex[m_outerBSize+1];
      m_innerOffset[0] = 0;
      m_outerOffset[0] = 0;
      std::partial_sum(&innerBlocks[0], &innerBlocks[m_innerBSize-1]+1, &m_innerOffset[1]);
      std::partial_sum(&outerBlocks[0], &outerBlocks[m_outerBSize-1]+1, &m_outerOffset[1]);

      // Compute the total number of nonzero elements
      m_nonzeros = 0;
      for(StorageIndex bj = 0; bj < m_outerBSize; ++bj)
        for(StorageIndex bi = 0; bi < m_innerBSize; ++bi)
          m_nonzeros += outerBlocks[bj] * innerBlocks[bi];
    }
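    /* Usage sketch (editor illustration, not part of the original source): describing a
     * matrix with variable-size blocks. The block sizes below are arbitrary example values.
     * \code
     * BlockSparseMatrix<double, Dynamic> bmat(3, 3);   // 3x3 blocks
     * VectorXi rowBlocks(3), colBlocks(3);
     * rowBlocks << 2, 3, 1;                            // rows of each block row
     * colBlocks << 2, 2, 4;                            // columns of each block column
     * bmat.setBlockLayout(rowBlocks, colBlocks);       // describe the layout
     * // the matrix can now be filled, e.g. with setFromTriplets()
     * \endcode
     */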
    /** \brief Allocate the internal array of pointers to blocks and their inner indices
      *
      * \param nonzerosblocks Number of nonzero blocks. For variable-size blocks,
      * the total number of nonzeros is computed in setBlockLayout().
      */
    inline void reserve(const Index nonzerosblocks)
    {
      eigen_assert((m_innerBSize != 0 && m_outerBSize != 0) &&
          "TRYING TO RESERVE ZERO-SIZE MATRICES, CALL resize() first");

      m_outerIndex = new StorageIndex[m_outerBSize+1];

      m_nonzerosblocks = nonzerosblocks;
      if(m_blockSize != Dynamic)
      {
        // Fixed-size blocks: the number of nonzeros follows directly, no per-block pointers needed
        m_nonzeros = nonzerosblocks * (m_blockSize * m_blockSize);
        m_blockPtr = 0;
      }
      else
      {
        // m_nonzeros is already computed in setBlockLayout()
        m_blockPtr = new StorageIndex[m_nonzerosblocks+1];
      }
      m_indices = new StorageIndex[m_nonzerosblocks+1];
      m_values = new Scalar[m_nonzeros];
    }
    /** \brief Fill values in a matrix from a triplet list
      *
      * Each triplet item holds a block stored in an Eigen dense matrix.
      * The InputIterator class should provide the functions row(), col() and value().
      *
      * \note For fixed-size blocks, call setBlockSize() before this function.
      */
    template<typename InputIterator>
    void setFromTriplets(const InputIterator& begin, const InputIterator& end)
    {
      eigen_assert((m_innerBSize!=0 && m_outerBSize !=0) && "ZERO BLOCKS, PLEASE CALL resize() before");

      // First, sort the triplet list by outer index, then by inner index
      internal::TripletComp<InputIterator, IsColMajor> tripletcomp;
      std::sort(begin, end, tripletcomp);

      // Count the row and column block sizes, and the number of nonzero blocks per outer vector
      VectorXi rowBlocks(m_innerBSize);      // Size of each block row
      VectorXi colBlocks(m_outerBSize);      // Size of each block column
      rowBlocks.setZero(); colBlocks.setZero();
      VectorXi nzblock_outer(m_outerBSize);  // Number of nonzero blocks per outer vector
      VectorXi nz_outer(m_outerBSize);       // Number of nonzeros per outer vector (variable-size blocks)
      nzblock_outer.setZero();
      nz_outer.setZero();
      for(InputIterator it(begin); it != end; ++it)
      {
        eigen_assert(it->row() >= 0 && it->row() < this->blockRows() && it->col() >= 0 && it->col() < this->blockCols());
        eigen_assert((it->value().rows() == it->value().cols() && (it->value().rows() == m_blockSize))
                     || (m_blockSize == Dynamic));

        if(m_blockSize == Dynamic)
        {
          eigen_assert((rowBlocks[it->row()] == 0 || rowBlocks[it->row()] == it->value().rows()) &&
              "NON CORRESPONDING SIZES FOR ROW BLOCKS");
          eigen_assert((colBlocks[it->col()] == 0 || colBlocks[it->col()] == it->value().cols()) &&
              "NON CORRESPONDING SIZES FOR COLUMN BLOCKS");
          rowBlocks[it->row()] = it->value().rows();
          colBlocks[it->col()] = it->value().cols();
        }
        nz_outer(IsColMajor ? it->col() : it->row()) += it->value().rows() * it->value().cols();
        nzblock_outer(IsColMajor ? it->col() : it->row())++;
      }
      // Allocate the member arrays
      if(m_blockSize == Dynamic) setBlockLayout(rowBlocks, colBlocks);
      StorageIndex nzblocks = nzblock_outer.sum();
      reserve(nzblocks);

      // Temporary markers
      VectorXi block_id(m_outerBSize); // Position of the next block to insert in each outer vector

      // Set up the outer index pointers and the markers
      m_outerIndex[0] = 0;
      if(m_blockSize == Dynamic) m_blockPtr[0] = 0;
      for(StorageIndex bj = 0; bj < m_outerBSize; ++bj)
      {
        m_outerIndex[bj+1] = m_outerIndex[bj] + nzblock_outer(bj);
        block_id(bj) = m_outerIndex[bj];
        if(m_blockSize == Dynamic)
          m_blockPtr[m_outerIndex[bj+1]] = m_blockPtr[m_outerIndex[bj]] + nz_outer(bj);
      }

      // Copy the blocks
      for(InputIterator it(begin); it != end; ++it)
      {
        StorageIndex outer = IsColMajor ? it->col() : it->row();
        StorageIndex inner = IsColMajor ? it->row() : it->col();
        m_indices[block_id(outer)] = inner;
        StorageIndex block_size = it->value().rows()*it->value().cols();
        StorageIndex nz_marker = blockPtr(block_id[outer]);
        memcpy(&(m_values[nz_marker]), it->value().data(), block_size * sizeof(Scalar));
        if(m_blockSize == Dynamic)
          m_blockPtr[block_id(outer)+1] = m_blockPtr[block_id(outer)] + block_size;
        block_id(outer)++;
      }
    }
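    /* Usage sketch (editor illustration, not part of the original source): filling a
     * block matrix from a list of block triplets. Triplet<MatrixXd, int> is used here on
     * the assumption that any triplet type exposing row(), col() and value() is accepted,
     * as stated above; BlockTriplet and bmat are made-up names.
     * \code
     * typedef Triplet<MatrixXd, int> BlockTriplet;
     * std::vector<BlockTriplet> triplets;
     * triplets.push_back(BlockTriplet(0, 0, MatrixXd::Identity(2, 2)));
     * triplets.push_back(BlockTriplet(1, 2, MatrixXd::Constant(2, 2, 5.0)));
     *
     * BlockSparseMatrix<double, Dynamic> bmat(2, 3);   // 2x3 blocks
     * bmat.setBlockSize(2);                            // each block is 2x2
     * bmat.setFromTriplets(triplets.begin(), triplets.end());
     * \endcode
     */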
    /** \returns the number of rows */
    inline Index rows() const { return (IsColMajor ? innerSize() : outerSize()); }
    /** \returns the number of columns */
    inline Index cols() const { return (IsColMajor ? outerSize() : innerSize()); }

    inline Index innerSize() const
    {
      if(m_blockSize == Dynamic) return m_innerOffset[m_innerBSize];
      else return (m_innerBSize * m_blockSize);
    }
    inline Index outerSize() const
    {
      if(m_blockSize == Dynamic) return m_outerOffset[m_outerBSize];
      else return (m_outerBSize * m_blockSize);
    }
    /** \returns the number of rows grouped by blocks */
    inline Index blockRows() const { return (IsColMajor ? m_innerBSize : m_outerBSize); }
    /** \returns the number of columns grouped by blocks */
    inline Index blockCols() const { return (IsColMajor ? m_outerBSize : m_innerBSize); }

    /** \returns the block index to which the scalar index \a outer belongs */
    inline Index outerToBlock(Index outer) const
    {
      eigen_assert(outer < outerSize() && "OUTER INDEX OUT OF BOUNDS");
      if(m_blockSize != Dynamic)
        return (outer / m_blockSize); // Integer division
      StorageIndex b_outer = 0;
      while(m_outerOffset[b_outer] <= outer) ++b_outer;
      return b_outer - 1;
    }
    /** \returns the block index to which the scalar index \a inner belongs */
    inline Index innerToBlock(Index inner) const
    {
      eigen_assert(inner < innerSize() && "INNER INDEX OUT OF BOUNDS");
      if(m_blockSize != Dynamic)
        return (inner / m_blockSize); // Integer division
      StorageIndex b_inner = 0;
      while(m_innerOffset[b_inner] <= inner) ++b_inner;
      return b_inner - 1;
    }
    /** \returns a reference to the (brow, bcol) block as an Eigen dense matrix */
    Ref<BlockScalar> coeffRef(Index brow, Index bcol)
    {
      eigen_assert(brow < blockRows() && "BLOCK ROW INDEX OUT OF BOUNDS");
      eigen_assert(bcol < blockCols() && "BLOCK COLUMN OUT OF BOUNDS");

      StorageIndex rsize = IsColMajor ? blockInnerSize(brow) : blockOuterSize(bcol);
      StorageIndex csize = IsColMajor ? blockOuterSize(bcol) : blockInnerSize(brow);
      StorageIndex inner = IsColMajor ? brow : bcol;
      StorageIndex outer = IsColMajor ? bcol : brow;
      StorageIndex offset = m_outerIndex[outer];
      while(offset < m_outerIndex[outer+1] && m_indices[offset] != inner)
        offset++;
      if(m_indices[offset] == inner)
        return Map<BlockScalar>(&(m_values[blockPtr(offset)]), rsize, csize);
      // ... (the block does not exist: dynamic insertion is not supported)
    }
    /** \returns the value of the (brow, bcol) block as an Eigen dense matrix */
    Map<const BlockScalar> coeff(Index brow, Index bcol) const
    {
      eigen_assert(brow < blockRows() && "BLOCK ROW INDEX OUT OF BOUNDS");
      eigen_assert(bcol < blockCols() && "BLOCK COLUMN OUT OF BOUNDS");

      StorageIndex rsize = IsColMajor ? blockInnerSize(brow) : blockOuterSize(bcol);
      StorageIndex csize = IsColMajor ? blockOuterSize(bcol) : blockInnerSize(brow);
      StorageIndex inner = IsColMajor ? brow : bcol;
      StorageIndex outer = IsColMajor ? bcol : brow;
      StorageIndex offset = m_outerIndex[outer];
      while(offset < m_outerIndex[outer+1] && m_indices[offset] != inner) offset++;
      if(m_indices[offset] == inner)
        return Map<const BlockScalar>(&(m_values[blockPtr(offset)]), rsize, csize);
      // ... (the block does not exist)
    }
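    /* Usage sketch (editor illustration, not part of the original source): reading and
     * updating an existing nonzero block. It assumes bmat was filled beforehand (for
     * instance with setFromTriplets()) so that block (0,0) exists.
     * \code
     * MatrixXd b00 = bmat.coeff(0, 0);     // copy of block (0,0)
     * bmat.coeffRef(0, 0) = 2.0 * b00;     // overwrite the stored block
     * \endcode
     */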
    // Block matrix times dense vector/matrix product
    template<typename VecType>
    BlockSparseTimeDenseProduct<BlockSparseMatrix, VecType> operator*(const VecType& lhs) const
    {
      return BlockSparseTimeDenseProduct<BlockSparseMatrix, VecType>(*this, lhs);
    }

    inline BlockScalarReturnType *valuePtr() { return static_cast<BlockScalarReturnType *>(m_values); }
    // ... (innerIndexPtr(), outerIndexPtr(), nonZerosBlocks(), nonZeros(), isCompressed())

    /** \returns the starting index of the bi-th row block */
    inline Index blockRowsIndex(Index bi) const { return IsColMajor ? blockInnerIndex(bi) : blockOuterIndex(bi); }
    /** \returns the starting index of the bj-th column block */
    inline Index blockColsIndex(Index bj) const { return IsColMajor ? blockOuterIndex(bj) : blockInnerIndex(bj); }
    inline Index blockOuterIndex(Index bj) const { return (m_blockSize == Dynamic) ? m_outerOffset[bj] : (bj * m_blockSize); }
    inline Index blockInnerIndex(Index bi) const { return (m_blockSize == Dynamic) ? m_innerOffset[bi] : (bi * m_blockSize); }
    inline Index blockInnerSize(Index bi) const { return (m_blockSize == Dynamic) ? (m_innerOffset[bi+1] - m_innerOffset[bi]) : m_blockSize; }
    inline Index blockOuterSize(Index bj) const { return (m_blockSize == Dynamic) ? (m_outerOffset[bj+1] - m_outerOffset[bj]) : m_blockSize; }
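    /* Usage sketch (editor illustration, not part of the original source): multiplying a
     * block sparse matrix by a dense vector through the operator* defined above, assuming
     * the ProductBase-based expression evaluates when assigned to a dense vector.
     * \code
     * VectorXd x(bmat.cols()), y;
     * x.setOnes();
     * y = bmat * x;   // BlockSparseTimeDenseProduct evaluated into y
     * \endcode
     */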
    class InnerIterator;       // Browse the matrix scalar by scalar, outer vector by outer vector
    class BlockInnerIterator;  // Browse the matrix block by block

    friend std::ostream& operator<<(std::ostream& s, const BlockSparseMatrix& m)
    {
      for(StorageIndex j = 0; j < m.outerBlocks(); ++j)
      {
        BlockInnerIterator itb(m, j);
        for(; itb; ++itb)
        {
          s << "(" << itb.row() << ", " << itb.col() << ")\n";
          s << itb.value() << "\n";
        }
      }
      s << std::endl;
      return s;
    }

    /** \returns the starting position of the block \a id in the array of values */
    Index blockPtr(Index id) const
    {
      if(m_blockSize == Dynamic) return m_blockPtr[id];
      else return id * m_blockSize * m_blockSize;
    }

  protected:
    // ... (internal arrays: m_innerBSize, m_outerBSize, m_innerOffset, m_outerOffset,
    //      m_nonzerosblocks, m_nonzeros, m_values, m_blockPtr, m_indices, m_outerIndex, m_blockSize)
};
template<typename _Scalar, int _BlockAtCompileTime, int _Options, typename _StorageIndex>
class BlockSparseMatrix<_Scalar, _BlockAtCompileTime, _Options, _StorageIndex>::BlockInnerIterator
{
  public:
    BlockInnerIterator(const BlockSparseMatrix& mat, const Index outer)
      : m_mat(mat), m_outer(outer),
        m_id(mat.m_outerIndex[outer]),
        m_end(mat.m_outerIndex[outer+1])
    {}

    // ... (operator++(), value(), valueRef(), index(), outer(), row(), col())

    // Number of rows in the current block
    inline Index rows() const { return (m_mat.m_blockSize==Dynamic) ? (m_mat.m_innerOffset[index()+1] - m_mat.m_innerOffset[index()]) : m_mat.m_blockSize; }
    // Number of columns in the current block
    inline Index cols() const { return (m_mat.m_blockSize==Dynamic) ? (m_mat.m_outerOffset[m_outer+1] - m_mat.m_outerOffset[m_outer]) : m_mat.m_blockSize; }
    inline operator bool() const { return (m_id < m_end); }

  protected:
    const BlockSparseMatrix& m_mat;
    const Index m_outer;
    Index m_id;
    Index m_end;
};
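/* Usage sketch (editor illustration, not part of the original source): printing every
 * stored block with BlockInnerIterator, mirroring what operator<< above does; bmat is
 * assumed to be a filled BlockSparseMatrix<double, Dynamic>.
 * \code
 * for (int bj = 0; bj < bmat.outerBlocks(); ++bj)
 *   for (BlockSparseMatrix<double, Dynamic>::BlockInnerIterator it(bmat, bj); it; ++it)
 *     std::cout << "block (" << it.row() << "," << it.col() << ")\n" << it.value() << "\n";
 * \endcode
 */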
template<typename _Scalar, int _BlockAtCompileTime, int _Options, typename _StorageIndex>
class BlockSparseMatrix<_Scalar, _BlockAtCompileTime, _Options, _StorageIndex>::InnerIterator
{
  public:
    InnerIterator(const BlockSparseMatrix& mat, Index outer)
      : m_mat(mat), m_outerB(mat.outerToBlock(outer)), m_outer(outer),
        itb(mat, mat.outerToBlock(outer)),
        m_offset(outer - mat.blockOuterIndex(m_outerB))
    {
      if(itb)
      {
        m_id = m_mat.blockInnerIndex(itb.index());
        m_start = m_id;
        m_end = m_mat.blockInnerIndex(itb.index()+1);
      }
    }
    inline InnerIterator& operator++()
    {
      m_id++;
      if(m_id >= m_end)
      {
        ++itb; // move on to the next block in this outer vector
        if(itb)
        {
          m_id = m_mat.blockInnerIndex(itb.index());
          m_start = m_id;
          m_end = m_mat.blockInnerIndex(itb.index()+1);
        }
      }
      return *this;
    }
    inline const Scalar& value() const { return itb.value().coeff(m_id - m_start, m_offset); }
    inline Scalar& valueRef() { return itb.valueRef().coeff(m_id - m_start, m_offset); }
    // ... (index(), outer(), row(), col())
    inline operator bool() const { return itb; }

  protected:
    // ... (reference to the matrix, the current BlockInnerIterator and the scalar positions)
};

#endif // EIGEN_SPARSEBLOCKMATRIX_H