10 #ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
11 #define EIGEN_SPARSE_COMPRESSED_BASE_H
// NOTE(review): fragmentary extraction — the lines below belong to the
// SparseCompressedBase<Derived> class template (forward declaration and class
// head); many intervening original lines are missing from this view.
19 template<
typename Derived>
35 template<
typename Derived>
// Pull in the base-class assignment operators — presumably so locally declared
// operator= overloads do not hide them (TODO confirm against full file).
42 using Base::operator=;
46 class ReverseInnerIterator;
// nonZeros(): delegate to the derived expression's own count.
59 return derived().nonZeros();
// A matrix with zero outer dimension stores no non-zeros.
62 else if(derived().outerSize()==0)
80 inline const StorageIndex*
innerIndexPtr()
const {
return derived().innerIndexPtr(); }
84 inline StorageIndex*
innerIndexPtr() {
return derived().innerIndexPtr(); }
90 inline const StorageIndex*
outerIndexPtr()
const {
return derived().outerIndexPtr(); }
95 inline StorageIndex*
outerIndexPtr() {
return derived().outerIndexPtr(); }
100 inline const StorageIndex*
innerNonZeroPtr()
const {
return derived().innerNonZeroPtr(); }
// ===== InnerIterator (fragmentary view) =====================================
// NOTE(review): many original lines are missing from this extraction; the
// fragments below belong to SparseCompressedBase<Derived>::InnerIterator's
// constructors. The iterator walks the half-open range [m_id, m_end).
135 template<
typename Derived>
// Default constructor: null pointers and an empty range [0, 0).
140 : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0)
// Copy constructor: member-wise copy of the iteration state.
144 : m_values(other.m_values), m_indices(other.m_indices), m_outer(other.m_outer), m_id(other.m_id), m_end(other.m_end)
// Compile-time vector without an outer-index table: iterate all stored values.
160 if(Derived::IsVectorAtCompileTime &&
mat.outerIndexPtr()==0)
163 m_end =
mat.nonZeros();
// General case: start of inner vector `outer` from the outer-index table.
167 m_id =
mat.outerIndexPtr()[outer];
168 if(
mat.isCompressed())
// Compressed mode: the next outer-index entry bounds this inner vector.
169 m_end =
mat.outerIndexPtr()[outer+1];
// Uncompressed mode: innerNonZeroPtr() gives the per-vector element count.
171 m_end = m_id +
mat.innerNonZeroPtr()[outer];
// Constructor from raw internal storage `data`: iterate its whole extent.
182 : m_values(data.
valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.
size())
192 inline StorageIndex
index()
const {
return m_indices[m_id]; }
197 inline operator bool()
const {
return (m_id < m_end); }
// ===== ReverseInnerIterator (fragmentary view) ==============================
// NOTE(review): many original lines are missing from this extraction; the
// fragments below belong to ReverseInnerIterator's constructors. The reverse
// iterator walks m_id down toward m_start (mirror of InnerIterator).
213 template<
typename Derived>
// Compile-time vector without an outer-index table: start past the last value.
220 if(Derived::IsVectorAtCompileTime &&
mat.outerIndexPtr()==0)
223 m_id =
mat.nonZeros();
// General case: lower bound of inner vector `outer` from the outer-index table.
227 m_start =
mat.outerIndexPtr()[outer];
228 if(
mat.isCompressed())
// Compressed mode: one-past-the-end comes from the next outer-index entry.
229 m_id =
mat.outerIndexPtr()[outer+1];
// Uncompressed mode: innerNonZeroPtr() gives the per-vector element count.
231 m_id = m_start +
mat.innerNonZeroPtr()[outer];
// Constructor from raw internal storage `data`: iterate its whole extent.
242 : m_values(data.
valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.
size())
252 inline StorageIndex
index()
const {
return m_indices[m_id-1]; }
257 inline operator bool()
const {
return (m_id > m_start); }
// ===== evaluator<SparseCompressedBase<Derived>> (fragmentary view) ==========
// NOTE(review): only fragments of the evaluator specialization are visible.
270 template<
typename Derived>
// Forward the wrapped expression's flags to the evaluator.
279 Flags = Derived::Flags
// nonZeros(): delegate to the wrapped matrix.
292 return m_matrix->nonZeros();
295 operator Derived&() {
return m_matrix->const_cast_derived(); }
296 operator const Derived&()
const {
return *m_matrix; }
// NOTE(review): the two identical lines below are the return statements of two
// distinct evaluator accessors (original lines 306 and 313) — presumably
// coeff()/coeffRef()-style value accessors; their signatures fall outside this
// view. Both read the stored value at position p through const_cast_derived().
306 return m_matrix->const_cast_derived().valuePtr()[p];
313 return m_matrix->const_cast_derived().valuePtr()[p];
// Body of the storage-position lookup for entry (row, col); the enclosing
// function's signature (original ~line 320) is outside this extraction view.
// Returns the position p of the stored entry, or Dynamic when it is absent.
// Map (row, col) to (outer, inner) according to the storage order.
322 const Index outer = Derived::IsRowMajor ?
row :
col;
323 const Index inner = Derived::IsRowMajor ?
col :
row;
// [start, end) delimits inner vector `outer`; in uncompressed mode the end is
// start plus the per-vector non-zero count.
325 Index start = m_matrix->outerIndexPtr()[outer];
326 Index end = m_matrix->isCompressed() ? m_matrix->outerIndexPtr()[outer+1] : m_matrix->outerIndexPtr()[outer] + m_matrix->innerNonZeroPtr()[outer];
327 eigen_assert(end>=start &&
"you are using a non finalized sparse matrix or written coefficient does not exist");
// Inner indices are sorted within an inner vector, so binary search applies.
328 const Index p = std::lower_bound(m_matrix->innerIndexPtr()+start, m_matrix->innerIndexPtr()+end,inner) - m_matrix->innerIndexPtr();
// Dynamic serves as the "not found" sentinel.
330 return ((p<end) && (m_matrix->innerIndexPtr()[p]==inner)) ? p :
Dynamic;
341 #endif // EIGEN_SPARSE_COMPRESSED_BASE_H