CompressedStorage.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {

/** \internal
  * Stores a sparse set of values as a list of values and a list of indices.
  */
template<typename _Scalar,typename _Index>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _Index Index;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    CompressedStorage(size_t size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      memcpy(m_values, other.m_values, m_size * sizeof(Scalar));
      memcpy(m_indices, other.m_indices, m_size * sizeof(Index));
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }

    void reserve(size_t size)
    {
      size_t newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

    void resize(size_t size, float reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
        reallocate(size + size_t(reserveSizeFactor*size));
      m_size = size;
    }

    void append(const Scalar& v, Index i)
    {
      Index id = static_cast<Index>(m_size);
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = i;
    }

    inline size_t size() const { return m_size; }
    inline size_t allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    inline Scalar& value(size_t i) { return m_values[i]; }
    inline const Scalar& value(size_t i) const { return m_values[i]; }

    inline Index& index(size_t i) { return m_indices[i]; }
    inline const Index& index(size_t i) const { return m_indices[i]; }

    static CompressedStorage Map(Index* indices, Scalar* values, size_t size)
    {
      CompressedStorage res;
      res.m_indices = indices;
      res.m_values = values;
      res.m_allocatedSize = res.m_size = size;
      return res;
    }

    /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\a key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\a key */
    inline Index searchLowerIndex(size_t start, size_t end, Index key) const
    {
      while(end>start)
      {
        size_t mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return static_cast<Index>(start);
    }
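    // The two searchLowerIndex() overloads above perform a lower-bound binary
    // search over the sorted m_indices array: they return the first position
    // whose stored index is not smaller than key (the same result as
    // std::lower_bound applied to that index range).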
    /** \returns the stored value at index \a key
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(size_t start, size_t end, Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return Scalar(0);
      else if (end>start && key==m_indices[end-1])
        return m_values[end-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** \returns a reference to the value at index \a key
      * If the value does not exist, then the value \a defaultValue is inserted
      * such that the keys are sorted. */
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      size_t id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        resize(m_size+1,1);
        for (size_t j=m_size-1; j>id; --j)
        {
          m_indices[j] = m_indices[j-1];
          m_values[j] = m_values[j-1];
        }
        m_indices[id] = key;
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
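    // atWithInsertion() above keeps m_indices sorted by shifting every entry
    // located after the insertion point one slot to the right, so inserting a
    // new key costs O(size()) in the worst case; appending keys in increasing
    // order via append() avoids this shift.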
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      size_t k = 0;
      size_t n = size();
      for (size_t i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }

  protected:

    inline void reallocate(size_t size)
    {
      Scalar* newValues  = new Scalar[size];
      Index* newIndices = new Index[size];
      size_t copySize = (std::min)(size, m_size);
      // copy
      internal::smart_copy(m_values, m_values+copySize, newValues);
      internal::smart_copy(m_indices, m_indices+copySize, newIndices);
      // delete old stuff
      delete[] m_values;
      delete[] m_indices;
      m_values = newValues;
      m_indices = newIndices;
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    Index* m_indices;
    size_t m_size;
    size_t m_allocatedSize;

};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H
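
The listing above defines the low-level container that backs Eigen's sparse matrices: two parallel arrays, one of values and one of sorted indices, with binary-search lookup. The following is a minimal usage sketch, assuming an Eigen 3.2-era installation where this header is pulled in through <Eigen/SparseCore>; Eigen::internal::CompressedStorage is an internal class rather than a stable public API, so names and template parameters may differ in other Eigen versions.

// Minimal sketch (not part of the header above); the include path and the
// <double,int> template arguments are assumptions based on Eigen 3.2.
#include <Eigen/SparseCore>
#include <cstddef>
#include <iostream>

int main()
{
  Eigen::internal::CompressedStorage<double,int> s;

  // Append (value, index) pairs with strictly increasing indices so that the
  // index array stays sorted, as at()/searchLowerIndex() require.
  s.append(1.5, 2);
  s.append(0.0, 5);   // explicit zero, a candidate for prune()
  s.append(3.0, 7);

  std::cout << s.at(7) << "\n";        // 3  (index present)
  std::cout << s.at(4, -1.0) << "\n";  // -1 (index absent -> defaultValue)

  // atWithInsertion() inserts a new slot while keeping the indices sorted and
  // returns a reference to the stored value.
  s.atWithInsertion(4) = 2.5;

  // Drop entries that are negligible relative to the reference value 1.0.
  s.prune(1.0);

  for (std::size_t i = 0; i < s.size(); ++i)
    std::cout << "(" << s.index(i) << ", " << s.value(i) << ") ";
  std::cout << "\n";   // (2, 1.5) (4, 2.5) (7, 3)
  return 0;
}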