AmbiVector.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_AMBIVECTOR_H
#define EIGEN_AMBIVECTOR_H

namespace Eigen {

namespace internal {
/** \internal
  * Hybrid sparse/dense vector class designed for intensive read-write operations.
  *
  * See BasicSparseLLT and SparseProduct for usage examples.
  */
template<typename _Scalar, typename _StorageIndex>
class AmbiVector
{
  public:
    typedef _Scalar Scalar;
    typedef _StorageIndex StorageIndex;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    explicit AmbiVector(Index size)
      : m_buffer(0), m_zero(0), m_size(0), m_start(0), m_end(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
    {
      resize(size);
    }

    void init(double estimatedDensity);
    void init(int mode);

    Index nonZeros() const;

    /** Specifies a sub-vector to work on */
    void setBounds(Index start, Index end) { m_start = convert_index(start); m_end = convert_index(end); }

    void setZero();

    void restart();
    Scalar& coeffRef(Index i);
    Scalar& coeff(Index i);

    class Iterator;

    ~AmbiVector() { delete[] m_buffer; }

    void resize(Index size)
    {
      if (m_allocatedSize < size)
        reallocate(size);
      m_size = convert_index(size);
    }

    StorageIndex size() const { return m_size; }

  protected:
    StorageIndex convert_index(Index idx)
    {
      return internal::convert_index<StorageIndex>(idx);
    }

    void reallocate(Index size)
    {
      // if the size of the matrix is not too large, let's allocate a bit more than needed such
      // that we can handle a dense vector even in sparse mode.
      delete[] m_buffer;
      if (size<1000)
      {
        Index allocSize = (size * sizeof(ListEl) + sizeof(Scalar) - 1)/sizeof(Scalar);
        m_allocatedElements = convert_index((allocSize*sizeof(Scalar))/sizeof(ListEl));
        m_buffer = new Scalar[allocSize];
        m_allocatedSize = convert_index(allocSize); // remember the capacity (in Scalars) so resize() can skip redundant reallocations
      }
      else
      {
        m_allocatedElements = convert_index((size*sizeof(Scalar))/sizeof(ListEl));
        m_buffer = new Scalar[size];
        m_allocatedSize = convert_index(size);
      }
      m_size = convert_index(size);
      m_start = 0;
      m_end = m_size;
    }
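
    // Worked example of the sizing above (illustrative, not part of the file's API):
    // with Scalar=double (8 bytes) and StorageIndex=int, sizeof(ListEl) is 16 bytes,
    // so for size=100 (<1000):
    //   allocSize           = (100*16 + 8-1)/8 = 200 Scalars
    //   m_allocatedElements = (200*8)/16       = 100 list elements
    // i.e. the same buffer can back the full dense vector or a linked list with one
    // element per coefficient. For size>=1000 only `size` Scalars are allocated,
    // leaving room for size/2 list elements in sparse mode.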

    void reallocateSparse()
    {
      Index copyElements = m_allocatedElements;
      m_allocatedElements = (std::min)(StorageIndex(m_allocatedElements*1.5), m_size);
      Index allocSize = m_allocatedElements * sizeof(ListEl);
      allocSize = (allocSize + sizeof(Scalar) - 1)/sizeof(Scalar);
      Scalar* newBuffer = new Scalar[allocSize];
      memcpy(newBuffer, m_buffer, copyElements * sizeof(ListEl));
      delete[] m_buffer;
      m_buffer = newBuffer;
    }
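
    // The growth policy above is a 1.5x geometric enlargement capped at m_size,
    // since a vector of size n holds at most n distinct coefficients. Starting
    // e.g. from a capacity of 64 elements: 64 -> 96 -> 144 -> ... -> m_size
    // (illustrative numbers).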

  protected:
    // element type of the linked list
    struct ListEl
    {
      StorageIndex next;
      StorageIndex index;
      Scalar value;
    };

    // buffer used to store the data in both modes
    Scalar* m_buffer;
    Scalar m_zero;
    StorageIndex m_size;
    StorageIndex m_start;
    StorageIndex m_end;
    StorageIndex m_allocatedSize;
    StorageIndex m_allocatedElements;
    StorageIndex m_mode;

    // linked list mode
    StorageIndex m_llStart;
    StorageIndex m_llCurrent;
    StorageIndex m_llSize;
};
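
// A minimal usage sketch of AmbiVector (illustrative only: the names n and
// density below are placeholders, and the indices/values are made up). This
// mirrors how the sparse-sparse product kernels drive it: choose a mode from
// an estimated density, accumulate coefficients, then harvest the nonzeros
// through the Iterator defined at the end of this file.
//
// \code
//   internal::AmbiVector<double,int> tmp(n);
//   tmp.init(density);       // > 0.1 selects the dense buffer, otherwise the linked list
//   tmp.setZero();
//   tmp.restart();           // rewind before an in-order write pass
//   tmp.coeffRef(2) += 1.0;
//   tmp.coeffRef(5) += 3.0;  // indices must not decrease until the next restart()
//   for (internal::AmbiVector<double,int>::Iterator it(tmp); it; ++it)
//     std::cout << it.index() << " : " << it.value() << "\n";
// \endcode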

/** \returns the number of non zeros in the current sub vector */
template<typename _Scalar,typename _StorageIndex>
Index AmbiVector<_Scalar,_StorageIndex>::nonZeros() const
{
  if (m_mode==IsSparse)
    return m_llSize;
  else
    return m_end - m_start;
}

template<typename _Scalar,typename _StorageIndex>
void AmbiVector<_Scalar,_StorageIndex>::init(double estimatedDensity)
{
  if (estimatedDensity>0.1)
    init(IsDense);
  else
    init(IsSparse);
}
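
// The 0.1 threshold above is the mode heuristic: callers pass the expected
// ratio of nonzeros. For example (illustrative), a vector of size 1000 with
// ~50 expected nonzeros (density 0.05) gets the linked-list representation,
// while ~200 expected nonzeros (density 0.2) gets the O(1)-access dense buffer.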

template<typename _Scalar,typename _StorageIndex>
void AmbiVector<_Scalar,_StorageIndex>::init(int mode)
{
  m_mode = mode;
  if (m_mode==IsSparse)
  {
    m_llSize = 0;
    m_llStart = -1;
  }
}

/** Must be called whenever we might perform a write access
  * with an index smaller than the previous one.
  *
  * Don't worry, this function is extremely cheap.
  */
template<typename _Scalar,typename _StorageIndex>
void AmbiVector<_Scalar,_StorageIndex>::restart()
{
  m_llCurrent = m_llStart;
}

/** Sets all coefficients of the current subvector to zero */
template<typename _Scalar,typename _StorageIndex>
void AmbiVector<_Scalar,_StorageIndex>::setZero()
{
  if (m_mode==IsDense)
  {
    for (Index i=m_start; i<m_end; ++i)
      m_buffer[i] = Scalar(0);
  }
  else
  {
    eigen_assert(m_mode==IsSparse);
    m_llSize = 0;
    m_llStart = -1;
  }
}

template<typename _Scalar,typename _StorageIndex>
_Scalar& AmbiVector<_Scalar,_StorageIndex>::coeffRef(Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    // TODO factorize the following code to reduce code generation
    eigen_assert(m_mode==IsSparse);
    if (m_llSize==0)
    {
      // this is the first element
      m_llStart = 0;
      m_llCurrent = 0;
      ++m_llSize;
      llElements[0].value = Scalar(0);
      llElements[0].index = convert_index(i);
      llElements[0].next = -1;
      return llElements[0].value;
    }
    else if (i<llElements[m_llStart].index)
    {
      // this is going to be the new first element of the list
      ListEl& el = llElements[m_llSize];
      el.value = Scalar(0);
      el.index = convert_index(i);
      el.next = m_llStart;
      m_llStart = m_llSize;
      ++m_llSize;
      m_llCurrent = m_llStart;
      return el.value;
    }
    else
    {
      StorageIndex nextel = llElements[m_llCurrent].next;
      eigen_assert(i>=llElements[m_llCurrent].index && "you must call restart() before inserting an element with lower or equal index");
      while (nextel >= 0 && llElements[nextel].index<=i)
      {
        m_llCurrent = nextel;
        nextel = llElements[nextel].next;
      }

      if (llElements[m_llCurrent].index==i)
      {
        // the coefficient already exists and we found it!
        return llElements[m_llCurrent].value;
      }
      else
      {
        if (m_llSize>=m_allocatedElements)
        {
          reallocateSparse();
          llElements = reinterpret_cast<ListEl*>(m_buffer);
        }
        eigen_internal_assert(m_llSize<m_allocatedElements && "internal error: overflow in sparse mode");
        // let's insert a new coefficient
        ListEl& el = llElements[m_llSize];
        el.value = Scalar(0);
        el.index = convert_index(i);
        el.next = llElements[m_llCurrent].next;
        llElements[m_llCurrent].next = m_llSize;
        ++m_llSize;
        return el.value;
      }
    }
  }
}
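
// coeffRef() searches forward from m_llCurrent, so within one write pass the
// requested indices must be non-decreasing (this is what the eigen_assert
// above enforces); restart() rewinds the cursor to the head of the list.
// A sketch of the intended calling pattern (indices and values made up):
//
// \code
//   v.restart();
//   v.coeffRef(1) += 0.5;  // ok
//   v.coeffRef(7) += 2.0;  // ok: 7 >= 1
//   v.restart();           // required before revisiting a smaller index
//   v.coeffRef(3) += 1.0;  // ok again: the search restarts from the head
// \endcode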

template<typename _Scalar,typename _StorageIndex>
_Scalar& AmbiVector<_Scalar,_StorageIndex>::coeff(Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if ((m_llSize==0) || (i<llElements[m_llStart].index))
    {
      return m_zero;
    }
    else
    {
      Index elid = m_llStart;
      while (elid >= 0 && llElements[elid].index<i)
        elid = llElements[elid].next;

      // return the element just found, or m_zero for absent coefficients
      // (elid<0 means we ran past the end of the list)
      if (elid >= 0 && llElements[elid].index==i)
        return llElements[elid].value;
      else
        return m_zero;
    }
  }
}
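
// Unlike coeffRef(), coeff() never inserts: it scans from the head of the list
// and returns a reference to the shared m_zero member for absent coefficients.
// Writing through that reference would corrupt m_zero, so callers must treat
// the result of coeff() as read-only.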

/** Iterator over the nonzero coefficients */
template<typename _Scalar,typename _StorageIndex>
class AmbiVector<_Scalar,_StorageIndex>::Iterator
{
  public:
    typedef _Scalar Scalar;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    /** Default constructor
      * \param vec the vector on which we iterate
      * \param epsilon the minimal value used to prune zero coefficients.
      * In practice, all coefficients having a magnitude smaller than \a epsilon
      * are skipped.
      */
    explicit Iterator(const AmbiVector& vec, const RealScalar& epsilon = 0)
      : m_vector(vec)
    {
      using std::abs;
      m_epsilon = epsilon;
      m_isDense = m_vector.m_mode==IsDense;
      if (m_isDense)
      {
        m_currentEl = 0;   // this is to avoid a compilation warning
        m_cachedValue = 0; // this is to avoid a compilation warning
        m_cachedIndex = m_vector.m_start-1;
        ++(*this);
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        m_currentEl = m_vector.m_llStart;
        while (m_currentEl>=0 && abs(llElements[m_currentEl].value)<=m_epsilon)
          m_currentEl = llElements[m_currentEl].next;
        if (m_currentEl<0)
        {
          m_cachedValue = 0; // this is to avoid a compilation warning
          m_cachedIndex = -1;
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
    }

    StorageIndex index() const { return m_cachedIndex; }
    Scalar value() const { return m_cachedValue; }

    operator bool() const { return m_cachedIndex>=0; }

    Iterator& operator++()
    {
      using std::abs;
      if (m_isDense)
      {
        do {
          ++m_cachedIndex;
        } while (m_cachedIndex<m_vector.m_end && abs(m_vector.m_buffer[m_cachedIndex])<=m_epsilon);
        if (m_cachedIndex<m_vector.m_end)
          m_cachedValue = m_vector.m_buffer[m_cachedIndex];
        else
          m_cachedIndex = -1;
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        do {
          m_currentEl = llElements[m_currentEl].next;
        } while (m_currentEl>=0 && abs(llElements[m_currentEl].value)<=m_epsilon);
        if (m_currentEl<0)
        {
          m_cachedIndex = -1;
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
      return *this;
    }

  protected:
    const AmbiVector& m_vector; // the target vector
    StorageIndex m_currentEl;   // the current element in sparse/linked-list mode
    RealScalar m_epsilon;       // epsilon used to prune zero coefficients
    StorageIndex m_cachedIndex; // current coordinate
    Scalar m_cachedValue;       // current value
    bool m_isDense;             // mode of the vector
};
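
// Example of pruning via the iterator's epsilon (illustrative; `tmp`, `res`
// and `j` are placeholders for an accumulator, a result matrix and its current
// outer index): coefficients with magnitude <= epsilon are skipped, which is
// how the pruning sparse product drops near-zero terms while copying a column:
//
// \code
//   double tolerance = 1e-12;
//   for (internal::AmbiVector<double,int>::Iterator it(tmp, tolerance); it; ++it)
//     res.insertBackByOuterInner(j, it.index()) = it.value();
// \endcode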

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_AMBIVECTOR_H