#ifndef EIGEN_AMBIVECTOR_H
#define EIGEN_AMBIVECTOR_H

#include <cstring>  // for std::memcpy used by reallocateSparse()
/** \internal
  * Hybrid sparse/dense vector class designed for intensive read-write operations.
  * Depending on the density selected at init() time, the coefficients live either
  * in a plain dense buffer or in a singly linked list laid out in that same buffer.
  */
template<typename _Scalar, typename _Index>
class AmbiVector
{
  public:
    typedef _Scalar Scalar;
    typedef _Index Index;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    AmbiVector(Index size)
      : m_buffer(0), m_zero(0), m_size(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
    {
      resize(size);
    }
    void init(double estimatedDensity);
    void init(int mode);

    Index nonZeros() const;

    /** Specifies a sub-vector to work on */
    void setBounds(Index start, Index end) { m_start = start; m_end = end; }

    void setZero();

    void restart();
    Scalar& coeffRef(Index i);
    Scalar& coeff(Index i);

    class Iterator;

    ~AmbiVector() { delete[] m_buffer; }
    void resize(Index size)
    {
      if (m_allocatedSize < size)
        reallocate(size);
      m_size = size;
    }

    Index size() const { return m_size; }
  protected:

    void reallocate(Index size)
    {
      // For small vectors, allocate enough scalars to back 'size' linked-list
      // elements, so the same buffer can also run in dense mode. For large
      // vectors, allocate exactly 'size' scalars and let sparse mode use as
      // many ListEl nodes as fit in that buffer.
      delete[] m_buffer;
      if (size<1000)
      {
        Index allocSize = (size * sizeof(ListEl))/sizeof(Scalar);
        m_allocatedElements = (allocSize*sizeof(Scalar))/sizeof(ListEl);
        m_buffer = new Scalar[allocSize];
        m_allocatedSize = allocSize; // record the capacity so resize() can skip redundant reallocations
      }
      else
      {
        m_allocatedElements = (size*sizeof(Scalar))/sizeof(ListEl);
        m_buffer = new Scalar[size];
        m_allocatedSize = size;
      }
      m_size = size;
      m_start = 0;
      m_end = m_size;
    }
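
    // Worked sizing example (illustrative addition): with Scalar=double (8 bytes)
    // and Index=int, sizeof(ListEl) is typically 16 bytes, so reallocate(100)
    // takes the small-size branch and allocates (100*16)/8 = 200 doubles. That
    // one buffer can back either 100 dense coefficients or 100 ListEl nodes,
    // which is what lets init() switch modes without reallocating.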

    void reallocateSparse()
    {
      // Grow the linked-list storage by about 1.5x, capped at m_size elements,
      // and copy the existing elements over.
      Index copyElements = m_allocatedElements;
      m_allocatedElements = (std::min)(Index(m_allocatedElements*1.5),m_size);
      Index allocSize = m_allocatedElements * sizeof(ListEl);
      allocSize = allocSize/sizeof(Scalar) + (allocSize%sizeof(Scalar)>0?1:0); // round up to whole scalars
      Scalar* newBuffer = new Scalar[allocSize];
      std::memcpy(newBuffer, m_buffer, copyElements * sizeof(ListEl));
      delete[] m_buffer;
      m_buffer = newBuffer;
      m_allocatedSize = allocSize;
    }
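
    // Growth example (illustrative): starting from m_allocatedElements == 8
    // with m_size == 100, successive calls yield capacities 12, 18, 27, ...
    // (1.5x growth), clamped at m_size because a vector of logical size
    // m_size can never hold more than m_size distinct nonzeros.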

  protected:

    // An element of the singly linked list used in sparse mode
    struct ListEl
    {
      Index next;
      Index index;
      Scalar value;
    };

    // used to store data in both modes
    Scalar* m_buffer;
    Scalar m_zero;
    Index m_size;
    Index m_start;
    Index m_end;
    Index m_allocatedSize;
    Index m_allocatedElements;
    Index m_mode;

    // linked-list mode
    Index m_llStart;
    Index m_llCurrent;
    Index m_llSize;
};
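
/* Minimal usage sketch (illustrative addition, not part of the original file;
 * assumes Eigen's IsDense/IsSparse constants and <iostream> are available at
 * the call site):
 *
 *   AmbiVector<double,int> vec(100);  // logical size 100
 *   vec.init(0.01);                   // estimated density 1% -> sparse mode
 *   vec.setZero();
 *   vec.restart();
 *   vec.coeffRef(3)  = 1.5;           // indices must not decrease between restart() calls
 *   vec.coeffRef(42) = 2.0;
 *   for (AmbiVector<double,int>::Iterator it(vec); it; ++it)
 *     std::cout << it.index() << " : " << it.value() << "\n";  // 3 : 1.5, then 42 : 2
 */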

/** \internal Returns the number of non zeros in the current sub-vector */
template<typename _Scalar,typename _Index>
_Index AmbiVector<_Scalar,_Index>::nonZeros() const
{
  if (m_mode==IsSparse)
    return m_llSize;
  else
    return m_end - m_start;
}

template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::init(double estimatedDensity)
{
  // above ~10% estimated density, a dense buffer beats the linked list
  if (estimatedDensity>0.1)
    init(IsDense);
  else
    init(IsSparse);
}

template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::init(int mode)
{
  m_mode = mode;
  if (m_mode==IsSparse)
  {
    // reset the linked list to an empty state
    m_llSize = 0;
    m_llStart = -1;
  }
}
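
// Mode-selection example (illustrative): init(0.25) picks IsDense since the
// estimated density exceeds the 10% threshold above, while init(0.01) picks
// IsSparse; init(int) can also be called directly with IsDense or IsSparse.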

/** Must be called whenever we might perform a write access
  * with an index smaller than the previous one.
  *
  * Don't worry, this function is extremely cheap.
  */
template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::restart()
{
  m_llCurrent = m_llStart;
}

/** Sets all coefficients of the current sub-vector to zero */
template<typename _Scalar,typename _Index>
void AmbiVector<_Scalar,_Index>::setZero()
{
  if (m_mode==IsDense)
  {
    for (Index i=m_start; i<m_end; ++i)
      m_buffer[i] = Scalar(0);
  }
  else
  {
    eigen_assert(m_mode==IsSparse);
    m_llSize = 0;
    m_llStart = -1;
  }
}

template<typename _Scalar,typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeffRef(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if (m_llSize==0)
    {
      // this is the first element of the list
      m_llStart = 0;
      m_llCurrent = 0;
      ++m_llSize;
      llElements[0].value = Scalar(0);
      llElements[0].index = i;
      llElements[0].next = -1;
      return llElements[0].value;
    }
    else if (i<llElements[m_llStart].index)
    {
      // this is going to be the new first element of the list
      if (m_llSize>=m_allocatedElements)
      {
        // make room before appending the node (mirrors the insertion path below)
        reallocateSparse();
        llElements = reinterpret_cast<ListEl*>(m_buffer);
      }
      ListEl& el = llElements[m_llSize];
      el.value = Scalar(0);
      el.index = i;
      el.next = m_llStart;
      m_llStart = m_llSize;
      ++m_llSize;
      m_llCurrent = m_llStart;
      return el.value;
    }
    else
    {
      Index nextel = llElements[m_llCurrent].next;
      eigen_assert(i>=llElements[m_llCurrent].index && "you must call restart() before inserting an element with lower or equal index");
      // advance the cached position up to the insertion point
      while (nextel >= 0 && llElements[nextel].index<=i)
      {
        m_llCurrent = nextel;
        nextel = llElements[nextel].next;
      }

      if (llElements[m_llCurrent].index==i)
      {
        // the coefficient already exists, return a reference to it
        return llElements[m_llCurrent].value;
      }
      else
      {
        if (m_llSize>=m_allocatedElements)
        {
          reallocateSparse();
          llElements = reinterpret_cast<ListEl*>(m_buffer);
        }
        eigen_internal_assert(m_llSize<m_allocatedElements && "internal error: overflow in sparse mode");
        // insert a new coefficient right after m_llCurrent
        ListEl& el = llElements[m_llSize];
        el.value = Scalar(0);
        el.index = i;
        el.next = llElements[m_llCurrent].next;
        llElements[m_llCurrent].next = m_llSize;
        ++m_llSize;
        return el.value;
      }
    }
  }
}
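
// Access-pattern sketch (illustrative): between two restart() calls, coeffRef()
// must be fed non-decreasing indices, e.g. coeffRef(2), coeffRef(4), coeffRef(7).
// To revisit a smaller index, call restart() first:
//   vec.restart();
//   vec.coeffRef(1) += x;  // OK: the list is searched again from its head
// Without the restart(), the eigen_assert above fires in debug builds.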

template<typename _Scalar,typename _Index>
_Scalar& AmbiVector<_Scalar,_Index>::coeff(_Index i)
{
  if (m_mode==IsDense)
    return m_buffer[i];
  else
  {
    ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
    eigen_assert(m_mode==IsSparse);
    if ((m_llSize==0) || (i<llElements[m_llStart].index))
    {
      return m_zero;
    }
    else
    {
      // walk the list until we reach (or pass) index i
      Index elid = m_llStart;
      while (elid >= 0 && llElements[elid].index<i)
        elid = llElements[elid].next;

      if (elid >= 0 && llElements[elid].index==i)
        return llElements[elid].value;
      else
        return m_zero;
    }
  }
}

/** \internal
  * An iterator over the nonzero coefficients of an AmbiVector
  */
template<typename _Scalar,typename _Index>
class AmbiVector<_Scalar,_Index>::Iterator
{
  public:
    typedef _Scalar Scalar;
    typedef typename NumTraits<Scalar>::Real RealScalar;

    /** Default constructor
      * \param vec the vector on which we iterate
      * \param epsilon the minimal value used to prune zero coefficients:
      *                all coefficients having a magnitude smaller than \a epsilon are skipped
      */
    Iterator(const AmbiVector& vec, RealScalar epsilon = RealScalar(0.1)*NumTraits<RealScalar>::dummy_precision())
      : m_vector(vec)
    {
      m_epsilon = epsilon;
      m_isDense = m_vector.m_mode==IsDense;
      if (m_isDense)
      {
        m_currentEl = 0;   // only meaningful in sparse mode; set to avoid a warning
        m_cachedValue = 0;
        m_cachedIndex = m_vector.m_start-1;
        ++(*this);
      }
      else
      {
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        m_currentEl = m_vector.m_llStart;
        while (m_currentEl>=0 && internal::abs(llElements[m_currentEl].value)<m_epsilon)
          m_currentEl = llElements[m_currentEl].next;
        if (m_currentEl<0)
        {
          m_cachedValue = 0;
          m_cachedIndex = -1;
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
    }

    Index index() const { return m_cachedIndex; }
    Scalar value() const { return m_cachedValue; }

    operator bool() const { return m_cachedIndex>=0; }

    Iterator& operator++()
    {
      if (m_isDense)
      {
        // scan the dense buffer for the next coefficient above the threshold
        do {
          ++m_cachedIndex;
        } while (m_cachedIndex<m_vector.m_end && internal::abs(m_vector.m_buffer[m_cachedIndex])<m_epsilon);
        if (m_cachedIndex<m_vector.m_end)
          m_cachedValue = m_vector.m_buffer[m_cachedIndex];
        else
          m_cachedIndex=-1;
      }
      else
      {
        // follow the linked list, skipping coefficients below the threshold
        ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
        do {
          m_currentEl = llElements[m_currentEl].next;
        } while (m_currentEl>=0 && internal::abs(llElements[m_currentEl].value)<m_epsilon);
        if (m_currentEl<0)
        {
          m_cachedIndex = -1;
        }
        else
        {
          m_cachedIndex = llElements[m_currentEl].index;
          m_cachedValue = llElements[m_currentEl].value;
        }
      }
      return *this;
    }

  protected:
    const AmbiVector& m_vector; // the target vector
    Index m_currentEl;          // the current element in sparse/linked-list mode
    RealScalar m_epsilon;       // epsilon used to prune zero coefficients
    Index m_cachedIndex;        // current coordinate
    Scalar m_cachedValue;       // current value
    bool m_isDense;             // mode of the vector
};
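
// Pruning sketch (illustrative): entries created by coeffRef() but left at zero
// are skipped by the iterator, since |0| < epsilon for any positive threshold:
//   for (AmbiVector<float,int>::Iterator it(vec, 1e-6f); it; ++it)
//     sum += it.value();  // only coefficients with |value| >= 1e-6 contribute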

#endif // EIGEN_AMBIVECTOR_H