CompressedStorage.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {

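/** \internal
  * Stores a sparse set of values as a list of values and a list of indices of the same size.
  */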
template<typename _Scalar,typename _Index>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _Index Index;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    CompressedStorage(size_t size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
      internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }

    void reserve(size_t size)
    {
      size_t newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

    void resize(size_t size, double reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
        reallocate(size + size_t(reserveSizeFactor*double(size)));
      m_size = size;
    }

    void append(const Scalar& v, Index i)
    {
      Index id = static_cast<Index>(m_size);
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = i;
    }

    inline size_t size() const { return m_size; }
    inline size_t allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    inline Scalar& value(size_t i) { return m_values[i]; }
    inline const Scalar& value(size_t i) const { return m_values[i]; }

    inline Index& index(size_t i) { return m_indices[i]; }
    inline const Index& index(size_t i) const { return m_indices[i]; }

    static CompressedStorage Map(Index* indices, Scalar* values, size_t size)
    {
      CompressedStorage res;
      res.m_indices = indices;
      res.m_values = values;
      res.m_allocatedSize = res.m_size = size;
      return res;
    }

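    /** \returns the largest \c k such that for all \c j in [0,k) index[\c j] \< \c key */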
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

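    /** like searchLowerIndex(Index key) const, but the search is restricted to the index range [start,end) */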
    inline Index searchLowerIndex(size_t start, size_t end, Index key) const
    {
      while(end>start)
      {
        size_t mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return static_cast<Index>(start);
    }

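    /** \returns the stored value at index \c key,
      * or \a defaultValue if no value is stored for \c key (nothing is inserted) */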
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      // Optimization: first check whether key is the last stored coefficient,
      // which is a very common access pattern in high-level algorithms.
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      const size_t id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

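    /** like at(), but the search is performed within the index range [start,end) */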
    inline Scalar atInRange(size_t start, size_t end, Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      // Optimization: first check whether key is the last coefficient of the range,
      // which is a very common access pattern in high-level algorithms.
      else if (key==m_indices[end-1])
        return m_values[end-1];
      const size_t id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

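    /** \returns a reference to the value at index \c key.
      * If no value is stored for \c key, an entry with value \a defaultValue is inserted first,
      * shifting all subsequent entries (and possibly reallocating the buffers). */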
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      size_t id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        resize(m_size+1,1);
        for (size_t j=m_size-1; j>id; --j)
        {
          m_indices[j] = m_indices[j-1];
          m_values[j] = m_values[j-1];
        }
        m_indices[id] = key;
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }

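    /** removes all coefficients that are "much smaller" than \a reference,
      * in the sense of internal::isMuchSmallerThan(value(i), reference, epsilon) */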
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      size_t k = 0;
      size_t n = size();
      for (size_t i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }

  protected:

    inline void reallocate(size_t size)
    {
      Scalar* newValues  = new Scalar[size];
      Index* newIndices = new Index[size];
      size_t copySize = (std::min)(size, m_size);
      // copy
      internal::smart_copy(m_values, m_values+copySize, newValues);
      internal::smart_copy(m_indices, m_indices+copySize, newIndices);
      // delete old stuff
      delete[] m_values;
      delete[] m_indices;
      m_values = newValues;
      m_indices = newIndices;
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    Index* m_indices;
    size_t m_size;
    size_t m_allocatedSize;

};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H
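
CompressedStorage is an internal helper used by Eigen's sparse containers and is not part of the public API; application code would normally go through Eigen::SparseMatrix instead. Purely as a minimal sketch of how the interface above fits together (assuming the header is reachable through <Eigen/Sparse>, with illustrative values only):

#include <iostream>
#include <Eigen/Sparse>  // the sparse module includes CompressedStorage.h internally

int main()
{
  // Illustrative sketch only: this type is an implementation detail of Eigen.
  Eigen::internal::CompressedStorage<double,int> storage;

  storage.append(2.0, 1);    // value 2.0 stored at index 1
  storage.append(0.5, 4);    // value 0.5 stored at index 4
  storage.append(1e-20, 7);  // a nearly-zero value at index 7

  std::cout << storage.at(4) << "\n";   // prints 0.5
  std::cout << storage.at(3) << "\n";   // prints 0 (not stored, default returned)

  storage.prune(1.0);                   // drops values much smaller than 1.0
  std::cout << storage.size() << "\n";  // prints 2
  return 0;
}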

