CompressedStorage.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// Eigen is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 3 of the License, or (at your option) any later version.
//
// Alternatively, you can redistribute it and/or
// modify it under the terms of the GNU General Public License as
// published by the Free Software Foundation; either version 2 of
// the License, or (at your option) any later version.
//
// Eigen is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
// FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License or the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License and a copy of the GNU General Public License along with
// Eigen. If not, see <http://www.gnu.org/licenses/>.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

/** \internal
  * Stores a sparse set of values as a list of values and a parallel list of indices.
  */
template<typename _Scalar,typename _Index>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _Index Index;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    CompressedStorage(size_t size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      memcpy(m_values, other.m_values, m_size * sizeof(Scalar));
      memcpy(m_indices, other.m_indices, m_size * sizeof(Index));
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }

    void reserve(size_t size)
    {
      size_t newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

    void resize(size_t size, float reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
        reallocate(size + size_t(reserveSizeFactor*size));
      m_size = size;
    }

    // Appends a value at the end of the storage. Callers are expected to append
    // indices in increasing order so that the index array stays sorted, which the
    // binary searches below rely on.
    void append(const Scalar& v, Index i)
    {
      Index id = static_cast<Index>(m_size);
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = i;
    }

    inline size_t size() const { return m_size; }
    inline size_t allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    inline Scalar& value(size_t i) { return m_values[i]; }
    inline const Scalar& value(size_t i) const { return m_values[i]; }

    inline Index& index(size_t i) { return m_indices[i]; }
    inline const Index& index(size_t i) const { return m_indices[i]; }

    // Wraps externally allocated arrays. Caution: the destructor calls delete[]
    // on m_values/m_indices, so the buffers passed here must have been allocated
    // with new[] and must not be freed elsewhere.
    static CompressedStorage Map(Index* indices, Scalar* values, size_t size)
    {
      CompressedStorage res;
      res.m_indices = indices;
      res.m_values = values;
      res.m_allocatedSize = res.m_size = size;
      return res;
    }

    /** \returns the largest k such that for all j in [0,k) index[j] < key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest k in [start,end) such that for all j in [start,k) index[j] < key */
    inline Index searchLowerIndex(size_t start, size_t end, Index key) const
    {
      while(end>start)
      {
        size_t mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return static_cast<Index>(start);
    }
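    // searchLowerIndex() behaves like a lower-bound binary search over the sorted
    // index array. Example: with m_indices = {2, 5, 9},
    //   searchLowerIndex(5)  returns 1  (exact match at position 1),
    //   searchLowerIndex(6)  returns 2  (first position whose stored index is >= 6),
    //   searchLowerIndex(10) returns 3  (== m_size, i.e. no stored index is >= 10).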

    /** \returns the stored value at index key.
      * If the value does not exist, then the value defaultValue is returned without any insertion. */
    inline Scalar at(Index key, Scalar defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^  optimization: let's first check whether it is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(size_t start, size_t end, Index key, Scalar defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      else if (end>start && key==m_indices[end-1])
        return m_values[end-1];
      // ^^  optimization: let's first check whether it is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** \returns a reference to the value at index key.
      * If the value does not exist, it is inserted with value defaultValue,
      * shifting all following entries one position to the right. */
    inline Scalar& atWithInsertion(Index key, Scalar defaultValue = Scalar(0))
    {
      size_t id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        resize(m_size+1,1);
        for (size_t j=m_size-1; j>id; --j)
        {
          m_indices[j] = m_indices[j-1];
          m_values[j] = m_values[j-1];
        }
        m_indices[id] = key;
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
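    // Example: with m_indices = {1, 4} and m_values = {a, b},
    // atWithInsertion(3) finds insertion position 1, shifts the {4, b} entry one
    // slot to the right and stores {3, defaultValue} there, giving m_indices = {1, 3, 4}.
    // The shift makes a single insertion cost O(size()) in the worst case.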

    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      size_t k = 0;
      size_t n = size();
      for (size_t i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }

  protected:

    inline void reallocate(size_t size)
    {
      Scalar* newValues  = new Scalar[size];
      Index* newIndices = new Index[size];
      size_t copySize = std::min(size, m_size);
      // copy
      memcpy(newValues,  m_values,  copySize * sizeof(Scalar));
      memcpy(newIndices, m_indices, copySize * sizeof(Index));
      // delete old stuff
      delete[] m_values;
      delete[] m_indices;
      m_values = newValues;
      m_indices = newIndices;
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    Index* m_indices;
    size_t m_size;
    size_t m_allocatedSize;

};

#endif // EIGEN_COMPRESSED_STORAGE_H
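A minimal usage sketch of the container (assumptions: the header is pulled in through <Eigen/Sparse> and the class is visible as Eigen::CompressedStorage; it is an internal helper, so the include path and namespace may differ between Eigen versions):

#include <Eigen/Sparse>   // assumption: makes CompressedStorage available
#include <iostream>

int main()
{
  // Double values addressed by int indices; indices are appended in increasing order.
  Eigen::CompressedStorage<double,int> data;
  data.append(1.5, 2);
  data.append(2.5, 5);
  data.append(0.0, 9);   // an explicit zero

  std::cout << data.at(5) << "\n";        // 2.5
  std::cout << data.at(7, -1.0) << "\n";  // -1: index 7 is not stored, the default is returned

  data.prune(1.0);                        // drops values much smaller than 1.0, i.e. the explicit zero
  std::cout << data.size() << "\n";       // 2
  return 0;
}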

