Eigen 3.3.3
CompressedStorage.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {

/** \internal
  * Stores a sparse set of values as a list of values and a list of indices.
  */
template<typename _Scalar,typename _StorageIndex>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _StorageIndex StorageIndex;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    explicit CompressedStorage(Index size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      if(other.size()>0)
      {
        internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
        internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      }
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }

    void reserve(Index size)
    {
      Index newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

    void resize(Index size, double reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
      {
        Index realloc_size = (std::min<Index>)(NumTraits<StorageIndex>::highest(), size + Index(reserveSizeFactor*double(size)));
        if(realloc_size<size)
          internal::throw_std_bad_alloc();
        reallocate(realloc_size);
      }
      m_size = size;
    }

    void append(const Scalar& v, Index i)
    {
      Index id = m_size;
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = internal::convert_index<StorageIndex>(i);
    }

    inline Index size() const { return m_size; }
    inline Index allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    const Scalar* valuePtr() const { return m_values; }
    Scalar* valuePtr() { return m_values; }
    const StorageIndex* indexPtr() const { return m_indices; }
    StorageIndex* indexPtr() { return m_indices; }

    inline Scalar& value(Index i) { eigen_internal_assert(m_values!=0); return m_values[i]; }
    inline const Scalar& value(Index i) const { eigen_internal_assert(m_values!=0); return m_values[i]; }

    inline StorageIndex& index(Index i) { eigen_internal_assert(m_indices!=0); return m_indices[i]; }
    inline const StorageIndex& index(Index i) const { eigen_internal_assert(m_indices!=0); return m_indices[i]; }

    /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\c key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\c key */
    inline Index searchLowerIndex(Index start, Index end, Index key) const
    {
      while(end>start)
      {
        Index mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return start;
    }

    /** \returns the stored value at index \a key
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const Index id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(Index start, Index end, Index key, const Scalar &defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      else if (end>start && key==m_indices[end-1])
        return m_values[end-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const Index id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** \returns a reference to the value at index \a key
      * If the value does not exist, then the value \a defaultValue is inserted
      * such that the keys are sorted. */
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      Index id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        if (m_allocatedSize<m_size+1)
        {
          m_allocatedSize = 2*(m_size+1);
          internal::scoped_array<Scalar> newValues(m_allocatedSize);
          internal::scoped_array<StorageIndex> newIndices(m_allocatedSize);

          // copy first chunk
          internal::smart_copy(m_values,  m_values +id, newValues.ptr());
          internal::smart_copy(m_indices, m_indices+id, newIndices.ptr());

          // copy the rest
          if(m_size>id)
          {
            internal::smart_copy(m_values +id,  m_values +m_size, newValues.ptr() +id+1);
            internal::smart_copy(m_indices+id,  m_indices+m_size, newIndices.ptr()+id+1);
          }
          std::swap(m_values,newValues.ptr());
          std::swap(m_indices,newIndices.ptr());
        }
        else if(m_size>id)
        {
          internal::smart_memmove(m_values +id, m_values +m_size, m_values +id+1);
          internal::smart_memmove(m_indices+id, m_indices+m_size, m_indices+id+1);
        }
        m_size++;
        m_indices[id] = internal::convert_index<StorageIndex>(key);
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }

    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      Index k = 0;
      Index n = size();
      for (Index i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }

  protected:

    inline void reallocate(Index size)
    {
      #ifdef EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
        EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
      #endif
      eigen_internal_assert(size!=m_allocatedSize);
      internal::scoped_array<Scalar> newValues(size);
      internal::scoped_array<StorageIndex> newIndices(size);
      Index copySize = (std::min)(size, m_size);
      if (copySize>0) {
        internal::smart_copy(m_values, m_values+copySize, newValues.ptr());
        internal::smart_copy(m_indices, m_indices+copySize, newIndices.ptr());
      }
      std::swap(m_values,newValues.ptr());
      std::swap(m_indices,newIndices.ptr());
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    StorageIndex* m_indices;
    Index m_size;
    Index m_allocatedSize;

};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H
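CompressedStorage lives in the internal namespace (it backs Eigen's SparseMatrix, holding the value array and inner-index array of the compressed CSC/CSR-like format), so it is not meant to be used directly. The following is a minimal, self-contained sketch of the same idea rather than Eigen code: two parallel arrays kept sorted by index, a lower-bound binary search mirroring searchLowerIndex(), a read-with-default mirroring at(), and a sorted insertion mirroring atWithInsertion(). The class name MiniCompressedStorage and its std::vector-based storage are inventions of this example.

#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical illustration of the CompressedStorage idea; not Eigen code.
template<typename Scalar, typename StorageIndex>
class MiniCompressedStorage
{
  public:
    // Mirrors searchLowerIndex(): smallest position k such that all stored
    // indices before k are < key (a lower-bound binary search).
    std::size_t searchLowerIndex(StorageIndex key) const
    {
      std::size_t start = 0, end = m_indices.size();
      while (end > start)
      {
        std::size_t mid = (start + end) / 2;
        if (m_indices[mid] < key) start = mid + 1;
        else                      end = mid;
      }
      return start;
    }

    // Mirrors at(): read-only lookup; absent keys yield defaultValue.
    Scalar at(StorageIndex key, const Scalar& defaultValue = Scalar(0)) const
    {
      std::size_t id = searchLowerIndex(key);
      return (id < m_indices.size() && m_indices[id] == key) ? m_values[id] : defaultValue;
    }

    // Mirrors atWithInsertion(): inserts defaultValue at the sorted position
    // when key is absent, then returns a reference to the stored value.
    Scalar& atWithInsertion(StorageIndex key, const Scalar& defaultValue = Scalar(0))
    {
      std::size_t id = searchLowerIndex(key);
      if (id >= m_indices.size() || m_indices[id] != key)
      {
        m_indices.insert(m_indices.begin() + id, key);
        m_values.insert(m_values.begin() + id, defaultValue);
      }
      return m_values[id];
    }

  private:
    std::vector<Scalar> m_values;         // stored values
    std::vector<StorageIndex> m_indices;  // matching indices, kept sorted
};

int main()
{
  MiniCompressedStorage<double,int> s;
  s.atWithInsertion(7) = 3.5;  // insert at index 7
  s.atWithInsertion(2) = 1.0;  // insert at index 2; indices stay sorted: {2,7}
  assert(s.at(7) == 3.5);
  assert(s.at(5) == 0.0);      // absent key falls back to the default
  return 0;
}

The sketch preserves the cost asymmetry of the real class: lookups are O(log n) thanks to the binary search over sorted indices, while an insertion at an arbitrary position must shift every later entry and is therefore O(n). This is why atWithInsertion() is reserved for occasional random writes, and why bulk construction of sparse data is better served by reserving space and appending in increasing index order.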