Eigen  3.3.3
SparseCompressedBase.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H

namespace Eigen {

template<typename Derived> class SparseCompressedBase;

namespace internal {

template<typename Derived>
struct traits<SparseCompressedBase<Derived> > : traits<Derived>
{};

} // end namespace internal

/** \class SparseCompressedBase
  * \brief Common base class for sparse [compressed]-storage matrices and expressions
  *
  * This class defines the common interface shared by all derived classes implementing
  * the standard compressed sparse storage scheme.
  */
template<typename Derived>
class SparseCompressedBase
  : public SparseMatrixBase<Derived>
{
  public:
    typedef SparseMatrixBase<Derived> Base;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
    using Base::operator=;
    using Base::IsRowMajor;

    class InnerIterator;
    class ReverseInnerIterator;

  protected:
    typedef typename Base::IndexVector IndexVector;
    Eigen::Map<IndexVector> innerNonZeros() { return Eigen::Map<IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }
    const Eigen::Map<const IndexVector> innerNonZeros() const { return Eigen::Map<const IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }

  public:

    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const
    {
      if(Derived::IsVectorAtCompileTime && outerIndexPtr()==0)
        return derived().nonZeros();
      else if(isCompressed())
        return outerIndexPtr()[derived().outerSize()]-outerIndexPtr()[0];
      else if(derived().outerSize()==0)
        return 0;
      else
        return innerNonZeros().sum();
    }
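    // Worked example (illustrative, not part of the original header): for a compressed
    // column-major 3x3 matrix with 5 stored entries, outerIndexPtr() is {0,2,4,5} and
    // nonZeros() returns 5-0 = 5. In uncompressed mode the same count comes from summing
    // innerNonZeroPtr(), e.g. {2,2,1} sums to 5.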

    /** \returns a (const) pointer to the array of stored coefficient values */
    inline const Scalar* valuePtr() const { return derived().valuePtr(); }
    inline Scalar* valuePtr() { return derived().valuePtr(); }

    /** \returns a (const) pointer to the array of inner indices of the stored coefficients */
    inline const StorageIndex* innerIndexPtr() const { return derived().innerIndexPtr(); }
    inline StorageIndex* innerIndexPtr() { return derived().innerIndexPtr(); }

    /** \returns a (const) pointer to the array of offsets of the first stored coefficient
      * of each inner vector (null for types without an outer index buffer, e.g. SparseVector) */
    inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
    inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }

    /** \returns a (const) pointer to the array of the number of nonzeros per inner vector
      * (null when the matrix is in compressed mode) */
    inline const StorageIndex* innerNonZeroPtr() const { return derived().innerNonZeroPtr(); }
    inline StorageIndex* innerNonZeroPtr() { return derived().innerNonZeroPtr(); }

    /** \returns whether *this is in compressed form */
    inline bool isCompressed() const { return innerNonZeroPtr()==0; }

    /** \returns a read-only Map over the stored coefficient values (compressed mode only) */
    const Map<const Array<Scalar,Dynamic,1> > coeffs() const { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }

    /** \returns a read-write Map over the stored coefficient values (compressed mode only) */
    Map<Array<Scalar,Dynamic,1> > coeffs() { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }
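
    // Usage sketch (illustrative, not part of the original header): operate on all stored
    // coefficients of a compressed matrix without touching its structure.
    //
    //   SparseMatrix<double> A = ...;
    //   A.makeCompressed();               // coeffs() asserts isCompressed()
    //   A.coeffs() *= 2.0;                // element-wise scaling of the value array
    //   double maxVal = A.coeffs().maxCoeff();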

  protected:
    /** Default constructor. Do nothing. */
    SparseCompressedBase() {}
  private:
    template<typename OtherDerived> explicit SparseCompressedBase(const SparseCompressedBase<OtherDerived>&);
};
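
// Usage sketch (illustrative, not part of the original header): the raw-buffer accessors
// expose the standard CSC/CSR arrays of a derived SparseMatrix for interoperability with
// other libraries. Variable names below are hypothetical.
//
//   SparseMatrix<double> A(rows, cols);          // column-major by default
//   // ... fill A, then:
//   A.makeCompressed();
//   const double* values   = A.valuePtr();       // nonZeros() stored values
//   const int*    innerIdx = A.innerIndexPtr();  // row index of each stored value
//   const int*    outerIdx = A.outerIndexPtr();  // cols()+1 offsets into the arrays above
//   assert(A.isCompressed() && A.innerNonZeroPtr() == 0);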

template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator()
      : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0)
    {}

    InnerIterator(const InnerIterator& other)
      : m_values(other.m_values), m_indices(other.m_indices), m_outer(other.m_outer), m_id(other.m_id), m_end(other.m_end)
    {}

    InnerIterator& operator=(const InnerIterator& other)
    {
      m_values = other.m_values;
      m_indices = other.m_indices;
      const_cast<OuterType&>(m_outer).setValue(other.m_outer.value());
      m_id = other.m_id;
      m_end = other.m_end;
      return *this;
    }

    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_id = 0;
        m_end = mat.nonZeros();
      }
      else
      {
        m_id = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_end = mat.outerIndexPtr()[outer+1];
        else
          m_end = m_id + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit InnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline StorageIndex index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_id;
    Index m_end;
  private:
    // If you get here, then you're not using the right InnerIterator type, e.g.:
    //   SparseMatrix<double,RowMajor> A;
    //   SparseMatrix<double>::InnerIterator it(A,0);
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};
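
// Usage sketch (illustrative, not part of the original header): the canonical loop over
// all stored coefficients of a matrix deriving from SparseCompressedBase.
//
//   SparseMatrix<double> A = ...;
//   for (Index k = 0; k < A.outerSize(); ++k)
//     for (SparseMatrix<double>::InnerIterator it(A, k); it; ++it)
//       std::cout << "(" << it.row() << "," << it.col() << ") = " << it.value() << "\n";
//
// The iterator type must match the matrix's storage order; mixing them triggers the
// private catch-all constructor above.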

template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_start = 0;
        m_id = mat.nonZeros();
      }
      else
      {
        m_start = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_id = mat.outerIndexPtr()[outer+1];
        else
          m_id = m_start + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit ReverseInnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline StorageIndex index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_start;
    Index m_id;
};
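
// Usage sketch (illustrative, not part of the original header): traverse each inner vector
// from its last stored coefficient back to its first.
//
//   SparseMatrix<double> A = ...;
//   for (Index k = 0; k < A.outerSize(); ++k)
//     for (SparseMatrix<double>::ReverseInnerIterator it(A, k); it; --it)
//       std::cout << it.index() << " -> " << it.value() << "\n";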

namespace internal {

template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  explicit evaluator(const Derived &mat) : m_matrix(&mat), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
  const Scalar& coeff(Index row, Index col) const
  {
    Index p = find(row,col);

    if(p==Dynamic)
      return m_zero;
    else
      return m_matrix->const_cast_derived().valuePtr()[p];
  }

  Scalar& coeffRef(Index row, Index col)
  {
    Index p = find(row,col);
    eigen_assert(p!=Dynamic && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

protected:

  // Binary search for the storage position of coefficient (row,col) within its inner
  // vector; returns Dynamic when the coefficient is not explicitly stored.
  Index find(Index row, Index col) const
  {
    eigen_internal_assert(row>=0 && row<m_matrix->rows() && col>=0 && col<m_matrix->cols());

    const Index outer = Derived::IsRowMajor ? row : col;
    const Index inner = Derived::IsRowMajor ? col : row;

    Index start = m_matrix->outerIndexPtr()[outer];
    Index end = m_matrix->isCompressed() ? m_matrix->outerIndexPtr()[outer+1] : m_matrix->outerIndexPtr()[outer] + m_matrix->innerNonZeroPtr()[outer];
    eigen_assert(end>=start && "you are using a non finalized sparse matrix or written coefficient does not exist");
    const Index p = std::lower_bound(m_matrix->innerIndexPtr()+start, m_matrix->innerIndexPtr()+end,inner) - m_matrix->innerIndexPtr();

    return ((p<end) && (m_matrix->innerIndexPtr()[p]==inner)) ? p : Dynamic;
  }

  const Derived *m_matrix;
  const Scalar m_zero;
};
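
// Behavior sketch (illustrative, not part of the original header): find() does a binary
// search (std::lower_bound) within one inner vector, so random access costs
// O(log(nnz of that inner vector)). coeff() returns the cached m_zero when the entry is
// not stored, while coeffRef() asserts that it exists. The same kind of lookup backs
// user-level read access such as:
//
//   SparseMatrix<double> A(4,4);
//   A.insert(1,2) = 3.0;
//   A.makeCompressed();
//   double a = A.coeff(1,2);   // 3.0, located by binary search on the inner indices
//   double b = A.coeff(0,0);   // 0.0, coefficient is not stored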

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H