// SparseVector.h
// Go to the documentation of this file.
1 // This file is part of Eigen, a lightweight C++ template library
2 // for linear algebra.
3 //
4 // Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud@inria.fr>
5 //
6 // Eigen is free software; you can redistribute it and/or
7 // modify it under the terms of the GNU Lesser General Public
8 // License as published by the Free Software Foundation; either
9 // version 3 of the License, or (at your option) any later version.
10 //
11 // Alternatively, you can redistribute it and/or
12 // modify it under the terms of the GNU General Public License as
13 // published by the Free Software Foundation; either version 2 of
14 // the License, or (at your option) any later version.
15 //
16 // Eigen is distributed in the hope that it will be useful, but WITHOUT ANY
17 // WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
18 // FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License or the
19 // GNU General Public License for more details.
20 //
21 // You should have received a copy of the GNU Lesser General Public
22 // License and a copy of the GNU General Public License along with
23 // Eigen. If not, see <http://www.gnu.org/licenses/>.
24 
25 #ifndef EIGEN_SPARSEVECTOR_H
26 #define EIGEN_SPARSEVECTOR_H
27 
28 namespace Eigen {
29 
43 namespace internal {
44 template<typename _Scalar, int _Options, typename _Index>
45 struct traits<SparseVector<_Scalar, _Options, _Index> >
46 {
47  typedef _Scalar Scalar;
48  typedef _Index Index;
49  typedef Sparse StorageKind;
50  typedef MatrixXpr XprKind;
51  enum {
52  IsColVector = (_Options & RowMajorBit) ? 0 : 1,
53 
54  RowsAtCompileTime = IsColVector ? Dynamic : 1,
55  ColsAtCompileTime = IsColVector ? 1 : Dynamic,
56  MaxRowsAtCompileTime = RowsAtCompileTime,
57  MaxColsAtCompileTime = ColsAtCompileTime,
58  Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
59  CoeffReadCost = NumTraits<Scalar>::ReadCost,
60  SupportedAccessPatterns = InnerRandomAccessPattern
61  };
62 };
63 }
64 
65 template<typename _Scalar, int _Options, typename _Index>
67  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
68 {
69  public:
73 
74  protected:
75  public:
76 
78  enum { IsColVector = internal::traits<SparseVector>::IsColVector };
79 
80  enum {
81  Options = _Options
82  };
83 
84  internal::CompressedStorage<Scalar,Index> m_data;
86 
87  internal::CompressedStorage<Scalar,Index>& _data() { return m_data; }
88  internal::CompressedStorage<Scalar,Index>& _data() const { return m_data; }
89 
90  public:
91 
92  EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
93  EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
95  EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
96 
97  EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
98  EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }
99 
100  EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
102 
103  inline Scalar coeff(Index row, Index col) const
104  {
105  eigen_assert((IsColVector ? col : row)==0);
106  return coeff(IsColVector ? row : col);
107  }
108  inline Scalar coeff(Index i) const { return m_data.at(i); }
109 
111  {
112  eigen_assert((IsColVector ? col : row)==0);
113  return coeff(IsColVector ? row : col);
114  }
115 
122  inline Scalar& coeffRef(Index i)
123  {
124  return m_data.atWithInsertion(i);
125  }
126 
127  public:
128 
129  class InnerIterator;
130  class ReverseInnerIterator;
131 
132  inline void setZero() { m_data.clear(); }
133 
135  inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }
136 
137  inline void startVec(Index outer)
138  {
139  EIGEN_UNUSED_VARIABLE(outer);
140  eigen_assert(outer==0);
141  }
142 
143  inline Scalar& insertBackByOuterInner(Index outer, Index inner)
144  {
145  EIGEN_UNUSED_VARIABLE(outer);
146  eigen_assert(outer==0);
147  return insertBack(inner);
148  }
150  {
151  m_data.append(0, i);
152  return m_data.value(m_data.size()-1);
153  }
154 
156  {
157  Index inner = IsColVector ? row : col;
158  Index outer = IsColVector ? col : row;
159  eigen_assert(outer==0);
160  return insert(inner);
161  }
163  {
164  Index startId = 0;
165  Index p = Index(m_data.size()) - 1;
166  // TODO smart realloc
167  m_data.resize(p+2,1);
168 
169  while ( (p >= startId) && (m_data.index(p) > i) )
170  {
171  m_data.index(p+1) = m_data.index(p);
172  m_data.value(p+1) = m_data.value(p);
173  --p;
174  }
175  m_data.index(p+1) = i;
176  m_data.value(p+1) = 0;
177  return m_data.value(p+1);
178  }
179 
182  inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }
183 
184 
185  inline void finalize() {}
186 
187  void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
188  {
189  m_data.prune(reference,epsilon);
190  }
191 
193  {
194  eigen_assert(rows==1 || cols==1);
195  resize(IsColVector ? rows : cols);
196  }
197 
198  void resize(Index newSize)
199  {
200  m_size = newSize;
201  m_data.clear();
202  }
203 
204  void resizeNonZeros(Index size) { m_data.resize(size); }
205 
206  inline SparseVector() : m_size(0) { resize(0); }
207 
208  inline SparseVector(Index size) : m_size(0) { resize(size); }
209 
210  inline SparseVector(Index rows, Index cols) : m_size(0) { resize(rows,cols); }
211 
212  template<typename OtherDerived>
214  : m_size(0)
215  {
216  *this = other.derived();
217  }
218 
219  inline SparseVector(const SparseVector& other)
220  : m_size(0)
221  {
222  *this = other.derived();
223  }
224 
225  inline void swap(SparseVector& other)
226  {
227  std::swap(m_size, other.m_size);
228  m_data.swap(other.m_data);
229  }
230 
231  inline SparseVector& operator=(const SparseVector& other)
232  {
233  if (other.isRValue())
234  {
235  swap(other.const_cast_derived());
236  }
237  else
238  {
239  resize(other.size());
240  m_data = other.m_data;
241  }
242  return *this;
243  }
244 
245  template<typename OtherDerived>
247  {
248  if (int(RowsAtCompileTime)!=int(OtherDerived::RowsAtCompileTime))
249  return assign(other.transpose());
250  else
251  return assign(other);
252  }
253 
254  #ifndef EIGEN_PARSED_BY_DOXYGEN
255  template<typename Lhs, typename Rhs>
256  inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
257  {
258  return Base::operator=(product);
259  }
260  #endif
261 
262  friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
263  {
264  for (Index i=0; i<m.nonZeros(); ++i)
265  s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
266  s << std::endl;
267  return s;
268  }
269 
271  inline ~SparseVector() {}
272 
274  Scalar sum() const;
275 
276  public:
277 
280  {
281  setZero();
282  m_data.reserve(reserve);
283  }
284 
287  {
288  eigen_assert(r==0 || c==0);
289  return fill(IsColVector ? r : c);
290  }
291 
294  {
295  m_data.append(0, i);
296  return m_data.value(m_data.size()-1);
297  }
298 
301  {
302  eigen_assert(r==0 || c==0);
303  return fillrand(IsColVector ? r : c);
304  }
305 
308  {
309  return insert(i);
310  }
311 
314 
315 # ifdef EIGEN_SPARSEVECTOR_PLUGIN
316 # include EIGEN_SPARSEVECTOR_PLUGIN
317 # endif
318 
319 protected:
320  template<typename OtherDerived>
322  {
323  const OtherDerived& other(_other.derived());
324  const bool needToTranspose = (Flags & RowMajorBit) != (OtherDerived::Flags & RowMajorBit);
325  if(needToTranspose)
326  {
327  Index size = other.size();
328  Index nnz = other.nonZeros();
329  resize(size);
330  reserve(nnz);
331  for(Index i=0; i<size; ++i)
332  {
333  typename OtherDerived::InnerIterator it(other, i);
334  if(it)
335  insert(i) = it.value();
336  }
337  return *this;
338  }
339  else
340  {
341  // there is no special optimization
342  return Base::operator=(other);
343  }
344  }
345 };
346 
347 template<typename Scalar, int _Options, typename _Index>
348 class SparseVector<Scalar,_Options,_Index>::InnerIterator
349 {
350  public:
351  InnerIterator(const SparseVector& vec, Index outer=0)
352  : m_data(vec.m_data), m_id(0), m_end(static_cast<Index>(m_data.size()))
353  {
354  EIGEN_UNUSED_VARIABLE(outer);
355  eigen_assert(outer==0);
356  }
357 
358  InnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
359  : m_data(data), m_id(0), m_end(static_cast<Index>(m_data.size()))
360  {}
361 
362  inline InnerIterator& operator++() { m_id++; return *this; }
363 
364  inline Scalar value() const { return m_data.value(m_id); }
365  inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }
366 
367  inline Index index() const { return m_data.index(m_id); }
368  inline Index row() const { return IsColVector ? index() : 0; }
369  inline Index col() const { return IsColVector ? 0 : index(); }
370 
371  inline operator bool() const { return (m_id < m_end); }
372 
373  protected:
374  const internal::CompressedStorage<Scalar,Index>& m_data;
375  Index m_id;
376  const Index m_end;
377 };
378 
379 template<typename Scalar, int _Options, typename _Index>
380 class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
381 {
382  public:
383  ReverseInnerIterator(const SparseVector& vec, Index outer=0)
384  : m_data(vec.m_data), m_id(static_cast<Index>(m_data.size())), m_start(0)
385  {
386  EIGEN_UNUSED_VARIABLE(outer);
387  eigen_assert(outer==0);
388  }
389 
390  ReverseInnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
391  : m_data(data), m_id(static_cast<Index>(m_data.size())), m_start(0)
392  {}
393 
394  inline ReverseInnerIterator& operator--() { m_id--; return *this; }
395 
396  inline Scalar value() const { return m_data.value(m_id-1); }
397  inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }
398 
399  inline Index index() const { return m_data.index(m_id-1); }
400  inline Index row() const { return IsColVector ? index() : 0; }
401  inline Index col() const { return IsColVector ? 0 : index(); }
402 
403  inline operator bool() const { return (m_id > m_start); }
404 
405  protected:
406  const internal::CompressedStorage<Scalar,Index>& m_data;
407  Index m_id;
408  const Index m_start;
409 };
410 
411 } // end namespace Eigen
412 
413 #endif // EIGEN_SPARSEVECTOR_H