CompressedStorage.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// Eigen is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 3 of the License, or (at your option) any later version.
//
// Alternatively, you can redistribute it and/or
// modify it under the terms of the GNU General Public License as
// published by the Free Software Foundation; either version 2 of
// the License, or (at your option) any later version.
//
// Eigen is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
// FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License or the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License and a copy of the GNU General Public License along with
// Eigen. If not, see <http://www.gnu.org/licenses/>.

#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {

/** \internal
  * Stores a sparse set of values as a list of values and a list of indices,
  * kept sorted by increasing index.
  */
template<typename _Scalar,typename _Index>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _Index Index;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    CompressedStorage(size_t size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      // smart_copy degrades to memcpy for POD scalars but remains correct
      // for scalar types with nontrivial copy semantics
      internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
      internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }

    /** Ensures capacity for \a size additional elements beyond the current size. */
    void reserve(size_t size)
    {
      size_t newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    /** Shrinks the allocated capacity to the current size. */
    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

    /** Resizes to \a size elements; \a reserveSizeFactor over-allocates
      * proportionally (e.g. append() passes 1, doubling the requested size)
      * so that repeated growth has amortized constant cost. */
    void resize(size_t size, float reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
        reallocate(size + size_t(reserveSizeFactor*size));
      m_size = size;
    }

    /** Appends value \a v with index \a i at the end of the storage. */
    void append(const Scalar& v, Index i)
    {
      Index id = static_cast<Index>(m_size);
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = i;
    }
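
    // Illustrative sketch (not part of the original header): building a
    // compressed array incrementally. The search helpers below assume the
    // indices are appended in increasing order, which is how Eigen's sparse
    // classes use this container.
    //
    //   CompressedStorage<double,int> data;
    //   data.reserve(2);          // room for 2 more elements
    //   data.append(1.5, 0);      // value 1.5 at index 0
    //   data.append(2.5, 4);      // value 2.5 at index 4
    //   assert(data.size() == 2 && data.index(1) == 4);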

    inline size_t size() const { return m_size; }
    inline size_t allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    inline Scalar& value(size_t i) { return m_values[i]; }
    inline const Scalar& value(size_t i) const { return m_values[i]; }

    inline Index& index(size_t i) { return m_indices[i]; }
    inline const Index& index(size_t i) const { return m_indices[i]; }

    /** Wraps the user-provided buffers \a indices and \a values.
      * \warning the destructor unconditionally calls delete[], so the
      * returned object adopts ownership of both arrays. */
    static CompressedStorage Map(Index* indices, Scalar* values, size_t size)
    {
      CompressedStorage res;
      res.m_indices = indices;
      res.m_values = values;
      res.m_allocatedSize = res.m_size = size;
      return res;
    }
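
    // Illustrative sketch (not part of the original header): Map() adopts the
    // given buffers rather than copying them, and the destructor delete[]s
    // them, so they must come from new[] and ownership transfers:
    //
    //   int*    idx = new int[2];    idx[0] = 0;   idx[1] = 3;
    //   double* val = new double[2]; val[0] = 1.0; val[1] = 2.0;
    //   CompressedStorage<double,int> mapped
    //       = CompressedStorage<double,int>::Map(idx, val, 2);
    //   // idx and val are released by mapped's destructor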

    /** \returns the largest \c k such that for all \c j in [0;k) index[\c j]\<\c key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[\c j]\<\c key */
    inline Index searchLowerIndex(size_t start, size_t end, Index key) const
    {
      while(end>start)
      {
        size_t mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return static_cast<Index>(start);
    }
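
    // Worked example (illustrative, not part of the original header): the
    // binary search returns the first position whose stored index is >= key,
    // i.e. lower-bound semantics. For stored indices {1, 4, 7}:
    //
    //   searchLowerIndex(0);   // -> 0  (all stored indices are >= 0)
    //   searchLowerIndex(4);   // -> 1  (exact hit)
    //   searchLowerIndex(5);   // -> 2  (insertion point between 4 and 7)
    //   searchLowerIndex(9);   // -> 3  (== size(), past the end)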

    /** \returns the stored value at index \a key
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
    inline Scalar at(Index key, Scalar defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(size_t start, size_t end, Index key, Scalar defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue; // was Scalar(0), which ignored the caller's default
      else if (end>start && key==m_indices[end-1])
        return m_values[end-1];
      // ^^ optimization: let's first check if it is the last coefficient
      // (very common in high level algorithms)
      const size_t id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** \returns a reference to the value at index \a key
      * If the value does not exist, then the value \a defaultValue is inserted
      * such that the keys are sorted. */
    inline Scalar& atWithInsertion(Index key, Scalar defaultValue = Scalar(0))
    {
      size_t id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        resize(m_size+1,1);
        for (size_t j=m_size-1; j>id; --j)
        {
          m_indices[j] = m_indices[j-1];
          m_values[j] = m_values[j-1];
        }
        m_indices[id] = key;
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
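
    // Illustrative sketch (not part of the original header): unlike at(),
    // atWithInsertion() creates a missing entry, shifting the tail to keep
    // the indices sorted (O(n) per insertion in the worst case):
    //
    //   CompressedStorage<double,int> data;
    //   data.atWithInsertion(4) = 2.0;   // indices: {4}
    //   data.atWithInsertion(1) = 1.0;   // indices: {1,4}, entry 4 shifted
    //   assert(data.at(1) == 1.0 && data.at(4) == 2.0);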

    /** Removes, in place, all values that are much smaller than \a reference
      * (as decided by internal::isMuchSmallerThan with precision \a epsilon). */
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      size_t k = 0;
      size_t n = size();
      for (size_t i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          // keep this coefficient: compact it towards the front
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }
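
    // Illustrative sketch (not part of the original header): pruning drops
    // entries that are negligible relative to the reference value, including
    // explicitly stored zeros:
    //
    //   // data holds values {1e-20, 0.5, 3.0} at indices {0, 1, 2}
    //   data.prune(1.0);   // drops 1e-20, keeps {0.5, 3.0}
    //   assert(data.size() == 2);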

  protected:

    inline void reallocate(size_t size)
    {
      Scalar* newValues = new Scalar[size];
      Index* newIndices = new Index[size];
      size_t copySize = (std::min)(size, m_size);
      // copy
      internal::smart_copy(m_values, m_values+copySize, newValues);
      internal::smart_copy(m_indices, m_indices+copySize, newIndices);
      // delete old stuff
      delete[] m_values;
      delete[] m_indices;
      m_values = newValues;
      m_indices = newIndices;
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    Index* m_indices;
    size_t m_size;
    size_t m_allocatedSize;

};
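
// End-to-end sketch (illustrative, not part of the original header),
// exercising the container roughly as Eigen's sparse classes do:
//
//   typedef Eigen::internal::CompressedStorage<double,int> Storage;
//   Storage s;
//   for (int k = 0; k < 4; ++k)
//     s.append(double(k+1), 2*k);      // values 1..4 at indices 0,2,4,6
//   double v = s.at(4);                // 3.0 (binary search on indices)
//   s.atWithInsertion(3) = 0.5;        // inserted between indices 2 and 4
//   s.squeeze();                       // shrink capacity to size()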

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H