#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H
namespace Eigen {

namespace internal {

template<typename _Scalar, int _Options, typename _StorageIndex>
struct traits<SparseVector<_Scalar, _Options, _StorageIndex> >
{
  typedef _Scalar Scalar;
  typedef _StorageIndex StorageIndex;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (_Options & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit) | CompressedAccessBit,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
// Sparse-Vector-Assignment kinds:
enum { SVA_Inner, SVA_Outer, SVA_RuntimeSwitch };

template< typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
                             : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
                             : SVA_Inner>
struct sparse_vector_assign_selector;

} // end namespace internal
/** \class SparseVector
  * \brief A sparse vector: the non-zero coefficients are stored as a sorted list of (index,value) pairs. */
template<typename _Scalar, int _Options, typename _StorageIndex>
class SparseVector
  : public SparseCompressedBase<SparseVector<_Scalar, _Options, _StorageIndex> >
{
    typedef SparseCompressedBase<SparseVector> Base;
    using Base::convert_index;
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

    typedef internal::CompressedStorage<Scalar,StorageIndex> Storage;
    enum { IsColVector = internal::traits<SparseVector>::IsColVector };
    enum { Options = _Options };
    EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
    EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
    EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
    EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
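    // Shape sketch (illustrative, not part of the original header): a default (column)
    // SparseVector of size n reports rows()==n, cols()==1, innerSize()==n and outerSize()==1;
    // a row-major SparseVector swaps the roles of rows() and cols().
    //
    //   SparseVector<double> v(5);
    //   // v.rows()==5, v.cols()==1, v.outerSize()==1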
    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return m_data.valuePtr(); }
    EIGEN_STRONG_INLINE Scalar* valuePtr() { return m_data.valuePtr(); }

    EIGEN_STRONG_INLINE const StorageIndex* innerIndexPtr() const { return m_data.indexPtr(); }
    EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() { return m_data.indexPtr(); }

    inline const StorageIndex* outerIndexPtr() const { return 0; }
    inline StorageIndex* outerIndexPtr() { return 0; }
    inline const StorageIndex* innerNonZeroPtr() const { return 0; }
    inline StorageIndex* innerNonZeroPtr() { return 0; }
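    // Usage sketch (illustrative, not part of the original header): a SparseVector is always
    // stored in compressed form, so only valuePtr()/innerIndexPtr() are backed by real buffers,
    // while outerIndexPtr()/innerNonZeroPtr() return null pointers.
    //
    //   SparseVector<double> v(8);
    //   v.insert(1) = 2.0;
    //   v.insert(5) = -1.0;
    //   for(Eigen::Index k = 0; k < v.nonZeros(); ++k)
    //     std::cout << v.innerIndexPtr()[k] << " -> " << v.valuePtr()[k] << "\n";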
    inline Storage& data() { return m_data; }
    inline const Storage& data() const { return m_data; }
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeff(IsColVector ? row : col);
    }

    inline Scalar coeff(Index i) const
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.at(StorageIndex(i));
    }
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeffRef(IsColVector ? row : col);
    }

    /** Like coeff(i), but inserts the coefficient (sorted insertion, O(nonZeros())) if it does not exist yet. */
    inline Scalar& coeffRef(Index i)
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.atWithInsertion(StorageIndex(i));
    }
    typedef typename Base::InnerIterator InnerIterator;
    typedef typename Base::ReverseInnerIterator ReverseInnerIterator;

    inline void setZero() { m_data.clear(); }

    /** \returns the number of non zeros */
    inline Index nonZeros() const { return m_data.size(); }
    inline void startVec(Index outer)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBack(inner);
    }

    inline Scalar& insertBack(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBackUnordered(inner);
    }

    inline Scalar& insertBackUnordered(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }
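    // Usage sketch (illustrative, not part of the original header): the low-level assembly
    // API appends entries directly to the compressed buffer; with insertBack() the indices
    // must be passed in strictly increasing order.
    //
    //   SparseVector<double> v(10);
    //   v.reserve(3);
    //   v.insertBack(2) = 1.0;
    //   v.insertBack(5) = -3.5;
    //   v.insertBack(9) = 2.0;
    //   v.finalize();   // no-op here, kept for symmetry with SparseMatrix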
    inline Scalar& insert(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));

      Index inner = IsColVector ? row : col;
      Index outer = IsColVector ? col : row;
      EIGEN_ONLY_USED_FOR_DEBUG(outer);
      eigen_assert(outer==0);
      return insert(inner);
    }

    Scalar& insert(Index i)
    {
      eigen_assert(i>=0 && i<m_size);

      Index startId = 0;
      Index p = Index(m_data.size()) - 1;
      m_data.resize(p+2,1);

      // shift existing entries with a larger index by one position to keep the indices sorted
      while ( (p >= startId) && (m_data.index(p) > i) )
      {
        m_data.index(p+1) = m_data.index(p);
        m_data.value(p+1) = m_data.value(p);
        --p;
      }
      m_data.index(p+1) = convert_index(i);
      m_data.value(p+1) = 0;
      return m_data.value(p+1);
    }
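    // Note (illustrative, not part of the original header): insert(i) performs a sorted
    // insertion, i.e. O(nonZeros()) per call in the worst case. For random-order fills
    // coeffRef(i) offers the same insert-if-missing semantics; for already sorted fills
    // prefer insertBack(i).
    //
    //   SparseVector<double> v(100);
    //   v.coeffRef(42) += 1.0;   // inserts the entry if it does not exist yet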
    inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

    inline void finalize() {}

    /** Removes all coefficients that are much smaller than \a reference, using precision \a epsilon. */
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      m_data.prune(reference,epsilon);
    }
    /** Resizes the sparse vector to \a rows x \a cols (the outer dimension must be 1). */
    void resize(Index rows, Index cols)
    {
      eigen_assert((IsColVector ? cols : rows)==1 && "Outer dimension must equal 1");
      resize(IsColVector ? rows : cols);
    }

    /** Resizes the sparse vector to \a newSize and removes all entries. */
    void resize(Index newSize)
    {
      m_size = newSize;
      m_data.clear();
    }

    /** Resizes the sparse vector to \a newSize while keeping the old values. */
    void conservativeResize(Index newSize)
    {
      if (newSize < m_size)
      {
        Index i = 0;
        while (i<m_data.size() && m_data.index(i)<newSize) ++i;
        m_data.resize(i);
      }
      m_size = newSize;
    }
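    // Usage sketch (illustrative, not part of the original header): resize(n) drops every
    // stored entry, whereas conservativeResize(n) keeps the entries whose index is still < n.
    //
    //   SparseVector<double> v(10);
    //   v.insert(2) = 1.0; v.insert(8) = 2.0;
    //   v.conservativeResize(5);   // keeps (2 -> 1.0), drops (8 -> 2.0)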
    void resizeNonZeros(Index size) { m_data.resize(size); }
    inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }

    explicit inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }

    inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }
    template<typename OtherDerived>
    inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
      : m_size(0)
    {
      #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
      #endif
      check_template_parameters();
      *this = other.derived();
    }

    inline SparseVector(const SparseVector& other)
      : Base(other), m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }
    /** Swaps the values of \c *this and \a other (shallow swap of the internal buffers). */
    inline void swap(SparseVector& other)
    {
      std::swap(m_size, other.m_size);
      m_data.swap(other.m_data);
    }

    template<int OtherOptions>
    inline void swap(SparseMatrix<Scalar,OtherOptions,StorageIndex>& other)
    {
      eigen_assert(other.outerSize()==1);
      std::swap(m_size, other.m_innerSize);
      m_data.swap(other.m_data);
    }
    inline SparseVector& operator=(const SparseVector& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.size());
        m_data = other.m_data;
      }
      return *this;
    }
    template<typename OtherDerived>
    inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      SparseVector tmp(other.size());
      internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
      this->swap(tmp);
      return *this;
    }
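    // Usage sketch (illustrative, not part of the original header): any vector-shaped sparse
    // expression can be assigned; the assign selector below decides how to iterate it.
    //
    //   SparseMatrix<double> A(100,100);
    //   SparseVector<double> v;
    //   v = A.col(3);           // copies the non-zeros of one column
    //   v = A.col(3) * 2.0;     // any vector-shaped sparse expression works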
    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    {
      return Base::operator=(product);
    }
    #endif
    friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
    {
      for (Index i=0; i<m.nonZeros(); ++i)
        s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
      s << std::endl;
      return s;
    }
    /** \internal \deprecated use setZero() and reserve() */
    EIGEN_DEPRECATED void startFill(Index reserve)
    {
      setZero();
      m_data.reserve(reserve);
    }

    /** \internal \deprecated use insertBack(Index,Index) */
    EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fill(IsColVector ? r : c);
    }

    /** \internal \deprecated use insertBack(Index) */
    EIGEN_DEPRECATED Scalar& fill(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    /** \internal \deprecated use insert(Index,Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fillrand(IsColVector ? r : c);
    }

    /** \internal \deprecated use insert(Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index i)
    {
      return insert(i);
    }

    /** \internal \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() {}

    // Kept for backward compatibility.
    /** \internal \deprecated use data() */
    EIGEN_DEPRECATED Storage& _data() { return m_data; }
    /** \internal \deprecated use data() */
    EIGEN_DEPRECATED const Storage& _data() const { return m_data; }
#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
#     include EIGEN_SPARSEVECTOR_PLUGIN
#   endif

  protected:

    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }

    Storage m_data;
    Index m_size;
};
namespace internal {

template<typename _Scalar, int _Options, typename _Index>
struct evaluator<SparseVector<_Scalar,_Options,_Index> >
  : evaluator_base<SparseVector<_Scalar,_Options,_Index> >
{
  typedef SparseVector<_Scalar,_Options,_Index> SparseVectorType;

  enum { CoeffReadCost = NumTraits<_Scalar>::ReadCost, Flags = SparseVectorType::Flags };

  explicit evaluator(const SparseVectorType &mat) : m_matrix(&mat)
  { EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost); }

  inline Index nonZerosEstimate() const { return m_matrix->nonZeros(); }

  const SparseVectorType *m_matrix;
};
template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.innerSize()==src.size());
    typedef internal::evaluator<Src> SrcEvaluatorType;
    SrcEvaluatorType srcEval(src);
    for(typename SrcEvaluatorType::InnerIterator it(srcEval, 0); it; ++it)
      dst.insert(it.index()) = it.value();
  }
};
template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.outerSize()==src.size());
    typedef internal::evaluator<Src> SrcEvaluatorType;
    SrcEvaluatorType srcEval(src);
    for(Index i=0; i<src.size(); ++i)
    {
      typename SrcEvaluatorType::InnerIterator it(srcEval, i);
      if(it)
        dst.insert(i) = it.value();
    }
  }
};
template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
  static void run(Dest& dst, const Src& src) {
    if(src.outerSize()==1)  sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
    else                    sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
  }
};
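// Dispatch sketch (illustrative, not part of the original header): assigning a column of a
// column-major matrix, e.g. `v = A.col(3)`, takes the SVA_Inner path (one inner iterator over
// the whole vector), while `v = A.row(3)` on a column-major A takes the SVA_Outer path (one
// iterator per outer index, each yielding at most one entry). Expressions whose vector shape
// is only known at runtime go through SVA_RuntimeSwitch, which inspects outerSize().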
} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSEVECTOR_H