#ifndef EIGEN_SPARSEMATRIX_H
#define EIGEN_SPARSEMATRIX_H

/** \class SparseMatrix
  *
  * \brief The main sparse matrix class.
  *
  * This class implements a sparse matrix using the compressed column storage (CCS)
  * scheme by default, or compressed row storage (CRS) when the RowMajor option is set.
  * The data are stored in three arrays: the nonzero values, their inner (row or column)
  * indices, and one index per outer vector giving the position of the first nonzero of
  * that vector within the first two arrays.
  *
  * \param _Scalar the scalar type, i.e. the type of the coefficients
  * \param _Options either ColMajor (the default) or RowMajor
  * \param _Index the type used to store inner and outer indices
  */

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
}

template<typename _Scalar, int _Options, typename _Index>
class SparseMatrix
  : public SparseMatrixBase<SparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseMatrix)
    using Base::operator=;
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, -=)

    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    typedef CompressedStorage<Scalar,Index> Storage;
    enum {
      Options = _Options
    };

  protected:

    typedef SparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?RowMajorBit:0)> TransposedSparseMatrix;

    Index m_outerSize;                      // number of outer vectors (columns if column-major, rows otherwise)
    Index m_innerSize;                      // size of each inner vector
    Index* m_outerIndex;                    // array of m_outerSize+1 positions into m_data
    CompressedStorage<Scalar,Index> m_data; // the nonzero values and their inner indices
  public:

    /** \returns the number of rows of the matrix */
    inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
    /** \returns the number of columns of the matrix */
    inline Index cols() const { return IsRowMajor ? m_innerSize : m_outerSize; }

    /** \returns the size of the inner dimension (rows if column-major, columns if row-major) */
    inline Index innerSize() const { return m_innerSize; }
    /** \returns the number of outer vectors */
    inline Index outerSize() const { return m_outerSize; }
    /** \returns the number of stored nonzeros in the outer vector \a j */
    inline Index innerNonZeros(Index j) const { return m_outerIndex[j+1]-m_outerIndex[j]; }

    /** \returns a pointer to the array of nonzero values (nonZeros() entries) */
    inline const Scalar* _valuePtr() const { return &m_data.value(0); }
    inline Scalar* _valuePtr() { return &m_data.value(0); }

    /** \returns a pointer to the array of inner indices (nonZeros() entries) */
    inline const Index* _innerIndexPtr() const { return &m_data.index(0); }
    inline Index* _innerIndexPtr() { return &m_data.index(0); }

    /** \returns a pointer to the array of outer index positions (outerSize()+1 entries) */
    inline const Index* _outerIndexPtr() const { return m_outerIndex; }
    inline Index* _outerIndexPtr() { return m_outerIndex; }

    inline Storage& data() { return m_data; }
    inline const Storage& data() const { return m_data; }
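
    /* Illustrative sketch of how the three raw arrays exposed above describe the
     * matrix, assuming the default column-major order, the default int index type,
     * and a matrix that has already been filled and finalized:
     *
     * \code
     * SparseMatrix<double> A(4,4);
     * // ... fill A ...
     * const int*    outer = A._outerIndexPtr(); // outerSize()+1 entries
     * const int*    inner = A._innerIndexPtr(); // nonZeros() entries
     * const double* vals  = A._valuePtr();      // nonZeros() entries
     * for (int j = 0; j < A.outerSize(); ++j)       // for each column j
     *   for (int p = outer[j]; p < outer[j+1]; ++p) // its stored entries
     *     std::cout << "A(" << inner[p] << "," << j << ") = " << vals[p] << "\n";
     * \endcode
     */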

    /** \returns the value of the matrix at position (\a row, \a col),
      * or 0 if the element is not stored */
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data.atInRange(m_outerIndex[outer], m_outerIndex[outer+1], inner);
    }

    /** \returns a non-const reference to the value at position (\a row, \a col).
      * The element must already be stored; use insert() to create a new element. */
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index start = m_outerIndex[outer];
      Index end = m_outerIndex[outer+1];
      eigen_assert(end>=start && "you probably called coeffRef on a non finalized matrix");
      eigen_assert(end>start && "coeffRef cannot be called on a zero coefficient");
      const Index p = m_data.searchLowerIndex(start,end-1,inner);
      eigen_assert((p<end) && (m_data.index(p)==inner) && "coeffRef cannot be called on a zero coefficient");
      return m_data.value(p);
    }
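
    /* Usage sketch: coeff() performs a read-only lookup and returns 0 for entries
     * that are not stored, while coeffRef() asserts that the entry already exists;
     * new entries must be created with insert() (or the startVec()/insertBack() API
     * below).
     *
     * \code
     * SparseMatrix<double> A(3,3);
     * A.insert(1,2) = 4.0;        // creates the entry
     * A.finalize();
     * double x = A.coeff(0,0);    // 0.0: not stored, but a valid read
     * A.coeffRef(1,2) += 1.0;     // fine: the entry exists
     * // A.coeffRef(0,0) += 1.0;  // would trigger an assertion: entry not stored
     * \endcode
     */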

  public:

    class InnerIterator;

    /** Removes all nonzero coefficients */
    inline void setZero()
    {
      m_data.clear();
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
    }

    /** \returns the number of nonzero coefficients */
    inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }

    /** Preallocates room for \a reserveSize nonzeros */
    inline void reserve(Index reserveSize)
    {
      m_data.reserve(reserveSize);
    }

    //--- low level, purely coherent filling ---

    /** \returns a reference to the nonzero coefficient at position (\a row, \a col),
      * assuming that:
      *   - the coefficient does not already exist, and
      *   - it is the last one of its inner vector according to the storage order.
      *
      * Before filling a given inner vector you must call startVec(Index), and after
      * the whole insertion session you must call finalize().
      */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** Same as insertBack(), but with the (outer, inner) indices already resolved */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(size_t(m_outerIndex[outer+1]) == m_data.size() && "Invalid ordered insertion (invalid outer index)");
      eigen_assert( (m_outerIndex[outer+1]-m_outerIndex[outer]==0 || m_data.index(m_data.size()-1)<inner) && "Invalid ordered insertion (invalid inner index)");
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(0, inner);
      return m_data.value(p);
    }

    /** Like insertBackByOuterInner(), but does not check that the inner indices are sorted */
    inline Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];
      m_data.append(0, inner);
      return m_data.value(p);
    }

    /** Starts the insertion of the inner vector \a outer; the inner vectors must be
      * started in increasing order of their outer index */
    inline void startVec(Index outer)
    {
      eigen_assert(m_outerIndex[outer]==int(m_data.size()) && "You must call startVec for each inner vector sequentially");
      eigen_assert(m_outerIndex[outer+1]==0 && "You must call startVec for each inner vector sequentially");
      m_outerIndex[outer+1] = m_outerIndex[outer];
    }
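
    /* Usage sketch of the low-level sequential filling API above (illustrative;
     * assumes a column-major matrix, so the outer index is the column):
     *
     * \code
     * SparseMatrix<double> A(rows, cols);
     * A.reserve(estimatedNonZeros);            // optional, but avoids reallocations
     * for (int j = 0; j < cols; ++j)           // outer vectors in increasing order
     * {
     *   A.startVec(j);
     *   for (...)                              // rows i of column j, in increasing order
     *     A.insertBackByOuterInner(j, i) = value_ij;
     * }
     * A.finalize();                            // mandatory after the insertion session
     * \endcode
     */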

    //--- random insertion ---

    /** \returns a reference to a newly inserted zero coefficient at position
      * (\a row, \a col). The coefficient must not already exist.
      *
      * Unlike insertBack(), the entries can be inserted in any order, but inserting
      * into an inner vector that is not the last nonempty one has to shift all the
      * subsequent nonzeros, so this is significantly more expensive. After an
      * insertion session you must call finalize().
      */
    EIGEN_DONT_INLINE Scalar& insert(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index previousOuter = outer;
      if (m_outerIndex[outer+1]==0)
      {
        // we start a new inner vector: make all preceding empty outer entries
        // point to the current end of the data
        while (previousOuter>=0 && m_outerIndex[previousOuter]==0)
        {
          m_outerIndex[previousOuter] = static_cast<Index>(m_data.size());
          --previousOuter;
        }
        m_outerIndex[outer+1] = m_outerIndex[outer];
      }

      // true if the new element can simply be appended at the end of m_data,
      // i.e., the target inner vector is the last nonempty one
      bool isLastVec = (!(previousOuter==-1 && m_data.size()!=0))
                    && (size_t(m_outerIndex[outer+1]) == m_data.size());

      size_t startId = m_outerIndex[outer];
      // the new element will be stored at position p
      size_t p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];

      float reallocRatio = 1;
      if (m_data.allocatedSize()<=m_data.size())
      {
        // we need to reallocate the data
        if (m_data.size()==0)
        {
          m_data.reserve(32);
        }
        else
        {
          // estimate the final number of nonzeros from the current filling ratio
          // and grow by that ratio, clamped to a reasonable range to avoid both
          // too many reallocations and excessive memory consumption
          float nnzEstimate = float(m_outerIndex[outer])*float(m_outerSize)/float(outer+1);
          reallocRatio = (nnzEstimate-float(m_data.size()))/float(m_data.size());
          reallocRatio = std::min(std::max(reallocRatio,1.5f),8.f);
        }
      }
      m_data.resize(m_data.size()+1,reallocRatio);

      if (!isLastVec)
      {
        if (previousOuter==-1)
        {
          // all existing nonzeros belong to inner vectors located after the target
          // one: shift them all one position to the right and insert at the front
          for (Index k=0; k<=(outer+1); ++k)
            m_outerIndex[k] = 0;
          Index k=outer+1;
          while(m_outerIndex[k]==0)
            m_outerIndex[k++] = 1;
          while (k<=m_outerSize && m_outerIndex[k]!=0)
            m_outerIndex[k++]++;
          p = 0;
          --k;
          k = m_outerIndex[k]-1;
          while (k>0)
          {
            m_data.index(k) = m_data.index(k-1);
            m_data.value(k) = m_data.value(k-1);
            k--;
          }
        }
        else
        {
          // the target inner vector is not the last nonempty one: update the outer
          // indices of the following vectors and shift their data one position right
          Index j = outer+2;
          while (j<=m_outerSize && m_outerIndex[j]!=0)
            m_outerIndex[j++]++;
          --j;
          Index k = m_outerIndex[j]-1;
          while (k>=Index(p))
          {
            m_data.index(k) = m_data.index(k-1);
            m_data.value(k) = m_data.value(k-1);
            k--;
          }
        }
      }

      // finally, move the new element to its sorted position within its inner vector
      while ( (p > startId) && (m_data.index(p-1) > inner) )
      {
        m_data.index(p) = m_data.index(p-1);
        m_data.value(p) = m_data.value(p-1);
        --p;
      }

      m_data.index(p) = inner;
      return (m_data.value(p) = 0);
    }
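
    /* Typical use of the random insertion API above (a sketch; the traversal order
     * of the (i, j, value) triplets is arbitrary, which is what distinguishes
     * insert() from the cheaper insertBack() path):
     *
     * \code
     * SparseMatrix<double> A(rows, cols);
     * A.reserve(estimatedNonZeros);   // strongly recommended to limit reallocations
     * for (each entry (i, j, value))  // in any order
     *   A.insert(i, j) = value;
     * A.finalize();                   // mandatory before using the matrix
     * \endcode
     */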

    /** Must be called after an insertion session (insert(), or startVec()/insertBack());
      * it fills the outer index entries of the trailing empty inner vectors so that
      * every outer vector has a valid [start, end) range. */
    inline void finalize()
    {
      Index size = static_cast<Index>(m_data.size());
      Index i = m_outerSize;
      // skip the trailing outer index entries that are still zero (unstarted vectors)...
      while (i>=0 && m_outerIndex[i]==0)
        --i;
      ++i;
      // ...and set them to the end of the data array
      while (i<=m_outerSize)
      {
        m_outerIndex[i] = size;
        ++i;
      }
    }

    /** Removes all nonzeros that are much smaller than \a reference,
      * under the tolerance \a epsilon */
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      prune(default_prunning_func(reference,epsilon));
    }

    /** Removes all nonzeros for which the predicate \a keep returns false.
      * The functor type \a KeepFunc must implement
      * `bool operator() (const Index& row, const Index& col, const Scalar& value) const`.
      * \sa prune(Scalar,RealScalar)
      */
    template<typename KeepFunc>
    void prune(const KeepFunc& keep = KeepFunc())
    {
      Index k = 0;
      for(Index j=0; j<m_outerSize; ++j)
      {
        Index previousStart = m_outerIndex[j];
        m_outerIndex[j] = k;
        Index end = m_outerIndex[j+1];
        for(Index i=previousStart; i<end; ++i)
        {
          if(keep(IsRowMajor?j:m_data.index(i), IsRowMajor?m_data.index(i):j, m_data.value(i)))
          {
            m_data.value(k) = m_data.value(i);
            m_data.index(k) = m_data.index(i);
            ++k;
          }
        }
      }
      m_outerIndex[m_outerSize] = k;
      m_data.resize(k,0);
    }
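
    /* Illustrative sketch of a user-defined predicate for prune(): keep only the
     * entries whose magnitude exceeds a given threshold (the functor name and the
     * threshold are made up for the example; A is a SparseMatrix<double>).
     *
     * \code
     * struct KeepAboveThreshold {
     *   double thresh;
     *   KeepAboveThreshold(double t) : thresh(t) {}
     *   bool operator() (const int& row, const int& col, const double& value) const
     *   { return std::abs(value) > thresh; }
     * };
     *
     * A.prune(KeepAboveThreshold(1e-12));
     * \endcode
     */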

    /** Resizes the matrix to \a rows by \a cols and removes all existing nonzeros */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      m_data.clear();
      if (m_outerSize != outerSize || m_outerSize==0)
      {
        delete[] m_outerIndex;
        m_outerIndex = new Index [outerSize+1];
        m_outerSize = outerSize;
      }
      memset(m_outerIndex, 0, (m_outerSize+1)*sizeof(Index));
    }

    /** \internal Resizes the nonzero storage to \a size elements */
    void resizeNonZeros(Index size)
    {
      m_data.resize(size);
    }

    /** Default constructor yielding an empty 0x0 matrix */
    inline SparseMatrix()
      : m_outerSize(-1), m_innerSize(0), m_outerIndex(0)
    {
      resize(0, 0);
    }

    /** Constructs a \a rows by \a cols empty matrix */
    inline SparseMatrix(Index rows, Index cols)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0)
    {
      resize(rows, cols);
    }

    /** Constructs a sparse matrix from the sparse expression \a other */
    template<typename OtherDerived>
    inline SparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_outerSize(0), m_innerSize(0), m_outerIndex(0)
    {
      *this = other.derived();
    }

    /** Copy constructor */
    inline SparseMatrix(const SparseMatrix& other)
      : Base(), m_outerSize(0), m_innerSize(0), m_outerIndex(0)
    {
      *this = other.derived();
    }

    /** Swaps the content of \c *this and \a other */
    inline void swap(SparseMatrix& other)
    {
      std::swap(m_outerIndex, other.m_outerIndex);
      std::swap(m_innerSize, other.m_innerSize);
      std::swap(m_outerSize, other.m_outerSize);
      m_data.swap(other.m_data);
    }

    inline SparseMatrix& operator=(const SparseMatrix& other)
    {
      if (other.isRValue())
      {
        // other is a temporary: steal its storage instead of copying it
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.rows(), other.cols());
        memcpy(m_outerIndex, other.m_outerIndex, (m_outerSize+1)*sizeof(Index));
        m_data = other.m_data;
      }
      return *this;
    }

#ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseMatrix& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    {
      return Base::operator=(product);
    }

    template<typename OtherDerived>
    EIGEN_STRONG_INLINE SparseMatrix& operator=(const ReturnByValue<OtherDerived>& func)
    {
      return Base::operator=(func);
    }
#endif

    template<typename OtherDerived>
    EIGEN_DONT_INLINE SparseMatrix& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      const bool needToTranspose = (Flags & RowMajorBit) != (OtherDerived::Flags & RowMajorBit);
      if (needToTranspose)
      {
        // two passes algorithm:
        //  1 - compute the number of coeffs per destination inner vector,
        //  2 - do the actual copy.
        // Since each coeff of the rhs has to be evaluated twice, evaluate it into a
        // temporary if that is cheaper (this is what the nested<.,2> typedef decides).
        typedef typename internal::nested<OtherDerived,2>::type OtherCopy;
        typedef typename internal::remove_all<OtherCopy>::type _OtherCopy;
        OtherCopy otherCopy(other.derived());

        resize(other.rows(), other.cols());
        Eigen::Map<Matrix<Index, Dynamic, 1> > (m_outerIndex,outerSize()).setZero();

        // pass 1: count the nonzeros per destination inner vector
        for (Index j=0; j<otherCopy.outerSize(); ++j)
          for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
            ++m_outerIndex[it.index()];

        // prefix sum to turn the counts into start positions
        Index count = 0;
        VectorXi positions(outerSize());
        for (Index j=0; j<outerSize(); ++j)
        {
          Index tmp = m_outerIndex[j];
          m_outerIndex[j] = count;
          positions[j] = count;
          count += tmp;
        }
        m_outerIndex[outerSize()] = count;

        m_data.resize(count);

        // pass 2: copy the values and inner indices
        for (Index j=0; j<otherCopy.outerSize(); ++j)
        {
          for (typename _OtherCopy::InnerIterator it(otherCopy, j); it; ++it)
          {
            Index pos = positions[it.index()]++;
            m_data.index(pos) = j;
            m_data.value(pos) = it.value();
          }
        }
        return *this;
      }
      else
      {
        // same storage order: use the default assignment
        return SparseMatrixBase<SparseMatrix>::operator=(other.derived());
      }
    }
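
    /* The assignment above transparently handles a change of storage order, which is
     * when the two-pass transposition path is taken. A minimal sketch:
     *
     * \code
     * SparseMatrix<double>          C;   // column-major (default)
     * SparseMatrix<double,RowMajor> R;
     * // ... fill and finalize R ...
     * C = R;   // converts from row-major to column-major storage
     * \endcode
     */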

    friend std::ostream & operator << (std::ostream & s, const SparseMatrix& m)
    {
      EIGEN_DBG_SPARSE(
        s << "Nonzero entries:\n";
        for (Index i=0; i<m.nonZeros(); ++i)
        {
          s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
        }
        s << std::endl;
        s << std::endl;
        s << "Column pointers:\n";
        for (Index i=0; i<m.outerSize(); ++i)
        {
          s << m.m_outerIndex[i] << " ";
        }
        s << " $" << std::endl;
        s << std::endl;
      );
      s << static_cast<const SparseMatrixBase<SparseMatrix>&>(m);
      return s;
    }

    inline ~SparseMatrix()
    {
      delete[] m_outerIndex;
    }

    /** \returns the sum of all the coefficients */
    Scalar sum() const;

  public:

    /** \deprecated use setZero() and reserve().
      * Starts a (deprecated) filling session with room for \a reserveSize nonzeros */
    EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
    {
      setZero();
      m_data.reserve(reserveSize);
    }

    /** \deprecated use insert().
      * Inserts the coefficient (\a row, \a col); like insert(), the entries may be
      * provided in any order */
    EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
    {
      return insert(row,col);
    }

    /** \deprecated use insertBack().
      * Inserts the coefficient (\a row, \a col); the entries must be provided in a
      * sorted order matching the storage order */
    EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      if (m_outerIndex[outer+1]==0)
      {
        // we start a new inner vector
        Index i = outer;
        while (i>=0 && m_outerIndex[i]==0)
        {
          m_outerIndex[i] = m_data.size();
          --i;
        }
        m_outerIndex[outer+1] = m_outerIndex[outer];
      }
      else
      {
        eigen_assert(m_data.index(m_data.size()-1)<inner && "wrong sorted insertion");
      }

      assert(size_t(m_outerIndex[outer+1]) == m_data.size());
      Index p = m_outerIndex[outer+1];
      ++m_outerIndex[outer+1];

      m_data.append(0, inner);
      return m_data.value(p);
    }

    /** \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() { finalize(); }

  private:
    struct default_prunning_func {
      default_prunning_func(Scalar ref, RealScalar eps) : reference(ref), epsilon(eps) {}
      inline bool operator() (const Index&, const Index&, const Scalar& value) const
      {
        return !internal::isMuchSmallerThan(value, reference, epsilon);
      }
      Scalar reference;
      RealScalar epsilon;
    };
};

template<typename Scalar, int _Options, typename _Index>
class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseMatrix& mat, Index outer)
      : m_values(mat._valuePtr()), m_indices(mat._innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer]), m_end(mat.m_outerIndex[outer+1])
    {}

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline Index index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const Index* m_indices;
    const Index m_outer;
    Index m_id;
    const Index m_end;
};
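
/* Typical traversal of all the stored nonzeros with the InnerIterator defined above
 * (a sketch; for a row-major matrix the outer loop runs over the rows instead):
 *
 * \code
 * SparseMatrix<double> A( ... );
 * for (int j = 0; j < A.outerSize(); ++j)                        // for each column
 *   for (SparseMatrix<double>::InnerIterator it(A, j); it; ++it) // each stored entry
 *     std::cout << "A(" << it.row() << "," << it.col() << ") = " << it.value() << "\n";
 * \endcode
 */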

#endif // EIGEN_SPARSEMATRIX_H