#ifndef EIGEN_INCOMPLETE_LUT_H
#define EIGEN_INCOMPLETE_LUT_H
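
// IncompleteLUT: incomplete LU factorization with dual-threshold dropping (ILUT),
// in the spirit of Y. Saad, "ILUT: a dual threshold incomplete LU factorization" (1994):
// within each row, entries below a relative drop tolerance are discarded and only a
// bounded number of the largest remaining entries of L and U are kept.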
/** \internal
  * Quick-sort split: on exit the entries of \a row are permuted so that the \a ncut
  * largest-magnitude values occupy the first \a ncut positions; \a ind is permuted identically.
  */
template <typename VectorV, typename VectorI>
Index QuickSplit(VectorV &row, VectorI &ind, Index ncut)
{
  typedef typename VectorV::RealScalar RealScalar;
  using std::swap;
  using std::abs;
  Index mid;
  Index n = row.size();            // length of the vector
  Index first = 0, last = n - 1;

  ncut--;                          // convert to zero-based index
  if (ncut < first || ncut > last) return 0;

  do {
    mid = first;
    RealScalar abskey = abs(row(mid));
    for (Index j = first + 1; j <= last; j++) {
      if ( abs(row(j)) > abskey) {
        ++mid;
        swap(row(mid), row(j));
        swap(ind(mid), ind(j));
      }
    }
    // move the pivot to its final position
    swap(row(mid), row(first));
    swap(ind(mid), ind(first));

    if (mid > ncut)      last  = mid - 1;
    else if (mid < ncut) first = mid + 1;
  } while (mid != ncut);

  return 0;
}
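
// Illustrative example (hypothetical values): with row = [1, -7, 3, 9] and
// ind = [0, 1, 2, 3], QuickSplit(row, ind, 2) permutes both vectors so that the
// two largest-magnitude values (9 and -7) end up in row(0) and row(1), with ind
// rearranged accordingly; entries within each half are left in no particular order.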
template <typename _Scalar, typename _StorageIndex = int>
class IncompleteLUT : public SparseSolverBase<IncompleteLUT<_Scalar, _StorageIndex> >
{
  protected:
    typedef SparseSolverBase<IncompleteLUT> Base;
    using Base::m_isInitialized;
  public:
    typedef _Scalar Scalar;
    typedef _StorageIndex StorageIndex;
    typedef typename NumTraits<Scalar>::Real RealScalar;
    typedef Matrix<Scalar,Dynamic,1> Vector;
    typedef Matrix<StorageIndex,Dynamic,1> VectorI;
    typedef SparseMatrix<Scalar,RowMajor,StorageIndex> FactorType;

  public:
    IncompleteLUT()
      : m_droptol(NumTraits<Scalar>::dummy_precision()), m_fillfactor(10),
        m_analysisIsOk(false), m_factorizationIsOk(false)
    {}
    template<typename MatrixType>
    explicit IncompleteLUT(const MatrixType& mat,
                           const RealScalar& droptol = NumTraits<Scalar>::dummy_precision(),
                           int fillfactor = 10)
      : m_droptol(droptol), m_fillfactor(fillfactor),
        m_analysisIsOk(false), m_factorizationIsOk(false)
    {
      eigen_assert(fillfactor != 0);
      compute(mat);
    }
    Index rows() const { return m_lu.rows(); }
    Index cols() const { return m_lu.cols(); }
    ComputationInfo info() const {
      eigen_assert(m_isInitialized && "IncompleteLUT is not initialized.");
      return m_info;
    }
    template<typename MatrixType>
    void analyzePattern(const MatrixType& amat);

    template<typename MatrixType>
    void factorize(const MatrixType& amat);
    template<typename MatrixType>
    IncompleteLUT& compute(const MatrixType& amat) {
      analyzePattern(amat);
      factorize(amat);
      return *this;
    }
    template<typename Rhs, typename Dest>
    void _solve_impl(const Rhs& b, Dest& x) const
    {
      x = m_Pinv * b;                                          // apply the fill-reducing permutation
      x = m_lu.template triangularView<UnitLower>().solve(x);  // forward substitution with L
      x = m_lu.template triangularView<Upper>().solve(x);      // backward substitution with U
      x = m_P * x;                                             // permute the solution back
    }
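
    // Minimal usage sketch (the sparse matrix A and right-hand side b are assumed
    // to be assembled elsewhere): IncompleteLUT is typically plugged into an
    // iterative solver as its preconditioner, e.g.
    //
    //   BiCGSTAB<SparseMatrix<double>, IncompleteLUT<double> > solver;
    //   solver.preconditioner().setDroptol(1e-4);
    //   solver.preconditioner().setFillfactor(10);
    //   solver.compute(A);
    //   VectorXd x = solver.solve(b);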
    /** \internal keeps off-diagonal entries; drops diagonal entries */
    struct keep_diag {
      inline bool operator() (const Index& row, const Index& col, const Scalar&) const
      {
        return row != col;
      }
    };
    RealScalar m_droptol;
    bool m_factorizationIsOk;
};
/** Sets the drop tolerance: entries smaller (in magnitude) than droptol times the
  * norm of the current row are dropped during the factorization. */
template<typename Scalar, typename StorageIndex>
void IncompleteLUT<Scalar,StorageIndex>::setDroptol(const RealScalar& droptol)
{
  this->m_droptol = droptol;
}
/** Sets the fill factor: it controls how many entries are kept per row in the L and
  * U factors, relative to the average number of nonzeros per row of the input matrix. */
template<typename Scalar, typename StorageIndex>
void IncompleteLUT<Scalar,StorageIndex>::setFillfactor(int fillfactor)
{
  this->m_fillfactor = fillfactor;
}
template <typename Scalar, typename StorageIndex>
template<typename _MatrixType>
void IncompleteLUT<Scalar,StorageIndex>::analyzePattern(const _MatrixType& amat)
{
  // m_P holds a fill-reducing ordering of the (symmetrized) pattern of amat;
  // cache its inverse for use in factorize() and _solve_impl()
  m_Pinv = m_P.inverse();
  m_analysisIsOk = true;
  m_factorizationIsOk = false;
  m_isInitialized = true;
}
template <typename Scalar, typename StorageIndex>
template<typename _MatrixType>
void IncompleteLUT<Scalar,StorageIndex>::factorize(const _MatrixType& amat)
{
  using std::sqrt;
  using std::swap;
  using std::abs;
  using internal::convert_index;

  eigen_assert((amat.rows() == amat.cols()) && "The factorization should be done on a square matrix");
  Index n = amat.cols();               // size of the matrix
  m_lu.resize(n,n);

  // working vectors: values of the current row (u), their column indices (ju),
  // and the position of each column inside u (jr, -1 when absent)
  Vector u(n);
  VectorI ju(n);
  VectorI jr(n);

  // apply the fill-reducing permutation computed in analyzePattern()
  eigen_assert(m_analysisIsOk && "You must first call analyzePattern()");
  SparseMatrix<Scalar,RowMajor, StorageIndex> mat;
  mat = amat.twistedBy(m_Pinv);

  jr.fill(-1);
  ju.fill(0);
  u.fill(0);

  // estimate of the allowed fill-in per row, derived from the fill factor
  Index fill_in = (amat.nonZeros()*m_fillfactor)/n + 1;
  if (fill_in > n) fill_in = n;

  // maximum number of entries kept in the L and U parts of each row
  Index nnzL = fill_in/2;
  Index nnzU = nnzL;
  m_lu.reserve(n * (nnzL + nnzU + 1));
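
  // Dual-threshold (ILUT) dropping strategy used below: within each row, entries
  // smaller than m_droptol times the row norm are discarded, and QuickSplit then
  // keeps only the nnzL (resp. nnzU) largest remaining entries of the L (resp. U) part.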
  // global loop over the rows of the permuted matrix
  for (Index ii = 0; ii < n; ii++)
  {
    // 1 - gather the lower part, diagonal and upper part of row ii into the dense vector u
    Index sizeu = 1;   // number of nonzeros in the upper part of the current row
    Index sizel = 0;   // number of nonzeros in the lower part of the current row
    ju(ii) = convert_index<StorageIndex>(ii);
    u(ii)  = 0;
    jr(ii) = convert_index<StorageIndex>(ii);
    RealScalar rownorm = 0;
    typename FactorType::InnerIterator j_it(mat, ii);   // iterate over row ii of mat
    for (; j_it; ++j_it)
    {
      Index k = j_it.index();
      if (k < ii) {                    // strictly lower part
        ju(sizel) = convert_index<StorageIndex>(k);
        u(sizel) = j_it.value();
        jr(k) = convert_index<StorageIndex>(sizel);
        ++sizel;
      }
      else if (k == ii) {              // diagonal entry
        u(ii) = j_it.value();
      }
      else {                           // strictly upper part
        Index jpos = ii + sizeu;
        ju(jpos) = convert_index<StorageIndex>(k);
        u(jpos) = j_it.value();
        jr(k) = convert_index<StorageIndex>(jpos);
        ++sizeu;
      }
      rownorm += numext::abs2(j_it.value());
    }
    // 2 - the 2-norm of the row serves as a relative dropping threshold
    rownorm = sqrt(rownorm);
    // 3 - eliminate the pending lower-part columns against the rows of the partial
    // factor m_lu, smallest column index first (loop over jj)
    Index minrow = ju.segment(jj,sizel-jj).minCoeff(&k);
    if (minrow != ju(jj))
    {
      // bring column minrow to position jj: swap ju/u and update the positions in jr
      jr(minrow) = convert_index<StorageIndex>(jj);
      jr(j) = convert_index<StorageIndex>(k);
    }
    // locate the diagonal of row minrow in m_lu and form the multiplier
    typename FactorType::InnerIterator ki_it(m_lu, minrow);
    while (ki_it && ki_it.index() < minrow) ++ki_it;
    eigen_internal_assert(ki_it && ki_it.col()==minrow);
    Scalar fact = u(jj) / ki_it.value();

    // drop the whole update when the multiplier is below the drop tolerance
    if(abs(fact) <= m_droptol)
    { jj++; continue; }

    // subtract fact times row minrow of U from the working row, creating fill-in
    // entries (tracked through jr) where needed
    for (; ki_it; ++ki_it)
    {
      Scalar prod = fact * ki_it.value();
      Index j = ki_it.index();
      eigen_internal_assert(sizeu<=n);
      eigen_internal_assert(sizel<=ii);
      ju(newpos) = convert_index<StorageIndex>(j);
      jr(j) = convert_index<StorageIndex>(newpos);
    }
    // keep the multiplier itself as the L entry for column minrow
    ju(len) = convert_index<StorageIndex>(minrow);
    // reset the upper part of the position pointer jr
    for(Index k = 0; k <sizeu; k++) jr(ju(ii+k)) = -1;

    // 4 - keep only the nnzL largest-magnitude entries of the L part of the row
    len = (std::min)(sizel, nnzL);
    typename Vector::SegmentReturnType ul(u.segment(0, sizel));
    typename VectorI::SegmentReturnType jul(ju.segment(0, sizel));
    internal::QuickSplit(ul, jul, len);

    // store the kept L entries into m_lu
    for(Index k = 0; k < len; k++)
      m_lu.insertBackByOuterInnerUnordered(ii,ju(k)) = u(k);

    // store the diagonal entry; an exact zero pivot is replaced by a small value
    // proportional to the row norm so that the factor stays invertible
    if (u(ii) == Scalar(0))
      u(ii) = sqrt(m_droptol) * rownorm;
    m_lu.insertBackByOuterInnerUnordered(ii, ii) = u(ii);
    // 5 - dropping rule for the U part: first discard entries that are small
    // relative to the row norm ...
    len = 0;
    for(Index k = 1; k < sizeu; k++)
    {
      if(abs(u(ii+k)) > m_droptol * rownorm )
      {
        ++len;
        u(ii + len)  = u(ii + k);
        ju(ii + len) = ju(ii + k);
      }
    }
    sizeu = len + 1;                 // +1 accounts for the diagonal entry
    // ... then keep only the nnzU largest of the remaining entries
    len = (std::min)(sizeu, nnzU);
    typename Vector::SegmentReturnType uu(u.segment(ii+1, sizeu-1));
    typename VectorI::SegmentReturnType juu(ju.segment(ii+1, sizeu-1));
    internal::QuickSplit(uu, juu, len);

    // store the kept U entries into m_lu
    for(Index k = ii + 1; k < ii + len; k++)
      m_lu.insertBackByOuterInnerUnordered(ii,ju(k)) = u(k);
  }  // end of the loop over the rows

  m_lu.makeCompressed();

  m_factorizationIsOk = true;
  m_info = Success;
}
#endif // EIGEN_INCOMPLETE_LUT_H