#ifndef EIGEN_SUPERLUSUPPORT_H
#define EIGEN_SUPERLUSUPPORT_H

// Declares the SuperLU ?gssvx driver for one scalar type and defines an overloaded
// SuperLU_gssvx() wrapper; the trailing dummy KEYTYPE argument selects the overload,
// and the wrapper returns the memory used by the L and U factors as reported by SuperLU.
#define DECL_GSSVX(PREFIX,FLOATTYPE,KEYTYPE) \
    extern "C" { \
      typedef struct { FLOATTYPE for_lu; FLOATTYPE total_needed; int expansions; } PREFIX##mem_usage_t; \
      extern void PREFIX##gssvx(superlu_options_t *, SuperMatrix *, int *, int *, int *, \
                                char *, FLOATTYPE *, FLOATTYPE *, SuperMatrix *, SuperMatrix *, \
                                void *, int, SuperMatrix *, SuperMatrix *, \
                                FLOATTYPE *, FLOATTYPE *, FLOATTYPE *, FLOATTYPE *, \
                                PREFIX##mem_usage_t *, SuperLUStat_t *, int *); \
    } \
    inline float SuperLU_gssvx(superlu_options_t *options, SuperMatrix *A, \
                               int *perm_c, int *perm_r, int *etree, char *equed, \
                               FLOATTYPE *R, FLOATTYPE *C, SuperMatrix *L, \
                               SuperMatrix *U, void *work, int lwork, \
                               SuperMatrix *B, SuperMatrix *X, \
                               FLOATTYPE *recip_pivot_growth, \
                               FLOATTYPE *rcond, FLOATTYPE *ferr, FLOATTYPE *berr, \
                               SuperLUStat_t *stats, int *info, KEYTYPE) { \
      PREFIX##mem_usage_t mem_usage; \
      PREFIX##gssvx(options, A, perm_c, perm_r, etree, equed, R, C, L, \
                    U, work, lwork, B, X, recip_pivot_growth, rcond, \
                    ferr, berr, &mem_usage, stats, info); \
      return mem_usage.for_lu; \
    }
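// Illustrative sketch (hypothetical variable names): the dummy last argument selects the
// precision, so a double problem dispatches to the dgssvx wrapper generated below:
//   float lu_mem = SuperLU_gssvx(&options, &A, perm_c, perm_r, etree, &equed,
//                                R, C, &L, &U, NULL, 0, &B, &X,
//                                &rpg, &rcond, ferr, berr, &stat, &info, double());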

DECL_GSSVX(s,float,float)
DECL_GSSVX(c,float,std::complex<float>)
DECL_GSSVX(d,double,double)
DECL_GSSVX(z,double,std::complex<double>)

// MILU_ALPHA is defined by the SuperLU releases that ship the incomplete-LU (ILU)
// routines, so its presence is used here to detect ILU support.
#ifdef MILU_ALPHA
#define EIGEN_SUPERLU_HAS_ILU
#endif

#ifdef EIGEN_SUPERLU_HAS_ILU

// Same pattern as DECL_GSSVX, but for the ?gsisx incomplete-LU drivers.
#define DECL_GSISX(PREFIX,FLOATTYPE,KEYTYPE) \
    extern "C" { \
      extern void PREFIX##gsisx(superlu_options_t *, SuperMatrix *, int *, int *, int *, \
                                char *, FLOATTYPE *, FLOATTYPE *, SuperMatrix *, SuperMatrix *, \
                                void *, int, SuperMatrix *, SuperMatrix *, FLOATTYPE *, FLOATTYPE *, \
                                PREFIX##mem_usage_t *, SuperLUStat_t *, int *); \
    } \
    inline float SuperLU_gsisx(superlu_options_t *options, SuperMatrix *A, \
                               int *perm_c, int *perm_r, int *etree, char *equed, \
                               FLOATTYPE *R, FLOATTYPE *C, SuperMatrix *L, \
                               SuperMatrix *U, void *work, int lwork, \
                               SuperMatrix *B, SuperMatrix *X, \
                               FLOATTYPE *recip_pivot_growth, \
                               FLOATTYPE *rcond, \
                               SuperLUStat_t *stats, int *info, KEYTYPE) { \
      PREFIX##mem_usage_t mem_usage; \
      PREFIX##gsisx(options, A, perm_c, perm_r, etree, equed, R, C, L, \
                    U, work, lwork, B, X, recip_pivot_growth, rcond, \
                    &mem_usage, stats, info); \
      return mem_usage.for_lu; \
    }

DECL_GSISX(s,float,float)
DECL_GSISX(c,float,std::complex<float>)
DECL_GSISX(d,double,double)
DECL_GSISX(z,double,std::complex<double>)

#endif

template<typename MatrixType>
struct SluMatrixMapHelper;

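/** \internal
  * Thin wrapper around SuperLU's SuperMatrix whose Store pointer always refers to the
  * embedded storage member, so instances can be copied and assigned safely and can be
  * mapped onto Eigen dense or sparse matrices without copying any data.
  */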
struct SluMatrix : SuperMatrix
{
  SluMatrix()
  {
    Store = &storage;
  }

  SluMatrix(const SluMatrix& other)
    : SuperMatrix(other)
  {
    Store = &storage;
    storage = other.storage;
  }

  SluMatrix& operator=(const SluMatrix& other)
  {
    SuperMatrix::operator=(static_cast<const SuperMatrix&>(other));
    Store = &storage;
    storage = other.storage;
    return *this;
  }

  // Embedded storage descriptor; Store always points here so copies remain self-contained.
  struct
  {
    union {int nnz;int lda;};
    void *values;
    int *innerInd;
    int *outerInd;
  } storage;

  void setStorageType(Stype_t t)
  {
    Stype = t;
    if (t==SLU_NC || t==SLU_NR || t==SLU_DN)
      Store = &storage;
    else
    {
      eigen_assert(false && "storage type not supported");
      Store = 0;
    }
  }

  template<typename Scalar>
  void setScalarType()
  {
    if (internal::is_same<Scalar,float>::value)
      Dtype = SLU_S;
    else if (internal::is_same<Scalar,double>::value)
      Dtype = SLU_D;
    else if (internal::is_same<Scalar,std::complex<float> >::value)
      Dtype = SLU_C;
    else if (internal::is_same<Scalar,std::complex<double> >::value)
      Dtype = SLU_Z;
    else
    {
      eigen_assert(false && "Scalar type not supported by SuperLU");
    }
  }

  // Wraps a column-major Eigen dense matrix as a SuperLU dense (SLU_DN) matrix without copying.
  template<typename Scalar, int Rows, int Cols, int Options, int MRows, int MCols>
  static SluMatrix Map(Matrix<Scalar,Rows,Cols,Options,MRows,MCols>& mat)
  {
    typedef Matrix<Scalar,Rows,Cols,Options,MRows,MCols> MatrixType;
    eigen_assert( ((Options&RowMajor)!=RowMajor) && "row-major dense matrices are not supported by SuperLU");
    SluMatrix res;
    res.setStorageType(SLU_DN);
    res.setScalarType<Scalar>();
    res.Mtype = SLU_GE;

    res.nrow = mat.rows();
    res.ncol = mat.cols();

    res.storage.lda = MatrixType::IsVectorAtCompileTime ? mat.size() : mat.outerStride();
    res.storage.values = mat.data();
    return res;
  }

  // Wraps an Eigen sparse matrix as a compressed column (SLU_NC) or compressed row (SLU_NR)
  // SuperLU matrix without copying.
  template<typename MatrixType>
  static SluMatrix Map(SparseMatrixBase<MatrixType>& mat)
  {
    SluMatrix res;
    if ((MatrixType::Flags&RowMajorBit)==RowMajorBit)
    {
      res.setStorageType(SLU_NR);
      res.nrow = mat.cols();
      res.ncol = mat.rows();
    }
    else
    {
      res.setStorageType(SLU_NC);
      res.nrow = mat.rows();
      res.ncol = mat.cols();
    }

    res.Mtype = SLU_GE;

    res.storage.nnz = mat.nonZeros();
    res.storage.values = mat.derived()._valuePtr();
    res.storage.innerInd = mat.derived()._innerIndexPtr();
    res.storage.outerInd = mat.derived()._outerIndexPtr();

    res.setScalarType<typename MatrixType::Scalar>();

    // Map triangular shape flags to the corresponding SuperLU Mtype.
    if (MatrixType::Flags & Upper)
      res.Mtype = SLU_TRU;
    if (MatrixType::Flags & Lower)
      res.Mtype = SLU_TRL;
    if (MatrixType::Flags & SelfAdjoint)
      eigen_assert(false && "SelfAdjoint matrix shape not supported by SuperLU");
    return res;
  }
};
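// Illustrative example (hypothetical names): both Map() overloads are zero-copy wrappers:
//   Eigen::MatrixXd rhs(n, 2);                       // dense right-hand sides
//   SluMatrix sluRhs = SluMatrix::Map(rhs);          // aliases rhs.data(), lda = outer stride
//   Eigen::SparseMatrix<double> A(n, n);
//   SluMatrix sluA = SluMatrix::Map(A);              // aliases A's value/index arrays (SLU_NC)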

template<typename Scalar, int Rows, int Cols, int Options, int MRows, int MCols>
struct SluMatrixMapHelper<Matrix<Scalar,Rows,Cols,Options,MRows,MCols> >
{
  typedef Matrix<Scalar,Rows,Cols,Options,MRows,MCols> MatrixType;
  static void run(MatrixType& mat, SluMatrix& res)
  {
    eigen_assert( ((Options&RowMajor)!=RowMajor) && "row-major dense matrices are not supported by SuperLU");
    res.setStorageType(SLU_DN);
    res.setScalarType<Scalar>();
    res.Mtype = SLU_GE;

    res.nrow = mat.rows();
    res.ncol = mat.cols();

    res.storage.lda = mat.outerStride();
    res.storage.values = mat.data();
  }
};

template<typename Derived>
struct SluMatrixMapHelper<SparseMatrixBase<Derived> >
{
  typedef Derived MatrixType;
  static void run(MatrixType& mat, SluMatrix& res)
  {
    if ((MatrixType::Flags&RowMajorBit)==RowMajorBit)
    {
      res.setStorageType(SLU_NR);
      res.nrow = mat.cols();
      res.ncol = mat.rows();
    }
    else
    {
      res.setStorageType(SLU_NC);
      res.nrow = mat.rows();
      res.ncol = mat.cols();
    }

    res.Mtype = SLU_GE;

    res.storage.nnz = mat.nonZeros();
    res.storage.values = mat._valuePtr();
    res.storage.innerInd = mat._innerIndexPtr();
    res.storage.outerInd = mat._outerIndexPtr();

    res.setScalarType<typename MatrixType::Scalar>();

    // Map triangular shape flags to the corresponding SuperLU Mtype.
    if (MatrixType::Flags & Upper)
      res.Mtype = SLU_TRU;
    if (MatrixType::Flags & Lower)
      res.Mtype = SLU_TRL;
    if (MatrixType::Flags & SelfAdjoint)
      eigen_assert(false && "SelfAdjoint matrix shape not supported by SuperLU");
  }
};

namespace internal {

// Wraps an Eigen matrix (dense or sparse) as a SuperLU SluMatrix without copying.
template<typename MatrixType>
SluMatrix asSluMatrix(MatrixType& mat)
{
  return SluMatrix::Map(mat);
}

// Views a SuperLU compressed row/column matrix as an Eigen MappedSparseMatrix (no copy).
template<typename Scalar, int Flags, typename Index>
MappedSparseMatrix<Scalar,Flags,Index> map_superlu(SluMatrix& sluMat)
{
  eigen_assert( ((Flags&RowMajor)==RowMajor && sluMat.Stype == SLU_NR)
             || ((Flags&ColMajor)==ColMajor && sluMat.Stype == SLU_NC) );

  Index outerSize = (Flags&RowMajor)==RowMajor ? sluMat.ncol : sluMat.nrow;

  return MappedSparseMatrix<Scalar,Flags,Index>(
    sluMat.nrow, sluMat.ncol, sluMat.storage.outerInd[outerSize],
    sluMat.storage.outerInd, sluMat.storage.innerInd, reinterpret_cast<Scalar*>(sluMat.storage.values) );
}

} // end namespace internal
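// Illustrative sketch (assumes sluU holds a column-compressed SLU_NC matrix of doubles):
//   MappedSparseMatrix<double,ColMajor,int> U =
//       internal::map_superlu<double,ColMajor,int>(sluU);   // aliases SuperLU's arrays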

// SuperLU backend for SparseLU: factorizes the matrix with SuperLU's supernodal LU
// (or its incomplete LU when available) and exposes the factors and permutations.
template<typename MatrixType>
class SparseLU<MatrixType,SuperLU> : public SparseLU<MatrixType>
{
  protected:
    typedef SparseLU<MatrixType> Base;
    typedef typename Base::Scalar Scalar;
    typedef typename Base::RealScalar RealScalar;
    typedef Matrix<Scalar,Dynamic,1> Vector;
    typedef Matrix<int, 1, MatrixType::ColsAtCompileTime> IntRowVectorType;
    typedef Matrix<int, MatrixType::RowsAtCompileTime, 1> IntColVectorType;
    typedef SparseMatrix<Scalar,Lower|UnitDiag> LMatrixType;
    typedef SparseMatrix<Scalar,Upper> UMatrixType;
    using Base::m_flags;
    using Base::m_status;

  public:

    SparseLU(int flags = NaturalOrdering)
      : Base(flags)
    {
    }

    SparseLU(const MatrixType& matrix, int flags = NaturalOrdering)
      : Base(flags)
    {
      compute(matrix);
    }

    ~SparseLU()
    {
      Destroy_SuperNode_Matrix(&m_sluL);
      Destroy_CompCol_Matrix(&m_sluU);
    }

    inline const LMatrixType& matrixL() const
    {
      if (m_extractedDataAreDirty) extractData();
      return m_l;
    }

    inline const UMatrixType& matrixU() const
    {
      if (m_extractedDataAreDirty) extractData();
      return m_u;
    }

    inline const IntColVectorType& permutationP() const
    {
      if (m_extractedDataAreDirty) extractData();
      return m_p;
    }

    inline const IntRowVectorType& permutationQ() const
    {
      if (m_extractedDataAreDirty) extractData();
      return m_q;
    }

    Scalar determinant() const;

    template<typename BDerived, typename XDerived>
    bool solve(const MatrixBase<BDerived> &b, MatrixBase<XDerived>* x, const int transposed = SvNoTrans) const;

    void compute(const MatrixType& matrix);

  protected:

    void extractData() const;

  protected:
    // L/U factors and permutations, extracted on demand (see extractData()).
    mutable LMatrixType m_l;
    mutable UMatrixType m_u;
    mutable IntColVectorType m_p;
    mutable IntRowVectorType m_q;

    // SuperLU-side data kept alive between compute() and solve().
    mutable SparseMatrix<Scalar> m_matrix;
    mutable SluMatrix m_sluA;
    mutable SuperMatrix m_sluL, m_sluU;
    mutable SluMatrix m_sluB, m_sluX;
    mutable SuperLUStat_t m_sluStat;
    mutable superlu_options_t m_sluOptions;
    mutable std::vector<int> m_sluEtree;
    mutable std::vector<RealScalar> m_sluRscale, m_sluCscale;
    mutable std::vector<RealScalar> m_sluFerr, m_sluBerr;
    mutable char m_sluEqued;
    mutable bool m_extractedDataAreDirty;
};
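// Illustrative example (hypothetical data): typical use of this backend:
//   SparseMatrix<double> A;                              // filled elsewhere
//   VectorXd b, x(A.rows());
//   SparseLU<SparseMatrix<double>,SuperLU> lu(A);        // factorizes A
//   bool ok  = lu.solve(b, &x);                          // solves A*x = b
//   bool okT = lu.solve(b, &x, SvTranspose);             // solves A^T*x = b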

template<typename MatrixType>
void SparseLU<MatrixType,SuperLU>::compute(const MatrixType& a)
{
  const int size = a.rows();
  m_matrix = a;

  set_default_options(&m_sluOptions);
  m_sluOptions.ColPerm = NATURAL;
  m_sluOptions.PrintStat = NO;
  m_sluOptions.ConditionNumber = NO;
  m_sluOptions.Trans = NOTRANS;

  // pick the column permutation according to the requested ordering method
  switch (Base::orderingMethod())
  {
    case NaturalOrdering          : m_sluOptions.ColPerm = NATURAL;       break;
    case MinimumDegree_AT_PLUS_A  : m_sluOptions.ColPerm = MMD_AT_PLUS_A; break;
    case MinimumDegree_ATA        : m_sluOptions.ColPerm = MMD_ATA;       break;
    case ColApproxMinimumDegree   : m_sluOptions.ColPerm = COLAMD;        break;
    default:
      // unsupported ordering: fall back to the natural ordering
      m_sluOptions.ColPerm = NATURAL;
  };

  m_sluA = internal::asSluMatrix(m_matrix);
  memset(&m_sluL,0,sizeof m_sluL);
  memset(&m_sluU,0,sizeof m_sluU);

  int info = 0;

  m_p.resize(size);
  m_q.resize(size);
  m_sluRscale.resize(size);
  m_sluCscale.resize(size);
  m_sluEtree.resize(size);

  RealScalar recip_pivot_gross, rcond;
  RealScalar ferr, berr;

  // set empty B and X: only the factorization is performed here
  m_sluB.setStorageType(SLU_DN);
  m_sluB.setScalarType<Scalar>();
  m_sluB.Mtype = SLU_GE;
  m_sluB.storage.values = 0;
  m_sluB.nrow = m_sluB.ncol = 0;
  m_sluB.storage.lda = size;
  m_sluX = m_sluB;

  StatInit(&m_sluStat);
  if (m_flags&IncompleteFactorization)
  {
#ifdef EIGEN_SUPERLU_HAS_ILU
    ilu_set_default_options(&m_sluOptions);

    // standard (unmodified) incomplete LU
    m_sluOptions.ILU_MILU = SILU;

    // basic dropping rule with the user-provided threshold
    m_sluOptions.ILU_DropRule = DROP_BASIC;
    m_sluOptions.ILU_DropTol = Base::m_precision;

    SuperLU_gsisx(&m_sluOptions, &m_sluA, m_q.data(), m_p.data(), &m_sluEtree[0],
                  &m_sluEqued, &m_sluRscale[0], &m_sluCscale[0],
                  &m_sluL, &m_sluU,
                  NULL, 0,
                  &m_sluB, &m_sluX,
                  &recip_pivot_gross, &rcond,
                  &m_sluStat, &info, Scalar());
#else
    // SuperLU was not compiled with ILU support
    Base::m_succeeded = false;
    return;
#endif
  }
  else
  {
    SuperLU_gssvx(&m_sluOptions, &m_sluA, m_q.data(), m_p.data(), &m_sluEtree[0],
                  &m_sluEqued, &m_sluRscale[0], &m_sluCscale[0],
                  &m_sluL, &m_sluU,
                  NULL, 0,
                  &m_sluB, &m_sluX,
                  &recip_pivot_gross, &rcond,
                  &ferr, &berr,
                  &m_sluStat, &info, Scalar());
  }
  StatFree(&m_sluStat);

  m_extractedDataAreDirty = true;

  Base::m_succeeded = (info == 0);
}

template<typename MatrixType>
template<typename BDerived,typename XDerived>
bool SparseLU<MatrixType,SuperLU>::solve(const MatrixBase<BDerived> &b,
                                         MatrixBase<XDerived> *x, const int transposed) const
{
  const int size = m_matrix.rows();
  const int rhsCols = b.cols();
  eigen_assert(size==b.rows());

  switch (transposed) {
    case SvNoTrans    : m_sluOptions.Trans = NOTRANS; break;
    case SvTranspose  : m_sluOptions.Trans = TRANS;   break;
    case SvAdjoint    : m_sluOptions.Trans = CONJ;    break;
    default:
      // unsupported mode: fall back to the non-transposed solve
      m_sluOptions.Trans = NOTRANS;
  }

  m_sluOptions.Fact = FACTORED;
  m_sluOptions.IterRefine = NOREFINE;

  m_sluFerr.resize(rhsCols);
  m_sluBerr.resize(rhsCols);
  m_sluB = SluMatrix::Map(b.const_cast_derived());
  m_sluX = SluMatrix::Map(x->derived());

  StatInit(&m_sluStat);
  int info = 0;
  RealScalar recip_pivot_gross, rcond;

  if (m_flags&IncompleteFactorization)
  {
#ifdef EIGEN_SUPERLU_HAS_ILU
    SuperLU_gsisx(&m_sluOptions, &m_sluA, m_q.data(), m_p.data(), &m_sluEtree[0],
                  &m_sluEqued, &m_sluRscale[0], &m_sluCscale[0],
                  &m_sluL, &m_sluU,
                  NULL, 0,
                  &m_sluB, &m_sluX,
                  &recip_pivot_gross, &rcond,
                  &m_sluStat, &info, Scalar());
#else
    // SuperLU was not compiled with ILU support
    return false;
#endif
  }
  else
  {
    SuperLU_gssvx(
      &m_sluOptions, &m_sluA,
      m_q.data(), m_p.data(),
      &m_sluEtree[0], &m_sluEqued,
      &m_sluRscale[0], &m_sluCscale[0],
      &m_sluL, &m_sluU,
      NULL, 0,
      &m_sluB, &m_sluX,
      &recip_pivot_gross, &rcond,
      &m_sluFerr[0], &m_sluBerr[0],
      &m_sluStat, &info, Scalar());
  }
  StatFree(&m_sluStat);

  // restore the default (non-transposed) mode for subsequent calls
  m_sluOptions.Trans = NOTRANS;
  return info==0;
}

// Copies SuperLU's supernodal L factor and column-compressed U factor
// into the plain Eigen sparse matrices m_l and m_u.
template<typename MatrixType>
void SparseLU<MatrixType,SuperLU>::extractData() const
{
  if (m_extractedDataAreDirty)
  {
    int upper;
    int fsupc, istart, nsupr;
    int lastl = 0, lastu = 0;
    SCformat *Lstore = static_cast<SCformat*>(m_sluL.Store);
    NCformat *Ustore = static_cast<NCformat*>(m_sluU.Store);
    Scalar *SNptr;

    const int size = m_matrix.rows();
    m_l.resize(size,size);
    m_l.resizeNonZeros(Lstore->nnz);
    m_u.resize(size,size);
    m_u.resizeNonZeros(Ustore->nnz);

    int* Lcol = m_l._outerIndexPtr();
    int* Lrow = m_l._innerIndexPtr();
    Scalar* Lval = m_l._valuePtr();

    int* Ucol = m_u._outerIndexPtr();
    int* Urow = m_u._innerIndexPtr();
    Scalar* Uval = m_u._valuePtr();
    Lcol[0] = 0;
    Ucol[0] = 0;

    // for each supernode of L
    for (int k = 0; k <= Lstore->nsuper; ++k)
    {
      fsupc  = L_FST_SUPC(k);
      istart = L_SUB_START(fsupc);
      nsupr  = L_SUB_START(fsupc+1) - istart;
      upper  = 1;

      // for each column within the current supernode
      for (int j = fsupc; j < L_FST_SUPC(k+1); ++j)
      {
        SNptr = &((Scalar*)Lstore->nzval)[L_NZ_START(j)];

        // copy the entries stored in U itself
        for (int i = U_NZ_START(j); i < U_NZ_START(j+1); ++i)
        {
          Uval[lastu] = ((Scalar*)Ustore->nzval)[i];
          // skip explicitly stored zeros
          if (Uval[lastu] != 0.0)
            Urow[lastu++] = U_SUB(i);
        }
        // the upper-triangular part of the supernode also belongs to U
        for (int i = 0; i < upper; ++i)
        {
          Uval[lastu] = SNptr[i];
          // skip explicitly stored zeros
          if (Uval[lastu] != 0.0)
            Urow[lastu++] = L_SUB(istart+i);
        }
        Ucol[j+1] = lastu;

        // L has a unit diagonal; the strictly lower part comes from the supernode
        Lval[lastl] = 1.0;
        Lrow[lastl++] = L_SUB(istart + upper - 1);
        for (int i = upper; i < nsupr; ++i)
        {
          Lval[lastl] = SNptr[i];
          // skip explicitly stored zeros
          if (Lval[lastl] != 0.0)
            Lrow[lastl++] = L_SUB(istart+i);
        }
        Lcol[j+1] = lastl;

        ++upper;
      }
    }

    // squeeze the matrices to the number of nonzeros actually copied
    m_l.resizeNonZeros(lastl);
    m_u.resizeNonZeros(lastu);

    m_extractedDataAreDirty = false;
  }
}

// The determinant is computed as the product of the diagonal entries of U
// (L has a unit diagonal).
template<typename MatrixType>
typename SparseLU<MatrixType,SuperLU>::Scalar SparseLU<MatrixType,SuperLU>::determinant() const
{
  assert((!NumTraits<Scalar>::IsComplex) && "This function is not implemented for complex scalars yet");
  if (m_extractedDataAreDirty)
    extractData();

  Scalar det = Scalar(1);
  for (int j=0; j<m_u.cols(); ++j)
  {
    if (m_u._outerIndexPtr()[j+1]-m_u._outerIndexPtr()[j] > 0)
    {
      // the diagonal entry, if present, is the last stored entry of column j
      int lastId = m_u._outerIndexPtr()[j+1]-1;
      eigen_assert(m_u._innerIndexPtr()[lastId]<=j);
      if (m_u._innerIndexPtr()[lastId]==j)
      {
        det *= m_u._valuePtr()[lastId];
      }
    }
  }
  return det;
}

#endif // EIGEN_SUPERLUSUPPORT_H