#include "petscversion.h"

namespace MultiRegions
{
    std::string GlobalLinSysPETSc::matMult =
        LibUtilities::SessionReader::RegisterDefaultSolverInfo(
            "PETScMatMult", "Sparse");
    std::string GlobalLinSysPETSc::matMultIds[] = {
        LibUtilities::SessionReader::RegisterEnumValue(
            "PETScMatMult", "Sparse", MultiRegions::ePETScMatMultSparse),
        LibUtilities::SessionReader::RegisterEnumValue(
            "PETScMatMult", "Shell", MultiRegions::ePETScMatMultShell)
    };
    GlobalLinSysPETSc::GlobalLinSysPETSc(
        const GlobalLinSysKey                &pKey,
        const boost::weak_ptr<ExpList>       &pExp,
        const boost::shared_ptr<AssemblyMap> &pLocToGloMap)
        : GlobalLinSys(pKey, pExp, pLocToGloMap)
    {
        // Initialise PETSc if it is not initialised already. When running
        // under MPI, point PETSC_COMM_WORLD at the Nektar++ communicator
        // before doing so.
        PetscBool isInitialized;
        PetscInitialized(&isInitialized);
        if (!isInitialized)
        {
#ifdef NEKTAR_USE_MPI
            std::string commType =
                m_expList.lock()->GetSession()->GetComm()->GetType();
            if (commType.find("MPI") != std::string::npos)
            {
                LibUtilities::CommMpiSharedPtr comm =
                    boost::static_pointer_cast<LibUtilities::CommMpi>(
                        m_expList.lock()->GetSession()->GetComm());
                PETSC_COMM_WORLD = comm->GetComm();
            }
#endif
            PetscInitializeNoArguments();
        }
        // Create the PETSc matrix object.
        MatCreate(PETSC_COMM_WORLD, &m_matrix);
    }
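// ---------------------------------------------------------------------
// Illustrative sketch (not part of the Nektar++ sources): a standalone
// program using the same initialisation pattern as the constructor
// above. When the application already manages MPI, PETSC_COMM_WORLD can
// be pointed at an existing communicator before PETSc is initialised.
// All names are local to this example.
// ---------------------------------------------------------------------
#include <mpi.h>
#include <petscsys.h>

int main(int argc, char **argv)
{
    MPI_Init(&argc, &argv);

    PetscBool isInitialized;
    PetscInitialized(&isInitialized);
    if (!isInitialized)
    {
        // Must be assigned before PetscInitializeNoArguments() is called.
        PETSC_COMM_WORLD = MPI_COMM_WORLD;
        PetscInitializeNoArguments();
    }

    // ... create and use PETSc objects here ...

    PetscFinalize();
    MPI_Finalize();
    return 0;
}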
    GlobalLinSysPETSc::~GlobalLinSysPETSc()
    {
        // Only destroy PETSc objects if PETSc (and MPI) are not yet finalised.
        PetscBool isFinalized;
        PetscFinalized(&isFinalized);
#ifdef NEKTAR_USE_MPI
        // PetscFinalized may report false after MPI itself has been shut down.
        int mpiFinal = 0;
        MPI_Finalized(&mpiFinal);
        isFinalized = isFinalized || mpiFinal ? PETSC_TRUE : PETSC_FALSE;
#endif

        if (!isFinalized)
        {
            KSPDestroy(&m_ksp);
            PCDestroy (&m_pc);
            MatDestroy(&m_matrix);
            VecDestroy(&m_x);
            VecDestroy(&m_b);
            VecDestroy(&m_locVec);
        }
    }
    void GlobalLinSysPETSc::v_SolveLinearSystem(
        const int pNumRows,
        const Array<OneD, const NekDouble> &pInput,
        Array<OneD, NekDouble> &pOutput,
        const AssemblyMapSharedPtr &locToGloMap,
        const int pNumDir)
    {
        const int nHomDofs = pNumRows - pNumDir;

        // Populate and assemble the RHS vector from the input array.
        VecSetValues(m_b, nHomDofs, &m_reorderedMap[0],
                     &pInput[pNumDir], INSERT_VALUES);
        VecAssemblyBegin(m_b);
        VecAssemblyEnd  (m_b);

        // Solve the system and check the convergence reason.
        KSPSolve(m_ksp, m_b, m_x);
        KSPConvergedReason reason;
        KSPGetConvergedReason(m_ksp, &reason);
        ASSERTL0(reason > 0,
                 "PETSc solver diverged, reason is: " +
                 std::string(KSPConvergedReasons[reason]));

        // Scatter the PETSc solution back to the Nektar++ (local) ordering.
        VecScatterBegin(m_ctx, m_x, m_locVec,
                        INSERT_VALUES, SCATTER_FORWARD);
        VecScatterEnd  (m_ctx, m_x, m_locVec,
                        INSERT_VALUES, SCATTER_FORWARD);

        // Copy the results into the output array.
        PetscScalar *tmp;
        VecGetArray    (m_locVec, &tmp);
        Vmath::Vcopy   (nHomDofs, tmp, 1, &pOutput[pNumDir], 1);
        VecRestoreArray(m_locVec, &tmp);
    }
    void GlobalLinSysPETSc::SetUpScatter()
    {
        const int nHomDofs = m_reorderedMap.size();

        // Index sets describing the PETSc (global) and Nektar++ (local) sides.
        IS isGlobal, isLocal;
        ISCreateGeneral(PETSC_COMM_SELF, nHomDofs, &m_reorderedMap[0],
                        PETSC_COPY_VALUES, &isGlobal);
        ISCreateStride (PETSC_COMM_SELF, nHomDofs, 0, 1, &isLocal);

        // Local vector that receives the scattered solution.
        VecCreate  (PETSC_COMM_SELF, &m_locVec);
        VecSetSizes(m_locVec, nHomDofs, PETSC_DECIDE);
        VecScatterCreate(m_x, isGlobal, m_locVec, isLocal, &m_ctx);

        ISDestroy(&isGlobal);
        ISDestroy(&isLocal);
    }
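// ---------------------------------------------------------------------
// Illustrative sketch (not part of the Nektar++ sources): a standalone
// program using the same IS/VecScatter pattern as SetUpScatter() above
// to gather selected entries of a distributed vector into a small
// sequential vector on each process. All names are local to this example.
// ---------------------------------------------------------------------
#include <petscvec.h>

int main(int argc, char **argv)
{
    PetscInitialize(&argc, &argv, NULL, NULL);

    // Distributed vector with 8 global entries, each set to its global index.
    Vec x;
    VecCreate(PETSC_COMM_WORLD, &x);
    VecSetSizes(x, PETSC_DECIDE, 8);
    VecSetFromOptions(x);
    PetscInt lo, hi;
    VecGetOwnershipRange(x, &lo, &hi);
    for (PetscInt i = lo; i < hi; ++i)
    {
        VecSetValue(x, i, (PetscScalar)i, INSERT_VALUES);
    }
    VecAssemblyBegin(x);
    VecAssemblyEnd  (x);

    // Every process gathers global entries {1, 3, 5} into a local vector.
    const PetscInt nLoc     = 3;
    PetscInt       wanted[] = {1, 3, 5};
    IS isGlobal, isLocal;
    ISCreateGeneral(PETSC_COMM_SELF, nLoc, wanted, PETSC_COPY_VALUES, &isGlobal);
    ISCreateStride (PETSC_COMM_SELF, nLoc, 0, 1, &isLocal);

    Vec locVec;
    VecCreate  (PETSC_COMM_SELF, &locVec);
    VecSetSizes(locVec, nLoc, PETSC_DECIDE);
    VecSetFromOptions(locVec);

    VecScatter ctx;
    VecScatterCreate(x, isGlobal, locVec, isLocal, &ctx);
    VecScatterBegin (ctx, x, locVec, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd   (ctx, x, locVec, INSERT_VALUES, SCATTER_FORWARD);
    VecView(locVec, PETSC_VIEWER_STDOUT_SELF);

    VecScatterDestroy(&ctx);
    ISDestroy(&isGlobal);
    ISDestroy(&isLocal);
    VecDestroy(&locVec);
    VecDestroy(&x);
    PetscFinalize();
    return 0;
}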
    void GlobalLinSysPETSc::CalculateReordering(
        const Array<OneD, const int> &glo2uniMap,
        const Array<OneD, const int> &glo2unique,
        const AssemblyMapSharedPtr   &pLocToGloMap)
    {
        LibUtilities::CommSharedPtr vComm =
            m_expList.lock()->GetSession()->GetComm();

        const int nDirDofs = pLocToGloMap->GetNumGlobalDirBndCoeffs();
        const int nHomDofs = glo2uniMap.num_elements() - nDirDofs;
        const int nProc    = vComm->GetSize();
        const int rank     = vComm->GetRank();
        // Number of unique degrees of freedom held by this process.
        m_nLocal = Vmath::Vsum(nHomDofs, glo2unique + nDirDofs, 1);
        m_reorderedMap.resize(nHomDofs);

        // Gather the number of homogeneous DOFs on each process and compute
        // the corresponding offsets into the list of universal IDs.
        int n, cnt;
        Array<OneD, int> localCounts(nProc, 0), localOffset(nProc, 0);
        localCounts[rank] = nHomDofs;
        vComm->AllReduce(localCounts, LibUtilities::ReduceSum);

        for (n = 1; n < nProc; ++n)
        {
            localOffset[n] = localOffset[n-1] + localCounts[n-1];
        }

        int totHomDofs = Vmath::Vsum(nProc, localCounts, 1);
        vector<unsigned int> allUniIds(totHomDofs, 0);
        // Assemble this process's universal IDs into the global list.
        for (n = 0; n < nHomDofs; ++n)
        {
            int gid = n + nDirDofs;
            allUniIds[n + localOffset[rank]] = glo2uniMap[gid];
        }
        // Gather all universal IDs, sort them, and number the distinct IDs
        // consecutively from zero.
        vComm->AllReduce(allUniIds, LibUtilities::ReduceSum);
        std::sort(allUniIds.begin(), allUniIds.end());
        map<int,int> uniIdReorder;

        for (cnt = n = 0; n < allUniIds.size(); ++n)
        {
            if (uniIdReorder.count(allUniIds[n]) > 0)
            {
                continue;
            }
            uniIdReorder[allUniIds[n]] = cnt++;
        }
        // Populate the reordering map for this process's homogeneous DOFs.
        for (n = 0; n < nHomDofs; ++n)
        {
            int gid   = n + nDirDofs;
            int uniId = glo2uniMap[gid];
            ASSERTL0(uniIdReorder.count(uniId) > 0, "Error in ordering");
            m_reorderedMap[n] = uniIdReorder[uniId];
        }
    }
    void GlobalLinSysPETSc::SetUpMatVec(int nGlobal, int nDir)
    {
        // Create the solution and RHS vectors.
        VecCreate        (PETSC_COMM_WORLD, &m_x);
        VecSetSizes      (m_x, m_nLocal, PETSC_DECIDE);
        VecSetFromOptions(m_x);
        VecDuplicate     (m_x, &m_b);
        if (m_matMult == ePETScMatMultShell)
        {
            // Store the matrix size and a pointer to this linear system in
            // context objects for the MatShell and PCShell callbacks.
            ShellCtx *ctx1 = new ShellCtx(), *ctx2 = new ShellCtx();
            ctx1->nGlobal = ctx2->nGlobal = nGlobal;
            ctx1->nDir    = ctx2->nDir    = nDir;
            ctx1->linSys  = ctx2->linSys  = this;
            // Create the shell matrix and register the Nektar++ callbacks
            // for matrix-vector products and context destruction.
            MatCreateShell      (PETSC_COMM_WORLD, m_nLocal, m_nLocal,
                                 PETSC_DETERMINE, PETSC_DETERMINE,
                                 (void *)ctx1, &m_matrix);
            MatShellSetOperation(m_matrix, MATOP_MULT,
                                 (void (*)(void))DoMatrixMultiply);
            MatShellSetOperation(m_matrix, MATOP_DESTROY,
                                 (void (*)(void))DoDestroyMatCtx);
            // Create a PCShell that applies the Nektar++ preconditioner.
            PCCreate(PETSC_COMM_WORLD, &m_pc);
#if PETSC_VERSION_GE(3,5,0)
            PCSetOperators(m_pc, m_matrix, m_matrix);
#else
            PCSetOperators(m_pc, m_matrix, m_matrix, SAME_NONZERO_PATTERN);
#endif
            PCSetType        (m_pc, PCSHELL);
            PCShellSetApply  (m_pc, DoPreconditioner);
            PCShellSetDestroy(m_pc, DoDestroyPCCtx);
            PCShellSetContext(m_pc, ctx2);
        }
        else
        {
            // Otherwise create an ordinary sparse matrix, configurable from
            // the command line via MatSetFromOptions.
            MatCreate        (PETSC_COMM_WORLD, &m_matrix);
            MatSetType       (m_matrix, MATAIJ);
            MatSetSizes      (m_matrix, m_nLocal, m_nLocal,
                              PETSC_DETERMINE, PETSC_DETERMINE);
            MatSetFromOptions(m_matrix);
            MatSetUp         (m_matrix);
        }
    }
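// ---------------------------------------------------------------------
// Illustrative sketch (not part of the Nektar++ sources): a standalone
// MatShell whose multiply callback scales the input vector, registered
// through MatShellSetOperation in the same way that DoMatrixMultiply is
// registered above. All names (MyShellMult, alpha, etc.) are local to
// this example.
// ---------------------------------------------------------------------
#include <petscmat.h>

// Callback invoked by MatMult on the shell matrix: y = alpha * x, where
// alpha is recovered from the user context attached to the shell.
static PetscErrorCode MyShellMult(Mat M, Vec x, Vec y)
{
    void *ptr;
    MatShellGetContext(M, &ptr);
    PetscScalar alpha = *(PetscScalar *)ptr;
    VecCopy (x, y);
    VecScale(y, alpha);
    return 0;
}

int main(int argc, char **argv)
{
    PetscInitialize(&argc, &argv, NULL, NULL);

    PetscScalar alpha = 2.0;
    Mat A;
    MatCreateShell(PETSC_COMM_WORLD, PETSC_DECIDE, PETSC_DECIDE, 4, 4,
                   (void *)&alpha, &A);
    MatShellSetOperation(A, MATOP_MULT, (void (*)(void))MyShellMult);

    Vec x, y;
    VecCreate  (PETSC_COMM_WORLD, &x);
    VecSetSizes(x, PETSC_DECIDE, 4);
    VecSetFromOptions(x);
    VecDuplicate(x, &y);

    VecSet (x, 1.0);
    MatMult(A, x, y);   // calls MyShellMult, so every entry of y is 2
    VecView(y, PETSC_VIEWER_STDOUT_WORLD);

    VecDestroy(&x);
    VecDestroy(&y);
    MatDestroy(&A);
    PetscFinalize();
    return 0;
}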
    void GlobalLinSysPETSc::DoNekppOperation(
        Vec &in, Vec &out, ShellCtx *ctx, bool precon)
    {
        const int nGlobal  = ctx->nGlobal;
        const int nDir     = ctx->nDir;
        const int nHomDofs = nGlobal - nDir;
        GlobalLinSysPETSc *linSys = ctx->linSys;

        // Scatter the PETSc input vector into the Nektar++ (local) ordering.
        VecScatterBegin(linSys->m_ctx, in, linSys->m_locVec,
                        INSERT_VALUES, SCATTER_FORWARD);
        VecScatterEnd  (linSys->m_ctx, in, linSys->m_locVec,
                        INSERT_VALUES, SCATTER_FORWARD);
        // Copy the scattered values into temporary Nektar++ storage.
        Array<OneD, NekDouble> tmpIn(nHomDofs), tmpOut(nHomDofs);
        PetscScalar *tmpLocIn;
        VecGetArray    (linSys->m_locVec, &tmpLocIn);
        Vmath::Vcopy   (nHomDofs, tmpLocIn, 1, &tmpIn[0], 1);
        VecRestoreArray(linSys->m_locVec, &tmpLocIn);

        // Apply either the preconditioner or the matrix-vector product.
        if (precon)
        {
            linSys->m_precon->DoPreconditioner(tmpIn, tmpOut);
        }
        else
        {
            linSys->v_DoMatrixMultiply(tmpIn, tmpOut);
        }

        // Scatter the result back into PETSc ordering and assemble.
        VecSetValues(out, nHomDofs, &linSys->m_reorderedMap[0],
                     &tmpOut[0], INSERT_VALUES);
        VecAssemblyBegin(out);
        VecAssemblyEnd  (out);
    }
    PetscErrorCode GlobalLinSysPETSc::DoMatrixMultiply(
        Mat M, Vec in, Vec out)
    {
        // Grab the shell context and forward to the Nektar++ multiply.
        void *ptr;
        MatShellGetContext(M, &ptr);
        DoNekppOperation(in, out, (ShellCtx *)ptr, false);
        return 0;
    }
    PetscErrorCode GlobalLinSysPETSc::DoPreconditioner(
        PC pc, Vec in, Vec out)
    {
        // Grab the shell context and forward to the Nektar++ preconditioner.
        void *ptr;
        PCShellGetContext(pc, &ptr);
        DoNekppOperation(in, out, (ShellCtx *)ptr, true);
        return 0;
    }
    PetscErrorCode GlobalLinSysPETSc::DoDestroyMatCtx(Mat M)
    {
        // Delete the shell context attached to the matrix.
        void *ptr;
        MatShellGetContext(M, &ptr);
        delete (ShellCtx *)ptr;
        return 0;
    }
    PetscErrorCode GlobalLinSysPETSc::DoDestroyPCCtx(PC pc)
    {
        // Delete the shell context attached to the preconditioner.
        void *ptr;
        PCShellGetContext(pc, &ptr);
        delete (ShellCtx *)ptr;
        return 0;
    }
    void GlobalLinSysPETSc::SetUpSolver(NekDouble tolerance)
    {
        KSPCreate(PETSC_COMM_WORLD, &m_ksp);
        KSPSetTolerances(
            m_ksp, tolerance, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
        KSPSetFromOptions(m_ksp);
#if PETSC_VERSION_GE(3,5,0)
        KSPSetOperators(m_ksp, m_matrix, m_matrix);
#else
        KSPSetOperators(m_ksp, m_matrix, m_matrix, SAME_NONZERO_PATTERN);
#endif

        if (m_matMult == ePETScMatMultShell)
        {
            KSPSetPC(m_ksp, m_pc);
        }
    }
}
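// ---------------------------------------------------------------------
// Illustrative sketch (not part of the Nektar++ sources): the same KSP
// workflow that SetUpSolver() and v_SolveLinearSystem() use above,
// applied to a small tridiagonal system assembled into an ordinary
// (non-shell) matrix. All names are local to this example.
// ---------------------------------------------------------------------
#include <petscksp.h>

int main(int argc, char **argv)
{
    PetscInitialize(&argc, &argv, NULL, NULL);

    const PetscInt n = 8;

    // Assemble a 1D Laplacian-style tridiagonal matrix.
    Mat A;
    MatCreate(PETSC_COMM_WORLD, &A);
    MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, n, n);
    MatSetFromOptions(A);
    MatSetUp(A);
    PetscInt lo, hi;
    MatGetOwnershipRange(A, &lo, &hi);
    for (PetscInt i = lo; i < hi; ++i)
    {
        if (i > 0)     MatSetValue(A, i, i - 1, -1.0, INSERT_VALUES);
        if (i < n - 1) MatSetValue(A, i, i + 1, -1.0, INSERT_VALUES);
        MatSetValue(A, i, i, 2.0, INSERT_VALUES);
    }
    MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd  (A, MAT_FINAL_ASSEMBLY);

    // Right-hand side and solution vectors.
    Vec x, b;
    VecCreate  (PETSC_COMM_WORLD, &x);
    VecSetSizes(x, PETSC_DECIDE, n);
    VecSetFromOptions(x);
    VecDuplicate(x, &b);
    VecSet(b, 1.0);

    // Solver setup mirrors SetUpSolver(): tolerances, runtime options and
    // operators attached to the KSP object.
    KSP ksp;
    KSPCreate(PETSC_COMM_WORLD, &ksp);
    KSPSetTolerances(ksp, 1e-9, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
    KSPSetFromOptions(ksp);
#if PETSC_VERSION_GE(3,5,0)
    KSPSetOperators(ksp, A, A);
#else
    KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN);
#endif

    KSPSolve(ksp, b, x);

    // Check convergence, as v_SolveLinearSystem() does after KSPSolve.
    KSPConvergedReason reason;
    KSPGetConvergedReason(ksp, &reason);
    PetscPrintf(PETSC_COMM_WORLD, "Converged reason: %s\n",
                KSPConvergedReasons[reason]);

    KSPDestroy(&ksp);
    MatDestroy(&A);
    VecDestroy(&x);
    VecDestroy(&b);
    PetscFinalize();
    return 0;
}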
Cross-references for the declarations used in the listing above:

VecScatter m_ctx
    PETSc scatter context that takes us between Nektar++ global ordering and PETSc vector ordering.
#define ASSERTL0(condition, msg)
    Level-0 assertion; aborts with msg if condition does not hold.
GlobalLinSysPETSc *linSys
    Pointer to the original calling object.
PreconditionerSharedPtr m_precon
    Preconditioner object.
boost::shared_ptr<AssemblyMap> AssemblyMapSharedPtr
    Shared pointer to an AssemblyMap object.
void CalculateReordering(const Array<OneD, const int> &glo2uniMap, const Array<OneD, const int> &glo2unique, const AssemblyMapSharedPtr &pLocToGloMap)
    Calculate a reordering of universal IDs for PETSc.
PC m_pc
    PCShell for the preconditioner.
void SetUpSolver(NekDouble tolerance)
    Set up the KSP solver object.
struct ShellCtx
    Internal struct for MatShell and PCShell calls that stores the current context for the callbacks.
Vec m_x
    PETSc vector objects used for local storage.
static PetscErrorCode DoPreconditioner(PC pc, Vec in, Vec out)
    Apply preconditioning using Nektar++ routines.
int nGlobal
    Number of global degrees of freedom.
boost::shared_ptr<Comm> CommSharedPtr
    Pointer to a Communicator object.
Mat m_matrix
    PETSc matrix object.
boost::shared_ptr<CommMpi> CommMpiSharedPtr
    Pointer to an MPI Communicator object.
int m_nLocal
    Number of unique degrees of freedom on this process.
KSP m_ksp
    KSP object that represents the solver system.
virtual void v_DoMatrixMultiply(const Array<OneD, const NekDouble> &pInput, Array<OneD, NekDouble> &pOutput) = 0
    Perform the matrix-vector multiplication; implemented by derived classes.
virtual ~GlobalLinSysPETSc()
    Clean up PETSc objects.
static PetscErrorCode DoMatrixMultiply(Mat M, Vec in, Vec out)
    Perform matrix multiplication using Nektar++ routines.
GlobalLinSysKey
    Describe a linear system.
GlobalLinSysPETSc
    A PETSc global linear system.
void SetUpScatter()
    Set up PETSc local (equivalent to Nektar++ global) and global (equivalent to universal) scatter maps.
int nDir
    Number of Dirichlet degrees of freedom.
virtual void v_SolveLinearSystem(const int pNumRows, const Array<OneD, const NekDouble> &pInput, Array<OneD, NekDouble> &pOutput, const AssemblyMapSharedPtr &locToGloMap, const int pNumDir)
    Solve the linear system using PETSc.
static PetscErrorCode DoDestroyMatCtx(Mat M)
    Destroy the matrix shell context object.
void SetUpMatVec(int nGlobal, int nDir)
    Construct PETSc matrix and vector handles.
static void DoNekppOperation(Vec &in, Vec &out, ShellCtx *ctx, bool precon)
    Perform either matrix multiplication or preconditioning using Nektar++ routines.
T Vsum(int n, const T *x, const int incx)
    Return the sum of the vector x.
std::vector<int> m_reorderedMap
    Reordering that takes universal IDs to a unique row in the PETSc matrix.
void Vcopy(int n, const T *x, const int incx, T *y, const int incy)
    Copy vector x to vector y.
static PetscErrorCode DoDestroyPCCtx(PC pc)
    Destroy the preconditioner context object.
PETScMatMult m_matMult
    Enumerator to select the matrix multiplication type.
PreconditionerSharedPtr CreatePrecon(AssemblyMapSharedPtr asmMap)
    Create a preconditioner object from the parameters defined in the supplied assembly map.
const boost::weak_ptr<ExpList> m_expList
    Local matrix system.