39#include "petscversion.h"
63 const std::shared_ptr<AssemblyMap> &pLocToGloMap)
74 PetscBool isInitialized;
75 PetscInitialized(&isInitialized);
79 std::string commType =
80 m_expList.lock()->GetSession()->GetComm()->GetType();
81 if (commType.find(
"MPI") != std::string::npos)
84 std::static_pointer_cast<LibUtilities::CommMpi>(
85 m_expList.lock()->GetSession()->GetComm());
86 PETSC_COMM_WORLD = comm->GetComm();
89 PetscInitializeNoArguments();
93 MatCreate(PETSC_COMM_WORLD, &
m_matrix);
106 PetscBool isFinalized;
107 PetscFinalized(&isFinalized);
116 MPI_Finalized(&mpiFinal);
117 isFinalized = isFinalized || mpiFinal ? PETSC_TRUE : PETSC_FALSE;
146 const int nHomDofs = pNumRows - pNumDir;
155 locToGloMap->Assemble(pInput, Glo);
162 VecAssemblyBegin(
m_b);
168 KSPConvergedReason reason;
169 KSPGetConvergedReason(
m_ksp, &reason);
170 ASSERTL0(reason > 0,
"PETSc solver diverged, reason is: " +
171 std::string(KSPConvergedReasons[reason]));
182 locToGloMap->GlobalToLocal(Glo, pOutput);
198 IS isGlobal, isLocal;
200 PETSC_COPY_VALUES, &isGlobal);
201 ISCreateStride(PETSC_COMM_SELF, nHomDofs, 0, 1, &isLocal);
204 VecCreate(PETSC_COMM_SELF, &
m_locVec);
205 VecSetSizes(
m_locVec, nHomDofs, PETSC_DECIDE);
212 ISDestroy(&isGlobal);
234 m_expList.lock()->GetSession()->GetComm();
236 const int nDirDofs = pLocToGloMap->GetNumGlobalDirBndCoeffs();
237 const int nHomDofs = glo2uniMap.size() - nDirDofs;
238 const int nProc = vComm->GetSize();
239 const int rank = vComm->GetRank();
249 localCounts[rank] = nHomDofs;
252 for (n = 1; n < nProc; ++n)
254 localOffset[n] = localOffset[n - 1] + localCounts[n - 1];
257 int totHomDofs =
Vmath::Vsum(nProc, localCounts, 1);
258 vector<unsigned int> allUniIds(totHomDofs, 0);
261 for (n = 0; n < nHomDofs; ++n)
263 int gid = n + nDirDofs;
264 allUniIds[n + localOffset[rank]] = glo2uniMap[gid];
270 std::sort(allUniIds.begin(), allUniIds.end());
271 map<int, int> uniIdReorder;
274 for (cnt = n = 0; n < allUniIds.size(); ++n)
276 if (uniIdReorder.count(allUniIds[n]) > 0)
281 uniIdReorder[allUniIds[n]] = cnt++;
285 for (n = 0; n < nHomDofs; ++n)
287 int gid = n + nDirDofs;
288 int uniId = glo2uniMap[gid];
289 ASSERTL0(uniIdReorder.count(uniId) > 0,
"Error in ordering");
308 VecCreate(PETSC_COMM_WORLD, &
m_x);
310 VecSetFromOptions(
m_x);
321 ctx1->
nGlobal = ctx2->nGlobal = nGlobal;
322 ctx1->
nDir = ctx2->nDir = nDir;
323 ctx1->
linSys = ctx2->linSys =
this;
327 PETSC_DETERMINE, (
void *)ctx1, &
m_matrix);
328 MatShellSetOperation(
m_matrix, MATOP_MULT,
330 MatShellSetOperation(
m_matrix, MATOP_DESTROY,
334 PCCreate(PETSC_COMM_WORLD, &
m_pc);
335#if PETSC_VERSION_GE(3, 5, 0)
340 PCSetType(
m_pc, PCSHELL);
343 PCShellSetContext(
m_pc, ctx2);
349 MatCreate(PETSC_COMM_WORLD, &
m_matrix);
379 const int nGlobal = ctx->
nGlobal;
380 const int nDir = ctx->
nDir;
381 const int nHomDofs = nGlobal - nDir;
387 VecScatterBegin(linSys->
m_ctx, in, linSys->
m_locVec, INSERT_VALUES,
389 VecScatterEnd(linSys->
m_ctx, in, linSys->
m_locVec, INSERT_VALUES,
396 PetscScalar *tmpLocIn;
397 VecGetArray(linSys->
m_locVec, &tmpLocIn);
399 VecRestoreArray(linSys->
m_locVec, &tmpLocIn);
404 linSys->
m_precon->DoPreconditioner(tmpIn, tmpOut);
412 VecSetValues(out, nHomDofs, &linSys->
m_reorderedMap[0], &tmpOut[0],
414 VecAssemblyBegin(out);
434 MatShellGetContext(M, &ptr);
458 PCShellGetContext(pc, &ptr);
479 MatShellGetContext(M, &ptr);
497 PCShellGetContext(pc, &ptr);
513 KSPCreate(PETSC_COMM_WORLD, &
m_ksp);
514 KSPSetTolerances(
m_ksp, tolerance, PETSC_DEFAULT, PETSC_DEFAULT,
516 KSPSetFromOptions(
m_ksp);
517#if PETSC_VERSION_GE(3, 5, 0)
#define ASSERTL0(condition, msg)
static std::string RegisterEnumValue(std::string pEnum, std::string pString, int pEnumValue)
Registers an enumeration value.
static std::string RegisterDefaultSolverInfo(const std::string &pName, const std::string &pValue)
Registers the default string value of a solver info property.
const std::weak_ptr< ExpList > m_expList
Local Matrix System.
PreconditionerSharedPtr CreatePrecon(AssemblyMapSharedPtr asmMap)
Create a preconditioner object from the parameters defined in the supplied assembly map.
Describe a linear system.
A PETSc global linear system.
Vec m_x
PETSc vector objects used for local storage.
PETScMatMult m_matMult
Enumerator to select matrix multiplication type.
static void DoNekppOperation(Vec &in, Vec &out, ShellCtx *ctx, bool precon)
Perform either matrix multiplication or preconditioning using Nektar++ routines.
std::vector< int > m_reorderedMap
Reordering that takes universal IDs to a unique row in the PETSc matrix.
void SetUpScatter()
Set up PETSc local (equivalent to Nektar++ global) and global (equivalent to universal) scatter maps.
virtual void v_DoMatrixMultiply(const Array< OneD, const NekDouble > &pInput, Array< OneD, NekDouble > &pOutput)=0
Mat m_matrix
PETSc matrix object.
PreconditionerSharedPtr m_precon
static PetscErrorCode DoPreconditioner(PC pc, Vec in, Vec out)
Apply preconditioning using Nektar++ routines.
void SetUpSolver(NekDouble tolerance)
Set up KSP solver object.
GlobalLinSysPETSc(const GlobalLinSysKey &pKey, const std::weak_ptr< ExpList > &pExp, const std::shared_ptr< AssemblyMap > &pLocToGloMap)
Constructor for full direct matrix solve.
static std::string matMult
static PetscErrorCode DoMatrixMultiply(Mat M, Vec in, Vec out)
Perform matrix multiplication using Nektar++ routines.
void SetUpMatVec(int nGlobal, int nDir)
Construct PETSc matrix and vector handles.
static PetscErrorCode DoDestroyMatCtx(Mat M)
Destroy matrix shell context object.
VecScatter m_ctx
PETSc scatter context that takes us between Nektar++ global ordering and PETSc vector ordering.
static std::string matMultIds[]
virtual ~GlobalLinSysPETSc()
Clean up PETSc objects.
PC m_pc
PCShell for preconditioner.
void CalculateReordering(const Array< OneD, const int > &glo2uniMap, const Array< OneD, const int > &glo2unique, const AssemblyMapSharedPtr &pLocToGloMap)
Calculate a reordering of universal IDs for PETSc.
virtual void v_SolveLinearSystem(const int pNumRows, const Array< OneD, const NekDouble > &pInput, Array< OneD, NekDouble > &pOutput, const AssemblyMapSharedPtr &locToGloMap, const int pNumDir) override
Solve linear system using PETSc.
KSP m_ksp
KSP object that represents solver system.
static PetscErrorCode DoDestroyPCCtx(PC pc)
Destroy preconditioner context object.
int m_nLocal
Number of unique degrees of freedom on this process.
std::shared_ptr< CommMpi > CommMpiSharedPtr
Pointer to a Communicator object.
std::shared_ptr< Comm > CommSharedPtr
Pointer to a Communicator object.
std::shared_ptr< AssemblyMap > AssemblyMapSharedPtr
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
T Vsum(int n, const T *x, const int incx)
Return the sum of the elements of x.
void Zero(int n, T *x, const int incx)
Zero vector.
void Vcopy(int n, const T *x, const int incx, T *y, const int incy)
Internal struct for MatShell and PCShell calls to store current context for callback.
int nDir
Number of Dirichlet degrees of freedom.
int nGlobal
Number of global degrees of freedom.
GlobalLinSysPETSc * linSys
Pointer to the original calling object.