39 #include "petscversion.h"
45 namespace MultiRegions
47 std::string GlobalLinSysPETSc::matMult =
48 LibUtilities::SessionReader::RegisterDefaultSolverInfo(
"PETScMatMult",
50 std::string GlobalLinSysPETSc::matMultIds[] = {
51 LibUtilities::SessionReader::RegisterEnumValue(
53 LibUtilities::SessionReader::RegisterEnumValue(
61 GlobalLinSysPETSc::GlobalLinSysPETSc(
63 const std::shared_ptr<AssemblyMap> &pLocToGloMap)
74 PetscBool isInitialized;
75 PetscInitialized(&isInitialized);
79 std::string commType =
80 m_expList.lock()->GetSession()->GetComm()->GetType();
81 if (commType.find(
"MPI") != std::string::npos)
84 std::static_pointer_cast<LibUtilities::CommMpi>(
85 m_expList.lock()->GetSession()->GetComm());
86 PETSC_COMM_WORLD = comm->GetComm();
89 PetscInitializeNoArguments();
93 MatCreate(PETSC_COMM_WORLD, &
m_matrix);
106 PetscBool isFinalized;
107 PetscFinalized(&isFinalized);
114 #ifdef NEKTAR_USE_MPI
116 MPI_Finalized(&mpiFinal);
117 isFinalized = isFinalized || mpiFinal ? PETSC_TRUE : PETSC_FALSE;
146 const int nHomDofs = pNumRows - pNumDir;
159 VecAssemblyBegin(
m_b);
165 KSPConvergedReason reason;
166 KSPGetConvergedReason(
m_ksp, &reason);
167 ASSERTL0(reason > 0,
"PETSc solver diverged, reason is: " +
168 std::string(KSPConvergedReasons[reason]));
193 IS isGlobal, isLocal;
195 PETSC_COPY_VALUES, &isGlobal);
196 ISCreateStride(PETSC_COMM_SELF, nHomDofs, 0, 1, &isLocal);
199 VecCreate(PETSC_COMM_SELF, &
m_locVec);
200 VecSetSizes(
m_locVec, nHomDofs, PETSC_DECIDE);
207 ISDestroy(&isGlobal);
229 m_expList.lock()->GetSession()->GetComm();
231 const int nDirDofs = pLocToGloMap->GetNumGlobalDirBndCoeffs();
232 const int nHomDofs = glo2uniMap.size() - nDirDofs;
233 const int nProc = vComm->GetSize();
234 const int rank = vComm->GetRank();
244 localCounts[rank] = nHomDofs;
247 for (n = 1; n < nProc; ++n)
249 localOffset[n] = localOffset[n - 1] + localCounts[n - 1];
252 int totHomDofs =
Vmath::Vsum(nProc, localCounts, 1);
253 vector<unsigned int> allUniIds(totHomDofs, 0);
256 for (n = 0; n < nHomDofs; ++n)
258 int gid = n + nDirDofs;
259 allUniIds[n + localOffset[rank]] = glo2uniMap[gid];
265 std::sort(allUniIds.begin(), allUniIds.end());
266 map<int, int> uniIdReorder;
269 for (cnt = n = 0; n < allUniIds.size(); ++n)
271 if (uniIdReorder.count(allUniIds[n]) > 0)
276 uniIdReorder[allUniIds[n]] = cnt++;
280 for (n = 0; n < nHomDofs; ++n)
282 int gid = n + nDirDofs;
283 int uniId = glo2uniMap[gid];
284 ASSERTL0(uniIdReorder.count(uniId) > 0,
"Error in ordering");
303 VecCreate(PETSC_COMM_WORLD, &
m_x);
305 VecSetFromOptions(
m_x);
316 ctx1->
nGlobal = ctx2->nGlobal = nGlobal;
317 ctx1->
nDir = ctx2->nDir = nDir;
318 ctx1->
linSys = ctx2->linSys =
this;
322 PETSC_DETERMINE, (
void *)ctx1, &
m_matrix);
323 MatShellSetOperation(
m_matrix, MATOP_MULT,
325 MatShellSetOperation(
m_matrix, MATOP_DESTROY,
329 PCCreate(PETSC_COMM_WORLD, &
m_pc);
330 #if PETSC_VERSION_GE(3, 5, 0)
335 PCSetType(
m_pc, PCSHELL);
338 PCShellSetContext(
m_pc, ctx2);
344 MatCreate(PETSC_COMM_WORLD, &
m_matrix);
374 const int nGlobal = ctx->
nGlobal;
375 const int nDir = ctx->
nDir;
376 const int nHomDofs = nGlobal - nDir;
382 VecScatterBegin(linSys->
m_ctx, in, linSys->
m_locVec, INSERT_VALUES,
384 VecScatterEnd(linSys->
m_ctx, in, linSys->
m_locVec, INSERT_VALUES,
391 PetscScalar *tmpLocIn;
392 VecGetArray(linSys->
m_locVec, &tmpLocIn);
394 VecRestoreArray(linSys->
m_locVec, &tmpLocIn);
399 linSys->
m_precon->DoPreconditioner(tmpIn, tmpOut);
407 VecSetValues(out, nHomDofs, &linSys->
m_reorderedMap[0], &tmpOut[0],
409 VecAssemblyBegin(out);
429 MatShellGetContext(M, &ptr);
453 PCShellGetContext(pc, &ptr);
474 MatShellGetContext(M, &ptr);
492 PCShellGetContext(pc, &ptr);
508 KSPCreate(PETSC_COMM_WORLD, &
m_ksp);
509 KSPSetTolerances(
m_ksp, tolerance, PETSC_DEFAULT, PETSC_DEFAULT,
511 KSPSetFromOptions(
m_ksp);
512 #if PETSC_VERSION_GE(3, 5, 0)
#define ASSERTL0(condition, msg)
const std::weak_ptr< ExpList > m_expList
Local Matrix System.
PreconditionerSharedPtr CreatePrecon(AssemblyMapSharedPtr asmMap)
Create a preconditioner object from the parameters defined in the supplied assembly map.
Describe a linear system.
A PETSc global linear system.
Vec m_x
PETSc vector objects used for local storage.
PETScMatMult m_matMult
Enumerator to select matrix multiplication type.
static void DoNekppOperation(Vec &in, Vec &out, ShellCtx *ctx, bool precon)
Perform either matrix multiplication or preconditioning using Nektar++ routines.
std::vector< int > m_reorderedMap
Reordering that takes universal IDs to a unique row in the PETSc matrix.
void SetUpScatter()
Set up PETSc local (equivalent to Nektar++ global) and global (equivalent to universal) scatter maps.
virtual void v_DoMatrixMultiply(const Array< OneD, const NekDouble > &pInput, Array< OneD, NekDouble > &pOutput)=0
Mat m_matrix
PETSc matrix object.
PreconditionerSharedPtr m_precon
static PetscErrorCode DoPreconditioner(PC pc, Vec in, Vec out)
Apply preconditioning using Nektar++ routines.
void SetUpSolver(NekDouble tolerance)
Set up KSP solver object.
static PetscErrorCode DoMatrixMultiply(Mat M, Vec in, Vec out)
Perform matrix multiplication using Nektar++ routines.
void SetUpMatVec(int nGlobal, int nDir)
Construct PETSc matrix and vector handles.
static PetscErrorCode DoDestroyMatCtx(Mat M)
Destroy matrix shell context object.
VecScatter m_ctx
PETSc scatter context that takes us between Nektar++ global ordering and PETSc vector ordering.
virtual ~GlobalLinSysPETSc()
Clean up PETSc objects.
PC m_pc
PCShell for preconditioner.
void CalculateReordering(const Array< OneD, const int > &glo2uniMap, const Array< OneD, const int > &glo2unique, const AssemblyMapSharedPtr &pLocToGloMap)
Calculate a reordering of universal IDs for PETSc.
virtual void v_SolveLinearSystem(const int pNumRows, const Array< OneD, const NekDouble > &pInput, Array< OneD, NekDouble > &pOutput, const AssemblyMapSharedPtr &locToGloMap, const int pNumDir) override
Solve linear system using PETSc.
KSP m_ksp
KSP object that represents solver system.
static PetscErrorCode DoDestroyPCCtx(PC pc)
Destroy preconditioner context object.
int m_nLocal
Number of unique degrees of freedom on this process.
std::shared_ptr< CommMpi > CommMpiSharedPtr
Pointer to a Communicator object.
std::shared_ptr< Comm > CommSharedPtr
Pointer to a Communicator object.
std::shared_ptr< AssemblyMap > AssemblyMapSharedPtr
The above copyright notice and this permission notice shall be included.
T Vsum(int n, const T *x, const int incx)
Return the sum of the elements of x, i.e. sum(x).
void Vcopy(int n, const T *x, const int incx, T *y, const int incy)
Internal struct for MatShell and PCShell calls to store current context for callback.
int nDir
Number of Dirichlet degrees of freedom.
int nGlobal
Number of global degrees of freedom.
GlobalLinSysPETSc * linSys
Pointer to the original calling object.