#include "petscversion.h"

namespace MultiRegions
{
std::string GlobalLinSysPETSc::matMult =
    LibUtilities::SessionReader::RegisterDefaultSolverInfo(
        "PETScMatMult", "Sparse");
std::string GlobalLinSysPETSc::matMultIds[] = {
    LibUtilities::SessionReader::RegisterEnumValue(
        "PETScMatMult", "Sparse", MultiRegions::ePETScMatMultSparse),
    LibUtilities::SessionReader::RegisterEnumValue(
        "PETScMatMult", "Shell", MultiRegions::ePETScMatMultShell)};
GlobalLinSysPETSc::GlobalLinSysPETSc(
    const GlobalLinSysKey &pKey,
    const std::weak_ptr<ExpList> &pExp,
    const std::shared_ptr<AssemblyMap> &pLocToGloMap)
    : GlobalLinSys(pKey, pExp, pLocToGloMap)
{
    // Check whether PETSc has already been initialised.
    PetscBool isInitialized;
    PetscInitialized(&isInitialized);
    if (!isInitialized)
    {
        // If the Nektar++ communicator is MPI-based, hand its MPI
        // communicator to PETSc before initialising.
        std::string commType =
            m_expList.lock()->GetSession()->GetComm()->GetType();
        if (commType.find("MPI") != std::string::npos)
        {
            LibUtilities::CommMpiSharedPtr comm =
                std::static_pointer_cast<LibUtilities::CommMpi>(
                    m_expList.lock()->GetSession()->GetComm());
            PETSC_COMM_WORLD = comm->GetComm();
        }
        PetscInitializeNoArguments();
    }

    // Create the PETSc matrix handle.
    MatCreate(PETSC_COMM_WORLD, &m_matrix);
}
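// A minimal standalone sketch of the initialisation pattern used above,
// outside Nektar++: when MPI is already running, PETSC_COMM_WORLD is pointed
// at the existing communicator *before* PETSc is initialised. All names here
// are illustrative.
#include <mpi.h>
#include <petscsys.h>

int main(int argc, char **argv)
{
    MPI_Init(&argc, &argv);

    PetscBool isInitialized;
    PetscInitialized(&isInitialized);
    if (!isInitialized)
    {
        PETSC_COMM_WORLD = MPI_COMM_WORLD; // reuse the existing communicator
        PetscInitializeNoArguments();
    }

    // ... build and solve systems with PETSc ...

    PetscFinalize();
    MPI_Finalize();
    return 0;
}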
// GlobalLinSysPETSc::~GlobalLinSysPETSc(): clean up PETSc objects, unless
// PETSc (or MPI itself) has already been finalised.
PetscBool isFinalized;
PetscFinalized(&isFinalized);
#ifdef NEKTAR_USE_MPI
int mpiFinal = 0;
MPI_Finalized(&mpiFinal);
isFinalized = isFinalized || mpiFinal ? PETSC_TRUE : PETSC_FALSE;
#endif
// GlobalLinSysPETSc::v_SolveLinearSystem(): solve the linear system using
// the PETSc KSP object.
const int nHomDofs = pNumRows - pNumDir;

// Populate the RHS vector from the input, skipping Dirichlet DOFs.
VecSetValues(m_b, nHomDofs, &m_reorderedMap[0],
             &pInput[pNumDir], INSERT_VALUES);
VecAssemblyBegin(m_b);
VecAssemblyEnd  (m_b);

// Solve and check that the Krylov iteration converged.
KSPSolve(m_ksp, m_b, m_x);
KSPConvergedReason reason;
KSPGetConvergedReason(m_ksp, &reason);
ASSERTL0(reason > 0,
         "PETSc solver diverged, reason is: " +
         std::string(KSPConvergedReasons[reason]));

// Scatter the PETSc solution back into Nektar++ global ordering.
VecScatterBegin(m_ctx, m_x, m_locVec, INSERT_VALUES, SCATTER_FORWARD);
VecScatterEnd  (m_ctx, m_x, m_locVec, INSERT_VALUES, SCATTER_FORWARD);
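// Hedged sketch of the solve/convergence-check pattern in isolation, on a
// trivial 1x1 sequential system; every name below is illustrative and error
// checking (CHKERRQ) is omitted for brevity.
#include <petscksp.h>

static PetscErrorCode solveAndCheckExample()
{
    Mat A;
    Vec b, x;
    KSP ksp;

    MatCreateSeqAIJ(PETSC_COMM_SELF, 1, 1, 1, NULL, &A);
    MatSetValue(A, 0, 0, 2.0, INSERT_VALUES);
    MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd  (A, MAT_FINAL_ASSEMBLY);

    VecCreateSeq(PETSC_COMM_SELF, 1, &b);
    VecDuplicate(b, &x);
    VecSetValue (b, 0, 1.0, INSERT_VALUES);
    VecAssemblyBegin(b);
    VecAssemblyEnd  (b);

    KSPCreate(PETSC_COMM_SELF, &ksp);
    KSPSetOperators(ksp, A, A);
    KSPSolve(ksp, b, x);

    // KSPConvergedReasons maps the enum value to a readable string, as in
    // the diagnostic message above.
    KSPConvergedReason reason;
    KSPGetConvergedReason(ksp, &reason);
    if (reason < 0)
    {
        PetscPrintf(PETSC_COMM_SELF, "Diverged: %s\n",
                    KSPConvergedReasons[reason]);
    }
    return 0;
}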
// GlobalLinSysPETSc::SetUpScatter(): build index sets and a local vector,
// then a scatter context between PETSc and Nektar++ global orderings.
IS isGlobal, isLocal;
ISCreateGeneral(PETSC_COMM_SELF, nHomDofs, &m_reorderedMap[0],
                PETSC_COPY_VALUES, &isGlobal);
ISCreateStride (PETSC_COMM_SELF, nHomDofs, 0, 1, &isLocal);

VecCreate  (PETSC_COMM_SELF, &m_locVec);
VecSetSizes(m_locVec, nHomDofs, PETSC_DECIDE);

VecScatterCreate(m_x, isGlobal, m_locVec, isLocal, &m_ctx);

ISDestroy(&isGlobal);
ISDestroy(&isLocal);
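// Hedged sketch of this scatter construction in isolation: a general index
// set picks rows of a (possibly distributed) vector, a stride index set
// addresses a small sequential vector, and VecScatterCreate builds the
// communication pattern between them. All names are illustrative.
#include <petscvec.h>

static PetscErrorCode scatterExample(Vec global)
{
    const PetscInt rows[2] = {0, 3};   // global rows we want locally
    IS         isGlobal, isLocal;
    Vec        local;
    VecScatter ctx;

    ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &isGlobal);
    ISCreateStride (PETSC_COMM_SELF, 2, 0, 1, &isLocal);

    VecCreateSeq(PETSC_COMM_SELF, 2, &local);

    VecScatterCreate(global, isGlobal, local, isLocal, &ctx);
    VecScatterBegin (ctx, global, local, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd   (ctx, global, local, INSERT_VALUES, SCATTER_FORWARD);

    ISDestroy(&isGlobal);
    ISDestroy(&isLocal);
    VecScatterDestroy(&ctx);
    VecDestroy(&local);
    return 0;
}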
// GlobalLinSysPETSc::CalculateReordering(): map universal IDs to unique,
// consecutive PETSc matrix rows.
LibUtilities::CommSharedPtr vComm =
    m_expList.lock()->GetSession()->GetComm();

const int nDirDofs = pLocToGloMap->GetNumGlobalDirBndCoeffs();
const int nHomDofs = glo2uniMap.size() - nDirDofs;
const int nProc    = vComm->GetSize();
const int rank     = vComm->GetRank();

// Share each rank's homogeneous (non-Dirichlet) DOF count and offset.
Array<OneD, int> localCounts(nProc, 0), localOffset(nProc, 0);
localCounts[rank] = nHomDofs;
vComm->AllReduce(localCounts, LibUtilities::ReduceSum);
for (n = 1; n < nProc; ++n)
{
    localOffset[n] = localOffset[n-1] + localCounts[n-1];
}

// Gather every rank's universal IDs into one list.
int totHomDofs = Vmath::Vsum(nProc, localCounts, 1);
vector<unsigned int> allUniIds(totHomDofs, 0);
for (n = 0; n < nHomDofs; ++n)
{
    int gid = n + nDirDofs;
    allUniIds[n + localOffset[rank]] = glo2uniMap[gid];
}
vComm->AllReduce(allUniIds, LibUtilities::ReduceSum);

// Sort the universal IDs and number each unique ID consecutively.
std::sort(allUniIds.begin(), allUniIds.end());
map<int, int> uniIdReorder;
for (cnt = n = 0; n < allUniIds.size(); ++n)
{
    if (uniIdReorder.count(allUniIds[n]) > 0)
    {
        continue;
    }
    uniIdReorder[allUniIds[n]] = cnt++;
}

// Populate the reordering map for this rank's degrees of freedom.
for (n = 0; n < nHomDofs; ++n)
{
    int gid   = n + nDirDofs;
    int uniId = glo2uniMap[gid];
    ASSERTL0(uniIdReorder.count(uniId) > 0, "Error in ordering");
    m_reorderedMap[n] = uniIdReorder[uniId];
}
// GlobalLinSysPETSc::SetUpMatVec(): construct PETSc matrix and vector
// handles, either as a matrix-free shell or as an ordinary sparse matrix.
VecCreate        (PETSC_COMM_WORLD, &m_x);
VecSetSizes      (m_x, m_nLocal, PETSC_DECIDE);
VecSetFromOptions(m_x);
VecDuplicate     (m_x, &m_b);

if (m_matMult == ePETScMatMultShell)
{
    // Store the system size and a pointer to this object in shell
    // contexts so the static callbacks can reach Nektar++ routines.
    ShellCtx *ctx1 = new ShellCtx(), *ctx2 = new ShellCtx();
    ctx1->nGlobal = ctx2->nGlobal = nGlobal;
    ctx1->nDir    = ctx2->nDir    = nDir;
    ctx1->linSys  = ctx2->linSys  = this;

    // Set up the MatShell object and register its callbacks.
    MatCreateShell      (PETSC_COMM_WORLD, m_nLocal, m_nLocal,
                         PETSC_DETERMINE, PETSC_DETERMINE,
                         (void *)ctx1, &m_matrix);
    MatShellSetOperation(m_matrix, MATOP_MULT,
                         (void (*)(void))DoMatrixMultiply);
    MatShellSetOperation(m_matrix, MATOP_DESTROY,
                         (void (*)(void))DoDestroyMatCtx);

    // Create a PCShell to go alongside the MatShell.
    PCCreate         (PETSC_COMM_WORLD, &m_pc);
#if PETSC_VERSION_GE(3,5,0)
    PCSetOperators   (m_pc, m_matrix, m_matrix);
#else
    PCSetOperators   (m_pc, m_matrix, m_matrix, SAME_NONZERO_PATTERN);
#endif
    PCSetType        (m_pc, PCSHELL);
    PCShellSetApply  (m_pc, DoPreconditioner);
    PCShellSetDestroy(m_pc, DoDestroyPCCtx);
    PCShellSetContext(m_pc, ctx2);
}
else
{
    // Otherwise create an ordinary (sparse) PETSc matrix.
    MatCreate  (PETSC_COMM_WORLD, &m_matrix);
    MatSetSizes(m_matrix, m_nLocal, m_nLocal,
                PETSC_DETERMINE, PETSC_DETERMINE);
}
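// Hedged sketch of the MatShell pattern in isolation: a user context is
// attached to the shell matrix and a static callback implements y = A*x
// (here simply y = scale*x). Names are illustrative, not part of Nektar++;
// the context would normally be freed in a MATOP_DESTROY callback.
#include <petscmat.h>

struct MyCtx
{
    PetscScalar scale;
};

static PetscErrorCode MyMult(Mat A, Vec x, Vec y)
{
    void *ptr;
    MatShellGetContext(A, &ptr);
    MyCtx *ctx = static_cast<MyCtx *>(ptr);
    VecCopy (x, y);
    VecScale(y, ctx->scale);
    return 0;   // PETSc callbacks must return 0 on success
}

static PetscErrorCode shellExample(PetscInt nLocal, Mat *A)
{
    MyCtx *ctx = new MyCtx();
    ctx->scale = 2.0;

    MatCreateShell(PETSC_COMM_WORLD, nLocal, nLocal,
                   PETSC_DETERMINE, PETSC_DETERMINE, (void *)ctx, A);
    MatShellSetOperation(*A, MATOP_MULT, (void (*)(void))MyMult);
    return 0;
}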
void GlobalLinSysPETSc::DoNekppOperation(
    Vec &in, Vec &out, ShellCtx *ctx, bool precon)
{
    const int nGlobal  = ctx->nGlobal;
    const int nDir     = ctx->nDir;
    const int nHomDofs = nGlobal - nDir;
    GlobalLinSysPETSc *linSys = ctx->linSys;

    // Scatter the PETSc input vector into Nektar++ global ordering.
    VecScatterBegin(linSys->m_ctx, in, linSys->m_locVec,
                    INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd  (linSys->m_ctx, in, linSys->m_locVec,
                    INSERT_VALUES, SCATTER_FORWARD);

    // Copy the scattered values into temporary Nektar++ storage.
    Array<OneD, NekDouble> tmpIn(nHomDofs), tmpOut(nHomDofs);
    PetscScalar *tmpLocIn;
    VecGetArray    (linSys->m_locVec, &tmpLocIn);
    Vmath::Vcopy   (nHomDofs, tmpLocIn, 1, &tmpIn[0], 1);
    VecRestoreArray(linSys->m_locVec, &tmpLocIn);

    // Apply either the preconditioner or the operator in Nektar++.
    if (precon)
    {
        linSys->m_precon->DoPreconditioner(tmpIn, tmpOut);
    }
    else
    {
        linSys->v_DoMatrixMultiply(tmpIn, tmpOut);
    }

    // Copy the result back into the PETSc output vector.
    VecSetValues(out, nHomDofs, &linSys->m_reorderedMap[0],
                 &tmpOut[0], INSERT_VALUES);
    VecAssemblyBegin(out);
    VecAssemblyEnd  (out);
}
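// Hedged sketch of the array-access pattern used above: VecGetArray exposes
// the raw storage of a (sequential) vector for copying into user storage,
// and VecRestoreArray must be called afterwards. Names are illustrative.
#include <petscvec.h>
#include <vector>

static PetscErrorCode copyExample(Vec local, std::vector<PetscScalar> &out)
{
    PetscInt n;
    VecGetLocalSize(local, &n);
    out.resize(n);

    PetscScalar *raw;
    VecGetArray(local, &raw);
    for (PetscInt i = 0; i < n; ++i)
    {
        out[i] = raw[i];
    }
    VecRestoreArray(local, &raw);
    return 0;
}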
// GlobalLinSysPETSc::DoMatrixMultiply(Mat M, Vec in, Vec out) and
// GlobalLinSysPETSc::DoPreconditioner(PC pc, Vec in, Vec out) are the static
// callbacks registered above; each recovers its ShellCtx and forwards to
// DoNekppOperation. The destroy callbacks recover and delete the context.
MatShellGetContext(M, &ptr);   // in DoMatrixMultiply and DoDestroyMatCtx
PCShellGetContext (pc, &ptr);  // in DoPreconditioner and DoDestroyPCCtx
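// Hedged sketch of the matching PCShell pattern: the preconditioner context
// is attached with PCShellSetContext and recovered inside the apply callback
// via PCShellGetContext. Names are illustrative.
#include <petscpc.h>

struct MyPCCtx
{
    PetscScalar diagInverse;
};

static PetscErrorCode MyPCApply(PC pc, Vec in, Vec out)
{
    void *ptr;
    PCShellGetContext(pc, &ptr);
    MyPCCtx *ctx = static_cast<MyPCCtx *>(ptr);
    VecCopy (in, out);
    VecScale(out, ctx->diagInverse);   // e.g. a Jacobi-like scaling
    return 0;
}

static PetscErrorCode pcShellExample(PC pc, MyPCCtx *ctx)
{
    PCSetType       (pc, PCSHELL);
    PCShellSetApply (pc, MyPCApply);
    PCShellSetContext(pc, ctx);
    return 0;
}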
// GlobalLinSysPETSc::SetUpSolver(): set up the KSP solver object.
KSPCreate(PETSC_COMM_WORLD, &m_ksp);
KSPSetTolerances(
    m_ksp, tolerance, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetFromOptions(m_ksp);
#if PETSC_VERSION_GE(3,5,0)
KSPSetOperators(m_ksp, m_matrix, m_matrix);
#else
KSPSetOperators(m_ksp, m_matrix, m_matrix, SAME_NONZERO_PATTERN);
#endif
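// Hedged sketch of the solver-setup pattern in isolation: the tolerance is
// fixed in code, while everything else (Krylov method, monitors, ...) can be
// overridden at run time thanks to KSPSetFromOptions, e.g. with
// "-ksp_type gmres -ksp_monitor" on the command line. Names are illustrative.
#include <petscksp.h>

static PetscErrorCode solverSetupExample(Mat A, PC pc, PetscReal tol)
{
    KSP ksp;
    KSPCreate(PETSC_COMM_WORLD, &ksp);
    KSPSetOperators(ksp, A, A);
    KSPSetTolerances(ksp, tol, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
    KSPSetFromOptions(ksp);
    KSPSetPC(ksp, pc);   // attach a custom (e.g. shell) preconditioner
    return 0;
}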
Symbols referenced in the listing above:

ASSERTL0(condition, msg): Nektar++ level-0 assertion macro.
const std::weak_ptr<ExpList> m_expList: Local matrix system.
PreconditionerSharedPtr CreatePrecon(AssemblyMapSharedPtr asmMap): Create a preconditioner object from the parameters defined in the supplied assembly map.
GlobalLinSysKey: Describes a linear system.
GlobalLinSysPETSc: A PETSc global linear system.
Vec m_x, m_b, m_locVec: PETSc vector objects used for local storage.
virtual void v_SolveLinearSystem(const int pNumRows, const Array<OneD, const NekDouble> &pInput, Array<OneD, NekDouble> &pOutput, const AssemblyMapSharedPtr &locToGloMap, const int pNumDir): Solve the linear system using PETSc.
PETScMatMult m_matMult: Enumerator to select the matrix multiplication type.
static void DoNekppOperation(Vec &in, Vec &out, ShellCtx *ctx, bool precon): Perform either matrix multiplication or preconditioning using Nektar++ routines.
std::vector<int> m_reorderedMap: Reordering that takes universal IDs to a unique row in the PETSc matrix.
void SetUpScatter(): Set up PETSc local (equivalent to Nektar++ global) and global (equivalent to universal) scatter maps.
virtual void v_DoMatrixMultiply(const Array<OneD, const NekDouble> &pInput, Array<OneD, NekDouble> &pOutput) = 0: Pure virtual matrix-vector multiply implemented by subclasses.
Mat m_matrix: PETSc matrix object.
PreconditionerSharedPtr m_precon: Preconditioner object.
static PetscErrorCode DoPreconditioner(PC pc, Vec in, Vec out): Apply preconditioning using Nektar++ routines.
void SetUpSolver(NekDouble tolerance): Set up the KSP solver object.
static PetscErrorCode DoMatrixMultiply(Mat M, Vec in, Vec out): Perform matrix multiplication using Nektar++ routines.
void SetUpMatVec(int nGlobal, int nDir): Construct PETSc matrix and vector handles.
static PetscErrorCode DoDestroyMatCtx(Mat M): Destroy the matrix shell context object.
VecScatter m_ctx: PETSc scatter context that takes us between Nektar++ global ordering and PETSc vector ordering.
virtual ~GlobalLinSysPETSc(): Clean up PETSc objects.
PC m_pc: PCShell for the preconditioner.
void CalculateReordering(const Array<OneD, const int> &glo2uniMap, const Array<OneD, const int> &glo2unique, const AssemblyMapSharedPtr &pLocToGloMap): Calculate a reordering of universal IDs for PETSc.
KSP m_ksp: KSP object that represents the solver system.
static PetscErrorCode DoDestroyPCCtx(PC pc): Destroy the preconditioner context object.
int m_nLocal: Number of unique degrees of freedom on this process.
std::shared_ptr<CommMpi> CommMpiSharedPtr: Pointer to a Communicator object.
std::shared_ptr<Comm> CommSharedPtr: Pointer to a Communicator object.
std::shared_ptr<AssemblyMap> AssemblyMapSharedPtr: Shared pointer to an AssemblyMap object.
T Vmath::Vsum(int n, const T *x, const int incx): Return the sum of the vector x.
void Vmath::Vcopy(int n, const T *x, const int incx, T *y, const int incy): Copy the vector x into y.

struct ShellCtx: Internal struct for MatShell and PCShell calls to store the current context for callbacks.
int nDir: Number of Dirichlet degrees of freedom.
int nGlobal: Number of global degrees of freedom.
GlobalLinSysPETSc *linSys: Pointer to the original calling object.
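Taken together, the last four entries describe a small plain-old-data context struct; a minimal sketch of its shape (the actual declaration lives in GlobalLinSysPETSc.h):

struct ShellCtx
{
    int nGlobal;                 // number of global degrees of freedom
    int nDir;                    // number of Dirichlet degrees of freedom
    GlobalLinSysPETSc *linSys;   // pointer to the original calling object
};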