Nektar++
Loading...
Searching...
No Matches
Public Member Functions | Static Public Member Functions | Static Public Attributes | Protected Member Functions | Protected Attributes | List of all members
Nektar::LibUtilities::CommMpi Class Reference

A wrapper around the MPI library's communicator functionality. More...

#include <CommMpi.h>

Inheritance diagram for Nektar::LibUtilities::CommMpi:
[legend]

Public Member Functions

 CommMpi (int narg, char *arg[])
 
 ~CommMpi () override
 
MPI_Comm GetComm ()
 
- Public Member Functions inherited from Nektar::LibUtilities::Comm
 Comm (int narg, char *arg[])
 
virtual ~Comm ()
 
void Finalise ()
 
int GetSize () const
 Returns number of processes.
 
int GetRank ()
 
const std::string & GetType () const
 
bool TreatAsRankZero ()
 
bool IsSerial ()
 
bool IsParallelInTime ()
 
std::tuple< int, int, int > GetVersion ()
 
void Block ()
 Block execution until all processes reach this point.
 
NekDouble Wtime ()
 Return the time in seconds.
 
template<class T >
void Send (int pProc, T &pData)
 
template<class T >
void Recv (int pProc, T &pData)
 
template<class T >
void SendRecv (int pSendProc, T &pSendData, int pRecvProc, T &pRecvData)
 
template<class T >
void AllReduce (T &pData, enum ReduceOperator pOp)
 
template<class T >
void AlltoAll (T &pSendData, T &pRecvData)
 
template<class T1 , class T2 >
void AlltoAllv (T1 &pSendData, T2 &pSendDataSizeMap, T2 &pSendDataOffsetMap, T1 &pRecvData, T2 &pRecvDataSizeMap, T2 &pRecvDataOffsetMap)
 
template<class T >
void AllGather (T &pSendData, T &pRecvData)
 
template<class T >
void AllGatherv (T &pSendData, T &pRecvData, Array< OneD, int > &pRecvDataSizeMap, Array< OneD, int > &pRecvDataOffsetMap)
 
template<class T >
void AllGatherv (T &pRecvData, Array< OneD, int > &pRecvDataSizeMap, Array< OneD, int > &pRecvDataOffsetMap)
 
template<class T >
void Bcast (T &pData, int pRoot)
 
template<class T >
T Gather (int rootProc, T &val)
 
template<class T >
T Scatter (int rootProc, T &pData)
 
template<class T >
void DistGraphCreateAdjacent (T &sources, T &sourceweights, int reorder)
 
template<class T1 , class T2 >
void NeighborAlltoAllv (T1 &pSendData, T2 &pSendDataSizeMap, T2 &pSendDataOffsetMap, T1 &pRecvData, T2 &pRecvDataSizeMap, T2 &pRecvDataOffsetMap)
 
template<class T >
void Irsend (int pProc, T &pData, int count, const CommRequestSharedPtr &request, int loc)
 
template<class T >
void Isend (int pProc, T &pData, int count, const CommRequestSharedPtr &request, int loc)
 
template<class T >
void SendInit (int pProc, T &pData, int count, const CommRequestSharedPtr &request, int loc)
 
template<class T >
void Irecv (int pProc, T &pData, int count, const CommRequestSharedPtr &request, int loc)
 
template<class T >
void RecvInit (int pProc, T &pData, int count, const CommRequestSharedPtr &request, int loc)
 
void StartAll (const CommRequestSharedPtr &request)
 
void WaitAll (const CommRequestSharedPtr &request)
 
CommRequestSharedPtr CreateRequest (int num)
 
CommSharedPtr CommCreateIf (int flag)
 If the flag is non-zero create a new communicator.
 
void SplitComm (int pRows, int pColumns, int pTime=0)
 Splits this communicator into a grid of size pRows*pColumns and creates row and column communicators. By default the communicator is a single row.
 
CommSharedPtr GetRowComm ()
 Retrieve the row communicator to which this process belongs.
 
CommSharedPtr GetColumnComm ()
 Retrieve the column communicator to which this process belongs.
 
CommSharedPtr GetTimeComm ()
 Retrieve the time communicator to which this process belongs.
 
CommSharedPtr GetSpaceComm ()
 Retrieve the space communicator to which this process belongs.
 
bool RemoveExistingFiles ()
 
std::pair< CommSharedPtr, CommSharedPtr > SplitCommNode ()
 

Static Public Member Functions

static CommSharedPtr create (int narg, char *arg[])
 Creates an instance of this class.
 

Static Public Attributes

static std::string className
 Name of class.
 

Protected Member Functions

 CommMpi (MPI_Comm pComm)
 
void v_Finalise () override
 
int v_GetRank () final
 
bool v_TreatAsRankZero () final
 
bool v_IsSerial () final
 
std::tuple< int, int, int > v_GetVersion () final
 
void v_Block () final
 
double v_Wtime () final
 
void v_Send (const void *buf, int count, CommDataType dt, int dest) final
 
void v_Recv (void *buf, int count, CommDataType dt, int source) final
 
void v_SendRecv (const void *sendbuf, int sendcount, CommDataType sendtype, int dest, void *recvbuf, int recvcount, CommDataType recvtype, int source) final
 
void v_AllReduce (void *buf, int count, CommDataType dt, enum ReduceOperator pOp) final
 
void v_AlltoAll (const void *sendbuf, int sendcount, CommDataType sendtype, void *recvbuf, int recvcount, CommDataType recvtype) final
 
void v_AlltoAllv (const void *sendbuf, const int *sendcounts, const int *senddispls, CommDataType sendtype, void *recvbuf, const int *recvcounts, const int *recvdispls, CommDataType recvtype) final
 
void v_AllGather (const void *sendbuf, int sendcount, CommDataType sendtype, void *recvbuf, int recvcount, CommDataType recvtype) final
 
void v_AllGatherv (const void *sendbuf, int sendcount, CommDataType sendtype, void *recvbuf, const int *recvcounts, const int *recvdispls, CommDataType recvtype) final
 
void v_AllGatherv (void *recvbuf, const int *recvcounts, const int *recvdispls, CommDataType recvtype) final
 
void v_Bcast (void *buffer, int count, CommDataType dt, int root) final
 
void v_Gather (const void *sendbuf, int sendcount, CommDataType sendtype, void *recvbuf, int recvcount, CommDataType recvtype, int root) final
 
void v_Scatter (const void *sendbuf, int sendcount, CommDataType sendtype, void *recvbuf, int recvcount, CommDataType recvtype, int root) final
 
void v_DistGraphCreateAdjacent (int indegree, const int *sources, const int *sourceweights, int reorder) final
 
void v_NeighborAlltoAllv (const void *sendbuf, const int *sendcounts, const int *senddispls, CommDataType sendtype, void *recvbuf, const int *recvcounts, const int *recvdispls, CommDataType recvtype) final
 
void v_Irsend (const void *buf, int count, CommDataType dt, int dest, CommRequestSharedPtr request, int loc) final
 
void v_Isend (const void *buf, int count, CommDataType dt, int dest, CommRequestSharedPtr request, int loc) final
 
void v_SendInit (const void *buf, int count, CommDataType dt, int dest, CommRequestSharedPtr request, int loc) final
 
void v_Irecv (void *buf, int count, CommDataType dt, int source, CommRequestSharedPtr request, int loc) final
 
void v_RecvInit (void *buf, int count, CommDataType dt, int source, CommRequestSharedPtr request, int loc) final
 
void v_StartAll (CommRequestSharedPtr request) final
 
void v_WaitAll (CommRequestSharedPtr request) final
 
CommRequestSharedPtr v_CreateRequest (int num) final
 
void v_SplitComm (int pRows, int pColumns, int pTime) override
 
CommSharedPtr v_CommCreateIf (int flag) final
 
std::pair< CommSharedPtr, CommSharedPtr > v_SplitCommNode () final
 
- Protected Member Functions inherited from Nektar::LibUtilities::Comm
 Comm ()
 

Protected Attributes

MPI_Comm m_comm
 
int m_rank {}
 
bool m_controls_mpi
 
- Protected Attributes inherited from Nektar::LibUtilities::Comm
int m_size
 Number of processes.
 
std::string m_type
 Type of communication.
 
CommSharedPtr m_commRow
 Row communicator.
 
CommSharedPtr m_commColumn
 Column communicator.
 
CommSharedPtr m_commTime
 
CommSharedPtr m_commSpace
 

Detailed Description

A wrapper around the MPI library's communicator functionality.

Definition at line 89 of file CommMpi.h.

Constructor & Destructor Documentation

◆ CommMpi() [1/2]

Nektar::LibUtilities::CommMpi::CommMpi ( int  narg,
char *  arg[] 
)

Definition at line 50 of file CommMpi.cpp.

50 : Comm(narg, arg)
51{
52 int init = 0;
53 MPI_Initialized(&init);
54
55 if (!init)
56 {
57 int thread_support = 0;
58 if (MPI_Init_thread(&narg, &arg, MPI_THREAD_MULTIPLE,
59 &thread_support) != MPI_SUCCESS)
60 {
 61 NEKERROR(
 62 ErrorUtil::ewarning,
 63 "Initializing MPI using MPI_Init, if scotch version > 6 and is "
64 "compiled with multi-threading, it might cause deadlocks.")
65 ASSERTL0(MPI_Init(&narg, &arg) == MPI_SUCCESS,
66 "Failed to initialise MPI");
67 }
68 // store bool to indicate that Nektar++ is in charge of finalizing MPI.
69 m_controls_mpi = true;
70 }
71 else
72 {
73 // Another code is in charge of finalizing MPI and this is not the
 74 // responsibility of Nektar++
75 m_controls_mpi = false;
76 }
77
78 m_comm = MPI_COMM_WORLD;
79 MPI_Comm_size(m_comm, &m_size);
80 MPI_Comm_rank(m_comm, &m_rank);
81
82#ifdef NEKTAR_USING_PETSC
83 PetscInitializeNoArguments();
84#endif
85
86 m_type = "Parallel MPI";
87}
#define ASSERTL0(condition, msg)
#define NEKERROR(type, msg)
Assert Level 0 – Fundamental assert which is used whether in FULLDEBUG, DEBUG or OPT compilation mode...
int m_size
Number of processes.
Definition Comm.h:175
std::string m_type
Type of communication.
Definition Comm.h:176

References ASSERTL0, Nektar::ErrorUtil::ewarning, m_comm, m_controls_mpi, m_rank, Nektar::LibUtilities::Comm::m_size, Nektar::LibUtilities::Comm::m_type, and NEKERROR.

◆ ~CommMpi()

Nektar::LibUtilities::CommMpi::~CommMpi ( )
override

Definition at line 104 of file CommMpi.cpp.

105{
106 int flag;
107 MPI_Finalized(&flag);
108 if (!flag && m_comm != MPI_COMM_WORLD)
109 {
110 MPI_Comm_free(&m_comm);
111 }
112}

References m_comm.

◆ CommMpi() [2/2]

Nektar::LibUtilities::CommMpi::CommMpi ( MPI_Comm  pComm)
explicitprotected

Definition at line 92 of file CommMpi.cpp.

92 : Comm()
93{
94 m_comm = pComm;
95 MPI_Comm_size(m_comm, &m_size);
96 MPI_Comm_rank(m_comm, &m_rank);
97
98 m_type = "Parallel MPI";
99}

References m_comm, m_rank, Nektar::LibUtilities::Comm::m_size, and Nektar::LibUtilities::Comm::m_type.

Member Function Documentation

◆ create()

static CommSharedPtr Nektar::LibUtilities::CommMpi::create ( int  narg,
char *  arg[] 
)
inlinestatic

Creates an instance of this class.

Definition at line 93 of file CommMpi.h.

94 {
 95 return MemoryManager<CommMpi>::AllocateSharedPtr(narg, arg);
 96 }
static std::shared_ptr< DataType > AllocateSharedPtr(const Args &...args)
Allocate a shared pointer from the memory pool.

References Nektar::MemoryManager< DataType >::AllocateSharedPtr().

◆ GetComm()

MPI_Comm Nektar::LibUtilities::CommMpi::GetComm ( )

Definition at line 117 of file CommMpi.cpp.

118{
119 return m_comm;
120}

References m_comm.

◆ v_AllGather()

void Nektar::LibUtilities::CommMpi::v_AllGather ( const void *  sendbuf,
int  sendcount,
CommDataType  sendtype,
void *  recvbuf,
int  recvcount,
CommDataType  recvtype 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 290 of file CommMpi.cpp.

293{
294 int retval = MPI_Allgather(sendbuf, sendcount, sendtype, recvbuf, recvcount,
295 recvtype, m_comm);
296
297 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing Allgather.");
298}

References ASSERTL0, and m_comm.

◆ v_AllGatherv() [1/2]

void Nektar::LibUtilities::CommMpi::v_AllGatherv ( const void *  sendbuf,
int  sendcount,
CommDataType  sendtype,
void *  recvbuf,
const int *  recvcounts,
const int *  recvdispls,
CommDataType  recvtype 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 303 of file CommMpi.cpp.

307{
308 int retval = MPI_Allgatherv(sendbuf, sendcount, sendtype, recvbuf,
309 recvcounts, recvdispls, recvtype, m_comm);
310
311 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing Allgatherv.");
312}

References ASSERTL0, and m_comm.

◆ v_AllGatherv() [2/2]

void Nektar::LibUtilities::CommMpi::v_AllGatherv ( void *  recvbuf,
const int *  recvcounts,
const int *  recvdispls,
CommDataType  recvtype 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 317 of file CommMpi.cpp.

319{
320 int retval = MPI_Allgatherv(MPI_IN_PLACE, 0, MPI_DATATYPE_NULL, recvbuf,
321 recvcounts, recvdispls, recvtype, m_comm);
322
323 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing Allgatherv.");
324}

References ASSERTL0, and m_comm.

◆ v_AllReduce()

void Nektar::LibUtilities::CommMpi::v_AllReduce ( void *  buf,
int  count,
CommDataType  dt,
enum ReduceOperator  pOp 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 232 of file CommMpi.cpp.

234{
235 if (GetSize() == 1)
236 {
237 return;
238 }
239
240 MPI_Op vOp;
241 switch (pOp)
242 {
243 case ReduceMax:
244 vOp = MPI_MAX;
245 break;
246 case ReduceMin:
247 vOp = MPI_MIN;
248 break;
249 case ReduceSum:
250 default:
251 vOp = MPI_SUM;
252 break;
253 }
254 int retval = MPI_Allreduce(MPI_IN_PLACE, buf, count, dt, vOp, m_comm);
255
256 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing All-reduce.");
257}
int GetSize() const
Returns number of processes.
Definition Comm.h:269

References ASSERTL0, Nektar::LibUtilities::Comm::GetSize(), m_comm, Nektar::LibUtilities::ReduceMax, Nektar::LibUtilities::ReduceMin, and Nektar::LibUtilities::ReduceSum.

◆ v_AlltoAll()

void Nektar::LibUtilities::CommMpi::v_AlltoAll ( const void *  sendbuf,
int  sendcount,
CommDataType  sendtype,
void *  recvbuf,
int  recvcount,
CommDataType  recvtype 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 262 of file CommMpi.cpp.

265{
266 int retval = MPI_Alltoall(sendbuf, sendcount, sendtype, recvbuf, recvcount,
267 recvtype, m_comm);
268
269 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing All-to-All.");
270}

References ASSERTL0, and m_comm.

◆ v_AlltoAllv()

void Nektar::LibUtilities::CommMpi::v_AlltoAllv ( const void *  sendbuf,
const int *  sendcounts,
const int *  senddispls,
CommDataType  sendtype,
void *  recvbuf,
const int *  recvcounts,
const int *  recvdispls,
CommDataType  recvtype 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 275 of file CommMpi.cpp.

279{
280 int retval =
281 MPI_Alltoallv(sendbuf, sendcounts, senddispls, sendtype, recvbuf,
282 recvcounts, recvdispls, recvtype, m_comm);
283
284 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing All-to-All-v.");
285}

References ASSERTL0, and m_comm.

◆ v_Bcast()

void Nektar::LibUtilities::CommMpi::v_Bcast ( void *  buffer,
int  count,
CommDataType  dt,
int  root 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 329 of file CommMpi.cpp.

330{
331 int retval = MPI_Bcast(buffer, count, dt, root, m_comm);
332
333 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing Bcast-v.");
334}

References ASSERTL0, and m_comm.

◆ v_Block()

void Nektar::LibUtilities::CommMpi::v_Block ( )
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 178 of file CommMpi.cpp.

179{
180 MPI_Barrier(m_comm);
181}

References m_comm.

◆ v_CommCreateIf()

CommSharedPtr Nektar::LibUtilities::CommMpi::v_CommCreateIf ( int  flag)
finalprotectedvirtual

Create a new communicator if the flag is non-zero.

Implements Nektar::LibUtilities::Comm.

Definition at line 571 of file CommMpi.cpp.

572{
573 MPI_Comm newComm;
574 // color == MPI_UNDEF => not in the new communicator
575 // key == 0 on all => use rank to order them. OpenMPI, at least,
576 // implies this is faster than ordering them ourselves.
577 MPI_Comm_split(m_comm, flag ? flag : MPI_UNDEFINED, 0, &newComm);
578
579 if (flag == 0)
580 {
581 // flag == 0 => get back MPI_COMM_NULL, return a null ptr instead.
582 return std::shared_ptr<Comm>();
583 }
584 else
585 {
586 // Return a real communicator
587 return std::shared_ptr<Comm>(new CommMpi(newComm));
588 }
589}
CommMpi(int narg, char *arg[])
Definition CommMpi.cpp:50

References m_comm.

Referenced by v_SplitCommNode().

◆ v_CreateRequest()

CommRequestSharedPtr Nektar::LibUtilities::CommMpi::v_CreateRequest ( int  num)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 491 of file CommMpi.cpp.

492{
493 return std::shared_ptr<CommRequest>(new CommRequestMpi(num));
494}

◆ v_DistGraphCreateAdjacent()

void Nektar::LibUtilities::CommMpi::v_DistGraphCreateAdjacent ( int  indegree,
const int *  sources,
const int *  sourceweights,
int  reorder 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 365 of file CommMpi.cpp.

368{
369#if MPI_VERSION < 3
370 ASSERTL0(false, "MPI_Dist_graph_create_adjacent is not supported in your "
371 "installed MPI version.");
372#else
373 int retval = MPI_Dist_graph_create_adjacent(
374 m_comm, indegree, sources, sourceweights, indegree, sources,
375 sourceweights, MPI_INFO_NULL, reorder, &m_comm);
376
377 ASSERTL0(retval == MPI_SUCCESS,
378 "MPI error performing Dist_graph_create_adjacent.")
379#endif
380}

References ASSERTL0, and m_comm.

◆ v_Finalise()

void Nektar::LibUtilities::CommMpi::v_Finalise ( )
overrideprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 125 of file CommMpi.cpp.

126{
127#ifdef NEKTAR_USING_PETSC
128 PetscFinalize();
129#endif
130 int flag;
131 MPI_Finalized(&flag);
132 if ((!flag) && m_controls_mpi)
133 {
134 MPI_Finalize();
135 }
136}

References m_controls_mpi.

Referenced by Nektar::LibUtilities::CommCwipi::v_Finalise().

◆ v_Gather()

void Nektar::LibUtilities::CommMpi::v_Gather ( const void *  sendbuf,
int  sendcount,
CommDataType  sendtype,
void *  recvbuf,
int  recvcount,
CommDataType  recvtype,
int  root 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 339 of file CommMpi.cpp.

342{
343 int retval = MPI_Gather(sendbuf, sendcount, sendtype, recvbuf, recvcount,
344 recvtype, root, m_comm);
345
346 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing Gather.");
347}

References ASSERTL0, and m_comm.

◆ v_GetRank()

int Nektar::LibUtilities::CommMpi::v_GetRank ( void  )
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 141 of file CommMpi.cpp.

142{
143 return m_rank;
144}

References m_rank.

◆ v_GetVersion()

std::tuple< int, int, int > Nektar::LibUtilities::CommMpi::v_GetVersion ( )
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 165 of file CommMpi.cpp.

166{
167 int version, subversion;
168 int retval = MPI_Get_version(&version, &subversion);
169
170 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing GetVersion.");
171
172 return std::make_tuple(version, subversion, 0);
173}

References ASSERTL0.

◆ v_Irecv()

void Nektar::LibUtilities::CommMpi::v_Irecv ( void *  buf,
int  count,
CommDataType  dt,
int  source,
CommRequestSharedPtr  request,
int  loc 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 442 of file CommMpi.cpp.

444{
 445 CommRequestMpiSharedPtr req =
 446 std::static_pointer_cast<CommRequestMpi>(request);
447 MPI_Irecv(buf, count, dt, source, 0, m_comm, req->GetRequest(loc));
448}
std::shared_ptr< CommRequestMpi > CommRequestMpiSharedPtr
Definition CommMpi.h:86

References m_comm.

◆ v_Irsend()

void Nektar::LibUtilities::CommMpi::v_Irsend ( const void *  buf,
int  count,
CommDataType  dt,
int  dest,
CommRequestSharedPtr  request,
int  loc 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 409 of file CommMpi.cpp.

411{
 412 CommRequestMpiSharedPtr req =
 413 std::static_pointer_cast<CommRequestMpi>(request);
414 MPI_Irsend(buf, count, dt, dest, 0, m_comm, req->GetRequest(loc));
415}

References m_comm.

◆ v_Isend()

void Nektar::LibUtilities::CommMpi::v_Isend ( const void *  buf,
int  count,
CommDataType  dt,
int  dest,
CommRequestSharedPtr  request,
int  loc 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 420 of file CommMpi.cpp.

422{
 423 CommRequestMpiSharedPtr req =
 424 std::static_pointer_cast<CommRequestMpi>(request);
425 MPI_Isend(buf, count, dt, dest, 0, m_comm, req->GetRequest(loc));
426}

References m_comm.

◆ v_IsSerial()

bool Nektar::LibUtilities::CommMpi::v_IsSerial ( void  )
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 157 of file CommMpi.cpp.

158{
159 return m_size == 1;
160}

References Nektar::LibUtilities::Comm::m_size.

◆ v_NeighborAlltoAllv()

void Nektar::LibUtilities::CommMpi::v_NeighborAlltoAllv ( const void *  sendbuf,
const int *  sendcounts,
const int *  senddispls,
CommDataType  sendtype,
void *  recvbuf,
const int *  recvcounts,
const int *  recvdispls,
CommDataType  recvtype 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 385 of file CommMpi.cpp.

393{
394#if MPI_VERSION < 3
395 ASSERTL0(false, "MPI_Neighbor_alltoallv is not supported in your "
396 "installed MPI version.");
397#else
398 int retval = MPI_Neighbor_alltoallv(sendbuf, sendcounts, senddispls,
399 sendtype, recvbuf, recvcounts,
400 recvdispls, recvtype, m_comm);
401
402 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing NeighborAllToAllV.");
403#endif
404}

References ASSERTL0, and m_comm.

◆ v_Recv()

void Nektar::LibUtilities::CommMpi::v_Recv ( void *  buf,
int  count,
CommDataType  dt,
int  source 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 209 of file CommMpi.cpp.

210{
211 MPI_Recv(buf, count, dt, source, 0, m_comm, MPI_STATUS_IGNORE);
212}

References m_comm.

◆ v_RecvInit()

void Nektar::LibUtilities::CommMpi::v_RecvInit ( void *  buf,
int  count,
CommDataType  dt,
int  source,
CommRequestSharedPtr  request,
int  loc 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 453 of file CommMpi.cpp.

455{
 456 CommRequestMpiSharedPtr req =
 457 std::static_pointer_cast<CommRequestMpi>(request);
458 MPI_Recv_init(buf, count, dt, source, 0, m_comm, req->GetRequest(loc));
459}

References m_comm.

◆ v_Scatter()

void Nektar::LibUtilities::CommMpi::v_Scatter ( const void *  sendbuf,
int  sendcount,
CommDataType  sendtype,
void *  recvbuf,
int  recvcount,
CommDataType  recvtype,
int  root 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 352 of file CommMpi.cpp.

355{
356 int retval = MPI_Scatter(sendbuf, sendcount, sendtype, recvbuf, recvcount,
357 recvtype, root, m_comm);
358
359 ASSERTL0(retval == MPI_SUCCESS, "MPI error performing Scatter.");
360}

References ASSERTL0, and m_comm.

◆ v_Send()

void Nektar::LibUtilities::CommMpi::v_Send ( const void *  buf,
int  count,
CommDataType  dt,
int  dest 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 194 of file CommMpi.cpp.

195{
196 if (MPISYNC)
197 {
198 MPI_Ssend(buf, count, dt, dest, 0, m_comm);
199 }
200 else
201 {
202 MPI_Send(buf, count, dt, dest, 0, m_comm);
203 }
204}
#define MPISYNC
Definition CommMpi.h:45

References m_comm, and MPISYNC.

◆ v_SendInit()

void Nektar::LibUtilities::CommMpi::v_SendInit ( const void *  buf,
int  count,
CommDataType  dt,
int  dest,
CommRequestSharedPtr  request,
int  loc 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 431 of file CommMpi.cpp.

433{
 434 CommRequestMpiSharedPtr req =
 435 std::static_pointer_cast<CommRequestMpi>(request);
436 MPI_Send_init(buf, count, dt, dest, 0, m_comm, req->GetRequest(loc));
437}

References m_comm.

◆ v_SendRecv()

void Nektar::LibUtilities::CommMpi::v_SendRecv ( const void *  sendbuf,
int  sendcount,
CommDataType  sendtype,
int  dest,
void *  recvbuf,
int  recvcount,
CommDataType  recvtype,
int  source 
)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 217 of file CommMpi.cpp.

220{
221 MPI_Status status;
222 int retval = MPI_Sendrecv(sendbuf, sendcount, sendtype, dest, 0, recvbuf,
223 recvcount, recvtype, source, 0, m_comm, &status);
224
225 ASSERTL0(retval == MPI_SUCCESS,
226 "MPI error performing send-receive of data.");
227}

References ASSERTL0, and m_comm.

◆ v_SplitComm()

void Nektar::LibUtilities::CommMpi::v_SplitComm ( int  pRows,
int  pColumns,
int  pTime 
)
overrideprotectedvirtual

Processes are considered as a grid of size pRows*pColumns. Comm objects are created corresponding to the rows and columns of this grid. The row and column to which this process belongs is stored in m_commRow and m_commColumn.

Implements Nektar::LibUtilities::Comm.

Definition at line 502 of file CommMpi.cpp.

503{
504 if (pTime == 0)
505 {
506 ASSERTL0(pRows * pColumns == m_size,
507 "Rows/Columns do not match comm size.");
508
509 MPI_Comm newComm;
510
511 // Compute row and column in grid.
512 int myCol = m_rank % pColumns;
513 int myRow = (m_rank - myCol) / pColumns;
514
515 // Split Comm into rows - all processes with same myRow are put in
516 // the same communicator. The rank within this communicator is the
517 // column index.
518 MPI_Comm_split(m_comm, myRow, myCol, &newComm);
519 m_commRow = std::shared_ptr<Comm>(new CommMpi(newComm));
520
521 // Split Comm into columns - all processes with same myCol are put
522 // in the same communicator. The rank within this communicator is
523 // the row index.
524 MPI_Comm_split(m_comm, myCol, myRow, &newComm);
525 m_commColumn = std::shared_ptr<Comm>(new CommMpi(newComm));
526 }
527 else
528 {
529 ASSERTL0(pRows * pColumns * pTime == m_size,
530 "Rows/Columns/Time do not match comm size.");
531
532 MPI_Comm newComm;
533
534 // Compute row and column in grid.
535 int mySpace = m_rank % (pRows * pColumns);
536 int myTime = (m_rank - mySpace) / (pRows * pColumns);
537 int myCol = mySpace % pColumns;
538 int myRow = (mySpace - myCol) / pColumns;
539
540 // Split Comm - all processes with same mySpace are put in
541 // the same communicator. The rank within this communicator is the
542 // time index.
543 MPI_Comm_split(m_comm, mySpace, myTime, &newComm);
544 m_commTime = std::shared_ptr<Comm>(new CommMpi(newComm));
545
546 // Split Comm - all processes with same myTime are put in
547 // the same communicator. The rank within this communicator is the
548 // spatial index.
549 MPI_Comm_split(m_comm, myTime, mySpace, &newComm);
550 m_commSpace = std::shared_ptr<Comm>(new CommMpi(newComm));
551
552 // Split Comm into rows - all processes with same myRow are put in
553 // the same communicator. The rank within this communicator is the
554 // column index.
555 MPI_Comm_split(m_comm, myRow + pRows * pColumns * myTime, myCol,
556 &newComm);
557 m_commRow = std::shared_ptr<Comm>(new CommMpi(newComm));
558
559 // Split Comm into columns - all processes with same myCol are put
560 // in the same communicator. The rank within this communicator is
561 // the row index.
562 MPI_Comm_split(m_comm, myCol + pRows * pColumns * myTime, myRow,
563 &newComm);
564 m_commColumn = std::shared_ptr<Comm>(new CommMpi(newComm));
565 }
566}
CommSharedPtr m_commColumn
Column communicator.
Definition Comm.h:178
CommSharedPtr m_commRow
Row communicator.
Definition Comm.h:177
CommSharedPtr m_commTime
Definition Comm.h:179
CommSharedPtr m_commSpace
Definition Comm.h:180

References ASSERTL0, m_comm, Nektar::LibUtilities::Comm::m_commColumn, Nektar::LibUtilities::Comm::m_commRow, Nektar::LibUtilities::Comm::m_commSpace, Nektar::LibUtilities::Comm::m_commTime, m_rank, and Nektar::LibUtilities::Comm::m_size.

◆ v_SplitCommNode()

std::pair< CommSharedPtr, CommSharedPtr > Nektar::LibUtilities::CommMpi::v_SplitCommNode ( )
finalprotectedvirtual

Reimplemented from Nektar::LibUtilities::Comm.

Definition at line 594 of file CommMpi.cpp.

595{
596 std::pair<CommSharedPtr, CommSharedPtr> ret;
597
598#if MPI_VERSION < 3
599 ASSERTL0(false, "Not implemented for non-MPI-3 versions.");
600#else
601 // Create an intra-node communicator.
602 MPI_Comm nodeComm;
603 MPI_Comm_split_type(MPI_COMM_WORLD, MPI_COMM_TYPE_SHARED, m_rank,
604 MPI_INFO_NULL, &nodeComm);
605
606 // For rank 0 of the intra-node communicator, split the main
607 // communicator. Everyone else will get a null communicator.
608 ret.first = std::shared_ptr<Comm>(new CommMpi(nodeComm));
609 ret.second = CommMpi::v_CommCreateIf(ret.first->GetRank() == 0);
610 if (ret.first->GetRank() == 0)
611 {
612 ret.second->SplitComm(1, ret.second->GetSize());
613 }
614#endif
615
616 return ret;
617}
CommSharedPtr v_CommCreateIf(int flag) final
Definition CommMpi.cpp:571

References ASSERTL0, m_rank, and v_CommCreateIf().

◆ v_StartAll()

void Nektar::LibUtilities::CommMpi::v_StartAll ( CommRequestSharedPtr  request)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 464 of file CommMpi.cpp.

465{
 466 CommRequestMpiSharedPtr req =
 467 std::static_pointer_cast<CommRequestMpi>(request);
468 if (req->GetNumRequest() != 0)
469 {
470 MPI_Startall(req->GetNumRequest(), req->GetRequest(0));
471 }
472}

◆ v_TreatAsRankZero()

bool Nektar::LibUtilities::CommMpi::v_TreatAsRankZero ( void  )
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 149 of file CommMpi.cpp.

150{
151 return m_rank == 0;
152}

References m_rank.

◆ v_WaitAll()

void Nektar::LibUtilities::CommMpi::v_WaitAll ( CommRequestSharedPtr  request)
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 477 of file CommMpi.cpp.

478{
 479 CommRequestMpiSharedPtr req =
 480 std::static_pointer_cast<CommRequestMpi>(request);
481 if (req->GetNumRequest() != 0)
482 {
483 MPI_Waitall(req->GetNumRequest(), req->GetRequest(0),
484 MPI_STATUSES_IGNORE);
485 }
486}

◆ v_Wtime()

double Nektar::LibUtilities::CommMpi::v_Wtime ( )
finalprotectedvirtual

Implements Nektar::LibUtilities::Comm.

Definition at line 186 of file CommMpi.cpp.

187{
188 return MPI_Wtime();
189}

Member Data Documentation

◆ className

std::string Nektar::LibUtilities::CommMpi::className
static
Initial value:
GetCommFactory().RegisterCreatorFunction(
"ParallelMPI", CommMpi::create, "Parallel communication using MPI.")
static CommSharedPtr create(int narg, char *arg[])
Creates an instance of this class.
Definition CommMpi.h:93
tKey RegisterCreatorFunction(tKey idKey, CreatorFunction classCreator, std::string pDesc="")
Register a class with the factory.
CommFactory & GetCommFactory()

Name of class.

Definition at line 99 of file CommMpi.h.

◆ m_comm

MPI_Comm Nektar::LibUtilities::CommMpi::m_comm
protected

◆ m_controls_mpi

bool Nektar::LibUtilities::CommMpi::m_controls_mpi
protected

Definition at line 109 of file CommMpi.h.

Referenced by CommMpi(), and v_Finalise().

◆ m_rank

int Nektar::LibUtilities::CommMpi::m_rank {}
protected