namespace LibUtilities
{
    std::string CommMpi::className = GetCommFactory().RegisterCreatorFunction(
        "ParallelMPI", CommMpi::create, "Parallel communication using MPI.");

    // Constructor: initialise MPI, guarding against double initialisation.
    CommMpi::CommMpi(int narg, char *arg[]) : Comm(narg, arg)
    {
        int init = 0;
        MPI_Initialized(&init);
        ASSERTL0(!init, "MPI has already been initialised.");

        int retval = MPI_Init(&narg, &arg);
        if (retval != MPI_SUCCESS)
        {
            ASSERTL0(false, "Failed to initialise MPI");
        }
        // ...
    }

        // v_Send for Array<OneD, NekDouble>: a synchronous send when MPISYNC
        // is enabled...
        MPI_Ssend(pData.get(), (int) pData.num_elements(),
                  MPI_DOUBLE, pProc, 0, m_comm);
        // ...otherwise a standard blocking send:
        MPI_Send(pData.get(), (int) pData.num_elements(),
                 MPI_DOUBLE, pProc, 0, m_comm);

        // v_Recv for Array<OneD, NekDouble>:
        MPI_Recv(pData.get(), (int) pData.num_elements(),
                 MPI_DOUBLE, pProc, 0, m_comm, &status);

        // v_Send for Array<OneD, int>: the same pattern with MPI_INT.
        MPI_Ssend(pData.get(), (int) pData.num_elements(),
                  MPI_INT, pProc, 0, m_comm);
        // ...
        MPI_Send(pData.get(), (int) pData.num_elements(),
                 MPI_INT, pProc, 0, m_comm);

        // v_Recv for Array<OneD, int>:
        MPI_Recv(pData.get(), (int) pData.num_elements(),
                 MPI_INT, pProc, 0, m_comm, &status);

        // v_Send for std::vector<unsigned int>:
        MPI_Ssend(&pData[0], (int) pData.size(),
                  MPI_UNSIGNED, pProc, 0, m_comm);
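
    /*
     * Illustrative sketch, assuming a CommSharedPtr vComm as above: a simple
     * one-way transfer between ranks 0 and 1 via the public Send/Recv
     * wrappers, which dispatch to the v_Send/v_Recv overloads here. With
     * MPISYNC enabled, MPI_Ssend completes only once the matching receive
     * has been posted, so the ordering across ranks must match.
     *
     *   Array<OneD, NekDouble> buf(8, 0.0);
     *   if (vComm->GetRank() == 0)
     *   {
     *       vComm->Send(1, buf);
     *   }
     *   else if (vComm->GetRank() == 1)
     *   {
     *       vComm->Recv(0, buf);
     *   }
     */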

    void CommMpi::v_SendRecv(int pSendProc, Array<OneD, NekDouble> &pSendData,
                             int pRecvProc, Array<OneD, NekDouble> &pRecvData)
    {
        MPI_Status status;
        int retval = MPI_Sendrecv(
            pSendData.get(), (int) pSendData.num_elements(), MPI_DOUBLE,
            pRecvProc, 0,
            pRecvData.get(), (int) pRecvData.num_elements(), MPI_DOUBLE,
            pSendProc, 0, m_comm, &status);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing send-receive of data.");
    }

    void CommMpi::v_SendRecv(int pSendProc, Array<OneD, int> &pSendData,
                             int pRecvProc, Array<OneD, int> &pRecvData)
    {
        MPI_Status status;
        int retval = MPI_Sendrecv(
            pSendData.get(), (int) pSendData.num_elements(), MPI_INT,
            pRecvProc, 0,
            pRecvData.get(), (int) pRecvData.num_elements(), MPI_INT,
            pSendProc, 0, m_comm, &status);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing send-receive of data.");
    }

    void CommMpi::v_SendRecvReplace(int pSendProc, int pRecvProc,
                                    Array<OneD, NekDouble> &pSendData)
    {
        MPI_Status status;
        int retval = MPI_Sendrecv_replace(
            pSendData.get(), (int) pSendData.num_elements(), MPI_DOUBLE,
            pRecvProc, 0, pSendProc, 0, m_comm, &status);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing Send-Receive-Replace of data.");
    }

    // Array<OneD, int> overload: identical apart from the datatype.
        int retval = MPI_Sendrecv_replace(
            pSendData.get(), (int) pSendData.num_elements(), MPI_INT,
            pRecvProc, 0, pSendProc, 0, m_comm, &status);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing Send-Receive-Replace of data.");

        // Map the reduction operator onto its MPI equivalent (scalar
        // NekDouble overload).
        MPI_Op vOp;
        switch (pOp)
        {
            case ReduceMax: vOp = MPI_MAX; break;
            case ReduceMin: vOp = MPI_MIN; break;
            default:        vOp = MPI_SUM; break;
        }
        int retval = MPI_Allreduce(MPI_IN_PLACE, &pData, 1,
                                   MPI_DOUBLE, vOp, m_comm);
        ASSERTL0(retval == MPI_SUCCESS, "MPI error performing All-reduce.");

        // Scalar int overload: the same mapping, with MPI_INT.
        int retval = MPI_Allreduce(MPI_IN_PLACE, &pData, 1,
                                   MPI_INT, vOp, m_comm);
        ASSERTL0(retval == MPI_SUCCESS, "MPI error performing All-reduce.");

        // Array<OneD, NekDouble> overload: element-wise in-place reduction;
        // the operator mapping is the same as above.
        int retval = MPI_Allreduce(MPI_IN_PLACE, pData.get(),
                                   (int) pData.num_elements(),
                                   MPI_DOUBLE, vOp, m_comm);
        ASSERTL0(retval == MPI_SUCCESS, "MPI error performing All-reduce.");

        // Array<OneD, int> overload:
        int retval = MPI_Allreduce(MPI_IN_PLACE, pData.get(),
                                   (int) pData.num_elements(),
                                   MPI_INT, vOp, m_comm);
        ASSERTL0(retval == MPI_SUCCESS, "MPI error performing All-reduce.");

        // Final v_AllReduce overload; its buffer and count arguments are
        // elided in this excerpt.
        int retval = MPI_Allreduce(MPI_IN_PLACE, /* buffer, count, type, */
                                   vOp, m_comm);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing All-reduce.");

        // v_AlltoAll for Array<OneD, NekDouble>: each rank sends an equal
        // share of its buffer to every other rank, so both buffers must be
        // divisible by GetSize().
        int retval = MPI_Alltoall(
            pSendData.get(), (int) pSendData.num_elements() / GetSize(),
            MPI_DOUBLE,
            pRecvData.get(), (int) pRecvData.num_elements() / GetSize(),
            MPI_DOUBLE, m_comm);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing All-to-All.");

        // v_AlltoAll for Array<OneD, int>:
        int retval = MPI_Alltoall(
            pSendData.get(), (int) pSendData.num_elements() / GetSize(),
            MPI_INT,
            pRecvData.get(), (int) pRecvData.num_elements() / GetSize(),
            MPI_INT, m_comm);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing All-to-All.");

    void CommMpi::v_AlltoAllv(Array<OneD, NekDouble> &pSendData,
                              Array<OneD, int> &pSendDataSizeMap,
                              Array<OneD, int> &pSendDataOffsetMap,
                              Array<OneD, NekDouble> &pRecvData,
                              Array<OneD, int> &pRecvDataSizeMap,
                              Array<OneD, int> &pRecvDataOffsetMap)
    {
        int retval = MPI_Alltoallv(pSendData.get(),
                                   pSendDataSizeMap.get(),
                                   pSendDataOffsetMap.get(),
                                   MPI_DOUBLE,
                                   pRecvData.get(),
                                   pRecvDataSizeMap.get(),
                                   pRecvDataOffsetMap.get(),
                                   MPI_DOUBLE,
                                   m_comm);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing All-to-All-v.");
    }

    void CommMpi::v_AlltoAllv(Array<OneD, int> &pSendData,
                              Array<OneD, int> &pSendDataSizeMap,
                              Array<OneD, int> &pSendDataOffsetMap,
                              Array<OneD, int> &pRecvData,
                              Array<OneD, int> &pRecvDataSizeMap,
                              Array<OneD, int> &pRecvDataOffsetMap)
    {
        int retval = MPI_Alltoallv(pSendData.get(),
                                   pSendDataSizeMap.get(),
                                   pSendDataOffsetMap.get(),
                                   MPI_INT,
                                   pRecvData.get(),
                                   pRecvDataSizeMap.get(),
                                   pRecvDataOffsetMap.get(),
                                   MPI_INT,
                                   m_comm);
        ASSERTL0(retval == MPI_SUCCESS,
                 "MPI error performing All-to-All-v.");
    }
626 "Rows/Columns do not match comm size.");
631 int myCol =
m_rank % pColumns;
632 int myRow = (
m_rank - myCol) / pColumns;
637 MPI_Comm_split(
m_comm, myRow, myCol, &newComm);
643 MPI_Comm_split(
m_comm, myCol, myRow, &newComm);
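
    /*
     * Worked example: with m_size = 6 split as 2 rows x 3 columns, rank 4
     * gets myCol = 4 % 3 = 1 and myRow = (4 - 1) / 3 = 1. It therefore
     * joins the row communicator {3, 4, 5} with rank 1, and the column
     * communicator {1, 4} with rank 1.
     */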