Nektar++
GlobalLinSysPETSc.cpp
///////////////////////////////////////////////////////////////////////////////
//
// File GlobalLinSysPETSc.cpp
//
// For more information, please see: http://www.nektar.info
//
// The MIT License
//
// Copyright (c) 2006 Division of Applied Mathematics, Brown University (USA),
// Department of Aeronautics, Imperial College London (UK), and Scientific
// Computing and Imaging Institute, University of Utah (USA).
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
// Description: GlobalLinSysPETSc definition
//
///////////////////////////////////////////////////////////////////////////////

#include <MultiRegions/GlobalLinSysPETSc.h>

#include "petscis.h"
#include "petscversion.h"

namespace Nektar
{
    namespace MultiRegions
    {
        /**
         * @class GlobalLinSysPETSc
         *
         * Solves a linear system using PETSc.
         */

        /**
         * Constructor for full direct matrix solve.
         */
        GlobalLinSysPETSc::GlobalLinSysPETSc(
            const GlobalLinSysKey                &pKey,
            const boost::weak_ptr<ExpList>       &pExp,
            const boost::shared_ptr<AssemblyMap> &pLocToGloMap)
            : GlobalLinSys(pKey, pExp, pLocToGloMap)
        {
            // Check PETSc is initialized
            // For some reason, this is needed on OS X as logging is not
            // initialized properly in the call within CommMpi.
            PetscInitializeNoArguments();

            // Create matrix
            MatCreate(PETSC_COMM_WORLD, &m_matrix);
        }

        GlobalLinSysPETSc::~GlobalLinSysPETSc()
        {
        }

        /**
         * @brief Solve linear system using PETSc.
         *
         * The general strategy behind a PETSc solve is to:
         *
         * - Copy values into the PETSc vector #m_b.
         * - Solve the system #m_ksp and place the result into #m_x.
         * - Scatter the results back into #m_locVec using the #m_ctx
         *   scatter object.
         * - Copy from #m_locVec to the output array #pOutput.
         */
        void GlobalLinSysPETSc::v_SolveLinearSystem(
            const int                          pNumRows,
            const Array<OneD, const NekDouble> &pInput,
                  Array<OneD,       NekDouble> &pOutput,
            const AssemblyMapSharedPtr         &locToGloMap,
            const int                          pNumDir)
        {
            const int nHomDofs = pNumRows - pNumDir;

            // Populate RHS vector from input
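            // (m_reorderedMap translates the Nektar++ global numbering of
            // the homogeneous DOFs into PETSc's contiguous universal
            // numbering; see CalculateReordering.)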
            VecSetValues(m_b, nHomDofs, &m_reorderedMap[0],
                         &pInput[pNumDir], INSERT_VALUES);

            // Assemble RHS vector
            VecAssemblyBegin(m_b);
            VecAssemblyEnd  (m_b);

            // Do system solve
            KSPSolve(m_ksp, m_b, m_x);

            // Scatter results to local vector
            VecScatterBegin(m_ctx, m_x, m_locVec,
                            INSERT_VALUES, SCATTER_FORWARD);
            VecScatterEnd  (m_ctx, m_x, m_locVec,
                            INSERT_VALUES, SCATTER_FORWARD);

            // Copy results into output vector
            PetscScalar *tmp;
            VecGetArray    (m_locVec, &tmp);
            Vmath::Vcopy   (nHomDofs, tmp, 1, &pOutput[pNumDir], 1);
            VecRestoreArray(m_locVec, &tmp);
        }

        /**
         * @brief Set up PETSc local (equivalent to Nektar++ global) and
         * global (equivalent to universal) scatter maps.
         *
         * These maps are used in GlobalLinSysPETSc::v_SolveLinearSystem to
         * scatter the solution vector back to each process.
         */
        void GlobalLinSysPETSc::SetUpScatter()
        {
            const int nHomDofs = m_reorderedMap.size();

            // Create local and global numbering systems for vector
            IS isGlobal, isLocal;
            ISCreateGeneral(PETSC_COMM_SELF, nHomDofs, &m_reorderedMap[0],
                            PETSC_COPY_VALUES, &isGlobal);
            ISCreateStride (PETSC_COMM_SELF, nHomDofs, 0, 1, &isLocal);
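            // (isGlobal holds each local position's index into the
            // distributed vector m_x, while isLocal is simply the stride
            // 0..nHomDofs-1 into the local output vector.)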

            // Create local vector for output
            VecCreate        (PETSC_COMM_SELF, &m_locVec);
            VecSetSizes      (m_locVec, nHomDofs, PETSC_DECIDE);
            VecSetFromOptions(m_locVec);

            // Create scatter context
            VecScatterCreate (m_x, isGlobal, m_locVec, isLocal, &m_ctx);

            // Clean up
            ISDestroy(&isGlobal);
            ISDestroy(&isLocal);
        }

        /**
         * @brief Calculate a reordering of universal IDs for PETSc.
         *
         * PETSc requires a unique, contiguous index of all global and
         * universal degrees of freedom which represents its position inside
         * the matrix. Presently Gs does not guarantee this, so this routine
         * constructs a new universal mapping.
         */
        void GlobalLinSysPETSc::CalculateReordering(
            const Array<OneD, const int> &glo2uniMap,
            const Array<OneD, const int> &glo2unique,
            const AssemblyMapSharedPtr   &pLocToGloMap)
        {
            LibUtilities::CommSharedPtr vComm
                = m_expList.lock()->GetSession()->GetComm();

            const int nDirDofs = pLocToGloMap->GetNumGlobalDirBndCoeffs();
            const int nHomDofs = glo2uniMap.num_elements() - nDirDofs;
            const int nProc    = vComm->GetSize();
            const int rank     = vComm->GetRank();

            int n, cnt;

            // Count number of unique degrees of freedom on each process.
            m_nLocal = Vmath::Vsum(nHomDofs, glo2unique + nDirDofs, 1);
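            // (glo2unique is non-zero only for DOFs owned by this rank, so
            // the sum gives this rank's share of the global system.)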
            m_reorderedMap.resize(nHomDofs);

            // Reduce coefficient counts across all processors.
            Array<OneD, int> localCounts(nProc, 0), localOffset(nProc, 0);
            localCounts[rank] = nHomDofs;
            vComm->AllReduce(localCounts, LibUtilities::ReduceSum);

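            // Compute an exclusive prefix sum of the counts so that
            // localOffset[n] is the position at which rank n's DOFs begin
            // in the gathered list of universal IDs.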
            for (n = 1; n < nProc; ++n)
            {
                localOffset[n] = localOffset[n-1] + localCounts[n-1];
            }

            int totHomDofs = Vmath::Vsum(nProc, localCounts, 1);
            vector<unsigned int> allUniIds(totHomDofs, 0);

            // Assemble list of universal IDs
            for (n = 0; n < nHomDofs; ++n)
            {
                int gid = n + nDirDofs;
                allUniIds[n + localOffset[rank]] = glo2uniMap[gid];
            }

            // Reduce this across processors so that each process has a list
            // of all universal IDs.
            vComm->AllReduce(allUniIds, LibUtilities::ReduceSum);
            std::sort(allUniIds.begin(), allUniIds.end());
            map<int,int> uniIdReorder;

            // Renumber starting from 0.
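            // (Duplicate IDs -- i.e. DOFs shared between ranks -- collapse
            // onto a single entry: e.g. sorted IDs {5, 5, 9, 12} renumber
            // to {5->0, 9->1, 12->2}.)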
            for (cnt = n = 0; n < allUniIds.size(); ++n)
            {
                if (uniIdReorder.count(allUniIds[n]) > 0)
                {
                    continue;
                }

                uniIdReorder[allUniIds[n]] = cnt++;
            }

            // Populate reordering map.
            for (n = 0; n < nHomDofs; ++n)
            {
                int gid   = n + nDirDofs;
                int uniId = glo2uniMap[gid];
                ASSERTL0(uniIdReorder.count(uniId) > 0, "Error in ordering");
                m_reorderedMap[n] = uniIdReorder[uniId];
            }
        }

        /**
         * @brief Construct PETSc matrix and vector handles.
         *
         * @todo Preallocation should be done at this point, since presently
         * matrix allocation takes a significant amount of time.
         */
        void GlobalLinSysPETSc::SetUpMatVec()
        {
            // CREATE VECTORS
            VecCreate        (PETSC_COMM_WORLD, &m_x);
            VecSetSizes      (m_x, m_nLocal, PETSC_DECIDE);
            VecSetFromOptions(m_x);
            VecDuplicate     (m_x, &m_b);
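            // (m_x holds the solution and m_b the right-hand side; both are
            // distributed vectors with m_nLocal entries on this rank.)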

            // CREATE MATRICES
            MatCreate        (PETSC_COMM_WORLD, &m_matrix);
            MatSetType       (m_matrix, MATAIJ);
            MatSetSizes      (m_matrix, m_nLocal, m_nLocal,
                              PETSC_DETERMINE, PETSC_DETERMINE);
            MatSetFromOptions(m_matrix);
            MatSetUp         (m_matrix);
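            // A possible preallocation sketch for the @todo above (the
            // d_nnz/o_nnz arrays are hypothetical per-row nonzero counts):
            //     MatSeqAIJSetPreallocation(m_matrix, 0, d_nnz);
            //     MatMPIAIJSetPreallocation(m_matrix, 0, d_nnz, 0, o_nnz);
            // Calling these before assembly would avoid repeated
            // reallocation during MatSetValues.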
        }

        /**
         * @brief Set up KSP solver object.
         *
         * This is a reasonably generic setup -- most solver types can be
         * changed using the .petscrc file.
         */
        void GlobalLinSysPETSc::SetUpSolver(NekDouble tolerance)
        {
            KSPCreate(PETSC_COMM_WORLD, &m_ksp);
            KSPSetTolerances(
                m_ksp, tolerance, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT);
            KSPSetFromOptions(m_ksp);
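            // PETSc 3.5 removed the MatStructure flag from KSPSetOperators,
            // so select the appropriate signature at compile time.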
#if PETSC_VERSION_GE(3,5,0)
            KSPSetOperators(m_ksp, m_matrix, m_matrix);
#else
            KSPSetOperators(m_ksp, m_matrix, m_matrix, SAME_NONZERO_PATTERN);
#endif
        }
    }
}