GlobalLinSysPETScStaticCond.cpp
///////////////////////////////////////////////////////////////////////////////
//
// File GlobalLinSysPETScStaticCond.cpp
//
// For more information, please see: http://www.nektar.info
//
// The MIT License
//
// Copyright (c) 2006 Division of Applied Mathematics, Brown University (USA),
// Department of Aeronautics, Imperial College London (UK), and Scientific
// Computing and Imaging Institute, University of Utah (USA).
//
// License for the specific language governing rights and limitations under
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
// Description: GlobalLinSysPETScStaticCond definition
//
///////////////////////////////////////////////////////////////////////////////

#include <MultiRegions/GlobalLinSysPETScStaticCond.h>

#include <petscsys.h>
#include <petscksp.h>
#include <petscmat.h>

using namespace std;

namespace Nektar
{
    namespace MultiRegions
    {
        /**
         * @class GlobalLinSysPETScStaticCond
         *
         * Solves a linear system using single- or multi-level static
         * condensation.
         */

        /**
         * Registers the class with the Factory.
         */
        string GlobalLinSysPETScStaticCond::className
                = GetGlobalLinSysFactory().RegisterCreatorFunction(
                    "PETScStaticCond",
                    GlobalLinSysPETScStaticCond::create,
                    "PETSc static condensation.");

        string GlobalLinSysPETScStaticCond::className2
                = GetGlobalLinSysFactory().RegisterCreatorFunction(
                    "PETScMultiLevelStaticCond",
                    GlobalLinSysPETScStaticCond::create,
                    "PETSc multi-level static condensation.");

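        // The keys registered above are the values a user supplies through
        // the GlobalSysSoln solver-info option to select this solver. As an
        // illustrative example (not taken from this file), a session file
        // would typically contain something like
        //     <I PROPERTY="GlobalSysSoln" VALUE="PETScStaticCond" />
        // assuming Nektar++ has been compiled with PETSc support.
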
        /**
         * For a matrix system of the form @f[
         * \left[ \begin{array}{cc}
         * \boldsymbol{A} & \boldsymbol{B}\\
         * \boldsymbol{C} & \boldsymbol{D}
         * \end{array} \right]
         * \left[ \begin{array}{c} \boldsymbol{x_1}\\ \boldsymbol{x_2}
         * \end{array}\right]
         * = \left[ \begin{array}{c} \boldsymbol{y_1}\\ \boldsymbol{y_2}
         * \end{array}\right],
         * @f]
         * where @f$\boldsymbol{D}@f$ and
         * @f$(\boldsymbol{A-BD^{-1}C})@f$ are invertible, store and assemble
         * a static condensation system, according to a given local to global
         * mapping. #m_linSys is constructed by AssembleSchurComplement().
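         *
         * Eliminating the interior unknowns @f$\boldsymbol{x_2}@f$ yields the
         * condensed boundary problem
         * @f[
         * (\boldsymbol{A-BD^{-1}C})\,\boldsymbol{x_1} =
         * \boldsymbol{y_1}-\boldsymbol{BD^{-1}}\boldsymbol{y_2},
         * @f]
         * after which the interior unknowns follow by back-substitution,
         * @f[
         * \boldsymbol{x_2} = \boldsymbol{D^{-1}}
         * (\boldsymbol{y_2}-\boldsymbol{C}\boldsymbol{x_1}).
         * @f]
         * The blocks @f$\boldsymbol{BD^{-1}}@f$, @f$\boldsymbol{C}@f$ and
         * @f$\boldsymbol{D^{-1}}@f$ are stored in #m_BinvD, #m_C and #m_invD.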
         * @param   pKey         Associated matrix key.
         * @param   pExpList     Expansion list.
         * @param   pLocToGloMap Local to global mapping.
         */
        GlobalLinSysPETScStaticCond::GlobalLinSysPETScStaticCond(
            const GlobalLinSysKey                &pKey,
            const boost::weak_ptr<ExpList>       &pExpList,
            const boost::shared_ptr<AssemblyMap> &pLocToGloMap)
            : GlobalLinSys          (pKey, pExpList, pLocToGloMap),
              GlobalLinSysPETSc     (pKey, pExpList, pLocToGloMap),
              GlobalLinSysStaticCond(pKey, pExpList, pLocToGloMap)
        {
            ASSERTL1((pKey.GetGlobalSysSolnType() == ePETScStaticCond) ||
                     (pKey.GetGlobalSysSolnType() == ePETScMultiLevelStaticCond),
                     "This constructor is only valid when using static "
                     "condensation");
            ASSERTL1(pKey.GetGlobalSysSolnType()
                        == pLocToGloMap->GetGlobalSysSolnType(),
                     "The local to global map is not set up for the requested "
                     "solution type");
        }

        /**
         * Constructor used during multi-level recursion: the static
         * condensation block matrices are supplied directly rather than
         * being built here.
         */
        GlobalLinSysPETScStaticCond::GlobalLinSysPETScStaticCond(
            const GlobalLinSysKey                &pKey,
            const boost::weak_ptr<ExpList>       &pExpList,
            const DNekScalBlkMatSharedPtr         pSchurCompl,
            const DNekScalBlkMatSharedPtr         pBinvD,
            const DNekScalBlkMatSharedPtr         pC,
            const DNekScalBlkMatSharedPtr         pInvD,
            const boost::shared_ptr<AssemblyMap> &pLocToGloMap,
            const PreconditionerSharedPtr         pPrecon)
            : GlobalLinSys          (pKey, pExpList, pLocToGloMap),
              GlobalLinSysPETSc     (pKey, pExpList, pLocToGloMap),
              GlobalLinSysStaticCond(pKey, pExpList, pLocToGloMap)
        {
            m_schurCompl = pSchurCompl;
            m_BinvD      = pBinvD;
            m_C          = pC;
            m_invD       = pInvD;
            m_precon     = pPrecon;
        }

        /**
         *
         */
        GlobalLinSysPETScStaticCond::~GlobalLinSysPETScStaticCond()
        {

        }

        void GlobalLinSysPETScStaticCond::v_InitObject()
        {
            m_precon = CreatePrecon(m_locToGloMap);

            // Allocate memory for top-level structure
            SetupTopLevel(m_locToGloMap);

            // Setup Block Matrix systems
            int n, n_exp = m_expList.lock()->GetNumElmts();

            MatrixStorage blkmatStorage = eDIAGONAL;
            const Array<OneD,const unsigned int>& nbdry_size
                    = m_locToGloMap->GetNumLocalBndCoeffsPerPatch();

            m_S1Blk = MemoryManager<DNekScalBlkMat>
                ::AllocateSharedPtr(nbdry_size, nbdry_size, blkmatStorage);

            // Preserve original matrix in m_S1Blk
            for (n = 0; n < n_exp; ++n)
            {
                DNekScalMatSharedPtr mat = m_schurCompl->GetBlock(n, n);
                m_S1Blk->SetBlock(n, n, mat);
            }

            // Build preconditioner
            m_precon->BuildPreconditioner();

            // Do transform of Schur complement matrix
            for (n = 0; n < n_exp; ++n)
            {
                if (m_linSysKey.GetMatrixType() !=
                        StdRegions::eHybridDGHelmBndLam)
                {
                    DNekScalMatSharedPtr mat = m_S1Blk->GetBlock(n, n);
                    DNekScalMatSharedPtr t = m_precon->TransformedSchurCompl(
                        m_expList.lock()->GetOffset_Elmt_Id(n), mat);
                    m_schurCompl->SetBlock(n, n, t);
                }
            }

            // Construct this level
            Initialise(m_locToGloMap);
        }

        /**
         * Assemble the Schur complement matrix from the block matrices stored
         * in #m_blkMatrices and the given local to global mapping information.
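         *
         * In outline (a restatement of the loop below, not additional
         * behaviour): every entry of each local boundary block is scaled by
         * the two local-to-global assembly signs and accumulated into the
         * PETSc matrix at the reordered universal row and column of the
         * corresponding non-Dirichlet global degrees of freedom, using
         * MatSetValue() with ADD_VALUES.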
         * @param   pLocToGloMap   Local to global mapping information.
         */
        void GlobalLinSysPETScStaticCond::v_AssembleSchurComplement(
            AssemblyMapSharedPtr pLocToGloMap)
        {
            int i, j, n, cnt, gid1, gid2, loc_lda;
            NekDouble sign1, sign2, value;

            const int nDirDofs = pLocToGloMap->GetNumGlobalDirBndCoeffs();

            DNekScalBlkMatSharedPtr SchurCompl = m_schurCompl;
            DNekScalBlkMatSharedPtr BinvD      = m_BinvD;
            DNekScalBlkMatSharedPtr C          = m_C;
            DNekScalBlkMatSharedPtr invD       = m_invD;
            DNekScalMatSharedPtr    loc_mat;

            // Build precon again if we are in multi-level static condensation
            // (a bit of a hack)
            if (m_linSysKey.GetGlobalSysSolnType() ==
                    ePETScMultiLevelStaticCond)
            {
                m_precon = CreatePrecon(m_locToGloMap);
                m_precon->BuildPreconditioner();
            }

            // CALCULATE REORDERING MAPPING
            CalculateReordering(pLocToGloMap->GetGlobalToUniversalBndMap(),
                                pLocToGloMap->GetGlobalToUniversalBndMapUnique(),
                                pLocToGloMap);

            // SET UP VECTORS AND MATRIX
            SetUpMatVec(pLocToGloMap->GetNumGlobalBndCoeffs(), nDirDofs);

            // SET UP SCATTER OBJECTS
            SetUpScatter();

            // CONSTRUCT KSP OBJECT
            SetUpSolver(pLocToGloMap->GetIterativeTolerance());

            // If we are using the matrix multiplication shell don't try to
            // populate the matrix.
            if (m_matMult == ePETScMatMultShell)
            {
                return;
            }

            // POPULATE MATRIX
            for (n = cnt = 0; n < m_schurCompl->GetNumberOfBlockRows(); ++n)
            {
                loc_mat = m_schurCompl->GetBlock(n, n);
                loc_lda = loc_mat->GetRows();

                for (i = 0; i < loc_lda; ++i)
                {
                    gid1  = pLocToGloMap->GetLocalToGlobalBndMap(cnt + i) - nDirDofs;
                    sign1 = pLocToGloMap->GetLocalToGlobalBndSign(cnt + i);
                    if (gid1 >= 0)
                    {
                        int gid1ro = m_reorderedMap[gid1];
                        for (j = 0; j < loc_lda; ++j)
                        {
                            gid2 = pLocToGloMap->GetLocalToGlobalBndMap(cnt + j)
                                 - nDirDofs;
                            sign2 = pLocToGloMap->GetLocalToGlobalBndSign(cnt + j);
                            if (gid2 >= 0)
                            {
                                int gid2ro = m_reorderedMap[gid2];
                                value = sign1 * sign2 * (*loc_mat)(i, j);
                                MatSetValue(
                                    m_matrix, gid1ro, gid2ro, value, ADD_VALUES);
                            }
                        }
                    }
                }
                cnt += loc_lda;
            }

            // ASSEMBLE MATRIX
            MatAssemblyBegin(m_matrix, MAT_FINAL_ASSEMBLY);
            MatAssemblyEnd  (m_matrix, MAT_FINAL_ASSEMBLY);
        }

        DNekScalBlkMatSharedPtr GlobalLinSysPETScStaticCond::
            v_GetStaticCondBlock(unsigned int n)
        {
            DNekScalBlkMatSharedPtr schurComplBlock;
            int scLevel                   = m_locToGloMap->GetStaticCondLevel();
            DNekScalBlkMatSharedPtr sc    = scLevel == 0 ? m_S1Blk : m_schurCompl;
            DNekScalMatSharedPtr localMat = sc->GetBlock(n, n);
            unsigned int nbdry    = localMat->GetRows();
            unsigned int nblks    = 1;
            unsigned int esize[1] = {nbdry};

            schurComplBlock = MemoryManager<DNekScalBlkMat>
                ::AllocateSharedPtr(nblks, nblks, esize, esize);
            schurComplBlock->SetBlock(0, 0, localMat);

            return schurComplBlock;
        }

        DNekScalBlkMatSharedPtr GlobalLinSysPETScStaticCond::v_PreSolve(
            int                   scLevel,
            NekVector<NekDouble> &F_GlobBnd)
        {
            if (scLevel == 0)
            {
                // When matrices are supplied to the constructor at the top
                // level, the preconditioner is never set up.
                if (!m_precon)
                {
                    m_precon = CreatePrecon(m_locToGloMap);
                    m_precon->BuildPreconditioner();
                }

                return m_S1Blk;
            }
            else
            {
                return m_schurCompl;
            }
        }

        void GlobalLinSysPETScStaticCond::v_BasisTransform(
            Array<OneD, NekDouble>& pInOut,
            int                     offset)
        {
            m_precon->DoTransformToLowEnergy(pInOut, offset);
        }

        void GlobalLinSysPETScStaticCond::v_BasisInvTransform(
            Array<OneD, NekDouble>& pInOut)
        {
            m_precon->DoTransformFromLowEnergy(pInOut);
        }

        /**
         * @brief Apply matrix-vector multiplication using local approach and
         * the assembly map.
         *
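         * Loosely, in operator form (restating the three statements in the
         * body), this applies @f$ y = \mathcal{A}^{T} S^{\mathrm{loc}}
         * \mathcal{A}\, x @f$, where @f$\mathcal{A}@f$ denotes the signed
         * global-to-local scatter over boundary degrees of freedom (with
         * Dirichlet values excluded) and @f$S^{\mathrm{loc}}@f$ is the
         * block-diagonal local Schur complement stored in #m_schurCompl.
         *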
         * @param input   Vector input.
         * @param output  Result of multiplication.
         *
         * @todo This can possibly be made faster by using the sparse
         *       block-matrix multiplication code from the iterative elastic
         *       systems.
         */
        void GlobalLinSysPETScStaticCond::v_DoMatrixMultiply(
            const Array<OneD, const NekDouble> &input,
                  Array<OneD,       NekDouble> &output)
        {
            int nLocBndDofs = m_locToGloMap->GetNumLocalBndCoeffs();
            int nDirDofs    = m_locToGloMap->GetNumGlobalDirBndCoeffs();

            NekVector<NekDouble> in(nLocBndDofs), out(nLocBndDofs);
            m_locToGloMap->GlobalToLocalBnd(input, in.GetPtr(), nDirDofs);
            out = (*m_schurCompl) * in;
            m_locToGloMap->AssembleBnd(out.GetPtr(), output, nDirDofs);
        }

        GlobalLinSysStaticCondSharedPtr GlobalLinSysPETScStaticCond::v_Recurse(
            const GlobalLinSysKey                &mkey,
            const boost::weak_ptr<ExpList>       &pExpList,
            const DNekScalBlkMatSharedPtr         pSchurCompl,
            const DNekScalBlkMatSharedPtr         pBinvD,
            const DNekScalBlkMatSharedPtr         pC,
            const DNekScalBlkMatSharedPtr         pInvD,
            const boost::shared_ptr<AssemblyMap> &l2gMap)
        {
            GlobalLinSysPETScStaticCondSharedPtr sys = MemoryManager<
                GlobalLinSysPETScStaticCond>::AllocateSharedPtr(
                    mkey, pExpList, pSchurCompl, pBinvD, pC, pInvD, l2gMap,
                    m_precon);
            sys->Initialise(l2gMap);
            return sys;
        }
    }
}