MFEM v4.2.0
Finite element discretization library
petsc.cpp
1 // Copyright (c) 2010-2020, Lawrence Livermore National Security, LLC. Produced
2 // at the Lawrence Livermore National Laboratory. All Rights reserved. See files
3 // LICENSE and NOTICE for details. LLNL-CODE-806117.
4 //
5 // This file is part of the MFEM library. For more information and source code
6 // availability visit https://mfem.org.
7 //
8 // MFEM is free software; you can redistribute it and/or modify it under the
9 // terms of the BSD-3 license. We welcome feedback and contributions, see file
10 // CONTRIBUTING.md for details.
11 
12 // Author: Stefano Zampini <stefano.zampini@gmail.com>
13 
14 #include "../config/config.hpp"
15 
16 #ifdef MFEM_USE_MPI
17 #ifdef MFEM_USE_PETSC
18 
19 #include "linalg.hpp"
20 #include "../fem/fem.hpp"
21 
22 #include "petsc.h"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
25 #endif
26 
27 // Backward compatibility
28 #if PETSC_VERSION_LT(3,12,0)
29 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
30 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
31 #endif
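// On older PETSc versions these remappings simply drop the MatType argument;
// an illustrative expansion (assuming PETSC_VERSION_LT(3,12,0)):
//
//   MatComputeOperator(A,MATAIJ,&B);   // is rewritten as
//   MatComputeExplicitOperator(A,&B);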
32 
33 #include <fstream>
34 #include <iomanip>
35 #include <cmath>
36 #include <cstdlib>
37 
38 // Note: there are additional #include statements below.
39 
40 #include "petscinternals.hpp"
41 
42 // Callback functions: these functions will be called by PETSc
43 static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
44 static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
45 static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
46  void*);
47 static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
48 static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
49  PetscReal,Mat,
50  Mat,void*);
51 static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
52  Mat,Mat,Mat,Mat);
53 static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
54 static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
55 static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
56 static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
57 static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
58 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
59  PetscBool*,PetscBool*,void*);
60 static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
61 static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
62 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
63 static PetscErrorCode __mfem_pc_shell_setup(PC);
64 static PetscErrorCode __mfem_pc_shell_destroy(PC);
65 static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
66 static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
67 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
68 static PetscErrorCode __mfem_mat_shell_destroy(Mat);
69 static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
70 static PetscErrorCode __mfem_array_container_destroy(void*);
71 static PetscErrorCode __mfem_matarray_container_destroy(void*);
72 static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
73 
74 // auxiliary functions
75 static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
76  PetscInt,IS*);
77 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
78  const mfem::Array<int>*,PetscInt,IS*);
79 static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
80 static PetscErrorCode MakeShellPCWithFactory(PC,
81  mfem::PetscPreconditionerFactory*);
82 
83 // Equivalent functions are present in the PETSc source code if PETSc has
84 // been compiled with hypre support. We provide them here in case
85 // PETSC_HAVE_HYPRE is not defined.
86 #if !defined(PETSC_HAVE_HYPRE)
87 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
88 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
89 #endif
90 
91 // structs used by PETSc code
92 typedef struct
93 {
94  mfem::Solver *op;
95  mfem::PetscPreconditionerFactory *factory;
96  bool ownsop;
97  unsigned long int numprec;
98 } __mfem_pc_shell_ctx;
99 
100 typedef struct
101 {
102  mfem::Operator *op; // The nonlinear operator
103  mfem::PetscBCHandler *bchandler; // Handling of essential bc
104  mfem::Vector *work; // Work vector
105  mfem::Operator::Type jacType; // OperatorType for the Jacobian
106  // Objective for line search
107  void (*objective)(mfem::Operator *op, const mfem::Vector&, double*);
108  // PostCheck function (to be called after successful line search)
109  void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
110  mfem::Vector&, bool&, bool&);
111  // General purpose update function (to be called at the beginning of
112  // each nonlinear step)
113  void (*update)(mfem::Operator *op, int,
114  const mfem::Vector&, const mfem::Vector&,
115  const mfem::Vector&, const mfem::Vector&);
116 } __mfem_snes_ctx;
117 
118 typedef struct
119 {
120  mfem::TimeDependentOperator *op; // The time-dependent operator
121  mfem::PetscBCHandler *bchandler; // Handling of essential bc
122  mfem::Vector *work; // Work vector
123  mfem::Vector *work2; // Work vector
124  mfem::Operator::Type jacType; // OperatorType for the Jacobian
125  enum mfem::PetscODESolver::Type type;
126  PetscReal cached_shift;
127  PetscObjectState cached_ijacstate;
128  PetscObjectState cached_rhsjacstate;
129  PetscObjectState cached_splits_xstate;
130  PetscObjectState cached_splits_xdotstate;
131 } __mfem_ts_ctx;
132 
133 typedef struct
134 {
135  mfem::PetscSolver *solver; // The solver object
136  mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
137 } __mfem_monitor_ctx;
138 
139 // use global scope ierr to check PETSc errors inside mfem calls
140 static PetscErrorCode ierr;
141 
142 using namespace std;
143 
144 namespace mfem
145 {
146 
147 void MFEMInitializePetsc()
148 {
149  MFEMInitializePetsc(NULL,NULL,NULL,NULL);
150 }
151 
152 void MFEMInitializePetsc(int *argc,char*** argv)
153 {
154  MFEMInitializePetsc(argc,argv,NULL,NULL);
155 }
156 
157 void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
158  const char help[])
159 {
160  ierr = PetscInitialize(argc,argv,rc_file,help);
161  MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
162 }
163 
164 void MFEMFinalizePetsc()
165 {
166  ierr = PetscFinalize();
167  MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
168 }
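// A minimal usage sketch (not part of this file): bring PETSc up through MFEM
// after MPI_Init() and tear it down before MPI_Finalize(). The ".petscrc"
// options file name is only an assumption for illustration.
//
//   int main(int argc, char *argv[])
//   {
//      MPI_Init(&argc, &argv);
//      mfem::MFEMInitializePetsc(&argc, &argv, ".petscrc", NULL);
//      // ... use PETSc-backed MFEM objects ...
//      mfem::MFEMFinalizePetsc();
//      MPI_Finalize();
//      return 0;
//   }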
169 
170 // PetscParVector methods
171 
172 void PetscParVector::_SetDataAndSize_()
173 {
174  const PetscScalar *array;
175  PetscInt n;
176 
177  ierr = VecGetArrayRead(x,&array); PCHKERRQ(x,ierr);
178  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
179  SetDataAndSize((PetscScalar*)array,n);
180  ierr = VecRestoreArrayRead(x,&array); PCHKERRQ(x,ierr);
181 }
182 
183 PetscInt PetscParVector::GlobalSize() const
184 {
185  PetscInt N;
186  ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
187  return N;
188 }
189 
190 PetscParVector::PetscParVector(MPI_Comm comm, const Vector &_x,
191  bool copy) : Vector()
192 {
193  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
194  ierr = VecSetSizes(x,_x.Size(),PETSC_DECIDE); PCHKERRQ(x,ierr);
195  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
196  _SetDataAndSize_();
197  if (copy)
198  {
199  PetscScalar *array;
200 
201  ierr = VecGetArray(x,&array); PCHKERRQ(x,ierr);
202  for (int i = 0; i < _x.Size(); i++) { array[i] = _x[i]; }
203  ierr = VecRestoreArray(x,&array); PCHKERRQ(x,ierr);
204  }
205 }
206 
207 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
208  PetscInt *col) : Vector()
209 {
210  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
211  if (col)
212  {
213  PetscMPIInt myid;
214  MPI_Comm_rank(comm, &myid);
215  ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
216  }
217  else
218  {
219  ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
220  }
221  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
222  _SetDataAndSize_();
223 }
224 
225 PetscParVector::~PetscParVector()
226 {
227  MPI_Comm comm = PetscObjectComm((PetscObject)x);
228  ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
229 }
230 
231 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
232  PetscScalar *_data, PetscInt *col) : Vector()
233 {
234  MFEM_VERIFY(col,"Missing distribution");
235  PetscMPIInt myid;
236  MPI_Comm_rank(comm, &myid);
237  ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,_data,
238  &x); CCHKERRQ(comm,ierr)
239  _SetDataAndSize_();
240 }
241 
242 PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
243 {
244  ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
245  _SetDataAndSize_();
246 }
247 
248 PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
249  bool transpose, bool allocate) : Vector()
250 {
251  PetscInt loc = transpose ? op.Height() : op.Width();
252  if (allocate)
253  {
254  ierr = VecCreate(comm,&x);
255  CCHKERRQ(comm,ierr);
256  ierr = VecSetSizes(x,loc,PETSC_DECIDE);
257  PCHKERRQ(x,ierr);
258  ierr = VecSetType(x,VECSTANDARD);
259  PCHKERRQ(x,ierr);
260  ierr = VecSetUp(x);
261  PCHKERRQ(x,ierr);
262  }
263  else
264  {
265  ierr = VecCreateMPIWithArray(comm,1,loc,PETSC_DECIDE,NULL,
266  &x); CCHKERRQ(comm,ierr);
267  }
268  _SetDataAndSize_();
269 }
270 
271 PetscParVector::PetscParVector(const PetscParMatrix &A,
272  bool transpose, bool allocate) : Vector()
273 {
274  Mat pA = const_cast<PetscParMatrix&>(A);
275  if (!transpose)
276  {
277  ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
278  }
279  else
280  {
281  ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
282  }
283  if (!allocate)
284  {
285  ierr = VecReplaceArray(x,NULL); PCHKERRQ(x,ierr);
286  }
287  _SetDataAndSize_();
288 }
289 
290 PetscParVector::PetscParVector(petsc::Vec y, bool ref) : Vector()
291 {
292  if (ref)
293  {
294  ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
295  }
296  x = y;
297  _SetDataAndSize_();
298 }
299 
300 PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
301 {
302 
303  HYPRE_Int* offsets = pfes->GetTrueDofOffsets();
304  MPI_Comm comm = pfes->GetComm();
305  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
306 
307  PetscMPIInt myid = 0;
308  if (!HYPRE_AssumedPartitionCheck())
309  {
310  MPI_Comm_rank(comm,&myid);
311  }
312  ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
313  PCHKERRQ(x,ierr);
314  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
315  _SetDataAndSize_();
316 }
317 
318 MPI_Comm PetscParVector::GetComm() const
319 {
320  return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
321 }
322 
323 Vector * PetscParVector::GlobalVector() const
324 {
325  VecScatter scctx;
326  Vec vout;
327  PetscScalar *array;
328  PetscInt size;
329 
330  ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
331  ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
332  PCHKERRQ(x,ierr);
333  ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
334  PCHKERRQ(x,ierr);
335  ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
336  ierr = VecGetArray(vout,&array); PCHKERRQ(x,ierr);
337  ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
338  Array<PetscScalar> data(size);
339  data.Assign(array);
340  ierr = VecRestoreArray(vout,&array); PCHKERRQ(x,ierr);
341  ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
342  Vector *v = new Vector(data, internal::to_int(size));
343  v->MakeDataOwner();
344  data.LoseData();
345  return v;
346 }
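// Usage sketch: GlobalVector() scatters the distributed Vec to every process
// and returns a serial Vector owned by the caller (px is an assumed
// PetscParVector):
//
//   mfem::Vector *g = px.GlobalVector(); // same entries on all ranks
//   double v0 = (*g)(0);
//   delete g;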
347 
348 PetscParVector& PetscParVector::operator=(PetscScalar d)
349 {
350  ierr = VecSet(x,d); PCHKERRQ(x,ierr);
351  return *this;
352 }
353 
354 PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
355  const Array<PetscScalar>& vals)
356 {
357  MFEM_VERIFY(idx.Size() == vals.Size(),
358  "Size mismatch between indices and values");
359  PetscInt n = idx.Size();
360  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
361  PCHKERRQ(x,ierr);
362  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
363  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
364  return *this;
365 }
366 
367 PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
368  const Array<PetscScalar>& vals)
369 {
370  MFEM_VERIFY(idx.Size() == vals.Size(),
371  "Size mismatch between indices and values");
372  PetscInt n = idx.Size();
373  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
374  PCHKERRQ(x,ierr);
375  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
376  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
377  return *this;
378 }
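// Usage sketch for the two methods above: indices are global, and both calls
// end with VecAssemblyBegin/End, so the vector is immediately usable
// afterwards (px is an assumed PetscParVector):
//
//   mfem::Array<PetscInt> idx(2);     idx[0] = 0;    idx[1] = 1;
//   mfem::Array<PetscScalar> vals(2); vals[0] = 1.0; vals[1] = 2.0;
//   px.SetValues(idx, vals); // overwrite entries 0 and 1
//   px.AddValues(idx, vals); // accumulate into the same entries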
379 
380 PetscParVector& PetscParVector::operator=(const PetscParVector &y)
381 {
382  ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
383  return *this;
384 }
385 
386 PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
387 {
388  ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
389  return *this;
390 }
391 
392 PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
393 {
394  ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
395  return *this;
396 }
397 
398 PetscParVector& PetscParVector::operator*=(PetscScalar s)
399 {
400  ierr = VecScale(x,s); PCHKERRQ(x,ierr);
401  return *this;
402 }
403 
404 PetscParVector& PetscParVector::operator+=(PetscScalar s)
405 {
406  ierr = VecShift(x,s); PCHKERRQ(x,ierr);
407  return *this;
408 }
409 
410 void PetscParVector::PlaceArray(PetscScalar *temp_data)
411 {
412  ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
413 }
414 
415 void PetscParVector::ResetArray()
416 {
417  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
418 }
419 
420 void PetscParVector::Randomize(PetscInt seed)
421 {
422  PetscRandom rctx = NULL;
423 
424  if (seed)
425  {
426  ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
427  PCHKERRQ(x,ierr);
428  ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
429  ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
430  }
431  ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
432  ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
433 }
434 
435 void PetscParVector::Print(const char *fname, bool binary) const
436 {
437  if (fname)
438  {
439  PetscViewer view;
440 
441  if (binary)
442  {
443  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
444  FILE_MODE_WRITE,&view);
445  }
446  else
447  {
448  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
449  }
450  PCHKERRQ(x,ierr);
451  ierr = VecView(x,view); PCHKERRQ(x,ierr);
452  ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
453  }
454  else
455  {
456  ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
457  }
458 }
459 
460 // PetscParMatrix methods
461 
462 PetscInt PetscParMatrix::GetRowStart() const
463 {
464  PetscInt N;
465  ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
466  return N;
467 }
468 
469 PetscInt PetscParMatrix::GetColStart() const
470 {
471  PetscInt N;
472  ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
473  return N;
474 }
475 
476 PetscInt PetscParMatrix::GetNumRows() const
477 {
478  PetscInt N;
479  ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
480  return N;
481 }
482 
483 PetscInt PetscParMatrix::GetNumCols() const
484 {
485  PetscInt N;
486  ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
487  return N;
488 }
489 
490 PetscInt PetscParMatrix::M() const
491 {
492  PetscInt N;
493  ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
494  return N;
495 }
496 
497 PetscInt PetscParMatrix::N() const
498 {
499  PetscInt N;
500  ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
501  return N;
502 }
503 
504 PetscInt PetscParMatrix::NNZ() const
505 {
506  MatInfo info;
507  ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
508  return (PetscInt)info.nz_used;
509 }
510 
511 void PetscParMatrix::Init()
512 {
513  A = NULL;
514  X = Y = NULL;
515  height = width = 0;
516 }
517 
518 PetscParMatrix::PetscParMatrix()
519 {
520  Init();
521 }
522 
523 PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
524  const mfem::Array<PetscInt>& rows, const mfem::Array<PetscInt>& cols)
525 {
526  Init();
527 
528  Mat B = const_cast<PetscParMatrix&>(pB);
529 
530  IS isr,isc;
531  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
532  rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
533  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
534  cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
535  ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
536  ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
537  ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
538 
539  height = GetNumRows();
540  width = GetNumCols();
541 }
542 
543 PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
544 {
545  Init();
546  height = pa->Height();
547  width = pa->Width();
548  ConvertOperator(pa->GetComm(),*pa,&A,tid);
549 }
550 
551 PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
552 {
553  Init();
554  height = ha->Height();
555  width = ha->Width();
556  ConvertOperator(ha->GetComm(),*ha,&A,tid);
557 }
558 
559 PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
560 {
561  Init();
562  height = sa->Height();
563  width = sa->Width();
564  ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
565 }
566 
567 PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
568  Operator::Type tid)
569 {
570  Init();
571  height = op->Height();
572  width = op->Width();
573  ConvertOperator(comm,*op,&A,tid);
574 }
575 
576 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
577  PetscInt *row_starts, SparseMatrix *diag,
578  Operator::Type tid)
579 {
580  Init();
581  BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
582  tid==PETSC_MATAIJ,&A);
583  // update base class
584  height = GetNumRows();
585  width = GetNumCols();
586 }
587 
588 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
589  PetscInt global_num_cols, PetscInt *row_starts,
590  PetscInt *col_starts, SparseMatrix *diag,
591  Operator::Type tid)
592 {
593  Init();
594  BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
595  tid==PETSC_MATAIJ,&A);
596  // update base class
597  height = GetNumRows();
598  width = GetNumCols();
599 }
600 
601 PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
602 {
603  if (A)
604  {
605  MPI_Comm comm = PetscObjectComm((PetscObject)A);
606  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
607  if (X) { delete X; }
608  if (Y) { delete Y; }
609  X = Y = NULL;
610  }
611  height = B.Height();
612  width = B.Width();
613 #if defined(PETSC_HAVE_HYPRE)
614  ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
615 #else
616  ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
617 #endif
618  return *this;
619 }
620 
621 PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
622 {
623  if (A)
624  {
625  MPI_Comm comm = PetscObjectComm((PetscObject)A);
626  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
627  if (X) { delete X; }
628  if (Y) { delete Y; }
629  X = Y = NULL;
630  }
631  height = B.Height();
632  width = B.Width();
633  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
634  return *this;
635 }
636 
637 PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
638 {
639  if (!A)
640  {
641  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
642  }
643  else
644  {
645  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
646  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
647  ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
648  }
649  return *this;
650 }
651 
652 PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
653 {
654  if (!A)
655  {
656  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
657  ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
658  }
659  else
660  {
661  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
662  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
663  ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
664  }
665  return *this;
666 }
667 
668 void PetscParMatrix::
669 BlockDiagonalConstructor(MPI_Comm comm,
670  PetscInt *row_starts, PetscInt *col_starts,
671  SparseMatrix *diag, bool assembled, Mat* Ad)
672 {
673  Mat A;
674  PetscInt lrsize,lcsize,rstart,cstart;
675  PetscMPIInt myid = 0,commsize;
676 
677  ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
678  if (!HYPRE_AssumedPartitionCheck())
679  {
680  ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
681  }
682  lrsize = row_starts[myid+1]-row_starts[myid];
683  rstart = row_starts[myid];
684  lcsize = col_starts[myid+1]-col_starts[myid];
685  cstart = col_starts[myid];
686 
687  if (!assembled)
688  {
689  IS is;
690  ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
691  ISLocalToGlobalMapping rl2g,cl2g;
692  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
693  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
694  if (row_starts != col_starts)
695  {
696  ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
697  CCHKERRQ(comm,ierr);
698  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
699  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
700  }
701  else
702  {
703  ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
704  cl2g = rl2g;
705  }
706 
707  // Create the PETSc object (MATIS format)
708  ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
709  ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
710  PCHKERRQ(A,ierr);
711  ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
712  ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
713  ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
714  ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
715 
716  // Copy SparseMatrix into PETSc SeqAIJ format
717  Mat lA;
718  ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
719  int *II = diag->GetI();
720  int *JJ = diag->GetJ();
721 #if defined(PETSC_USE_64BIT_INDICES)
722  PetscInt *pII,*pJJ;
723  int m = diag->Height()+1, nnz = II[diag->Height()];
724  ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
725  for (int i = 0; i < m; i++) { pII[i] = II[i]; }
726  for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
727  ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
728  diag->GetData()); PCHKERRQ(lA,ierr);
729  ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
730 #else
731  ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
732  diag->GetData()); PCHKERRQ(lA,ierr);
733 #endif
734  }
735  else
736  {
737  PetscScalar *da;
738  PetscInt *dii,*djj,*oii,
739  m = diag->Height()+1, nnz = diag->NumNonZeroElems();
740 
741  diag->SortColumnIndices();
742  // if we can take ownership of the SparseMatrix arrays, we can avoid this
743  // step
744  ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
745  ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
746  ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
747  if (sizeof(PetscInt) == sizeof(int))
748  {
749  ierr = PetscMemcpy(dii,diag->GetI(),m*sizeof(PetscInt));
750  CCHKERRQ(PETSC_COMM_SELF,ierr);
751  ierr = PetscMemcpy(djj,diag->GetJ(),nnz*sizeof(PetscInt));
752  CCHKERRQ(PETSC_COMM_SELF,ierr);
753  }
754  else
755  {
756  int *iii = diag->GetI();
757  int *jjj = diag->GetJ();
758  for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
759  for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
760  }
761  ierr = PetscMemcpy(da,diag->GetData(),nnz*sizeof(PetscScalar));
762  CCHKERRQ(PETSC_COMM_SELF,ierr);
763  ierr = PetscCalloc1(m,&oii);
764  CCHKERRQ(PETSC_COMM_SELF,ierr);
765  if (commsize > 1)
766  {
767  ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
768  PETSC_DECIDE,
769  dii,djj,da,oii,NULL,NULL,&A);
770  CCHKERRQ(comm,ierr);
771  }
772  else
773  {
774  ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
775  CCHKERRQ(comm,ierr);
776  }
777 
778  void *ptrs[4] = {dii,djj,da,oii};
779  const char *names[4] = {"_mfem_csr_dii",
780  "_mfem_csr_djj",
781  "_mfem_csr_da",
782  "_mfem_csr_oii",
783  };
784  for (PetscInt i=0; i<4; i++)
785  {
786  PetscContainer c;
787 
788  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
789  ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
790  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
791  CCHKERRQ(comm,ierr);
792  ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
793  CCHKERRQ(comm,ierr);
794  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
795  }
796  }
797 
798  // Tell PETSc the matrix is ready to be used
799  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
800  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
801 
802  *Ad = A;
803 }
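// Sketch of how this helper is reached (a minimal example, assuming a local
// SparseMatrix `diag` and a row distribution `starts` in the usual hypre
// offset format): the assembled branch produces AIJ, the unassembled one
// MATIS.
//
//   mfem::PetscParMatrix A(MPI_COMM_WORLD, glob_size, starts, &diag,
//                          mfem::Operator::PETSC_MATAIJ);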
804 
805 MPI_Comm PetscParMatrix::GetComm() const
806 {
807  return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
808 }
809 
810 // TODO ADD THIS CONSTRUCTOR
811 //PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
812 // PetscInt glob_ncols, int *I, PetscInt *J,
813 // double *data, PetscInt *rows, PetscInt *cols)
814 //{
815 //}
816 
817 // TODO This should take a reference on op but how?
818 void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
819 {
820  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
821  ierr = MatSetSizes(*A,op->Height(),op->Width(),
822  PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
823  ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
824  ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
825  ierr = MatShellSetOperation(*A,MATOP_MULT,
826  (void (*)())__mfem_mat_shell_apply);
827  PCHKERRQ(A,ierr);
828  ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
829  (void (*)())__mfem_mat_shell_apply_transpose);
830  PCHKERRQ(A,ierr);
831  ierr = MatShellSetOperation(*A,MATOP_COPY,
832  (void (*)())__mfem_mat_shell_copy);
833  PCHKERRQ(A,ierr);
834  ierr = MatShellSetOperation(*A,MATOP_DESTROY,
835  (void (*)())__mfem_mat_shell_destroy);
836  PCHKERRQ(A,ierr);
837  ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
838 }
839 
840 void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
841  Operator::Type tid)
842 {
843  PetscParMatrix *pA = const_cast<PetscParMatrix *>
844  (dynamic_cast<const PetscParMatrix *>(&op));
845  HypreParMatrix *pH = const_cast<HypreParMatrix *>
846  (dynamic_cast<const HypreParMatrix *>(&op));
847  BlockOperator *pB = const_cast<BlockOperator *>
848  (dynamic_cast<const BlockOperator *>(&op));
849  IdentityOperator *pI = const_cast<IdentityOperator *>
850  (dynamic_cast<const IdentityOperator *>(&op));
851  SparseMatrix *pS = const_cast<SparseMatrix *>
852  (dynamic_cast<const SparseMatrix *>(&op));
853 
854  if (pA && tid == ANY_TYPE) // use same object and return
855  {
856  ierr = PetscObjectReference((PetscObject)(pA->A));
857  CCHKERRQ(pA->GetComm(),ierr);
858  *A = pA->A;
859  return;
860  }
861 
862  PetscBool avoidmatconvert = PETSC_FALSE;
863  if (pA) // we test for these types since MatConvert will fail
864  {
865  ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
866  MATSHELL,"");
867  CCHKERRQ(comm,ierr);
868  }
869  if (pA && !avoidmatconvert)
870  {
871  Mat At = NULL;
872  PetscBool istrans;
873 #if PETSC_VERSION_LT(3,10,0)
874  PetscBool ismatis;
875 #endif
876 
877  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
878  CCHKERRQ(pA->GetComm(),ierr);
879  if (!istrans)
880  {
881  if (tid == pA->GetType()) // use same object and return
882  {
883  ierr = PetscObjectReference((PetscObject)(pA->A));
884  CCHKERRQ(pA->GetComm(),ierr);
885  *A = pA->A;
886  return;
887  }
888 #if PETSC_VERSION_LT(3,10,0)
889  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
890  CCHKERRQ(pA->GetComm(),ierr);
891 #endif
892  }
893  else
894  {
895  ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
896 #if PETSC_VERSION_LT(3,10,0)
897  ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
898 #endif
899  CCHKERRQ(pA->GetComm(),ierr);
900  }
901 
902  // Try to convert
903  if (tid == PETSC_MATAIJ)
904  {
905 #if PETSC_VERSION_LT(3,10,0)
906  if (ismatis)
907  {
908  if (istrans)
909  {
910  Mat B;
911 
912  ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
913  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
914  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
915  }
916  else
917  {
918  ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
919  PCHKERRQ(pA->A,ierr);
920  }
921  }
922  else
923 #endif
924  {
925  PetscMPIInt size;
926  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
927 
928  // call MatConvert and see if a converter is available
929  if (istrans)
930  {
931  Mat B;
932  ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
933  PCHKERRQ(pA->A,ierr);
934  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
935  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
936  }
937  else
938  {
939  ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
940  PCHKERRQ(pA->A,ierr);
941  }
942  }
943  }
944  else if (tid == PETSC_MATIS)
945  {
946  if (istrans)
947  {
948  Mat B;
949  ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
950  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
951  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
952  }
953  else
954  {
955  ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
956  }
957  }
958  else if (tid == PETSC_MATHYPRE)
959  {
960 #if defined(PETSC_HAVE_HYPRE)
961  if (istrans)
962  {
963  Mat B;
964  ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
965  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
966  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
967  }
968  else
969  {
970  ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
971  }
972 #else
973  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
974 #endif
975  }
976  else if (tid == PETSC_MATSHELL)
977  {
978  MakeWrapper(comm,&op,A);
979  }
980  else
981  {
982  MFEM_ABORT("Unsupported operator type conversion " << tid)
983  }
984  }
985  else if (pH)
986  {
987  if (tid == PETSC_MATAIJ)
988  {
989 #if defined(PETSC_HAVE_HYPRE)
990  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
991  PETSC_USE_POINTER,A);
992 #else
993  ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
994 #endif
995  CCHKERRQ(pH->GetComm(),ierr);
996  }
997  else if (tid == PETSC_MATIS)
998  {
999 #if defined(PETSC_HAVE_HYPRE)
1000  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1001  PETSC_USE_POINTER,A);
1002 #else
1003  ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1004 #endif
1005  CCHKERRQ(pH->GetComm(),ierr);
1006  }
1007  else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
1008  {
1009 #if defined(PETSC_HAVE_HYPRE)
1010  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1011  PETSC_USE_POINTER,A);
1012  CCHKERRQ(pH->GetComm(),ierr);
1013 #else
1014  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1015 #endif
1016  }
1017  else if (tid == PETSC_MATSHELL)
1018  {
1019  MakeWrapper(comm,&op,A);
1020  }
1021  else
1022  {
1023  MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
1024  " is not implemented");
1025  }
1026  }
1027  else if (pB)
1028  {
1029  Mat *mats,*matsl2l = NULL;
1030  PetscInt i,j,nr,nc;
1031 
1032  nr = pB->NumRowBlocks();
1033  nc = pB->NumColBlocks();
1034  ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1035  if (tid == PETSC_MATIS)
1036  {
1037  ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1038  }
1039  for (i=0; i<nr; i++)
1040  {
1041  PetscBool needl2l = PETSC_TRUE;
1042 
1043  for (j=0; j<nc; j++)
1044  {
1045  if (!pB->IsZeroBlock(i,j))
1046  {
1047  ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
1048  if (tid == PETSC_MATIS && needl2l)
1049  {
1050  PetscContainer c;
1051  ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
1052  (PetscObject*)&c);
1053  PCHKERRQ(mats[i*nc+j],ierr);
1054  // special case for block operators: the local Vdofs should be
1055  // ordered as:
1056  // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
1057  // with m fields, Ni the number of Vdofs for the i-th field
1058  if (c)
1059  {
1060  Array<Mat> *l2l = NULL;
1061  ierr = PetscContainerGetPointer(c,(void**)&l2l);
1062  PCHKERRQ(c,ierr);
1063  MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
1064  << l2l->Size() << " for block row " << i );
1065  ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
1066  PCHKERRQ(c,ierr);
1067  matsl2l[i] = (*l2l)[0];
1068  needl2l = PETSC_FALSE;
1069  }
1070  }
1071  }
1072  }
1073  }
1074  ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1075  if (tid == PETSC_MATIS)
1076  {
1077  ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1078 
1079  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
1080  for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1081  ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1082 
1083  PetscContainer c;
1084  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1085  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1086  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1087  PCHKERRQ(c,ierr);
1088  ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
1089  PCHKERRQ((*A),ierr);
1090  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1091  }
1092  for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1093  ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1094  }
1095  else if (pI && tid == PETSC_MATAIJ)
1096  {
1097  PetscInt rst;
1098 
1099  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1100  ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
1101  PCHKERRQ(A,ierr);
1102  ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1103  ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1104  ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1105  ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1106  ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1107  for (PetscInt i = rst; i < rst+pI->Height(); i++)
1108  {
1109  ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1110  }
1111  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1112  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1113  }
1114  else if (pS)
1115  {
1116  if (tid == PETSC_MATSHELL)
1117  {
1118  MakeWrapper(comm,&op,A);
1119  }
1120  else
1121  {
1122  Mat B;
1123  PetscScalar *pdata;
1124  PetscInt *pii,*pjj,*oii;
1125  PetscMPIInt size;
1126 
1127  int m = pS->Height();
1128  int n = pS->Width();
1129  int *ii = pS->GetI();
1130  int *jj = pS->GetJ();
1131  double *data = pS->GetData();
1132 
1133  ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1134  ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1135  ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1136  pii[0] = ii[0];
1137  for (int i = 0; i < m; i++)
1138  {
1139  bool issorted = true;
1140  pii[i+1] = ii[i+1];
1141  for (int j = ii[i]; j < ii[i+1]; j++)
1142  {
1143  pjj[j] = jj[j];
1144  if (j != ii[i] && issorted) { issorted = (pjj[j] > pjj[j-1]); }
1145  pdata[j] = data[j];
1146  }
1147  if (!issorted)
1148  {
1149  ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1150  CCHKERRQ(PETSC_COMM_SELF,ierr);
1151  }
1152  }
1153 
1154  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1155  if (size == 1)
1156  {
1157  ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1158  CCHKERRQ(comm,ierr);
1159  oii = NULL;
1160  }
1161  else // block diagonal constructor
1162  {
1163  ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1164  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1165  PETSC_DECIDE,
1166  pii,pjj,pdata,oii,NULL,NULL,&B);
1167  CCHKERRQ(comm,ierr);
1168  }
1169  void *ptrs[4] = {pii,pjj,pdata,oii};
1170  const char *names[4] = {"_mfem_csr_pii",
1171  "_mfem_csr_pjj",
1172  "_mfem_csr_pdata",
1173  "_mfem_csr_oii"
1174  };
1175  for (int i=0; i<4; i++)
1176  {
1177  PetscContainer c;
1178 
1179  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1180  ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1181  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1182  PCHKERRQ(B,ierr);
1183  ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
1184  PCHKERRQ(B,ierr);
1185  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1186  }
1187  if (tid == PETSC_MATAIJ)
1188  {
1189  *A = B;
1190  }
1191  else if (tid == PETSC_MATHYPRE)
1192  {
1193  ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1194  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1195  }
1196  else if (tid == PETSC_MATIS)
1197  {
1198  ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1199  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1200  }
1201  else
1202  {
1203  MFEM_ABORT("Unsupported operator type conversion " << tid)
1204  }
1205  }
1206  }
1207  else // fallback to general operator
1208  {
1209  MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
1210  "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1211  MakeWrapper(comm,&op,A);
1212  if (tid == PETSC_MATAIJ)
1213  {
1214  Mat B;
1215  PetscBool isaij;
1216 
1217  ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1218  ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
1219  CCHKERRQ(comm,ierr);
1220  ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1221  if (!isaij)
1222  {
1223  ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1224  ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1225  }
1226  else
1227  {
1228  *A = B;
1229  }
1230  }
1231  }
1232 }
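// Conversion sketch: any mfem::Operator can be wrapped matrix-free as a
// PETSc MATSHELL, while explicit formats are assembled on request (op is an
// assumed mfem::Operator defined on MPI_COMM_WORLD):
//
//   mfem::PetscParMatrix S(MPI_COMM_WORLD, &op, mfem::Operator::PETSC_MATSHELL);
//   mfem::PetscParMatrix B(MPI_COMM_WORLD, &op, mfem::Operator::PETSC_MATAIJ);
//
// Note that PETSC_MATAIJ on a generic operator goes through
// MatComputeOperator(), which applies the operator column-by-column and is
// intended for testing rather than production runs.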
1233 
1234 void PetscParMatrix::Destroy()
1235 {
1236  if (A != NULL)
1237  {
1238  MPI_Comm comm = MPI_COMM_NULL;
1239  ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
1240  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1241  }
1242  delete X;
1243  delete Y;
1244  X = Y = NULL;
1245 }
1246 
1247 PetscParMatrix::PetscParMatrix(petsc::Mat a, bool ref)
1248 {
1249  if (ref)
1250  {
1251  ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
1252  }
1253  Init();
1254  A = a;
1255  height = GetNumRows();
1256  width = GetNumCols();
1257 }
1258 
1259 void PetscParMatrix::SetMat(petsc::Mat _A)
1260 {
1261  if (_A == A) { return; }
1262  Destroy();
1263  ierr = PetscObjectReference((PetscObject)_A); PCHKERRQ(_A,ierr);
1264  A = _A;
1265  height = GetNumRows();
1266  width = GetNumCols();
1267 }
1268 
1269 // Computes y = alpha * A * x + beta * y
1270 // or y = alpha * A^T* x + beta * y
1271 static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
1272  bool transpose)
1273 {
1274  PetscErrorCode (*f)(Mat,Vec,Vec);
1275  PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
1276  if (transpose)
1277  {
1278  f = MatMultTranspose;
1279  fadd = MatMultTransposeAdd;
1280  }
1281  else
1282  {
1283  f = MatMult;
1284  fadd = MatMultAdd;
1285  }
1286  if (a != 0.)
1287  {
1288  if (b == 1.)
1289  {
1290  ierr = VecScale(X,a); PCHKERRQ(A,ierr);
1291  ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1292  ierr = VecScale(X,1./a); PCHKERRQ(A,ierr);
1293  }
1294  else if (b != 0.)
1295  {
1296  ierr = VecScale(X,a); PCHKERRQ(A,ierr);
1297  ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1298  ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1299  ierr = VecScale(X,1./a); PCHKERRQ(A,ierr);
1300  }
1301  else
1302  {
1303  ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1304  if (a != 1.)
1305  {
1306  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1307  }
1308  }
1309  }
1310  else
1311  {
1312  if (b == 1.)
1313  {
1314  // do nothing
1315  }
1316  else if (b != 0.)
1317  {
1318  ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1319  }
1320  else
1321  {
1322  ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1323  }
1324  }
1325 }
1326 
1327 void PetscParMatrix::MakeRef(const PetscParMatrix &master)
1328 {
1329  ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
1330  Destroy();
1331  Init();
1332  A = master.A;
1333  height = master.height;
1334  width = master.width;
1335 }
1336 
1337 PetscParVector * PetscParMatrix::GetX() const
1338 {
1339  if (!X)
1340  {
1341  MFEM_VERIFY(A,"Mat not present");
1342  X = new PetscParVector(*this,false); PCHKERRQ(A,ierr);
1343  }
1344  return X;
1345 }
1346 
1347 PetscParVector * PetscParMatrix::GetY() const
1348 {
1349  if (!Y)
1350  {
1351  MFEM_VERIFY(A,"Mat not present");
1352  Y = new PetscParVector(*this,true); PCHKERRQ(A,ierr);
1353  }
1354  return Y;
1355 }
1356 
1357 PetscParMatrix * PetscParMatrix::Transpose(bool action)
1358 {
1359  Mat B;
1360  if (action)
1361  {
1362  ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1363  }
1364  else
1365  {
1366  ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1367  }
1368  return new PetscParMatrix(B,false);
1369 }
1370 
1371 void PetscParMatrix::operator*=(double s)
1372 {
1373  ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1374 }
1375 
1376 void PetscParMatrix::Mult(double a, const Vector &x, double b, Vector &y) const
1377 {
1378  MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
1379  << ", expected size = " << Width());
1380  MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
1381  << ", expected size = " << Height());
1382 
1383  PetscParVector *XX = GetX();
1384  PetscParVector *YY = GetY();
1385  XX->PlaceArray(x.GetData());
1386  YY->PlaceArray(y.GetData());
1387  MatMultKernel(A,a,XX->x,b,YY->x,false);
1388  XX->ResetArray();
1389  YY->ResetArray();
1390 }
1391 
1392 void PetscParMatrix::MultTranspose(double a, const Vector &x, double b,
1393  Vector &y) const
1394 {
1395  MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
1396  << ", expected size = " << Height());
1397  MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
1398  << ", expected size = " << Width());
1399 
1400  PetscParVector *XX = GetX();
1401  PetscParVector *YY = GetY();
1402  YY->PlaceArray(x.GetData());
1403  XX->PlaceArray(y.GetData());
1404  MatMultKernel(A,a,YY->x,b,XX->x,true);
1405  XX->ResetArray();
1406  YY->ResetArray();
1407 }
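// The two methods above realize y = a*A*x + b*y and y = a*A^T*x + b*y by
// placing the user arrays into the cached work vectors X and Y. Usage sketch
// (A is an assumed PetscParMatrix, x and y conforming mfem::Vectors):
//
//   A.Mult(x, y);                    // y = A*x
//   A.Mult(2.0, x, 1.0, y);          // y = 2*A*x + y
//   A.MultTranspose(1.0, y, 0.0, x); // x = A^T*y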
1408 
1409 void PetscParMatrix::Print(const char *fname, bool binary) const
1410 {
1411  if (fname)
1412  {
1413  PetscViewer view;
1414 
1415  if (binary)
1416  {
1417  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
1418  FILE_MODE_WRITE,&view);
1419  }
1420  else
1421  {
1422  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
1423  }
1424  PCHKERRQ(A,ierr);
1425  ierr = MatView(A,view); PCHKERRQ(A,ierr);
1426  ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
1427  }
1428  else
1429  {
1430  ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1431  }
1432 }
1433 
1434 void PetscParMatrix::ScaleRows(const Vector & s)
1435 {
1436  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
1437  << ", expected size = " << Height());
1438 
1439  PetscParVector *YY = GetY();
1440  YY->PlaceArray(s.GetData());
1441  ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
1442  YY->ResetArray();
1443 }
1444 
1445 void PetscParMatrix::ScaleCols(const Vector & s)
1446 {
1447  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
1448  << ", expected size = " << Width());
1449 
1450  PetscParVector *XX = GetX();
1451  XX->PlaceArray(s.GetData());
1452  ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
1453  XX->ResetArray();
1454 }
1455 
1456 void PetscParMatrix::Shift(double s)
1457 {
1458  ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
1459 }
1460 
1461 void PetscParMatrix::Shift(const Vector & s)
1462 {
1463  // for matrices with square diagonal blocks only
1464  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
1465  << ", expected size = " << Height());
1466  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
1467  << ", expected size = " << Width());
1468 
1469  PetscParVector *XX = GetX();
1470  XX->PlaceArray(s.GetData());
1471  ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
1472  XX->ResetArray();
1473 }
1474 
1475 PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
1476  PetscParMatrix *P)
1477 {
1478  MFEM_VERIFY(A->Width() == P->Height(),
1479  "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
1480  " differs from number of local rows of P " << P->Height());
1481  MFEM_VERIFY(A->Height() == R->Width(),
1482  "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
1483  " differs from number of local cols of R " << R->Width());
1484  Mat B;
1485  ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
1486  PCHKERRQ(*R,ierr);
1487  return new PetscParMatrix(B);
1488 }
1489 
1490 PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
1491 {
1492  Mat pA = *A,pP = *P,pRt = *Rt;
1493  Mat B;
1494  PetscBool Aismatis,Pismatis,Rtismatis;
1495 
1496  MFEM_VERIFY(A->Width() == P->Height(),
1497  "Petsc RAP: Number of local cols of A " << A->Width() <<
1498  " differs from number of local rows of P " << P->Height());
1499  MFEM_VERIFY(A->Height() == Rt->Height(),
1500  "Petsc RAP: Number of local rows of A " << A->Height() <<
1501  " differs from number of local rows of Rt " << Rt->Height());
1502  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
1503  PCHKERRQ(pA,ierr);
1504  ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
1505  PCHKERRQ(pA,ierr);
1506  ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
1507  PCHKERRQ(pA,ierr);
1508  if (Aismatis &&
1509  Pismatis &&
1510  Rtismatis) // handle special case (this code will eventually go into PETSc)
1511  {
1512  Mat lA,lP,lB,lRt;
1513  ISLocalToGlobalMapping cl2gP,cl2gRt;
1514  PetscInt rlsize,clsize,rsize,csize;
1515 
1516  ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
1517  ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
1518  ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
1519  ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
1520  ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
1521  ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
1522  ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
1523  ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
1524  ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
1525  ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
1526  ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
1527  ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
1528  ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
1529  if (lRt == lP)
1530  {
1531  ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
1532  PCHKERRQ(lA,ierr);
1533  }
1534  else
1535  {
1536  Mat lR;
1537  ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
1538  ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
1539  PCHKERRQ(lRt,ierr);
1540  ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
1541  }
1542 
1543  // attach lRt matrix to the subdomain local matrix
1544  // it may be used if markers on vdofs have to be mapped on
1545  // subdomain true dofs
1546  {
1547  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
1548  ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
1549  (*vmatsl2l)[0] = lRt;
1550 
1551  PetscContainer c;
1552  ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
1553  PCHKERRQ(B,ierr);
1554  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1555  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1556  PCHKERRQ(c,ierr);
1557  ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
1558  PCHKERRQ(B,ierr);
1559  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1560  }
1561 
1562  // Set local problem
1563  ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
1564  ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
1565  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
1566  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
1567  }
1568  else // raises an error if this PtAP combination is not supported by PETSc
1569  {
1570  if (pP == pRt)
1571  {
1572  ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
1573  PCHKERRQ(pA,ierr);
1574  }
1575  else
1576  {
1577  Mat pR;
1578  ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
1579  ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
1580  PCHKERRQ(pRt,ierr);
1581  ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
1582  }
1583  }
1584  return new PetscParMatrix(B);
1585 }
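// Usage sketch: the classical Galerkin triple product. With Rt == P this
// reduces to a PtAP, and the MATIS fast path above is taken when all three
// matrices are unassembled (A and P are assumed PetscParMatrix objects):
//
//   mfem::PetscParMatrix *Ac = mfem::RAP(P, A, P); // Ac = P^T A P
//   delete Ac;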
1586 
1587 PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
1588 {
1589  PetscParMatrix *out = RAP(P,A,P);
1590  return out;
1591 }
1592 
1593 PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
1594 {
1595  PetscParMatrix *out,*A;
1596 #if defined(PETSC_HAVE_HYPRE)
1597  A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
1598 #else
1599  A = new PetscParMatrix(hA);
1600 #endif
1601  out = RAP(P,A,P);
1602  delete A;
1603  return out;
1604 }
1605 
1606 
1607 PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
1608 {
1609  Mat AB;
1610 
1611  ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
1612  CCHKERRQ(A->GetComm(),ierr);
1613  return new PetscParMatrix(AB);
1614 }
1615 
1616 PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
1617 {
1618  Mat Ae;
1619 
1620  PetscParVector dummy(GetComm(),0);
1621  ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
1622  EliminateRowsCols(rows_cols,dummy,dummy);
1623  ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
1624  return new PetscParMatrix(Ae);
1625 }
1626 
1627 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
1628  const HypreParVector &X,
1629  HypreParVector &B,
1630  double diag)
1631 {
1632  MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
1633 }
1634 
1635 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
1636  const PetscParVector &X,
1637  PetscParVector &B,
1638  double diag)
1639 {
1640  PetscInt M,N;
1641  ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
1642  MFEM_VERIFY(M == N,"Rectangular case unsupported");
1643 
1644  // TODO: what if a diagonal term is not present?
1645  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
1646 
1647  // rows need to be in global numbering
1648  PetscInt rst;
1649  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
1650 
1651  IS dir;
1652  ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
1653  if (!X.GlobalSize() && !B.GlobalSize())
1654  {
1655  ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
1656  }
1657  else
1658  {
1659  ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
1660  }
1661  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
1662 }
1663 
1664 void PetscParMatrix::EliminateRows(const Array<int> &rows)
1665 {
1666  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
1667 
1668  // rows need to be in global numbering
1669  PetscInt rst;
1670  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
1671 
1672  IS dir;
1673  ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
1674  ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
1675  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
1676 }
1677 
1678 petsc::Mat PetscParMatrix::ReleaseMat(bool dereference)
1679 {
1680 
1681  Mat B = A;
1682  if (dereference)
1683  {
1684  MPI_Comm comm = GetComm();
1685  ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
1686  }
1687  A = NULL;
1688  return B;
1689 }
1690 
1691 Operator::Type PetscParMatrix::GetType() const
1692 {
1693  PetscBool ok;
1694  MFEM_VERIFY(A, "no associated PETSc Mat object");
1695  PetscObject oA = (PetscObject)(this->A);
1696  // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to -> PETSC_MATAIJ
1697  ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
1698  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
1699  ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
1700  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
1701  ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
1702  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
1703  ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
1704  if (ok == PETSC_TRUE) { return PETSC_MATIS; }
1705  ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
1706  if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
1707  ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
1708  if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
1709 #if defined(PETSC_HAVE_HYPRE)
1710  ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
1711  if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
1712 #endif
1713  return PETSC_MATGENERIC;
1714 }
1715 
1716 void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
1717  const Array<int> &ess_dof_list,
1718  const Vector &X, Vector &B)
1719 {
1720  const PetscScalar *array;
1721  Mat pA = const_cast<PetscParMatrix&>(A);
1722 
1723  // B -= Ae*X
1724  Ae.Mult(-1.0, X, 1.0, B);
1725 
1726  Vec diag = const_cast<PetscParVector&>((*A.GetX()));
1727  ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
1728  ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
1729  for (int i = 0; i < ess_dof_list.Size(); i++)
1730  {
1731  int r = ess_dof_list[i];
1732  B(r) = array[r] * X(r);
1733  }
1734  ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
1735 }
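// Usage sketch for the elimination routines above (A is an assumed square
// PetscParMatrix, ess_tdof_list the essential true dofs, X and B solution
// and right-hand side vectors): split off the eliminated part first, then
// fix up the right-hand side.
//
//   mfem::PetscParMatrix *Ae = A.EliminateRowsCols(ess_tdof_list);
//   mfem::EliminateBC(A, *Ae, ess_tdof_list, X, B);
//   delete Ae;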
1736 
1737 // PetscSolver methods
1738 
1739 PetscSolver::PetscSolver() : clcustom(false)
1740 {
1741  obj = NULL;
1742  B = X = NULL;
1743  cid = -1;
1744  operatorset = false;
1745  bchandler = NULL;
1746  private_ctx = NULL;
1747 }
1748 
1749 PetscSolver::~PetscSolver()
1750 {
1751  delete B;
1752  delete X;
1753  FreePrivateContext();
1754 }
1755 
1756 void PetscSolver::SetTol(double tol)
1757 {
1758  SetRelTol(tol);
1759 }
1760 
1761 void PetscSolver::SetRelTol(double tol)
1762 {
1763  if (cid == KSP_CLASSID)
1764  {
1765  KSP ksp = (KSP)obj;
1766  ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
1767  }
1768  else if (cid == SNES_CLASSID)
1769  {
1770  SNES snes = (SNES)obj;
1771  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
1772  PETSC_DEFAULT);
1773  }
1774  else if (cid == TS_CLASSID)
1775  {
1776  TS ts = (TS)obj;
1777  ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
1778  }
1779  else
1780  {
1781  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1782  }
1783  PCHKERRQ(obj,ierr);
1784 }
1785 
1786 void PetscSolver::SetAbsTol(double tol)
1787 {
1788  if (cid == KSP_CLASSID)
1789  {
1790  KSP ksp = (KSP)obj;
1791  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
1792  }
1793  else if (cid == SNES_CLASSID)
1794  {
1795  SNES snes = (SNES)obj;
1796  ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1797  PETSC_DEFAULT);
1798  }
1799  else if (cid == TS_CLASSID)
1800  {
1801  TS ts = (TS)obj;
1802  ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
1803  }
1804  else
1805  {
1806  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1807  }
1808  PCHKERRQ(obj,ierr);
1809 }
1810 
1811 void PetscSolver::SetMaxIter(int max_iter)
1812 {
1813  if (cid == KSP_CLASSID)
1814  {
1815  KSP ksp = (KSP)obj;
1816  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1817  max_iter);
1818  }
1819  else if (cid == SNES_CLASSID)
1820  {
1821  SNES snes = (SNES)obj;
1822  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1823  max_iter,PETSC_DEFAULT);
1824  }
1825  else if (cid == TS_CLASSID)
1826  {
1827  TS ts = (TS)obj;
1828  ierr = TSSetMaxSteps(ts,max_iter);
1829  }
1830  else
1831  {
1832  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1833  }
1834  PCHKERRQ(obj,ierr);
1835 }
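// Usage sketch: the setters above forward to KSPSetTolerances,
// SNESSetTolerances or TSSetTolerances/TSSetMaxSteps depending on the
// wrapped object (solver is an assumed PetscLinearSolver):
//
//   solver.SetRelTol(1e-8);
//   solver.SetAbsTol(0.0);
//   solver.SetMaxIter(200);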
1836 
1837 
1838 void PetscSolver::SetPrintLevel(int plev)
1839 {
1840  typedef PetscErrorCode (*myPetscFunc)(void**);
1841  PetscViewerAndFormat *vf = NULL;
1842  PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
1843 
1844  if (plev > 0)
1845  {
1846  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
1847  PCHKERRQ(obj,ierr);
1848  }
1849  if (cid == KSP_CLASSID)
1850  {
1851  // there are many other options, see the function KSPSetFromOptions() in
1852  // src/ksp/ksp/interface/itcl.c
1853  typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
1854  KSP ksp = (KSP)obj;
1855  if (plev >= 0)
1856  {
1857  ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
1858  }
1859  if (plev == 1)
1860  {
1861  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
1862  (myPetscFunc)PetscViewerAndFormatDestroy);
1863  PCHKERRQ(ksp,ierr);
1864  }
1865  else if (plev > 1)
1866  {
1867  ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
1868  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
1869  (myPetscFunc)PetscViewerAndFormatDestroy);
1870  PCHKERRQ(ksp,ierr);
1871  if (plev > 2)
1872  {
1873  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
1874  PCHKERRQ(viewer,ierr);
1875  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
1876  (myPetscFunc)PetscViewerAndFormatDestroy);
1877  PCHKERRQ(ksp,ierr);
1878  }
1879  }
1880  }
1881  else if (cid == SNES_CLASSID)
1882  {
1883  typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
1884  SNES snes = (SNES)obj;
1885  if (plev >= 0)
1886  {
1887  ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
1888  }
1889  if (plev > 0)
1890  {
1891  ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
1892  (myPetscFunc)PetscViewerAndFormatDestroy);
1893  PCHKERRQ(snes,ierr);
1894  }
1895  }
1896  else if (cid == TS_CLASSID)
1897  {
1898  TS ts = (TS)obj;
1899  if (plev >= 0)
1900  {
1901  ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
1902  }
1903  }
1904  else
1905  {
1906  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1907  }
1908 }
1909 
1910 MPI_Comm PetscSolver::GetComm() const
1911 {
1912  return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
1913 }
1914 
1915 void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
1916 {
1917  __mfem_monitor_ctx *monctx;
1918  ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
1919  monctx->solver = this;
1920  monctx->monitor = ctx;
1921  if (cid == KSP_CLASSID)
1922  {
1923  ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
1924  __mfem_monitor_ctx_destroy);
1925  PCHKERRQ(obj,ierr);
1926  }
1927  else if (cid == SNES_CLASSID)
1928  {
1929  ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
1930  __mfem_monitor_ctx_destroy);
1931  PCHKERRQ(obj,ierr);
1932  }
1933  else if (cid == TS_CLASSID)
1934  {
1935  ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
1936  __mfem_monitor_ctx_destroy);
1937  PCHKERRQ(obj,ierr);
1938  }
1939  else
1940  {
1941  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1942  }
1943 }
1944 
1945 void PetscSolver::SetBCHandler(PetscBCHandler *bch)
1946 {
1947  bchandler = bch;
1948  if (cid == SNES_CLASSID)
1949  {
1950  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
1951  snes_ctx->bchandler = bchandler;
1952  }
1953  else if (cid == TS_CLASSID)
1954  {
1955  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
1956  ts_ctx->bchandler = bchandler;
1957  }
1958  else
1959  {
1960  MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
1961  }
1962 }
1963 
1964 void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
1965 {
1966  PC pc = NULL;
1967  if (cid == TS_CLASSID)
1968  {
1969  SNES snes;
1970  KSP ksp;
1971 
1972  ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
1973  ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
1974  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
1975  }
1976  else if (cid == SNES_CLASSID)
1977  {
1978  KSP ksp;
1979 
1980  ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
1981  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
1982  }
1983  else if (cid == KSP_CLASSID)
1984  {
1985  ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
1986  }
1987  else if (cid == PC_CLASSID)
1988  {
1989  pc = (PC)obj;
1990  }
1991  else
1992  {
1993  MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
1994  }
1995  if (factory)
1996  {
1997  ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
1998  }
1999  else
2000  {
2001  ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2002  }
2003 }
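// Usage sketch (illustrative only): a factory that builds a fresh
// preconditioner from the current operator; the Solver returned by
// NewPreconditioner() is owned (and eventually deleted) by the shell PC.
// `UserSolver` is a placeholder for any mfem::Solver implementation.
//
//   class UserFactory : public PetscPreconditionerFactory
//   {
//   public:
//      UserFactory() : PetscPreconditionerFactory("user factory") {}
//      Solver *NewPreconditioner(const OperatorHandle &oh)
//      {
//         return new UserSolver(oh); // rebuilt whenever PETSc asks for it
//      }
//   };
//
//   UserFactory factory;
//   solver.SetPreconditionerFactory(&factory);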
2004 
2005 void PetscSolver::Customize(bool customize) const
2006 {
2007  if (!customize) { clcustom = true; }
2008  if (!clcustom)
2009  {
2010  if (cid == PC_CLASSID)
2011  {
2012  PC pc = (PC)obj;
2013  ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2014  }
2015  else if (cid == KSP_CLASSID)
2016  {
2017  KSP ksp = (KSP)obj;
2018  ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2019  }
2020  else if (cid == SNES_CLASSID)
2021  {
2022  SNES snes = (SNES)obj;
2023  ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2024  }
2025  else if (cid == TS_CLASSID)
2026  {
2027  TS ts = (TS)obj;
2028  ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2029  }
2030  else
2031  {
2032  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2033  }
2034  }
2035  clcustom = true;
2036 }
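// Usage sketch (illustrative only): Customize() is what makes the options
// prefix passed at construction time effective. For example, a solver built
// with prefix "sys1_" can be reconfigured from the command line with
//
//   ./app -sys1_ksp_type gmres -sys1_ksp_rtol 1e-8 -sys1_pc_type ilu
//
// the options being read the first time Customize() runs (typically at the
// first Mult() call).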
2037 
2038 PetscInt PetscSolver::GetConverged()
2039 {
2040  if (cid == KSP_CLASSID)
2041  {
2042  KSP ksp = (KSP)obj;
2043  KSPConvergedReason reason;
2044  ierr = KSPGetConvergedReason(ksp,&reason);
2045  PCHKERRQ(ksp,ierr);
2046  return reason > 0 ? 1 : 0;
2047  }
2048  else if (cid == SNES_CLASSID)
2049  {
2050  SNES snes = (SNES)obj;
2051  SNESConvergedReason reason;
2052  ierr = SNESGetConvergedReason(snes,&reason);
2053  PCHKERRQ(snes,ierr);
2054  return reason > 0 ? 1 : 0;
2055  }
2056  else if (cid == TS_CLASSID)
2057  {
2058  TS ts = (TS)obj;
2059  TSConvergedReason reason;
2060  ierr = TSGetConvergedReason(ts,&reason);
2061  PCHKERRQ(ts,ierr);
2062  return reason > 0 ? 1 : 0;
2063  }
2064  else
2065  {
2066  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2067  return -1;
2068  }
2069 }
2070 
2071 PetscInt PetscSolver::GetNumIterations()
2072 {
2073  if (cid == KSP_CLASSID)
2074  {
2075  KSP ksp = (KSP)obj;
2076  PetscInt its;
2077  ierr = KSPGetIterationNumber(ksp,&its);
2078  PCHKERRQ(ksp,ierr);
2079  return its;
2080  }
2081  else if (cid == SNES_CLASSID)
2082  {
2083  SNES snes = (SNES)obj;
2084  PetscInt its;
2085  ierr = SNESGetIterationNumber(snes,&its);
2086  PCHKERRQ(snes,ierr);
2087  return its;
2088  }
2089  else if (cid == TS_CLASSID)
2090  {
2091  TS ts = (TS)obj;
2092  PetscInt its;
2093  ierr = TSGetStepNumber(ts,&its);
2094  PCHKERRQ(ts,ierr);
2095  return its;
2096  }
2097  else
2098  {
2099  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2100  return -1;
2101  }
2102 }
2103 
2104 PetscReal PetscSolver::GetFinalNorm()
2105 {
2106  if (cid == KSP_CLASSID)
2107  {
2108  KSP ksp = (KSP)obj;
2109  PetscReal norm;
2110  ierr = KSPGetResidualNorm(ksp,&norm);
2111  PCHKERRQ(ksp,ierr);
2112  return norm;
2113  }
2114  else if (cid == SNES_CLASSID)
2115  {
2116  SNES snes = (SNES)obj;
2117  PetscReal norm;
2118  ierr = SNESGetFunctionNorm(snes,&norm);
2119  PCHKERRQ(snes,ierr);
2120  return norm;
2121  }
2122  else
2123  {
2124  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2125  return PETSC_MAX_REAL;
2126  }
2127 }
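// Usage sketch (illustrative only, assumes `solver` is a configured
// PetscLinearSolver and `B`, `X` are compatible Vectors): the three
// accessors above are typically queried right after a solve.
//
//   solver.Mult(B, X);
//   if (!solver.GetConverged()) { MFEM_WARNING("solve did not converge"); }
//   mfem::out << solver.GetNumIterations() << " iterations, final norm "
//             << solver.GetFinalNorm() << std::endl;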
2128 
2129 void PetscSolver::CreatePrivateContext()
2130 {
2131  FreePrivateContext();
2132  if (cid == SNES_CLASSID)
2133  {
2134  __mfem_snes_ctx *snes_ctx;
2135  ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2136  snes_ctx->op = NULL;
2137  snes_ctx->bchandler = NULL;
2138  snes_ctx->work = NULL;
2139  snes_ctx->jacType = Operator::PETSC_MATAIJ;
2140  private_ctx = (void*) snes_ctx;
2141  }
2142  else if (cid == TS_CLASSID)
2143  {
2144  __mfem_ts_ctx *ts_ctx;
2145  ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2146  ts_ctx->op = NULL;
2147  ts_ctx->bchandler = NULL;
2148  ts_ctx->work = NULL;
2149  ts_ctx->work2 = NULL;
2150  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2151  ts_ctx->cached_ijacstate = -1;
2152  ts_ctx->cached_rhsjacstate = -1;
2153  ts_ctx->cached_splits_xstate = -1;
2154  ts_ctx->cached_splits_xdotstate = -1;
2155  ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2156  ts_ctx->jacType = Operator::PETSC_MATAIJ;
2157  private_ctx = (void*) ts_ctx;
2158  }
2159 }
2160 
2161 void PetscSolver::FreePrivateContext()
2162 {
2163  if (!private_ctx) { return; }
2164  // free private context's owned objects
2165  if (cid == SNES_CLASSID)
2166  {
2167  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2168  delete snes_ctx->work;
2169  }
2170  else if (cid == TS_CLASSID)
2171  {
2172  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2173  delete ts_ctx->work;
2174  delete ts_ctx->work2;
2175  }
2176  ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2177 }
2178 
2179 // PetscBCHandler methods
2180 
2181 PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2182  enum PetscBCHandler::Type _type)
2183  : bctype(_type), setup(false), eval_t(0.0),
2184  eval_t_cached(std::numeric_limits<double>::min())
2185 {
2186  SetTDofs(ess_tdof_list);
2187 }
2188 
2189 void PetscBCHandler::SetTDofs(Array<int>& list)
2190 {
2191  ess_tdof_list.SetSize(list.Size());
2192  ess_tdof_list.Assign(list);
2193  setup = false;
2194 }
2195 
2196 void PetscBCHandler::SetUp(PetscInt n)
2197 {
2198  if (setup) { return; }
2199  if (bctype == CONSTANT)
2200  {
2201  eval_g.SetSize(n);
2202  this->Eval(eval_t,eval_g);
2203  eval_t_cached = eval_t;
2204  }
2205  else if (bctype == TIME_DEPENDENT)
2206  {
2207  eval_g.SetSize(n);
2208  }
2209  setup = true;
2210 }
2211 
2212 void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2213 {
2214  (*this).SetUp(x.Size());
2215  y = x;
2216  if (bctype == ZERO)
2217  {
2218  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2219  {
2220  y[ess_tdof_list[i]] = 0.0;
2221  }
2222  }
2223  else
2224  {
2225  if (bctype != CONSTANT && eval_t != eval_t_cached)
2226  {
2227  Eval(eval_t,eval_g);
2228  eval_t_cached = eval_t;
2229  }
2230  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2231  {
2232  y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2233  }
2234  }
2235 }
2236 
2237 void PetscBCHandler::ApplyBC(Vector &x)
2238 {
2239  (*this).SetUp(x.Size());
2240  if (bctype == ZERO)
2241  {
2242  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2243  {
2244  x[ess_tdof_list[i]] = 0.0;
2245  }
2246  }
2247  else
2248  {
2249  if (bctype != CONSTANT && eval_t != eval_t_cached)
2250  {
2251  Eval(eval_t,eval_g);
2252  eval_t_cached = eval_t;
2253  }
2254  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2255  {
2256  x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2257  }
2258  }
2259 }
2260 
2261 void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2262 {
2263  (*this).SetUp(x.Size());
2264  if (bctype == ZERO)
2265  {
2266  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2267  {
2268  y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2269  }
2270  }
2271  else
2272  {
2273  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2274  {
2275  y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2276  }
2277  }
2278 }
2279 
2280 void PetscBCHandler::Zero(Vector &x)
2281 {
2282  (*this).SetUp(x.Size());
2283  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2284  {
2285  x[ess_tdof_list[i]] = 0.0;
2286  }
2287 }
2288 
2289 void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2290 {
2291  (*this).SetUp(x.Size());
2292  y = x;
2293  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2294  {
2295  y[ess_tdof_list[i]] = 0.0;
2296  }
2297 }
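// Usage sketch (illustrative only): time-dependent essential values are
// prescribed by overriding Eval(); `boundary_data` is a placeholder for the
// user's function of time.
//
//   class UserBC : public PetscBCHandler
//   {
//   public:
//      UserBC(Array<int> &ess_tdofs)
//         : PetscBCHandler(ess_tdofs, PetscBCHandler::TIME_DEPENDENT) {}
//      void Eval(double t, Vector &g) { boundary_data(t, g); }
//   };
//
//   UserBC bc(ess_tdof_list);
//   ode_solver.SetBCHandler(&bc); // ode_solver: a PetscODESolver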
2298 
2299 // PetscLinearSolver methods
2300 
2301 PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2302  bool wrapin)
2303  : PetscSolver(), Solver(), wrap(wrapin)
2304 {
2305  KSP ksp;
2306  ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2307  obj = (PetscObject)ksp;
2308  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2309  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2310 }
2311 
2312 PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2313  const std::string &prefix)
2314  : PetscSolver(), Solver(), wrap(false)
2315 {
2316  KSP ksp;
2317  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2318  obj = (PetscObject)ksp;
2319  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2320  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2321  SetOperator(A);
2322 }
2323 
2324 PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2325  const std::string &prefix)
2326  : PetscSolver(), Solver(), wrap(wrapin)
2327 {
2328  KSP ksp;
2329  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2330  obj = (PetscObject)ksp;
2331  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2332  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2333  SetOperator(A);
2334 }
2335 
2336 void PetscLinearSolver::SetOperator(const Operator &op)
2337 {
2338  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2339  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2340  (dynamic_cast<const PetscParMatrix *>(&op));
2341  const Operator *oA = dynamic_cast<const Operator *>(&op);
2342 
2343  // update base classes: Operator, Solver, PetscLinearSolver
2344  bool delete_pA = false;
2345  if (!pA)
2346  {
2347  if (hA)
2348  {
2349  // Create a MATSHELL object, or convert into a format suitable for constructing preconditioners
2350  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2351  delete_pA = true;
2352  }
2353  else if (oA) // fallback to general operator
2354  {
2355  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2356  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2357  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2358  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2359  delete_pA = true;
2360  }
2361  }
2362  MFEM_VERIFY(pA, "Unsupported operation!");
2363 
2364  // Set operators into PETSc KSP
2365  KSP ksp = (KSP)obj;
2366  Mat A = pA->A;
2367  if (operatorset)
2368  {
2369  Mat C;
2370  PetscInt nheight,nwidth,oheight,owidth;
2371 
2372  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2373  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2374  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2375  if (nheight != oheight || nwidth != owidth)
2376  {
2377  // reinit without destroying the KSP
2378  // communicator remains the same
2379  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2380  delete X;
2381  delete B;
2382  X = B = NULL;
2383  wrap = false;
2384  }
2385  }
2386  ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2387 
2388  // Update PetscSolver
2389  operatorset = true;
2390 
2391  // Update the Operator fields.
2392  height = pA->Height();
2393  width = pA->Width();
2394 
2395  if (delete_pA) { delete pA; }
2396 }
2397 
2398 void PetscLinearSolver::SetOperator(const Operator &op, const Operator &pop)
2399 {
2400  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2401  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2402  (dynamic_cast<const PetscParMatrix *>(&op));
2403  const Operator *oA = dynamic_cast<const Operator *>(&op);
2404 
2405  PetscParMatrix *ppA = const_cast<PetscParMatrix *>
2406  (dynamic_cast<const PetscParMatrix *>(&pop));
2407  const Operator *poA = dynamic_cast<const Operator *>(&pop);
2408 
2409  // Convert Operator for linear system
2410  bool delete_pA = false;
2411  if (!pA)
2412  {
2413  if (hA)
2414  {
2415  // Create a MATSHELL object, or convert into a format suitable for constructing preconditioners
2416  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2417  delete_pA = true;
2418  }
2419  else if (oA) // fallback to general operator
2420  {
2421  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2422  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2423  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2424  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2425  delete_pA = true;
2426  }
2427  }
2428  MFEM_VERIFY(pA, "Unsupported operation!");
2429 
2430  // Convert Operator to be preconditioned
2431  bool delete_ppA = false;
2432  if (!ppA)
2433  {
2434  if (oA == poA && !wrap) // Same operator, already converted
2435  {
2436  ppA = pA;
2437  }
2438  else
2439  {
2440  ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
2441  delete_ppA = true;
2442  }
2443  }
2444  MFEM_VERIFY(ppA, "Unsupported operation!");
2445 
2446  // Set operators into PETSc KSP
2447  KSP ksp = (KSP)obj;
2448  Mat A = pA->A;
2449  Mat P = ppA->A;
2450  if (operatorset)
2451  {
2452  Mat C;
2453  PetscInt nheight,nwidth,oheight,owidth;
2454 
2455  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2456  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2457  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2458  if (nheight != oheight || nwidth != owidth)
2459  {
2460  // reinit without destroying the KSP
2461  // communicator remains the same
2462  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2463  delete X;
2464  delete B;
2465  X = B = NULL;
2466  wrap = false;
2467  }
2468  }
2469  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
2470 
2471  // Update PetscSolver
2472  operatorset = true;
2473 
2474  // Update the Operator fields.
2475  height = pA->Height();
2476  width = pA->Width();
2477 
2478  if (delete_pA) { delete pA; }
2479  if (delete_ppA) { delete ppA; }
2480 }
2481 
2482 void PetscLinearSolver::SetPreconditioner(Solver &precond)
2483 {
2484  KSP ksp = (KSP)obj;
2485 
2486  // Preserve Amat if already set
2487  Mat A = NULL;
2488  PetscBool amat;
2489  ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
2490  if (amat)
2491  {
2492  ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
2493  ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
2494  }
2495  PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
2496  if (ppc)
2497  {
2498  ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
2499  }
2500  else
2501  {
2502  // wrap the Solver action
2503  // the Solver is assumed to be already set up
2504  // ownership of precond is not transferred,
2505  // consistent with MFEM's other linear solvers
2506  PC pc;
2507  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
2508  ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
2509  }
2510  if (A)
2511  {
2512  Mat P;
2513 
2514  ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
2515  ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
2516  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
2517  ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
2518  ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
2519  }
2520 }
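// Usage sketch (illustrative only, assumes an assembled HypreParMatrix
// `hA`): any mfem::Solver can be used as the preconditioner; non-PETSc
// solvers are wrapped in a shell PC and remain owned by the caller.
//
//   PetscLinearSolver ls(hA.GetComm(), "ls_");
//   ls.SetOperator(hA);
//   HypreBoomerAMG amg(hA);
//   ls.SetPreconditioner(amg);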
2521 
2522 void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
2523 {
2524  KSP ksp = (KSP)obj;
2525 
2526  if (!B || !X)
2527  {
2528  Mat pA = NULL;
2529  ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
2530  if (!B)
2531  {
2532  PetscParMatrix A = PetscParMatrix(pA, true);
2533  B = new PetscParVector(A, true, false);
2534  }
2535  if (!X)
2536  {
2537  PetscParMatrix A = PetscParMatrix(pA, true);
2538  X = new PetscParVector(A, false, false);
2539  }
2540  }
2541  B->PlaceArray(b.GetData());
2542  X->PlaceArray(x.GetData());
2543 
2544  Customize();
2545 
2546  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2547  PCHKERRQ(ksp, ierr);
2548 
2549  // Solve the system.
2550  if (trans)
2551  {
2552  ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
2553  }
2554  else
2555  {
2556  ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
2557  }
2558  B->ResetArray();
2559  X->ResetArray();
2560 }
2561 
2562 void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
2563 {
2564  (*this).MultKernel(b,x,false);
2565 }
2566 
2567 void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
2568 {
2569  (*this).MultKernel(b,x,true);
2570 }
2571 
2572 PetscLinearSolver::~PetscLinearSolver()
2573 {
2574  MPI_Comm comm;
2575  KSP ksp = (KSP)obj;
2576  ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
2577  ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
2578 }
2579 
2580 // PetscPCGSolver methods
2581 
2582 PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix)
2583  : PetscLinearSolver(comm,prefix)
2584 {
2585  KSP ksp = (KSP)obj;
2586  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2587  // this is to obtain a textbook PCG
2588  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2589 }
2590 
2591 PetscPCGSolver::PetscPCGSolver(PetscParMatrix& A, const std::string &prefix)
2592  : PetscLinearSolver(A,prefix)
2593 {
2594  KSP ksp = (KSP)obj;
2595  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2596  // this is to obtain a textbook PCG
2597  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2598 }
2599 
2600 PetscPCGSolver::PetscPCGSolver(HypreParMatrix& A, bool wrap,
2601  const std::string &prefix)
2602  : PetscLinearSolver(A,wrap,prefix)
2603 {
2604  KSP ksp = (KSP)obj;
2605  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2606  // this is to obtain a textbook PCG
2607  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2608 }
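// Usage sketch (illustrative only, assumes an assembled PetscParMatrix `A`
// and compatible Vectors `B`, `X`):
//
//   PetscPCGSolver pcg(A, "pcg_");
//   pcg.SetRelTol(1e-8);
//   pcg.SetMaxIter(500);
//   pcg.SetPrintLevel(1);
//   pcg.Mult(B, X);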
2609 
2610 // PetscPreconditioner methods
2611 
2612 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
2613  const std::string &prefix)
2614  : PetscSolver(), Solver()
2615 {
2616  PC pc;
2617  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
2618  obj = (PetscObject)pc;
2619  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2620  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2621 }
2622 
2623 PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
2624  const string &prefix)
2625  : PetscSolver(), Solver()
2626 {
2627  PC pc;
2628  ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
2629  obj = (PetscObject)pc;
2630  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2631  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2632  SetOperator(A);
2633 }
2634 
2635 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
2636  const string &prefix)
2637  : PetscSolver(), Solver()
2638 {
2639  PC pc;
2640  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
2641  obj = (PetscObject)pc;
2642  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2643  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2644  SetOperator(op);
2645 }
2646 
2647 void PetscPreconditioner::SetOperator(const Operator &op)
2648 {
2649  bool delete_pA = false;
2650  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2651  (dynamic_cast<const PetscParMatrix *>(&op));
2652 
2653  if (!pA)
2654  {
2655  const Operator *cop = dynamic_cast<const Operator *>(&op);
2656  pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
2657  delete_pA = true;
2658  }
2659 
2660  // Set operators into PETSc PC
2661  PC pc = (PC)obj;
2662  Mat A = pA->A;
2663  if (operatorset)
2664  {
2665  Mat C;
2666  PetscInt nheight,nwidth,oheight,owidth;
2667 
2668  ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
2669  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2670  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2671  if (nheight != oheight || nwidth != owidth)
2672  {
2673  // reinit without destroying the PC
2674  // communicator remains the same
2675  ierr = PCReset(pc); PCHKERRQ(pc,ierr);
2676  delete X;
2677  delete B;
2678  X = B = NULL;
2679  }
2680  }
2681  ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
2682 
2683  // Update PetscSolver
2684  operatorset = true;
2685 
2686  // Update the Operator fields.
2687  height = pA->Height();
2688  width = pA->Width();
2689 
2690  if (delete_pA) { delete pA; }
2691 }
2692 
2693 void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
2694  bool trans) const
2695 {
2696  PC pc = (PC)obj;
2697 
2698  if (!B || !X)
2699  {
2700  Mat pA = NULL;
2701  ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
2702  if (!B)
2703  {
2704  PetscParMatrix A(pA, true);
2705  B = new PetscParVector(A, true, false);
2706  }
2707  if (!X)
2708  {
2709  PetscParMatrix A(pA, true);
2710  X = new PetscParVector(A, false, false);
2711  }
2712  }
2713  B->PlaceArray(b.GetData());
2714  X->PlaceArray(x.GetData());
2715 
2716  Customize();
2717 
2718  // Apply the preconditioner.
2719  if (trans)
2720  {
2721  ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
2722  }
2723  else
2724  {
2725  ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
2726  }
2727  B->ResetArray();
2728  X->ResetArray();
2729 }
2730 
2731 void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
2732 {
2733  (*this).MultKernel(b,x,false);
2734 }
2735 
2736 void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
2737 {
2738  (*this).MultKernel(b,x,true);
2739 }
2740 
2741 PetscPreconditioner::~PetscPreconditioner()
2742 {
2743  MPI_Comm comm;
2744  PC pc = (PC)obj;
2745  ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
2746  ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
2747 }
2748 
2749 // PetscBDDCSolver methods
2750 
2751 // Coordinates sampling function
2752 static void func_coords(const Vector &x, Vector &y)
2753 {
2754  for (int i = 0; i < x.Size(); i++) { y(i) = x(i); }
2755 }
2756 
2757 void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
2758 {
2759  MPI_Comm comm = PetscObjectComm(obj);
2760 
2761  // get PETSc object
2762  PC pc = (PC)obj;
2763  Mat pA;
2764  ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
2765 
2766  // matrix type should be of type MATIS
2767  PetscBool ismatis;
2768  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
2769  PCHKERRQ(pA,ierr);
2770  MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
2771 
2772  // Check options
2773  ParFiniteElementSpace *fespace = opts.fespace;
2774  if (opts.netflux && !fespace)
2775  {
2776  MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
2777  }
2778 
2779  // Attach default near-null space to local matrices
2780  {
2781  MatNullSpace nnsp;
2782  Mat lA;
2783  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
2784  ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
2785  &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2786  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2787  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2788  }
2789 
2790  // set PETSc PC type to PCBDDC
2791  ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
2792 
2793  PetscInt rst,nl;
2794  ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
2795  ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
2796 
2797  // index sets for fields splitting and coordinates for nodal spaces
2798  IS *fields = NULL;
2799  PetscInt nf = 0;
2800  PetscInt sdim = 0;
2801  PetscReal *coords = NULL;
2802  if (fespace)
2803  {
2804  int vdim = fespace->GetVDim();
2805 
2806  // Ideally, the block size should be set at matrix creation,
2807  // but the MFEM assembly does not allow doing so
2808  if (fespace->GetOrdering() == Ordering::byVDIM)
2809  {
2810  Mat lA;
2811  ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
2812  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
2813  ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
2814  }
2815 
2816  // fields
2817  if (vdim > 1)
2818  {
2819  PetscInt st = rst, bs, inc, nlf;
2820  nf = vdim;
2821  nlf = nl/nf;
2822  ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
2823  if (fespace->GetOrdering() == Ordering::byVDIM)
2824  {
2825  inc = 1;
2826  bs = vdim;
2827  }
2828  else
2829  {
2830  inc = nlf;
2831  bs = 1;
2832  }
2833  for (PetscInt i = 0; i < nf; i++)
2834  {
2835  ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
2836  st += inc;
2837  }
2838  }
2839 
2840  // coordinates
2841  const FiniteElementCollection *fec = fespace->FEColl();
2842  bool h1space = dynamic_cast<const H1_FECollection*>(fec);
2843  if (h1space)
2844  {
2845  ParFiniteElementSpace *fespace_coords = fespace;
2846 
2847  sdim = fespace->GetParMesh()->SpaceDimension();
2848  if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
2849  {
2850  fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
2851  Ordering::byVDIM);
2852  }
2853  VectorFunctionCoefficient coeff_coords(sdim, func_coords);
2854  ParGridFunction gf_coords(fespace_coords);
2855  gf_coords.ProjectCoefficient(coeff_coords);
2856  HypreParVector *hvec_coords = gf_coords.ParallelProject();
2857 
2858  // likely elasticity -> we attach rigid-body modes as near-null space information to the local matrices
2859  if (vdim == sdim)
2860  {
2861  MatNullSpace nnsp;
2862  Mat lA;
2863  Vec pvec_coords,lvec_coords;
2864  ISLocalToGlobalMapping l2g;
2865  PetscSF sf;
2866  PetscLayout rmap;
2867  const PetscInt *gidxs;
2868  PetscInt nleaves;
2869 
2870  ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
2871  hvec_coords->GlobalSize(),hvec_coords->GetData(),&pvec_coords);
2872  CCHKERRQ(comm,ierr);
2873  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
2874  ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
2875  ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
2876  ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
2877  ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
2878  ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
2879  ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
2880  ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
2881  ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
2882  CCHKERRQ(comm,ierr);
2883  ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
2884  {
2885  const PetscScalar *garray;
2886  PetscScalar *larray;
2887 
2888  ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2889  ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2890  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
2891  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
2892  ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2893  ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2894  }
2895  ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
2896  ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
2897  CCHKERRQ(PETSC_COMM_SELF,ierr);
2898  ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
2899  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2900  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2901  ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
2902  }
2903 
2904  // each dof has an associated tuple of coordinates
2905  ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
2906  if (nf > 0)
2907  {
2908  PetscScalar *data = hvec_coords->GetData();
2909  for (PetscInt i = 0; i < nf; i++)
2910  {
2911  const PetscInt *idxs;
2912  PetscInt nn;
2913 
2914  // It also handles the case of fespace not ordered by VDIM
2915  ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
2916  ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
2917  for (PetscInt j = 0; j < nn; j++)
2918  {
2919  PetscInt idx = idxs[j]-rst;
2920  for (PetscInt d = 0; d < sdim; d++)
2921  {
2922  coords[sdim*idx+d] = data[sdim*j+d];
2923  }
2924  }
2925  ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
2926  }
2927  }
2928  else
2929  {
2930  ierr = PetscMemcpy(coords,hvec_coords->GetData(),nl*sdim*sizeof(PetscReal));
2931  CCHKERRQ(comm,ierr);
2932  }
2933  if (fespace_coords != fespace)
2934  {
2935  delete fespace_coords;
2936  }
2937  delete hvec_coords;
2938  }
2939  }
2940 
2941  // index sets for boundary dofs specification (Essential = dir, Natural = neu)
2942  IS dir = NULL, neu = NULL;
2943 
2944  // Extract l2l matrices
2945  Array<Mat> *l2l = NULL;
2946  if (opts.ess_dof_local || opts.nat_dof_local)
2947  {
2948  PetscContainer c;
2949 
2950  ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
2951  MFEM_VERIFY(c,"Local-to-local PETSc container not present");
2952  ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
2953  }
2954 
2955  // check information about index sets (essential dofs, fields, etc.)
2956 #ifdef MFEM_DEBUG
2957  {
2958  // make sure ess/nat_dof have been collectively set
2959  PetscBool lpr = PETSC_FALSE,pr;
2960  if (opts.ess_dof) { lpr = PETSC_TRUE; }
2961  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
2962  PCHKERRQ(pA,ierr);
2963  MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
2964  lpr = PETSC_FALSE;
2965  if (opts.nat_dof) { lpr = PETSC_TRUE; }
2966  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
2967  PCHKERRQ(pA,ierr);
2968  MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
2969  // make sure fields have been collectively set
2970  PetscInt ms[2],Ms[2];
2971  ms[0] = -nf; ms[1] = nf;
2972  ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
2973  PCHKERRQ(pA,ierr);
2974  MFEM_VERIFY(-Ms[0] == Ms[1],
2975  "number of fields should be the same across processes");
2976  }
2977 #endif
2978 
2979  // boundary sets
2980  if (opts.ess_dof)
2981  {
2982  PetscInt st = opts.ess_dof_local ? 0 : rst;
2983  if (!opts.ess_dof_local)
2984  {
2985  // need to compute the boundary dofs in global ordering
2986  ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
2987  CCHKERRQ(comm,ierr);
2988  ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
2989  }
2990  else
2991  {
2992  // need to compute a list for the marked boundary dofs in local ordering
2993  ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
2994  CCHKERRQ(comm,ierr);
2995  ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
2996  }
2997  }
2998  if (opts.nat_dof)
2999  {
3000  PetscInt st = opts.nat_dof_local ? 0 : rst;
3001  if (!opts.nat_dof_local)
3002  {
3003  // need to compute the boundary dofs in global ordering
3004  ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3005  CCHKERRQ(comm,ierr);
3006  ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3007  }
3008  else
3009  {
3010  // need to compute a list for the marked boundary dofs in local ordering
3011  ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3012  CCHKERRQ(comm,ierr);
3013  ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3014  }
3015  }
3016 
3017  // field splitting
3018  if (fields)
3019  {
3020  ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3021  }
3022  for (PetscInt i = 0; i < nf; i++)
3023  {
3024  ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3025  }
3026  ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3027 
3028  // coordinates
3029  if (coords)
3030  {
3031  ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3032  }
3033  ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3034 
3035  // code for block size is disabled since we cannot change the matrix
3036  // block size after it has been setup
3037  // int bs = 1;
3038 
3039  // Customize using the finite element space (if any)
3040  if (fespace)
3041  {
3042  const FiniteElementCollection *fec = fespace->FEColl();
3043  bool edgespace, rtspace, h1space;
3044  bool needint = opts.netflux;
3045  bool tracespace, rt_tracespace, edge_tracespace;
3046  int vdim, dim, p;
3047  PetscBool B_is_Trans = PETSC_FALSE;
3048 
3049  ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3050  dim = pmesh->Dimension();
3051  vdim = fespace->GetVDim();
3052  h1space = dynamic_cast<const H1_FECollection*>(fec);
3053  rtspace = dynamic_cast<const RT_FECollection*>(fec);
3054  edgespace = dynamic_cast<const ND_FECollection*>(fec);
3055  edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3056  rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3057  tracespace = edge_tracespace || rt_tracespace;
3058 
3059  p = 1;
3060  if (fespace->GetNE() > 0)
3061  {
3062  if (!tracespace)
3063  {
3064  p = fespace->GetOrder(0);
3065  }
3066  else
3067  {
3068  p = fespace->GetFaceOrder(0);
3069  if (dim == 2) { p++; }
3070  }
3071  }
3072 
3073  if (edgespace) // H(curl)
3074  {
3075  if (dim == 2)
3076  {
3077  needint = true;
3078  if (tracespace)
3079  {
3080  MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3081  " not using auxiliary quadrature");
3082  needint = false;
3083  }
3084  }
3085  else
3086  {
3087  FiniteElementCollection *vfec;
3088  if (tracespace)
3089  {
3090  vfec = new H1_Trace_FECollection(p,dim);
3091  }
3092  else
3093  {
3094  vfec = new H1_FECollection(p,dim);
3095  }
3096  ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3097  ParDiscreteLinearOperator *grad;
3098  grad = new ParDiscreteLinearOperator(vfespace,fespace);
3099  if (tracespace)
3100  {
3101  grad->AddTraceFaceInterpolator(new GradientInterpolator);
3102  }
3103  else
3104  {
3105  grad->AddDomainInterpolator(new GradientInterpolator);
3106  }
3107  grad->Assemble();
3108  grad->Finalize();
3109  HypreParMatrix *hG = grad->ParallelAssemble();
3110  PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3111  delete hG;
3112  delete grad;
3113 
3114  PetscBool conforming = PETSC_TRUE;
3115  if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3116  ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3117  PCHKERRQ(pc,ierr);
3118  delete vfec;
3119  delete vfespace;
3120  delete G;
3121  }
3122  }
3123  else if (rtspace) // H(div)
3124  {
3125  needint = true;
3126  if (tracespace)
3127  {
3128  MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3129  " auxiliary quadrature");
3130  needint = false;
3131  }
3132  }
3133  else if (h1space) // H(grad), only for the vector case
3134  {
3135  if (vdim != dim) { needint = false; }
3136  }
3137 
3138  PetscParMatrix *B = NULL;
3139  if (needint)
3140  {
3141  // Generate bilinear form in unassembled format which is used to
3142  // compute the net-flux across subdomain boundaries for H(div) and
3143  // Elasticity/Stokes, and the line integral \int u x n of 2D H(curl) fields
3144  FiniteElementCollection *auxcoll;
3145  if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3146  else
3147  {
3148  if (h1space)
3149  {
3150  auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3151  }
3152  else
3153  {
3154  auxcoll = new L2_FECollection(p,dim);
3155  }
3156  }
3157  ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3158  ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3159 
3160  if (edgespace)
3161  {
3162  if (tracespace)
3163  {
3164  b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3165  }
3166  else
3167  {
3168  b->AddDomainIntegrator(new VectorFECurlIntegrator);
3169  }
3170  }
3171  else if (rtspace)
3172  {
3173  if (tracespace)
3174  {
3175  b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3176  }
3177  else
3178  {
3179  b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3180  }
3181  }
3182  else
3183  {
3184  b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3185  }
3186  b->Assemble();
3187  b->Finalize();
3188  OperatorHandle Bh(Operator::PETSC_MATIS);
3189  b->ParallelAssemble(Bh);
3190  Bh.Get(B);
3191  Bh.SetOperatorOwner(false);
3192 
3193  if (dir) // if essential dofs are present, we need to zero the columns
3194  {
3195  Mat pB = *B;
3196  ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3197  if (!opts.ess_dof_local)
3198  {
3199  ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3200  }
3201  else
3202  {
3203  ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3204  }
3205  B_is_Trans = PETSC_TRUE;
3206  }
3207  delete b;
3208  delete pspace;
3209  delete auxcoll;
3210  }
3211 
3212  if (B)
3213  {
3214  ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3215  }
3216  delete B;
3217  }
3218  ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3219  ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3220 }
3221 
3222 PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3223  const PetscBDDCSolverParams &opts,
3224  const std::string &prefix)
3225  : PetscPreconditioner(A,prefix)
3226 {
3227  BDDCSolverConstructor(opts);
3228  Customize();
3229 }
3230 
3231 PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3232  const PetscBDDCSolverParams &opts,
3233  const std::string &prefix)
3234  : PetscPreconditioner(comm,op,prefix)
3235 {
3236  BDDCSolverConstructor(opts);
3237  Customize();
3238 }
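// Usage sketch (illustrative only): BDDC requires the operator in
// unassembled (MATIS) format; `a` is a ParBilinearForm and `ess_tdof_list`
// the essential true dofs, both assumed to be available.
//
//   OperatorHandle Ah(Operator::PETSC_MATIS);
//   a->ParallelAssemble(Ah);
//   PetscParMatrix *A;
//   Ah.Get(A);
//
//   PetscBDDCSolverParams opts;
//   opts.SetSpace(a->ParFESpace());
//   opts.SetEssBdrDofs(&ess_tdof_list);
//   PetscBDDCSolver bddc(*A, opts, "bddc_");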
3239 
3240 PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3241  const string &prefix)
3242  : PetscPreconditioner(comm,op,prefix)
3243 {
3244  PC pc = (PC)obj;
3245  ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3246 
3247  Mat pA;
3248  ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3249 
3250  // Check if pA is of type MATNEST
3251  PetscBool isnest;
3252  ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3253 
3254  PetscInt nr = 0;
3255  IS *isrow = NULL;
3256  if (isnest) // we know the fields
3257  {
3258  ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3259  ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3260  ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3261  }
3262 
3263  // We need to customize here, before setting the index sets.
3264  // This is because PCFieldSplitSetType customizes the function
3265  // pointers. SubSolver options will be processed during PCApply
3266  Customize();
3267 
3268  for (PetscInt i=0; i<nr; i++)
3269  {
3270  ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3271  }
3272  ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3273 }
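// Usage sketch (illustrative only, assumes a 2x2 BlockOperator `block`):
// wrapping the BlockOperator as a MATNEST exposes its blocks as fields.
//
//   PetscParMatrix pA(MPI_COMM_WORLD, &block, Operator::PETSC_MATNEST);
//   PetscFieldSplitSolver fs(MPI_COMM_WORLD, pA, "fs_");
//   // sub-solvers can then be configured from the command line, e.g.
//   // -fs_fieldsplit_0_pc_type lu -fs_fieldsplit_1_ksp_type cg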
3274 
3275 // PetscNonlinearSolver methods
3276 
3277 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3278  const std::string &prefix)
3279  : PetscSolver(), Solver()
3280 {
3281  // Create the actual solver object
3282  SNES snes;
3283  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3284  obj = (PetscObject)snes;
3285  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3286  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3287 
3288  // Allocate private solver context
3289  CreatePrivateContext();
3290 }
3291 
3292 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
3293  const std::string &prefix)
3294  : PetscSolver(), Solver()
3295 {
3296  // Create the actual solver object
3297  SNES snes;
3298  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3299  obj = (PetscObject)snes;
3300  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3301  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3302 
3303  // Allocate private solver context
3304  CreatePrivateContext();
3305 
3306  SetOperator(op);
3307 }
3308 
3309 PetscNonlinearSolver::~PetscNonlinearSolver()
3310 {
3311  MPI_Comm comm;
3312  SNES snes = (SNES)obj;
3313  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
3314  ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3315 }
3316 
3317 void PetscNonlinearSolver::SetOperator(const Operator &op)
3318 {
3319  SNES snes = (SNES)obj;
3320 
3321  if (operatorset)
3322  {
3323  PetscBool ls,gs;
3324  void *fctx,*jctx;
3325 
3326  ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3327  PCHKERRQ(snes, ierr);
3328  ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3329  PCHKERRQ(snes, ierr);
3330 
3331  ls = (PetscBool)(height == op.Height() && width == op.Width() &&
3332  (void*)&op == fctx &&
3333  (void*)&op == jctx);
3334  ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3335  PetscObjectComm((PetscObject)snes));
3336  PCHKERRQ(snes,ierr);
3337  if (!gs)
3338  {
3339  ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3340  delete X;
3341  delete B;
3342  X = B = NULL;
3343  }
3344  }
3345  else
3346  {
3347  /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
3348  yet set. We default to backtracking */
3349  SNESLineSearch ls;
3350  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3351  ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3352  }
3353 
3354  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3355  snes_ctx->op = (mfem::Operator*)&op;
3356  ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
3357  PCHKERRQ(snes, ierr);
3358  ierr = SNESSetJacobian(snes, NULL, NULL, __mfem_snes_jacobian,
3359  (void *)snes_ctx);
3360  PCHKERRQ(snes, ierr);
3361 
3362  // Update PetscSolver
3363  operatorset = true;
3364 
3365  // Update the Operator fields.
3366  height = op.Height();
3367  width = op.Width();
3368 }
3369 
3370 void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
3371 {
3372  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3373  snes_ctx->jacType = jacType;
3374 }
3375 
3376 void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *,const Vector&,
3377  double*))
3378 {
3379  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3380  snes_ctx->objective = objfn;
3381 
3382  SNES snes = (SNES)obj;
3383  ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
3384  PCHKERRQ(snes, ierr);
3385 }
3386 
3387 void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *,const Vector&,
3388  Vector&, Vector&,
3389  bool&, bool&))
3390 {
3391  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3392  snes_ctx->postcheck = post;
3393 
3394  SNES snes = (SNES)obj;
3395  SNESLineSearch ls;
3396  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3397  ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
3398  PCHKERRQ(ls, ierr);
3399 }
3400 
3401 void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *,int,
3402  const mfem::Vector&,
3403  const mfem::Vector&,
3404  const mfem::Vector&,
3405  const mfem::Vector&))
3406 {
3407  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3408  snes_ctx->update = update;
3409 
3410  SNES snes = (SNES)obj;
3411  ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
3412 }
3413 
3414 void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
3415 {
3416  SNES snes = (SNES)obj;
3417 
3418  bool b_nonempty = b.Size();
3419  if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
3420  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
3421  X->PlaceArray(x.GetData());
3422  if (b_nonempty) { B->PlaceArray(b.GetData()); }
3423  else { *B = 0.0; }
3424 
3425  Customize();
3426 
3427  if (!iterative_mode) { *X = 0.; }
3428 
3429  // Solve the system.
3430  ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
3431  X->ResetArray();
3432  if (b_nonempty) { B->ResetArray(); }
3433 }
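// Usage sketch (illustrative only, assumes a square nonlinear
// mfem::Operator `N` and an initial guess `x`): solve N(x) = 0 with
// Newton's method (the PETSc default).
//
//   PetscNonlinearSolver newton(MPI_COMM_WORLD, N, "newton_");
//   newton.SetRelTol(1e-6);
//   newton.SetMaxIter(20);
//   newton.iterative_mode = true; // use x as the initial guess
//   Vector zero;                  // empty vector: zero right-hand side
//   newton.Mult(zero, x);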
3434 
3435 // PetscODESolver methods
3436 
3437 PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
3438  : PetscSolver(), ODESolver()
3439 {
3440  // Create the actual solver object
3441  TS ts;
3442  ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
3443  obj = (PetscObject)ts;
3444  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3445  ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
3446 
3447  // Allocate private solver context
3448  CreatePrivateContext();
3449 
3450  // Default options, to comply with the current interface to ODESolver.
3451  ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
3452  PCHKERRQ(ts,ierr);
3453  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
3454  PCHKERRQ(ts,ierr);
3455  TSAdapt tsad;
3456  ierr = TSGetAdapt(ts,&tsad);
3457  PCHKERRQ(ts,ierr);
3458  ierr = TSAdaptSetType(tsad,TSADAPTNONE);
3459  PCHKERRQ(ts,ierr);
3460 }
3461 
3462 PetscODESolver::~PetscODESolver()
3463 {
3464  MPI_Comm comm;
3465  TS ts = (TS)obj;
3466  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
3467  ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
3468 }
3469 
3470 void PetscODESolver::Init(TimeDependentOperator &f_,
3471  enum PetscODESolver::Type type)
3472 {
3473  TS ts = (TS)obj;
3474 
3475  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3476  if (operatorset)
3477  {
3478  ierr = TSReset(ts); PCHKERRQ(ts,ierr);
3479  delete X;
3480  X = NULL;
3481  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
3482  ts_ctx->cached_ijacstate = -1;
3483  ts_ctx->cached_rhsjacstate = -1;
3484  ts_ctx->cached_splits_xstate = -1;
3485  ts_ctx->cached_splits_xdotstate = -1;
3486  }
3487  f = &f_;
3488 
3489  // Set functions in TS
3490  ts_ctx->op = &f_;
3491  if (f_.isImplicit())
3492  {
3493  ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
3494  PCHKERRQ(ts, ierr);
3495  ierr = TSSetIJacobian(ts, NULL, NULL, __mfem_ts_ijacobian, (void *)ts_ctx);
3496  PCHKERRQ(ts, ierr);
3497  ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
3498  PCHKERRQ(ts, ierr);
3499  }
3500  if (!f_.isHomogeneous())
3501  {
3502  if (!f_.isImplicit())
3503  {
3504  ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
3505  PCHKERRQ(ts, ierr);
3506  }
3507  ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
3508  PCHKERRQ(ts, ierr);
3509  ierr = TSSetRHSJacobian(ts, NULL, NULL, __mfem_ts_rhsjacobian, (void *)ts_ctx);
3510  PCHKERRQ(ts, ierr);
3511  }
3512  operatorset = true;
3513 
3514  SetType(type);
3515 
3516  // Set solution vector
3517  PetscParVector X(PetscObjectComm(obj),*f,false,true);
3518  ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
3519 
3520  // Compose special purpose function for PDE-constrained optimization
3521  PetscBool use = PETSC_TRUE;
3522  ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
3523  if (use && f_.isImplicit())
3524  {
3525  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
3526  __mfem_ts_computesplits);
3527  PCHKERRQ(ts,ierr);
3528  }
3529  else
3530  {
3531  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
3532  NULL);
3533  PCHKERRQ(ts,ierr);
3534  }
3535 }
3536 
3537 void PetscODESolver::SetJacobianType(Operator::Type jacType)
3538 {
3539  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3540  ts_ctx->jacType = jacType;
3541 }
3542 
3543 PetscODESolver::Type PetscODESolver::GetType() const
3544 {
3545  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3546  return ts_ctx->type;
3547 }
3548 
3549 void PetscODESolver::SetType(PetscODESolver::Type type)
3550 {
3551  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3552 
3553  TS ts = (TS)obj;
3554  ts_ctx->type = type;
3555  if (type == ODE_SOLVER_LINEAR)
3556  {
3557  ierr = TSSetProblemType(ts, TS_LINEAR);
3558  PCHKERRQ(ts, ierr);
3559  }
3560  else
3561  {
3562  ierr = TSSetProblemType(ts, TS_NONLINEAR);
3563  PCHKERRQ(ts, ierr);
3564  }
3565 }
3566 
3567 void PetscODESolver::Step(Vector &x, double &t, double &dt)
3568 {
3569  // Pass the parameters to PETSc.
3570  TS ts = (TS)obj;
3571  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
3572  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
3573 
3574  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
3575  X->PlaceArray(x.GetData());
3576 
3577  Customize();
3578 
3579  // Take the step.
3580  ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
3581  ierr = TSStep(ts); PCHKERRQ(ts, ierr);
3582  X->ResetArray();
3583 
3584  // Return the current time and the time step actually used to the caller.
3585  // We cannot use TSGetTimeStep() since it returns the next candidate step
3586  PetscReal pt;
3587  ierr = TSGetTime(ts,&pt); PCHKERRQ(ts,ierr);
3588  dt = pt - (PetscReal)t;
3589  t = pt;
3590 }
3591 
3592 void PetscODESolver::Run(Vector &x, double &t, double &dt, double t_final)
3593 {
3594  // Give the parameters to PETSc.
3595  TS ts = (TS)obj;
3596  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
3597  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
3598  ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
3599  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
3600  PCHKERRQ(ts, ierr);
3601 
3602  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
3603  X->PlaceArray(x.GetData());
3604 
3605  Customize();
3606 
3607  // Reset Jacobian caching since the user may have changed
3608  // the parameters of the solver
3609  // We don't do this in the Step method because two consecutive
3610  // Step() calls are done with the same operator
3611  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3612  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
3613  ts_ctx->cached_ijacstate = -1;
3614  ts_ctx->cached_rhsjacstate = -1;
3615  ts_ctx->cached_splits_xstate = -1;
3616  ts_ctx->cached_splits_xdotstate = -1;
3617 
3618  // Take the steps.
3619  ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
3620  X->ResetArray();
3621 
3622  // Get back final time and time step to caller.
3623  PetscReal pt;
3624  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
3625  t = pt;
3626  ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
3627  dt = pt;
3628 }
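// Usage sketch (illustrative only, assumes a TimeDependentOperator `oper`,
// a state Vector `x`, and a final time `t_final`):
//
//   PetscODESolver ode(MPI_COMM_WORLD, "ts_");
//   ode.Init(oper, PetscODESolver::ODE_SOLVER_GENERAL);
//   double t = 0.0, dt = 1e-2;
//   while (t < t_final) { ode.Step(x, t, dt); }
//   // or, equivalently: ode.Run(x, t, dt, t_final);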
3629 
3630 } // namespace mfem
3631 
3632 #include "petsc/private/petscimpl.h"
3633 #include "petsc/private/matimpl.h"
3634 
3635 // auxiliary functions
3636 static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
3637  void* ctx)
3638 {
3639  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
3640 
3641  PetscFunctionBeginUser;
3642  if (!monctx)
3643  {
3644  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
3645  }
3646  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
3647  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
3648  monctx->monitor);
3649 
3650  if (user_monitor->mon_sol)
3651  {
3652  mfem::PetscParVector V(x,true);
3653  user_monitor->MonitorSolution(it,t,V);
3654  }
3655  user_monitor->MonitorSolver(solver);
3656  PetscFunctionReturn(0);
3657 }
3658 
3659 static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
3660  Vec f,void *ctx)
3661 {
3662  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3663  PetscErrorCode ierr;
3664 
3665  PetscFunctionBeginUser;
3666  mfem::PetscParVector xx(x,true);
3667  mfem::PetscParVector yy(xp,true);
3668  mfem::PetscParVector ff(f,true);
3669 
3670  mfem::TimeDependentOperator *op = ts_ctx->op;
3671  op->SetTime(t);
3672 
3673  if (ts_ctx->bchandler)
3674  {
3675  // we evaluate the ImplicitMult method with the correct bc;
3676  // this means the time derivative at the essential boundary
3677  // dofs is zero
3678  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
3679  if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
3680  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3681  mfem::Vector* txx = ts_ctx->work;
3682  mfem::Vector* txp = ts_ctx->work2;
3683  bchandler->SetTime(t);
3684  bchandler->ApplyBC(xx,*txx);
3685  bchandler->ZeroBC(yy,*txp);
3686  op->ImplicitMult(*txx,*txp,ff);
3687  // and fix the residual (i.e. f_\partial\Omega = u - g(t))
3688  bchandler->FixResidualBC(xx,ff);
3689  }
3690  else
3691  {
3692  // use the ImplicitMult method of the class
3693  op->ImplicitMult(xx,yy,ff);
3694  }
3695 
3696  // need to tell PETSc the Vec has been updated
3697  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
3698  PetscFunctionReturn(0);
3699 }
3700 
3701 static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
3702  void *ctx)
3703 {
3704  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3705  PetscErrorCode ierr;
3706 
3707  PetscFunctionBeginUser;
3708  if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
3709  mfem::PetscParVector xx(x,true);
3710  mfem::PetscParVector ff(f,true);
3711  mfem::TimeDependentOperator *top = ts_ctx->op;
3712  top->SetTime(t);
3713 
3714  // use the ExplicitMult method - compute the RHS function
3715  top->ExplicitMult(xx,ff);
3716 
3717  // need to tell PETSc the Vec has been updated
3718  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
3719  PetscFunctionReturn(0);
3720 }
3721 
3722 static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
3723  Vec xp, PetscReal shift, Mat A, Mat P,
3724  void *ctx)
3725 {
3726  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3727  mfem::Vector *xx;
3728  PetscScalar *array;
3729  PetscReal eps = 0.001; /* 0.1% difference */
3730  PetscInt n;
3731  PetscObjectState state;
3732  PetscErrorCode ierr;
3733 
3734  PetscFunctionBeginUser;
3735  // avoid recomputing the Jacobian if we have already done so;
3736  // the relative tolerance comparison is adequate given that
3737  // two consecutive shifts should have similar magnitude
3738  ierr = PetscObjectStateGet((PetscObject)A,&state); CHKERRQ(ierr);
3739  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3740  std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
3741  state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
3742 
3743  // update time
3744  mfem::TimeDependentOperator *op = ts_ctx->op;
3745  op->SetTime(t);
3746 
3747  // wrap Vecs with Vectors
3748  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
3749  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3750  mfem::Vector yy(array,n);
3751  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3752  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3753  if (!ts_ctx->bchandler)
3754  {
3755  xx = new mfem::Vector(array,n);
3756  }
3757  else
3758  {
3759  // make sure we compute a Jacobian with the correct boundary values
3760  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
3761  mfem::Vector txx(array,n);
3762  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3763  xx = ts_ctx->work;
3764  bchandler->SetTime(t);
3765  bchandler->ApplyBC(txx,*xx);
3766  }
3767  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3768 
3769  // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
3770  mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
3771  if (!ts_ctx->bchandler) { delete xx; }
3772  ts_ctx->cached_shift = shift;
3773 
3774  // Convert to the operator type requested if needed
3775  bool delete_pA = false;
3776  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
3777  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
3778  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3779  pA->GetType() != ts_ctx->jacType))
3780  {
3781  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
3782  ts_ctx->jacType);
3783  delete_pA = true;
3784  }
3785 
3786  // Eliminate essential dofs
3787  if (ts_ctx->bchandler)
3788  {
3789  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3790  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3791  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
3792  }
3793 
3794  // Get nonzerostate
3795  PetscObjectState nonzerostate;
3796  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
3797 
3798  // Avoid an unneeded copy of the matrix by replacing the header of P
3799  Mat B;
3800  B = pA->ReleaseMat(false);
3801  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
3802  if (delete_pA) { delete pA; }
3803 
3804  // Matrix-free case
3805  if (A && A != P)
3806  {
3807  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3808  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3809  }
3810 
3811  // When using MATNEST and PCFIELDSPLIT, the second setup of the
3812  // preconditioner fails because MatCreateSubMatrix_Nest does not
3813  // actually return a matrix. Instead, for efficiency reasons,
3814  // it returns a reference to the submatrix. The second time it
3815  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
3816  // aborts since the two submatrices are actually different.
3817  // We circumvent this issue by incrementing the nonzero state
3818  // (i.e. PETSc thinks the operator sparsity pattern has changed)
3819  // This does not impact performance in the MATNEST case
3820  PetscBool isnest;
3821  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
3822  CHKERRQ(ierr);
3823  if (isnest) { P->nonzerostate = nonzerostate + 1; }
3824 
3825  // Jacobian reuse
3826  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
3827  CHKERRQ(ierr);
3828 
3829  // Fool DM
3830  DM dm;
3831  MatType mtype;
3832  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
3833  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
3834  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
3835  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
3836  PetscFunctionReturn(0);
3837 }
3838 
3839 static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
3840  Mat Ax,Mat Jx,
3841  Mat Axp,Mat Jxp)
3842 {
3843  __mfem_ts_ctx* ts_ctx;
3844  mfem::Vector *xx;
3845  PetscScalar *array;
3846  PetscInt n;
3847  PetscObjectState state;
3848  PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
3849  PetscBool assembled;
3850  PetscErrorCode ierr;
3851 
3852  PetscFunctionBeginUser;
3853  ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
3854 
3855  // avoid recomputing the Jacobians if we have already done so
3856  ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
3857  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3858  state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
3859  ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
3860  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3861  state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
3862  if (!rx && !rxp) { PetscFunctionReturn(0); }
3863 
3864  // update time
3865  mfem::TimeDependentOperator *op = ts_ctx->op;
3866  op->SetTime(t);
3867 
3868  // wrap Vecs with Vectors
3869  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
3870  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3871  mfem::Vector yy(array,n);
3872  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3873  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3874  if (!ts_ctx->bchandler)
3875  {
3876  xx = new mfem::Vector(array,n);
3877  }
3878  else
3879  {
3880  // make sure we compute a Jacobian with the correct boundary values
3881  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
3882  mfem::Vector txx(array,n);
3883  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3884  xx = ts_ctx->work;
3885  bchandler->SetTime(t);
3886  bchandler->ApplyBC(txx,*xx);
3887  }
3888  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3889 
3890  // We don't have a specialized interface, so we compute the split Jacobians
3891  // by evaluating the implicit gradient method twice with the appropriate shifts
3892 
3893  // first we do the state jacobian
3894  mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
3895 
3896  // Convert to the operator type requested if needed
3897  bool delete_mat = false;
3898  mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
3899  (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
3900  if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3901  pJx->GetType() != ts_ctx->jacType))
3902  {
3903  if (pJx)
3904  {
3905  Mat B = *pJx;
3906  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
3907  }
3908  pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
3909  ts_ctx->jacType);
3910  delete_mat = true;
3911  }
3912  if (rx)
3913  {
3914  ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
3915  if (assembled)
3916  {
3917  ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
3918  }
3919  else
3920  {
3921  Mat B;
3922  ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
3923  ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
3924  }
3925  }
3926  if (delete_mat) { delete pJx; }
3927  pJx = new mfem::PetscParMatrix(Jx,true);
3928 
3929  // Eliminate essential dofs
3930  if (ts_ctx->bchandler)
3931  {
3932  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3933  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3934  pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
3935  }
3936 
3937  // Then we compute the Jacobian w.r.t. the time derivative of the state
3938  // Note that this is usually the mass matrix
3939  mfem::PetscParMatrix *pJxp = NULL;
3940  if (rxp)
3941  {
3942  delete_mat = false;
3943  mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
3944  pJxp = const_cast<mfem::PetscParMatrix *>
3945  (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
3946  if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3947  pJxp->GetType() != ts_ctx->jacType))
3948  {
3949  if (pJxp)
3950  {
3951  Mat B = *pJxp;
3952  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
3953  }
3954  pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
3955  &oJxp,ts_ctx->jacType);
3956  delete_mat = true;
3957  }
3958 
3959  ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
3960  if (assembled)
3961  {
3962  ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
3963  }
3964  else
3965  {
3966  Mat B;
3967  ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
3968  ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
3969  }
3970  if (delete_mat) { delete pJxp; }
3971  pJxp = new mfem::PetscParMatrix(Jxp,true);
3972 
3973  // Eliminate essential dofs
3974  if (ts_ctx->bchandler)
3975  {
3976  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3977  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3978  pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
3979  }
3980 
3981  // Obtain the time-dependent part of the Jacobian by subtracting
3982  // the state Jacobian.
3983  // We don't use the class operator "-=" since we know that
3984  // the sparsity pattern of the two matrices is the same
3985  ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
3986  }
3987 
3988  // Matrix-free cases
3989  if (Ax && Ax != Jx)
3990  {
3991  ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3992  ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3993  }
3994  if (Axp && Axp != Jxp)
3995  {
3996  ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3997  ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3998  }
3999 
4000  // Jacobian reuse
4001  ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4002  CHKERRQ(ierr);
4003  ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4004  CHKERRQ(ierr);
4005 
4006  delete pJx;
4007  delete pJxp;
4008  if (!ts_ctx->bchandler) { delete xx; }
4009  PetscFunctionReturn(0);
4010 }
4011 
4012 static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4013  Mat A, Mat P, void *ctx)
4014 {
4015  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4016  mfem::Vector *xx;
4017  PetscScalar *array;
4018  PetscInt n;
4019  PetscObjectState state;
4020  PetscErrorCode ierr;
4021 
4022  PetscFunctionBeginUser;
4023  // avoid recomputing the Jacobian if we have already done so
4024  ierr = PetscObjectStateGet((PetscObject)A,&state); CHKERRQ(ierr);
4025  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4026  state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
4027 
4028  // update time
4029  mfem::TimeDependentOperator *op = ts_ctx->op;
4030  op->SetTime(t);
4031 
4032  // wrap Vec with Vector
4033  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4034  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4035  if (!ts_ctx->bchandler)
4036  {
4037  xx = new mfem::Vector(array,n);
4038  }
4039  else
4040  {
4041  // make sure we compute a Jacobian with the correct boundary values
4042  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4043  mfem::Vector txx(array,n);
4044  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4045  xx = ts_ctx->work;
4046  bchandler->SetTime(t);
4047  bchandler->ApplyBC(txx,*xx);
4048  }
4049  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4050 
4051  // Use TimeDependentOperator::GetExplicitGradient(x)
4052  mfem::Operator& J = op->GetExplicitGradient(*xx);
4053  if (!ts_ctx->bchandler) { delete xx; }
4054 
4055  // Convert to the operator type requested if needed
4056  bool delete_pA = false;
4057  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4058  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4059  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4060  pA->GetType() != ts_ctx->jacType))
4061  {
4062  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4063  ts_ctx->jacType);
4064  delete_pA = true;
4065  }
4066 
4067  // Eliminate essential dofs
4068  if (ts_ctx->bchandler)
4069  {
4070  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4071  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4072  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4073  }
4074 
4075  // Get nonzerostate
4076  PetscObjectState nonzerostate;
4077  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4078 
4079  // Avoid an unneeded copy of the matrix by swapping the Mat header
4080  Mat B;
4081  B = pA->ReleaseMat(false);
4082  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4083  if (delete_pA) { delete pA; }
4084 
4085  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4086  // preconditioner fails because MatCreateSubMatrix_Nest does not
4087  // actually return a matrix. Instead, for efficiency reasons,
4088  // it returns a reference to the submatrix. The second time it
4089  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4090  // aborts since the two submatrices are actually different.
4091  // We circumvent this issue by incrementing the nonzero state
4092  // (i.e. PETSc thinks the operator sparsity pattern has changed).
4093  // This does not impact performance in the MATNEST case.
4094  PetscBool isnest;
4095  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4096  CHKERRQ(ierr);
4097  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4098 
4099  // Matrix-free case
4100  if (A && A != P)
4101  {
4102  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4103  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4104  }
4105 
4106  // Jacobian reuse
4107  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4108  {
4109  ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4110  }
4111  ierr = PetscObjectStateGet((PetscObject)A,&ts_ctx->cached_rhsjacstate);
4112  CHKERRQ(ierr);
4113 
4114  // Fool DM
4115  DM dm;
4116  MatType mtype;
4117  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4118  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4119  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4120  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4121  PetscFunctionReturn(0);
4122 }
4123 
4124 static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4125  void* ctx)
4126 {
4127  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4128 
4129  PetscFunctionBeginUser;
4130  if (!monctx)
4131  {
4132  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4133  }
4134 
4135  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4136  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4137  monctx->monitor);
4138  if (user_monitor->mon_sol)
4139  {
4140  Vec x;
4141  PetscErrorCode ierr;
4142 
4143  ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4144  mfem::PetscParVector V(x,true);
4145  user_monitor->MonitorSolution(it,res,V);
4146  }
4147  if (user_monitor->mon_res)
4148  {
4149  Vec x;
4150  PetscErrorCode ierr;
4151 
4152  ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4153  mfem::PetscParVector V(x,true);
4154  user_monitor->MonitorResidual(it,res,V);
4155  }
4156  user_monitor->MonitorSolver(solver);
4157  PetscFunctionReturn(0);
4158 }
4159 
4160 static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4161  void *ctx)
4162 {
4163  PetscScalar *array;
4164  PetscInt n;
4165  PetscErrorCode ierr;
4166  mfem::Vector *xx;
4167  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4168 
4169  PetscFunctionBeginUser;
4170  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4171  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4172  if (!snes_ctx->bchandler)
4173  {
4174  xx = new mfem::Vector(array,n);
4175  }
4176  else
4177  {
4178  // make sure we compute a Jacobian with the correct boundary values
4179  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4180  mfem::Vector txx(array,n);
4181  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4182  xx = snes_ctx->work;
4183  bchandler->ApplyBC(txx,*xx);
4184  }
4185 
4186  // Use Operator::GetGradient(x)
4187  mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4188  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4189  if (!snes_ctx->bchandler) { delete xx; }
4190 
4191  // Convert to the operator type requested if needed
4192  bool delete_pA = false;
4193  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4194  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4195  if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4196  pA->GetType() != snes_ctx->jacType))
4197  {
4198  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4199  snes_ctx->jacType);
4200  delete_pA = true;
4201  }
4202 
4203  // Eliminate essential dofs
4204  if (snes_ctx->bchandler)
4205  {
4206  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4207  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4208  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4209  }
4210 
4211  // Get nonzerostate
4212  PetscObjectState nonzerostate;
4213  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4214 
4215  // Avoid an unneeded copy of the matrix by swapping the Mat header
4216  Mat B = pA->ReleaseMat(false);
4217  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4218  if (delete_pA) { delete pA; }
4219 
4220  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4221  // preconditioner fails because MatCreateSubMatrix_Nest does not
4222  // actually return a matrix. Instead, for efficiency reasons,
4223  // it returns a reference to the submatrix. The second time it
4224  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4225  // aborts since the two submatrices are actually different.
4226  // We circumvent this issue by incrementing the nonzero state
4227  // (i.e. PETSc thinks the operator sparsity pattern has changed).
4228  // This does not impact performance in the MATNEST case.
4229  PetscBool isnest;
4230  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4231  CHKERRQ(ierr);
4232  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4233 
4234  // Matrix-free case
4235  if (A && A != P)
4236  {
4237  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4238  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4239  }
4240 
4241  // Fool DM
4242  DM dm;
4243  MatType mtype;
4244  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4245  ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4246  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4247  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4248  PetscFunctionReturn(0);
4249 }
4250 
4251 static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4252 {
4253  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4254 
4255  PetscFunctionBeginUser;
4256  mfem::PetscParVector xx(x,true);
4257  mfem::PetscParVector ff(f,true);
4258  if (snes_ctx->bchandler)
4259  {
4260  // we evaluate the Mult method with the correct bc
4261  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
4262  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4263  mfem::Vector* txx = snes_ctx->work;
4264  bchandler->ApplyBC(xx,*txx);
4265  snes_ctx->op->Mult(*txx,ff);
4266  // and fix the residual (i.e. f_\partial\Omega = u - g)
4267  bchandler->FixResidualBC(xx,ff);
4268  }
4269  else
4270  {
4271  // use the Mult method of the class
4272  snes_ctx->op->Mult(xx,ff);
4273  }
4274  // need to tell PETSc the Vec has been updated
4275  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
4276  PetscFunctionReturn(0);
4277 }
4278 
4279 static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
4280  void *ctx)
4281 {
4282  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4283 
4284  PetscFunctionBeginUser;
4285  if (!snes_ctx->objective)
4286  {
4287  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
4288  }
4289  mfem::PetscParVector xx(x,true);
4290  double lf;
4291  (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4292  *f = (PetscReal)lf;
4293  PetscFunctionReturn(0);
4294 }
4295 
4296 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4297  PetscBool *cy,PetscBool *cw, void* ctx)
4298 {
4299  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4300  bool lcy = false,lcw = false;
4301 
4302  PetscFunctionBeginUser;
4303  mfem::PetscParVector x(X,true);
4304  mfem::PetscParVector y(Y,true);
4305  mfem::PetscParVector w(W,true);
4306  (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4307  if (lcy) { *cy = PETSC_TRUE; }
4308  if (lcw) { *cw = PETSC_TRUE; }
4309  PetscFunctionReturn(0);
4310 }
4311 
4312 static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
4313 {
4314  Vec F,X,dX,pX;
4315  __mfem_snes_ctx* snes_ctx;
4316 
4317  PetscFunctionBeginUser;
4318  /* Update callback does not use the context */
4319  ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
4320  ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4321  if (!it)
4322  {
4323  ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4324  ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
4325  CHKERRQ(ierr);
4326  ierr = VecDestroy(&pX); CHKERRQ(ierr);
4327  }
4328  ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
4329  CHKERRQ(ierr);
4330  if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
4331  "Missing previous solution");
4332  ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4333  mfem::PetscParVector f(F,true);
4334  mfem::PetscParVector x(X,true);
4335  mfem::PetscParVector dx(dX,true);
4336  mfem::PetscParVector px(pX,true);
4337  (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4338  /* Store previous solution */
4339  ierr = VecCopy(X,pX); CHKERRQ(ierr);
4340  PetscFunctionReturn(0);
4341 }
4342 
4343 static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
4344  void* ctx)
4345 {
4346  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4347 
4348  PetscFunctionBeginUser;
4349  if (!monctx)
4350  {
4351  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4352  }
4353 
4354  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4355  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4356  monctx->monitor);
4357  if (user_monitor->mon_sol)
4358  {
4359  Vec x;
4360  PetscErrorCode ierr;
4361 
4362  ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
4363  mfem::PetscParVector V(x,true);
4364  user_monitor->MonitorSolution(it,res,V);
4365  }
4366  if (user_monitor->mon_res)
4367  {
4368  Vec x;
4369  PetscErrorCode ierr;
4370 
4371  ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
4372  mfem::PetscParVector V(x,true);
4373  user_monitor->MonitorResidual(it,res,V);
4374  }
4375  user_monitor->MonitorSolver(solver);
4376  PetscFunctionReturn(0);
4377 }
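// Usage sketch (illustrative; the class name and the monitor constructor
// flags are assumptions based on mfem::PetscSolverMonitor): derive a monitor
// and attach it with PetscSolver::SetMonitor() so the callbacks above
// forward each iteration to it:
//
//   class ResidualMonitor : public mfem::PetscSolverMonitor
//   {
//   public:
//      ResidualMonitor() : mfem::PetscSolverMonitor(false,true) {}
//      void MonitorResidual(PetscInt it, PetscReal norm,
//                           const mfem::Vector &r) override
//      { mfem::out << "iteration " << it << ": ||r|| = " << norm << "\n"; }
//   };
//
//   // given an mfem::PetscLinearSolver `solver`:
//   //   ResidualMonitor mon;
//   //   solver.SetMonitor(&mon);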
4378 
4379 static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
4380 {
4381  mfem::Operator *op;
4382  PetscErrorCode ierr;
4383 
4384  PetscFunctionBeginUser;
4385  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
4386  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
4387  mfem::PetscParVector xx(x,true);
4388  mfem::PetscParVector yy(y,true);
4389  op->Mult(xx,yy);
4390  // need to tell PETSc the Vec has been updated
4391  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4392  PetscFunctionReturn(0);
4393 }
4394 
4395 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
4396 {
4397  mfem::Operator *op;
4398  PetscErrorCode ierr;
4399 
4400  PetscFunctionBeginUser;
4401  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
4402  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
4403  mfem::PetscParVector xx(x,true);
4404  mfem::PetscParVector yy(y,true);
4405  op->MultTranspose(xx,yy);
4406  // need to tell PETSc the Vec has been updated
4407  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4408  PetscFunctionReturn(0);
4409 }
4410 
4411 static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
4412 {
4413  mfem::Operator *op;
4414  PetscErrorCode ierr;
4415 
4416  PetscFunctionBeginUser;
4417  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
4418  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
4419  ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
4420  PetscFunctionReturn(0);
4421 }
4422 
4423 static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
4424 {
4425  PetscFunctionBeginUser;
4426  PetscFunctionReturn(0);
4427 }
4428 
4429 static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
4430 {
4431  __mfem_pc_shell_ctx *ctx;
4432  PetscErrorCode ierr;
4433 
4434  PetscFunctionBeginUser;
4435  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4436  if (ctx->op)
4437  {
4438  PetscBool isascii;
4439  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
4440  CHKERRQ(ierr);
4441 
4442  mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
4443  (ctx->op);
4444  if (ppc)
4445  {
4446  ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
4447  }
4448  else
4449  {
4450  if (isascii)
4451  {
4452  ierr = PetscViewerASCIIPrintf(viewer,
4453  "No information available on the mfem::Solver\n");
4454  CHKERRQ(ierr);
4455  }
4456  }
4457  if (isascii && ctx->factory)
4458  {
4459  ierr = PetscViewerASCIIPrintf(viewer,
4460  "Number of preconditioners created by the factory %lu\n",ctx->numprec);
4461  CHKERRQ(ierr);
4462  }
4463  }
4464  PetscFunctionReturn(0);
4465 }
4466 
4467 static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
4468 {
4469  __mfem_pc_shell_ctx *ctx;
4470  PetscErrorCode ierr;
4471 
4472  PetscFunctionBeginUser;
4473  mfem::PetscParVector xx(x,true);
4474  mfem::PetscParVector yy(y,true);
4475  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4476  if (ctx->op)
4477  {
4478  ctx->op->Mult(xx,yy);
4479  // need to tell PETSc the Vec has been updated
4480  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4481  }
4482  else // operator is not present, copy x
4483  {
4484  yy = xx;
4485  }
4486  PetscFunctionReturn(0);
4487 }
4488 
4489 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
4490 {
4491  __mfem_pc_shell_ctx *ctx;
4492  PetscErrorCode ierr;
4493 
4494  PetscFunctionBeginUser;
4495  mfem::PetscParVector xx(x,true);
4496  mfem::PetscParVector yy(y,true);
4497  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4498  if (ctx->op)
4499  {
4500  ctx->op->MultTranspose(xx,yy);
4501  // need to tell PETSc the Vec has been updated
4502  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4503  }
4504  else // operator is not present, copy x
4505  {
4506  yy = xx;
4507  }
4508  PetscFunctionReturn(0);
4509 }
4510 
4511 static PetscErrorCode __mfem_pc_shell_setup(PC pc)
4512 {
4513  __mfem_pc_shell_ctx *ctx;
4514 
4515  PetscFunctionBeginUser;
4516  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4517  if (ctx->factory)
4518  {
4519  // Delete any owned operator
4520  if (ctx->ownsop)
4521  {
4522  delete ctx->op;
4523  }
4524 
4525  // Get current preconditioning Mat
4526  Mat B;
4527  ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
4528 
4529  // Call user-defined setup
4530  mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
4531  mfem::PetscPreconditionerFactory *factory = ctx->factory;
4532  ctx->op = factory->NewPreconditioner(hB);
4533  ctx->ownsop = true;
4534  ctx->numprec++;
4535  }
4536  PetscFunctionReturn(0);
4537 }
4538 
4539 static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
4540 {
4541  __mfem_pc_shell_ctx *ctx;
4542  PetscErrorCode ierr;
4543 
4544  PetscFunctionBeginUser;
4545  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4546  if (ctx->ownsop)
4547  {
4548  delete ctx->op;
4549  }
4550  delete ctx;
4551  PetscFunctionReturn(0);
4552 }
4553 
4554 static PetscErrorCode __mfem_array_container_destroy(void *ptr)
4555 {
4556  PetscErrorCode ierr;
4557 
4558  PetscFunctionBeginUser;
4559  ierr = PetscFree(ptr); CHKERRQ(ierr);
4560  PetscFunctionReturn(0);
4561 }
4562 
4563 static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
4564 {
4565  mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
4566  PetscErrorCode ierr;
4567 
4568  PetscFunctionBeginUser;
4569  for (int i=0; i<a->Size(); i++)
4570  {
4571  Mat M = (*a)[i];
4572  MPI_Comm comm = PetscObjectComm((PetscObject)M);
4573  ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
4574  }
4575  delete a;
4576  PetscFunctionReturn(0);
4577 }
4578 
4579 static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
4580 {
4581  PetscErrorCode ierr;
4582 
4583  PetscFunctionBeginUser;
4584  ierr = PetscFree(*ctx); CHKERRQ(ierr);
4585  PetscFunctionReturn(0);
4586 }
4587 
4588 // Sets the type of PC to PCSHELL and wraps the solver action
4589 // if ownsop is true, ownership of precond is transferred to the PETSc object
4590 PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
4591 {
4592  PetscFunctionBeginUser;
4593  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
4594  ctx->op = &precond;
4595  ctx->ownsop = ownsop;
4596  ctx->factory = NULL;
4597  ctx->numprec = 0;
4598 
4599  // In case the PC was already of type SHELL, this will destroy any
4600  // previous user-defined data structure
4601  // We cannot call PCReset as it will wipe out any operator already set
4602  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
4603 
4604  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
4605  ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
4606  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
4607  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
4608  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
4609  CHKERRQ(ierr);
4610  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
4611  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
4612  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
4613  PetscFunctionReturn(0);
4614 }
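// Usage sketch (illustrative; `ksp` and `prec` are assumed to exist, and
// `prec` must outlive the PC since ownership is not transferred here):
//
//   PC pc;
//   ierr = KSPGetPC(ksp,&pc); CHKERRQ(ierr);
//   ierr = MakeShellPC(pc,prec,false); CHKERRQ(ierr);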
4615 
4616 // Sets the type of PC to PCSHELL. Uses a PetscPreconditionerFactory to construct the solver
4617 // Takes ownership of the solver created by the factory
4618 PetscErrorCode MakeShellPCWithFactory(PC pc,
4619  mfem::PetscPreconditionerFactory *factory)
4620 {
4621  PetscFunctionBeginUser;
4622  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
4623  ctx->op = NULL;
4624  ctx->ownsop = true;
4625  ctx->factory = factory;
4626  ctx->numprec = 0;
4627 
4628  // In case the PC was already of type SHELL, this will destroy any
4629  // previous user-defined data structure
4630  // We cannot call PCReset as it will wipe out any operator already set
4631  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
4632 
4633  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
4634  ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
4635  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
4636  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
4637  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
4638  CHKERRQ(ierr);
4639  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
4640  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
4641  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
4642  PetscFunctionReturn(0);
4643 }
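// Usage sketch (illustrative; MyFactory and MyCustomSolver are assumed user
// classes): each PCSetUp() triggers __mfem_pc_shell_setup() above, which asks
// the factory for a fresh preconditioner built from the current operator:
//
//   class MyFactory : public mfem::PetscPreconditionerFactory
//   {
//   public:
//      mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh) override
//      { return new MyCustomSolver(oh); }
//   };
//
//   // MyFactory factory; MakeShellPCWithFactory(pc,&factory);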
4644 
4645 // Converts from a list (or a marked Array if islist is false) to an IS
4646 // st indicates the offset where to start numbering
4647 static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
4648  const mfem::Array<int> *list,
4649  PetscInt st, IS* is)
4650 {
4651  PetscInt n = list ? list->Size() : 0,*idxs;
4652  const int *data = list ? list->GetData() : NULL;
4653  PetscErrorCode ierr;
4654 
4655  PetscFunctionBeginUser;
4656  ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
4657  if (islist)
4658  {
4659  for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
4660  }
4661  else
4662  {
4663  PetscInt cum = 0;
4664  for (PetscInt i=0; i<n; i++)
4665  {
4666  if (data[i]) { idxs[cum++] = i+st; }
4667  }
4668  n = cum;
4669  }
4670  ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
4671  CHKERRQ(ierr);
4672  PetscFunctionReturn(0);
4673 }
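// Worked example: with st=10, the list {0,2,5} becomes the IS {10,12,15};
// with islist=false, the marker array {0,1,0,1} becomes the IS {11,13}.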
4674 
4675 // Converts from a marked Array of Vdofs to an IS
4676 // st indicates the offset where to start numbering
4677 // l2l is a vector of matrices generated during RAP
4678 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
4679  mfem::Array<Mat> &pl2l,
4680  const mfem::Array<int> *mark,
4681  PetscInt st, IS* is)
4682 {
4683  mfem::Array<int> sub_dof_marker;
4684  mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
4685  PetscInt nl;
4686  PetscErrorCode ierr;
4687 
4688  PetscFunctionBeginUser;
4689  for (int i = 0; i < pl2l.Size(); i++)
4690  {
4691  PetscInt m,n,*ii,*jj;
4692  PetscBool done;
4693  ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
4694  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
4695  MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
4696  ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
4697 #if defined(PETSC_USE_64BIT_INDICES)
4698  int nnz = (int)ii[m];
4699  int *mii = new int[m+1];
4700  int *mjj = new int[nnz];
4701  for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
4702  for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
4703  l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
4704 #else
4705  l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
4706 #endif
4707  ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
4708  (const PetscInt**)&ii,
4709  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
4710  MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
4711  << i << " l2l matrix");
4712  }
4713  nl = 0;
4714  for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
4715  sub_dof_marker.SetSize(nl);
4716  const int* vdata = mark->GetData();
4717  int* sdata = sub_dof_marker.GetData();
4718  int cumh = 0, cumw = 0;
4719  for (int i = 0; i < l2l.Size(); i++)
4720  {
4721  const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
4722  l2l[i]->Height());
4723  mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
4724  l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
4725  cumh += l2l[i]->Height();
4726  cumw += l2l[i]->Width();
4727  }
4728  ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
4729  for (int i = 0; i < pl2l.Size(); i++)
4730  {
4731  delete l2l[i];
4732  }
4733  PetscFunctionReturn(0);
4734 }
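// In short: each l2l matrix acts as a boolean vdof-to-subspace-dof map, so
// the loop above computes sub_dof_marker = l2l^T * vdof_marker block by
// block before converting the marked array with Convert_Array_IS().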
4735 
4736 #if !defined(PETSC_HAVE_HYPRE)
4737 
4738 #if defined(HYPRE_MIXEDINT)
4739 #error "HYPRE_MIXEDINT not supported"
4740 #endif
4741 
4742 #include "_hypre_parcsr_mv.h"
4743 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
4744 {
4745  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
4746  hypre_CSRMatrix *hdiag,*hoffd;
4747  PetscScalar *da,*oa,*aptr;
4748  PetscInt *dii,*djj,*oii,*ojj,*iptr;
4749  PetscInt i,dnnz,onnz,m,n;
4750  PetscMPIInt size;
4751  PetscErrorCode ierr;
4752 
4753  PetscFunctionBeginUser;
4754  hdiag = hypre_ParCSRMatrixDiag(hA);
4755  hoffd = hypre_ParCSRMatrixOffd(hA);
4756  m = hypre_CSRMatrixNumRows(hdiag);
4757  n = hypre_CSRMatrixNumCols(hdiag);
4758  dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
4759  onnz = hypre_CSRMatrixNumNonzeros(hoffd);
4760  ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
4761  ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
4762  ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
4763  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
4764  CHKERRQ(ierr);
4765  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
4766  CHKERRQ(ierr);
4767  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
4768  CHKERRQ(ierr);
4769  iptr = djj;
4770  aptr = da;
4771  for (i=0; i<m; i++)
4772  {
4773  PetscInt nc = dii[i+1]-dii[i];
4774  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
4775  iptr += nc;
4776  aptr += nc;
4777  }
4778  ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
4779  if (size > 1)
4780  {
4781  PetscInt *offdj,*coffd;
4782 
4783  ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
4784  ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
4785  ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
4786  ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
4787  CHKERRQ(ierr);
4788  offdj = hypre_CSRMatrixJ(hoffd);
4789  coffd = hypre_ParCSRMatrixColMapOffd(hA);
4790  for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
4791  ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
4792  CHKERRQ(ierr);
4793  iptr = ojj;
4794  aptr = oa;
4795  for (i=0; i<m; i++)
4796  {
4797  PetscInt nc = oii[i+1]-oii[i];
4798  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
4799  iptr += nc;
4800  aptr += nc;
4801  }
4802  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
4803  djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
4804  }
4805  else
4806  {
4807  oii = ojj = NULL;
4808  oa = NULL;
4809  ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
4810  }
4811  /* We are responsible for freeing the CSR arrays. However, since we can
4812  take references to a PetscParMatrix but we cannot take references to raw
4813  PETSc arrays, we create PetscContainer objects that keep these arrays
4814  alive and free them when the matrix is destroyed */
4815  void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
4816  const char *names[6] = {"_mfem_csr_dii",
4817  "_mfem_csr_djj",
4818  "_mfem_csr_da",
4819  "_mfem_csr_oii",
4820  "_mfem_csr_ojj",
4821  "_mfem_csr_oa"
4822  };
4823  for (i=0; i<6; i++)
4824  {
4825  PetscContainer c;
4826 
4827  ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
4828  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
4829  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
4830  CHKERRQ(ierr);
4831  ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
4832  CHKERRQ(ierr);
4833  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
4834  }
4835  PetscFunctionReturn(0);
4836 }
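// Note: hypre stores the off-diagonal block with local column indices, so
// the loop `ojj[i] = coffd[offdj[i]]` above translates them to global
// indices via col_map_offd, as MatCreateMPIAIJWithSplitArrays() expects.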
4837 
4838 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
4839 {
4840  Mat lA;
4841  ISLocalToGlobalMapping rl2g,cl2g;
4842  IS is;
4843  hypre_CSRMatrix *hdiag,*hoffd;
4844  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
4845  void *ptrs[2];
4846  const char *names[2] = {"_mfem_csr_aux",
4847  "_mfem_csr_data"
4848  };
4849  PetscScalar *hdd,*hod,*aa,*data;
4850  PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
4851  PetscInt *aux,*ii,*jj;
4852  PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
4853  PetscErrorCode ierr;
4854 
4855  PetscFunctionBeginUser;
4856  /* access relevant information in ParCSR */
4857  str = hypre_ParCSRMatrixFirstRowIndex(hA);
4858  stc = hypre_ParCSRMatrixFirstColDiag(hA);
4859  hdiag = hypre_ParCSRMatrixDiag(hA);
4860  hoffd = hypre_ParCSRMatrixOffd(hA);
4861  dr = hypre_CSRMatrixNumRows(hdiag);
4862  dc = hypre_CSRMatrixNumCols(hdiag);
4863  nnz = hypre_CSRMatrixNumNonzeros(hdiag);
4864  hdi = hypre_CSRMatrixI(hdiag);
4865  hdj = hypre_CSRMatrixJ(hdiag);
4866  hdd = hypre_CSRMatrixData(hdiag);
4867  oc = hypre_CSRMatrixNumCols(hoffd);
4868  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
4869  hoi = hypre_CSRMatrixI(hoffd);
4870  hoj = hypre_CSRMatrixJ(hoffd);
4871  hod = hypre_CSRMatrixData(hoffd);
4872 
4873  /* generate l2g maps for rows and cols */
4874  ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
4875  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
4876  ierr = ISDestroy(&is); CHKERRQ(ierr);
4877  col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
4878  ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
4879  for (i=0; i<dc; i++) { aux[i] = i+stc; }
4880  for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
4881  ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
4882  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
4883  ierr = ISDestroy(&is); CHKERRQ(ierr);
4884 
4885  /* create MATIS object */
4886  ierr = MatCreate(comm,pA); CHKERRQ(ierr);
4887  ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
4888  ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
4889  ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
4890  ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
4891  ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
4892 
4893  /* merge local matrices */
4894  ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
4895  ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
4896  ii = aux;
4897  jj = aux+dr+1;
4898  aa = data;
4899  *ii = *(hdi++) + *(hoi++);
4900  for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
4901  {
4902  PetscScalar *aold = aa;
4903  PetscInt *jold = jj,nc = jd+jo;
4904  for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
4905  for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
4906  *(++ii) = *(hdi++) + *(hoi++);
4907  ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
4908  }
4909  for (; cum<dr; cum++) { *(++ii) = nnz; }
4910  ii = aux;
4911  jj = aux+dr+1;
4912  aa = data;
4913  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
4914  CHKERRQ(ierr);
4915  ptrs[0] = aux;
4916  ptrs[1] = data;
4917  for (i=0; i<2; i++)
4918  {
4919  PetscContainer c;
4920 
4921  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
4922  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
4923  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
4924  CHKERRQ(ierr);
4925  ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
4926  CHKERRQ(ierr);
4927  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
4928  }
4929  ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
4930  ierr = MatDestroy(&lA); CHKERRQ(ierr);
4931  ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4932  ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4933  PetscFunctionReturn(0);
4934 }
4935 #endif
4936 
4937 #endif // MFEM_USE_PETSC
4938 #endif // MFEM_USE_MPI