MFEM v4.1.0
Finite element discretization library
petsc.cpp
1 // Copyright (c) 2010-2020, Lawrence Livermore National Security, LLC. Produced
2 // at the Lawrence Livermore National Laboratory. All Rights reserved. See files
3 // LICENSE and NOTICE for details. LLNL-CODE-806117.
4 //
5 // This file is part of the MFEM library. For more information and source code
6 // availability visit https://mfem.org.
7 //
8 // MFEM is free software; you can redistribute it and/or modify it under the
9 // terms of the BSD-3 license. We welcome feedback and contributions, see file
10 // CONTRIBUTING.md for details.
11 
12 // Author: Stefano Zampini <stefano.zampini@gmail.com>
13 
14 #include "../config/config.hpp"
15 
16 #ifdef MFEM_USE_MPI
17 #ifdef MFEM_USE_PETSC
18 
19 #include "linalg.hpp"
20 #include "../fem/fem.hpp"
21 
22 #include "petsc.h"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
25 #endif
26 
27 // Backward compatibility
28 #if PETSC_VERSION_LT(3,12,0)
29 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
30 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
31 #endif
32 
33 #include <fstream>
34 #include <iomanip>
35 #include <cmath>
36 #include <cstdlib>
37 
38 // Note: there are additional #include statements below.
39 
40 // Error handling
41 // Prints PETSc's stacktrace and then calls MFEM_ABORT
42 // We cannot use PETSc's CHKERRQ since it returns a PetscErrorCode
43 #define PCHKERRQ(obj,err) do { \
44  if ((err)) \
45  { \
46  PetscError(PetscObjectComm((PetscObject)(obj)),__LINE__,_MFEM_FUNC_NAME, \
47  __FILE__,(err),PETSC_ERROR_REPEAT,NULL); \
48  MFEM_ABORT("Error in PETSc. See stacktrace above."); \
49  } \
50  } while(0);
51 #define CCHKERRQ(comm,err) do { \
52  if ((err)) \
53  { \
54  PetscError(comm,__LINE__,_MFEM_FUNC_NAME, \
55  __FILE__,(err),PETSC_ERROR_REPEAT,NULL); \
56  MFEM_ABORT("Error in PETSc. See stacktrace above."); \
57  } \
58  } while(0);
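
Both macros follow the same pattern throughout this file; a minimal sketch (the vector `v` is illustrative): CCHKERRQ is used when only an MPI communicator is at hand, PCHKERRQ once a PETSc object exists.

    Vec v;
    ierr = VecCreate(PETSC_COMM_WORLD,&v); CCHKERRQ(PETSC_COMM_WORLD,ierr);
    ierr = VecSetSizes(v,10,PETSC_DECIDE); PCHKERRQ(v,ierr);
    ierr = VecDestroy(&v); CCHKERRQ(PETSC_COMM_WORLD,ierr);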
59 
60 // Callback functions: these functions will be called by PETSc
61 static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
62 static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
63 static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
64  void*);
65 static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
66 static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
67  PetscReal,Mat,
68  Mat,void*);
69 static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
70  Mat,Mat,Mat,Mat);
71 static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
72 static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
73 static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
74 static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
75 static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
76 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
77  PetscBool*,PetscBool*,void*);
78 static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
79 static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
80 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
81 static PetscErrorCode __mfem_pc_shell_setup(PC);
82 static PetscErrorCode __mfem_pc_shell_destroy(PC);
83 static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
84 static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
85 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
86 static PetscErrorCode __mfem_mat_shell_destroy(Mat);
87 static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
88 static PetscErrorCode __mfem_array_container_destroy(void*);
89 static PetscErrorCode __mfem_matarray_container_destroy(void*);
90 static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
91 
92 // auxiliary functions
93 static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
94  PetscInt,IS*);
95 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
96  const mfem::Array<int>*,PetscInt,IS*);
97 static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
98 static PetscErrorCode MakeShellPCWithFactory(PC,
100 
101 // Equivalent functions are present in PETSc source code
102 // if PETSc has been compiled with hypre support
103 // We provide them here in case PETSC_HAVE_HYPRE is not defined
104 #if !defined(PETSC_HAVE_HYPRE)
105 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
106 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
107 #endif
108 
109 // structs used by PETSc code
110 typedef struct
111 {
112  mfem::Solver *op;
113  mfem::PetscPreconditionerFactory *factory;
114  bool ownsop;
115  unsigned long int numprec;
116 } __mfem_pc_shell_ctx;
117 
118 typedef struct
119 {
120  mfem::Operator *op; // The nonlinear operator
121  mfem::PetscBCHandler *bchandler; // Handling of essential bc
122  mfem::Vector *work; // Work vector
123  mfem::Operator::Type jacType; // OperatorType for the Jacobian
124  // Objective for line search
125  void (*objective)(mfem::Operator *op, const mfem::Vector&, double*);
126  // PostCheck function (to be called after successful line search)
127  void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
128  mfem::Vector&, bool&, bool&);
129  // General purpose update function (to be called at the beginning of
130  // each nonlinear step)
131  void (*update)(mfem::Operator *op, int,
132  const mfem::Vector&, const mfem::Vector&,
133  const mfem::Vector&, const mfem::Vector&);
134 } __mfem_snes_ctx;
135 
136 typedef struct
137 {
138  mfem::TimeDependentOperator *op; // The time-dependent operator
139  mfem::PetscBCHandler *bchandler; // Handling of essential bc
140  mfem::Vector *work; // Work vector
141  mfem::Vector *work2; // Work vector
142  mfem::Operator::Type jacType; // OperatorType for the Jacobian
143  enum mfem::PetscODESolver::Type type;
144  PetscReal cached_shift;
145  PetscObjectState cached_ijacstate;
146  PetscObjectState cached_rhsjacstate;
147  PetscObjectState cached_splits_xstate;
148  PetscObjectState cached_splits_xdotstate;
149 } __mfem_ts_ctx;
150 
151 typedef struct
152 {
153  mfem::PetscSolver *solver; // The solver object
154  mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
155 } __mfem_monitor_ctx;
156 
157 // use global scope ierr to check PETSc errors inside mfem calls
158 static PetscErrorCode ierr;
159 
160 using namespace std;
161 
162 namespace mfem
163 {
164 
165 void MFEMInitializePetsc()
166 {
167  MFEMInitializePetsc(NULL,NULL,NULL,NULL);
168 }
169 
170 void MFEMInitializePetsc(int *argc,char*** argv)
171 {
172  MFEMInitializePetsc(argc,argv,NULL,NULL);
173 }
174 
175 void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
176  const char help[])
177 {
178  ierr = PetscInitialize(argc,argv,rc_file,help);
179  MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
180 }
181 
182 void MFEMFinalizePetsc()
183 {
184  ierr = PetscFinalize();
185  MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
186 }
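
A sketch of a typical driver built on these entry points (the options file name "petscrc" is illustrative; MPI must be initialized before PETSc and finalized after it):

    #include "mfem.hpp"
    using namespace mfem;

    int main(int argc, char *argv[])
    {
       MPI_Init(&argc, &argv);
       MFEMInitializePetsc(&argc, &argv, "petscrc", NULL);
       // ... assemble and solve using the PetscParMatrix/PetscSolver classes ...
       MFEMFinalizePetsc();
       MPI_Finalize();
       return 0;
    }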
187 
188 // PetscParVector methods
189 
190 void PetscParVector::_SetDataAndSize_()
191 {
192  const PetscScalar *array;
193  PetscInt n;
194 
195  ierr = VecGetArrayRead(x,&array); PCHKERRQ(x,ierr);
196  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
197  SetDataAndSize((PetscScalar*)array,n);
198  ierr = VecRestoreArrayRead(x,&array); PCHKERRQ(x,ierr);
199 }
200 
201 PetscInt PetscParVector::GlobalSize() const
202 {
203  PetscInt N;
204  ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
205  return N;
206 }
207 
208 PetscParVector::PetscParVector(MPI_Comm comm, const Vector &_x,
209  bool copy) : Vector()
210 {
211  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
212  ierr = VecSetSizes(x,_x.Size(),PETSC_DECIDE); PCHKERRQ(x,ierr);
213  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
214  _SetDataAndSize_();
215  if (copy)
216  {
217  PetscScalar *array;
218 
219  ierr = VecGetArray(x,&array); PCHKERRQ(x,ierr);
220  for (int i = 0; i < _x.Size(); i++) { array[i] = _x[i]; }
221  ierr = VecRestoreArray(x,&array); PCHKERRQ(x,ierr);
222  }
223 }
224 
225 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
226  PetscInt *col) : Vector()
227 {
228  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
229  if (col)
230  {
231  PetscMPIInt myid;
232  MPI_Comm_rank(comm, &myid);
233  ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
234  }
235  else
236  {
237  ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
238  }
239  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
240  _SetDataAndSize_();
241 }
242 
243 PetscParVector::~PetscParVector()
244 {
245  MPI_Comm comm = PetscObjectComm((PetscObject)x);
246  ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
247 }
248 
249 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
250  PetscScalar *_data, PetscInt *col) : Vector()
251 {
252  MFEM_VERIFY(col,"Missing distribution");
253  PetscMPIInt myid;
254  MPI_Comm_rank(comm, &myid);
255  ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,_data,
256  &x); CCHKERRQ(comm,ierr)
257  _SetDataAndSize_();
258 }
259 
260 PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
261 {
262  ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
263  _SetDataAndSize_();
264 }
265 
266 PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
267  bool transpose, bool allocate) : Vector()
268 {
269  PetscInt loc = transpose ? op.Height() : op.Width();
270  if (allocate)
271  {
272  ierr = VecCreate(comm,&x);
273  CCHKERRQ(comm,ierr);
274  ierr = VecSetSizes(x,loc,PETSC_DECIDE);
275  PCHKERRQ(x,ierr);
276  ierr = VecSetType(x,VECSTANDARD);
277  PCHKERRQ(x,ierr);
278  ierr = VecSetUp(x);
279  PCHKERRQ(x,ierr);
280  }
281  else
282  {
283  ierr = VecCreateMPIWithArray(comm,1,loc,PETSC_DECIDE,NULL,
284  &x); CCHKERRQ(comm,ierr);
285  }
286  _SetDataAndSize_();
287 }
288 
289 PetscParVector::PetscParVector(const PetscParMatrix &A,
290  bool transpose, bool allocate) : Vector()
291 {
292  Mat pA = const_cast<PetscParMatrix&>(A);
293  if (!transpose)
294  {
295  ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
296  }
297  else
298  {
299  ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
300  }
301  if (!allocate)
302  {
303  ierr = VecReplaceArray(x,NULL); PCHKERRQ(x,ierr);
304  }
305  _SetDataAndSize_();
306 }
307 
308 PetscParVector::PetscParVector(Vec y, bool ref) : Vector()
309 {
310  if (ref)
311  {
312  ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
313  }
314  x = y;
315  _SetDataAndSize_();
316 }
317 
318 PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
319 {
320 
321  HYPRE_Int* offsets = pfes->GetTrueDofOffsets();
322  MPI_Comm comm = pfes->GetComm();
323  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
324 
325  PetscMPIInt myid = 0;
326  if (!HYPRE_AssumedPartitionCheck())
327  {
328  MPI_Comm_rank(comm,&myid);
329  }
330  ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
331  PCHKERRQ(x,ierr);
332  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
333  _SetDataAndSize_();
334 }
335 
336 MPI_Comm PetscParVector::GetComm() const
337 {
338  return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
339 }
340 
341 Vector * PetscParVector::GlobalVector() const
342 {
343  VecScatter scctx;
344  Vec vout;
345  PetscScalar *array;
346  PetscInt size;
347 
348  ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
349  ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
350  PCHKERRQ(x,ierr);
351  ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
352  PCHKERRQ(x,ierr);
353  ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
354  ierr = VecGetArray(vout,&array); PCHKERRQ(x,ierr);
355  ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
356  Array<PetscScalar> data(size);
357  data.Assign(array);
358  ierr = VecRestoreArray(vout,&array); PCHKERRQ(x,ierr);
359  ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
360  Vector *v = new Vector(data, internal::to_int(size));
361  v->MakeDataOwner();
362  data.LoseData();
363  return v;
364 }
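
A usage sketch tying the constructors above to GlobalVector() (assumes MPI and PETSc are initialized; the sizes are illustrative):

    Vector loc(10);
    loc = 1.0;
    PetscParVector pv(MPI_COMM_WORLD, loc, true); // deep copy of the local data
    Vector *glob = pv.GlobalVector();             // gathered on every process
    delete glob;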
365 
366 PetscParVector& PetscParVector::operator= (PetscScalar d)
367 {
368  ierr = VecSet(x,d); PCHKERRQ(x,ierr);
369  return *this;
370 }
371 
372 PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
373  const Array<PetscScalar>& vals)
374 {
375  MFEM_VERIFY(idx.Size() == vals.Size(),
376  "Size mismatch between indices and values");
377  PetscInt n = idx.Size();
378  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
379  PCHKERRQ(x,ierr);
380  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
381  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
382  return *this;
383 }
384 
385 PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
386  const Array<PetscScalar>& vals)
387 {
388  MFEM_VERIFY(idx.Size() == vals.Size(),
389  "Size mismatch between indices and values");
390  PetscInt n = idx.Size();
391  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
392  PCHKERRQ(x,ierr);
393  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
394  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
395  return *this;
396 }
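
A sketch of assembly by global index (out-of-process entries are handled by the VecAssemblyBegin/End calls above):

    PetscParVector v(MPI_COMM_WORLD, 100); // global size 100
    Array<PetscInt> idx(2);
    Array<PetscScalar> vals(2);
    idx[0] = 0;    idx[1] = 99;
    vals[0] = 1.0; vals[1] = 2.0;
    v.SetValues(idx, vals); // insert
    v.AddValues(idx, vals); // accumulate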
397 
398 PetscParVector& PetscParVector::operator= (const PetscParVector &y)
399 {
400  ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
401  return *this;
402 }
403 
404 PetscParVector& PetscParVector::operator+= (const PetscParVector &y)
405 {
406  ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
407  return *this;
408 }
409 
410 PetscParVector& PetscParVector::operator-= (const PetscParVector &y)
411 {
412  ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
413  return *this;
414 }
415 
416 PetscParVector& PetscParVector::operator*= (PetscScalar s)
417 {
418  ierr = VecScale(x,s); PCHKERRQ(x,ierr);
419  return *this;
420 }
421 
422 PetscParVector& PetscParVector::operator+= (PetscScalar s)
423 {
424  ierr = VecShift(x,s); PCHKERRQ(x,ierr);
425  return *this;
426 }
427 
428 void PetscParVector::PlaceArray(PetscScalar *temp_data)
429 {
430  ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
431 }
432 
433 void PetscParVector::ResetArray()
434 {
435  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
436 }
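
PlaceArray()/ResetArray() let the vector temporarily view external memory without copying; PetscParMatrix::Mult() below relies on exactly this pattern. A sketch:

    PetscParVector pv(MPI_COMM_WORLD, 10);
    Vector w(pv.Size());
    w = 0.0;
    pv.PlaceArray(w.GetData()); // pv now views w's data
    pv += 1.0;                  // shifts the entries of w in place
    pv.ResetArray();            // detach w and restore pv's own array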
437 
438 void PetscParVector::Randomize(PetscInt seed)
439 {
440  PetscRandom rctx = NULL;
441 
442  if (seed)
443  {
444  ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
445  PCHKERRQ(x,ierr);
446  ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
447  ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
448  }
449  ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
450  ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
451 }
452 
453 void PetscParVector::Print(const char *fname, bool binary) const
454 {
455  if (fname)
456  {
457  PetscViewer view;
458 
459  if (binary)
460  {
461  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
462  FILE_MODE_WRITE,&view);
463  }
464  else
465  {
466  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
467  }
468  PCHKERRQ(x,ierr);
469  ierr = VecView(x,view); PCHKERRQ(x,ierr);
470  ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
471  }
472  else
473  {
474  ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
475  }
476 }
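
A sketch of the two output modes (given a PetscParVector pv):

    pv.Print();              // ASCII to stdout
    pv.Print("v.dat", true); // PETSc binary file v.dat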
477 
478 // PetscParMatrix methods
479 
480 PetscInt PetscParMatrix::GetRowStart() const
481 {
482  PetscInt N;
483  ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
484  return N;
485 }
486 
487 PetscInt PetscParMatrix::GetColStart() const
488 {
489  PetscInt N;
490  ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
491  return N;
492 }
493 
494 PetscInt PetscParMatrix::GetNumRows() const
495 {
496  PetscInt N;
497  ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
498  return N;
499 }
500 
501 PetscInt PetscParMatrix::GetNumCols() const
502 {
503  PetscInt N;
504  ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
505  return N;
506 }
507 
508 PetscInt PetscParMatrix::M() const
509 {
510  PetscInt N;
511  ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
512  return N;
513 }
514 
515 PetscInt PetscParMatrix::N() const
516 {
517  PetscInt N;
518  ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
519  return N;
520 }
521 
522 PetscInt PetscParMatrix::NNZ() const
523 {
524  MatInfo info;
525  ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
526  return (PetscInt)info.nz_used;
527 }
528 
529 void PetscParMatrix::Init()
530 {
531  A = NULL;
532  X = Y = NULL;
533  height = width = 0;
534 }
535 
536 PetscParMatrix::PetscParMatrix()
537 {
538  Init();
539 }
540 
541 PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
542  const mfem::Array<PetscInt>& rows, const mfem::Array<PetscInt>& cols)
543 {
544  Init();
545 
546  Mat B = const_cast<PetscParMatrix&>(pB);
547 
548  IS isr,isc;
549  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
550  rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
551  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
552  cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
553  ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
554  ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
555  ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
556 
557  height = GetNumRows();
558  width = GetNumCols();
559 }
560 
561 PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
562 {
563  Init();
564  height = pa->Height();
565  width = pa->Width();
566  ConvertOperator(pa->GetComm(),*pa,&A,tid);
567 }
568 
569 PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
570 {
571  Init();
572  height = ha->Height();
573  width = ha->Width();
574  ConvertOperator(ha->GetComm(),*ha,&A,tid);
575 }
576 
577 PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
578 {
579  Init();
580  height = sa->Height();
581  width = sa->Width();
582  ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
583 }
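
A sketch converting a serial SparseMatrix through this constructor (a 10x10 diagonal matrix is illustrative):

    SparseMatrix S(10, 10);
    for (int i = 0; i < 10; i++) { S.Add(i, i, 2.0); }
    S.Finalize();
    PetscParMatrix P(&S, Operator::PETSC_MATAIJ);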
584 
585 PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
586  Operator::Type tid)
587 {
588  Init();
589  height = op->Height();
590  width = op->Width();
591  ConvertOperator(comm,*op,&A,tid);
592 }
593 
594 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
595  PetscInt *row_starts, SparseMatrix *diag,
596  Operator::Type tid)
597 {
598  Init();
599  BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
600  tid==PETSC_MATAIJ,&A);
601  // update base class
602  height = GetNumRows();
603  width = GetNumCols();
604 }
605 
606 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
607  PetscInt global_num_cols, PetscInt *row_starts,
608  PetscInt *col_starts, SparseMatrix *diag,
609  Operator::Type tid)
610 {
611  Init();
612  BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
613  tid==PETSC_MATAIJ,&A);
614  // update base class
615  height = GetNumRows();
616  width = GetNumCols();
617 }
618 
619 PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
620 {
621  if (A)
622  {
623  MPI_Comm comm = PetscObjectComm((PetscObject)A);
624  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
625  if (X) { delete X; }
626  if (Y) { delete Y; }
627  X = Y = NULL;
628  }
629  height = B.Height();
630  width = B.Width();
631 #if defined(PETSC_HAVE_HYPRE)
632  ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
633 #else
634  ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
635 #endif
636  return *this;
637 }
638 
639 PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
640 {
641  if (A)
642  {
643  MPI_Comm comm = PetscObjectComm((PetscObject)A);
644  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
645  if (X) { delete X; }
646  if (Y) { delete Y; }
647  X = Y = NULL;
648  }
649  height = B.Height();
650  width = B.Width();
651  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
652  return *this;
653 }
654 
655 PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
656 {
657  if (!A)
658  {
659  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
660  }
661  else
662  {
663  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
664  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
665  ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
666  }
667  return *this;
668 }
669 
670 PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
671 {
672  if (!A)
673  {
674  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
675  ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
676  }
677  else
678  {
679  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
680  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
681  ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
682  }
683  return *this;
684 }
685 
686 void PetscParMatrix::
687 BlockDiagonalConstructor(MPI_Comm comm,
688  PetscInt *row_starts, PetscInt *col_starts,
689  SparseMatrix *diag, bool assembled, Mat* Ad)
690 {
691  Mat A;
692  PetscInt lrsize,lcsize,rstart,cstart;
693  PetscMPIInt myid = 0,commsize;
694 
695  ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
696  if (!HYPRE_AssumedPartitionCheck())
697  {
698  ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
699  }
700  lrsize = row_starts[myid+1]-row_starts[myid];
701  rstart = row_starts[myid];
702  lcsize = col_starts[myid+1]-col_starts[myid];
703  cstart = col_starts[myid];
704 
705  if (!assembled)
706  {
707  IS is;
708  ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
709  ISLocalToGlobalMapping rl2g,cl2g;
710  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
711  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
712  if (row_starts != col_starts)
713  {
714  ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
715  CCHKERRQ(comm,ierr);
716  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
717  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
718  }
719  else
720  {
721  ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
722  cl2g = rl2g;
723  }
724 
725  // Create the PETSc object (MATIS format)
726  ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
727  ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
728  PCHKERRQ(A,ierr);
729  ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
730  ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
731  ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
732  ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
733 
734  // Copy SparseMatrix into PETSc SeqAIJ format
735  Mat lA;
736  ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
737  int *II = diag->GetI();
738  int *JJ = diag->GetJ();
739 #if defined(PETSC_USE_64BIT_INDICES)
740  PetscInt *pII,*pJJ;
741  int m = diag->Height()+1, nnz = II[diag->Height()];
742  ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
743  for (int i = 0; i < m; i++) { pII[i] = II[i]; }
744  for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
745  ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
746  diag->GetData()); PCHKERRQ(lA,ierr);
747  ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
748 #else
749  ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
750  diag->GetData()); PCHKERRQ(lA,ierr);
751 #endif
752  }
753  else
754  {
755  PetscScalar *da;
756  PetscInt *dii,*djj,*oii,
757  m = diag->Height()+1, nnz = diag->NumNonZeroElems();
758 
759  diag->SortColumnIndices();
760  // if we can take ownership of the SparseMatrix arrays, we can avoid this
761  // step
762  ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
763  ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
764  ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
765  if (sizeof(PetscInt) == sizeof(int))
766  {
767  ierr = PetscMemcpy(dii,diag->GetI(),m*sizeof(PetscInt));
768  CCHKERRQ(PETSC_COMM_SELF,ierr);
769  ierr = PetscMemcpy(djj,diag->GetJ(),nnz*sizeof(PetscInt));
770  CCHKERRQ(PETSC_COMM_SELF,ierr);
771  }
772  else
773  {
774  int *iii = diag->GetI();
775  int *jjj = diag->GetJ();
776  for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
777  for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
778  }
779  ierr = PetscMemcpy(da,diag->GetData(),nnz*sizeof(PetscScalar));
780  CCHKERRQ(PETSC_COMM_SELF,ierr);
781  ierr = PetscCalloc1(m,&oii);
782  CCHKERRQ(PETSC_COMM_SELF,ierr);
783  if (commsize > 1)
784  {
785  ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
786  PETSC_DECIDE,
787  dii,djj,da,oii,NULL,NULL,&A);
788  CCHKERRQ(comm,ierr);
789  }
790  else
791  {
792  ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
793  CCHKERRQ(comm,ierr);
794  }
795 
796  void *ptrs[4] = {dii,djj,da,oii};
797  const char *names[4] = {"_mfem_csr_dii",
798  "_mfem_csr_djj",
799  "_mfem_csr_da",
800  "_mfem_csr_oii",
801  };
802  for (PetscInt i=0; i<4; i++)
803  {
804  PetscContainer c;
805 
806  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
807  ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
808  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
809  CCHKERRQ(comm,ierr);
810  ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
811  CCHKERRQ(comm,ierr);
812  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
813  }
814  }
815 
816  // Tell PETSc the matrix is ready to be used
817  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
818  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
819 
820  *Ad = A;
821 }
822 
823 MPI_Comm PetscParMatrix::GetComm() const
824 {
825  return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
826 }
827 
828 // TODO ADD THIS CONSTRUCTOR
829 //PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
830 // PetscInt glob_ncols, int *I, PetscInt *J,
831 // double *data, PetscInt *rows, PetscInt *cols)
832 //{
833 //}
834 
835 // TODO This should take a reference on op but how?
836 void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
837 {
838  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
839  ierr = MatSetSizes(*A,op->Height(),op->Width(),
840  PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
841  ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
842  ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
843  ierr = MatShellSetOperation(*A,MATOP_MULT,
844  (void (*)())__mfem_mat_shell_apply);
845  PCHKERRQ(A,ierr);
846  ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
847  (void (*)())__mfem_mat_shell_apply_transpose);
848  PCHKERRQ(A,ierr);
849  ierr = MatShellSetOperation(*A,MATOP_COPY,
850  (void (*)())__mfem_mat_shell_copy);
851  PCHKERRQ(A,ierr);
852  ierr = MatShellSetOperation(*A,MATOP_DESTROY,
853  (void (*)())__mfem_mat_shell_destroy);
854  PCHKERRQ(A,ierr);
855  ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
856 }
857 
858 void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
859  Operator::Type tid)
860 {
861  PetscParMatrix *pA = const_cast<PetscParMatrix *>
862  (dynamic_cast<const PetscParMatrix *>(&op));
863  HypreParMatrix *pH = const_cast<HypreParMatrix *>
864  (dynamic_cast<const HypreParMatrix *>(&op));
865  BlockOperator *pB = const_cast<BlockOperator *>
866  (dynamic_cast<const BlockOperator *>(&op));
867  IdentityOperator *pI = const_cast<IdentityOperator *>
868  (dynamic_cast<const IdentityOperator *>(&op));
869  SparseMatrix *pS = const_cast<SparseMatrix *>
870  (dynamic_cast<const SparseMatrix *>(&op));
871 
872  if (pA && tid == ANY_TYPE) // use same object and return
873  {
874  ierr = PetscObjectReference((PetscObject)(pA->A));
875  CCHKERRQ(pA->GetComm(),ierr);
876  *A = pA->A;
877  return;
878  }
879 
880  PetscBool avoidmatconvert = PETSC_FALSE;
881  if (pA) // we test for these types since MatConvert will fail
882  {
883  ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
884  MATSHELL,"");
885  CCHKERRQ(comm,ierr);
886  }
887  if (pA && !avoidmatconvert)
888  {
889  Mat At = NULL;
890  PetscBool istrans;
891 #if PETSC_VERSION_LT(3,10,0)
892  PetscBool ismatis;
893 #endif
894 
895  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
896  CCHKERRQ(pA->GetComm(),ierr);
897  if (!istrans)
898  {
899  if (tid == pA->GetType()) // use same object and return
900  {
901  ierr = PetscObjectReference((PetscObject)(pA->A));
902  CCHKERRQ(pA->GetComm(),ierr);
903  *A = pA->A;
904  return;
905  }
906 #if PETSC_VERSION_LT(3,10,0)
907  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
908  CCHKERRQ(pA->GetComm(),ierr);
909 #endif
910  }
911  else
912  {
913  ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
914 #if PETSC_VERSION_LT(3,10,0)
915  ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
916 #endif
917  CCHKERRQ(pA->GetComm(),ierr);
918  }
919 
920  // Try to convert
921  if (tid == PETSC_MATAIJ)
922  {
923 #if PETSC_VERSION_LT(3,10,0)
924  if (ismatis)
925  {
926  if (istrans)
927  {
928  Mat B;
929 
930  ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
931  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
932  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
933  }
934  else
935  {
936  ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
937  PCHKERRQ(pA->A,ierr);
938  }
939  }
940  else
941 #endif
942  {
943  PetscMPIInt size;
944  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
945 
946  // call MatConvert and see if a converter is available
947  if (istrans)
948  {
949  Mat B;
950  ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
951  PCHKERRQ(pA->A,ierr);
952  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
953  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
954  }
955  else
956  {
957  ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
958  PCHKERRQ(pA->A,ierr);
959  }
960  }
961  }
962  else if (tid == PETSC_MATIS)
963  {
964  if (istrans)
965  {
966  Mat B;
967  ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
968  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
969  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
970  }
971  else
972  {
973  ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
974  }
975  }
976  else if (tid == PETSC_MATHYPRE)
977  {
978 #if defined(PETSC_HAVE_HYPRE)
979  if (istrans)
980  {
981  Mat B;
982  ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
983  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
984  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
985  }
986  else
987  {
988  ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
989  }
990 #else
991  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
992 #endif
993  }
994  else if (tid == PETSC_MATSHELL)
995  {
996  MakeWrapper(comm,&op,A);
997  }
998  else
999  {
1000  MFEM_ABORT("Unsupported operator type conversion " << tid)
1001  }
1002  }
1003  else if (pH)
1004  {
1005  if (tid == PETSC_MATAIJ)
1006  {
1007 #if defined(PETSC_HAVE_HYPRE)
1008  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1009  PETSC_USE_POINTER,A);
1010 #else
1011  ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1012 #endif
1013  CCHKERRQ(pH->GetComm(),ierr);
1014  }
1015  else if (tid == PETSC_MATIS)
1016  {
1017 #if defined(PETSC_HAVE_HYPRE)
1018  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1019  PETSC_USE_POINTER,A);
1020 #else
1021  ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1022 #endif
1023  CCHKERRQ(pH->GetComm(),ierr);
1024  }
1025  else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
1026  {
1027 #if defined(PETSC_HAVE_HYPRE)
1028  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1029  PETSC_USE_POINTER,A);
1030  CCHKERRQ(pH->GetComm(),ierr);
1031 #else
1032  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1033 #endif
1034  }
1035  else if (tid == PETSC_MATSHELL)
1036  {
1037  MakeWrapper(comm,&op,A);
1038  }
1039  else
1040  {
1041  MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
1042  " is not implemented");
1043  }
1044  }
1045  else if (pB)
1046  {
1047  Mat *mats,*matsl2l = NULL;
1048  PetscInt i,j,nr,nc;
1049 
1050  nr = pB->NumRowBlocks();
1051  nc = pB->NumColBlocks();
1052  ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1053  if (tid == PETSC_MATIS)
1054  {
1055  ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1056  }
1057  for (i=0; i<nr; i++)
1058  {
1059  PetscBool needl2l = PETSC_TRUE;
1060 
1061  for (j=0; j<nc; j++)
1062  {
1063  if (!pB->IsZeroBlock(i,j))
1064  {
1065  ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
1066  if (tid == PETSC_MATIS && needl2l)
1067  {
1068  PetscContainer c;
1069  ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
1070  (PetscObject*)&c);
1071  PCHKERRQ(mats[i*nc+j],ierr);
1072  // special case for block operators: the local Vdofs should be
1073  // ordered as:
1074  // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
1075  // with m fields, Ni the number of Vdofs for the i-th field
1076  if (c)
1077  {
1078  Array<Mat> *l2l = NULL;
1079  ierr = PetscContainerGetPointer(c,(void**)&l2l);
1080  PCHKERRQ(c,ierr);
1081  MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
1082  << l2l->Size() << " for block row " << i );
1083  ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
1084  PCHKERRQ(c,ierr);
1085  matsl2l[i] = (*l2l)[0];
1086  needl2l = PETSC_FALSE;
1087  }
1088  }
1089  }
1090  }
1091  }
1092  ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1093  if (tid == PETSC_MATIS)
1094  {
1095  ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1096 
1097  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
1098  for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1099  ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1100 
1101  PetscContainer c;
1102  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1103  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1104  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1105  PCHKERRQ(c,ierr);
1106  ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
1107  PCHKERRQ((*A),ierr);
1108  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1109  }
1110  for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1111  ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1112  }
1113  else if (pI && tid == PETSC_MATAIJ)
1114  {
1115  PetscInt rst;
1116 
1117  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1118  ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
1119  PCHKERRQ(A,ierr);
1120  ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1121  ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1122  ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1123  ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1124  ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1125  for (PetscInt i = rst; i < rst+pI->Height(); i++)
1126  {
1127  ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1128  }
1129  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1130  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1131  }
1132  else if (pS)
1133  {
1134  if (tid == PETSC_MATSHELL)
1135  {
1136  MakeWrapper(comm,&op,A);
1137  }
1138  else
1139  {
1140  Mat B;
1141  PetscScalar *pdata;
1142  PetscInt *pii,*pjj,*oii;
1143  PetscMPIInt size;
1144 
1145  int m = pS->Height();
1146  int n = pS->Width();
1147  int *ii = pS->GetI();
1148  int *jj = pS->GetJ();
1149  double *data = pS->GetData();
1150 
1151  ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1152  ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1153  ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1154  pii[0] = ii[0];
1155  for (int i = 0; i < m; i++)
1156  {
1157  bool issorted = true;
1158  pii[i+1] = ii[i+1];
1159  for (int j = ii[i]; j < ii[i+1]; j++)
1160  {
1161  pjj[j] = jj[j];
1162  if (j != ii[i] && issorted) { issorted = (pjj[j] > pjj[j-1]); }
1163  pdata[j] = data[j];
1164  }
1165  if (!issorted)
1166  {
1167  ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1168  CCHKERRQ(PETSC_COMM_SELF,ierr);
1169  }
1170  }
1171 
1172  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1173  if (size == 1)
1174  {
1175  ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1176  CCHKERRQ(comm,ierr);
1177  oii = NULL;
1178  }
1179  else // block diagonal constructor
1180  {
1181  ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1182  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1183  PETSC_DECIDE,
1184  pii,pjj,pdata,oii,NULL,NULL,&B);
1185  CCHKERRQ(comm,ierr);
1186  }
1187  void *ptrs[4] = {pii,pjj,pdata,oii};
1188  const char *names[4] = {"_mfem_csr_pii",
1189  "_mfem_csr_pjj",
1190  "_mfem_csr_pdata",
1191  "_mfem_csr_oii"
1192  };
1193  for (int i=0; i<4; i++)
1194  {
1195  PetscContainer c;
1196 
1197  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1198  ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1199  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1200  PCHKERRQ(B,ierr);
1201  ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
1202  PCHKERRQ(B,ierr);
1203  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1204  }
1205  if (tid == PETSC_MATAIJ)
1206  {
1207  *A = B;
1208  }
1209  else if (tid == PETSC_MATHYPRE)
1210  {
1211  ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1212  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1213  }
1214  else if (tid == PETSC_MATIS)
1215  {
1216  ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1217  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1218  }
1219  else
1220  {
1221  MFEM_ABORT("Unsupported operator type conversion " << tid)
1222  }
1223  }
1224  }
1225  else // fallback to general operator
1226  {
1227  MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
1228  "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1229  MakeWrapper(comm,&op,A);
1230  if (tid == PETSC_MATAIJ)
1231  {
1232  Mat B;
1233  PetscBool isaij;
1234 
1235  ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1236  ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
1237  CCHKERRQ(comm,ierr);
1238  ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1239  if (!isaij)
1240  {
1241  ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1242  ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1243  }
1244  else
1245  {
1246  *A = B;
1247  }
1248  }
1249  }
1250 }
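
From user code the conversion is reached through the constructors above; a sketch (op stands for any assembled mfem::Operator):

    PetscParMatrix shell(MPI_COMM_WORLD, &op, Operator::PETSC_MATSHELL); // matrix-free wrapper
    PetscParMatrix aij(MPI_COMM_WORLD, &op, Operator::PETSC_MATAIJ);     // explicit, potentially expensive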
1251 
1252 void PetscParMatrix::Destroy()
1253 {
1254  if (A != NULL)
1255  {
1256  MPI_Comm comm = MPI_COMM_NULL;
1257  ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
1258  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1259  }
1260  delete X;
1261  delete Y;
1262  X = Y = NULL;
1263 }
1264 
1265 PetscParMatrix::PetscParMatrix(Mat a, bool ref)
1266 {
1267  if (ref)
1268  {
1269  ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
1270  }
1271  Init();
1272  A = a;
1273  height = GetNumRows();
1274  width = GetNumCols();
1275 }
1276 
1277 void PetscParMatrix::SetMat(Mat _A)
1278 {
1279  if (_A == A) { return; }
1280  Destroy();
1281  ierr = PetscObjectReference((PetscObject)_A); PCHKERRQ(_A,ierr);
1282  A = _A;
1283  height = GetNumRows();
1284  width = GetNumCols();
1285 }
1286 
1287 // Computes y = alpha * A * x + beta * y
1288 // or y = alpha * A^T* x + beta * y
1289 static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
1290  bool transpose)
1291 {
1292  PetscErrorCode (*f)(Mat,Vec,Vec);
1293  PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
1294  if (transpose)
1295  {
1296  f = MatMultTranspose;
1297  fadd = MatMultTransposeAdd;
1298  }
1299  else
1300  {
1301  f = MatMult;
1302  fadd = MatMultAdd;
1303  }
1304  if (a != 0.)
1305  {
1306  if (b == 1.)
1307  {
1308  ierr = VecScale(X,a); PCHKERRQ(A,ierr);
1309  ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1310  ierr = VecScale(X,1./a); PCHKERRQ(A,ierr);
1311  }
1312  else if (b != 0.)
1313  {
1314  ierr = VecScale(X,a); PCHKERRQ(A,ierr);
1315  ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1316  ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1317  ierr = VecScale(X,1./a); PCHKERRQ(A,ierr);
1318  }
1319  else
1320  {
1321  ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1322  if (a != 1.)
1323  {
1324  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1325  }
1326  }
1327  }
1328  else
1329  {
1330  if (b == 1.)
1331  {
1332  // do nothing
1333  }
1334  else if (b != 0.)
1335  {
1336  ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1337  }
1338  else
1339  {
1340  ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1341  }
1342  }
1343 }
1344 
1345 void PetscParMatrix::MakeRef(const PetscParMatrix &master)
1346 {
1347  ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
1348  Destroy();
1349  Init();
1350  A = master.A;
1351  height = master.height;
1352  width = master.width;
1353 }
1354 
1355 PetscParVector * PetscParMatrix::GetX() const
1356 {
1357  if (!X)
1358  {
1359  MFEM_VERIFY(A,"Mat not present");
1360  X = new PetscParVector(*this,false); PCHKERRQ(A,ierr);
1361  }
1362  return X;
1363 }
1364 
1365 PetscParVector * PetscParMatrix::GetY() const
1366 {
1367  if (!Y)
1368  {
1369  MFEM_VERIFY(A,"Mat not present");
1370  Y = new PetscParVector(*this,true); PCHKERRQ(A,ierr);
1371  }
1372  return Y;
1373 }
1374 
1375 PetscParMatrix * PetscParMatrix::Transpose(bool action)
1376 {
1377  Mat B;
1378  if (action)
1379  {
1380  ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1381  }
1382  else
1383  {
1384  ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1385  }
1386  return new PetscParMatrix(B,false);
1387 }
1388 
1389 void PetscParMatrix::operator*=(double s)
1390 {
1391  ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1392 }
1393 
1394 void PetscParMatrix::Mult(double a, const Vector &x, double b, Vector &y) const
1395 {
1396  MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
1397  << ", expected size = " << Width());
1398  MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
1399  << ", expected size = " << Height());
1400 
1401  PetscParVector *XX = GetX();
1402  PetscParVector *YY = GetY();
1403  XX->PlaceArray(x.GetData());
1404  YY->PlaceArray(y.GetData());
1405  MatMultKernel(A,a,XX->x,b,YY->x,false);
1406  XX->ResetArray();
1407  YY->ResetArray();
1408 }
1409 
1410 void PetscParMatrix::MultTranspose(double a, const Vector &x, double b,
1411  Vector &y) const
1412 {
1413  MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
1414  << ", expected size = " << Height());
1415  MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
1416  << ", expected size = " << Width());
1417 
1418  PetscParVector *XX = GetX();
1419  PetscParVector *YY = GetY();
1420  YY->PlaceArray(x.GetData());
1421  XX->PlaceArray(y.GetData());
1422  MatMultKernel(A,a,YY->x,b,XX->x,true);
1423  XX->ResetArray();
1424  YY->ResetArray();
1425 }
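
A sketch of the action interface (given an assembled PetscParMatrix A; plain Mult(x,y) from the Operator base class corresponds to a = 1, b = 0):

    Vector x(A.Width()), y(A.Height());
    x = 1.0; y = 0.0;
    A.Mult(x, y);           // y = A*x
    A.Mult(0.5, x, 1.0, y); // y = 0.5*A*x + y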
1426 
1427 void PetscParMatrix::Print(const char *fname, bool binary) const
1428 {
1429  if (fname)
1430  {
1431  PetscViewer view;
1432 
1433  if (binary)
1434  {
1435  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
1436  FILE_MODE_WRITE,&view);
1437  }
1438  else
1439  {
1440  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
1441  }
1442  PCHKERRQ(A,ierr);
1443  ierr = MatView(A,view); PCHKERRQ(A,ierr);
1444  ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
1445  }
1446  else
1447  {
1448  ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1449  }
1450 }
1451 
1452 void PetscParMatrix::ScaleRows(const Vector & s)
1453 {
1454  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
1455  << ", expected size = " << Height());
1456 
1457  PetscParVector *YY = GetY();
1458  YY->PlaceArray(s.GetData());
1459  ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
1460  YY->ResetArray();
1461 }
1462 
1463 void PetscParMatrix::ScaleCols(const Vector & s)
1464 {
1465  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
1466  << ", expected size = " << Width());
1467 
1468  PetscParVector *XX = GetX();
1469  XX->PlaceArray(s.GetData());
1470  ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
1471  XX->ResetArray();
1472 }
1473 
1474 void PetscParMatrix::Shift(double s)
1475 {
1476  ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
1477 }
1478 
1479 void PetscParMatrix::Shift(const Vector & s)
1480 {
1481  // for matrices with square diagonal blocks only
1482  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
1483  << ", expected size = " << Height());
1484  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
1485  << ", expected size = " << Width());
1486 
1487  PetscParVector *XX = GetX();
1488  XX->PlaceArray(s.GetData());
1489  ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
1490  XX->ResetArray();
1491 }
1492 
1493 PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
1494  PetscParMatrix *P)
1495 {
1496  MFEM_VERIFY(A->Width() == P->Height(),
1497  "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
1498  " differs from number of local rows of P " << P->Height());
1499  MFEM_VERIFY(A->Height() == R->Width(),
1500  "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
1501  " differs from number of local cols of R " << R->Width());
1502  Mat B;
1503  ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
1504  PCHKERRQ(*R,ierr);
1505  return new PetscParMatrix(B);
1506 }
1507 
1508 PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
1509 {
1510  Mat pA = *A,pP = *P,pRt = *Rt;
1511  Mat B;
1512  PetscBool Aismatis,Pismatis,Rtismatis;
1513 
1514  MFEM_VERIFY(A->Width() == P->Height(),
1515  "Petsc RAP: Number of local cols of A " << A->Width() <<
1516  " differs from number of local rows of P " << P->Height());
1517  MFEM_VERIFY(A->Height() == Rt->Height(),
1518  "Petsc RAP: Number of local rows of A " << A->Height() <<
1519  " differs from number of local rows of Rt " << Rt->Height());
1520  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
1521  PCHKERRQ(pA,ierr);
1522  ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
1523  PCHKERRQ(pA,ierr);
1524  ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
1525  PCHKERRQ(pA,ierr);
1526  if (Aismatis &&
1527  Pismatis &&
1528  Rtismatis) // handle special case (this code will eventually go into PETSc)
1529  {
1530  Mat lA,lP,lB,lRt;
1531  ISLocalToGlobalMapping cl2gP,cl2gRt;
1532  PetscInt rlsize,clsize,rsize,csize;
1533 
1534  ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
1535  ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
1536  ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
1537  ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
1538  ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
1539  ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
1540  ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
1541  ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
1542  ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
1543  ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
1544  ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
1545  ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
1546  ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
1547  if (lRt == lP)
1548  {
1549  ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
1550  PCHKERRQ(lA,ierr);
1551  }
1552  else
1553  {
1554  Mat lR;
1555  ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
1556  ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
1557  PCHKERRQ(lRt,ierr);
1558  ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
1559  }
1560 
1561  // attach lRt matrix to the subdomain local matrix
1562  // it may be used if markers on vdofs have to be mapped on
1563  // subdomain true dofs
1564  {
1565  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
1566  ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
1567  (*vmatsl2l)[0] = lRt;
1568 
1569  PetscContainer c;
1570  ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
1571  PCHKERRQ(B,ierr);
1572  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1573  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1574  PCHKERRQ(c,ierr);
1575  ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
1576  PCHKERRQ(B,ierr);
1577  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1578  }
1579 
1580  // Set local problem
1581  ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
1582  ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
1583  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
1584  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
1585  }
1586  else // it raises an error if the PtAP is not supported in PETSc
1587  {
1588  if (pP == pRt)
1589  {
1590  ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
1591  PCHKERRQ(pA,ierr);
1592  }
1593  else
1594  {
1595  Mat pR;
1596  ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
1597  ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
1598  PCHKERRQ(pRt,ierr);
1599  ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
1600  }
1601  }
1602  return new PetscParMatrix(B);
1603 }
1604 
1605 PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
1606 {
1607  PetscParMatrix *out = RAP(P,A,P);
1608  return out;
1609 }
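
A sketch forming a Galerkin product with the helpers above (A and P are PetscParMatrix objects of compatible size):

    PetscParMatrix *Ac = RAP(&A, &P); // Ac = P^T A P
    // ... use Ac ...
    delete Ac;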
1610 
1611 PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
1612 {
1613  PetscParMatrix *out,*A;
1614 #if defined(PETSC_HAVE_HYPRE)
1615  A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
1616 #else
1617  A = new PetscParMatrix(hA);
1618 #endif
1619  out = RAP(P,A,P);
1620  delete A;
1621  return out;
1622 }
1623 
1624 
1625 PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
1626 {
1627  Mat AB;
1628 
1629  ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
1630  CCHKERRQ(A->GetComm(),ierr);
1631  return new PetscParMatrix(AB);
1632 }
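
And the matrix-matrix product (sketch, same assumptions as above):

    PetscParMatrix *C = ParMult(&A, &B); // C = A*B
    delete C;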
1633 
1634 PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
1635 {
1636  Mat Ae;
1637 
1638  PetscParVector dummy(GetComm(),0);
1639  ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
1640  EliminateRowsCols(rows_cols,dummy,dummy);
1641  ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
1642  return new PetscParMatrix(Ae);
1643 }
1644 
1645 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
1646  const HypreParVector &X,
1647  HypreParVector &B,
1648  double diag)
1649 {
1650  MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
1651 }
1652 
1653 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
1654  const PetscParVector &X,
1655  PetscParVector &B,
1656  double diag)
1657 {
1658  PetscInt M,N;
1659  ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
1660  MFEM_VERIFY(M == N,"Rectangular case unsupported");
1661 
1662  // TODO: what if a diagonal term is not present?
1663  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
1664 
1665  // rows need to be in global numbering
1666  PetscInt rst;
1667  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
1668 
1669  IS dir;
1670  ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
1671  if (!X.GlobalSize() && !B.GlobalSize())
1672  {
1673  ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
1674  }
1675  else
1676  {
1677  ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
1678  }
1679  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
1680 }
1681 
1682 void PetscParMatrix::EliminateRows(const Array<int> &rows)
1683 {
1684  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
1685 
1686  // rows need to be in global numbering
1687  PetscInt rst;
1688  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
1689 
1690  IS dir;
1691  ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
1692  ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
1693  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
1694 }
1695 
1696 Mat PetscParMatrix::ReleaseMat(bool dereference)
1697 {
1698 
1699  Mat B = A;
1700  if (dereference)
1701  {
1702  MPI_Comm comm = GetComm();
1703  ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
1704  }
1705  A = NULL;
1706  return B;
1707 }
1708 
1709 Operator::Type PetscParMatrix::GetType() const
1710 {
1711  PetscBool ok;
1712  MFEM_VERIFY(A, "no associated PETSc Mat object");
1713  PetscObject oA = (PetscObject)(this->A);
1714  // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to -> PETSC_MATAIJ
1715  ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
1716  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
1717  ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
1718  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
1719  ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
1720  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
1721  ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
1722  if (ok == PETSC_TRUE) { return PETSC_MATIS; }
1723  ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
1724  if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
1725  ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
1726  if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
1727 #if defined(PETSC_HAVE_HYPRE)
1728  ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
1729  if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
1730 #endif
1731  return PETSC_MATGENERIC;
1732 }
1733 
1734 void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
1735  const Array<int> &ess_dof_list,
1736  const Vector &X, Vector &B)
1737 {
1738  const PetscScalar *array;
1739  Mat pA = const_cast<PetscParMatrix&>(A);
1740 
1741  // B -= Ae*X
1742  Ae.Mult(-1.0, X, 1.0, B);
1743 
1744  Vec diag = const_cast<PetscParVector&>((*A.GetX()));
1745  ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
1746  ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
1747  for (int i = 0; i < ess_dof_list.Size(); i++)
1748  {
1749  int r = ess_dof_list[i];
1750  B(r) = array[r] * X(r);
1751  }
1752  ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
1753 }
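
A sketch of the usual essential-BC workflow built on these helpers (ess_tdof_list, X and B named as in the standard MFEM parallel examples):

    PetscParMatrix *Ae = A.EliminateRowsCols(ess_tdof_list);
    EliminateBC(A, *Ae, ess_tdof_list, X, B); // make B consistent with the eliminated rows
    delete Ae;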
1754 
1755 // PetscSolver methods
1756 
1757 PetscSolver::PetscSolver() : clcustom(false)
1758 {
1759  obj = NULL;
1760  B = X = NULL;
1761  cid = -1;
1762  operatorset = false;
1763  bchandler = NULL;
1764  private_ctx = NULL;
1765 }
1766 
1767 PetscSolver::~PetscSolver()
1768 {
1769  delete B;
1770  delete X;
1771  FreePrivateContext();
1772 }
1773 
1774 void PetscSolver::SetTol(double tol)
1775 {
1776  SetRelTol(tol);
1777 }
1778 
1779 void PetscSolver::SetRelTol(double tol)
1780 {
1781  if (cid == KSP_CLASSID)
1782  {
1783  KSP ksp = (KSP)obj;
1784  ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
1785  }
1786  else if (cid == SNES_CLASSID)
1787  {
1788  SNES snes = (SNES)obj;
1789  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
1790  PETSC_DEFAULT);
1791  }
1792  else if (cid == TS_CLASSID)
1793  {
1794  TS ts = (TS)obj;
1795  ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
1796  }
1797  else
1798  {
1799  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1800  }
1801  PCHKERRQ(obj,ierr);
1802 }
1803 
1804 void PetscSolver::SetAbsTol(double tol)
1805 {
1806  if (cid == KSP_CLASSID)
1807  {
1808  KSP ksp = (KSP)obj;
1809  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
1810  }
1811  else if (cid == SNES_CLASSID)
1812  {
1813  SNES snes = (SNES)obj;
1814  ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1815  PETSC_DEFAULT);
1816  }
1817  else if (cid == TS_CLASSID)
1818  {
1819  TS ts = (TS)obj;
1820  ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
1821  }
1822  else
1823  {
1824  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1825  }
1826  PCHKERRQ(obj,ierr);
1827 }
1828 
1829 void PetscSolver::SetMaxIter(int max_iter)
1830 {
1831  if (cid == KSP_CLASSID)
1832  {
1833  KSP ksp = (KSP)obj;
1834  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1835  max_iter);
1836  }
1837  else if (cid == SNES_CLASSID)
1838  {
1839  SNES snes = (SNES)obj;
1840  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1841  max_iter,PETSC_DEFAULT);
1842  }
1843  else if (cid == TS_CLASSID)
1844  {
1845  TS ts = (TS)obj;
1846  ierr = TSSetMaxSteps(ts,max_iter);
1847  }
1848  else
1849  {
1850  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1851  }
1852  PCHKERRQ(obj,ierr);
1853 }
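
A configuration sketch using the setters above (PetscLinearSolver, defined later in this file, is a KSP-based PetscSolver; the tolerances are illustrative):

    PetscLinearSolver solver(MPI_COMM_WORLD);
    solver.SetOperator(A);   // A is a PetscParMatrix
    solver.SetRelTol(1e-8);
    solver.SetAbsTol(0.0);
    solver.SetMaxIter(200);
    solver.SetPrintLevel(1);
    solver.Mult(B, X);       // solve A*X = B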
1854 
1855 
1856 void PetscSolver::SetPrintLevel(int plev)
1857 {
1858  typedef PetscErrorCode (*myPetscFunc)(void**);
1859  PetscViewerAndFormat *vf = NULL;
1860  PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
1861 
1862  if (plev > 0)
1863  {
1864  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
1865  PCHKERRQ(obj,ierr);
1866  }
1867  if (cid == KSP_CLASSID)
1868  {
1869  // there are many other options, see the function KSPSetFromOptions() in
1870  // src/ksp/ksp/interface/itcl.c
1871  typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
1872  KSP ksp = (KSP)obj;
1873  if (plev >= 0)
1874  {
1875  ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
1876  }
1877  if (plev == 1)
1878  {
1879  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
1880  (myPetscFunc)PetscViewerAndFormatDestroy);
1881  PCHKERRQ(ksp,ierr);
1882  }
1883  else if (plev > 1)
1884  {
1885  ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
1886  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
1887  (myPetscFunc)PetscViewerAndFormatDestroy);
1888  PCHKERRQ(ksp,ierr);
1889  if (plev > 2)
1890  {
1891  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
1892  PCHKERRQ(viewer,ierr);
1893  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
1894  (myPetscFunc)PetscViewerAndFormatDestroy);
1895  PCHKERRQ(ksp,ierr);
1896  }
1897  }
1898  }
1899  else if (cid == SNES_CLASSID)
1900  {
1901  typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
1902  SNES snes = (SNES)obj;
1903  if (plev >= 0)
1904  {
1905  ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
1906  }
1907  if (plev > 0)
1908  {
1909  ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
1910  (myPetscFunc)PetscViewerAndFormatDestroy);
1911  PCHKERRQ(snes,ierr);
1912  }
1913  }
1914  else if (cid == TS_CLASSID)
1915  {
1916  TS ts = (TS)obj;
1917  if (plev >= 0)
1918  {
1919  ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
1920  }
1921  }
1922  else
1923  {
1924  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1925  }
1926 }
1927 
1928 MPI_Comm PetscSolver::GetComm() const
1929 {
1930  return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
1931 }
1932 
1933 void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
1934 {
1935  __mfem_monitor_ctx *monctx;
1936  ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
1937  monctx->solver = this;
1938  monctx->monitor = ctx;
1939  if (cid == KSP_CLASSID)
1940  {
1941  ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
1942  __mfem_monitor_ctx_destroy);
1943  PCHKERRQ(obj,ierr);
1944  }
1945  else if (cid == SNES_CLASSID)
1946  {
1947  ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
1948  __mfem_monitor_ctx_destroy);
1949  PCHKERRQ(obj,ierr);
1950  }
1951  else if (cid == TS_CLASSID)
1952  {
1953  ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
1954  __mfem_monitor_ctx_destroy);
1955  PCHKERRQ(obj,ierr);
1956  }
1957  else
1958  {
1959  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
1960  }
1961 }
1962 
1963 void PetscSolver::SetBCHandler(PetscBCHandler *bch)
1964 {
1965  bchandler = bch;
1966  if (cid == SNES_CLASSID)
1967  {
1968  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
1969  snes_ctx->bchandler = bchandler;
1970  }
1971  else if (cid == TS_CLASSID)
1972  {
1973  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
1974  ts_ctx->bchandler = bchandler;
1975  }
1976  else
1977  {
1978  MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
1979  }
1980 }
1981 
1982 void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
1983 {
1984  PC pc = NULL;
1985  if (cid == TS_CLASSID)
1986  {
1987  SNES snes;
1988  KSP ksp;
1989 
1990  ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
1991  ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
1992  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
1993  }
1994  else if (cid == SNES_CLASSID)
1995  {
1996  KSP ksp;
1997 
1998  ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
1999  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2000  }
2001  else if (cid == KSP_CLASSID)
2002  {
2003  ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
2004  }
2005  else if (cid == PC_CLASSID)
2006  {
2007  pc = (PC)obj;
2008  }
2009  else
2010  {
2011  MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
2012  }
2013  if (factory)
2014  {
2015  ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2016  }
2017  else
2018  {
2019  ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2020  }
2021 }
2022 
2023 void PetscSolver::Customize(bool customize) const
2024 {
2025  if (!customize) { clcustom = true; }
2026  if (!clcustom)
2027  {
2028  if (cid == PC_CLASSID)
2029  {
2030  PC pc = (PC)obj;
2031  ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2032  }
2033  else if (cid == KSP_CLASSID)
2034  {
2035  KSP ksp = (KSP)obj;
2036  ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2037  }
2038  else if (cid == SNES_CLASSID)
2039  {
2040  SNES snes = (SNES)obj;
2041  ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2042  }
2043  else if (cid == TS_CLASSID)
2044  {
2045  TS ts = (TS)obj;
2046  ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2047  }
2048  else
2049  {
2050  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2051  }
2052  }
2053  clcustom = true;
2054 }
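
// Customize() defers to the PETSc options database, so a solver created with
// an options prefix can be reconfigured at run time without recompiling; a
// sketch (the flags are standard PETSc options):
//
//    PetscLinearSolver solver(MPI_COMM_WORLD, "sys1_");
//    // ... SetOperator(), Mult(), etc.; then run the application with, e.g.:
//    //   ./app -sys1_ksp_type gmres -sys1_ksp_rtol 1e-10 -sys1_pc_type jacobi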
2055 
2056 int PetscSolver::GetConverged()
2057 {
2058  if (cid == KSP_CLASSID)
2059  {
2060  KSP ksp = (KSP)obj;
2061  KSPConvergedReason reason;
2062  ierr = KSPGetConvergedReason(ksp,&reason);
2063  PCHKERRQ(ksp,ierr);
2064  return reason > 0 ? 1 : 0;
2065  }
2066  else if (cid == SNES_CLASSID)
2067  {
2068  SNES snes = (SNES)obj;
2069  SNESConvergedReason reason;
2070  ierr = SNESGetConvergedReason(snes,&reason);
2071  PCHKERRQ(snes,ierr);
2072  return reason > 0 ? 1 : 0;
2073  }
2074  else if (cid == TS_CLASSID)
2075  {
2076  TS ts = (TS)obj;
2077  TSConvergedReason reason;
2078  ierr = TSGetConvergedReason(ts,&reason);
2079  PCHKERRQ(ts,ierr);
2080  return reason > 0 ? 1 : 0;
2081  }
2082  else
2083  {
2084  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2085  return -1;
2086  }
2087 }
2088 
2089 int PetscSolver::GetNumIterations()
2090 {
2091  if (cid == KSP_CLASSID)
2092  {
2093  KSP ksp = (KSP)obj;
2094  PetscInt its;
2095  ierr = KSPGetIterationNumber(ksp,&its);
2096  PCHKERRQ(ksp,ierr);
2097  return its;
2098  }
2099  else if (cid == SNES_CLASSID)
2100  {
2101  SNES snes = (SNES)obj;
2102  PetscInt its;
2103  ierr = SNESGetIterationNumber(snes,&its);
2104  PCHKERRQ(snes,ierr);
2105  return its;
2106  }
2107  else if (cid == TS_CLASSID)
2108  {
2109  TS ts = (TS)obj;
2110  PetscInt its;
2111  ierr = TSGetStepNumber(ts,&its);
2112  PCHKERRQ(ts,ierr);
2113  return its;
2114  }
2115  else
2116  {
2117  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2118  return -1;
2119  }
2120 }
2121 
2122 double PetscSolver::GetFinalNorm()
2123 {
2124  if (cid == KSP_CLASSID)
2125  {
2126  KSP ksp = (KSP)obj;
2127  PetscReal norm;
2128  ierr = KSPGetResidualNorm(ksp,&norm);
2129  PCHKERRQ(ksp,ierr);
2130  return norm;
2131  }
2132  else if (cid == SNES_CLASSID)
2133  {
2134  SNES snes = (SNES)obj;
2135  PetscReal norm;
2136  ierr = SNESGetFunctionNorm(snes,&norm);
2137  PCHKERRQ(snes,ierr);
2138  return norm;
2139  }
2140  else
2141  {
2142  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2143  return PETSC_MAX_REAL;
2144  }
2145 }
2146 
2147 void PetscSolver::CreatePrivateContext()
2148 {
2149  FreePrivateContext();
2150  if (cid == SNES_CLASSID)
2151  {
2152  __mfem_snes_ctx *snes_ctx;
2153  ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2154  snes_ctx->op = NULL;
2155  snes_ctx->bchandler = NULL;
2156  snes_ctx->work = NULL;
2157  snes_ctx->jacType = Operator::PETSC_MATAIJ;
2158  private_ctx = (void*) snes_ctx;
2159  }
2160  else if (cid == TS_CLASSID)
2161  {
2162  __mfem_ts_ctx *ts_ctx;
2163  ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2164  ts_ctx->op = NULL;
2165  ts_ctx->bchandler = NULL;
2166  ts_ctx->work = NULL;
2167  ts_ctx->work2 = NULL;
2168  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2169  ts_ctx->cached_ijacstate = -1;
2170  ts_ctx->cached_rhsjacstate = -1;
2171  ts_ctx->cached_splits_xstate = -1;
2172  ts_ctx->cached_splits_xdotstate = -1;
2173  ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2174  ts_ctx->jacType = Operator::PETSC_MATAIJ;
2175  private_ctx = (void*) ts_ctx;
2176  }
2177 }
2178 
2179 void PetscSolver::FreePrivateContext()
2180 {
2181  if (!private_ctx) { return; }
2182  // free private context's owned objects
2183  if (cid == SNES_CLASSID)
2184  {
2185  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2186  delete snes_ctx->work;
2187  }
2188  else if (cid == TS_CLASSID)
2189  {
2190  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2191  delete ts_ctx->work;
2192  delete ts_ctx->work2;
2193  }
2194  ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2195 }
2196 
2197 // PetscBCHandler methods
2198 
2199 PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2200  enum PetscBCHandler::Type _type)
2201  : bctype(_type), setup(false), eval_t(0.0),
2202  eval_t_cached(std::numeric_limits<double>::min())
2203 {
2204  SetTDofs(ess_tdof_list);
2205 }
2206 
2207 void PetscBCHandler::SetTDofs(Array<int>& list)
2208 {
2209  ess_tdof_list.SetSize(list.Size());
2210  ess_tdof_list.Assign(list);
2211  setup = false;
2212 }
2213 
2214 void PetscBCHandler::SetUp(PetscInt n)
2215 {
2216  if (setup) { return; }
2217  if (bctype == CONSTANT)
2218  {
2219  eval_g.SetSize(n);
2220  this->Eval(eval_t,eval_g);
2221  eval_t_cached = eval_t;
2222  }
2223  else if (bctype == TIME_DEPENDENT)
2224  {
2225  eval_g.SetSize(n);
2226  }
2227  setup = true;
2228 }
2229 
2230 void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2231 {
2232  (*this).SetUp(x.Size());
2233  y = x;
2234  if (bctype == ZERO)
2235  {
2236  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2237  {
2238  y[ess_tdof_list[i]] = 0.0;
2239  }
2240  }
2241  else
2242  {
2243  if (bctype != CONSTANT && eval_t != eval_t_cached)
2244  {
2245  Eval(eval_t,eval_g);
2246  eval_t_cached = eval_t;
2247  }
2248  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2249  {
2250  y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2251  }
2252  }
2253 }
2254 
2255 void PetscBCHandler::ApplyBC(Vector &x)
2256 {
2257  (*this).SetUp(x.Size());
2258  if (bctype == ZERO)
2259  {
2260  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2261  {
2262  x[ess_tdof_list[i]] = 0.0;
2263  }
2264  }
2265  else
2266  {
2267  if (bctype != CONSTANT && eval_t != eval_t_cached)
2268  {
2269  Eval(eval_t,eval_g);
2270  eval_t_cached = eval_t;
2271  }
2272  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2273  {
2274  x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2275  }
2276  }
2277 }
2278 
2279 void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2280 {
2281  (*this).SetUp(x.Size());
2282  if (bctype == ZERO)
2283  {
2284  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2285  {
2286  y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2287  }
2288  }
2289  else
2290  {
2291  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2292  {
2293  y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2294  }
2295  }
2296 }
2297 
2298 void PetscBCHandler::Zero(Vector &x)
2299 {
2300  (*this).SetUp(x.Size());
2301  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2302  {
2303  x[ess_tdof_list[i]] = 0.0;
2304  }
2305 }
2306 
2307 void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2308 {
2309  (*this).SetUp(x.Size());
2310  y = x;
2311  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2312  {
2313  y[ess_tdof_list[i]] = 0.0;
2314  }
2315 }
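
// A minimal time-dependent boundary handler sketch, assuming the virtual
// Eval(t,g) interface declared in petsc.hpp (MyBC and the variable names are
// illustrative):
//
//    class MyBC : public PetscBCHandler
//    {
//    public:
//       MyBC(Array<int> &ess) : PetscBCHandler(ess, TIME_DEPENDENT) {}
//       virtual void Eval(double t, Vector &g) { g = std::sin(t); }
//    };
//
//    MyBC bc(ess_tdof_list);
//    solver.SetBCHandler(&bc); // nonlinear or time-dependent solvers only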
2316 
2317 // PetscLinearSolver methods
2318 
2319 PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2320  bool wrapin)
2321  : PetscSolver(), Solver(), wrap(wrapin)
2322 {
2323  KSP ksp;
2324  ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2325  obj = (PetscObject)ksp;
2326  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2327  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2328 }
2329 
2330 PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2331  const std::string &prefix)
2332  : PetscSolver(), Solver(), wrap(false)
2333 {
2334  KSP ksp;
2335  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2336  obj = (PetscObject)ksp;
2337  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2338  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2339  SetOperator(A);
2340 }
2341 
2342 PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2343  const std::string &prefix)
2344  : PetscSolver(), Solver(), wrap(wrapin)
2345 {
2346  KSP ksp;
2347  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2348  obj = (PetscObject)ksp;
2349  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2350  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2351  SetOperator(A);
2352 }
2353 
2354 void PetscLinearSolver::SetOperator(const Operator &op)
2355 {
2356  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2357  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2358  (dynamic_cast<const PetscParMatrix *>(&op));
2359  const Operator *oA = dynamic_cast<const Operator *>(&op);
2360 
2361  // update base classes: Operator, Solver, PetscLinearSolver
2362  bool delete_pA = false;
2363  if (!pA)
2364  {
2365  if (hA)
2366  {
2367  // Create a MATSHELL object or convert into a format suitable for constructing preconditioners
2368  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2369  delete_pA = true;
2370  }
2371  else if (oA) // fallback to general operator
2372  {
2373  // Create a MATSHELL or MATNEST (if oA is a BlockOperator) object;
2374  // if oA is a BlockOperator, the Operator::Type applies to its subblocks
2375  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2376  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2377  delete_pA = true;
2378  }
2379  }
2380  MFEM_VERIFY(pA, "Unsupported operation!");
2381 
2382  // Set operators into PETSc KSP
2383  KSP ksp = (KSP)obj;
2384  Mat A = pA->A;
2385  if (operatorset)
2386  {
2387  Mat C;
2388  PetscInt nheight,nwidth,oheight,owidth;
2389 
2390  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2391  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2392  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2393  if (nheight != oheight || nwidth != owidth)
2394  {
2395  // reinit without destroying the KSP
2396  // communicator remains the same
2397  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2398  delete X;
2399  delete B;
2400  X = B = NULL;
2401  wrap = false;
2402  }
2403  }
2404  ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2405 
2406  // Update PetscSolver
2407  operatorset = true;
2408 
2409  // Update the Operator fields.
2410  height = pA->Height();
2411  width = pA->Width();
2412 
2413  if (delete_pA) { delete pA; }
2414 }
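
// The 'wrap' flag chosen at construction drives the conversion above; a
// minimal sketch (A is a HypreParMatrix assembled elsewhere):
//
//    PetscLinearSolver s1(A, true);  // wrap: MATSHELL, A is applied
//                                    // matrix-free (only shell/user PCs)
//    PetscLinearSolver s2(A, false); // convert: MATAIJ, the full set of
//                                    // PETSc preconditioners is available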
2415 
2416 void PetscLinearSolver::SetOperator(const Operator &op, const Operator &pop)
2417 {
2418  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2419  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2420  (dynamic_cast<const PetscParMatrix *>(&op));
2421  const Operator *oA = dynamic_cast<const Operator *>(&op);
2422 
2423  PetscParMatrix *ppA = const_cast<PetscParMatrix *>
2424  (dynamic_cast<const PetscParMatrix *>(&pop));
2425  const Operator *poA = dynamic_cast<const Operator *>(&pop);
2426 
2427  // Convert Operator for linear system
2428  bool delete_pA = false;
2429  if (!pA)
2430  {
2431  if (hA)
2432  {
2433  // Create a MATSHELL object or convert into a format suitable for constructing preconditioners
2434  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2435  delete_pA = true;
2436  }
2437  else if (oA) // fallback to general operator
2438  {
2439  // Create a MATSHELL or MATNEST (if oA is a BlockOperator) object;
2440  // if oA is a BlockOperator, the Operator::Type applies to its subblocks
2441  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2442  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2443  delete_pA = true;
2444  }
2445  }
2446  MFEM_VERIFY(pA, "Unsupported operation!");
2447 
2448  // Convert Operator to be preconditioned
2449  bool delete_ppA = false;
2450  if (!ppA)
2451  {
2452  if (oA == poA && !wrap) // Same operator, already converted
2453  {
2454  ppA = pA;
2455  }
2456  else
2457  {
2458  ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
2459  delete_ppA = true;
2460  }
2461  }
2462  MFEM_VERIFY(ppA, "Unsupported operation!");
2463 
2464  // Set operators into PETSc KSP
2465  KSP ksp = (KSP)obj;
2466  Mat A = pA->A;
2467  Mat P = ppA->A;
2468  if (operatorset)
2469  {
2470  Mat C;
2471  PetscInt nheight,nwidth,oheight,owidth;
2472 
2473  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2474  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2475  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2476  if (nheight != oheight || nwidth != owidth)
2477  {
2478  // reinit without destroying the KSP
2479  // communicator remains the same
2480  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2481  delete X;
2482  delete B;
2483  X = B = NULL;
2484  wrap = false;
2485  }
2486  }
2487  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
2488 
2489  // Update PetscSolver
2490  operatorset = true;
2491 
2492  // Update the Operator fields.
2493  height = pA->Height();
2494  width = pA->Width();
2495 
2496  if (delete_pA) { delete pA; }
2497  if (delete_ppA) { delete ppA; }
2498 }
2499 
2500 void PetscLinearSolver::SetPreconditioner(Solver &precond)
2501 {
2502  KSP ksp = (KSP)obj;
2503 
2504  // Preserve Amat if already set
2505  Mat A = NULL;
2506  PetscBool amat;
2507  ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
2508  if (amat)
2509  {
2510  ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
2511  ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
2512  }
2513  PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
2514  if (ppc)
2515  {
2516  ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
2517  }
2518  else
2519  {
2520  // wrap the Solver action;
2521  // the Solver is assumed to be already set up and
2522  // ownership of precond is not transferred,
2523  // consistent with MFEM's other linear solvers
2524  PC pc;
2525  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
2526  ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
2527  }
2528  if (A)
2529  {
2530  Mat P;
2531 
2532  ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
2533  ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
2534  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
2535  ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
2536  ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
2537  }
2538 }
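
// A sketch of wrapping an MFEM solver as a shell preconditioner; ownership
// of 'amg' stays with the caller, as noted above (variable names assumed):
//
//    HypreBoomerAMG amg;           // any mfem::Solver can be used here
//    amg.SetOperator(A);           // A: a HypreParMatrix
//    PetscPCGSolver pcg(A, true);  // wrap A as MATSHELL
//    pcg.SetPreconditioner(amg);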
2539 
2540 void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
2541 {
2542  KSP ksp = (KSP)obj;
2543 
2544  if (!B || !X)
2545  {
2546  Mat pA = NULL;
2547  ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
2548  if (!B)
2549  {
2550  PetscParMatrix A(pA, true);
2551  B = new PetscParVector(A, true, false);
2552  }
2553  if (!X)
2554  {
2555  PetscParMatrix A(pA, true);
2556  X = new PetscParVector(A, false, false);
2557  }
2558  }
2559  B->PlaceArray(b.GetData());
2560  X->PlaceArray(x.GetData());
2561 
2562  Customize();
2563 
2564  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2565  PCHKERRQ(ksp, ierr);
2566 
2567  // Solve the system.
2568  if (trans)
2569  {
2570  ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
2571  }
2572  else
2573  {
2574  ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
2575  }
2576  B->ResetArray();
2577  X->ResetArray();
2578 }
2579 
2580 void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
2581 {
2582  (*this).MultKernel(b,x,false);
2583 }
2584 
2585 void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
2586 {
2587  (*this).MultKernel(b,x,true);
2588 }
2589 
2590 PetscLinearSolver::~PetscLinearSolver()
2591 {
2592  MPI_Comm comm;
2593  KSP ksp = (KSP)obj;
2594  ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
2595  ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
2596 }
2597 
2598 // PetscPCGSolver methods
2599 
2600 PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix)
2601  : PetscLinearSolver(comm,prefix)
2602 {
2603  KSP ksp = (KSP)obj;
2604  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2605  // use the natural norm to obtain a textbook PCG
2606  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2607 }
2608 
2609 PetscPCGSolver::PetscPCGSolver(PetscParMatrix& A, const std::string &prefix)
2610  : PetscLinearSolver(A,prefix)
2611 {
2612  KSP ksp = (KSP)obj;
2613  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2614  // use the natural norm to obtain a textbook PCG
2615  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2616 }
2617 
2618 PetscPCGSolver::PetscPCGSolver(HypreParMatrix& A, bool wrap,
2619  const std::string &prefix)
2620  : PetscLinearSolver(A,wrap,prefix)
2621 {
2622  KSP ksp = (KSP)obj;
2623  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2624  // use the natural norm to obtain a textbook PCG
2625  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2626 }
2627 
2628 // PetscPreconditioner methods
2629 
2630 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
2631  const std::string &prefix)
2632  : PetscSolver(), Solver()
2633 {
2634  PC pc;
2635  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
2636  obj = (PetscObject)pc;
2637  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2638  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2639 }
2640 
2641 PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
2642  const string &prefix)
2643  : PetscSolver(), Solver()
2644 {
2645  PC pc;
2646  ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
2647  obj = (PetscObject)pc;
2648  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2649  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2650  SetOperator(A);
2651 }
2652 
2653 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
2654  const string &prefix)
2655  : PetscSolver(), Solver()
2656 {
2657  PC pc;
2658  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
2659  obj = (PetscObject)pc;
2660  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2661  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2662  SetOperator(op);
2663 }
2664 
2665 void PetscPreconditioner::SetOperator(const Operator &op)
2666 {
2667  bool delete_pA = false;
2668  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2669  (dynamic_cast<const PetscParMatrix *>(&op));
2670 
2671  if (!pA)
2672  {
2673  const Operator *cop = dynamic_cast<const Operator *>(&op);
2674  pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
2675  delete_pA = true;
2676  }
2677 
2678  // Set operators into PETSc PC
2679  PC pc = (PC)obj;
2680  Mat A = pA->A;
2681  if (operatorset)
2682  {
2683  Mat C;
2684  PetscInt nheight,nwidth,oheight,owidth;
2685 
2686  ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
2687  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2688  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2689  if (nheight != oheight || nwidth != owidth)
2690  {
2691  // reinit without destroying the PC
2692  // communicator remains the same
2693  ierr = PCReset(pc); PCHKERRQ(pc,ierr);
2694  delete X;
2695  delete B;
2696  X = B = NULL;
2697  }
2698  }
2699  ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
2700 
2701  // Update PetscSolver
2702  operatorset = true;
2703 
2704  // Update the Operator fields.
2705  height = pA->Height();
2706  width = pA->Width();
2707 
2708  if (delete_pA) { delete pA; }
2709 }
2710 
2711 void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
2712  bool trans) const
2713 {
2714  PC pc = (PC)obj;
2715 
2716  if (!B || !X)
2717  {
2718  Mat pA = NULL;
2719  ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
2720  if (!B)
2721  {
2722  PetscParMatrix A(pA, true);
2723  B = new PetscParVector(A, true, false);
2724  }
2725  if (!X)
2726  {
2727  PetscParMatrix A(pA, true);
2728  X = new PetscParVector(A, false, false);
2729  }
2730  }
2731  B->PlaceArray(b.GetData());
2732  X->PlaceArray(x.GetData());
2733 
2734  Customize();
2735 
2736  // Apply the preconditioner.
2737  if (trans)
2738  {
2739  ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
2740  }
2741  else
2742  {
2743  ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
2744  }
2745  B->ResetArray();
2746  X->ResetArray();
2747 }
2748 
2749 void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
2750 {
2751  (*this).MultKernel(b,x,false);
2752 }
2753 
2754 void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
2755 {
2756  (*this).MultKernel(b,x,true);
2757 }
2758 
2759 PetscPreconditioner::~PetscPreconditioner()
2760 {
2761  MPI_Comm comm;
2762  PC pc = (PC)obj;
2763  ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
2764  ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
2765 }
2766 
2767 // PetscBDDCSolver methods
2768 
2769 // Coordinates sampling function
2770 static void func_coords(const Vector &x, Vector &y)
2771 {
2772  for (int i = 0; i < x.Size(); i++) { y(i) = x(i); }
2773 }
2774 
2775 void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
2776 {
2777  MPI_Comm comm = PetscObjectComm(obj);
2778 
2779  // get PETSc object
2780  PC pc = (PC)obj;
2781  Mat pA;
2782  ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
2783 
2784  // matrix type should be of type MATIS
2785  PetscBool ismatis;
2786  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
2787  PCHKERRQ(pA,ierr);
2788  MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
2789 
2790  // Check options
2791  ParFiniteElementSpace *fespace = opts.fespace;
2792  if (opts.netflux && !fespace)
2793  {
2794  MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
2795  }
2796 
2797  // Attach default near-null space to local matrices
2798  {
2799  MatNullSpace nnsp;
2800  Mat lA;
2801  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
2802  ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
2803  &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2804  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2805  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2806  }
2807 
2808  // set PETSc PC type to PCBDDC
2809  ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
2810 
2811  PetscInt rst,nl;
2812  ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
2813  ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
2814 
2815  // index sets for fields splitting and coordinates for nodal spaces
2816  IS *fields = NULL;
2817  PetscInt nf = 0;
2818  PetscInt sdim = 0;
2819  PetscReal *coords = NULL;
2820  if (fespace)
2821  {
2822  int vdim = fespace->GetVDim();
2823 
2824  // Ideally, the block size should be set at matrix creation,
2825  // but the MFEM assembly does not allow doing so
2826  if (fespace->GetOrdering() == Ordering::byVDIM)
2827  {
2828  Mat lA;
2829  ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
2830  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
2831  ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
2832  }
2833 
2834  // fields
2835  if (vdim > 1)
2836  {
2837  PetscInt st = rst, bs, inc, nlf;
2838  nf = vdim;
2839  nlf = nl/nf;
2840  ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
2841  if (fespace->GetOrdering() == Ordering::byVDIM)
2842  {
2843  inc = 1;
2844  bs = vdim;
2845  }
2846  else
2847  {
2848  inc = nlf;
2849  bs = 1;
2850  }
2851  for (PetscInt i = 0; i < nf; i++)
2852  {
2853  ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
2854  st += inc;
2855  }
2856  }
2857 
2858  // coordinates
2859  const FiniteElementCollection *fec = fespace->FEColl();
2860  bool h1space = dynamic_cast<const H1_FECollection*>(fec);
2861  if (h1space)
2862  {
2863  ParFiniteElementSpace *fespace_coords = fespace;
2864 
2865  sdim = fespace->GetParMesh()->SpaceDimension();
2866  if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
2867  {
2868  fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
2869  Ordering::byVDIM);
2870  }
2871  VectorFunctionCoefficient coeff_coords(sdim, func_coords);
2872  ParGridFunction gf_coords(fespace_coords);
2873  gf_coords.ProjectCoefficient(coeff_coords);
2874  HypreParVector *hvec_coords = gf_coords.ParallelProject();
2875 
2876  // likely elasticity -> we attach rigid-body modes as near-null space information to the local matrices
2877  if (vdim == sdim)
2878  {
2879  MatNullSpace nnsp;
2880  Mat lA;
2881  Vec pvec_coords,lvec_coords;
2882  ISLocalToGlobalMapping l2g;
2883  PetscSF sf;
2884  PetscLayout rmap;
2885  const PetscInt *gidxs;
2886  PetscInt nleaves;
2887 
2888  ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
2889  hvec_coords->GlobalSize(),hvec_coords->GetData(),&pvec_coords);
2890  CCHKERRQ(comm,ierr);
2891  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
2892  ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
2893  ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
2894  ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
2895  ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
2896  ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
2897  ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
2898  ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
2899  ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
2900  CCHKERRQ(comm,ierr);
2901  ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
2902  {
2903  const PetscScalar *garray;
2904  PetscScalar *larray;
2905 
2906  ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2907  ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2908  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
2909  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
2910  ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2911  ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2912  }
2913  ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
2914  ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
2915  CCHKERRQ(PETSC_COMM_SELF,ierr);
2916  ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
2917  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2918  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2919  ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
2920  }
2921 
2922  // each dof has an associated tuple of coordinates
2923  ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
2924  if (nf > 0)
2925  {
2926  PetscScalar *data = hvec_coords->GetData();
2927  for (PetscInt i = 0; i < nf; i++)
2928  {
2929  const PetscInt *idxs;
2930  PetscInt nn;
2931 
2932  // This also handles the case where fespace is not ordered by VDIM
2933  ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
2934  ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
2935  for (PetscInt j = 0; j < nn; j++)
2936  {
2937  PetscInt idx = idxs[j]-rst;
2938  for (PetscInt d = 0; d < sdim; d++)
2939  {
2940  coords[sdim*idx+d] = data[sdim*j+d];
2941  }
2942  }
2943  ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
2944  }
2945  }
2946  else
2947  {
2948  ierr = PetscMemcpy(coords,hvec_coords->GetData(),nl*sdim*sizeof(PetscReal));
2949  CCHKERRQ(comm,ierr);
2950  }
2951  if (fespace_coords != fespace)
2952  {
2953  delete fespace_coords;
2954  }
2955  delete hvec_coords;
2956  }
2957  }
2958 
2959  // index sets for boundary dofs specification (Essential = dir, Natural = neu)
2960  IS dir = NULL, neu = NULL;
2961 
2962  // Extract l2l matrices
2963  Array<Mat> *l2l = NULL;
2964  if (opts.ess_dof_local || opts.nat_dof_local)
2965  {
2966  PetscContainer c;
2967 
2968  ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
2969  MFEM_VERIFY(c,"Local-to-local PETSc container not present");
2970  ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
2971  }
2972 
2973  // check information about index sets (essential dofs, fields, etc.)
2974 #ifdef MFEM_DEBUG
2975  {
2976  // make sure ess/nat_dof have been collectively set
2977  PetscBool lpr = PETSC_FALSE,pr;
2978  if (opts.ess_dof) { lpr = PETSC_TRUE; }
2979  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
2980  PCHKERRQ(pA,ierr);
2981  MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
2982  lpr = PETSC_FALSE;
2983  if (opts.nat_dof) { lpr = PETSC_TRUE; }
2984  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
2985  PCHKERRQ(pA,ierr);
2986  MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
2987  // make sure fields have been collectively set
2988  PetscInt ms[2],Ms[2];
2989  ms[0] = -nf; ms[1] = nf;
2990  ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
2991  PCHKERRQ(pA,ierr);
2992  MFEM_VERIFY(-Ms[0] == Ms[1],
2993  "number of fields should be the same across processes");
2994  }
2995 #endif
2996 
2997  // boundary sets
2998  if (opts.ess_dof)
2999  {
3000  PetscInt st = opts.ess_dof_local ? 0 : rst;
3001  if (!opts.ess_dof_local)
3002  {
3003  // need to compute the boundary dofs in global ordering
3004  ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
3005  CCHKERRQ(comm,ierr);
3006  ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3007  }
3008  else
3009  {
3010  // need to compute a list for the marked boundary dofs in local ordering
3011  ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3012  CCHKERRQ(comm,ierr);
3013  ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3014  }
3015  }
3016  if (opts.nat_dof)
3017  {
3018  PetscInt st = opts.nat_dof_local ? 0 : rst;
3019  if (!opts.nat_dof_local)
3020  {
3021  // need to compute the boundary dofs in global ordering
3022  ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3023  CCHKERRQ(comm,ierr);
3024  ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3025  }
3026  else
3027  {
3028  // need to compute a list for the marked boundary dofs in local ordering
3029  ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3030  CCHKERRQ(comm,ierr);
3031  ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3032  }
3033  }
3034 
3035  // field splitting
3036  if (fields)
3037  {
3038  ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3039  }
3040  for (PetscInt i = 0; i < nf; i++)
3041  {
3042  ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3043  }
3044  ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3045 
3046  // coordinates
3047  if (coords)
3048  {
3049  ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3050  }
3051  ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3052 
3053  // code for the block size is disabled since we cannot change the matrix
3054  // block size after it has been set up
3055  // int bs = 1;
3056 
3057  // Customize using the finite element space (if any)
3058  if (fespace)
3059  {
3060  const FiniteElementCollection *fec = fespace->FEColl();
3061  bool edgespace, rtspace, h1space;
3062  bool needint = opts.netflux;
3063  bool tracespace, rt_tracespace, edge_tracespace;
3064  int vdim, dim, p;
3065  PetscBool B_is_Trans = PETSC_FALSE;
3066 
3067  ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3068  dim = pmesh->Dimension();
3069  vdim = fespace->GetVDim();
3070  h1space = dynamic_cast<const H1_FECollection*>(fec);
3071  rtspace = dynamic_cast<const RT_FECollection*>(fec);
3072  edgespace = dynamic_cast<const ND_FECollection*>(fec);
3073  edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3074  rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3075  tracespace = edge_tracespace || rt_tracespace;
3076 
3077  p = 1;
3078  if (fespace->GetNE() > 0)
3079  {
3080  if (!tracespace)
3081  {
3082  p = fespace->GetOrder(0);
3083  }
3084  else
3085  {
3086  p = fespace->GetFaceOrder(0);
3087  if (dim == 2) { p++; }
3088  }
3089  }
3090 
3091  if (edgespace) // H(curl)
3092  {
3093  if (dim == 2)
3094  {
3095  needint = true;
3096  if (tracespace)
3097  {
3098  MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3099  " not using auxiliary quadrature");
3100  needint = false;
3101  }
3102  }
3103  else
3104  {
3105  FiniteElementCollection *vfec;
3106  if (tracespace)
3107  {
3108  vfec = new H1_Trace_FECollection(p,dim);
3109  }
3110  else
3111  {
3112  vfec = new H1_FECollection(p,dim);
3113  }
3114  ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3115  ParDiscreteLinearOperator *grad;
3116  grad = new ParDiscreteLinearOperator(vfespace,fespace);
3117  if (tracespace)
3118  {
3119  grad->AddTraceFaceInterpolator(new GradientInterpolator);
3120  }
3121  else
3122  {
3123  grad->AddDomainInterpolator(new GradientInterpolator);
3124  }
3125  grad->Assemble();
3126  grad->Finalize();
3127  HypreParMatrix *hG = grad->ParallelAssemble();
3128  PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3129  delete hG;
3130  delete grad;
3131 
3132  PetscBool conforming = PETSC_TRUE;
3133  if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3134  ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3135  PCHKERRQ(pc,ierr);
3136  delete vfec;
3137  delete vfespace;
3138  delete G;
3139  }
3140  }
3141  else if (rtspace) // H(div)
3142  {
3143  needint = true;
3144  if (tracespace)
3145  {
3146  MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3147  " auxiliary quadrature");
3148  needint = false;
3149  }
3150  }
3151  else if (h1space) // H(grad), only for the vector case
3152  {
3153  if (vdim != dim) { needint = false; }
3154  }
3155 
3156  PetscParMatrix *B = NULL;
3157  if (needint)
3158  {
3159  // Generate a bilinear form in unassembled format, used to compute the
3160  // net flux across subdomain boundaries for H(div) and Elasticity/Stokes,
3161  // and the line integral \int u x n of 2D H(curl) fields
3162  FiniteElementCollection *auxcoll;
3163  if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3164  else
3165  {
3166  if (h1space)
3167  {
3168  auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3169  }
3170  else
3171  {
3172  auxcoll = new L2_FECollection(p,dim);
3173  }
3174  }
3175  ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3176  ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3177 
3178  if (edgespace)
3179  {
3180  if (tracespace)
3181  {
3182  b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3183  }
3184  else
3185  {
3186  b->AddDomainIntegrator(new VectorFECurlIntegrator);
3187  }
3188  }
3189  else if (rtspace)
3190  {
3191  if (tracespace)
3192  {
3193  b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3194  }
3195  else
3196  {
3197  b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3198  }
3199  }
3200  else
3201  {
3202  b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3203  }
3204  b->Assemble();
3205  b->Finalize();
3206  OperatorHandle Bh(Operator::PETSC_MATIS);
3207  b->ParallelAssemble(Bh);
3208  Bh.Get(B);
3209  Bh.SetOperatorOwner(false);
3210 
3211  if (dir) // if essential dofs are present, we need to zero the columns
3212  {
3213  Mat pB = *B;
3214  ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3215  if (!opts.ess_dof_local)
3216  {
3217  ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3218  }
3219  else
3220  {
3221  ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3222  }
3223  B_is_Trans = PETSC_TRUE;
3224  }
3225  delete b;
3226  delete pspace;
3227  delete auxcoll;
3228  }
3229 
3230  if (B)
3231  {
3232  ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3233  }
3234  delete B;
3235  }
3236  ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3237  ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3238 }
3239 
3240 PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3241  const PetscBDDCSolverParams &opts,
3242  const std::string &prefix)
3243  : PetscPreconditioner(A,prefix)
3244 {
3245  BDDCSolverConstructor(opts);
3246  Customize();
3247 }
3248 
3249 PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3250  const PetscBDDCSolverParams &opts,
3251  const std::string &prefix)
3252  : PetscPreconditioner(comm,op,prefix)
3253 {
3254  BDDCSolverConstructor(opts);
3255  Customize();
3256 }
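
// A usage sketch, assuming the PetscBDDCSolverParams setters declared in
// petsc.hpp; the operator must be assembled in unassembled (MATIS) format:
//
//    PetscParMatrix pA;                   // from Operator::PETSC_MATIS assembly
//    PetscBDDCSolverParams opts;
//    opts.SetSpace(&fespace);             // enables fields/coordinates above
//    opts.SetEssBdrDofs(&ess_tdof_list);
//    PetscBDDCSolver bddc(pA, opts);
//    pcg.SetPreconditioner(bddc);         // pcg: a PetscPCGSolver on pA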
3257 
3258 PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3259  const string &prefix)
3260  : PetscPreconditioner(comm,op,prefix)
3261 {
3262  PC pc = (PC)obj;
3263  ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3264 
3265  Mat pA;
3266  ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3267 
3268  // Check if pA is of type MATNEST
3269  PetscBool isnest;
3270  ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3271 
3272  PetscInt nr = 0;
3273  IS *isrow = NULL;
3274  if (isnest) // we know the fields
3275  {
3276  ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3277  ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3278  ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3279  }
3280 
3281  // We need to customize here, before setting the index sets.
3282  // This is because PCFieldSplitSetType customizes the function
3283  // pointers. Sub-solver options will be processed during PCApply
3284  Customize();
3285 
3286  for (PetscInt i=0; i<nr; i++)
3287  {
3288  ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3289  }
3290  ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3291 }
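
// A sketch of the MATNEST path above: convert a BlockOperator with
// Operator::PETSC_MATNEST so that the row index sets can be recovered
// (variable names assumed):
//
//    BlockOperator block(offsets);        // sub-blocks set elsewhere
//    PetscParMatrix nest(MPI_COMM_WORLD, &block, Operator::PETSC_MATNEST);
//    PetscFieldSplitSolver fs(MPI_COMM_WORLD, nest, "fs_");
//    // sub-solver options at run time: -fs_fieldsplit_0_ksp_type ..., etc.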
3292 
3293 // PetscNonlinearSolver methods
3294 
3295 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3296  const std::string &prefix)
3297  : PetscSolver(), Solver()
3298 {
3299  // Create the actual solver object
3300  SNES snes;
3301  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3302  obj = (PetscObject)snes;
3303  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3304  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3305 
3306  // Allocate private solver context
3307  CreatePrivateContext();
3308 }
3309 
3310 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
3311  const std::string &prefix)
3312  : PetscSolver(), Solver()
3313 {
3314  // Create the actual solver object
3315  SNES snes;
3316  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3317  obj = (PetscObject)snes;
3318  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3319  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3320 
3321  // Allocate private solver context
3322  CreatePrivateContext();
3323 
3324  SetOperator(op);
3325 }
3326 
3327 PetscNonlinearSolver::~PetscNonlinearSolver()
3328 {
3329  MPI_Comm comm;
3330  SNES snes = (SNES)obj;
3331  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
3332  ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3333 }
3334 
3335 void PetscNonlinearSolver::SetOperator(const Operator &op)
3336 {
3337  SNES snes = (SNES)obj;
3338 
3339  if (operatorset)
3340  {
3341  PetscBool ls,gs;
3342  void *fctx,*jctx;
3343 
3344  ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3345  PCHKERRQ(snes, ierr);
3346  ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3347  PCHKERRQ(snes, ierr);
3348 
3349  ls = (PetscBool)(height == op.Height() && width == op.Width() &&
3350  (void*)&op == fctx &&
3351  (void*)&op == jctx);
3352  ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3353  PetscObjectComm((PetscObject)snes));
3354  PCHKERRQ(snes,ierr);
3355  if (!gs)
3356  {
3357  ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3358  delete X;
3359  delete B;
3360  X = B = NULL;
3361  }
3362  }
3363  else
3364  {
3365  /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
3366  yet set. We default to backtracking */
3367  SNESLineSearch ls;
3368  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3369  ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3370  }
3371 
3372  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3373  snes_ctx->op = (mfem::Operator*)&op;
3374  ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
3375  PCHKERRQ(snes, ierr);
3376  ierr = SNESSetJacobian(snes, NULL, NULL, __mfem_snes_jacobian,
3377  (void *)snes_ctx);
3378  PCHKERRQ(snes, ierr);
3379 
3380  // Update PetscSolver
3381  operatorset = true;
3382 
3383  // Update the Operator fields.
3384  height = op.Height();
3385  width = op.Width();
3386 }
3387 
3388 void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
3389 {
3390  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3391  snes_ctx->jacType = jacType;
3392 }
3393 
3394 void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *,const Vector&,
3395  double*))
3396 {
3397  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3398  snes_ctx->objective = objfn;
3399 
3400  SNES snes = (SNES)obj;
3401  ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
3402  PCHKERRQ(snes, ierr);
3403 }
3404 
3405 void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *,const Vector&,
3406  Vector&, Vector&,
3407  bool&, bool&))
3408 {
3409  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3410  snes_ctx->postcheck = post;
3411 
3412  SNES snes = (SNES)obj;
3413  SNESLineSearch ls;
3414  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3415  ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
3416  PCHKERRQ(ls, ierr);
3417 }
3418 
3419 void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *,int,
3420  const mfem::Vector&,
3421  const mfem::Vector&,
3422  const mfem::Vector&,
3423  const mfem::Vector&))
3424 {
3425  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3426  snes_ctx->update = update;
3427 
3428  SNES snes = (SNES)obj;
3429  ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
3430 }
3431 
3432 void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
3433 {
3434  SNES snes = (SNES)obj;
3435 
3436  bool b_nonempty = b.Size();
3437  if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
3438  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
3439  X->PlaceArray(x.GetData());
3440  if (b_nonempty) { B->PlaceArray(b.GetData()); }
3441  else { *B = 0.0; }
3442 
3443  Customize();
3444 
3445  if (!iterative_mode) { *X = 0.; }
3446 
3447  // Solve the system.
3448  ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
3449  X->ResetArray();
3450  if (b_nonempty) { B->ResetArray(); }
3451 }
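
// A minimal Newton solve sketch; op is an mfem::Operator implementing Mult()
// and GetGradient() (the names below are illustrative):
//
//    PetscNonlinearSolver newton(MPI_COMM_WORLD, op, "newton_");
//    newton.SetJacobianType(Operator::PETSC_MATAIJ);
//    Vector zero, x(op.Width()); // an empty rhs means solving F(x) = 0
//    x = 0.0;
//    newton.Mult(zero, x);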
3452 
3453 // PetscODESolver methods
3454 
3455 PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
3456  : PetscSolver(), ODESolver()
3457 {
3458  // Create the actual solver object
3459  TS ts;
3460  ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
3461  obj = (PetscObject)ts;
3462  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3463  ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
3464 
3465  // Allocate private solver context
3466  CreatePrivateContext();
3467 
3468  // Default options, to comply with the current interface to ODESolver.
3469  ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
3470  PCHKERRQ(ts,ierr);
3471  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
3472  PCHKERRQ(ts,ierr);
3473  TSAdapt tsad;
3474  ierr = TSGetAdapt(ts,&tsad);
3475  PCHKERRQ(ts,ierr);
3476  ierr = TSAdaptSetType(tsad,TSADAPTNONE);
3477  PCHKERRQ(ts,ierr);
3478 }
3479 
3480 PetscODESolver::~PetscODESolver()
3481 {
3482  MPI_Comm comm;
3483  TS ts = (TS)obj;
3484  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
3485  ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
3486 }
3487 
3488 void PetscODESolver::Init(TimeDependentOperator &f_,
3489  enum PetscODESolver::Type type)
3490 {
3491  TS ts = (TS)obj;
3492 
3493  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3494  if (operatorset)
3495  {
3496  ierr = TSReset(ts); PCHKERRQ(ts,ierr);
3497  delete X;
3498  X = NULL;
3499  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
3500  ts_ctx->cached_ijacstate = -1;
3501  ts_ctx->cached_rhsjacstate = -1;
3502  ts_ctx->cached_splits_xstate = -1;
3503  ts_ctx->cached_splits_xdotstate = -1;
3504  }
3505  f = &f_;
3506 
3507  // Set functions in TS
3508  ts_ctx->op = &f_;
3509  if (f_.isImplicit())
3510  {
3511  ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
3512  PCHKERRQ(ts, ierr);
3513  ierr = TSSetIJacobian(ts, NULL, NULL, __mfem_ts_ijacobian, (void *)ts_ctx);
3514  PCHKERRQ(ts, ierr);
3515  ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
3516  PCHKERRQ(ts, ierr);
3517  }
3518  if (!f_.isHomogeneous())
3519  {
3520  if (!f_.isImplicit())
3521  {
3522  ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
3523  PCHKERRQ(ts, ierr);
3524  }
3525  ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
3526  PCHKERRQ(ts, ierr);
3527  ierr = TSSetRHSJacobian(ts, NULL, NULL, __mfem_ts_rhsjacobian, (void *)ts_ctx);
3528  PCHKERRQ(ts, ierr);
3529  }
3530  operatorset = true;
3531 
3532  SetType(type);
3533 
3534  // Set solution vector
3535  PetscParVector X(PetscObjectComm(obj),*f,false,true);
3536  ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
3537 
3538  // Compose special purpose function for PDE-constrained optimization
3539  PetscBool use = PETSC_TRUE;
3540  ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
3541  if (use && f_.isImplicit())
3542  {
3543  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
3544  __mfem_ts_computesplits);
3545  PCHKERRQ(ts,ierr);
3546  }
3547  else
3548  {
3549  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
3550  NULL);
3551  PCHKERRQ(ts,ierr);
3552  }
3553 }
3554 
3555 void PetscODESolver::SetJacobianType(Operator::Type jacType)
3556 {
3557  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3558  ts_ctx->jacType = jacType;
3559 }
3560 
3561 PetscODESolver::Type PetscODESolver::GetType() const
3562 {
3563  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3564  return ts_ctx->type;
3565 }
3566 
3567 void PetscODESolver::SetType(PetscODESolver::Type type)
3568 {
3569  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3570 
3571  TS ts = (TS)obj;
3572  ts_ctx->type = type;
3573  if (type == ODE_SOLVER_LINEAR)
3574  {
3575  ierr = TSSetProblemType(ts, TS_LINEAR);
3576  PCHKERRQ(ts, ierr);
3577  }
3578  else
3579  {
3580  ierr = TSSetProblemType(ts, TS_NONLINEAR);
3581  PCHKERRQ(ts, ierr);
3582  }
3583 }
3584 
3585 void PetscODESolver::Step(Vector &x, double &t, double &dt)
3586 {
3587  // Pass the parameters to PETSc.
3588  TS ts = (TS)obj;
3589  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
3590  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
3591 
3592  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
3593  X->PlaceArray(x.GetData());
3594 
3595  Customize();
3596 
3597  // Take the step.
3598  ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
3599  ierr = TSStep(ts); PCHKERRQ(ts, ierr);
3600  X->ResetArray();
3601 
3602  // Return the current time and the time step actually used to the caller.
3603  // We cannot use TSGetTimeStep() since it returns the next candidate step
3604  PetscReal pt;
3605  ierr = TSGetTime(ts,&pt); PCHKERRQ(ts,ierr);
3606  dt = pt - (PetscReal)t;
3607  t = pt;
3608 }
3609 
3610 void PetscODESolver::Run(Vector &x, double &t, double &dt, double t_final)
3611 {
3612  // Give the parameters to PETSc.
3613  TS ts = (TS)obj;
3614  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
3615  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
3616  ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
3617  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
3618  PCHKERRQ(ts, ierr);
3619 
3620  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
3621  X->PlaceArray(x.GetData());
3622 
3623  Customize();
3624 
3625  // Reset Jacobian caching since the user may have changed
3626  // the parameters of the solver
3627  // We don't do this in the Step method because two consecutive
3628  // Step() calls are done with the same operator
3629  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3630  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
3631  ts_ctx->cached_ijacstate = -1;
3632  ts_ctx->cached_rhsjacstate = -1;
3633  ts_ctx->cached_splits_xstate = -1;
3634  ts_ctx->cached_splits_xdotstate = -1;
3635 
3636  // Take the steps.
3637  ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
3638  X->ResetArray();
3639 
3640  // Return the final time and the time step to the caller.
3641  PetscReal pt;
3642  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
3643  t = pt;
3644  ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
3645  dt = pt;
3646 }
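
// A usage sketch of the Step()/Run() interface; tdo is a TimeDependentOperator
// set up elsewhere, and x, t_final are assumed from the caller:
//
//    PetscODESolver ode(MPI_COMM_WORLD, "ts_");
//    ode.Init(tdo, PetscODESolver::ODE_SOLVER_LINEAR); // enables Jacobian caching
//    double t = 0.0, dt = 1e-2;
//    while (t < t_final) { ode.Step(x, t, dt); }
//    // or, equivalently: ode.Run(x, t, dt, t_final);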
3647 
3648 } // namespace mfem
3649 
3650 #include "petsc/private/petscimpl.h"
3651 #include "petsc/private/matimpl.h"
3652 
3653 // auxiliary functions
3654 static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
3655  void* ctx)
3656 {
3657  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
3658 
3659  PetscFunctionBeginUser;
3660  if (!monctx)
3661  {
3662  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
3663  }
3664  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
3665  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
3666  monctx->monitor);
3667 
3668  if (user_monitor->mon_sol)
3669  {
3670  mfem::PetscParVector V(x,true);
3671  user_monitor->MonitorSolution(it,t,V);
3672  }
3673  user_monitor->MonitorSolver(solver);
3674  PetscFunctionReturn(0);
3675 }
3676 
3677 static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
3678  Vec f,void *ctx)
3679 {
3680  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3681  PetscErrorCode ierr;
3682 
3683  PetscFunctionBeginUser;
3684  mfem::PetscParVector xx(x,true);
3685  mfem::PetscParVector yy(xp,true);
3686  mfem::PetscParVector ff(f,true);
3687 
3688  mfem::TimeDependentOperator *op = ts_ctx->op;
3689  op->SetTime(t);
3690 
3691  if (ts_ctx->bchandler)
3692  {
3693  // we evaluate the ImplicitMult method with the correct bc
3694  // this means the correct time derivative for essential boundary
3695  // dofs is zero
3696  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
3697  if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
3698  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3699  mfem::Vector* txx = ts_ctx->work;
3700  mfem::Vector* txp = ts_ctx->work2;
3701  bchandler->SetTime(t);
3702  bchandler->ApplyBC(xx,*txx);
3703  bchandler->ZeroBC(yy,*txp);
3704  op->ImplicitMult(*txx,*txp,ff);
3705  // and fix the residual (i.e. f_\partial\Omega = u - g(t))
3706  bchandler->FixResidualBC(xx,ff);
3707  }
3708  else
3709  {
3710  // use the ImplicitMult method of the class
3711  op->ImplicitMult(xx,yy,ff);
3712  }
3713 
3714  // need to tell PETSc the Vec has been updated
3715  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
3716  PetscFunctionReturn(0);
3717 }
3718 
3719 static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
3720  void *ctx)
3721 {
3722  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3723  PetscErrorCode ierr;
3724 
3725  PetscFunctionBeginUser;
3726  if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
3727  mfem::PetscParVector xx(x,true);
3728  mfem::PetscParVector ff(f,true);
3729  mfem::TimeDependentOperator *top = ts_ctx->op;
3730  top->SetTime(t);
3731 
3732  // use the ExplicitMult method - compute the RHS function
3733  top->ExplicitMult(xx,ff);
3734 
3735  // need to tell PETSc the Vec has been updated
3736  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
3737  PetscFunctionReturn(0);
3738 }
3739 
3740 static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
3741  Vec xp, PetscReal shift, Mat A, Mat P,
3742  void *ctx)
3743 {
3744  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3745  mfem::Vector *xx;
3746  PetscScalar *array;
3747  PetscReal eps = 0.001; /* 0.1% difference */
3748  PetscInt n;
3749  PetscObjectState state;
3750  PetscErrorCode ierr;
3751 
3752  PetscFunctionBeginUser;
3753  // avoid recomputing the Jacobian if we have already done so;
3754  // the relative tolerance comparison should be fine given that
3755  // two consecutive shifts should have similar magnitude
3756  ierr = PetscObjectStateGet((PetscObject)A,&state); CHKERRQ(ierr);
3757  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3758  std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
3759  state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
3760 
3761  // update time
3762  mfem::TimeDependentOperator *op = ts_ctx->op;
3763  op->SetTime(t);
3764 
3765  // wrap Vecs with Vectors
3766  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
3767  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3768  mfem::Vector yy(array,n);
3769  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3770  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3771  if (!ts_ctx->bchandler)
3772  {
3773  xx = new mfem::Vector(array,n);
3774  }
3775  else
3776  {
3777  // make sure we compute a Jacobian with the correct boundary values
3778  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
3779  mfem::Vector txx(array,n);
3780  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3781  xx = ts_ctx->work;
3782  bchandler->SetTime(t);
3783  bchandler->ApplyBC(txx,*xx);
3784  }
3785  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3786 
3787  // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
3788  mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
3789  if (!ts_ctx->bchandler) { delete xx; }
3790  ts_ctx->cached_shift = shift;
3791 
3792  // Convert to the operator type requested if needed
3793  bool delete_pA = false;
3794  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
3795  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
3796  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3797  pA->GetType() != ts_ctx->jacType))
3798  {
3799  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
3800  ts_ctx->jacType);
3801  delete_pA = true;
3802  }
3803 
3804  // Eliminate essential dofs
3805  if (ts_ctx->bchandler)
3806  {
3807  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3808  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3809  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
3810  }
3811 
3812  // Get nonzerostate
3813  PetscObjectState nonzerostate;
3814  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
3815 
3816  // Avoid an unneeded copy of the matrix by replacing the header of P
3817  Mat B;
3818  B = pA->ReleaseMat(false);
3819  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
3820  if (delete_pA) { delete pA; }
3821 
3822  // Matrix-free case
3823  if (A && A != P)
3824  {
3825  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3826  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3827  }
3828 
3829  // When using MATNEST and PCFIELDSPLIT, the second setup of the
3830  // preconditioner fails because MatCreateSubMatrix_Nest does not
3831  // actually return a matrix. Instead, for efficiency reasons,
3832  // it returns a reference to the submatrix. The second time it
3833  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
3834  // aborts since the two submatrices are actually different.
3835  // We circumvent this issue by incrementing the nonzero state
3836  // (i.e. PETSc thinks the operator sparsity pattern has changed)
3837  // This does not impact performance in the MATNEST case
3838  PetscBool isnest;
3839  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
3840  CHKERRQ(ierr);
3841  if (isnest) { P->nonzerostate = nonzerostate + 1; }
3842 
3843  // Jacobian reusage
3844  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
3845  CHKERRQ(ierr);
3846 
3847  // Fool the DM so that TS/SNES reuse the matrix we just set up
3848  DM dm;
3849  MatType mtype;
3850  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
3851  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
3852  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
3853  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
3854  PetscFunctionReturn(0);
3855 }
3856 
3857 static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
3858  Mat Ax,Mat Jx,
3859  Mat Axp,Mat Jxp)
3860 {
3861  __mfem_ts_ctx* ts_ctx;
3862  mfem::Vector *xx;
3863  PetscScalar *array;
3864  PetscInt n;
3865  PetscObjectState state;
3866  PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
3867  PetscBool assembled;
3868  PetscErrorCode ierr;
3869 
3870  PetscFunctionBeginUser;
3871  ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
3872 
3873  // avoid recomputing the Jacobians if we have already done so
3874  ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
3875  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3876  state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
3877  ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
3878  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3879  state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
3880  if (!rx && !rxp) { PetscFunctionReturn(0); }
3881 
3882  // update time
3883  mfem::TimeDependentOperator *op = ts_ctx->op;
3884  op->SetTime(t);
3885 
3886  // wrap Vecs with Vectors
3887  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
3888  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3889  mfem::Vector yy(array,n);
3890  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3891  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3892  if (!ts_ctx->bchandler)
3893  {
3894  xx = new mfem::Vector(array,n);
3895  }
3896  else
3897  {
3898  // make sure we compute a Jacobian with the correct boundary values
3899  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
3900  mfem::Vector txx(array,n);
3901  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3902  xx = ts_ctx->work;
3903  bchandler->SetTime(t);
3904  bchandler->ApplyBC(txx,*xx);
3905  }
3906  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3907 
3908  // We don't have a specialized interface, so we compute the split Jacobians
3909  // by evaluating the implicit gradient twice with the appropriate shifts
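// In detail (a worked restatement of the trick above): for the implicit form
// F(x,xdot,t) = 0, GetImplicitGradient(x,xdot,shift) returns
//    J(shift) = shift*dF/dxdot + dF/dx.
// Evaluating at shift = 0 gives the state Jacobian Jx = dF/dx; evaluating at
// shift = 1 gives dF/dxdot + dF/dx, so Jxp = J(1) - J(0) is recovered below
// via MatAXPY.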
3910 
3911  // First compute the state Jacobian
3912  mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
3913 
3914  // Convert to the operator type requested if needed
3915  bool delete_mat = false;
3916  mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
3917  (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
3918  if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3919  pJx->GetType() != ts_ctx->jacType))
3920  {
3921  if (pJx)
3922  {
3923  Mat B = *pJx;
3924  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
3925  }
3926  pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
3927  ts_ctx->jacType);
3928  delete_mat = true;
3929  }
3930  if (rx)
3931  {
3932  ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
3933  if (assembled)
3934  {
3935  ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
3936  }
3937  else
3938  {
3939  Mat B;
3940  ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
3941  ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
3942  }
3943  }
3944  if (delete_mat) { delete pJx; }
3945  pJx = new mfem::PetscParMatrix(Jx,true);
3946 
3947  // Eliminate essential dofs
3948  if (ts_ctx->bchandler)
3949  {
3950  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3951  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3952  pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
3953  }
3954 
3955  // Then compute the Jacobian w.r.t. the time derivative of the state.
3956  // Note that this is usually the mass matrix
3957  mfem::PetscParMatrix *pJxp = NULL;
3958  if (rxp)
3959  {
3960  delete_mat = false;
3961  mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
3962  pJxp = const_cast<mfem::PetscParMatrix *>
3963  (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
3964  if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3965  pJxp->GetType() != ts_ctx->jacType))
3966  {
3967  if (pJxp)
3968  {
3969  Mat B = *pJxp;
3970  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
3971  }
3972  pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
3973  &oJxp,ts_ctx->jacType);
3974  delete_mat = true;
3975  }
3976 
3977  ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
3978  if (assembled)
3979  {
3980  ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
3981  }
3982  else
3983  {
3984  Mat B;
3985  ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
3986  ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
3987  }
3988  if (delete_mat) { delete pJxp; }
3989  pJxp = new mfem::PetscParMatrix(Jxp,true);
3990 
3991  // Eliminate essential dofs
3992  if (ts_ctx->bchandler)
3993  {
3994  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3995  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3996  pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
3997  }
3998 
3999  // Obtain the time-dependent part of the Jacobian by subtracting
4000  // the state Jacobian.
4001  // We don't use the class operator "-=" since we know that
4002  // the sparsity patterns of the two matrices are the same
4003  ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4004  }
4005 
4006  // Matrix-free cases
4007  if (Ax && Ax != Jx)
4008  {
4009  ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4010  ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4011  }
4012  if (Axp && Axp != Jxp)
4013  {
4014  ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4015  ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4016  }
4017 
4018  // Jacobian reuse
4019  ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4020  CHKERRQ(ierr);
4021  ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4022  CHKERRQ(ierr);
4023 
4024  delete pJx;
4025  delete pJxp;
4026  if (!ts_ctx->bchandler) { delete xx; }
4027  PetscFunctionReturn(0);
4028 }
4029 
4030 static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4031  Mat A, Mat P, void *ctx)
4032 {
4033  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4034  mfem::Vector *xx;
4035  PetscScalar *array;
4036  PetscInt n;
4037  PetscObjectState state;
4038  PetscErrorCode ierr;
4039 
4040  PetscFunctionBeginUser;
4041  // avoid recomputing the Jacobian if we have already done so
4042  ierr = PetscObjectStateGet((PetscObject)A,&state); CHKERRQ(ierr);
4043  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4044  state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
4045 
4046  // update time
4047  mfem::TimeDependentOperator *op = ts_ctx->op;
4048  op->SetTime(t);
4049 
4050  // wrap Vec with Vector
4051  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4052  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4053  if (!ts_ctx->bchandler)
4054  {
4055  xx = new mfem::Vector(array,n);
4056  }
4057  else
4058  {
4059  // make sure we compute a Jacobian with the correct boundary values
4060  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4061  mfem::Vector txx(array,n);
4062  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4063  xx = ts_ctx->work;
4064  bchandler->SetTime(t);
4065  bchandler->ApplyBC(txx,*xx);
4066  }
4067  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4068 
4069  // Use TimeDependentOperator::GetExplicitGradient(x)
4070  mfem::Operator& J = op->GetExplicitGradient(*xx);
4071  if (!ts_ctx->bchandler) { delete xx; }
4072 
4073  // Convert to the operator type requested if needed
4074  bool delete_pA = false;
4075  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4076  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4077  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4078  pA->GetType() != ts_ctx->jacType))
4079  {
4080  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4081  ts_ctx->jacType);
4082  delete_pA = true;
4083  }
4084 
4085  // Eliminate essential dofs
4086  if (ts_ctx->bchandler)
4087  {
4088  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4089  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4090  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4091  }
4092 
4093  // Get nonzerostate
4094  PetscObjectState nonzerostate;
4095  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4096 
4097  // Avoid an unneeded copy of the matrix by swapping the Mat headers (a hack)
4098  Mat B;
4099  B = pA->ReleaseMat(false);
4100  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4101  if (delete_pA) { delete pA; }
4102 
4103  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4104  // preconditioner fails because MatCreateSubMatrix_Nest does not
4105  // actually return a matrix. Instead, for efficiency reasons,
4106  // it returns a reference to the submatrix. The second time it
4107  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4108  // aborts since the two submatrices are actually different.
4109  // We circumvent this issue by incrementing the nonzero state
4110  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4111  // This does not impact performance in the MATNEST case
4112  PetscBool isnest;
4113  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4114  CHKERRQ(ierr);
4115  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4116 
4117  // Matrix-free case
4118  if (A && A != P)
4119  {
4120  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4121  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4122  }
4123 
4124  // Jacobian reuse
4125  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4126  {
4127  ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4128  }
4129  ierr = PetscObjectStateGet((PetscObject)A,&ts_ctx->cached_rhsjacstate);
4130  CHKERRQ(ierr);
4131 
4132  // Fool the DM: hand the new matrix (and its type) to the shell DM
4133  DM dm;
4134  MatType mtype;
4135  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4136  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4137  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4138  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4139  PetscFunctionReturn(0);
4140 }
4141 
4142 static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4143  void* ctx)
4144 {
4145  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4146 
4147  PetscFunctionBeginUser;
4148  if (!monctx)
4149  {
4150  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4151  }
4152 
4153  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4154  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4155  monctx->monitor);
4156  if (user_monitor->mon_sol)
4157  {
4158  Vec x;
4159  PetscErrorCode ierr;
4160 
4161  ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4162  mfem::PetscParVector V(x,true);
4163  user_monitor->MonitorSolution(it,res,V);
4164  }
4165  if (user_monitor->mon_res)
4166  {
4167  Vec x;
4168  PetscErrorCode ierr;
4169 
4170  ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4171  mfem::PetscParVector V(x,true);
4172  user_monitor->MonitorResidual(it,res,V);
4173  }
4174  user_monitor->MonitorSolver(solver);
4175  PetscFunctionReturn(0);
4176 }
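// For reference, a minimal user-side monitor that this callback dispatches to
// (a sketch only: the class name is hypothetical, and it assumes the
// (monitor_sol, monitor_res) constructor flags of mfem::PetscSolverMonitor):
//
//    class MyMonitor : public mfem::PetscSolverMonitor
//    {
//    public:
//       MyMonitor() : mfem::PetscSolverMonitor(true,false) {}
//       void MonitorSolution(PetscInt it, PetscReal norm,
//                            const mfem::Vector &x) override
//       { mfem::out << "it " << it << ": norm " << norm << "\n"; }
//    };
//
// An instance is registered with PetscSolver::SetMonitor(), which fills the
// __mfem_monitor_ctx consumed above.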
4177 
4178 static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4179  void *ctx)
4180 {
4181  PetscScalar *array;
4182  PetscInt n;
4183  PetscErrorCode ierr;
4184  mfem::Vector *xx;
4185  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4186 
4187  PetscFunctionBeginUser;
4188  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4189  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4190  if (!snes_ctx->bchandler)
4191  {
4192  xx = new mfem::Vector(array,n);
4193  }
4194  else
4195  {
4196  // make sure we compute a Jacobian with the correct boundary values
4197  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4198  mfem::Vector txx(array,n);
4199  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4200  xx = snes_ctx->work;
4201  bchandler->ApplyBC(txx,*xx);
4202  }
4203 
4204  // Use Operator::GetGradient(x)
4205  mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4206  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4207  if (!snes_ctx->bchandler) { delete xx; }
4208 
4209  // Convert to the operator type requested if needed
4210  bool delete_pA = false;
4211  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4212  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4213  if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4214  pA->GetType() != snes_ctx->jacType))
4215  {
4216  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4217  snes_ctx->jacType);
4218  delete_pA = true;
4219  }
4220 
4221  // Eliminate essential dofs
4222  if (snes_ctx->bchandler)
4223  {
4224  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4225  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4226  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4227  }
4228 
4229  // Get nonzerostate
4230  PetscObjectState nonzerostate;
4231  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4232 
4233  // Avoid an unneeded copy of the matrix by swapping the Mat headers (a hack)
4234  Mat B = pA->ReleaseMat(false);
4235  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4236  if (delete_pA) { delete pA; }
4237 
4238  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4239  // preconditioner fails because MatCreateSubMatrix_Nest does not
4240  // actually return a matrix. Instead, for efficiency reasons,
4241  // it returns a reference to the submatrix. The second time it
4242  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4243  // aborts since the two submatrices are actually different.
4244  // We circumvent this issue by incrementing the nonzero state
4245  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4246  // This does not impact performance in the MATNEST case
4247  PetscBool isnest;
4248  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4249  CHKERRQ(ierr);
4250  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4251 
4252  // Matrix-free case
4253  if (A && A != P)
4254  {
4255  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4256  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4257  }
4258 
4259  // Fool the DM: hand the new matrix (and its type) to the shell DM
4260  DM dm;
4261  MatType mtype;
4262  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4263  ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4264  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4265  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4266  PetscFunctionReturn(0);
4267 }
4268 
4269 static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4270 {
4271  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4272 
4273  PetscFunctionBeginUser;
4274  mfem::PetscParVector xx(x,true);
4275  mfem::PetscParVector ff(f,true);
4276  if (snes_ctx->bchandler)
4277  {
4278  // evaluate the Mult method with the correct boundary values
4279  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
4280  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4281  mfem::Vector* txx = snes_ctx->work;
4282  bchandler->ApplyBC(xx,*txx);
4283  snes_ctx->op->Mult(*txx,ff);
4284  // and fix the residual (i.e. f_\partial\Omega = u - g)
4285  bchandler->FixResidualBC(xx,ff);
4286  }
4287  else
4288  {
4289  // use the Mult method of the class
4290  snes_ctx->op->Mult(xx,ff);
4291  }
4292  // need to tell PETSc the Vec has been updated
4293  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
4294  PetscFunctionReturn(0);
4295 }
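// A worked restatement of the residual assembled above: with essential values
// g from the PetscBCHandler and u~ = u with the bc applied,
//    f(u) = N(u~)   on the interior dofs,
//    f(u) = u - g   on the essential dofs (FixResidualBC),
// so at convergence the solution satisfies u = g on the essential dofs.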
4296 
4297 static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
4298  void *ctx)
4299 {
4300  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4301 
4302  PetscFunctionBeginUser;
4303  if (!snes_ctx->objective)
4304  {
4305  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
4306  }
4307  mfem::PetscParVector xx(x,true);
4308  double lf;
4309  (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4310  *f = (PetscReal)lf;
4311  PetscFunctionReturn(0);
4312 }
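// The objective is a plain function pointer registered through
// PetscNonlinearSolver::SetObjective; a sketch (the function name is
// hypothetical):
//
//    void my_obj(mfem::Operator *op, const mfem::Vector &x, double *f)
//    { *f = x.Norml2(); } // e.g. a simple merit function
//    // newton.SetObjective(my_obj);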
4313 
4314 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4315  PetscBool *cy,PetscBool *cw, void* ctx)
4316 {
4317  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4318  bool lcy = false,lcw = false;
4319 
4320  PetscFunctionBeginUser;
4321  mfem::PetscParVector x(X,true);
4322  mfem::PetscParVector y(Y,true);
4323  mfem::PetscParVector w(W,true);
4324  (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4325  if (lcy) { *cy = PETSC_TRUE; }
4326  if (lcw) { *cw = PETSC_TRUE; }
4327  PetscFunctionReturn(0);
4328 }
4329 
4330 static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
4331 {
4332  Vec F,X,dX,pX;
4333  __mfem_snes_ctx* snes_ctx;
4334 
4335  PetscFunctionBeginUser;
4336  /* The update callback is not passed a user context, so retrieve it
4336  from the SNES residual function */
4337  ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
4338  ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4339  if (!it)
4340  {
4341  ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4342  ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
4343  CHKERRQ(ierr);
4344  ierr = VecDestroy(&pX); CHKERRQ(ierr);
4345  }
4346  ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
4347  CHKERRQ(ierr);
4348  if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
4349  "Missing previous solution");
4350  ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4351  mfem::PetscParVector f(F,true);
4352  mfem::PetscParVector x(X,true);
4353  mfem::PetscParVector dx(dX,true);
4354  mfem::PetscParVector px(pX,true);
4355  (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4356  /* Store previous solution */
4357  ierr = VecCopy(X,pX); CHKERRQ(ierr);
4358  PetscFunctionReturn(0);
4359 }
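// The postcheck and update hooks above are driven by function pointers
// registered through PetscNonlinearSolver::SetPostCheck and SetUpdate; a
// sketch of the latter (the function name is hypothetical):
//
//    void my_update(mfem::Operator *op, int it, const mfem::Vector &F,
//                   const mfem::Vector &X, const mfem::Vector &D,
//                   const mfem::Vector &P)
//    { /* inspect residual F, iterate X, update D, previous iterate P */ }
//    // newton.SetUpdate(my_update);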
4360 
4361 static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
4362  void* ctx)
4363 {
4364  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4365 
4366  PetscFunctionBeginUser;
4367  if (!monctx)
4368  {
4369  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4370  }
4371 
4372  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4373  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4374  monctx->monitor);
4375  if (user_monitor->mon_sol)
4376  {
4377  Vec x;
4378  PetscErrorCode ierr;
4379 
4380  ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
4381  mfem::PetscParVector V(x,true);
4382  user_monitor->MonitorSolution(it,res,V);
4383  }
4384  if (user_monitor->mon_res)
4385  {
4386  Vec x;
4387  PetscErrorCode ierr;
4388 
4389  ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
4390  mfem::PetscParVector V(x,true);
4391  user_monitor->MonitorResidual(it,res,V);
4392  }
4393  user_monitor->MonitorSolver(solver);
4394  PetscFunctionReturn(0);
4395 }
4396 
4397 static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
4398 {
4399  mfem::Operator *op;
4400  PetscErrorCode ierr;
4401 
4402  PetscFunctionBeginUser;
4403  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
4404  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
4405  mfem::PetscParVector xx(x,true);
4406  mfem::PetscParVector yy(y,true);
4407  op->Mult(xx,yy);
4408  // need to tell PETSc the Vec has been updated
4409  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4410  PetscFunctionReturn(0);
4411 }
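// This callback implements the action of a MATSHELL whose context is a bare
// mfem::Operator*. Such a Mat is what the PETSC_MATSHELL conversion path
// produces; a usage sketch (assuming an existing mfem::Operator &op):
//
//    mfem::PetscParMatrix A(MPI_COMM_WORLD,&op,mfem::Operator::PETSC_MATSHELL);
//    // the Mat inside A now applies op through __mfem_mat_shell_apply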
4412 
4413 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
4414 {
4415  mfem::Operator *op;
4416  PetscErrorCode ierr;
4417 
4418  PetscFunctionBeginUser;
4419  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
4420  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
4421  mfem::PetscParVector xx(x,true);
4422  mfem::PetscParVector yy(y,true);
4423  op->MultTranspose(xx,yy);
4424  // need to tell PETSc the Vec has been updated
4425  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4426  PetscFunctionReturn(0);
4427 }
4428 
4429 static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
4430 {
4431  mfem::Operator *op;
4432  PetscErrorCode ierr;
4433 
4434  PetscFunctionBeginUser;
4435  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
4436  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
4437  ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
4438  PetscFunctionReturn(0);
4439 }
4440 
4441 static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
4442 {
4443  PetscFunctionBeginUser;
4444  PetscFunctionReturn(0);
4445 }
4446 
4447 static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
4448 {
4449  __mfem_pc_shell_ctx *ctx;
4450  PetscErrorCode ierr;
4451 
4452  PetscFunctionBeginUser;
4453  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4454  if (ctx->op)
4455  {
4456  PetscBool isascii;
4457  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
4458  CHKERRQ(ierr);
4459 
4460  mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
4461  (ctx->op);
4462  if (ppc)
4463  {
4464  ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
4465  }
4466  else
4467  {
4468  if (isascii)
4469  {
4470  ierr = PetscViewerASCIIPrintf(viewer,
4471  "No information available on the mfem::Solver\n");
4472  CHKERRQ(ierr);
4473  }
4474  }
4475  if (isascii && ctx->factory)
4476  {
4477  ierr = PetscViewerASCIIPrintf(viewer,
4478  "Number of preconditioners created by the factory %lu\n",ctx->numprec);
4479  CHKERRQ(ierr);
4480  }
4481  }
4482  PetscFunctionReturn(0);
4483 }
4484 
4485 static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
4486 {
4487  __mfem_pc_shell_ctx *ctx;
4488  PetscErrorCode ierr;
4489 
4490  PetscFunctionBeginUser;
4491  mfem::PetscParVector xx(x,true);
4492  mfem::PetscParVector yy(y,true);
4493  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4494  if (ctx->op)
4495  {
4496  ctx->op->Mult(xx,yy);
4497  // need to tell PETSc the Vec has been updated
4498  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4499  }
4500  else // operator is not present, copy x
4501  {
4502  yy = xx;
4503  }
4504  PetscFunctionReturn(0);
4505 }
4506 
4507 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
4508 {
4509  __mfem_pc_shell_ctx *ctx;
4510  PetscErrorCode ierr;
4511 
4512  PetscFunctionBeginUser;
4513  mfem::PetscParVector xx(x,true);
4514  mfem::PetscParVector yy(y,true);
4515  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4516  if (ctx->op)
4517  {
4518  ctx->op->MultTranspose(xx,yy);
4519  // need to tell PETSc the Vec has been updated
4520  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4521  }
4522  else // operator is not present, copy x
4523  {
4524  yy = xx;
4525  }
4526  PetscFunctionReturn(0);
4527 }
4528 
4529 static PetscErrorCode __mfem_pc_shell_setup(PC pc)
4530 {
4531  __mfem_pc_shell_ctx *ctx;
4532 
4533  PetscFunctionBeginUser;
4534  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4535  if (ctx->factory)
4536  {
4537  // Delete any owned operator
4538  if (ctx->ownsop)
4539  {
4540  delete ctx->op;
4541  }
4542 
4543  // Get current preconditioning Mat
4544  Mat B;
4545  ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
4546 
4547  // Call user-defined setup
4548  mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
4549  mfem::PetscPreconditionerFactory *factory = ctx->factory;
4550  ctx->op = factory->NewPreconditioner(hB);
4551  ctx->ownsop = true;
4552  ctx->numprec++;
4553  }
4554  PetscFunctionReturn(0);
4555 }
4556 
4557 static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
4558 {
4559  __mfem_pc_shell_ctx *ctx;
4560  PetscErrorCode ierr;
4561 
4562  PetscFunctionBeginUser;
4563  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4564  if (ctx->ownsop)
4565  {
4566  delete ctx->op;
4567  }
4568  delete ctx;
4569  PetscFunctionReturn(0);
4570 }
4571 
4572 static PetscErrorCode __mfem_array_container_destroy(void *ptr)
4573 {
4574  PetscErrorCode ierr;
4575 
4576  PetscFunctionBeginUser;
4577  ierr = PetscFree(ptr); CHKERRQ(ierr);
4578  PetscFunctionReturn(0);
4579 }
4580 
4581 static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
4582 {
4583  mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
4584  PetscErrorCode ierr;
4585 
4586  PetscFunctionBeginUser;
4587  for (int i=0; i<a->Size(); i++)
4588  {
4589  Mat M = (*a)[i];
4590  MPI_Comm comm = PetscObjectComm((PetscObject)M);
4591  ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
4592  }
4593  delete a;
4594  PetscFunctionReturn(0);
4595 }
4596 
4597 static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
4598 {
4599  PetscErrorCode ierr;
4600 
4601  PetscFunctionBeginUser;
4602  ierr = PetscFree(*ctx); CHKERRQ(ierr);
4603  PetscFunctionReturn(0);
4604 }
4605 
4606  // Sets the type of the PC to PCSHELL and wraps the solver action.
4607  // If ownsop is true, ownership of precond is transferred to the PETSc object
4608 PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
4609 {
4610  PetscFunctionBeginUser;
4611  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
4612  ctx->op = &precond;
4613  ctx->ownsop = ownsop;
4614  ctx->factory = NULL;
4615  ctx->numprec = 0;
4616 
4617  // In case the PC was already of type SHELL, this will destroy any
4618  // previous user-defined data structure
4619  // We cannot call PCReset as it will wipe out any operator already set
4620  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
4621 
4622  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
4623  ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
4624  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
4625  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
4626  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
4627  CHKERRQ(ierr);
4628  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
4629  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
4630  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
4631  PetscFunctionReturn(0);
4632 }
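// Usage sketch (ksp and mysolver are hypothetical): wrap any mfem::Solver as
// the preconditioner of an existing KSP,
//
//    PC pc;
//    ierr = KSPGetPC(ksp,&pc); CHKERRQ(ierr);
//    ierr = MakeShellPC(pc,mysolver,false); CHKERRQ(ierr); // PETSc won't own it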
4633 
4634  // Sets the type of the PC to PCSHELL; uses a PetscPreconditionerFactory
4635  // to construct the solver. Takes ownership of the solver created by the factory
4636 PetscErrorCode MakeShellPCWithFactory(PC pc,
4637  mfem::PetscPreconditionerFactory *factory)
4638 {
4639  PetscFunctionBeginUser;
4640  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
4641  ctx->op = NULL;
4642  ctx->ownsop = true;
4643  ctx->factory = factory;
4644  ctx->numprec = 0;
4645 
4646  // In case the PC was already of type SHELL, this will destroy any
4647  // previous user-defined data structure
4648  // We cannot call PCReset as it will wipe out any operator already set
4649  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
4650 
4651  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
4652  ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
4653  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
4654  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
4655  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
4656  CHKERRQ(ierr);
4657  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
4658  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
4659  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
4660  PetscFunctionReturn(0);
4661 }
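// A minimal factory for the function above (a sketch: MyFactory and MySolver
// are hypothetical, and it assumes the name-string constructor of
// mfem::PetscPreconditionerFactory):
//
//    class MyFactory : public mfem::PetscPreconditionerFactory
//    {
//    public:
//       MyFactory() : mfem::PetscPreconditionerFactory("my factory") {}
//       mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh) override
//       { return new MySolver(oh); } // any mfem::Solver built from oh
//    };
//
// Each time the PC is set up, __mfem_pc_shell_setup calls NewPreconditioner
// with the current operator and takes ownership of the result.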
4662 
4663 // Converts from a list (or a marked Array if islist is false) to an IS
4664 // st indicates the offset where to start numbering
4665 static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
4666  const mfem::Array<int> *list,
4667  PetscInt st, IS* is)
4668 {
4669  PetscInt n = list ? list->Size() : 0,*idxs;
4670  const int *data = list ? list->GetData() : NULL;
4671  PetscErrorCode ierr;
4672 
4673  PetscFunctionBeginUser;
4674  ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
4675  if (islist)
4676  {
4677  for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
4678  }
4679  else
4680  {
4681  PetscInt cum = 0;
4682  for (PetscInt i=0; i<n; i++)
4683  {
4684  if (data[i]) { idxs[cum++] = i+st; }
4685  }
4686  n = cum;
4687  }
4688  ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
4689  CHKERRQ(ierr);
4690  PetscFunctionReturn(0);
4691 }
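// Worked example: with st = 10, the list {2,5,7} (islist = true) yields the
// IS {12,15,17}, while the marker array {0,1,1,0} (islist = false) yields
// the IS {11,12}.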
4692 
4693  // Converts from a marked Array of Vdofs to an IS.
4694  // st indicates the offset where to start numbering;
4695  // pl2l is an array of matrices generated during RAP
4696 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
4697  mfem::Array<Mat> &pl2l,
4698  const mfem::Array<int> *mark,
4699  PetscInt st, IS* is)
4700 {
4701  mfem::Array<int> sub_dof_marker;
4702  mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
4703  PetscInt nl;
4704  PetscErrorCode ierr;
4705 
4706  PetscFunctionBeginUser;
4707  for (int i = 0; i < pl2l.Size(); i++)
4708  {
4709  PetscInt m,n,*ii,*jj;
4710  PetscBool done;
4711  ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
4712  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
4713  MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
4714  ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
4715 #if defined(PETSC_USE_64BIT_INDICES)
4716  int nnz = (int)ii[m];
4717  int *mii = new int[m+1];
4718  int *mjj = new int[nnz];
4719  for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
4720  for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
4721  l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
4722 #else
4723  l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
4724 #endif
4725  ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
4726  (const PetscInt**)&ii,
4727  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
4728  MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
4729  << i << " l2l matrix");
4730  }
4731  nl = 0;
4732  for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
4733  sub_dof_marker.SetSize(nl);
4734  const int* vdata = mark->GetData();
4735  int* sdata = sub_dof_marker.GetData();
4736  int cumh = 0, cumw = 0;
4737  for (int i = 0; i < l2l.Size(); i++)
4738  {
4739  const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
4740  l2l[i]->Height());
4741  mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
4742  l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
4743  cumh += l2l[i]->Height();
4744  cumw += l2l[i]->Width();
4745  }
4746  ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
4747  for (int i = 0; i < pl2l.Size(); i++)
4748  {
4749  delete l2l[i];
4750  }
4751  PetscFunctionReturn(0);
4752 }
4753 
4754 #if !defined(PETSC_HAVE_HYPRE)
4755 
4756 #if defined(HYPRE_MIXEDINT)
4757 #error "HYPRE_MIXEDINT not supported"
4758 #endif
4759 
4760 #include "_hypre_parcsr_mv.h"
4761 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
4762 {
4763  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
4764  hypre_CSRMatrix *hdiag,*hoffd;
4765  PetscScalar *da,*oa,*aptr;
4766  PetscInt *dii,*djj,*oii,*ojj,*iptr;
4767  PetscInt i,dnnz,onnz,m,n;
4768  PetscMPIInt size;
4769  PetscErrorCode ierr;
4770 
4771  PetscFunctionBeginUser;
4772  hdiag = hypre_ParCSRMatrixDiag(hA);
4773  hoffd = hypre_ParCSRMatrixOffd(hA);
4774  m = hypre_CSRMatrixNumRows(hdiag);
4775  n = hypre_CSRMatrixNumCols(hdiag);
4776  dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
4777  onnz = hypre_CSRMatrixNumNonzeros(hoffd);
4778  ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
4779  ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
4780  ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
4781  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
4782  CHKERRQ(ierr);
4783  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
4784  CHKERRQ(ierr);
4785  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
4786  CHKERRQ(ierr);
4787  iptr = djj;
4788  aptr = da;
4789  for (i=0; i<m; i++)
4790  {
4791  PetscInt nc = dii[i+1]-dii[i];
4792  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
4793  iptr += nc;
4794  aptr += nc;
4795  }
4796  ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
4797  if (size > 1)
4798  {
4799  PetscInt *offdj,*coffd;
4800 
4801  ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
4802  ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
4803  ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
4804  ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
4805  CHKERRQ(ierr);
4806  offdj = hypre_CSRMatrixJ(hoffd);
4807  coffd = hypre_ParCSRMatrixColMapOffd(hA);
4808  for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
4809  ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
4810  CHKERRQ(ierr);
4811  iptr = ojj;
4812  aptr = oa;
4813  for (i=0; i<m; i++)
4814  {
4815  PetscInt nc = oii[i+1]-oii[i];
4816  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
4817  iptr += nc;
4818  aptr += nc;
4819  }
4820  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
4821  djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
4822  }
4823  else
4824  {
4825  oii = ojj = NULL;
4826  oa = NULL;
4827  ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
4828  }
4829  /* We are responsible for freeing the CSR arrays. However, since we can
4830  take references of a PetscParMatrix but not of raw PETSc arrays, we
4831  create PetscContainer objects holding these arrays and attach them to
4832  the matrix, so that they are freed when the Mat is destroyed */
4833  void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
4834  const char *names[6] = {"_mfem_csr_dii",
4835  "_mfem_csr_djj",
4836  "_mfem_csr_da",
4837  "_mfem_csr_oii",
4838  "_mfem_csr_ojj",
4839  "_mfem_csr_oa"
4840  };
4841  for (i=0; i<6; i++)
4842  {
4843  PetscContainer c;
4844 
4845  ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
4846  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
4847  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
4848  CHKERRQ(ierr);
4849  ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
4850  CHKERRQ(ierr);
4851  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
4852  }
4853  PetscFunctionReturn(0);
4854 }
4855 
4856 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
4857 {
4858  Mat lA;
4859  ISLocalToGlobalMapping rl2g,cl2g;
4860  IS is;
4861  hypre_CSRMatrix *hdiag,*hoffd;
4862  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
4863  void *ptrs[2];
4864  const char *names[2] = {"_mfem_csr_aux",
4865  "_mfem_csr_data"
4866  };
4867  PetscScalar *hdd,*hod,*aa,*data;
4868  PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
4869  PetscInt *aux,*ii,*jj;
4870  PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
4871  PetscErrorCode ierr;
4872 
4873  PetscFunctionBeginUser;
4874  /* access relevant information in ParCSR */
4875  str = hypre_ParCSRMatrixFirstRowIndex(hA);
4876  stc = hypre_ParCSRMatrixFirstColDiag(hA);
4877  hdiag = hypre_ParCSRMatrixDiag(hA);
4878  hoffd = hypre_ParCSRMatrixOffd(hA);
4879  dr = hypre_CSRMatrixNumRows(hdiag);
4880  dc = hypre_CSRMatrixNumCols(hdiag);
4881  nnz = hypre_CSRMatrixNumNonzeros(hdiag);
4882  hdi = hypre_CSRMatrixI(hdiag);
4883  hdj = hypre_CSRMatrixJ(hdiag);
4884  hdd = hypre_CSRMatrixData(hdiag);
4885  oc = hypre_CSRMatrixNumCols(hoffd);
4886  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
4887  hoi = hypre_CSRMatrixI(hoffd);
4888  hoj = hypre_CSRMatrixJ(hoffd);
4889  hod = hypre_CSRMatrixData(hoffd);
4890 
4891  /* generate l2g maps for rows and cols */
4892  ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
4893  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
4894  ierr = ISDestroy(&is); CHKERRQ(ierr);
4895  col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
4896  ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
4897  for (i=0; i<dc; i++) { aux[i] = i+stc; }
4898  for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
4899  ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
4900  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
4901  ierr = ISDestroy(&is); CHKERRQ(ierr);
4902 
4903  /* create MATIS object */
4904  ierr = MatCreate(comm,pA); CHKERRQ(ierr);
4905  ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
4906  ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
4907  ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
4908  ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
4909  ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
4910 
4911  /* merge local matrices */
4912  ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
4913  ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
4914  ii = aux;
4915  jj = aux+dr+1;
4916  aa = data;
4917  *ii = *(hdi++) + *(hoi++);
4918  for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
4919  {
4920  PetscScalar *aold = aa;
4921  PetscInt *jold = jj,nc = jd+jo;
4922  for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
4923  for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
4924  *(++ii) = *(hdi++) + *(hoi++);
4925  ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
4926  }
4927  for (; cum<dr; cum++) { *(++ii) = nnz; }
4928  ii = aux;
4929  jj = aux+dr+1;
4930  aa = data;
4931  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
4932  CHKERRQ(ierr);
4933  ptrs[0] = aux;
4934  ptrs[1] = data;
4935  for (i=0; i<2; i++)
4936  {
4937  PetscContainer c;
4938 
4939  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
4940  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
4941  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
4942  CHKERRQ(ierr);
4943  ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
4944  CHKERRQ(ierr);
4945  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
4946  }
4947  ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
4948  ierr = MatDestroy(&lA); CHKERRQ(ierr);
4949  ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4950  ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4951  PetscFunctionReturn(0);
4952 }
4953 #endif
4954 
4955 #endif // MFEM_USE_PETSC
4956 #endif // MFEM_USE_MPI