MFEM v4.0
Finite element discretization library

petsc.cpp
// Copyright (c) 2010, Lawrence Livermore National Security, LLC. Produced at
// the Lawrence Livermore National Laboratory. LLNL-CODE-443211. All Rights
// reserved. See file COPYRIGHT for details.
//
// This file is part of the MFEM library. For more information and source code
// availability see http://mfem.org.
//
// MFEM is free software; you can redistribute it and/or modify it under the
// terms of the GNU Lesser General Public License (as published by the Free
// Software Foundation) version 2.1 dated February 1999.

// Author: Stefano Zampini <stefano.zampini@gmail.com>
#include "../config/config.hpp"

#ifdef MFEM_USE_MPI
#ifdef MFEM_USE_PETSC

#include "linalg.hpp"
#include "../fem/fem.hpp"

#include "petsc.h"
#if defined(PETSC_HAVE_HYPRE)
#include "petscmathypre.h"
#endif

// Backward compatibility
#if PETSC_VERSION_LT(3,12,0)
#define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
#define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
#endif

#include <fstream>
#include <iomanip>
#include <cmath>
#include <cstdlib>

// Note: there are additional #include statements below.

// Error handling
// Prints PETSc's stacktrace and then calls MFEM_ABORT
// We cannot use PETSc's CHKERRQ since it returns a PetscErrorCode
#define PCHKERRQ(obj,err) do {                                                   \
     if ((err))                                                                  \
     {                                                                           \
        PetscError(PetscObjectComm((PetscObject)(obj)),__LINE__,_MFEM_FUNC_NAME, \
                   __FILE__,(err),PETSC_ERROR_REPEAT,NULL);                      \
        MFEM_ABORT("Error in PETSc. See stacktrace above.");                     \
     }                                                                           \
  } while(0);
#define CCHKERRQ(comm,err) do {                               \
     if ((err))                                               \
     {                                                        \
        PetscError(comm,__LINE__,_MFEM_FUNC_NAME,             \
                   __FILE__,(err),PETSC_ERROR_REPEAT,NULL);   \
        MFEM_ABORT("Error in PETSc. See stacktrace above.");  \
     }                                                        \
  } while(0);

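// Usage sketch (illustrative only): PCHKERRQ is used when a PETSc object is
// at hand to recover the communicator, CCHKERRQ when only an MPI_Comm is
// available, e.g.
//
//    ierr = VecSet(x,0.0); PCHKERRQ(x,ierr);
//    ierr = VecAssemblyBegin(x); CCHKERRQ(comm,ierr);
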
// Callback functions: these functions will be called by PETSc
static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,void*);
static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,PetscReal,
                                          Mat,Mat,void*);
static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
                                              Mat,Mat,Mat,Mat);
static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
                                            PetscBool*,PetscBool*,void*);
static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
static PetscErrorCode __mfem_pc_shell_setup(PC);
static PetscErrorCode __mfem_pc_shell_destroy(PC);
static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
static PetscErrorCode __mfem_mat_shell_destroy(Mat);
static PetscErrorCode __mfem_array_container_destroy(void*);
static PetscErrorCode __mfem_matarray_container_destroy(void*);
static PetscErrorCode __mfem_monitor_ctx_destroy(void**);

// auxiliary functions
static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
                                       PetscInt,IS*);
static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
                                        const mfem::Array<int>*,PetscInt,IS*);
static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
static PetscErrorCode MakeShellPCWithFactory(PC,
                                             mfem::PetscPreconditionerFactory*);

// Equivalent functions are present in PETSc source code
// if PETSc has been compiled with hypre support
// We provide them here in case PETSC_HAVE_HYPRE is not defined
#if !defined(PETSC_HAVE_HYPRE)
static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
#endif

// structs used by PETSc code
typedef struct
{
   mfem::Operator *op;
} __mfem_mat_shell_ctx;

typedef struct
{
   mfem::Solver                     *op;
   mfem::PetscPreconditionerFactory *factory;
   bool                             ownsop;
   unsigned long int                numprec;
} __mfem_pc_shell_ctx;

typedef struct
{
   mfem::Operator       *op;        // The nonlinear operator
   mfem::PetscBCHandler *bchandler; // Handling of essential bc
   mfem::Vector         *work;      // Work vector
   mfem::Operator::Type jacType;    // OperatorType for the Jacobian
   // Objective for line search
   void (*objective)(mfem::Operator *op, const mfem::Vector&, double*);
   // PostCheck function (to be called after a successful line search)
   void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
                     mfem::Vector&, bool&, bool&);
   // General purpose update function (to be called at the beginning of
   // each nonlinear step)
   void (*update)(mfem::Operator *op, int,
                  const mfem::Vector&, const mfem::Vector&,
                  const mfem::Vector&, const mfem::Vector&);
} __mfem_snes_ctx;

typedef struct
{
   mfem::TimeDependentOperator *op;        // The time-dependent operator
   mfem::PetscBCHandler        *bchandler; // Handling of essential bc
   mfem::Vector                *work;      // Work vector
   mfem::Operator::Type        jacType;    // OperatorType for the Jacobian
   enum mfem::PetscODESolver::Type type;
   PetscReal                   cached_shift;
   PetscObjectState            cached_ijacstate;
   PetscObjectState            cached_rhsjacstate;
   PetscObjectState            cached_splits_xstate;
   PetscObjectState            cached_splits_xdotstate;
} __mfem_ts_ctx;

typedef struct
{
   mfem::PetscSolver        *solver;  // The solver object
   mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
} __mfem_monitor_ctx;

// use global scope ierr to check PETSc errors inside mfem calls
static PetscErrorCode ierr;

using namespace std;

namespace mfem
{

void MFEMInitializePetsc()
{
   MFEMInitializePetsc(NULL,NULL,NULL,NULL);
}

void MFEMInitializePetsc(int *argc,char*** argv)
{
   MFEMInitializePetsc(argc,argv,NULL,NULL);
}

void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
                         const char help[])
{
   ierr = PetscInitialize(argc,argv,rc_file,help);
   MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
}

void MFEMFinalizePetsc()
{
   ierr = PetscFinalize();
   MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
}

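// Typical driver usage (an illustrative sketch, not library code):
//
//    int main(int argc, char *argv[])
//    {
//       MPI_Init(&argc, &argv);
//       mfem::MFEMInitializePetsc(&argc, &argv);
//       // ... build meshes, spaces, and PETSc-backed operators/solvers ...
//       mfem::MFEMFinalizePetsc();
//       MPI_Finalize();
//       return 0;
//    }
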
// PetscParVector methods

void PetscParVector::_SetDataAndSize_()
{
   const PetscScalar *array;
   PetscInt           n;

   ierr = VecGetArrayRead(x,&array); PCHKERRQ(x,ierr);
   ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
   SetDataAndSize((PetscScalar*)array,n);
   ierr = VecRestoreArrayRead(x,&array); PCHKERRQ(x,ierr);
}

PetscInt PetscParVector::GlobalSize() const
{
   PetscInt N;
   ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
   return N;
}

PetscParVector::PetscParVector(MPI_Comm comm, const Vector &_x,
                               bool copy) : Vector()
{
   ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
   ierr = VecSetSizes(x,_x.Size(),PETSC_DECIDE); PCHKERRQ(x,ierr);
   ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
   _SetDataAndSize_();
   if (copy)
   {
      PetscScalar *array;

      ierr = VecGetArray(x,&array); PCHKERRQ(x,ierr);
      for (int i = 0; i < _x.Size(); i++) { array[i] = _x[i]; }
      ierr = VecRestoreArray(x,&array); PCHKERRQ(x,ierr);
   }
}

PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
                               PetscInt *col) : Vector()
{
   ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
   if (col)
   {
      PetscMPIInt myid;
      MPI_Comm_rank(comm, &myid);
      ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
   }
   else
   {
      ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
   }
   ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
   _SetDataAndSize_();
}

PetscParVector::~PetscParVector()
{
   MPI_Comm comm = PetscObjectComm((PetscObject)x);
   ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
}

PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
                               PetscScalar *_data, PetscInt *col) : Vector()
{
   MFEM_VERIFY(col,"Missing distribution");
   PetscMPIInt myid;
   MPI_Comm_rank(comm, &myid);
   ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,_data,
                                &x); CCHKERRQ(comm,ierr)
   _SetDataAndSize_();
}

PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
{
   ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
   _SetDataAndSize_();
}

PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
                               bool transpose, bool allocate) : Vector()
{
   PetscInt loc = transpose ? op.Height() : op.Width();
   if (allocate)
   {
      ierr = VecCreate(comm,&x);
      CCHKERRQ(comm,ierr);
      ierr = VecSetSizes(x,loc,PETSC_DECIDE);
      PCHKERRQ(x,ierr);
      ierr = VecSetType(x,VECSTANDARD);
      PCHKERRQ(x,ierr);
      ierr = VecSetUp(x);
      PCHKERRQ(x,ierr);
   }
   else
   {
      ierr = VecCreateMPIWithArray(comm,1,loc,PETSC_DECIDE,NULL,
                                   &x); CCHKERRQ(comm,ierr);
   }
   _SetDataAndSize_();
}

PetscParVector::PetscParVector(const PetscParMatrix &A,
                               bool transpose, bool allocate) : Vector()
{
   Mat pA = const_cast<PetscParMatrix&>(A);
   if (!transpose)
   {
      ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
   }
   else
   {
      ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
   }
   if (!allocate)
   {
      ierr = VecReplaceArray(x,NULL); PCHKERRQ(x,ierr);
   }
   _SetDataAndSize_();
}

PetscParVector::PetscParVector(Vec y, bool ref) : Vector()
{
   if (ref)
   {
      ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
   }
   x = y;
   _SetDataAndSize_();
}

PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
{
   HYPRE_Int* offsets = pfes->GetTrueDofOffsets();
   MPI_Comm   comm = pfes->GetComm();
   ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);

   PetscMPIInt myid = 0;
   if (!HYPRE_AssumedPartitionCheck())
   {
      MPI_Comm_rank(comm,&myid);
   }
   ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
   PCHKERRQ(x,ierr);
   ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
   _SetDataAndSize_();
}

MPI_Comm PetscParVector::GetComm() const
{
   return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
}

Vector * PetscParVector::GlobalVector() const
{
   VecScatter   scctx;
   Vec          vout;
   PetscScalar *array;
   PetscInt     size;

   ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
   ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
   PCHKERRQ(x,ierr);
   ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
   PCHKERRQ(x,ierr);
   ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
   ierr = VecGetArray(vout,&array); PCHKERRQ(x,ierr);
   ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
   Array<PetscScalar> data(size);
   data.Assign(array);
   ierr = VecRestoreArray(vout,&array); PCHKERRQ(x,ierr);
   ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
   Vector *v = new Vector(data, internal::to_int(size));
   v->MakeDataOwner();
   data.LoseData();
   return v;
}

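// Usage sketch (illustrative): GlobalVector() gathers all entries on every
// rank; the caller owns the returned serial Vector.
//
//    mfem::Vector *g = v.GlobalVector();
//    double first = (*g)(0); // same value on every rank
//    delete g;
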
PetscParVector& PetscParVector::operator=(PetscScalar d)
{
   ierr = VecSet(x,d); PCHKERRQ(x,ierr);
   return *this;
}

PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
                                          const Array<PetscScalar>& vals)
{
   MFEM_VERIFY(idx.Size() == vals.Size(),
               "Size mismatch between indices and values");
   PetscInt n = idx.Size();
   ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
   PCHKERRQ(x,ierr);
   ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
   ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
   return *this;
}

PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
                                          const Array<PetscScalar>& vals)
{
   MFEM_VERIFY(idx.Size() == vals.Size(),
               "Size mismatch between indices and values");
   PetscInt n = idx.Size();
   ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
   PCHKERRQ(x,ierr);
   ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
   ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
   return *this;
}

PetscParVector& PetscParVector::operator=(const PetscParVector &y)
{
   ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
   return *this;
}

PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
{
   ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
   return *this;
}

PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
{
   ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
   return *this;
}

PetscParVector& PetscParVector::operator*=(PetscScalar s)
{
   ierr = VecScale(x,s); PCHKERRQ(x,ierr);
   return *this;
}

void PetscParVector::PlaceArray(PetscScalar *temp_data)
{
   ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
}

void PetscParVector::ResetArray()
{
   ierr = VecResetArray(x); PCHKERRQ(x,ierr);
}

void PetscParVector::Randomize(PetscInt seed)
{
   PetscRandom rctx = NULL;

   if (seed)
   {
      ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
      PCHKERRQ(x,ierr);
      ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
      ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
   }
   ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
   ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
}

void PetscParVector::Print(const char *fname, bool binary) const
{
   if (fname)
   {
      PetscViewer view;

      if (binary)
      {
         ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
                                      FILE_MODE_WRITE,&view);
      }
      else
      {
         ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
      }
      PCHKERRQ(x,ierr);
      ierr = VecView(x,view); PCHKERRQ(x,ierr);
      ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
   }
   else
   {
      ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
   }
}

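// Usage sketch for the vector interface above (illustrative; the global size
// 100 is arbitrary and the default column distribution is assumed):
//
//    mfem::PetscParVector v(MPI_COMM_WORLD, 100);
//    v = 1.0;                  // VecSet
//    v.Randomize(42);          // seeded VecSetRandom
//    v.Print("v.dat", false);  // ASCII VecView to the file "v.dat"
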
// PetscParMatrix methods

PetscInt PetscParMatrix::GetRowStart() const
{
   PetscInt N;
   ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::GetColStart() const
{
   PetscInt N;
   ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::GetNumRows() const
{
   PetscInt N;
   ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::GetNumCols() const
{
   PetscInt N;
   ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::M() const
{
   PetscInt N;
   ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::N() const
{
   PetscInt N;
   ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::NNZ() const
{
   MatInfo info;
   ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
   return (PetscInt)info.nz_used;
}

void PetscParMatrix::Init()
{
   A = NULL;
   X = Y = NULL;
   height = width = 0;
}

PetscParMatrix::PetscParMatrix()
{
   Init();
}

PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
                               const mfem::Array<PetscInt>& rows,
                               const mfem::Array<PetscInt>& cols)
{
   Init();

   Mat B = const_cast<PetscParMatrix&>(pB);

   IS isr,isc;
   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
                          rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
                          cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
   ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
   ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
   ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);

   height = GetNumRows();
   width = GetNumCols();
}

PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
{
   Init();
   height = pa->Height();
   width = pa->Width();
   ConvertOperator(pa->GetComm(),*pa,&A,tid);
}

PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
{
   Init();
   height = ha->Height();
   width = ha->Width();
   ConvertOperator(ha->GetComm(),*ha,&A,tid);
}

PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
{
   Init();
   height = sa->Height();
   width = sa->Width();
   ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
}

PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
                               Operator::Type tid)
{
   Init();
   height = op->Height();
   width = op->Width();
   ConvertOperator(comm,*op,&A,tid);
}

PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
                               PetscInt *row_starts, SparseMatrix *diag,
                               Operator::Type tid)
{
   Init();
   BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
                            tid==PETSC_MATAIJ,&A);
   // update base class
   height = GetNumRows();
   width = GetNumCols();
}

PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
                               PetscInt global_num_cols, PetscInt *row_starts,
                               PetscInt *col_starts, SparseMatrix *diag,
                               Operator::Type tid)
{
   Init();
   BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
                            tid==PETSC_MATAIJ,&A);
   // update base class
   height = GetNumRows();
   width = GetNumCols();
}

PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
{
   if (A)
   {
      MPI_Comm comm = PetscObjectComm((PetscObject)A);
      ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
      if (X) { delete X; }
      if (Y) { delete Y; }
      X = Y = NULL;
   }
   height = B.Height();
   width = B.Width();
#if defined(PETSC_HAVE_HYPRE)
   ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
#else
   ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
#endif
   return *this;
}

PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
{
   if (A)
   {
      MPI_Comm comm = PetscObjectComm((PetscObject)A);
      ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
      if (X) { delete X; }
      if (Y) { delete Y; }
      X = Y = NULL;
   }
   height = B.Height();
   width = B.Width();
   ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
   return *this;
}

PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
{
   if (!A)
   {
      ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
   }
   else
   {
      MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
      MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
      ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
   }
   return *this;
}

PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
{
   if (!A)
   {
      ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
      ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
   }
   else
   {
      MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
      MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
      ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
   }
   return *this;
}

void PetscParMatrix::
BlockDiagonalConstructor(MPI_Comm comm,
                         PetscInt *row_starts, PetscInt *col_starts,
                         SparseMatrix *diag, bool assembled, Mat* Ad)
{
   Mat         A;
   PetscInt    lrsize,lcsize,rstart,cstart;
   PetscMPIInt myid = 0,commsize;

   ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
   if (!HYPRE_AssumedPartitionCheck())
   {
      ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
   }
   lrsize = row_starts[myid+1]-row_starts[myid];
   rstart = row_starts[myid];
   lcsize = col_starts[myid+1]-col_starts[myid];
   cstart = col_starts[myid];

   if (!assembled)
   {
      IS is;
      ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
      ISLocalToGlobalMapping rl2g,cl2g;
      ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
      ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
      if (row_starts != col_starts)
      {
         ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
         CCHKERRQ(comm,ierr);
         ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
         ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
      }
      else
      {
         ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
         cl2g = rl2g;
      }

      // Create the PETSc object (MATIS format)
      ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
      ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
      PCHKERRQ(A,ierr);
      ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
      ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
      ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
      ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)

      // Copy SparseMatrix into PETSc SeqAIJ format
      Mat lA;
      ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
      int *II = diag->GetI();
      int *JJ = diag->GetJ();
#if defined(PETSC_USE_64BIT_INDICES)
      PetscInt *pII,*pJJ;
      int m = diag->Height()+1, nnz = II[diag->Height()];
      ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
      for (int i = 0; i < m; i++) { pII[i] = II[i]; }
      for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
      ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
                                          diag->GetData()); PCHKERRQ(lA,ierr);
      ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
#else
      ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
                                          diag->GetData()); PCHKERRQ(lA,ierr);
#endif
   }
   else
   {
      PetscScalar *da;
      PetscInt    *dii,*djj,*oii,
                  m = diag->Height()+1, nnz = diag->NumNonZeroElems();

      diag->SortColumnIndices();
      // if we can take ownership of the SparseMatrix arrays, we can avoid this
      // step
      ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
      ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
      ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
      if (sizeof(PetscInt) == sizeof(int))
      {
         ierr = PetscMemcpy(dii,diag->GetI(),m*sizeof(PetscInt));
         CCHKERRQ(PETSC_COMM_SELF,ierr);
         ierr = PetscMemcpy(djj,diag->GetJ(),nnz*sizeof(PetscInt));
         CCHKERRQ(PETSC_COMM_SELF,ierr);
      }
      else
      {
         int *iii = diag->GetI();
         int *jjj = diag->GetJ();
         for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
         for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
      }
      ierr = PetscMemcpy(da,diag->GetData(),nnz*sizeof(PetscScalar));
      CCHKERRQ(PETSC_COMM_SELF,ierr);
      ierr = PetscCalloc1(m,&oii);
      CCHKERRQ(PETSC_COMM_SELF,ierr);
      if (commsize > 1)
      {
         ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
                                               PETSC_DECIDE,
                                               dii,djj,da,oii,NULL,NULL,&A);
         CCHKERRQ(comm,ierr);
      }
      else
      {
         ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
         CCHKERRQ(comm,ierr);
      }

      void *ptrs[4] = {dii,djj,da,oii};
      const char *names[4] = {"_mfem_csr_dii",
                              "_mfem_csr_djj",
                              "_mfem_csr_da",
                              "_mfem_csr_oii",
                             };
      for (PetscInt i=0; i<4; i++)
      {
         PetscContainer c;

         ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
         ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
         ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
         CCHKERRQ(comm,ierr);
         ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
         CCHKERRQ(comm,ierr);
         ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
      }
   }

   // Tell PETSc the matrix is ready to be used
   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);

   *Ad = A;
}

MPI_Comm PetscParMatrix::GetComm() const
{
   return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
}

// TODO ADD THIS CONSTRUCTOR
//PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
//                               PetscInt glob_ncols, int *I, PetscInt *J,
//                               double *data, PetscInt *rows, PetscInt *cols)
//{
//}

// TODO This should take a reference on op but how?
void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
{
   __mfem_mat_shell_ctx *ctx = new __mfem_mat_shell_ctx;
   ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
   ierr = MatSetSizes(*A,op->Height(),op->Width(),
                      PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
   ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
   ierr = MatShellSetContext(*A,(void *)ctx); PCHKERRQ(A,ierr);
   ierr = MatShellSetOperation(*A,MATOP_MULT,
                               (void (*)())__mfem_mat_shell_apply);
   PCHKERRQ(A,ierr);
   ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
                               (void (*)())__mfem_mat_shell_apply_transpose);
   PCHKERRQ(A,ierr);
   ierr = MatShellSetOperation(*A,MATOP_DESTROY,
                               (void (*)())__mfem_mat_shell_destroy);
   PCHKERRQ(A,ierr);
   ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
   ctx->op = const_cast<Operator *>(op);
}

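// Usage sketch (illustrative): MakeWrapper() is what lets any mfem::Operator
// act as a matrix-free PETSc MATSHELL, typically through the conversion
// constructor defined above, e.g.
//
//    mfem::IdentityOperator op(100); // any mfem::Operator works here
//    mfem::PetscParMatrix W(MPI_COMM_WORLD, &op,
//                           mfem::Operator::PETSC_MATSHELL);
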
void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
                                     Operator::Type tid)
{
   PetscParMatrix   *pA = const_cast<PetscParMatrix *>
                          (dynamic_cast<const PetscParMatrix *>(&op));
   HypreParMatrix   *pH = const_cast<HypreParMatrix *>
                          (dynamic_cast<const HypreParMatrix *>(&op));
   BlockOperator    *pB = const_cast<BlockOperator *>
                          (dynamic_cast<const BlockOperator *>(&op));
   IdentityOperator *pI = const_cast<IdentityOperator *>
                          (dynamic_cast<const IdentityOperator *>(&op));
   SparseMatrix     *pS = const_cast<SparseMatrix *>
                          (dynamic_cast<const SparseMatrix *>(&op));

   if (pA && tid == ANY_TYPE) // use same object and return
   {
      ierr = PetscObjectReference((PetscObject)(pA->A));
      CCHKERRQ(pA->GetComm(),ierr);
      *A = pA->A;
      return;
   }

   PetscBool avoidmatconvert = PETSC_FALSE;
   if (pA) // we test for these types since MatConvert will fail
   {
      ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
                                       MATSHELL,"");
      CCHKERRQ(comm,ierr);
   }
   if (pA && !avoidmatconvert)
   {
      Mat       At = NULL;
      PetscBool istrans;
#if PETSC_VERSION_LT(3,10,0)
      PetscBool ismatis;
#endif

      ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
      CCHKERRQ(pA->GetComm(),ierr);
      if (!istrans)
      {
         if (tid == pA->GetType()) // use same object and return
         {
            ierr = PetscObjectReference((PetscObject)(pA->A));
            CCHKERRQ(pA->GetComm(),ierr);
            *A = pA->A;
            return;
         }
#if PETSC_VERSION_LT(3,10,0)
         ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
         CCHKERRQ(pA->GetComm(),ierr);
#endif
      }
      else
      {
         ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
#if PETSC_VERSION_LT(3,10,0)
         ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
#endif
         CCHKERRQ(pA->GetComm(),ierr);
      }

      // Try to convert
      if (tid == PETSC_MATAIJ)
      {
#if PETSC_VERSION_LT(3,10,0)
         if (ismatis)
         {
            if (istrans)
            {
               Mat B;

               ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
               ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
               ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
            }
            else
            {
               ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
               PCHKERRQ(pA->A,ierr);
            }
         }
         else
#endif
         {
            PetscMPIInt size;
            ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);

            // call MatConvert and see if a converter is available
            if (istrans)
            {
               Mat B;
               ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
               PCHKERRQ(pA->A,ierr);
               ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
               ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
            }
            else
            {
               ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
               PCHKERRQ(pA->A,ierr);
            }
         }
      }
      else if (tid == PETSC_MATIS)
      {
         if (istrans)
         {
            Mat B;
            ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
            ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
            ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
         }
         else
         {
            ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
         }
      }
      else if (tid == PETSC_MATHYPRE)
      {
#if defined(PETSC_HAVE_HYPRE)
         if (istrans)
         {
            Mat B;
            ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
            ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
            ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
         }
         else
         {
            ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
         }
#else
         MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
#endif
      }
      else if (tid == PETSC_MATSHELL)
      {
         MakeWrapper(comm,&op,A);
      }
      else
      {
         MFEM_ABORT("Unsupported operator type conversion " << tid)
      }
   }
   else if (pH)
   {
      if (tid == PETSC_MATAIJ)
      {
#if defined(PETSC_HAVE_HYPRE)
         ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
                                    PETSC_USE_POINTER,A);
#else
         ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
#endif
         CCHKERRQ(pH->GetComm(),ierr);
      }
      else if (tid == PETSC_MATIS)
      {
#if defined(PETSC_HAVE_HYPRE)
         ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
                                    PETSC_USE_POINTER,A);
#else
         ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
#endif
         CCHKERRQ(pH->GetComm(),ierr);
      }
      else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
      {
#if defined(PETSC_HAVE_HYPRE)
         ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
                                    PETSC_USE_POINTER,A);
         CCHKERRQ(pH->GetComm(),ierr);
#else
         MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
#endif
      }
      else if (tid == PETSC_MATSHELL)
      {
         MakeWrapper(comm,&op,A);
      }
      else
      {
         MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
                    " is not implemented");
      }
   }
   else if (pB)
   {
      Mat      *mats,*matsl2l = NULL;
      PetscInt i,j,nr,nc;

      nr = pB->NumRowBlocks();
      nc = pB->NumColBlocks();
      ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
      if (tid == PETSC_MATIS)
      {
         ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
      }
      for (i=0; i<nr; i++)
      {
         PetscBool needl2l = PETSC_TRUE;

         for (j=0; j<nc; j++)
         {
            if (!pB->IsZeroBlock(i,j))
            {
               ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
               if (tid == PETSC_MATIS && needl2l)
               {
                  PetscContainer c;
                  ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
                                          (PetscObject*)&c);
                  PCHKERRQ(mats[i*nc+j],ierr);
                  // special case for block operators: the local Vdofs should be
                  // ordered as:
                  // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
                  // with m fields, Ni the number of Vdofs for the i-th field
                  if (c)
                  {
                     Array<Mat> *l2l = NULL;
                     ierr = PetscContainerGetPointer(c,(void**)&l2l);
                     PCHKERRQ(c,ierr);
                     MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
                                 << l2l->Size() << " for block row " << i );
                     ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
                     PCHKERRQ(c,ierr);
                     matsl2l[i] = (*l2l)[0];
                     needl2l = PETSC_FALSE;
                  }
               }
            }
         }
      }
      ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
      if (tid == PETSC_MATIS)
      {
         ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);

         mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
         for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
         ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);

         PetscContainer c;
         ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
         ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
         ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
         PCHKERRQ(c,ierr);
         ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
         PCHKERRQ((*A),ierr);
         ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
      }
      for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
      ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
   }
   else if (pI && tid == PETSC_MATAIJ)
   {
      PetscInt rst;

      ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
      ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
      PCHKERRQ(A,ierr);
      ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
      ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
      ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
      ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
      ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
      for (PetscInt i = rst; i < rst+pI->Height(); i++)
      {
         ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
      }
      ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
      ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
   }
   else if (pS)
   {
      if (tid == PETSC_MATSHELL)
      {
         MakeWrapper(comm,&op,A);
      }
      else
      {
         Mat B;
         PetscScalar *pdata;
         PetscInt *pii,*pjj;

         int m = pS->Height();
         int n = pS->Width();
         int *ii = pS->GetI();
         int *jj = pS->GetJ();
         double *data = pS->GetData();
         bool issorted = pS->areColumnsSorted();

         ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(comm,ierr);
         ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(comm,ierr);
         ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(comm,ierr);
         pii[0] = ii[0];
         for (int i = 0; i < m; i++)
         {
            pii[i+1] = ii[i+1];
            for (int j = ii[i]; j < ii[i+1]; j++)
            {
               pjj[j] = jj[j];
               pdata[j] = data[j];
            }
            if (!issorted)
            {
               ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj,pdata);
               CCHKERRQ(comm,ierr);
            }
         }

         ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,pii,pjj,pdata,&B);
         CCHKERRQ(comm,ierr);

         void *ptrs[3] = {pii,pjj,pdata};
         const char *names[3] = {"_mfem_csr_pii",
                                 "_mfem_csr_pjj",
                                 "_mfem_csr_pdata",
                                };
         for (int i=0; i<3; i++)
         {
            PetscContainer c;

            ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
            ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
            ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
            PCHKERRQ(B,ierr);
            ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
            PCHKERRQ(B,ierr);
            ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
         }
         if (tid == PETSC_MATAIJ)
         {
            *A = B;
         }
         else if (tid == PETSC_MATHYPRE)
         {
            ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
            ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
         }
         else if (tid == PETSC_MATIS)
         {
            ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
            ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
         }
         else
         {
            MFEM_ABORT("Unsupported operator type conversion " << tid)
         }
      }
   }
   else // fallback to general operator
   {
      MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
                  "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
      MakeWrapper(comm,&op,A);
      if (tid == PETSC_MATAIJ)
      {
         Mat B;
         PetscBool isaij;

         ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
         ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
         CCHKERRQ(comm,ierr);
         ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
         if (!isaij)
         {
            ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
            ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
         }
         else
         {
            *A = B;
         }
      }
   }
}

void PetscParMatrix::Destroy()
{
   if (A != NULL)
   {
      MPI_Comm comm = MPI_COMM_NULL;
      ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
      ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
   }
   delete X;
   delete Y;
   X = Y = NULL;
}

PetscParMatrix::PetscParMatrix(Mat a, bool ref)
{
   if (ref)
   {
      ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
   }
   Init();
   A = a;
   height = GetNumRows();
   width = GetNumCols();
}

void PetscParMatrix::SetMat(Mat _A)
{
   if (_A == A) { return; }
   Destroy();
   ierr = PetscObjectReference((PetscObject)_A); PCHKERRQ(_A,ierr);
   A = _A;
   height = GetNumRows();
   width = GetNumCols();
}

// Computes y = alpha * A   * x + beta * y
// or       y = alpha * A^T * x + beta * y
static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
                          bool transpose)
{
   PetscErrorCode (*f)(Mat,Vec,Vec);
   PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
   if (transpose)
   {
      f = MatMultTranspose;
      fadd = MatMultTransposeAdd;
   }
   else
   {
      f = MatMult;
      fadd = MatMultAdd;
   }
   if (a != 0.)
   {
      if (b == 1.)
      {
         ierr = VecScale(X,a); PCHKERRQ(A,ierr);
         ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
         ierr = VecScale(X,1./a); PCHKERRQ(A,ierr);
      }
      else if (b != 0.)
      {
         ierr = VecScale(X,a); PCHKERRQ(A,ierr);
         ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
         ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
         ierr = VecScale(X,1./a); PCHKERRQ(A,ierr);
      }
      else
      {
         ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
         if (a != 1.)
         {
            ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
         }
      }
   }
   else
   {
      if (b == 1.)
      {
         // do nothing
      }
      else if (b != 0.)
      {
         ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
      }
      else
      {
         ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
      }
   }
}

void PetscParMatrix::MakeRef(const PetscParMatrix &master)
{
   ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
   Destroy();
   Init();
   A = master.A;
   height = master.height;
   width = master.width;
}

PetscParVector * PetscParMatrix::GetX() const
{
   if (!X)
   {
      MFEM_VERIFY(A,"Mat not present");
      X = new PetscParVector(*this,false); PCHKERRQ(A,ierr);
   }
   return X;
}

PetscParVector * PetscParMatrix::GetY() const
{
   if (!Y)
   {
      MFEM_VERIFY(A,"Mat not present");
      Y = new PetscParVector(*this,true); PCHKERRQ(A,ierr);
   }
   return Y;
}

PetscParMatrix * PetscParMatrix::Transpose(bool action)
{
   Mat B;
   if (action)
   {
      ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
   }
   else
   {
      ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
   }
   return new PetscParMatrix(B,false);
}

void PetscParMatrix::operator*=(double s)
{
   ierr = MatScale(A,s); PCHKERRQ(A,ierr);
}

void PetscParMatrix::Mult(double a, const Vector &x, double b, Vector &y) const
{
   MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
               << ", expected size = " << Width());
   MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
               << ", expected size = " << Height());

   PetscParVector *XX = GetX();
   PetscParVector *YY = GetY();
   XX->PlaceArray(x.GetData());
   YY->PlaceArray(y.GetData());
   MatMultKernel(A,a,XX->x,b,YY->x,false);
   XX->ResetArray();
   YY->ResetArray();
}

void PetscParMatrix::MultTranspose(double a, const Vector &x, double b,
                                   Vector &y) const
{
   MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
               << ", expected size = " << Height());
   MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
               << ", expected size = " << Width());

   PetscParVector *XX = GetX();
   PetscParVector *YY = GetY();
   YY->PlaceArray(x.GetData());
   XX->PlaceArray(y.GetData());
   MatMultKernel(A,a,YY->x,b,XX->x,true);
   XX->ResetArray();
   YY->ResetArray();
}

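// Usage sketch (illustrative): with the kernel above, a Mult() call computes
// y = a*A*x + b*y without extra vector copies, e.g.
//
//    A.Mult(2.0, x, 3.0, y); // y <- 2*A*x + 3*y
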
void PetscParMatrix::Print(const char *fname, bool binary) const
{
   if (fname)
   {
      PetscViewer view;

      if (binary)
      {
         ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
                                      FILE_MODE_WRITE,&view);
      }
      else
      {
         ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
      }
      PCHKERRQ(A,ierr);
      ierr = MatView(A,view); PCHKERRQ(A,ierr);
      ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
   }
   else
   {
      ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
   }
}

void PetscParMatrix::ScaleRows(const Vector & s)
{
   MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
               << ", expected size = " << Height());

   PetscParVector *YY = GetY();
   YY->PlaceArray(s.GetData());
   ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
   YY->ResetArray();
}

void PetscParMatrix::ScaleCols(const Vector & s)
{
   MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
               << ", expected size = " << Width());

   PetscParVector *XX = GetX();
   XX->PlaceArray(s.GetData());
   ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
   XX->ResetArray();
}

void PetscParMatrix::Shift(double s)
{
   ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
}

void PetscParMatrix::Shift(const Vector & s)
{
   // for matrices with square diagonal blocks only
   MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
               << ", expected size = " << Height());
   MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
               << ", expected size = " << Width());

   PetscParVector *XX = GetX();
   XX->PlaceArray(s.GetData());
   ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
   XX->ResetArray();
}

PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
{
   Mat       pA = *A,pP = *P,pRt = *Rt;
   Mat       B;
   PetscBool Aismatis,Pismatis,Rtismatis;

   MFEM_VERIFY(A->Width() == P->Height(),
               "Petsc RAP: Number of local cols of A " << A->Width() <<
               " differs from number of local rows of P " << P->Height());
   MFEM_VERIFY(A->Height() == Rt->Height(),
               "Petsc RAP: Number of local rows of A " << A->Height() <<
               " differs from number of local rows of Rt " << Rt->Height());
   ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
   PCHKERRQ(pA,ierr);
   ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
   PCHKERRQ(pA,ierr);
   ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
   PCHKERRQ(pA,ierr);
   if (Aismatis &&
       Pismatis &&
       Rtismatis) // handle special case (this code will eventually go into PETSc)
   {
      Mat                    lA,lP,lB,lRt;
      ISLocalToGlobalMapping cl2gP,cl2gRt;
      PetscInt               rlsize,clsize,rsize,csize;

      ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
      ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
      ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
      ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
      ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
      ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
      ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
      ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
      ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
      ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
      ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
      ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
      ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
      if (lRt == lP)
      {
         ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
         PCHKERRQ(lA,ierr);
      }
      else
      {
         Mat lR;
         ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
         ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
         PCHKERRQ(lRt,ierr);
         ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
      }

      // attach lRt matrix to the subdomain local matrix
      // it may be used if markers on vdofs have to be mapped on
      // subdomain true dofs
      {
         mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
         ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
         (*vmatsl2l)[0] = lRt;

         PetscContainer c;
         ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
         PCHKERRQ(B,ierr);
         ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
         ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
         PCHKERRQ(c,ierr);
         ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
         PCHKERRQ(B,ierr);
         ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
      }

      // Set local problem
      ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
      ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
      ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
      ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
   }
   else // it raises an error if the PtAP is not supported in PETSc
   {
      if (pP == pRt)
      {
         ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
         PCHKERRQ(pA,ierr);
      }
      else
      {
         Mat pR;
         ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
         ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
         PCHKERRQ(pRt,ierr);
         ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
      }
   }
   return new PetscParMatrix(B);
}

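// Usage sketch (illustrative): Galerkin triple products via the helpers above.
//
//    mfem::PetscParMatrix *Ac = mfem::RAP(&A, &P);       // Ac = P^T A P
//    mfem::PetscParMatrix *Bc = mfem::RAP(&Rt, &A, &P);  // Bc = R A P, R = Rt^T
//    delete Ac;
//    delete Bc;
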
PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
{
   PetscParMatrix *out = RAP(P,A,P);
   return out;
}

PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
{
   PetscParMatrix *out,*A;
#if defined(PETSC_HAVE_HYPRE)
   A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
#else
   A = new PetscParMatrix(hA);
#endif
   out = RAP(P,A,P);
   delete A;
   return out;
}

PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
{
   Mat AB;

   ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
   CCHKERRQ(A->GetComm(),ierr);
   return new PetscParMatrix(AB);
}

PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
{
   Mat Ae;

   PetscParVector dummy(GetComm(),0);
   ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
   EliminateRowsCols(rows_cols,dummy,dummy);
   ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
   return new PetscParMatrix(Ae);
}

void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
                                       const HypreParVector &X,
                                       HypreParVector &B,
                                       double diag)
{
   MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
}

void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
                                       const PetscParVector &X,
                                       PetscParVector &B,
                                       double diag)
{
   PetscInt M,N;
   ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
   MFEM_VERIFY(M == N,"Rectangular case unsupported");

   // TODO: what if a diagonal term is not present?
   ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);

   // rows need to be in global numbering
   PetscInt rst;
   ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);

   IS dir;
   ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
   if (!X.GlobalSize() && !B.GlobalSize())
   {
      ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
   }
   else
   {
      ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
   }
   ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
}

void PetscParMatrix::EliminateRows(const Array<int> &rows)
{
   ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);

   // rows need to be in global numbering
   PetscInt rst;
   ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);

   IS dir;
   ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
   ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
   ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
}

Mat PetscParMatrix::ReleaseMat(bool dereference)
{
   Mat B = A;
   if (dereference)
   {
      MPI_Comm comm = GetComm();
      ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
   }
   A = NULL;
   return B;
}

Operator::Type PetscParMatrix::GetType() const
{
   PetscBool ok;
   MFEM_VERIFY(A, "no associated PETSc Mat object");
   PetscObject oA = (PetscObject)(this->A);
   // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to PETSC_MATAIJ
   ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
   if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
   ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
   if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
   ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
   if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
   ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
   if (ok == PETSC_TRUE) { return PETSC_MATIS; }
   ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
   if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
   ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
   if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
#if defined(PETSC_HAVE_HYPRE)
   ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
   if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
#endif
   return PETSC_MATGENERIC;
}

void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
                 const Array<int> &ess_dof_list,
                 const Vector &X, Vector &B)
{
   const PetscScalar *array;
   Mat pA = const_cast<PetscParMatrix&>(A);

   // B -= Ae*X
   Ae.Mult(-1.0, X, 1.0, B);

   Vec diag = const_cast<PetscParVector&>((*A.GetX()));
   ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
   ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
   for (int i = 0; i < ess_dof_list.Size(); i++)
   {
      int r = ess_dof_list[i];
      B(r) = array[r] * X(r);
   }
   ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
}

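// Usage sketch (illustrative; ess_tdof_list, X and B are caller-provided):
// the elimination methods above mirror the HypreParMatrix workflow for
// essential boundary conditions.
//
//    mfem::PetscParMatrix *Ae = A.EliminateRowsCols(ess_tdof_list);
//    mfem::EliminateBC(A, *Ae, ess_tdof_list, X, B);
//    delete Ae;
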
// PetscSolver methods

PetscSolver::PetscSolver() : clcustom(false)
{
   obj = NULL;
   B = X = NULL;
   cid = -1;
   operatorset = false;
   bchandler = NULL;
   private_ctx = NULL;
}

PetscSolver::~PetscSolver()
{
   delete B;
   delete X;
   FreePrivateContext();
}

void PetscSolver::SetTol(double tol)
{
   SetRelTol(tol);
}

void PetscSolver::SetRelTol(double tol)
{
   if (cid == KSP_CLASSID)
   {
      KSP ksp = (KSP)obj;
      ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
   }
   else if (cid == SNES_CLASSID)
   {
      SNES snes = (SNES)obj;
      ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
                               PETSC_DEFAULT);
   }
   else if (cid == TS_CLASSID)
   {
      TS ts = (TS)obj;
      ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
   }
   else
   {
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   }
   PCHKERRQ(obj,ierr);
}

void PetscSolver::SetAbsTol(double tol)
{
   if (cid == KSP_CLASSID)
   {
      KSP ksp = (KSP)obj;
      ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
   }
   else if (cid == SNES_CLASSID)
   {
      SNES snes = (SNES)obj;
      ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
                               PETSC_DEFAULT);
   }
   else if (cid == TS_CLASSID)
   {
      TS ts = (TS)obj;
      ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
   }
   else
   {
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   }
   PCHKERRQ(obj,ierr);
}

void PetscSolver::SetMaxIter(int max_iter)
{
   if (cid == KSP_CLASSID)
   {
      KSP ksp = (KSP)obj;
      ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
                              max_iter);
   }
   else if (cid == SNES_CLASSID)
   {
      SNES snes = (SNES)obj;
      ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
                               max_iter,PETSC_DEFAULT);
   }
   else if (cid == TS_CLASSID)
   {
      TS ts = (TS)obj;
      ierr = TSSetMaxSteps(ts,max_iter);
   }
   else
   {
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   }
   PCHKERRQ(obj,ierr);
}

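// Usage sketch (illustrative): the setters above dispatch on the wrapped
// PETSc class id, so the same calls work for KSP-, SNES- and TS-based solvers.
//
//    mfem::PetscLinearSolver solver(MPI_COMM_WORLD);
//    solver.SetRelTol(1e-8);
//    solver.SetAbsTol(1e-12);
//    solver.SetMaxIter(200);
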
void PetscSolver::SetPrintLevel(int plev)
{
   typedef PetscErrorCode (*myPetscFunc)(void**);
   PetscViewerAndFormat *vf = NULL;
   PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));

   if (plev > 0)
   {
      ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
      PCHKERRQ(obj,ierr);
   }
   if (cid == KSP_CLASSID)
   {
      // there are many other options, see the function KSPSetFromOptions() in
      // src/ksp/ksp/interface/itcl.c
      typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
      KSP ksp = (KSP)obj;
      if (plev >= 0)
      {
         ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
      }
      if (plev == 1)
      {
         ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
                              (myPetscFunc)PetscViewerAndFormatDestroy);
         PCHKERRQ(ksp,ierr);
      }
      else if (plev > 1)
      {
         ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
         ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
                              (myPetscFunc)PetscViewerAndFormatDestroy);
         PCHKERRQ(ksp,ierr);
         if (plev > 2)
         {
            ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
            PCHKERRQ(viewer,ierr);
            ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
                                 (myPetscFunc)PetscViewerAndFormatDestroy);
            PCHKERRQ(ksp,ierr);
         }
      }
   }
   else if (cid == SNES_CLASSID)
   {
      typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
      SNES snes = (SNES)obj;
      if (plev >= 0)
      {
         ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
      }
      if (plev > 0)
      {
         ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
                               (myPetscFunc)PetscViewerAndFormatDestroy);
         PCHKERRQ(snes,ierr);
      }
   }
   else if (cid == TS_CLASSID)
   {
      TS ts = (TS)obj;
      if (plev >= 0)
      {
         ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
      }
   }
   else
   {
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   }
}

MPI_Comm PetscSolver::GetComm() const
{
   return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
}

void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
{
   __mfem_monitor_ctx *monctx;
   ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
   monctx->solver = this;
   monctx->monitor = ctx;
   if (cid == KSP_CLASSID)
   {
      ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
                           __mfem_monitor_ctx_destroy);
      PCHKERRQ(obj,ierr);
   }
   else if (cid == SNES_CLASSID)
   {
      ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
                            __mfem_monitor_ctx_destroy);
      PCHKERRQ(obj,ierr);
   }
   else if (cid == TS_CLASSID)
   {
      ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
                          __mfem_monitor_ctx_destroy);
      PCHKERRQ(obj,ierr);
   }
   else
   {
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   }
}

void PetscSolver::SetBCHandler(PetscBCHandler *bch)
{
   bchandler = bch;
   if (cid == SNES_CLASSID)
   {
      __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
      snes_ctx->bchandler = bchandler;
   }
   else if (cid == TS_CLASSID)
   {
      __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
      ts_ctx->bchandler = bchandler;
   }
   else
   {
      MFEM_ABORT("Handling of essential bc only implemented for nonlinear and "
                 "time-dependent solvers");
   }
}

void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
{
   PC pc = NULL;
   if (cid == TS_CLASSID)
   {
      SNES snes;
      KSP  ksp;

      ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
      ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
      ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
   }
   else if (cid == SNES_CLASSID)
   {
      KSP ksp;

      ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
      ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
   }
   else if (cid == KSP_CLASSID)
   {
      ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
   }
   else if (cid == PC_CLASSID)
   {
      pc = (PC)obj;
   }
   else
   {
      MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
   }
   if (factory)
   {
      ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
   }
   else
   {
      ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
   }
}

1989 void PetscSolver::Customize(bool customize) const
1990 {
1991  if (!customize) { clcustom = true; }
1992  if (!clcustom)
1993  {
1994  if (cid == PC_CLASSID)
1995  {
1996  PC pc = (PC)obj;
1997  ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
1998  }
1999  else if (cid == KSP_CLASSID)
2000  {
2001  KSP ksp = (KSP)obj;
2002  ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2003  }
2004  else if (cid == SNES_CLASSID)
2005  {
2006  SNES snes = (SNES)obj;
2007  ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2008  }
2009  else if (cid == TS_CLASSID)
2010  {
2011  TS ts = (TS)obj;
2012  ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2013  }
2014  else
2015  {
2016  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2017  }
2018  }
2019  clcustom = true;
2020 }
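
// Editor's note: since Customize() defers to the PETSc *SetFromOptions()
// routines, any option qualified by the prefix passed at construction is
// honored before the first solve, e.g. with a hypothetical prefix "solver_":
//
//    ./app -solver_ksp_type gmres -solver_ksp_rtol 1e-10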
2021 
2022 int PetscSolver::GetConverged()
2023 {
2024  if (cid == KSP_CLASSID)
2025  {
2026  KSP ksp = (KSP)obj;
2027  KSPConvergedReason reason;
2028  ierr = KSPGetConvergedReason(ksp,&reason);
2029  PCHKERRQ(ksp,ierr);
2030  return reason > 0 ? 1 : 0;
2031  }
2032  else if (cid == SNES_CLASSID)
2033  {
2034  SNES snes = (SNES)obj;
2035  SNESConvergedReason reason;
2036  ierr = SNESGetConvergedReason(snes,&reason);
2037  PCHKERRQ(snes,ierr);
2038  return reason > 0 ? 1 : 0;
2039  }
2040  else if (cid == TS_CLASSID)
2041  {
2042  TS ts = (TS)obj;
2043  TSConvergedReason reason;
2044  ierr = TSGetConvergedReason(ts,&reason);
2045  PCHKERRQ(ts,ierr);
2046  return reason > 0 ? 1 : 0;
2047  }
2048  else
2049  {
2050  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2051  return -1;
2052  }
2053 }
2054 
2055 int PetscSolver::GetNumIterations()
2056 {
2057  if (cid == KSP_CLASSID)
2058  {
2059  KSP ksp = (KSP)obj;
2060  PetscInt its;
2061  ierr = KSPGetIterationNumber(ksp,&its);
2062  PCHKERRQ(ksp,ierr);
2063  return its;
2064  }
2065  else if (cid == SNES_CLASSID)
2066  {
2067  SNES snes = (SNES)obj;
2068  PetscInt its;
2069  ierr = SNESGetIterationNumber(snes,&its);
2070  PCHKERRQ(snes,ierr);
2071  return its;
2072  }
2073  else if (cid == TS_CLASSID)
2074  {
2075  TS ts = (TS)obj;
2076  PetscInt its;
2077  ierr = TSGetStepNumber(ts,&its);
2078  PCHKERRQ(ts,ierr);
2079  return its;
2080  }
2081  else
2082  {
2083  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2084  return -1;
2085  }
2086 }
2087 
2088 double PetscSolver::GetFinalNorm()
2089 {
2090  if (cid == KSP_CLASSID)
2091  {
2092  KSP ksp = (KSP)obj;
2093  PetscReal norm;
2094  ierr = KSPGetResidualNorm(ksp,&norm);
2095  PCHKERRQ(ksp,ierr);
2096  return norm;
2097  }
2098  if (cid == SNES_CLASSID)
2099  {
2100  SNES snes = (SNES)obj;
2101  PetscReal norm;
2102  ierr = SNESGetFunctionNorm(snes,&norm);
2103  PCHKERRQ(snes,ierr);
2104  return norm;
2105  }
2106  else
2107  {
2108  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2109  return PETSC_MAX_REAL;
2110  }
2111 }
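
// Editor's sketch of a typical post-solve query (solver, B and X are
// assumed to be set up elsewhere):
//
//    solver.Mult(B, X);
//    if (solver.GetConverged())
//    {
//       mfem::out << "converged in " << solver.GetNumIterations()
//                 << " iterations, final norm " << solver.GetFinalNorm() << '\n';
//    }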
2112 
2113 void PetscSolver::CreatePrivateContext()
2114 {
2115  FreePrivateContext();
2116  if (cid == SNES_CLASSID)
2117  {
2118  __mfem_snes_ctx *snes_ctx;
2119  ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2120  snes_ctx->op = NULL;
2121  snes_ctx->bchandler = NULL;
2122  snes_ctx->work = NULL;
2123  snes_ctx->jacType = Operator::PETSC_MATAIJ;
2124  private_ctx = (void*) snes_ctx;
2125  }
2126  else if (cid == TS_CLASSID)
2127  {
2128  __mfem_ts_ctx *ts_ctx;
2129  ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2130  ts_ctx->op = NULL;
2131  ts_ctx->bchandler = NULL;
2132  ts_ctx->work = NULL;
2133  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2134  ts_ctx->cached_ijacstate = -1;
2135  ts_ctx->cached_rhsjacstate = -1;
2136  ts_ctx->cached_splits_xstate = -1;
2137  ts_ctx->cached_splits_xdotstate = -1;
2138  ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2139  ts_ctx->jacType = Operator::PETSC_MATAIJ;
2140  private_ctx = (void*) ts_ctx;
2141  }
2142 }
2143 
2144 void PetscSolver::FreePrivateContext()
2145 {
2146  if (!private_ctx) { return; }
2147  // free private context's owned objects
2148  if (cid == SNES_CLASSID)
2149  {
2150  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2151  delete snes_ctx->work;
2152  }
2153  else if (cid == TS_CLASSID)
2154  {
2155  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2156  delete ts_ctx->work;
2157  }
2158  ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2159 }
2160 
2161 // PetscBCHandler methods
2162 
2163 PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2164  enum PetscBCHandler::Type _type)
2165  : bctype(_type), setup(false), eval_t(0.0),
2166  eval_t_cached(std::numeric_limits<double>::min())
2167 {
2168  SetTDofs(ess_tdof_list);
2169 }
2170 
2171 void PetscBCHandler::SetTDofs(Array<int>& list)
2172 {
2173  ess_tdof_list.SetSize(list.Size());
2174  ess_tdof_list.Assign(list);
2175  setup = false;
2176 }
2177 
2178 void PetscBCHandler::SetUp(PetscInt n)
2179 {
2180  if (setup) { return; }
2181  if (bctype == CONSTANT)
2182  {
2183  eval_g.SetSize(n);
2184  this->Eval(eval_t,eval_g);
2185  eval_t_cached = eval_t;
2186  }
2187  else if (bctype == TIME_DEPENDENT)
2188  {
2189  eval_g.SetSize(n);
2190  }
2191  setup = true;
2192 }
2193 
2194 void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2195 {
2196  (*this).SetUp(x.Size());
2197  y = x;
2198  if (bctype == ZERO)
2199  {
2200  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2201  {
2202  y[ess_tdof_list[i]] = 0.0;
2203  }
2204  }
2205  else
2206  {
2207  if (bctype != CONSTANT && eval_t != eval_t_cached)
2208  {
2209  Eval(eval_t,eval_g);
2210  eval_t_cached = eval_t;
2211  }
2212  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2213  {
2214  y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2215  }
2216  }
2217 }
2218 
2219 void PetscBCHandler::ApplyBC(Vector &x)
2220 {
2221  (*this).SetUp(x.Size());
2222  if (bctype == ZERO)
2223  {
2224  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2225  {
2226  x[ess_tdof_list[i]] = 0.0;
2227  }
2228  }
2229  else
2230  {
2231  if (bctype != CONSTANT && eval_t != eval_t_cached)
2232  {
2233  Eval(eval_t,eval_g);
2234  eval_t_cached = eval_t;
2235  }
2236  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2237  {
2238  x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2239  }
2240  }
2241 }
2242 
2243 void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2244 {
2245  (*this).SetUp(x.Size());
2246  if (bctype == ZERO)
2247  {
2248  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2249  {
2250  y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2251  }
2252  }
2253  else
2254  {
2255  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2256  {
2257  y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2258  }
2259  }
2260 }
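
// Editor's sketch (not in the original file): a minimal time-dependent
// handler; the class name MyBC is hypothetical. Eval() fills the boundary
// values g(t) consumed by ApplyBC() and FixResidualBC() above.
//
//    class MyBC : public mfem::PetscBCHandler
//    {
//    public:
//       MyBC(mfem::Array<int> &ess) : PetscBCHandler(ess, TIME_DEPENDENT) {}
//       void Eval(double t, mfem::Vector &g) override { g = std::sin(t); }
//    };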
2261 
2262 // PetscLinearSolver methods
2263 
2264 PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2265  bool wrapin)
2266  : PetscSolver(), Solver(), wrap(wrapin)
2267 {
2268  KSP ksp;
2269  ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2270  obj = (PetscObject)ksp;
2271  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2272  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2273 }
2274 
2275 PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2276  const std::string &prefix)
2277  : PetscSolver(), Solver(), wrap(false)
2278 {
2279  KSP ksp;
2280  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2281  obj = (PetscObject)ksp;
2282  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2283  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2284  SetOperator(A);
2285 }
2286 
2287 PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2288  const std::string &prefix)
2289  : PetscSolver(), Solver(), wrap(wrapin)
2290 {
2291  KSP ksp;
2292  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2293  obj = (PetscObject)ksp;
2294  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2295  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2296  SetOperator(A);
2297 }
2298 
2299 void PetscLinearSolver::SetOperator(const Operator &op)
2300 {
2301  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2302  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2303  (dynamic_cast<const PetscParMatrix *>(&op));
2304  const Operator *oA = dynamic_cast<const Operator *>(&op);
2305 
2306  // Preserve Pmat if already set
2307  KSP ksp = (KSP)obj;
2308  Mat P = NULL;
2309  PetscBool pmat;
2310  ierr = KSPGetOperatorsSet(ksp,NULL,&pmat); PCHKERRQ(ksp,ierr);
2311  if (pmat)
2312  {
2313  ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
2314  ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
2315  }
2316 
2317  // update base classes: Operator, Solver, PetscLinearSolver
2318  bool delete_pA = false;
2319  if (!pA)
2320  {
2321  if (hA)
2322  {
2323  // Create a MATSHELL object or convert into a format suitable for constructing preconditioners
2324  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2325  delete_pA = true;
2326  }
2327  else if (oA) // fallback to general operator
2328  {
2329  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2330  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2331  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2332  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2333  delete_pA = true;
2334  }
2335  }
2336  MFEM_VERIFY(pA, "Unsupported operation!");
2337 
2338  // Set operators into PETSc KSP
2339  Mat A = pA->A;
2340  if (operatorset)
2341  {
2342  Mat C;
2343  PetscInt nheight,nwidth,oheight,owidth;
2344 
2345  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2346  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2347  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2348  if (nheight != oheight || nwidth != owidth)
2349  {
2350  // reinit without destroying the KSP
2351  // communicator remains the same
2352  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2353  delete X;
2354  delete B;
2355  X = B = NULL;
2356  wrap = false;
2357  }
2358  }
2359  if (P)
2360  {
2361  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
2362  ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
2363  }
2364  else
2365  {
2366  ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2367  }
2368 
2369  // Update PetscSolver
2370  operatorset = true;
2371 
2372  // Update the Operator fields.
2373  height = pA->Height();
2374  width = pA->Width();
2375 
2376  if (delete_pA) { delete pA; }
2377 }
2378 
2379 void PetscLinearSolver::SetOperator(const Operator &op, const Operator &pop)
2380 {
2381  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2382  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2383  (dynamic_cast<const PetscParMatrix *>(&op));
2384  const Operator *oA = dynamic_cast<const Operator *>(&op);
2385 
2386  PetscParMatrix *ppA = const_cast<PetscParMatrix *>
2387  (dynamic_cast<const PetscParMatrix *>(&pop));
2388  const Operator *poA = dynamic_cast<const Operator *>(&pop);
2389 
2390  // Convert Operator for linear system
2391  bool delete_pA = false;
2392  if (!pA)
2393  {
2394  if (hA)
2395  {
2396  // Create a MATSHELL object or convert into a format suitable for constructing preconditioners
2397  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2398  delete_pA = true;
2399  }
2400  else if (oA) // fallback to general operator
2401  {
2402  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2403  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2404  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2405  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2406  delete_pA = true;
2407  }
2408  }
2409  MFEM_VERIFY(pA, "Unsupported operation!");
2410 
2411  // Convert Operator to be preconditioned
2412  bool delete_ppA = false;
2413  if (!ppA)
2414  {
2415  if (oA == poA && !wrap) // Same operator, already converted
2416  {
2417  ppA = pA;
2418  }
2419  else
2420  {
2421  ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
2422  delete_ppA = true;
2423  }
2424  }
2425  MFEM_VERIFY(ppA, "Unsupported operation!");
2426 
2427  // Set operators into PETSc KSP
2428  KSP ksp = (KSP)obj;
2429  Mat A = pA->A;
2430  Mat P = ppA->A;
2431  if (operatorset)
2432  {
2433  Mat C;
2434  PetscInt nheight,nwidth,oheight,owidth;
2435 
2436  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2437  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2438  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2439  if (nheight != oheight || nwidth != owidth)
2440  {
2441  // reinit without destroying the KSP
2442  // communicator remains the same
2443  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2444  delete X;
2445  delete B;
2446  X = B = NULL;
2447  wrap = false;
2448  }
2449  }
2450  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
2451 
2452  // Update PetscSolver
2453  operatorset = true;
2454 
2455  // Update the Operator fields.
2456  height = pA->Height();
2457  width = pA->Width();
2458 
2459  if (delete_pA) { delete pA; }
2460  if (delete_ppA) { delete ppA; }
2461 }
2462 
2463 void PetscLinearSolver::SetPreconditioner(Solver &precond)
2464 {
2465  KSP ksp = (KSP)obj;
2466 
2467  // Preserve Amat if already set
2468  Mat A = NULL;
2469  PetscBool amat;
2470  ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
2471  if (amat)
2472  {
2473  ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
2474  ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
2475  }
2476  PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
2477  if (ppc)
2478  {
2479  ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
2480  }
2481  else
2482  {
2483  // wrap the Solver action
2484  // the Solver is assumed to be already set up;
2485  // ownership of precond is not transferred,
2486  // consistent with MFEM's other linear solvers
2487  PC pc;
2488  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
2489  ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
2490  }
2491  if (A)
2492  {
2493  Mat P;
2494 
2495  ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
2496  ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
2497  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
2498  ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
2499  ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
2500  }
2501 }
2502 
2503 void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
2504 {
2505  KSP ksp = (KSP)obj;
2506 
2507  if (!B || !X)
2508  {
2509  Mat pA = NULL;
2510  ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
2511  if (!B)
2512  {
2513  PetscParMatrix A = PetscParMatrix(pA, true);
2514  B = new PetscParVector(A, true, false);
2515  }
2516  if (!X)
2517  {
2518  PetscParMatrix A = PetscParMatrix(pA, true);
2519  X = new PetscParVector(A, false, false);
2520  }
2521  }
2522  B->PlaceArray(b.GetData());
2523  X->PlaceArray(x.GetData());
2524 
2525  Customize();
2526 
2527  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2528  PCHKERRQ(ksp, ierr);
2529 
2530  // Solve the system.
2531  ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
2532  B->ResetArray();
2533  X->ResetArray();
2534 }
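
// Editor's sketch of basic usage, assuming a HypreParMatrix A and vectors
// B, X already assembled:
//
//    mfem::PetscLinearSolver solver(A.GetComm());
//    solver.SetOperator(A);   // converted to PETSc format as above
//    solver.SetRelTol(1e-8);
//    solver.SetMaxIter(200);
//    solver.Mult(B, X);       // solve A X = B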
2535 
2536 PetscLinearSolver::~PetscLinearSolver()
2537 {
2538  MPI_Comm comm;
2539  KSP ksp = (KSP)obj;
2540  ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
2541  ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
2542 }
2543 
2544 // PetscPCGSolver methods
2545 
2546 PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix)
2547  : PetscLinearSolver(comm,prefix)
2548 {
2549  KSP ksp = (KSP)obj;
2550  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2551  // this is to obtain a textbook PCG
2552  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2553 }
2554 
2555 PetscPCGSolver::PetscPCGSolver(PetscParMatrix& A, const std::string &prefix)
2556  : PetscLinearSolver(A,prefix)
2557 {
2558  KSP ksp = (KSP)obj;
2559  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2560  // this is to obtain a textbook PCG
2561  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2562 }
2563 
2564 PetscPCGSolver::PetscPCGSolver(HypreParMatrix& A, bool wrap,
2565  const std::string &prefix)
2566  : PetscLinearSolver(A,wrap,prefix)
2567 {
2568  KSP ksp = (KSP)obj;
2569  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
2570  // this is to obtain a textbook PCG
2571  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
2572 }
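
// Editor's note: KSP_NORM_NATURAL makes PETSc monitor convergence in the
// norm sqrt(r^T B r) induced by the preconditioner B, the same quantity
// checked by MFEM's built-in CGSolver; hence the "textbook PCG" comments.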
2573 
2574 // PetscPreconditioner methods
2575 
2576 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
2577  const std::string &prefix)
2578  : PetscSolver(), Solver()
2579 {
2580  PC pc;
2581  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
2582  obj = (PetscObject)pc;
2583  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2584  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2585 }
2586 
2587 PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
2588  const string &prefix)
2589  : PetscSolver(), Solver()
2590 {
2591  PC pc;
2592  ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
2593  obj = (PetscObject)pc;
2594  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2595  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2596  SetOperator(A);
2597 }
2598 
2599 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
2600  const string &prefix)
2601  : PetscSolver(), Solver()
2602 {
2603  PC pc;
2604  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
2605  obj = (PetscObject)pc;
2606  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2607  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
2608  SetOperator(op);
2609 }
2610 
2611 void PetscPreconditioner::SetOperator(const Operator &op)
2612 {
2613  bool delete_pA = false;
2614  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2615  (dynamic_cast<const PetscParMatrix *>(&op));
2616 
2617  if (!pA)
2618  {
2619  const Operator *cop = dynamic_cast<const Operator *>(&op);
2620  pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
2621  delete_pA = true;
2622  }
2623 
2624  // Set operators into PETSc PC
2625  PC pc = (PC)obj;
2626  Mat A = pA->A;
2627  if (operatorset)
2628  {
2629  Mat C;
2630  PetscInt nheight,nwidth,oheight,owidth;
2631 
2632  ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
2633  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2634  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2635  if (nheight != oheight || nwidth != owidth)
2636  {
2637  // reinit without destroying the PC
2638  // communicator remains the same
2639  ierr = PCReset(pc); PCHKERRQ(pc,ierr);
2640  delete X;
2641  delete B;
2642  X = B = NULL;
2643  }
2644  }
2645  ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
2646 
2647  // Update PetscSolver
2648  operatorset = true;
2649 
2650  // Update the Operator fields.
2651  height = pA->Height();
2652  width = pA->Width();
2653 
2654  if (delete_pA) { delete pA; };
2655 }
2656 
2657 void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
2658 {
2659  PC pc = (PC)obj;
2660 
2661  if (!B || !X)
2662  {
2663  Mat pA = NULL;
2664  ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
2665  if (!B)
2666  {
2667  PetscParMatrix A(pA, true);
2668  B = new PetscParVector(A, true, false);
2669  }
2670  if (!X)
2671  {
2672  PetscParMatrix A(pA, true);
2673  X = new PetscParVector(A, false, false);
2674  }
2675  }
2676  B->PlaceArray(b.GetData());
2677  X->PlaceArray(x.GetData());
2678 
2679  Customize();
2680 
2681  // Apply the preconditioner.
2682  ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
2683  B->ResetArray();
2684  X->ResetArray();
2685 }
2686 
2687 PetscPreconditioner::~PetscPreconditioner()
2688 {
2689  MPI_Comm comm;
2690  PC pc = (PC)obj;
2691  ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
2692  ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
2693 }
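
// Editor's sketch: a PetscPreconditioner can be applied on its own via
// Mult(), or attached to a Krylov solver; pA is assumed to be a
// PetscParMatrix built as in SetOperator() above:
//
//    mfem::PetscPreconditioner prec(pA);  // PC type set via options prefix
//    mfem::PetscLinearSolver solver(pA.GetComm());
//    solver.SetOperator(pA);
//    solver.SetPreconditioner(prec);
//    solver.Mult(B, X);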
2694 
2695 // PetscBDDCSolver methods
2696 
2697 // Coordinates sampling function
2698 static void func_coords(const Vector &x, Vector &y)
2699 {
2700  for (int i = 0; i < x.Size(); i++) { y(i) = x(i); }
2701 }
2702 
2703 void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
2704 {
2705  MPI_Comm comm = PetscObjectComm(obj);
2706 
2707  // get PETSc object
2708  PC pc = (PC)obj;
2709  Mat pA;
2710  ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
2711 
2712  // the matrix should be of type MATIS
2713  PetscBool ismatis;
2714  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
2715  PCHKERRQ(pA,ierr);
2716  MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
2717 
2718  // Check options
2719  ParFiniteElementSpace *fespace = opts.fespace;
2720  if (opts.netflux && !fespace)
2721  {
2722  MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
2723  }
2724 
2725  // Attach default near-null space to local matrices
2726  {
2727  MatNullSpace nnsp;
2728  Mat lA;
2729  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
2730  ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
2731  &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2732  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2733  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2734  }
2735 
2736  // set PETSc PC type to PCBDDC
2737  ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
2738 
2739  PetscInt rst,nl;
2740  ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
2741  ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
2742 
2743  // index sets for fields splitting and coordinates for nodal spaces
2744  IS *fields = NULL;
2745  PetscInt nf = 0;
2746  PetscInt sdim = 0;
2747  PetscReal *coords = NULL;
2748  if (fespace)
2749  {
2750  int vdim = fespace->GetVDim();
2751 
2752  // Ideally, the block size should be set at matrix creation
2753  // but the MFEM assembly does not allow this
2754  if (fespace->GetOrdering() == Ordering::byVDIM)
2755  {
2756  Mat lA;
2757  ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
2758  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
2759  ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
2760  }
2761 
2762  // fields
2763  if (vdim > 1)
2764  {
2765  PetscInt st = rst, bs, inc, nlf;
2766  nf = vdim;
2767  nlf = nl/nf;
2768  ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
2769  if (fespace->GetOrdering() == Ordering::byVDIM)
2770  {
2771  inc = 1;
2772  bs = vdim;
2773  }
2774  else
2775  {
2776  inc = nlf;
2777  bs = 1;
2778  }
2779  for (PetscInt i = 0; i < nf; i++)
2780  {
2781  ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
2782  st += inc;
2783  }
2784  }
2785 
2786  // coordinates
2787  const FiniteElementCollection *fec = fespace->FEColl();
2788  bool h1space = dynamic_cast<const H1_FECollection*>(fec);
2789  if (h1space)
2790  {
2791  ParFiniteElementSpace *fespace_coords = fespace;
2792 
2793  sdim = fespace->GetParMesh()->SpaceDimension();
2794  if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
2795  {
2796  fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
2797  Ordering::byVDIM);
2798  }
2799  VectorFunctionCoefficient coeff_coords(sdim, func_coords);
2800  ParGridFunction gf_coords(fespace_coords);
2801  gf_coords.ProjectCoefficient(coeff_coords);
2802  HypreParVector *hvec_coords = gf_coords.ParallelProject();
2803 
2804  // likely elasticity -> we attach rigid-body modes as near-null space information to the local matrices
2805  if (vdim == sdim)
2806  {
2807  MatNullSpace nnsp;
2808  Mat lA;
2809  Vec pvec_coords,lvec_coords;
2810  ISLocalToGlobalMapping l2g;
2811  PetscSF sf;
2812  PetscLayout rmap;
2813  const PetscInt *gidxs;
2814  PetscInt nleaves;
2815 
2816  ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
2817  hvec_coords->GlobalSize(),hvec_coords->GetData(),&pvec_coords);
2818  CCHKERRQ(comm,ierr);
2819  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
2820  ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
2821  ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
2822  ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
2823  ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
2824  ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
2825  ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
2826  ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
2827  ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
2828  CCHKERRQ(comm,ierr);
2829  ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
2830  {
2831  const PetscScalar *garray;
2832  PetscScalar *larray;
2833 
2834  ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2835  ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2836  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
2837  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
2838  ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2839  ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
2840  }
2841  ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
2842  ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
2843  CCHKERRQ(PETSC_COMM_SELF,ierr);
2844  ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
2845  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2846  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
2847  ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
2848  }
2849 
2850  // each dof has an associated tuple of coordinates
2851  ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
2852  if (nf > 0)
2853  {
2854  PetscScalar *data = hvec_coords->GetData();
2855  for (PetscInt i = 0; i < nf; i++)
2856  {
2857  const PetscInt *idxs;
2858  PetscInt nn;
2859 
2860  // It also handles the case of fespace not ordered by VDIM
2861  ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
2862  ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
2863  for (PetscInt j = 0; j < nn; j++)
2864  {
2865  PetscInt idx = idxs[j]-rst;
2866  for (PetscInt d = 0; d < sdim; d++)
2867  {
2868  coords[sdim*idx+d] = data[sdim*j+d];
2869  }
2870  }
2871  ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
2872  }
2873  }
2874  else
2875  {
2876  ierr = PetscMemcpy(coords,hvec_coords->GetData(),nl*sdim*sizeof(PetscReal));
2877  CCHKERRQ(comm,ierr);
2878  }
2879  if (fespace_coords != fespace)
2880  {
2881  delete fespace_coords;
2882  }
2883  delete hvec_coords;
2884  }
2885  }
2886 
2887  // index sets for boundary dofs specification (Essential = dir, Natural = neu)
2888  IS dir = NULL, neu = NULL;
2889 
2890  // Extract l2l matrices
2891  Array<Mat> *l2l = NULL;
2892  if (opts.ess_dof_local || opts.nat_dof_local)
2893  {
2894  PetscContainer c;
2895 
2896  ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
2897  MFEM_VERIFY(c,"Local-to-local PETSc container not present");
2898  ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
2899  }
2900 
2901  // check information about index sets (essential dofs, fields, etc.)
2902 #ifdef MFEM_DEBUG
2903  {
2904  // make sure ess/nat_dof have been collectively set
2905  PetscBool lpr = PETSC_FALSE,pr;
2906  if (opts.ess_dof) { lpr = PETSC_TRUE; }
2907  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
2908  PCHKERRQ(pA,ierr);
2909  MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
2910  lpr = PETSC_FALSE;
2911  if (opts.nat_dof) { lpr = PETSC_TRUE; }
2912  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
2913  PCHKERRQ(pA,ierr);
2914  MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
2915  // make sure fields have been collectively set
2916  PetscInt ms[2],Ms[2];
2917  ms[0] = -nf; ms[1] = nf;
2918  ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
2919  PCHKERRQ(pA,ierr);
2920  MFEM_VERIFY(-Ms[0] == Ms[1],
2921  "number of fields should be the same across processes");
2922  }
2923 #endif
2924 
2925  // boundary sets
2926  if (opts.ess_dof)
2927  {
2928  PetscInt st = opts.ess_dof_local ? 0 : rst;
2929  if (!opts.ess_dof_local)
2930  {
2931  // need to compute the boundary dofs in global ordering
2932  ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
2933  CCHKERRQ(comm,ierr);
2934  ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
2935  }
2936  else
2937  {
2938  // need to compute a list for the marked boundary dofs in local ordering
2939  ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
2940  CCHKERRQ(comm,ierr);
2941  ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
2942  }
2943  }
2944  if (opts.nat_dof)
2945  {
2946  PetscInt st = opts.nat_dof_local ? 0 : rst;
2947  if (!opts.nat_dof_local)
2948  {
2949  // need to compute the boundary dofs in global ordering
2950  ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
2951  CCHKERRQ(comm,ierr);
2952  ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
2953  }
2954  else
2955  {
2956  // need to compute a list for the marked boundary dofs in local ordering
2957  ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
2958  CCHKERRQ(comm,ierr);
2959  ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
2960  }
2961  }
2962 
2963  // field splitting
2964  if (fields)
2965  {
2966  ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
2967  }
2968  for (PetscInt i = 0; i < nf; i++)
2969  {
2970  ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
2971  }
2972  ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
2973 
2974  // coordinates
2975  if (coords)
2976  {
2977  ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
2978  }
2979  ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
2980 
2981  // code for block size is disabled since we cannot change the matrix
2982  // block size after it has been set up
2983  // int bs = 1;
2984 
2985  // Customize using the finite element space (if any)
2986  if (fespace)
2987  {
2988  const FiniteElementCollection *fec = fespace->FEColl();
2989  bool edgespace, rtspace, h1space;
2990  bool needint = opts.netflux;
2991  bool tracespace, rt_tracespace, edge_tracespace;
2992  int vdim, dim, p;
2993  PetscBool B_is_Trans = PETSC_FALSE;
2994 
2995  ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
2996  dim = pmesh->Dimension();
2997  vdim = fespace->GetVDim();
2998  h1space = dynamic_cast<const H1_FECollection*>(fec);
2999  rtspace = dynamic_cast<const RT_FECollection*>(fec);
3000  edgespace = dynamic_cast<const ND_FECollection*>(fec);
3001  edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3002  rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3003  tracespace = edge_tracespace || rt_tracespace;
3004 
3005  p = 1;
3006  if (fespace->GetNE() > 0)
3007  {
3008  if (!tracespace)
3009  {
3010  p = fespace->GetOrder(0);
3011  }
3012  else
3013  {
3014  p = fespace->GetFaceOrder(0);
3015  if (dim == 2) { p++; }
3016  }
3017  }
3018 
3019  if (edgespace) // H(curl)
3020  {
3021  if (dim == 2)
3022  {
3023  needint = true;
3024  if (tracespace)
3025  {
3026  MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3027  " not using auxiliary quadrature");
3028  needint = false;
3029  }
3030  }
3031  else
3032  {
3033  FiniteElementCollection *vfec;
3034  if (tracespace)
3035  {
3036  vfec = new H1_Trace_FECollection(p,dim);
3037  }
3038  else
3039  {
3040  vfec = new H1_FECollection(p,dim);
3041  }
3042  ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3043  ParDiscreteLinearOperator *grad;
3044  grad = new ParDiscreteLinearOperator(vfespace,fespace);
3045  if (tracespace)
3046  {
3047  grad->AddTraceFaceInterpolator(new GradientInterpolator);
3048  }
3049  else
3050  {
3051  grad->AddDomainInterpolator(new GradientInterpolator);
3052  }
3053  grad->Assemble();
3054  grad->Finalize();
3055  HypreParMatrix *hG = grad->ParallelAssemble();
3056  PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3057  delete hG;
3058  delete grad;
3059 
3060  PetscBool conforming = PETSC_TRUE;
3061  if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3062  ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3063  PCHKERRQ(pc,ierr);
3064  delete vfec;
3065  delete vfespace;
3066  delete G;
3067  }
3068  }
3069  else if (rtspace) // H(div)
3070  {
3071  needint = true;
3072  if (tracespace)
3073  {
3074  MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3075  " auxiliary quadrature");
3076  needint = false;
3077  }
3078  }
3079  else if (h1space) // H(grad), only for the vector case
3080  {
3081  if (vdim != dim) { needint = false; }
3082  }
3083 
3084  PetscParMatrix *B = NULL;
3085  if (needint)
3086  {
3087  // Generate bilinear form in unassembled format which is used to
3088  // compute the net-flux across subdomain boundaries for H(div) and
3089  // Elasticity/Stokes, and the line integral \int u x n of 2D H(curl) fields
3090  FiniteElementCollection *auxcoll;
3091  if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3092  else
3093  {
3094  if (h1space)
3095  {
3096  auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3097  }
3098  else
3099  {
3100  auxcoll = new L2_FECollection(p,dim);
3101  }
3102  }
3103  ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3104  ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3105 
3106  if (edgespace)
3107  {
3108  if (tracespace)
3109  {
3110  b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3111  }
3112  else
3113  {
3114  b->AddDomainIntegrator(new VectorFECurlIntegrator);
3115  }
3116  }
3117  else if (rtspace)
3118  {
3119  if (tracespace)
3120  {
3121  b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3122  }
3123  else
3124  {
3125  b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3126  }
3127  }
3128  else
3129  {
3130  b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3131  }
3132  b->Assemble();
3133  b->Finalize();
3134  OperatorHandle Bh(Operator::PETSC_MATIS);
3135  b->ParallelAssemble(Bh);
3136  Bh.Get(B);
3137  Bh.SetOperatorOwner(false);
3138 
3139  if (dir) // if essential dofs are present, we need to zero the columns
3140  {
3141  Mat pB = *B;
3142  ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3143  if (!opts.ess_dof_local)
3144  {
3145  ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3146  }
3147  else
3148  {
3149  ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3150  }
3151  B_is_Trans = PETSC_TRUE;
3152  }
3153  delete b;
3154  delete pspace;
3155  delete auxcoll;
3156  }
3157 
3158  if (B)
3159  {
3160  ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3161  }
3162  delete B;
3163  }
3164  ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3165  ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3166 }
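
// Editor's sketch of a typical BDDC setup (hedged; the parameter-setting
// calls follow the PetscBDDCSolverParams interface in petsc.hpp):
//
//    mfem::PetscBDDCSolverParams opts;
//    opts.SetSpace(fespace);              // enables the fields/coordinates
//                                         // customization performed above
//    opts.SetEssBdrDofs(&ess_tdof_list);  // essential dofs, global ordering
//    mfem::PetscBDDCSolver bddc(pA, opts); // pA must be in PETSC_MATIS format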
3167 
3168 PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3169  const PetscBDDCSolverParams &opts,
3170  const std::string &prefix)
3171  : PetscPreconditioner(A,prefix)
3172 {
3173  BDDCSolverConstructor(opts);
3174  Customize();
3175 }
3176 
3177 PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3178  const PetscBDDCSolverParams &opts,
3179  const std::string &prefix)
3180  : PetscPreconditioner(comm,op,prefix)
3181 {
3182  BDDCSolverConstructor(opts);
3183  Customize();
3184 }
3185 
3186 PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3187  const string &prefix)
3188  : PetscPreconditioner(comm,op,prefix)
3189 {
3190  PC pc = (PC)obj;
3191 
3192  Mat pA;
3193  ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3194 
3195  // Check if pA is of type MATNEST
3196  // (this requirement can be removed when we can pass fields).
3197  PetscBool isnest;
3198  ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3199  PCHKERRQ(pA,ierr);
3200  MFEM_VERIFY(isnest,
3201  "PetscFieldSplitSolver needs the matrix in nested format.");
3202 
3203  PetscInt nr;
3204  IS *isrow;
3205  ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3206  ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3207  ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3208  ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3209 
3210  // We need to customize here, before setting the index sets.
3211  Customize();
3212 
3213  for (PetscInt i=0; i<nr; i++)
3214  {
3215  ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3216  }
3217  ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3218 }
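
// Editor's note: the splitting strategy itself is controlled by PETSc
// runtime options on the given prefix, e.g. with a hypothetical prefix "fs_":
//
//    ./app -fs_pc_fieldsplit_type schur -fs_fieldsplit_0_pc_type gamg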
3219 
3220 // PetscNonlinearSolver methods
3221 
3222 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3223  const std::string &prefix)
3224  : PetscSolver(), Solver()
3225 {
3226  // Create the actual solver object
3227  SNES snes;
3228  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3229  obj = (PetscObject)snes;
3230  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3231  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3232 
3233  // Allocate private solver context
3234  CreatePrivateContext();
3235 }
3236 
3237 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
3238  const std::string &prefix)
3239  : PetscSolver(), Solver()
3240 {
3241  // Create the actual solver object
3242  SNES snes;
3243  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3244  obj = (PetscObject)snes;
3245  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3246  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3247 
3248  // Allocate private solver context
3249  CreatePrivateContext();
3250 
3251  SetOperator(op);
3252 }
3253 
3254 PetscNonlinearSolver::~PetscNonlinearSolver()
3255 {
3256  MPI_Comm comm;
3257  SNES snes = (SNES)obj;
3258  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
3259  ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3260 }
3261 
3262 void PetscNonlinearSolver::SetOperator(const Operator &op)
3263 {
3264  SNES snes = (SNES)obj;
3265 
3266  if (operatorset)
3267  {
3268  PetscBool ls,gs;
3269  void *fctx,*jctx;
3270 
3271  ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3272  PCHKERRQ(snes, ierr);
3273  ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3274  PCHKERRQ(snes, ierr);
3275 
3276  ls = (PetscBool)(height == op.Height() && width == op.Width() &&
3277  (void*)&op == fctx &&
3278  (void*)&op == jctx);
3279  ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3280  PetscObjectComm((PetscObject)snes));
3281  PCHKERRQ(snes,ierr);
3282  if (!gs)
3283  {
3284  ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3285  delete X;
3286  delete B;
3287  X = B = NULL;
3288  }
3289  }
3290  else
3291  {
3292  /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
3293  yet set. We default to backtracking */
3294  SNESLineSearch ls;
3295  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3296  ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3297  }
3298 
3299  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3300  snes_ctx->op = (mfem::Operator*)&op;
3301  ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
3302  PCHKERRQ(snes, ierr);
3303  ierr = SNESSetJacobian(snes, NULL, NULL, __mfem_snes_jacobian,
3304  (void *)snes_ctx);
3305  PCHKERRQ(snes, ierr);
3306 
3307  // Update PetscSolver
3308  operatorset = true;
3309 
3310  // Update the Operator fields.
3311  height = op.Height();
3312  width = op.Width();
3313 }
3314 
3315 void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
3316 {
3317  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3318  snes_ctx->jacType = jacType;
3319 }
3320 
3321 void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *,const Vector&,
3322  double*))
3323 {
3324  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3325  snes_ctx->objective = objfn;
3326 
3327  SNES snes = (SNES)obj;
3328  ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
3329  PCHKERRQ(snes, ierr);
3330 }
3331 
3332 void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *,const Vector&,
3333  Vector&, Vector&,
3334  bool&, bool&))
3335 {
3336  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3337  snes_ctx->postcheck = post;
3338 
3339  SNES snes = (SNES)obj;
3340  SNESLineSearch ls;
3341  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3342  ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
3343  PCHKERRQ(ls, ierr);
3344 }
3345 
3346 void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *,int,
3347  const mfem::Vector&,
3348  const mfem::Vector&,
3349  const mfem::Vector&,
3350  const mfem::Vector&))
3351 {
3352  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3353  snes_ctx->update = update;
3354 
3355  SNES snes = (SNES)obj;
3356  ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
3357 }
3358 
3359 void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
3360 {
3361  SNES snes = (SNES)obj;
3362 
3363  bool b_nonempty = b.Size();
3364  if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
3365  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
3366  X->PlaceArray(x.GetData());
3367  if (b_nonempty) { B->PlaceArray(b.GetData()); }
3368  else { *B = 0.0; }
3369 
3370  Customize();
3371 
3372  if (!iterative_mode) { *X = 0.; }
3373 
3374  // Solve the system.
3375  ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
3376  X->ResetArray();
3377  if (b_nonempty) { B->ResetArray(); }
3378 }
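
// Editor's sketch of basic usage; op is an mfem::Operator implementing
// Mult() (residual) and GetGradient() (Jacobian):
//
//    mfem::PetscNonlinearSolver newton(MPI_COMM_WORLD, op);
//    newton.SetRelTol(1e-6);
//    newton.Mult(b, x);  // solve op(x) = b; pass an empty b for op(x) = 0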
3379 
3380 // PetscODESolver methods
3381 
3382 PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
3383  : PetscSolver(), ODESolver()
3384 {
3385  // Create the actual solver object
3386  TS ts;
3387  ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
3388  obj = (PetscObject)ts;
3389  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3390  ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
3391 
3392  // Allocate private solver context
3393  CreatePrivateContext();
3394 
3395  // Default options, to comply with the current interface to ODESolver.
3396  ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
3397  PCHKERRQ(ts,ierr);
3398  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
3399  PCHKERRQ(ts,ierr);
3400  TSAdapt tsad;
3401  ierr = TSGetAdapt(ts,&tsad);
3402  PCHKERRQ(ts,ierr);
3403  ierr = TSAdaptSetType(tsad,TSADAPTNONE);
3404  PCHKERRQ(ts,ierr);
3405 }
3406 
3407 PetscODESolver::~PetscODESolver()
3408 {
3409  MPI_Comm comm;
3410  TS ts = (TS)obj;
3411  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
3412  ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
3413 }
3414 
3415 void PetscODESolver::Init(TimeDependentOperator &f_,
3416  enum PetscODESolver::Type type)
3417 {
3418  TS ts = (TS)obj;
3419 
3420  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3421  if (operatorset)
3422  {
3423  ierr = TSReset(ts); PCHKERRQ(ts,ierr);
3424  delete X;
3425  X = NULL;
3426  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
3427  ts_ctx->cached_ijacstate = -1;
3428  ts_ctx->cached_rhsjacstate = -1;
3429  ts_ctx->cached_splits_xstate = -1;
3430  ts_ctx->cached_splits_xdotstate = -1;
3431  }
3432  f = &f_;
3433 
3434  // Set functions in TS
3435  ts_ctx->op = &f_;
3436  if (f_.isImplicit())
3437  {
3438  ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
3439  PCHKERRQ(ts, ierr);
3440  ierr = TSSetIJacobian(ts, NULL, NULL, __mfem_ts_ijacobian, (void *)ts_ctx);
3441  PCHKERRQ(ts, ierr);
3442  ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
3443  PCHKERRQ(ts, ierr);
3444  }
3445  if (!f_.isHomogeneous())
3446  {
3447  if (!f_.isImplicit())
3448  {
3449  ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
3450  PCHKERRQ(ts, ierr);
3451  }
3452  ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
3453  PCHKERRQ(ts, ierr);
3454  ierr = TSSetRHSJacobian(ts, NULL, NULL, __mfem_ts_rhsjacobian, (void *)ts_ctx);
3455  PCHKERRQ(ts, ierr);
3456  }
3457  operatorset = true;
3458 
3459  SetType(type);
3460 
3461  // Set solution vector
3462  PetscParVector X(PetscObjectComm(obj),*f,false,true);
3463  ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
3464 
3465  // Compose special purpose function for PDE-constrained optimization
3466  PetscBool use = PETSC_TRUE;
3467  ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
3468  if (use && f_.isImplicit())
3469  {
3470  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
3471  __mfem_ts_computesplits);
3472  PCHKERRQ(ts,ierr);
3473  }
3474  else
3475  {
3476  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
3477  NULL);
3478  PCHKERRQ(ts,ierr);
3479  }
3480 }
3481 
3482 void PetscODESolver::SetJacobianType(Operator::Type jacType)
3483 {
3484  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3485  ts_ctx->jacType = jacType;
3486 }
3487 
3488 PetscODESolver::Type PetscODESolver::GetType() const
3489 {
3490  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3491  return ts_ctx->type;
3492 }
3493 
3494 void PetscODESolver::SetType(PetscODESolver::Type type)
3495 {
3496  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3497 
3498  TS ts = (TS)obj;
3499  ts_ctx->type = type;
3500  if (type == ODE_SOLVER_LINEAR)
3501  {
3502  ierr = TSSetProblemType(ts, TS_LINEAR);
3503  PCHKERRQ(ts, ierr);
3504  }
3505  else
3506  {
3507  ierr = TSSetProblemType(ts, TS_NONLINEAR);
3508  PCHKERRQ(ts, ierr);
3509  }
3510 }
3511 
3512 void PetscODESolver::Step(Vector &x, double &t, double &dt)
3513 {
3514  // Pass the parameters to PETSc.
3515  TS ts = (TS)obj;
3516  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
3517  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
3518 
3519  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
3520  X->PlaceArray(x.GetData());
3521 
3522  Customize();
3523 
3524  // Take the step.
3525  ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
3526  ierr = TSStep(ts); PCHKERRQ(ts, ierr);
3527  X->ResetArray();
3528 
3529  // Return the current time and the time step actually used to the caller.
3530  // We cannot use TSGetTimeStep() as it returns the next candidate step
3531  PetscReal pt;
3532  ierr = TSGetTime(ts,&pt); PCHKERRQ(ts,ierr);
3533  dt = pt - (PetscReal)t;
3534  t = pt;
3535 }
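
// Editor's sketch of a typical time loop; ode is a PetscODESolver already
// initialized via Init() with a TimeDependentOperator:
//
//    double t = 0.0, dt = 1e-2;
//    while (t < t_final)
//    {
//       ode.Step(x, t, dt); // on return, t and dt reflect the step taken
//    }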
3536 
3537 void PetscODESolver::Run(Vector &x, double &t, double &dt, double t_final)
3538 {
3539  // Give the parameters to PETSc.
3540  TS ts = (TS)obj;
3541  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
3542  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
3543  ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
3544  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
3545  PCHKERRQ(ts, ierr);
3546 
3547  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
3548  X->PlaceArray(x.GetData());
3549 
3550  Customize();
3551 
3552  // Reset Jacobian caching since the user may have changed
3553  // the parameters of the solver
3554  // We don't do this in the Step method because two consecutive
3555  // Step() calls are done with the same operator
3556  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
3557  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
3558  ts_ctx->cached_ijacstate = -1;
3559  ts_ctx->cached_rhsjacstate = -1;
3560  ts_ctx->cached_splits_xstate = -1;
3561  ts_ctx->cached_splits_xdotstate = -1;
3562 
3563  // Take the steps.
3564  ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
3565  X->ResetArray();
3566 
3567  // Get back final time and time step to caller.
3568  PetscReal pt;
3569  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
3570  t = pt;
3571  ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
3572  dt = pt;
3573 }
3574 
3575 } // namespace mfem
3576 
3577 #include "petsc/private/petscimpl.h"
3578 
3579 // auxiliary functions
3580 static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
3581  void* ctx)
3582 {
3583  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
3584 
3585  PetscFunctionBeginUser;
3586  if (!monctx)
3587  {
3588  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
3589  }
3590  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
3591  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
3592  monctx->monitor);
3593 
3594  if (user_monitor->mon_sol)
3595  {
3596  mfem::PetscParVector V(x,true);
3597  user_monitor->MonitorSolution(it,t,V);
3598  }
3599  user_monitor->MonitorSolver(solver);
3600  PetscFunctionReturn(0);
3601 }
3602 
3603 static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
3604  Vec f,void *ctx)
3605 {
3606  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3607  PetscErrorCode ierr;
3608 
3609  PetscFunctionBeginUser;
3610  mfem::PetscParVector xx(x,true);
3611  mfem::PetscParVector yy(xp,true);
3612  mfem::PetscParVector ff(f,true);
3613 
3614  mfem::TimeDependentOperator *op = ts_ctx->op;
3615  op->SetTime(t);
3616 
3617  if (ts_ctx->bchandler)
3618  {
3619  // we evaluate the ImplicitMult method with the correct bc
3620  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
3621  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3622  mfem::Vector* txx = ts_ctx->work;
3623  bchandler->SetTime(t);
3624  bchandler->ApplyBC(xx,*txx);
3625  op->ImplicitMult(*txx,yy,ff);
3626  // and fix the residual (i.e. f_\partial\Omega = u - g(t))
3627  bchandler->FixResidualBC(xx,ff);
3628  }
3629  else
3630  {
3631  // use the ImplicitMult method of the class
3632  op->ImplicitMult(xx,yy,ff);
3633  }
3634 
3635  // need to tell PETSc the Vec has been updated
3636  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
3637  PetscFunctionReturn(0);
3638 }
3639 
3640 static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
3641  void *ctx)
3642 {
3643  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3644  PetscErrorCode ierr;
3645 
3646  PetscFunctionBeginUser;
3647  if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
3648  mfem::PetscParVector xx(x,true);
3649  mfem::PetscParVector ff(f,true);
3650  mfem::TimeDependentOperator *top = ts_ctx->op;
3651  top->SetTime(t);
3652 
3653  // use the ExplicitMult method - compute the RHS function
3654  top->ExplicitMult(xx,ff);
3655 
3656  // need to tell PETSc the Vec has been updated
3657  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
3658  PetscFunctionReturn(0);
3659 }
3660 
3661 static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
3662  Vec xp, PetscReal shift, Mat A, Mat P,
3663  void *ctx)
3664 {
3665  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3666  mfem::Vector *xx;
3667  PetscScalar *array;
3668  PetscReal eps = 0.001; /* 0.1% difference */
3669  PetscInt n;
3670  PetscObjectState state;
3671  PetscErrorCode ierr;
3672 
3673  PetscFunctionBeginUser;
3674  // avoid recomputing the Jacobian if we already did so;
3675  // the relative tolerance comparison should be fine since
3676  // two consecutive shifts should have similar magnitude
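   // (Editor's note: for an implicit ODE F(t,x,xdot) = 0, PETSc requests the
   // combined Jacobian J = dF/dx + shift*dF/dxdot, which is provided through
   // TimeDependentOperator::GetImplicitGradient(x,xdot,shift) below.)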
3677  ierr = PetscObjectStateGet((PetscObject)A,&state); CHKERRQ(ierr);
3678  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3679  std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
3680  state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
3681 
3682  // update time
3683  mfem::TimeDependentOperator *op = ts_ctx->op;
3684  op->SetTime(t);
3685 
3686  // wrap Vecs with Vectors
3687  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
3688  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3689  mfem::Vector yy(array,n);
3690  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3691  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3692  if (!ts_ctx->bchandler)
3693  {
3694  xx = new mfem::Vector(array,n);
3695  }
3696  else
3697  {
3698  // make sure we compute a Jacobian with the correct boundary values
3699  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
3700  mfem::Vector txx(array,n);
3701  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3702  xx = ts_ctx->work;
3703  bchandler->SetTime(t);
3704  bchandler->ApplyBC(txx,*xx);
3705  }
3706  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3707 
3708  // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
3709  mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
3710  if (!ts_ctx->bchandler) { delete xx; }
3711  ts_ctx->cached_shift = shift;
3712 
3713  // Convert to the operator type requested if needed
3714  bool delete_pA = false;
3715  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
3716  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
3717  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3718  pA->GetType() != ts_ctx->jacType))
3719  {
3720  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
3721  ts_ctx->jacType);
3722  delete_pA = true;
3723  }
3724 
3725  // Eliminate essential dofs
3726  if (ts_ctx->bchandler)
3727  {
3728  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3729  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3730  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
3731  }
3732 
3733  // Avoid an unneeded copy of the matrix by swapping its header in place
3734  Mat B;
3735  B = pA->ReleaseMat(false);
3736  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
3737  if (delete_pA) { delete pA; }
3738  if (A && A != P)
3739  {
3740  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3741  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3742  }
3743 
3744  // Record the matrix state to enable Jacobian reuse
3745  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
3746  CHKERRQ(ierr);
3747  PetscFunctionReturn(0);
3748 }
3749 
3750 static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
3751  Mat Ax,Mat Jx,
3752  Mat Axp,Mat Jxp)
3753 {
3754  __mfem_ts_ctx* ts_ctx;
3755  mfem::Vector *xx;
3756  PetscScalar *array;
3757  PetscInt n;
3758  PetscObjectState state;
3759  PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
3760  PetscBool assembled;
3761  PetscErrorCode ierr;
3762 
3763  PetscFunctionBeginUser;
3764  ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
3765 
3766  // avoid recomputing the Jacobians if we already did so
3767  ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
3768  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3769  state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
3770  ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
3771  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3772  state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
3773 
3774  if (!rx && !rxp) { PetscFunctionReturn(0); }
3775 
3776  // update time
3777  mfem::TimeDependentOperator *op = ts_ctx->op;
3778  op->SetTime(t);
3779 
3780  // wrap Vecs with Vectors
3781  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
3782  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3783  mfem::Vector yy(array,n);
3784  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
3785  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3786  if (!ts_ctx->bchandler)
3787  {
3788  xx = new mfem::Vector(array,n);
3789  }
3790  else
3791  {
3792  // make sure we compute a Jacobian with the correct boundary values
3793  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
3794  mfem::Vector txx(array,n);
3795  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3796  xx = ts_ctx->work;
3797  bchandler->SetTime(t);
3798  bchandler->ApplyBC(txx,*xx);
3799  }
3800  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3801 
3802  // We don't have a specialized interface, so we just compute the split Jacobians
3803  // by evaluating the implicit gradient method twice with the appropriate shifts
3804 
3805  // first we compute the state Jacobian
3806  mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
3807 
3808  // Convert to the operator type requested if needed
3809  bool delete_mat = false;
3810  mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
3811  (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
3812  if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3813  pJx->GetType() != ts_ctx->jacType))
3814  {
3815  if (pJx)
3816  {
3817  Mat B = *pJx;
3818  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
3819  }
3820  pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
3821  ts_ctx->jacType);
3822  delete_mat = true;
3823  }
3824  if (rx)
3825  {
3826  ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
3827  if (assembled)
3828  {
3829  ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
3830  }
3831  else
3832  {
3833  Mat B;
3834  ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
3835  ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
3836  }
3837  }
3838  if (delete_mat) { delete pJx; }
3839  pJx = new mfem::PetscParMatrix(Jx,true);
3840 
3841  // Eliminate essential dofs
3842  if (ts_ctx->bchandler)
3843  {
3844  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3845  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3846  pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
3847  }
3848 
3849  // Then compute the Jacobian with respect to the time derivative of the state.
3850  // Note that this is usually the mass matrix
3851  mfem::PetscParMatrix *pJxp = NULL;
3852  if (rxp)
3853  {
3854  delete_mat = false;
3855  mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
3856  pJxp = const_cast<mfem::PetscParMatrix *>
3857  (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
3858  if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3859  pJxp->GetType() != ts_ctx->jacType))
3860  {
3861  if (pJxp)
3862  {
3863  Mat B = *pJxp;
3864  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
3865  }
3866  pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
3867  &oJxp,ts_ctx->jacType);
3868  delete_mat = true;
3869  }
3870 
3871  ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
3872  if (assembled)
3873  {
3874  ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
3875  }
3876  else
3877  {
3878  Mat B;
3879  ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
3880  ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
3881  }
3882  if (delete_mat) { delete pJxp; }
3883  pJxp = new mfem::PetscParMatrix(Jxp,true);
3884 
3885  // Eliminate essential dofs
3886  if (ts_ctx->bchandler)
3887  {
3888  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3889  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3890  pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
3891  }
3892 
3893  // Obtain the time-dependent part of the Jacobian by subtracting
3894  // the state Jacobian.
3895  // We don't use the class operator "-=" since we know that
3896  // the sparsity patterns of the two matrices are the same.
3897  ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
3898  }
3899 
3900  if (Ax && Ax != Jx)
3901  {
3902  ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3903  ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3904  }
3905  if (Axp && Axp != Jxp)
3906  {
3907  ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3908  ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3909  }
3910 
3911  // Jacobian reuse
3912  ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
3913  CHKERRQ(ierr);
3914  ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
3915  CHKERRQ(ierr);
3916 
3917  delete pJx;
3918  delete pJxp;
3919  if (!ts_ctx->bchandler) { delete xx; }
3920  PetscFunctionReturn(0);
3921 }
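// A minimal sketch of the split computed above: GetImplicitGradient(x,xdot,shift)
// returns J(shift) = dF/dx + shift*dF/dxdot, so the two evaluations give
//   Jx  = J(0)           (the state Jacobian dF/dx)
//   Jxp = J(1) - J(0)    (the Jacobian wrt xdot, usually the mass matrix)
// with the subtraction done via MatAXPY since both share the sparsity pattern.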
3922 
3923 static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
3924  Mat A, Mat P, void *ctx)
3925 {
3926  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
3927  mfem::Vector *xx;
3928  PetscScalar *array;
3929  PetscInt n;
3930  PetscObjectState state;
3931  PetscErrorCode ierr;
3932 
3933  PetscFunctionBeginUser;
3934  // avoid recomputing the Jacobian if we have already done so
3935  ierr = PetscObjectStateGet((PetscObject)A,&state); CHKERRQ(ierr);
3936  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
3937  state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
3938 
3939  // update time
3940  mfem::TimeDependentOperator *op = ts_ctx->op;
3941  op->SetTime(t);
3942 
3943  // wrap Vec with Vector
3944  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
3945  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3946  if (!ts_ctx->bchandler)
3947  {
3948  xx = new mfem::Vector(array,n);
3949  }
3950  else
3951  {
3952  // make sure we compute a Jacobian with the correct boundary values
3953  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
3954  mfem::Vector txx(array,n);
3955  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3956  xx = ts_ctx->work;
3957  bchandler->SetTime(t);
3958  bchandler->ApplyBC(txx,*xx);
3959  }
3960  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
3961 
3962  // Use TimeDependentOperator::GetExplicitGradient(x)
3963  mfem::Operator& J = op->GetExplicitGradient(*xx);
3964  if (!ts_ctx->bchandler) { delete xx; }
3965 
3966  // Convert to the operator type requested if needed
3967  bool delete_pA = false;
3968  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
3969  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
3970  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
3971  pA->GetType() != ts_ctx->jacType))
3972  {
3973  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
3974  ts_ctx->jacType);
3975  delete_pA = true;
3976  }
3977 
3978  // Eliminate essential dofs
3979  if (ts_ctx->bchandler)
3980  {
3981  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
3982  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
3983  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
3984  }
3985 
3986  // Avoid an unneeded copy of the matrix by replacing P's header in place
3987  Mat B;
3988  B = pA->ReleaseMat(false);
3989  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
3990  if (delete_pA) { delete pA; }
3991 
3992  if (A && A != P)
3993  {
3994  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3995  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3996  }
3997 
3998  // Jacobian reuse
3999  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4000  {
4001  ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4002  }
4003  ierr = PetscObjectStateGet((PetscObject)A,&ts_ctx->cached_rhsjacstate);
4004  CHKERRQ(ierr);
4005  PetscFunctionReturn(0);
4006 }
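// The PetscObjectState caching above skips reassembly: for ODE_SOLVER_LINEAR
// the Jacobian is constant, so the callback returns early whenever the Mat
// state still matches the cached one. A hedged user-side sketch (`op`, `x`,
// `t` and `dt` are hypothetical):
//   mfem::PetscODESolver ode(MPI_COMM_WORLD);
//   ode.Init(op, mfem::PetscODESolver::ODE_SOLVER_LINEAR);
//   ode.Step(x, t, dt); // Jacobian assembled once, then reused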
4007 
4008 static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4009  void* ctx)
4010 {
4011  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4012 
4013  PetscFunctionBeginUser;
4014  if (!monctx)
4015  {
4016  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4017  }
4018 
4019  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4020  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4021  monctx->monitor);
4022  if (user_monitor->mon_sol)
4023  {
4024  Vec x;
4025  PetscErrorCode ierr;
4026 
4027  ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4028  mfem::PetscParVector V(x,true);
4029  user_monitor->MonitorSolution(it,res,V);
4030  }
4031  if (user_monitor->mon_res)
4032  {
4033  Vec x;
4034  PetscErrorCode ierr;
4035 
4036  ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4037  mfem::PetscParVector V(x,true);
4038  user_monitor->MonitorResidual(it,res,V);
4039  }
4040  user_monitor->MonitorSolver(solver);
4041  PetscFunctionReturn(0);
4042 }
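// A hedged sketch of a user-defined monitor driving the callback above (the
// class name is hypothetical; it assumes the PetscSolverMonitor base takes
// the (monitor_sol, monitor_res) flags that enable the mon_sol/mon_res
// branches):
//   class MyMonitor : public mfem::PetscSolverMonitor
//   {
//   public:
//      MyMonitor() : mfem::PetscSolverMonitor(true,false) {} // solution only
//      void MonitorSolution(PetscInt it, PetscReal norm, const mfem::Vector &x)
//      { mfem::out << "iteration " << it << ", residual " << norm << "\n"; }
//   };
// attached to any PetscSolver with solver.SetMonitor(&mymonitor).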
4043 
4044 static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4045  void *ctx)
4046 {
4047  PetscScalar *array;
4048  PetscInt n;
4049  PetscErrorCode ierr;
4050  mfem::Vector *xx;
4051  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4052 
4053  PetscFunctionBeginUser;
4054  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4055  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4056  if (!snes_ctx->bchandler)
4057  {
4058  xx = new mfem::Vector(array,n);
4059  }
4060  else
4061  {
4062  // make sure we compute a Jacobian with the correct boundary values
4063  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4064  mfem::Vector txx(array,n);
4065  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4066  xx = snes_ctx->work;
4067  bchandler->ApplyBC(txx,*xx);
4068  }
4069 
4070  // Use Operator::GetGradient(x)
4071  mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4072  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4073  if (!snes_ctx->bchandler) { delete xx; }
4074 
4075  // Convert to the operator type requested if needed
4076  bool delete_pA = false;
4077  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4078  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4079  if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4080  pA->GetType() != snes_ctx->jacType))
4081  {
4082  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4083  snes_ctx->jacType);
4084  delete_pA = true;
4085  }
4086 
4087  // Eliminate essential dofs
4088  if (snes_ctx->bchandler)
4089  {
4090  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4091  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4092  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4093  }
4094 
4095  // Avoid an unneeded copy of the matrix by replacing P's header in place
4096  Mat B = pA->ReleaseMat(false);
4097  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4098  if (delete_pA) { delete pA; }
4099  if (A && A != P)
4100  {
4101  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4102  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4103  }
4104  PetscFunctionReturn(0);
4105 }
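// Note on the header-replace trick used above: ReleaseMat(false) hands the
// freshly assembled Mat to PETSc without copying it, and MatHeaderReplace
// swaps its data into P while keeping P's user-visible handle unchanged.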
4106 
4107 static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4108 {
4109  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4110 
4111  PetscFunctionBeginUser;
4112  mfem::PetscParVector xx(x,true);
4113  mfem::PetscParVector ff(f,true);
4114  if (snes_ctx->bchandler)
4115  {
4116  // we evaluate the Mult method with the correct bc
4117  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
4118  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4119  mfem::Vector* txx = snes_ctx->work;
4120  bchandler->ApplyBC(xx,*txx);
4121  snes_ctx->op->Mult(*txx,ff);
4122  // and fix the residual (i.e. f_\partial\Omega = u - g)
4123  bchandler->FixResidualBC(xx,ff);
4124  }
4125  else
4126  {
4127  // use the Mult method of the class
4128  snes_ctx->op->Mult(xx,ff);
4129  }
4130  // need to tell PETSc the Vec has been updated
4131  ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
4132  PetscFunctionReturn(0);
4133 }
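// With essential boundary conditions, the residual assembled above is
//   f_i = [op->Mult(xbc)]_i  on interior dofs (xbc = x with BC values applied)
//   f_i = x_i - g_i          on constrained dofs (enforced by FixResidualBC)
// so the nonlinear solve drives the constrained entries to the BC values g.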
4134 
4135 static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
4136  void *ctx)
4137 {
4138  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4139 
4140  PetscFunctionBeginUser;
4141  if (!snes_ctx->objective)
4142  {
4143  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
4144  }
4145  mfem::PetscParVector xx(x,true);
4146  double lf;
4147  (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4148  *f = (PetscReal)lf;
4149  PetscFunctionReturn(0);
4150 }
4151 
4152 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4153  PetscBool *cy,PetscBool *cw, void* ctx)
4154 {
4155  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4156  bool lcy = false,lcw = false;
4157 
4158  PetscFunctionBeginUser;
4159  mfem::PetscParVector x(X,true);
4160  mfem::PetscParVector y(Y,true);
4161  mfem::PetscParVector w(W,true);
4162  (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4163  if (lcy) { *cy = PETSC_TRUE; }
4164  if (lcw) { *cw = PETSC_TRUE; }
4165  PetscFunctionReturn(0);
4166 }
4167 
4168 static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
4169 {
4170  Vec F,X,dX,pX;
4171  __mfem_snes_ctx* snes_ctx;
4172 
4173  PetscFunctionBeginUser;
4174  /* PETSc does not pass a context to the update callback; get it from the SNES residual function */
4175  ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
4176  ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4177  if (!it)
4178  {
4179  ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4180  ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
4181  CHKERRQ(ierr);
4182  ierr = VecDestroy(&pX); CHKERRQ(ierr);
4183  }
4184  ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
4185  CHKERRQ(ierr);
4186  if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
4187  "Missing previous solution");
4188  ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4189  mfem::PetscParVector f(F,true);
4190  mfem::PetscParVector x(X,true);
4191  mfem::PetscParVector dx(dX,true);
4192  mfem::PetscParVector px(pX,true);
4193  (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4194  /* Store previous solution */
4195  ierr = VecCopy(X,pX); CHKERRQ(ierr);
4196  PetscFunctionReturn(0);
4197 }
4198 
4199 static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
4200  void* ctx)
4201 {
4202  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4203 
4204  PetscFunctionBeginUser;
4205  if (!monctx)
4206  {
4207  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4208  }
4209 
4210  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4211  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4212  monctx->monitor);
4213  if (user_monitor->mon_sol)
4214  {
4215  Vec x;
4216  PetscErrorCode ierr;
4217 
4218  ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
4219  mfem::PetscParVector V(x,true);
4220  user_monitor->MonitorSolution(it,res,V);
4221  }
4222  if (user_monitor->mon_res)
4223  {
4224  Vec x;
4225  PetscErrorCode ierr;
4226 
4227  ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
4228  mfem::PetscParVector V(x,true);
4229  user_monitor->MonitorResidual(it,res,V);
4230  }
4231  user_monitor->MonitorSolver(solver);
4232  PetscFunctionReturn(0);
4233 }
4234 
4235 static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
4236 {
4237  __mfem_mat_shell_ctx *ctx;
4238  PetscErrorCode ierr;
4239 
4240  PetscFunctionBeginUser;
4241  ierr = MatShellGetContext(A,(void **)&ctx); CHKERRQ(ierr);
4242  mfem::PetscParVector xx(x,true);
4243  mfem::PetscParVector yy(y,true);
4244  ctx->op->Mult(xx,yy);
4245  // need to tell PETSc the Vec has been updated
4246  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4247  PetscFunctionReturn(0);
4248 }
4249 
4250 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
4251 {
4252  __mfem_mat_shell_ctx *ctx;
4253  PetscErrorCode ierr;
4254 
4255  PetscFunctionBeginUser;
4256  ierr = MatShellGetContext(A,(void **)&ctx); CHKERRQ(ierr);
4257  mfem::PetscParVector xx(x,true);
4258  mfem::PetscParVector yy(y,true);
4259  ctx->op->MultTranspose(xx,yy);
4260  // need to tell PETSc the Vec has been updated
4261  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4262  PetscFunctionReturn(0);
4263 }
4264 
4265 static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
4266 {
4267  __mfem_mat_shell_ctx *ctx;
4268  PetscErrorCode ierr;
4269 
4270  PetscFunctionBeginUser;
4271  ierr = MatShellGetContext(A,(void **)&ctx); CHKERRQ(ierr);
4272  delete ctx;
4273  PetscFunctionReturn(0);
4274 }
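// These three callbacks implement a PETSc MATSHELL around an mfem::Operator.
// A hedged sketch of the wiring (roughly what MakeWrapper, defined earlier in
// this file, does; `comm` and `op` are hypothetical):
//   Mat A;
//   __mfem_mat_shell_ctx *ctx = new __mfem_mat_shell_ctx;
//   ctx->op = op;
//   MatCreateShell(comm,op->Height(),op->Width(),PETSC_DECIDE,PETSC_DECIDE,
//                  (void*)ctx,&A);
//   MatShellSetOperation(A,MATOP_MULT,(void (*)())__mfem_mat_shell_apply);
//   MatShellSetOperation(A,MATOP_MULT_TRANSPOSE,
//                        (void (*)())__mfem_mat_shell_apply_transpose);
//   MatShellSetOperation(A,MATOP_DESTROY,(void (*)())__mfem_mat_shell_destroy);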
4275 
4276 static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
4277 {
4278  __mfem_pc_shell_ctx *ctx;
4279  PetscErrorCode ierr;
4280 
4281  PetscFunctionBeginUser;
4282  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4283  if (ctx->op)
4284  {
4285  PetscBool isascii;
4286  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
4287  CHKERRQ(ierr);
4288 
4289  mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
4290  (ctx->op);
4291  if (ppc)
4292  {
4293  ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
4294  }
4295  else
4296  {
4297  if (isascii)
4298  {
4299  ierr = PetscViewerASCIIPrintf(viewer,
4300  "No information available on the mfem::Solver\n");
4301  CHKERRQ(ierr);
4302  }
4303  }
4304  if (isascii && ctx->factory)
4305  {
4306  ierr = PetscViewerASCIIPrintf(viewer,
4307  "Number of preconditioners created by the factory %lu\n",ctx->numprec);
4308  CHKERRQ(ierr);
4309  }
4310  }
4311  PetscFunctionReturn(0);
4312 }
4313 
4314 static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
4315 {
4316  __mfem_pc_shell_ctx *ctx;
4317  PetscErrorCode ierr;
4318 
4319  PetscFunctionBeginUser;
4320  mfem::PetscParVector xx(x,true);
4321  mfem::PetscParVector yy(y,true);
4322  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4323  if (ctx->op)
4324  {
4325  ctx->op->Mult(xx,yy);
4326  // need to tell PETSc the Vec has been updated
4327  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4328  }
4329  else // operator is not present, copy x
4330  {
4331  yy = xx;
4332  }
4333  PetscFunctionReturn(0);
4334 }
4335 
4336 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
4337 {
4338  __mfem_pc_shell_ctx *ctx;
4339  PetscErrorCode ierr;
4340 
4341  PetscFunctionBeginUser;
4342  mfem::PetscParVector xx(x,true);
4343  mfem::PetscParVector yy(y,true);
4344  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4345  if (ctx->op)
4346  {
4347  ctx->op->MultTranspose(xx,yy);
4348  // need to tell PETSc the Vec has been updated
4349  ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
4350  }
4351  else // operator is not present, copy x
4352  {
4353  yy = xx;
4354  }
4355  PetscFunctionReturn(0);
4356 }
4357 
4358 static PetscErrorCode __mfem_pc_shell_setup(PC pc)
4359 {
4360  __mfem_pc_shell_ctx *ctx;
4361 
4362  PetscFunctionBeginUser;
4363  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4364  if (ctx->factory)
4365  {
4366  // Delete any owned operator
4367  if (ctx->ownsop)
4368  {
4369  delete ctx->op;
4370  }
4371 
4372  // Get current preconditioning Mat
4373  Mat B;
4374  ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
4375 
4376  // Call user-defined setup
4377  mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
4378  mfem::PetscPreconditionerFactory *factory = ctx->factory;
4379  ctx->op = factory->NewPreconditioner(hB);
4380  ctx->ownsop = true;
4381  ctx->numprec++;
4382  }
4383  PetscFunctionReturn(0);
4384 }
4385 
4386 static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
4387 {
4388  __mfem_pc_shell_ctx *ctx;
4389  PetscErrorCode ierr;
4390 
4391  PetscFunctionBeginUser;
4392  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
4393  if (ctx->ownsop)
4394  {
4395  delete ctx->op;
4396  }
4397  delete ctx;
4398  PetscFunctionReturn(0);
4399 }
4400 
4401 static PetscErrorCode __mfem_array_container_destroy(void *ptr)
4402 {
4403  PetscErrorCode ierr;
4404 
4405  PetscFunctionBeginUser;
4406  ierr = PetscFree(ptr); CHKERRQ(ierr);
4407  PetscFunctionReturn(0);
4408 }
4409 
4410 static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
4411 {
4412  mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
4413  PetscErrorCode ierr;
4414 
4415  PetscFunctionBeginUser;
4416  for (int i=0; i<a->Size(); i++)
4417  {
4418  Mat M = (*a)[i];
4419  MPI_Comm comm = PetscObjectComm((PetscObject)M);
4420  ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
4421  }
4422  delete a;
4423  PetscFunctionReturn(0);
4424 }
4425 
4426 static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
4427 {
4428  PetscErrorCode ierr;
4429 
4430  PetscFunctionBeginUser;
4431  ierr = PetscFree(*ctx); CHKERRQ(ierr);
4432  PetscFunctionReturn(0);
4433 }
4434 
4435 // Sets the type of PC to PCSHELL and wraps the solver action
4436 // if ownsop is true, ownership of precond is transferred to the PETSc object
4437 PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
4438 {
4439  PetscFunctionBeginUser;
4440  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
4441  ctx->op = &precond;
4442  ctx->ownsop = ownsop;
4443  ctx->factory = NULL;
4444  ctx->numprec = 0;
4445 
4446  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
4447  ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
4448  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
4449  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
4450  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
4451  CHKERRQ(ierr);
4452  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
4453  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
4454  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
4455  PetscFunctionReturn(0);
4456 }
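// A hedged usage sketch (`ksp` is an existing KSP and `myPrec` an
// mfem::Solver; both are hypothetical):
//   PC pc;
//   ierr = KSPGetPC(ksp,&pc); CHKERRQ(ierr);
//   ierr = MakeShellPC(pc,myPrec,false); CHKERRQ(ierr); // caller keeps ownership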
4457 
4458 // Sets the type of PC to PCSHELL. Uses a PetscPreconditionerFactory to construct the solver
4459 // Takes ownership of the solver created by the factory
4460 PetscErrorCode MakeShellPCWithFactory(PC pc,
4461  mfem::PetscPreconditionerFactory *factory)
4462 {
4463  PetscFunctionBeginUser;
4464  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
4465  ctx->op = NULL;
4466  ctx->ownsop = true;
4467  ctx->factory = factory;
4468  ctx->numprec = 0;
4469 
4470  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
4471  ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
4472  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
4473  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
4474  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
4475  CHKERRQ(ierr);
4476  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
4477  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
4478  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
4479  PetscFunctionReturn(0);
4480 }
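// A hedged sketch of a factory driving the setup hook above (MyFactory and
// MySolver are hypothetical; NewPreconditioner is called at each PCSetUp with
// the current preconditioning matrix):
//   class MyFactory : public mfem::PetscPreconditionerFactory
//   {
//   public:
//      MyFactory() : mfem::PetscPreconditionerFactory("my factory") {}
//      mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh)
//      { return new MySolver(oh); }
//   };
// registered with a solver via SetPreconditionerFactory(&factory).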
4481 
4482 // Converts from a list (or a marked Array if islist is false) to an IS
4483 // st indicates the offset where to start numbering
4484 static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
4485  const mfem::Array<int> *list,
4486  PetscInt st, IS* is)
4487 {
4488  PetscInt n = list ? list->Size() : 0,*idxs;
4489  const int *data = list ? list->GetData() : NULL;
4490  PetscErrorCode ierr;
4491 
4492  PetscFunctionBeginUser;
4493  ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
4494  if (islist)
4495  {
4496  for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
4497  }
4498  else
4499  {
4500  PetscInt cum = 0;
4501  for (PetscInt i=0; i<n; i++)
4502  {
4503  if (data[i]) { idxs[cum++] = i+st; }
4504  }
4505  n = cum;
4506  }
4507  ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
4508  CHKERRQ(ierr);
4509  PetscFunctionReturn(0);
4510 }
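// A worked example of the two modes, with st = 10:
//   islist = true,  list = {0,3,5}   -> IS {10,13,15}
//   islist = false, list = {0,1,1,0} -> IS {11,12} (positions of the nonzeros)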
4511 
4512 // Converts from a marked Array of Vdofs to an IS
4513 // st indicates the offset where to start numbering
4514 // l2l is a vector of matrices generated during RAP
4515 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
4516  mfem::Array<Mat> &pl2l,
4517  const mfem::Array<int> *mark,
4518  PetscInt st, IS* is)
4519 {
4520  mfem::Array<int> sub_dof_marker;
4521  mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
4522  PetscInt nl;
4523  PetscErrorCode ierr;
4524 
4525  PetscFunctionBeginUser;
4526  for (int i = 0; i < pl2l.Size(); i++)
4527  {
4528  PetscInt m,n,*ii,*jj;
4529  PetscBool done;
4530  ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
4531  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
4532  MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
4533  ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
4534 #if defined(PETSC_USE_64BIT_INDICES)
4535  int nnz = (int)ii[m];
4536  int *mii = new int[m+1];
4537  int *mjj = new int[nnz];
4538  for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
4539  for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
4540  l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
4541 #else
4542  l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
4543 #endif
4544  ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
4545  (const PetscInt**)&ii,
4546  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
4547  MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
4548  << i << " l2l matrix");
4549  }
4550  nl = 0;
4551  for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
4552  sub_dof_marker.SetSize(nl);
4553  const int* vdata = mark->GetData();
4554  int* sdata = sub_dof_marker.GetData();
4555  int cumh = 0, cumw = 0;
4556  for (int i = 0; i < l2l.Size(); i++)
4557  {
4558  const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
4559  l2l[i]->Height());
4560  mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
4561  l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
4562  cumh += l2l[i]->Height();
4563  cumw += l2l[i]->Width();
4564  }
4565  ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
4566  for (int i = 0; i < pl2l.Size(); i++)
4567  {
4568  delete l2l[i];
4569  }
4570  PetscFunctionReturn(0);
4571 }
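// In other words: each l2l matrix maps subdomain dofs (columns) to vdofs
// (rows), so BooleanMultTranspose pulls the vdof marker back to a subdomain
// marker; the concatenated markers are then converted to an IS by
// Convert_Array_IS in marker mode.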
4572 
4573 #if !defined(PETSC_HAVE_HYPRE)
4574 
4575 #if defined(HYPRE_MIXEDINT)
4576 #error "HYPRE_MIXEDINT not supported"
4577 #endif
4578 
4579 #include "_hypre_parcsr_mv.h"
4580 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
4581 {
4582  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
4583  hypre_CSRMatrix *hdiag,*hoffd;
4584  PetscScalar *da,*oa,*aptr;
4585  PetscInt *dii,*djj,*oii,*ojj,*iptr;
4586  PetscInt i,dnnz,onnz,m,n;
4587  PetscMPIInt size;
4588  PetscErrorCode ierr;
4589 
4590  PetscFunctionBeginUser;
4591  hdiag = hypre_ParCSRMatrixDiag(hA);
4592  hoffd = hypre_ParCSRMatrixOffd(hA);
4593  m = hypre_CSRMatrixNumRows(hdiag);
4594  n = hypre_CSRMatrixNumCols(hdiag);
4595  dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
4596  onnz = hypre_CSRMatrixNumNonzeros(hoffd);
4597  ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
4598  ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
4599  ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
4600  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
4601  CHKERRQ(ierr);
4602  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
4603  CHKERRQ(ierr);
4604  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
4605  CHKERRQ(ierr);
4606  iptr = djj;
4607  aptr = da;
4608  for (i=0; i<m; i++)
4609  {
4610  PetscInt nc = dii[i+1]-dii[i];
4611  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
4612  iptr += nc;
4613  aptr += nc;
4614  }
4615  ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
4616  if (size > 1)
4617  {
4618  PetscInt *offdj,*coffd;
4619 
4620  ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
4621  ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
4622  ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
4623  ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
4624  CHKERRQ(ierr);
4625  offdj = hypre_CSRMatrixJ(hoffd);
4626  coffd = hypre_ParCSRMatrixColMapOffd(hA);
4627  for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
4628  ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
4629  CHKERRQ(ierr);
4630  iptr = ojj;
4631  aptr = oa;
4632  for (i=0; i<m; i++)
4633  {
4634  PetscInt nc = oii[i+1]-oii[i];
4635  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
4636  iptr += nc;
4637  aptr += nc;
4638  }
4639  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
4640  djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
4641  }
4642  else
4643  {
4644  oii = ojj = NULL;
4645  oa = NULL;
4646  ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
4647  }
4648  /* We are responsible for freeing the CSR arrays. However, since we can
4649  take references to a PetscParMatrix but not to raw PETSc arrays, we
4650  create PetscContainer objects that hold a reference to these arrays and
4651  free them when the referencing objects are destroyed */
4652  void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
4653  const char *names[6] = {"_mfem_csr_dii",
4654  "_mfem_csr_djj",
4655  "_mfem_csr_da",
4656  "_mfem_csr_oii",
4657  "_mfem_csr_ojj",
4658  "_mfem_csr_oa"
4659  };
4660  for (i=0; i<6; i++)
4661  {
4662  PetscContainer c;
4663 
4664  ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
4665  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
4666  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
4667  CHKERRQ(ierr);
4668  ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
4669  CHKERRQ(ierr);
4670  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
4671  }
4672  PetscFunctionReturn(0);
4673 }
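// The PetscSortIntWithScalarArray calls above are needed because hypre does
// not guarantee sorted column indices within a row, while PETSc's AIJ format
// assumes they are; the values are permuted together with the indices so the
// (j,a) pairs stay consistent.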
4674 
4675 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
4676 {
4677  Mat lA;
4678  ISLocalToGlobalMapping rl2g,cl2g;
4679  IS is;
4680  hypre_CSRMatrix *hdiag,*hoffd;
4681  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
4682  void *ptrs[2];
4683  const char *names[2] = {"_mfem_csr_aux",
4684  "_mfem_csr_data"
4685  };
4686  PetscScalar *hdd,*hod,*aa,*data;
4687  PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
4688  PetscInt *aux,*ii,*jj;
4689  PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
4690  PetscErrorCode ierr;
4691 
4692  PetscFunctionBeginUser;
4693  /* access relevant information in ParCSR */
4694  str = hypre_ParCSRMatrixFirstRowIndex(hA);
4695  stc = hypre_ParCSRMatrixFirstColDiag(hA);
4696  hdiag = hypre_ParCSRMatrixDiag(hA);
4697  hoffd = hypre_ParCSRMatrixOffd(hA);
4698  dr = hypre_CSRMatrixNumRows(hdiag);
4699  dc = hypre_CSRMatrixNumCols(hdiag);
4700  nnz = hypre_CSRMatrixNumNonzeros(hdiag);
4701  hdi = hypre_CSRMatrixI(hdiag);
4702  hdj = hypre_CSRMatrixJ(hdiag);
4703  hdd = hypre_CSRMatrixData(hdiag);
4704  oc = hypre_CSRMatrixNumCols(hoffd);
4705  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
4706  hoi = hypre_CSRMatrixI(hoffd);
4707  hoj = hypre_CSRMatrixJ(hoffd);
4708  hod = hypre_CSRMatrixData(hoffd);
4709 
4710  /* generate l2g maps for rows and cols */
4711  ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
4712  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
4713  ierr = ISDestroy(&is); CHKERRQ(ierr);
4714  col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
4715  ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
4716  for (i=0; i<dc; i++) { aux[i] = i+stc; }
4717  for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
4718  ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
4719  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
4720  ierr = ISDestroy(&is); CHKERRQ(ierr);
4721 
4722  /* create MATIS object */
4723  ierr = MatCreate(comm,pA); CHKERRQ(ierr);
4724  ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
4725  ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
4726  ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
4727  ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
4728  ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
4729 
4730  /* merge local matrices */
4731  ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
4732  ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
4733  ii = aux;
4734  jj = aux+dr+1;
4735  aa = data;
4736  *ii = *(hdi++) + *(hoi++);
4737  for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
4738  {
4739  PetscScalar *aold = aa;
4740  PetscInt *jold = jj,nc = jd+jo;
4741  for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
4742  for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
4743  *(++ii) = *(hdi++) + *(hoi++);
4744  ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
4745  }
4746  for (; cum<dr; cum++) { *(++ii) = nnz; }
4747  ii = aux;
4748  jj = aux+dr+1;
4749  aa = data;
4750  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
4751  CHKERRQ(ierr);
4752  ptrs[0] = aux;
4753  ptrs[1] = data;
4754  for (i=0; i<2; i++)
4755  {
4756  PetscContainer c;
4757 
4758  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
4759  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
4760  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
4761  CHKERRQ(ierr);
4762  ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
4763  CHKERRQ(ierr);
4764  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
4765  }
4766  ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
4767  ierr = MatDestroy(&lA); CHKERRQ(ierr);
4768  ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4769  ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4770  PetscFunctionReturn(0);
4771 }
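// The merged local matrix built above is dr x (dc+oc): diagonal-block columns
// keep their local index, off-diagonal columns are shifted by dc, matching
// the column local-to-global map aux = [stc..stc+dc-1, col_map_offd]
// composed earlier.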
4772 #endif
4773 
4774 #endif // MFEM_USE_PETSC
4775 #endif // MFEM_USE_MPI