MFEM  v4.5.2
Finite element discretization library
petsc.cpp
1 // Copyright (c) 2010-2023, Lawrence Livermore National Security, LLC. Produced
2 // at the Lawrence Livermore National Laboratory. All Rights reserved. See files
3 // LICENSE and NOTICE for details. LLNL-CODE-806117.
4 //
5 // This file is part of the MFEM library. For more information and source code
6 // availability visit https://mfem.org.
7 //
8 // MFEM is free software; you can redistribute it and/or modify it under the
9 // terms of the BSD-3 license. We welcome feedback and contributions, see file
10 // CONTRIBUTING.md for details.
11 
12 // Author: Stefano Zampini <stefano.zampini@gmail.com>
13 
14 #include "../config/config.hpp"
15 
16 #ifdef MFEM_USE_MPI
17 #ifdef MFEM_USE_PETSC
18 
19 #include "linalg.hpp"
20 #include "../fem/fem.hpp"
21 
22 #include "petsc.h"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
25 #endif
26 
27 // Backward compatibility
28 #if PETSC_VERSION_LT(3,11,0)
29 #define VecLockReadPush VecLockPush
30 #define VecLockReadPop VecLockPop
31 #endif
32 #if PETSC_VERSION_LT(3,12,0)
33 #define VecGetArrayWrite VecGetArray
34 #define VecRestoreArrayWrite VecRestoreArray
35 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
37 #endif
38 
39 #include <fstream>
40 #include <iomanip>
41 #include <cmath>
42 #include <cstdlib>
43 
44 // Note: there are additional #include statements below.
45 
46 #include "petscinternals.hpp"
47 
48 // Callback functions: these functions will be called by PETSc
49 static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
50 static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
51 static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
52  void*);
53 static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
54 static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
55  PetscReal,Mat,
56  Mat,void*);
57 static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
58  Mat,Mat,Mat,Mat);
59 static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
60 static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
61 static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
62 static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
63 static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
64 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
65  PetscBool*,PetscBool*,void*);
66 static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
67 static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
68 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
69 static PetscErrorCode __mfem_pc_shell_setup(PC);
70 static PetscErrorCode __mfem_pc_shell_destroy(PC);
71 static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
72 static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
73 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
74 static PetscErrorCode __mfem_mat_shell_destroy(Mat);
75 static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
76 static PetscErrorCode __mfem_array_container_destroy(void*);
77 static PetscErrorCode __mfem_matarray_container_destroy(void*);
78 static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
79 
80 // auxiliary functions
81 static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
82  PetscInt,IS*);
83 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
84  const mfem::Array<int>*,PetscInt,IS*);
85 static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
86 static PetscErrorCode MakeShellPCWithFactory(PC,
87  mfem::PetscPreconditionerFactory*);
88 
89 // Equivalent functions are present in PETSc source code
90 // if PETSc has been compiled with hypre support
91 // We provide them here in case PETSC_HAVE_HYPRE is not defined
92 #if !defined(PETSC_HAVE_HYPRE)
93 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
94 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
95 #endif
96 
97 #if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
98 #if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
99 #ifndef _USE_DEVICE
100 #define _USE_DEVICE
101 #endif
102 #endif
103 #endif
104 
105 #if defined(PETSC_HAVE_DEVICE)
106 static PetscErrorCode __mfem_VecSetOffloadMask(Vec,PetscOffloadMask);
107 #endif
108 static PetscErrorCode __mfem_VecBoundToCPU(Vec,PetscBool*);
109 static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject);
110 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm,PetscInt,PetscInt,Mat*);
111 
112 // structs used by PETSc code
113 typedef struct
114 {
115  mfem::Solver *op;
116  mfem::PetscPreconditionerFactory *factory;
117  bool ownsop;
118  unsigned long int numprec;
119 } __mfem_pc_shell_ctx;
120 
121 typedef struct
122 {
123  mfem::Operator *op; // The nonlinear operator
124  mfem::PetscBCHandler *bchandler; // Handling of essential bc
125  mfem::Vector *work; // Work vector
126  mfem::Operator::Type jacType; // OperatorType for the Jacobian
127  // Objective for line search
128  void (*objective)(mfem::Operator *op, const mfem::Vector&, double*);
129  // PostCheck function (to be called after successful line search)
130  void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
131  mfem::Vector&, bool&, bool&);
132  // General purpose update function (to be called at the beginning of
133  // each nonlinear step)
134  void (*update)(mfem::Operator *op, int,
135  const mfem::Vector&, const mfem::Vector&,
136  const mfem::Vector&, const mfem::Vector&);
137 } __mfem_snes_ctx;
138 
139 typedef struct
140 {
141  mfem::TimeDependentOperator *op; // The time-dependent operator
142  mfem::PetscBCHandler *bchandler; // Handling of essential bc
143  mfem::Vector *work; // Work vector
144  mfem::Vector *work2; // Work vector
145  mfem::Operator::Type jacType; // OperatorType for the Jacobian
146  enum mfem::PetscODESolver::Type type;
147  PetscReal cached_shift;
148  PetscObjectState cached_ijacstate;
149  PetscObjectState cached_rhsjacstate;
150  PetscObjectState cached_splits_xstate;
151  PetscObjectState cached_splits_xdotstate;
152 } __mfem_ts_ctx;
153 
154 typedef struct
155 {
156  mfem::PetscSolver *solver; // The solver object
157  mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
158 } __mfem_monitor_ctx;
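// Note: the pattern used with the context structs above is to attach them to
// the corresponding PETSc object and recover them inside a callback. A sketch
// of the preconditioner case (mirroring __mfem_pc_shell_apply below):
//
//   __mfem_pc_shell_ctx *ctx;
//   ierr = PCShellGetContext(pc,(void**)&ctx); CHKERRQ(ierr);
//   // forward the PETSc apply to the wrapped MFEM solver
//   ctx->op->Mult(xx,yy);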
159 
160 // use global scope ierr to check PETSc errors inside mfem calls
161 static PetscErrorCode ierr;
162 
163 using namespace std;
164 
165 namespace mfem
166 {
167 
168 void MFEMInitializePetsc()
169 {
170  MFEMInitializePetsc(NULL,NULL,NULL,NULL);
171 }
172 
173 void MFEMInitializePetsc(int *argc,char*** argv)
174 {
175  MFEMInitializePetsc(argc,argv,NULL,NULL);
176 }
177 
178 void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
179  const char help[])
180 {
182  {
183  // Tell PETSc to use the same CUDA device as MFEM:
184  ierr = PetscOptionsSetValue(NULL,"-cuda_device",
185  to_string(mfem::Device::GetId()).c_str());
186  MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
187  }
188  ierr = PetscInitialize(argc,argv,rc_file,help);
189  MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
190 }
191 
192 void MFEMFinalizePetsc()
193 {
194  ierr = PetscFinalize();
195  MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
196 }
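// Usage sketch (an application-side example; only the three MFEM calls shown
// are from this library): MPI and the MFEM device should be set up before
// MFEMInitializePetsc so the "-cuda_device" option above can take effect, and
// MFEMFinalizePetsc must run before MPI is finalized.
//
//   int main(int argc, char *argv[])
//   {
//      mfem::Mpi::Init(argc, argv);
//      mfem::Device device("cuda"); // or "cpu"
//      mfem::MFEMInitializePetsc(&argc, &argv, NULL, NULL);
//      // ... assemble and solve ...
//      mfem::MFEMFinalizePetsc();
//      return 0;
//   }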
197 
198 const double* PetscMemory::GetHostPointer() const
199 {
200  int oflags = flags;
201  SetHostValid();
202  const double *v = mfem::Read(*this,Capacity(),false);
203  flags = oflags;
204  return v;
205 }
206 
207 const double* PetscMemory::GetDevicePointer() const
208 {
209  int oflags = flags;
210  SetDeviceValid();
211  const double *v = mfem::Read(*this,Capacity(),true);
212  flags = oflags;
213  return v;
214 }
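// Note on the two accessors above: they temporarily mark the host (resp.
// device) copy as valid so that mfem::Read hands back the corresponding
// pointer without triggering a copy, then restore the original validity
// flags. The returned pointer is therefore only safe to dereference when
// that memory space actually holds up-to-date data.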
215 
216 // PetscParVector methods
217 
218 void PetscParVector::SetDataAndSize_()
219 {
220  PetscScalar *array;
221  PetscInt n;
222 
223  MFEM_VERIFY(x,"Missing Vec");
224  ierr = VecSetUp(x); PCHKERRQ(x,ierr);
225  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
226  MFEM_VERIFY(n >= 0,"Invalid local size");
227  size = n;
228 #if defined(PETSC_HAVE_DEVICE)
229  PetscOffloadMask omask;
230  PetscBool iscuda;
231 
232  ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
233  if (omask != PETSC_OFFLOAD_BOTH)
234  {
235  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
236  }
237 #endif
238  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
239 #if defined(PETSC_HAVE_DEVICE)
240  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
241  ""); PCHKERRQ(x,ierr);
242  if (iscuda)
243  {
244  if (omask != PETSC_OFFLOAD_BOTH)
245  {
246  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
247  }
248  PetscScalar *darray;
249  ierr = VecCUDAGetArrayRead(x,(const PetscScalar**)&darray); PCHKERRQ(x,ierr);
250  pdata.Wrap(array,darray,size,MemoryType::HOST,false);
251  ierr = VecCUDARestoreArrayRead(x,(const PetscScalar**)&darray);
252  PCHKERRQ(x,ierr);
253  }
254  else
255 #endif
256  {
257  pdata.Wrap(array,size,MemoryType::HOST,false);
258  }
259  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
260 
261 #if defined(PETSC_HAVE_DEVICE)
262  ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
263 #endif
264  data.MakeAlias(pdata,0,size);
265  SetFlagsFromMask_();
266 }
267 
268 void PetscParVector::SetFlagsFromMask_() const
269 {
270  MFEM_VERIFY(x,"Missing Vec");
271 #if defined(_USE_DEVICE)
272  PetscOffloadMask mask;
273  PetscBool iscuda;
274  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
275  ""); PCHKERRQ(x,ierr);
276  ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
277  if (iscuda)
278  {
279  switch (mask)
280  {
281  case PETSC_OFFLOAD_CPU:
282  pdata.SetHostValid();
283  pdata.SetDeviceInvalid();
284  break;
285  case PETSC_OFFLOAD_GPU:
286  pdata.SetHostInvalid();
287  pdata.SetDeviceValid();
288  break;
289  case PETSC_OFFLOAD_BOTH:
290  pdata.SetHostValid();
291  pdata.SetDeviceValid();
292  break;
293  default:
294  MFEM_ABORT("Unhandled case " << mask);
295  }
296  }
297 #endif
298  data.Sync(pdata);
299 }
300 
301 void PetscParVector::UpdateVecFromFlags()
302 {
303  MFEM_VERIFY(x,"Missing Vec");
304  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
305 #if defined(_USE_DEVICE)
306  PetscBool iscuda;
307  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
308  ""); PCHKERRQ(x,ierr);
309  if (iscuda)
310  {
311  bool dv = pdata.DeviceIsValid();
312  bool hv = pdata.HostIsValid();
313  PetscOffloadMask mask;
314  if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
315  else if (dv) { mask = PETSC_OFFLOAD_GPU; }
316  else { mask = PETSC_OFFLOAD_CPU; }
317  ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
318  }
319  else
320 #endif
321  {
322  /* Just make sure we have an up-to-date copy on the CPU for PETSc */
323  PetscScalar *v;
324  ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
325  pdata.CopyToHost(v,size);
326  ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
327  }
328 }
329 
330 void PetscParVector::SetVecType_()
331 {
332  VecType vectype;
333  MFEM_VERIFY(x,"Missing Vec");
334  ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
335 #if defined(_USE_DEVICE)
336  switch (Device::GetDeviceMemoryType())
337  {
338  case MemoryType::DEVICE:
339  case MemoryType::MANAGED:
340  ierr = VecSetType(x,VECCUDA); PCHKERRQ(x,ierr);
341  break;
342  default:
343  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
344  break;
345  }
346 #else
347  if (!vectype)
348  {
349  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
350  }
351 #endif
352 }
353 
354 const double* PetscParVector::Read(bool on_dev) const
355 {
356  const PetscScalar *dummy;
357  MFEM_VERIFY(x,"Missing Vec");
358 #if defined(PETSC_HAVE_DEVICE)
359  PetscBool iscuda;
360  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
361  ""); PCHKERRQ(x,ierr);
362  if (on_dev && iscuda)
363  {
364  ierr = VecCUDAGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
365  ierr = VecCUDARestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
366  }
367  else
368 #endif
369  {
370  ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
371  ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
372  }
373  SetFlagsFromMask_();
374  return mfem::Read(pdata, size, on_dev);
375 }
376 
377 const double* PetscParVector::HostRead() const
378 {
379  return Read(false);
380 }
381 
382 double* PetscParVector::Write(bool on_dev)
383 {
384  PetscScalar *dummy;
385  MFEM_VERIFY(x,"Missing Vec");
386 #if defined(PETSC_HAVE_DEVICE)
387  PetscBool iscuda;
388  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
389  ""); PCHKERRQ(x,ierr);
390  if (on_dev && iscuda)
391  {
392  ierr = VecCUDAGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
393  ierr = VecCUDARestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
394  }
395  else
396 #endif
397  {
398  ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
399  ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
400  }
401  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
402  SetFlagsFromMask_();
403  return mfem::Write(pdata, size, on_dev);
404 }
405 
406 double* PetscParVector::HostWrite()
407 {
408  return Write(false);
409 }
410 
411 double* PetscParVector::ReadWrite(bool on_dev)
412 {
413  PetscScalar *dummy;
414  MFEM_VERIFY(x,"Missing Vec");
415 #if defined(PETSC_HAVE_DEVICE)
416  PetscBool iscuda;
417  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
418  ""); PCHKERRQ(x,ierr);
419  if (on_dev && iscuda)
420  {
421  ierr = VecCUDAGetArray(x,&dummy); PCHKERRQ(x,ierr);
422  ierr = VecCUDARestoreArray(x,&dummy); PCHKERRQ(x,ierr);
423  }
424  else
425 #endif
426  {
427  ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
428  ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
429  }
430  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
431  SetFlagsFromMask_();
432  return mfem::ReadWrite(pdata, size, on_dev);
433 }
434 
435 double* PetscParVector::HostReadWrite()
436 {
437  return ReadWrite(false);
438 }
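// Example (sketch): the accessors above mirror mfem::Vector's memory access
// protocol. The dummy VecGetArray*/VecRestoreArray* pairs only force PETSc
// to make the requested memory space current; mfem::Read/Write/ReadWrite
// then return the pointer and update the MFEM-side validity flags.
//
//   PetscParVector v(MPI_COMM_WORLD, 100);
//   const double *d_v = v.Read(true); // device pointer if CUDA is enabled
//   double *h_w = v.HostWrite();      // host pointer, contents overwritten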
439 
440 void PetscParVector::UseDevice(bool dev) const
441 {
442  MFEM_VERIFY(x,"Missing Vec");
443 #if defined(PETSC_HAVE_DEVICE)
444  ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
445 #endif
446 }
447 
448 bool PetscParVector::UseDevice() const
449 {
450  PetscBool flg;
451  MFEM_VERIFY(x,"Missing Vec");
452  ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
453  return flg ? false : true;
454 }
455 
456 PetscInt PetscParVector::GlobalSize() const
457 {
458  PetscInt N;
459  ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
460  return N;
461 }
462 
463 void PetscParVector::SetBlockSize(PetscInt bs)
464 {
465  ierr = VecSetBlockSize(x,bs); PCHKERRQ(x,ierr);
466 }
467 
468 PetscParVector::PetscParVector(MPI_Comm comm, const Vector &x_,
469  bool copy) : Vector()
470 {
471  int n = x_.Size();
472  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
473  ierr = VecSetSizes(x,n,PETSC_DECIDE); PCHKERRQ(x,ierr);
474  SetVecType_();
475  SetDataAndSize_();
476  if (copy)
477  {
478 
479  /* we use PETSc accessors to flag valid memory location to PETSc */
480  PetscErrorCode (*rest)(Vec,PetscScalar**);
481  PetscScalar *array;
482 #if defined(PETSC_HAVE_DEVICE)
483  PetscBool iscuda;
484  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
485  ""); PCHKERRQ(x,ierr);
486  if (iscuda && x_.UseDevice())
487  {
488  UseDevice(true);
489  ierr = VecCUDAGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
490  rest = VecCUDARestoreArrayWrite;
491  }
492  else
493 #endif
494  {
495  UseDevice(false);
496  ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
497  rest = VecRestoreArrayWrite;
498  }
499  pdata.CopyFrom(x_.GetMemory(), n);
500  ierr = (*rest)(x,&array); PCHKERRQ(x,ierr);
501  SetFlagsFromMask_();
502  }
503 }
504 
505 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
506  PetscInt *col) : Vector()
507 {
508  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
509  if (col)
510  {
511  PetscMPIInt myid;
512  MPI_Comm_rank(comm, &myid);
513  ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
514  }
515  else
516  {
517  ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
518  }
519  SetVecType_();
520  SetDataAndSize_();
521 }
522 
523 PetscParVector::~PetscParVector()
524 {
525  MPI_Comm comm = PetscObjectComm((PetscObject)x);
526  ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
527  pdata.Delete();
528 }
529 
530 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
531  PetscScalar *data_, PetscInt *col) : Vector()
532 {
533  MFEM_VERIFY(col,"Missing distribution");
534  PetscMPIInt myid;
535  MPI_Comm_rank(comm, &myid);
536  ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
537  &x); CCHKERRQ(comm,ierr)
538  SetVecType_();
539  SetDataAndSize_();
540 }
541 
542 PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
543 {
544  ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
545  SetDataAndSize_();
546 }
547 
548 PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
549  bool transpose, bool allocate) : Vector()
550 {
551  PetscInt loc = transpose ? op.Height() : op.Width();
552 
553  ierr = VecCreate(comm,&x);
554  CCHKERRQ(comm,ierr);
555  ierr = VecSetSizes(x,loc,PETSC_DECIDE);
556  PCHKERRQ(x,ierr);
557 
558  SetVecType_();
559  if (allocate)
560  {
561  SetDataAndSize_();
562  }
563  else /* Vector intended to be used with Place/ResetMemory calls */
564  {
565  size = loc;
566  }
567 }
568 
569 PetscParVector::PetscParVector(const PetscParMatrix &A,
570  bool transpose, bool allocate) : Vector()
571 {
572  Mat pA = const_cast<PetscParMatrix&>(A);
573  if (!transpose)
574  {
575  ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
576  }
577  else
578  {
579  ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
580  }
581  SetVecType_();
582  if (!allocate) /* Vector intended to be used with Place/ResetMemory calls */
583  {
584  PetscInt n;
585  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
586  size = n;
587  }
588  else
589  {
590  SetDataAndSize_();
591  }
592 }
593 
594 PetscParVector::PetscParVector(petsc::Vec y, bool ref) : Vector()
595 {
596  if (ref)
597  {
598  ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
599  }
600  x = y;
601  SetDataAndSize_();
602 }
603 
604 PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
605 {
606  HYPRE_BigInt* offsets = pfes->GetTrueDofOffsets();
607  MPI_Comm comm = pfes->GetComm();
608  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
609 
610  PetscMPIInt myid = 0;
611  if (!HYPRE_AssumedPartitionCheck())
612  {
613  MPI_Comm_rank(comm,&myid);
614  }
615  ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
616  PCHKERRQ(x,ierr);
617  SetVecType_();
618  SetDataAndSize_();
619 }
620 
621 MPI_Comm PetscParVector::GetComm() const
622 {
623  return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
624 }
625 
626 Vector * PetscParVector::GlobalVector() const
627 {
628  VecScatter scctx;
629  Vec vout;
630  const PetscScalar *array;
631  PetscInt size;
632 
633  ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
634  ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
635  PCHKERRQ(x,ierr);
636  ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
637  PCHKERRQ(x,ierr);
638  ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
639  ierr = VecGetArrayRead(vout,&array); PCHKERRQ(x,ierr);
640  ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
641  Array<PetscScalar> data(internal::to_int(size));
642  data.Assign(array);
643  ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(x,ierr);
644  ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
645  Vector *v = new Vector(data, internal::to_int(size));
646  v->MakeDataOwner();
647  data.LoseData();
648  return v;
649 }
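// Usage sketch: GlobalVector gathers the distributed vector on every rank
// (collective; it allocates the full global size locally, so it is meant
// for small vectors or debugging):
//
//   PetscParVector x(MPI_COMM_WORLD, 100);
//   x.Randomize(1);
//   mfem::Vector *g = x.GlobalVector(); // size 100 on each rank
//   delete g;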
650 
651 PetscParVector& PetscParVector::operator=(PetscScalar d)
652 {
653  ierr = VecSet(x,d); PCHKERRQ(x,ierr);
654  SetFlagsFromMask_();
655  return *this;
656 }
657 
658 PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
659  const Array<PetscScalar>& vals)
660 {
661  MFEM_VERIFY(idx.Size() == vals.Size(),
662  "Size mismatch between indices and values");
663  PetscInt n = idx.Size();
664  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
665  PCHKERRQ(x,ierr);
666  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
667  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
668  SetFlagsFromMask_();
669  return *this;
670 }
671 
672 PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
673  const Array<PetscScalar>& vals)
674 {
675  MFEM_VERIFY(idx.Size() == vals.Size(),
676  "Size mismatch between indices and values");
677  PetscInt n = idx.Size();
678  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
679  PCHKERRQ(x,ierr);
680  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
681  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
682  SetFlagsFromMask_();
683  return *this;
684 }
685 
686 PetscParVector& PetscParVector::operator=(const PetscParVector &y)
687 {
688  ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
689  SetFlagsFromMask_();
690  return *this;
691 }
692 
693 PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
694 {
695  ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
696  SetFlagsFromMask_();
697  return *this;
698 }
699 
700 PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
701 {
702  ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
703  SetFlagsFromMask_();
704  return *this;
705 }
706 
707 PetscParVector& PetscParVector::operator*=(PetscScalar s)
708 {
709  ierr = VecScale(x,s); PCHKERRQ(x,ierr);
710  SetFlagsFromMask_();
711  return *this;
712 }
713 
714 PetscParVector& PetscParVector::operator+=(PetscScalar s)
715 {
716  ierr = VecShift(x,s); PCHKERRQ(x,ierr);
717  SetFlagsFromMask_();
718  return *this;
719 }
720 
721 void PetscParVector::PlaceArray(PetscScalar *temp_data)
722 {
723  ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
724 }
725 
726 void PetscParVector::ResetArray()
727 {
728  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
729 }
730 
731 void PetscParVector::PlaceMemory(Memory<double>& mem, bool rw)
732 {
733  PetscInt n;
734 
735  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
736  MFEM_VERIFY(n <= mem.Capacity(),
737  "Memory size " << mem.Capacity() << " < " << n << " vector size!");
738  MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
739  MFEM_VERIFY(data.Empty(),"Vector data is not empty");
740 #if defined(_USE_DEVICE)
741  PetscBool iscuda;
742  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
743  ""); PCHKERRQ(x,ierr);
744  if (iscuda)
745  {
746  bool usedev = mem.DeviceIsValid() || (!rw && mem.UseDevice());
747  pdata.MakeAliasForSync(mem,0,n,rw,true,usedev);
748  if (usedev)
749  {
750  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
751  ierr = VecCUDAPlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
752  }
753  else
754  {
755  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
756  ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
757  }
758  }
759  else
760 #endif
761  {
762  double *w = rw ? mfem::HostReadWrite(mem,size) : mfem::HostWrite(mem,size);
763  pdata.MakeAliasForSync(mem,0,n,rw,true,false);
764 #if defined(PETSC_HAVE_DEVICE)
765  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
766 #endif
767  ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
768  }
769  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
770  data.MakeAlias(pdata,0,size);
771 }
772 
773 void PetscParVector::PlaceMemory(const Memory<double>& mem)
774 {
775  PetscInt n;
776 
777  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
778  MFEM_VERIFY(n <= mem.Capacity(),
779  "Memory size " << mem.Capacity() << " < " << n << " vector size!");
780  MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
781  MFEM_VERIFY(data.Empty(),"Vector data is not empty");
782 #if defined(_USE_DEVICE)
783  PetscBool iscuda;
784  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
785  ""); PCHKERRQ(x,ierr);
786  if (iscuda)
787  {
788  pdata.MakeAliasForSync(mem,0,n,mem.DeviceIsValid());
789  if (mem.DeviceIsValid())
790  {
791  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
792  ierr = VecCUDAPlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
793  }
794  else
795  {
796  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
797  ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
798  }
799  }
800  else
801 #endif
802  {
803  const double *w = mfem::HostRead(mem,size);
804  pdata.MakeAliasForSync(mem,0,n,false);
805 #if defined(PETSC_HAVE_DEVICE)
806  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
807 #endif
808  ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
809  }
810  data.MakeAlias(pdata,0,size);
811  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
812  ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
813 }
814 
815 void PetscParVector::ResetMemory()
816 {
817  MFEM_VERIFY(pdata.IsAliasForSync(),"Vector data is not an alias");
818  MFEM_VERIFY(!pdata.Empty(),"Vector data is empty");
819  bool read = pdata.ReadRequested();
820  bool usedev = pdata.DeviceRequested();
821  bool write = pdata.WriteRequested();
822  /*
823  check for strange corner cases
824  - device memory used but somehow PETSc ended up putting up-to-date data on host
825  - host memory used but somehow PETSc ended up putting up-to-date data on device
826  */
827  if (write)
828  {
829  const PetscScalar *v;
830 #if defined(PETSC_HAVE_DEVICE)
831  PetscOffloadMask mask;
832  ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
833  if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
834  (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
835 #endif
836  {
837  ierr = VecGetArrayRead(x,&v); PCHKERRQ(x,ierr);
838  pdata.CopyFromHost(v, size);
839  ierr = VecRestoreArrayRead(x,&v); PCHKERRQ(x,ierr);
840  }
841  }
842  pdata.Reset();
843  data.Reset();
844  if (read && !write) { ierr = VecLockReadPop(x); PCHKERRQ(x,ierr); }
845  if (usedev)
846  {
847 #if defined(PETSC_HAVE_DEVICE)
848  ierr = VecCUDAResetArray(x); PCHKERRQ(x,ierr);
849 #else
850  MFEM_VERIFY(false,"This should not happen");
851 #endif
852  }
853  else
854  {
855  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
856  }
857 }
858 
859 void PetscParVector::Randomize(PetscInt seed)
860 {
861  PetscRandom rctx = NULL;
862 
863  if (seed)
864  {
865  ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
866  PCHKERRQ(x,ierr);
867  ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
868  ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
869  }
870  ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
871  ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
872 }
873 
874 void PetscParVector::Print(const char *fname, bool binary) const
875 {
876  if (fname)
877  {
878  PetscViewer view;
879 
880  if (binary)
881  {
882  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
883  FILE_MODE_WRITE,&view);
884  }
885  else
886  {
887  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
888  }
889  PCHKERRQ(x,ierr);
890  ierr = VecView(x,view); PCHKERRQ(x,ierr);
891  ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
892  }
893  else
894  {
895  ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
896  }
897 }
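// Usage sketch: with no filename the vector is dumped to the default PETSc
// viewer (stdout); a filename selects an ASCII file, and binary=true writes
// a PETSc binary file that VecLoad can read back:
//
//   x.Print();              // ASCII to stdout
//   x.Print("x.txt");       // ASCII file (hypothetical name)
//   x.Print("x.dat", true); // PETSc binary file (hypothetical name)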
898 
899 // PetscParMatrix methods
900 
901 PetscInt PetscParMatrix::GetRowStart() const
902 {
903  PetscInt N;
904  ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
905  return N;
906 }
907 
908 PetscInt PetscParMatrix::GetColStart() const
909 {
910  PetscInt N;
911  ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
912  return N;
913 }
914 
915 PetscInt PetscParMatrix::GetNumRows() const
916 {
917  PetscInt N;
918  ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
919  return N;
920 }
921 
922 PetscInt PetscParMatrix::GetNumCols() const
923 {
924  PetscInt N;
925  ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
926  return N;
927 }
928 
929 PetscInt PetscParMatrix::M() const
930 {
931  PetscInt N;
932  ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
933  return N;
934 }
935 
936 PetscInt PetscParMatrix::N() const
937 {
938  PetscInt N;
939  ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
940  return N;
941 }
942 
943 PetscInt PetscParMatrix::NNZ() const
944 {
945  MatInfo info;
946  ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
947  return (PetscInt)info.nz_used;
948 }
949 
950 void PetscParMatrix::SetBlockSize(PetscInt rbs, PetscInt cbs)
951 {
952  if (cbs < 0) { cbs = rbs; }
953  ierr = MatSetBlockSizes(A,rbs,cbs); PCHKERRQ(A,ierr);
954 }
955 
956 void PetscParMatrix::Init()
957 {
958  A = NULL;
959  X = Y = NULL;
960  height = width = 0;
961 }
962 
963 PetscParMatrix::PetscParMatrix()
964 {
965  Init();
966 }
967 
968 PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
969  const mfem::Array<PetscInt>& rows, const mfem::Array<PetscInt>& cols)
970 {
971  Init();
972 
973  Mat B = const_cast<PetscParMatrix&>(pB);
974 
975  IS isr,isc;
976  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
977  rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
978  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
979  cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
980  ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
981  ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
982  ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
983 
984  height = GetNumRows();
985  width = GetNumCols();
986 }
987 
988 PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
989 {
990  Init();
991  height = pa->Height();
992  width = pa->Width();
993  ConvertOperator(pa->GetComm(),*pa,&A,tid);
994 }
995 
996 PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
997 {
998  Init();
999  height = ha->Height();
1000  width = ha->Width();
1001  ConvertOperator(ha->GetComm(),*ha,&A,tid);
1002 }
1003 
1004 PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
1005 {
1006  Init();
1007  height = sa->Height();
1008  width = sa->Width();
1009  ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
1010 }
1011 
1012 PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
1013  Operator::Type tid)
1014 {
1015  Init();
1016  height = op->Height();
1017  width = op->Width();
1018  ConvertOperator(comm,*op,&A,tid);
1019 }
1020 
1021 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
1022  PetscInt *row_starts, SparseMatrix *diag,
1023  Operator::Type tid)
1024 {
1025  Init();
1026  BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1027  tid==PETSC_MATAIJ,&A);
1028  SetUpForDevice();
1029  // update base class
1030  height = GetNumRows();
1031  width = GetNumCols();
1032 }
1033 
1034 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
1035  PetscInt global_num_cols, PetscInt *row_starts,
1036  PetscInt *col_starts, SparseMatrix *diag,
1037  Operator::Type tid)
1038 {
1039  Init();
1040  BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1041  tid==PETSC_MATAIJ,&A);
1042  SetUpForDevice();
1043  // update base class
1044  height = GetNumRows();
1045  width = GetNumCols();
1046 }
1047 
1048 PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
1049 {
1050  if (A)
1051  {
1052  MPI_Comm comm = PetscObjectComm((PetscObject)A);
1053  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1054  if (X) { delete X; }
1055  if (Y) { delete Y; }
1056  X = Y = NULL;
1057  }
1058  height = B.Height();
1059  width = B.Width();
1060 #if defined(PETSC_HAVE_HYPRE)
1061  ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
1062 #else
1063  ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
1064 #endif
1065  SetUpForDevice();
1066  return *this;
1067 }
1068 
1069 PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
1070 {
1071  if (A)
1072  {
1073  MPI_Comm comm = PetscObjectComm((PetscObject)A);
1074  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1075  if (X) { delete X; }
1076  if (Y) { delete Y; }
1077  X = Y = NULL;
1078  }
1079  height = B.Height();
1080  width = B.Width();
1081  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1082  return *this;
1083 }
1084 
1085 PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
1086 {
1087  if (!A)
1088  {
1089  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1090  }
1091  else
1092  {
1093  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1094  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1095  ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1096  }
1097  return *this;
1098 }
1099 
1100 PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
1101 {
1102  if (!A)
1103  {
1104  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1105  ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
1106  }
1107  else
1108  {
1109  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1110  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1111  ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1112  }
1113  return *this;
1114 }
1115 
1116 void PetscParMatrix::
1117 BlockDiagonalConstructor(MPI_Comm comm,
1118  PetscInt *row_starts, PetscInt *col_starts,
1119  SparseMatrix *diag, bool assembled, Mat* Ad)
1120 {
1121  Mat A;
1122  PetscInt lrsize,lcsize,rstart,cstart;
1123  PetscMPIInt myid = 0,commsize;
1124 
1125  ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
1126  if (!HYPRE_AssumedPartitionCheck())
1127  {
1128  ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
1129  }
1130  lrsize = row_starts[myid+1]-row_starts[myid];
1131  rstart = row_starts[myid];
1132  lcsize = col_starts[myid+1]-col_starts[myid];
1133  cstart = col_starts[myid];
1134 
1135  if (!assembled)
1136  {
1137  IS is;
1138  ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1139  ISLocalToGlobalMapping rl2g,cl2g;
1140  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1141  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1142  if (row_starts != col_starts)
1143  {
1144  ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
1145  CCHKERRQ(comm,ierr);
1146  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1147  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1148  }
1149  else
1150  {
1151  ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1152  cl2g = rl2g;
1153  }
1154 
1155  // Create the PETSc object (MATIS format)
1156  ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1157  ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1158  PCHKERRQ(A,ierr);
1159  ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1160  ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1161  ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1162  ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
1163 
1164  // Copy SparseMatrix into PETSc SeqAIJ format
1165  // pass through host for now
1166  Mat lA;
1167  ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1168  const int *II = diag->HostReadI();
1169  const int *JJ = diag->HostReadJ();
1170 #if defined(PETSC_USE_64BIT_INDICES)
1171  PetscInt *pII,*pJJ;
1172  int m = diag->Height()+1, nnz = II[diag->Height()];
1173  ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1174  for (int i = 0; i < m; i++) { pII[i] = II[i]; }
1175  for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1176  ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1177  diag->HostReadData()); PCHKERRQ(lA,ierr);
1178  ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1179 #else
1180  ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1181  diag->HostReadData()); PCHKERRQ(lA,ierr);
1182 #endif
1183  }
1184  else
1185  {
1186  PetscScalar *da;
1187  PetscInt *dii,*djj,*oii,
1188  m = diag->Height()+1, nnz = diag->NumNonZeroElems();
1189 
1190  diag->SortColumnIndices();
1191  // if we can take ownership of the SparseMatrix arrays, we can avoid this
1192  // step
1193  ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1194  ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1195  ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1196  if (sizeof(PetscInt) == sizeof(int))
1197  {
1198  ierr = PetscMemcpy(dii,diag->HostReadI(),m*sizeof(PetscInt));
1199  CCHKERRQ(PETSC_COMM_SELF,ierr);
1200  ierr = PetscMemcpy(djj,diag->HostReadJ(),nnz*sizeof(PetscInt));
1201  CCHKERRQ(PETSC_COMM_SELF,ierr);
1202  }
1203  else
1204  {
1205  const int *iii = diag->HostReadI();
1206  const int *jjj = diag->HostReadJ();
1207  for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
1208  for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1209  }
1210  ierr = PetscMemcpy(da,diag->HostReadData(),nnz*sizeof(PetscScalar));
1211  CCHKERRQ(PETSC_COMM_SELF,ierr);
1212  ierr = PetscCalloc1(m,&oii);
1213  CCHKERRQ(PETSC_COMM_SELF,ierr);
1214  if (commsize > 1)
1215  {
1216  ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1217  PETSC_DECIDE,
1218  dii,djj,da,oii,NULL,NULL,&A);
1219  CCHKERRQ(comm,ierr);
1220  }
1221  else
1222  {
1223  ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1224  CCHKERRQ(comm,ierr);
1225  }
1226 
1227  void *ptrs[4] = {dii,djj,da,oii};
1228  const char *names[4] = {"_mfem_csr_dii",
1229  "_mfem_csr_djj",
1230  "_mfem_csr_da",
1231  "_mfem_csr_oii",
1232  };
1233  for (PetscInt i=0; i<4; i++)
1234  {
1235  PetscContainer c;
1236 
1237  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1238  ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1239  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1240  CCHKERRQ(comm,ierr);
1241  ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
1242  CCHKERRQ(comm,ierr);
1243  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1244  }
1245  }
1246 
1247  // Tell PETSc the matrix is ready to be used
1248  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1249  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1250 
1251  *Ad = A;
1252 }
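// The constructor above follows two paths: with assembled == false it builds
// an unassembled MATIS matrix (one local block per process, the format used
// by substructuring solvers such as BDDC), while assembled == true produces
// a plain (MPI)AIJ matrix whose off-process block is empty. A sketch of how
// it is reached, with hypothetical sizes and a single process:
//
//   mfem::SparseMatrix diag(10, 10); // local block; fill and Finalize() it
//   PetscInt row_starts[2] = {0, 10};
//   mfem::PetscParMatrix A(MPI_COMM_WORLD, 10, row_starts, &diag,
//                          mfem::Operator::PETSC_MATAIJ);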
1253 
1254 MPI_Comm PetscParMatrix::GetComm() const
1255 {
1256  return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
1257 }
1258 
1259 // TODO ADD THIS CONSTRUCTOR
1260 //PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
1261 // PetscInt glob_ncols, int *I, PetscInt *J,
1262 // double *data, PetscInt *rows, PetscInt *cols)
1263 //{
1264 //}
1265 
1266 // TODO This should take a reference on op but how?
1267 void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
1268 {
1269  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1270  ierr = MatSetSizes(*A,op->Height(),op->Width(),
1271  PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1272  ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1273  ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
1274  ierr = MatShellSetOperation(*A,MATOP_MULT,
1275  (void (*)())__mfem_mat_shell_apply);
1276  PCHKERRQ(A,ierr);
1277  ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1278  (void (*)())__mfem_mat_shell_apply_transpose);
1279  PCHKERRQ(A,ierr);
1280  ierr = MatShellSetOperation(*A,MATOP_COPY,
1281  (void (*)())__mfem_mat_shell_copy);
1282  PCHKERRQ(A,ierr);
1283  ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1284  (void (*)())__mfem_mat_shell_destroy);
1285 #if defined(_USE_DEVICE)
1286  MemoryType mt = GetMemoryType(op->GetMemoryClass());
1287  if (mt == MemoryType::DEVICE || mt == MemoryType::MANAGED)
1288  {
1289  ierr = MatShellSetVecType(*A,VECCUDA); PCHKERRQ(A,ierr);
1290  ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1291  }
1292  else
1293  {
1294  ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1295  }
1296 #endif
1297  PCHKERRQ(A,ierr);
1298  ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1299 }
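// Example (sketch): MakeWrapper is what exposes an arbitrary mfem::Operator
// to PETSc matrix-free. From user code it is reached through the type-id
// constructor, e.g. with an identity operator of hypothetical size 100:
//
//   mfem::IdentityOperator id(100);
//   mfem::PetscParMatrix A(MPI_COMM_WORLD, &id, mfem::Operator::PETSC_MATSHELL);
//
// MatMult/MatMultTranspose on A then dispatch to id.Mult/id.MultTranspose
// via the __mfem_mat_shell_* callbacks registered above.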
1300 
1301 void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
1302  Operator::Type tid)
1303 {
1304  PetscParMatrix *pA = const_cast<PetscParMatrix *>
1305  (dynamic_cast<const PetscParMatrix *>(&op));
1306  HypreParMatrix *pH = const_cast<HypreParMatrix *>
1307  (dynamic_cast<const HypreParMatrix *>(&op));
1308  BlockOperator *pB = const_cast<BlockOperator *>
1309  (dynamic_cast<const BlockOperator *>(&op));
1310  IdentityOperator *pI = const_cast<IdentityOperator *>
1311  (dynamic_cast<const IdentityOperator *>(&op));
1312  SparseMatrix *pS = const_cast<SparseMatrix *>
1313  (dynamic_cast<const SparseMatrix *>(&op));
1314 
1315  if (pA && tid == ANY_TYPE) // use same object and return
1316  {
1317  ierr = PetscObjectReference((PetscObject)(pA->A));
1318  CCHKERRQ(pA->GetComm(),ierr);
1319  *A = pA->A;
1320  return;
1321  }
1322 
1323  PetscBool avoidmatconvert = PETSC_FALSE;
1324  if (pA) // we test for these types since MatConvert will fail
1325  {
1326  ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
1327  MATSHELL,"");
1328  CCHKERRQ(comm,ierr);
1329  }
1330  if (pA && !avoidmatconvert)
1331  {
1332  Mat At = NULL;
1333  PetscBool istrans;
1334 #if PETSC_VERSION_LT(3,10,0)
1335  PetscBool ismatis;
1336 #endif
1337 
1338 #if PETSC_VERSION_LT(3,18,0)
1339  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
1340 #else
1341  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEVIRTUAL,
1342  &istrans);
1343 #endif
1344  CCHKERRQ(pA->GetComm(),ierr);
1345  if (!istrans)
1346  {
1347  if (tid == pA->GetType()) // use same object and return
1348  {
1349  ierr = PetscObjectReference((PetscObject)(pA->A));
1350  CCHKERRQ(pA->GetComm(),ierr);
1351  *A = pA->A;
1352  return;
1353  }
1354 #if PETSC_VERSION_LT(3,10,0)
1355  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
1356  CCHKERRQ(pA->GetComm(),ierr);
1357 #endif
1358  }
1359  else
1360  {
1361  ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
1362 #if PETSC_VERSION_LT(3,10,0)
1363  ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
1364 #endif
1365  CCHKERRQ(pA->GetComm(),ierr);
1366  }
1367 
1368  // Try to convert
1369  if (tid == PETSC_MATAIJ)
1370  {
1371 #if PETSC_VERSION_LT(3,10,0)
1372  if (ismatis)
1373  {
1374  if (istrans)
1375  {
1376  Mat B;
1377 
1378  ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1379  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1380  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1381  }
1382  else
1383  {
1384  ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
1385  PCHKERRQ(pA->A,ierr);
1386  }
1387  }
1388  else
1389 #endif
1390  {
1391  PetscMPIInt size;
1392  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1393 
1394  // call MatConvert and see if a converter is available
1395  if (istrans)
1396  {
1397  Mat B;
1398  ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1399  PCHKERRQ(pA->A,ierr);
1400  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1401  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1402  }
1403  else
1404  {
1405  ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1406  PCHKERRQ(pA->A,ierr);
1407  }
1408  }
1409  }
1410  else if (tid == PETSC_MATIS)
1411  {
1412  if (istrans)
1413  {
1414  Mat B;
1415  ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1416  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1417  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1418  }
1419  else
1420  {
1421  ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1422  }
1423  }
1424  else if (tid == PETSC_MATHYPRE)
1425  {
1426 #if defined(PETSC_HAVE_HYPRE)
1427  if (istrans)
1428  {
1429  Mat B;
1430  ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1431  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1432  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1433  }
1434  else
1435  {
1436  ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1437  }
1438 #else
1439  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1440 #endif
1441  }
1442  else if (tid == PETSC_MATSHELL)
1443  {
1444  MakeWrapper(comm,&op,A);
1445  }
1446  else
1447  {
1448  MFEM_ABORT("Unsupported operator type conversion " << tid)
1449  }
1450  }
1451  else if (pH)
1452  {
1453  if (tid == PETSC_MATAIJ)
1454  {
1455 #if defined(PETSC_HAVE_HYPRE)
1456  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1457  PETSC_USE_POINTER,A);
1458 #else
1459  ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1460 #endif
1461  CCHKERRQ(pH->GetComm(),ierr);
1462  }
1463  else if (tid == PETSC_MATIS)
1464  {
1465 #if defined(PETSC_HAVE_HYPRE)
1466  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1467  PETSC_USE_POINTER,A);
1468 #else
1469  ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1470 #endif
1471  CCHKERRQ(pH->GetComm(),ierr);
1472  }
1473  else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
1474  {
1475 #if defined(PETSC_HAVE_HYPRE)
1476  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1477  PETSC_USE_POINTER,A);
1478  CCHKERRQ(pH->GetComm(),ierr);
1479 #else
1480  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1481 #endif
1482  }
1483  else if (tid == PETSC_MATSHELL)
1484  {
1485  MakeWrapper(comm,&op,A);
1486  }
1487  else
1488  {
1489  MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
1490  " is not implemented");
1491  }
1492  }
1493  else if (pB)
1494  {
1495  Mat *mats,*matsl2l = NULL;
1496  PetscInt i,j,nr,nc;
1497 
1498  nr = pB->NumRowBlocks();
1499  nc = pB->NumColBlocks();
1500  ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1501  if (tid == PETSC_MATIS)
1502  {
1503  ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1504  }
1505  for (i=0; i<nr; i++)
1506  {
1507  PetscBool needl2l = PETSC_TRUE;
1508 
1509  for (j=0; j<nc; j++)
1510  {
1511  if (!pB->IsZeroBlock(i,j))
1512  {
1513  ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
1514  if (tid == PETSC_MATIS && needl2l)
1515  {
1516  PetscContainer c;
1517  ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
1518  (PetscObject*)&c);
1519  PCHKERRQ(mats[i*nc+j],ierr);
1520  // special case for block operators: the local Vdofs should be
1521  // ordered as:
1522  // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
1523  // with m fields, Ni the number of Vdofs for the i-th field
1524  if (c)
1525  {
1526  Array<Mat> *l2l = NULL;
1527  ierr = PetscContainerGetPointer(c,(void**)&l2l);
1528  PCHKERRQ(c,ierr);
1529  MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
1530  << l2l->Size() << " for block row " << i );
1531  ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
1532  PCHKERRQ(c,ierr);
1533  matsl2l[i] = (*l2l)[0];
1534  needl2l = PETSC_FALSE;
1535  }
1536  }
1537  }
1538  }
1539  }
1540  ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1541  if (tid == PETSC_MATIS)
1542  {
1543  ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1544 
1545  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
1546  for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1547  ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1548 
1549  PetscContainer c;
1550  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1551  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1552  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1553  PCHKERRQ(c,ierr);
1554  ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
1555  PCHKERRQ((*A),ierr);
1556  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1557  }
1558  for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1559  ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1560  }
1561  else if (pI && tid == PETSC_MATAIJ)
1562  {
1563  PetscInt rst;
1564 
1565  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1566  ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
1567  PCHKERRQ(A,ierr);
1568  ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1569  ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1570  ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1571  ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1572  ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1573  for (PetscInt i = rst; i < rst+pI->Height(); i++)
1574  {
1575  ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1576  }
1577  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1578  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1579  }
1580  else if (pS)
1581  {
1582  if (tid == PETSC_MATSHELL)
1583  {
1584  MakeWrapper(comm,&op,A);
1585  }
1586  else
1587  {
1588  /* from SparseMatrix to SEQAIJ -> always pass through host for now */
1589  Mat B;
1590  PetscScalar *pdata;
1591  PetscInt *pii,*pjj,*oii;
1592  PetscMPIInt size;
1593 
1594  int m = pS->Height();
1595  int n = pS->Width();
1596  const int *ii = pS->HostReadI();
1597  const int *jj = pS->HostReadJ();
1598  const double *data = pS->HostReadData();
1599 
1600  ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1601  ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1602  ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1603  pii[0] = ii[0];
1604  for (int i = 0; i < m; i++)
1605  {
1606  bool issorted = true;
1607  pii[i+1] = ii[i+1];
1608  for (int j = ii[i]; j < ii[i+1]; j++)
1609  {
1610  pjj[j] = jj[j];
1611  if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1612  pdata[j] = data[j];
1613  }
1614  if (!issorted)
1615  {
1616  ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1617  CCHKERRQ(PETSC_COMM_SELF,ierr);
1618  }
1619  }
1620 
1621  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1622  if (size == 1)
1623  {
1624  ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1625  CCHKERRQ(comm,ierr);
1626  oii = NULL;
1627  }
1628  else // block diagonal constructor
1629  {
1630  ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1631  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1632  PETSC_DECIDE,
1633  pii,pjj,pdata,oii,NULL,NULL,&B);
1634  CCHKERRQ(comm,ierr);
1635  }
1636  void *ptrs[4] = {pii,pjj,pdata,oii};
1637  const char *names[4] = {"_mfem_csr_pii",
1638  "_mfem_csr_pjj",
1639  "_mfem_csr_pdata",
1640  "_mfem_csr_oii"
1641  };
1642  for (int i=0; i<4; i++)
1643  {
1644  PetscContainer c;
1645 
1646  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1647  ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1648  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1649  PCHKERRQ(B,ierr);
1650  ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
1651  PCHKERRQ(B,ierr);
1652  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1653  }
1654  if (tid == PETSC_MATAIJ)
1655  {
1656  *A = B;
1657  }
1658  else if (tid == PETSC_MATHYPRE)
1659  {
1660  ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1661  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1662  }
1663  else if (tid == PETSC_MATIS)
1664  {
1665  ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1666  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1667  }
1668  else
1669  {
1670  MFEM_ABORT("Unsupported operator type conversion " << tid)
1671  }
1672  }
1673  }
1674  else // fallback to general operator
1675  {
1676  MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
1677  "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1678  MakeWrapper(comm,&op,A);
1679  if (tid == PETSC_MATAIJ)
1680  {
1681  Mat B;
1682  PetscBool isaij;
1683 
1684  ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1685  ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
1686  CCHKERRQ(comm,ierr);
1687  ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1688  if (!isaij)
1689  {
1690  ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1691  ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1692  }
1693  else
1694  {
1695  *A = B;
1696  }
1697  }
1698  }
1699  SetUpForDevice();
1700 }
1701 
1702 void PetscParMatrix::Destroy()
1703 {
1704  if (A != NULL)
1705  {
1706  MPI_Comm comm = MPI_COMM_NULL;
1707  ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
1708  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1709  }
1710  delete X;
1711  delete Y;
1712  X = Y = NULL;
1713 }
1714 
1715 PetscParMatrix::PetscParMatrix(petsc::Mat a, bool ref)
1716 {
1717  if (ref)
1718  {
1719  ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
1720  }
1721  Init();
1722  A = a;
1723  height = GetNumRows();
1724  width = GetNumCols();
1725 }
1726 
1727 void PetscParMatrix::SetMat(petsc::Mat A_)
1728 {
1729  if (A_ == A) { return; }
1730  Destroy();
1731  ierr = PetscObjectReference((PetscObject)A_); PCHKERRQ(A_,ierr);
1732  A = A_;
1733  height = GetNumRows();
1734  width = GetNumCols();
1735 }
1736 
1737 void PetscParMatrix::SetUpForDevice()
1738 {
1739 #if !defined(_USE_DEVICE)
1740  return;
1741 #else
1742  if (!A || !Device::Allows(Backend::CUDA_MASK)) { return; }
1743 
1744  PetscBool ismatis,isnest,isseqaij,ismpiaij;
1745  ierr = PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
1746  PCHKERRQ(A,ierr);
1747  ierr = PetscObjectTypeCompare((PetscObject)A,MATNEST,&isnest);
1748  PCHKERRQ(A,ierr);
1749  Mat tA = A;
1750  if (ismatis)
1751  {
1752  ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1753  ierr = PetscObjectTypeCompare((PetscObject)tA,MATNEST,&isnest);
1754  PCHKERRQ(tA,ierr);
1755  }
1756  if (isnest)
1757  {
1758  PetscInt n,m;
1759  Mat **sub;
1760  ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1761  bool dvec = false;
1762  for (PetscInt i = 0; i < n; i++)
1763  {
1764  for (PetscInt j = 0; j < m; j++)
1765  {
1766  if (sub[i][j])
1767  {
1768  bool expT = false;
1769  Mat sA = sub[i][j];
1770  ierr = PetscObjectTypeCompare((PetscObject)sA,MATSEQAIJ,&isseqaij);
1771  PCHKERRQ(sA,ierr);
1772  ierr = PetscObjectTypeCompare((PetscObject)sA,MATMPIAIJ,&ismpiaij);
1773  PCHKERRQ(sA,ierr);
1774  if (isseqaij)
1775  {
1776  ierr = MatSetType(sA,MATSEQAIJCUSPARSE); PCHKERRQ(sA,ierr);
1777  dvec = true;
1778  expT = true;
1779  }
1780  else if (ismpiaij)
1781  {
1782  ierr = MatSetType(sA,MATMPIAIJCUSPARSE); PCHKERRQ(sA,ierr);
1783  dvec = true;
1784  expT = true;
1785  }
1786  if (expT)
1787  {
1788  ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1789  PETSC_TRUE); PCHKERRQ(sA,ierr);
1790  }
1791  }
1792  }
1793  }
1794  if (dvec)
1795  {
1796  ierr = MatSetVecType(tA,VECCUDA); PCHKERRQ(tA,ierr);
1797  }
1798  }
1799  else
1800  {
1801  bool expT = false;
1802  ierr = PetscObjectTypeCompare((PetscObject)tA,MATSEQAIJ,&isseqaij);
1803  PCHKERRQ(tA,ierr);
1804  ierr = PetscObjectTypeCompare((PetscObject)tA,MATMPIAIJ,&ismpiaij);
1805  PCHKERRQ(tA,ierr);
1806  if (isseqaij)
1807  {
1808  ierr = MatSetType(tA,MATSEQAIJCUSPARSE); PCHKERRQ(tA,ierr);
1809  expT = true;
1810  }
1811  else if (ismpiaij)
1812  {
1813  ierr = MatSetType(tA,MATMPIAIJCUSPARSE); PCHKERRQ(tA,ierr);
1814  expT = true;
1815  }
1816  if (expT)
1817  {
1818  ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1819  PETSC_TRUE); PCHKERRQ(tA,ierr);
1820  }
1821  }
1822 #endif
1823 }
1824 
1825 // Computes y = alpha * A * x + beta * y
1826 // or y = alpha * A^T* x + beta * y
1827 static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
1828  bool transpose)
1829 {
1830  PetscErrorCode (*f)(Mat,Vec,Vec);
1831  PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
1832  if (transpose)
1833  {
1834  f = MatMultTranspose;
1835  fadd = MatMultTransposeAdd;
1836  }
1837  else
1838  {
1839  f = MatMult;
1840  fadd = MatMultAdd;
1841  }
1842  if (a != 0.)
1843  {
1844  if (b != 0.)
1845  {
1846  ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1847  ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1848  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1849  }
1850  else
1851  {
1852  ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1853  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1854  }
1855  }
1856  else
1857  {
1858  if (b == 1.)
1859  {
1860  // do nothing
1861  }
1862  else if (b != 0.)
1863  {
1864  ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1865  }
1866  else
1867  {
1868  ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1869  }
1870  }
1871 }
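// The scaling trick above avoids a work vector: for a and b both nonzero,
//   Y <- (b/a)*Y;  Y <- A*X + Y;  Y <- a*Y
// yields a*(A*X + (b/a)*Y) = a*A*X + b*Y, i.e. the desired update with two
// VecScale calls and one fused multiply-add instead of a temporary copy.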
1872 
1873 void PetscParMatrix::MakeRef(const PetscParMatrix &master)
1874 {
1875  ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
1876  Destroy();
1877  Init();
1878  A = master.A;
1879  height = master.height;
1880  width = master.width;
1881 }
1882 
1883 PetscParVector * PetscParMatrix::GetX() const
1884 {
1885  if (!X)
1886  {
1887  MFEM_VERIFY(A,"Mat not present");
1888  X = new PetscParVector(*this,false,false); PCHKERRQ(A,ierr);
1889  }
1890  return X;
1891 }
1892 
1893 PetscParVector * PetscParMatrix::GetY() const
1894 {
1895  if (!Y)
1896  {
1897  MFEM_VERIFY(A,"Mat not present");
1898  Y = new PetscParVector(*this,true,false); PCHKERRQ(A,ierr);
1899  }
1900  return Y;
1901 }
1902 
1903 PetscParMatrix * PetscParMatrix::Transpose(bool action)
1904 {
1905  Mat B;
1906  if (action)
1907  {
1908  ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1909  }
1910  else
1911  {
1912  ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1913  }
1914  return new PetscParMatrix(B,false);
1915 }
1916 
1917 void PetscParMatrix::operator*=(double s)
1918 {
1919  ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1920 }
1921 
1922 void PetscParMatrix::Mult(double a, const Vector &x, double b, Vector &y) const
1923 {
1924  MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
1925  << ", expected size = " << Width());
1926  MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
1927  << ", expected size = " << Height());
1928 
1929  PetscParVector *XX = GetX();
1930  PetscParVector *YY = GetY();
1931  bool rw = (b != 0.0);
1932  XX->PlaceMemory(x.GetMemory());
1933  YY->PlaceMemory(y.GetMemory(),rw);
1934  MatMultKernel(A,a,XX->x,b,YY->x,false);
1935  XX->ResetMemory();
1936  YY->ResetMemory();
1937 }
1938 
1939 void PetscParMatrix::MultTranspose(double a, const Vector &x, double b,
1940  Vector &y) const
1941 {
1942  MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
1943  << ", expected size = " << Height());
1944  MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
1945  << ", expected size = " << Width());
1946 
1947  PetscParVector *XX = GetX();
1948  PetscParVector *YY = GetY();
1949  bool rw = (b != 0.0);
1950  XX->PlaceMemory(y.GetMemory(),rw);
1951  YY->PlaceMemory(x.GetMemory());
1952  MatMultKernel(A,a,YY->x,b,XX->x,true);
1953  XX->ResetMemory();
1954  YY->ResetMemory();
1955 }
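// Usage sketch: Mult/MultTranspose implement y = a*op(A)*x + b*y with MFEM
// vectors placed directly into the PETSc work vectors (no copies when the
// memory spaces match):
//
//   mfem::Vector x(A.Width()), y(A.Height());
//   x = 1.0; y = 0.0;
//   A.Mult(1.0, x, 0.0, y); // y = A*x
//   A.Mult(2.0, x, 1.0, y); // y = 2*A*x + y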
1956 
1957 void PetscParMatrix::Print(const char *fname, bool binary) const
1958 {
1959  if (fname)
1960  {
1961  PetscViewer view;
1962 
1963  if (binary)
1964  {
1965  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
1966  FILE_MODE_WRITE,&view);
1967  }
1968  else
1969  {
1970  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
1971  }
1972  PCHKERRQ(A,ierr);
1973  ierr = MatView(A,view); PCHKERRQ(A,ierr);
1974  ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
1975  }
1976  else
1977  {
1978  ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1979  }
1980 }
1981 
1982 void PetscParMatrix::ScaleRows(const Vector & s)
1983 {
1984  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
1985  << ", expected size = " << Height());
1986 
1987  PetscParVector *YY = GetY();
1988  YY->PlaceMemory(s.GetMemory());
1989  ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
1990  YY->ResetMemory();
1991 }
1992 
1993 void PetscParMatrix::ScaleCols(const Vector & s)
1994 {
1995  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
1996  << ", expected size = " << Width());
1997 
1998  PetscParVector *XX = GetX();
1999  XX->PlaceMemory(s.GetMemory());
2000  ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
2001  XX->ResetMemory();
2002 }
2003 
2004 void PetscParMatrix::Shift(double s)
2005 {
2006  ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
2007 }
2008 
2009 void PetscParMatrix::Shift(const Vector & s)
2010 {
2011  // for matrices with square diagonal blocks only
2012  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2013  << ", expected size = " << Height());
2014  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2015  << ", expected size = " << Width());
2016 
2017  PetscParVector *XX = GetX();
2018  XX->PlaceMemory(s.GetMemory());
2019  ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2020  XX->ResetMemory();
2021 }
2022 
2023 PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
2024  PetscParMatrix *P)
2025 {
2026  MFEM_VERIFY(A->Width() == P->Height(),
2027  "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
2028  " differs from number of local rows of P " << P->Height());
2029  MFEM_VERIFY(A->Height() == R->Width(),
2030  "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
2031  " differs from number of local cols of R " << R->Width());
2032  Mat B;
2033  ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2034  PCHKERRQ(*R,ierr);
2035  return new PetscParMatrix(B);
2036 }
2037 
2038 PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
2039 {
2040  Mat pA = *A,pP = *P,pRt = *Rt;
2041  Mat B;
2042  PetscBool Aismatis,Pismatis,Rtismatis;
2043 
2044  MFEM_VERIFY(A->Width() == P->Height(),
2045  "Petsc RAP: Number of local cols of A " << A->Width() <<
2046  " differs from number of local rows of P " << P->Height());
2047  MFEM_VERIFY(A->Height() == Rt->Height(),
2048  "Petsc RAP: Number of local rows of A " << A->Height() <<
2049  " differs from number of local rows of Rt " << Rt->Height());
2050  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
2051  PCHKERRQ(pA,ierr);
2052  ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
2053  PCHKERRQ(pA,ierr);
2054  ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
2055  PCHKERRQ(pA,ierr);
2056  if (Aismatis &&
2057  Pismatis &&
2058  Rtismatis) // handle special case (this code will eventually go into PETSc)
2059  {
2060  Mat lA,lP,lB,lRt;
2061  ISLocalToGlobalMapping cl2gP,cl2gRt;
2062  PetscInt rlsize,clsize,rsize,csize;
2063 
2064  ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2065  ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2066  ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2067  ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2068  ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2069  ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2070  ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
2071  ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2072  ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2073  ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2074  ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2075  ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2076  ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2077  if (lRt == lP)
2078  {
2079  ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2080  PCHKERRQ(lA,ierr);
2081  }
2082  else
2083  {
2084  Mat lR;
2085  ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2086  ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2087  PCHKERRQ(lRt,ierr);
2088  ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2089  }
2090 
2091  // Attach the lRt matrix to the subdomain local matrix; it may be
2092  // used if markers on vdofs have to be mapped onto subdomain true
2093  // dofs
2094  {
2095  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
2096  ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
2097  (*vmatsl2l)[0] = lRt;
2098 
2099  PetscContainer c;
2100  ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
2101  PCHKERRQ(B,ierr);
2102  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2103  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2104  PCHKERRQ(c,ierr);
2105  ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
2106  PCHKERRQ(B,ierr);
2107  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2108  }
2109 
2110  // Set local problem
2111  ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2112  ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2113  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2114  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2115  }
2116  else // an error is raised if this PtAP combination is not supported by PETSc
2117  {
2118  if (pP == pRt)
2119  {
2120  ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2121  PCHKERRQ(pA,ierr);
2122  }
2123  else
2124  {
2125  Mat pR;
2126  ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2127  ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2128  PCHKERRQ(pRt,ierr);
2129  ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2130  }
2131  }
2132  return new PetscParMatrix(B);
2133 }
2134 
2135 PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
2136 {
2137  PetscParMatrix *out = RAP(P,A,P);
2138  return out;
2139 }
2140 
2141 PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
2142 {
2143  PetscParMatrix *out,*A;
2144 #if defined(PETSC_HAVE_HYPRE)
2145  A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
2146 #else
2147  A = new PetscParMatrix(hA);
2148 #endif
2149  out = RAP(P,A,P);
2150  delete A;
2151  return out;
2152 }
2153 
2154 
2155 PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
2156 {
2157  Mat AB;
2158 
2159  ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2160  CCHKERRQ(A->GetComm(),ierr);
2161  return new PetscParMatrix(AB);
2162 }
2163 
2164 PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
2165 {
2166  Mat Ae;
2167 
2168  PetscParVector dummy(GetComm(),0);
2169  ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2170  EliminateRowsCols(rows_cols,dummy,dummy);
2171  ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2172  return new PetscParMatrix(Ae);
2173 }
2174 
2175 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2176  const HypreParVector &X,
2177  HypreParVector &B,
2178  double diag)
2179 {
2180  MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2181 }
2182 
2183 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2184  const PetscParVector &X,
2185  PetscParVector &B,
2186  double diag)
2187 {
2188  PetscInt M,N;
2189  ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2190  MFEM_VERIFY(M == N,"Rectangular case unsupported");
2191 
2192  // TODO: what if a diagonal term is not present?
2193  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2194 
2195  // rows need to be in global numbering
2196  PetscInt rst;
2197  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2198 
2199  IS dir;
2200  ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2201  if (!X.GlobalSize() && !B.GlobalSize())
2202  {
2203  ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2204  }
2205  else
2206  {
2207  ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2208  }
2209  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2210 }
2211 
2212 void PetscParMatrix::EliminateRows(const Array<int> &rows)
2213 {
2214  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2215 
2216  // rows need to be in global numbering
2217  PetscInt rst;
2218  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2219 
2220  IS dir;
2221  ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2222  ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2223  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2224 }
2225 
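// ReleaseMat detaches the PETSc Mat from this wrapper and returns it; with
// dereference == true the wrapper also drops its reference, so the caller
// effectively takes ownership of the returned handle.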
2226 Mat PetscParMatrix::ReleaseMat(bool dereference)
2227 {
2228 
2229  Mat B = A;
2230  if (dereference)
2231  {
2232  MPI_Comm comm = GetComm();
2233  ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
2234  }
2235  A = NULL;
2236  return B;
2237 }
2238 
2239 Operator::Type PetscParMatrix::GetType() const
2240 {
2241  PetscBool ok;
2242  MFEM_VERIFY(A, "no associated PETSc Mat object");
2243  PetscObject oA = (PetscObject)(this->A);
2244  // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to -> PETSC_MATAIJ
2245  ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
2246  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2247  ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2248  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2249  ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2250  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2251  ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2252  if (ok == PETSC_TRUE) { return PETSC_MATIS; }
2253  ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2254  if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
2255  ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2256  if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
2257 #if defined(PETSC_HAVE_HYPRE)
2258  ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2259  if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
2260 #endif
2261  return PETSC_MATGENERIC;
2262 }
2263 
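// EliminateBC follows the usual MFEM elimination convention: A holds the
// matrix with eliminated rows/columns, Ae the eliminated part (so that
// A_original = A + Ae). The right-hand side is first corrected as
// B -= Ae*X, and then overwritten at the essential dofs as
// B(r) = diag(r)*X(r), since the diagonal entries of A are not necessarily
// 1 after elimination.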
2264 void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
2265  const Array<int> &ess_dof_list,
2266  const Vector &X, Vector &B)
2267 {
2268  const PetscScalar *array;
2269  Mat pA = const_cast<PetscParMatrix&>(A);
2270 
2271  // B -= Ae*X
2272  Ae.Mult(-1.0, X, 1.0, B);
2273 
2274  Vec diag = const_cast<PetscParVector&>((*A.GetX()));
2275  ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2276  ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2277  for (int i = 0; i < ess_dof_list.Size(); i++)
2278  {
2279  int r = ess_dof_list[i];
2280  B(r) = array[r] * X(r);
2281  }
2282  ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2283 }
2284 
2285 // PetscSolver methods
2286 
2287 PetscSolver::PetscSolver() : clcustom(false)
2288 {
2289  obj = NULL;
2290  B = X = NULL;
2291  cid = -1;
2292  operatorset = false;
2293  bchandler = NULL;
2294  private_ctx = NULL;
2295 }
2296 
2297 PetscSolver::~PetscSolver()
2298 {
2299  delete B;
2300  delete X;
2301  FreePrivateContext();
2302 }
2303 
2304 void PetscSolver::SetTol(double tol)
2305 {
2306  SetRelTol(tol);
2307 }
2308 
2309 void PetscSolver::SetRelTol(double tol)
2310 {
2311  if (cid == KSP_CLASSID)
2312  {
2313  KSP ksp = (KSP)obj;
2314  ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2315  }
2316  else if (cid == SNES_CLASSID)
2317  {
2318  SNES snes = (SNES)obj;
2319  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2320  PETSC_DEFAULT);
2321  }
2322  else if (cid == TS_CLASSID)
2323  {
2324  TS ts = (TS)obj;
2325  ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2326  }
2327  else
2328  {
2329  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2330  }
2331  PCHKERRQ(obj,ierr);
2332 }
2333 
2334 void PetscSolver::SetAbsTol(double tol)
2335 {
2336  if (cid == KSP_CLASSID)
2337  {
2338  KSP ksp = (KSP)obj;
2339  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2340  }
2341  else if (cid == SNES_CLASSID)
2342  {
2343  SNES snes = (SNES)obj;
2344  ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2345  PETSC_DEFAULT);
2346  }
2347  else if (cid == TS_CLASSID)
2348  {
2349  TS ts = (TS)obj;
2350  ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2351  }
2352  else
2353  {
2354  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2355  }
2356  PCHKERRQ(obj,ierr);
2357 }
2358 
2359 void PetscSolver::SetMaxIter(int max_iter)
2360 {
2361  if (cid == KSP_CLASSID)
2362  {
2363  KSP ksp = (KSP)obj;
2364  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2365  max_iter);
2366  }
2367  else if (cid == SNES_CLASSID)
2368  {
2369  SNES snes = (SNES)obj;
2370  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2371  max_iter,PETSC_DEFAULT);
2372  }
2373  else if (cid == TS_CLASSID)
2374  {
2375  TS ts = (TS)obj;
2376  ierr = TSSetMaxSteps(ts,max_iter);
2377  }
2378  else
2379  {
2380  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2381  }
2382  PCHKERRQ(obj,ierr);
2383 }
2384 
2385 
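// Print level semantics, as implemented below: plev < 0 leaves any existing
// monitors in place, plev == 0 just cancels them, plev == 1 monitors the
// (preconditioned) residual norm, plev > 1 also monitors singular value
// estimates, and plev > 2 adds the true residual norm (KSP only; for SNES
// any plev > 0 sets the default monitor, and for TS monitors can only be
// cancelled).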
2386 void PetscSolver::SetPrintLevel(int plev)
2387 {
2388  typedef PetscErrorCode (*myPetscFunc)(void**);
2389  PetscViewerAndFormat *vf = NULL;
2390  PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
2391 
2392  if (plev > 0)
2393  {
2394  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2395  PCHKERRQ(obj,ierr);
2396  }
2397  if (cid == KSP_CLASSID)
2398  {
2399  // there are many other options, see the function KSPSetFromOptions() in
2400  // src/ksp/ksp/interface/itcl.c
2401  typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
2402  KSP ksp = (KSP)obj;
2403  if (plev >= 0)
2404  {
2405  ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2406  }
2407  if (plev == 1)
2408  {
2409 #if PETSC_VERSION_LT(3,15,0)
2410  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2411 #else
2412  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2413 #endif
2414  (myPetscFunc)PetscViewerAndFormatDestroy);
2415  PCHKERRQ(ksp,ierr);
2416  }
2417  else if (plev > 1)
2418  {
2419  ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2420  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2421  (myPetscFunc)PetscViewerAndFormatDestroy);
2422  PCHKERRQ(ksp,ierr);
2423  if (plev > 2)
2424  {
2425  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2426  PCHKERRQ(viewer,ierr);
2427 #if PETSC_VERSION_LT(3,15,0)
2428  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2429 #else
2430  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2431 #endif
2432  (myPetscFunc)PetscViewerAndFormatDestroy);
2433  PCHKERRQ(ksp,ierr);
2434  }
2435  }
2436  }
2437  else if (cid == SNES_CLASSID)
2438  {
2439  typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
2440  SNES snes = (SNES)obj;
2441  if (plev >= 0)
2442  {
2443  ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2444  }
2445  if (plev > 0)
2446  {
2447  ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2448  (myPetscFunc)PetscViewerAndFormatDestroy);
2449  PCHKERRQ(snes,ierr);
2450  }
2451  }
2452  else if (cid == TS_CLASSID)
2453  {
2454  TS ts = (TS)obj;
2455  if (plev >= 0)
2456  {
2457  ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2458  }
2459  }
2460  else
2461  {
2462  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2463  }
2464 }
2465 
2466 MPI_Comm PetscSolver::GetComm() const
2467 {
2468  return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
2469 }
2470 
2471 void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
2472 {
2473  __mfem_monitor_ctx *monctx;
2474  ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2475  monctx->solver = this;
2476  monctx->monitor = ctx;
2477  if (cid == KSP_CLASSID)
2478  {
2479  ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
2480  __mfem_monitor_ctx_destroy);
2481  PCHKERRQ(obj,ierr);
2482  }
2483  else if (cid == SNES_CLASSID)
2484  {
2485  ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
2486  __mfem_monitor_ctx_destroy);
2487  PCHKERRQ(obj,ierr);
2488  }
2489  else if (cid == TS_CLASSID)
2490  {
2491  ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
2492  __mfem_monitor_ctx_destroy);
2493  PCHKERRQ(obj,ierr);
2494  }
2495  else
2496  {
2497  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2498  }
2499 }
2500 
2501 void PetscSolver::SetBCHandler(PetscBCHandler *bch)
2502 {
2503  bchandler = bch;
2504  if (cid == SNES_CLASSID)
2505  {
2506  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
2507  snes_ctx->bchandler = bchandler;
2508  }
2509  else if (cid == TS_CLASSID)
2510  {
2511  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
2512  ts_ctx->bchandler = bchandler;
2513  }
2514  else
2515  {
2516  MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2517  }
2518 }
2519 
2520 void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
2521 {
2522  PC pc = NULL;
2523  if (cid == TS_CLASSID)
2524  {
2525  SNES snes;
2526  KSP ksp;
2527 
2528  ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
2529  ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2530  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2531  }
2532  else if (cid == SNES_CLASSID)
2533  {
2534  KSP ksp;
2535 
2536  ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
2537  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2538  }
2539  else if (cid == KSP_CLASSID)
2540  {
2541  ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
2542  }
2543  else if (cid == PC_CLASSID)
2544  {
2545  pc = (PC)obj;
2546  }
2547  else
2548  {
2549  MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
2550  }
2551  if (factory)
2552  {
2553  ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2554  }
2555  else
2556  {
2557  ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2558  }
2559 }
2560 
2561 void PetscSolver::Customize(bool customize) const
2562 {
2563  if (!customize) { clcustom = true; }
2564  if (!clcustom)
2565  {
2566  if (cid == PC_CLASSID)
2567  {
2568  PC pc = (PC)obj;
2569  ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2570  }
2571  else if (cid == KSP_CLASSID)
2572  {
2573  KSP ksp = (KSP)obj;
2574  ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2575  }
2576  else if (cid == SNES_CLASSID)
2577  {
2578  SNES snes = (SNES)obj;
2579  ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2580  }
2581  else if (cid == TS_CLASSID)
2582  {
2583  TS ts = (TS)obj;
2584  ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2585  }
2586  else
2587  {
2588  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2589  }
2590  }
2591  clcustom = true;
2592 }
2593 
2594 int PetscSolver::GetConverged()
2595 {
2596  if (cid == KSP_CLASSID)
2597  {
2598  KSP ksp = (KSP)obj;
2599  KSPConvergedReason reason;
2600  ierr = KSPGetConvergedReason(ksp,&reason);
2601  PCHKERRQ(ksp,ierr);
2602  return reason > 0 ? 1 : 0;
2603  }
2604  else if (cid == SNES_CLASSID)
2605  {
2606  SNES snes = (SNES)obj;
2607  SNESConvergedReason reason;
2608  ierr = SNESGetConvergedReason(snes,&reason);
2609  PCHKERRQ(snes,ierr);
2610  return reason > 0 ? 1 : 0;
2611  }
2612  else if (cid == TS_CLASSID)
2613  {
2614  TS ts = (TS)obj;
2615  TSConvergedReason reason;
2616  ierr = TSGetConvergedReason(ts,&reason);
2617  PCHKERRQ(ts,ierr);
2618  return reason > 0 ? 1 : 0;
2619  }
2620  else
2621  {
2622  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2623  return -1;
2624  }
2625 }
2626 
2627 int PetscSolver::GetNumIterations()
2628 {
2629  if (cid == KSP_CLASSID)
2630  {
2631  KSP ksp = (KSP)obj;
2632  PetscInt its;
2633  ierr = KSPGetIterationNumber(ksp,&its);
2634  PCHKERRQ(ksp,ierr);
2635  return its;
2636  }
2637  else if (cid == SNES_CLASSID)
2638  {
2639  SNES snes = (SNES)obj;
2640  PetscInt its;
2641  ierr = SNESGetIterationNumber(snes,&its);
2642  PCHKERRQ(snes,ierr);
2643  return its;
2644  }
2645  else if (cid == TS_CLASSID)
2646  {
2647  TS ts = (TS)obj;
2648  PetscInt its;
2649  ierr = TSGetStepNumber(ts,&its);
2650  PCHKERRQ(ts,ierr);
2651  return its;
2652  }
2653  else
2654  {
2655  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2656  return -1;
2657  }
2658 }
2659 
2660 double PetscSolver::GetFinalNorm()
2661 {
2662  if (cid == KSP_CLASSID)
2663  {
2664  KSP ksp = (KSP)obj;
2665  PetscReal norm;
2666  ierr = KSPGetResidualNorm(ksp,&norm);
2667  PCHKERRQ(ksp,ierr);
2668  return norm;
2669  }
2670  else if (cid == SNES_CLASSID)
2671  {
2672  SNES snes = (SNES)obj;
2673  PetscReal norm;
2674  ierr = SNESGetFunctionNorm(snes,&norm);
2675  PCHKERRQ(snes,ierr);
2676  return norm;
2677  }
2678  else
2679  {
2680  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2681  return PETSC_MAX_REAL;
2682  }
2683 }
2684 
2685 void PetscSolver::CreatePrivateContext()
2686 {
2687  FreePrivateContext();
2688  if (cid == SNES_CLASSID)
2689  {
2690  __mfem_snes_ctx *snes_ctx;
2691  ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2692  snes_ctx->op = NULL;
2693  snes_ctx->bchandler = NULL;
2694  snes_ctx->work = NULL;
2695  snes_ctx->jacType = Operator::PETSC_MATAIJ;
2696  private_ctx = (void*) snes_ctx;
2697  }
2698  else if (cid == TS_CLASSID)
2699  {
2700  __mfem_ts_ctx *ts_ctx;
2701  ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2702  ts_ctx->op = NULL;
2703  ts_ctx->bchandler = NULL;
2704  ts_ctx->work = NULL;
2705  ts_ctx->work2 = NULL;
2706  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2707  ts_ctx->cached_ijacstate = -1;
2708  ts_ctx->cached_rhsjacstate = -1;
2709  ts_ctx->cached_splits_xstate = -1;
2710  ts_ctx->cached_splits_xdotstate = -1;
2711  ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2712  ts_ctx->jacType = Operator::PETSC_MATAIJ;
2713  private_ctx = (void*) ts_ctx;
2714  }
2715 }
2716 
2717 void PetscSolver::FreePrivateContext()
2718 {
2719  if (!private_ctx) { return; }
2720  // free private context's owned objects
2721  if (cid == SNES_CLASSID)
2722  {
2723  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2724  delete snes_ctx->work;
2725  }
2726  else if (cid == TS_CLASSID)
2727  {
2728  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2729  delete ts_ctx->work;
2730  delete ts_ctx->work2;
2731  }
2732  ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2733 }
2734 
2735 // PetscBCHandler methods
2736 
2737 PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2738  enum PetscBCHandler::Type type_)
2739  : bctype(type_), setup(false), eval_t(0.0),
2740  eval_t_cached(std::numeric_limits<double>::min())
2741 {
2742  SetTDofs(ess_tdof_list);
2743 }
2744 
2745 void PetscBCHandler::SetTDofs(Array<int>& list)
2746 {
2747  ess_tdof_list.SetSize(list.Size());
2748  ess_tdof_list.Assign(list);
2749  setup = false;
2750 }
2751 
2752 void PetscBCHandler::SetUp(PetscInt n)
2753 {
2754  if (setup) { return; }
2755  if (bctype == CONSTANT)
2756  {
2757  eval_g.SetSize(n);
2758  this->Eval(eval_t,eval_g);
2759  eval_t_cached = eval_t;
2760  }
2761  else if (bctype == TIME_DEPENDENT)
2762  {
2763  eval_g.SetSize(n);
2764  }
2765  setup = true;
2766 }
2767 
2768 void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2769 {
2770  (*this).SetUp(x.Size());
2771  y = x;
2772  if (bctype == ZERO)
2773  {
2774  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2775  {
2776  y[ess_tdof_list[i]] = 0.0;
2777  }
2778  }
2779  else
2780  {
2781  if (bctype != CONSTANT && eval_t != eval_t_cached)
2782  {
2783  Eval(eval_t,eval_g);
2784  eval_t_cached = eval_t;
2785  }
2786  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2787  {
2788  y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2789  }
2790  }
2791 }
2792 
2793 void PetscBCHandler::ApplyBC(Vector &x)
2794 {
2795  (*this).SetUp(x.Size());
2796  if (bctype == ZERO)
2797  {
2798  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2799  {
2800  x[ess_tdof_list[i]] = 0.0;
2801  }
2802  }
2803  else
2804  {
2805  if (bctype != CONSTANT && eval_t != eval_t_cached)
2806  {
2807  Eval(eval_t,eval_g);
2808  eval_t_cached = eval_t;
2809  }
2810  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2811  {
2812  x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2813  }
2814  }
2815 }
2816 
2817 void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2818 {
2819  (*this).SetUp(x.Size());
2820  if (bctype == ZERO)
2821  {
2822  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2823  {
2824  y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2825  }
2826  }
2827  else
2828  {
2829  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2830  {
2831  y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2832  }
2833  }
2834 }
2835 
2836 void PetscBCHandler::Zero(Vector &x)
2837 {
2838  (*this).SetUp(x.Size());
2839  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2840  {
2841  x[ess_tdof_list[i]] = 0.0;
2842  }
2843 }
2844 
2845 void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2846 {
2847  (*this).SetUp(x.Size());
2848  y = x;
2849  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2850  {
2851  y[ess_tdof_list[i]] = 0.0;
2852  }
2853 }
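
// Example usage (a minimal sketch; MyBC is a hypothetical subclass that
// overrides Eval() to prescribe time-dependent boundary values):
//
//   class MyBC : public PetscBCHandler
//   {
//   public:
//      MyBC(Array<int> &ess, Type type) : PetscBCHandler(ess, type) {}
//      void Eval(double t, Vector &g) override { g = t; } // g(t) = t
//   };
//
//   MyBC bc(ess_tdof_list, PetscBCHandler::TIME_DEPENDENT);
//   solver.SetBCHandler(&bc); // solver: e.g. a PetscNonlinearSolver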
2854 
2855 // PetscLinearSolver methods
2856 
2857 PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2858  bool wrapin, bool iter_mode)
2859  : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2860 {
2861  KSP ksp;
2862  ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2863  obj = (PetscObject)ksp;
2864  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2865  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2866  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2867  PCHKERRQ(ksp, ierr);
2868 }
2869 
2870 PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2871  const std::string &prefix, bool iter_mode)
2872  : PetscSolver(), Solver(0,iter_mode), wrap(false)
2873 {
2874  KSP ksp;
2875  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2876  obj = (PetscObject)ksp;
2877  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2878  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2879  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2880  PCHKERRQ(ksp, ierr);
2881  SetOperator(A);
2882 }
2883 
2884 PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2885  const std::string &prefix, bool iter_mode)
2886  : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2887 {
2888  KSP ksp;
2889  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2890  obj = (PetscObject)ksp;
2891  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2892  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2893  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2894  PCHKERRQ(ksp, ierr);
2895  SetOperator(A);
2896 }
2897 
2898 void PetscLinearSolver::SetOperator(const Operator &op)
2899 {
2900  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2901  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2902  (dynamic_cast<const PetscParMatrix *>(&op));
2903  const Operator *oA = dynamic_cast<const Operator *>(&op);
2904 
2905  // update base classes: Operator, Solver, PetscLinearSolver
2906  bool delete_pA = false;
2907  if (!pA)
2908  {
2909  if (hA)
2910  {
2911  // Create a MATSHELL object, or convert into a format suitable for constructing preconditioners
2912  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2913  delete_pA = true;
2914  }
2915  else if (oA) // fallback to general operator
2916  {
2917  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2918  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2919  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2920  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2921  delete_pA = true;
2922  }
2923  }
2924  MFEM_VERIFY(pA, "Unsupported operation!");
2925 
2926  // Set operators into PETSc KSP
2927  KSP ksp = (KSP)obj;
2928  Mat A = pA->A;
2929  if (operatorset)
2930  {
2931  Mat C;
2932  PetscInt nheight,nwidth,oheight,owidth;
2933 
2934  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2935  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2936  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2937  if (nheight != oheight || nwidth != owidth)
2938  {
2939  // reinit without destroying the KSP
2940  // communicator remains the same
2941  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2942  delete X;
2943  delete B;
2944  X = B = NULL;
2945  }
2946  }
2947  ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2948 
2949  // Update PetscSolver
2950  operatorset = true;
2951 
2952  // Update the Operator fields.
2953  height = pA->Height();
2954  width = pA->Width();
2955 
2956  if (delete_pA) { delete pA; }
2957 }
2958 
2959 void PetscLinearSolver::SetOperator(const Operator &op, const Operator &pop)
2960 {
2961  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2962  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2963  (dynamic_cast<const PetscParMatrix *>(&op));
2964  const Operator *oA = dynamic_cast<const Operator *>(&op);
2965 
2966  PetscParMatrix *ppA = const_cast<PetscParMatrix *>
2967  (dynamic_cast<const PetscParMatrix *>(&pop));
2968  const Operator *poA = dynamic_cast<const Operator *>(&pop);
2969 
2970  // Convert Operator for linear system
2971  bool delete_pA = false;
2972  if (!pA)
2973  {
2974  if (hA)
2975  {
2976  // Create a MATSHELL object, or convert into a format suitable for constructing preconditioners
2977  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2978  delete_pA = true;
2979  }
2980  else if (oA) // fallback to general operator
2981  {
2982  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2983  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2984  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2985  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2986  delete_pA = true;
2987  }
2988  }
2989  MFEM_VERIFY(pA, "Unsupported operation!");
2990 
2991  // Convert Operator to be preconditioned
2992  bool delete_ppA = false;
2993  if (!ppA)
2994  {
2995  if (oA == poA && !wrap) // Same operator, already converted
2996  {
2997  ppA = pA;
2998  }
2999  else
3000  {
3001  ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
3002  delete_ppA = true;
3003  }
3004  }
3005  MFEM_VERIFY(ppA, "Unsupported operation!");
3006 
3007  // Set operators into PETSc KSP
3008  KSP ksp = (KSP)obj;
3009  Mat A = pA->A;
3010  Mat P = ppA->A;
3011  if (operatorset)
3012  {
3013  Mat C;
3014  PetscInt nheight,nwidth,oheight,owidth;
3015 
3016  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3017  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3018  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3019  if (nheight != oheight || nwidth != owidth)
3020  {
3021  // reinit without destroying the KSP
3022  // communicator remains the same
3023  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3024  delete X;
3025  delete B;
3026  X = B = NULL;
3027  wrap = false;
3028  }
3029  }
3030  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3031 
3032  // Update PetscSolver
3033  operatorset = true;
3034 
3035  // Update the Operator fields.
3036  height = pA->Height();
3037  width = pA->Width();
3038 
3039  if (delete_pA) { delete pA; }
3040  if (delete_ppA) { delete ppA; }
3041 }
3042 
3043 void PetscLinearSolver::SetPreconditioner(Solver &precond)
3044 {
3045  KSP ksp = (KSP)obj;
3046 
3047  // Preserve Amat if already set
3048  Mat A = NULL;
3049  PetscBool amat;
3050  ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3051  if (amat)
3052  {
3053  ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3054  ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
3055  }
3056  PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
3057  if (ppc)
3058  {
3059  ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3060  }
3061  else
3062  {
3063  // wrap the Solver action
3064  // Solver is assumed to be already setup
3065  // ownership of precond is not transferred,
3066  // consistently with other MFEM's linear solvers
3067  PC pc;
3068  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3069  ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
3070  }
3071  if (A)
3072  {
3073  Mat P;
3074 
3075  ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3076  ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
3077  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3078  ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3079  ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3080  }
3081 }
3082 
3083 void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
3084 {
3085  KSP ksp = (KSP)obj;
3086 
3087  if (!B || !X)
3088  {
3089  Mat pA = NULL;
3090  ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
3091  if (!B)
3092  {
3093  PetscParMatrix A = PetscParMatrix(pA, true);
3094  B = new PetscParVector(A, true, false);
3095  }
3096  if (!X)
3097  {
3098  PetscParMatrix A = PetscParMatrix(pA, true);
3099  X = new PetscParVector(A, false, false);
3100  }
3101  }
3102  B->PlaceMemory(b.GetMemory());
3103 
3104  Customize();
3105 
3106  PetscBool flg;
3107  ierr = KSPGetInitialGuessNonzero(ksp, &flg); PCHKERRQ(ksp, ierr);
3108  X->PlaceMemory(x.GetMemory(),flg);
3109 
3110  // Solve the system.
3111  if (trans)
3112  {
3113  ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3114  }
3115  else
3116  {
3117  ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3118  }
3119  B->ResetMemory();
3120  X->ResetMemory();
3121 }
3122 
3123 void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
3124 {
3125  (*this).MultKernel(b,x,false);
3126 }
3127 
3128 void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
3129 {
3130  (*this).MultKernel(b,x,true);
3131 }
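
// Example usage (a minimal sketch; assumes a parallel matrix A and
// mfem::Vector objects B and X of compatible sizes):
//
//   PetscLinearSolver solver(A.GetComm());
//   solver.SetOperator(A);
//   solver.SetRelTol(1e-8);
//   solver.SetMaxIter(500);
//   solver.Mult(B, X); // solve A X = B
//
// Command-line options (e.g. -ksp_type gmres -pc_type jacobi) are applied
// by Customize(), which calls KSPSetFromOptions() before the first solve.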
3132 
3133 PetscLinearSolver::~PetscLinearSolver()
3134 {
3135  MPI_Comm comm;
3136  KSP ksp = (KSP)obj;
3137  ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3138  ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3139 }
3140 
3141 // PetscPCGSolver methods
3142 
3143 PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix,
3144  bool iter_mode)
3145  : PetscLinearSolver(comm,prefix,iter_mode)
3146 {
3147  KSP ksp = (KSP)obj;
3148  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3149  // this is to obtain a textbook PCG
3150  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3151 }
3152 
3153 PetscPCGSolver::PetscPCGSolver(PetscParMatrix& A, const std::string &prefix,
3154  bool iter_mode)
3155  : PetscLinearSolver(A,prefix,iter_mode)
3156 {
3157  KSP ksp = (KSP)obj;
3158  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3159  // this is to obtain a textbook PCG
3160  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3161 }
3162 
3163 PetscPCGSolver::PetscPCGSolver(HypreParMatrix& A, bool wrap,
3164  const std::string &prefix, bool iter_mode)
3165  : PetscLinearSolver(A,wrap,prefix,iter_mode)
3166 {
3167  KSP ksp = (KSP)obj;
3168  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3169  // this is to obtain a textbook PCG
3170  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3171 }
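
// Note: the options prefix passed to these constructors allows per-solver
// customization from the PETSc options database; e.g., with prefix "pcg_",
// options such as -pcg_ksp_rtol and -pcg_ksp_monitor are picked up when
// Customize() is invoked.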
3172 
3173 // PetscPreconditioner methods
3174 
3175 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
3176  const std::string &prefix)
3177  : PetscSolver(), Solver()
3178 {
3179  PC pc;
3180  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3181  obj = (PetscObject)pc;
3182  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3183  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3184 }
3185 
3186 PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
3187  const string &prefix)
3188  : PetscSolver(), Solver()
3189 {
3190  PC pc;
3191  ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
3192  obj = (PetscObject)pc;
3193  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3194  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3195  SetOperator(A);
3196 }
3197 
3198 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
3199  const string &prefix)
3200  : PetscSolver(), Solver()
3201 {
3202  PC pc;
3203  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3204  obj = (PetscObject)pc;
3205  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3206  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3207  SetOperator(op);
3208 }
3209 
3210 void PetscPreconditioner::SetOperator(const Operator &op)
3211 {
3212  bool delete_pA = false;
3213  PetscParMatrix *pA = const_cast<PetscParMatrix *>
3214  (dynamic_cast<const PetscParMatrix *>(&op));
3215 
3216  if (!pA)
3217  {
3218  const Operator *cop = dynamic_cast<const Operator *>(&op);
3219  pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
3220  delete_pA = true;
3221  }
3222 
3223  // Set operators into PETSc PC
3224  PC pc = (PC)obj;
3225  Mat A = pA->A;
3226  if (operatorset)
3227  {
3228  Mat C;
3229  PetscInt nheight,nwidth,oheight,owidth;
3230 
3231  ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3232  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3233  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3234  if (nheight != oheight || nwidth != owidth)
3235  {
3236  // reinit without destroying the PC
3237  // communicator remains the same
3238  ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3239  delete X;
3240  delete B;
3241  X = B = NULL;
3242  }
3243  }
3244  ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
3245 
3246  // Update PetscSolver
3247  operatorset = true;
3248 
3249  // Update the Operator fields.
3250  height = pA->Height();
3251  width = pA->Width();
3252 
3253  if (delete_pA) { delete pA; };
3254 }
3255 
3256 void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
3257  bool trans) const
3258 {
3259  MFEM_VERIFY(!iterative_mode,
3260  "Iterative mode not supported for PetscPreconditioner");
3261  PC pc = (PC)obj;
3262 
3263  if (!B || !X)
3264  {
3265  Mat pA = NULL;
3266  ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
3267  if (!B)
3268  {
3269  PetscParMatrix A(pA, true);
3270  B = new PetscParVector(A, true, false);
3271  }
3272  if (!X)
3273  {
3274  PetscParMatrix A(pA, true);
3275  X = new PetscParVector(A, false, false);
3276  }
3277  }
3278  B->PlaceMemory(b.GetMemory());
3279  X->PlaceMemory(x.GetMemory());
3280 
3281  Customize();
3282 
3283  // Apply the preconditioner.
3284  if (trans)
3285  {
3286  ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3287  }
3288  else
3289  {
3290  ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3291  }
3292  B->ResetMemory();
3293  X->ResetMemory();
3294 }
3295 
3296 void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
3297 {
3298  (*this).MultKernel(b,x,false);
3299 }
3300 
3301 void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
3302 {
3303  (*this).MultKernel(b,x,true);
3304 }
3305 
3306 PetscPreconditioner::~PetscPreconditioner()
3307 {
3308  MPI_Comm comm;
3309  PC pc = (PC)obj;
3310  ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3311  ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3312 }
3313 
3314 // PetscBDDCSolver methods
3315 
3316 // Coordinates sampling function
3317 static void func_coords(const Vector &x, Vector &y)
3318 {
3319  y = x;
3320 }
3321 
3322 void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
3323 {
3324  MPI_Comm comm = PetscObjectComm(obj);
3325 
3326  // get PETSc object
3327  PC pc = (PC)obj;
3328  Mat pA;
3329  ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3330 
3331  // matrix type should be of type MATIS
3332  PetscBool ismatis;
3333  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
3334  PCHKERRQ(pA,ierr);
3335  MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
3336 
3337  // Check options
3338  ParFiniteElementSpace *fespace = opts.fespace;
3339  if (opts.netflux && !fespace)
3340  {
3341  MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3342  }
3343 
3344  // Attach default near-null space to local matrices
3345  {
3346  MatNullSpace nnsp;
3347  Mat lA;
3348  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3349  ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
3350  &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3351  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3352  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3353  }
3354 
3355  // set PETSc PC type to PCBDDC
3356  ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
3357 
3358  PetscInt rst,nl;
3359  ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3360  ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3361 
3362  // index sets for fields splitting and coordinates for nodal spaces
3363  IS *fields = NULL;
3364  PetscInt nf = 0;
3365  PetscInt sdim = 0;
3366  PetscReal *coords = NULL;
3367  if (fespace)
3368  {
3369  int vdim = fespace->GetVDim();
3370 
3371  // Ideally, the block size should be set at matrix creation,
3372  // but the MFEM assembly does not allow one to do so
3373  if (fespace->GetOrdering() == Ordering::byVDIM)
3374  {
3375  Mat lA;
3376  ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3377  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3378  ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3379  }
3380 
3381  // fields
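  // With byVDIM ordering, field i owns the strided dofs
  // {rst+i, rst+i+vdim, ...} (stride bs = vdim); with byNODES ordering,
  // each field is a contiguous chunk of nlf = nl/vdim dofs (stride 1),
  // and consecutive fields start nlf entries apart.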
3382  if (vdim > 1)
3383  {
3384  PetscInt st = rst, bs, inc, nlf;
3385  nf = vdim;
3386  nlf = nl/nf;
3387  ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3388  if (fespace->GetOrdering() == Ordering::byVDIM)
3389  {
3390  inc = 1;
3391  bs = vdim;
3392  }
3393  else
3394  {
3395  inc = nlf;
3396  bs = 1;
3397  }
3398  for (PetscInt i = 0; i < nf; i++)
3399  {
3400  ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3401  st += inc;
3402  }
3403  }
3404 
3405  // coordinates
3406  const FiniteElementCollection *fec = fespace->FEColl();
3407  bool h1space = dynamic_cast<const H1_FECollection*>(fec);
3408  if (h1space)
3409  {
3410  ParFiniteElementSpace *fespace_coords = fespace;
3411 
3412  sdim = fespace->GetParMesh()->SpaceDimension();
3413  if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
3414  {
3415  fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3416  Ordering::byVDIM);
3417  }
3418  VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3419  ParGridFunction gf_coords(fespace_coords);
3420  gf_coords.ProjectCoefficient(coeff_coords);
3421  HypreParVector *hvec_coords = gf_coords.ParallelProject();
3422  PetscScalar *data_coords = (PetscScalar*)mfem::Read(hvec_coords->GetMemory(),
3423  hvec_coords->Size(),false);
3424 
3425  // likely elasticity: we attach rigid-body modes as near-null-space
3426  // information to the local matrices and to the global matrix
3427  if (vdim == sdim)
3428  {
3429  MatNullSpace nnsp;
3430  Mat lA;
3431  Vec pvec_coords,lvec_coords;
3432  ISLocalToGlobalMapping l2g;
3433  PetscSF sf;
3434  PetscLayout rmap;
3435  const PetscInt *gidxs;
3436  PetscInt nleaves;
3437 
3438  ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3439  hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3440  CCHKERRQ(comm,ierr);
3441  ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3442  if (!nnsp)
3443  {
3444  ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3445  CCHKERRQ(comm,ierr);
3446  ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3447  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3448  }
3449  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3450  ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3451  ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3452  ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3453  ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3454  ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3455  ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3456  ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3457  ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3458  CCHKERRQ(comm,ierr);
3459  ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3460  {
3461  const PetscScalar *garray;
3462  PetscScalar *larray;
3463 
3464  ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3465  ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3466 #if PETSC_VERSION_LT(3,15,0)
3467  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3468  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3469 #else
3470  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3471  CCHKERRQ(comm,ierr);
3472  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3473  CCHKERRQ(comm,ierr);
3474 #endif
3475  ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3476  ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3477  }
3478  ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3479  ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3480  CCHKERRQ(PETSC_COMM_SELF,ierr);
3481  ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3482  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3483  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3484  ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3485  }
3486 
3487  // each dof has an associated tuple of coordinates
3488  ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3489  if (nf > 0)
3490  {
3491  for (PetscInt i = 0; i < nf; i++)
3492  {
3493  const PetscInt *idxs;
3494  PetscInt nn;
3495 
3496  // It also handles the case of fespace not ordered by VDIM
3497  ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3498  ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3499  for (PetscInt j = 0; j < nn; j++)
3500  {
3501  PetscInt idx = idxs[j]-rst;
3502  for (PetscInt d = 0; d < sdim; d++)
3503  {
3504  coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3505  }
3506  }
3507  ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3508  }
3509  }
3510  else
3511  {
3512  for (PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3513  }
3514  if (fespace_coords != fespace)
3515  {
3516  delete fespace_coords;
3517  }
3518  delete hvec_coords;
3519  }
3520  }
3521 
3522  // index sets for boundary dofs specification (Essential = dir, Natural = neu)
3523  IS dir = NULL, neu = NULL;
3524 
3525  // Extract l2l matrices
3526  Array<Mat> *l2l = NULL;
3527  if (opts.ess_dof_local || opts.nat_dof_local)
3528  {
3529  PetscContainer c;
3530 
3531  ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c); PCHKERRQ(pA,ierr);
3532  MFEM_VERIFY(c,"Local-to-local PETSc container not present");
3533  ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
3534  }
3535 
3536  // check information about index sets (essential dofs, fields, etc.)
3537 #ifdef MFEM_DEBUG
3538  {
3539  // make sure ess/nat_dof have been collectively set
3540  PetscBool lpr = PETSC_FALSE,pr;
3541  if (opts.ess_dof) { lpr = PETSC_TRUE; }
3542  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3543  PCHKERRQ(pA,ierr);
3544  MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
3545  lpr = PETSC_FALSE;
3546  if (opts.nat_dof) { lpr = PETSC_TRUE; }
3547  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3548  PCHKERRQ(pA,ierr);
3549  MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
3550  // make sure fields have been collectively set
3551  PetscInt ms[2],Ms[2];
3552  ms[0] = -nf; ms[1] = nf;
3553  ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3554  PCHKERRQ(pA,ierr);
3555  MFEM_VERIFY(-Ms[0] == Ms[1],
3556  "number of fields should be the same across processes");
3557  }
3558 #endif
3559 
3560  // boundary sets
3561  if (opts.ess_dof)
3562  {
3563  PetscInt st = opts.ess_dof_local ? 0 : rst;
3564  if (!opts.ess_dof_local)
3565  {
3566  // need to compute the boundary dofs in global ordering
3567  ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
3568  CCHKERRQ(comm,ierr);
3569  ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3570  }
3571  else
3572  {
3573  // need to compute a list for the marked boundary dofs in local ordering
3574  ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3575  CCHKERRQ(comm,ierr);
3576  ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3577  }
3578  }
3579  if (opts.nat_dof)
3580  {
3581  PetscInt st = opts.nat_dof_local ? 0 : rst;
3582  if (!opts.nat_dof_local)
3583  {
3584  // need to compute the boundary dofs in global ordering
3585  ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3586  CCHKERRQ(comm,ierr);
3587  ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3588  }
3589  else
3590  {
3591  // need to compute a list for the marked boundary dofs in local ordering
3592  ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3593  CCHKERRQ(comm,ierr);
3594  ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3595  }
3596  }
3597 
3598  // field splitting
3599  if (fields)
3600  {
3601  ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3602  }
3603  for (PetscInt i = 0; i < nf; i++)
3604  {
3605  ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3606  }
3607  ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3608 
3609  // coordinates
3610  if (coords)
3611  {
3612  ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3613  }
3614  ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3615 
3616  // code for block size is disabled since we cannot change the matrix
3617  // block size after it has been setup
3618  // int bs = 1;
3619 
3620  // Customize using the finite element space (if any)
3621  if (fespace)
3622  {
3623  const FiniteElementCollection *fec = fespace->FEColl();
3624  bool edgespace, rtspace, h1space;
3625  bool needint = opts.netflux;
3626  bool tracespace, rt_tracespace, edge_tracespace;
3627  int vdim, dim, p;
3628  PetscBool B_is_Trans = PETSC_FALSE;
3629 
3630  ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3631  dim = pmesh->Dimension();
3632  vdim = fespace->GetVDim();
3633  h1space = dynamic_cast<const H1_FECollection*>(fec);
3634  rtspace = dynamic_cast<const RT_FECollection*>(fec);
3635  edgespace = dynamic_cast<const ND_FECollection*>(fec);
3636  edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3637  rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3638  tracespace = edge_tracespace || rt_tracespace;
3639 
3640  p = 1;
3641  if (fespace->GetNE() > 0)
3642  {
3643  if (!tracespace)
3644  {
3645  p = fespace->GetElementOrder(0);
3646  }
3647  else
3648  {
3649  p = fespace->GetFaceOrder(0);
3650  if (dim == 2) { p++; }
3651  }
3652  }
3653 
3654  if (edgespace) // H(curl)
3655  {
3656  if (dim == 2)
3657  {
3658  needint = true;
3659  if (tracespace)
3660  {
3661  MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3662  " not using auxiliary quadrature");
3663  needint = false;
3664  }
3665  }
3666  else
3667  {
3668  FiniteElementCollection *vfec;
3669  if (tracespace)
3670  {
3671  vfec = new H1_Trace_FECollection(p,dim);
3672  }
3673  else
3674  {
3675  vfec = new H1_FECollection(p,dim);
3676  }
3677  ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3678  ParDiscreteLinearOperator *grad;
3679  grad = new ParDiscreteLinearOperator(vfespace,fespace);
3680  if (tracespace)
3681  {
3682  grad->AddTraceFaceInterpolator(new GradientInterpolator);
3683  }
3684  else
3685  {
3686  grad->AddDomainInterpolator(new GradientInterpolator);
3687  }
3688  grad->Assemble();
3689  grad->Finalize();
3690  HypreParMatrix *hG = grad->ParallelAssemble();
3691  PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3692  delete hG;
3693  delete grad;
3694 
3695  PetscBool conforming = PETSC_TRUE;
3696  if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3697  ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3698  PCHKERRQ(pc,ierr);
3699  delete vfec;
3700  delete vfespace;
3701  delete G;
3702  }
3703  }
3704  else if (rtspace) // H(div)
3705  {
3706  needint = true;
3707  if (tracespace)
3708  {
3709  MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3710  " auxiliary quadrature");
3711  needint = false;
3712  }
3713  }
3714  else if (h1space) // H(grad), only for the vector case
3715  {
3716  if (vdim != dim) { needint = false; }
3717  }
3718 
3719  PetscParMatrix *B = NULL;
3720  if (needint)
3721  {
3722  // Generate bilinear form in unassembled format which is used to
3723  // compute the net-flux across subdomain boundaries for H(div) and
3724  // Elasticity/Stokes, and the line integral \int u x n of 2D H(curl) fields
3725  FiniteElementCollection *auxcoll;
3726  if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3727  else
3728  {
3729  if (h1space)
3730  {
3731  auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3732  }
3733  else
3734  {
3735  auxcoll = new L2_FECollection(p,dim);
3736  }
3737  }
3738  ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3739  ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3740 
3741  if (edgespace)
3742  {
3743  if (tracespace)
3744  {
3745  b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3746  }
3747  else
3748  {
3749  b->AddDomainIntegrator(new VectorFECurlIntegrator);
3750  }
3751  }
3752  else if (rtspace)
3753  {
3754  if (tracespace)
3755  {
3756  b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3757  }
3758  else
3759  {
3760  b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3761  }
3762  }
3763  else
3764  {
3765  b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3766  }
3767  b->Assemble();
3768  b->Finalize();
3769  OperatorHandle Bh(Operator::PETSC_MATIS);
3770  b->ParallelAssemble(Bh);
3771  Bh.Get(B);
3772  Bh.SetOperatorOwner(false);
3773 
3774  if (dir) // if essential dofs are present, we need to zero the columns
3775  {
3776  Mat pB = *B;
3777  ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3778  if (!opts.ess_dof_local)
3779  {
3780  ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3781  }
3782  else
3783  {
3784  ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3785  }
3786  B_is_Trans = PETSC_TRUE;
3787  }
3788  delete b;
3789  delete pspace;
3790  delete auxcoll;
3791  }
3792 
3793  if (B)
3794  {
3795  ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3796  }
3797  delete B;
3798  }
3799  ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3800  ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3801 }
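
// Example usage (a minimal sketch; assumes the operator was assembled in
// unassembled format, e.g. via a->SetOperatorType(Operator::PETSC_MATIS)
// before calling ParallelAssemble, and that 'fespace' and the essential
// dof list are available):
//
//   PetscBDDCSolverParams opts;
//   opts.SetSpace(&fespace);            // enables fields/coordinates setup
//   opts.SetEssBdrDofs(&ess_tdof_list); // essential dofs, global ordering
//   PetscBDDCSolver bddc(A, opts);
//   pcg.SetPreconditioner(bddc);        // pcg: a PetscPCGSolver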
3802 
3803 PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3804  const PetscBDDCSolverParams &opts,
3805  const std::string &prefix)
3806  : PetscPreconditioner(A,prefix)
3807 {
3808  BDDCSolverConstructor(opts);
3809  Customize();
3810 }
3811 
3812 PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3813  const PetscBDDCSolverParams &opts,
3814  const std::string &prefix)
3815  : PetscPreconditioner(comm,op,prefix)
3816 {
3817  BDDCSolverConstructor(opts);
3818  Customize();
3819 }
3820 
3821 PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3822  const string &prefix)
3823  : PetscPreconditioner(comm,op,prefix)
3824 {
3825  PC pc = (PC)obj;
3826  ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3827 
3828  Mat pA;
3829  ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3830 
3831  // Check if pA is of type MATNEST
3832  PetscBool isnest;
3833  ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest); PCHKERRQ(pA,ierr);
3834 
3835  PetscInt nr = 0;
3836  IS *isrow = NULL;
3837  if (isnest) // we know the fields
3838  {
3839  ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3840  ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3841  ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3842  }
3843 
3844  // We need to customize here, before setting the index sets.
3845  // This is because PCFieldSplitSetType customizes the function
3846  // pointers. SubSolver options will be processed during PCApply
3847  Customize();
3848 
3849  for (PetscInt i=0; i<nr; i++)
3850  {
3851  ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3852  }
3853  ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3854 }
3855 
3856 PetscH2Solver::PetscH2Solver(Operator &op,
3857  ParFiniteElementSpace *fes,
3858  const std::string &prefix)
3859  : PetscPreconditioner(fes->GetParMesh()->GetComm(),prefix)
3860 {
3861  PetscParMatrix A(GetComm(),&op,Operator::ANY_TYPE);
3862  MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
3863  MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE);
3864  SetOperator(A);
3865  H2SolverConstructor(fes);
3866  Customize();
3867 }
3868 
3869 void PetscH2Solver::H2SolverConstructor(ParFiniteElementSpace *fes)
3870 {
3871 #if defined(PETSC_HAVE_H2OPUS)
3872  int sdim = fes->GetParMesh()->SpaceDimension();
3873  int vdim = fes->GetVDim();
3874  const FiniteElementCollection *fec = fes->FEColl();
3875  ParFiniteElementSpace *fes_coords = NULL;
3876 
3877  if (vdim != sdim || fes->GetOrdering() != Ordering::byVDIM)
3878  {
3879  fes_coords = new ParFiniteElementSpace(fes->GetParMesh(),fec,sdim,
3880  Ordering::byVDIM);
3881  fes = fes_coords;
3882  }
3883  VectorFunctionCoefficient ccoords(sdim, func_coords);
3884 
3885  ParGridFunction coords(fes);
3886  coords.ProjectCoefficient(ccoords);
3887  Vector c(fes->GetTrueVSize());
3888  coords.ParallelProject(c);
3889  delete fes_coords;
3890  PCSetType(*this,PCH2OPUS);
3891  PCSetCoordinates(*this,sdim,c.Size()/sdim,(PetscReal*)mfem::Read(c.GetMemory(),
3892  c.Size(),false));
3893  PCSetFromOptions(*this);
3894 #else
3895  MFEM_ABORT("Need PETSc configured with --download-h2opus");
3896 #endif
3897 }
3898 
3899 // PetscNonlinearSolver methods
3900 
3901 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3902  const std::string &prefix)
3903  : PetscSolver(), Solver()
3904 {
3905  // Create the actual solver object
3906  SNES snes;
3907  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3908  obj = (PetscObject)snes;
3909  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3910  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3911 
3912  // Allocate private solver context
3913  CreatePrivateContext();
3914 }
3915 
3916 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
3917  const std::string &prefix)
3918  : PetscSolver(), Solver()
3919 {
3920  // Create the actual solver object
3921  SNES snes;
3922  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3923  obj = (PetscObject)snes;
3924  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3925  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3926 
3927  // Allocate private solver context
3928  CreatePrivateContext();
3929 
3930  SetOperator(op);
3931 }
3932 
3933 PetscNonlinearSolver::~PetscNonlinearSolver()
3934 {
3935  MPI_Comm comm;
3936  SNES snes = (SNES)obj;
3937  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
3938  ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3939 }
3940 
3941 void PetscNonlinearSolver::SetOperator(const Operator &op)
3942 {
3943  SNES snes = (SNES)obj;
3944 
3945  if (operatorset)
3946  {
3947  PetscBool ls,gs;
3948  void *fctx,*jctx;
3949 
3950  ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3951  PCHKERRQ(snes, ierr);
3952  ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3953  PCHKERRQ(snes, ierr);
3954 
3955  ls = (PetscBool)(height == op.Height() && width == op.Width() &&
3956  (void*)&op == fctx &&
3957  (void*)&op == jctx);
3958  ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3959  PetscObjectComm((PetscObject)snes));
3960  PCHKERRQ(snes,ierr);
3961  if (!gs)
3962  {
3963  ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3964  delete X;
3965  delete B;
3966  X = B = NULL;
3967  }
3968  }
3969  else
3970  {
3971  /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
3972  yet set. We default to backtracking */
3973  SNESLineSearch ls;
3974  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3975  ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3976  }
3977 
3978  // If we do not pass matrices in, the default matrix type for DMShell is MATDENSE
3979  // in PETSc 3.15, which may cause issues.
3980  Mat dummy;
3981  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)snes),op.Height(),
3982  op.Height(),&dummy);
3983 
3984  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3985  snes_ctx->op = (Operator*)&op;
3986  ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
3987  PCHKERRQ(snes, ierr);
3988  ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
3989  (void *)snes_ctx);
3990  PCHKERRQ(snes, ierr);
3991 
3992  ierr = MatDestroy(&dummy);
3993  PCHKERRQ(snes, ierr);
3994 
3995  // Update PetscSolver
3996  operatorset = true;
3997 
3998  // Update the Operator fields.
3999  height = op.Height();
4000  width = op.Width();
4001 }
4002 
4003 void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
4004 {
4005  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4006  snes_ctx->jacType = jacType;
4007 }
4008 
4009 void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *,const Vector&,
4010  double*))
4011 {
4012  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4013  snes_ctx->objective = objfn;
4014 
4015  SNES snes = (SNES)obj;
4016  ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
4017  PCHKERRQ(snes, ierr);
4018 }
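// A callback sketch matching the objective signature registered above
// (the merit function shown is only an example):
//
//   static void my_objective(mfem::Operator *op, const mfem::Vector &x,
//                            double *f)
//   {
//      *f = x.Norml2(); // value used by the SNES line search
//   }
//   ...
//   newton.SetObjective(my_objective);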
4019 
4020 void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *,const Vector&,
4021  Vector&, Vector&,
4022  bool&, bool&))
4023 {
4024  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4025  snes_ctx->postcheck = post;
4026 
4027  SNES snes = (SNES)obj;
4028  SNESLineSearch ls;
4029  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4030  ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
4031  PCHKERRQ(ls, ierr);
4032 }
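// A post-check callback sketch (illustrative; Y is the proposed update,
// W the candidate solution, and the damping is just an example):
//
//   static void my_postcheck(mfem::Operator *op, const mfem::Vector &X,
//                            mfem::Vector &Y, mfem::Vector &W,
//                            bool &changed_y, bool &changed_w)
//   {
//      Y *= 0.5;         // e.g. damp the Newton step
//      changed_y = true;
//      changed_w = false;
//   }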
4033 
4034 void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *,int,
4035  const Vector&,
4036  const Vector&,
4037  const Vector&,
4038  const Vector&))
4039 {
4040  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4041  snes_ctx->update = update;
4042 
4043  SNES snes = (SNES)obj;
4044  ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4045 }
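// An update callback sketch (illustrative); it is invoked at the start of
// each SNES iteration with the residual F, the current iterate X, the last
// update D and the previous iterate P:
//
//   static void my_update(mfem::Operator *op, int it, const mfem::Vector &F,
//                         const mfem::Vector &X, const mfem::Vector &D,
//                         const mfem::Vector &P)
//   { /* e.g. refresh coefficients of op that depend on X */ }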
4046 
4047 void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
4048 {
4049  SNES snes = (SNES)obj;
4050 
4051  bool b_nonempty = b.Size();
4052  if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
4053  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
4054  X->PlaceMemory(x.GetMemory(),iterative_mode);
4055  if (b_nonempty) { B->PlaceMemory(b.GetMemory()); }
4056  else { *B = 0.0; }
4057 
4058  Customize();
4059 
4060  if (!iterative_mode) { *X = 0.; }
4061 
4062  // Solve the system.
4063  ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4064  X->ResetMemory();
4065  if (b_nonempty) { B->ResetMemory(); }
4066 }
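// A minimal solve sketch (illustrative; nl_op is a user mfem::Operator
// implementing Mult() and GetGradient()):
//
//   mfem::PetscNonlinearSolver newton(MPI_COMM_WORLD, nl_op, "newton_");
//   newton.iterative_mode = true; // use x as the initial guess
//   mfem::Vector zero;            // empty RHS: solve F(x) = 0
//   newton.Mult(zero, x);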
4067 
4068 // PetscODESolver methods
4069 
4070 PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
4071  : PetscSolver(), ODESolver()
4072 {
4073  // Create the actual solver object
4074  TS ts;
4075  ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4076  obj = (PetscObject)ts;
4077  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
4078  ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4079 
4080  // Allocate private solver context
4081  CreatePrivateContext();
4082 
4083  // Default options, to comply with the current interface to ODESolver.
4084  ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4085  PCHKERRQ(ts,ierr);
4086  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4087  PCHKERRQ(ts,ierr);
4088  TSAdapt tsad;
4089  ierr = TSGetAdapt(ts,&tsad);
4090  PCHKERRQ(ts,ierr);
4091  ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4092  PCHKERRQ(ts,ierr);
4093 }
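// The defaults set above (bounded step count, step-over of the final time,
// no adaptivity) can be overridden at runtime through the option prefix,
// e.g. with prefix "ode_" (illustrative):
//
//   -ode_ts_type beuler -ode_ts_adapt_type basic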
4094 
4095 PetscODESolver::~PetscODESolver()
4096 {
4097  MPI_Comm comm;
4098  TS ts = (TS)obj;
4099  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
4100  ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4101 }
4102 
4103 void PetscODESolver::Init(TimeDependentOperator &f_,
4104  enum PetscODESolver::Type type)
4105 {
4106  TS ts = (TS)obj;
4107 
4108  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4109  if (operatorset)
4110  {
4111  ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4112  delete X;
4113  X = NULL;
4114  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4115  ts_ctx->cached_ijacstate = -1;
4116  ts_ctx->cached_rhsjacstate = -1;
4117  ts_ctx->cached_splits_xstate = -1;
4118  ts_ctx->cached_splits_xdotstate = -1;
4119  }
4120  f = &f_;
4121 
4122  // Set functions in TS
4123  ts_ctx->op = &f_;
4124  if (f_.isImplicit())
4125  {
4126  Mat dummy;
4127  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4128  f_.Height(),&dummy);
4129  PCHKERRQ(ts, ierr);
4130  ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
4131  PCHKERRQ(ts, ierr);
4132  ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (void *)ts_ctx);
4133  PCHKERRQ(ts, ierr);
4134  ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4135  PCHKERRQ(ts, ierr);
4136  ierr = MatDestroy(&dummy);
4137  PCHKERRQ(ts, ierr);
4138  }
4139  if (!f_.isHomogeneous())
4140  {
4141  Mat dummy = NULL;
4142  if (!f_.isImplicit())
4143  {
4144  ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4145  PCHKERRQ(ts, ierr);
4146  }
4147  else
4148  {
4149  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4150  f_.Height(),&dummy);
4151  PCHKERRQ(ts, ierr);
4152  }
4153  ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
4154  PCHKERRQ(ts, ierr);
4155  ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4156  (void *)ts_ctx);
4157  PCHKERRQ(ts, ierr);
4158  ierr = MatDestroy(&dummy);
4159  PCHKERRQ(ts, ierr);
4160  }
4161  operatorset = true;
4162 
4163  SetType(type);
4164 
4165  // Set solution vector
4166  PetscParVector X(PetscObjectComm(obj),*f,false,true);
4167  ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4168 
4169  // Compose special purpose function for PDE-constrained optimization
4170  PetscBool use = PETSC_TRUE;
4171  ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
4172  if (use && f_.isImplicit())
4173  {
4174  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4175  __mfem_ts_computesplits);
4176  PCHKERRQ(ts,ierr);
4177  }
4178  else
4179  {
4180  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4181  NULL);
4182  PCHKERRQ(ts,ierr);
4183  }
4184 }
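// A minimal initialization sketch (illustrative; oper is a user
// mfem::TimeDependentOperator):
//
//   mfem::PetscODESolver ode(MPI_COMM_WORLD, "ode_");
//   ode.Init(oper, mfem::PetscODESolver::ODE_SOLVER_GENERAL);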
4185 
4186 void PetscODESolver::SetJacobianType(Operator::Type jacType)
4187 {
4188  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4189  ts_ctx->jacType = jacType;
4190 }
4191 
4192 PetscODESolver::Type PetscODESolver::GetType() const
4193 {
4194  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4195  return ts_ctx->type;
4196 }
4197 
4198 void PetscODESolver::SetType(PetscODESolver::Type type)
4199 {
4200  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4201 
4202  TS ts = (TS)obj;
4203  ts_ctx->type = type;
4204  if (type == ODE_SOLVER_LINEAR)
4205  {
4206  ierr = TSSetProblemType(ts, TS_LINEAR);
4207  PCHKERRQ(ts, ierr);
4208  }
4209  else
4210  {
4211  ierr = TSSetProblemType(ts, TS_NONLINEAR);
4212  PCHKERRQ(ts, ierr);
4213  }
4214 }
4215 
4216 void PetscODESolver::Step(Vector &x, double &t, double &dt)
4217 {
4218  // Pass the parameters to PETSc.
4219  TS ts = (TS)obj;
4220  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4221  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4222 
4223  PetscInt i;
4224  ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4225 
4226  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4227  X->PlaceMemory(x.GetMemory(),true);
4228 
4229  Customize();
4230 
4231  // Monitor initial step
4232  if (!i)
4233  {
4234  ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4235  }
4236 
4237  // Take the step.
4238  ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4239  ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4240 
4241  // Return the current time and the time step actually used to the caller.
4242  // We cannot use TSGetTimeStep(), since it returns the next candidate step
4243  PetscReal pt;
4244  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4245  dt = pt - (PetscReal)t;
4246  t = pt;
4247 
4248  // Monitor current step
4249  ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4250 
4251  X->ResetMemory();
4252 }
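// A typical driver loop for Step() (illustrative):
//
//   double t = 0.0, dt = 1e-3;
//   while (t < t_final)
//   {
//      ode.Step(x, t, dt); // on return, t and dt hold the values used
//   }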
4253 
4254 void PetscODESolver::Run(Vector &x, double &t, double &dt, double t_final)
4255 {
4256  // Give the parameters to PETSc.
4257  TS ts = (TS)obj;
4258  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4259  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4260  ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4261  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4262  PCHKERRQ(ts, ierr);
4263 
4264  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4265  X->PlaceMemory(x.GetMemory(),true);
4266 
4267  Customize();
4268 
4269  // Reset the Jacobian caching, since the user may have changed
4270  // the parameters of the solver.
4271  // We don't do this in Step(), because two consecutive
4272  // Step() calls are done with the same operator
4273  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4274  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4275  ts_ctx->cached_ijacstate = -1;
4276  ts_ctx->cached_rhsjacstate = -1;
4277  ts_ctx->cached_splits_xstate = -1;
4278  ts_ctx->cached_splits_xdotstate = -1;
4279 
4280  // Take the steps.
4281  ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4282  X->ResetMemory();
4283 
4284  // Get back final time and time step to caller.
4285  PetscReal pt;
4286  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4287  t = pt;
4288  ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4289  dt = pt;
4290 }
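// Alternatively, a single Run() call advances the solution up to t_final,
// with the last step adjusted to match it exactly (illustrative):
//
//   ode.Run(x, t, dt, t_final);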
4291 
4292 } // namespace mfem
4293 
4294 #include "petsc/private/petscimpl.h"
4295 #include "petsc/private/matimpl.h"
4296 
4297 // auxiliary functions
4298 static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
4299  void* ctx)
4300 {
4301  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4302 
4303  PetscFunctionBeginUser;
4304  if (!monctx)
4305  {
4306  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4307  }
4308  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4309  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4310  monctx->monitor);
4311 
4312  if (user_monitor->mon_sol)
4313  {
4314  mfem::PetscParVector V(x,true);
4315  user_monitor->MonitorSolution(it,t,V);
4316  }
4317  user_monitor->MonitorSolver(solver);
4318  PetscFunctionReturn(0);
4319 }
4320 
4321 static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
4322  Vec f,void *ctx)
4323 {
4324  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4325 
4326  PetscFunctionBeginUser;
4327  mfem::PetscParVector xx(x,true);
4328  mfem::PetscParVector yy(xp,true);
4329  mfem::PetscParVector ff(f,true);
4330 
4331  mfem::TimeDependentOperator *op = ts_ctx->op;
4332  op->SetTime(t);
4333 
4334  if (ts_ctx->bchandler)
4335  {
4336  // we evaluate the ImplicitMult method with the correct bc;
4337  // this means the time derivative at the essential boundary
4338  // dofs is zero
4339  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
4340  if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
4341  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4342  mfem::Vector* txx = ts_ctx->work;
4343  mfem::Vector* txp = ts_ctx->work2;
4344  bchandler->SetTime(t);
4345  bchandler->ApplyBC(xx,*txx);
4346  bchandler->ZeroBC(yy,*txp);
4347  op->ImplicitMult(*txx,*txp,ff);
4348  // and fix the residual (i.e. f_\partial\Omega = u - g(t))
4349  bchandler->FixResidualBC(xx,ff);
4350  }
4351  else
4352  {
4353  // use the ImplicitMult method of the class
4354  op->ImplicitMult(xx,yy,ff);
4355  }
4356  ff.UpdateVecFromFlags();
4357  PetscFunctionReturn(0);
4358 }
4359 
4360 static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
4361  void *ctx)
4362 {
4363  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4364 
4365  PetscFunctionBeginUser;
4366  if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
4367  mfem::PetscParVector xx(x,true);
4368  mfem::PetscParVector ff(f,true);
4369  mfem::TimeDependentOperator *top = ts_ctx->op;
4370  top->SetTime(t);
4371 
4372  // use the ExplicitMult method - compute the RHS function
4373  top->ExplicitMult(xx,ff);
4374 
4375  ff.UpdateVecFromFlags();
4376  PetscFunctionReturn(0);
4377 }
4378 
4379 static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
4380  Vec xp, PetscReal shift, Mat A, Mat P,
4381  void *ctx)
4382 {
4383  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4384  mfem::Vector *xx;
4385  PetscScalar *array;
4386  PetscReal eps = 0.001; /* 0.1% difference */
4387  PetscInt n;
4388  PetscObjectState state;
4389  PetscErrorCode ierr;
4390 
4391  PetscFunctionBeginUser;
4392  // Matrix-free case
4393  if (A && A != P)
4394  {
4395  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4396  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4397  }
4398 
4399  // avoid recomputing the Jacobian if we have already done so;
4400  // the relative tolerance comparison should be fine, since two
4401  // consecutive shifts should have similar magnitude
4402  ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4403  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4404  std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4405  state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
4406 
4407  // update time
4408  mfem::TimeDependentOperator *op = ts_ctx->op;
4409  op->SetTime(t);
4410 
4411  // wrap Vecs with Vectors
4412  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4413  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4414  mfem::Vector yy(array,n);
4415  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4416  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4417  if (!ts_ctx->bchandler)
4418  {
4419  xx = new mfem::Vector(array,n);
4420  }
4421  else
4422  {
4423  // make sure we compute a Jacobian with the correct boundary values
4424  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4425  mfem::Vector txx(array,n);
4426  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4427  xx = ts_ctx->work;
4428  bchandler->SetTime(t);
4429  bchandler->ApplyBC(txx,*xx);
4430  }
4431  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4432 
4433  // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
4434  mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
4435  if (!ts_ctx->bchandler) { delete xx; }
4436  ts_ctx->cached_shift = shift;
4437 
4438  // Convert to the operator type requested if needed
4439  bool delete_pA = false;
4440  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4441  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4442  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4443  pA->GetType() != ts_ctx->jacType))
4444  {
4445  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4446  ts_ctx->jacType);
4447  delete_pA = true;
4448  }
4449 
4450  // Eliminate essential dofs
4451  if (ts_ctx->bchandler)
4452  {
4453  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4454  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4455  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4456  }
4457 
4458  // Get nonzerostate
4459  PetscObjectState nonzerostate;
4460  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4461 
4462  // Avoid an unneeded copy of the matrix by replacing its header
4463  Mat B;
4464  B = pA->ReleaseMat(false);
4465  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4466  if (delete_pA) { delete pA; }
4467 
4468  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4469  // preconditioner fails because MatCreateSubMatrix_Nest does not
4470  // actually return a matrix. Instead, for efficiency reasons,
4471  // it returns a reference to the submatrix. The second time it
4472  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4473  // aborts since the two submatrices are actually different.
4474  // We circumvent this issue by incrementing the nonzero state
4475  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4476  // This does not impact performance in the case of MATNEST
4477  PetscBool isnest;
4478  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4479  CHKERRQ(ierr);
4480  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4481 
4482  // Jacobian reuse
4483  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
4484  CHKERRQ(ierr);
4485 
4486  // Fool DM
4487  DM dm;
4488  MatType mtype;
4489  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4490  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4491  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4492  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4493  PetscFunctionReturn(0);
4494 }
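// For reference: given an implicit ODE F(t,x,xdot) = 0, PETSc requests here
// the Jacobian J = shift*dF/dxdot + dF/dx, which is exactly what
// TimeDependentOperator::GetImplicitGradient(x,xdot,shift) returns above.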
4495 
4496 static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
4497  Mat Ax,Mat Jx,
4498  Mat Axp,Mat Jxp)
4499 {
4500  __mfem_ts_ctx* ts_ctx;
4501  mfem::Vector *xx;
4502  PetscScalar *array;
4503  PetscInt n;
4504  PetscObjectState state;
4505  PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4506  PetscBool assembled;
4507  PetscErrorCode ierr;
4508 
4509  PetscFunctionBeginUser;
4510  // Matrix-free cases
4511  if (Ax && Ax != Jx)
4512  {
4513  ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4514  ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4515  }
4516  if (Axp && Axp != Jxp)
4517  {
4518  ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4519  ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4520  }
4521 
4522  ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
4523 
4524  // avoid recomputing the Jacobians if we have already done so
4525  ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
4526  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4527  state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4528  ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
4529  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4530  state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4531  if (!rx && !rxp) { PetscFunctionReturn(0); }
4532 
4533  // update time
4534  mfem::TimeDependentOperator *op = ts_ctx->op;
4535  op->SetTime(t);
4536 
4537  // wrap Vecs with Vectors
4538  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4539  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4540  mfem::Vector yy(array,n);
4541  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4542  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4543  if (!ts_ctx->bchandler)
4544  {
4545  xx = new mfem::Vector(array,n);
4546  }
4547  else
4548  {
4549  // make sure we compute a Jacobian with the correct boundary values
4550  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4551  mfem::Vector txx(array,n);
4552  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4553  xx = ts_ctx->work;
4554  bchandler->SetTime(t);
4555  bchandler->ApplyBC(txx,*xx);
4556  }
4557  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4558 
4559  // We don't have a specialized interface, so we compute the split Jacobians
4560  // by evaluating the implicit gradient method twice with the appropriate shifts
4561 
4562  // First we compute the state Jacobian
4563  mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
4564 
4565  // Convert to the operator type requested if needed
4566  bool delete_mat = false;
4567  mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
4568  (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
4569  if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4570  pJx->GetType() != ts_ctx->jacType))
4571  {
4572  if (pJx)
4573  {
4574  Mat B = *pJx;
4575  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4576  }
4577  pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
4578  ts_ctx->jacType);
4579  delete_mat = true;
4580  }
4581  if (rx)
4582  {
4583  ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4584  if (assembled)
4585  {
4586  ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4587  }
4588  else
4589  {
4590  Mat B;
4591  ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4592  ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4593  }
4594  }
4595  if (delete_mat) { delete pJx; }
4596  pJx = new mfem::PetscParMatrix(Jx,true);
4597 
4598  // Eliminate essential dofs
4599  if (ts_ctx->bchandler)
4600  {
4601  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4602  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4603  pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4604  }
4605 
4606  // Then the Jacobian with respect to the time derivative of the state.
4607  // Note that this is usually the mass matrix
4608  mfem::PetscParMatrix *pJxp = NULL;
4609  if (rxp)
4610  {
4611  delete_mat = false;
4612  mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
4613  pJxp = const_cast<mfem::PetscParMatrix *>
4614  (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
4615  if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4616  pJxp->GetType() != ts_ctx->jacType))
4617  {
4618  if (pJxp)
4619  {
4620  Mat B = *pJxp;
4621  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4622  }
4623  pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
4624  &oJxp,ts_ctx->jacType);
4625  delete_mat = true;
4626  }
4627 
4628  ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4629  if (assembled)
4630  {
4631  ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4632  }
4633  else
4634  {
4635  Mat B;
4636  ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4637  ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4638  }
4639  if (delete_mat) { delete pJxp; }
4640  pJxp = new mfem::PetscParMatrix(Jxp,true);
4641 
4642  // Eliminate essential dofs
4643  if (ts_ctx->bchandler)
4644  {
4645  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4646  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4647  pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
4648  }
4649 
4650  // Obtain the time-dependent part of the Jacobian by subtracting
4651  // the state Jacobian.
4652  // We don't use the class operator "-=", since we know that
4653  // the sparsity pattern of the two matrices is the same
4654  ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4655  }
4656 
4657  // Jacobian reuse
4658  ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4659  CHKERRQ(ierr);
4660  ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4661  CHKERRQ(ierr);
4662 
4663  delete pJx;
4664  delete pJxp;
4665  if (!ts_ctx->bchandler) { delete xx; }
4666  PetscFunctionReturn(0);
4667 }
4668 
4669 static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4670  Mat A, Mat P, void *ctx)
4671 {
4672  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4673  mfem::Vector *xx;
4674  PetscScalar *array;
4675  PetscInt n;
4676  PetscObjectState state;
4677  PetscErrorCode ierr;
4678 
4679  PetscFunctionBeginUser;
4680  // Matrix-free case
4681  if (A && A != P)
4682  {
4683  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4684  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4685  }
4686 
4687  // avoid recomputing the Jacobian if we have already done so
4688  ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4689  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4690  state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
4691 
4692  // update time
4693  mfem::TimeDependentOperator *op = ts_ctx->op;
4694  op->SetTime(t);
4695 
4696  // wrap Vec with Vector
4697  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4698  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4699  if (!ts_ctx->bchandler)
4700  {
4701  xx = new mfem::Vector(array,n);
4702  }
4703  else
4704  {
4705  // make sure we compute a Jacobian with the correct boundary values
4706  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4707  mfem::Vector txx(array,n);
4708  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4709  xx = ts_ctx->work;
4710  bchandler->SetTime(t);
4711  bchandler->ApplyBC(txx,*xx);
4712  }
4713  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4714 
4715  // Use TimeDependentOperator::GetExplicitGradient(x)
4716  mfem::Operator& J = op->GetExplicitGradient(*xx);
4717  if (!ts_ctx->bchandler) { delete xx; }
4718 
4719  // Convert to the operator type requested if needed
4720  bool delete_pA = false;
4721  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4722  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4723  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4724  pA->GetType() != ts_ctx->jacType))
4725  {
4726  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4727  ts_ctx->jacType);
4728  delete_pA = true;
4729  }
4730 
4731  // Eliminate essential dofs
4732  if (ts_ctx->bchandler)
4733  {
4734  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4735  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4736  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4737  }
4738 
4739  // Get nonzerostate
4740  PetscObjectState nonzerostate;
4741  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4742 
4743  // Avoid an unneeded copy of the matrix by replacing its header
4744  Mat B;
4745  B = pA->ReleaseMat(false);
4746  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4747  if (delete_pA) { delete pA; }
4748 
4749  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4750  // preconditioner fails because MatCreateSubMatrix_Nest does not
4751  // actually return a matrix. Instead, for efficiency reasons,
4752  // it returns a reference to the submatrix. The second time it
4753  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4754  // aborts since the two submatrices are actually different.
4755  // We circumvent this issue by incrementing the nonzero state
4756  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4757  // This does not impact performance in the case of MATNEST
4758  PetscBool isnest;
4759  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4760  CHKERRQ(ierr);
4761  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4762 
4763  // Jacobian reuse
4764  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4765  {
4766  ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4767  }
4768  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_rhsjacstate);
4769  CHKERRQ(ierr);
4770 
4771  // Fool DM
4772  DM dm;
4773  MatType mtype;
4774  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4775  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4776  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4777  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4778  PetscFunctionReturn(0);
4779 }
4780 
4781 static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4782  void* ctx)
4783 {
4784  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4785 
4786  PetscFunctionBeginUser;
4787  if (!monctx)
4788  {
4789  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4790  }
4791 
4792  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4793  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4794  monctx->monitor);
4795  if (user_monitor->mon_sol)
4796  {
4797  Vec x;
4798  PetscErrorCode ierr;
4799 
4800  ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4801  mfem::PetscParVector V(x,true);
4802  user_monitor->MonitorSolution(it,res,V);
4803  }
4804  if (user_monitor->mon_res)
4805  {
4806  Vec x;
4807  PetscErrorCode ierr;
4808 
4809  ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4810  mfem::PetscParVector V(x,true);
4811  user_monitor->MonitorResidual(it,res,V);
4812  }
4813  user_monitor->MonitorSolver(solver);
4814  PetscFunctionReturn(0);
4815 }
4816 
4817 static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4818  void *ctx)
4819 {
4820  PetscScalar *array;
4821  PetscInt n;
4822  PetscErrorCode ierr;
4823  mfem::Vector *xx;
4824  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4825 
4826  PetscFunctionBeginUser;
4827  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4828  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4829  if (!snes_ctx->bchandler)
4830  {
4831  xx = new mfem::Vector(array,n);
4832  }
4833  else
4834  {
4835  // make sure we compute a Jacobian with the correct boundary values
4836  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4837  mfem::Vector txx(array,n);
4838  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4839  xx = snes_ctx->work;
4840  bchandler->ApplyBC(txx,*xx);
4841  }
4842 
4843  // Use Operator::GetGradient(x)
4844  mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4845  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4846  if (!snes_ctx->bchandler) { delete xx; }
4847 
4848  // Convert to the operator type requested if needed
4849  bool delete_pA = false;
4850  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4851  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4852  if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4853  pA->GetType() != snes_ctx->jacType))
4854  {
4855  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4856  snes_ctx->jacType);
4857  delete_pA = true;
4858  }
4859 
4860  // Eliminate essential dofs
4861  if (snes_ctx->bchandler)
4862  {
4863  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4864  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4865  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4866  }
4867 
4868  // Get nonzerostate
4869  PetscObjectState nonzerostate;
4870  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4871 
4872  // Avoid an unneeded copy of the matrix by replacing its header
4873  Mat B = pA->ReleaseMat(false);
4874  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4875  if (delete_pA) { delete pA; }
4876 
4877  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4878  // preconditioner fails because MatCreateSubMatrix_Nest does not
4879  // actually return a matrix. Instead, for efficiency reasons,
4880  // it returns a reference to the submatrix. The second time it
4881  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4882  // aborts since the two submatrices are actually different.
4883  // We circumvent this issue by incrementing the nonzero state
4884  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4885  // This does not impact performance in the case of MATNEST
4886  PetscBool isnest;
4887  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4888  CHKERRQ(ierr);
4889  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4890 
4891  // Matrix-free case
4892  if (A && A != P)
4893  {
4894  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4895  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4896  }
4897 
4898  // Fool DM
4899  DM dm;
4900  MatType mtype;
4901  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4902  ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4903  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4904  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4905  PetscFunctionReturn(0);
4906 }
4907 
4908 static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4909 {
4910  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4911 
4912  PetscFunctionBeginUser;
4913  mfem::PetscParVector xx(x,true);
4914  mfem::PetscParVector ff(f,true);
4915  if (snes_ctx->bchandler)
4916  {
4917  // we evaluate the Mult method with the correct bc
4918  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
4919  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4920  mfem::Vector* txx = snes_ctx->work;
4921  bchandler->ApplyBC(xx,*txx);
4922  snes_ctx->op->Mult(*txx,ff);
4923  // and fix the residual (i.e. f_\partial\Omega = u - g)
4924  bchandler->FixResidualBC(xx,ff);
4925  }
4926  else
4927  {
4928  // use the Mult method of the class
4929  snes_ctx->op->Mult(xx,ff);
4930  }
4931  ff.UpdateVecFromFlags();
4932  PetscFunctionReturn(0);
4933 }
4934 
4935 static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
4936  void *ctx)
4937 {
4938  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4939 
4940  PetscFunctionBeginUser;
4941  if (!snes_ctx->objective)
4942  {
4943  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
4944  }
4945  mfem::PetscParVector xx(x,true);
4946  double lf;
4947  (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4948  *f = (PetscReal)lf;
4949  PetscFunctionReturn(0);
4950 }
4951 
4952 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4953  PetscBool *cy,PetscBool *cw, void* ctx)
4954 {
4955  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4956  bool lcy = false,lcw = false;
4957 
4958  PetscFunctionBeginUser;
4959  mfem::PetscParVector x(X,true);
4960  mfem::PetscParVector y(Y,true);
4961  mfem::PetscParVector w(W,true);
4962  (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4963  if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
4964  if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
4965  PetscFunctionReturn(0);
4966 }
4967 
4968 static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
4969 {
4970  Vec F,X,dX,pX;
4971  __mfem_snes_ctx* snes_ctx;
4972 
4973  PetscFunctionBeginUser;
4974  /* Update callback does not use the context */
4975  ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
4976  ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4977  if (!it)
4978  {
4979  ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4980  ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
4981  CHKERRQ(ierr);
4982  ierr = VecDestroy(&pX); CHKERRQ(ierr);
4983  }
4984  ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
4985  CHKERRQ(ierr);
4986  if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
4987  "Missing previous solution");
4988  ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4989  mfem::PetscParVector f(F,true);
4990  mfem::PetscParVector x(X,true);
4991  mfem::PetscParVector dx(dX,true);
4992  mfem::PetscParVector px(pX,true);
4993  (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4994  /* Store previous solution */
4995  ierr = VecCopy(X,pX); CHKERRQ(ierr);
4996  PetscFunctionReturn(0);
4997 }
4998 
4999 static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
5000  void* ctx)
5001 {
5002  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
5003 
5004  PetscFunctionBeginUser;
5005  if (!monctx)
5006  {
5007  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
5008  }
5009 
5010  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
5011  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
5012  monctx->monitor);
5013  if (user_monitor->mon_sol)
5014  {
5015  Vec x;
5016  PetscErrorCode ierr;
5017 
5018  ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
5019  mfem::PetscParVector V(x,true);
5020  user_monitor->MonitorSolution(it,res,V);
5021  }
5022  if (user_monitor->mon_res)
5023  {
5024  Vec x;
5025  PetscErrorCode ierr;
5026 
5027  ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5028  mfem::PetscParVector V(x,true);
5029  user_monitor->MonitorResidual(it,res,V);
5030  }
5031  user_monitor->MonitorSolver(solver);
5032  PetscFunctionReturn(0);
5033 }
5034 
5035 static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
5036 {
5037  mfem::Operator *op;
5038  PetscErrorCode ierr;
5039 
5040  PetscFunctionBeginUser;
5041  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5042  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5043  mfem::PetscParVector xx(x,true);
5044  mfem::PetscParVector yy(y,true);
5045  op->Mult(xx,yy);
5046  yy.UpdateVecFromFlags();
5047  PetscFunctionReturn(0);
5048 }
5049 
5050 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
5051 {
5052  mfem::Operator *op;
5053  PetscErrorCode ierr;
5054  PetscBool flg,symm;
5055 
5056  PetscFunctionBeginUser;
5057  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5058  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5059  mfem::PetscParVector xx(x,true);
5060  mfem::PetscParVector yy(y,true);
5061  ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5062  if (flg && symm)
5063  {
5064  op->Mult(xx,yy);
5065  }
5066  else
5067  {
5068  op->MultTranspose(xx,yy);
5069  }
5070  yy.UpdateVecFromFlags();
5071  PetscFunctionReturn(0);
5072 }
5073 
5074 static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
5075 {
5076  mfem::Operator *op;
5077  PetscErrorCode ierr;
5078 
5079  PetscFunctionBeginUser;
5080  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5081  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5082  ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
5083  PetscFunctionReturn(0);
5084 }
5085 
5086 static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
5087 {
5088  PetscFunctionBeginUser;
5089  PetscFunctionReturn(0);
5090 }
5091 
5092 static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
5093 {
5094  __mfem_pc_shell_ctx *ctx;
5095  PetscErrorCode ierr;
5096 
5097  PetscFunctionBeginUser;
5098  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5099  if (ctx->op)
5100  {
5101  PetscBool isascii;
5102  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5103  CHKERRQ(ierr);
5104 
5105  mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
5106  (ctx->op);
5107  if (ppc)
5108  {
5109  ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5110  }
5111  else
5112  {
5113  if (isascii)
5114  {
5115  ierr = PetscViewerASCIIPrintf(viewer,
5116  "No information available on the mfem::Solver\n");
5117  CHKERRQ(ierr);
5118  }
5119  }
5120  if (isascii && ctx->factory)
5121  {
5122  ierr = PetscViewerASCIIPrintf(viewer,
5123  "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5124  CHKERRQ(ierr);
5125  }
5126  }
5127  PetscFunctionReturn(0);
5128 }
5129 
5130 static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
5131 {
5132  __mfem_pc_shell_ctx *ctx;
5133  PetscErrorCode ierr;
5134 
5135  PetscFunctionBeginUser;
5136  mfem::PetscParVector xx(x,true);
5137  mfem::PetscParVector yy(y,true);
5138  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5139  if (ctx->op)
5140  {
5141  ctx->op->Mult(xx,yy);
5142  yy.UpdateVecFromFlags();
5143  }
5144  else // operator is not present, copy x
5145  {
5146  yy = xx;
5147  }
5148  PetscFunctionReturn(0);
5149 }
5150 
5151 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
5152 {
5153  __mfem_pc_shell_ctx *ctx;
5154  PetscErrorCode ierr;
5155 
5156  PetscFunctionBeginUser;
5157  mfem::PetscParVector xx(x,true);
5158  mfem::PetscParVector yy(y,true);
5159  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5160  if (ctx->op)
5161  {
5162  ctx->op->MultTranspose(xx,yy);
5163  yy.UpdateVecFromFlags();
5164  }
5165  else // operator is not present, copy x
5166  {
5167  yy = xx;
5168  }
5169  PetscFunctionReturn(0);
5170 }
5171 
5172 static PetscErrorCode __mfem_pc_shell_setup(PC pc)
5173 {
5174  __mfem_pc_shell_ctx *ctx;
5175 
5176  PetscFunctionBeginUser;
5177  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5178  if (ctx->factory)
5179  {
5180  // Delete any owned operator
5181  if (ctx->ownsop)
5182  {
5183  delete ctx->op;
5184  }
5185 
5186  // Get current preconditioning Mat
5187  Mat B;
5188  ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5189 
5190  // Call user-defined setup
5191  mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
5192  mfem::PetscPreconditionerFactory *factory = ctx->factory;
5193  ctx->op = factory->NewPreconditioner(hB);
5194  ctx->ownsop = true;
5195  ctx->numprec++;
5196  }
5197  PetscFunctionReturn(0);
5198 }
5199 
5200 static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
5201 {
5202  __mfem_pc_shell_ctx *ctx;
5203  PetscErrorCode ierr;
5204 
5205  PetscFunctionBeginUser;
5206  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5207  if (ctx->ownsop)
5208  {
5209  delete ctx->op;
5210  }
5211  delete ctx;
5212  PetscFunctionReturn(0);
5213 }
5214 
5215 static PetscErrorCode __mfem_array_container_destroy(void *ptr)
5216 {
5217  PetscErrorCode ierr;
5218 
5219  PetscFunctionBeginUser;
5220  ierr = PetscFree(ptr); CHKERRQ(ierr);
5221  PetscFunctionReturn(0);
5222 }
5223 
5224 static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
5225 {
5226  mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
5227  PetscErrorCode ierr;
5228 
5229  PetscFunctionBeginUser;
5230  for (int i=0; i<a->Size(); i++)
5231  {
5232  Mat M = (*a)[i];
5233  MPI_Comm comm = PetscObjectComm((PetscObject)M);
5234  ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5235  }
5236  delete a;
5237  PetscFunctionReturn(0);
5238 }
5239 
5240 static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
5241 {
5242  PetscErrorCode ierr;
5243 
5244  PetscFunctionBeginUser;
5245  ierr = PetscFree(*ctx); CHKERRQ(ierr);
5246  PetscFunctionReturn(0);
5247 }
5248 
5249 // Sets the type of PC to PCSHELL and wraps the solver action.
5250 // If ownsop is true, ownership of precond is transferred to the PETSc object
5251 PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
5252 {
5253  PetscFunctionBeginUser;
5254  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5255  ctx->op = &precond;
5256  ctx->ownsop = ownsop;
5257  ctx->factory = NULL;
5258  ctx->numprec = 0;
5259 
5260  // In case the PC was already of type SHELL, this will destroy any
5261  // previous user-defined data structure
5262  // We cannot call PCReset as it will wipe out any operator already set
5263  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5264 
5265  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5266  ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5267  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5268  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5269  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5270  CHKERRQ(ierr);
5271  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5272  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5273  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5274  PetscFunctionReturn(0);
5275 }
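// A minimal usage sketch (illustrative; ksp is an existing KSP and prec
// any mfem::Solver):
//
//   PC pc;
//   ierr = KSPGetPC(ksp,&pc); CHKERRQ(ierr);
//   ierr = MakeShellPC(pc,prec,false); CHKERRQ(ierr);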
5276 
5277 // Sets the type of PC to PCSHELL. Uses a PetscPreconditionerFactory to construct the solver
5278 // Takes ownership of the solver created by the factory
5279 PetscErrorCode MakeShellPCWithFactory(PC pc,
5280  mfem::PetscPreconditionerFactory *factory)
5281 {
5282  PetscFunctionBeginUser;
5283  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5284  ctx->op = NULL;
5285  ctx->ownsop = true;
5286  ctx->factory = factory;
5287  ctx->numprec = 0;
5288 
5289  // In case the PC was already of type SHELL, this will destroy any
5290  // previous user-defined data structure
5291  // We cannot call PCReset as it will wipe out any operator already set
5292  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5293 
5294  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5295  ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
5296  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5297  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5298  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5299  CHKERRQ(ierr);
5300  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5301  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5302  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5303  PetscFunctionReturn(0);
5304 }
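// A factory sketch (illustrative; assumes the PetscPreconditionerFactory
// interface declared in petsc.hpp, with MySolver a user-defined solver):
//
//   class MyFactory : public mfem::PetscPreconditionerFactory
//   {
//   public:
//      MyFactory() : mfem::PetscPreconditionerFactory("my factory") {}
//      mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh)
//      { return new MySolver(oh); }
//   };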
5305 
5306 // Converts from a list (or a marked Array if islist is false) to an IS
5307 // st indicates the offset where to start numbering
5308 static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
5309  const mfem::Array<int> *list,
5310  PetscInt st, IS* is)
5311 {
5312  PetscInt n = list ? list->Size() : 0,*idxs;
5313  const int *data = list ? list->GetData() : NULL;
5314  PetscErrorCode ierr;
5315 
5316  PetscFunctionBeginUser;
5317  ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5318  if (islist)
5319  {
5320  for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5321  }
5322  else
5323  {
5324  PetscInt cum = 0;
5325  for (PetscInt i=0; i<n; i++)
5326  {
5327  if (data[i]) { idxs[cum++] = i+st; }
5328  }
5329  n = cum;
5330  }
5331  ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5332  CHKERRQ(ierr);
5333  PetscFunctionReturn(0);
5334 }
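// Example (illustrative): with islist == false, list = {0,1,0,1} and
// st = 10 the created IS contains {11,13}; with islist == true and
// list = {2,5} it contains {12,15}.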
5335 
5336 // Converts from a marked Array of Vdofs to an IS
5337 // st indicates the offset where to start numbering
5338 // l2l is a vector of matrices generated during RAP
5339 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5340  mfem::Array<Mat> &pl2l,
5341  const mfem::Array<int> *mark,
5342  PetscInt st, IS* is)
5343 {
5344  mfem::Array<int> sub_dof_marker;
5345  mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
5346  PetscInt nl;
5347  PetscErrorCode ierr;
5348 
5349  PetscFunctionBeginUser;
5350  for (int i = 0; i < pl2l.Size(); i++)
5351  {
5352  PetscInt m,n,*ii,*jj;
5353  PetscBool done;
5354  ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
5355  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5356  MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
5357  ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5358 #if defined(PETSC_USE_64BIT_INDICES)
5359  int nnz = (int)ii[m];
5360  int *mii = new int[m+1];
5361  int *mjj = new int[nnz];
5362  for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5363  for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5364  l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
5365 #else
5366  l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
5367 #endif
5368  ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5369  (const PetscInt**)&ii,
5370  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5371  MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
5372  << i << " l2l matrix");
5373  }
5374  nl = 0;
5375  for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5376  sub_dof_marker.SetSize(nl);
5377  const int* vdata = mark->GetData();
5378  int* sdata = sub_dof_marker.GetData();
5379  int cumh = 0, cumw = 0;
5380  for (int i = 0; i < l2l.Size(); i++)
5381  {
5382  const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
5383  l2l[i]->Height());
5384  mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
5385  l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5386  cumh += l2l[i]->Height();
5387  cumw += l2l[i]->Width();
5388  }
5389  ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5390  for (int i = 0; i < pl2l.Size(); i++)
5391  {
5392  delete l2l[i];
5393  }
5394  PetscFunctionReturn(0);
5395 }
5396 
5397 #if !defined(PETSC_HAVE_HYPRE)
5398 
5399 #if defined(HYPRE_MIXEDINT)
5400 #error "HYPRE_MIXEDINT not supported"
5401 #endif
5402 
5403 #include "_hypre_parcsr_mv.h"
5404 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
5405 {
5406  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5407  hypre_CSRMatrix *hdiag,*hoffd;
5408  PetscScalar *da,*oa,*aptr;
5409  PetscInt *dii,*djj,*oii,*ojj,*iptr;
5410  PetscInt i,dnnz,onnz,m,n;
5411  PetscMPIInt size;
5412  PetscErrorCode ierr;
5413 
5414  PetscFunctionBeginUser;
5415  hdiag = hypre_ParCSRMatrixDiag(hA);
5416  hoffd = hypre_ParCSRMatrixOffd(hA);
5417  m = hypre_CSRMatrixNumRows(hdiag);
5418  n = hypre_CSRMatrixNumCols(hdiag);
5419  dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5420  onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5421  ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5422  ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5423  ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5424  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
5425  CHKERRQ(ierr);
5426  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
5427  CHKERRQ(ierr);
5428  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
5429  CHKERRQ(ierr);
5430  iptr = djj;
5431  aptr = da;
5432  for (i=0; i<m; i++)
5433  {
5434  PetscInt nc = dii[i+1]-dii[i];
5435  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5436  iptr += nc;
5437  aptr += nc;
5438  }
5439  ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5440  if (size > 1)
5441  {
5442  PetscInt *offdj,*coffd;
5443 
5444  ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5445  ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5446  ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5447  ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
5448  CHKERRQ(ierr);
5449  offdj = hypre_CSRMatrixJ(hoffd);
5450  coffd = hypre_ParCSRMatrixColMapOffd(hA);
5451  for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5452  ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
5453  CHKERRQ(ierr);
5454  iptr = ojj;
5455  aptr = oa;
5456  for (i=0; i<m; i++)
5457  {
5458  PetscInt nc = oii[i+1]-oii[i];
5459  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5460  iptr += nc;
5461  aptr += nc;
5462  }
5463  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5464  djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5465  }
5466  else
5467  {
5468  oii = ojj = NULL;
5469  oa = NULL;
5470  ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5471  }
5472  /* We are responsible for freeing the CSR arrays. However, since we can
5473  take references of a PetscParMatrix but not of raw PETSc arrays, we
5474  create PetscContainer objects that hold references to these arrays and
5475  attach them to the matrix */
5476  void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5477  const char *names[6] = {"_mfem_csr_dii",
5478  "_mfem_csr_djj",
5479  "_mfem_csr_da",
5480  "_mfem_csr_oii",
5481  "_mfem_csr_ojj",
5482  "_mfem_csr_oa"
5483  };
5484  for (i=0; i<6; i++)
5485  {
5486  PetscContainer c;
5487 
5488  ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5489  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5490  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5491  CHKERRQ(ierr);
5492  ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
5493  CHKERRQ(ierr);
5494  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5495  }
5496  PetscFunctionReturn(0);
5497 }
5498 
5499 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
5500 {
5501  Mat lA;
5502  ISLocalToGlobalMapping rl2g,cl2g;
5503  IS is;
5504  hypre_CSRMatrix *hdiag,*hoffd;
5505  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5506  void *ptrs[2];
5507  const char *names[2] = {"_mfem_csr_aux",
5508  "_mfem_csr_data"
5509  };
5510  PetscScalar *hdd,*hod,*aa,*data;
5511  PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5512  PetscInt *aux,*ii,*jj;
5513  PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5514  PetscErrorCode ierr;
5515 
5516  PetscFunctionBeginUser;
5517  /* access relevant information in ParCSR */
5518  str = hypre_ParCSRMatrixFirstRowIndex(hA);
5519  stc = hypre_ParCSRMatrixFirstColDiag(hA);
5520  hdiag = hypre_ParCSRMatrixDiag(hA);
5521  hoffd = hypre_ParCSRMatrixOffd(hA);
5522  dr = hypre_CSRMatrixNumRows(hdiag);
5523  dc = hypre_CSRMatrixNumCols(hdiag);
5524  nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5525  hdi = hypre_CSRMatrixI(hdiag);
5526  hdj = hypre_CSRMatrixJ(hdiag);
5527  hdd = hypre_CSRMatrixData(hdiag);
5528  oc = hypre_CSRMatrixNumCols(hoffd);
5529  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5530  hoi = hypre_CSRMatrixI(hoffd);
5531  hoj = hypre_CSRMatrixJ(hoffd);
5532  hod = hypre_CSRMatrixData(hoffd);
5533 
5534  /* generate l2g maps for rows and cols */
5535  ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5536  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5537  ierr = ISDestroy(&is); CHKERRQ(ierr);
5538  col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5539  ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5540  for (i=0; i<dc; i++) { aux[i] = i+stc; }
5541  for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5542  ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5543  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5544  ierr = ISDestroy(&is); CHKERRQ(ierr);
5545 
5546  /* create MATIS object */
5547  ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5548  ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5549  ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5550  ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5551  ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5552  ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5553 
5554  /* merge local matrices */
5555  ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5556  ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5557  ii = aux;
5558  jj = aux+dr+1;
5559  aa = data;
5560  *ii = *(hdi++) + *(hoi++);
5561  for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5562  {
5563  PetscScalar *aold = aa;
5564  PetscInt *jold = jj,nc = jd+jo;
5565  for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5566  for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5567  *(++ii) = *(hdi++) + *(hoi++);
5568  ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5569  }
5570  for (; cum<dr; cum++) { *(++ii) = nnz; }
5571  ii = aux;
5572  jj = aux+dr+1;
5573  aa = data;
5574  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5575  CHKERRQ(ierr);
5576  ptrs[0] = aux;
5577  ptrs[1] = data;
5578  for (i=0; i<2; i++)
5579  {
5580  PetscContainer c;
5581 
5582  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5583  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5584  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5585  CHKERRQ(ierr);
5586  ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
5587  CHKERRQ(ierr);
5588  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5589  }
5590  ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5591  ierr = MatDestroy(&lA); CHKERRQ(ierr);
5592  ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5593  ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5594  PetscFunctionReturn(0);
5595 }
5596 #endif
5597 
5598 #include <petsc/private/matimpl.h>
5599 
5600 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm, PetscInt m,
5601  PetscInt n, Mat *A)
5602 {
5603  PetscFunctionBegin;
5604  ierr = MatCreate(comm,A); CHKERRQ(ierr);
5605  ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5606  ierr = PetscObjectChangeTypeName((PetscObject)*A,"mfemdummy"); CHKERRQ(ierr);
5607  (*A)->preallocated = PETSC_TRUE;
5608  ierr = MatSetUp(*A); CHKERRQ(ierr);
5609  PetscFunctionReturn(0);
5610 }
5611 
5612 #include <petsc/private/vecimpl.h>
5613 
5614 #if defined(PETSC_HAVE_DEVICE)
5615 static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5616 {
5617  PetscFunctionBegin;
5618  v->offloadmask = m;
5619  PetscFunctionReturn(0);
5620 }
5621 #endif
5622 
5623 static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5624 {
5625  PetscFunctionBegin;
5626 #if defined(PETSC_HAVE_DEVICE)
5627  *flg = v->boundtocpu;
5628 #else
5629  *flg = PETSC_TRUE;
5630 #endif
5631  PetscFunctionReturn(0);
5632 }
5633 
5634 static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject o)
5635 {
5636  PetscErrorCode ierr;
5637 
5638  PetscFunctionBegin;
5639  ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5640  PetscFunctionReturn(0);
5641 }
5642 
5643 #endif // MFEM_USE_PETSC
5644 #endif // MFEM_USE_MPI
Definition: petsc.hpp:598
void Delete()
Delete the owned pointers and reset the Memory object.
int NumRowBlocks() const
Return the number of row blocks.
Base abstract class for first order time dependent operators.
Definition: operator.hpp:316
void MakeAliasForSync(const Memory< double > &base_, int offset_, int size_, bool usedev_)
Definition: petsc.hpp:99
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
Definition: vector.hpp:117
void SetType(PetscODESolver::Type)
Definition: petsc.cpp:4198
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
Definition: petsc.cpp:1301
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Definition: petsc.hpp:1003
Pointer to an Operator of a specified type.
Definition: handle.hpp:33
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
Definition: petsc.cpp:3812
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
Definition: petsc.cpp:672
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
Definition: operator.hpp:72
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
Definition: petsc.cpp:2297
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:3901
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
Definition: petsc.cpp:1883
T * GetData()
Returns the data.
Definition: array.hpp:115
int Size() const
Returns the size of the vector.
Definition: vector.hpp:199
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true...
Definition: device.hpp:336
void SetBlockSize(PetscInt rbs, PetscInt cbs=-1)
Set row and column block sizes of a matrix.
Definition: petsc.cpp:950
virtual void Step(Vector &x, double &t, double &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in]...
Definition: petsc.cpp:4216
struct ::_p_Mat * Mat
Definition: petsc.hpp:70
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
Definition: ode.hpp:22
void Init()
Initialize with defaults. Does not initialize inherited members.
Definition: petsc.cpp:956
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:3175
PetscClassId cid
The class id of the actual PETSc object.
Definition: petsc.hpp:682
void MultTranspose(double a, const Vector &x, double b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
Definition: petsc.cpp:1939
PetscInt GetRowStart() const
Returns the global index of the first local row.
Definition: petsc.cpp:901
ID for class PetscParMatrix, MATHYPRE format.
Definition: operator.hpp:292
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
virtual void SetTime(const double t_)
Set the current time.
Definition: operator.hpp:360
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
Definition: petsc.cpp:4047
void SetDataAndSize_()
Definition: petsc.cpp:218
Abstract parallel finite element space.
Definition: pfespace.hpp:28
Constant in time b.c.
Definition: petsc.hpp:594
void SetRelTol(double tol)
Definition: petsc.cpp:2309
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
Definition: operator.hpp:686
STL namespace.
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
PetscInt M() const
Returns the global number of rows.
Definition: petsc.cpp:929
void Randomize(PetscInt seed=0)
Set random values.
Definition: petsc.cpp:859
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
Definition: petsc.cpp:1702
void SyncBaseAndReset()
Definition: petsc.hpp:124
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
Definition: petsc.cpp:2471
void SetMaxIter(int max_iter)
Definition: petsc.cpp:2359
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:1254
PetscParVector & operator*=(PetscScalar d)
Definition: petsc.cpp:707
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
Definition: hypre.cpp:2850
void SortColumnIndices()
Sort the column indices corresponding to each row.
Definition: sparsemat.cpp:457
Memory< double > & GetMemory()
Return a reference to the Memory object used by the Vector.
Definition: vector.hpp:230
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, double diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
Definition: petsc.cpp:2183
double PetscReal
Definition: petsc.hpp:49
int NumColBlocks() const
Return the number of column blocks.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
Definition: petsc.cpp:1957
PetscParVector * Y
Definition: petsc.hpp:322
virtual ~PetscLinearSolver()
Definition: petsc.cpp:3133
struct ::_p_PC * PC
Definition: petsc.hpp:72
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray()...
Definition: petsc.cpp:726
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
Definition: petsc.cpp:658
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:2466
Type GetType() const
Definition: petsc.cpp:2239
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
Definition: petsc.cpp:4103
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
Definition: petsc.cpp:1903
ID for the base class Operator, i.e. any type.
Definition: operator.hpp:285
double f(const Vector &xvec)
Definition: lor_mms.hpp:32
void write(std::ostream &os, T value)
Write &#39;value&#39; to stream.
Definition: binaryio.hpp:30
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
Definition: petsc.hpp:679
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
Abstract class for monitoring PETSc&#39;s solvers.
Definition: petsc.hpp:981
bool DeviceRequested() const
Definition: petsc.hpp:140
bool Empty() const
Return true if the Memory object is empty, see Reset().
Data type sparse matrix.
Definition: sparsemat.hpp:50
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true, bool iter_mode=false)
Definition: petsc.cpp:2857
void SetJacobianType(Operator::Type type)
Definition: petsc.cpp:4003
const FiniteElementCollection * FEColl() const
Definition: fespace.hpp:601
struct ::_p_TS * TS
Definition: petsc.hpp:74
double b
Definition: lissajous.cpp:42
int to_int(const std::string &str)
Convert a string to an int.
Definition: text.hpp:62
bool isHomogeneous() const
True if type is HOMOGENEOUS.
Definition: operator.hpp:367
Auxiliary class for BDDC customization.
Definition: petsc.hpp:827
PetscInt GetNumRows() const
Returns the local number of rows.
Definition: petsc.cpp:915
virtual ~PetscPreconditioner()
Definition: petsc.cpp:3306
ID for class PetscParMatrix, unspecified format.
Definition: operator.hpp:293
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
Definition: petsc.cpp:2817
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
Definition: petsc.cpp:448
const int * HostReadJ() const
Definition: sparsemat.hpp:261
ParMesh * GetParMesh() const
Definition: pfespace.hpp:277
PetscParMatrix & operator=(const PetscParMatrix &B)
Definition: petsc.cpp:1069
virtual ~PetscParVector()
Calls PETSc&#39;s destroy function.
Definition: petsc.cpp:523
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
Definition: petsc.cpp:3821
PetscBCHandler * bchandler
Handler for boundary conditions.
Definition: petsc.hpp:688
PetscParMatrix & operator-=(const PetscParMatrix &B)
Definition: petsc.cpp:1100
struct ::_p_Vec * Vec
Definition: petsc.hpp:69
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
Definition: petsc.cpp:1982
void Zero(Vector &x)
Replace boundary dofs with 0.
Definition: petsc.cpp:2836
void Assign(const T *)
Copy data from a pointer. &#39;Size()&#39; elements are copied.
Definition: array.hpp:905
T read(std::istream &is)
Read a value from the stream and return it.
Definition: binaryio.hpp:37
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition: petsc.cpp:3301
void Shift(double s)
Shift diagonal by a constant.
Definition: petsc.cpp:2004
PetscInt GlobalSize() const
Returns the global number of rows.
Definition: petsc.cpp:456
ID for class PetscParMatrix, MATNEST format.
Definition: operator.hpp:291
int Capacity() const
Return the size of the allocated memory.
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
Definition: petsc.cpp:1893
void RAP(const DenseMatrix &A, const DenseMatrix &P, DenseMatrix &RAP)
Definition: densemat.cpp:3252
void SetTime(double t)
Sets the current time.
Definition: petsc.hpp:624
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true...
Definition: device.hpp:319
ID for class PetscParMatrix, MATSHELL format.
Definition: operator.hpp:290
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void SetMat(petsc::Mat newA)
Replace the inner Mat Object. The reference count of newA is increased.
Definition: petsc.cpp:1727
Biwise-OR of all CUDA backends.
Definition: device.hpp:88
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
Definition: petsc.cpp:963
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc&#39;s MATSHELL object and returns the Mat in B...
Definition: petsc.cpp:1267
int GetNumIterations()
Definition: petsc.cpp:2627
void Mult(double a, const Vector &x, double b, Vector &y) const
Matvec: y = a A x + b y.
Definition: petsc.cpp:1922
int GetVDim() const
Returns vector dimension.
Definition: fespace.hpp:581
MPI_Comm GetComm() const
MPI communicator.
Definition: hypre.hpp:534
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory<T> &mem, int size, false)
Definition: device.hpp:343
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Definition: petsc.cpp:2501
struct ::_p_KSP * KSP
Definition: petsc.hpp:71
Wrapper for hypre&#39;s parallel vector class.
Definition: hypre.hpp:161
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
Definition: petsc.cpp:2845
virtual void Eval(double t, Vector &g)
Boundary conditions evaluation.
Definition: petsc.hpp:614
Type
Enumeration defining IDs for some classes derived from Operator.
Definition: operator.hpp:283
virtual void ExplicitMult(const Vector &x, Vector &y) const
Perform the action of the explicit part of the operator, G: y = G(x, t) where t is the current time...
Definition: operator.cpp:282
struct ::_p_SNES * SNES
Definition: petsc.hpp:73
PetscInt N() const
Returns the global number of columns.
Definition: petsc.cpp:936
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
Definition: fe_coll.hpp:26
void SetFlagsFromMask_() const
Definition: petsc.cpp:268
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as s...
Definition: globals.hpp:66
MemoryType
Memory types supported by MFEM.
Definition: mem_manager.hpp:31
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
PetscInt GetNumCols() const
Returns the local number of columns.
Definition: petsc.cpp:922
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
Definition: array.hpp:684
HYPRE_Int HYPRE_BigInt
void SetJacobianType(Operator::Type type)
Definition: petsc.cpp:4186
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
Definition: operator.hpp:66
bool operatorset
Boolean to handle SetOperator calls.
Definition: petsc.hpp:694
PetscParVector * X
Auxiliary vectors for typecasting.
Definition: petsc.hpp:322
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
Definition: device.hpp:258
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
int SpaceDimension() const
Definition: mesh.hpp:1048
const int * HostReadI() const
Definition: sparsemat.hpp:245
int height
Dimension of the output / number of rows in the matrix.
Definition: operator.hpp:27
PetscInt NNZ() const
Returns the number of nonzeros.
Definition: petsc.cpp:943
PetscParVector & operator-=(const PetscParVector &y)
Definition: petsc.cpp:700
virtual int GetTrueVSize() const
Return the number of local vector true dofs.
Definition: pfespace.hpp:289
double a
Definition: lissajous.cpp:41
void MFEMFinalizePetsc()
Definition: petsc.cpp:192
void MakeDataOwner() const
Set the Vector data (host pointer) ownership flag.
Definition: vector.hpp:187
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory<T> &mem, int size, false)
Definition: device.hpp:326
virtual void Mult(const Vector &b, Vector &x) const
Application of the preconditioner.
Definition: petsc.cpp:3296
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true, or the mfem::Device&#39;s HostMemoryClass, otherwise.
Definition: device.hpp:353
const double * GetHostPointer() const
Definition: petsc.cpp:198
virtual ~PetscNonlinearSolver()
Definition: petsc.cpp:3933
virtual ~PetscODESolver()
Definition: petsc.cpp:4095
int GetConverged()
Definition: petsc.cpp:2594
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
Definition: petsc.cpp:2287
virtual Operator & GetExplicitGradient(const Vector &x) const
Return an Operator representing dG/dx at the given point x and the currently set time.
Definition: operator.cpp:312
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
Definition: petsc.cpp:2520
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object...
Definition: petsc.cpp:2226
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
Definition: petsc.cpp:505
Ordering::Type GetOrdering() const
Return the ordering method.
Definition: fespace.hpp:599
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
Definition: petsc.hpp:991
ID for class PetscParMatrix, MATAIJ format.
Definition: operator.hpp:288
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
Definition: petsc.cpp:3210
int dim
Definition: ex24.cpp:53
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
Definition: mem_manager.cpp:55
PetscParVector * X
Definition: petsc.hpp:685
struct _p_PetscObject * PetscObject
Definition: petsc.hpp:51
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
Definition: petsc.cpp:3856
void ResetMemory()
Completes the operation started with PlaceMemory.
Definition: petsc.cpp:815
void SetAbsTol(double tol)
Definition: petsc.cpp:2334
Helper class for handling essential boundary conditions.
Definition: petsc.hpp:588
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
Definition: petsc.hpp:621
void SetTol(double tol)
Definition: petsc.cpp:2304
void Customize(bool customize=true) const
Customize object with options set.
Definition: petsc.cpp:2561
PetscParVector & operator=(PetscScalar d)
Set constant values.
Definition: petsc.cpp:651
int Size() const
Return the logical size of the array.
Definition: array.hpp:141
RefCoord t[3]
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
Definition: petsc.hpp:997
MPI_Comm GetComm() const
Definition: pfespace.hpp:273
bool WriteRequested() const
Definition: petsc.hpp:135
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
Definition: petsc.cpp:2023
Vector data type.
Definition: vector.hpp:60
Identity Operator I: x -> x.
Definition: operator.hpp:705
bool UseDevice() const
Read the internal device flag.
bool IsAliasForSync() const
Definition: petsc.hpp:97
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array...
Definition: petsc.cpp:721
void FreePrivateContext()
Definition: petsc.cpp:2717
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Definition: ode.hpp:26
RefCoord s[3]
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:4070
bool ReadRequested() const
Definition: petsc.hpp:130
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
Definition: petsc.cpp:2212
Base class for solvers.
Definition: operator.hpp:682
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition: petsc.cpp:3128
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool iter_mode=false)
Definition: petsc.cpp:3143
Abstract operator.
Definition: operator.hpp:24
Wrapper for hypre&#39;s ParCSR matrix class.
Definition: hypre.hpp:343
A class to handle Block systems in a matrix-free implementation.
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
Definition: operator.hpp:365
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory<T> &mem, int size, false)
Definition: device.hpp:360
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
Definition: petsc.cpp:874
void operator*=(double s)
Scale all entries by s: A_scaled = s*A.
Definition: petsc.cpp:1917
HYPRE_BigInt * GetTrueDofOffsets() const
Definition: pfespace.hpp:282
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
Definition: petsc.cpp:2745
Vector * GlobalVector() const
Returns the global vector in each processor.
Definition: petsc.cpp:626
ID for class PetscParMatrix, MATIS format.
Definition: operator.hpp:289
PetscInt GetColStart() const
Returns the global index of the first local column.
Definition: petsc.cpp:908
void PlaceMemory(Memory< double > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding ...
Definition: petsc.cpp:731
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:621
int width
Dimension of the input / number of columns in the matrix.
Definition: operator.hpp:28
void * private_ctx
Private context for solver.
Definition: petsc.hpp:691
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
Definition: petsc.cpp:2752
virtual void ImplicitMult(const Vector &x, const Vector &k, Vector &y) const
Perform the action of the implicit part of the operator, F: y = F(x, k, t) where t is the current tim...
Definition: operator.cpp:287
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
Definition: petsc.cpp:1873
void ScaleCols(const Vector &s)
Scale the local col i by s(i).
Definition: petsc.cpp:1993
double f(const Vector &p)