MFEM  v4.4.0
Finite element discretization library
petsc.cpp
1 // Copyright (c) 2010-2022, Lawrence Livermore National Security, LLC. Produced
2 // at the Lawrence Livermore National Laboratory. All Rights reserved. See files
3 // LICENSE and NOTICE for details. LLNL-CODE-806117.
4 //
5 // This file is part of the MFEM library. For more information and source code
6 // availability visit https://mfem.org.
7 //
8 // MFEM is free software; you can redistribute it and/or modify it under the
9 // terms of the BSD-3 license. We welcome feedback and contributions, see file
10 // CONTRIBUTING.md for details.
11 
12 // Author: Stefano Zampini <stefano.zampini@gmail.com>
13 
14 #include "../config/config.hpp"
15 
16 #ifdef MFEM_USE_MPI
17 #ifdef MFEM_USE_PETSC
18 
19 #include "linalg.hpp"
20 #include "../fem/fem.hpp"
21 
22 #include "petsc.h"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
25 #endif
26 
27 // Backward compatibility
28 #if PETSC_VERSION_LT(3,11,0)
29 #define VecLockReadPush VecLockPush
30 #define VecLockReadPop VecLockPop
31 #endif
32 #if PETSC_VERSION_LT(3,12,0)
33 #define VecGetArrayWrite VecGetArray
34 #define VecRestoreArrayWrite VecRestoreArray
35 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
37 #endif
38 
39 #include <fstream>
40 #include <iomanip>
41 #include <cmath>
42 #include <cstdlib>
43 
44 // Note: there are additional #include statements below.
45 
46 #include "petscinternals.hpp"
47 
48 // Callback functions: these functions will be called by PETSc
49 static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
50 static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
51 static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
52  void*);
53 static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
54 static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
55  PetscReal,Mat,
56  Mat,void*);
57 static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
58  Mat,Mat,Mat,Mat);
59 static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
60 static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
61 static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
62 static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
63 static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
64 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
65  PetscBool*,PetscBool*,void*);
66 static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
67 static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
68 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
69 static PetscErrorCode __mfem_pc_shell_setup(PC);
70 static PetscErrorCode __mfem_pc_shell_destroy(PC);
71 static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
72 static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
73 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
74 static PetscErrorCode __mfem_mat_shell_destroy(Mat);
75 static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
76 static PetscErrorCode __mfem_array_container_destroy(void*);
77 static PetscErrorCode __mfem_matarray_container_destroy(void*);
78 static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
79 
80 // auxiliary functions
81 static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
82  PetscInt,IS*);
83 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
84  const mfem::Array<int>*,PetscInt,IS*);
85 static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
86 static PetscErrorCode MakeShellPCWithFactory(PC,
87  mfem::PetscPreconditionerFactory*);
88 
89 // Equivalent functions are present in PETSc source code
90 // if PETSc has been compiled with hypre support
91 // We provide them here in case PETSC_HAVE_HYPRE is not defined
92 #if !defined(PETSC_HAVE_HYPRE)
93 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
94 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
95 #endif
96 
97 #if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
98 #if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
99 #ifndef _USE_DEVICE
100 #define _USE_DEVICE
101 #endif
102 #endif
103 #endif
104 
105 #if defined(PETSC_HAVE_DEVICE)
106 static PetscErrorCode __mfem_VecSetOffloadMask(Vec,PetscOffloadMask);
107 #endif
108 static PetscErrorCode __mfem_VecBoundToCPU(Vec,PetscBool*);
109 static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject);
110 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm,PetscInt,PetscInt,Mat*);
111 
112 // structs used by PETSc code
113 typedef struct
114 {
115  mfem::Solver *op;
116  mfem::PetscPreconditionerFactory *factory;
117  bool ownsop;
118  unsigned long int numprec;
119 } __mfem_pc_shell_ctx;
120 
121 typedef struct
122 {
123  mfem::Operator *op; // The nonlinear operator
124  mfem::PetscBCHandler *bchandler; // Handling of essential bc
125  mfem::Vector *work; // Work vector
126  mfem::Operator::Type jacType; // OperatorType for the Jacobian
127  // Objective for line search
128  void (*objective)(mfem::Operator *op, const mfem::Vector&, double*);
129  // PostCheck function (to be called after successful line search)
130  void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
131  mfem::Vector&, bool&, bool&);
132  // General purpose update function (to be called at the beginning of
133  // each nonlinear step)
134  void (*update)(mfem::Operator *op, int,
135  const mfem::Vector&, const mfem::Vector&,
136  const mfem::Vector&, const mfem::Vector&);
137 } __mfem_snes_ctx;
138 
139 typedef struct
140 {
141  mfem::TimeDependentOperator *op; // The time-dependent operator
142  mfem::PetscBCHandler *bchandler; // Handling of essential bc
143  mfem::Vector *work; // Work vector
144  mfem::Vector *work2; // Work vector
145  mfem::Operator::Type jacType; // OperatorType for the Jacobian
146  enum mfem::PetscODESolver::Type type;
147  PetscReal cached_shift;
148  PetscObjectState cached_ijacstate;
149  PetscObjectState cached_rhsjacstate;
150  PetscObjectState cached_splits_xstate;
151  PetscObjectState cached_splits_xdotstate;
152 } __mfem_ts_ctx;
153 
154 typedef struct
155 {
156  mfem::PetscSolver *solver; // The solver object
157  mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
158 } __mfem_monitor_ctx;
159 
160 // use global scope ierr to check PETSc errors inside mfem calls
161 static PetscErrorCode ierr;
162 
163 using namespace std;
164 
165 namespace mfem
166 {
167 
168 void MFEMInitializePetsc()
169 {
170  MFEMInitializePetsc(NULL,NULL,NULL,NULL);
171 }
172 
173 void MFEMInitializePetsc(int *argc,char*** argv)
174 {
175  MFEMInitializePetsc(argc,argv,NULL,NULL);
176 }
177 
178 void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
179  const char help[])
180 {
181  if (mfem::Device::Allows(mfem::Backend::CUDA_MASK))
182  {
183  // Tell PETSc to use the same CUDA device as MFEM:
184  ierr = PetscOptionsSetValue(NULL,"-cuda_device",
185  to_string(mfem::Device::GetId()).c_str());
186  MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
187  }
188  ierr = PetscInitialize(argc,argv,rc_file,help);
189  MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
190 }
191 
192 void MFEMFinalizePetsc()
193 {
194  ierr = PetscFinalize();
195  MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
196 }
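// Usage sketch (not part of the original file): a minimal driver that routes
// initialization and cleanup through the helpers above. MPI is assumed to be
// initialized by the application; the rc file and help string are optional.
//
//   int main(int argc, char *argv[])
//   {
//      MPI_Init(&argc, &argv);
//      mfem::MFEMInitializePetsc(&argc, &argv);
//      // ... build spaces, assemble, solve with the PETSc-based classes ...
//      mfem::MFEMFinalizePetsc();
//      MPI_Finalize();
//      return 0;
//   }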
197 
198 const double* PetscMemory::GetHostPointer() const
199 {
200  int oflags = flags;
201  SetHostValid();
202  const double *v = mfem::Read(*this,Capacity(),false);
203  flags = oflags;
204  return v;
205 }
206 
207 const double* PetscMemory::GetDevicePointer() const
208 {
209  int oflags = flags;
210  SetDeviceValid();
211  const double *v = mfem::Read(*this,Capacity(),true);
212  flags = oflags;
213  return v;
214 }
215 
216 // PetscParVector methods
217 
218 void PetscParVector::SetDataAndSize_()
219 {
220  PetscScalar *array;
221  PetscInt n;
222 
223  MFEM_VERIFY(x,"Missing Vec");
224  ierr = VecSetUp(x); PCHKERRQ(x,ierr);
225  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
226  MFEM_VERIFY(n >= 0,"Invalid local size");
227  size = n;
228 #if defined(PETSC_HAVE_DEVICE)
229  PetscOffloadMask omask;
230  PetscBool iscuda;
231 
232  ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
233  if (omask != PETSC_OFFLOAD_BOTH)
234  {
235  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
236  }
237 #endif
238  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
239 #if defined(PETSC_HAVE_DEVICE)
240  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
241  ""); PCHKERRQ(x,ierr);
242  if (iscuda)
243  {
244  if (omask != PETSC_OFFLOAD_BOTH)
245  {
246  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
247  }
248  PetscScalar *darray;
249  ierr = VecCUDAGetArrayRead(x,(const PetscScalar**)&darray); PCHKERRQ(x,ierr);
250  pdata.Wrap(array,darray,size,MemoryType::HOST,false);
251  ierr = VecCUDARestoreArrayRead(x,(const PetscScalar**)&darray);
252  PCHKERRQ(x,ierr);
253  }
254  else
255 #endif
256  {
257  pdata.Wrap(array,size,MemoryType::HOST,false);
258  }
259  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
260 
261 #if defined(PETSC_HAVE_DEVICE)
262  ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
263 #endif
264  data.MakeAlias(pdata,0,size);
265  SetFlagsFromMask_();
266 }
267 
268 void PetscParVector::SetFlagsFromMask_() const
269 {
270  MFEM_VERIFY(x,"Missing Vec");
271 #if defined(_USE_DEVICE)
272  PetscOffloadMask mask;
273  PetscBool iscuda;
274  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
275  ""); PCHKERRQ(x,ierr);
276  ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
277  if (iscuda)
278  {
279  switch (mask)
280  {
281  case PETSC_OFFLOAD_CPU:
282  pdata.SetHostValid();
283  pdata.SetDeviceInvalid();
284  break;
285  case PETSC_OFFLOAD_GPU:
286  pdata.SetHostInvalid();
287  pdata.SetDeviceValid();
288  break;
289  case PETSC_OFFLOAD_BOTH:
290  pdata.SetHostValid();
291  pdata.SetDeviceValid();
292  break;
293  default:
294  MFEM_ABORT("Unhandled case " << mask);
295  }
296  }
297 #endif
298  data.Sync(pdata);
299 }
300 
301 void PetscParVector::UpdateVecFromFlags()
302 {
303  MFEM_VERIFY(x,"Missing Vec");
304  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
305 #if defined(_USE_DEVICE)
306  PetscBool iscuda;
307  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
308  ""); PCHKERRQ(x,ierr);
309  if (iscuda)
310  {
311  bool dv = pdata.DeviceIsValid();
312  bool hv = pdata.HostIsValid();
313  PetscOffloadMask mask;
314  if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
315  else if (dv) { mask = PETSC_OFFLOAD_GPU; }
316  else { mask = PETSC_OFFLOAD_CPU; }
317  ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
318  }
319  else
320 #endif
321  {
322  /* Just make sure we have an up-to-date copy on the CPU for PETSc */
323  PetscScalar *v;
324  ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
325  pdata.CopyToHost(v,size);
326  ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
327  }
328 }
329 
330 void PetscParVector::SetVecType_()
331 {
332  VecType vectype;
333  MFEM_VERIFY(x,"Missing Vec");
334  ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
335 #if defined(_USE_DEVICE)
336  switch (Device::GetDeviceMemoryType())
337  {
338  case MemoryType::DEVICE:
339  case MemoryType::MANAGED:
340  ierr = VecSetType(x,VECCUDA); PCHKERRQ(x,ierr);
341  break;
342  default:
343  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
344  break;
345  }
346 #else
347  if (!vectype)
348  {
349  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
350  }
351 #endif
352 }
353 
354 const double* PetscParVector::Read(bool on_dev) const
355 {
356  const PetscScalar *dummy;
357  MFEM_VERIFY(x,"Missing Vec");
358 #if defined(PETSC_HAVE_DEVICE)
359  PetscBool iscuda;
360  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
361  ""); PCHKERRQ(x,ierr);
362  if (on_dev && iscuda)
363  {
364  ierr = VecCUDAGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
365  ierr = VecCUDARestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
366  }
367  else
368 #endif
369  {
370  ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
371  ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
372  }
373  SetFlagsFromMask_();
374  return mfem::Read(pdata, size, on_dev);
375 }
376 
377 const double* PetscParVector::HostRead() const
378 {
379  return Read(false);
380 }
381 
382 double* PetscParVector::Write(bool on_dev)
383 {
384  PetscScalar *dummy;
385  MFEM_VERIFY(x,"Missing Vec");
386 #if defined(PETSC_HAVE_DEVICE)
387  PetscBool iscuda;
388  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
389  ""); PCHKERRQ(x,ierr);
390  if (on_dev && iscuda)
391  {
392  ierr = VecCUDAGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
393  ierr = VecCUDARestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
394  }
395  else
396 #endif
397  {
398  ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
399  ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
400  }
401  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
402  SetFlagsFromMask_();
403  return mfem::Write(pdata, size, on_dev);
404 }
405 
406 double* PetscParVector::HostWrite()
407 {
408  return Write(false);
409 }
410 
411 double* PetscParVector::ReadWrite(bool on_dev)
412 {
413  PetscScalar *dummy;
414  MFEM_VERIFY(x,"Missing Vec");
415 #if defined(PETSC_HAVE_DEVICE)
416  PetscBool iscuda;
417  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
418  ""); PCHKERRQ(x,ierr);
419  if (on_dev && iscuda)
420  {
421  ierr = VecCUDAGetArray(x,&dummy); PCHKERRQ(x,ierr);
422  ierr = VecCUDARestoreArray(x,&dummy); PCHKERRQ(x,ierr);
423  }
424  else
425 #endif
426  {
427  ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
428  ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
429  }
430  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
431  SetFlagsFromMask_();
432  return mfem::ReadWrite(pdata, size, on_dev);
433 }
434 
435 double* PetscParVector::HostReadWrite()
436 {
437  return ReadWrite(false);
438 }
439 
440 void PetscParVector::UseDevice(bool dev) const
441 {
442  MFEM_VERIFY(x,"Missing Vec");
443 #if defined(PETSC_HAVE_DEVICE)
444  ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
445 #endif
446 }
447 
448 bool PetscParVector::UseDevice() const
449 {
450  PetscBool flg;
451  MFEM_VERIFY(x,"Missing Vec");
452  ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
453  return flg ? false : true;
454 }
455 
456 PetscInt PetscParVector::GlobalSize() const
457 {
458  PetscInt N;
459  ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
460  return N;
461 }
462 
463 PetscParVector::PetscParVector(MPI_Comm comm, const Vector &x_,
464  bool copy) : Vector()
465 {
466  int n = x_.Size();
467  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
468  ierr = VecSetSizes(x,n,PETSC_DECIDE); PCHKERRQ(x,ierr);
469  SetVecType_();
470  SetDataAndSize_();
471  if (copy)
472  {
473 
474  /* we use PETSc accessors to flag valid memory location to PETSc */
475  PetscErrorCode (*rest)(Vec,PetscScalar**);
476  PetscScalar *array;
477 #if defined(PETSC_HAVE_DEVICE)
478  PetscBool iscuda;
479  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
480  ""); PCHKERRQ(x,ierr);
481  if (iscuda && x_.UseDevice())
482  {
483  UseDevice(true);
484  ierr = VecCUDAGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
485  rest = VecCUDARestoreArrayWrite;
486  }
487  else
488 #endif
489  {
490  UseDevice(false);
491  ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
492  rest = VecRestoreArrayWrite;
493  }
494  pdata.CopyFrom(x_.GetMemory(), n);
495  ierr = (*rest)(x,&array); PCHKERRQ(x,ierr);
496  SetFlagsFromMask_();
497  }
498 }
499 
500 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
501  PetscInt *col) : Vector()
502 {
503  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
504  if (col)
505  {
506  PetscMPIInt myid;
507  MPI_Comm_rank(comm, &myid);
508  ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
509  }
510  else
511  {
512  ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
513  }
514  SetVecType_();
515  SetDataAndSize_();
516 }
517 
518 PetscParVector::~PetscParVector()
519 {
520  MPI_Comm comm = PetscObjectComm((PetscObject)x);
521  ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
522  pdata.Delete();
523 }
524 
525 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
526  PetscScalar *data_, PetscInt *col) : Vector()
527 {
528  MFEM_VERIFY(col,"Missing distribution");
529  PetscMPIInt myid;
530  MPI_Comm_rank(comm, &myid);
531  ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
532  &x); CCHKERRQ(comm,ierr)
533  SetVecType_();
534  SetDataAndSize_();
535 }
536 
537 PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
538 {
539  ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
540  SetDataAndSize_();
541 }
542 
543 PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
544  bool transpose, bool allocate) : Vector()
545 {
546  PetscInt loc = transpose ? op.Height() : op.Width();
547 
548  ierr = VecCreate(comm,&x);
549  CCHKERRQ(comm,ierr);
550  ierr = VecSetSizes(x,loc,PETSC_DECIDE);
551  PCHKERRQ(x,ierr);
552 
553  SetVecType_();
554  if (allocate)
555  {
556  SetDataAndSize_();
557  }
558  else /* Vector intended to be used with Place/ResetMemory calls */
559  {
560  size = loc;
561  }
562 }
563 
564 PetscParVector::PetscParVector(const PetscParMatrix &A,
565  bool transpose, bool allocate) : Vector()
566 {
567  Mat pA = const_cast<PetscParMatrix&>(A);
568  if (!transpose)
569  {
570  ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
571  }
572  else
573  {
574  ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
575  }
576  SetVecType_();
577  if (!allocate) /* Vector intended to be used with Place/ResetMemory calls */
578  {
579  PetscInt n;
580  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
581  size = n;
582  }
583  else
584  {
585  SetDataAndSize_();
586  }
587 }
588 
589 PetscParVector::PetscParVector(petsc::Vec y, bool ref) : Vector()
590 {
591  if (ref)
592  {
593  ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
594  }
595  x = y;
596  SetDataAndSize_();
597 }
598 
599 PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
600 {
601  HYPRE_BigInt* offsets = pfes->GetTrueDofOffsets();
602  MPI_Comm comm = pfes->GetComm();
603  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
604 
605  PetscMPIInt myid = 0;
606  if (!HYPRE_AssumedPartitionCheck())
607  {
608  MPI_Comm_rank(comm,&myid);
609  }
610  ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
611  PCHKERRQ(x,ierr);
612  SetVecType_();
613  SetDataAndSize_();
614 }
615 
616 MPI_Comm PetscParVector::GetComm() const
617 {
618  return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
619 }
620 
621 Vector * PetscParVector::GlobalVector() const
622 {
623  VecScatter scctx;
624  Vec vout;
625  const PetscScalar *array;
626  PetscInt size;
627 
628  ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
629  ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
630  PCHKERRQ(x,ierr);
631  ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
632  PCHKERRQ(x,ierr);
633  ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
634  ierr = VecGetArrayRead(vout,&array); PCHKERRQ(x,ierr);
635  ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
636  Array<PetscScalar> data(size);
637  data.Assign(array);
638  ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(x,ierr);
639  ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
640  Vector *v = new Vector(data, internal::to_int(size));
641  v->MakeDataOwner();
642  data.LoseData();
643  return v;
644 }
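// Usage sketch (illustrative only): GlobalVector() scatters the distributed
// entries to every rank and returns a newly allocated serial Vector, so it is
// mainly useful for small problems or debugging; the caller owns the result.
//
//   mfem::PetscParVector pv(MPI_COMM_WORLD, 100, NULL); // 100 global entries
//   pv.Randomize(0);
//   mfem::Vector *g = pv.GlobalVector(); // full copy on every rank
//   delete g;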
645 
646 PetscParVector& PetscParVector::operator=(PetscScalar d)
647 {
648  ierr = VecSet(x,d); PCHKERRQ(x,ierr);
649  SetFlagsFromMask_();
650  return *this;
651 }
652 
653 PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
654  const Array<PetscScalar>& vals)
655 {
656  MFEM_VERIFY(idx.Size() == vals.Size(),
657  "Size mismatch between indices and values");
658  PetscInt n = idx.Size();
659  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
660  PCHKERRQ(x,ierr);
661  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
662  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
663  SetFlagsFromMask_();
664  return *this;
665 }
666 
667 PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
668  const Array<PetscScalar>& vals)
669 {
670  MFEM_VERIFY(idx.Size() == vals.Size(),
671  "Size mismatch between indices and values");
672  PetscInt n = idx.Size();
673  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
674  PCHKERRQ(x,ierr);
675  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
676  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
677  SetFlagsFromMask_();
678  return *this;
679 }
680 
681 PetscParVector& PetscParVector::operator=(const PetscParVector &y)
682 {
683  ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
684  SetFlagsFromMask_();
685  return *this;
686 }
687 
688 PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
689 {
690  ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
691  SetFlagsFromMask_();
692  return *this;
693 }
694 
695 PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
696 {
697  ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
698  SetFlagsFromMask_();
699  return *this;
700 }
701 
702 PetscParVector& PetscParVector::operator*=(PetscScalar s)
703 {
704  ierr = VecScale(x,s); PCHKERRQ(x,ierr);
705  SetFlagsFromMask_();
706  return *this;
707 }
708 
709 PetscParVector& PetscParVector::operator+=(PetscScalar s)
710 {
711  ierr = VecShift(x,s); PCHKERRQ(x,ierr);
712  SetFlagsFromMask_();
713  return *this;
714 }
715 
716 void PetscParVector::PlaceArray(PetscScalar *temp_data)
717 {
718  ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
719 }
720 
721 void PetscParVector::ResetArray()
722 {
723  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
724 }
725 
726 void PetscParVector::PlaceMemory(Memory<double>& mem, bool rw)
727 {
728  PetscInt n;
729 
730  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
731  MFEM_VERIFY(n <= mem.Capacity(),
732  "Memory size " << mem.Capacity() << " < " << n << " vector size!");
733  MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
734  MFEM_VERIFY(data.Empty(),"Vector data is not empty");
735 #if defined(_USE_DEVICE)
736  PetscBool iscuda;
737  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
738  ""); PCHKERRQ(x,ierr);
739  if (iscuda)
740  {
741  bool usedev = mem.DeviceIsValid() || (!rw && mem.UseDevice());
742  pdata.MakeAliasForSync(mem,0,n,rw,true,usedev);
743  if (usedev)
744  {
745  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
746  ierr = VecCUDAPlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
747  }
748  else
749  {
750  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
751  ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
752  }
753  }
754  else
755 #endif
756  {
757  double *w = rw ? mfem::HostReadWrite(mem,size) : mfem::HostWrite(mem,size);
758  pdata.MakeAliasForSync(mem,0,n,rw,true,false);
759 #if defined(PETSC_HAVE_DEVICE)
760  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
761 #endif
762  ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
763  }
764  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
766 }
767 
768 void PetscParVector::PlaceMemory(const Memory<double>& mem)
769 {
770  PetscInt n;
771 
772  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
773  MFEM_VERIFY(n <= mem.Capacity(),
774  "Memory size " << mem.Capacity() << " < " << n << " vector size!");
775  MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
776  MFEM_VERIFY(data.Empty(),"Vector data is not empty");
777 #if defined(_USE_DEVICE)
778  PetscBool iscuda;
779  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
780  ""); PCHKERRQ(x,ierr);
781  if (iscuda)
782  {
783  pdata.MakeAliasForSync(mem,0,n,mem.DeviceIsValid());
784  if (mem.DeviceIsValid())
785  {
786  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
787  ierr = VecCUDAPlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
788  }
789  else
790  {
791  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
792  ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
793  }
794  }
795  else
796 #endif
797  {
798  const double *w = mfem::HostRead(mem,size);
799  pdata.MakeAliasForSync(mem,0,n,false);
800 #if defined(PETSC_HAVE_DEVICE)
801  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
802 #endif
803  ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
804  }
806  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
807  ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
808 }
809 
810 void PetscParVector::ResetMemory()
811 {
812  MFEM_VERIFY(pdata.IsAliasForSync(),"Vector data is not an alias");
813  MFEM_VERIFY(!pdata.Empty(),"Vector data is empty");
814  bool read = pdata.ReadRequested();
815  bool usedev = pdata.DeviceRequested();
816  bool write = pdata.WriteRequested();
817  /*
818  check for strange corner cases
819  - device memory used but somehow PETSc ended up putting up to date data on host
820  - host memory used but somehow PETSc ended up putting up to date data on device
821  */
822  if (write)
823  {
824  const PetscScalar *v;
825 #if defined(PETSC_HAVE_DEVICE)
826  PetscOffloadMask mask;
827  ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
828  if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
829  (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
830 #endif
831  {
832  ierr = VecGetArrayRead(x,&v); PCHKERRQ(x,ierr);
833  pdata.CopyFromHost(v, size);
834  ierr = VecRestoreArrayRead(x,&v); PCHKERRQ(x,ierr);
835  }
836  }
838  data.Reset();
839  if (read && !write) { ierr = VecLockReadPop(x); PCHKERRQ(x,ierr); }
840  if (usedev)
841  {
842 #if defined(PETSC_HAVE_DEVICE)
843  ierr = VecCUDAResetArray(x); PCHKERRQ(x,ierr);
844 #else
845  MFEM_VERIFY(false,"This should not happen");
846 #endif
847  }
848  else
849  {
850  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
851  }
852 }
853 
854 void PetscParVector::Randomize(PetscInt seed)
855 {
856  PetscRandom rctx = NULL;
857 
858  if (seed)
859  {
860  ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
861  PCHKERRQ(x,ierr);
862  ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
863  ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
864  }
865  ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
866  ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
867 }
868 
869 void PetscParVector::Print(const char *fname, bool binary) const
870 {
871  if (fname)
872  {
873  PetscViewer view;
874 
875  if (binary)
876  {
877  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
878  FILE_MODE_WRITE,&view);
879  }
880  else
881  {
882  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
883  }
884  PCHKERRQ(x,ierr);
885  ierr = VecView(x,view); PCHKERRQ(x,ierr);
886  ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
887  }
888  else
889  {
890  ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
891  }
892 }
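// Usage sketch (illustrative only): wrap an existing mfem::Vector, copying its
// entries into a PETSc Vec, and dump it through the PETSc viewers used by
// Print(); pass binary=true to write a PETSc binary file instead of ASCII.
//
//   mfem::Vector v(10);
//   v = 1.0;
//   mfem::PetscParVector pv(MPI_COMM_WORLD, v, /*copy=*/true);
//   pv.Print("vec.txt", /*binary=*/false);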
893 
894 // PetscParMatrix methods
895 
896 PetscInt PetscParMatrix::GetRowStart() const
897 {
898  PetscInt N;
899  ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
900  return N;
901 }
902 
903 PetscInt PetscParMatrix::GetColStart() const
904 {
905  PetscInt N;
906  ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
907  return N;
908 }
909 
910 PetscInt PetscParMatrix::GetNumRows() const
911 {
912  PetscInt N;
913  ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
914  return N;
915 }
916 
917 PetscInt PetscParMatrix::GetNumCols() const
918 {
919  PetscInt N;
920  ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
921  return N;
922 }
923 
924 PetscInt PetscParMatrix::M() const
925 {
926  PetscInt N;
927  ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
928  return N;
929 }
930 
931 PetscInt PetscParMatrix::N() const
932 {
933  PetscInt N;
934  ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
935  return N;
936 }
937 
938 PetscInt PetscParMatrix::NNZ() const
939 {
940  MatInfo info;
941  ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
942  return (PetscInt)info.nz_used;
943 }
944 
945 void PetscParMatrix::Init()
946 {
947  A = NULL;
948  X = Y = NULL;
949  height = width = 0;
950 }
951 
952 PetscParMatrix::PetscParMatrix()
953 {
954  Init();
955 }
956 
957 PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
958  const mfem::Array<PetscInt>& rows, const mfem::Array<PetscInt>& cols)
959 {
960  Init();
961 
962  Mat B = const_cast<PetscParMatrix&>(pB);
963 
964  IS isr,isc;
965  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
966  rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
967  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
968  cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
969  ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
970  ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
971  ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
972 
973  height = GetNumRows();
974  width = GetNumCols();
975 }
976 
977 PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
978 {
979  Init();
980  height = pa->Height();
981  width = pa->Width();
982  ConvertOperator(pa->GetComm(),*pa,&A,tid);
983 }
984 
985 PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
986 {
987  Init();
988  height = ha->Height();
989  width = ha->Width();
990  ConvertOperator(ha->GetComm(),*ha,&A,tid);
991 }
992 
993 PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
994 {
995  Init();
996  height = sa->Height();
997  width = sa->Width();
998  ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
999 }
1000 
1001 PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
1002  Operator::Type tid)
1003 {
1004  Init();
1005  height = op->Height();
1006  width = op->Width();
1007  ConvertOperator(comm,*op,&A,tid);
1008 }
1009 
1010 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
1011  PetscInt *row_starts, SparseMatrix *diag,
1012  Operator::Type tid)
1013 {
1014  Init();
1015  BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1016  tid==PETSC_MATAIJ,&A);
1017  SetUpForDevice();
1018  // update base class
1019  height = GetNumRows();
1020  width = GetNumCols();
1021 }
1022 
1023 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
1024  PetscInt global_num_cols, PetscInt *row_starts,
1025  PetscInt *col_starts, SparseMatrix *diag,
1026  Operator::Type tid)
1027 {
1028  Init();
1029  BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1030  tid==PETSC_MATAIJ,&A);
1031  SetUpForDevice();
1032  // update base class
1033  height = GetNumRows();
1034  width = GetNumCols();
1035 }
1036 
1037 PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
1038 {
1039  if (A)
1040  {
1041  MPI_Comm comm = PetscObjectComm((PetscObject)A);
1042  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1043  if (X) { delete X; }
1044  if (Y) { delete Y; }
1045  X = Y = NULL;
1046  }
1047  height = B.Height();
1048  width = B.Width();
1049 #if defined(PETSC_HAVE_HYPRE)
1050  ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
1051 #else
1052  ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
1053 #endif
1054  SetUpForDevice();
1055  return *this;
1056 }
1057 
1058 PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
1059 {
1060  if (A)
1061  {
1062  MPI_Comm comm = PetscObjectComm((PetscObject)A);
1063  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1064  if (X) { delete X; }
1065  if (Y) { delete Y; }
1066  X = Y = NULL;
1067  }
1068  height = B.Height();
1069  width = B.Width();
1070  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1071  return *this;
1072 }
1073 
1074 PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
1075 {
1076  if (!A)
1077  {
1078  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1079  }
1080  else
1081  {
1082  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1083  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1084  ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1085  }
1086  return *this;
1087 }
1088 
1089 PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
1090 {
1091  if (!A)
1092  {
1093  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1094  ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
1095  }
1096  else
1097  {
1098  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1099  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1100  ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1101  }
1102  return *this;
1103 }
1104 
1105 void PetscParMatrix::
1106 BlockDiagonalConstructor(MPI_Comm comm,
1107  PetscInt *row_starts, PetscInt *col_starts,
1108  SparseMatrix *diag, bool assembled, Mat* Ad)
1109 {
1110  Mat A;
1111  PetscInt lrsize,lcsize,rstart,cstart;
1112  PetscMPIInt myid = 0,commsize;
1113 
1114  ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
1115  if (!HYPRE_AssumedPartitionCheck())
1116  {
1117  ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
1118  }
1119  lrsize = row_starts[myid+1]-row_starts[myid];
1120  rstart = row_starts[myid];
1121  lcsize = col_starts[myid+1]-col_starts[myid];
1122  cstart = col_starts[myid];
1123 
1124  if (!assembled)
1125  {
1126  IS is;
1127  ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1128  ISLocalToGlobalMapping rl2g,cl2g;
1129  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1130  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1131  if (row_starts != col_starts)
1132  {
1133  ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
1134  CCHKERRQ(comm,ierr);
1135  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1136  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1137  }
1138  else
1139  {
1140  ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1141  cl2g = rl2g;
1142  }
1143 
1144  // Create the PETSc object (MATIS format)
1145  ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1146  ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1147  PCHKERRQ(A,ierr);
1148  ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1149  ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1150  ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1151  ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
1152 
1153  // Copy SparseMatrix into PETSc SeqAIJ format
1154  // pass through host for now
1155  Mat lA;
1156  ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1157  const int *II = diag->HostReadI();
1158  const int *JJ = diag->HostReadJ();
1159 #if defined(PETSC_USE_64BIT_INDICES)
1160  PetscInt *pII,*pJJ;
1161  int m = diag->Height()+1, nnz = II[diag->Height()];
1162  ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1163  for (int i = 0; i < m; i++) { pII[i] = II[i]; }
1164  for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1165  ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1166  diag->HostReadData()); PCHKERRQ(lA,ierr);
1167  ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1168 #else
1169  ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1170  diag->HostReadData()); PCHKERRQ(lA,ierr);
1171 #endif
1172  }
1173  else
1174  {
1175  PetscScalar *da;
1176  PetscInt *dii,*djj,*oii,
1177  m = diag->Height()+1, nnz = diag->NumNonZeroElems();
1178 
1179  diag->SortColumnIndices();
1180  // if we can take ownership of the SparseMatrix arrays, we can avoid this
1181  // step
1182  ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1183  ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1184  ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1185  if (sizeof(PetscInt) == sizeof(int))
1186  {
1187  ierr = PetscMemcpy(dii,diag->HostReadI(),m*sizeof(PetscInt));
1188  CCHKERRQ(PETSC_COMM_SELF,ierr);
1189  ierr = PetscMemcpy(djj,diag->HostReadJ(),nnz*sizeof(PetscInt));
1190  CCHKERRQ(PETSC_COMM_SELF,ierr);
1191  }
1192  else
1193  {
1194  const int *iii = diag->HostReadI();
1195  const int *jjj = diag->HostReadJ();
1196  for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
1197  for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1198  }
1199  ierr = PetscMemcpy(da,diag->HostReadData(),nnz*sizeof(PetscScalar));
1200  CCHKERRQ(PETSC_COMM_SELF,ierr);
1201  ierr = PetscCalloc1(m,&oii);
1202  CCHKERRQ(PETSC_COMM_SELF,ierr);
1203  if (commsize > 1)
1204  {
1205  ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1206  PETSC_DECIDE,
1207  dii,djj,da,oii,NULL,NULL,&A);
1208  CCHKERRQ(comm,ierr);
1209  }
1210  else
1211  {
1212  ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1213  CCHKERRQ(comm,ierr);
1214  }
1215 
1216  void *ptrs[4] = {dii,djj,da,oii};
1217  const char *names[4] = {"_mfem_csr_dii",
1218  "_mfem_csr_djj",
1219  "_mfem_csr_da",
1220  "_mfem_csr_oii",
1221  };
1222  for (PetscInt i=0; i<4; i++)
1223  {
1224  PetscContainer c;
1225 
1226  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1227  ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1228  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1229  CCHKERRQ(comm,ierr);
1230  ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
1231  CCHKERRQ(comm,ierr);
1232  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1233  }
1234  }
1235 
1236  // Tell PETSc the matrix is ready to be used
1237  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1238  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1239 
1240  *Ad = A;
1241 }
1242 
1243 MPI_Comm PetscParMatrix::GetComm() const
1244 {
1245  return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
1246 }
1247 
1248 // TODO ADD THIS CONSTRUCTOR
1249 //PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
1250 // PetscInt glob_ncols, int *I, PetscInt *J,
1251 // double *data, PetscInt *rows, PetscInt *cols)
1252 //{
1253 //}
1254 
1255 // TODO This should take a reference on op but how?
1256 void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
1257 {
1258  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1259  ierr = MatSetSizes(*A,op->Height(),op->Width(),
1260  PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1261  ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1262  ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
1263  ierr = MatShellSetOperation(*A,MATOP_MULT,
1264  (void (*)())__mfem_mat_shell_apply);
1265  PCHKERRQ(A,ierr);
1266  ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1267  (void (*)())__mfem_mat_shell_apply_transpose);
1268  PCHKERRQ(A,ierr);
1269  ierr = MatShellSetOperation(*A,MATOP_COPY,
1270  (void (*)())__mfem_mat_shell_copy);
1271  PCHKERRQ(A,ierr);
1272  ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1273  (void (*)())__mfem_mat_shell_destroy);
1274 #if defined(_USE_DEVICE)
1275  MemoryType mt = GetMemoryType(op->GetMemoryClass());
1276  if (mt == MemoryType::DEVICE || mt == MemoryType::MANAGED)
1277  {
1278  ierr = MatShellSetVecType(*A,VECCUDA); PCHKERRQ(A,ierr);
1279  ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1280  }
1281  else
1282  {
1283  ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1284  }
1285 #endif
1286  PCHKERRQ(A,ierr);
1287  ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1288 }
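// Usage sketch (illustrative only): MakeWrapper() is what lets an arbitrary
// mfem::Operator be exposed to PETSc as a matrix-free MATSHELL. From user code
// this is reached through the PetscParMatrix constructor taking an operator
// and a type id; here `op` stands for some existing mfem::Operator.
//
//   mfem::PetscParMatrix shellA(MPI_COMM_WORLD, &op, mfem::Operator::PETSC_MATSHELL);
//   mfem::PetscParVector u(shellA, /*transpose=*/false, /*allocate=*/true);
//   mfem::PetscParVector w(shellA, /*transpose=*/true,  /*allocate=*/true);
//   shellA.Mult(u, w); // dispatches back to op.Mult() via __mfem_mat_shell_apply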
1289 
1290 void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
1291  Operator::Type tid)
1292 {
1293  PetscParMatrix *pA = const_cast<PetscParMatrix *>
1294  (dynamic_cast<const PetscParMatrix *>(&op));
1295  HypreParMatrix *pH = const_cast<HypreParMatrix *>
1296  (dynamic_cast<const HypreParMatrix *>(&op));
1297  BlockOperator *pB = const_cast<BlockOperator *>
1298  (dynamic_cast<const BlockOperator *>(&op));
1299  IdentityOperator *pI = const_cast<IdentityOperator *>
1300  (dynamic_cast<const IdentityOperator *>(&op));
1301  SparseMatrix *pS = const_cast<SparseMatrix *>
1302  (dynamic_cast<const SparseMatrix *>(&op));
1303 
1304  if (pA && tid == ANY_TYPE) // use same object and return
1305  {
1306  ierr = PetscObjectReference((PetscObject)(pA->A));
1307  CCHKERRQ(pA->GetComm(),ierr);
1308  *A = pA->A;
1309  return;
1310  }
1311 
1312  PetscBool avoidmatconvert = PETSC_FALSE;
1313  if (pA) // we test for these types since MatConvert will fail
1314  {
1315  ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
1316  MATSHELL,"");
1317  CCHKERRQ(comm,ierr);
1318  }
1319  if (pA && !avoidmatconvert)
1320  {
1321  Mat At = NULL;
1322  PetscBool istrans;
1323 #if PETSC_VERSION_LT(3,10,0)
1324  PetscBool ismatis;
1325 #endif
1326 
1327  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
1328  CCHKERRQ(pA->GetComm(),ierr);
1329  if (!istrans)
1330  {
1331  if (tid == pA->GetType()) // use same object and return
1332  {
1333  ierr = PetscObjectReference((PetscObject)(pA->A));
1334  CCHKERRQ(pA->GetComm(),ierr);
1335  *A = pA->A;
1336  return;
1337  }
1338 #if PETSC_VERSION_LT(3,10,0)
1339  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
1340  CCHKERRQ(pA->GetComm(),ierr);
1341 #endif
1342  }
1343  else
1344  {
1345  ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
1346 #if PETSC_VERSION_LT(3,10,0)
1347  ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
1348 #endif
1349  CCHKERRQ(pA->GetComm(),ierr);
1350  }
1351 
1352  // Try to convert
1353  if (tid == PETSC_MATAIJ)
1354  {
1355 #if PETSC_VERSION_LT(3,10,0)
1356  if (ismatis)
1357  {
1358  if (istrans)
1359  {
1360  Mat B;
1361 
1362  ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1363  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1364  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1365  }
1366  else
1367  {
1368  ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
1369  PCHKERRQ(pA->A,ierr);
1370  }
1371  }
1372  else
1373 #endif
1374  {
1375  PetscMPIInt size;
1376  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1377 
1378  // call MatConvert and see if a converter is available
1379  if (istrans)
1380  {
1381  Mat B;
1382  ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1383  PCHKERRQ(pA->A,ierr);
1384  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1385  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1386  }
1387  else
1388  {
1389  ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1390  PCHKERRQ(pA->A,ierr);
1391  }
1392  }
1393  }
1394  else if (tid == PETSC_MATIS)
1395  {
1396  if (istrans)
1397  {
1398  Mat B;
1399  ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1400  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1401  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1402  }
1403  else
1404  {
1405  ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1406  }
1407  }
1408  else if (tid == PETSC_MATHYPRE)
1409  {
1410 #if defined(PETSC_HAVE_HYPRE)
1411  if (istrans)
1412  {
1413  Mat B;
1414  ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1415  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1416  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1417  }
1418  else
1419  {
1420  ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1421  }
1422 #else
1423  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1424 #endif
1425  }
1426  else if (tid == PETSC_MATSHELL)
1427  {
1428  MakeWrapper(comm,&op,A);
1429  }
1430  else
1431  {
1432  MFEM_ABORT("Unsupported operator type conversion " << tid)
1433  }
1434  }
1435  else if (pH)
1436  {
1437  if (tid == PETSC_MATAIJ)
1438  {
1439 #if defined(PETSC_HAVE_HYPRE)
1440  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1441  PETSC_USE_POINTER,A);
1442 #else
1443  ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1444 #endif
1445  CCHKERRQ(pH->GetComm(),ierr);
1446  }
1447  else if (tid == PETSC_MATIS)
1448  {
1449 #if defined(PETSC_HAVE_HYPRE)
1450  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1451  PETSC_USE_POINTER,A);
1452 #else
1453  ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1454 #endif
1455  CCHKERRQ(pH->GetComm(),ierr);
1456  }
1457  else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
1458  {
1459 #if defined(PETSC_HAVE_HYPRE)
1460  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1461  PETSC_USE_POINTER,A);
1462  CCHKERRQ(pH->GetComm(),ierr);
1463 #else
1464  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1465 #endif
1466  }
1467  else if (tid == PETSC_MATSHELL)
1468  {
1469  MakeWrapper(comm,&op,A);
1470  }
1471  else
1472  {
1473  MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
1474  " is not implemented");
1475  }
1476  }
1477  else if (pB)
1478  {
1479  Mat *mats,*matsl2l = NULL;
1480  PetscInt i,j,nr,nc;
1481 
1482  nr = pB->NumRowBlocks();
1483  nc = pB->NumColBlocks();
1484  ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1485  if (tid == PETSC_MATIS)
1486  {
1487  ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1488  }
1489  for (i=0; i<nr; i++)
1490  {
1491  PetscBool needl2l = PETSC_TRUE;
1492 
1493  for (j=0; j<nc; j++)
1494  {
1495  if (!pB->IsZeroBlock(i,j))
1496  {
1497  ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
1498  if (tid == PETSC_MATIS && needl2l)
1499  {
1500  PetscContainer c;
1501  ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
1502  (PetscObject*)&c);
1503  PCHKERRQ(mats[i*nc+j],ierr);
1504  // special case for block operators: the local Vdofs should be
1505  // ordered as:
1506  // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
1507  // with m fields, Ni the number of Vdofs for the i-th field
1508  if (c)
1509  {
1510  Array<Mat> *l2l = NULL;
1511  ierr = PetscContainerGetPointer(c,(void**)&l2l);
1512  PCHKERRQ(c,ierr);
1513  MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
1514  << l2l->Size() << " for block row " << i );
1515  ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
1516  PCHKERRQ(c,ierr);
1517  matsl2l[i] = (*l2l)[0];
1518  needl2l = PETSC_FALSE;
1519  }
1520  }
1521  }
1522  }
1523  }
1524  ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1525  if (tid == PETSC_MATIS)
1526  {
1527  ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1528 
1529  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
1530  for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1531  ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1532 
1533  PetscContainer c;
1534  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1535  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1536  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1537  PCHKERRQ(c,ierr);
1538  ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
1539  PCHKERRQ((*A),ierr);
1540  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1541  }
1542  for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1543  ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1544  }
1545  else if (pI && tid == PETSC_MATAIJ)
1546  {
1547  PetscInt rst;
1548 
1549  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1550  ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
1551  PCHKERRQ(A,ierr);
1552  ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1553  ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1554  ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1555  ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1556  ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1557  for (PetscInt i = rst; i < rst+pI->Height(); i++)
1558  {
1559  ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1560  }
1561  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1562  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1563  }
1564  else if (pS)
1565  {
1566  if (tid == PETSC_MATSHELL)
1567  {
1568  MakeWrapper(comm,&op,A);
1569  }
1570  else
1571  {
1572  /* from SparseMatrix to SEQAIJ -> always pass through host for now */
1573  Mat B;
1574  PetscScalar *pdata;
1575  PetscInt *pii,*pjj,*oii;
1576  PetscMPIInt size;
1577 
1578  int m = pS->Height();
1579  int n = pS->Width();
1580  const int *ii = pS->HostReadI();
1581  const int *jj = pS->HostReadJ();
1582  const double *data = pS->HostReadData();
1583 
1584  ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1585  ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1586  ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1587  pii[0] = ii[0];
1588  for (int i = 0; i < m; i++)
1589  {
1590  bool issorted = true;
1591  pii[i+1] = ii[i+1];
1592  for (int j = ii[i]; j < ii[i+1]; j++)
1593  {
1594  pjj[j] = jj[j];
1595  if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1596  pdata[j] = data[j];
1597  }
1598  if (!issorted)
1599  {
1600  ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1601  CCHKERRQ(PETSC_COMM_SELF,ierr);
1602  }
1603  }
1604 
1605  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1606  if (size == 1)
1607  {
1608  ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1609  CCHKERRQ(comm,ierr);
1610  oii = NULL;
1611  }
1612  else // block diagonal constructor
1613  {
1614  ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1615  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1616  PETSC_DECIDE,
1617  pii,pjj,pdata,oii,NULL,NULL,&B);
1618  CCHKERRQ(comm,ierr);
1619  }
1620  void *ptrs[4] = {pii,pjj,pdata,oii};
1621  const char *names[4] = {"_mfem_csr_pii",
1622  "_mfem_csr_pjj",
1623  "_mfem_csr_pdata",
1624  "_mfem_csr_oii"
1625  };
1626  for (int i=0; i<4; i++)
1627  {
1628  PetscContainer c;
1629 
1630  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1631  ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1632  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1633  PCHKERRQ(B,ierr);
1634  ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
1635  PCHKERRQ(B,ierr);
1636  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1637  }
1638  if (tid == PETSC_MATAIJ)
1639  {
1640  *A = B;
1641  }
1642  else if (tid == PETSC_MATHYPRE)
1643  {
1644  ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1645  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1646  }
1647  else if (tid == PETSC_MATIS)
1648  {
1649  ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1650  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1651  }
1652  else
1653  {
1654  MFEM_ABORT("Unsupported operator type conversion " << tid)
1655  }
1656  }
1657  }
1658  else // fallback to general operator
1659  {
1660  MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
1661  "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1662  MakeWrapper(comm,&op,A);
1663  if (tid == PETSC_MATAIJ)
1664  {
1665  Mat B;
1666  PetscBool isaij;
1667 
1668  ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1669  ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
1670  CCHKERRQ(comm,ierr);
1671  ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1672  if (!isaij)
1673  {
1674  ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1675  ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1676  }
1677  else
1678  {
1679  *A = B;
1680  }
1681  }
1682  }
1683  SetUpForDevice();
1684 }
1685 
1686 void PetscParMatrix::Destroy()
1687 {
1688  if (A != NULL)
1689  {
1690  MPI_Comm comm = MPI_COMM_NULL;
1691  ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
1692  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1693  }
1694  delete X;
1695  delete Y;
1696  X = Y = NULL;
1697 }
1698 
1699 PetscParMatrix::PetscParMatrix(petsc::Mat a, bool ref)
1700 {
1701  if (ref)
1702  {
1703  ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
1704  }
1705  Init();
1706  A = a;
1707  height = GetNumRows();
1708  width = GetNumCols();
1709 }
1710 
1711 void PetscParMatrix::SetMat(petsc::Mat A_)
1712 {
1713  if (A_ == A) { return; }
1714  Destroy();
1715  ierr = PetscObjectReference((PetscObject)A_); PCHKERRQ(A_,ierr);
1716  A = A_;
1717  height = GetNumRows();
1718  width = GetNumCols();
1719 }
1720 
1721 void PetscParMatrix::SetUpForDevice()
1722 {
1723 #if !defined(_USE_DEVICE)
1724  return;
1725 #else
1726  if (!A || !Device::Allows(Backend::CUDA_MASK)) { return; }
1727 
1728  PetscBool ismatis,isnest,isseqaij,ismpiaij;
1729  ierr = PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
1730  PCHKERRQ(A,ierr);
1731  ierr = PetscObjectTypeCompare((PetscObject)A,MATNEST,&isnest);
1732  PCHKERRQ(A,ierr);
1733  Mat tA = A;
1734  if (ismatis)
1735  {
1736  ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1737  ierr = PetscObjectTypeCompare((PetscObject)tA,MATNEST,&isnest);
1738  PCHKERRQ(tA,ierr);
1739  }
1740  if (isnest)
1741  {
1742  PetscInt n,m;
1743  Mat **sub;
1744  ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1745  bool dvec = false;
1746  for (PetscInt i = 0; i < n; i++)
1747  {
1748  for (PetscInt j = 0; j < m; j++)
1749  {
1750  if (sub[i][j])
1751  {
1752  bool expT = false;
1753  Mat sA = sub[i][j];
1754  ierr = PetscObjectTypeCompare((PetscObject)sA,MATSEQAIJ,&isseqaij);
1755  PCHKERRQ(sA,ierr);
1756  ierr = PetscObjectTypeCompare((PetscObject)sA,MATMPIAIJ,&ismpiaij);
1757  PCHKERRQ(sA,ierr);
1758  if (isseqaij)
1759  {
1760  ierr = MatSetType(sA,MATSEQAIJCUSPARSE); PCHKERRQ(sA,ierr);
1761  dvec = true;
1762  expT = true;
1763  }
1764  else if (ismpiaij)
1765  {
1766  ierr = MatSetType(sA,MATMPIAIJCUSPARSE); PCHKERRQ(sA,ierr);
1767  dvec = true;
1768  expT = true;
1769  }
1770  if (expT)
1771  {
1772  ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1773  PETSC_TRUE); PCHKERRQ(sA,ierr);
1774  }
1775  }
1776  }
1777  }
1778  if (dvec)
1779  {
1780  ierr = MatSetVecType(tA,VECCUDA); PCHKERRQ(tA,ierr);
1781  }
1782  }
1783  else
1784  {
1785  bool expT = false;
1786  ierr = PetscObjectTypeCompare((PetscObject)tA,MATSEQAIJ,&isseqaij);
1787  PCHKERRQ(tA,ierr);
1788  ierr = PetscObjectTypeCompare((PetscObject)tA,MATMPIAIJ,&ismpiaij);
1789  PCHKERRQ(tA,ierr);
1790  if (isseqaij)
1791  {
1792  ierr = MatSetType(tA,MATSEQAIJCUSPARSE); PCHKERRQ(tA,ierr);
1793  expT = true;
1794  }
1795  else if (ismpiaij)
1796  {
1797  ierr = MatSetType(tA,MATMPIAIJCUSPARSE); PCHKERRQ(tA,ierr);
1798  expT = true;
1799  }
1800  if (expT)
1801  {
1802  ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1803  PETSC_TRUE); PCHKERRQ(tA,ierr);
1804  }
1805  }
1806 #endif
1807 }
1808 
1809 // Computes y = alpha * A * x + beta * y
1810 // or y = alpha * A^T* x + beta * y
1811 static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
1812  bool transpose)
1813 {
1814  PetscErrorCode (*f)(Mat,Vec,Vec);
1815  PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
1816  if (transpose)
1817  {
1818  f = MatMultTranspose;
1819  fadd = MatMultTransposeAdd;
1820  }
1821  else
1822  {
1823  f = MatMult;
1824  fadd = MatMultAdd;
1825  }
1826  if (a != 0.)
1827  {
1828  if (b != 0.)
1829  {
1830  ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1831  ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1832  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1833  }
1834  else
1835  {
1836  ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1837  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1838  }
1839  }
1840  else
1841  {
1842  if (b == 1.)
1843  {
1844  // do nothing
1845  }
1846  else if (b != 0.)
1847  {
1848  ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1849  }
1850  else
1851  {
1852  ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1853  }
1854  }
1855 }
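// Note on MatMultKernel above: PETSc does not expose a single primitive for
// the general update y = a*A*x + b*y, so for a != 0 and b != 0 it is composed
// from the available calls as
//   y <- (b/a)*y;   y <- A*x + y   (MatMultAdd);   y <- a*y
// which yields y = a*A*x + b*y. When a == 0 the product is skipped and y is
// only scaled (or zeroed); when b == 0 the plain MatMult result is scaled by a.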
1856 
1857 void PetscParMatrix::MakeRef(const PetscParMatrix &master)
1858 {
1859  ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
1860  Destroy();
1861  Init();
1862  A = master.A;
1863  height = master.height;
1864  width = master.width;
1865 }
1866 
1867 PetscParVector * PetscParMatrix::GetX() const
1868 {
1869  if (!X)
1870  {
1871  MFEM_VERIFY(A,"Mat not present");
1872  X = new PetscParVector(*this,false,false); PCHKERRQ(A,ierr);
1873  }
1874  return X;
1875 }
1876 
1877 PetscParVector * PetscParMatrix::GetY() const
1878 {
1879  if (!Y)
1880  {
1881  MFEM_VERIFY(A,"Mat not present");
1882  Y = new PetscParVector(*this,true,false); PCHKERRQ(A,ierr);
1883  }
1884  return Y;
1885 }
1886 
1887 PetscParMatrix * PetscParMatrix::Transpose(bool action)
1888 {
1889  Mat B;
1890  if (action)
1891  {
1892  ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1893  }
1894  else
1895  {
1896  ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1897  }
1898  return new PetscParMatrix(B,false);
1899 }
1900 
1901 void PetscParMatrix::operator*=(double s)
1902 {
1903  ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1904 }
1905 
1906 void PetscParMatrix::Mult(double a, const Vector &x, double b, Vector &y) const
1907 {
1908  MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
1909  << ", expected size = " << Width());
1910  MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
1911  << ", expected size = " << Height());
1912 
1913  PetscParVector *XX = GetX();
1914  PetscParVector *YY = GetY();
1915  bool rw = (b != 0.0);
1916  XX->PlaceMemory(x.GetMemory());
1917  YY->PlaceMemory(y.GetMemory(),rw);
1918  MatMultKernel(A,a,XX->x,b,YY->x,false);
1919  XX->ResetMemory();
1920  YY->ResetMemory();
1921 }
1922 
1923 void PetscParMatrix::MultTranspose(double a, const Vector &x, double b,
1924  Vector &y) const
1925 {
1926  MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
1927  << ", expected size = " << Height());
1928  MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
1929  << ", expected size = " << Width());
1930 
1931  PetscParVector *XX = GetX();
1932  PetscParVector *YY = GetY();
1933  bool rw = (b != 0.0);
1934  XX->PlaceMemory(y.GetMemory(),rw);
1935  YY->PlaceMemory(x.GetMemory());
1936  MatMultKernel(A,a,YY->x,b,XX->x,true);
1937  XX->ResetMemory();
1938  YY->ResetMemory();
1939 }
1940 
1941 void PetscParMatrix::Print(const char *fname, bool binary) const
1942 {
1943  if (fname)
1944  {
1945  PetscViewer view;
1946 
1947  if (binary)
1948  {
1949  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
1950  FILE_MODE_WRITE,&view);
1951  }
1952  else
1953  {
1954  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
1955  }
1956  PCHKERRQ(A,ierr);
1957  ierr = MatView(A,view); PCHKERRQ(A,ierr);
1958  ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
1959  }
1960  else
1961  {
1962  ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1963  }
1964 }
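// Usage sketch (illustrative only, assuming an assembled HypreParMatrix *H):
// convert it to a native PETSc AIJ matrix, apply it, and dump it to disk.
//
//   mfem::PetscParMatrix pA(H, mfem::Operator::PETSC_MATAIJ);
//   mfem::PetscParVector x(pA, /*transpose=*/false, /*allocate=*/true);
//   mfem::PetscParVector y(pA, /*transpose=*/true,  /*allocate=*/true);
//   x.Randomize(0);
//   pA.Mult(x, y);                      // y = A*x
//   pA.Print("A.dat", /*binary=*/true);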
1965 
1966 void PetscParMatrix::ScaleRows(const Vector & s)
1967 {
1968  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
1969  << ", expected size = " << Height());
1970 
1971  PetscParVector *YY = GetY();
1972  YY->PlaceMemory(s.GetMemory());
1973  ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
1974  YY->ResetMemory();
1975 }
1976 
1977 void PetscParMatrix::ScaleCols(const Vector & s)
1978 {
1979  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
1980  << ", expected size = " << Width());
1981 
1982  PetscParVector *XX = GetX();
1983  XX->PlaceMemory(s.GetMemory());
1984  ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
1985  XX->ResetMemory();
1986 }
1987 
1988 void PetscParMatrix::Shift(double s)
1989 {
1990  ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
1991 }
1992 
1993 void PetscParMatrix::Shift(const Vector & s)
1994 {
1995  // for matrices with square diagonal blocks only
1996  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
1997  << ", expected size = " << Height());
1998  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
1999  << ", expected size = " << Width());
2000 
2001  PetscParVector *XX = GetX();
2002  XX->PlaceMemory(s.GetMemory());
2003  ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2004  XX->ResetMemory();
2005 }
2006 
2007 PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
2008  PetscParMatrix *P)
2009 {
2010  MFEM_VERIFY(A->Width() == P->Height(),
2011  "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
2012  " differs from number of local rows of P " << P->Height());
2013  MFEM_VERIFY(A->Height() == R->Width(),
2014  "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
2015  " differs from number of local cols of R " << R->Width());
2016  Mat B;
2017  ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2018  PCHKERRQ(*R,ierr);
2019  return new PetscParMatrix(B);
2020 }
2021 
2022 PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
2023 {
2024  Mat pA = *A,pP = *P,pRt = *Rt;
2025  Mat B;
2026  PetscBool Aismatis,Pismatis,Rtismatis;
2027 
2028  MFEM_VERIFY(A->Width() == P->Height(),
2029  "Petsc RAP: Number of local cols of A " << A->Width() <<
2030  " differs from number of local rows of P " << P->Height());
2031  MFEM_VERIFY(A->Height() == Rt->Height(),
2032  "Petsc RAP: Number of local rows of A " << A->Height() <<
2033  " differs from number of local rows of Rt " << Rt->Height());
2034  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
2035  PCHKERRQ(pA,ierr);
2036  ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
2037  PCHKERRQ(pA,ierr);
2038  ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
2039  PCHKERRQ(pA,ierr);
2040  if (Aismatis &&
2041  Pismatis &&
2042  Rtismatis) // handle special case (this code will eventually go into PETSc)
2043  {
2044  Mat lA,lP,lB,lRt;
2045  ISLocalToGlobalMapping cl2gP,cl2gRt;
2046  PetscInt rlsize,clsize,rsize,csize;
2047 
2048  ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2049  ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2050  ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2051  ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2052  ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2053  ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2054  ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
2055  ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2056  ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2057  ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2058  ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2059  ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2060  ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2061  if (lRt == lP)
2062  {
2063  ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2064  PCHKERRQ(lA,ierr);
2065  }
2066  else
2067  {
2068  Mat lR;
2069  ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2070  ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2071  PCHKERRQ(lRt,ierr);
2072  ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2073  }
2074 
2075  // Attach the lRt matrix to the subdomain local matrix;
2076  // it may be used if markers on vdofs have to be mapped onto
2077  // subdomain true dofs.
2078  {
2079  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
2080  ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
2081  (*vmatsl2l)[0] = lRt;
2082 
2083  PetscContainer c;
2084  ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
2085  PCHKERRQ(B,ierr);
2086  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2087  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2088  PCHKERRQ(c,ierr);
2089  ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
2090  PCHKERRQ(B,ierr);
2091  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2092  }
2093 
2094  // Set local problem
2095  ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2096  ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2097  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2098  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2099  }
2100  else // raises an error if this PtAP combination is not supported by PETSc
2101  {
2102  if (pP == pRt)
2103  {
2104  ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2105  PCHKERRQ(pA,ierr);
2106  }
2107  else
2108  {
2109  Mat pR;
2110  ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2111  ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2112  PCHKERRQ(pRt,ierr);
2113  ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2114  }
2115  }
2116  return new PetscParMatrix(B);
2117 }
2118 
2119 PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
2120 {
2121  PetscParMatrix *out = RAP(P,A,P);
2122  return out;
2123 }
2124 
2125 PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
2126 {
2127  PetscParMatrix *out,*A;
2128 #if defined(PETSC_HAVE_HYPRE)
2129  A = new PetscParMatrix(hA, Operator::PETSC_MATHYPRE);
2130 #else
2131  A = new PetscParMatrix(hA);
2132 #endif
2133  out = RAP(P,A,P);
2134  delete A;
2135  return out;
2136 }
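// Usage sketch (editorial example, not part of petsc.cpp): the two-argument
// RAP overloads compute P^T A P and are typically used to assemble a
// coarse-level or reduced operator (names illustrative):
//
//    mfem::PetscParMatrix *Ac = mfem::RAP(&A, &P);  // Ac = P^T A P
//    // ... use Ac ...
//    delete Ac;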
2137 
2138 
2139 PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
2140 {
2141  Mat AB;
2142 
2143  ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2144  CCHKERRQ(A->GetComm(),ierr);
2145  return new PetscParMatrix(AB);
2146 }
2147 
2148 PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
2149 {
2150  Mat Ae;
2151 
2152  PetscParVector dummy(GetComm(),0);
2153  ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2154  EliminateRowsCols(rows_cols,dummy,dummy);
2155  ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2156  return new PetscParMatrix(Ae);
2157 }
2158 
2159 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2160  const HypreParVector &X,
2161  HypreParVector &B,
2162  double diag)
2163 {
2164  MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2165 }
2166 
2167 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2168  const PetscParVector &X,
2169  PetscParVector &B,
2170  double diag)
2171 {
2172  PetscInt M,N;
2173  ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2174  MFEM_VERIFY(M == N,"Rectangular case unsupported");
2175 
2176  // TODO: what if a diagonal term is not present?
2177  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2178 
2179  // rows need to be in global numbering
2180  PetscInt rst;
2181  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2182 
2183  IS dir;
2184  ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2185  if (!X.GlobalSize() && !B.GlobalSize())
2186  {
2187  ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2188  }
2189  else
2190  {
2191  ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2192  }
2193  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2194 }
2195 
2196 void PetscParMatrix::EliminateRows(const Array<int> &rows)
2197 {
2198  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2199 
2200  // rows need to be in global numbering
2201  PetscInt rst;
2202  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2203 
2204  IS dir;
2205  ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2206  ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2207  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2208 }
2209 
2210 Mat PetscParMatrix::ReleaseMat(bool dereference)
2211 {
2212 
2213  Mat B = A;
2214  if (dereference)
2215  {
2216  MPI_Comm comm = GetComm();
2217  ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
2218  }
2219  A = NULL;
2220  return B;
2221 }
2222 
2223 Operator::Type PetscParMatrix::GetType() const
2224 {
2225  PetscBool ok;
2226  MFEM_VERIFY(A, "no associated PETSc Mat object");
2227  PetscObject oA = (PetscObject)(this->A);
2228  // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to PETSC_MATAIJ
2229  ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
2230  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2231  ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2232  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2233  ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2234  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2235  ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2236  if (ok == PETSC_TRUE) { return PETSC_MATIS; }
2237  ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2238  if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
2239  ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2240  if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
2241 #if defined(PETSC_HAVE_HYPRE)
2242  ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2243  if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
2244 #endif
2245  return PETSC_MATGENERIC;
2246 }
2247 
2248 void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
2249  const Array<int> &ess_dof_list,
2250  const Vector &X, Vector &B)
2251 {
2252  const PetscScalar *array;
2253  Mat pA = const_cast<PetscParMatrix&>(A);
2254 
2255  // B -= Ae*X
2256  Ae.Mult(-1.0, X, 1.0, B);
2257 
2258  Vec diag = const_cast<PetscParVector&>((*A.GetX()));
2259  ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2260  ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2261  for (int i = 0; i < ess_dof_list.Size(); i++)
2262  {
2263  int r = ess_dof_list[i];
2264  B(r) = array[r] * X(r);
2265  }
2266  ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2267 }
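// Usage sketch (editorial example, not part of petsc.cpp): a typical
// essential-BC elimination workflow with the helpers above mirrors the
// HypreParMatrix interface (ess_tdof_list, X and B assumed set up elsewhere):
//
//    mfem::PetscParMatrix *Ae = A.EliminateRowsCols(ess_tdof_list);
//    mfem::EliminateBC(A, *Ae, ess_tdof_list, X, B); // adjust B for the BC values in X
//    delete Ae;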
2268 
2269 // PetscSolver methods
2270 
2271 PetscSolver::PetscSolver() : clcustom(false)
2272 {
2273  obj = NULL;
2274  B = X = NULL;
2275  cid = -1;
2276  operatorset = false;
2277  bchandler = NULL;
2278  private_ctx = NULL;
2279 }
2280 
2281 PetscSolver::~PetscSolver()
2282 {
2283  delete B;
2284  delete X;
2285  FreePrivateContext();
2286 }
2287 
2288 void PetscSolver::SetTol(double tol)
2289 {
2290  SetRelTol(tol);
2291 }
2292 
2293 void PetscSolver::SetRelTol(double tol)
2294 {
2295  if (cid == KSP_CLASSID)
2296  {
2297  KSP ksp = (KSP)obj;
2298  ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2299  }
2300  else if (cid == SNES_CLASSID)
2301  {
2302  SNES snes = (SNES)obj;
2303  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2304  PETSC_DEFAULT);
2305  }
2306  else if (cid == TS_CLASSID)
2307  {
2308  TS ts = (TS)obj;
2309  ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2310  }
2311  else
2312  {
2313  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2314  }
2315  PCHKERRQ(obj,ierr);
2316 }
2317 
2318 void PetscSolver::SetAbsTol(double tol)
2319 {
2320  if (cid == KSP_CLASSID)
2321  {
2322  KSP ksp = (KSP)obj;
2323  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2324  }
2325  else if (cid == SNES_CLASSID)
2326  {
2327  SNES snes = (SNES)obj;
2328  ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2329  PETSC_DEFAULT);
2330  }
2331  else if (cid == TS_CLASSID)
2332  {
2333  TS ts = (TS)obj;
2334  ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2335  }
2336  else
2337  {
2338  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2339  }
2340  PCHKERRQ(obj,ierr);
2341 }
2342 
2343 void PetscSolver::SetMaxIter(int max_iter)
2344 {
2345  if (cid == KSP_CLASSID)
2346  {
2347  KSP ksp = (KSP)obj;
2348  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2349  max_iter);
2350  }
2351  else if (cid == SNES_CLASSID)
2352  {
2353  SNES snes = (SNES)obj;
2354  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2355  max_iter,PETSC_DEFAULT);
2356  }
2357  else if (cid == TS_CLASSID)
2358  {
2359  TS ts = (TS)obj;
2360  ierr = TSSetMaxSteps(ts,max_iter);
2361  }
2362  else
2363  {
2364  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2365  }
2366  PCHKERRQ(obj,ierr);
2367 }
2368 
2369 
2370 void PetscSolver::SetPrintLevel(int plev)
2371 {
2372  typedef PetscErrorCode (*myPetscFunc)(void**);
2373  PetscViewerAndFormat *vf = NULL;
2374  PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
2375 
2376  if (plev > 0)
2377  {
2378  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2379  PCHKERRQ(obj,ierr);
2380  }
2381  if (cid == KSP_CLASSID)
2382  {
2383  // there are many other options, see the function KSPSetFromOptions() in
2384  // src/ksp/ksp/interface/itcl.c
2385  typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
2386  KSP ksp = (KSP)obj;
2387  if (plev >= 0)
2388  {
2389  ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2390  }
2391  if (plev == 1)
2392  {
2393 #if PETSC_VERSION_LT(3,15,0)
2394  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2395 #else
2396  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2397 #endif
2398  (myPetscFunc)PetscViewerAndFormatDestroy);
2399  PCHKERRQ(ksp,ierr);
2400  }
2401  else if (plev > 1)
2402  {
2403  ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2404  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2405  (myPetscFunc)PetscViewerAndFormatDestroy);
2406  PCHKERRQ(ksp,ierr);
2407  if (plev > 2)
2408  {
2409  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2410  PCHKERRQ(viewer,ierr);
2411 #if PETSC_VERSION_LT(3,15,0)
2412  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2413 #else
2414  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2415 #endif
2416  (myPetscFunc)PetscViewerAndFormatDestroy);
2417  PCHKERRQ(ksp,ierr);
2418  }
2419  }
2420  }
2421  else if (cid == SNES_CLASSID)
2422  {
2423  typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
2424  SNES snes = (SNES)obj;
2425  if (plev >= 0)
2426  {
2427  ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2428  }
2429  if (plev > 0)
2430  {
2431  ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2432  (myPetscFunc)PetscViewerAndFormatDestroy);
2433  PCHKERRQ(snes,ierr);
2434  }
2435  }
2436  else if (cid == TS_CLASSID)
2437  {
2438  TS ts = (TS)obj;
2439  if (plev >= 0)
2440  {
2441  ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2442  }
2443  }
2444  else
2445  {
2446  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2447  }
2448 }
2449 
2450 MPI_Comm PetscSolver::GetComm() const
2451 {
2452  return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
2453 }
2454 
2455 void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
2456 {
2457  __mfem_monitor_ctx *monctx;
2458  ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2459  monctx->solver = this;
2460  monctx->monitor = ctx;
2461  if (cid == KSP_CLASSID)
2462  {
2463  ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
2464  __mfem_monitor_ctx_destroy);
2465  PCHKERRQ(obj,ierr);
2466  }
2467  else if (cid == SNES_CLASSID)
2468  {
2469  ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
2470  __mfem_monitor_ctx_destroy);
2471  PCHKERRQ(obj,ierr);
2472  }
2473  else if (cid == TS_CLASSID)
2474  {
2475  ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
2476  __mfem_monitor_ctx_destroy);
2477  PCHKERRQ(obj,ierr);
2478  }
2479  else
2480  {
2481  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2482  }
2483 }
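// Usage sketch (editorial example, not part of petsc.cpp): a user monitor is a
// class derived from PetscSolverMonitor that overrides the callbacks it needs,
// e.g. (assuming the PetscSolverMonitor interface declared in petsc.hpp):
//
//    class ResidualMonitor : public mfem::PetscSolverMonitor
//    {
//    public:
//       ResidualMonitor() : PetscSolverMonitor(false, true) {}
//       void MonitorResidual(PetscInt it, PetscReal norm,
//                            const mfem::Vector &r) override
//       { mfem::out << "it " << it << ": ||r|| = " << norm << "\n"; }
//    };
//
// An instance is then attached with solver.SetMonitor(&monitor).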
2484 
2485 void PetscSolver::SetBCHandler(PetscBCHandler *bch)
2486 {
2487  bchandler = bch;
2488  if (cid == SNES_CLASSID)
2489  {
2490  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
2491  snes_ctx->bchandler = bchandler;
2492  }
2493  else if (cid == TS_CLASSID)
2494  {
2495  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
2496  ts_ctx->bchandler = bchandler;
2497  }
2498  else
2499  {
2500  MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2501  }
2502 }
2503 
2504 void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
2505 {
2506  PC pc = NULL;
2507  if (cid == TS_CLASSID)
2508  {
2509  SNES snes;
2510  KSP ksp;
2511 
2512  ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
2513  ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2514  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2515  }
2516  else if (cid == SNES_CLASSID)
2517  {
2518  KSP ksp;
2519 
2520  ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
2521  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2522  }
2523  else if (cid == KSP_CLASSID)
2524  {
2525  ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
2526  }
2527  else if (cid == PC_CLASSID)
2528  {
2529  pc = (PC)obj;
2530  }
2531  else
2532  {
2533  MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
2534  }
2535  if (factory)
2536  {
2537  ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2538  }
2539  else
2540  {
2541  ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2542  }
2543 }
2544 
2545 void PetscSolver::Customize(bool customize) const
2546 {
2547  if (!customize) { clcustom = true; }
2548  if (!clcustom)
2549  {
2550  if (cid == PC_CLASSID)
2551  {
2552  PC pc = (PC)obj;
2553  ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2554  }
2555  else if (cid == KSP_CLASSID)
2556  {
2557  KSP ksp = (KSP)obj;
2558  ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2559  }
2560  else if (cid == SNES_CLASSID)
2561  {
2562  SNES snes = (SNES)obj;
2563  ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2564  }
2565  else if (cid == TS_CLASSID)
2566  {
2567  TS ts = (TS)obj;
2568  ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2569  }
2570  else
2571  {
2572  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2573  }
2574  }
2575  clcustom = true;
2576 }
2577 
2578 int PetscSolver::GetConverged()
2579 {
2580  if (cid == KSP_CLASSID)
2581  {
2582  KSP ksp = (KSP)obj;
2583  KSPConvergedReason reason;
2584  ierr = KSPGetConvergedReason(ksp,&reason);
2585  PCHKERRQ(ksp,ierr);
2586  return reason > 0 ? 1 : 0;
2587  }
2588  else if (cid == SNES_CLASSID)
2589  {
2590  SNES snes = (SNES)obj;
2591  SNESConvergedReason reason;
2592  ierr = SNESGetConvergedReason(snes,&reason);
2593  PCHKERRQ(snes,ierr);
2594  return reason > 0 ? 1 : 0;
2595  }
2596  else if (cid == TS_CLASSID)
2597  {
2598  TS ts = (TS)obj;
2599  TSConvergedReason reason;
2600  ierr = TSGetConvergedReason(ts,&reason);
2601  PCHKERRQ(ts,ierr);
2602  return reason > 0 ? 1 : 0;
2603  }
2604  else
2605  {
2606  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2607  return -1;
2608  }
2609 }
2610 
2611 int PetscSolver::GetNumIterations()
2612 {
2613  if (cid == KSP_CLASSID)
2614  {
2615  KSP ksp = (KSP)obj;
2616  PetscInt its;
2617  ierr = KSPGetIterationNumber(ksp,&its);
2618  PCHKERRQ(ksp,ierr);
2619  return its;
2620  }
2621  else if (cid == SNES_CLASSID)
2622  {
2623  SNES snes = (SNES)obj;
2624  PetscInt its;
2625  ierr = SNESGetIterationNumber(snes,&its);
2626  PCHKERRQ(snes,ierr);
2627  return its;
2628  }
2629  else if (cid == TS_CLASSID)
2630  {
2631  TS ts = (TS)obj;
2632  PetscInt its;
2633  ierr = TSGetStepNumber(ts,&its);
2634  PCHKERRQ(ts,ierr);
2635  return its;
2636  }
2637  else
2638  {
2639  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2640  return -1;
2641  }
2642 }
2643 
2644 double PetscSolver::GetFinalNorm()
2645 {
2646  if (cid == KSP_CLASSID)
2647  {
2648  KSP ksp = (KSP)obj;
2649  PetscReal norm;
2650  ierr = KSPGetResidualNorm(ksp,&norm);
2651  PCHKERRQ(ksp,ierr);
2652  return norm;
2653  }
2654  if (cid == SNES_CLASSID)
2655  {
2656  SNES snes = (SNES)obj;
2657  PetscReal norm;
2658  ierr = SNESGetFunctionNorm(snes,&norm);
2659  PCHKERRQ(snes,ierr);
2660  return norm;
2661  }
2662  else
2663  {
2664  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2665  return PETSC_MAX_REAL;
2666  }
2667 }
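// Usage sketch (editorial example, not part of petsc.cpp): after a solve the
// wrapped PETSc object can be queried through the accessors above, e.g. with
// a PetscLinearSolver named `solver`:
//
//    solver.Mult(b, x);
//    if (!solver.GetConverged())
//    {
//       mfem::out << "not converged: " << solver.GetNumIterations()
//                 << " iterations, final residual norm "
//                 << solver.GetFinalNorm() << "\n";
//    }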
2668 
2669 void PetscSolver::CreatePrivateContext()
2670 {
2671  FreePrivateContext();
2672  if (cid == SNES_CLASSID)
2673  {
2674  __mfem_snes_ctx *snes_ctx;
2675  ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2676  snes_ctx->op = NULL;
2677  snes_ctx->bchandler = NULL;
2678  snes_ctx->work = NULL;
2679  snes_ctx->jacType = Operator::PETSC_MATAIJ;
2680  private_ctx = (void*) snes_ctx;
2681  }
2682  else if (cid == TS_CLASSID)
2683  {
2684  __mfem_ts_ctx *ts_ctx;
2685  ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2686  ts_ctx->op = NULL;
2687  ts_ctx->bchandler = NULL;
2688  ts_ctx->work = NULL;
2689  ts_ctx->work2 = NULL;
2690  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2691  ts_ctx->cached_ijacstate = -1;
2692  ts_ctx->cached_rhsjacstate = -1;
2693  ts_ctx->cached_splits_xstate = -1;
2694  ts_ctx->cached_splits_xdotstate = -1;
2695  ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2696  ts_ctx->jacType = Operator::PETSC_MATAIJ;
2697  private_ctx = (void*) ts_ctx;
2698  }
2699 }
2700 
2701 void PetscSolver::FreePrivateContext()
2702 {
2703  if (!private_ctx) { return; }
2704  // free private context's owned objects
2705  if (cid == SNES_CLASSID)
2706  {
2707  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2708  delete snes_ctx->work;
2709  }
2710  else if (cid == TS_CLASSID)
2711  {
2712  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2713  delete ts_ctx->work;
2714  delete ts_ctx->work2;
2715  }
2716  ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2717 }
2718 
2719 // PetscBCHandler methods
2720 
2721 PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2722  enum PetscBCHandler::Type type_)
2723  : bctype(type_), setup(false), eval_t(0.0),
2724  eval_t_cached(std::numeric_limits<double>::min())
2725 {
2726  SetTDofs(ess_tdof_list);
2727 }
2728 
2729 void PetscBCHandler::SetTDofs(Array<int>& list)
2730 {
2731  ess_tdof_list.SetSize(list.Size());
2732  ess_tdof_list.Assign(list);
2733  setup = false;
2734 }
2735 
2736 void PetscBCHandler::SetUp(PetscInt n)
2737 {
2738  if (setup) { return; }
2739  if (bctype == CONSTANT)
2740  {
2741  eval_g.SetSize(n);
2742  this->Eval(eval_t,eval_g);
2743  eval_t_cached = eval_t;
2744  }
2745  else if (bctype == TIME_DEPENDENT)
2746  {
2747  eval_g.SetSize(n);
2748  }
2749  setup = true;
2750 }
2751 
2752 void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2753 {
2754  (*this).SetUp(x.Size());
2755  y = x;
2756  if (bctype == ZERO)
2757  {
2758  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2759  {
2760  y[ess_tdof_list[i]] = 0.0;
2761  }
2762  }
2763  else
2764  {
2765  if (bctype != CONSTANT && eval_t != eval_t_cached)
2766  {
2767  Eval(eval_t,eval_g);
2768  eval_t_cached = eval_t;
2769  }
2770  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2771  {
2772  y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2773  }
2774  }
2775 }
2776 
2777 void PetscBCHandler::ApplyBC(Vector &x)
2778 {
2779  (*this).SetUp(x.Size());
2780  if (bctype == ZERO)
2781  {
2782  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2783  {
2784  x[ess_tdof_list[i]] = 0.0;
2785  }
2786  }
2787  else
2788  {
2789  if (bctype != CONSTANT && eval_t != eval_t_cached)
2790  {
2791  Eval(eval_t,eval_g);
2792  eval_t_cached = eval_t;
2793  }
2794  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2795  {
2796  x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2797  }
2798  }
2799 }
2800 
2801 void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2802 {
2803  (*this).SetUp(x.Size());
2804  if (bctype == ZERO)
2805  {
2806  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2807  {
2808  y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2809  }
2810  }
2811  else
2812  {
2813  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2814  {
2815  y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2816  }
2817  }
2818 }
2819 
2820 void PetscBCHandler::Zero(Vector &x)
2821 {
2822  (*this).SetUp(x.Size());
2823  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2824  {
2825  x[ess_tdof_list[i]] = 0.0;
2826  }
2827 }
2828 
2829 void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2830 {
2831  (*this).SetUp(x.Size());
2832  y = x;
2833  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2834  {
2835  y[ess_tdof_list[i]] = 0.0;
2836  }
2837 }
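// Usage sketch (editorial example, not part of petsc.cpp): time-dependent
// boundary values are provided by overriding Eval() in a derived handler,
// which is then attached with PetscSolver::SetBCHandler() (illustrative names;
// Eval signature as assumed from petsc.hpp):
//
//    class SineBC : public mfem::PetscBCHandler
//    {
//    public:
//       SineBC(mfem::Array<int> &ess_tdofs)
//          : PetscBCHandler(ess_tdofs, TIME_DEPENDENT) {}
//       void Eval(double t, mfem::Vector &g) override { g = std::sin(t); }
//    };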
2838 
2839 // PetscLinearSolver methods
2840 
2841 PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2842  bool wrapin)
2843  : PetscSolver(), Solver(), wrap(wrapin)
2844 {
2845  KSP ksp;
2846  ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2847  obj = (PetscObject)ksp;
2848  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2849  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2850 }
2851 
2852 PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2853  const std::string &prefix)
2854  : PetscSolver(), Solver(), wrap(false)
2855 {
2856  KSP ksp;
2857  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2858  obj = (PetscObject)ksp;
2859  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2860  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2861  SetOperator(A);
2862 }
2863 
2864 PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2865  const std::string &prefix)
2866  : PetscSolver(), Solver(), wrap(wrapin)
2867 {
2868  KSP ksp;
2869  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2870  obj = (PetscObject)ksp;
2871  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2872  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2873  SetOperator(A);
2874 }
2875 
2876 void PetscLinearSolver::SetOperator(const Operator &op)
2877 {
2878  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2879  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2880  (dynamic_cast<const PetscParMatrix *>(&op));
2881  const Operator *oA = dynamic_cast<const Operator *>(&op);
2882 
2883  // update base classes: Operator, Solver, PetscLinearSolver
2884  bool delete_pA = false;
2885  if (!pA)
2886  {
2887  if (hA)
2888  {
2889  // Create MATSHELL object or convert into a format suitable to construct preconditioners
2890  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2891  delete_pA = true;
2892  }
2893  else if (oA) // fallback to general operator
2894  {
2895  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2896  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2897  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2898  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2899  delete_pA = true;
2900  }
2901  }
2902  MFEM_VERIFY(pA, "Unsupported operation!");
2903 
2904  // Set operators into PETSc KSP
2905  KSP ksp = (KSP)obj;
2906  Mat A = pA->A;
2907  if (operatorset)
2908  {
2909  Mat C;
2910  PetscInt nheight,nwidth,oheight,owidth;
2911 
2912  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2913  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2914  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2915  if (nheight != oheight || nwidth != owidth)
2916  {
2917  // reinit without destroying the KSP
2918  // communicator remains the same
2919  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2920  delete X;
2921  delete B;
2922  X = B = NULL;
2923  }
2924  }
2925  ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2926 
2927  // Update PetscSolver
2928  operatorset = true;
2929 
2930  // Update the Operator fields.
2931  height = pA->Height();
2932  width = pA->Width();
2933 
2934  if (delete_pA) { delete pA; }
2935 }
2936 
2937 void PetscLinearSolver::SetOperators(const Operator &op, const Operator &pop)
2938 {
2939  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2940  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2941  (dynamic_cast<const PetscParMatrix *>(&op));
2942  const Operator *oA = dynamic_cast<const Operator *>(&op);
2943 
2944  PetscParMatrix *ppA = const_cast<PetscParMatrix *>
2945  (dynamic_cast<const PetscParMatrix *>(&pop));
2946  const Operator *poA = dynamic_cast<const Operator *>(&pop);
2947 
2948  // Convert Operator for linear system
2949  bool delete_pA = false;
2950  if (!pA)
2951  {
2952  if (hA)
2953  {
2954  // Create MATSHELL object or convert into a format suitable to construct preconditioners
2955  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2956  delete_pA = true;
2957  }
2958  else if (oA) // fallback to general operator
2959  {
2960  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2961  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2962  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2963  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2964  delete_pA = true;
2965  }
2966  }
2967  MFEM_VERIFY(pA, "Unsupported operation!");
2968 
2969  // Convert Operator to be preconditioned
2970  bool delete_ppA = false;
2971  if (!ppA)
2972  {
2973  if (oA == poA && !wrap) // Same operator, already converted
2974  {
2975  ppA = pA;
2976  }
2977  else
2978  {
2979  ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
2980  delete_ppA = true;
2981  }
2982  }
2983  MFEM_VERIFY(ppA, "Unsupported operation!");
2984 
2985  // Set operators into PETSc KSP
2986  KSP ksp = (KSP)obj;
2987  Mat A = pA->A;
2988  Mat P = ppA->A;
2989  if (operatorset)
2990  {
2991  Mat C;
2992  PetscInt nheight,nwidth,oheight,owidth;
2993 
2994  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2995  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2996  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2997  if (nheight != oheight || nwidth != owidth)
2998  {
2999  // reinit without destroying the KSP
3000  // communicator remains the same
3001  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3002  delete X;
3003  delete B;
3004  X = B = NULL;
3005  wrap = false;
3006  }
3007  }
3008  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3009 
3010  // Update PetscSolver
3011  operatorset = true;
3012 
3013  // Update the Operator fields.
3014  height = pA->Height();
3015  width = pA->Width();
3016 
3017  if (delete_pA) { delete pA; }
3018  if (delete_ppA) { delete ppA; }
3019 }
3020 
3021 void PetscLinearSolver::SetPreconditioner(Solver &precond)
3022 {
3023  KSP ksp = (KSP)obj;
3024 
3025  // Preserve Amat if already set
3026  Mat A = NULL;
3027  PetscBool amat;
3028  ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3029  if (amat)
3030  {
3031  ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3032  ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
3033  }
3034  PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
3035  if (ppc)
3036  {
3037  ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3038  }
3039  else
3040  {
3041  // wrap the Solver action
3042  // Solver is assumed to be already set up
3043  // ownership of precond is not transferred,
3044  // consistent with MFEM's other linear solvers
3045  PC pc;
3046  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3047  ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
3048  }
3049  if (A)
3050  {
3051  Mat P;
3052 
3053  ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3054  ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
3055  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3056  ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3057  ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3058  }
3059 }
3060 
3061 void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
3062 {
3063  KSP ksp = (KSP)obj;
3064 
3065  if (!B || !X)
3066  {
3067  Mat pA = NULL;
3068  ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
3069  if (!B)
3070  {
3071  PetscParMatrix A = PetscParMatrix(pA, true);
3072  B = new PetscParVector(A, true, false);
3073  }
3074  if (!X)
3075  {
3076  PetscParMatrix A = PetscParMatrix(pA, true);
3077  X = new PetscParVector(A, false, false);
3078  }
3079  }
3080  B->PlaceMemory(b.GetMemory());
3081  X->PlaceMemory(x.GetMemory(),iterative_mode);
3082 
3083  Customize();
3084 
3085  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
3086  PCHKERRQ(ksp, ierr);
3087 
3088  // Solve the system.
3089  if (trans)
3090  {
3091  ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3092  }
3093  else
3094  {
3095  ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3096  }
3097  B->ResetMemory();
3098  X->ResetMemory();
3099 }
3100 
3101 void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
3102 {
3103  (*this).MultKernel(b,x,false);
3104 }
3105 
3106 void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
3107 {
3108  (*this).MultKernel(b,x,true);
3109 }
3110 
3111 PetscLinearSolver::~PetscLinearSolver()
3112 {
3113  MPI_Comm comm;
3114  KSP ksp = (KSP)obj;
3115  ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3116  ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3117 }
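// Usage sketch (editorial example, not part of petsc.cpp): a minimal
// options-driven linear solve with this wrapper (A, b, x assumed to come from
// an assembled parallel linear system):
//
//    mfem::PetscLinearSolver solver(MPI_COMM_WORLD, "my_"); // -my_ksp_* options
//    solver.SetOperator(A);      // HypreParMatrix or PetscParMatrix
//    solver.SetRelTol(1e-8);
//    solver.SetMaxIter(200);
//    solver.SetPrintLevel(1);
//    solver.Mult(b, x);
//
// Any PETSc option with the given prefix, e.g. -my_ksp_type gmres, is applied
// when Customize() runs inside Mult().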
3118 
3119 // PetscPCGSolver methods
3120 
3121 PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix)
3122  : PetscLinearSolver(comm,prefix)
3123 {
3124  KSP ksp = (KSP)obj;
3125  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3126  // this is to obtain a textbook PCG
3127  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3128 }
3129 
3130 PetscPCGSolver::PetscPCGSolver(PetscParMatrix& A, const std::string &prefix)
3131  : PetscLinearSolver(A,prefix)
3132 {
3133  KSP ksp = (KSP)obj;
3134  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3135  // this is to obtain a textbook PCG
3136  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3137 }
3138 
3139 PetscPCGSolver::PetscPCGSolver(HypreParMatrix &A, bool wrap,
3140  const std::string &prefix)
3141  : PetscLinearSolver(A,wrap,prefix)
3142 {
3143  KSP ksp = (KSP)obj;
3144  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3145  // this is to obtain a textbook PCG
3146  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3147 }
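// Usage sketch (editorial example, not part of petsc.cpp): the PCG wrapper can
// be combined with any mfem::Solver as preconditioner via SetPreconditioner();
// BoomerAMG is used here purely as an illustration:
//
//    mfem::PetscPCGSolver pcg(A);     // A: PetscParMatrix
//    mfem::HypreBoomerAMG amg(hA);    // hA: HypreParMatrix form of the same operator
//    pcg.SetPreconditioner(amg);
//    pcg.SetTol(1e-10);
//    pcg.Mult(b, x);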
3148 
3149 // PetscPreconditioner methods
3150 
3151 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
3152  const std::string &prefix)
3153  : PetscSolver(), Solver()
3154 {
3155  PC pc;
3156  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3157  obj = (PetscObject)pc;
3158  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3159  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3160 }
3161 
3162 PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
3163  const string &prefix)
3164  : PetscSolver(), Solver()
3165 {
3166  PC pc;
3167  ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
3168  obj = (PetscObject)pc;
3169  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3170  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3171  SetOperator(A);
3172 }
3173 
3174 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
3175  const string &prefix)
3176  : PetscSolver(), Solver()
3177 {
3178  PC pc;
3179  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3180  obj = (PetscObject)pc;
3181  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3182  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3183  SetOperator(op);
3184 }
3185 
3186 void PetscPreconditioner::SetOperator(const Operator &op)
3187 {
3188  bool delete_pA = false;
3189  PetscParMatrix *pA = const_cast<PetscParMatrix *>
3190  (dynamic_cast<const PetscParMatrix *>(&op));
3191 
3192  if (!pA)
3193  {
3194  const Operator *cop = dynamic_cast<const Operator *>(&op);
3195  pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
3196  delete_pA = true;
3197  }
3198 
3199  // Set operators into PETSc PC
3200  PC pc = (PC)obj;
3201  Mat A = pA->A;
3202  if (operatorset)
3203  {
3204  Mat C;
3205  PetscInt nheight,nwidth,oheight,owidth;
3206 
3207  ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3208  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3209  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3210  if (nheight != oheight || nwidth != owidth)
3211  {
3212  // reinit without destroying the PC
3213  // communicator remains the same
3214  ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3215  delete X;
3216  delete B;
3217  X = B = NULL;
3218  }
3219  }
3220  ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
3221 
3222  // Update PetscSolver
3223  operatorset = true;
3224 
3225  // Update the Operator fields.
3226  height = pA->Height();
3227  width = pA->Width();
3228 
3229  if (delete_pA) { delete pA; };
3230 }
3231 
3232 void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
3233  bool trans) const
3234 {
3235  MFEM_VERIFY(!iterative_mode,
3236  "Iterative mode not supported for PetscPreconditioner");
3237  PC pc = (PC)obj;
3238 
3239  if (!B || !X)
3240  {
3241  Mat pA = NULL;
3242  ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
3243  if (!B)
3244  {
3245  PetscParMatrix A(pA, true);
3246  B = new PetscParVector(A, true, false);
3247  }
3248  if (!X)
3249  {
3250  PetscParMatrix A(pA, true);
3251  X = new PetscParVector(A, false, false);
3252  }
3253  }
3254  B->PlaceMemory(b.GetMemory());
3255  X->PlaceMemory(x.GetMemory());
3256 
3257  Customize();
3258 
3259  // Apply the preconditioner.
3260  if (trans)
3261  {
3262  ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3263  }
3264  else
3265  {
3266  ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3267  }
3268  B->ResetMemory();
3269  X->ResetMemory();
3270 }
3271 
3272 void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
3273 {
3274  (*this).MultKernel(b,x,false);
3275 }
3276 
3277 void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
3278 {
3279  (*this).MultKernel(b,x,true);
3280 }
3281 
3282 PetscPreconditioner::~PetscPreconditioner()
3283 {
3284  MPI_Comm comm;
3285  PC pc = (PC)obj;
3286  ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3287  ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3288 }
3289 
3290 // PetscBDDCSolver methods
3291 
3292 // Coordinates sampling function
3293 static void func_coords(const Vector &x, Vector &y)
3294 {
3295  y = x;
3296 }
3297 
3298 void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
3299 {
3300  MPI_Comm comm = PetscObjectComm(obj);
3301 
3302  // get PETSc object
3303  PC pc = (PC)obj;
3304  Mat pA;
3305  ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3306 
3307  // matrix type should be of type MATIS
3308  PetscBool ismatis;
3309  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
3310  PCHKERRQ(pA,ierr);
3311  MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
3312 
3313  // Check options
3314  ParFiniteElementSpace *fespace = opts.fespace;
3315  if (opts.netflux && !fespace)
3316  {
3317  MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3318  }
3319 
3320  // Attach default near-null space to local matrices
3321  {
3322  MatNullSpace nnsp;
3323  Mat lA;
3324  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3325  ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
3326  &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3327  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3328  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3329  }
3330 
3331  // set PETSc PC type to PCBDDC
3332  ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
3333 
3334  PetscInt rst,nl;
3335  ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3336  ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3337 
3338  // index sets for fields splitting and coordinates for nodal spaces
3339  IS *fields = NULL;
3340  PetscInt nf = 0;
3341  PetscInt sdim = 0;
3342  PetscReal *coords = NULL;
3343  if (fespace)
3344  {
3345  int vdim = fespace->GetVDim();
3346 
3347  // Ideally, the block size should be set at matrix creation,
3348  // but the MFEM assembly does not allow doing so
3349  if (fespace->GetOrdering() == Ordering::byVDIM)
3350  {
3351  Mat lA;
3352  ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3353  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3354  ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3355  }
3356 
3357  // fields
3358  if (vdim > 1)
3359  {
3360  PetscInt st = rst, bs, inc, nlf;
3361  nf = vdim;
3362  nlf = nl/nf;
3363  ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3364  if (fespace->GetOrdering() == Ordering::byVDIM)
3365  {
3366  inc = 1;
3367  bs = vdim;
3368  }
3369  else
3370  {
3371  inc = nlf;
3372  bs = 1;
3373  }
3374  for (PetscInt i = 0; i < nf; i++)
3375  {
3376  ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3377  st += inc;
3378  }
3379  }
3380 
3381  // coordinates
3382  const FiniteElementCollection *fec = fespace->FEColl();
3383  bool h1space = dynamic_cast<const H1_FECollection*>(fec);
3384  if (h1space)
3385  {
3386  ParFiniteElementSpace *fespace_coords = fespace;
3387 
3388  sdim = fespace->GetParMesh()->SpaceDimension();
3389  if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
3390  {
3391  fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3392  Ordering::byVDIM);
3393  }
3394  VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3395  ParGridFunction gf_coords(fespace_coords);
3396  gf_coords.ProjectCoefficient(coeff_coords);
3397  HypreParVector *hvec_coords = gf_coords.ParallelProject();
3398  PetscScalar *data_coords = (PetscScalar*)mfem::Read(hvec_coords->GetMemory(),
3399  hvec_coords->Size(),false);
3400 
3401  // likely elasticity -> we attach rigid-body modes as near-null space information to the local matrices
3402  // and to the global matrix
3403  if (vdim == sdim)
3404  {
3405  MatNullSpace nnsp;
3406  Mat lA;
3407  Vec pvec_coords,lvec_coords;
3408  ISLocalToGlobalMapping l2g;
3409  PetscSF sf;
3410  PetscLayout rmap;
3411  const PetscInt *gidxs;
3412  PetscInt nleaves;
3413 
3414  ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3415  hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3416  CCHKERRQ(comm,ierr);
3417  ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3418  if (!nnsp)
3419  {
3420  ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3421  CCHKERRQ(comm,ierr);
3422  ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3423  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3424  }
3425  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3426  ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3427  ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3428  ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3429  ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3430  ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3431  ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3432  ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3433  ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3434  CCHKERRQ(comm,ierr);
3435  ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3436  {
3437  const PetscScalar *garray;
3438  PetscScalar *larray;
3439 
3440  ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3441  ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3442 #if PETSC_VERSION_LT(3,15,0)
3443  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3444  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3445 #else
3446  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3447  CCHKERRQ(comm,ierr);
3448  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3449  CCHKERRQ(comm,ierr);
3450 #endif
3451  ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3452  ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3453  }
3454  ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3455  ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3456  CCHKERRQ(PETSC_COMM_SELF,ierr);
3457  ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3458  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3459  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3460  ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3461  }
3462 
3463  // each dof has an associated tuple of coordinates
3464  ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3465  if (nf > 0)
3466  {
3467  for (PetscInt i = 0; i < nf; i++)
3468  {
3469  const PetscInt *idxs;
3470  PetscInt nn;
3471 
3472  // It also handles the case of fespace not ordered by VDIM
3473  ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3474  ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3475  for (PetscInt j = 0; j < nn; j++)
3476  {
3477  PetscInt idx = idxs[j]-rst;
3478  for (PetscInt d = 0; d < sdim; d++)
3479  {
3480  coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3481  }
3482  }
3483  ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3484  }
3485  }
3486  else
3487  {
3488  for (PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3489  }
3490  if (fespace_coords != fespace)
3491  {
3492  delete fespace_coords;
3493  }
3494  delete hvec_coords;
3495  }
3496  }
3497 
3498  // index sets for boundary dofs specification (Essential = dir, Natural = neu)
3499  IS dir = NULL, neu = NULL;
3500 
3501  // Extract l2l matrices
3502  Array<Mat> *l2l = NULL;
3503  if (opts.ess_dof_local || opts.nat_dof_local)
3504  {
3505  PetscContainer c;
3506 
3507  ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
3508  MFEM_VERIFY(c,"Local-to-local PETSc container not present");
3509  ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
3510  }
3511 
3512  // check information about index sets (essential dofs, fields, etc.)
3513 #ifdef MFEM_DEBUG
3514  {
3515  // make sure ess/nat_dof have been collectively set
3516  PetscBool lpr = PETSC_FALSE,pr;
3517  if (opts.ess_dof) { lpr = PETSC_TRUE; }
3518  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3519  PCHKERRQ(pA,ierr);
3520  MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
3521  lpr = PETSC_FALSE;
3522  if (opts.nat_dof) { lpr = PETSC_TRUE; }
3523  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3524  PCHKERRQ(pA,ierr);
3525  MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
3526  // make sure fields have been collectively set
3527  PetscInt ms[2],Ms[2];
3528  ms[0] = -nf; ms[1] = nf;
3529  ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3530  PCHKERRQ(pA,ierr);
3531  MFEM_VERIFY(-Ms[0] == Ms[1],
3532  "number of fields should be the same across processes");
3533  }
3534 #endif
3535 
3536  // boundary sets
3537  if (opts.ess_dof)
3538  {
3539  PetscInt st = opts.ess_dof_local ? 0 : rst;
3540  if (!opts.ess_dof_local)
3541  {
3542  // need to compute the boundary dofs in global ordering
3543  ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
3544  CCHKERRQ(comm,ierr);
3545  ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3546  }
3547  else
3548  {
3549  // need to compute a list for the marked boundary dofs in local ordering
3550  ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3551  CCHKERRQ(comm,ierr);
3552  ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3553  }
3554  }
3555  if (opts.nat_dof)
3556  {
3557  PetscInt st = opts.nat_dof_local ? 0 : rst;
3558  if (!opts.nat_dof_local)
3559  {
3560  // need to compute the boundary dofs in global ordering
3561  ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3562  CCHKERRQ(comm,ierr);
3563  ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3564  }
3565  else
3566  {
3567  // need to compute a list for the marked boundary dofs in local ordering
3568  ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3569  CCHKERRQ(comm,ierr);
3570  ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3571  }
3572  }
3573 
3574  // field splitting
3575  if (fields)
3576  {
3577  ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3578  }
3579  for (PetscInt i = 0; i < nf; i++)
3580  {
3581  ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3582  }
3583  ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3584 
3585  // coordinates
3586  if (coords)
3587  {
3588  ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3589  }
3590  ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3591 
3592  // code for block size is disabled since we cannot change the matrix
3593  // block size after it has been set up
3594  // int bs = 1;
3595 
3596  // Customize using the finite element space (if any)
3597  if (fespace)
3598  {
3599  const FiniteElementCollection *fec = fespace->FEColl();
3600  bool edgespace, rtspace, h1space;
3601  bool needint = opts.netflux;
3602  bool tracespace, rt_tracespace, edge_tracespace;
3603  int vdim, dim, p;
3604  PetscBool B_is_Trans = PETSC_FALSE;
3605 
3606  ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3607  dim = pmesh->Dimension();
3608  vdim = fespace->GetVDim();
3609  h1space = dynamic_cast<const H1_FECollection*>(fec);
3610  rtspace = dynamic_cast<const RT_FECollection*>(fec);
3611  edgespace = dynamic_cast<const ND_FECollection*>(fec);
3612  edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3613  rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3614  tracespace = edge_tracespace || rt_tracespace;
3615 
3616  p = 1;
3617  if (fespace->GetNE() > 0)
3618  {
3619  if (!tracespace)
3620  {
3621  p = fespace->GetElementOrder(0);
3622  }
3623  else
3624  {
3625  p = fespace->GetFaceOrder(0);
3626  if (dim == 2) { p++; }
3627  }
3628  }
3629 
3630  if (edgespace) // H(curl)
3631  {
3632  if (dim == 2)
3633  {
3634  needint = true;
3635  if (tracespace)
3636  {
3637  MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3638  " not using auxiliary quadrature");
3639  needint = false;
3640  }
3641  }
3642  else
3643  {
3644  FiniteElementCollection *vfec;
3645  if (tracespace)
3646  {
3647  vfec = new H1_Trace_FECollection(p,dim);
3648  }
3649  else
3650  {
3651  vfec = new H1_FECollection(p,dim);
3652  }
3653  ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3654  ParDiscreteLinearOperator *grad;
3655  grad = new ParDiscreteLinearOperator(vfespace,fespace);
3656  if (tracespace)
3657  {
3658  grad->AddTraceFaceInterpolator(new GradientInterpolator);
3659  }
3660  else
3661  {
3662  grad->AddDomainInterpolator(new GradientInterpolator);
3663  }
3664  grad->Assemble();
3665  grad->Finalize();
3666  HypreParMatrix *hG = grad->ParallelAssemble();
3667  PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3668  delete hG;
3669  delete grad;
3670 
3671  PetscBool conforming = PETSC_TRUE;
3672  if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3673  ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3674  PCHKERRQ(pc,ierr);
3675  delete vfec;
3676  delete vfespace;
3677  delete G;
3678  }
3679  }
3680  else if (rtspace) // H(div)
3681  {
3682  needint = true;
3683  if (tracespace)
3684  {
3685  MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3686  " auxiliary quadrature");
3687  needint = false;
3688  }
3689  }
3690  else if (h1space) // H(grad), only for the vector case
3691  {
3692  if (vdim != dim) { needint = false; }
3693  }
3694 
3695  PetscParMatrix *B = NULL;
3696  if (needint)
3697  {
3698  // Generate a bilinear form in unassembled format, used to
3699  // compute the net flux across subdomain boundaries for H(div) and
3700  // Elasticity/Stokes, and the line integral \int u x n of 2D H(curl) fields
3701  FiniteElementCollection *auxcoll;
3702  if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3703  else
3704  {
3705  if (h1space)
3706  {
3707  auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3708  }
3709  else
3710  {
3711  auxcoll = new L2_FECollection(p,dim);
3712  }
3713  }
3714  ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3715  ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3716 
3717  if (edgespace)
3718  {
3719  if (tracespace)
3720  {
3721  b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3722  }
3723  else
3724  {
3725  b->AddDomainIntegrator(new VectorFECurlIntegrator);
3726  }
3727  }
3728  else if (rtspace)
3729  {
3730  if (tracespace)
3731  {
3732  b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3733  }
3734  else
3735  {
3736  b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3737  }
3738  }
3739  else
3740  {
3741  b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3742  }
3743  b->Assemble();
3744  b->Finalize();
3745  OperatorHandle Bh(Operator::PETSC_MATIS);
3746  b->ParallelAssemble(Bh);
3747  Bh.Get(B);
3748  Bh.SetOperatorOwner(false);
3749 
3750  if (dir) // if essential dofs are present, we need to zero the columns
3751  {
3752  Mat pB = *B;
3753  ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3754  if (!opts.ess_dof_local)
3755  {
3756  ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3757  }
3758  else
3759  {
3760  ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3761  }
3762  B_is_Trans = PETSC_TRUE;
3763  }
3764  delete b;
3765  delete pspace;
3766  delete auxcoll;
3767  }
3768 
3769  if (B)
3770  {
3771  ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3772  }
3773  delete B;
3774  }
3775  ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3776  ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3777 }
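// Usage sketch (editorial example, not part of petsc.cpp): BDDC requires the
// operator in unassembled (MATIS) format; with a ParBilinearForm `a` this is
// typically obtained as sketched below (ess_tdof_list and fespace assumed to
// exist; the SetSpace() call is optional and only enables the fields and
// coordinates customization above):
//
//    a.SetOperatorType(mfem::Operator::PETSC_MATIS);
//    mfem::PetscParMatrix A;
//    a.FormSystemMatrix(ess_tdof_list, A);
//
//    mfem::PetscBDDCSolverParams opts;
//    opts.SetSpace(&fespace);
//    mfem::PetscBDDCSolver bddc(A, opts);
//    pcg.SetPreconditioner(bddc);   // e.g. inside a PetscPCGSolver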
3778 
3779 PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3780  const PetscBDDCSolverParams &opts,
3781  const std::string &prefix)
3782  : PetscPreconditioner(A,prefix)
3783 {
3784  BDDCSolverConstructor(opts);
3785  Customize();
3786 }
3787 
3788 PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3789  const PetscBDDCSolverParams &opts,
3790  const std::string &prefix)
3791  : PetscPreconditioner(comm,op,prefix)
3792 {
3793  BDDCSolverConstructor(opts);
3794  Customize();
3795 }
3796 
3797 PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3798  const string &prefix)
3799  : PetscPreconditioner(comm,op,prefix)
3800 {
3801  PC pc = (PC)obj;
3802  ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3803 
3804  Mat pA;
3805  ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3806 
3807  // Check if pA is of type MATNEST
3808  PetscBool isnest;
3809  ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3810 
3811  PetscInt nr = 0;
3812  IS *isrow = NULL;
3813  if (isnest) // we know the fields
3814  {
3815  ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3816  ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3817  ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3818  }
3819 
3820  // We need to customize here, before setting the index sets.
3821  // This is because PCFieldSplitSetType customizes the function
3822  // pointers. SubSolver options will be processed during PCApply
3823  Customize();
3824 
3825  for (PetscInt i=0; i<nr; i++)
3826  {
3827  ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3828  }
3829  ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3830 }
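// Usage sketch (editorial example, not part of petsc.cpp): the field-split
// preconditioner picks up the blocks automatically when the operator is a
// BlockOperator converted to MATNEST (block_op and its offsets assumed set up):
//
//    mfem::PetscParMatrix A(MPI_COMM_WORLD, &block_op,
//                           mfem::Operator::PETSC_MATNEST);
//    mfem::PetscFieldSplitSolver fs(MPI_COMM_WORLD, A, "fs_");
//    // split behavior chosen at run time, e.g. -fs_pc_fieldsplit_type schur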
3831 
3832 PetscH2Solver::PetscH2Solver(Operator &op,
3833  ParFiniteElementSpace *fes,
3834  const std::string &prefix)
3835  : PetscPreconditioner(fes->GetParMesh()->GetComm(),prefix)
3836 {
3838  MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
3839  MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE);
3840  SetOperator(A);
3841  H2SolverConstructor(fes);
3842  Customize();
3843 }
3844 
3845 void PetscH2Solver::H2SolverConstructor(ParFiniteElementSpace *fes)
3846 {
3847 #if defined(PETSC_HAVE_H2OPUS)
3848  int sdim = fes->GetParMesh()->SpaceDimension();
3849  int vdim = fes->GetVDim();
3850  const FiniteElementCollection *fec = fes->FEColl();
3851  ParFiniteElementSpace *fes_coords = NULL;
3852 
3853  if (vdim != sdim || fes->GetOrdering() != Ordering::byVDIM)
3854  {
3855  fes_coords = new ParFiniteElementSpace(fes->GetParMesh(),fec,sdim,
3856  Ordering::byVDIM);
3857  fes = fes_coords;
3858  }
3859  VectorFunctionCoefficient ccoords(sdim, func_coords);
3860 
3861  ParGridFunction coords(fes);
3862  coords.ProjectCoefficient(ccoords);
3863  Vector c(fes->GetTrueVSize());
3864  coords.ParallelProject(c);
3865  delete fes_coords;
3866  PCSetType(*this,PCH2OPUS);
3867  PCSetCoordinates(*this,sdim,c.Size()/sdim,(PetscReal*)mfem::Read(c.GetMemory(),
3868  c.Size(),false));
3869  PCSetFromOptions(*this);
3870 #else
3871  MFEM_ABORT("Need PETSc configured with --download-h2opus");
3872 #endif
3873 }
3874 
3875 // PetscNonlinearSolver methods
3876 
3877 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3878  const std::string &prefix)
3879  : PetscSolver(), Solver()
3880 {
3881  // Create the actual solver object
3882  SNES snes;
3883  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3884  obj = (PetscObject)snes;
3885  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3886  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3887 
3888  // Allocate private solver context
3889  CreatePrivateContext();
3890 }
3891 
3892 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
3893  const std::string &prefix)
3894  : PetscSolver(), Solver()
3895 {
3896  // Create the actual solver object
3897  SNES snes;
3898  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3899  obj = (PetscObject)snes;
3900  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3901  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3902 
3903  // Allocate private solver context
3904  CreatePrivateContext();
3905 
3906  SetOperator(op);
3907 }
3908 
3909 PetscNonlinearSolver::~PetscNonlinearSolver()
3910 {
3911  MPI_Comm comm;
3912  SNES snes = (SNES)obj;
3913  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
3914  ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3915 }
3916 
3917 void PetscNonlinearSolver::SetOperator(const Operator &op)
3918 {
3919  SNES snes = (SNES)obj;
3920 
3921  if (operatorset)
3922  {
3923  PetscBool ls,gs;
3924  void *fctx,*jctx;
3925 
3926  ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3927  PCHKERRQ(snes, ierr);
3928  ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3929  PCHKERRQ(snes, ierr);
3930 
3931  ls = (PetscBool)(height == op.Height() && width == op.Width() &&
3932  (void*)&op == fctx &&
3933  (void*)&op == jctx);
3934  ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3935  PetscObjectComm((PetscObject)snes));
3936  PCHKERRQ(snes,ierr);
3937  if (!gs)
3938  {
3939  ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3940  delete X;
3941  delete B;
3942  X = B = NULL;
3943  }
3944  }
3945  else
3946  {
3947  /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
3948  yet set. We default to backtracking */
3949  SNESLineSearch ls;
3950  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3951  ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3952  }
3953 
3954  // If we do not pass matrices in, the default matrix type for DMShell is
3955  // MATDENSE in PETSc 3.15, which may cause issues.
3956  Mat dummy;
3957  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)snes),op.Height(),
3958  op.Height(),&dummy);
3959 
3960  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3961  snes_ctx->op = (Operator*)&op;
3962  ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
3963  PCHKERRQ(snes, ierr);
3964  ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
3965  (void *)snes_ctx);
3966  PCHKERRQ(snes, ierr);
3967 
3968  ierr = MatDestroy(&dummy);
3969  PCHKERRQ(snes, ierr);
3970 
3971  // Update PetscSolver
3972  operatorset = true;
3973 
3974  // Update the Operator fields.
3975  height = op.Height();
3976  width = op.Width();
3977 }
3978 
3979 void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
3980 {
3981  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3982  snes_ctx->jacType = jacType;
3983 }
3984 
3985 void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *,const Vector&,
3986  double*))
3987 {
3988  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3989  snes_ctx->objective = objfn;
3990 
3991  SNES snes = (SNES)obj;
3992  ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
3993  PCHKERRQ(snes, ierr);
3994 }
3995 
3996 void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *,const Vector&,
3997  Vector&, Vector&,
3998  bool&, bool&))
3999 {
4000  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4001  snes_ctx->postcheck = post;
4002 
4003  SNES snes = (SNES)obj;
4004  SNESLineSearch ls;
4005  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4006  ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
4007  PCHKERRQ(ls, ierr);
4008 }
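
// Illustrative sketch (not part of the original petsc.cpp): a line-search
// post-check callback with the signature expected by SetPostCheck() above.
// It clips the tentative solution W to be non-negative; the function name and
// the clipping rule are hypothetical.
static void example_postcheck(Operator *op, const Vector &X, Vector &Y,
                              Vector &W, bool &changed_y, bool &changed_w)
{
   changed_y = false;
   changed_w = false;
   for (int i = 0; i < W.Size(); i++)
   {
      if (W(i) < 0.0) { W(i) = 0.0; changed_w = true; }
   }
}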
4009 
4010 void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *,int,
4011  const Vector&,
4012  const Vector&,
4013  const Vector&,
4014  const Vector&))
4015 {
4016  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4017  snes_ctx->update = update;
4018 
4019  SNES snes = (SNES)obj;
4020  ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4021 }
4022 
4023 void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
4024 {
4025  SNES snes = (SNES)obj;
4026 
4027  bool b_nonempty = b.Size();
4028  if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
4029  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
4030  X->PlaceMemory(x.GetMemory(),iterative_mode);
4031  if (b_nonempty) { B->PlaceMemory(b.GetMemory()); }
4032  else { *B = 0.0; }
4033 
4034  Customize();
4035 
4036  if (!iterative_mode) { *X = 0.; }
4037 
4038  // Solve the system.
4039  ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4040  X->ResetMemory();
4041  if (b_nonempty) { B->ResetMemory(); }
4042 }
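
// Illustrative sketch (not part of the original petsc.cpp): typical driver
// code for the nonlinear solver above. The operator passed in is assumed to
// implement Operator::Mult() and Operator::GetGradient(); the names below
// (the "newton_" prefix, example_newton_solve) are hypothetical.
static void example_newton_solve(Operator &op)
{
   PetscNonlinearSolver newton(MPI_COMM_WORLD, op, "newton_");
   newton.SetJacobianType(Operator::PETSC_MATAIJ);
   newton.iterative_mode = true;   // use the incoming x as the initial guess
   Vector b, x(op.Width());        // an empty b selects a zero right-hand side
   x = 0.0;
   newton.Mult(b, x);              // calls SNESSolve() through the code above
}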
4043 
4044 // PetscODESolver methods
4045 
4046 PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
4047  : PetscSolver(), ODESolver()
4048 {
4049  // Create the actual solver object
4050  TS ts;
4051  ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4052  obj = (PetscObject)ts;
4053  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
4054  ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4055 
4056  // Allocate private solver context
4057  CreatePrivateContext();
4058 
4059  // Default options, to comply with the current interface to ODESolver.
4060  ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4061  PCHKERRQ(ts,ierr);
4062  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4063  PCHKERRQ(ts,ierr);
4064  TSAdapt tsad;
4065  ierr = TSGetAdapt(ts,&tsad);
4066  PCHKERRQ(ts,ierr);
4067  ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4068  PCHKERRQ(ts,ierr);
4069 }
4070 
4071 PetscODESolver::~PetscODESolver()
4072 {
4073  MPI_Comm comm;
4074  TS ts = (TS)obj;
4075  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
4076  ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4077 }
4078 
4079 void PetscODESolver::Init(TimeDependentOperator &f_,
4080  enum PetscODESolver::Type type)
4081 {
4082  TS ts = (TS)obj;
4083 
4084  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4085  if (operatorset)
4086  {
4087  ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4088  delete X;
4089  X = NULL;
4090  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4091  ts_ctx->cached_ijacstate = -1;
4092  ts_ctx->cached_rhsjacstate = -1;
4093  ts_ctx->cached_splits_xstate = -1;
4094  ts_ctx->cached_splits_xdotstate = -1;
4095  }
4096  f = &f_;
4097 
4098  // Set functions in TS
4099  ts_ctx->op = &f_;
4100  if (f_.isImplicit())
4101  {
4102  Mat dummy;
4103  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4104  f_.Height(),&dummy);
4105  PCHKERRQ(ts, ierr);
4106  ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
4107  PCHKERRQ(ts, ierr);
4108  ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (void *)ts_ctx);
4109  PCHKERRQ(ts, ierr);
4110  ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4111  PCHKERRQ(ts, ierr);
4112  ierr = MatDestroy(&dummy);
4113  PCHKERRQ(ts, ierr);
4114  }
4115  if (!f_.isHomogeneous())
4116  {
4117  Mat dummy = NULL;
4118  if (!f_.isImplicit())
4119  {
4120  ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4121  PCHKERRQ(ts, ierr);
4122  }
4123  else
4124  {
4125  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4126  f_.Height(),&dummy);
4127  PCHKERRQ(ts, ierr);
4128  }
4129  ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
4130  PCHKERRQ(ts, ierr);
4131  ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4132  (void *)ts_ctx);
4133  PCHKERRQ(ts, ierr);
4134  ierr = MatDestroy(&dummy);
4135  PCHKERRQ(ts, ierr);
4136  }
4137  operatorset = true;
4138 
4139  SetType(type);
4140 
4141  // Set solution vector
4142  PetscParVector X(PetscObjectComm(obj),*f,false,true);
4143  ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4144 
4145  // Compose special purpose function for PDE-constrained optimization
4146  PetscBool use = PETSC_TRUE;
4147  ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
4148  if (use && f_.isImplicit())
4149  {
4150  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4151  __mfem_ts_computesplits);
4152  PCHKERRQ(ts,ierr);
4153  }
4154  else
4155  {
4156  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4157  NULL);
4158  PCHKERRQ(ts,ierr);
4159  }
4160 }
4161 
4162 void PetscODESolver::SetJacobianType(Operator::Type jacType)
4163 {
4164  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4165  ts_ctx->jacType = jacType;
4166 }
4167 
4168 PetscODESolver::Type PetscODESolver::GetType() const
4169 {
4170  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4171  return ts_ctx->type;
4172 }
4173 
4174 void PetscODESolver::SetType(PetscODESolver::Type type)
4175 {
4176  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4177 
4178  TS ts = (TS)obj;
4179  ts_ctx->type = type;
4180  if (type == ODE_SOLVER_LINEAR)
4181  {
4182  ierr = TSSetProblemType(ts, TS_LINEAR);
4183  PCHKERRQ(ts, ierr);
4184  }
4185  else
4186  {
4187  ierr = TSSetProblemType(ts, TS_NONLINEAR);
4188  PCHKERRQ(ts, ierr);
4189  }
4190 }
4191 
4192 void PetscODESolver::Step(Vector &x, double &t, double &dt)
4193 {
4194  // Pass the parameters to PETSc.
4195  TS ts = (TS)obj;
4196  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4197  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4198 
4199  PetscInt i;
4200  ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4201 
4202  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4203  X->PlaceMemory(x.GetMemory(),true);
4204 
4205  Customize();
4206 
4207  // Monitor initial step
4208  if (!i)
4209  {
4210  ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4211  }
4212 
4213  // Take the step.
4214  ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4215  ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4216 
4217  // Return the current time and the time step actually used to the caller.
4218  // We cannot use TSGetTimeStep(), since it returns the next candidate step
4219  PetscReal pt;
4220  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4221  dt = pt - (PetscReal)t;
4222  t = pt;
4223 
4224  // Monitor current step
4225  ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4226 
4227  X->ResetMemory();
4228 }
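
// Illustrative sketch (not part of the original petsc.cpp): a basic time loop
// driving the Step() method above. The operator, vector, and prefix names are
// hypothetical; dt is overwritten with the step actually taken.
static void example_time_loop(TimeDependentOperator &heat_op, Vector &u,
                              double t_final)
{
   PetscODESolver ode(MPI_COMM_WORLD, "heat_");
   ode.Init(heat_op, PetscODESolver::ODE_SOLVER_GENERAL);
   double t = 0.0, dt = 1e-2;
   while (t < t_final)
   {
      ode.Step(u, t, dt);
   }
}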
4229 
4230 void PetscODESolver::Run(Vector &x, double &t, double &dt, double t_final)
4231 {
4232  // Give the parameters to PETSc.
4233  TS ts = (TS)obj;
4234  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4235  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4236  ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4237  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4238  PCHKERRQ(ts, ierr);
4239 
4240  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4241  X->PlaceMemory(x.GetMemory(),true);
4242 
4243  Customize();
4244 
4245  // Reset Jacobian caching since the user may have changed
4246  // the parameters of the solver
4247  // We don't do this in the Step method because two consecutive
4248  // Step() calls are done with the same operator
4249  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4250  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4251  ts_ctx->cached_ijacstate = -1;
4252  ts_ctx->cached_rhsjacstate = -1;
4253  ts_ctx->cached_splits_xstate = -1;
4254  ts_ctx->cached_splits_xdotstate = -1;
4255 
4256  // Take the steps.
4257  ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4258  X->ResetMemory();
4259 
4260  // Return the final time and time step to the caller.
4261  PetscReal pt;
4262  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4263  t = pt;
4264  ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4265  dt = pt;
4266 }
4267 
4268 } // namespace mfem
4269 
4270 #include "petsc/private/petscimpl.h"
4271 #include "petsc/private/matimpl.h"
4272 
4273 // auxiliary functions
4274 static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
4275  void* ctx)
4276 {
4277  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4278 
4279  PetscFunctionBeginUser;
4280  if (!monctx)
4281  {
4282  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4283  }
4284  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4285  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4286  monctx->monitor);
4287 
4288  if (user_monitor->mon_sol)
4289  {
4290  mfem::PetscParVector V(x,true);
4291  user_monitor->MonitorSolution(it,t,V);
4292  }
4293  user_monitor->MonitorSolver(solver);
4294  PetscFunctionReturn(0);
4295 }
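
// Illustrative sketch (not part of the original petsc.cpp): a user monitor
// that prints the solution norm whenever the callback above fires. It assumes
// the mfem::PetscSolverMonitor interface used above (mon_sol/mon_res flags and
// virtual MonitorSolution/MonitorResidual methods); an instance is attached
// with PetscSolver::SetMonitor(). The class name is hypothetical.
class ExampleNormMonitor : public mfem::PetscSolverMonitor
{
public:
   ExampleNormMonitor() : mfem::PetscSolverMonitor(true,false) {}
   void MonitorSolution(PetscInt it, PetscReal t, const mfem::Vector &X) override
   {
      mfem::out << "step " << it << ", t = " << t
                << ", ||x|| = " << X.Norml2() << std::endl;
   }
};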
4296 
4297 static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
4298  Vec f,void *ctx)
4299 {
4300  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4301 
4302  PetscFunctionBeginUser;
4303  mfem::PetscParVector xx(x,true);
4304  mfem::PetscParVector yy(xp,true);
4305  mfem::PetscParVector ff(f,true);
4306 
4307  mfem::TimeDependentOperator *op = ts_ctx->op;
4308  op->SetTime(t);
4309 
4310  if (ts_ctx->bchandler)
4311  {
4312  // we evaluate the ImplicitMult method with the correct bc;
4313  // this means the time derivative for essential boundary
4314  // dofs is correctly set to zero
4315  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
4316  if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
4317  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4318  mfem::Vector* txx = ts_ctx->work;
4319  mfem::Vector* txp = ts_ctx->work2;
4320  bchandler->SetTime(t);
4321  bchandler->ApplyBC(xx,*txx);
4322  bchandler->ZeroBC(yy,*txp);
4323  op->ImplicitMult(*txx,*txp,ff);
4324  // and fix the residual (i.e. f_\partial\Omega = u - g(t))
4325  bchandler->FixResidualBC(xx,ff);
4326  }
4327  else
4328  {
4329  // use the ImplicitMult method of the class
4330  op->ImplicitMult(xx,yy,ff);
4331  }
4332  ff.UpdateVecFromFlags();
4333  PetscFunctionReturn(0);
4334 }
4335 
4336 static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
4337  void *ctx)
4338 {
4339  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4340 
4341  PetscFunctionBeginUser;
4342  if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
4343  mfem::PetscParVector xx(x,true);
4344  mfem::PetscParVector ff(f,true);
4345  mfem::TimeDependentOperator *top = ts_ctx->op;
4346  top->SetTime(t);
4347 
4348  // use the ExplicitMult method - compute the RHS function
4349  top->ExplicitMult(xx,ff);
4350 
4351  ff.UpdateVecFromFlags();
4352  PetscFunctionReturn(0);
4353 }
4354 
4355 static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
4356  Vec xp, PetscReal shift, Mat A, Mat P,
4357  void *ctx)
4358 {
4359  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4360  mfem::Vector *xx;
4361  PetscScalar *array;
4362  PetscReal eps = 0.001; /* 0.1% difference */
4363  PetscInt n;
4364  PetscObjectState state;
4365  PetscErrorCode ierr;
4366 
4367  PetscFunctionBeginUser;
4368  // Matrix-free case
4369  if (A && A != P)
4370  {
4371  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4372  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4373  }
4374 
4375  // avoid recomputing the Jacobian if we already did so;
4376  // the relative tolerance comparison should be fine given that
4377  // two consecutive shifts should have similar magnitude
4378  ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4379  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4380  std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4381  state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
4382 
4383  // update time
4384  mfem::TimeDependentOperator *op = ts_ctx->op;
4385  op->SetTime(t);
4386 
4387  // wrap Vecs with Vectors
4388  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4389  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4390  mfem::Vector yy(array,n);
4391  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4392  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4393  if (!ts_ctx->bchandler)
4394  {
4395  xx = new mfem::Vector(array,n);
4396  }
4397  else
4398  {
4399  // make sure we compute a Jacobian with the correct boundary values
4400  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4401  mfem::Vector txx(array,n);
4402  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4403  xx = ts_ctx->work;
4404  bchandler->SetTime(t);
4405  bchandler->ApplyBC(txx,*xx);
4406  }
4407  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4408 
4409  // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
4410  mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
4411  if (!ts_ctx->bchandler) { delete xx; }
4412  ts_ctx->cached_shift = shift;
4413 
4414  // Convert to the operator type requested if needed
4415  bool delete_pA = false;
4416  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4417  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4418  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4419  pA->GetType() != ts_ctx->jacType))
4420  {
4421  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4422  ts_ctx->jacType);
4423  delete_pA = true;
4424  }
4425 
4426  // Eliminate essential dofs
4427  if (ts_ctx->bchandler)
4428  {
4429  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4430  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4431  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4432  }
4433 
4434  // Get nonzerostate
4435  PetscObjectState nonzerostate;
4436  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4437 
4438  // Avoid an unneeded copy of the matrix by swapping its header
4439  Mat B;
4440  B = pA->ReleaseMat(false);
4441  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4442  if (delete_pA) { delete pA; }
4443 
4444  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4445  // preconditioner fails because MatCreateSubMatrix_Nest does not
4446  // actually return a matrix. Instead, for efficiency reasons,
4447  // it returns a reference to the submatrix. The second time it
4448  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4449  // aborts since the two submatrices are actually different.
4450  // We circumvent this issue by incrementing the nonzero state
4451  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4452  // This does not impact performance in the case of MATNEST
4453  PetscBool isnest;
4454  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4455  CHKERRQ(ierr);
4456  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4457 
4458  // Jacobian reuse
4459  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
4460  CHKERRQ(ierr);
4461 
4462  // Fool DM
4463  DM dm;
4464  MatType mtype;
4465  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4466  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4467  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4468  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4469  PetscFunctionReturn(0);
4470 }
4471 
4472 static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
4473  Mat Ax,Mat Jx,
4474  Mat Axp,Mat Jxp)
4475 {
4476  __mfem_ts_ctx* ts_ctx;
4477  mfem::Vector *xx;
4478  PetscScalar *array;
4479  PetscInt n;
4480  PetscObjectState state;
4481  PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4482  PetscBool assembled;
4483  PetscErrorCode ierr;
4484 
4485  PetscFunctionBeginUser;
4486  // Matrix-free cases
4487  if (Ax && Ax != Jx)
4488  {
4489  ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4490  ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4491  }
4492  if (Axp && Axp != Jxp)
4493  {
4494  ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4495  ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4496  }
4497 
4498  ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
4499 
4500  // avoid recomputing the Jacobians if we already did so
4501  ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
4502  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4503  state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4504  ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
4505  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4506  state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4507  if (!rx && !rxp) { PetscFunctionReturn(0); }
4508 
4509  // update time
4510  mfem::TimeDependentOperator *op = ts_ctx->op;
4511  op->SetTime(t);
4512 
4513  // wrap Vecs with Vectors
4514  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4515  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4516  mfem::Vector yy(array,n);
4517  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4518  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4519  if (!ts_ctx->bchandler)
4520  {
4521  xx = new mfem::Vector(array,n);
4522  }
4523  else
4524  {
4525  // make sure we compute a Jacobian with the correct boundary values
4526  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4527  mfem::Vector txx(array,n);
4528  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4529  xx = ts_ctx->work;
4530  bchandler->SetTime(t);
4531  bchandler->ApplyBC(txx,*xx);
4532  }
4533  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4534 
4535  // We don't have a specialized interface, so we just compute the split Jacobians
4536  // by evaluating the implicit gradient method twice with the appropriate shifts
4537 
4538  // first we compute the state Jacobian
4539  mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
4540 
4541  // Convert to the operator type requested if needed
4542  bool delete_mat = false;
4543  mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
4544  (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
4545  if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4546  pJx->GetType() != ts_ctx->jacType))
4547  {
4548  if (pJx)
4549  {
4550  Mat B = *pJx;
4551  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4552  }
4553  pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
4554  ts_ctx->jacType);
4555  delete_mat = true;
4556  }
4557  if (rx)
4558  {
4559  ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4560  if (assembled)
4561  {
4562  ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4563  }
4564  else
4565  {
4566  Mat B;
4567  ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4568  ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4569  }
4570  }
4571  if (delete_mat) { delete pJx; }
4572  pJx = new mfem::PetscParMatrix(Jx,true);
4573 
4574  // Eliminate essential dofs
4575  if (ts_ctx->bchandler)
4576  {
4577  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4578  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4579  pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4580  }
4581 
4582  // Then we compute the Jacobian with respect to the time derivative of the state.
4583  // Note that this is usually the mass matrix
4584  mfem::PetscParMatrix *pJxp = NULL;
4585  if (rxp)
4586  {
4587  delete_mat = false;
4588  mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
4589  pJxp = const_cast<mfem::PetscParMatrix *>
4590  (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
4591  if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4592  pJxp->GetType() != ts_ctx->jacType))
4593  {
4594  if (pJxp)
4595  {
4596  Mat B = *pJxp;
4597  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4598  }
4599  pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
4600  &oJxp,ts_ctx->jacType);
4601  delete_mat = true;
4602  }
4603 
4604  ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4605  if (assembled)
4606  {
4607  ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4608  }
4609  else
4610  {
4611  Mat B;
4612  ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4613  ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4614  }
4615  if (delete_mat) { delete pJxp; }
4616  pJxp = new mfem::PetscParMatrix(Jxp,true);
4617 
4618  // Eliminate essential dofs
4619  if (ts_ctx->bchandler)
4620  {
4621  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4622  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4623  pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
4624  }
4625 
4626  // Obtain the time-dependent part of the Jacobian by subtracting
4627  // the state Jacobian.
4628  // We don't use the class operator "-=" since we know that
4629  // the sparsity pattern of the two matrices is the same
4630  ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4631  }
4632 
4633  // Jacobian reuse
4634  ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4635  CHKERRQ(ierr);
4636  ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4637  CHKERRQ(ierr);
4638 
4639  delete pJx;
4640  delete pJxp;
4641  if (!ts_ctx->bchandler) { delete xx; }
4642  PetscFunctionReturn(0);
4643 }
4644 
4645 static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4646  Mat A, Mat P, void *ctx)
4647 {
4648  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4649  mfem::Vector *xx;
4650  PetscScalar *array;
4651  PetscInt n;
4652  PetscObjectState state;
4653  PetscErrorCode ierr;
4654 
4655  PetscFunctionBeginUser;
4656  // Matrix-free case
4657  if (A && A != P)
4658  {
4659  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4660  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4661  }
4662 
4663  // avoid recomputing the Jacobian if we already did so
4664  ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4665  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4666  state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
4667 
4668  // update time
4669  mfem::TimeDependentOperator *op = ts_ctx->op;
4670  op->SetTime(t);
4671 
4672  // wrap Vec with Vector
4673  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4674  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4675  if (!ts_ctx->bchandler)
4676  {
4677  xx = new mfem::Vector(array,n);
4678  }
4679  else
4680  {
4681  // make sure we compute a Jacobian with the correct boundary values
4682  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4683  mfem::Vector txx(array,n);
4684  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4685  xx = ts_ctx->work;
4686  bchandler->SetTime(t);
4687  bchandler->ApplyBC(txx,*xx);
4688  }
4689  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4690 
4691  // Use TimeDependentOperator::GetExplicitGradient(x)
4692  mfem::Operator& J = op->GetExplicitGradient(*xx);
4693  if (!ts_ctx->bchandler) { delete xx; }
4694 
4695  // Convert to the operator type requested if needed
4696  bool delete_pA = false;
4697  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4698  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4699  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4700  pA->GetType() != ts_ctx->jacType))
4701  {
4702  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4703  ts_ctx->jacType);
4704  delete_pA = true;
4705  }
4706 
4707  // Eliminate essential dofs
4708  if (ts_ctx->bchandler)
4709  {
4710  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4711  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4712  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4713  }
4714 
4715  // Get nonzerostate
4716  PetscObjectState nonzerostate;
4717  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4718 
4719  // Avoid an unneeded copy of the matrix by swapping its header
4720  Mat B;
4721  B = pA->ReleaseMat(false);
4722  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4723  if (delete_pA) { delete pA; }
4724 
4725  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4726  // preconditioner fails because MatCreateSubMatrix_Nest does not
4727  // actually return a matrix. Instead, for efficiency reasons,
4728  // it returns a reference to the submatrix. The second time it
4729  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4730  // aborts since the two submatrices are actually different.
4731  // We circumvent this issue by incrementing the nonzero state
4732  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4733  // This does not impact performance in the case of MATNEST
4734  PetscBool isnest;
4735  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4736  CHKERRQ(ierr);
4737  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4738 
4739  // Jacobian reuse
4740  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4741  {
4742  ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4743  }
4744  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_rhsjacstate);
4745  CHKERRQ(ierr);
4746 
4747  // Fool DM
4748  DM dm;
4749  MatType mtype;
4750  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4751  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4752  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4753  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4754  PetscFunctionReturn(0);
4755 }
4756 
4757 static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4758  void* ctx)
4759 {
4760  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4761 
4762  PetscFunctionBeginUser;
4763  if (!monctx)
4764  {
4765  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4766  }
4767 
4768  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4769  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4770  monctx->monitor);
4771  if (user_monitor->mon_sol)
4772  {
4773  Vec x;
4774  PetscErrorCode ierr;
4775 
4776  ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4777  mfem::PetscParVector V(x,true);
4778  user_monitor->MonitorSolution(it,res,V);
4779  }
4780  if (user_monitor->mon_res)
4781  {
4782  Vec x;
4783  PetscErrorCode ierr;
4784 
4785  ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4786  mfem::PetscParVector V(x,true);
4787  user_monitor->MonitorResidual(it,res,V);
4788  }
4789  user_monitor->MonitorSolver(solver);
4790  PetscFunctionReturn(0);
4791 }
4792 
4793 static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4794  void *ctx)
4795 {
4796  PetscScalar *array;
4797  PetscInt n;
4798  PetscErrorCode ierr;
4799  mfem::Vector *xx;
4800  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4801 
4802  PetscFunctionBeginUser;
4803  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4804  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4805  if (!snes_ctx->bchandler)
4806  {
4807  xx = new mfem::Vector(array,n);
4808  }
4809  else
4810  {
4811  // make sure we compute a Jacobian with the correct boundary values
4812  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4813  mfem::Vector txx(array,n);
4814  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4815  xx = snes_ctx->work;
4816  bchandler->ApplyBC(txx,*xx);
4817  }
4818 
4819  // Use Operator::GetGradient(x)
4820  mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4821  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4822  if (!snes_ctx->bchandler) { delete xx; }
4823 
4824  // Convert to the operator type requested if needed
4825  bool delete_pA = false;
4826  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4827  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4828  if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4829  pA->GetType() != snes_ctx->jacType))
4830  {
4831  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4832  snes_ctx->jacType);
4833  delete_pA = true;
4834  }
4835 
4836  // Eliminate essential dofs
4837  if (snes_ctx->bchandler)
4838  {
4839  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4840  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4841  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4842  }
4843 
4844  // Get nonzerostate
4845  PetscObjectState nonzerostate;
4846  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4847 
4848  // Avoid an unneeded copy of the matrix by swapping its header
4849  Mat B = pA->ReleaseMat(false);
4850  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4851  if (delete_pA) { delete pA; }
4852 
4853  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4854  // preconditioner fails because MatCreateSubMatrix_Nest does not
4855  // actually return a matrix. Instead, for efficiency reasons,
4856  // it returns a reference to the submatrix. The second time it
4857  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4858  // aborts since the two submatrices are actually different.
4859  // We circumvent this issue by incrementing the nonzero state
4860  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4861  // This does not impact performance in the case of MATNEST
4862  PetscBool isnest;
4863  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4864  CHKERRQ(ierr);
4865  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4866 
4867  // Matrix-free case
4868  if (A && A != P)
4869  {
4870  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4871  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4872  }
4873 
4874  // Fool DM
4875  DM dm;
4876  MatType mtype;
4877  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4878  ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4879  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4880  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4881  PetscFunctionReturn(0);
4882 }
4883 
4884 static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4885 {
4886  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4887 
4888  PetscFunctionBeginUser;
4889  mfem::PetscParVector xx(x,true);
4890  mfem::PetscParVector ff(f,true);
4891  if (snes_ctx->bchandler)
4892  {
4893  // we evaluate the Mult method with the correct bc
4894  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
4895  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4896  mfem::Vector* txx = snes_ctx->work;
4897  bchandler->ApplyBC(xx,*txx);
4898  snes_ctx->op->Mult(*txx,ff);
4899  // and fix the residual (i.e. f_\partial\Omega = u - g)
4900  bchandler->FixResidualBC(xx,ff);
4901  }
4902  else
4903  {
4904  // use the Mult method of the class
4905  snes_ctx->op->Mult(xx,ff);
4906  }
4907  ff.UpdateVecFromFlags();
4908  PetscFunctionReturn(0);
4909 }
4910 
4911 static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
4912  void *ctx)
4913 {
4914  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4915 
4916  PetscFunctionBeginUser;
4917  if (!snes_ctx->objective)
4918  {
4919  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
4920  }
4921  mfem::PetscParVector xx(x,true);
4922  double lf;
4923  (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4924  *f = (PetscReal)lf;
4925  PetscFunctionReturn(0);
4926 }
4927 
4928 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4929  PetscBool *cy,PetscBool *cw, void* ctx)
4930 {
4931  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4932  bool lcy = false,lcw = false;
4933 
4934  PetscFunctionBeginUser;
4935  mfem::PetscParVector x(X,true);
4936  mfem::PetscParVector y(Y,true);
4937  mfem::PetscParVector w(W,true);
4938  (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4939  if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
4940  if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
4941  PetscFunctionReturn(0);
4942 }
4943 
4944 static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
4945 {
4946  Vec F,X,dX,pX;
4947  __mfem_snes_ctx* snes_ctx;
4948 
4949  PetscFunctionBeginUser;
4950  /* The SNES update callback receives no user context, so retrieve it from the function */
4951  ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
4952  ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4953  if (!it)
4954  {
4955  ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4956  ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
4957  CHKERRQ(ierr);
4958  ierr = VecDestroy(&pX); CHKERRQ(ierr);
4959  }
4960  ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
4961  CHKERRQ(ierr);
4962  if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
4963  "Missing previous solution");
4964  ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4965  mfem::PetscParVector f(F,true);
4966  mfem::PetscParVector x(X,true);
4967  mfem::PetscParVector dx(dX,true);
4968  mfem::PetscParVector px(pX,true);
4969  (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4970  /* Store previous solution */
4971  ierr = VecCopy(X,pX); CHKERRQ(ierr);
4972  PetscFunctionReturn(0);
4973 }
4974 
4975 static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
4976  void* ctx)
4977 {
4978  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4979 
4980  PetscFunctionBeginUser;
4981  if (!monctx)
4982  {
4983  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4984  }
4985 
4986  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4987  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4988  monctx->monitor);
4989  if (user_monitor->mon_sol)
4990  {
4991  Vec x;
4992  PetscErrorCode ierr;
4993 
4994  ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
4995  mfem::PetscParVector V(x,true);
4996  user_monitor->MonitorSolution(it,res,V);
4997  }
4998  if (user_monitor->mon_res)
4999  {
5000  Vec x;
5001  PetscErrorCode ierr;
5002 
5003  ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5004  mfem::PetscParVector V(x,true);
5005  user_monitor->MonitorResidual(it,res,V);
5006  }
5007  user_monitor->MonitorSolver(solver);
5008  PetscFunctionReturn(0);
5009 }
5010 
5011 static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
5012 {
5013  mfem::Operator *op;
5014  PetscErrorCode ierr;
5015 
5016  PetscFunctionBeginUser;
5017  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5018  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5019  mfem::PetscParVector xx(x,true);
5020  mfem::PetscParVector yy(y,true);
5021  op->Mult(xx,yy);
5022  yy.UpdateVecFromFlags();
5023  PetscFunctionReturn(0);
5024 }
5025 
5026 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
5027 {
5028  mfem::Operator *op;
5029  PetscErrorCode ierr;
5030  PetscBool flg,symm;
5031 
5032  PetscFunctionBeginUser;
5033  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5034  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5035  mfem::PetscParVector xx(x,true);
5036  mfem::PetscParVector yy(y,true);
5037  ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5038  if (flg && symm)
5039  {
5040  op->Mult(xx,yy);
5041  }
5042  else
5043  {
5044  op->MultTranspose(xx,yy);
5045  }
5046  yy.UpdateVecFromFlags();
5047  PetscFunctionReturn(0);
5048 }
5049 
5050 static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
5051 {
5052  mfem::Operator *op;
5053  PetscErrorCode ierr;
5054 
5055  PetscFunctionBeginUser;
5056  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5057  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5058  ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
5059  PetscFunctionReturn(0);
5060 }
5061 
5062 static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
5063 {
5064  PetscFunctionBeginUser;
5065  PetscFunctionReturn(0);
5066 }
5067 
5068 static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
5069 {
5070  __mfem_pc_shell_ctx *ctx;
5071  PetscErrorCode ierr;
5072 
5073  PetscFunctionBeginUser;
5074  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5075  if (ctx->op)
5076  {
5077  PetscBool isascii;
5078  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5079  CHKERRQ(ierr);
5080 
5081  mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
5082  (ctx->op);
5083  if (ppc)
5084  {
5085  ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5086  }
5087  else
5088  {
5089  if (isascii)
5090  {
5091  ierr = PetscViewerASCIIPrintf(viewer,
5092  "No information available on the mfem::Solver\n");
5093  CHKERRQ(ierr);
5094  }
5095  }
5096  if (isascii && ctx->factory)
5097  {
5098  ierr = PetscViewerASCIIPrintf(viewer,
5099  "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5100  CHKERRQ(ierr);
5101  }
5102  }
5103  PetscFunctionReturn(0);
5104 }
5105 
5106 static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
5107 {
5108  __mfem_pc_shell_ctx *ctx;
5109  PetscErrorCode ierr;
5110 
5111  PetscFunctionBeginUser;
5112  mfem::PetscParVector xx(x,true);
5113  mfem::PetscParVector yy(y,true);
5114  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5115  if (ctx->op)
5116  {
5117  ctx->op->Mult(xx,yy);
5118  yy.UpdateVecFromFlags();
5119  }
5120  else // operator is not present, copy x
5121  {
5122  yy = xx;
5123  }
5124  PetscFunctionReturn(0);
5125 }
5126 
5127 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
5128 {
5129  __mfem_pc_shell_ctx *ctx;
5130  PetscErrorCode ierr;
5131 
5132  PetscFunctionBeginUser;
5133  mfem::PetscParVector xx(x,true);
5134  mfem::PetscParVector yy(y,true);
5135  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5136  if (ctx->op)
5137  {
5138  ctx->op->MultTranspose(xx,yy);
5139  yy.UpdateVecFromFlags();
5140  }
5141  else // operator is not present, copy x
5142  {
5143  yy = xx;
5144  }
5145  PetscFunctionReturn(0);
5146 }
5147 
5148 static PetscErrorCode __mfem_pc_shell_setup(PC pc)
5149 {
5150  __mfem_pc_shell_ctx *ctx;
5151 
5152  PetscFunctionBeginUser;
5153  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5154  if (ctx->factory)
5155  {
5156  // Delete any owned operator
5157  if (ctx->ownsop)
5158  {
5159  delete ctx->op;
5160  }
5161 
5162  // Get current preconditioning Mat
5163  Mat B;
5164  ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5165 
5166  // Call user-defined setup
5167  mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
5168  mfem::PetscPreconditionerFactory *factory = ctx->factory;
5169  ctx->op = factory->NewPreconditioner(hB);
5170  ctx->ownsop = true;
5171  ctx->numprec++;
5172  }
5173  PetscFunctionReturn(0);
5174 }
5175 
5176 static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
5177 {
5178  __mfem_pc_shell_ctx *ctx;
5179  PetscErrorCode ierr;
5180 
5181  PetscFunctionBeginUser;
5182  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5183  if (ctx->ownsop)
5184  {
5185  delete ctx->op;
5186  }
5187  delete ctx;
5188  PetscFunctionReturn(0);
5189 }
5190 
5191 static PetscErrorCode __mfem_array_container_destroy(void *ptr)
5192 {
5193  PetscErrorCode ierr;
5194 
5195  PetscFunctionBeginUser;
5196  ierr = PetscFree(ptr); CHKERRQ(ierr);
5197  PetscFunctionReturn(0);
5198 }
5199 
5200 static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
5201 {
5202  mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
5203  PetscErrorCode ierr;
5204 
5205  PetscFunctionBeginUser;
5206  for (int i=0; i<a->Size(); i++)
5207  {
5208  Mat M = (*a)[i];
5209  MPI_Comm comm = PetscObjectComm((PetscObject)M);
5210  ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5211  }
5212  delete a;
5213  PetscFunctionReturn(0);
5214 }
5215 
5216 static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
5217 {
5218  PetscErrorCode ierr;
5219 
5220  PetscFunctionBeginUser;
5221  ierr = PetscFree(*ctx); CHKERRQ(ierr);
5222  PetscFunctionReturn(0);
5223 }
5224 
5225 // Sets the type of the PC to PCSHELL and wraps the solver action.
5226 // If ownsop is true, ownership of precond is transferred to the PETSc object
5227 PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
5228 {
5229  PetscFunctionBeginUser;
5230  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5231  ctx->op = &precond;
5232  ctx->ownsop = ownsop;
5233  ctx->factory = NULL;
5234  ctx->numprec = 0;
5235 
5236  // In case the PC was already of type SHELL, this will destroy any
5237  // previous user-defined data structure
5238  // We cannot call PCReset as it will wipe out any operator already set
5239  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5240 
5241  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5242  ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5243  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5244  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5245  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5246  CHKERRQ(ierr);
5247  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5248  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5249  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5250  PetscFunctionReturn(0);
5251 }
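
// Illustrative sketch (not part of the original petsc.cpp): using MakeShellPC
// to apply an existing mfem::Solver (e.g. an already configured AMG solver) as
// the preconditioner of a bare PETSc KSP. The function name is hypothetical.
static PetscErrorCode example_attach_mfem_prec(KSP ksp, mfem::Solver &prec)
{
   PC pc;
   PetscFunctionBeginUser;
   ierr = KSPGetPC(ksp,&pc); CHKERRQ(ierr);
   // ownsop = false: the caller keeps ownership of prec
   ierr = MakeShellPC(pc,prec,false); CHKERRQ(ierr);
   PetscFunctionReturn(0);
}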
5252 
5253 // Sets the type of the PC to PCSHELL, and uses a PetscPreconditionerFactory to construct the solver.
5254 // Takes ownership of the solver created by the factory
5255 PetscErrorCode MakeShellPCWithFactory(PC pc,
5256  mfem::PetscPreconditionerFactory *factory)
5257 {
5258  PetscFunctionBeginUser;
5259  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5260  ctx->op = NULL;
5261  ctx->ownsop = true;
5262  ctx->factory = factory;
5263  ctx->numprec = 0;
5264 
5265  // In case the PC was already of type SHELL, this will destroy any
5266  // previous user-defined data structure
5267  // We cannot call PCReset as it will wipe out any operator already set
5268  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5269 
5270  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5271  ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
5272  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5273  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5274  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5275  CHKERRQ(ierr);
5276  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5277  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5278  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5279  PetscFunctionReturn(0);
5280 }
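
// Illustrative sketch (not part of the original petsc.cpp): a minimal factory
// that rebuilds the preconditioner from the current Pmat every time PCSetUp is
// called (see __mfem_pc_shell_setup above). The class name and the "factory_"
// options prefix are hypothetical.
class ExamplePCFactory : public mfem::PetscPreconditionerFactory
{
public:
   ExamplePCFactory() : mfem::PetscPreconditionerFactory("example factory") {}
   virtual mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh)
   {
      // The shell PC takes ownership of the returned Solver (ownsop = true)
      return new mfem::PetscPreconditioner(*oh.As<mfem::PetscParMatrix>(),
                                           "factory_");
   }
};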
5281 
5282 // Converts a list (or a marked Array, if islist is false) to an IS.
5283 // st indicates the offset at which to start numbering
5284 static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
5285  const mfem::Array<int> *list,
5286  PetscInt st, IS* is)
5287 {
5288  PetscInt n = list ? list->Size() : 0,*idxs;
5289  const int *data = list ? list->GetData() : NULL;
5290  PetscErrorCode ierr;
5291 
5292  PetscFunctionBeginUser;
5293  ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5294  if (islist)
5295  {
5296  for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5297  }
5298  else
5299  {
5300  PetscInt cum = 0;
5301  for (PetscInt i=0; i<n; i++)
5302  {
5303  if (data[i]) { idxs[cum++] = i+st; }
5304  }
5305  n = cum;
5306  }
5307  ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5308  CHKERRQ(ierr);
5309  PetscFunctionReturn(0);
5310 }
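
// For example (sketch): with islist == true, the list {2,5,7} and st == 10
// yield the index set {12,15,17}; with islist == false, the marker array
// {1,0,0,1,1} and st == 10 yield {10,13,14}.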
5311 
5312 // Converts a marked Array of Vdofs to an IS.
5313 // st indicates the offset at which to start numbering.
5314 // l2l is a vector of matrices generated during RAP
5315 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5316  mfem::Array<Mat> &pl2l,
5317  const mfem::Array<int> *mark,
5318  PetscInt st, IS* is)
5319 {
5320  mfem::Array<int> sub_dof_marker;
5321  mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
5322  PetscInt nl;
5323  PetscErrorCode ierr;
5324 
5325  PetscFunctionBeginUser;
5326  for (int i = 0; i < pl2l.Size(); i++)
5327  {
5328  PetscInt m,n,*ii,*jj;
5329  PetscBool done;
5330  ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
5331  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5332  MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
5333  ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5334 #if defined(PETSC_USE_64BIT_INDICES)
5335  int nnz = (int)ii[m];
5336  int *mii = new int[m+1];
5337  int *mjj = new int[nnz];
5338  for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5339  for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5340  l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
5341 #else
5342  l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
5343 #endif
5344  ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5345  (const PetscInt**)&ii,
5346  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5347  MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
5348  << i << " l2l matrix");
5349  }
5350  nl = 0;
5351  for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5352  sub_dof_marker.SetSize(nl);
5353  const int* vdata = mark->GetData();
5354  int* sdata = sub_dof_marker.GetData();
5355  int cumh = 0, cumw = 0;
5356  for (int i = 0; i < l2l.Size(); i++)
5357  {
5358  const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
5359  l2l[i]->Height());
5360  mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
5361  l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5362  cumh += l2l[i]->Height();
5363  cumw += l2l[i]->Width();
5364  }
5365  ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5366  for (int i = 0; i < pl2l.Size(); i++)
5367  {
5368  delete l2l[i];
5369  }
5370  PetscFunctionReturn(0);
5371 }
5372 
5373 #if !defined(PETSC_HAVE_HYPRE)
5374 
5375 #if defined(HYPRE_MIXEDINT)
5376 #error "HYPRE_MIXEDINT not supported"
5377 #endif
5378 
5379 #include "_hypre_parcsr_mv.h"
5380 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
5381 {
5382  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5383  hypre_CSRMatrix *hdiag,*hoffd;
5384  PetscScalar *da,*oa,*aptr;
5385  PetscInt *dii,*djj,*oii,*ojj,*iptr;
5386  PetscInt i,dnnz,onnz,m,n;
5387  PetscMPIInt size;
5388  PetscErrorCode ierr;
5389 
5390  PetscFunctionBeginUser;
5391  hdiag = hypre_ParCSRMatrixDiag(hA);
5392  hoffd = hypre_ParCSRMatrixOffd(hA);
5393  m = hypre_CSRMatrixNumRows(hdiag);
5394  n = hypre_CSRMatrixNumCols(hdiag);
5395  dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5396  onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5397  ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5398  ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5399  ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5400  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
5401  CHKERRQ(ierr);
5402  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
5403  CHKERRQ(ierr);
5404  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
5405  CHKERRQ(ierr);
5406  iptr = djj;
5407  aptr = da;
5408  for (i=0; i<m; i++)
5409  {
5410  PetscInt nc = dii[i+1]-dii[i];
5411  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5412  iptr += nc;
5413  aptr += nc;
5414  }
5415  ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5416  if (size > 1)
5417  {
5418  PetscInt *offdj,*coffd;
5419 
5420  ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5421  ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5422  ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5423  ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
5424  CHKERRQ(ierr);
5425  offdj = hypre_CSRMatrixJ(hoffd);
5426  coffd = hypre_ParCSRMatrixColMapOffd(hA);
5427  for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5428  ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
5429  CHKERRQ(ierr);
5430  iptr = ojj;
5431  aptr = oa;
5432  for (i=0; i<m; i++)
5433  {
5434  PetscInt nc = oii[i+1]-oii[i];
5435  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5436  iptr += nc;
5437  aptr += nc;
5438  }
5439  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5440  djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5441  }
5442  else
5443  {
5444  oii = ojj = NULL;
5445  oa = NULL;
5446  ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5447  }
5448  /* We are responsible for freeing the CSR arrays. However, since we can take
5449  references of a PetscParMatrix but we cannot take references of raw PETSc
5450  arrays, we create PetscContainer objects that hold references to these
5451  arrays and attach them to the matrix */
5452  void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5453  const char *names[6] = {"_mfem_csr_dii",
5454  "_mfem_csr_djj",
5455  "_mfem_csr_da",
5456  "_mfem_csr_oii",
5457  "_mfem_csr_ojj",
5458  "_mfem_csr_oa"
5459  };
5460  for (i=0; i<6; i++)
5461  {
5462  PetscContainer c;
5463 
5464  ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5465  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5466  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5467  CHKERRQ(ierr);
5468  ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
5469  CHKERRQ(ierr);
5470  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5471  }
5472  PetscFunctionReturn(0);
5473 }
5474 
5475 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
5476 {
5477  Mat lA;
5478  ISLocalToGlobalMapping rl2g,cl2g;
5479  IS is;
5480  hypre_CSRMatrix *hdiag,*hoffd;
5481  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5482  void *ptrs[2];
5483  const char *names[2] = {"_mfem_csr_aux",
5484  "_mfem_csr_data"
5485  };
5486  PetscScalar *hdd,*hod,*aa,*data;
5487  PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5488  PetscInt *aux,*ii,*jj;
5489  PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5490  PetscErrorCode ierr;
5491 
5492  PetscFunctionBeginUser;
5493  /* access relevant information in ParCSR */
5494  str = hypre_ParCSRMatrixFirstRowIndex(hA);
5495  stc = hypre_ParCSRMatrixFirstColDiag(hA);
5496  hdiag = hypre_ParCSRMatrixDiag(hA);
5497  hoffd = hypre_ParCSRMatrixOffd(hA);
5498  dr = hypre_CSRMatrixNumRows(hdiag);
5499  dc = hypre_CSRMatrixNumCols(hdiag);
5500  nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5501  hdi = hypre_CSRMatrixI(hdiag);
5502  hdj = hypre_CSRMatrixJ(hdiag);
5503  hdd = hypre_CSRMatrixData(hdiag);
5504  oc = hypre_CSRMatrixNumCols(hoffd);
5505  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5506  hoi = hypre_CSRMatrixI(hoffd);
5507  hoj = hypre_CSRMatrixJ(hoffd);
5508  hod = hypre_CSRMatrixData(hoffd);
5509 
5510  /* generate l2g maps for rows and cols */
5511  ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5512  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5513  ierr = ISDestroy(&is); CHKERRQ(ierr);
5514  col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5515  ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5516  for (i=0; i<dc; i++) { aux[i] = i+stc; }
5517  for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5518  ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5519  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5520  ierr = ISDestroy(&is); CHKERRQ(ierr);
5521 
5522  /* create MATIS object */
5523  ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5524  ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5525  ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5526  ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5527  ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5528  ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5529 
5530  /* merge local matrices */
5531  ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5532  ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5533  ii = aux;
5534  jj = aux+dr+1;
5535  aa = data;
5536  *ii = *(hdi++) + *(hoi++);
5537  for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5538  {
5539  PetscScalar *aold = aa;
5540  PetscInt *jold = jj,nc = jd+jo;
5541  for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5542  for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5543  *(++ii) = *(hdi++) + *(hoi++);
5544  ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5545  }
5546  for (; cum<dr; cum++) { *(++ii) = nnz; }
5547  ii = aux;
5548  jj = aux+dr+1;
5549  aa = data;
5550  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5551  CHKERRQ(ierr);
5552  ptrs[0] = aux;
5553  ptrs[1] = data;
5554  for (i=0; i<2; i++)
5555  {
5556  PetscContainer c;
5557 
5558  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5559  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5560  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5561  CHKERRQ(ierr);
5562  ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
5563  CHKERRQ(ierr);
5564  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5565  }
5566  ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5567  ierr = MatDestroy(&lA); CHKERRQ(ierr);
5568  ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5569  ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5570  PetscFunctionReturn(0);
5571 }
5572 #endif
5573 
5574 #include <petsc/private/matimpl.h>
5575 
5576 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm, PetscInt m,
5577  PetscInt n, Mat *A)
5578 {
5579  PetscFunctionBegin;
5580  ierr = MatCreate(comm,A); CHKERRQ(ierr);
5581  ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5582  ierr = PetscObjectChangeTypeName((PetscObject)*A,"mfemdummy"); CHKERRQ(ierr);
5583  (*A)->preallocated = PETSC_TRUE;
5584  ierr = MatSetUp(*A); CHKERRQ(ierr);
5585  PetscFunctionReturn(0);
5586 }
5587 
5588 #include <petsc/private/vecimpl.h>
5589 
5590 #if defined(PETSC_HAVE_DEVICE)
5591 static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5592 {
5593  PetscFunctionBegin;
5594  v->offloadmask = m;
5595  PetscFunctionReturn(0);
5596 }
5597 #endif
5598 
5599 static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5600 {
5601  PetscFunctionBegin;
5602 #if defined(PETSC_HAVE_DEVICE)
5603  *flg = v->boundtocpu;
5604 #else
5605  *flg = PETSC_TRUE;
5606 #endif
5607  PetscFunctionReturn(0);
5608 }
5609 
5610 static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject o)
5611 {
5612  PetscErrorCode ierr;
5613 
5614  PetscFunctionBegin;
5615  ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5616  PetscFunctionReturn(0);
5617 }
5618 
5619 #endif // MFEM_USE_PETSC
5620 #endif // MFEM_USE_MPI
Definition: operator.hpp:98
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
Definition: operator.hpp:72
void SetType(PetscODESolver::Type)
Definition: petsc.cpp:4174
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
Definition: petsc.cpp:1290
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Definition: petsc.hpp:977
Pointer to an Operator of a specified type.
Definition: handle.hpp:33
void Mult(double a, const Vector &x, double b, Vector &y) const
Matvec: y = a A x + b y.
Definition: petsc.cpp:1906
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
Definition: petsc.cpp:3788
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
Definition: petsc.cpp:667
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
Definition: petsc.cpp:2281
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:3877
bool DeviceRequested() const
Definition: petsc.hpp:140
T * GetData()
Returns the data.
Definition: array.hpp:112
Type GetType() const
Definition: petsc.cpp:2223
virtual Operator & GetExplicitGradient(const Vector &x) const
Return an Operator representing dG/dx at the given point x and the currently set time.
Definition: operator.cpp:249
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true...
Definition: device.hpp:336
int Size() const
Returns the size of the vector.
Definition: vector.hpp:199
virtual void Step(Vector &x, double &t, double &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in]...
Definition: petsc.cpp:4192
struct::_p_Mat * Mat
Definition: petsc.hpp:70
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
Definition: ode.hpp:22
void Init()
Initialize with defaults. Does not initialize inherited members.
Definition: petsc.cpp:945
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:3151
PetscClassId cid
The class id of the actual PETSc object.
Definition: petsc.hpp:658
ID for class PetscParMatrix, MATHYPRE format.
Definition: operator.hpp:264
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
virtual void SetTime(const double t_)
Set the current time.
Definition: operator.hpp:329
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
void SetDataAndSize_()
Definition: petsc.cpp:218
Abstract parallel finite element space.
Definition: pfespace.hpp:28
Constant in time b.c.
Definition: petsc.hpp:570
void SetRelTol(double tol)
Definition: petsc.cpp:2293
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
Definition: operator.hpp:655
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Randomize(PetscInt seed=0)
Set random values.
Definition: petsc.cpp:854
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:3121
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
Definition: petsc.cpp:1686
void SyncBaseAndReset()
Definition: petsc.hpp:124
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
Definition: petsc.cpp:2455
void SetMaxIter(int max_iter)
Definition: petsc.cpp:2343
PetscParVector & operator*=(PetscScalar d)
Definition: petsc.cpp:702
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
Definition: hypre.cpp:2652
void SortColumnIndices()
Sort the column indices corresponding to each row.
Definition: sparsemat.cpp:449
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
Definition: vector.hpp:117
int Capacity() const
Return the size of the allocated memory.
Memory< double > & GetMemory()
Return a reference to the Memory object used by the Vector.
Definition: vector.hpp:234
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, double diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
Definition: petsc.cpp:2167
double PetscReal
Definition: petsc.hpp:49
void Customize(bool customize=true) const
Customize object with options set.
Definition: petsc.cpp:2545
PetscInt M() const
Returns the global number of rows.
Definition: petsc.cpp:924
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition: operator.hpp:93
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
Definition: petsc.cpp:4023
PetscParVector * Y
Definition: petsc.hpp:314
virtual ~PetscLinearSolver()
Definition: petsc.cpp:3111
PetscInt GetNumRows() const
Returns the local number of rows.
Definition: petsc.cpp:910
virtual int GetTrueVSize() const
Return the number of local vector true dofs.
Definition: pfespace.hpp:289
virtual void ExplicitMult(const Vector &x, Vector &y) const
Perform the action of the explicit part of the operator, G: y = G(x, t) where t is the current time...
Definition: operator.cpp:219
struct::_p_PC * PC
Definition: petsc.hpp:72
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
Definition: petsc.cpp:869
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray()...
Definition: petsc.cpp:721
const double * GetHostPointer() const
Definition: petsc.cpp:198
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
Definition: petsc.cpp:653
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
Definition: petsc.cpp:4079
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
Definition: petsc.cpp:1887
ID for the base class Operator, i.e. any type.
Definition: operator.hpp:257
MPI_Comm GetComm() const
Definition: pfespace.hpp:273
double f(const Vector &xvec)
Definition: lor_mms.hpp:32
void write(std::ostream &os, T value)
Write &#39;value&#39; to stream.
Definition: binaryio.hpp:30
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
Definition: petsc.hpp:655
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
Abstract class for monitoring PETSc&#39;s solvers.
Definition: petsc.hpp:955
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
Definition: operator.hpp:86
Data type sparse matrix.
Definition: sparsemat.hpp:46
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
Definition: operator.hpp:66
void SetJacobianType(Operator::Type type)
Definition: petsc.cpp:3979
struct::_p_TS * TS
Definition: petsc.hpp:74
double b
Definition: lissajous.cpp:42
int to_int(const std::string &str)
Convert a string to an int.
Definition: text.hpp:62
const double * HostReadData() const
Definition: sparsemat.hpp:264
Auxiliary class for BDDC customization.
Definition: petsc.hpp:801
virtual ~PetscPreconditioner()
Definition: petsc.cpp:3282
ID for class PetscParMatrix, unspecified format.
Definition: operator.hpp:265
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:1243
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
Definition: petsc.cpp:2801
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
Definition: petsc.cpp:448
void LoseData()
NULL-ifies the data.
Definition: array.hpp:132
void SetFlagsFromMask_() const
Definition: petsc.cpp:268
bool DeviceIsValid() const
Return true if device pointer is valid.
const int * HostReadJ() const
Definition: sparsemat.hpp:248
PetscParMatrix & operator=(const PetscParMatrix &B)
Definition: petsc.cpp:1058
bool ReadRequested() const
Definition: petsc.hpp:130
PetscInt GetColStart() const
Returns the global index of the first local column.
Definition: petsc.cpp:903
virtual ~PetscParVector()
Calls PETSc&#39;s destroy function.
Definition: petsc.cpp:518
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
Definition: petsc.cpp:3797
PetscBCHandler * bchandler
Handler for boundary conditions.
Definition: petsc.hpp:664
PetscParMatrix & operator-=(const PetscParMatrix &B)
Definition: petsc.cpp:1089
struct::_p_Vec * Vec
Definition: petsc.hpp:69
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
Definition: petsc.cpp:1966
void Zero(Vector &x)
Replace boundary dofs with 0.
Definition: petsc.cpp:2820
void Assign(const T *)
Copy data from a pointer. &#39;Size()&#39; elements are copied.
Definition: array.hpp:900
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:2450
T read(std::istream &is)
Read a value from the stream and return it.
Definition: binaryio.hpp:37
bool WriteRequested() const
Definition: petsc.hpp:135
void Shift(double s)
Shift diagonal by a constant.
Definition: petsc.cpp:1988
ID for class PetscParMatrix, MATNEST format.
Definition: operator.hpp:263
int GetVDim() const
Returns vector dimension.
Definition: fespace.hpp:557
void SetTime(double t)
Sets the current time.
Definition: petsc.hpp:600
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true...
Definition: device.hpp:319
ID for class PetscParMatrix, MATSHELL format.
Definition: operator.hpp:262
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void SetMat(petsc::Mat newA)
Replace the inner Mat Object. The reference count of newA is increased.
Definition: petsc.cpp:1711
Biwise-OR of all CUDA backends.
Definition: device.hpp:88
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
Definition: petsc.cpp:952
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc&#39;s MATSHELL object and returns the Mat in B...
Definition: petsc.cpp:1256
int GetNumIterations()
Definition: petsc.cpp:2611
int SpaceDimension() const
Definition: mesh.hpp:1000
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory&lt;T&gt; &amp;mem, int size, false)
Definition: device.hpp:343
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Definition: petsc.cpp:2485
struct::_p_KSP * KSP
Definition: petsc.hpp:71
Wrapper for hypre&#39;s parallel vector class.
Definition: hypre.hpp:148
HYPRE_BigInt * GetTrueDofOffsets() const
Definition: pfespace.hpp:282
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
Definition: petsc.cpp:2829
virtual void Eval(double t, Vector &g)
Boundary conditions evaluation.
Definition: petsc.hpp:590
Type
Enumeration defining IDs for some classes derived from Operator.
Definition: operator.hpp:255
struct::_p_SNES * SNES
Definition: petsc.hpp:73
PetscInt NNZ() const
Returns the number of nonzeros.
Definition: petsc.cpp:938
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true)
Definition: petsc.cpp:2841
virtual void Mult(const Vector &b, Vector &x) const
Application of the preconditioner.
Definition: petsc.cpp:3272
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
Definition: fe_coll.hpp:26
MemoryType
Memory types supported by MFEM.
Definition: mem_manager.hpp:31
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
Definition: array.hpp:679
virtual Operator & GetImplicitGradient(const Vector &x, const Vector &k, double shift) const
Return an Operator representing (dF/dk shift + dF/dx) at the given x, k, and the currently set time...
Definition: operator.cpp:241
HYPRE_Int HYPRE_BigInt
PetscInt N() const
Returns the global number of columns.
Definition: petsc.cpp:931
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
Definition: petsc.cpp:1877
void SetJacobianType(Operator::Type type)
Definition: petsc.cpp:4162
bool operatorset
Boolean to handle SetOperator calls.
Definition: petsc.hpp:670
PetscParVector * X
Auxiliary vectors for typecasting.
Definition: petsc.hpp:314
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
Definition: device.hpp:258
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
void MultTranspose(double a, const Vector &x, double b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
Definition: petsc.cpp:1923
bool IsAliasForSync() const
Definition: petsc.hpp:97
int height
Dimension of the output / number of rows in the matrix.
Definition: operator.hpp:27
PetscParVector & operator-=(const PetscParVector &y)
Definition: petsc.cpp:695
double a
Definition: lissajous.cpp:41
void MFEMFinalizePetsc()
Definition: petsc.cpp:192
virtual void ImplicitMult(const Vector &x, const Vector &k, Vector &y) const
Perform the action of the implicit part of the operator, F: y = F(x, k, t) where t is the current tim...
Definition: operator.cpp:224
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory&lt;T&gt; &amp;mem, int size, false)
Definition: device.hpp:326
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true, or the mfem::Device&#39;s HostMemoryClass, otherwise.
Definition: device.hpp:353
virtual ~PetscNonlinearSolver()
Definition: petsc.cpp:3909
virtual ~PetscODESolver()
Definition: petsc.cpp:4071
int GetConverged()
Definition: petsc.cpp:2578
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
Definition: petsc.cpp:2271
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
Definition: petsc.cpp:2504
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
Definition: petsc.cpp:168
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object...
Definition: petsc.cpp:2210
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
Definition: petsc.cpp:500
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
Definition: petsc.hpp:965
bool Empty() const
Return true if the Memory object is empty, see Reset().
ID for class PetscParMatrix, MATAIJ format.
Definition: operator.hpp:260
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
Definition: petsc.cpp:3186
int dim
Definition: ex24.cpp:53
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
Definition: mem_manager.cpp:55
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:616
PetscInt GlobalSize() const
Returns the global number of rows.
Definition: petsc.cpp:456
PetscParVector * X
Definition: petsc.hpp:661
PetscInt GetRowStart() const
Returns the global index of the first local row.
Definition: petsc.cpp:896
struct _p_PetscObject * PetscObject
Definition: petsc.hpp:51
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
Definition: petsc.cpp:3832
void ResetMemory()
Completes the operation started with PlaceMemory.
Definition: petsc.cpp:810
void SetAbsTol(double tol)
Definition: petsc.cpp:2318
Helper class for handling essential boundary conditions.
Definition: petsc.hpp:564
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
Definition: petsc.cpp:1867
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
Definition: petsc.hpp:597
void SetTol(double tol)
Definition: petsc.cpp:2288
PetscParVector & operator=(PetscScalar d)
Set constant values.
Definition: petsc.cpp:646
RefCoord t[3]
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
Definition: petsc.hpp:971
int NumRowBlocks() const
Return the number of row blocks.
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
Definition: petsc.cpp:2007
Vector data type.
Definition: vector.hpp:60
const FiniteElementCollection * FEColl() const
Definition: fespace.hpp:577
const int * HostReadI() const
Definition: sparsemat.hpp:232
Identity Operator I: x -&gt; x.
Definition: operator.hpp:674
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array...
Definition: petsc.cpp:716
bool UseDevice() const
Read the internal device flag.
void FreePrivateContext()
Definition: petsc.cpp:2701
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Definition: ode.hpp:26
RefCoord s[3]
Vector * GlobalVector() const
Returns the global vector in each processor.
Definition: petsc.cpp:621
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:4046
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
Definition: petsc.cpp:2196
Base class for solvers.
Definition: operator.hpp:651
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as s...
Definition: globals.hpp:66
Abstract operator.
Definition: operator.hpp:24
Wrapper for hypre&#39;s ParCSR matrix class.
Definition: hypre.hpp:337
A class to handle Block systems in a matrix-free implementation.
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory&lt;T&gt; &amp;mem, int size, false)
Definition: device.hpp:360
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
Definition: petsc.cpp:3101
void operator*=(double s)
Scale all entries by s: A_scaled = s*A.
Definition: petsc.cpp:1901
int NumColBlocks() const
Return the number of column blocks.
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
Definition: petsc.cpp:2729
ID for class PetscParMatrix, MATIS format.
Definition: operator.hpp:261
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition: petsc.cpp:3106
void PlaceMemory(Memory< double > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding ...
Definition: petsc.cpp:726
int width
Dimension of the input / number of columns in the matrix.
Definition: operator.hpp:28
void * private_ctx
Private context for solver.
Definition: petsc.hpp:667
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
Definition: petsc.cpp:2736
const double * GetDevicePointer() const
Definition: petsc.cpp:207
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
Definition: petsc.cpp:1857
void ScaleCols(const Vector &s)
Scale the local col i by s(i).
Definition: petsc.cpp:1977
double f(const Vector &p)