MFEM  v4.3.0
Finite element discretization library
petsc.cpp
1 // Copyright (c) 2010-2021, Lawrence Livermore National Security, LLC. Produced
2 // at the Lawrence Livermore National Laboratory. All Rights reserved. See files
3 // LICENSE and NOTICE for details. LLNL-CODE-806117.
4 //
5 // This file is part of the MFEM library. For more information and source code
6 // availability visit https://mfem.org.
7 //
8 // MFEM is free software; you can redistribute it and/or modify it under the
9 // terms of the BSD-3 license. We welcome feedback and contributions, see file
10 // CONTRIBUTING.md for details.
11 
12 // Author: Stefano Zampini <stefano.zampini@gmail.com>
13 
14 #include "../config/config.hpp"
15 
16 #ifdef MFEM_USE_MPI
17 #ifdef MFEM_USE_PETSC
18 
19 #include "linalg.hpp"
20 #include "../fem/fem.hpp"
21 
22 #include "petsc.h"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
25 #endif
26 
27 // Backward compatibility
28 #if PETSC_VERSION_LT(3,11,0)
29 #define VecLockReadPush VecLockPush
30 #define VecLockReadPop VecLockPop
31 #endif
32 #if PETSC_VERSION_LT(3,12,0)
33 #define VecGetArrayWrite VecGetArray
34 #define VecRestoreArrayWrite VecRestoreArray
35 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
37 #endif
38 
39 #include <fstream>
40 #include <iomanip>
41 #include <cmath>
42 #include <cstdlib>
43 
44 // Note: there are additional #include statements below.
45 
46 #include "petscinternals.hpp"
47 
48 // Callback functions: these functions will be called by PETSc
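// They retrieve the MFEM objects to act on from the user contexts
// (the __mfem_*_ctx structs defined below).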
49 static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
50 static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
51 static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
52  void*);
53 static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
54 static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
55  PetscReal,Mat,
56  Mat,void*);
57 static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
58  Mat,Mat,Mat,Mat);
59 static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
60 static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
61 static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
62 static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
63 static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
64 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
65  PetscBool*,PetscBool*,void*);
66 static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
67 static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
68 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
69 static PetscErrorCode __mfem_pc_shell_setup(PC);
70 static PetscErrorCode __mfem_pc_shell_destroy(PC);
71 static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
72 static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
73 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
74 static PetscErrorCode __mfem_mat_shell_destroy(Mat);
75 static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
76 static PetscErrorCode __mfem_array_container_destroy(void*);
77 static PetscErrorCode __mfem_matarray_container_destroy(void*);
78 static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
79 
80 // auxiliary functions
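// Convert_Array_IS/Convert_Vmarks_IS build PETSc index sets (IS) from MFEM
// index/marker arrays; MakeShellPC/MakeShellPCWithFactory set up a PCSHELL
// around an mfem::Solver or a PetscPreconditionerFactory.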
81 static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
82  PetscInt,IS*);
83 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
84  const mfem::Array<int>*,PetscInt,IS*);
85 static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
86 static PetscErrorCode MakeShellPCWithFactory(PC,
87  mfem::PetscPreconditionerFactory*);
88 
89 // Equivalent functions are present in PETSc source code
90 // if PETSc has been compiled with hypre support
91 // We provide them here in case PETSC_HAVE_HYPRE is not defined
92 #if !defined(PETSC_HAVE_HYPRE)
93 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
94 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
95 #endif
96 
97 #if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
98 #if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
99 #ifndef _USE_DEVICE
100 #define _USE_DEVICE
101 #endif
102 #endif
103 #endif
104 
105 #if defined(PETSC_HAVE_DEVICE)
106 static PetscErrorCode __mfem_VecSetOffloadMask(Vec,PetscOffloadMask);
107 #endif
108 static PetscErrorCode __mfem_VecBoundToCPU(Vec,PetscBool*);
109 static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject);
110 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm,PetscInt,PetscInt,Mat*);
111 
112 // structs used by PETSc code
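// Each context is attached to the corresponding PETSc object (PC shell,
// SNES, TS, monitor) and carries the MFEM objects the callbacks above
// dispatch to.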
113 typedef struct
114 {
115  mfem::Solver *op;
116  mfem::PetscPreconditionerFactory *factory;
117  bool ownsop;
118  unsigned long int numprec;
119 } __mfem_pc_shell_ctx;
120 
121 typedef struct
122 {
123  mfem::Operator *op; // The nonlinear operator
124  mfem::PetscBCHandler *bchandler; // Handling of essential bc
125  mfem::Vector *work; // Work vector
126  mfem::Operator::Type jacType; // OperatorType for the Jacobian
127  // Objective for line search
128  void (*objective)(mfem::Operator *op, const mfem::Vector&, double*);
129  // PostCheck function (to be called after successful line search)
130  void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
131  mfem::Vector&, bool&, bool&);
132  // General purpose update function (to be called at the beginning of
133  // each nonlinear step)
134  void (*update)(mfem::Operator *op, int,
135  const mfem::Vector&, const mfem::Vector&,
136  const mfem::Vector&, const mfem::Vector&);
137 } __mfem_snes_ctx;
138 
139 typedef struct
140 {
141  mfem::TimeDependentOperator *op; // The time-dependent operator
142  mfem::PetscBCHandler *bchandler; // Handling of essential bc
143  mfem::Vector *work; // Work vector
144  mfem::Vector *work2; // Work vector
145  mfem::Operator::Type jacType; // OperatorType for the Jacobian
146  enum mfem::PetscODESolver::Type type;
147  PetscReal cached_shift;
148  PetscObjectState cached_ijacstate;
149  PetscObjectState cached_rhsjacstate;
150  PetscObjectState cached_splits_xstate;
151  PetscObjectState cached_splits_xdotstate;
152 } __mfem_ts_ctx;
153 
154 typedef struct
155 {
156  mfem::PetscSolver *solver; // The solver object
157  mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
158 } __mfem_monitor_ctx;
159 
160 // use global scope ierr to check PETSc errors inside mfem calls
161 static PetscErrorCode ierr;
162 
163 using namespace std;
164 
165 namespace mfem
166 {
167 
168 void MFEMInitializePetsc()
169 {
170  MFEMInitializePetsc(NULL,NULL,NULL,NULL);
171 }
172 
173 void MFEMInitializePetsc(int *argc,char*** argv)
174 {
175  MFEMInitializePetsc(argc,argv,NULL,NULL);
176 }
177 
178 void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
179  const char help[])
180 {
181  if (mfem::Device::Allows(mfem::Backend::CUDA_MASK))
182  {
183  // Tell PETSc to use the same CUDA device as MFEM:
184  ierr = PetscOptionsSetValue(NULL,"-cuda_device",
185  to_string(mfem::Device::GetId()).c_str());
186  MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
187  }
188  ierr = PetscInitialize(argc,argv,rc_file,help);
189  MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
190 }
191 
192 void MFEMFinalizePetsc()
193 {
194  ierr = PetscFinalize();
195  MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
196 }
197 
198 const double* PetscMemory::GetHostPointer() const
199 {
200  int oflags = flags;
201  SetHostValid();
202  const double *v = mfem::Read(*this,Capacity(),false);
203  flags = oflags;
204  return v;
205 }
206 
207 const double* PetscMemory::GetDevicePointer() const
208 {
209  int oflags = flags;
210  SetDeviceValid();
211  const double *v = mfem::Read(*this,Capacity(),true);
212  flags = oflags;
213  return v;
214 }
215 
216 // PetscParVector methods
217 
218 void PetscParVector::SetDataAndSize_()
219 {
220  PetscScalar *array;
221  PetscInt n;
222 
223  MFEM_VERIFY(x,"Missing Vec");
224  ierr = VecSetUp(x); PCHKERRQ(x,ierr);
225  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
226  MFEM_VERIFY(n >= 0,"Invalid local size");
227  size = n;
228 #if defined(PETSC_HAVE_DEVICE)
229  PetscOffloadMask omask;
230  PetscBool iscuda;
231 
232  ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
233  if (omask != PETSC_OFFLOAD_BOTH)
234  {
235  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
236  }
237 #endif
238  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
239 #if defined(PETSC_HAVE_DEVICE)
240  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
241  ""); PCHKERRQ(x,ierr);
242  if (iscuda)
243  {
244  if (omask != PETSC_OFFLOAD_BOTH)
245  {
246  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
247  }
248  PetscScalar *darray;
249  ierr = VecCUDAGetArrayRead(x,(const PetscScalar**)&darray); PCHKERRQ(x,ierr);
250  pdata.Wrap(array,darray,size,MemoryType::HOST,false);
251  ierr = VecCUDARestoreArrayRead(x,(const PetscScalar**)&darray);
252  PCHKERRQ(x,ierr);
253  }
254  else
255 #endif
256  {
257  pdata.Wrap(array,size,MemoryType::HOST,false);
258  }
259  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
260 
261 #if defined(PETSC_HAVE_DEVICE)
262  ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
263 #endif
264  data.MakeAlias(pdata,0,size);
265  SetFlagsFromMask_();
266 }
267 
268 void PetscParVector::SetFlagsFromMask_() const
269 {
270  MFEM_VERIFY(x,"Missing Vec");
271 #if defined(_USE_DEVICE)
272  PetscOffloadMask mask;
273  PetscBool iscuda;
274  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
275  ""); PCHKERRQ(x,ierr);
276  ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
277  if (iscuda)
278  {
279  switch (mask)
280  {
281  case PETSC_OFFLOAD_CPU:
282  pdata.SetHostValid();
283  pdata.SetDeviceInvalid();
284  break;
285  case PETSC_OFFLOAD_GPU:
286  pdata.SetHostInvalid();
287  pdata.SetDeviceValid();
288  break;
289  case PETSC_OFFLOAD_BOTH:
290  pdata.SetHostValid();
291  pdata.SetDeviceValid();
292  break;
293  default:
294  MFEM_ABORT("Unhandled case " << mask);
295  }
296  }
297 #endif
298  data.Sync(pdata);
299 }
300 
301 void PetscParVector::UpdateVecFromFlags()
302 {
303  MFEM_VERIFY(x,"Missing Vec");
304  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
305 #if defined(_USE_DEVICE)
306  PetscBool iscuda;
307  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
308  ""); PCHKERRQ(x,ierr);
309  if (iscuda)
310  {
311  bool dv = pdata.DeviceIsValid();
312  bool hv = pdata.HostIsValid();
313  PetscOffloadMask mask;
314  if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
315  else if (dv) { mask = PETSC_OFFLOAD_GPU; }
316  else { mask = PETSC_OFFLOAD_CPU; }
317  ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
318  }
319  else
320 #endif
321  {
322  /* Just make sure we have an up-to-date copy on the CPU for PETSc */
323  PetscScalar *v;
324  ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
325  pdata.CopyToHost(v,size);
326  ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
327  }
328 }
329 
330 void PetscParVector::SetVecType_()
331 {
332  VecType vectype;
333  MFEM_VERIFY(x,"Missing Vec");
334  ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
335 #if defined(_USE_DEVICE)
336  switch (Device::GetDeviceMemoryType())
337  {
338  case MemoryType::DEVICE:
339  case MemoryType::MANAGED:
340  ierr = VecSetType(x,VECCUDA); PCHKERRQ(x,ierr);
341  break;
342  default:
343  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
344  break;
345  }
346 #else
347  if (!vectype)
348  {
349  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
350  }
351 #endif
352 }
353 
354 const double* PetscParVector::Read(bool on_dev) const
355 {
356  const PetscScalar *dummy;
357  MFEM_VERIFY(x,"Missing Vec");
358 #if defined(PETSC_HAVE_DEVICE)
359  PetscBool iscuda;
360  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
361  ""); PCHKERRQ(x,ierr);
362  if (on_dev && iscuda)
363  {
364  ierr = VecCUDAGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
365  ierr = VecCUDARestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
366  }
367  else
368 #endif
369  {
370  ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
371  ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
372  }
373  SetFlagsFromMask_();
374  return mfem::Read(pdata, size, on_dev);
375 }
376 
377 const double* PetscParVector::HostRead() const
378 {
379  return Read(false);
380 }
381 
382 double* PetscParVector::Write(bool on_dev)
383 {
384  PetscScalar *dummy;
385  MFEM_VERIFY(x,"Missing Vec");
386 #if defined(PETSC_HAVE_DEVICE)
387  PetscBool iscuda;
388  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
389  ""); PCHKERRQ(x,ierr);
390  if (on_dev && iscuda)
391  {
392  ierr = VecCUDAGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
393  ierr = VecCUDARestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
394  }
395  else
396 #endif
397  {
398  ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
399  ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
400  }
401  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
402  SetFlagsFromMask_();
403  return mfem::Write(pdata, size, on_dev);
404 }
405 
406 double* PetscParVector::HostWrite()
407 {
408  return Write(false);
409 }
410 
411 double* PetscParVector::ReadWrite(bool on_dev)
412 {
413  PetscScalar *dummy;
414  MFEM_VERIFY(x,"Missing Vec");
415 #if defined(PETSC_HAVE_DEVICE)
416  PetscBool iscuda;
417  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
418  ""); PCHKERRQ(x,ierr);
419  if (on_dev && iscuda)
420  {
421  ierr = VecCUDAGetArray(x,&dummy); PCHKERRQ(x,ierr);
422  ierr = VecCUDARestoreArray(x,&dummy); PCHKERRQ(x,ierr);
423  }
424  else
425 #endif
426  {
427  ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
428  ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
429  }
430  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
431  SetFlagsFromMask_();
432  return mfem::ReadWrite(pdata, size, on_dev);
433 }
434 
435 double* PetscParVector::HostReadWrite()
436 {
437  return ReadWrite(false);
438 }
439 
440 void PetscParVector::UseDevice(bool dev) const
441 {
442  MFEM_VERIFY(x,"Missing Vec");
443 #if defined(PETSC_HAVE_DEVICE)
444  ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
445 #endif
446 }
447 
448 bool PetscParVector::UseDevice() const
449 {
450  PetscBool flg;
451  MFEM_VERIFY(x,"Missing Vec");
452  ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
453  return flg ? false : true;
454 }
455 
456 PetscInt PetscParVector::GlobalSize() const
457 {
458  PetscInt N;
459  ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
460  return N;
461 }
462 
463 PetscParVector::PetscParVector(MPI_Comm comm, const Vector &x_,
464  bool copy) : Vector()
465 {
466  int n = x_.Size();
467  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
468  ierr = VecSetSizes(x,n,PETSC_DECIDE); PCHKERRQ(x,ierr);
469  SetVecType_();
470  SetDataAndSize_();
471  if (copy)
472  {
473 
474  /* we use PETSc accessors to flag valid memory location to PETSc */
475  PetscErrorCode (*rest)(Vec,PetscScalar**);
476  PetscScalar *array;
477 #if defined(PETSC_HAVE_DEVICE)
478  PetscBool iscuda;
479  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
480  ""); PCHKERRQ(x,ierr);
481  if (iscuda && x_.UseDevice())
482  {
483  UseDevice(true);
484  ierr = VecCUDAGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
485  rest = VecCUDARestoreArrayWrite;
486  }
487  else
488 #endif
489  {
490  UseDevice(false);
491  ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
492  rest = VecRestoreArrayWrite;
493  }
494  pdata.CopyFrom(x_.GetMemory(), n);
495  ierr = (*rest)(x,&array); PCHKERRQ(x,ierr);
497  }
498 }
499 
500 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
501  PetscInt *col) : Vector()
502 {
503  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
504  if (col)
505  {
506  PetscMPIInt myid;
507  MPI_Comm_rank(comm, &myid);
508  ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
509  }
510  else
511  {
512  ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
513  }
514  SetVecType_();
515  SetDataAndSize_();
516 }
517 
518 PetscParVector::~PetscParVector()
519 {
520  MPI_Comm comm = PetscObjectComm((PetscObject)x);
521  ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
522  pdata.Delete();
523 }
524 
525 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
526  PetscScalar *data_, PetscInt *col) : Vector()
527 {
528  MFEM_VERIFY(col,"Missing distribution");
529  PetscMPIInt myid;
530  MPI_Comm_rank(comm, &myid);
531  ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
532  &x); CCHKERRQ(comm,ierr)
533  SetVecType_();
534  SetDataAndSize_();
535 }
536 
537 PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
538 {
539  ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
540  SetDataAndSize_();
541 }
542 
543 PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
544  bool transpose, bool allocate) : Vector()
545 {
546  PetscInt loc = transpose ? op.Height() : op.Width();
547 
548  ierr = VecCreate(comm,&x);
549  CCHKERRQ(comm,ierr);
550  ierr = VecSetSizes(x,loc,PETSC_DECIDE);
551  PCHKERRQ(x,ierr);
552 
553  SetVecType_();
554  if (allocate)
555  {
556  SetDataAndSize_();
557  }
558  else /* Vector intended to be used with Place/ResetMemory calls */
559  {
560  size = loc;
561  pdata.Reset();
562  data.Reset();
563  }
564 }
565 
566 PetscParVector::PetscParVector(const PetscParMatrix &A,
567  bool transpose, bool allocate) : Vector()
568 {
569  Mat pA = const_cast<PetscParMatrix&>(A);
570  if (!transpose)
571  {
572  ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
573  }
574  else
575  {
576  ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
577  }
578  SetVecType_();
579  if (!allocate) /* Vector intended to be used with Place/ResetMemory calls */
580  {
581  PetscInt n;
582  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
583  size = n;
584  pdata.Reset();
585  data.Reset();
586  }
587  else
588  {
589  SetDataAndSize_();
590  }
591 }
592 
593 PetscParVector::PetscParVector(petsc::Vec y, bool ref) : Vector()
594 {
595  if (ref)
596  {
597  ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
598  }
599  x = y;
600  SetDataAndSize_();
601 }
602 
603 PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
604 {
605  HYPRE_BigInt* offsets = pfes->GetTrueDofOffsets();
606  MPI_Comm comm = pfes->GetComm();
607  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
608 
609  PetscMPIInt myid = 0;
610  if (!HYPRE_AssumedPartitionCheck())
611  {
612  MPI_Comm_rank(comm,&myid);
613  }
614  ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
615  PCHKERRQ(x,ierr);
616  SetVecType_();
617  SetDataAndSize_();
618 }
619 
620 MPI_Comm PetscParVector::GetComm() const
621 {
622  return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
623 }
624 
625 Vector * PetscParVector::GlobalVector() const
626 {
627  VecScatter scctx;
628  Vec vout;
629  const PetscScalar *array;
630  PetscInt size;
631 
632  ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
633  ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
634  PCHKERRQ(x,ierr);
635  ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
636  PCHKERRQ(x,ierr);
637  ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
638  ierr = VecGetArrayRead(vout,&array); PCHKERRQ(x,ierr);
639  ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
640  Array<PetscScalar> data(size);
641  data.Assign(array);
642  ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(x,ierr);
643  ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
644  Vector *v = new Vector(data, internal::to_int(size));
645  v->MakeDataOwner();
646  data.LoseData();
647  return v;
648 }
649 
650 PetscParVector& PetscParVector::operator=(PetscScalar d)
651 {
652  ierr = VecSet(x,d); PCHKERRQ(x,ierr);
654  return *this;
655 }
656 
657 PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
658  const Array<PetscScalar>& vals)
659 {
660  MFEM_VERIFY(idx.Size() == vals.Size(),
661  "Size mismatch between indices and values");
662  PetscInt n = idx.Size();
663  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
664  PCHKERRQ(x,ierr);
665  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
666  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
668  return *this;
669 }
670 
671 PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
672  const Array<PetscScalar>& vals)
673 {
674  MFEM_VERIFY(idx.Size() == vals.Size(),
675  "Size mismatch between indices and values");
676  PetscInt n = idx.Size();
677  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
678  PCHKERRQ(x,ierr);
679  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
680  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
682  return *this;
683 }
684 
685 PetscParVector& PetscParVector::operator=(const PetscParVector &y)
686 {
687  ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
689  return *this;
690 }
691 
692 PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
693 {
694  ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
696  return *this;
697 }
698 
699 PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
700 {
701  ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
703  return *this;
704 }
705 
706 PetscParVector& PetscParVector::operator*=(PetscScalar s)
707 {
708  ierr = VecScale(x,s); PCHKERRQ(x,ierr);
710  return *this;
711 }
712 
713 PetscParVector& PetscParVector::operator+=(PetscScalar s)
714 {
715  ierr = VecShift(x,s); PCHKERRQ(x,ierr);
717  return *this;
718 }
719 
720 void PetscParVector::PlaceArray(PetscScalar *temp_data)
721 {
722  ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
723 }
724 
725 void PetscParVector::ResetArray()
726 {
727  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
728 }
729 
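// PlaceMemory temporarily makes the Vec use externally owned memory through
// VecPlaceArray/VecCUDAPlaceArray; every call must be matched by ResetMemory().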
730 void PetscParVector::PlaceMemory(Memory<double>& mem, bool rw)
731 {
732  PetscInt n;
733 
734  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
735  MFEM_VERIFY(n == mem.Capacity(),
736  "Memory size " << mem.Capacity() << " != " << n << " vector size!");
737  MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
738  MFEM_VERIFY(data.Empty(),"Vector data is not empty");
739 #if defined(_USE_DEVICE)
740  PetscBool iscuda;
741  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
742  ""); PCHKERRQ(x,ierr);
743  if (iscuda)
744  {
745  bool usedev = mem.DeviceIsValid() || (!rw && mem.UseDevice());
746  pdata.MakeAliasForSync(mem,0,n,rw,true,usedev);
747  if (usedev)
748  {
749  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
750  ierr = VecCUDAPlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
751  }
752  else
753  {
754  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
755  ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
756  }
757  }
758  else
759 #endif
760  {
761  double *w = rw ? mfem::HostReadWrite(mem,size) : mfem::HostWrite(mem,size);
762  pdata.MakeAliasForSync(mem,0,n,rw,true,false);
763 #if defined(PETSC_HAVE_DEVICE)
764  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
765 #endif
766  ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
767  }
768  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
770 }
771 
772 void PetscParVector::PlaceMemory(const Memory<double>& mem)
773 {
774  PetscInt n;
775 
776  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
777  MFEM_VERIFY(n == mem.Capacity(),
778  "Memory size " << mem.Capacity() << " != " << n << " vector size!");
779  MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
780  MFEM_VERIFY(data.Empty(),"Vector data is not empty");
781 #if defined(_USE_DEVICE)
782  PetscBool iscuda;
783  ierr = PetscObjectTypeCompareAny((PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
784  ""); PCHKERRQ(x,ierr);
785  if (iscuda)
786  {
787  pdata.MakeAliasForSync(mem,0,n,mem.DeviceIsValid());
788  if (mem.DeviceIsValid())
789  {
790  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
791  ierr = VecCUDAPlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
792  }
793  else
794  {
795  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
796  ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
797  }
798  }
799  else
800 #endif
801  {
802  const double *w = mfem::HostRead(mem,size);
803  pdata.MakeAliasForSync(mem,0,n,false);
804 #if defined(PETSC_HAVE_DEVICE)
805  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
806 #endif
807  ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
808  }
810  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
811  ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
812 }
813 
814 void PetscParVector::ResetMemory()
815 {
816  MFEM_VERIFY(pdata.IsAliasForSync(),"Vector data is not an alias");
817  MFEM_VERIFY(!pdata.Empty(),"Vector data is empty");
818  bool read = pdata.ReadRequested();
819  bool usedev = pdata.DeviceRequested();
820  bool write = pdata.WriteRequested();
821  /*
822  check for strange corner cases
823  - device memory used but somehow PETSc ended up putting up to date data on host
824  - host memory used but somehow PETSc ended up putting up to date data on device
825  */
826  if (write)
827  {
828  const PetscScalar *v;
829 #if defined(PETSC_HAVE_DEVICE)
830  PetscOffloadMask mask;
831  ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
832  if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
833  (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
834 #endif
835  {
836  ierr = VecGetArrayRead(x,&v); PCHKERRQ(x,ierr);
837  pdata.CopyFromHost(v, size);
838  ierr = VecRestoreArrayRead(x,&v); PCHKERRQ(x,ierr);
839  }
840  }
842  data.Reset();
843  if (read && !write) { ierr = VecLockReadPop(x); PCHKERRQ(x,ierr); }
844  if (usedev)
845  {
846 #if defined(PETSC_HAVE_DEVICE)
847  ierr = VecCUDAResetArray(x); PCHKERRQ(x,ierr);
848 #else
849  MFEM_VERIFY(false,"This should not happen");
850 #endif
851  }
852  else
853  {
854  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
855  }
856 }
857 
858 void PetscParVector::Randomize(PetscInt seed)
859 {
860  PetscRandom rctx = NULL;
861 
862  if (seed)
863  {
864  ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
865  PCHKERRQ(x,ierr);
866  ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
867  ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
868  }
869  ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
870  ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
871 }
872 
873 void PetscParVector::Print(const char *fname, bool binary) const
874 {
875  if (fname)
876  {
877  PetscViewer view;
878 
879  if (binary)
880  {
881  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
882  FILE_MODE_WRITE,&view);
883  }
884  else
885  {
886  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
887  }
888  PCHKERRQ(x,ierr);
889  ierr = VecView(x,view); PCHKERRQ(x,ierr);
890  ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
891  }
892  else
893  {
894  ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
895  }
896 }
897 
898 // PetscParMatrix methods
899 
900 PetscInt PetscParMatrix::GetRowStart() const
901 {
902  PetscInt N;
903  ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
904  return N;
905 }
906 
907 PetscInt PetscParMatrix::GetColStart() const
908 {
909  PetscInt N;
910  ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
911  return N;
912 }
913 
914 PetscInt PetscParMatrix::GetNumRows() const
915 {
916  PetscInt N;
917  ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
918  return N;
919 }
920 
921 PetscInt PetscParMatrix::GetNumCols() const
922 {
923  PetscInt N;
924  ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
925  return N;
926 }
927 
928 PetscInt PetscParMatrix::M() const
929 {
930  PetscInt N;
931  ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
932  return N;
933 }
934 
935 PetscInt PetscParMatrix::N() const
936 {
937  PetscInt N;
938  ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
939  return N;
940 }
941 
942 PetscInt PetscParMatrix::NNZ() const
943 {
944  MatInfo info;
945  ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
946  return (PetscInt)info.nz_used;
947 }
948 
949 void PetscParMatrix::Init()
950 {
951  A = NULL;
952  X = Y = NULL;
953  height = width = 0;
954 }
955 
956 PetscParMatrix::PetscParMatrix()
957 {
958  Init();
959 }
960 
961 PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
962  const mfem::Array<PetscInt>& rows, const mfem::Array<PetscInt>& cols)
963 {
964  Init();
965 
966  Mat B = const_cast<PetscParMatrix&>(pB);
967 
968  IS isr,isc;
969  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
970  rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
971  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
972  cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
973  ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
974  ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
975  ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
976 
977  height = GetNumRows();
978  width = GetNumCols();
979 }
980 
981 PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
982 {
983  Init();
984  height = pa->Height();
985  width = pa->Width();
986  ConvertOperator(pa->GetComm(),*pa,&A,tid);
987 }
988 
989 PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
990 {
991  Init();
992  height = ha->Height();
993  width = ha->Width();
994  ConvertOperator(ha->GetComm(),*ha,&A,tid);
995 }
996 
997 PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
998 {
999  Init();
1000  height = sa->Height();
1001  width = sa->Width();
1002  ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
1003 }
1004 
1005 PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
1006  Operator::Type tid)
1007 {
1008  Init();
1009  height = op->Height();
1010  width = op->Width();
1011  ConvertOperator(comm,*op,&A,tid);
1012 }
1013 
1014 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
1015  PetscInt *row_starts, SparseMatrix *diag,
1016  Operator::Type tid)
1017 {
1018  Init();
1019  BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1020  tid==PETSC_MATAIJ,&A);
1021  SetUpForDevice();
1022  // update base class
1023  height = GetNumRows();
1024  width = GetNumCols();
1025 }
1026 
1027 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
1028  PetscInt global_num_cols, PetscInt *row_starts,
1029  PetscInt *col_starts, SparseMatrix *diag,
1030  Operator::Type tid)
1031 {
1032  Init();
1033  BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1034  tid==PETSC_MATAIJ,&A);
1035  SetUpForDevice();
1036  // update base class
1037  height = GetNumRows();
1038  width = GetNumCols();
1039 }
1040 
1041 PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
1042 {
1043  if (A)
1044  {
1045  MPI_Comm comm = PetscObjectComm((PetscObject)A);
1046  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1047  if (X) { delete X; }
1048  if (Y) { delete Y; }
1049  X = Y = NULL;
1050  }
1051  height = B.Height();
1052  width = B.Width();
1053 #if defined(PETSC_HAVE_HYPRE)
1054  ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
1055 #else
1056  ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
1057 #endif
1058  SetUpForDevice();
1059  return *this;
1060 }
1061 
1062 PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
1063 {
1064  if (A)
1065  {
1066  MPI_Comm comm = PetscObjectComm((PetscObject)A);
1067  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1068  if (X) { delete X; }
1069  if (Y) { delete Y; }
1070  X = Y = NULL;
1071  }
1072  height = B.Height();
1073  width = B.Width();
1074  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1075  return *this;
1076 }
1077 
1078 PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
1079 {
1080  if (!A)
1081  {
1082  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1083  }
1084  else
1085  {
1086  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1087  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1088  ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1089  }
1090  return *this;
1091 }
1092 
1093 PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
1094 {
1095  if (!A)
1096  {
1097  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1098  ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
1099  }
1100  else
1101  {
1102  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1103  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1104  ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1105  }
1106  return *this;
1107 }
1108 
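// Builds a block-diagonal PETSc matrix from a serial SparseMatrix: an
// unassembled MATIS when 'assembled' is false, an assembled (Seq/MPI)AIJ
// matrix otherwise.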
1109 void PetscParMatrix::
1110 BlockDiagonalConstructor(MPI_Comm comm,
1111  PetscInt *row_starts, PetscInt *col_starts,
1112  SparseMatrix *diag, bool assembled, Mat* Ad)
1113 {
1114  Mat A;
1115  PetscInt lrsize,lcsize,rstart,cstart;
1116  PetscMPIInt myid = 0,commsize;
1117 
1118  ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
1119  if (!HYPRE_AssumedPartitionCheck())
1120  {
1121  ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
1122  }
1123  lrsize = row_starts[myid+1]-row_starts[myid];
1124  rstart = row_starts[myid];
1125  lcsize = col_starts[myid+1]-col_starts[myid];
1126  cstart = col_starts[myid];
1127 
1128  if (!assembled)
1129  {
1130  IS is;
1131  ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1132  ISLocalToGlobalMapping rl2g,cl2g;
1133  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1134  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1135  if (row_starts != col_starts)
1136  {
1137  ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
1138  CCHKERRQ(comm,ierr);
1139  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1140  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1141  }
1142  else
1143  {
1144  ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1145  cl2g = rl2g;
1146  }
1147 
1148  // Create the PETSc object (MATIS format)
1149  ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1150  ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1151  PCHKERRQ(A,ierr);
1152  ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1153  ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1154  ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1155  ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
1156 
1157  // Copy SparseMatrix into PETSc SeqAIJ format
1158  // pass through host for now
1159  Mat lA;
1160  ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1161  const int *II = diag->HostReadI();
1162  const int *JJ = diag->HostReadJ();
1163 #if defined(PETSC_USE_64BIT_INDICES)
1164  PetscInt *pII,*pJJ;
1165  int m = diag->Height()+1, nnz = II[diag->Height()];
1166  ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1167  for (int i = 0; i < m; i++) { pII[i] = II[i]; }
1168  for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1169  ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1170  diag->HostReadData()); PCHKERRQ(lA,ierr);
1171  ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1172 #else
1173  ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1174  diag->HostReadData()); PCHKERRQ(lA,ierr);
1175 #endif
1176  }
1177  else
1178  {
1179  PetscScalar *da;
1180  PetscInt *dii,*djj,*oii,
1181  m = diag->Height()+1, nnz = diag->NumNonZeroElems();
1182 
1183  diag->SortColumnIndices();
1184  // if we can take ownership of the SparseMatrix arrays, we can avoid this
1185  // step
1186  ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1187  ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1188  ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1189  if (sizeof(PetscInt) == sizeof(int))
1190  {
1191  ierr = PetscMemcpy(dii,diag->HostReadI(),m*sizeof(PetscInt));
1192  CCHKERRQ(PETSC_COMM_SELF,ierr);
1193  ierr = PetscMemcpy(djj,diag->HostReadJ(),nnz*sizeof(PetscInt));
1194  CCHKERRQ(PETSC_COMM_SELF,ierr);
1195  }
1196  else
1197  {
1198  const int *iii = diag->HostReadI();
1199  const int *jjj = diag->HostReadJ();
1200  for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
1201  for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1202  }
1203  ierr = PetscMemcpy(da,diag->HostReadData(),nnz*sizeof(PetscScalar));
1204  CCHKERRQ(PETSC_COMM_SELF,ierr);
1205  ierr = PetscCalloc1(m,&oii);
1206  CCHKERRQ(PETSC_COMM_SELF,ierr);
1207  if (commsize > 1)
1208  {
1209  ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1210  PETSC_DECIDE,
1211  dii,djj,da,oii,NULL,NULL,&A);
1212  CCHKERRQ(comm,ierr);
1213  }
1214  else
1215  {
1216  ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1217  CCHKERRQ(comm,ierr);
1218  }
1219 
1220  void *ptrs[4] = {dii,djj,da,oii};
1221  const char *names[4] = {"_mfem_csr_dii",
1222  "_mfem_csr_djj",
1223  "_mfem_csr_da",
1224  "_mfem_csr_oii",
1225  };
1226  for (PetscInt i=0; i<4; i++)
1227  {
1228  PetscContainer c;
1229 
1230  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1231  ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1232  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1233  CCHKERRQ(comm,ierr);
1234  ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
1235  CCHKERRQ(comm,ierr);
1236  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1237  }
1238  }
1239 
1240  // Tell PETSc the matrix is ready to be used
1241  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1242  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1243 
1244  *Ad = A;
1245 }
1246 
1247 MPI_Comm PetscParMatrix::GetComm() const
1248 {
1249  return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
1250 }
1251 
1252 // TODO ADD THIS CONSTRUCTOR
1253 //PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
1254 // PetscInt glob_ncols, int *I, PetscInt *J,
1255 // double *data, PetscInt *rows, PetscInt *cols)
1256 //{
1257 //}
1258 
1259 // TODO This should take a reference on op but how?
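// MakeWrapper exposes a generic mfem::Operator as a MATSHELL: the shell
// context stores the Operator pointer and the MATOP_* callbacks declared at
// the top of this file forward Mult/MultTranspose to it.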
1260 void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
1261 {
1262  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1263  ierr = MatSetSizes(*A,op->Height(),op->Width(),
1264  PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1265  ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1266  ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
1267  ierr = MatShellSetOperation(*A,MATOP_MULT,
1268  (void (*)())__mfem_mat_shell_apply);
1269  PCHKERRQ(A,ierr);
1270  ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1271  (void (*)())__mfem_mat_shell_apply_transpose);
1272  PCHKERRQ(A,ierr);
1273  ierr = MatShellSetOperation(*A,MATOP_COPY,
1274  (void (*)())__mfem_mat_shell_copy);
1275  PCHKERRQ(A,ierr);
1276  ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1277  (void (*)())__mfem_mat_shell_destroy);
1278 #if defined(_USE_DEVICE)
1279  MemoryType mt = GetMemoryType(op->GetMemoryClass());
1280  if (mt == MemoryType::DEVICE || mt == MemoryType::MANAGED)
1281  {
1282  ierr = MatShellSetVecType(*A,VECCUDA); PCHKERRQ(A,ierr);
1283  ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1284  }
1285  else
1286  {
1287  ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1288  }
1289 #endif
1290  PCHKERRQ(A,ierr);
1291  ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1292 }
1293 
1294 void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
1295  Operator::Type tid)
1296 {
1297  PetscParMatrix *pA = const_cast<PetscParMatrix *>
1298  (dynamic_cast<const PetscParMatrix *>(&op));
1299  HypreParMatrix *pH = const_cast<HypreParMatrix *>
1300  (dynamic_cast<const HypreParMatrix *>(&op));
1301  BlockOperator *pB = const_cast<BlockOperator *>
1302  (dynamic_cast<const BlockOperator *>(&op));
1303  IdentityOperator *pI = const_cast<IdentityOperator *>
1304  (dynamic_cast<const IdentityOperator *>(&op));
1305  SparseMatrix *pS = const_cast<SparseMatrix *>
1306  (dynamic_cast<const SparseMatrix *>(&op));
1307 
1308  if (pA && tid == ANY_TYPE) // use same object and return
1309  {
1310  ierr = PetscObjectReference((PetscObject)(pA->A));
1311  CCHKERRQ(pA->GetComm(),ierr);
1312  *A = pA->A;
1313  return;
1314  }
1315 
1316  PetscBool avoidmatconvert = PETSC_FALSE;
1317  if (pA) // we test for these types since MatConvert will fail
1318  {
1319  ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
1320  MATSHELL,"");
1321  CCHKERRQ(comm,ierr);
1322  }
1323  if (pA && !avoidmatconvert)
1324  {
1325  Mat At = NULL;
1326  PetscBool istrans;
1327 #if PETSC_VERSION_LT(3,10,0)
1328  PetscBool ismatis;
1329 #endif
1330 
1331  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
1332  CCHKERRQ(pA->GetComm(),ierr);
1333  if (!istrans)
1334  {
1335  if (tid == pA->GetType()) // use same object and return
1336  {
1337  ierr = PetscObjectReference((PetscObject)(pA->A));
1338  CCHKERRQ(pA->GetComm(),ierr);
1339  *A = pA->A;
1340  return;
1341  }
1342 #if PETSC_VERSION_LT(3,10,0)
1343  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
1344  CCHKERRQ(pA->GetComm(),ierr);
1345 #endif
1346  }
1347  else
1348  {
1349  ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
1350 #if PETSC_VERSION_LT(3,10,0)
1351  ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
1352 #endif
1353  CCHKERRQ(pA->GetComm(),ierr);
1354  }
1355 
1356  // Try to convert
1357  if (tid == PETSC_MATAIJ)
1358  {
1359 #if PETSC_VERSION_LT(3,10,0)
1360  if (ismatis)
1361  {
1362  if (istrans)
1363  {
1364  Mat B;
1365 
1366  ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1367  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1368  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1369  }
1370  else
1371  {
1372  ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
1373  PCHKERRQ(pA->A,ierr);
1374  }
1375  }
1376  else
1377 #endif
1378  {
1379  PetscMPIInt size;
1380  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1381 
1382  // call MatConvert and see if a converter is available
1383  if (istrans)
1384  {
1385  Mat B;
1386  ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1387  PCHKERRQ(pA->A,ierr);
1388  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1389  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1390  }
1391  else
1392  {
1393  ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1394  PCHKERRQ(pA->A,ierr);
1395  }
1396  }
1397  }
1398  else if (tid == PETSC_MATIS)
1399  {
1400  if (istrans)
1401  {
1402  Mat B;
1403  ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1404  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1405  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1406  }
1407  else
1408  {
1409  ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1410  }
1411  }
1412  else if (tid == PETSC_MATHYPRE)
1413  {
1414 #if defined(PETSC_HAVE_HYPRE)
1415  if (istrans)
1416  {
1417  Mat B;
1418  ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1419  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1420  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1421  }
1422  else
1423  {
1424  ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1425  }
1426 #else
1427  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1428 #endif
1429  }
1430  else if (tid == PETSC_MATSHELL)
1431  {
1432  MakeWrapper(comm,&op,A);
1433  }
1434  else
1435  {
1436  MFEM_ABORT("Unsupported operator type conversion " << tid)
1437  }
1438  }
1439  else if (pH)
1440  {
1441  if (tid == PETSC_MATAIJ)
1442  {
1443 #if defined(PETSC_HAVE_HYPRE)
1444  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1445  PETSC_USE_POINTER,A);
1446 #else
1447  ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1448 #endif
1449  CCHKERRQ(pH->GetComm(),ierr);
1450  }
1451  else if (tid == PETSC_MATIS)
1452  {
1453 #if defined(PETSC_HAVE_HYPRE)
1454  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1455  PETSC_USE_POINTER,A);
1456 #else
1457  ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1458 #endif
1459  CCHKERRQ(pH->GetComm(),ierr);
1460  }
1461  else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
1462  {
1463 #if defined(PETSC_HAVE_HYPRE)
1464  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1465  PETSC_USE_POINTER,A);
1466  CCHKERRQ(pH->GetComm(),ierr);
1467 #else
1468  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1469 #endif
1470  }
1471  else if (tid == PETSC_MATSHELL)
1472  {
1473  MakeWrapper(comm,&op,A);
1474  }
1475  else
1476  {
1477  MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
1478  " is not implemented");
1479  }
1480  }
1481  else if (pB)
1482  {
1483  Mat *mats,*matsl2l = NULL;
1484  PetscInt i,j,nr,nc;
1485 
1486  nr = pB->NumRowBlocks();
1487  nc = pB->NumColBlocks();
1488  ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1489  if (tid == PETSC_MATIS)
1490  {
1491  ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1492  }
1493  for (i=0; i<nr; i++)
1494  {
1495  PetscBool needl2l = PETSC_TRUE;
1496 
1497  for (j=0; j<nc; j++)
1498  {
1499  if (!pB->IsZeroBlock(i,j))
1500  {
1501  ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
1502  if (tid == PETSC_MATIS && needl2l)
1503  {
1504  PetscContainer c;
1505  ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
1506  (PetscObject*)&c);
1507  PCHKERRQ(mats[i*nc+j],ierr);
1508  // special case for block operators: the local Vdofs should be
1509  // ordered as:
1510  // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
1511  // with m fields, Ni the number of Vdofs for the i-th field
1512  if (c)
1513  {
1514  Array<Mat> *l2l = NULL;
1515  ierr = PetscContainerGetPointer(c,(void**)&l2l);
1516  PCHKERRQ(c,ierr);
1517  MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
1518  << l2l->Size() << " for block row " << i );
1519  ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
1520  PCHKERRQ(c,ierr);
1521  matsl2l[i] = (*l2l)[0];
1522  needl2l = PETSC_FALSE;
1523  }
1524  }
1525  }
1526  }
1527  }
1528  ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1529  if (tid == PETSC_MATIS)
1530  {
1531  ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1532 
1533  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
1534  for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1535  ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1536 
1537  PetscContainer c;
1538  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1539  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1540  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1541  PCHKERRQ(c,ierr);
1542  ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
1543  PCHKERRQ((*A),ierr);
1544  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1545  }
1546  for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1547  ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1548  }
1549  else if (pI && tid == PETSC_MATAIJ)
1550  {
1551  PetscInt rst;
1552 
1553  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1554  ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
1555  PCHKERRQ(A,ierr);
1556  ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1557  ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1558  ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1559  ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1560  ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1561  for (PetscInt i = rst; i < rst+pI->Height(); i++)
1562  {
1563  ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1564  }
1565  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1566  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1567  }
1568  else if (pS)
1569  {
1570  if (tid == PETSC_MATSHELL)
1571  {
1572  MakeWrapper(comm,&op,A);
1573  }
1574  else
1575  {
1576  /* from SparseMatrix to SEQAIJ -> always pass through host for now */
1577  Mat B;
1578  PetscScalar *pdata;
1579  PetscInt *pii,*pjj,*oii;
1580  PetscMPIInt size;
1581 
1582  int m = pS->Height();
1583  int n = pS->Width();
1584  const int *ii = pS->HostReadI();
1585  const int *jj = pS->HostReadJ();
1586  const double *data = pS->HostReadData();
1587 
1588  ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1589  ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1590  ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1591  pii[0] = ii[0];
1592  for (int i = 0; i < m; i++)
1593  {
1594  bool issorted = true;
1595  pii[i+1] = ii[i+1];
1596  for (int j = ii[i]; j < ii[i+1]; j++)
1597  {
1598  pjj[j] = jj[j];
1599  if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1600  pdata[j] = data[j];
1601  }
1602  if (!issorted)
1603  {
1604  ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1605  CCHKERRQ(PETSC_COMM_SELF,ierr);
1606  }
1607  }
1608 
1609  ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1610  if (size == 1)
1611  {
1612  ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1613  CCHKERRQ(comm,ierr);
1614  oii = NULL;
1615  }
1616  else // block diagonal constructor
1617  {
1618  ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1619  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1620  PETSC_DECIDE,
1621  pii,pjj,pdata,oii,NULL,NULL,&B);
1622  CCHKERRQ(comm,ierr);
1623  }
1624  void *ptrs[4] = {pii,pjj,pdata,oii};
1625  const char *names[4] = {"_mfem_csr_pii",
1626  "_mfem_csr_pjj",
1627  "_mfem_csr_pdata",
1628  "_mfem_csr_oii"
1629  };
1630  for (int i=0; i<4; i++)
1631  {
1632  PetscContainer c;
1633 
1634  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1635  ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1636  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1637  PCHKERRQ(B,ierr);
1638  ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
1639  PCHKERRQ(B,ierr);
1640  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1641  }
1642  if (tid == PETSC_MATAIJ)
1643  {
1644  *A = B;
1645  }
1646  else if (tid == PETSC_MATHYPRE)
1647  {
1648  ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1649  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1650  }
1651  else if (tid == PETSC_MATIS)
1652  {
1653  ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1654  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1655  }
1656  else
1657  {
1658  MFEM_ABORT("Unsupported operator type conversion " << tid)
1659  }
1660  }
1661  }
1662  else // fallback to general operator
1663  {
1664  MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
1665  "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1666  MakeWrapper(comm,&op,A);
1667  if (tid == PETSC_MATAIJ)
1668  {
1669  Mat B;
1670  PetscBool isaij;
1671 
1672  ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1673  ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
1674  CCHKERRQ(comm,ierr);
1675  ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1676  if (!isaij)
1677  {
1678  ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1679  ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1680  }
1681  else
1682  {
1683  *A = B;
1684  }
1685  }
1686  }
1687  SetUpForDevice();
1688 }
1689 
1690 void PetscParMatrix::Destroy()
1691 {
1692  if (A != NULL)
1693  {
1694  MPI_Comm comm = MPI_COMM_NULL;
1695  ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
1696  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1697  }
1698  delete X;
1699  delete Y;
1700  X = Y = NULL;
1701 }
1702 
1703 PetscParMatrix::PetscParMatrix(petsc::Mat a, bool ref)
1704 {
1705  if (ref)
1706  {
1707  ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
1708  }
1709  Init();
1710  A = a;
1711  height = GetNumRows();
1712  width = GetNumCols();
1713 }
1714 
1715 void PetscParMatrix::SetMat(Mat A_)
1716 {
1717  if (A_ == A) { return; }
1718  Destroy();
1719  ierr = PetscObjectReference((PetscObject)A_); PCHKERRQ(A_,ierr);
1720  A = A_;
1721  height = GetNumRows();
1722  width = GetNumCols();
1723 }
1724 
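// If the CUDA device backend is enabled, switch AIJ matrices (including
// MATNEST sub-blocks and the local matrix of a MATIS) to their CUSPARSE
// types and request an explicit transpose so transpose multiplications can
// also run on the device.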
1725 void PetscParMatrix::SetUpForDevice()
1726 {
1727 #if !defined(_USE_DEVICE)
1728  return;
1729 #else
1730  if (!A || !Device::Allows(Backend::CUDA_MASK)) { return; }
1731 
1732  PetscBool ismatis,isnest,isseqaij,ismpiaij;
1733  ierr = PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
1734  PCHKERRQ(A,ierr);
1735  ierr = PetscObjectTypeCompare((PetscObject)A,MATNEST,&isnest);
1736  PCHKERRQ(A,ierr);
1737  Mat tA = A;
1738  if (ismatis)
1739  {
1740  ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1741  ierr = PetscObjectTypeCompare((PetscObject)tA,MATNEST,&isnest);
1742  PCHKERRQ(tA,ierr);
1743  }
1744  if (isnest)
1745  {
1746  PetscInt n,m;
1747  Mat **sub;
1748  ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1749  bool dvec = false;
1750  for (PetscInt i = 0; i < n; i++)
1751  {
1752  for (PetscInt j = 0; j < m; j++)
1753  {
1754  if (sub[i][j])
1755  {
1756  bool expT = false;
1757  Mat sA = sub[i][j];
1758  ierr = PetscObjectTypeCompare((PetscObject)sA,MATSEQAIJ,&isseqaij);
1759  PCHKERRQ(sA,ierr);
1760  ierr = PetscObjectTypeCompare((PetscObject)sA,MATMPIAIJ,&ismpiaij);
1761  PCHKERRQ(sA,ierr);
1762  if (isseqaij)
1763  {
1764  ierr = MatSetType(sA,MATSEQAIJCUSPARSE); PCHKERRQ(sA,ierr);
1765  dvec = true;
1766  expT = true;
1767  }
1768  else if (ismpiaij)
1769  {
1770  ierr = MatSetType(sA,MATMPIAIJCUSPARSE); PCHKERRQ(sA,ierr);
1771  dvec = true;
1772  expT = true;
1773  }
1774  if (expT)
1775  {
1776  ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1777  PETSC_TRUE); PCHKERRQ(sA,ierr);
1778  }
1779  }
1780  }
1781  }
1782  if (dvec)
1783  {
1784  ierr = MatSetVecType(tA,VECCUDA); PCHKERRQ(tA,ierr);
1785  }
1786  }
1787  else
1788  {
1789  bool expT = false;
1790  ierr = PetscObjectTypeCompare((PetscObject)tA,MATSEQAIJ,&isseqaij);
1791  PCHKERRQ(tA,ierr);
1792  ierr = PetscObjectTypeCompare((PetscObject)tA,MATMPIAIJ,&ismpiaij);
1793  PCHKERRQ(tA,ierr);
1794  if (isseqaij)
1795  {
1796  ierr = MatSetType(tA,MATSEQAIJCUSPARSE); PCHKERRQ(tA,ierr);
1797  expT = true;
1798  }
1799  else if (ismpiaij)
1800  {
1801  ierr = MatSetType(tA,MATMPIAIJCUSPARSE); PCHKERRQ(tA,ierr);
1802  expT = true;
1803  }
1804  if (expT)
1805  {
1806  ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1807  PETSC_TRUE); PCHKERRQ(tA,ierr);
1808  }
1809  }
1810 #endif
1811 }
1812 
1813 // Computes y = alpha * A * x + beta * y
1814 // or y = alpha * A^T * x + beta * y
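// When both a and b are nonzero, a single MatMultAdd is used: Y is pre-scaled
// by b/a, A*x (or A^T*x) is added, and the result is scaled back by a.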
1815 static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
1816  bool transpose)
1817 {
1818  PetscErrorCode (*f)(Mat,Vec,Vec);
1819  PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
1820  if (transpose)
1821  {
1822  f = MatMultTranspose;
1823  fadd = MatMultTransposeAdd;
1824  }
1825  else
1826  {
1827  f = MatMult;
1828  fadd = MatMultAdd;
1829  }
1830  if (a != 0.)
1831  {
1832  if (b != 0.)
1833  {
1834  ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1835  ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1836  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1837  }
1838  else
1839  {
1840  ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1841  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1842  }
1843  }
1844  else
1845  {
1846  if (b == 1.)
1847  {
1848  // do nothing
1849  }
1850  else if (b != 0.)
1851  {
1852  ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1853  }
1854  else
1855  {
1856  ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1857  }
1858  }
1859 }
1860 
1861 void PetscParMatrix::MakeRef(const PetscParMatrix &master)
1862 {
1863  ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
1864  Destroy();
1865  Init();
1866  A = master.A;
1867  height = master.height;
1868  width = master.width;
1869 }
1870 
1871 PetscParVector * PetscParMatrix::GetX() const
1872 {
1873  if (!X)
1874  {
1875  MFEM_VERIFY(A,"Mat not present");
1876  X = new PetscParVector(*this,false,false); PCHKERRQ(A,ierr);
1877  }
1878  return X;
1879 }
1880 
1881 PetscParVector * PetscParMatrix::GetY() const
1882 {
1883  if (!Y)
1884  {
1885  MFEM_VERIFY(A,"Mat not present");
1886  Y = new PetscParVector(*this,true,false); PCHKERRQ(A,ierr);
1887  }
1888  return Y;
1889 }
1890 
1891 PetscParMatrix * PetscParMatrix::Transpose(bool action)
1892 {
1893  Mat B;
1894  if (action)
1895  {
1896  ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1897  }
1898  else
1899  {
1900  ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1901  }
1902  return new PetscParMatrix(B,false);
1903 }
1904 
1905 void PetscParMatrix::operator*=(double s)
1906 {
1907  ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1908 }
1909 
1910 void PetscParMatrix::Mult(double a, const Vector &x, double b, Vector &y) const
1911 {
1912  MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
1913  << ", expected size = " << Width());
1914  MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
1915  << ", expected size = " << Height());
1916 
1917  PetscParVector *XX = GetX();
1918  PetscParVector *YY = GetY();
1919  bool rw = (b != 0.0);
1920  XX->PlaceMemory(x.GetMemory());
1921  YY->PlaceMemory(y.GetMemory(),rw);
1922  MatMultKernel(A,a,XX->x,b,YY->x,false);
1923  XX->ResetMemory();
1924  YY->ResetMemory();
1925 }
1926 
1927 void PetscParMatrix::MultTranspose(double a, const Vector &x, double b,
1928  Vector &y) const
1929 {
1930  MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
1931  << ", expected size = " << Height());
1932  MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
1933  << ", expected size = " << Width());
1934 
1935  PetscParVector *XX = GetX();
1936  PetscParVector *YY = GetY();
1937  bool rw = (b != 0.0);
1938  XX->PlaceMemory(y.GetMemory(),rw);
1939  YY->PlaceMemory(x.GetMemory());
1940  MatMultKernel(A,a,YY->x,b,XX->x,true);
1941  XX->ResetMemory();
1942  YY->ResetMemory();
1943 }
1944 
1945 void PetscParMatrix::Print(const char *fname, bool binary) const
1946 {
1947  if (fname)
1948  {
1949  PetscViewer view;
1950 
1951  if (binary)
1952  {
1953  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
1954  FILE_MODE_WRITE,&view);
1955  }
1956  else
1957  {
1958  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
1959  }
1960  PCHKERRQ(A,ierr);
1961  ierr = MatView(A,view); PCHKERRQ(A,ierr);
1962  ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
1963  }
1964  else
1965  {
1966  ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1967  }
1968 }
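// [Editor's note: usage sketch, not part of petsc.cpp] Print() goes through a PETSc
// viewer; passing no file name dumps to stdout. The file names below are placeholders.
//
//   A.Print();              // MatView on the standard output
//   A.Print("A.txt");       // ASCII file
//   A.Print("A.dat", true); // binary file, loadable later with MatLoad()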
1969 
1970 void PetscParMatrix::ScaleRows(const Vector & s)
1971 {
1972  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
1973  << ", expected size = " << Height());
1974 
1975  PetscParVector *YY = GetY();
1976  YY->PlaceMemory(s.GetMemory());
1977  ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
1978  YY->ResetMemory();
1979 }
1980 
1981 void PetscParMatrix::ScaleCols(const Vector & s)
1982 {
1983  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
1984  << ", expected size = " << Width());
1985 
1986  PetscParVector *XX = GetX();
1987  XX->PlaceMemory(s.GetMemory());
1988  ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
1989  XX->ResetMemory();
1990 }
1991 
1992 void PetscParMatrix::Shift(double s)
1993 {
1994  ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
1995 }
1996 
1997 void PetscParMatrix::Shift(const Vector & s)
1998 {
1999  // for matrices with square diagonal blocks only
2000  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2001  << ", expected size = " << Height());
2002  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2003  << ", expected size = " << Width());
2004 
2005  PetscParVector *XX = GetX();
2006  XX->PlaceMemory(s.GetMemory());
2007  ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2008  XX->ResetMemory();
2009 }
2010 
2011 PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
2012  PetscParMatrix *P)
2013 {
2014  MFEM_VERIFY(A->Width() == P->Height(),
2015  "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
2016  " differs from number of local rows of P " << P->Height());
2017  MFEM_VERIFY(A->Height() == R->Width(),
2018  "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
2019  " differs from number of local cols of R " << R->Width());
2020  Mat B;
2021  ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2022  PCHKERRQ(*R,ierr);
2023  return new PetscParMatrix(B);
2024 }
2025 
2026 PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
2027 {
2028  Mat pA = *A,pP = *P,pRt = *Rt;
2029  Mat B;
2030  PetscBool Aismatis,Pismatis,Rtismatis;
2031 
2032  MFEM_VERIFY(A->Width() == P->Height(),
2033  "Petsc RAP: Number of local cols of A " << A->Width() <<
2034  " differs from number of local rows of P " << P->Height());
2035  MFEM_VERIFY(A->Height() == Rt->Height(),
2036  "Petsc RAP: Number of local rows of A " << A->Height() <<
2037  " differs from number of local rows of Rt " << Rt->Height());
2038  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
2039  PCHKERRQ(pA,ierr);
2040  ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
2041  PCHKERRQ(pA,ierr);
2042  ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
2043  PCHKERRQ(pA,ierr);
2044  if (Aismatis &&
2045  Pismatis &&
2046  Rtismatis) // handle special case (this code will eventually go into PETSc)
2047  {
2048  Mat lA,lP,lB,lRt;
2049  ISLocalToGlobalMapping cl2gP,cl2gRt;
2050  PetscInt rlsize,clsize,rsize,csize;
2051 
2052  ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2053  ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2054  ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2055  ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2056  ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2057  ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2058  ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
2059  ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2060  ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2061  ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2062  ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2063  ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2064  ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2065  if (lRt == lP)
2066  {
2067  ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2068  PCHKERRQ(lA,ierr);
2069  }
2070  else
2071  {
2072  Mat lR;
2073  ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2074  ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2075  PCHKERRQ(lRt,ierr);
2076  ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2077  }
2078 
2079  // attach the lRt matrix to the subdomain local matrix;
2080  // it may be used if markers on vdofs have to be mapped onto
2081  // subdomain true dofs
2082  {
2083  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
2084  ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
2085  (*vmatsl2l)[0] = lRt;
2086 
2087  PetscContainer c;
2088  ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
2089  PCHKERRQ(B,ierr);
2090  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2091  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2092  PCHKERRQ(c,ierr);
2093  ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
2094  PCHKERRQ(B,ierr);
2095  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2096  }
2097 
2098  // Set local problem
2099  ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2100  ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2101  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2102  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2103  }
2104  else // PETSc raises an error if this triple matrix product is not supported
2105  {
2106  if (pP == pRt)
2107  {
2108  ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2109  PCHKERRQ(pA,ierr);
2110  }
2111  else
2112  {
2113  Mat pR;
2114  ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2115  ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2116  PCHKERRQ(pRt,ierr);
2117  ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2118  }
2119  }
2120  return new PetscParMatrix(B);
2121 }
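// [Editor's note: usage sketch, not part of petsc.cpp] The usual Galerkin product
// Ac = P^T A P; pA and pP are hypothetical PetscParMatrix pointers. When all operands
// are MATIS, the branch above builds the product subdomain by subdomain and stores the
// local Rt in the "_MatIS_PtAP_l2l" container for later use (e.g. by PetscBDDCSolver).
//
//   PetscParMatrix *pAc = RAP(pA, pP);   // pAc owns the resulting Mat
//   ...
//   delete pAc;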
2122 
2123 PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
2124 {
2125  PetscParMatrix *out = RAP(P,A,P);
2126  return out;
2127 }
2128 
2129 PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
2130 {
2131  PetscParMatrix *out,*A;
2132 #if defined(PETSC_HAVE_HYPRE)
2133  A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
2134 #else
2135  A = new PetscParMatrix(hA);
2136 #endif
2137  out = RAP(P,A,P);
2138  delete A;
2139  return out;
2140 }
2141 
2142 
2143 PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
2144 {
2145  Mat AB;
2146 
2147  ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2148  CCHKERRQ(A->GetComm(),ierr);
2149  return new PetscParMatrix(AB);
2150 }
2151 
2152 PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
2153 {
2154  Mat Ae;
2155 
2156  PetscParVector dummy(GetComm(),0);
2157  ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2158  EliminateRowsCols(rows_cols,dummy,dummy);
2159  ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2160  return new PetscParMatrix(Ae);
2161 }
2162 
2163 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2164  const HypreParVector &X,
2165  HypreParVector &B,
2166  double diag)
2167 {
2168  MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2169 }
2170 
2171 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2172  const PetscParVector &X,
2173  PetscParVector &B,
2174  double diag)
2175 {
2176  PetscInt M,N;
2177  ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2178  MFEM_VERIFY(M == N,"Rectangular case unsupported");
2179 
2180  // TODO: what if a diagonal term is not present?
2181  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2182 
2183  // rows need to be in global numbering
2184  PetscInt rst;
2185  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2186 
2187  IS dir;
2188  ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2189  if (!X.GlobalSize() && !B.GlobalSize())
2190  {
2191  ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2192  }
2193  else
2194  {
2195  ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2196  }
2197  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2198 }
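// [Editor's note: usage sketch, not part of petsc.cpp] Typical elimination of essential
// dofs; ess_tdof_list, X and B are assumed to exist (X holds the prescribed values,
// B the right-hand side as PetscParVectors).
//
//   // either: keep the eliminated part for later right-hand side fixes
//   PetscParMatrix *Ae = A.EliminateRowsCols(ess_tdof_list);
//   // or: eliminate in place, using the prescribed values X to fix the rhs B directly
//   // A.EliminateRowsCols(ess_tdof_list, X, B);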
2199 
2200 void PetscParMatrix::EliminateRows(const Array<int> &rows)
2201 {
2202  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2203 
2204  // rows need to be in global numbering
2205  PetscInt rst;
2206  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2207 
2208  IS dir;
2209  ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2210  ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2211  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2212 }
2213 
2214 Mat PetscParMatrix::ReleaseMat(bool dereference)
2215 {
2216 
2217  Mat B = A;
2218  if (dereference)
2219  {
2220  MPI_Comm comm = GetComm();
2221  ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
2222  }
2223  A = NULL;
2224  return B;
2225 }
2226 
2227 Operator::Type PetscParMatrix::GetType() const
2228 {
2229  PetscBool ok;
2230  MFEM_VERIFY(A, "no associated PETSc Mat object");
2231  PetscObject oA = (PetscObject)(this->A);
2232  // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to PETSC_MATAIJ
2233  ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
2234  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2235  ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2236  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2237  ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2238  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2239  ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2240  if (ok == PETSC_TRUE) { return PETSC_MATIS; }
2241  ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2242  if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
2243  ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2244  if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
2245 #if defined(PETSC_HAVE_HYPRE)
2246  ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2247  if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
2248 #endif
2249  return PETSC_MATGENERIC;
2250 }
2251 
2252 void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
2253  const Array<int> &ess_dof_list,
2254  const Vector &X, Vector &B)
2255 {
2256  const PetscScalar *array;
2257  Mat pA = const_cast<PetscParMatrix&>(A);
2258 
2259  // B -= Ae*X
2260  Ae.Mult(-1.0, X, 1.0, B);
2261 
2262  Vec diag = const_cast<PetscParVector&>((*A.GetX()));
2263  ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2264  ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2265  for (int i = 0; i < ess_dof_list.Size(); i++)
2266  {
2267  int r = ess_dof_list[i];
2268  B(r) = array[r] * X(r);
2269  }
2270  ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2271 }
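// [Editor's note: usage sketch, not part of petsc.cpp] A common pairing with the method
// above: keep the eliminated part Ae, then fix the right-hand side so the solution
// matches the essential values in X. Names are hypothetical.
//
//   PetscParMatrix *Ae = A.EliminateRowsCols(ess_tdof_list);
//   EliminateBC(A, *Ae, ess_tdof_list, X, B);   // B -= Ae*X, then B(r) = diag(A)(r)*X(r)
//   delete Ae;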
2272 
2273 // PetscSolver methods
2274 
2275 PetscSolver::PetscSolver() : clcustom(false)
2276 {
2277  obj = NULL;
2278  B = X = NULL;
2279  cid = -1;
2280  operatorset = false;
2281  bchandler = NULL;
2282  private_ctx = NULL;
2283 }
2284 
2285 PetscSolver::~PetscSolver()
2286 {
2287  delete B;
2288  delete X;
2289  FreePrivateContext();
2290 }
2291 
2292 void PetscSolver::SetTol(double tol)
2293 {
2294  SetRelTol(tol);
2295 }
2296 
2297 void PetscSolver::SetRelTol(double tol)
2298 {
2299  if (cid == KSP_CLASSID)
2300  {
2301  KSP ksp = (KSP)obj;
2302  ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2303  }
2304  else if (cid == SNES_CLASSID)
2305  {
2306  SNES snes = (SNES)obj;
2307  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2308  PETSC_DEFAULT);
2309  }
2310  else if (cid == TS_CLASSID)
2311  {
2312  TS ts = (TS)obj;
2313  ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2314  }
2315  else
2316  {
2317  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2318  }
2319  PCHKERRQ(obj,ierr);
2320 }
2321 
2322 void PetscSolver::SetAbsTol(double tol)
2323 {
2324  if (cid == KSP_CLASSID)
2325  {
2326  KSP ksp = (KSP)obj;
2327  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2328  }
2329  else if (cid == SNES_CLASSID)
2330  {
2331  SNES snes = (SNES)obj;
2332  ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2333  PETSC_DEFAULT);
2334  }
2335  else if (cid == TS_CLASSID)
2336  {
2337  TS ts = (TS)obj;
2338  ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2339  }
2340  else
2341  {
2342  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2343  }
2344  PCHKERRQ(obj,ierr);
2345 }
2346 
2347 void PetscSolver::SetMaxIter(int max_iter)
2348 {
2349  if (cid == KSP_CLASSID)
2350  {
2351  KSP ksp = (KSP)obj;
2352  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2353  max_iter);
2354  }
2355  else if (cid == SNES_CLASSID)
2356  {
2357  SNES snes = (SNES)obj;
2358  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2359  max_iter,PETSC_DEFAULT);
2360  }
2361  else if (cid == TS_CLASSID)
2362  {
2363  TS ts = (TS)obj;
2364  ierr = TSSetMaxSteps(ts,max_iter);
2365  }
2366  else
2367  {
2368  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2369  }
2370  PCHKERRQ(obj,ierr);
2371 }
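// [Editor's note: usage sketch, not part of petsc.cpp] The same tolerance interface is
// routed to KSP, SNES or TS depending on the wrapped object; the values below are
// arbitrary.
//
//   PetscLinearSolver solver(MPI_COMM_WORLD);
//   solver.SetRelTol(1e-8);
//   solver.SetAbsTol(1e-50);
//   solver.SetMaxIter(200);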
2372 
2373 
2374 void PetscSolver::SetPrintLevel(int plev)
2375 {
2376  typedef PetscErrorCode (*myPetscFunc)(void**);
2377  PetscViewerAndFormat *vf = NULL;
2378  PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
2379 
2380  if (plev > 0)
2381  {
2382  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2383  PCHKERRQ(obj,ierr);
2384  }
2385  if (cid == KSP_CLASSID)
2386  {
2387  // there are many other options, see the function KSPSetFromOptions() in
2388  // src/ksp/ksp/interface/itcl.c
2389  typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
2390  KSP ksp = (KSP)obj;
2391  if (plev >= 0)
2392  {
2393  ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2394  }
2395  if (plev == 1)
2396  {
2397 #if PETSC_VERSION_LT(3,15,0)
2398  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2399 #else
2400  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2401 #endif
2402  (myPetscFunc)PetscViewerAndFormatDestroy);
2403  PCHKERRQ(ksp,ierr);
2404  }
2405  else if (plev > 1)
2406  {
2407  ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2408  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2409  (myPetscFunc)PetscViewerAndFormatDestroy);
2410  PCHKERRQ(ksp,ierr);
2411  if (plev > 2)
2412  {
2413  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2414  PCHKERRQ(viewer,ierr);
2415 #if PETSC_VERSION_LT(3,15,0)
2416  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2417 #else
2418  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2419 #endif
2420  (myPetscFunc)PetscViewerAndFormatDestroy);
2421  PCHKERRQ(ksp,ierr);
2422  }
2423  }
2424  }
2425  else if (cid == SNES_CLASSID)
2426  {
2427  typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
2428  SNES snes = (SNES)obj;
2429  if (plev >= 0)
2430  {
2431  ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2432  }
2433  if (plev > 0)
2434  {
2435  ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2436  (myPetscFunc)PetscViewerAndFormatDestroy);
2437  PCHKERRQ(snes,ierr);
2438  }
2439  }
2440  else if (cid == TS_CLASSID)
2441  {
2442  TS ts = (TS)obj;
2443  if (plev >= 0)
2444  {
2445  ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2446  }
2447  }
2448  else
2449  {
2450  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2451  }
2452 }
2453 
2454 MPI_Comm PetscSolver::GetComm() const
2455 {
2456  return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
2457 }
2458 
2459 void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
2460 {
2461  __mfem_monitor_ctx *monctx;
2462  ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2463  monctx->solver = this;
2464  monctx->monitor = ctx;
2465  if (cid == KSP_CLASSID)
2466  {
2467  ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
2468  __mfem_monitor_ctx_destroy);
2469  PCHKERRQ(obj,ierr);
2470  }
2471  else if (cid == SNES_CLASSID)
2472  {
2473  ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
2474  __mfem_monitor_ctx_destroy);
2475  PCHKERRQ(obj,ierr);
2476  }
2477  else if (cid == TS_CLASSID)
2478  {
2479  ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
2480  __mfem_monitor_ctx_destroy);
2481  PCHKERRQ(obj,ierr);
2482  }
2483  else
2484  {
2485  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2486  }
2487 }
2488 
2489 void PetscSolver::SetBCHandler(PetscBCHandler *bch)
2490 {
2491  bchandler = bch;
2492  if (cid == SNES_CLASSID)
2493  {
2494  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
2495  snes_ctx->bchandler = bchandler;
2496  }
2497  else if (cid == TS_CLASSID)
2498  {
2499  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
2500  ts_ctx->bchandler = bchandler;
2501  }
2502  else
2503  {
2504  MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2505  }
2506 }
2507 
2508 void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
2509 {
2510  PC pc = NULL;
2511  if (cid == TS_CLASSID)
2512  {
2513  SNES snes;
2514  KSP ksp;
2515 
2516  ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
2517  ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2518  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2519  }
2520  else if (cid == SNES_CLASSID)
2521  {
2522  KSP ksp;
2523 
2524  ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
2525  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2526  }
2527  else if (cid == KSP_CLASSID)
2528  {
2529  ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
2530  }
2531  else if (cid == PC_CLASSID)
2532  {
2533  pc = (PC)obj;
2534  }
2535  else
2536  {
2537  MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
2538  }
2539  if (factory)
2540  {
2541  ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2542  }
2543  else
2544  {
2545  ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2546  }
2547 }
2548 
2549 void PetscSolver::Customize(bool customize) const
2550 {
2551  if (!customize) { clcustom = true; }
2552  if (!clcustom)
2553  {
2554  if (cid == PC_CLASSID)
2555  {
2556  PC pc = (PC)obj;
2557  ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2558  }
2559  else if (cid == KSP_CLASSID)
2560  {
2561  KSP ksp = (KSP)obj;
2562  ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2563  }
2564  else if (cid == SNES_CLASSID)
2565  {
2566  SNES snes = (SNES)obj;
2567  ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2568  }
2569  else if (cid == TS_CLASSID)
2570  {
2571  TS ts = (TS)obj;
2572  ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2573  }
2574  else
2575  {
2576  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2577  }
2578  }
2579  clcustom = true;
2580 }
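// [Editor's note: usage sketch, not part of petsc.cpp] Customize() runs the appropriate
// XXXSetFromOptions() once per solver, so PETSc command-line options (using the prefix
// given at construction) take effect before the first solve. The prefix and options
// below are illustrative.
//
//   PetscLinearSolver solver(MPI_COMM_WORLD, "sys1_");
//   // run with:  ./app -sys1_ksp_type gmres -sys1_pc_type gamg -sys1_ksp_view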
2581 
2582 int PetscSolver::GetConverged()
2583 {
2584  if (cid == KSP_CLASSID)
2585  {
2586  KSP ksp = (KSP)obj;
2587  KSPConvergedReason reason;
2588  ierr = KSPGetConvergedReason(ksp,&reason);
2589  PCHKERRQ(ksp,ierr);
2590  return reason > 0 ? 1 : 0;
2591  }
2592  else if (cid == SNES_CLASSID)
2593  {
2594  SNES snes = (SNES)obj;
2595  SNESConvergedReason reason;
2596  ierr = SNESGetConvergedReason(snes,&reason);
2597  PCHKERRQ(snes,ierr);
2598  return reason > 0 ? 1 : 0;
2599  }
2600  else if (cid == TS_CLASSID)
2601  {
2602  TS ts = (TS)obj;
2603  TSConvergedReason reason;
2604  ierr = TSGetConvergedReason(ts,&reason);
2605  PCHKERRQ(ts,ierr);
2606  return reason > 0 ? 1 : 0;
2607  }
2608  else
2609  {
2610  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2611  return -1;
2612  }
2613 }
2614 
2615 int PetscSolver::GetNumIterations()
2616 {
2617  if (cid == KSP_CLASSID)
2618  {
2619  KSP ksp = (KSP)obj;
2620  PetscInt its;
2621  ierr = KSPGetIterationNumber(ksp,&its);
2622  PCHKERRQ(ksp,ierr);
2623  return its;
2624  }
2625  else if (cid == SNES_CLASSID)
2626  {
2627  SNES snes = (SNES)obj;
2628  PetscInt its;
2629  ierr = SNESGetIterationNumber(snes,&its);
2630  PCHKERRQ(snes,ierr);
2631  return its;
2632  }
2633  else if (cid == TS_CLASSID)
2634  {
2635  TS ts = (TS)obj;
2636  PetscInt its;
2637  ierr = TSGetStepNumber(ts,&its);
2638  PCHKERRQ(ts,ierr);
2639  return its;
2640  }
2641  else
2642  {
2643  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2644  return -1;
2645  }
2646 }
2647 
2648 double PetscSolver::GetFinalNorm()
2649 {
2650  if (cid == KSP_CLASSID)
2651  {
2652  KSP ksp = (KSP)obj;
2653  PetscReal norm;
2654  ierr = KSPGetResidualNorm(ksp,&norm);
2655  PCHKERRQ(ksp,ierr);
2656  return norm;
2657  }
2658  if (cid == SNES_CLASSID)
2659  {
2660  SNES snes = (SNES)obj;
2661  PetscReal norm;
2662  ierr = SNESGetFunctionNorm(snes,&norm);
2663  PCHKERRQ(snes,ierr);
2664  return norm;
2665  }
2666  else
2667  {
2668  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2669  return PETSC_MAX_REAL;
2670  }
2671 }
2672 
2673 void PetscSolver::CreatePrivateContext()
2674 {
2675  FreePrivateContext();
2676  if (cid == SNES_CLASSID)
2677  {
2678  __mfem_snes_ctx *snes_ctx;
2679  ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2680  snes_ctx->op = NULL;
2681  snes_ctx->bchandler = NULL;
2682  snes_ctx->work = NULL;
2683  snes_ctx->jacType = Operator::PETSC_MATAIJ;
2684  private_ctx = (void*) snes_ctx;
2685  }
2686  else if (cid == TS_CLASSID)
2687  {
2688  __mfem_ts_ctx *ts_ctx;
2689  ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2690  ts_ctx->op = NULL;
2691  ts_ctx->bchandler = NULL;
2692  ts_ctx->work = NULL;
2693  ts_ctx->work2 = NULL;
2694  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2695  ts_ctx->cached_ijacstate = -1;
2696  ts_ctx->cached_rhsjacstate = -1;
2697  ts_ctx->cached_splits_xstate = -1;
2698  ts_ctx->cached_splits_xdotstate = -1;
2699  ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2700  ts_ctx->jacType = Operator::PETSC_MATAIJ;
2701  private_ctx = (void*) ts_ctx;
2702  }
2703 }
2704 
2705 void PetscSolver::FreePrivateContext()
2706 {
2707  if (!private_ctx) { return; }
2708  // free private context's owned objects
2709  if (cid == SNES_CLASSID)
2710  {
2711  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2712  delete snes_ctx->work;
2713  }
2714  else if (cid == TS_CLASSID)
2715  {
2716  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2717  delete ts_ctx->work;
2718  delete ts_ctx->work2;
2719  }
2720  ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2721 }
2722 
2723 // PetscBCHandler methods
2724 
2725 PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2726  enum PetscBCHandler::Type type_)
2727  : bctype(type_), setup(false), eval_t(0.0),
2728  eval_t_cached(std::numeric_limits<double>::min())
2729 {
2730  SetTDofs(ess_tdof_list);
2731 }
2732 
2733 void PetscBCHandler::SetTDofs(Array<int>& list)
2734 {
2735  ess_tdof_list.SetSize(list.Size());
2736  ess_tdof_list.Assign(list);
2737  setup = false;
2738 }
2739 
2740 void PetscBCHandler::SetUp(PetscInt n)
2741 {
2742  if (setup) { return; }
2743  if (bctype == CONSTANT)
2744  {
2745  eval_g.SetSize(n);
2746  this->Eval(eval_t,eval_g);
2747  eval_t_cached = eval_t;
2748  }
2749  else if (bctype == TIME_DEPENDENT)
2750  {
2751  eval_g.SetSize(n);
2752  }
2753  setup = true;
2754 }
2755 
2756 void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2757 {
2758  (*this).SetUp(x.Size());
2759  y = x;
2760  if (bctype == ZERO)
2761  {
2762  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2763  {
2764  y[ess_tdof_list[i]] = 0.0;
2765  }
2766  }
2767  else
2768  {
2769  if (bctype != CONSTANT && eval_t != eval_t_cached)
2770  {
2771  Eval(eval_t,eval_g);
2772  eval_t_cached = eval_t;
2773  }
2774  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2775  {
2776  y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2777  }
2778  }
2779 }
2780 
2781 void PetscBCHandler::ApplyBC(Vector &x)
2782 {
2783  (*this).SetUp(x.Size());
2784  if (bctype == ZERO)
2785  {
2786  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2787  {
2788  x[ess_tdof_list[i]] = 0.0;
2789  }
2790  }
2791  else
2792  {
2793  if (bctype != CONSTANT && eval_t != eval_t_cached)
2794  {
2795  Eval(eval_t,eval_g);
2796  eval_t_cached = eval_t;
2797  }
2798  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2799  {
2800  x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2801  }
2802  }
2803 }
2804 
2805 void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2806 {
2807  (*this).SetUp(x.Size());
2808  if (bctype == ZERO)
2809  {
2810  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2811  {
2812  y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2813  }
2814  }
2815  else
2816  {
2817  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2818  {
2819  y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2820  }
2821  }
2822 }
2823 
2824 void PetscBCHandler::Zero(Vector &x)
2825 {
2826  (*this).SetUp(x.Size());
2827  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2828  {
2829  x[ess_tdof_list[i]] = 0.0;
2830  }
2831 }
2832 
2833 void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2834 {
2835  (*this).SetUp(x.Size());
2836  y = x;
2837  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2838  {
2839  y[ess_tdof_list[i]] = 0.0;
2840  }
2841 }
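// [Editor's note: usage sketch, not part of petsc.cpp] A minimal time-dependent handler;
// the class name and boundary profile are hypothetical. Eval() is the only method a
// subclass needs to provide; the ApplyBC/FixResidualBC methods above consume its values.
//
//   class MyBC : public PetscBCHandler
//   {
//   public:
//      MyBC(Array<int> &ess) : PetscBCHandler(ess, PetscBCHandler::TIME_DEPENDENT) {}
//      void Eval(double t, Vector &g) override { g = std::sin(t); }
//   };
//
//   MyBC bc(ess_tdof_list);
//   // solver.SetBCHandler(&bc);   // see PetscSolver::SetBCHandler above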
2842 
2843 // PetscLinearSolver methods
2844 
2845 PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2846  bool wrapin)
2847  : PetscSolver(), Solver(), wrap(wrapin)
2848 {
2849  KSP ksp;
2850  ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2851  obj = (PetscObject)ksp;
2852  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2853  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2854 }
2855 
2856 PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2857  const std::string &prefix)
2858  : PetscSolver(), Solver(), wrap(false)
2859 {
2860  KSP ksp;
2861  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2862  obj = (PetscObject)ksp;
2863  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2864  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2865  SetOperator(A);
2866 }
2867 
2868 PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2869  const std::string &prefix)
2870  : PetscSolver(), Solver(), wrap(wrapin)
2871 {
2872  KSP ksp;
2873  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2874  obj = (PetscObject)ksp;
2875  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2876  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2877  SetOperator(A);
2878 }
2879 
2880 void PetscLinearSolver::SetOperator(const Operator &op)
2881 {
2882  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2883  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2884  (dynamic_cast<const PetscParMatrix *>(&op));
2885  const Operator *oA = dynamic_cast<const Operator *>(&op);
2886 
2887  // update base classes: Operator, Solver, PetscLinearSolver
2888  bool delete_pA = false;
2889  if (!pA)
2890  {
2891  if (hA)
2892  {
2893  // Create MATSHELL object or convert into a format suitable to construct preconditioners
2894  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2895  delete_pA = true;
2896  }
2897  else if (oA) // fallback to general operator
2898  {
2899  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2900  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2901  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2902  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2903  delete_pA = true;
2904  }
2905  }
2906  MFEM_VERIFY(pA, "Unsupported operation!");
2907 
2908  // Set operators into PETSc KSP
2909  KSP ksp = (KSP)obj;
2910  Mat A = pA->A;
2911  if (operatorset)
2912  {
2913  Mat C;
2914  PetscInt nheight,nwidth,oheight,owidth;
2915 
2916  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2917  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2918  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2919  if (nheight != oheight || nwidth != owidth)
2920  {
2921  // reinit without destroying the KSP
2922  // communicator remains the same
2923  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2924  delete X;
2925  delete B;
2926  X = B = NULL;
2927  }
2928  }
2929  ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2930 
2931  // Update PetscSolver
2932  operatorset = true;
2933 
2934  // Update the Operator fields.
2935  height = pA->Height();
2936  width = pA->Width();
2937 
2938  if (delete_pA) { delete pA; }
2939 }
2940 
2941 void PetscLinearSolver::SetOperators(const Operator &op, const Operator &pop)
2942 {
2943  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2944  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2945  (dynamic_cast<const PetscParMatrix *>(&op));
2946  const Operator *oA = dynamic_cast<const Operator *>(&op);
2947 
2948  PetscParMatrix *ppA = const_cast<PetscParMatrix *>
2949  (dynamic_cast<const PetscParMatrix *>(&pop));
2950  const Operator *poA = dynamic_cast<const Operator *>(&pop);
2951 
2952  // Convert Operator for linear system
2953  bool delete_pA = false;
2954  if (!pA)
2955  {
2956  if (hA)
2957  {
2958  // Create MATSHELL object or convert into a format suitable to construct preconditioners
2959  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2960  delete_pA = true;
2961  }
2962  else if (oA) // fallback to general operator
2963  {
2964  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2965  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2966  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2967  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2968  delete_pA = true;
2969  }
2970  }
2971  MFEM_VERIFY(pA, "Unsupported operation!");
2972 
2973  // Convert Operator to be preconditioned
2974  bool delete_ppA = false;
2975  if (!ppA)
2976  {
2977  if (oA == poA && !wrap) // Same operator, already converted
2978  {
2979  ppA = pA;
2980  }
2981  else
2982  {
2983  ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
2984  delete_ppA = true;
2985  }
2986  }
2987  MFEM_VERIFY(ppA, "Unsupported operation!");
2988 
2989  // Set operators into PETSc KSP
2990  KSP ksp = (KSP)obj;
2991  Mat A = pA->A;
2992  Mat P = ppA->A;
2993  if (operatorset)
2994  {
2995  Mat C;
2996  PetscInt nheight,nwidth,oheight,owidth;
2997 
2998  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2999  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3000  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3001  if (nheight != oheight || nwidth != owidth)
3002  {
3003  // reinit without destroying the KSP
3004  // communicator remains the same
3005  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3006  delete X;
3007  delete B;
3008  X = B = NULL;
3009  wrap = false;
3010  }
3011  }
3012  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3013 
3014  // Update PetscSolver
3015  operatorset = true;
3016 
3017  // Update the Operator fields.
3018  height = pA->Height();
3019  width = pA->Width();
3020 
3021  if (delete_pA) { delete pA; }
3022  if (delete_ppA) { delete ppA; }
3023 }
3024 
3025 void PetscLinearSolver::SetPreconditioner(Solver &precond)
3026 {
3027  KSP ksp = (KSP)obj;
3028 
3029  // Preserve Amat if already set
3030  Mat A = NULL;
3031  PetscBool amat;
3032  ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3033  if (amat)
3034  {
3035  ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3036  ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
3037  }
3038  PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
3039  if (ppc)
3040  {
3041  ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3042  }
3043  else
3044  {
3045  // wrap the Solver action
3046  // the Solver is assumed to be already set up
3047  // ownership of precond is not transferred,
3048  // consistent with MFEM's other linear solvers
3049  PC pc;
3050  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3051  ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
3052  }
3053  if (A)
3054  {
3055  Mat P;
3056 
3057  ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3058  ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
3059  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3060  ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3061  ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3062  }
3063 }
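// [Editor's note: usage sketch, not part of petsc.cpp] Any mfem::Solver can serve as the
// preconditioner: a PetscPreconditioner is attached directly with KSPSetPC, anything
// else is wrapped in a shell PC. The objects below are placeholders; the AMG object
// remains owned by the caller.
//
//   PetscLinearSolver ksp(MPI_COMM_WORLD);
//   ksp.SetOperator(*hA);            // hA: a HypreParMatrix*
//   HypreBoomerAMG amg(*hA);
//   ksp.SetPreconditioner(amg);
//   ksp.Mult(B, X);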
3064 
3065 void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
3066 {
3067  KSP ksp = (KSP)obj;
3068 
3069  if (!B || !X)
3070  {
3071  Mat pA = NULL;
3072  ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
3073  if (!B)
3074  {
3075  PetscParMatrix A = PetscParMatrix(pA, true);
3076  B = new PetscParVector(A, true, false);
3077  }
3078  if (!X)
3079  {
3080  PetscParMatrix A = PetscParMatrix(pA, true);
3081  X = new PetscParVector(A, false, false);
3082  }
3083  }
3084  B->PlaceMemory(b.GetMemory());
3085  X->PlaceMemory(x.GetMemory(),iterative_mode);
3086 
3087  Customize();
3088 
3089  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
3090  PCHKERRQ(ksp, ierr);
3091 
3092  // Solve the system.
3093  if (trans)
3094  {
3095  ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3096  }
3097  else
3098  {
3099  ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3100  }
3101  B->ResetMemory();
3102  X->ResetMemory();
3103 }
3104 
3105 void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
3106 {
3107  (*this).MultKernel(b,x,false);
3108 }
3109 
3110 void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
3111 {
3112  (*this).MultKernel(b,x,true);
3113 }
3114 
3115 PetscLinearSolver::~PetscLinearSolver()
3116 {
3117  MPI_Comm comm;
3118  KSP ksp = (KSP)obj;
3119  ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3120  ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3121 }
3122 
3123 // PetscPCGSolver methods
3124 
3125 PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix)
3126  : PetscLinearSolver(comm,prefix)
3127 {
3128  KSP ksp = (KSP)obj;
3129  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3130  // this is to obtain a textbook PCG
3131  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3132 }
3133 
3134 PetscPCGSolver::PetscPCGSolver(PetscParMatrix& A, const std::string &prefix)
3135  : PetscLinearSolver(A,prefix)
3136 {
3137  KSP ksp = (KSP)obj;
3138  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3139  // this is to obtain a textbook PCG
3140  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3141 }
3142 
3143 PetscPCGSolver::PetscPCGSolver(HypreParMatrix &A, bool wrap,
3144  const std::string &prefix)
3145  : PetscLinearSolver(A,wrap,prefix)
3146 {
3147  KSP ksp = (KSP)obj;
3148  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3149  // this is to obtain a textbook PCG
3150  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3151 }
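// [Editor's note: usage sketch, not part of petsc.cpp] Textbook CG (natural norm) on a
// hypre matrix; with wrap == true the matrix is only wrapped as a MATSHELL, so no copy
// or format conversion takes place. Names are hypothetical.
//
//   PetscPCGSolver pcg(*hA, /* wrap = */ true);
//   pcg.SetRelTol(1e-10);
//   pcg.SetMaxIter(500);
//   pcg.Mult(B, X);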
3152 
3153 // PetscPreconditioner methods
3154 
3155 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
3156  const std::string &prefix)
3157  : PetscSolver(), Solver()
3158 {
3159  PC pc;
3160  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3161  obj = (PetscObject)pc;
3162  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3163  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3164 }
3165 
3166 PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
3167  const string &prefix)
3168  : PetscSolver(), Solver()
3169 {
3170  PC pc;
3171  ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
3172  obj = (PetscObject)pc;
3173  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3174  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3175  SetOperator(A);
3176 }
3177 
3178 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
3179  const string &prefix)
3180  : PetscSolver(), Solver()
3181 {
3182  PC pc;
3183  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3184  obj = (PetscObject)pc;
3185  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3186  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3187  SetOperator(op);
3188 }
3189 
3190 void PetscPreconditioner::SetOperator(const Operator &op)
3191 {
3192  bool delete_pA = false;
3193  PetscParMatrix *pA = const_cast<PetscParMatrix *>
3194  (dynamic_cast<const PetscParMatrix *>(&op));
3195 
3196  if (!pA)
3197  {
3198  const Operator *cop = dynamic_cast<const Operator *>(&op);
3199  pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
3200  delete_pA = true;
3201  }
3202 
3203  // Set operators into PETSc PC
3204  PC pc = (PC)obj;
3205  Mat A = pA->A;
3206  if (operatorset)
3207  {
3208  Mat C;
3209  PetscInt nheight,nwidth,oheight,owidth;
3210 
3211  ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3212  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3213  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3214  if (nheight != oheight || nwidth != owidth)
3215  {
3216  // reinit without destroying the PC
3217  // communicator remains the same
3218  ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3219  delete X;
3220  delete B;
3221  X = B = NULL;
3222  }
3223  }
3224  ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
3225 
3226  // Update PetscSolver
3227  operatorset = true;
3228 
3229  // Update the Operator fields.
3230  height = pA->Height();
3231  width = pA->Width();
3232 
3233  if (delete_pA) { delete pA; };
3234 }
3235 
3236 void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
3237  bool trans) const
3238 {
3239  MFEM_VERIFY(!iterative_mode,
3240  "Iterative mode not supported for PetscPreconditioner");
3241  PC pc = (PC)obj;
3242 
3243  if (!B || !X)
3244  {
3245  Mat pA = NULL;
3246  ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
3247  if (!B)
3248  {
3249  PetscParMatrix A(pA, true);
3250  B = new PetscParVector(A, true, false);
3251  }
3252  if (!X)
3253  {
3254  PetscParMatrix A(pA, true);
3255  X = new PetscParVector(A, false, false);
3256  }
3257  }
3258  B->PlaceMemory(b.GetMemory());
3259  X->PlaceMemory(x.GetMemory());
3260 
3261  Customize();
3262 
3263  // Apply the preconditioner.
3264  if (trans)
3265  {
3266  ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3267  }
3268  else
3269  {
3270  ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3271  }
3272  B->ResetMemory();
3273  X->ResetMemory();
3274 }
3275 
3276 void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
3277 {
3278  (*this).MultKernel(b,x,false);
3279 }
3280 
3281 void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
3282 {
3283  (*this).MultKernel(b,x,true);
3284 }
3285 
3286 PetscPreconditioner::~PetscPreconditioner()
3287 {
3288  MPI_Comm comm;
3289  PC pc = (PC)obj;
3290  ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3291  ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3292 }
3293 
3294 // PetscBDDCSolver methods
3295 
3296 // Coordinates sampling function
3297 static void func_coords(const Vector &x, Vector &y)
3298 {
3299  y = x;
3300 }
3301 
3302 void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
3303 {
3304  MPI_Comm comm = PetscObjectComm(obj);
3305 
3306  // get PETSc object
3307  PC pc = (PC)obj;
3308  Mat pA;
3309  ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3310 
3311  // the matrix should be of type MATIS
3312  PetscBool ismatis;
3313  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
3314  PCHKERRQ(pA,ierr);
3315  MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
3316 
3317  // Check options
3318  ParFiniteElementSpace *fespace = opts.fespace;
3319  if (opts.netflux && !fespace)
3320  {
3321  MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3322  }
3323 
3324  // Attach default near-null space to local matrices
3325  {
3326  MatNullSpace nnsp;
3327  Mat lA;
3328  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3329  ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
3330  &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3331  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3332  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3333  }
3334 
3335  // set PETSc PC type to PCBDDC
3336  ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
3337 
3338  PetscInt rst,nl;
3339  ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3340  ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3341 
3342  // index sets for fields splitting and coordinates for nodal spaces
3343  IS *fields = NULL;
3344  PetscInt nf = 0;
3345  PetscInt sdim = 0;
3346  PetscReal *coords = NULL;
3347  if (fespace)
3348  {
3349  int vdim = fespace->GetVDim();
3350 
3351  // Ideally, the block size should be set at matrix creation,
3352  // but the MFEM assembly does not allow this
3353  if (fespace->GetOrdering() == Ordering::byVDIM)
3354  {
3355  Mat lA;
3356  ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3357  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3358  ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3359  }
3360 
3361  // fields
3362  if (vdim > 1)
3363  {
3364  PetscInt st = rst, bs, inc, nlf;
3365  nf = vdim;
3366  nlf = nl/nf;
3367  ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3368  if (fespace->GetOrdering() == Ordering::byVDIM)
3369  {
3370  inc = 1;
3371  bs = vdim;
3372  }
3373  else
3374  {
3375  inc = nlf;
3376  bs = 1;
3377  }
3378  for (PetscInt i = 0; i < nf; i++)
3379  {
3380  ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3381  st += inc;
3382  }
3383  }
3384 
3385  // coordinates
3386  const FiniteElementCollection *fec = fespace->FEColl();
3387  bool h1space = dynamic_cast<const H1_FECollection*>(fec);
3388  if (h1space)
3389  {
3390  ParFiniteElementSpace *fespace_coords = fespace;
3391 
3392  sdim = fespace->GetParMesh()->SpaceDimension();
3393  if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
3394  {
3395  fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3396  Ordering::byVDIM);
3397  }
3398  VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3399  ParGridFunction gf_coords(fespace_coords);
3400  gf_coords.ProjectCoefficient(coeff_coords);
3401  HypreParVector *hvec_coords = gf_coords.ParallelProject();
3402  PetscScalar *data_coords = (PetscScalar*)mfem::Read(hvec_coords->GetMemory(),
3403  hvec_coords->Size(),false);
3404 
3405  // likely elasticity -> we attach rigid-body modes as near-null space information to the local matrices
3406  if (vdim == sdim)
3407  {
3408  MatNullSpace nnsp;
3409  Mat lA;
3410  Vec pvec_coords,lvec_coords;
3411  ISLocalToGlobalMapping l2g;
3412  PetscSF sf;
3413  PetscLayout rmap;
3414  const PetscInt *gidxs;
3415  PetscInt nleaves;
3416 
3417  ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3418  hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3419  CCHKERRQ(comm,ierr);
3420  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3421  ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3422  ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3423  ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3424  ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3425  ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3426  ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3427  ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3428  ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3429  CCHKERRQ(comm,ierr);
3430  ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3431  {
3432  const PetscScalar *garray;
3433  PetscScalar *larray;
3434 
3435  ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3436  ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3437 #if PETSC_VERSION_LT(3,15,0)
3438  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3439  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3440 #else
3441  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3442  CCHKERRQ(comm,ierr);
3443  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3444  CCHKERRQ(comm,ierr);
3445 #endif
3446  ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3447  ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3448  }
3449  ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3450  ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3451  CCHKERRQ(PETSC_COMM_SELF,ierr);
3452  ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3453  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3454  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3455  ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3456  }
3457 
3458  // each dof has an associated tuple of coordinates
3459  ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3460  if (nf > 0)
3461  {
3462  for (PetscInt i = 0; i < nf; i++)
3463  {
3464  const PetscInt *idxs;
3465  PetscInt nn;
3466 
3467  // It also handles the case of fespace not ordered by VDIM
3468  ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3469  ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3470  for (PetscInt j = 0; j < nn; j++)
3471  {
3472  PetscInt idx = idxs[j]-rst;
3473  for (PetscInt d = 0; d < sdim; d++)
3474  {
3475  coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3476  }
3477  }
3478  ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3479  }
3480  }
3481  else
3482  {
3483  for (PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3484  }
3485  if (fespace_coords != fespace)
3486  {
3487  delete fespace_coords;
3488  }
3489  delete hvec_coords;
3490  }
3491  }
3492 
3493  // index sets specifying boundary dofs (Essential = dir, Natural = neu)
3494  IS dir = NULL, neu = NULL;
3495 
3496  // Extract l2l matrices
3497  Array<Mat> *l2l = NULL;
3498  if (opts.ess_dof_local || opts.nat_dof_local)
3499  {
3500  PetscContainer c;
3501 
3502  ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
3503  MFEM_VERIFY(c,"Local-to-local PETSc container not present");
3504  ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
3505  }
3506 
3507  // check information about index sets (essential dofs, fields, etc.)
3508 #ifdef MFEM_DEBUG
3509  {
3510  // make sure ess/nat_dof have been collectively set
3511  PetscBool lpr = PETSC_FALSE,pr;
3512  if (opts.ess_dof) { lpr = PETSC_TRUE; }
3513  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3514  PCHKERRQ(pA,ierr);
3515  MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
3516  lpr = PETSC_FALSE;
3517  if (opts.nat_dof) { lpr = PETSC_TRUE; }
3518  ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3519  PCHKERRQ(pA,ierr);
3520  MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
3521  // make sure fields have been collectively set
3522  PetscInt ms[2],Ms[2];
3523  ms[0] = -nf; ms[1] = nf;
3524  ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3525  PCHKERRQ(pA,ierr);
3526  MFEM_VERIFY(-Ms[0] == Ms[1],
3527  "number of fields should be the same across processes");
3528  }
3529 #endif
3530 
3531  // boundary sets
3532  if (opts.ess_dof)
3533  {
3534  PetscInt st = opts.ess_dof_local ? 0 : rst;
3535  if (!opts.ess_dof_local)
3536  {
3537  // need to compute the boundary dofs in global ordering
3538  ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
3539  CCHKERRQ(comm,ierr);
3540  ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3541  }
3542  else
3543  {
3544  // need to compute a list for the marked boundary dofs in local ordering
3545  ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3546  CCHKERRQ(comm,ierr);
3547  ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3548  }
3549  }
3550  if (opts.nat_dof)
3551  {
3552  PetscInt st = opts.nat_dof_local ? 0 : rst;
3553  if (!opts.nat_dof_local)
3554  {
3555  // need to compute the boundary dofs in global ordering
3556  ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3557  CCHKERRQ(comm,ierr);
3558  ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3559  }
3560  else
3561  {
3562  // need to compute a list for the marked boundary dofs in local ordering
3563  ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3564  CCHKERRQ(comm,ierr);
3565  ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3566  }
3567  }
3568 
3569  // field splitting
3570  if (fields)
3571  {
3572  ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3573  }
3574  for (PetscInt i = 0; i < nf; i++)
3575  {
3576  ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3577  }
3578  ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3579 
3580  // coordinates
3581  if (coords)
3582  {
3583  ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3584  }
3585  ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3586 
3587  // code for the block size is disabled since we cannot change the matrix
3588  // block size after it has been set up
3589  // int bs = 1;
3590 
3591  // Customize using the finite element space (if any)
3592  if (fespace)
3593  {
3594  const FiniteElementCollection *fec = fespace->FEColl();
3595  bool edgespace, rtspace, h1space;
3596  bool needint = opts.netflux;
3597  bool tracespace, rt_tracespace, edge_tracespace;
3598  int vdim, dim, p;
3599  PetscBool B_is_Trans = PETSC_FALSE;
3600 
3601  ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3602  dim = pmesh->Dimension();
3603  vdim = fespace->GetVDim();
3604  h1space = dynamic_cast<const H1_FECollection*>(fec);
3605  rtspace = dynamic_cast<const RT_FECollection*>(fec);
3606  edgespace = dynamic_cast<const ND_FECollection*>(fec);
3607  edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3608  rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3609  tracespace = edge_tracespace || rt_tracespace;
3610 
3611  p = 1;
3612  if (fespace->GetNE() > 0)
3613  {
3614  if (!tracespace)
3615  {
3616  p = fespace->GetElementOrder(0);
3617  }
3618  else
3619  {
3620  p = fespace->GetFaceOrder(0);
3621  if (dim == 2) { p++; }
3622  }
3623  }
3624 
3625  if (edgespace) // H(curl)
3626  {
3627  if (dim == 2)
3628  {
3629  needint = true;
3630  if (tracespace)
3631  {
3632  MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3633  " not using auxiliary quadrature");
3634  needint = false;
3635  }
3636  }
3637  else
3638  {
3639  FiniteElementCollection *vfec;
3640  if (tracespace)
3641  {
3642  vfec = new H1_Trace_FECollection(p,dim);
3643  }
3644  else
3645  {
3646  vfec = new H1_FECollection(p,dim);
3647  }
3648  ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3649  ParDiscreteLinearOperator *grad;
3650  grad = new ParDiscreteLinearOperator(vfespace,fespace);
3651  if (tracespace)
3652  {
3653  grad->AddTraceFaceInterpolator(new GradientInterpolator);
3654  }
3655  else
3656  {
3657  grad->AddDomainInterpolator(new GradientInterpolator);
3658  }
3659  grad->Assemble();
3660  grad->Finalize();
3661  HypreParMatrix *hG = grad->ParallelAssemble();
3662  PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3663  delete hG;
3664  delete grad;
3665 
3666  PetscBool conforming = PETSC_TRUE;
3667  if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3668  ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3669  PCHKERRQ(pc,ierr);
3670  delete vfec;
3671  delete vfespace;
3672  delete G;
3673  }
3674  }
3675  else if (rtspace) // H(div)
3676  {
3677  needint = true;
3678  if (tracespace)
3679  {
3680  MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3681  " auxiliary quadrature");
3682  needint = false;
3683  }
3684  }
3685  else if (h1space) // H(grad), only for the vector case
3686  {
3687  if (vdim != dim) { needint = false; }
3688  }
3689 
3690  PetscParMatrix *B = NULL;
3691  if (needint)
3692  {
3693  // Generate a bilinear form in unassembled format which is used to
3694  // compute the net-flux across subdomain boundaries for H(div) and
3695  // Elasticity/Stokes, and the line integral \int u x n of 2D H(curl) fields
3696  FiniteElementCollection *auxcoll;
3697  if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3698  else
3699  {
3700  if (h1space)
3701  {
3702  auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3703  }
3704  else
3705  {
3706  auxcoll = new L2_FECollection(p,dim);
3707  }
3708  }
3709  ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3710  ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3711 
3712  if (edgespace)
3713  {
3714  if (tracespace)
3715  {
3716  b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3717  }
3718  else
3719  {
3720  b->AddDomainIntegrator(new VectorFECurlIntegrator);
3721  }
3722  }
3723  else if (rtspace)
3724  {
3725  if (tracespace)
3726  {
3727  b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3728  }
3729  else
3730  {
3731  b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3732  }
3733  }
3734  else
3735  {
3736  b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3737  }
3738  b->Assemble();
3739  b->Finalize();
3740  OperatorHandle Bh(Operator::PETSC_MATIS);
3741  b->ParallelAssemble(Bh);
3742  Bh.Get(B);
3743  Bh.SetOperatorOwner(false);
3744 
3745  if (dir) // if essential dofs are present, we need to zero the columns
3746  {
3747  Mat pB = *B;
3748  ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3749  if (!opts.ess_dof_local)
3750  {
3751  ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3752  }
3753  else
3754  {
3755  ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3756  }
3757  B_is_Trans = PETSC_TRUE;
3758  }
3759  delete b;
3760  delete pspace;
3761  delete auxcoll;
3762  }
3763 
3764  if (B)
3765  {
3766  ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3767  }
3768  delete B;
3769  }
3770  ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3771  ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3772 }
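// [Editor's note: usage sketch, not part of petsc.cpp] Driving the constructor above;
// the setter names on PetscBDDCSolverParams are assumed from MFEM's PETSc examples and
// mirror the fields queried here (fespace, ess_dof, nat_dof, netflux). The operator must
// be assembled in unassembled (PETSC_MATIS) format.
//
//   PetscBDDCSolverParams opts;
//   opts.SetSpace(fespace);           // enables field splits, coordinates, near-null spaces
//   opts.SetEssBdrDofs(&ess_dofs);    // essential dof markers (global ordering here)
//   PetscBDDCSolver bddc(pA, opts);   // pA: a PetscParMatrix of type PETSC_MATIS
//   pcg.SetPreconditioner(bddc);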
3773 
3774 PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3775  const PetscBDDCSolverParams &opts,
3776  const std::string &prefix)
3777  : PetscPreconditioner(A,prefix)
3778 {
3779  BDDCSolverConstructor(opts);
3780  Customize();
3781 }
3782 
3783 PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3784  const PetscBDDCSolverParams &opts,
3785  const std::string &prefix)
3786  : PetscPreconditioner(comm,op,prefix)
3787 {
3788  BDDCSolverConstructor(opts);
3789  Customize();
3790 }
3791 
3792 PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3793  const string &prefix)
3794  : PetscPreconditioner(comm,op,prefix)
3795 {
3796  PC pc = (PC)obj;
3797  ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3798 
3799  Mat pA;
3800  ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3801 
3802  // Check if pA is of type MATNEST
3803  PetscBool isnest;
3804  ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3805 
3806  PetscInt nr = 0;
3807  IS *isrow = NULL;
3808  if (isnest) // we know the fields
3809  {
3810  ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3811  ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3812  ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3813  }
3814 
3815  // We need to customize here, before setting the index sets.
3816  // This is because PCFieldSplitSetType customizes the function
3817  // pointers. SubSolver options will be processed during PCApply
3818  Customize();
3819 
3820  for (PetscInt i=0; i<nr; i++)
3821  {
3822  ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3823  }
3824  ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3825 }
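// [Editor's note: usage sketch, not part of petsc.cpp] Wrapping a BlockOperator as a
// MATNEST exposes its blocks as the splits recovered by the constructor above; names
// are hypothetical.
//
//   PetscParMatrix pA(MPI_COMM_WORLD, &block_op, Operator::PETSC_MATNEST);
//   PetscFieldSplitSolver fs(MPI_COMM_WORLD, pA, "fs_");
//   // run with:  -fs_pc_fieldsplit_type schur  (or additive, multiplicative, ...)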
3826 
3827 PetscH2Solver::PetscH2Solver(Operator &op,
3828  ParFiniteElementSpace *fes,
3829  const std::string &prefix)
3830  : PetscPreconditioner(fes->GetParMesh()->GetComm(),prefix)
3831 {
3833  MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
3834  MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE);
3835  SetOperator(A);
3836  H2SolverConstructor(fes);
3837  Customize();
3838 }
3839 
3840 void PetscH2Solver::H2SolverConstructor(ParFiniteElementSpace *fes)
3841 {
3842 #if defined(PETSC_HAVE_H2OPUS)
3843  int sdim = fes->GetParMesh()->SpaceDimension();
3844  int vdim = fes->GetVDim();
3845  const FiniteElementCollection *fec = fes->FEColl();
3846  ParFiniteElementSpace *fes_coords = NULL;
3847 
3848  if (vdim != sdim || fes->GetOrdering() != Ordering::byVDIM)
3849  {
3850  fes_coords = new ParFiniteElementSpace(fes->GetParMesh(),fec,sdim,
3851  Ordering::byVDIM);
3852  fes = fes_coords;
3853  }
3854  VectorFunctionCoefficient ccoords(sdim, func_coords);
3855 
3856  ParGridFunction coords(fes);
3857  coords.ProjectCoefficient(ccoords);
3858  Vector c(fes->GetTrueVSize());
3859  coords.ParallelProject(c);
3860  delete fes_coords;
3861  PCSetType(*this,PCH2OPUS);
3862  PCSetCoordinates(*this,sdim,c.Size()/sdim,(PetscReal*)mfem::Read(c.GetMemory(),
3863  c.Size(),false));
3864  PCSetFromOptions(*this);
3865 #else
3866  MFEM_ABORT("Need PETSc configured with --download-h2opus");
3867 #endif
3868 }
3869 
3870 // PetscNonlinearSolver methods
3871 
3872 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3873  const std::string &prefix)
3874  : PetscSolver(), Solver()
3875 {
3876  // Create the actual solver object
3877  SNES snes;
3878  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3879  obj = (PetscObject)snes;
3880  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3881  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3882 
3883  // Allocate private solver context
3884  CreatePrivateContext();
3885 }
3886 
3887 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
3888  const std::string &prefix)
3889  : PetscSolver(), Solver()
3890 {
3891  // Create the actual solver object
3892  SNES snes;
3893  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3894  obj = (PetscObject)snes;
3895  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3896  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3897 
3898  // Allocate private solver context
3899  CreatePrivateContext();
3900 
3901  SetOperator(op);
3902 }
3903 
3904 PetscNonlinearSolver::~PetscNonlinearSolver()
3905 {
3906  MPI_Comm comm;
3907  SNES snes = (SNES)obj;
3908  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
3909  ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3910 }
3911 
3912 void PetscNonlinearSolver::SetOperator(const Operator &op)
3913 {
3914  SNES snes = (SNES)obj;
3915 
3916  if (operatorset)
3917  {
3918  PetscBool ls,gs;
3919  void *fctx,*jctx;
3920 
3921  ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3922  PCHKERRQ(snes, ierr);
3923  ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3924  PCHKERRQ(snes, ierr);
3925 
3926  ls = (PetscBool)(height == op.Height() && width == op.Width() &&
3927  (void*)&op == fctx &&
3928  (void*)&op == jctx);
3929  ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3930  PetscObjectComm((PetscObject)snes));
3931  PCHKERRQ(snes,ierr);
3932  if (!gs)
3933  {
3934  ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3935  delete X;
3936  delete B;
3937  X = B = NULL;
3938  }
3939  }
3940  else
3941  {
3942  /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
3943  yet set. We default to backtracking */
3944  SNESLineSearch ls;
3945  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3946  ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3947  }
3948 
3949  // If we do not pass matrices in, the default matrix type for DMShell is MATDENSE
3950  // in 3.15, which may cause issues.
3951  Mat dummy;
3952  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)snes),op.Height(),
3953  op.Height(),&dummy);
3954 
3955  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3956  snes_ctx->op = (Operator*)&op;
3957  ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
3958  PCHKERRQ(snes, ierr);
3959  ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
3960  (void *)snes_ctx);
3961  PCHKERRQ(snes, ierr);
3962 
3963  ierr = MatDestroy(&dummy);
3964  PCHKERRQ(snes, ierr);
3965 
3966  // Update PetscSolver
3967  operatorset = true;
3968 
3969  // Update the Operator fields.
3970  height = op.Height();
3971  width = op.Width();
3972 }
3973 
3974 void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
3975 {
3976  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3977  snes_ctx->jacType = jacType;
3978 }
3979 
3980 void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *,const Vector&,
3981  double*))
3982 {
3983  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3984  snes_ctx->objective = objfn;
3985 
3986  SNES snes = (SNES)obj;
3987  ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
3988  PCHKERRQ(snes, ierr);
3989 }
3990 
3991 void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *,const Vector&,
3992  Vector&, Vector&,
3993  bool&, bool&))
3994 {
3995  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
3996  snes_ctx->postcheck = post;
3997 
3998  SNES snes = (SNES)obj;
3999  SNESLineSearch ls;
4000  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4001  ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
4002  PCHKERRQ(ls, ierr);
4003 }
4004 
4005 void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *,int,
4006  const Vector&,
4007  const Vector&,
4008  const Vector&,
4009  const Vector&))
4010 {
4011  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4012  snes_ctx->update = update;
4013 
4014  SNES snes = (SNES)obj;
4015  ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4016 }
4017 
4018 void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
4019 {
4020  SNES snes = (SNES)obj;
4021 
4022  bool b_nonempty = b.Size();
4023  if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
4024  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
4025  X->PlaceMemory(x.GetMemory(),iterative_mode);
4026  if (b_nonempty) { B->PlaceMemory(b.GetMemory()); }
4027  else { *B = 0.0; }
4028 
4029  Customize();
4030 
4031  if (!iterative_mode) { *X = 0.; }
4032 
4033  // Solve the system.
4034  ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4035  X->ResetMemory();
4036  if (b_nonempty) { B->ResetMemory(); }
4037 }
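// [Editor's note] Illustrative usage sketch, not part of petsc.cpp. It assumes
// an mfem::Operator `N` implementing the nonlinear residual and its
// GetGradient(), and a true-dof vector `x` holding the initial guess; the
// options prefix "newton_" is hypothetical.
//
//   mfem::PetscNonlinearSolver newton(MPI_COMM_WORLD, N, "newton_");
//   newton.SetJacobianType(mfem::Operator::PETSC_MATAIJ);
//   newton.iterative_mode = true;    // start the solve from the content of x
//   mfem::Vector zero;               // empty right-hand side: solve N(x) = 0
//   newton.Mult(zero, x);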
4038 
4039 // PetscODESolver methods
4040 
4041 PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
4042  : PetscSolver(), ODESolver()
4043 {
4044  // Create the actual solver object
4045  TS ts;
4046  ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4047  obj = (PetscObject)ts;
4048  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
4049  ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4050 
4051  // Allocate private solver context
4052  CreatePrivateContext();
4053 
4054  // Default options, to comply with the current interface to ODESolver.
4055  ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4056  PCHKERRQ(ts,ierr);
4057  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4058  PCHKERRQ(ts,ierr);
4059  TSAdapt tsad;
4060  ierr = TSGetAdapt(ts,&tsad);
4061  PCHKERRQ(ts,ierr);
4062  ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4063  PCHKERRQ(ts,ierr);
4064 }
4065 
4066 PetscODESolver::~PetscODESolver()
4067 {
4068  MPI_Comm comm;
4069  TS ts = (TS)obj;
4070  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
4071  ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4072 }
4073 
4074 void PetscODESolver::Init(TimeDependentOperator &f_,
4075  enum PetscODESolver::Type type)
4076 {
4077  TS ts = (TS)obj;
4078 
4079  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4080  if (operatorset)
4081  {
4082  ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4083  delete X;
4084  X = NULL;
4085  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4086  ts_ctx->cached_ijacstate = -1;
4087  ts_ctx->cached_rhsjacstate = -1;
4088  ts_ctx->cached_splits_xstate = -1;
4089  ts_ctx->cached_splits_xdotstate = -1;
4090  }
4091  f = &f_;
4092 
4093  // Set functions in TS
4094  ts_ctx->op = &f_;
4095  if (f_.isImplicit())
4096  {
4097  Mat dummy;
4098  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4099  f_.Height(),&dummy);
4100  PCHKERRQ(ts, ierr);
4101  ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
4102  PCHKERRQ(ts, ierr);
4103  ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (void *)ts_ctx);
4104  PCHKERRQ(ts, ierr);
4105  ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4106  PCHKERRQ(ts, ierr);
4107  ierr = MatDestroy(&dummy);
4108  PCHKERRQ(ts, ierr);
4109  }
4110  if (!f_.isHomogeneous())
4111  {
4112  Mat dummy = NULL;
4113  if (!f_.isImplicit())
4114  {
4115  ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4116  PCHKERRQ(ts, ierr);
4117  }
4118  else
4119  {
4120  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4121  f_.Height(),&dummy);
4122  PCHKERRQ(ts, ierr);
4123  }
4124  ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
4125  PCHKERRQ(ts, ierr);
4126  ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4127  (void *)ts_ctx);
4128  PCHKERRQ(ts, ierr);
4129  ierr = MatDestroy(&dummy);
4130  PCHKERRQ(ts, ierr);
4131  }
4132  operatorset = true;
4133 
4134  SetType(type);
4135 
4136  // Set solution vector
4137  PetscParVector X(PetscObjectComm(obj),*f,false,true);
4138  ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4139 
4140  // Compose special purpose function for PDE-constrained optimization
4141  PetscBool use = PETSC_TRUE;
4142  ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
4143  if (use && f_.isImplicit())
4144  {
4145  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4146  __mfem_ts_computesplits);
4147  PCHKERRQ(ts,ierr);
4148  }
4149  else
4150  {
4151  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4152  NULL);
4153  PCHKERRQ(ts,ierr);
4154  }
4155 }
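// [Editor's note] The "-mfem_use_splitjac" flag read above with
// PetscOptionsGetBool defaults to true; passing "-mfem_use_splitjac 0" on the
// command line (or in a PETSc options file) disables the composed
// split-Jacobian callback.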
4156 
4157 void PetscODESolver::SetJacobianType(Operator::Type jacType)
4158 {
4159  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4160  ts_ctx->jacType = jacType;
4161 }
4162 
4163 PetscODESolver::Type PetscODESolver::GetType() const
4164 {
4165  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4166  return ts_ctx->type;
4167 }
4168 
4169 void PetscODESolver::SetType(PetscODESolver::Type type)
4170 {
4171  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4172 
4173  TS ts = (TS)obj;
4174  ts_ctx->type = type;
4175  if (type == ODE_SOLVER_LINEAR)
4176  {
4177  ierr = TSSetProblemType(ts, TS_LINEAR);
4178  PCHKERRQ(ts, ierr);
4179  }
4180  else
4181  {
4182  ierr = TSSetProblemType(ts, TS_NONLINEAR);
4183  PCHKERRQ(ts, ierr);
4184  }
4185 }
4186 
4187 void PetscODESolver::Step(Vector &x, double &t, double &dt)
4188 {
4189  // Pass the parameters to PETSc.
4190  TS ts = (TS)obj;
4191  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4192  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4193 
4194  PetscInt i;
4195  ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4196 
4197  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4198  X->PlaceMemory(x.GetMemory(),true);
4199 
4200  Customize();
4201 
4202  // Monitor initial step
4203  if (!i)
4204  {
4205  ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4206  }
4207 
4208  // Take the step.
4209  ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4210  ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4211 
4212  // Return the current time and the actually used time step to the caller.
4213  // We cannot use TSGetTimeStep() as it returns the next candidate step
4214  PetscReal pt;
4215  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4216  dt = pt - (PetscReal)t;
4217  t = pt;
4218 
4219  // Monitor current step
4220  ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4221 
4222  X->ResetMemory();
4223 }
4224 
4225 void PetscODESolver::Run(Vector &x, double &t, double &dt, double t_final)
4226 {
4227  // Give the parameters to PETSc.
4228  TS ts = (TS)obj;
4229  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4230  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4231  ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4232  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4233  PCHKERRQ(ts, ierr);
4234 
4235  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4236  X->PlaceMemory(x.GetMemory(),true);
4237 
4238  Customize();
4239 
4240  // Reset Jacobian caching since the user may have changed
4241  // the parameters of the solver
4242  // We don't do this in the Step method because two consecutive
4243  // Step() calls are done with the same operator
4244  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4245  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4246  ts_ctx->cached_ijacstate = -1;
4247  ts_ctx->cached_rhsjacstate = -1;
4248  ts_ctx->cached_splits_xstate = -1;
4249  ts_ctx->cached_splits_xdotstate = -1;
4250 
4251  // Take the steps.
4252  ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4253  X->ResetMemory();
4254 
4255  // Get back final time and time step to caller.
4256  PetscReal pt;
4257  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4258  t = pt;
4259  ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4260  dt = pt;
4261 }
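// [Editor's note] Illustrative usage sketch, not part of petsc.cpp. It assumes
// a user-defined mfem::TimeDependentOperator `oper` and a true-dof vector `u`
// holding the initial condition; the options prefix "ts_" is hypothetical.
//
//   mfem::PetscODESolver ode(MPI_COMM_WORLD, "ts_");
//   ode.Init(oper, mfem::PetscODESolver::ODE_SOLVER_GENERAL);
//   double t = 0.0, dt = 1e-2, t_final = 1.0;
//   while (t < t_final) { ode.Step(u, t, dt); }   // or: ode.Run(u, t, dt, t_final);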
4262 
4263 } // namespace mfem
4264 
4265 #include "petsc/private/petscimpl.h"
4266 #include "petsc/private/matimpl.h"
4267 
4268 // auxiliary functions
4269 static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
4270  void* ctx)
4271 {
4272  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4273 
4274  PetscFunctionBeginUser;
4275  if (!monctx)
4276  {
4277  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4278  }
4279  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4280  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4281  monctx->monitor);
4282 
4283  if (user_monitor->mon_sol)
4284  {
4285  mfem::PetscParVector V(x,true);
4286  user_monitor->MonitorSolution(it,t,V);
4287  }
4288  user_monitor->MonitorSolver(solver);
4289  PetscFunctionReturn(0);
4290 }
4291 
4292 static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
4293  Vec f,void *ctx)
4294 {
4295  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4296 
4297  PetscFunctionBeginUser;
4298  mfem::PetscParVector xx(x,true);
4299  mfem::PetscParVector yy(xp,true);
4300  mfem::PetscParVector ff(f,true);
4301 
4302  mfem::TimeDependentOperator *op = ts_ctx->op;
4303  op->SetTime(t);
4304 
4305  if (ts_ctx->bchandler)
4306  {
4307  // we evaluate the ImplicitMult method with the correct bc
4308  // this means the correct time derivative for essential boundary
4309  // dofs is zero
4310  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
4311  if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
4312  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4313  mfem::Vector* txx = ts_ctx->work;
4314  mfem::Vector* txp = ts_ctx->work2;
4315  bchandler->SetTime(t);
4316  bchandler->ApplyBC(xx,*txx);
4317  bchandler->ZeroBC(yy,*txp);
4318  op->ImplicitMult(*txx,*txp,ff);
4319  // and fix the residual (i.e. f_\partial\Omega = u - g(t))
4320  bchandler->FixResidualBC(xx,ff);
4321  }
4322  else
4323  {
4324  // use the ImplicitMult method of the class
4325  op->ImplicitMult(xx,yy,ff);
4326  }
4327  ff.UpdateVecFromFlags();
4328  PetscFunctionReturn(0);
4329 }
4330 
4331 static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
4332  void *ctx)
4333 {
4334  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4335 
4336  PetscFunctionBeginUser;
4337  if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
4338  mfem::PetscParVector xx(x,true);
4339  mfem::PetscParVector ff(f,true);
4340  mfem::TimeDependentOperator *top = ts_ctx->op;
4341  top->SetTime(t);
4342 
4343  // use the ExplicitMult method - compute the RHS function
4344  top->ExplicitMult(xx,ff);
4345 
4346  ff.UpdateVecFromFlags();
4347  PetscFunctionReturn(0);
4348 }
4349 
4350 static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
4351  Vec xp, PetscReal shift, Mat A, Mat P,
4352  void *ctx)
4353 {
4354  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4355  mfem::Vector *xx;
4356  PetscScalar *array;
4357  PetscReal eps = 0.001; /* 0.1% difference */
4358  PetscInt n;
4359  PetscObjectState state;
4360  PetscErrorCode ierr;
4361 
4362  PetscFunctionBeginUser;
4363  // Matrix-free case
4364  if (A && A != P)
4365  {
4366  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4367  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4368  }
4369 
4370  // Avoid recomputing the Jacobian if we have already done so.
4371  // The relative-tolerance comparison should be fine, given that
4372  // two consecutive shifts should have similar magnitude
4373  ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4374  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4375  std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4376  state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
4377 
4378  // update time
4379  mfem::TimeDependentOperator *op = ts_ctx->op;
4380  op->SetTime(t);
4381 
4382  // wrap Vecs with Vectors
4383  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4384  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4385  mfem::Vector yy(array,n);
4386  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4387  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4388  if (!ts_ctx->bchandler)
4389  {
4390  xx = new mfem::Vector(array,n);
4391  }
4392  else
4393  {
4394  // make sure we compute a Jacobian with the correct boundary values
4395  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4396  mfem::Vector txx(array,n);
4397  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4398  xx = ts_ctx->work;
4399  bchandler->SetTime(t);
4400  bchandler->ApplyBC(txx,*xx);
4401  }
4402  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4403 
4404  // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
4405  mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
4406  if (!ts_ctx->bchandler) { delete xx; }
4407  ts_ctx->cached_shift = shift;
4408 
4409  // Convert to the operator type requested if needed
4410  bool delete_pA = false;
4411  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4412  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4413  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4414  pA->GetType() != ts_ctx->jacType))
4415  {
4416  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4417  ts_ctx->jacType);
4418  delete_pA = true;
4419  }
4420 
4421  // Eliminate essential dofs
4422  if (ts_ctx->bchandler)
4423  {
4424  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4425  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4426  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4427  }
4428 
4429  // Get nonzerostate
4430  PetscObjectState nonzerostate;
4431  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4432 
4433  // Avoid an unneeded copy of the matrix by replacing its header in place
4434  Mat B;
4435  B = pA->ReleaseMat(false);
4436  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4437  if (delete_pA) { delete pA; }
4438 
4439  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4440  // preconditioner fails because MatCreateSubMatrix_Nest does not
4441  // actually return a matrix. Instead, for efficiency reasons,
4442  // it returns a reference to the submatrix. The second time it
4443  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4444  // aborts since the two submatrices are actually different.
4445  // We circumvent this issue by incrementing the nonzero state
4446  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4447  // This does not impact performance in the case of MATNEST
4448  PetscBool isnest;
4449  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4450  CHKERRQ(ierr);
4451  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4452 
4453  // Jacobian reuse
4454  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
4455  CHKERRQ(ierr);
4456 
4457  // Fool DM
4458  DM dm;
4459  MatType mtype;
4460  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4461  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4462  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4463  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4464  PetscFunctionReturn(0);
4465 }
4466 
4467 static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
4468  Mat Ax,Mat Jx,
4469  Mat Axp,Mat Jxp)
4470 {
4471  __mfem_ts_ctx* ts_ctx;
4472  mfem::Vector *xx;
4473  PetscScalar *array;
4474  PetscInt n;
4475  PetscObjectState state;
4476  PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4477  PetscBool assembled;
4478  PetscErrorCode ierr;
4479 
4480  PetscFunctionBeginUser;
4481  // Matrix-free cases
4482  if (Ax && Ax != Jx)
4483  {
4484  ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4485  ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4486  }
4487  if (Axp && Axp != Jxp)
4488  {
4489  ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4490  ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4491  }
4492 
4493  ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
4494 
4495  // Avoid recomputing the Jacobians if we have already done so
4496  ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
4497  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4498  state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4499  ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
4500  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4501  state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4502  if (!rx && !rxp) { PetscFunctionReturn(0); }
4503 
4504  // update time
4505  mfem::TimeDependentOperator *op = ts_ctx->op;
4506  op->SetTime(t);
4507 
4508  // wrap Vecs with Vectors
4509  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4510  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4511  mfem::Vector yy(array,n);
4512  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4513  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4514  if (!ts_ctx->bchandler)
4515  {
4516  xx = new mfem::Vector(array,n);
4517  }
4518  else
4519  {
4520  // make sure we compute a Jacobian with the correct boundary values
4521  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4522  mfem::Vector txx(array,n);
4523  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4524  xx = ts_ctx->work;
4525  bchandler->SetTime(t);
4526  bchandler->ApplyBC(txx,*xx);
4527  }
4528  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4529 
4530  // We don't have a specialized interface, so we just compute the split Jacobians
4531  // by evaluating the implicit gradient method twice with the appropriate shifts
4532 
4533  // First we compute the state Jacobian
4534  mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
4535 
4536  // Convert to the operator type requested if needed
4537  bool delete_mat = false;
4538  mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
4539  (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
4540  if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4541  pJx->GetType() != ts_ctx->jacType))
4542  {
4543  if (pJx)
4544  {
4545  Mat B = *pJx;
4546  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4547  }
4548  pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
4549  ts_ctx->jacType);
4550  delete_mat = true;
4551  }
4552  if (rx)
4553  {
4554  ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4555  if (assembled)
4556  {
4557  ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4558  }
4559  else
4560  {
4561  Mat B;
4562  ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4563  ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4564  }
4565  }
4566  if (delete_mat) { delete pJx; }
4567  pJx = new mfem::PetscParMatrix(Jx,true);
4568 
4569  // Eliminate essential dofs
4570  if (ts_ctx->bchandler)
4571  {
4572  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4573  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4574  pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4575  }
4576 
4577  // Then we compute the Jacobian with respect to the time derivative of the state.
4578  // Note that this is usually the mass matrix
4579  mfem::PetscParMatrix *pJxp = NULL;
4580  if (rxp)
4581  {
4582  delete_mat = false;
4583  mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
4584  pJxp = const_cast<mfem::PetscParMatrix *>
4585  (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
4586  if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4587  pJxp->GetType() != ts_ctx->jacType))
4588  {
4589  if (pJxp)
4590  {
4591  Mat B = *pJxp;
4592  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4593  }
4594  pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
4595  &oJxp,ts_ctx->jacType);
4596  delete_mat = true;
4597  }
4598 
4599  ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4600  if (assembled)
4601  {
4602  ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4603  }
4604  else
4605  {
4606  Mat B;
4607  ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4608  ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4609  }
4610  if (delete_mat) { delete pJxp; }
4611  pJxp = new mfem::PetscParMatrix(Jxp,true);
4612 
4613  // Eliminate essential dofs
4614  if (ts_ctx->bchandler)
4615  {
4616  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4617  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4618  pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
4619  }
4620 
4621  // Obtain the time-dependent part of the Jacobian by subtracting
4622  // the state Jacobian.
4623  // We don't use the class operator "-=" since we know that
4624  // the sparsity pattern of the two matrices is the same
4625  ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4626  }
4627 
4628  // Jacobian reuse
4629  ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4630  CHKERRQ(ierr);
4631  ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4632  CHKERRQ(ierr);
4633 
4634  delete pJx;
4635  delete pJxp;
4636  if (!ts_ctx->bchandler) { delete xx; }
4637  PetscFunctionReturn(0);
4638 }
4639 
4640 static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4641  Mat A, Mat P, void *ctx)
4642 {
4643  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4644  mfem::Vector *xx;
4645  PetscScalar *array;
4646  PetscInt n;
4647  PetscObjectState state;
4648  PetscErrorCode ierr;
4649 
4650  PetscFunctionBeginUser;
4651  // Matrix-free case
4652  if (A && A != P)
4653  {
4654  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4655  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4656  }
4657 
4658  // Avoid recomputing the Jacobian if we have already done so
4659  ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4660  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4661  state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
4662 
4663  // update time
4664  mfem::TimeDependentOperator *op = ts_ctx->op;
4665  op->SetTime(t);
4666 
4667  // wrap Vec with Vector
4668  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4669  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4670  if (!ts_ctx->bchandler)
4671  {
4672  xx = new mfem::Vector(array,n);
4673  }
4674  else
4675  {
4676  // make sure we compute a Jacobian with the correct boundary values
4677  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4678  mfem::Vector txx(array,n);
4679  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4680  xx = ts_ctx->work;
4681  bchandler->SetTime(t);
4682  bchandler->ApplyBC(txx,*xx);
4683  }
4684  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4685 
4686  // Use TimeDependentOperator::GetExplicitGradient(x)
4687  mfem::Operator& J = op->GetExplicitGradient(*xx);
4688  if (!ts_ctx->bchandler) { delete xx; }
4689 
4690  // Convert to the operator type requested if needed
4691  bool delete_pA = false;
4692  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4693  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4694  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4695  pA->GetType() != ts_ctx->jacType))
4696  {
4697  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4698  ts_ctx->jacType);
4699  delete_pA = true;
4700  }
4701 
4702  // Eliminate essential dofs
4703  if (ts_ctx->bchandler)
4704  {
4705  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4706  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4707  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4708  }
4709 
4710  // Get nonzerostate
4711  PetscObjectState nonzerostate;
4712  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4713 
4714  // Avoid an unneeded copy of the matrix by replacing its header in place
4715  Mat B;
4716  B = pA->ReleaseMat(false);
4717  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4718  if (delete_pA) { delete pA; }
4719 
4720  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4721  // preconditioner fails because MatCreateSubMatrix_Nest does not
4722  // actually return a matrix. Instead, for efficiency reasons,
4723  // it returns a reference to the submatrix. The second time it
4724  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4725  // aborts since the two submatrices are actually different.
4726  // We circumvent this issue by incrementing the nonzero state
4727  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4728  // This does not impact performance in the case of MATNEST
4729  PetscBool isnest;
4730  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4731  CHKERRQ(ierr);
4732  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4733 
4734  // Jacobian reuse
4735  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4736  {
4737  ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4738  }
4739  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_rhsjacstate);
4740  CHKERRQ(ierr);
4741 
4742  // Fool DM
4743  DM dm;
4744  MatType mtype;
4745  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4746  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4747  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4748  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4749  PetscFunctionReturn(0);
4750 }
4751 
4752 static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4753  void* ctx)
4754 {
4755  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4756 
4757  PetscFunctionBeginUser;
4758  if (!monctx)
4759  {
4760  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4761  }
4762 
4763  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4764  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4765  monctx->monitor);
4766  if (user_monitor->mon_sol)
4767  {
4768  Vec x;
4769  PetscErrorCode ierr;
4770 
4771  ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4772  mfem::PetscParVector V(x,true);
4773  user_monitor->MonitorSolution(it,res,V);
4774  }
4775  if (user_monitor->mon_res)
4776  {
4777  Vec x;
4778  PetscErrorCode ierr;
4779 
4780  ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4781  mfem::PetscParVector V(x,true);
4782  user_monitor->MonitorResidual(it,res,V);
4783  }
4784  user_monitor->MonitorSolver(solver);
4785  PetscFunctionReturn(0);
4786 }
4787 
4788 static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4789  void *ctx)
4790 {
4791  PetscScalar *array;
4792  PetscInt n;
4793  PetscErrorCode ierr;
4794  mfem::Vector *xx;
4795  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4796 
4797  PetscFunctionBeginUser;
4798  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4799  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4800  if (!snes_ctx->bchandler)
4801  {
4802  xx = new mfem::Vector(array,n);
4803  }
4804  else
4805  {
4806  // make sure we compute a Jacobian with the correct boundary values
4807  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4808  mfem::Vector txx(array,n);
4809  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4810  xx = snes_ctx->work;
4811  bchandler->ApplyBC(txx,*xx);
4812  }
4813 
4814  // Use Operator::GetGradient(x)
4815  mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4816  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4817  if (!snes_ctx->bchandler) { delete xx; }
4818 
4819  // Convert to the operator type requested if needed
4820  bool delete_pA = false;
4821  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4822  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4823  if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4824  pA->GetType() != snes_ctx->jacType))
4825  {
4826  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4827  snes_ctx->jacType);
4828  delete_pA = true;
4829  }
4830 
4831  // Eliminate essential dofs
4832  if (snes_ctx->bchandler)
4833  {
4834  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4835  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4836  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4837  }
4838 
4839  // Get nonzerostate
4840  PetscObjectState nonzerostate;
4841  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4842 
4843  // Avoid an unneeded copy of the matrix by replacing its header in place
4844  Mat B = pA->ReleaseMat(false);
4845  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4846  if (delete_pA) { delete pA; }
4847 
4848  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4849  // preconditioner fails because MatCreateSubMatrix_Nest does not
4850  // actually return a matrix. Instead, for efficiency reasons,
4851  // it returns a reference to the submatrix. The second time it
4852  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4853  // aborts since the two submatrices are actually different.
4854  // We circumvent this issue by incrementing the nonzero state
4855  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4856  // This does not impact performances in the case of MATNEST
4857  PetscBool isnest;
4858  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4859  CHKERRQ(ierr);
4860  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4861 
4862  // Matrix-free case
4863  if (A && A != P)
4864  {
4865  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4866  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4867  }
4868 
4869  // Fool DM
4870  DM dm;
4871  MatType mtype;
4872  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4873  ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4874  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4875  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4876  PetscFunctionReturn(0);
4877 }
4878 
4879 static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4880 {
4881  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4882 
4883  PetscFunctionBeginUser;
4884  mfem::PetscParVector xx(x,true);
4885  mfem::PetscParVector ff(f,true);
4886  if (snes_ctx->bchandler)
4887  {
4888  // we evaluate the Mult method with the correct bc
4889  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
4890  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4891  mfem::Vector* txx = snes_ctx->work;
4892  bchandler->ApplyBC(xx,*txx);
4893  snes_ctx->op->Mult(*txx,ff);
4894  // and fix the residual (i.e. f_\partial\Omega = u - g)
4895  bchandler->FixResidualBC(xx,ff);
4896  }
4897  else
4898  {
4899  // use the Mult method of the class
4900  snes_ctx->op->Mult(xx,ff);
4901  }
4902  ff.UpdateVecFromFlags();
4903  PetscFunctionReturn(0);
4904 }
4905 
4906 static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
4907  void *ctx)
4908 {
4909  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4910 
4911  PetscFunctionBeginUser;
4912  if (!snes_ctx->objective)
4913  {
4914  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
4915  }
4916  mfem::PetscParVector xx(x,true);
4917  double lf;
4918  (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4919  *f = (PetscReal)lf;
4920  PetscFunctionReturn(0);
4921 }
4922 
4923 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4924  PetscBool *cy,PetscBool *cw, void* ctx)
4925 {
4926  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4927  bool lcy = false,lcw = false;
4928 
4929  PetscFunctionBeginUser;
4930  mfem::PetscParVector x(X,true);
4931  mfem::PetscParVector y(Y,true);
4932  mfem::PetscParVector w(W,true);
4933  (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4934  if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
4935  if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
4936  PetscFunctionReturn(0);
4937 }
4938 
4939 static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
4940 {
4941  Vec F,X,dX,pX;
4942  __mfem_snes_ctx* snes_ctx;
4943 
4944  PetscFunctionBeginUser;
4945  /* The update callback is not passed a user context, so retrieve it from the SNES function */
4946  ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
4947  ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4948  if (!it)
4949  {
4950  ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4951  ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
4952  CHKERRQ(ierr);
4953  ierr = VecDestroy(&pX); CHKERRQ(ierr);
4954  }
4955  ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
4956  CHKERRQ(ierr);
4957  if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
4958  "Missing previous solution");
4959  ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4960  mfem::PetscParVector f(F,true);
4961  mfem::PetscParVector x(X,true);
4962  mfem::PetscParVector dx(dX,true);
4963  mfem::PetscParVector px(pX,true);
4964  (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4965  /* Store previous solution */
4966  ierr = VecCopy(X,pX); CHKERRQ(ierr);
4967  PetscFunctionReturn(0);
4968 }
4969 
4970 static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
4971  void* ctx)
4972 {
4973  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4974 
4975  PetscFunctionBeginUser;
4976  if (!monctx)
4977  {
4978  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4979  }
4980 
4981  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4982  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4983  monctx->monitor);
4984  if (user_monitor->mon_sol)
4985  {
4986  Vec x;
4987  PetscErrorCode ierr;
4988 
4989  ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
4990  mfem::PetscParVector V(x,true);
4991  user_monitor->MonitorSolution(it,res,V);
4992  }
4993  if (user_monitor->mon_res)
4994  {
4995  Vec x;
4996  PetscErrorCode ierr;
4997 
4998  ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
4999  mfem::PetscParVector V(x,true);
5000  user_monitor->MonitorResidual(it,res,V);
5001  }
5002  user_monitor->MonitorSolver(solver);
5003  PetscFunctionReturn(0);
5004 }
5005 
5006 static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
5007 {
5008  mfem::Operator *op;
5009  PetscErrorCode ierr;
5010 
5011  PetscFunctionBeginUser;
5012  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5013  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5014  mfem::PetscParVector xx(x,true);
5015  mfem::PetscParVector yy(y,true);
5016  op->Mult(xx,yy);
5017  yy.UpdateVecFromFlags();
5018  PetscFunctionReturn(0);
5019 }
5020 
5021 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
5022 {
5023  mfem::Operator *op;
5024  PetscErrorCode ierr;
5025  PetscBool flg,symm;
5026 
5027  PetscFunctionBeginUser;
5028  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5029  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5030  mfem::PetscParVector xx(x,true);
5031  mfem::PetscParVector yy(y,true);
5032  ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5033  if (flg && symm)
5034  {
5035  op->Mult(xx,yy);
5036  }
5037  else
5038  {
5039  op->MultTranspose(xx,yy);
5040  }
5041  yy.UpdateVecFromFlags();
5042  PetscFunctionReturn(0);
5043 }
5044 
5045 static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
5046 {
5047  mfem::Operator *op;
5048  PetscErrorCode ierr;
5049 
5050  PetscFunctionBeginUser;
5051  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5052  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5053  ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
5054  PetscFunctionReturn(0);
5055 }
5056 
5057 static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
5058 {
5059  PetscFunctionBeginUser;
5060  PetscFunctionReturn(0);
5061 }
5062 
5063 static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
5064 {
5065  __mfem_pc_shell_ctx *ctx;
5066  PetscErrorCode ierr;
5067 
5068  PetscFunctionBeginUser;
5069  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5070  if (ctx->op)
5071  {
5072  PetscBool isascii;
5073  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5074  CHKERRQ(ierr);
5075 
5076  mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
5077  (ctx->op);
5078  if (ppc)
5079  {
5080  ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5081  }
5082  else
5083  {
5084  if (isascii)
5085  {
5086  ierr = PetscViewerASCIIPrintf(viewer,
5087  "No information available on the mfem::Solver\n");
5088  CHKERRQ(ierr);
5089  }
5090  }
5091  if (isascii && ctx->factory)
5092  {
5093  ierr = PetscViewerASCIIPrintf(viewer,
5094  "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5095  CHKERRQ(ierr);
5096  }
5097  }
5098  PetscFunctionReturn(0);
5099 }
5100 
5101 static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
5102 {
5103  __mfem_pc_shell_ctx *ctx;
5104  PetscErrorCode ierr;
5105 
5106  PetscFunctionBeginUser;
5107  mfem::PetscParVector xx(x,true);
5108  mfem::PetscParVector yy(y,true);
5109  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5110  if (ctx->op)
5111  {
5112  ctx->op->Mult(xx,yy);
5113  yy.UpdateVecFromFlags();
5114  }
5115  else // operator is not present, copy x
5116  {
5117  yy = xx;
5118  }
5119  PetscFunctionReturn(0);
5120 }
5121 
5122 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
5123 {
5124  __mfem_pc_shell_ctx *ctx;
5125  PetscErrorCode ierr;
5126 
5127  PetscFunctionBeginUser;
5128  mfem::PetscParVector xx(x,true);
5129  mfem::PetscParVector yy(y,true);
5130  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5131  if (ctx->op)
5132  {
5133  ctx->op->MultTranspose(xx,yy);
5134  yy.UpdateVecFromFlags();
5135  }
5136  else // operator is not present, copy x
5137  {
5138  yy = xx;
5139  }
5140  PetscFunctionReturn(0);
5141 }
5142 
5143 static PetscErrorCode __mfem_pc_shell_setup(PC pc)
5144 {
5145  __mfem_pc_shell_ctx *ctx;
5146 
5147  PetscFunctionBeginUser;
5148  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5149  if (ctx->factory)
5150  {
5151  // Delete any owned operator
5152  if (ctx->ownsop)
5153  {
5154  delete ctx->op;
5155  }
5156 
5157  // Get current preconditioning Mat
5158  Mat B;
5159  ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5160 
5161  // Call user-defined setup
5162  mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
5163  mfem::PetscPreconditionerFactory *factory = ctx->factory;
5164  ctx->op = factory->NewPreconditioner(hB);
5165  ctx->ownsop = true;
5166  ctx->numprec++;
5167  }
5168  PetscFunctionReturn(0);
5169 }
5170 
5171 static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
5172 {
5173  __mfem_pc_shell_ctx *ctx;
5174  PetscErrorCode ierr;
5175 
5176  PetscFunctionBeginUser;
5177  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5178  if (ctx->ownsop)
5179  {
5180  delete ctx->op;
5181  }
5182  delete ctx;
5183  PetscFunctionReturn(0);
5184 }
5185 
5186 static PetscErrorCode __mfem_array_container_destroy(void *ptr)
5187 {
5188  PetscErrorCode ierr;
5189 
5190  PetscFunctionBeginUser;
5191  ierr = PetscFree(ptr); CHKERRQ(ierr);
5192  PetscFunctionReturn(0);
5193 }
5194 
5195 static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
5196 {
5197  mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
5198  PetscErrorCode ierr;
5199 
5200  PetscFunctionBeginUser;
5201  for (int i=0; i<a->Size(); i++)
5202  {
5203  Mat M = (*a)[i];
5204  MPI_Comm comm = PetscObjectComm((PetscObject)M);
5205  ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5206  }
5207  delete a;
5208  PetscFunctionReturn(0);
5209 }
5210 
5211 static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
5212 {
5213  PetscErrorCode ierr;
5214 
5215  PetscFunctionBeginUser;
5216  ierr = PetscFree(*ctx); CHKERRQ(ierr);
5217  PetscFunctionReturn(0);
5218 }
5219 
5220 // Sets the type of PC to PCSHELL and wraps the solver action
5221 // if ownsop is true, ownership of precond is transferred to the PETSc object
5222 PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
5223 {
5224  PetscFunctionBeginUser;
5225  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5226  ctx->op = &precond;
5227  ctx->ownsop = ownsop;
5228  ctx->factory = NULL;
5229  ctx->numprec = 0;
5230 
5231  // In case the PC was already of type SHELL, this will destroy any
5232  // previous user-defined data structure
5233  // We cannot call PCReset as it will wipe out any operator already set
5234  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5235 
5236  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5237  ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5238  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5239  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5240  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5241  CHKERRQ(ierr);
5242  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5243  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5244  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5245  PetscFunctionReturn(0);
5246 }
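// [Editor's note] Illustrative usage sketch, not part of petsc.cpp. It assumes
// an already created KSP `ksp` and an mfem::Solver `prec` to be used as its
// preconditioner; ownership of `prec` stays with the caller (ownsop == false).
//
//   PC pc;
//   ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
//   ierr = MakeShellPC(pc, prec, false); CHKERRQ(ierr);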
5247 
5248 // Sets the type of PC to PCSHELL. Uses a PetscPreconditionerFactory to construct the solver
5249 // Takes ownership of the solver created by the factory
5250 PetscErrorCode MakeShellPCWithFactory(PC pc,
5251  mfem::PetscPreconditionerFactory *factory)
5252 {
5253  PetscFunctionBeginUser;
5254  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5255  ctx->op = NULL;
5256  ctx->ownsop = true;
5257  ctx->factory = factory;
5258  ctx->numprec = 0;
5259 
5260  // In case the PC was already of type SHELL, this will destroy any
5261  // previous user-defined data structure
5262  // We cannot call PCReset as it will wipe out any operator already set
5263  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5264 
5265  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5266  ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
5267  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5268  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5269  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5270  CHKERRQ(ierr);
5271  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5272  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5273  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5274  PetscFunctionReturn(0);
5275 }
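// [Editor's note] Illustrative sketch, not part of petsc.cpp. A user-defined
// factory only needs to override NewPreconditioner(); the class name and the
// options prefix below are hypothetical.
//
//   class MyFactory : public mfem::PetscPreconditionerFactory
//   {
//   public:
//      MyFactory() : mfem::PetscPreconditionerFactory("my factory") {}
//      virtual mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh)
//      {
//         mfem::PetscParMatrix *pP;
//         oh.Get(pP);   // current preconditioning matrix, as set on the PC
//         return new mfem::PetscPreconditioner(*pP, "factory_");
//      }
//   };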
5276 
5277 // Converts from a list (or a marked Array if islist is false) to an IS
5278 // st indicates the offset where to start numbering
5279 static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
5280  const mfem::Array<int> *list,
5281  PetscInt st, IS* is)
5282 {
5283  PetscInt n = list ? list->Size() : 0,*idxs;
5284  const int *data = list ? list->GetData() : NULL;
5285  PetscErrorCode ierr;
5286 
5287  PetscFunctionBeginUser;
5288  ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5289  if (islist)
5290  {
5291  for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5292  }
5293  else
5294  {
5295  PetscInt cum = 0;
5296  for (PetscInt i=0; i<n; i++)
5297  {
5298  if (data[i]) { idxs[cum++] = i+st; }
5299  }
5300  n = cum;
5301  }
5302  ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5303  CHKERRQ(ierr);
5304  PetscFunctionReturn(0);
5305 }
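// [Editor's note] Worked example, not part of petsc.cpp. With islist == false,
// a marker array {1,0,1,1,0} and offset st = 10 yields the index set
// {10,12,13} (positions of the nonzero marks, shifted by st); with
// islist == true the same array is read as the explicit list of indices, i.e.
// {11,10,11,11,10} after adding st.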
5306 
5307 // Converts from a marked Array of Vdofs to an IS
5308 // st indicates the offset where to start numbering
5309 // l2l is a vector of matrices generated during RAP
5310 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5311  mfem::Array<Mat> &pl2l,
5312  const mfem::Array<int> *mark,
5313  PetscInt st, IS* is)
5314 {
5315  mfem::Array<int> sub_dof_marker;
5316  mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
5317  PetscInt nl;
5318  PetscErrorCode ierr;
5319 
5320  PetscFunctionBeginUser;
5321  for (int i = 0; i < pl2l.Size(); i++)
5322  {
5323  PetscInt m,n,*ii,*jj;
5324  PetscBool done;
5325  ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
5326  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5327  MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
5328  ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5329 #if defined(PETSC_USE_64BIT_INDICES)
5330  int nnz = (int)ii[m];
5331  int *mii = new int[m+1];
5332  int *mjj = new int[nnz];
5333  for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5334  for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5335  l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
5336 #else
5337  l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
5338 #endif
5339  ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5340  (const PetscInt**)&ii,
5341  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5342  MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
5343  << i << " l2l matrix");
5344  }
5345  nl = 0;
5346  for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5347  sub_dof_marker.SetSize(nl);
5348  const int* vdata = mark->GetData();
5349  int* sdata = sub_dof_marker.GetData();
5350  int cumh = 0, cumw = 0;
5351  for (int i = 0; i < l2l.Size(); i++)
5352  {
5353  const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
5354  l2l[i]->Height());
5355  mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
5356  l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5357  cumh += l2l[i]->Height();
5358  cumw += l2l[i]->Width();
5359  }
5360  ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5361  for (int i = 0; i < pl2l.Size(); i++)
5362  {
5363  delete l2l[i];
5364  }
5365  PetscFunctionReturn(0);
5366 }
5367 
5368 #if !defined(PETSC_HAVE_HYPRE)
5369 
5370 #if defined(HYPRE_MIXEDINT)
5371 #error "HYPRE_MIXEDINT not supported"
5372 #endif
5373 
5374 #include "_hypre_parcsr_mv.h"
5375 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
5376 {
5377  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5378  hypre_CSRMatrix *hdiag,*hoffd;
5379  PetscScalar *da,*oa,*aptr;
5380  PetscInt *dii,*djj,*oii,*ojj,*iptr;
5381  PetscInt i,dnnz,onnz,m,n;
5382  PetscMPIInt size;
5383  PetscErrorCode ierr;
5384 
5385  PetscFunctionBeginUser;
5386  hdiag = hypre_ParCSRMatrixDiag(hA);
5387  hoffd = hypre_ParCSRMatrixOffd(hA);
5388  m = hypre_CSRMatrixNumRows(hdiag);
5389  n = hypre_CSRMatrixNumCols(hdiag);
5390  dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5391  onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5392  ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5393  ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5394  ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5395  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
5396  CHKERRQ(ierr);
5397  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
5398  CHKERRQ(ierr);
5399  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
5400  CHKERRQ(ierr);
5401  iptr = djj;
5402  aptr = da;
5403  for (i=0; i<m; i++)
5404  {
5405  PetscInt nc = dii[i+1]-dii[i];
5406  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5407  iptr += nc;
5408  aptr += nc;
5409  }
5410  ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5411  if (size > 1)
5412  {
5413  PetscInt *offdj,*coffd;
5414 
5415  ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5416  ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5417  ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5418  ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
5419  CHKERRQ(ierr);
5420  offdj = hypre_CSRMatrixJ(hoffd);
5421  coffd = hypre_ParCSRMatrixColMapOffd(hA);
5422  for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5423  ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
5424  CHKERRQ(ierr);
5425  iptr = ojj;
5426  aptr = oa;
5427  for (i=0; i<m; i++)
5428  {
5429  PetscInt nc = oii[i+1]-oii[i];
5430  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5431  iptr += nc;
5432  aptr += nc;
5433  }
5434  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5435  djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5436  }
5437  else
5438  {
5439  oii = ojj = NULL;
5440  oa = NULL;
5441  ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5442  }
5443  /* We are responsible for freeing the CSR arrays. However, since we can take
5444  references of a PetscParMatrix but we cannot take references of raw PETSc
5445  arrays, we create PetscContainer objects holding references to these arrays
5446  and attach them to the matrix */
5447  void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5448  const char *names[6] = {"_mfem_csr_dii",
5449  "_mfem_csr_djj",
5450  "_mfem_csr_da",
5451  "_mfem_csr_oii",
5452  "_mfem_csr_ojj",
5453  "_mfem_csr_oa"
5454  };
5455  for (i=0; i<6; i++)
5456  {
5457  PetscContainer c;
5458 
5459  ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5460  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5461  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5462  CHKERRQ(ierr);
5463  ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
5464  CHKERRQ(ierr);
5465  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5466  }
5467  PetscFunctionReturn(0);
5468 }
5469 
5470 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
5471 {
5472  Mat lA;
5473  ISLocalToGlobalMapping rl2g,cl2g;
5474  IS is;
5475  hypre_CSRMatrix *hdiag,*hoffd;
5476  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5477  void *ptrs[2];
5478  const char *names[2] = {"_mfem_csr_aux",
5479  "_mfem_csr_data"
5480  };
5481  PetscScalar *hdd,*hod,*aa,*data;
5482  PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5483  PetscInt *aux,*ii,*jj;
5484  PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5485  PetscErrorCode ierr;
5486 
5487  PetscFunctionBeginUser;
5488  /* access relevant information in ParCSR */
5489  str = hypre_ParCSRMatrixFirstRowIndex(hA);
5490  stc = hypre_ParCSRMatrixFirstColDiag(hA);
5491  hdiag = hypre_ParCSRMatrixDiag(hA);
5492  hoffd = hypre_ParCSRMatrixOffd(hA);
5493  dr = hypre_CSRMatrixNumRows(hdiag);
5494  dc = hypre_CSRMatrixNumCols(hdiag);
5495  nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5496  hdi = hypre_CSRMatrixI(hdiag);
5497  hdj = hypre_CSRMatrixJ(hdiag);
5498  hdd = hypre_CSRMatrixData(hdiag);
5499  oc = hypre_CSRMatrixNumCols(hoffd);
5500  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5501  hoi = hypre_CSRMatrixI(hoffd);
5502  hoj = hypre_CSRMatrixJ(hoffd);
5503  hod = hypre_CSRMatrixData(hoffd);
5504 
5505  /* generate l2g maps for rows and cols */
5506  ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5507  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5508  ierr = ISDestroy(&is); CHKERRQ(ierr);
5509  col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5510  ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5511  for (i=0; i<dc; i++) { aux[i] = i+stc; }
5512  for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5513  ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5514  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5515  ierr = ISDestroy(&is); CHKERRQ(ierr);
5516 
5517  /* create MATIS object */
5518  ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5519  ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5520  ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5521  ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5522  ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5523  ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5524 
5525  /* merge local matrices */
5526  ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5527  ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5528  ii = aux;
5529  jj = aux+dr+1;
5530  aa = data;
5531  *ii = *(hdi++) + *(hoi++);
5532  for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5533  {
5534  PetscScalar *aold = aa;
5535  PetscInt *jold = jj,nc = jd+jo;
5536  for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5537  for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5538  *(++ii) = *(hdi++) + *(hoi++);
5539  ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5540  }
5541  for (; cum<dr; cum++) { *(++ii) = nnz; }
5542  ii = aux;
5543  jj = aux+dr+1;
5544  aa = data;
5545  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5546  CHKERRQ(ierr);
5547  ptrs[0] = aux;
5548  ptrs[1] = data;
5549  for (i=0; i<2; i++)
5550  {
5551  PetscContainer c;
5552 
5553  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5554  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5555  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5556  CHKERRQ(ierr);
5557  ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
5558  CHKERRQ(ierr);
5559  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5560  }
5561  ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5562  ierr = MatDestroy(&lA); CHKERRQ(ierr);
5563  ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5564  ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5565  PetscFunctionReturn(0);
5566 }
5567 #endif
5568 
5569 #include <petsc/private/matimpl.h>
5570 
5571 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm, PetscInt m,
5572  PetscInt n, Mat *A)
5573 {
5574  PetscFunctionBegin;
5575  ierr = MatCreate(comm,A); CHKERRQ(ierr);
5576  ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5577  ierr = PetscObjectChangeTypeName((PetscObject)*A,"mfemdummy"); CHKERRQ(ierr);
5578  (*A)->preallocated = PETSC_TRUE;
5579  ierr = MatSetUp(*A); CHKERRQ(ierr);
5580  PetscFunctionReturn(0);
5581 }
5582 
5583 #include <petsc/private/vecimpl.h>
5584 
5585 #if defined(PETSC_HAVE_DEVICE)
5586 static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5587 {
5588  PetscFunctionBegin;
5589  v->offloadmask = m;
5590  PetscFunctionReturn(0);
5591 }
5592 #endif
5593 
5594 static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5595 {
5596  PetscFunctionBegin;
5597 #if defined(PETSC_HAVE_DEVICE)
5598  *flg = v->boundtocpu;
5599 #else
5600  *flg = PETSC_TRUE;
5601 #endif
5602  PetscFunctionReturn(0);
5603 }
5604 
5605 static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject o)
5606 {
5607  PetscErrorCode ierr;
5608 
5609  PetscFunctionBegin;
5610  ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5611  PetscFunctionReturn(0);
5612 }
5613 
5614 #endif // MFEM_USE_PETSC
5615 #endif // MFEM_USE_MPI
void SetType(PetscODESolver::Type)
Definition: petsc.cpp:4169
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
Definition: petsc.cpp:1294
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Definition: petsc.hpp:977
Pointer to an Operator of a specified type.
Definition: handle.hpp:33
void Mult(double a, const Vector &x, double b, Vector &y) const
Matvec: y = a A x + b y.
Definition: petsc.cpp:1910
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
Definition: petsc.cpp:3783
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
Definition: petsc.cpp:671
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
Definition: petsc.cpp:2285
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:3872
bool DeviceRequested() const
Definition: petsc.hpp:140
T * GetData()
Returns the data.
Definition: array.hpp:108
Type GetType() const
Definition: petsc.cpp:2227
virtual Operator & GetExplicitGradient(const Vector &x) const
Return an Operator representing dG/dx at the given point x and the currently set time.
Definition: operator.cpp:244
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true...
Definition: device.hpp:336
int Size() const
Returns the size of the vector.
Definition: vector.hpp:190
virtual void Step(Vector &x, double &t, double &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in]...
Definition: petsc.cpp:4187
struct::_p_Mat * Mat
Definition: petsc.hpp:70
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
Definition: ode.hpp:22
void Init()
Initialize with defaults. Does not initialize inherited members.
Definition: petsc.cpp:949
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:3155
PetscClassId cid
The class id of the actual PETSc object.
Definition: petsc.hpp:658
ID for class PetscParMatrix, MATHYPRE format.
Definition: operator.hpp:261
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
virtual void SetTime(const double t_)
Set the current time.
Definition: operator.hpp:326
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
void SetDataAndSize_()
Definition: petsc.cpp:218
Abstract parallel finite element space.
Definition: pfespace.hpp:28
Constant in time b.c.
Definition: petsc.hpp:570
void SetRelTol(double tol)
Definition: petsc.cpp:2297
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
Definition: operator.hpp:652
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Randomize(PetscInt seed=0)
Set random values.
Definition: petsc.cpp:858
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:3125
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
Definition: petsc.cpp:1690
void SyncBaseAndReset()
Definition: petsc.hpp:124
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
Definition: petsc.cpp:2459
void SetMaxIter(int max_iter)
Definition: petsc.cpp:2347
PetscParVector & operator*=(PetscScalar d)
Definition: petsc.cpp:706
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
Definition: hypre.cpp:2464
void SortColumnIndices()
Sort the column indices corresponding to each row.
Definition: sparsemat.cpp:424
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
Definition: vector.hpp:108
int Capacity() const
Return the size of the allocated memory.
Memory< double > & GetMemory()
Return a reference to the Memory object used by the Vector.
Definition: vector.hpp:225
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, double diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
Definition: petsc.cpp:2171
double PetscReal
Definition: petsc.hpp:49
void Customize(bool customize=true) const
Customize object with options set.
Definition: petsc.cpp:2549
PetscInt M() const
Returns the global number of rows.
Definition: petsc.cpp:928
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition: operator.hpp:93
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
Definition: petsc.cpp:4018
PetscParVector * Y
Definition: petsc.hpp:314
virtual ~PetscLinearSolver()
Definition: petsc.cpp:3115
PetscInt GetNumRows() const
Returns the local number of rows.
Definition: petsc.cpp:914
virtual int GetTrueVSize() const
Return the number of local vector true dofs.
Definition: pfespace.hpp:279
virtual void ExplicitMult(const Vector &x, Vector &y) const
Perform the action of the explicit part of the operator, G: y = G(x, t) where t is the current time...
Definition: operator.cpp:214
struct::_p_PC * PC
Definition: petsc.hpp:72
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
Definition: petsc.cpp:873
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray()...
Definition: petsc.cpp:725
const double * GetHostPointer() const
Definition: petsc.cpp:198
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
Definition: petsc.cpp:657
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
Definition: petsc.cpp:4074
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
Definition: petsc.cpp:1891
ID for the base class Operator, i.e. any type.
Definition: operator.hpp:254
MPI_Comm GetComm() const
Definition: pfespace.hpp:263
double f(const Vector &xvec)
Definition: lor_mms.hpp:32
void write(std::ostream &os, T value)
Write &#39;value&#39; to stream.
Definition: binaryio.hpp:30
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
Definition: petsc.hpp:655
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
Abstract class for monitoring PETSc&#39;s solvers.
Definition: petsc.hpp:955
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
Definition: operator.hpp:86
Data type sparse matrix.
Definition: sparsemat.hpp:41
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
Definition: operator.hpp:66
void SetJacobianType(Operator::Type type)
Definition: petsc.cpp:3974
struct::_p_TS * TS
Definition: petsc.hpp:74
double b
Definition: lissajous.cpp:42
int to_int(const std::string &str)
Convert a string to an int.
Definition: text.hpp:62
const double * HostReadData() const
Definition: sparsemat.hpp:235
Auxiliary class for BDDC customization.
Definition: petsc.hpp:801
virtual ~PetscPreconditioner()
Definition: petsc.cpp:3286
ID for class PetscParMatrix, unspecified format.
Definition: operator.hpp:262
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:1247
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
Definition: petsc.cpp:2805
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
Definition: petsc.cpp:448
void LoseData()
NULL-ifies the data.
Definition: array.hpp:128
void SetFlagsFromMask_() const
Definition: petsc.cpp:268
bool DeviceIsValid() const
Return true if device pointer is valid.
const int * HostReadJ() const
Definition: sparsemat.hpp:219
PetscParMatrix & operator=(const PetscParMatrix &B)
Definition: petsc.cpp:1062
bool ReadRequested() const
Definition: petsc.hpp:130
PetscInt GetColStart() const
Returns the global index of the first local column.
Definition: petsc.cpp:907
virtual ~PetscParVector()
Calls PETSc&#39;s destroy function.
Definition: petsc.cpp:518
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
Definition: petsc.cpp:3792
PetscBCHandler * bchandler
Handler for boundary conditions.
Definition: petsc.hpp:664
PetscParMatrix & operator-=(const PetscParMatrix &B)
Definition: petsc.cpp:1093
struct::_p_Vec * Vec
Definition: petsc.hpp:69
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
Definition: petsc.cpp:1970
void Zero(Vector &x)
Replace boundary dofs with 0.
Definition: petsc.cpp:2824
void Assign(const T *)
Copy data from a pointer. &#39;Size()&#39; elements are copied.
Definition: array.hpp:895
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:2454
T read(std::istream &is)
Read a value from the stream and return it.
Definition: binaryio.hpp:37
bool WriteRequested() const
Definition: petsc.hpp:135
void Shift(double s)
Shift diagonal by a constant.
Definition: petsc.cpp:1992
ID for class PetscParMatrix, MATNEST format.
Definition: operator.hpp:260
int GetVDim() const
Returns vector dimension.
Definition: fespace.hpp:534
void SetTime(double t)
Sets the current time.
Definition: petsc.hpp:600
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true...
Definition: device.hpp:319
ID for class PetscParMatrix, MATSHELL format.
Definition: operator.hpp:259
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void SetMat(petsc::Mat newA)
Replace the inner Mat Object. The reference count of newA is increased.
Definition: petsc.cpp:1715
Biwise-OR of all CUDA backends.
Definition: device.hpp:88
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
Definition: petsc.cpp:956
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc&#39;s MATSHELL object and returns the Mat in B...
Definition: petsc.cpp:1260
int GetNumIterations()
Definition: petsc.cpp:2615
int SpaceDimension() const
Definition: mesh.hpp:912
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory&lt;T&gt; &amp;mem, int size, false)
Definition: device.hpp:343
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Definition: petsc.cpp:2489
struct::_p_KSP * KSP
Definition: petsc.hpp:71
Wrapper for hypre&#39;s parallel vector class.
Definition: hypre.hpp:99
HYPRE_BigInt * GetTrueDofOffsets() const
Definition: pfespace.hpp:272
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
Definition: petsc.cpp:2833
virtual void Eval(double t, Vector &g)
Boundary conditions evaluation.
Definition: petsc.hpp:590
Type
Enumeration defining IDs for some classes derived from Operator.
Definition: operator.hpp:252
struct::_p_SNES * SNES
Definition: petsc.hpp:73
PetscInt NNZ() const
Returns the number of nonzeros.
Definition: petsc.cpp:942
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true)
Definition: petsc.cpp:2845
virtual void Mult(const Vector &b, Vector &x) const
Application of the preconditioner.
Definition: petsc.cpp:3276
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
Definition: fe_coll.hpp:26
MemoryType
Memory types supported by MFEM.
Definition: mem_manager.hpp:31
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
Definition: array.hpp:674
virtual Operator & GetImplicitGradient(const Vector &x, const Vector &k, double shift) const
Return an Operator representing (dF/dk shift + dF/dx) at the given x, k, and the currently set time...
Definition: operator.cpp:236
HYPRE_Int HYPRE_BigInt
PetscInt N() const
Returns the global number of columns.
Definition: petsc.cpp:935
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
Definition: petsc.cpp:1881
void SetJacobianType(Operator::Type type)
Definition: petsc.cpp:4157
bool operatorset
Boolean to handle SetOperator calls.
Definition: petsc.hpp:670
PetscParVector * X
Auxiliary vectors for typecasting.
Definition: petsc.hpp:314
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
Definition: device.hpp:258
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
void MultTranspose(double a, const Vector &x, double b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
Definition: petsc.cpp:1927
bool IsAliasForSync() const
Definition: petsc.hpp:97
int height
Dimension of the output / number of rows in the matrix.
Definition: operator.hpp:27
PetscParVector & operator-=(const PetscParVector &y)
Definition: petsc.cpp:699
double a
Definition: lissajous.cpp:41
void MFEMFinalizePetsc()
Definition: petsc.cpp:192
virtual void ImplicitMult(const Vector &x, const Vector &k, Vector &y) const
Perform the action of the implicit part of the operator, F: y = F(x, k, t) where t is the current tim...
Definition: operator.cpp:219
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory&lt;T&gt; &amp;mem, int size, false)
Definition: device.hpp:326
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device&#39;s DeviceMemoryClass, if on_dev = true, or the mfem::Device&#39;s HostMemoryClass, otherwise.
Definition: device.hpp:353
virtual ~PetscNonlinearSolver()
Definition: petsc.cpp:3904
virtual ~PetscODESolver()
Definition: petsc.cpp:4066
int GetConverged()
Definition: petsc.cpp:2582
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
Definition: petsc.cpp:2275
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
Definition: petsc.cpp:2508
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
Definition: petsc.cpp:168
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object...
Definition: petsc.cpp:2214
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
Definition: petsc.cpp:500
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
Definition: petsc.hpp:965
bool Empty() const
Return true if the Memory object is empty, see Reset().
ID for class PetscParMatrix, MATAIJ format.
Definition: operator.hpp:257
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
Definition: petsc.cpp:3190
int dim
Definition: ex24.cpp:53
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
Definition: mem_manager.cpp:54
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition: petsc.cpp:620
PetscInt GlobalSize() const
Returns the global number of rows.
Definition: petsc.cpp:456
PetscParVector * X
Definition: petsc.hpp:661
PetscInt GetRowStart() const
Returns the global index of the first local row.
Definition: petsc.cpp:900
struct _p_PetscObject * PetscObject
Definition: petsc.hpp:51
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
Definition: petsc.cpp:3827
void ResetMemory()
Completes the operation started with PlaceMemory.
Definition: petsc.cpp:814
void SetAbsTol(double tol)
Definition: petsc.cpp:2322
Helper class for handling essential boundary conditions.
Definition: petsc.hpp:564
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
Definition: petsc.cpp:1871
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
Definition: petsc.hpp:597
void SetTol(double tol)
Definition: petsc.cpp:2292
PetscParVector & operator=(PetscScalar d)
Set constant values.
Definition: petsc.cpp:650
RefCoord t[3]
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
Definition: petsc.hpp:971
int NumRowBlocks() const
Return the number of row blocks.
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
Definition: petsc.cpp:2011
Vector data type.
Definition: vector.hpp:60
const FiniteElementCollection * FEColl() const
Definition: fespace.hpp:554
const int * HostReadI() const
Definition: sparsemat.hpp:203
Identity Operator I: x -&gt; x.
Definition: operator.hpp:671
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array...
Definition: petsc.cpp:720
bool UseDevice() const
Read the internal device flag.
void FreePrivateContext()
Definition: petsc.cpp:2705
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Definition: ode.hpp:26
RefCoord s[3]
Vector * GlobalVector() const
Returns the global vector in each processor.
Definition: petsc.cpp:625
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition: petsc.cpp:4041
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
Definition: petsc.cpp:2200
Base class for solvers.
Definition: operator.hpp:648
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as s...
Definition: globals.hpp:66
Abstract operator.
Definition: operator.hpp:24
Wrapper for hypre&#39;s ParCSR matrix class.
Definition: hypre.hpp:277
A class to handle Block systems in a matrix-free implementation.
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory&lt;T&gt; &amp;mem, int size, false)
Definition: device.hpp:360
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
Definition: petsc.cpp:3105
void operator*=(double s)
Scale all entries by s: A_scaled = s*A.
Definition: petsc.cpp:1905
int NumColBlocks() const
Return the number of column blocks.
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
Definition: petsc.cpp:2733
ID for class PetscParMatrix, MATIS format.
Definition: operator.hpp:258
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition: petsc.cpp:3110
void PlaceMemory(Memory< double > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding ...
Definition: petsc.cpp:730
int width
Dimension of the input / number of columns in the matrix.
Definition: operator.hpp:28
void * private_ctx
Private context for solver.
Definition: petsc.hpp:667
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
Definition: petsc.cpp:2740
const double * GetDevicePointer() const
Definition: petsc.cpp:207
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
Definition: petsc.cpp:1861
void ScaleCols(const Vector &s)
Scale the local col i by s(i).
Definition: petsc.cpp:1981
double f(const Vector &p)