petsc.cpp
1 // Copyright (c) 2010-2023, Lawrence Livermore National Security, LLC. Produced
2 // at the Lawrence Livermore National Laboratory. All Rights reserved. See files
3 // LICENSE and NOTICE for details. LLNL-CODE-806117.
4 //
5 // This file is part of the MFEM library. For more information and source code
6 // availability visit https://mfem.org.
7 //
8 // MFEM is free software; you can redistribute it and/or modify it under the
9 // terms of the BSD-3 license. We welcome feedback and contributions, see file
10 // CONTRIBUTING.md for details.
11 
12 // Author: Stefano Zampini <stefano.zampini@gmail.com>
13 
14 #include "../config/config.hpp"
15 
16 #ifdef MFEM_USE_MPI
17 #ifdef MFEM_USE_PETSC
18 
19 #include "linalg.hpp"
20 #include "../fem/fem.hpp"
21 
22 #include "petsc.h"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
25 #endif
26 
27 // Backward compatibility
28 #if PETSC_VERSION_LT(3,11,0)
29 #define VecLockReadPush VecLockPush
30 #define VecLockReadPop VecLockPop
31 #endif
32 #if PETSC_VERSION_LT(3,12,0)
33 #define VecGetArrayWrite VecGetArray
34 #define VecRestoreArrayWrite VecRestoreArray
35 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
37 #endif
38 #if PETSC_VERSION_LT(3,19,0)
39 #define PETSC_SUCCESS 0
40 #endif
41 
42 #include <fstream>
43 #include <iomanip>
44 #include <cmath>
45 #include <cstdlib>
46 
47 // Note: there are additional #include statements below.
48 
49 #include "petscinternals.hpp"
50 
51 // Callback functions: these functions will be called by PETSc
52 static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
53 static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
54 static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
55  void*);
56 static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
57 static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
58  PetscReal,Mat,
59  Mat,void*);
60 static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
61  Mat,Mat,Mat,Mat);
62 static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
63 static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
64 static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
65 static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
66 static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
67 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
68  PetscBool*,PetscBool*,void*);
69 static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
70 static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
71 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
72 static PetscErrorCode __mfem_pc_shell_setup(PC);
73 static PetscErrorCode __mfem_pc_shell_destroy(PC);
74 static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
75 static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
76 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
77 static PetscErrorCode __mfem_mat_shell_destroy(Mat);
78 static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
79 static PetscErrorCode __mfem_array_container_destroy(void*);
80 static PetscErrorCode __mfem_matarray_container_destroy(void*);
81 static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
82 
83 // auxiliary functions
84 static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
85  PetscInt,IS*);
86 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
87  const mfem::Array<int>*,PetscInt,IS*);
88 static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
89 static PetscErrorCode MakeShellPCWithFactory(PC,
90  mfem::PetscPreconditionerFactory*);
91 
92 // Equivalent functions are present in PETSc source code
93 // if PETSc has been compiled with hypre support
94 // We provide them here in case PETSC_HAVE_HYPRE is not defined
95 #if !defined(PETSC_HAVE_HYPRE)
96 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
97 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
98 #endif
99 
100 #if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
101 #if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
102 #define _USE_DEVICE
103 #define PETSC_VECDEVICE VECCUDA
104 #define PETSC_MATAIJDEVICE MATAIJCUSPARSE
105 #define VecDeviceGetArrayRead VecCUDAGetArrayRead
106 #define VecDeviceGetArrayWrite VecCUDAGetArrayWrite
107 #define VecDeviceGetArray VecCUDAGetArray
108 #define VecDeviceRestoreArrayRead VecCUDARestoreArrayRead
109 #define VecDeviceRestoreArrayWrite VecCUDARestoreArrayWrite
110 #define VecDeviceRestoreArray VecCUDARestoreArray
111 #define VecDevicePlaceArray VecCUDAPlaceArray
112 #define VecDeviceResetArray VecCUDAResetArray
113 #elif defined(MFEM_USE_HIP) && defined(PETSC_HAVE_HIP)
114 #define _USE_DEVICE
115 #define PETSC_VECDEVICE VECHIP
116 #define PETSC_MATAIJDEVICE MATAIJHIPSPARSE
117 #define VecDeviceGetArrayRead VecHIPGetArrayRead
118 #define VecDeviceGetArrayWrite VecHIPGetArrayWrite
119 #define VecDeviceGetArray VecHIPGetArray
120 #define VecDeviceRestoreArrayRead VecHIPRestoreArrayRead
121 #define VecDeviceRestoreArrayWrite VecHIPRestoreArrayWrite
122 #define VecDeviceRestoreArray VecHIPRestoreArray
123 #define VecDevicePlaceArray VecHIPPlaceArray
124 #define VecDeviceResetArray VecHIPResetArray
125 #else
126 #define VecDeviceGetArrayRead VecGetArrayRead
127 #define VecDeviceGetArrayWrite VecGetArrayWrite
128 #define VecDeviceGetArray VecGetArray
129 #define VecDeviceRestoreArrayRead VecRestoreArrayRead
130 #define VecDeviceRestoreArrayWrite VecRestoreArrayWrite
131 #define VecDeviceRestoreArray VecRestoreArray
132 #define VecDevicePlaceArray VecPlaceArray
133 #define VecDeviceResetArray VecResetArray
134 #endif
135 #endif
136 
137 #if defined(PETSC_HAVE_DEVICE)
138 static PetscErrorCode __mfem_VecSetOffloadMask(Vec,PetscOffloadMask);
139 #endif
140 static PetscErrorCode __mfem_VecBoundToCPU(Vec,PetscBool*);
141 static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject);
142 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm,PetscInt,PetscInt,Mat*);
143 
144 // structs used by PETSc code
145 typedef struct
146 {
147  mfem::Solver *op;
148  mfem::PetscPreconditionerFactory *factory;
149  bool ownsop;
150  unsigned long int numprec;
151 } __mfem_pc_shell_ctx;
152 
153 typedef struct
154 {
155  mfem::Operator *op; // The nonlinear operator
156  mfem::PetscBCHandler *bchandler; // Handling of essential bc
157  mfem::Vector *work; // Work vector
158  mfem::Operator::Type jacType; // OperatorType for the Jacobian
159  // Objective for line search
160  void (*objective)(mfem::Operator *op, const mfem::Vector&, double*);
161  // PostCheck function (to be called after successful line search)
162  void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
163  mfem::Vector&, bool&, bool&);
164  // General purpose update function (to be called at the beginning of
165  // each nonlinear step)
166  void (*update)(mfem::Operator *op, int,
167  const mfem::Vector&, const mfem::Vector&,
168  const mfem::Vector&, const mfem::Vector&);
169 } __mfem_snes_ctx;
170 
171 typedef struct
172 {
173  mfem::TimeDependentOperator *op; // The time-dependent operator
174  mfem::PetscBCHandler *bchandler; // Handling of essential bc
175  mfem::Vector *work; // Work vector
176  mfem::Vector *work2; // Work vector
177  mfem::Operator::Type jacType; // OperatorType for the Jacobian
178  enum mfem::PetscODESolver::Type type;
179  PetscReal cached_shift;
180  PetscObjectState cached_ijacstate;
181  PetscObjectState cached_rhsjacstate;
182  PetscObjectState cached_splits_xstate;
183  PetscObjectState cached_splits_xdotstate;
184 } __mfem_ts_ctx;
185 
186 typedef struct
187 {
188  mfem::PetscSolver *solver; // The solver object
189  mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
190 } __mfem_monitor_ctx;
191 
192 // use global scope ierr to check PETSc errors inside mfem calls
193 static PetscErrorCode ierr;
194 static PetscMPIInt mpiierr;
195 
196 using namespace std;
197 
198 namespace mfem
199 {
200 
201 void MFEMInitializePetsc()
202 {
203  MFEMInitializePetsc(NULL,NULL,NULL,NULL);
204 }
205 
206 void MFEMInitializePetsc(int *argc,char*** argv)
207 {
208  MFEMInitializePetsc(argc,argv,NULL,NULL);
209 }
210 
211 void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
212  const char help[])
213 {
214  // Tell PETSc to use the same CUDA or HIP device as MFEM:
215  if (mfem::Device::Allows(mfem::Backend::CUDA_MASK))
216  {
217 #if PETSC_VERSION_LT(3,17,0)
218  const char *opts = "-cuda_device";
219 #else
220  const char *opts = "-device_select_cuda";
221 #endif
222  ierr = PetscOptionsSetValue(NULL,opts,
223  to_string(mfem::Device::GetId()).c_str());
224  MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
225  }
226  if (mfem::Device::Allows(mfem::Backend::HIP_MASK))
227  {
228 #if PETSC_VERSION_LT(3,17,0)
229  const char *opts = "-hip_device";
230 #else
231  const char *opts = "-device_select_hip";
232 #endif
233  ierr = PetscOptionsSetValue(NULL,opts,
234  to_string(mfem::Device::GetId()).c_str());
235  MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
236  }
237  ierr = PetscInitialize(argc,argv,rc_file,help);
238  MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
239 }
240 
241 void MFEMFinalizePetsc()
242 {
243  ierr = PetscFinalize();
244  MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
245 }
246 
247 const double* PetscMemory::GetHostPointer() const
248 {
249  int oflags = flags;
250  SetHostValid();
251  const double *v = mfem::Read(*this,Capacity(),false);
252  flags = oflags;
253  return v;
254 }
255 
256 const double* PetscMemory::GetDevicePointer() const
257 {
258  int oflags = flags;
259  SetDeviceValid();
260  const double *v = mfem::Read(*this,Capacity(),true);
261  flags = oflags;
262  return v;
263 }
264 
265 // PetscParVector methods
266 
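// Wrap the host (and, when present, device) array of the PETSc Vec into the
// pdata Memory object and make the MFEM Vector data an alias of it.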
267 void PetscParVector::SetDataAndSize_()
268 {
269  PetscScalar *array;
270  PetscInt n;
271  PetscBool isnest;
272 
273  MFEM_VERIFY(x,"Missing Vec");
274  ierr = VecSetUp(x); PCHKERRQ(x,ierr);
275  ierr = PetscObjectTypeCompare((PetscObject)x,VECNEST,&isnest); PCHKERRQ(x,ierr);
276  MFEM_VERIFY(!isnest,"Not for type nest");
277  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
278  MFEM_VERIFY(n >= 0,"Invalid local size");
279  size = n;
280 #if defined(PETSC_HAVE_DEVICE)
281  PetscOffloadMask omask;
282  PetscBool isdevice;
283 
284  ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
285  if (omask != PETSC_OFFLOAD_BOTH)
286  {
287  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
288  }
289 #endif
290  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
291 #if defined(PETSC_HAVE_DEVICE)
292  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
293  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
294  ""); PCHKERRQ(x,ierr);
295  if (isdevice)
296  {
297  if (omask != PETSC_OFFLOAD_BOTH)
298  {
299  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
300  }
301  PetscScalar *darray;
302  ierr = VecDeviceGetArrayRead(x,(const PetscScalar**)&darray);
303  PCHKERRQ(x,ierr);
304  pdata.Wrap(array,darray,size,MemoryType::HOST,false);
305  ierr = VecDeviceRestoreArrayRead(x,(const PetscScalar**)&darray);
306  PCHKERRQ(x,ierr);
307  }
308  else
309 #endif
310  {
311  pdata.Wrap(array,size,MemoryType::HOST,false);
312  }
313  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
314 
315 #if defined(PETSC_HAVE_DEVICE)
316  if (omask == PETSC_OFFLOAD_UNALLOCATED && isdevice) { omask = PETSC_OFFLOAD_CPU; }
317  ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
318 #endif
319  data.MakeAlias(pdata,0,size);
320  SetFlagsFromMask_();
321 }
322 
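// Mirror the PETSc offload mask of the Vec into the host/device validity flags of pdata.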
323 void PetscParVector::SetFlagsFromMask_() const
324 {
325  MFEM_VERIFY(x,"Missing Vec");
326 #if defined(_USE_DEVICE)
327  PetscOffloadMask mask;
328  PetscBool isdevice;
329  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
330  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
331  ""); PCHKERRQ(x,ierr);
332  ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
333  if (isdevice)
334  {
335  switch (mask)
336  {
337  case PETSC_OFFLOAD_CPU:
338  pdata.SetHostValid();
339  pdata.SetDeviceInvalid();
340  break;
341  case PETSC_OFFLOAD_GPU:
342  pdata.SetHostInvalid();
343  pdata.SetDeviceValid();
344  break;
345  case PETSC_OFFLOAD_BOTH:
346  pdata.SetHostValid();
347  pdata.SetDeviceValid();
348  break;
349  default:
350  MFEM_ABORT("Unhandled case " << mask);
351  }
352  }
353 #endif
354  data.Sync(pdata);
355 }
356 
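// Propagate the MFEM host/device validity flags back to the PETSc Vec and bump its object state.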
357 void PetscParVector::UpdateVecFromFlags()
358 {
359  MFEM_VERIFY(x,"Missing Vec");
360  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
361 #if defined(_USE_DEVICE)
362  PetscBool isdevice;
363  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
364  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
365  ""); PCHKERRQ(x,ierr);
366  if (isdevice)
367  {
368  bool dv = pdata.DeviceIsValid();
369  bool hv = pdata.HostIsValid();
370  PetscOffloadMask mask;
371  if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
372  else if (dv) { mask = PETSC_OFFLOAD_GPU; }
373  else { mask = PETSC_OFFLOAD_CPU; }
374  ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
375  }
376  else
377 #endif
378  {
379  /* Just make sure we have an up-to-date copy on the CPU for PETSc */
380  PetscScalar *v;
381  ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
382  pdata.CopyToHost(v,size);
383  ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
384  }
385 }
386 
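// Select the PETSc vector type: a CUDA/HIP device type when MFEM uses device memory, VECSTANDARD otherwise.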
387 void PetscParVector::SetVecType_()
388 {
389  VecType vectype;
390  MFEM_VERIFY(x,"Missing Vec");
391  ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
392 #if defined(_USE_DEVICE)
393  switch (Device::GetDeviceMemoryType())
394  {
395  case MemoryType::DEVICE:
396  case MemoryType::MANAGED:
397  ierr = VecSetType(x,PETSC_VECDEVICE); PCHKERRQ(x,ierr);
398  break;
399  default:
400  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
401  break;
402  }
403 #else
404  if (!vectype)
405  {
406  ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
407  }
408 #endif
409 }
410 
411 const double* PetscParVector::Read(bool on_dev) const
412 {
413  const PetscScalar *dummy;
414  MFEM_VERIFY(x,"Missing Vec");
415 #if defined(PETSC_HAVE_DEVICE)
416  PetscBool isdevice;
417  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
418  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
419  ""); PCHKERRQ(x,ierr);
420  if (on_dev && isdevice)
421  {
422  ierr = VecDeviceGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
423  ierr = VecDeviceRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
424  }
425  else
426 #endif
427  {
428  ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
429  ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
430  }
431  SetFlagsFromMask_();
432  return mfem::Read(pdata, size, on_dev);
433 }
434 
435 const double* PetscParVector::HostRead() const
436 {
437  return Read(false);
438 }
439 
440 double* PetscParVector::Write(bool on_dev)
441 {
442  PetscScalar *dummy;
443  MFEM_VERIFY(x,"Missing Vec");
444 #if defined(PETSC_HAVE_DEVICE)
445  PetscBool isdevice;
446  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
447  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
448  ""); PCHKERRQ(x,ierr);
449  if (on_dev && isdevice)
450  {
451  ierr = VecDeviceGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
452  ierr = VecDeviceRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
453  }
454  else
455 #endif
456  {
457  ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
458  ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
459  }
460  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
461  SetFlagsFromMask_();
462  return mfem::Write(pdata, size, on_dev);
463 }
464 
465 double* PetscParVector::HostWrite()
466 {
467  return Write(false);
468 }
469 
470 double* PetscParVector::ReadWrite(bool on_dev)
471 {
472  PetscScalar *dummy;
473  MFEM_VERIFY(x,"Missing Vec");
474 #if defined(PETSC_HAVE_DEVICE)
475  PetscBool isdevice;
476  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
477  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
478  ""); PCHKERRQ(x,ierr);
479  if (on_dev && isdevice)
480  {
481  ierr = VecDeviceGetArray(x,&dummy); PCHKERRQ(x,ierr);
482  ierr = VecDeviceRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
483  }
484  else
485 #endif
486  {
487  ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
488  ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
489  }
490  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
491  SetFlagsFromMask_();
492  return mfem::ReadWrite(pdata, size, on_dev);
493 }
494 
495 double* PetscParVector::HostReadWrite()
496 {
497  return ReadWrite(false);
498 }
499 
500 void PetscParVector::UseDevice(bool dev) const
501 {
502  MFEM_VERIFY(x,"Missing Vec");
503 #if defined(PETSC_HAVE_DEVICE)
504  ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
505  SetFlagsFromMask_();
506 #endif
507 }
508 
509 bool PetscParVector::UseDevice() const
510 {
511  PetscBool flg;
512  MFEM_VERIFY(x,"Missing Vec");
513  ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
514  return flg ? false : true;
515 }
516 
517 PetscInt PetscParVector::GlobalSize() const
518 {
519  PetscInt N;
520  ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
521  return N;
522 }
523 
524 void PetscParVector::SetBlockSize(PetscInt bs)
525 {
526  ierr = VecSetBlockSize(x,bs); PCHKERRQ(x,ierr);
527 }
528 
529 PetscParVector::PetscParVector(MPI_Comm comm, const Vector &x_,
530  bool copy) : Vector()
531 {
532  PetscBool isdevice;
533 
534  int n = x_.Size();
535  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
536  ierr = VecSetSizes(x,n,PETSC_DECIDE); PCHKERRQ(x,ierr);
537  SetVecType_();
538  SetDataAndSize_();
539  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
540  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
541  ""); PCHKERRQ(x,ierr);
542  if (copy)
543  {
544  /* we use PETSc accessors to flag valid memory location to PETSc */
545  PetscErrorCode (*rest)(Vec,PetscScalar**);
546  PetscScalar *array;
547 #if defined(PETSC_HAVE_DEVICE)
548  if (isdevice && x_.UseDevice())
549  {
550  UseDevice(true);
551  ierr = VecDeviceGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
552  rest = VecDeviceRestoreArrayWrite;
553  }
554  else
555 #endif
556  {
557  UseDevice(false);
558  ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
559  rest = VecRestoreArrayWrite;
560  }
561  pdata.CopyFrom(x_.GetMemory(), n);
562  ierr = (*rest)(x,&array); PCHKERRQ(x,ierr);
563  SetFlagsFromMask_();
564  }
565  else // don't copy, just set the device flag
566  {
567  if (isdevice && x_.UseDevice())
568  {
569  UseDevice(true);
570  }
571  else
572  {
573  UseDevice(false);
574  }
575  }
576 }
577 
578 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
579  PetscInt *col) : Vector()
580 {
581  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
582  if (col)
583  {
584  PetscMPIInt myid;
585  mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
586  ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
587  }
588  else
589  {
590  ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
591  }
592  SetVecType_();
593  SetDataAndSize_();
594 }
595 
596 PetscParVector::~PetscParVector()
597 {
598  MPI_Comm comm = PetscObjectComm((PetscObject)x);
599  ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
600  pdata.Delete();
601 }
602 
603 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
604  PetscScalar *data_, PetscInt *col) : Vector()
605 {
606  MFEM_VERIFY(col,"Missing distribution");
607  PetscMPIInt myid;
608  mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
609  ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
610  &x); CCHKERRQ(comm,ierr)
611  SetVecType_();
612  SetDataAndSize_();
613 }
614 
615 PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
616 {
617  ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
618  SetDataAndSize_();
619 }
620 
621 PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
622  bool transpose, bool allocate) : Vector()
623 {
624  PetscInt loc = transpose ? op.Height() : op.Width();
625 
626  ierr = VecCreate(comm,&x);
627  CCHKERRQ(comm,ierr);
628  ierr = VecSetSizes(x,loc,PETSC_DECIDE);
629  PCHKERRQ(x,ierr);
630 
631  SetVecType_();
632  if (allocate)
633  {
634  SetDataAndSize_();
635  }
636  else /* Vector intended to be used with Place/ResetMemory calls */
637  {
638  size = loc;
639  }
640 }
641 
642 PetscParVector::PetscParVector(const PetscParMatrix &A,
643  bool transpose, bool allocate) : Vector()
644 {
645  Mat pA = const_cast<PetscParMatrix&>(A);
646  if (!transpose)
647  {
648  ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
649  }
650  else
651  {
652  ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
653  }
654  SetVecType_();
655  if (!allocate) /* Vector intended to be used with Place/ResetMemory calls */
656  {
657  PetscInt n;
658  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
659  size = n;
660  }
661  else
662  {
663  SetDataAndSize_();
664  }
665 }
666 
667 PetscParVector::PetscParVector(petsc::Vec y, bool ref) : Vector()
668 {
669  if (ref)
670  {
671  ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
672  }
673  x = y;
674  SetDataAndSize_();
675 }
676 
677 PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
678 {
679  HYPRE_BigInt* offsets = pfes->GetTrueDofOffsets();
680  MPI_Comm comm = pfes->GetComm();
681  ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
682 
683  PetscMPIInt myid = 0;
684  if (!HYPRE_AssumedPartitionCheck())
685  {
686  mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
687  }
688  ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
689  PCHKERRQ(x,ierr);
690  SetVecType_();
691  SetDataAndSize_();
692 }
693 
694 MPI_Comm PetscParVector::GetComm() const
695 {
696  return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
697 }
698 
699 Vector * PetscParVector::GlobalVector() const
700 {
701  VecScatter scctx;
702  Vec vout;
703  const PetscScalar *array;
704  PetscInt size;
705 
706  ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
707  ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
708  PCHKERRQ(x,ierr);
709  ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
710  PCHKERRQ(x,ierr);
711  ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
712  ierr = VecGetArrayRead(vout,&array); PCHKERRQ(x,ierr);
713  ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
714  Array<PetscScalar> data(size);
715  data.Assign(array);
716  ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(x,ierr);
717  ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
718  Vector *v = new Vector(data, internal::to_int(size));
719  v->MakeDataOwner();
720  data.LoseData();
721  return v;
722 }
723 
724 PetscParVector& PetscParVector::operator=(PetscScalar d)
725 {
726  ierr = VecSet(x,d); PCHKERRQ(x,ierr);
727  SetFlagsFromMask_();
728  return *this;
729 }
730 
731 PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
732  const Array<PetscScalar>& vals)
733 {
734  MFEM_VERIFY(idx.Size() == vals.Size(),
735  "Size mismatch between indices and values");
736  PetscInt n = idx.Size();
737  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
738  PCHKERRQ(x,ierr);
739  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
740  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
741  SetFlagsFromMask_();
742  return *this;
743 }
744 
745 PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
746  const Array<PetscScalar>& vals)
747 {
748  MFEM_VERIFY(idx.Size() == vals.Size(),
749  "Size mismatch between indices and values");
750  PetscInt n = idx.Size();
751  ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
752  PCHKERRQ(x,ierr);
753  ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
754  ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
755  SetFlagsFromMask_();
756  return *this;
757 }
758 
759 PetscParVector& PetscParVector::operator=(const PetscParVector &y)
760 {
761  ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
762  SetFlagsFromMask_();
763  return *this;
764 }
765 
766 PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
767 {
768  ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
769  SetFlagsFromMask_();
770  return *this;
771 }
772 
773 PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
774 {
775  ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
776  SetFlagsFromMask_();
777  return *this;
778 }
779 
780 PetscParVector& PetscParVector::operator*=(PetscScalar s)
781 {
782  ierr = VecScale(x,s); PCHKERRQ(x,ierr);
783  SetFlagsFromMask_();
784  return *this;
785 }
786 
787 PetscParVector& PetscParVector::operator+=(PetscScalar s)
788 {
789  ierr = VecShift(x,s); PCHKERRQ(x,ierr);
790  SetFlagsFromMask_();
791  return *this;
792 }
793 
794 void PetscParVector::PlaceArray(PetscScalar *temp_data)
795 {
796  ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
797 }
798 
799 void PetscParVector::ResetArray()
800 {
801  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
802 }
803 
804 void PetscParVector::PlaceMemory(Memory<double>& mem, bool rw)
805 {
806  PetscInt n;
807 
808  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
809  MFEM_VERIFY(n <= mem.Capacity(),
810  "Memory size " << mem.Capacity() << " < " << n << " vector size!");
811  MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
812  MFEM_VERIFY(data.Empty(),"Vector data is not empty");
813 #if defined(_USE_DEVICE)
814  PetscBool isdevice;
815  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
816  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
817  ""); PCHKERRQ(x,ierr);
818  if (isdevice)
819  {
820  bool usedev = mem.DeviceIsValid() || (!rw && mem.UseDevice());
821  pdata.MakeAliasForSync(mem,0,n,rw,true,usedev);
822  if (usedev)
823  {
824  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
825  ierr = VecDevicePlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
826  }
827  else
828  {
829  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
830  ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
831  }
832  }
833  else
834 #endif
835  {
836  double *w = rw ? mfem::HostReadWrite(mem,size) : mfem::HostWrite(mem,size);
837  pdata.MakeAliasForSync(mem,0,n,rw,true,false);
838 #if defined(PETSC_HAVE_DEVICE)
839  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
840 #endif
841  ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
842  }
843  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
845 }
846 
847 void PetscParVector::PlaceMemory(const Memory<double>& mem)
848 {
849  PetscInt n;
850 
851  ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
852  MFEM_VERIFY(n <= mem.Capacity(),
853  "Memory size " << mem.Capacity() << " < " << n << " vector size!");
854  MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
855  MFEM_VERIFY(data.Empty(),"Vector data is not empty");
856 #if defined(_USE_DEVICE)
857  PetscBool isdevice;
858  ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
859  VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
860  ""); PCHKERRQ(x,ierr);
861  if (isdevice)
862  {
863  pdata.MakeAliasForSync(mem,0,n,mem.DeviceIsValid());
864  if (mem.DeviceIsValid())
865  {
866  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
867  ierr = VecDevicePlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
868  }
869  else
870  {
871  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
872  ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
873  }
874  }
875  else
876 #endif
877  {
878  const double *w = mfem::HostRead(mem,size);
879  pdata.MakeAliasForSync(mem,0,n,false);
880 #if defined(PETSC_HAVE_DEVICE)
881  ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
882 #endif
883  ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
884  }
886  ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
887  ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
888 }
889 
890 void PetscParVector::ResetMemory()
891 {
892  MFEM_VERIFY(pdata.IsAliasForSync(),"Vector data is not an alias");
893  MFEM_VERIFY(!pdata.Empty(),"Vector data is empty");
894  bool read = pdata.ReadRequested();
895  bool usedev = pdata.DeviceRequested();
896  bool write = pdata.WriteRequested();
897  /*
898  check for strange corner cases
899  - device memory used but somehow PETSc ended up putting up to date data on host
900  - host memory used but somehow PETSc ended up putting up to date data on device
901  */
902  if (write)
903  {
904  const PetscScalar *v;
905 #if defined(PETSC_HAVE_DEVICE)
906  PetscOffloadMask mask;
907  ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
908  if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
909  (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
910 #endif
911  {
912  ierr = VecGetArrayRead(x,&v); PCHKERRQ(x,ierr);
913  pdata.CopyFromHost(v, size);
914  ierr = VecRestoreArrayRead(x,&v); PCHKERRQ(x,ierr);
915  }
916  }
918  data.Reset();
919  if (read && !write) { ierr = VecLockReadPop(x); PCHKERRQ(x,ierr); }
920  if (usedev)
921  {
922 #if defined(PETSC_HAVE_DEVICE)
923  ierr = VecDeviceResetArray(x); PCHKERRQ(x,ierr);
924 #else
925  MFEM_VERIFY(false,"This should not happen");
926 #endif
927  }
928  else
929  {
930  ierr = VecResetArray(x); PCHKERRQ(x,ierr);
931  }
932 }
933 
934 void PetscParVector::Randomize(PetscInt seed)
935 {
936  PetscRandom rctx = NULL;
937 
938  if (seed)
939  {
940  ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
941  PCHKERRQ(x,ierr);
942  ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
943  ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
944  }
945  ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
946  ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
947 }
948 
949 void PetscParVector::Print(const char *fname, bool binary) const
950 {
951  if (fname)
952  {
953  PetscViewer view;
954 
955  if (binary)
956  {
957  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
958  FILE_MODE_WRITE,&view);
959  }
960  else
961  {
962  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
963  }
964  PCHKERRQ(x,ierr);
965  ierr = VecView(x,view); PCHKERRQ(x,ierr);
966  ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
967  }
968  else
969  {
970  ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
971  }
972 }
973 
974 // PetscParMatrix methods
975 
976 PetscInt PetscParMatrix::GetRowStart() const
977 {
978  PetscInt N;
979  ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
980  return N;
981 }
982 
983 PetscInt PetscParMatrix::GetColStart() const
984 {
985  PetscInt N;
986  ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
987  return N;
988 }
989 
990 PetscInt PetscParMatrix::GetNumRows() const
991 {
992  PetscInt N;
993  ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
994  return N;
995 }
996 
997 PetscInt PetscParMatrix::GetNumCols() const
998 {
999  PetscInt N;
1000  ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
1001  return N;
1002 }
1003 
1004 PetscInt PetscParMatrix::M() const
1005 {
1006  PetscInt N;
1007  ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
1008  return N;
1009 }
1010 
1011 PetscInt PetscParMatrix::N() const
1012 {
1013  PetscInt N;
1014  ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
1015  return N;
1016 }
1017 
1018 PetscInt PetscParMatrix::NNZ() const
1019 {
1020  MatInfo info;
1021  ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
1022  return (PetscInt)info.nz_used;
1023 }
1024 
1025 void PetscParMatrix::SetBlockSize(PetscInt rbs, PetscInt cbs)
1026 {
1027  if (cbs < 0) { cbs = rbs; }
1028  ierr = MatSetBlockSizes(A,rbs,cbs); PCHKERRQ(A,ierr);
1029 }
1030 
1031 void PetscParMatrix::Init()
1032 {
1033  A = NULL;
1034  X = Y = NULL;
1035  height = width = 0;
1036 }
1037 
1038 PetscParMatrix::PetscParMatrix()
1039 {
1040  Init();
1041 }
1042 
1043 PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
1044  const mfem::Array<PetscInt>& rows, const mfem::Array<PetscInt>& cols)
1045 {
1046  Init();
1047 
1048  Mat B = const_cast<PetscParMatrix&>(pB);
1049 
1050  IS isr,isc;
1051  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
1052  rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
1053  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
1054  cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
1055  ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
1056  ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
1057  ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
1058 
1059  height = GetNumRows();
1060  width = GetNumCols();
1061 }
1062 
1063 PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
1064 {
1065  Init();
1066  height = pa->Height();
1067  width = pa->Width();
1068  ConvertOperator(pa->GetComm(),*pa,&A,tid);
1069 }
1070 
1071 PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
1072 {
1073  Init();
1074  height = ha->Height();
1075  width = ha->Width();
1076  ConvertOperator(ha->GetComm(),*ha,&A,tid);
1077 }
1078 
1079 PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
1080 {
1081  Init();
1082  height = sa->Height();
1083  width = sa->Width();
1084  ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
1085 }
1086 
1087 PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
1088  Operator::Type tid)
1089 {
1090  Init();
1091  height = op->Height();
1092  width = op->Width();
1093  ConvertOperator(comm,*op,&A,tid);
1094 }
1095 
1096 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
1097  PetscInt *row_starts, SparseMatrix *diag,
1098  Operator::Type tid)
1099 {
1100  Init();
1101  BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1102  tid==PETSC_MATAIJ,&A);
1103  SetUpForDevice();
1104  // update base class
1105  height = GetNumRows();
1106  width = GetNumCols();
1107 }
1108 
1109 PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
1110  PetscInt global_num_cols, PetscInt *row_starts,
1111  PetscInt *col_starts, SparseMatrix *diag,
1112  Operator::Type tid)
1113 {
1114  Init();
1115  BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1116  tid==PETSC_MATAIJ,&A);
1117  SetUpForDevice();
1118  // update base class
1119  height = GetNumRows();
1120  width = GetNumCols();
1121 }
1122 
1123 PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
1124 {
1125  if (A)
1126  {
1127  MPI_Comm comm = PetscObjectComm((PetscObject)A);
1128  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1129  if (X) { delete X; }
1130  if (Y) { delete Y; }
1131  X = Y = NULL;
1132  }
1133  height = B.Height();
1134  width = B.Width();
1135 #if defined(PETSC_HAVE_HYPRE)
1136  ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
1137 #else
1138  ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
1139 #endif
1140  SetUpForDevice();
1141  return *this;
1142 }
1143 
1144 PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
1145 {
1146  if (A)
1147  {
1148  MPI_Comm comm = PetscObjectComm((PetscObject)A);
1149  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1150  if (X) { delete X; }
1151  if (Y) { delete Y; }
1152  X = Y = NULL;
1153  }
1154  height = B.Height();
1155  width = B.Width();
1156  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1157  return *this;
1158 }
1159 
1160 PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
1161 {
1162  if (!A)
1163  {
1164  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1165  }
1166  else
1167  {
1168  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1169  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1170  ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1171  }
1172  return *this;
1173 }
1174 
1175 PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
1176 {
1177  if (!A)
1178  {
1179  ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1180  ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
1181  }
1182  else
1183  {
1184  MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1185  MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1186  ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1187  }
1188  return *this;
1189 }
1190 
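// Build a block-diagonal PETSc Mat from the local SparseMatrix: MATIS when an
// unassembled matrix is requested, (MPI)AIJ otherwise.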
1191 void PetscParMatrix::
1192 BlockDiagonalConstructor(MPI_Comm comm,
1193  PetscInt *row_starts, PetscInt *col_starts,
1194  SparseMatrix *diag, bool assembled, Mat* Ad)
1195 {
1196  Mat A;
1197  PetscInt lrsize,lcsize,rstart,cstart;
1198  PetscMPIInt myid = 0,commsize;
1199 
1200  mpiierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,mpiierr);
1201  if (!HYPRE_AssumedPartitionCheck())
1202  {
1203  mpiierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,mpiierr);
1204  }
1205  lrsize = row_starts[myid+1]-row_starts[myid];
1206  rstart = row_starts[myid];
1207  lcsize = col_starts[myid+1]-col_starts[myid];
1208  cstart = col_starts[myid];
1209 
1210  if (!assembled)
1211  {
1212  IS is;
1213  ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1214  ISLocalToGlobalMapping rl2g,cl2g;
1215  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1216  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1217  if (row_starts != col_starts)
1218  {
1219  ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
1220  CCHKERRQ(comm,ierr);
1221  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1222  ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1223  }
1224  else
1225  {
1226  ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1227  cl2g = rl2g;
1228  }
1229 
1230  // Create the PETSc object (MATIS format)
1231  ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1232  ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1233  PCHKERRQ(A,ierr);
1234  ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1235  ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1236  ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1237  ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
1238 
1239  // Copy SparseMatrix into PETSc SeqAIJ format
1240  // pass through host for now
1241  Mat lA;
1242  ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1243  const int *II = diag->HostReadI();
1244  const int *JJ = diag->HostReadJ();
1245 #if defined(PETSC_USE_64BIT_INDICES)
1246  PetscInt *pII,*pJJ;
1247  int m = diag->Height()+1, nnz = II[diag->Height()];
1248  ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1249  for (int i = 0; i < m; i++) { pII[i] = II[i]; }
1250  for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1251  ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1252  diag->HostReadData()); PCHKERRQ(lA,ierr);
1253  ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1254 #else
1255  ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1256  diag->HostReadData()); PCHKERRQ(lA,ierr);
1257 #endif
1258  }
1259  else
1260  {
1261  PetscScalar *da;
1262  PetscInt *dii,*djj,*oii,
1263  m = diag->Height()+1, nnz = diag->NumNonZeroElems();
1264 
1265  diag->SortColumnIndices();
1266  // if we can take ownership of the SparseMatrix arrays, we can avoid this
1267  // step
1268  ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1269  ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1270  ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1271  if (sizeof(PetscInt) == sizeof(int))
1272  {
1273  ierr = PetscMemcpy(dii,diag->HostReadI(),m*sizeof(PetscInt));
1274  CCHKERRQ(PETSC_COMM_SELF,ierr);
1275  ierr = PetscMemcpy(djj,diag->HostReadJ(),nnz*sizeof(PetscInt));
1276  CCHKERRQ(PETSC_COMM_SELF,ierr);
1277  }
1278  else
1279  {
1280  const int *iii = diag->HostReadI();
1281  const int *jjj = diag->HostReadJ();
1282  for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
1283  for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1284  }
1285  ierr = PetscMemcpy(da,diag->HostReadData(),nnz*sizeof(PetscScalar));
1286  CCHKERRQ(PETSC_COMM_SELF,ierr);
1287  ierr = PetscCalloc1(m,&oii);
1288  CCHKERRQ(PETSC_COMM_SELF,ierr);
1289  if (commsize > 1)
1290  {
1291  ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1292  PETSC_DECIDE,
1293  dii,djj,da,oii,NULL,NULL,&A);
1294  CCHKERRQ(comm,ierr);
1295  }
1296  else
1297  {
1298  ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1299  CCHKERRQ(comm,ierr);
1300  }
1301 
1302  void *ptrs[4] = {dii,djj,da,oii};
1303  const char *names[4] = {"_mfem_csr_dii",
1304  "_mfem_csr_djj",
1305  "_mfem_csr_da",
1306  "_mfem_csr_oii",
1307  };
1308  for (PetscInt i=0; i<4; i++)
1309  {
1310  PetscContainer c;
1311 
1312  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1313  ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1314  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1315  CCHKERRQ(comm,ierr);
1316  ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
1317  CCHKERRQ(comm,ierr);
1318  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1319  }
1320  }
1321 
1322  // Tell PETSc the matrix is ready to be used
1323  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1324  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1325 
1326  *Ad = A;
1327 }
1328 
1329 MPI_Comm PetscParMatrix::GetComm() const
1330 {
1331  return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
1332 }
1333 
1334 // TODO ADD THIS CONSTRUCTOR
1335 //PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
1336 // PetscInt glob_ncols, int *I, PetscInt *J,
1337 // double *data, PetscInt *rows, PetscInt *cols)
1338 //{
1339 //}
1340 
1341 // TODO This should take a reference on op but how?
1342 void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
1343 {
1344  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1345  ierr = MatSetSizes(*A,op->Height(),op->Width(),
1346  PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1347  ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1348  ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
1349  ierr = MatShellSetOperation(*A,MATOP_MULT,
1350  (void (*)())__mfem_mat_shell_apply);
1351  PCHKERRQ(A,ierr);
1352  ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1353  (void (*)())__mfem_mat_shell_apply_transpose);
1354  PCHKERRQ(A,ierr);
1355  ierr = MatShellSetOperation(*A,MATOP_COPY,
1356  (void (*)())__mfem_mat_shell_copy);
1357  PCHKERRQ(A,ierr);
1358  ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1359  (void (*)())__mfem_mat_shell_destroy);
1360 #if defined(_USE_DEVICE)
1361  MemoryType mt = GetMemoryType(op->GetMemoryClass());
1362  if (mt == MemoryType::DEVICE || mt == MemoryType::MANAGED)
1363  {
1364  ierr = MatShellSetVecType(*A,PETSC_VECDEVICE); PCHKERRQ(A,ierr);
1365  ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1366  }
1367  else
1368  {
1369  ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1370  }
1371 #endif
1372  PCHKERRQ(A,ierr);
1373  ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1374 }
1375 
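// Convert a generic mfem::Operator to a PETSc Mat of the requested type
// (AIJ, IS, HYPRE, shell, ...), reusing the existing PETSc object when possible.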
1376 void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
1377  Operator::Type tid)
1378 {
1379  PetscParMatrix *pA = const_cast<PetscParMatrix *>
1380  (dynamic_cast<const PetscParMatrix *>(&op));
1381  HypreParMatrix *pH = const_cast<HypreParMatrix *>
1382  (dynamic_cast<const HypreParMatrix *>(&op));
1383  BlockOperator *pB = const_cast<BlockOperator *>
1384  (dynamic_cast<const BlockOperator *>(&op));
1385  IdentityOperator *pI = const_cast<IdentityOperator *>
1386  (dynamic_cast<const IdentityOperator *>(&op));
1387  SparseMatrix *pS = const_cast<SparseMatrix *>
1388  (dynamic_cast<const SparseMatrix *>(&op));
1389 
1390  if (pA && tid == ANY_TYPE) // use same object and return
1391  {
1392  ierr = PetscObjectReference((PetscObject)(pA->A));
1393  CCHKERRQ(pA->GetComm(),ierr);
1394  *A = pA->A;
1395  return;
1396  }
1397 
1398  PetscBool avoidmatconvert = PETSC_FALSE;
1399  if (pA) // we test for these types since MatConvert will fail
1400  {
1401  ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
1402  MATSHELL,"");
1403  CCHKERRQ(comm,ierr);
1404  }
1405  if (pA && !avoidmatconvert)
1406  {
1407  Mat At = NULL;
1408  PetscBool istrans;
1409 #if PETSC_VERSION_LT(3,10,0)
1410  PetscBool ismatis;
1411 #endif
1412 
1413 #if PETSC_VERSION_LT(3,18,0)
1414  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
1415 #else
1416  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEVIRTUAL,
1417  &istrans);
1418 #endif
1419  CCHKERRQ(pA->GetComm(),ierr);
1420  if (!istrans)
1421  {
1422  if (tid == pA->GetType()) // use same object and return
1423  {
1424  ierr = PetscObjectReference((PetscObject)(pA->A));
1425  CCHKERRQ(pA->GetComm(),ierr);
1426  *A = pA->A;
1427  return;
1428  }
1429 #if PETSC_VERSION_LT(3,10,0)
1430  ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
1431  CCHKERRQ(pA->GetComm(),ierr);
1432 #endif
1433  }
1434  else
1435  {
1436  ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
1437 #if PETSC_VERSION_LT(3,10,0)
1438  ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
1439 #endif
1440  CCHKERRQ(pA->GetComm(),ierr);
1441  }
1442 
1443  // Try to convert
1444  if (tid == PETSC_MATAIJ)
1445  {
1446 #if PETSC_VERSION_LT(3,10,0)
1447  if (ismatis)
1448  {
1449  if (istrans)
1450  {
1451  Mat B;
1452 
1453  ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1454  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1455  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1456  }
1457  else
1458  {
1459  ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
1460  PCHKERRQ(pA->A,ierr);
1461  }
1462  }
1463  else
1464 #endif
1465  {
1466  PetscMPIInt size;
1467  mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
1468 
1469  // call MatConvert and see if a converter is available
1470  if (istrans)
1471  {
1472  Mat B;
1473  ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1474  PCHKERRQ(pA->A,ierr);
1475  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1476  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1477  }
1478  else
1479  {
1480  ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1481  PCHKERRQ(pA->A,ierr);
1482  }
1483  }
1484  }
1485  else if (tid == PETSC_MATIS)
1486  {
1487  if (istrans)
1488  {
1489  Mat B;
1490  ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1491  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1492  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1493  }
1494  else
1495  {
1496  ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1497  }
1498  }
1499  else if (tid == PETSC_MATHYPRE)
1500  {
1501 #if defined(PETSC_HAVE_HYPRE)
1502  if (istrans)
1503  {
1504  Mat B;
1505  ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1506  ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1507  ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1508  }
1509  else
1510  {
1511  ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1512  }
1513 #else
1514  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1515 #endif
1516  }
1517  else if (tid == PETSC_MATSHELL)
1518  {
1519  MakeWrapper(comm,&op,A);
1520  }
1521  else
1522  {
1523  MFEM_ABORT("Unsupported operator type conversion " << tid)
1524  }
1525  }
1526  else if (pH)
1527  {
1528  if (tid == PETSC_MATAIJ)
1529  {
1530 #if defined(PETSC_HAVE_HYPRE)
1531  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1532  PETSC_USE_POINTER,A);
1533 #else
1534  ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1535 #endif
1536  CCHKERRQ(pH->GetComm(),ierr);
1537  }
1538  else if (tid == PETSC_MATIS)
1539  {
1540 #if defined(PETSC_HAVE_HYPRE)
1541  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1542  PETSC_USE_POINTER,A);
1543 #else
1544  ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1545 #endif
1546  CCHKERRQ(pH->GetComm(),ierr);
1547  }
1548  else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
1549  {
1550 #if defined(PETSC_HAVE_HYPRE)
1551  ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1552  PETSC_USE_POINTER,A);
1553  CCHKERRQ(pH->GetComm(),ierr);
1554 #else
1555  MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1556 #endif
1557  }
1558  else if (tid == PETSC_MATSHELL)
1559  {
1560  MakeWrapper(comm,&op,A);
1561  }
1562  else
1563  {
1564  MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
1565  " is not implemented");
1566  }
1567  }
1568  else if (pB)
1569  {
1570  Mat *mats,*matsl2l = NULL;
1571  PetscInt i,j,nr,nc;
1572 
1573  nr = pB->NumRowBlocks();
1574  nc = pB->NumColBlocks();
1575  ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1576  if (tid == PETSC_MATIS)
1577  {
1578  ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1579  }
1580  for (i=0; i<nr; i++)
1581  {
1582  PetscBool needl2l = PETSC_TRUE;
1583 
1584  for (j=0; j<nc; j++)
1585  {
1586  if (!pB->IsZeroBlock(i,j))
1587  {
1588  ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
1589  if (tid == PETSC_MATIS && needl2l)
1590  {
1591  PetscContainer c;
1592  ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
1593  (PetscObject*)&c);
1594  PCHKERRQ(mats[i*nc+j],ierr);
1595  // special case for block operators: the local Vdofs should be
1596  // ordered as:
1597  // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
1598  // with m fields, Ni the number of Vdofs for the i-th field
1599  if (c)
1600  {
1601  Array<Mat> *l2l = NULL;
1602  ierr = PetscContainerGetPointer(c,(void**)&l2l);
1603  PCHKERRQ(c,ierr);
1604  MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
1605  << l2l->Size() << " for block row " << i );
1606  ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
1607  PCHKERRQ(c,ierr);
1608  matsl2l[i] = (*l2l)[0];
1609  needl2l = PETSC_FALSE;
1610  }
1611  }
1612  }
1613  }
1614  }
1615  ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1616  if (tid == PETSC_MATIS)
1617  {
1618  ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1619 
1620  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
1621  for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1622  ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1623 
1624  PetscContainer c;
1625  ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1626  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1627  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1628  PCHKERRQ(c,ierr);
1629  ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
1630  PCHKERRQ((*A),ierr);
1631  ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1632  }
1633  for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1634  ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1635  }
1636  else if (pI && tid == PETSC_MATAIJ)
1637  {
1638  PetscInt rst;
1639 
1640  ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1641  ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
1642  PCHKERRQ(A,ierr);
1643  ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1644  ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1645  ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1646  ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1647  ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1648  for (PetscInt i = rst; i < rst+pI->Height(); i++)
1649  {
1650  ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1651  }
1652  ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1653  ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1654  }
1655  else if (pS)
1656  {
1657  if (tid == PETSC_MATSHELL)
1658  {
1659  MakeWrapper(comm,&op,A);
1660  }
1661  else
1662  {
1663  /* from SparseMatrix to SEQAIJ -> always pass through host for now */
1664  Mat B;
1665  PetscScalar *pdata;
1666  PetscInt *pii,*pjj,*oii;
1667  PetscMPIInt size;
1668 
1669  int m = pS->Height();
1670  int n = pS->Width();
1671  const int *ii = pS->HostReadI();
1672  const int *jj = pS->HostReadJ();
1673  const double *data = pS->HostReadData();
1674 
1675  ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1676  ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1677  ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1678  pii[0] = ii[0];
1679  for (int i = 0; i < m; i++)
1680  {
1681  bool issorted = true;
1682  pii[i+1] = ii[i+1];
1683  for (int j = ii[i]; j < ii[i+1]; j++)
1684  {
1685  pjj[j] = jj[j];
1686  if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1687  pdata[j] = data[j];
1688  }
1689  if (!issorted)
1690  {
1691  ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1692  CCHKERRQ(PETSC_COMM_SELF,ierr);
1693  }
1694  }
1695 
1696  mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
1697  if (size == 1)
1698  {
1699  ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1700  CCHKERRQ(comm,ierr);
1701  oii = NULL;
1702  }
1703  else // block diagonal constructor
1704  {
1705  ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1706  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1707  PETSC_DECIDE,
1708  pii,pjj,pdata,oii,NULL,NULL,&B);
1709  CCHKERRQ(comm,ierr);
1710  }
1711  void *ptrs[4] = {pii,pjj,pdata,oii};
1712  const char *names[4] = {"_mfem_csr_pii",
1713  "_mfem_csr_pjj",
1714  "_mfem_csr_pdata",
1715  "_mfem_csr_oii"
1716  };
1717  for (int i=0; i<4; i++)
1718  {
1719  PetscContainer c;
1720 
1721  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1722  ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1723  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1724  PCHKERRQ(B,ierr);
1725  ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
1726  PCHKERRQ(B,ierr);
1727  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1728  }
1729  if (tid == PETSC_MATAIJ)
1730  {
1731  *A = B;
1732  }
1733  else if (tid == PETSC_MATHYPRE)
1734  {
1735  ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1736  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1737  }
1738  else if (tid == PETSC_MATIS)
1739  {
1740  ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1741  ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1742  }
1743  else
1744  {
1745  MFEM_ABORT("Unsupported operator type conversion " << tid)
1746  }
1747  }
1748  }
1749  else // fallback to general operator
1750  {
1751  MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
1752  "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1753  MakeWrapper(comm,&op,A);
1754  if (tid == PETSC_MATAIJ)
1755  {
1756  Mat B;
1757  PetscBool isaij;
1758 
1759  ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1760  ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
1761  CCHKERRQ(comm,ierr);
1762  ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1763  if (!isaij)
1764  {
1765  ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1766  ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1767  }
1768  else
1769  {
1770  *A = B;
1771  }
1772  }
1773  }
1774  SetUpForDevice();
1775 }
1776 
1777 void PetscParMatrix::Destroy()
1778 {
1779  if (A != NULL)
1780  {
1781  MPI_Comm comm = MPI_COMM_NULL;
1782  ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
1783  ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1784  }
1785  delete X;
1786  delete Y;
1787  X = Y = NULL;
1788 }
1789 
1790 PetscParMatrix::PetscParMatrix(petsc::Mat a, bool ref)
1791 {
1792  if (ref)
1793  {
1794  ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
1795  }
1796  Init();
1797  A = a;
1798  height = GetNumRows();
1799  width = GetNumCols();
1800 }
1801 
1802 void PetscParMatrix::SetMat(petsc::Mat A_)
1803 {
1804  if (A_ == A) { return; }
1805  Destroy();
1806  ierr = PetscObjectReference((PetscObject)A_); PCHKERRQ(A_,ierr);
1807  A = A_;
1808  height = GetNumRows();
1809  width = GetNumCols();
1810 }
1811 
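// When MFEM runs on a CUDA/HIP device, switch AIJ (sub)matrices to their device
// format and enable explicit transposes; otherwise bind the matrix to the CPU.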
1812 void PetscParMatrix::SetUpForDevice()
1813 {
1814 #if !defined(_USE_DEVICE)
1815  return;
1816 #else
1817  if (!A || (!Device::Allows(Backend::CUDA_MASK) &&
1818  !Device::Allows(Backend::HIP_MASK)))
1819  {
1820  if (A) { ierr = MatBindToCPU(A, PETSC_TRUE); PCHKERRQ(A,ierr); }
1821  return;
1822  }
1823  PetscBool ismatis,isnest,isaij;
1824  ierr = PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
1825  PCHKERRQ(A,ierr);
1826  ierr = PetscObjectTypeCompare((PetscObject)A,MATNEST,&isnest);
1827  PCHKERRQ(A,ierr);
1828  Mat tA = A;
1829  if (ismatis)
1830  {
1831  ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1832  ierr = PetscObjectTypeCompare((PetscObject)tA,MATNEST,&isnest);
1833  PCHKERRQ(tA,ierr);
1834  }
1835  if (isnest)
1836  {
1837  PetscInt n,m;
1838  Mat **sub;
1839  ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1840  bool dvec = false;
1841  for (PetscInt i = 0; i < n; i++)
1842  {
1843  for (PetscInt j = 0; j < m; j++)
1844  {
1845  if (sub[i][j])
1846  {
1847  bool expT = false;
1848  Mat sA = sub[i][j];
1849  ierr = PetscObjectTypeCompareAny((PetscObject)sA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
1850  PCHKERRQ(sA,ierr);
1851  if (isaij)
1852  {
1853  ierr = MatSetType(sA,PETSC_MATAIJDEVICE); PCHKERRQ(sA,ierr);
1854  dvec = true;
1855  expT = true;
1856  }
1857  if (expT)
1858  {
1859  ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1860  PETSC_TRUE); PCHKERRQ(sA,ierr);
1861  }
1862  }
1863  }
1864  }
1865  if (dvec)
1866  {
1867  ierr = MatSetVecType(tA,PETSC_VECDEVICE); PCHKERRQ(tA,ierr);
1868  }
1869  }
1870  else
1871  {
1872  bool expT = false;
1873  ierr = PetscObjectTypeCompareAny((PetscObject)tA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
1874  PCHKERRQ(tA,ierr);
1875  if (isaij)
1876  {
1877  ierr = MatSetType(tA,PETSC_MATAIJDEVICE); PCHKERRQ(tA,ierr);
1878  expT = true;
1879  }
1880  if (expT)
1881  {
1882  ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1883  PETSC_TRUE); PCHKERRQ(tA,ierr);
1884  }
1885  }
1886 #endif
1887 }
1888 
1889 // Computes y = alpha * A * x + beta * y
1890 // or y = alpha * A^T* x + beta * y
1891 static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
1892  bool transpose)
1893 {
1894  PetscErrorCode (*f)(Mat,Vec,Vec);
1895  PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
1896  if (transpose)
1897  {
1898  f = MatMultTranspose;
1899  fadd = MatMultTransposeAdd;
1900  }
1901  else
1902  {
1903  f = MatMult;
1904  fadd = MatMultAdd;
1905  }
1906  if (a != 0.)
1907  {
1908  if (b != 0.)
1909  {
1910  ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1911  ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1912  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1913  }
1914  else
1915  {
1916  ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1917  ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1918  }
1919  }
1920  else
1921  {
1922  if (b == 1.)
1923  {
1924  // do nothing
1925  }
1926  else if (b != 0.)
1927  {
1928  ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1929  }
1930  else
1931  {
1932  ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1933  }
1934  }
1935 }
1936 
1937 void PetscParMatrix::MakeRef(const PetscParMatrix &master)
1938 {
1939  ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
1940  Destroy();
1941  Init();
1942  A = master.A;
1943  height = master.height;
1944  width = master.width;
1945 }
1946 
1947 PetscParVector * PetscParMatrix::GetX() const
1948 {
1949  if (!X)
1950  {
1951  MFEM_VERIFY(A,"Mat not present");
1952  X = new PetscParVector(*this,false,false); PCHKERRQ(A,ierr);
1953  }
1954  return X;
1955 }
1956 
1957 PetscParVector * PetscParMatrix::GetY() const
1958 {
1959  if (!Y)
1960  {
1961  MFEM_VERIFY(A,"Mat not present");
1962  Y = new PetscParVector(*this,true,false); PCHKERRQ(A,ierr);
1963  }
1964  return Y;
1965 }
1966 
1967 PetscParMatrix * PetscParMatrix::Transpose(bool action)
1968 {
1969  Mat B;
1970  if (action)
1971  {
1972  ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1973  }
1974  else
1975  {
1976  ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1977  }
1978  return new PetscParMatrix(B,false);
1979 }
1980 
1981 void PetscParMatrix::operator*=(double s)
1982 {
1983  ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1984 }
1985 
1986 void PetscParMatrix::Mult(double a, const Vector &x, double b, Vector &y) const
1987 {
1988  MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
1989  << ", expected size = " << Width());
1990  MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
1991  << ", expected size = " << Height());
1992 
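 // Wrap the MFEM memory of x and y in the cached PETSc vectors (no copy);
 // y needs read-write access only when b != 0, since otherwise its previous
 // contents do not contribute to the result.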
1993  PetscParVector *XX = GetX();
1994  PetscParVector *YY = GetY();
1995  bool rw = (b != 0.0);
1996  XX->PlaceMemory(x.GetMemory());
1997  YY->PlaceMemory(y.GetMemory(),rw);
1998  MatMultKernel(A,a,XX->x,b,YY->x,false);
1999  XX->ResetMemory();
2000  YY->ResetMemory();
2001 }
2002 
2003 void PetscParMatrix::MultTranspose(double a, const Vector &x, double b,
2004  Vector &y) const
2005 {
2006  MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
2007  << ", expected size = " << Height());
2008  MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
2009  << ", expected size = " << Width());
2010 
2011  PetscParVector *XX = GetX();
2012  PetscParVector *YY = GetY();
2013  bool rw = (b != 0.0);
2014  XX->PlaceMemory(y.GetMemory(),rw);
2015  YY->PlaceMemory(x.GetMemory());
2016  MatMultKernel(A,a,YY->x,b,XX->x,true);
2017  XX->ResetMemory();
2018  YY->ResetMemory();
2019 }
2020 
2021 void PetscParMatrix::Print(const char *fname, bool binary) const
2022 {
2023  if (fname)
2024  {
2025  PetscViewer view;
2026 
2027  if (binary)
2028  {
2029  ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
2030  FILE_MODE_WRITE,&view);
2031  }
2032  else
2033  {
2034  ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
2035  }
2036  PCHKERRQ(A,ierr);
2037  ierr = MatView(A,view); PCHKERRQ(A,ierr);
2038  ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
2039  }
2040  else
2041  {
2042  ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
2043  }
2044 }
2045 
2046 void PetscParMatrix::ScaleRows(const Vector & s)
2047 {
2048  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2049  << ", expected size = " << Height());
2050 
2051  PetscParVector *YY = GetY();
2052  YY->PlaceMemory(s.GetMemory());
2053  ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
2054  YY->ResetMemory();
2055 }
2056 
2057 void PetscParMatrix::ScaleCols(const Vector & s)
2058 {
2059  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2060  << ", expected size = " << Width());
2061 
2062  PetscParVector *XX = GetX();
2063  XX->PlaceMemory(s.GetMemory());
2064  ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
2065  XX->ResetMemory();
2066 }
2067 
2068 void PetscParMatrix::Shift(double s)
2069 {
2070  ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
2071 }
2072 
2073 void PetscParMatrix::Shift(const Vector & s)
2074 {
2075  // for matrices with square diagonal blocks only
2076  MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2077  << ", expected size = " << Height());
2078  MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2079  << ", expected size = " << Width());
2080 
2081  PetscParVector *XX = GetX();
2082  XX->PlaceMemory(s.GetMemory());
2083  ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2084  XX->ResetMemory();
2085 }
2086 
2087 PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
2088  PetscParMatrix *P)
2089 {
2090  MFEM_VERIFY(A->Width() == P->Height(),
2091  "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
2092  " differs from number of local rows of P " << P->Height());
2093  MFEM_VERIFY(A->Height() == R->Width(),
2094  "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
2095  " differs from number of local cols of R " << R->Width());
2096  Mat B;
2097  ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2098  PCHKERRQ(*R,ierr);
2099  return new PetscParMatrix(B);
2100 }
2101 
2102 PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
2103 {
2104  Mat pA = *A,pP = *P,pRt = *Rt;
2105  Mat B;
2106  PetscBool Aismatis,Pismatis,Rtismatis;
2107 
2108  MFEM_VERIFY(A->Width() == P->Height(),
2109  "Petsc RAP: Number of local cols of A " << A->Width() <<
2110  " differs from number of local rows of P " << P->Height());
2111  MFEM_VERIFY(A->Height() == Rt->Height(),
2112  "Petsc RAP: Number of local rows of A " << A->Height() <<
2113  " differs from number of local rows of Rt " << Rt->Height());
2114  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
2115  PCHKERRQ(pA,ierr);
2116  ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
2117  PCHKERRQ(pA,ierr);
2118  ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
2119  PCHKERRQ(pA,ierr);
2120  if (Aismatis &&
2121  Pismatis &&
2122  Rtismatis) // handle special case (this code will eventually go into PETSc)
2123  {
2124  Mat lA,lP,lB,lRt;
2125  ISLocalToGlobalMapping cl2gP,cl2gRt;
2126  PetscInt rlsize,clsize,rsize,csize;
2127 
2128  ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2129  ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2130  ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2131  ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2132  ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2133  ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2134  ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
2135  ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2136  ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2137  ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2138  ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2139  ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2140  ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
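 // The triple product is computed on the local (subdomain) matrices; when
 // Rt and P share the same local matrix, a symmetric MatPtAP is used.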
2141  if (lRt == lP)
2142  {
2143  ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2144  PCHKERRQ(lA,ierr);
2145  }
2146  else
2147  {
2148  Mat lR;
2149  ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2150  ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2151  PCHKERRQ(lRt,ierr);
2152  ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2153  }
2154 
2155  // attach the lRt matrix to the subdomain local matrix;
2156  // it may be used if markers on vdofs have to be mapped onto
2157  // subdomain true dofs
2158  {
2159  mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
2160  ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
2161  (*vmatsl2l)[0] = lRt;
2162 
2163  PetscContainer c;
2164  ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
2165  PCHKERRQ(B,ierr);
2166  ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2167  ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2168  PCHKERRQ(c,ierr);
2169  ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
2170  PCHKERRQ(B,ierr);
2171  ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2172  }
2173 
2174  // Set local problem
2175  ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2176  ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2177  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2178  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2179  }
2180  else // PETSc raises an error if this PtAP is not supported
2181  {
2182  if (pP == pRt)
2183  {
2184  ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2185  PCHKERRQ(pA,ierr);
2186  }
2187  else
2188  {
2189  Mat pR;
2190  ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2191  ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2192  PCHKERRQ(pRt,ierr);
2193  ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2194  }
2195  }
2196  return new PetscParMatrix(B);
2197 }
2198 
2199 PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
2200 {
2201  PetscParMatrix *out = RAP(P,A,P);
2202  return out;
2203 }
2204 
2205 PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
2206 {
2207  PetscParMatrix *out,*A;
2208 #if defined(PETSC_HAVE_HYPRE)
2209  A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
2210 #else
2211  A = new PetscParMatrix(hA);
2212 #endif
2213  out = RAP(P,A,P);
2214  delete A;
2215  return out;
2216 }
2217 
2218 
2219 PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
2220 {
2221  Mat AB;
2222 
2223  ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2224  CCHKERRQ(A->GetComm(),ierr);
2225  return new PetscParMatrix(AB);
2226 }
2227 
2228 PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
2229 {
2230  Mat Ae;
2231 
2232  PetscParVector dummy(GetComm(),0);
2233  ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2234  EliminateRowsCols(rows_cols,dummy,dummy);
2235  ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2236  return new PetscParMatrix(Ae);
2237 }
2238 
2239 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2240  const HypreParVector &X,
2241  HypreParVector &B,
2242  double diag)
2243 {
2244  MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2245 }
2246 
2247 void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2248  const PetscParVector &X,
2249  PetscParVector &B,
2250  double diag)
2251 {
2252  PetscInt M,N;
2253  ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2254  MFEM_VERIFY(M == N,"Rectangular case unsupported");
2255 
2256  // TODO: what if a diagonal term is not present?
2257  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2258 
2259  // rows need to be in global numbering
2260  PetscInt rst;
2261  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2262 
2263  IS dir;
2264  ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2265  if (!X.GlobalSize() && !B.GlobalSize())
2266  {
2267  ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2268  }
2269  else
2270  {
2271  ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2272  }
2273  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2274 }
2275 
2276 void PetscParMatrix::EliminateRows(const Array<int> &rows)
2277 {
2278  ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2279 
2280  // rows need to be in global numbering
2281  PetscInt rst;
2282  ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2283 
2284  IS dir;
2285  ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2286  ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2287  ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2288 }
2289 
2290 Mat PetscParMatrix::ReleaseMat(bool dereference)
2291 {
2292 
2293  Mat B = A;
2294  if (dereference)
2295  {
2296  MPI_Comm comm = GetComm();
2297  ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
2298  }
2299  A = NULL;
2300  return B;
2301 }
2302 
2303 Operator::Type PetscParMatrix::GetType() const
2304 {
2305  PetscBool ok;
2306  MFEM_VERIFY(A, "no associated PETSc Mat object");
2307  PetscObject oA = (PetscObject)(this->A);
2308  // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to -> PETSC_MATAIJ
2309  ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
2310  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2311  ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2312  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2313  ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2314  if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2315  ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2316  if (ok == PETSC_TRUE) { return PETSC_MATIS; }
2317  ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2318  if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
2319  ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2320  if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
2321 #if defined(PETSC_HAVE_HYPRE)
2322  ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2323  if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
2324 #endif
2325  return PETSC_MATGENERIC;
2326 }
2327 
2328 void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
2329  const Array<int> &ess_dof_list,
2330  const Vector &X, Vector &B)
2331 {
2332  const PetscScalar *array;
2333  Mat pA = const_cast<PetscParMatrix&>(A);
2334 
2335  // B -= Ae*X
2336  Ae.Mult(-1.0, X, 1.0, B);
2337 
2338  Vec diag = const_cast<PetscParVector&>((*A.GetX()));
2339  ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2340  ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
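 // On the eliminated rows only the diagonal entry of A is left, so setting
 // B(r) = diag(A)(r) * X(r) makes the solution of A x = B reproduce the
 // prescribed essential values X(r).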
2341  for (int i = 0; i < ess_dof_list.Size(); i++)
2342  {
2343  int r = ess_dof_list[i];
2344  B(r) = array[r] * X(r);
2345  }
2346  ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2347 }
2348 
2349 // PetscSolver methods
2350 
2351 PetscSolver::PetscSolver() : clcustom(false)
2352 {
2353  obj = NULL;
2354  B = X = NULL;
2355  cid = -1;
2356  operatorset = false;
2357  bchandler = NULL;
2358  private_ctx = NULL;
2359 }
2360 
2361 PetscSolver::~PetscSolver()
2362 {
2363  delete B;
2364  delete X;
2365  FreePrivateContext();
2366 }
2367 
2368 void PetscSolver::SetTol(double tol)
2369 {
2370  SetRelTol(tol);
2371 }
2372 
2373 void PetscSolver::SetRelTol(double tol)
2374 {
2375  if (cid == KSP_CLASSID)
2376  {
2377  KSP ksp = (KSP)obj;
2378  ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2379  }
2380  else if (cid == SNES_CLASSID)
2381  {
2382  SNES snes = (SNES)obj;
2383  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2384  PETSC_DEFAULT);
2385  }
2386  else if (cid == TS_CLASSID)
2387  {
2388  TS ts = (TS)obj;
2389  ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2390  }
2391  else
2392  {
2393  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2394  }
2395  PCHKERRQ(obj,ierr);
2396 }
2397 
2398 void PetscSolver::SetAbsTol(double tol)
2399 {
2400  if (cid == KSP_CLASSID)
2401  {
2402  KSP ksp = (KSP)obj;
2403  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2404  }
2405  else if (cid == SNES_CLASSID)
2406  {
2407  SNES snes = (SNES)obj;
2408  ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2409  PETSC_DEFAULT);
2410  }
2411  else if (cid == TS_CLASSID)
2412  {
2413  TS ts = (TS)obj;
2414  ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2415  }
2416  else
2417  {
2418  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2419  }
2420  PCHKERRQ(obj,ierr);
2421 }
2422 
2423 void PetscSolver::SetMaxIter(int max_iter)
2424 {
2425  if (cid == KSP_CLASSID)
2426  {
2427  KSP ksp = (KSP)obj;
2428  ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2429  max_iter);
2430  }
2431  else if (cid == SNES_CLASSID)
2432  {
2433  SNES snes = (SNES)obj;
2434  ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2435  max_iter,PETSC_DEFAULT);
2436  }
2437  else if (cid == TS_CLASSID)
2438  {
2439  TS ts = (TS)obj;
2440  ierr = TSSetMaxSteps(ts,max_iter);
2441  }
2442  else
2443  {
2444  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2445  }
2446  PCHKERRQ(obj,ierr);
2447 }
2448 
2449 
2450 void PetscSolver::SetPrintLevel(int plev)
2451 {
2452  typedef PetscErrorCode (*myPetscFunc)(void**);
2453  PetscViewerAndFormat *vf = NULL;
2454  PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
2455 
2456  if (plev > 0)
2457  {
2458  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2459  PCHKERRQ(obj,ierr);
2460  }
2461  if (cid == KSP_CLASSID)
2462  {
2463  // there are many other options, see the function KSPSetFromOptions() in
2464  // src/ksp/ksp/interface/itcl.c
2465  typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
2466  KSP ksp = (KSP)obj;
2467  if (plev >= 0)
2468  {
2469  ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2470  }
2471  if (plev == 1)
2472  {
2473 #if PETSC_VERSION_LT(3,15,0)
2474  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2475 #else
2476  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2477 #endif
2478  (myPetscFunc)PetscViewerAndFormatDestroy);
2479  PCHKERRQ(ksp,ierr);
2480  }
2481  else if (plev > 1)
2482  {
2483  ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2484  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2485  (myPetscFunc)PetscViewerAndFormatDestroy);
2486  PCHKERRQ(ksp,ierr);
2487  if (plev > 2)
2488  {
2489  ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2490  PCHKERRQ(viewer,ierr);
2491 #if PETSC_VERSION_LT(3,15,0)
2492  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2493 #else
2494  ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2495 #endif
2496  (myPetscFunc)PetscViewerAndFormatDestroy);
2497  PCHKERRQ(ksp,ierr);
2498  }
2499  }
2500  }
2501  else if (cid == SNES_CLASSID)
2502  {
2503  typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
2504  SNES snes = (SNES)obj;
2505  if (plev >= 0)
2506  {
2507  ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2508  }
2509  if (plev > 0)
2510  {
2511  ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2512  (myPetscFunc)PetscViewerAndFormatDestroy);
2513  PCHKERRQ(snes,ierr);
2514  }
2515  }
2516  else if (cid == TS_CLASSID)
2517  {
2518  TS ts = (TS)obj;
2519  if (plev >= 0)
2520  {
2521  ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2522  }
2523  }
2524  else
2525  {
2526  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2527  }
2528 }
2529 
2530 MPI_Comm PetscSolver::GetComm() const
2531 {
2532  return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
2533 }
2534 
2535 void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
2536 {
2537  __mfem_monitor_ctx *monctx;
2538  ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2539  monctx->solver = this;
2540  monctx->monitor = ctx;
2541  if (cid == KSP_CLASSID)
2542  {
2543  ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
2544  __mfem_monitor_ctx_destroy);
2545  PCHKERRQ(obj,ierr);
2546  }
2547  else if (cid == SNES_CLASSID)
2548  {
2549  ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
2550  __mfem_monitor_ctx_destroy);
2551  PCHKERRQ(obj,ierr);
2552  }
2553  else if (cid == TS_CLASSID)
2554  {
2555  ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
2556  __mfem_monitor_ctx_destroy);
2557  PCHKERRQ(obj,ierr);
2558  }
2559  else
2560  {
2561  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2562  }
2563 }
2564 
2565 void PetscSolver::SetBCHandler(PetscBCHandler *bch)
2566 {
2567  bchandler = bch;
2568  if (cid == SNES_CLASSID)
2569  {
2570  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
2571  snes_ctx->bchandler = bchandler;
2572  }
2573  else if (cid == TS_CLASSID)
2574  {
2575  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
2576  ts_ctx->bchandler = bchandler;
2577  }
2578  else
2579  {
2580  MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2581  }
2582 }
2583 
2584 void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
2585 {
2586  PC pc = NULL;
2587  if (cid == TS_CLASSID)
2588  {
2589  SNES snes;
2590  KSP ksp;
2591 
2592  ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
2593  ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2594  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2595  }
2596  else if (cid == SNES_CLASSID)
2597  {
2598  KSP ksp;
2599 
2600  ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
2601  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2602  }
2603  else if (cid == KSP_CLASSID)
2604  {
2605  ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
2606  }
2607  else if (cid == PC_CLASSID)
2608  {
2609  pc = (PC)obj;
2610  }
2611  else
2612  {
2613  MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
2614  }
2615  if (factory)
2616  {
2617  ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2618  }
2619  else
2620  {
2621  ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2622  }
2623 }
2624 
2625 void PetscSolver::Customize(bool customize) const
2626 {
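 // Passing customize = false permanently marks the solver as customized, so
 // the PETSc options database is never applied to it.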
2627  if (!customize) { clcustom = true; }
2628  if (!clcustom)
2629  {
2630  if (cid == PC_CLASSID)
2631  {
2632  PC pc = (PC)obj;
2633  ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2634  }
2635  else if (cid == KSP_CLASSID)
2636  {
2637  KSP ksp = (KSP)obj;
2638  ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2639  }
2640  else if (cid == SNES_CLASSID)
2641  {
2642  SNES snes = (SNES)obj;
2643  ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2644  }
2645  else if (cid == TS_CLASSID)
2646  {
2647  TS ts = (TS)obj;
2648  ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2649  }
2650  else
2651  {
2652  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2653  }
2654  }
2655  clcustom = true;
2656 }
2657 
2658 int PetscSolver::GetConverged()
2659 {
2660  if (cid == KSP_CLASSID)
2661  {
2662  KSP ksp = (KSP)obj;
2663  KSPConvergedReason reason;
2664  ierr = KSPGetConvergedReason(ksp,&reason);
2665  PCHKERRQ(ksp,ierr);
2666  return reason > 0 ? 1 : 0;
2667  }
2668  else if (cid == SNES_CLASSID)
2669  {
2670  SNES snes = (SNES)obj;
2671  SNESConvergedReason reason;
2672  ierr = SNESGetConvergedReason(snes,&reason);
2673  PCHKERRQ(snes,ierr);
2674  return reason > 0 ? 1 : 0;
2675  }
2676  else if (cid == TS_CLASSID)
2677  {
2678  TS ts = (TS)obj;
2679  TSConvergedReason reason;
2680  ierr = TSGetConvergedReason(ts,&reason);
2681  PCHKERRQ(ts,ierr);
2682  return reason > 0 ? 1 : 0;
2683  }
2684  else
2685  {
2686  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2687  return -1;
2688  }
2689 }
2690 
2691 int PetscSolver::GetNumIterations()
2692 {
2693  if (cid == KSP_CLASSID)
2694  {
2695  KSP ksp = (KSP)obj;
2696  PetscInt its;
2697  ierr = KSPGetIterationNumber(ksp,&its);
2698  PCHKERRQ(ksp,ierr);
2699  return its;
2700  }
2701  else if (cid == SNES_CLASSID)
2702  {
2703  SNES snes = (SNES)obj;
2704  PetscInt its;
2705  ierr = SNESGetIterationNumber(snes,&its);
2706  PCHKERRQ(snes,ierr);
2707  return its;
2708  }
2709  else if (cid == TS_CLASSID)
2710  {
2711  TS ts = (TS)obj;
2712  PetscInt its;
2713  ierr = TSGetStepNumber(ts,&its);
2714  PCHKERRQ(ts,ierr);
2715  return its;
2716  }
2717  else
2718  {
2719  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2720  return -1;
2721  }
2722 }
2723 
2724 double PetscSolver::GetFinalNorm()
2725 {
2726  if (cid == KSP_CLASSID)
2727  {
2728  KSP ksp = (KSP)obj;
2729  PetscReal norm;
2730  ierr = KSPGetResidualNorm(ksp,&norm);
2731  PCHKERRQ(ksp,ierr);
2732  return norm;
2733  }
2734  if (cid == SNES_CLASSID)
2735  {
2736  SNES snes = (SNES)obj;
2737  PetscReal norm;
2738  ierr = SNESGetFunctionNorm(snes,&norm);
2739  PCHKERRQ(snes,ierr);
2740  return norm;
2741  }
2742  else
2743  {
2744  MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2745  return PETSC_MAX_REAL;
2746  }
2747 }
2748 
2749 void PetscSolver::CreatePrivateContext()
2750 {
2751  FreePrivateContext();
2752  if (cid == SNES_CLASSID)
2753  {
2754  __mfem_snes_ctx *snes_ctx;
2755  ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2756  snes_ctx->op = NULL;
2757  snes_ctx->bchandler = NULL;
2758  snes_ctx->work = NULL;
2759  snes_ctx->jacType = Operator::PETSC_MATAIJ;
2760  private_ctx = (void*) snes_ctx;
2761  }
2762  else if (cid == TS_CLASSID)
2763  {
2764  __mfem_ts_ctx *ts_ctx;
2765  ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2766  ts_ctx->op = NULL;
2767  ts_ctx->bchandler = NULL;
2768  ts_ctx->work = NULL;
2769  ts_ctx->work2 = NULL;
2770  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2771  ts_ctx->cached_ijacstate = -1;
2772  ts_ctx->cached_rhsjacstate = -1;
2773  ts_ctx->cached_splits_xstate = -1;
2774  ts_ctx->cached_splits_xdotstate = -1;
2775  ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2776  ts_ctx->jacType = Operator::PETSC_MATAIJ;
2777  private_ctx = (void*) ts_ctx;
2778  }
2779 }
2780 
2781 void PetscSolver::FreePrivateContext()
2782 {
2783  if (!private_ctx) { return; }
2784  // free private context's owned objects
2785  if (cid == SNES_CLASSID)
2786  {
2787  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2788  delete snes_ctx->work;
2789  }
2790  else if (cid == TS_CLASSID)
2791  {
2792  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2793  delete ts_ctx->work;
2794  delete ts_ctx->work2;
2795  }
2796  ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2797 }
2798 
2799 // PetscBCHandler methods
2800 
2801 PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2802  enum PetscBCHandler::Type type_)
2803  : bctype(type_), setup(false), eval_t(0.0),
2804  eval_t_cached(std::numeric_limits<double>::min())
2805 {
2806  SetTDofs(ess_tdof_list);
2807 }
2808 
2809 void PetscBCHandler::SetTDofs(Array<int>& list)
2810 {
2811  ess_tdof_list.SetSize(list.Size());
2812  ess_tdof_list.Assign(list);
2813  setup = false;
2814 }
2815 
2816 void PetscBCHandler::SetUp(PetscInt n)
2817 {
2818  if (setup) { return; }
2819  if (bctype == CONSTANT)
2820  {
2821  eval_g.SetSize(n);
2822  this->Eval(eval_t,eval_g);
2823  eval_t_cached = eval_t;
2824  }
2825  else if (bctype == TIME_DEPENDENT)
2826  {
2827  eval_g.SetSize(n);
2828  }
2829  setup = true;
2830 }
2831 
2833 {
2834  (*this).SetUp(x.Size());
2835  y = x;
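 // For time-dependent boundary conditions, the boundary values eval_g are
 // re-evaluated below only when the evaluation time has changed.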
2836  if (bctype == ZERO)
2837  {
2838  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2839  {
2840  y[ess_tdof_list[i]] = 0.0;
2841  }
2842  }
2843  else
2844  {
2845  if (bctype != CONSTANT && eval_t != eval_t_cached)
2846  {
2847  Eval(eval_t,eval_g);
2848  eval_t_cached = eval_t;
2849  }
2850  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2851  {
2852  y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2853  }
2854  }
2855 }
2856 
2857 void PetscBCHandler::ApplyBC(Vector &x)
2858 {
2859  (*this).SetUp(x.Size());
2860  if (bctype == ZERO)
2861  {
2862  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2863  {
2864  x[ess_tdof_list[i]] = 0.0;
2865  }
2866  }
2867  else
2868  {
2869  if (bctype != CONSTANT && eval_t != eval_t_cached)
2870  {
2871  Eval(eval_t,eval_g);
2872  eval_t_cached = eval_t;
2873  }
2874  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2875  {
2876  x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2877  }
2878  }
2879 }
2880 
2881 void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2882 {
2883  (*this).SetUp(x.Size());
2884  if (bctype == ZERO)
2885  {
2886  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2887  {
2888  y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2889  }
2890  }
2891  else
2892  {
2893  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2894  {
2895  y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2896  }
2897  }
2898 }
2899 
2900 void PetscBCHandler::Zero(Vector &x)
2901 {
2902  (*this).SetUp(x.Size());
2903  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2904  {
2905  x[ess_tdof_list[i]] = 0.0;
2906  }
2907 }
2908 
2909 void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2910 {
2911  (*this).SetUp(x.Size());
2912  y = x;
2913  for (int i = 0; i < ess_tdof_list.Size(); ++i)
2914  {
2915  y[ess_tdof_list[i]] = 0.0;
2916  }
2917 }
2918 
2919 // PetscLinearSolver methods
2920 
2921 PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2922  bool wrapin, bool iter_mode)
2923  : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2924 {
2925  KSP ksp;
2926  ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2927  obj = (PetscObject)ksp;
2928  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2929  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2930  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2931  PCHKERRQ(ksp, ierr);
2932 }
2933 
2934 PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2935  const std::string &prefix, bool iter_mode)
2936  : PetscSolver(), Solver(0,iter_mode), wrap(false)
2937 {
2938  KSP ksp;
2939  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2940  obj = (PetscObject)ksp;
2941  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2942  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2943  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2944  PCHKERRQ(ksp, ierr);
2945  SetOperator(A);
2946 }
2947 
2948 PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2949  const std::string &prefix, bool iter_mode)
2950  : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2951 {
2952  KSP ksp;
2953  ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2954  obj = (PetscObject)ksp;
2955  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2956  ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2957  ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2958  PCHKERRQ(ksp, ierr);
2959  SetOperator(A);
2960 }
2961 
2962 void PetscLinearSolver::SetOperator(const Operator &op)
2963 {
2964  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2965  PetscParMatrix *pA = const_cast<PetscParMatrix *>
2966  (dynamic_cast<const PetscParMatrix *>(&op));
2967  const Operator *oA = dynamic_cast<const Operator *>(&op);
2968 
2969  // update base classes: Operator, Solver, PetscLinearSolver
2970  bool delete_pA = false;
2971  if (!pA)
2972  {
2973  if (hA)
2974  {
2975  // Create a MATSHELL object, or convert into a format suitable for constructing preconditioners
2976  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2977  delete_pA = true;
2978  }
2979  else if (oA) // fallback to general operator
2980  {
2981  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2982  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2983  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2984  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2985  delete_pA = true;
2986  }
2987  }
2988  MFEM_VERIFY(pA, "Unsupported operation!");
2989 
2990  // Set operators into PETSc KSP
2991  KSP ksp = (KSP)obj;
2992  Mat A = pA->A;
2993  if (operatorset)
2994  {
2995  Mat C;
2996  PetscInt nheight,nwidth,oheight,owidth;
2997 
2998  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2999  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3000  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3001  if (nheight != oheight || nwidth != owidth)
3002  {
3003  // reinit without destroying the KSP
3004  // communicator remains the same
3005  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3006  delete X;
3007  delete B;
3008  X = B = NULL;
3009  }
3010  }
3011  ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
3012 
3013  // Update PetscSolver
3014  operatorset = true;
3015 
3016  // Update the Operator fields.
3017  height = pA->Height();
3018  width = pA->Width();
3019 
3020  if (delete_pA) { delete pA; }
3021 }
3022 
3023 void PetscLinearSolver::SetOperator(const Operator &op, const Operator &pop)
3024 {
3025  const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
3026  PetscParMatrix *pA = const_cast<PetscParMatrix *>
3027  (dynamic_cast<const PetscParMatrix *>(&op));
3028  const Operator *oA = dynamic_cast<const Operator *>(&op);
3029 
3030  PetscParMatrix *ppA = const_cast<PetscParMatrix *>
3031  (dynamic_cast<const PetscParMatrix *>(&pop));
3032  const Operator *poA = dynamic_cast<const Operator *>(&pop);
3033 
3034  // Convert Operator for linear system
3035  bool delete_pA = false;
3036  if (!pA)
3037  {
3038  if (hA)
3039  {
3040  // Create a MATSHELL object, or convert into a format suitable for constructing preconditioners
3041  pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3042  delete_pA = true;
3043  }
3044  else if (oA) // fallback to general operator
3045  {
3046  // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
3047  // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
3048  pA = new PetscParMatrix(PetscObjectComm(obj),oA,
3049  wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3050  delete_pA = true;
3051  }
3052  }
3053  MFEM_VERIFY(pA, "Unsupported operation!");
3054 
3055  // Convert Operator to be preconditioned
3056  bool delete_ppA = false;
3057  if (!ppA)
3058  {
3059  if (oA == poA && !wrap) // Same operator, already converted
3060  {
3061  ppA = pA;
3062  }
3063  else
3064  {
3065  ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
3066  delete_ppA = true;
3067  }
3068  }
3069  MFEM_VERIFY(ppA, "Unsupported operation!");
3070 
3071  // Set operators into PETSc KSP
3072  KSP ksp = (KSP)obj;
3073  Mat A = pA->A;
3074  Mat P = ppA->A;
3075  if (operatorset)
3076  {
3077  Mat C;
3078  PetscInt nheight,nwidth,oheight,owidth;
3079 
3080  ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3081  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3082  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3083  if (nheight != oheight || nwidth != owidth)
3084  {
3085  // reinit without destroying the KSP
3086  // communicator remains the same
3087  ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3088  delete X;
3089  delete B;
3090  X = B = NULL;
3091  wrap = false;
3092  }
3093  }
3094  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3095 
3096  // Update PetscSolver
3097  operatorset = true;
3098 
3099  // Update the Operator fields.
3100  height = pA->Height();
3101  width = pA->Width();
3102 
3103  if (delete_pA) { delete pA; }
3104  if (delete_ppA) { delete ppA; }
3105 }
3106 
3107 void PetscLinearSolver::SetPreconditioner(Solver &precond)
3108 {
3109  KSP ksp = (KSP)obj;
3110 
3111  // Preserve Amat if already set
3112  Mat A = NULL;
3113  PetscBool amat;
3114  ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3115  if (amat)
3116  {
3117  ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3118  ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
3119  }
3120  PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
3121  if (ppc)
3122  {
3123  ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3124  }
3125  else
3126  {
3127  // wrap the Solver action
3128  // The Solver is assumed to be already set up;
3129  // ownership of precond is not transferred,
3130  // consistent with MFEM's other linear solvers
3131  PC pc;
3132  ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3133  ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
3134  }
3135  if (A)
3136  {
3137  Mat P;
3138 
3139  ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3140  ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
3141  ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3142  ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3143  ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3144  }
3145 }
3146 
3147 void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
3148 {
3149  KSP ksp = (KSP)obj;
3150 
3151  if (!B || !X)
3152  {
3153  Mat pA = NULL;
3154  ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
3155  if (!B)
3156  {
3157  PetscParMatrix A = PetscParMatrix(pA, true);
3158  B = new PetscParVector(A, true, false);
3159  }
3160  if (!X)
3161  {
3162  PetscParMatrix A = PetscParMatrix(pA, true);
3163  X = new PetscParVector(A, false, false);
3164  }
3165  }
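 // Wrap the MFEM memory of b and x without copying; x needs read-write
 // access only when the KSP is configured with a nonzero initial guess.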
3166  B->PlaceMemory(b.GetMemory());
3167 
3168  Customize();
3169 
3170  PetscBool flg;
3171  ierr = KSPGetInitialGuessNonzero(ksp, &flg);
3172  X->PlaceMemory(x.GetMemory(),flg);
3173 
3174  // Solve the system.
3175  if (trans)
3176  {
3177  ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3178  }
3179  else
3180  {
3181  ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3182  }
3183  B->ResetMemory();
3184  X->ResetMemory();
3185 }
3186 
3187 void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
3188 {
3189  (*this).MultKernel(b,x,false);
3190 }
3191 
3192 void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
3193 {
3194  (*this).MultKernel(b,x,true);
3195 }
3196 
3197 PetscLinearSolver::~PetscLinearSolver()
3198 {
3199  MPI_Comm comm;
3200  KSP ksp = (KSP)obj;
3201  ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3202  ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3203 }
3204 
3205 // PetscPCGSolver methods
3206 
3207 PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix,
3208  bool iter_mode)
3209  : PetscLinearSolver(comm,prefix,true,iter_mode)
3210 {
3211  KSP ksp = (KSP)obj;
3212  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3213  // this is to obtain a textbook PCG
3214  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3215 }
3216 
3217 PetscPCGSolver::PetscPCGSolver(PetscParMatrix& A, const std::string &prefix,
3218  bool iter_mode)
3219  : PetscLinearSolver(A,prefix,iter_mode)
3220 {
3221  KSP ksp = (KSP)obj;
3222  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3223  // this is to obtain a textbook PCG
3224  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3225 }
3226 
3227 PetscPCGSolver::PetscPCGSolver(HypreParMatrix &A, bool wrap,
3228  const std::string &prefix, bool iter_mode)
3229  : PetscLinearSolver(A,wrap,prefix,iter_mode)
3230 {
3231  KSP ksp = (KSP)obj;
3232  ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3233  // this is to obtain a textbook PCG
3234  ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3235 }
3236 
3237 // PetscPreconditioner methods
3238 
3239 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
3240  const std::string &prefix)
3241  : PetscSolver(), Solver()
3242 {
3243  PC pc;
3244  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3245  obj = (PetscObject)pc;
3246  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3247  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3248 }
3249 
3250 PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
3251  const string &prefix)
3252  : PetscSolver(), Solver()
3253 {
3254  PC pc;
3255  ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
3256  obj = (PetscObject)pc;
3257  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3258  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3259  SetOperator(A);
3260 }
3261 
3262 PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
3263  const string &prefix)
3264  : PetscSolver(), Solver()
3265 {
3266  PC pc;
3267  ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3268  obj = (PetscObject)pc;
3269  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3270  ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3271  SetOperator(op);
3272 }
3273 
3274 void PetscPreconditioner::SetOperator(const Operator &op)
3275 {
3276  bool delete_pA = false;
3277  PetscParMatrix *pA = const_cast<PetscParMatrix *>
3278  (dynamic_cast<const PetscParMatrix *>(&op));
3279 
3280  if (!pA)
3281  {
3282  const Operator *cop = dynamic_cast<const Operator *>(&op);
3283  pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
3284  delete_pA = true;
3285  }
3286 
3287  // Set operators into PETSc PC
3288  PC pc = (PC)obj;
3289  Mat A = pA->A;
3290  if (operatorset)
3291  {
3292  Mat C;
3293  PetscInt nheight,nwidth,oheight,owidth;
3294 
3295  ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3296  ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3297  ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3298  if (nheight != oheight || nwidth != owidth)
3299  {
3300  // reinit without destroying the PC
3301  // communicator remains the same
3302  ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3303  delete X;
3304  delete B;
3305  X = B = NULL;
3306  }
3307  }
3308  ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
3309 
3310  // Update PetscSolver
3311  operatorset = true;
3312 
3313  // Update the Operator fields.
3314  height = pA->Height();
3315  width = pA->Width();
3316 
3317  if (delete_pA) { delete pA; };
3318 }
3319 
3320 void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
3321  bool trans) const
3322 {
3323  MFEM_VERIFY(!iterative_mode,
3324  "Iterative mode not supported for PetscPreconditioner");
3325  PC pc = (PC)obj;
3326 
3327  if (!B || !X)
3328  {
3329  Mat pA = NULL;
3330  ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
3331  if (!B)
3332  {
3333  PetscParMatrix A(pA, true);
3334  B = new PetscParVector(A, true, false);
3335  }
3336  if (!X)
3337  {
3338  PetscParMatrix A(pA, true);
3339  X = new PetscParVector(A, false, false);
3340  }
3341  }
3342  B->PlaceMemory(b.GetMemory());
3343  X->PlaceMemory(x.GetMemory());
3344 
3345  Customize();
3346 
3347  // Apply the preconditioner.
3348  if (trans)
3349  {
3350  ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3351  }
3352  else
3353  {
3354  ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3355  }
3356  B->ResetMemory();
3357  X->ResetMemory();
3358 }
3359 
3360 void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
3361 {
3362  (*this).MultKernel(b,x,false);
3363 }
3364 
3365 void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
3366 {
3367  (*this).MultKernel(b,x,true);
3368 }
3369 
3370 PetscPreconditioner::~PetscPreconditioner()
3371 {
3372  MPI_Comm comm;
3373  PC pc = (PC)obj;
3374  ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3375  ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3376 }
3377 
3378 // PetscBDDCSolver methods
3379 
3380 // Coordinates sampling function
3381 static void func_coords(const Vector &x, Vector &y)
3382 {
3383  y = x;
3384 }
3385 
3386 void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
3387 {
3388  MPI_Comm comm = PetscObjectComm(obj);
3389 
3390  // get PETSc object
3391  PC pc = (PC)obj;
3392  Mat pA;
3393  ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3394 
3395  // the matrix should be of type MATIS
3396  PetscBool ismatis;
3397  ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
3398  PCHKERRQ(pA,ierr);
3399  MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
3400 
3401  // Check options
3402  ParFiniteElementSpace *fespace = opts.fespace;
3403  if (opts.netflux && !fespace)
3404  {
3405  MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3406  }
3407 
3408  // Attach default near-null space to local matrices
3409  {
3410  MatNullSpace nnsp;
3411  Mat lA;
3412  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3413  ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
3414  &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3415  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3416  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3417  }
3418 
3419  // set PETSc PC type to PCBDDC
3420  ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
3421 
3422  PetscInt rst,nl;
3423  ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3424  ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3425 
3426  // index sets for fields splitting and coordinates for nodal spaces
3427  IS *fields = NULL;
3428  PetscInt nf = 0;
3429  PetscInt sdim = 0;
3430  PetscReal *coords = NULL;
3431  if (fespace)
3432  {
3433  int vdim = fespace->GetVDim();
3434 
3435  // Ideally, the block size should be set at matrix creation
3436  // but the MFEM assembly does not allow this
3437  if (fespace->GetOrdering() == Ordering::byVDIM)
3438  {
3439  Mat lA;
3440  ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3441  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3442  ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3443  }
3444 
3445  // fields
3446  if (vdim > 1)
3447  {
3448  PetscInt st = rst, bs, inc, nlf;
3449  nf = vdim;
3450  nlf = nl/nf;
3451  ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3452  if (fespace->GetOrdering() == Ordering::byVDIM)
3453  {
3454  inc = 1;
3455  bs = vdim;
3456  }
3457  else
3458  {
3459  inc = nlf;
3460  bs = 1;
3461  }
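 // Build one strided index set per vector component: stride vdim for
 // byVDIM ordering (interleaved components), contiguous blocks of nlf
 // dofs for byNODES ordering.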
3462  for (PetscInt i = 0; i < nf; i++)
3463  {
3464  ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3465  st += inc;
3466  }
3467  }
3468 
3469  // coordinates
3470  const FiniteElementCollection *fec = fespace->FEColl();
3471  bool h1space = dynamic_cast<const H1_FECollection*>(fec);
3472  if (h1space)
3473  {
3474  ParFiniteElementSpace *fespace_coords = fespace;
3475 
3476  sdim = fespace->GetParMesh()->SpaceDimension();
3477  if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
3478  {
3479  fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3480  Ordering::byVDIM);
3481  }
3482  VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3483  ParGridFunction gf_coords(fespace_coords);
3484  gf_coords.ProjectCoefficient(coeff_coords);
3485  HypreParVector *hvec_coords = gf_coords.ParallelProject();
3486  PetscScalar *data_coords = (PetscScalar*)mfem::Read(hvec_coords->GetMemory(),
3487  hvec_coords->Size(),false);
3488 
3489  // likely elasticity -> we attach rigid-body modes as near-null space information to the local matrices
3490  // and to the global matrix
3491  if (vdim == sdim)
3492  {
3493  MatNullSpace nnsp;
3494  Mat lA;
3495  Vec pvec_coords,lvec_coords;
3496  ISLocalToGlobalMapping l2g;
3497  PetscSF sf;
3498  PetscLayout rmap;
3499  const PetscInt *gidxs;
3500  PetscInt nleaves;
3501 
3502  ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3503  hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3504  CCHKERRQ(comm,ierr);
3505  ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3506  if (!nnsp)
3507  {
3508  ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3509  CCHKERRQ(comm,ierr);
3510  ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3511  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3512  }
3513  ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3514  ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3515  ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3516  ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3517  ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3518  ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3519  ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3520  ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3521  ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3522  CCHKERRQ(comm,ierr);
3523  ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
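 // Broadcast the owned global coordinates to the subdomain-local ordering
 // through the star forest, so that local rigid-body modes can be built.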
3524  {
3525  const PetscScalar *garray;
3526  PetscScalar *larray;
3527 
3528  ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3529  ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3530 #if PETSC_VERSION_LT(3,15,0)
3531  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3532  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3533 #else
3534  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3535  CCHKERRQ(comm,ierr);
3536  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3537  CCHKERRQ(comm,ierr);
3538 #endif
3539  ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3540  ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3541  }
3542  ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3543  ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3544  CCHKERRQ(PETSC_COMM_SELF,ierr);
3545  ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3546  ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3547  ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3548  ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3549  }
3550 
3551  // each dof has an associated tuple of coordinates
3552  ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3553  if (nf > 0)
3554  {
3555  for (PetscInt i = 0; i < nf; i++)
3556  {
3557  const PetscInt *idxs;
3558  PetscInt nn;
3559 
3560  // It also handles the case of fespace not ordered by VDIM
3561  ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3562  ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3563  for (PetscInt j = 0; j < nn; j++)
3564  {
3565  PetscInt idx = idxs[j]-rst;
3566  for (PetscInt d = 0; d < sdim; d++)
3567  {
3568  coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3569  }
3570  }
3571  ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3572  }
3573  }
3574  else
3575  {
3576  for (PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3577  }
3578  if (fespace_coords != fespace)
3579  {
3580  delete fespace_coords;
3581  }
3582  delete hvec_coords;
3583  }
3584  }
3585 
3586  // index sets for boundary dofs specification (Essential = dir, Natural = neu)
3587  IS dir = NULL, neu = NULL;
3588 
3589  // Extract l2l matrices
3590  Array<Mat> *l2l = NULL;
3591  if (opts.ess_dof_local || opts.nat_dof_local)
3592  {
3593  PetscContainer c;
3594 
3595  ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
3596  MFEM_VERIFY(c,"Local-to-local PETSc container not present");
3597  ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
3598  }
3599 
3600  // check information about index sets (essential dofs, fields, etc.)
3601 #ifdef MFEM_DEBUG
3602  {
3603  // make sure ess/nat_dof have been collectively set
3604  PetscBool lpr = PETSC_FALSE,pr;
3605  if (opts.ess_dof) { lpr = PETSC_TRUE; }
3606  mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3607  CCHKERRQ(comm,mpiierr);
3608  MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
3609  lpr = PETSC_FALSE;
3610  if (opts.nat_dof) { lpr = PETSC_TRUE; }
3611  mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3612  CCHKERRQ(comm,mpiierr);
3613  MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
3614  // make sure fields have been collectively set
3615  PetscInt ms[2],Ms[2];
3616  ms[0] = -nf; ms[1] = nf;
3617  mpiierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3618  CCHKERRQ(comm,mpiierr);
3619  MFEM_VERIFY(-Ms[0] == Ms[1],
3620  "number of fields should be the same across processes");
3621  }
3622 #endif
3623 
3624  // boundary sets
3625  if (opts.ess_dof)
3626  {
3627  PetscInt st = opts.ess_dof_local ? 0 : rst;
3628  if (!opts.ess_dof_local)
3629  {
3630  // need to compute the boundary dofs in global ordering
3631  ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
3632  CCHKERRQ(comm,ierr);
3633  ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3634  }
3635  else
3636  {
3637  // need to compute a list for the marked boundary dofs in local ordering
3638  ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3639  CCHKERRQ(comm,ierr);
3640  ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3641  }
3642  }
3643  if (opts.nat_dof)
3644  {
3645  PetscInt st = opts.nat_dof_local ? 0 : rst;
3646  if (!opts.nat_dof_local)
3647  {
3648  // need to compute the boundary dofs in global ordering
3649  ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3650  CCHKERRQ(comm,ierr);
3651  ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3652  }
3653  else
3654  {
3655  // need to compute a list for the marked boundary dofs in local ordering
3656  ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3657  CCHKERRQ(comm,ierr);
3658  ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3659  }
3660  }
3661 
3662  // field splitting
3663  if (fields)
3664  {
3665  ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3666  }
3667  for (PetscInt i = 0; i < nf; i++)
3668  {
3669  ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3670  }
3671  ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3672 
3673  // coordinates
3674  if (coords)
3675  {
3676  ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3677  }
3678  ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3679 
3680  // code for block size is disabled since we cannot change the matrix
3681  // block size after it has been set up
3682  // int bs = 1;
3683 
3684  // Customize using the finite element space (if any)
3685  if (fespace)
3686  {
3687  const FiniteElementCollection *fec = fespace->FEColl();
3688  bool edgespace, rtspace, h1space;
3689  bool needint = opts.netflux;
3690  bool tracespace, rt_tracespace, edge_tracespace;
3691  int vdim, dim, p;
3692  PetscBool B_is_Trans = PETSC_FALSE;
3693 
3694  ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3695  dim = pmesh->Dimension();
3696  vdim = fespace->GetVDim();
3697  h1space = dynamic_cast<const H1_FECollection*>(fec);
3698  rtspace = dynamic_cast<const RT_FECollection*>(fec);
3699  edgespace = dynamic_cast<const ND_FECollection*>(fec);
3700  edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3701  rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3702  tracespace = edge_tracespace || rt_tracespace;
3703 
3704  p = 1;
3705  if (fespace->GetNE() > 0)
3706  {
3707  if (!tracespace)
3708  {
3709  p = fespace->GetElementOrder(0);
3710  }
3711  else
3712  {
3713  p = fespace->GetFaceOrder(0);
3714  if (dim == 2) { p++; }
3715  }
3716  }
3717 
3718  if (edgespace) // H(curl)
3719  {
3720  if (dim == 2)
3721  {
3722  needint = true;
3723  if (tracespace)
3724  {
3725  MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3726  " not using auxiliary quadrature");
3727  needint = false;
3728  }
3729  }
3730  else
3731  {
3732  FiniteElementCollection *vfec;
3733  if (tracespace)
3734  {
3735  vfec = new H1_Trace_FECollection(p,dim);
3736  }
3737  else
3738  {
3739  vfec = new H1_FECollection(p,dim);
3740  }
3741  ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3742  ParDiscreteLinearOperator *grad;
3743  grad = new ParDiscreteLinearOperator(vfespace,fespace);
3744  if (tracespace)
3745  {
3746  grad->AddTraceFaceInterpolator(new GradientInterpolator);
3747  }
3748  else
3749  {
3750  grad->AddDomainInterpolator(new GradientInterpolator);
3751  }
3752  grad->Assemble();
3753  grad->Finalize();
3754  HypreParMatrix *hG = grad->ParallelAssemble();
3755  PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3756  delete hG;
3757  delete grad;
3758 
3759  PetscBool conforming = PETSC_TRUE;
3760  if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3761  ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3762  PCHKERRQ(pc,ierr);
3763  delete vfec;
3764  delete vfespace;
3765  delete G;
3766  }
3767  }
3768  else if (rtspace) // H(div)
3769  {
3770  needint = true;
3771  if (tracespace)
3772  {
3773  MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3774  " auxiliary quadrature");
3775  needint = false;
3776  }
3777  }
3778  else if (h1space) // H(grad), only for the vector case
3779  {
3780  if (vdim != dim) { needint = false; }
3781  }
3782 
3783  PetscParMatrix *B = NULL;
3784  if (needint)
3785  {
3786  // Generate bilinear form in unassembled format which is used to
3787  // compute the net-flux across subdomain boundaries for H(div) and
3788  // Elasticity/Stokes, and the line integral \int u x n of 2D H(curl) fields
3789  FiniteElementCollection *auxcoll;
3790  if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3791  else
3792  {
3793  if (h1space)
3794  {
3795  auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3796  }
3797  else
3798  {
3799  auxcoll = new L2_FECollection(p,dim);
3800  }
3801  }
3802  ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3803  ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3804 
3805  if (edgespace)
3806  {
3807  if (tracespace)
3808  {
3809  b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3810  }
3811  else
3812  {
3813  b->AddDomainIntegrator(new VectorFECurlIntegrator);
3814  }
3815  }
3816  else if (rtspace)
3817  {
3818  if (tracespace)
3819  {
3820  b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3821  }
3822  else
3823  {
3824  b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3825  }
3826  }
3827  else
3828  {
3829  b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3830  }
3831  b->Assemble();
3832  b->Finalize();
3833  OperatorHandle Bh(Operator::PETSC_MATIS);
3834  b->ParallelAssemble(Bh);
3835  Bh.Get(B);
3836  Bh.SetOperatorOwner(false);
3837 
3838  if (dir) // if essential dofs are present, we need to zero the columns
3839  {
3840  Mat pB = *B;
3841  ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3842  if (!opts.ess_dof_local)
3843  {
3844  ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3845  }
3846  else
3847  {
3848  ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3849  }
3850  B_is_Trans = PETSC_TRUE;
3851  }
3852  delete b;
3853  delete pspace;
3854  delete auxcoll;
3855  }
3856 
3857  if (B)
3858  {
3859  ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3860  }
3861  delete B;
3862  }
3863  ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3864  ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3865 }
3866 
3867 PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3868  const PetscBDDCSolverParams &opts,
3869  const std::string &prefix)
3870  : PetscPreconditioner(A,prefix)
3871 {
3872  BDDCSolverConstructor(opts);
3873  Customize();
3874 }
3875 
3876 PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3877  const PetscBDDCSolverParams &opts,
3878  const std::string &prefix)
3879  : PetscPreconditioner(comm,op,prefix)
3880 {
3881  BDDCSolverConstructor(opts);
3882  Customize();
3883 }
3884 
3885 PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3886  const string &prefix)
3887  : PetscPreconditioner(comm,op,prefix)
3888 {
3889  PC pc = (PC)obj;
3890  ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3891 
3892  Mat pA;
3893  ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3894 
3895  // Check if pA is of type MATNEST
3896  PetscBool isnest;
3897  ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3898 
3899  PetscInt nr = 0;
3900  IS *isrow = NULL;
3901  if (isnest) // we know the fields
3902  {
3903  ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3904  ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3905  ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3906  }
3907 
3908  // We need to customize here, before setting the index sets.
3909  // This is because PCFieldSplitSetType customizes the function
3910  // pointers. SubSolver options will be processed during PCApply
3911  Customize();
3912 
3913  for (PetscInt i=0; i<nr; i++)
3914  {
3915  ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3916  }
3917  ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3918 }
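// Illustrative usage sketch (not part of the library source): the fieldsplit
// preconditioner detects the fields automatically when the operator is a
// MATNEST matrix, e.g. one obtained by converting a BlockOperator. The name
// blockOp below is an assumed application-level 2x2 BlockOperator.
//
//   PetscParMatrix A(MPI_COMM_WORLD, &blockOp, Operator::PETSC_MATNEST);
//   PetscFieldSplitSolver fsplit(MPI_COMM_WORLD, A, "fsplit_");
//   // Sub-solver options can then be selected at runtime, e.g.
//   //   -fsplit_pc_fieldsplit_type schur -fsplit_fieldsplit_0_pc_type gamg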
3919 
3920 PetscH2Solver::PetscH2Solver(Operator &op,
3921  ParFiniteElementSpace *fes,
3922  const std::string &prefix)
3923  : PetscPreconditioner(fes->GetParMesh()->GetComm(),prefix)
3924 {
3926  ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE); PCHKERRQ(A,ierr);
3927  ierr = MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE); PCHKERRQ(A,ierr);
3928  SetOperator(A);
3929  H2SolverConstructor(fes);
3930  Customize();
3931 }
3932 
3933 void PetscH2Solver::H2SolverConstructor(ParFiniteElementSpace *fes)
3934 {
3935 #if defined(PETSC_HAVE_H2OPUS)
3936  int sdim = fes->GetParMesh()->SpaceDimension();
3937  int vdim = fes->GetVDim();
3938  const FiniteElementCollection *fec = fes->FEColl();
3939  ParFiniteElementSpace *fes_coords = NULL;
3940 
3941  if (vdim != sdim || fes->GetOrdering() != Ordering::byVDIM)
3942  {
3943  fes_coords = new ParFiniteElementSpace(fes->GetParMesh(),fec,sdim,
3944  Ordering::byVDIM);
3945  fes = fes_coords;
3946  }
3947  VectorFunctionCoefficient ccoords(sdim, func_coords);
3948 
3949  ParGridFunction coords(fes);
3950  coords.ProjectCoefficient(ccoords);
3951  Vector c(fes->GetTrueVSize());
3952  coords.ParallelProject(c);
3953  delete fes_coords;
3954  PCSetType(*this,PCH2OPUS);
3955  PCSetCoordinates(*this,sdim,c.Size()/sdim,(PetscReal*)mfem::Read(c.GetMemory(),
3956  c.Size(),false));
3957  PCSetFromOptions(*this);
3958 #else
3959  MFEM_ABORT("Need PETSc configured with --download-h2opus");
3960 #endif
3961 }
3962 
3963 // PetscNonlinearSolver methods
3964 
3965 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3966  const std::string &prefix)
3967  : PetscSolver(), Solver()
3968 {
3969  // Create the actual solver object
3970  SNES snes;
3971  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3972  obj = (PetscObject)snes;
3973  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3974  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3975 
3976  // Allocate private solver context
3977  CreatePrivateContext();
3978 }
3979 
3980 PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
3981  const std::string &prefix)
3982  : PetscSolver(), Solver()
3983 {
3984  // Create the actual solver object
3985  SNES snes;
3986  ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3987  obj = (PetscObject)snes;
3988  ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3989  ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3990 
3991  // Allocate private solver context
3992  CreatePrivateContext();
3993 
3994  SetOperator(op);
3995 }
3996 
3997 PetscNonlinearSolver::~PetscNonlinearSolver()
3998 {
3999  MPI_Comm comm;
4000  SNES snes = (SNES)obj;
4001  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
4002  ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
4003 }
4004 
4005 void PetscNonlinearSolver::SetOperator(const Operator &op)
4006 {
4007  SNES snes = (SNES)obj;
4008 
4009  if (operatorset)
4010  {
4011  PetscBool ls,gs;
4012  void *fctx,*jctx;
4013 
4014  ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
4015  PCHKERRQ(snes, ierr);
4016  ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
4017  PCHKERRQ(snes, ierr);
4018 
4019  ls = (PetscBool)(height == op.Height() && width == op.Width() &&
4020  (void*)&op == fctx &&
4021  (void*)&op == jctx);
4022  mpiierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
4023  PetscObjectComm((PetscObject)snes));
4024  CCHKERRQ(PetscObjectComm((PetscObject)snes),mpiierr);
4025  if (!gs)
4026  {
4027  ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
4028  delete X;
4029  delete B;
4030  X = B = NULL;
4031  }
4032  }
4033  else
4034  {
4035  /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
4036  yet set. We default to backtracking */
4037  SNESLineSearch ls;
4038  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4039  ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
4040  }
4041 
4042  // If we do not pass matrices in, the default matrix type for DMShell is MATDENSE
4043  // in PETSc 3.15, which may cause issues.
4044  Mat dummy;
4045  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)snes),op.Height(),
4046  op.Height(),&dummy);
4047 
4048  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4049  snes_ctx->op = (Operator*)&op;
4050  ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
4051  PCHKERRQ(snes, ierr);
4052  ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
4053  (void *)snes_ctx);
4054  PCHKERRQ(snes, ierr);
4055 
4056  ierr = MatDestroy(&dummy);
4057  PCHKERRQ(snes, ierr);
4058 
4059  // Update PetscSolver
4060  operatorset = true;
4061 
4062  // Update the Operator fields.
4063  height = op.Height();
4064  width = op.Width();
4065 }
4066 
4067 void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
4068 {
4069  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4070  snes_ctx->jacType = jacType;
4071 }
4072 
4073 void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *,const Vector&,
4074  double*))
4075 {
4076  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4077  snes_ctx->objective = objfn;
4078 
4079  SNES snes = (SNES)obj;
4080  ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
4081  PCHKERRQ(snes, ierr);
4082 }
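// Illustrative sketch (not part of the library source) of an objective callback
// compatible with SetObjective(); MyEnergy is a hypothetical application
// function returning the scalar being minimized at the current iterate.
//
//   void obj_fn(mfem::Operator *op, const mfem::Vector &x, double *f)
//   {
//      *f = MyEnergy(x);   // e.g. the discrete energy functional at x
//   }
//   ...
//   newton.SetObjective(obj_fn);   // newton: a PetscNonlinearSolver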
4083 
4084 void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *,const Vector&,
4085  Vector&, Vector&,
4086  bool&, bool&))
4087 {
4088  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4089  snes_ctx->postcheck = post;
4090 
4091  SNES snes = (SNES)obj;
4092  SNESLineSearch ls;
4093  ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4094  ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
4095  PCHKERRQ(ls, ierr);
4096 }
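// Illustrative sketch (not part of the library source) of a post-check callback
// compatible with SetPostCheck(). Here X is the last iterate, Y the proposed
// update and W the candidate solution; the two flags must be set to true only
// if Y or W are modified by the callback.
//
//   void post_check(mfem::Operator *op, const mfem::Vector &X, mfem::Vector &Y,
//                   mfem::Vector &W, bool &changed_y, bool &changed_w)
//   {
//      changed_y = false;
//      changed_w = false;   // e.g. project W back onto an admissible set here
//   }
//   ...
//   newton.SetPostCheck(post_check);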
4097 
4098 void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *,int,
4099  const Vector&,
4100  const Vector&,
4101  const Vector&,
4102  const Vector&))
4103 {
4104  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4105  snes_ctx->update = update;
4106 
4107  SNES snes = (SNES)obj;
4108  ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4109 }
4110 
4111 void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
4112 {
4113  SNES snes = (SNES)obj;
4114 
4115  bool b_nonempty = b.Size();
4116  if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
4117  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
4118  X->PlaceMemory(x.GetMemory(),iterative_mode);
4119  if (b_nonempty) { B->PlaceMemory(b.GetMemory()); }
4120  else { *B = 0.0; }
4121 
4122  Customize();
4123 
4124  if (!iterative_mode) { *X = 0.; }
4125 
4126  // Solve the system.
4127  ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4128  X->ResetMemory();
4129  if (b_nonempty) { B->ResetMemory(); }
4130 }
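// Illustrative usage sketch (not part of the library source): solving F(x) = b
// with a PetscNonlinearSolver. Here op is an assumed mfem::Operator implementing
// Mult() (the residual) and GetGradient() (the Jacobian), and x holds the
// initial guess.
//
//   PetscNonlinearSolver newton(MPI_COMM_WORLD, op, "newton_");
//   newton.SetJacobianType(Operator::PETSC_MATAIJ);  // assemble Jacobians as AIJ
//   newton.iterative_mode = true;                    // start from the guess in x
//   Vector b;                                        // empty => solve F(x) = 0
//   newton.Mult(b, x);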
4131 
4132 // PetscODESolver methods
4133 
4134 PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
4135  : PetscSolver(), ODESolver()
4136 {
4137  // Create the actual solver object
4138  TS ts;
4139  ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4140  obj = (PetscObject)ts;
4141  ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
4142  ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4143 
4144  // Allocate private solver context
4145  CreatePrivateContext();
4146 
4147  // Default options, to comply with the current interface to ODESolver.
4148  ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4149  PCHKERRQ(ts,ierr);
4150  ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4151  PCHKERRQ(ts,ierr);
4152  TSAdapt tsad;
4153  ierr = TSGetAdapt(ts,&tsad);
4154  PCHKERRQ(ts,ierr);
4155  ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4156  PCHKERRQ(ts,ierr);
4157 }
4158 
4159 PetscODESolver::~PetscODESolver()
4160 {
4161  MPI_Comm comm;
4162  TS ts = (TS)obj;
4163  ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
4164  ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4165 }
4166 
4167 void PetscODESolver::Init(TimeDependentOperator &f_,
4168  enum PetscODESolver::Type type)
4169 {
4170  TS ts = (TS)obj;
4171 
4172  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4173  if (operatorset)
4174  {
4175  ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4176  delete X;
4177  X = NULL;
4178  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4179  ts_ctx->cached_ijacstate = -1;
4180  ts_ctx->cached_rhsjacstate = -1;
4181  ts_ctx->cached_splits_xstate = -1;
4182  ts_ctx->cached_splits_xdotstate = -1;
4183  }
4184  f = &f_;
4185 
4186  // Set functions in TS
4187  ts_ctx->op = &f_;
4188  if (f_.isImplicit())
4189  {
4190  Mat dummy;
4191  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4192  f_.Height(),&dummy);
4193  PCHKERRQ(ts, ierr);
4194  ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
4195  PCHKERRQ(ts, ierr);
4196  ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (void *)ts_ctx);
4197  PCHKERRQ(ts, ierr);
4198  ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4199  PCHKERRQ(ts, ierr);
4200  ierr = MatDestroy(&dummy);
4201  PCHKERRQ(ts, ierr);
4202  }
4203  if (!f_.isHomogeneous())
4204  {
4205  Mat dummy = NULL;
4206  if (!f_.isImplicit())
4207  {
4208  ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4209  PCHKERRQ(ts, ierr);
4210  }
4211  else
4212  {
4213  ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4214  f_.Height(),&dummy);
4215  PCHKERRQ(ts, ierr);
4216  }
4217  ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
4218  PCHKERRQ(ts, ierr);
4219  ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4220  (void *)ts_ctx);
4221  PCHKERRQ(ts, ierr);
4222  ierr = MatDestroy(&dummy);
4223  PCHKERRQ(ts, ierr);
4224  }
4225  operatorset = true;
4226 
4227  SetType(type);
4228 
4229  // Set solution vector
4230  PetscParVector X(PetscObjectComm(obj),*f,false,true);
4231  ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4232 
4233  // Compose special purpose function for PDE-constrained optimization
4234  PetscBool use = PETSC_TRUE;
4235  ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
4236  if (use && f_.isImplicit())
4237  {
4238  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4239  __mfem_ts_computesplits);
4240  PCHKERRQ(ts,ierr);
4241  }
4242  else
4243  {
4244  ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4245  NULL);
4246  PCHKERRQ(ts,ierr);
4247  }
4248 }
4249 
4250 void PetscODESolver::SetJacobianType(Operator::Type jacType)
4251 {
4252  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4253  ts_ctx->jacType = jacType;
4254 }
4255 
4256 PetscODESolver::Type PetscODESolver::GetType() const
4257 {
4258  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4259  return ts_ctx->type;
4260 }
4261 
4262 void PetscODESolver::SetType(PetscODESolver::Type type)
4263 {
4264  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4265 
4266  TS ts = (TS)obj;
4267  ts_ctx->type = type;
4268  if (type == ODE_SOLVER_LINEAR)
4269  {
4270  ierr = TSSetProblemType(ts, TS_LINEAR);
4271  PCHKERRQ(ts, ierr);
4272  }
4273  else
4274  {
4275  ierr = TSSetProblemType(ts, TS_NONLINEAR);
4276  PCHKERRQ(ts, ierr);
4277  }
4278 }
4279 
4280 void PetscODESolver::Step(Vector &x, double &t, double &dt)
4281 {
4282  // Pass the parameters to PETSc.
4283  TS ts = (TS)obj;
4284  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4285  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4286 
4287  PetscInt i;
4288  ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4289 
4290  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4291  X->PlaceMemory(x.GetMemory(),true);
4292 
4293  Customize();
4294 
4295  // Monitor initial step
4296  if (!i)
4297  {
4298  ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4299  }
4300 
4301  // Take the step.
4302  ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4303  ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4304 
4305  // Return the current time and the time step actually used to the caller.
4306  // We cannot use TSGetTimeStep(), since it returns the next candidate step
4307  PetscReal pt;
4308  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4309  dt = pt - (PetscReal)t;
4310  t = pt;
4311 
4312  // Monitor current step
4313  ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4314 
4315  X->ResetMemory();
4316 }
4317 
4318 void PetscODESolver::Run(Vector &x, double &t, double &dt, double t_final)
4319 {
4320  // Give the parameters to PETSc.
4321  TS ts = (TS)obj;
4322  ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4323  ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4324  ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4325  ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4326  PCHKERRQ(ts, ierr);
4327 
4328  if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4329  X->PlaceMemory(x.GetMemory(),true);
4330 
4331  Customize();
4332 
4333  // Reset Jacobian caching since the user may have changed
4334  // the parameters of the solver
4335  // We don't do this in the Step method because two consecutive
4336  // Step() calls are done with the same operator
4337  __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4338  ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4339  ts_ctx->cached_ijacstate = -1;
4340  ts_ctx->cached_rhsjacstate = -1;
4341  ts_ctx->cached_splits_xstate = -1;
4342  ts_ctx->cached_splits_xdotstate = -1;
4343 
4344  // Take the steps.
4345  ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4346  X->ResetMemory();
4347 
4348  // Return the final time and time step to the caller.
4349  PetscReal pt;
4350  ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4351  t = pt;
4352  ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4353  dt = pt;
4354 }
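// Illustrative usage sketch (not part of the library source): time stepping
// with PetscODESolver. Here oper is an assumed mfem::TimeDependentOperator and
// u a Vector of true dofs holding the initial condition.
//
//   PetscODESolver ode(MPI_COMM_WORLD, "ts_");
//   ode.Init(oper, PetscODESolver::ODE_SOLVER_GENERAL);
//   double t = 0.0, dt = 1e-3, t_final = 1.0;
//   while (t < t_final - 1e-12) { ode.Step(u, t, dt); }
//   // or, equivalently, let PETSc drive the time loop:
//   //   ode.Run(u, t, dt, t_final);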
4355 
4356 } // namespace mfem
4357 
4358 #include "petsc/private/petscimpl.h"
4359 #include "petsc/private/matimpl.h"
4360 
4361 // auxiliary functions
4362 static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
4363  void* ctx)
4364 {
4365  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4366 
4367  PetscFunctionBeginUser;
4368  if (!monctx)
4369  {
4370  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4371  }
4372  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4373  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4374  monctx->monitor);
4375 
4376  if (user_monitor->mon_sol)
4377  {
4378  mfem::PetscParVector V(x,true);
4379  user_monitor->MonitorSolution(it,t,V);
4380  }
4381  user_monitor->MonitorSolver(solver);
4382  PetscFunctionReturn(PETSC_SUCCESS);
4383 }
4384 
4385 static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
4386  Vec f,void *ctx)
4387 {
4388  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4389 
4390  PetscFunctionBeginUser;
4391  mfem::PetscParVector xx(x,true);
4392  mfem::PetscParVector yy(xp,true);
4393  mfem::PetscParVector ff(f,true);
4394 
4395  mfem::TimeDependentOperator *op = ts_ctx->op;
4396  op->SetTime(t);
4397 
4398  if (ts_ctx->bchandler)
4399  {
4400  // we evaluate the ImplicitMult method with the correct bc:
4401  // this means the time derivative at the essential boundary
4402  // dofs is zero
4403  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
4404  if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
4405  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4406  mfem::Vector* txx = ts_ctx->work;
4407  mfem::Vector* txp = ts_ctx->work2;
4408  bchandler->SetTime(t);
4409  bchandler->ApplyBC(xx,*txx);
4410  bchandler->ZeroBC(yy,*txp);
4411  op->ImplicitMult(*txx,*txp,ff);
4412  // and fix the residual (i.e. f_\partial\Omega = u - g(t))
4413  bchandler->FixResidualBC(xx,ff);
4414  }
4415  else
4416  {
4417  // use the ImplicitMult method of the class
4418  op->ImplicitMult(xx,yy,ff);
4419  }
4420  ff.UpdateVecFromFlags();
4421  PetscFunctionReturn(PETSC_SUCCESS);
4422 }
4423 
4424 static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
4425  void *ctx)
4426 {
4427  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4428 
4429  PetscFunctionBeginUser;
4430  if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
4431  mfem::PetscParVector xx(x,true);
4432  mfem::PetscParVector ff(f,true);
4433  mfem::TimeDependentOperator *top = ts_ctx->op;
4434  top->SetTime(t);
4435 
4436  // use the ExplicitMult method - compute the RHS function
4437  top->ExplicitMult(xx,ff);
4438 
4439  ff.UpdateVecFromFlags();
4440  PetscFunctionReturn(PETSC_SUCCESS);
4441 }
4442 
4443 static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
4444  Vec xp, PetscReal shift, Mat A, Mat P,
4445  void *ctx)
4446 {
4447  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4448  mfem::Vector *xx;
4449  PetscScalar *array;
4450  PetscReal eps = 0.001; /* 0.1% difference */
4451  PetscInt n;
4452  PetscObjectState state;
4453  PetscErrorCode ierr;
4454 
4455  PetscFunctionBeginUser;
4456  // Matrix-free case
4457  if (A && A != P)
4458  {
4459  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4460  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4461  }
4462 
4463  // avoid recomputing the Jacobian if we have already done so;
4464  // the relative tolerance comparison should be fine, since two
4465  // consecutive shifts should have similar magnitude
4466  ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4467  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4468  std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4469  state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4470 
4471  // update time
4472  mfem::TimeDependentOperator *op = ts_ctx->op;
4473  op->SetTime(t);
4474 
4475  // wrap Vecs with Vectors
4476  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4477  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4478  mfem::Vector yy(array,n);
4479  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4480  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4481  if (!ts_ctx->bchandler)
4482  {
4483  xx = new mfem::Vector(array,n);
4484  }
4485  else
4486  {
4487  // make sure we compute a Jacobian with the correct boundary values
4488  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4489  mfem::Vector txx(array,n);
4490  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4491  xx = ts_ctx->work;
4492  bchandler->SetTime(t);
4493  bchandler->ApplyBC(txx,*xx);
4494  }
4495  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4496 
4497  // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
4498  mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
4499  if (!ts_ctx->bchandler) { delete xx; }
4500  ts_ctx->cached_shift = shift;
4501 
4502  // Convert to the operator type requested if needed
4503  bool delete_pA = false;
4504  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4505  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4506  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4507  pA->GetType() != ts_ctx->jacType))
4508  {
4509  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4510  ts_ctx->jacType);
4511  delete_pA = true;
4512  }
4513 
4514  // Eliminate essential dofs
4515  if (ts_ctx->bchandler)
4516  {
4517  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4518  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4519  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4520  }
4521 
4522  // Get nonzerostate
4523  PetscObjectState nonzerostate;
4524  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4525 
4526  // Avoid an unneeded copy of the matrix by swapping the Mat headers
4527  Mat B;
4528  B = pA->ReleaseMat(false);
4529  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4530  if (delete_pA) { delete pA; }
4531 
4532  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4533  // preconditioner fails because MatCreateSubMatrix_Nest does not
4534  // actually return a matrix. Instead, for efficiency reasons,
4535  // it returns a reference to the submatrix. The second time it
4536  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4537  // aborts since the two submatrices are actually different.
4538  // We circumvent this issue by incrementing the nonzero state
4539  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4540  // This does not impact performance in the MATNEST case
4541  PetscBool isnest;
4542  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4543  CHKERRQ(ierr);
4544  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4545 
4546  // Jacobian reuse
4547  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
4548  CHKERRQ(ierr);
4549 
4550  // Fool the DM: register the new matrix with the DMShell so PETSc reuses it
4551  DM dm;
4552  MatType mtype;
4553  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4554  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4555  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4556  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4557  PetscFunctionReturn(PETSC_SUCCESS);
4558 }
4559 
4560 static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
4561  Mat Ax,Mat Jx,
4562  Mat Axp,Mat Jxp)
4563 {
4564  __mfem_ts_ctx* ts_ctx;
4565  mfem::Vector *xx;
4566  PetscScalar *array;
4567  PetscInt n;
4568  PetscObjectState state;
4569  PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4570  PetscBool assembled;
4571  PetscErrorCode ierr;
4572 
4573  PetscFunctionBeginUser;
4574  // Matrix-free cases
4575  if (Ax && Ax != Jx)
4576  {
4577  ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4578  ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4579  }
4580  if (Axp && Axp != Jxp)
4581  {
4582  ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4583  ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4584  }
4585 
4586  ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
4587 
4588  // avoid recomputing the Jacobians if we have already done so
4589  ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
4590  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4591  state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4592  ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
4593  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4594  state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4595  if (!rx && !rxp) { PetscFunctionReturn(PETSC_SUCCESS); }
4596 
4597  // update time
4598  mfem::TimeDependentOperator *op = ts_ctx->op;
4599  op->SetTime(t);
4600 
4601  // wrap Vecs with Vectors
4602  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4603  ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4604  mfem::Vector yy(array,n);
4605  ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4606  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4607  if (!ts_ctx->bchandler)
4608  {
4609  xx = new mfem::Vector(array,n);
4610  }
4611  else
4612  {
4613  // make sure we compute a Jacobian with the correct boundary values
4614  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4615  mfem::Vector txx(array,n);
4616  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4617  xx = ts_ctx->work;
4618  bchandler->SetTime(t);
4619  bchandler->ApplyBC(txx,*xx);
4620  }
4621  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4622 
4623  // We don't have a specialized interface, so we compute the split Jacobians
4624  // by evaluating the implicit gradient method twice with the appropriate shifts
4625 
4626  // first we do the state jacobian
4627  mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
4628 
4629  // Convert to the operator type requested if needed
4630  bool delete_mat = false;
4631  mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
4632  (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
4633  if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4634  pJx->GetType() != ts_ctx->jacType))
4635  {
4636  if (pJx)
4637  {
4638  Mat B = *pJx;
4639  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4640  }
4641  pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
4642  ts_ctx->jacType);
4643  delete_mat = true;
4644  }
4645  if (rx)
4646  {
4647  ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4648  if (assembled)
4649  {
4650  ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4651  }
4652  else
4653  {
4654  Mat B;
4655  ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4656  ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4657  }
4658  }
4659  if (delete_mat) { delete pJx; }
4660  pJx = new mfem::PetscParMatrix(Jx,true);
4661 
4662  // Eliminate essential dofs
4663  if (ts_ctx->bchandler)
4664  {
4665  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4666  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4667  pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4668  }
4669 
4670  // Then we compute the Jacobian w.r.t. the time derivative of the state
4671  // Note that this is usually the mass matrix
4672  mfem::PetscParMatrix *pJxp = NULL;
4673  if (rxp)
4674  {
4675  delete_mat = false;
4676  mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
4677  pJxp = const_cast<mfem::PetscParMatrix *>
4678  (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
4679  if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4680  pJxp->GetType() != ts_ctx->jacType))
4681  {
4682  if (pJxp)
4683  {
4684  Mat B = *pJxp;
4685  ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4686  }
4687  pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
4688  &oJxp,ts_ctx->jacType);
4689  delete_mat = true;
4690  }
4691 
4692  ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4693  if (assembled)
4694  {
4695  ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4696  }
4697  else
4698  {
4699  Mat B;
4700  ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4701  ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4702  }
4703  if (delete_mat) { delete pJxp; }
4704  pJxp = new mfem::PetscParMatrix(Jxp,true);
4705 
4706  // Eliminate essential dofs
4707  if (ts_ctx->bchandler)
4708  {
4709  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4710  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4711  pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
4712  }
4713 
4714  // Obtain the time-dependent part of the Jacobian by subtracting
4715  // the state Jacobian.
4716  // We don't use the class operator "-=" since we know that
4717  // the sparsity patterns of the two matrices are the same
4718  ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4719  }
4720 
4721  // Jacobian reuse
4722  ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4723  CHKERRQ(ierr);
4724  ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4725  CHKERRQ(ierr);
4726 
4727  delete pJx;
4728  delete pJxp;
4729  if (!ts_ctx->bchandler) { delete xx; }
4730  PetscFunctionReturn(PETSC_SUCCESS);
4731 }
4732 
4733 static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4734  Mat A, Mat P, void *ctx)
4735 {
4736  __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4737  mfem::Vector *xx;
4738  PetscScalar *array;
4739  PetscInt n;
4740  PetscObjectState state;
4741  PetscErrorCode ierr;
4742 
4743  PetscFunctionBeginUser;
4744  // Matrix-free case
4745  if (A && A != P)
4746  {
4747  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4748  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4749  }
4750 
4751  // avoid recomputing the Jacobian if we have already done so
4752  ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4753  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4754  state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4755 
4756  // update time
4757  mfem::TimeDependentOperator *op = ts_ctx->op;
4758  op->SetTime(t);
4759 
4760  // wrap Vec with Vector
4761  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4762  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4763  if (!ts_ctx->bchandler)
4764  {
4765  xx = new mfem::Vector(array,n);
4766  }
4767  else
4768  {
4769  // make sure we compute a Jacobian with the correct boundary values
4770  if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4771  mfem::Vector txx(array,n);
4772  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4773  xx = ts_ctx->work;
4774  bchandler->SetTime(t);
4775  bchandler->ApplyBC(txx,*xx);
4776  }
4777  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4778 
4779  // Use TimeDependentOperator::GetExplicitGradient(x)
4780  mfem::Operator& J = op->GetExplicitGradient(*xx);
4781  if (!ts_ctx->bchandler) { delete xx; }
4782 
4783  // Convert to the operator type requested if needed
4784  bool delete_pA = false;
4785  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4786  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4787  if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4788  pA->GetType() != ts_ctx->jacType))
4789  {
4790  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4791  ts_ctx->jacType);
4792  delete_pA = true;
4793  }
4794 
4795  // Eliminate essential dofs
4796  if (ts_ctx->bchandler)
4797  {
4798  mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4799  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4800  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4801  }
4802 
4803  // Get nonzerostate
4804  PetscObjectState nonzerostate;
4805  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4806 
4807  // Avoid an unneeded copy of the matrix by swapping the Mat headers
4808  Mat B;
4809  B = pA->ReleaseMat(false);
4810  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4811  if (delete_pA) { delete pA; }
4812 
4813  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4814  // preconditioner fails because MatCreateSubMatrix_Nest does not
4815  // actually return a matrix. Instead, for efficiency reasons,
4816  // it returns a reference to the submatrix. The second time it
4817  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4818  // aborts since the two submatrices are actually different.
4819  // We circumvent this issue by incrementing the nonzero state
4820  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4821  // This does not impact performance in the MATNEST case
4822  PetscBool isnest;
4823  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4824  CHKERRQ(ierr);
4825  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4826 
4827  // Jacobian reuse
4828  if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4829  {
4830  ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4831  }
4832  ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_rhsjacstate);
4833  CHKERRQ(ierr);
4834 
4835  // Fool the DM: register the new matrix with the DMShell so PETSc reuses it
4836  DM dm;
4837  MatType mtype;
4838  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4839  ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4840  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4841  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4842  PetscFunctionReturn(PETSC_SUCCESS);
4843 }
4844 
4845 static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4846  void* ctx)
4847 {
4848  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4849 
4850  PetscFunctionBeginUser;
4851  if (!monctx)
4852  {
4853  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4854  }
4855 
4856  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4857  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4858  monctx->monitor);
4859  if (user_monitor->mon_sol)
4860  {
4861  Vec x;
4862  PetscErrorCode ierr;
4863 
4864  ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4865  mfem::PetscParVector V(x,true);
4866  user_monitor->MonitorSolution(it,res,V);
4867  }
4868  if (user_monitor->mon_res)
4869  {
4870  Vec x;
4871  PetscErrorCode ierr;
4872 
4873  ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4874  mfem::PetscParVector V(x,true);
4875  user_monitor->MonitorResidual(it,res,V);
4876  }
4877  user_monitor->MonitorSolver(solver);
4878  PetscFunctionReturn(PETSC_SUCCESS);
4879 }
4880 
4881 static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4882  void *ctx)
4883 {
4884  PetscScalar *array;
4885  PetscInt n;
4886  PetscErrorCode ierr;
4887  mfem::Vector *xx;
4888  __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4889 
4890  PetscFunctionBeginUser;
4891  ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4892  ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4893  if (!snes_ctx->bchandler)
4894  {
4895  xx = new mfem::Vector(array,n);
4896  }
4897  else
4898  {
4899  // make sure we compute a Jacobian with the correct boundary values
4900  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4901  mfem::Vector txx(array,n);
4902  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4903  xx = snes_ctx->work;
4904  bchandler->ApplyBC(txx,*xx);
4905  }
4906 
4907  // Use Operator::GetGradient(x)
4908  mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4909  ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4910  if (!snes_ctx->bchandler) { delete xx; }
4911 
4912  // Convert to the operator type requested if needed
4913  bool delete_pA = false;
4914  mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4915  (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4916  if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4917  pA->GetType() != snes_ctx->jacType))
4918  {
4919  pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4920  snes_ctx->jacType);
4921  delete_pA = true;
4922  }
4923 
4924  // Eliminate essential dofs
4925  if (snes_ctx->bchandler)
4926  {
4927  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4928  mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4929  pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4930  }
4931 
4932  // Get nonzerostate
4933  PetscObjectState nonzerostate;
4934  ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4935 
4936  // Avoid an unneeded copy of the matrix by swapping the Mat headers
4937  Mat B = pA->ReleaseMat(false);
4938  ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4939  if (delete_pA) { delete pA; }
4940 
4941  // When using MATNEST and PCFIELDSPLIT, the second setup of the
4942  // preconditioner fails because MatCreateSubMatrix_Nest does not
4943  // actually return a matrix. Instead, for efficiency reasons,
4944  // it returns a reference to the submatrix. The second time it
4945  // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4946  // aborts since the two submatrices are actually different.
4947  // We circumvent this issue by incrementing the nonzero state
4948  // (i.e. PETSc thinks the operator sparsity pattern has changed)
4949  // This does not impact performance in the MATNEST case
4950  PetscBool isnest;
4951  ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4952  CHKERRQ(ierr);
4953  if (isnest) { P->nonzerostate = nonzerostate + 1; }
4954 
4955  // Matrix-free case
4956  if (A && A != P)
4957  {
4958  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4959  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4960  }
4961 
4962  // Fool the DM: register the new matrix with the DMShell so PETSc reuses it
4963  DM dm;
4964  MatType mtype;
4965  ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4966  ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4967  ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4968  ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4969  PetscFunctionReturn(PETSC_SUCCESS);
4970 }
4971 
4972 static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4973 {
4974  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4975 
4976  PetscFunctionBeginUser;
4977  mfem::PetscParVector xx(x,true);
4978  mfem::PetscParVector ff(f,true);
4979  if (snes_ctx->bchandler)
4980  {
4981  // we evaluate the Mult method with the correct bc
4982  if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
4983  mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4984  mfem::Vector* txx = snes_ctx->work;
4985  bchandler->ApplyBC(xx,*txx);
4986  snes_ctx->op->Mult(*txx,ff);
4987  // and fix the residual (i.e. f_\partial\Omega = u - g)
4988  bchandler->FixResidualBC(xx,ff);
4989  }
4990  else
4991  {
4992  // use the Mult method of the class
4993  snes_ctx->op->Mult(xx,ff);
4994  }
4995  ff.UpdateVecFromFlags();
4996  PetscFunctionReturn(PETSC_SUCCESS);
4997 }
4998 
4999 static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
5000  void *ctx)
5001 {
5002  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5003 
5004  PetscFunctionBeginUser;
5005  if (!snes_ctx->objective)
5006  {
5007  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
5008  }
5009  mfem::PetscParVector xx(x,true);
5010  double lf;
5011  (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
5012  *f = (PetscReal)lf;
5013  PetscFunctionReturn(PETSC_SUCCESS);
5014 }
5015 
5016 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
5017  PetscBool *cy,PetscBool *cw, void* ctx)
5018 {
5019  __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5020  bool lcy = false,lcw = false;
5021 
5022  PetscFunctionBeginUser;
5023  mfem::PetscParVector x(X,true);
5024  mfem::PetscParVector y(Y,true);
5025  mfem::PetscParVector w(W,true);
5026  (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
5027  if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
5028  if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
5029  PetscFunctionReturn(PETSC_SUCCESS);
5030 }
5031 
5032 static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
5033 {
5034  Vec F,X,dX,pX;
5035  __mfem_snes_ctx* snes_ctx;
5036 
5037  PetscFunctionBeginUser;
5038  /* SNESSetUpdate() does not accept a user context; retrieve it from the function callback */
5039  ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
5040  ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
5041  if (!it)
5042  {
5043  ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
5044  ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
5045  CHKERRQ(ierr);
5046  ierr = VecDestroy(&pX); CHKERRQ(ierr);
5047  }
5048  ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
5049  CHKERRQ(ierr);
5050  if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
5051  "Missing previous solution");
5052  ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
5053  mfem::PetscParVector f(F,true);
5054  mfem::PetscParVector x(X,true);
5055  mfem::PetscParVector dx(dX,true);
5056  mfem::PetscParVector px(pX,true);
5057  (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
5058  /* Store previous solution */
5059  ierr = VecCopy(X,pX); CHKERRQ(ierr);
5060  PetscFunctionReturn(PETSC_SUCCESS);
5061 }
5062 
5063 static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
5064  void* ctx)
5065 {
5066  __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
5067 
5068  PetscFunctionBeginUser;
5069  if (!monctx)
5070  {
5071  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
5072  }
5073 
5074  mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
5075  mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
5076  monctx->monitor);
5077  if (user_monitor->mon_sol)
5078  {
5079  Vec x;
5080  PetscErrorCode ierr;
5081 
5082  ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
5083  mfem::PetscParVector V(x,true);
5084  user_monitor->MonitorSolution(it,res,V);
5085  }
5086  if (user_monitor->mon_res)
5087  {
5088  Vec x;
5089  PetscErrorCode ierr;
5090 
5091  ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5092  mfem::PetscParVector V(x,true);
5093  user_monitor->MonitorResidual(it,res,V);
5094  }
5095  user_monitor->MonitorSolver(solver);
5096  PetscFunctionReturn(PETSC_SUCCESS);
5097 }
5098 
5099 static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
5100 {
5101  mfem::Operator *op;
5102  PetscErrorCode ierr;
5103 
5104  PetscFunctionBeginUser;
5105  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5106  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5107  mfem::PetscParVector xx(x,true);
5108  mfem::PetscParVector yy(y,true);
5109  op->Mult(xx,yy);
5110  yy.UpdateVecFromFlags();
5111  PetscFunctionReturn(PETSC_SUCCESS);
5112 }
5113 
5114 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
5115 {
5116  mfem::Operator *op;
5117  PetscErrorCode ierr;
5118  PetscBool flg,symm;
5119 
5120  PetscFunctionBeginUser;
5121  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5122  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5123  mfem::PetscParVector xx(x,true);
5124  mfem::PetscParVector yy(y,true);
5125  ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5126  if (flg && symm)
5127  {
5128  op->Mult(xx,yy);
5129  }
5130  else
5131  {
5132  op->MultTranspose(xx,yy);
5133  }
5134  yy.UpdateVecFromFlags();
5135  PetscFunctionReturn(PETSC_SUCCESS);
5136 }
5137 
5138 static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
5139 {
5140  mfem::Operator *op;
5141  PetscErrorCode ierr;
5142 
5143  PetscFunctionBeginUser;
5144  ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5145  if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5146  ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
5147  PetscFunctionReturn(PETSC_SUCCESS);
5148 }
5149 
5150 static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
5151 {
5152  PetscFunctionBeginUser;
5153  PetscFunctionReturn(PETSC_SUCCESS);
5154 }
5155 
5156 static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
5157 {
5158  __mfem_pc_shell_ctx *ctx;
5159  PetscErrorCode ierr;
5160 
5161  PetscFunctionBeginUser;
5162  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5163  if (ctx->op)
5164  {
5165  PetscBool isascii;
5166  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5167  CHKERRQ(ierr);
5168 
5169  mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
5170  (ctx->op);
5171  if (ppc)
5172  {
5173  ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5174  }
5175  else
5176  {
5177  if (isascii)
5178  {
5179  ierr = PetscViewerASCIIPrintf(viewer,
5180  "No information available on the mfem::Solver\n");
5181  CHKERRQ(ierr);
5182  }
5183  }
5184  if (isascii && ctx->factory)
5185  {
5186  ierr = PetscViewerASCIIPrintf(viewer,
5187  "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5188  CHKERRQ(ierr);
5189  }
5190  }
5191  PetscFunctionReturn(PETSC_SUCCESS);
5192 }
5193 
5194 static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
5195 {
5196  __mfem_pc_shell_ctx *ctx;
5197  PetscErrorCode ierr;
5198 
5199  PetscFunctionBeginUser;
5200  mfem::PetscParVector xx(x,true);
5201  mfem::PetscParVector yy(y,true);
5202  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5203  if (ctx->op)
5204  {
5205  ctx->op->Mult(xx,yy);
5206  yy.UpdateVecFromFlags();
5207  }
5208  else // operator is not present, copy x
5209  {
5210  yy = xx;
5211  }
5212  PetscFunctionReturn(PETSC_SUCCESS);
5213 }
5214 
5215 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
5216 {
5217  __mfem_pc_shell_ctx *ctx;
5218  PetscErrorCode ierr;
5219 
5220  PetscFunctionBeginUser;
5221  mfem::PetscParVector xx(x,true);
5222  mfem::PetscParVector yy(y,true);
5223  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5224  if (ctx->op)
5225  {
5226  ctx->op->MultTranspose(xx,yy);
5227  yy.UpdateVecFromFlags();
5228  }
5229  else // operator is not present, copy x
5230  {
5231  yy = xx;
5232  }
5233  PetscFunctionReturn(PETSC_SUCCESS);
5234 }
5235 
5236 static PetscErrorCode __mfem_pc_shell_setup(PC pc)
5237 {
5238  __mfem_pc_shell_ctx *ctx;
5239 
5240  PetscFunctionBeginUser;
5241  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5242  if (ctx->factory)
5243  {
5244  // Delete any owned operator
5245  if (ctx->ownsop)
5246  {
5247  delete ctx->op;
5248  }
5249 
5250  // Get current preconditioning Mat
5251  Mat B;
5252  ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5253 
5254  // Call user-defined setup
5255  mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
5256  mfem::PetscPreconditionerFactory *factory = ctx->factory;
5257  ctx->op = factory->NewPreconditioner(hB);
5258  ctx->ownsop = true;
5259  ctx->numprec++;
5260  }
5261  PetscFunctionReturn(PETSC_SUCCESS);
5262 }
5263 
5264 static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
5265 {
5266  __mfem_pc_shell_ctx *ctx;
5267  PetscErrorCode ierr;
5268 
5269  PetscFunctionBeginUser;
5270  ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5271  if (ctx->ownsop)
5272  {
5273  delete ctx->op;
5274  }
5275  delete ctx;
5276  PetscFunctionReturn(PETSC_SUCCESS);
5277 }
5278 
5279 static PetscErrorCode __mfem_array_container_destroy(void *ptr)
5280 {
5281  PetscErrorCode ierr;
5282 
5283  PetscFunctionBeginUser;
5284  ierr = PetscFree(ptr); CHKERRQ(ierr);
5285  PetscFunctionReturn(PETSC_SUCCESS);
5286 }
5287 
5288 static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
5289 {
5290  mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
5291  PetscErrorCode ierr;
5292 
5293  PetscFunctionBeginUser;
5294  for (int i=0; i<a->Size(); i++)
5295  {
5296  Mat M = (*a)[i];
5297  MPI_Comm comm = PetscObjectComm((PetscObject)M);
5298  ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5299  }
5300  delete a;
5301  PetscFunctionReturn(PETSC_SUCCESS);
5302 }
5303 
5304 static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
5305 {
5306  PetscErrorCode ierr;
5307 
5308  PetscFunctionBeginUser;
5309  ierr = PetscFree(*ctx); CHKERRQ(ierr);
5310  PetscFunctionReturn(PETSC_SUCCESS);
5311 }
5312 
5313 // Sets the type of the PC to PCSHELL and wraps the solver action.
5314 // If ownsop is true, ownership of precond is transferred to the PETSc object.
5315 PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
5316 {
5317  PetscFunctionBeginUser;
5318  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5319  ctx->op = &precond;
5320  ctx->ownsop = ownsop;
5321  ctx->factory = NULL;
5322  ctx->numprec = 0;
5323 
5324  // In case the PC was already of type SHELL, this will destroy any
5325  // previous user-defined data structure
5326  // We cannot call PCReset as it will wipe out any operator already set
5327  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5328 
5329  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5330  ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5331  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5332  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5333  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5334  CHKERRQ(ierr);
5335  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5336  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5337  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5338  PetscFunctionReturn(PETSC_SUCCESS);
5339 }
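// Illustrative usage sketch (not part of the library source): wrapping an
// existing mfem::Solver as the preconditioner of a bare PETSc KSP. Here ksp is
// an assumed, already configured KSP and amg an application-owned solver.
//
//   PC pc;
//   KSPGetPC(ksp, &pc);
//   MakeShellPC(pc, *amg, false);   // false: the caller keeps ownership of amg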
5340 
5341 // Sets the type of PC to PCSHELL. Uses a PetscPreconditionerFactory to construct the solver
5342 // Takes ownership of the solver created by the factory
5343 PetscErrorCode MakeShellPCWithFactory(PC pc,
5344  mfem::PetscPreconditionerFactory *factory)
5345 {
5346  PetscFunctionBeginUser;
5347  __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5348  ctx->op = NULL;
5349  ctx->ownsop = true;
5350  ctx->factory = factory;
5351  ctx->numprec = 0;
5352 
5353  // In case the PC was already of type SHELL, this will destroy any
5354  // previous user-defined data structure
5355  // We cannot call PCReset as it will wipe out any operator already set
5356  ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5357 
5358  ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5359  ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
5360  ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5361  ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5362  ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5363  CHKERRQ(ierr);
5364  ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5365  ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5366  ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5367  PetscFunctionReturn(PETSC_SUCCESS);
5368 }
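// Illustrative sketch (not part of the library source) of a user-defined factory
// usable with MakeShellPCWithFactory(), e.g. via
// PetscSolver::SetPreconditionerFactory(). BuildMySolver is a hypothetical
// application routine returning a new mfem::Solver for the current matrix.
//
//   class MyFactory : public mfem::PetscPreconditionerFactory
//   {
//   public:
//      MyFactory() : mfem::PetscPreconditionerFactory("my factory") {}
//      virtual mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh)
//      {
//         mfem::PetscParMatrix *pA;
//         oh.Get(pA);                 // current preconditioning matrix
//         return BuildMySolver(*pA);  // ownership passes to the PCSHELL
//      }
//   };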
5369 
5370 // Converts from a list (or a marked Array if islist is false) to an IS
5371 // st indicates the offset where to start numbering
5372 static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
5373  const mfem::Array<int> *list,
5374  PetscInt st, IS* is)
5375 {
5376  PetscInt n = list ? list->Size() : 0,*idxs;
5377  const int *data = list ? list->GetData() : NULL;
5378  PetscErrorCode ierr;
5379 
5380  PetscFunctionBeginUser;
5381  ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5382  if (islist)
5383  {
5384  for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5385  }
5386  else
5387  {
5388  PetscInt cum = 0;
5389  for (PetscInt i=0; i<n; i++)
5390  {
5391  if (data[i]) { idxs[cum++] = i+st; }
5392  }
5393  n = cum;
5394  }
5395  ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5396  CHKERRQ(ierr);
5397  PetscFunctionReturn(PETSC_SUCCESS);
5398 }
5399 
5400 // Converts from a marked Array of Vdofs to an IS
5401 // st indicates the offset where to start numbering
5402 // l2l is a vector of matrices generated during RAP
5403 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5404  mfem::Array<Mat> &pl2l,
5405  const mfem::Array<int> *mark,
5406  PetscInt st, IS* is)
5407 {
5408  mfem::Array<int> sub_dof_marker;
5409  mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
5410  PetscInt nl;
5411  PetscErrorCode ierr;
5412 
5413  PetscFunctionBeginUser;
5414  for (int i = 0; i < pl2l.Size(); i++)
5415  {
5416  PetscInt m,n,*ii,*jj;
5417  PetscBool done;
5418  ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
5419  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5420  MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
5421  ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5422 #if defined(PETSC_USE_64BIT_INDICES)
5423  int nnz = (int)ii[m];
5424  int *mii = new int[m+1];
5425  int *mjj = new int[nnz];
5426  for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5427  for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5428  l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
5429 #else
5430  l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
5431 #endif
5432  ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5433  (const PetscInt**)&ii,
5434  (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5435  MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
5436  << i << " l2l matrix");
5437  }
5438  nl = 0;
5439  for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5440  sub_dof_marker.SetSize(nl);
5441  const int* vdata = mark->GetData();
5442  int* sdata = sub_dof_marker.GetData();
5443  int cumh = 0, cumw = 0;
5444  for (int i = 0; i < l2l.Size(); i++)
5445  {
5446  const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
5447  l2l[i]->Height());
5448  mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
5449  l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5450  cumh += l2l[i]->Height();
5451  cumw += l2l[i]->Width();
5452  }
5453  ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5454  for (int i = 0; i < pl2l.Size(); i++)
5455  {
5456  delete l2l[i];
5457  }
5458  PetscFunctionReturn(PETSC_SUCCESS);
5459 }
5460 
5461 #if !defined(PETSC_HAVE_HYPRE)
5462 
5463 #if defined(HYPRE_MIXEDINT)
5464 #error "HYPRE_MIXEDINT not supported"
5465 #endif
5466 
5467 #include "_hypre_parcsr_mv.h"
5468 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
5469 {
5470  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5471  hypre_CSRMatrix *hdiag,*hoffd;
5472  PetscScalar *da,*oa,*aptr;
5473  PetscInt *dii,*djj,*oii,*ojj,*iptr;
5474  PetscInt i,dnnz,onnz,m,n;
5475  PetscMPIInt size;
5476  PetscErrorCode ierr;
5477 
5478  PetscFunctionBeginUser;
5479  hdiag = hypre_ParCSRMatrixDiag(hA);
5480  hoffd = hypre_ParCSRMatrixOffd(hA);
5481  m = hypre_CSRMatrixNumRows(hdiag);
5482  n = hypre_CSRMatrixNumCols(hdiag);
5483  dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5484  onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5485  ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5486  ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5487  ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5488  ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
5489  CHKERRQ(ierr);
5490  ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
5491  CHKERRQ(ierr);
5492  ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
5493  CHKERRQ(ierr);
5494  iptr = djj;
5495  aptr = da;
5496  for (i=0; i<m; i++)
5497  {
5498  PetscInt nc = dii[i+1]-dii[i];
5499  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5500  iptr += nc;
5501  aptr += nc;
5502  }
5503  ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5504  if (size > 1)
5505  {
5506  PetscInt *offdj,*coffd;
5507 
5508  ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5509  ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5510  ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5511  ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
5512  CHKERRQ(ierr);
5513  offdj = hypre_CSRMatrixJ(hoffd);
5514  coffd = hypre_ParCSRMatrixColMapOffd(hA);
5515  for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5516  ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
5517  CHKERRQ(ierr);
5518  iptr = ojj;
5519  aptr = oa;
5520  for (i=0; i<m; i++)
5521  {
5522  PetscInt nc = oii[i+1]-oii[i];
5523  ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5524  iptr += nc;
5525  aptr += nc;
5526  }
5527  ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5528  djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5529  }
5530  else
5531  {
5532  oii = ojj = NULL;
5533  oa = NULL;
5534  ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5535  }
5536  /* We are responsible for freeing the CSR arrays. However, since we can take
5537  references of a PetscParMatrix but not of raw PETSc arrays, we create
5538  PetscContainer objects that hold these arrays and attach them to the matrix,
5539  so that they are freed when the matrix is destroyed */
5540  void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5541  const char *names[6] = {"_mfem_csr_dii",
5542  "_mfem_csr_djj",
5543  "_mfem_csr_da",
5544  "_mfem_csr_oii",
5545  "_mfem_csr_ojj",
5546  "_mfem_csr_oa"
5547  };
5548  for (i=0; i<6; i++)
5549  {
5550  PetscContainer c;
5551 
5552  ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5553  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5554  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5555  CHKERRQ(ierr);
5556  ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
5557  CHKERRQ(ierr);
5558  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5559  }
5560  PetscFunctionReturn(PETSC_SUCCESS);
5561 }
5562 
5563 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
5564 {
5565  Mat lA;
5566  ISLocalToGlobalMapping rl2g,cl2g;
5567  IS is;
5568  hypre_CSRMatrix *hdiag,*hoffd;
5569  MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5570  void *ptrs[2];
5571  const char *names[2] = {"_mfem_csr_aux",
5572  "_mfem_csr_data"
5573  };
5574  PetscScalar *hdd,*hod,*aa,*data;
5575  PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5576  PetscInt *aux,*ii,*jj;
5577  PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5578  PetscErrorCode ierr;
5579 
5580  PetscFunctionBeginUser;
5581  /* access relevant information in ParCSR */
5582  str = hypre_ParCSRMatrixFirstRowIndex(hA);
5583  stc = hypre_ParCSRMatrixFirstColDiag(hA);
5584  hdiag = hypre_ParCSRMatrixDiag(hA);
5585  hoffd = hypre_ParCSRMatrixOffd(hA);
5586  dr = hypre_CSRMatrixNumRows(hdiag);
5587  dc = hypre_CSRMatrixNumCols(hdiag);
5588  nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5589  hdi = hypre_CSRMatrixI(hdiag);
5590  hdj = hypre_CSRMatrixJ(hdiag);
5591  hdd = hypre_CSRMatrixData(hdiag);
5592  oc = hypre_CSRMatrixNumCols(hoffd);
5593  nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5594  hoi = hypre_CSRMatrixI(hoffd);
5595  hoj = hypre_CSRMatrixJ(hoffd);
5596  hod = hypre_CSRMatrixData(hoffd);
5597 
5598  /* generate l2g maps for rows and cols */
5599  ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5600  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5601  ierr = ISDestroy(&is); CHKERRQ(ierr);
5602  col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5603  ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5604  for (i=0; i<dc; i++) { aux[i] = i+stc; }
5605  for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5606  ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5607  ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5608  ierr = ISDestroy(&is); CHKERRQ(ierr);
5609 
5610  /* create MATIS object */
5611  ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5612  ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5613  ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5614  ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5615  ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5616  ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5617 
5618  /* merge local matrices */
5619  ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5620  ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5621  ii = aux;
5622  jj = aux+dr+1;
5623  aa = data;
5624  *ii = *(hdi++) + *(hoi++);
5625  for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5626  {
5627  PetscScalar *aold = aa;
5628  PetscInt *jold = jj,nc = jd+jo;
5629  for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5630  for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5631  *(++ii) = *(hdi++) + *(hoi++);
5632  ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5633  }
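   /* any remaining rows contain no nonzeros: close their row pointers at nnz */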
5634  for (; cum<dr; cum++) { *(++ii) = nnz; }
5635  ii = aux;
5636  jj = aux+dr+1;
5637  aa = data;
5638  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5639  CHKERRQ(ierr);
5640  ptrs[0] = aux;
5641  ptrs[1] = data;
5642  for (i=0; i<2; i++)
5643  {
5644  PetscContainer c;
5645 
5646  ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5647  ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5648  ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5649  CHKERRQ(ierr);
5650  ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
5651  CHKERRQ(ierr);
5652  ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5653  }
5654  ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5655  ierr = MatDestroy(&lA); CHKERRQ(ierr);
5656  ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5657  ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5658  PetscFunctionReturn(PETSC_SUCCESS);
5659 }
5660 #endif
5661 
5662 #include <petsc/private/matimpl.h>
5663 
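/* Create a placeholder Mat of custom type name "mfemdummy": only the sizes
   and layout are meaningful, no storage or operations are attached. The
   private header above is needed to set the preallocated flag directly. */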
5664 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm, PetscInt m,
5665  PetscInt n, Mat *A)
5666 {
5667  PetscFunctionBegin;
5668  ierr = MatCreate(comm,A); CHKERRQ(ierr);
5669  ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5670  ierr = PetscObjectChangeTypeName((PetscObject)*A,"mfemdummy"); CHKERRQ(ierr);
5671  (*A)->preallocated = PETSC_TRUE;
5672  ierr = MatSetUp(*A); CHKERRQ(ierr);
5673  PetscFunctionReturn(PETSC_SUCCESS);
5674 }
5675 
5676 #include <petsc/private/vecimpl.h>
5677 
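/* Small helpers that access private Vec fields (hence the vecimpl.h include
   above): set the device offload mask directly, and query whether the vector
   is bound to CPU execution (always reported as true in host-only builds). */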
5678 #if defined(PETSC_HAVE_DEVICE)
5679 static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5680 {
5681  PetscFunctionBegin;
5682  v->offloadmask = m;
5683  PetscFunctionReturn(PETSC_SUCCESS);
5684 }
5685 #endif
5686 
5687 static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5688 {
5689  PetscFunctionBegin;
5690 #if defined(PETSC_HAVE_DEVICE)
5691  *flg = v->boundtocpu;
5692 #else
5693  *flg = PETSC_TRUE;
5694 #endif
5695  PetscFunctionReturn(PETSC_SUCCESS);
5696 }
5697 
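/* Thin wrapper around PetscObjectStateIncrease, presumably used to signal
   that an object's data was modified outside of PETSc's own routines so that
   state-dependent cached information is invalidated. */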
5698 static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject o)
5699 {
5700  PetscErrorCode ierr;
5701 
5702  PetscFunctionBegin;
5703  ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5704  PetscFunctionReturn(PETSC_SUCCESS);
5705 }
5706 
5707 #endif // MFEM_USE_PETSC
5708 #endif // MFEM_USE_MPI