MFEM v4.9.0
Finite element discretization library
petsc.cpp
1// Copyright (c) 2010-2025, Lawrence Livermore National Security, LLC. Produced
2// at the Lawrence Livermore National Laboratory. All Rights reserved. See files
3// LICENSE and NOTICE for details. LLNL-CODE-806117.
4//
5// This file is part of the MFEM library. For more information and source code
6// availability visit https://mfem.org.
7//
8// MFEM is free software; you can redistribute it and/or modify it under the
9// terms of the BSD-3 license. We welcome feedback and contributions, see file
10// CONTRIBUTING.md for details.
11
12// Author: Stefano Zampini <stefano.zampini@gmail.com>
13
14#include "../config/config.hpp"
15
16#ifdef MFEM_USE_MPI
17#ifdef MFEM_USE_PETSC
18
19#include "linalg.hpp"
20#include "../fem/fem.hpp"
21
22#include "petsc.h"
23#if defined(PETSC_HAVE_HYPRE)
24#include "petscmathypre.h"
25#endif
26
27// Backward compatibility
28#if PETSC_VERSION_LT(3,11,0)
29#define VecLockReadPush VecLockPush
30#define VecLockReadPop VecLockPop
31#endif
32#if PETSC_VERSION_LT(3,12,0)
33#define VecGetArrayWrite VecGetArray
34#define VecRestoreArrayWrite VecRestoreArray
35#define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36#define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
37#endif
38#if PETSC_VERSION_LT(3,19,0)
39#define PETSC_SUCCESS 0
40#endif
41
42#include <fstream>
43#include <iomanip>
44#include <cmath>
45#include <cstdlib>
46
47// Note: there are additional #include statements below.
48
49#include "petscinternals.hpp"
50
51// Callback functions: these functions will be called by PETSc
52static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
53static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
54static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
55 void*);
56static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
57static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
58 PetscReal,Mat,
59 Mat,void*);
60static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
61 Mat,Mat,Mat,Mat);
62static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
63static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
64static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
65static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
66static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
67static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
68 PetscBool*,PetscBool*,void*);
69static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
70static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
71static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
72static PetscErrorCode __mfem_pc_shell_setup(PC);
73static PetscErrorCode __mfem_pc_shell_destroy(PC);
74static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
75static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
76static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
77static PetscErrorCode __mfem_mat_shell_destroy(Mat);
78static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
79#if PETSC_VERSION_LT(3,23,0)
80static PetscErrorCode __mfem_array_container_destroy(void*);
81static PetscErrorCode __mfem_matarray_container_destroy(void *);
82#else
83static PetscErrorCode __mfem_array_container_destroy(void**);
84static PetscErrorCode __mfem_matarray_container_destroy(void**);
85#endif
86static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
87
88// auxiliary functions
89static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
90 PetscInt,IS*);
91static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
92 const mfem::Array<int>*,PetscInt,IS*);
93static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
94static PetscErrorCode MakeShellPCWithFactory(PC,
95 mfem::PetscPreconditionerFactory*);
96
97// Equivalent functions are present in the PETSc source code
98// if PETSc has been compiled with hypre support.
99// We provide them here in case PETSC_HAVE_HYPRE is not defined.
100#if !defined(PETSC_HAVE_HYPRE)
101static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
102static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
103#endif
104
105#if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
106#if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
107#define _USE_DEVICE
108#define PETSC_VECDEVICE VECCUDA
109#define PETSC_MATAIJDEVICE MATAIJCUSPARSE
110#define VecDeviceGetArrayRead VecCUDAGetArrayRead
111#define VecDeviceGetArrayWrite VecCUDAGetArrayWrite
112#define VecDeviceGetArray VecCUDAGetArray
113#define VecDeviceRestoreArrayRead VecCUDARestoreArrayRead
114#define VecDeviceRestoreArrayWrite VecCUDARestoreArrayWrite
115#define VecDeviceRestoreArray VecCUDARestoreArray
116#define VecDevicePlaceArray VecCUDAPlaceArray
117#define VecDeviceResetArray VecCUDAResetArray
118#elif defined(MFEM_USE_HIP) && defined(PETSC_HAVE_HIP)
119#define _USE_DEVICE
120#define PETSC_VECDEVICE VECHIP
121#define PETSC_MATAIJDEVICE MATAIJHIPSPARSE
122#define VecDeviceGetArrayRead VecHIPGetArrayRead
123#define VecDeviceGetArrayWrite VecHIPGetArrayWrite
124#define VecDeviceGetArray VecHIPGetArray
125#define VecDeviceRestoreArrayRead VecHIPRestoreArrayRead
126#define VecDeviceRestoreArrayWrite VecHIPRestoreArrayWrite
127#define VecDeviceRestoreArray VecHIPRestoreArray
128#define VecDevicePlaceArray VecHIPPlaceArray
129#define VecDeviceResetArray VecHIPResetArray
130#else
131#define VecDeviceGetArrayRead VecGetArrayRead
132#define VecDeviceGetArrayWrite VecGetArrayWrite
133#define VecDeviceGetArray VecGetArray
134#define VecDeviceRestoreArrayRead VecRestoreArrayRead
135#define VecDeviceRestoreArrayWrite VecRestoreArrayWrite
136#define VecDeviceRestoreArray VecRestoreArray
137#define VecDevicePlaceArray VecPlaceArray
138#define VecDeviceResetArray VecResetArray
139#endif
140#endif
141
142#if defined(PETSC_HAVE_DEVICE)
143static PetscErrorCode __mfem_VecSetOffloadMask(Vec,PetscOffloadMask);
144#endif
145static PetscErrorCode __mfem_VecBoundToCPU(Vec,PetscBool*);
146static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject);
147static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm,PetscInt,PetscInt,Mat*);
148
149// structs used by PETSc code
150typedef struct
151{
152 mfem::Solver *op;
153 mfem::PetscPreconditionerFactory *factory;
154 bool ownsop;
155 unsigned long int numprec;
156} __mfem_pc_shell_ctx;
157
158typedef struct
159{
160 mfem::Operator *op; // The nonlinear operator
161 mfem::PetscBCHandler *bchandler; // Handling of essential bc
162 mfem::Vector *work; // Work vector
163 mfem::Operator::Type jacType; // OperatorType for the Jacobian
164 // Objective for line search
165 void (*objective)(mfem::Operator *op, const mfem::Vector&, mfem::real_t*);
166 // PostCheck function (to be called after successful line search)
167 void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
168 mfem::Vector&, bool&, bool&);
169 // General purpose update function (to be called at the beginning of
170 // each nonlinear step)
171 void (*update)(mfem::Operator *op, int,
172 const mfem::Vector&, const mfem::Vector&,
173 const mfem::Vector&, const mfem::Vector&);
174} __mfem_snes_ctx;
175
176typedef struct
177{
178 mfem::TimeDependentOperator *op; // The time-dependent operator
179 mfem::PetscBCHandler *bchandler; // Handling of essential bc
180 mfem::Vector *work; // Work vector
181 mfem::Vector *work2; // Work vector
182 mfem::Operator::Type jacType; // OperatorType for the Jacobian
183 enum mfem::PetscODESolver::Type type;
184 PetscReal cached_shift;
185 PetscObjectState cached_ijacstate;
186 PetscObjectState cached_rhsjacstate;
187 PetscObjectState cached_splits_xstate;
188 PetscObjectState cached_splits_xdotstate;
189} __mfem_ts_ctx;
190
191typedef struct
192{
193 mfem::PetscSolver *solver; // The solver object
194 mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
195} __mfem_monitor_ctx;
196
197// use global scope ierr to check PETSc errors inside mfem calls
198static PetscErrorCode ierr;
199static PetscMPIInt mpiierr;
200
201using namespace std;
202
203namespace mfem
204{
205
206void MFEMInitializePetsc()
207{
208 MFEMInitializePetsc(NULL,NULL,NULL,NULL);
209}
210
211void MFEMInitializePetsc(int *argc,char*** argv)
212{
213 MFEMInitializePetsc(argc,argv,NULL,NULL);
214}
215
216void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
217 const char help[])
218{
219 // Tell PETSc to use the same CUDA or HIP device as MFEM:
220 if (mfem::Device::Allows(mfem::Backend::CUDA_MASK))
221 {
222#if PETSC_VERSION_LT(3,17,0)
223 const char *opts = "-cuda_device";
224#else
225 const char *opts = "-device_select_cuda";
226#endif
227 ierr = PetscOptionsSetValue(NULL,opts,
228 to_string(mfem::Device::GetId()).c_str());
229 MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
230 }
231 if (mfem::Device::Allows(mfem::Backend::HIP_MASK))
232 {
233#if PETSC_VERSION_LT(3,17,0)
234 const char *opts = "-hip_device";
235#else
236 const char *opts = "-device_select_hip";
237#endif
238 ierr = PetscOptionsSetValue(NULL,opts,
239 to_string(mfem::Device::GetId()).c_str());
240 MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
241 }
242 ierr = PetscInitialize(argc,argv,rc_file,help);
243 MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
244}
245
246void MFEMFinalizePetsc()
247{
248 ierr = PetscFinalize();
249 MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
250}
251
252const mfem::real_t* PetscMemory::GetHostPointer() const
253{
254 int oflags = flags;
255 SetHostValid();
256 const mfem::real_t *v = mfem::Read(*this,Capacity(),false);
257 flags = oflags;
258 return v;
259}
260
261const mfem::real_t* PetscMemory::GetDevicePointer() const
262{
263 int oflags = flags;
264 SetDeviceValid();
265 const mfem::real_t *v = mfem::Read(*this,Capacity(),true);
266 flags = oflags;
267 return v;
268}
269
270// PetscParVector methods
271
272void PetscParVector::SetDataAndSize_()
273{
274 PetscScalar *array;
275 PetscInt n;
276 PetscBool isnest;
277
278 MFEM_VERIFY(x,"Missing Vec");
279 ierr = VecSetUp(x); PCHKERRQ(x,ierr);
280 ierr = PetscObjectTypeCompare((PetscObject)x,VECNEST,&isnest); PCHKERRQ(x,ierr);
281 MFEM_VERIFY(!isnest,"Not for type nest");
282 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
283 MFEM_VERIFY(n >= 0,"Invalid local size");
284 size = n;
285#if defined(PETSC_HAVE_DEVICE)
286 PetscOffloadMask omask;
287 PetscBool isdevice;
288
289 ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
290 if (omask != PETSC_OFFLOAD_BOTH)
291 {
292 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
293 }
294#endif
295 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
296#if defined(PETSC_HAVE_DEVICE)
297 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
298 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
299 ""); PCHKERRQ(x,ierr);
300 if (isdevice)
301 {
302 if (omask != PETSC_OFFLOAD_BOTH)
303 {
304 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
305 }
306 PetscScalar *darray;
307 ierr = VecDeviceGetArrayRead(x,(const PetscScalar**)&darray);
308 PCHKERRQ(x,ierr);
309 pdata.Wrap(array,darray,size,MemoryType::HOST,false);
310 ierr = VecDeviceRestoreArrayRead(x,(const PetscScalar**)&darray);
311 PCHKERRQ(x,ierr);
312 }
313 else
314#endif
315 {
316 pdata.Wrap(array,size,MemoryType::HOST,false);
317 }
318 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
319
320#if defined(PETSC_HAVE_DEVICE)
321 if (omask == PETSC_OFFLOAD_UNALLOCATED && isdevice) { omask = PETSC_OFFLOAD_CPU; }
322 ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
323#endif
324 data.MakeAlias(pdata,0,size);
325 SetFlagsFromMask_();
326}
327
328void PetscParVector::SetFlagsFromMask_() const
329{
330 MFEM_VERIFY(x,"Missing Vec");
331#if defined(_USE_DEVICE)
332 PetscOffloadMask mask;
333 PetscBool isdevice;
334 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
335 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
336 ""); PCHKERRQ(x,ierr);
337 ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
338 if (isdevice)
339 {
340 switch (mask)
341 {
342 case PETSC_OFFLOAD_CPU:
343 pdata.SetHostValid();
344 pdata.SetDeviceInvalid();
345 break;
346 case PETSC_OFFLOAD_GPU:
347 pdata.SetDeviceValid();
348 pdata.SetHostInvalid();
349 break;
350 case PETSC_OFFLOAD_BOTH:
351 pdata.SetHostValid();
352 pdata.SetDeviceValid();
353 break;
354 default:
355 MFEM_ABORT("Unhandled case " << mask);
356 }
357 }
358#endif
359 data.Sync(pdata);
360}
361
362void PetscParVector::UpdateVecFromFlags()
363{
364 MFEM_VERIFY(x,"Missing Vec");
365 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
366#if defined(_USE_DEVICE)
367 PetscBool isdevice;
368 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
369 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
370 ""); PCHKERRQ(x,ierr);
371 if (isdevice)
372 {
373 bool dv = pdata.DeviceIsValid();
374 bool hv = pdata.HostIsValid();
375 PetscOffloadMask mask;
376 if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
377 else if (dv) { mask = PETSC_OFFLOAD_GPU; }
378 else { mask = PETSC_OFFLOAD_CPU; }
379 ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
380 }
381 else
382#endif
383 {
384 /* Just make sure we have an up-to-date copy on the CPU for PETSc */
385 PetscScalar *v;
386 ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
387 pdata.CopyToHost(v,size);
388 ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
389 }
390}
391
392void PetscParVector::SetVecType_()
393{
394 VecType vectype;
395 MFEM_VERIFY(x,"Missing Vec");
396 ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
397#if defined(_USE_DEVICE)
398 switch (Device::GetDeviceMemoryType())
399 {
400 case MemoryType::DEVICE:
401 case MemoryType::MANAGED:
402 ierr = VecSetType(x,PETSC_VECDEVICE); PCHKERRQ(x,ierr);
403 break;
404 default:
405 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
406 break;
407 }
408#else
409 if (!vectype)
410 {
411 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
412 }
413#endif
414}
415
416const mfem::real_t* PetscParVector::Read(bool on_dev) const
417{
418 const PetscScalar *dummy;
419 MFEM_VERIFY(x,"Missing Vec");
420#if defined(PETSC_HAVE_DEVICE)
421 PetscBool isdevice;
422 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
423 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
424 ""); PCHKERRQ(x,ierr);
425 if (on_dev && isdevice)
426 {
427 ierr = VecDeviceGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
428 ierr = VecDeviceRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
429 }
430 else
431#endif
432 {
433 ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
434 ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
435 }
436 SetFlagsFromMask_();
437 return mfem::Read(pdata, size, on_dev);
438}
439
440const mfem::real_t* PetscParVector::HostRead() const
441{
442 return Read(false);
443}
444
445mfem::real_t* PetscParVector::Write(bool on_dev)
446{
447 PetscScalar *dummy;
448 MFEM_VERIFY(x,"Missing Vec");
449#if defined(PETSC_HAVE_DEVICE)
450 PetscBool isdevice;
451 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
452 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
453 ""); PCHKERRQ(x,ierr);
454 if (on_dev && isdevice)
455 {
456 ierr = VecDeviceGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
457 ierr = VecDeviceRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
458 }
459 else
460#endif
461 {
462 ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
463 ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
464 }
465 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
466 SetFlagsFromMask_();
467 return mfem::Write(pdata, size, on_dev);
468}
469
470mfem::real_t* PetscParVector::HostWrite()
471{
472 return Write(false);
473}
474
475mfem::real_t* PetscParVector::ReadWrite(bool on_dev)
476{
477 PetscScalar *dummy;
478 MFEM_VERIFY(x,"Missing Vec");
479#if defined(PETSC_HAVE_DEVICE)
480 PetscBool isdevice;
481 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
482 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
483 ""); PCHKERRQ(x,ierr);
484 if (on_dev && isdevice)
485 {
486 ierr = VecDeviceGetArray(x,&dummy); PCHKERRQ(x,ierr);
487 ierr = VecDeviceRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
488 }
489 else
490#endif
491 {
492 ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
493 ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
494 }
495 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
496 SetFlagsFromMask_();
497 return mfem::ReadWrite(pdata, size, on_dev);
498}
499
499
500mfem::real_t* PetscParVector::HostReadWrite()
501{
502 return ReadWrite(false);
503}
504
505void PetscParVector::UseDevice(bool dev) const
506{
507 MFEM_VERIFY(x,"Missing Vec");
508#if defined(PETSC_HAVE_DEVICE)
509 ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
510 SetFlagsFromMask_();
511#endif
512}
513
514bool PetscParVector::UseDevice() const
515{
516 PetscBool flg;
517 MFEM_VERIFY(x,"Missing Vec");
518 ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
519 return flg ? false : true;
520}
521
522PetscInt PetscParVector::GlobalSize() const
523{
524 PetscInt N;
525 ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
526 return N;
527}
528
529void PetscParVector::SetBlockSize(PetscInt bs)
530{
531 ierr = VecSetBlockSize(x,bs); PCHKERRQ(x,ierr);
532}
533
534PetscParVector::PetscParVector(MPI_Comm comm, const Vector &x_,
535 bool copy) : Vector()
536{
537 PetscBool isdevice;
538
539 int n = x_.Size();
540 ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
541 ierr = VecSetSizes(x,n,PETSC_DECIDE); PCHKERRQ(x,ierr);
542 SetVecType_();
543 SetDataAndSize_();
544 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
545 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
546 ""); PCHKERRQ(x,ierr);
547 if (copy)
548 {
549 /* we use PETSc accessors to flag valid memory location to PETSc */
550 PetscErrorCode (*rest)(Vec,PetscScalar**);
551 PetscScalar *array;
552#if defined(PETSC_HAVE_DEVICE)
553 if (isdevice && x_.UseDevice())
554 {
555 UseDevice(true);
556 ierr = VecDeviceGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
557 rest = VecDeviceRestoreArrayWrite;
558 }
559 else
560#endif
561 {
562 UseDevice(false);
563 ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
564 rest = VecRestoreArrayWrite;
565 }
566 pdata.CopyFrom(x_.GetMemory(), n);
567 ierr = (*rest)(x,&array); PCHKERRQ(x,ierr);
568 SetFlagsFromMask_();
569 }
570 else // don't copy, just set the device flag
571 {
572 if (isdevice && x_.UseDevice())
573 {
574 UseDevice(true);
575 }
576 else
577 {
578 UseDevice(false);
579 }
580 }
581}
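// Illustrative use of the constructor above (sketch):
//
//    mfem::Vector v(100);
//    v = 1.0;
//    mfem::PetscParVector pv(MPI_COMM_WORLD, v, /*copy=*/true); // deep copy
//    // with copy == false only the size and device flag are taken from v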
582
583PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
584 PetscInt *col) : Vector()
585{
586 ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
587 if (col)
588 {
589 PetscMPIInt myid;
590 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
591 ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
592 }
593 else
594 {
595 ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
596 }
597 SetVecType_();
598 SetDataAndSize_();
599}
600
601PetscParVector::~PetscParVector()
602{
603 MPI_Comm comm = PetscObjectComm((PetscObject)x);
604 ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
605 pdata.Delete();
606}
607
608PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
609 PetscScalar *data_, PetscInt *col) : Vector()
610{
611 MFEM_VERIFY(col,"Missing distribution");
612 PetscMPIInt myid;
613 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
614 ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
615 &x); CCHKERRQ(comm,ierr)
616 SetVecType_();
617 SetDataAndSize_();
618}
619
620PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
621{
622 ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
623 SetDataAndSize_();
624}
625
626PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
627 bool transpose, bool allocate) : Vector()
628{
629 PetscInt loc = transpose ? op.Height() : op.Width();
630
631 ierr = VecCreate(comm,&x);
632 CCHKERRQ(comm,ierr);
633 ierr = VecSetSizes(x,loc,PETSC_DECIDE);
634 PCHKERRQ(x,ierr);
635
636 SetVecType_();
637 if (allocate)
638 {
639 SetDataAndSize_();
640 }
641 else /* Vector intended to be used with Place/ResetMemory calls */
642 {
643 size = loc;
644 }
645}
646
647PetscParVector::PetscParVector(const PetscParMatrix &A,
648 bool transpose, bool allocate) : Vector()
649{
650 Mat pA = const_cast<PetscParMatrix&>(A);
651 if (!transpose)
652 {
653 ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
654 }
655 else
656 {
657 ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
658 }
659 SetVecType_();
660 if (!allocate) /* Vector intended to be used with Place/ResetMemory calls */
661 {
662 PetscInt n;
663 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
664 size = n;
665 }
666 else
667 {
668 SetDataAndSize_();
669 }
670}
671
672PetscParVector::PetscParVector(petsc::Vec y, bool ref) : Vector()
673{
674 if (ref)
675 {
676 ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
677 }
678 x = y;
679 SetDataAndSize_();
680}
681
682PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
683{
684 HYPRE_BigInt* offsets = pfes->GetTrueDofOffsets();
685 MPI_Comm comm = pfes->GetComm();
686 ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
687
688 PetscMPIInt myid = 0;
689 if (!HYPRE_AssumedPartitionCheck())
690 {
691 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
692 }
693 ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
694 PCHKERRQ(x,ierr);
695 SetVecType_();
696 SetDataAndSize_();
697}
698
699MPI_Comm PetscParVector::GetComm() const
700{
701 return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
702}
703
704Vector * PetscParVector::GlobalVector() const
705{
706 VecScatter scctx;
707 Vec vout;
708 const PetscScalar *array;
709 PetscInt size;
710
711 ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
712 ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
713 PCHKERRQ(x,ierr);
714 ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
715 PCHKERRQ(x,ierr);
716 ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
717 ierr = VecGetArrayRead(vout,&array); PCHKERRQ(x,ierr);
718 ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
719 Array<PetscScalar> data(size);
720 data.Assign(array);
721 ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(x,ierr);
722 ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
723 Vector *v = new Vector(data, internal::to_int(size));
724 v->MakeDataOwner();
725 data.LoseData();
726 return v;
727}
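// Illustrative use of GlobalVector() (sketch): the call is collective, every
// rank receives the fully gathered vector and owns the returned object:
//
//    mfem::Vector *g = pv.GlobalVector();
//    // ... use g, identical on all ranks ...
//    delete g;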
728
729PetscParVector& PetscParVector::operator=(PetscScalar d)
730{
731 ierr = VecSet(x,d); PCHKERRQ(x,ierr);
732 SetFlagsFromMask_();
733 return *this;
734}
735
736PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
737 const Array<PetscScalar>& vals)
738{
739 MFEM_VERIFY(idx.Size() == vals.Size(),
740 "Size mismatch between indices and values");
741 PetscInt n = idx.Size();
742 ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
743 PCHKERRQ(x,ierr);
744 ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
745 ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
746 SetFlagsFromMask_();
747 return *this;
748}
749
750PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
751 const Array<PetscScalar>& vals)
752{
753 MFEM_VERIFY(idx.Size() == vals.Size(),
754 "Size mismatch between indices and values");
755 PetscInt n = idx.Size();
756 ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
757 PCHKERRQ(x,ierr);
758 ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
759 ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
760 SetFlagsFromMask_();
761 return *this;
762}
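// Illustrative use of SetValues()/AddValues() (sketch); the indices are
// global, and both methods are collective since they end with
// VecAssemblyBegin/End:
//
//    mfem::Array<PetscInt> idx(2);
//    mfem::Array<PetscScalar> vals(2);
//    idx[0] = 0;    idx[1] = 1;
//    vals[0] = 1.0; vals[1] = 2.0;
//    pv.SetValues(idx, vals); // INSERT_VALUES semantics
//    pv.AddValues(idx, vals); // ADD_VALUES semantics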
763
764PetscParVector& PetscParVector::operator=(const PetscParVector &y)
765{
766 ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
767 SetFlagsFromMask_();
768 return *this;
769}
770
771PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
772{
773 ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
774 SetFlagsFromMask_();
775 return *this;
776}
777
778PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
779{
780 ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
781 SetFlagsFromMask_();
782 return *this;
783}
784
785PetscParVector& PetscParVector::operator*=(PetscScalar s)
786{
787 ierr = VecScale(x,s); PCHKERRQ(x,ierr);
788 SetFlagsFromMask_();
789 return *this;
790}
791
792PetscParVector& PetscParVector::operator+=(PetscScalar s)
793{
794 ierr = VecShift(x,s); PCHKERRQ(x,ierr);
795 SetFlagsFromMask_();
796 return *this;
797}
798
799void PetscParVector::PlaceArray(PetscScalar *temp_data)
800{
801 ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
802}
803
804void PetscParVector::ResetArray()
805{
806 ierr = VecResetArray(x); PCHKERRQ(x,ierr);
807}
808
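// Illustrative pairing of PlaceArray()/ResetArray() (sketch, host data only):
// temporarily expose user storage as the vector's local array, without copies:
//
//    std::vector<PetscScalar> buf(pv.Size(), 0.0);
//    pv.PlaceArray(buf.data()); // pv now reads/writes buf
//    // ... operate on pv ...
//    pv.ResetArray();           // detach buf, restore the original array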
809void PetscParVector::PlaceMemory(Memory<mfem::real_t>& mem, bool rw)
810{
811 PetscInt n;
812
813 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
814 MFEM_VERIFY(n <= mem.Capacity(),
815 "Memory size " << mem.Capacity() << " < " << n << " vector size!");
816 MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
817 MFEM_VERIFY(data.Empty(),"Vector data is not empty");
818#if defined(_USE_DEVICE)
819 PetscBool isdevice;
820 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
821 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
822 ""); PCHKERRQ(x,ierr);
823 if (isdevice)
824 {
825 bool usedev = mem.DeviceIsValid() || (!rw && mem.UseDevice());
826 pdata.MakeAliasForSync(mem,0,n,rw,true,usedev);
827 if (usedev)
828 {
829 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
830 ierr = VecDevicePlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
831 }
832 else
833 {
834 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
835 ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
836 }
837 }
838 else
839#endif
840 {
841 mfem::real_t *w = rw ? mfem::HostReadWrite(mem,size) : mfem::HostWrite(mem,
842 size);
843 pdata.MakeAliasForSync(mem,0,n,rw,true,false);
844#if defined(PETSC_HAVE_DEVICE)
845 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
846#endif
847 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
848 }
849 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
850 data.MakeAlias(pdata,0,size);
851}
852
853void PetscParVector::PlaceMemory(const Memory<mfem::real_t>& mem)
854{
855 PetscInt n;
856
857 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
858 MFEM_VERIFY(n <= mem.Capacity(),
859 "Memory size " << mem.Capacity() << " < " << n << " vector size!");
860 MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
861 MFEM_VERIFY(data.Empty(),"Vector data is not empty");
862#if defined(_USE_DEVICE)
863 PetscBool isdevice;
864 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
865 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
866 ""); PCHKERRQ(x,ierr);
867 if (isdevice)
868 {
869 pdata.MakeAliasForSync(mem,0,n,mem.DeviceIsValid());
870 if (mem.DeviceIsValid())
871 {
872 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
873 ierr = VecDevicePlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
874 }
875 else
876 {
877 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
878 ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
879 }
880 }
881 else
882#endif
883 {
884 const mfem::real_t *w = mfem::HostRead(mem,size);
885 pdata.MakeAliasForSync(mem,0,n,false);
886#if defined(PETSC_HAVE_DEVICE)
887 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
888#endif
889 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
890 }
891 data.MakeAlias(pdata,0,size);
892 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
893 ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
894}
895
896void PetscParVector::ResetMemory()
897{
898 MFEM_VERIFY(pdata.IsAliasForSync(),"Vector data is not an alias");
899 MFEM_VERIFY(!pdata.Empty(),"Vector data is empty");
900 bool read = pdata.ReadRequested();
901 bool usedev = pdata.DeviceRequested();
902 bool write = pdata.WriteRequested();
903 /*
904 check for strange corner cases
905 - device memory used but somehow PETSc ended up putting up to date data on host
906 - host memory used but somehow PETSc ended up putting up to date data on device
907 */
908 if (write)
909 {
910 const PetscScalar *v;
911#if defined(PETSC_HAVE_DEVICE)
912 PetscOffloadMask mask;
913 ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
914 if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
915 (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
916#endif
917 {
918 ierr = VecGetArrayRead(x,&v); PCHKERRQ(x,ierr);
919 pdata.CopyFromHost(v, size);
920 ierr = VecRestoreArrayRead(x,&v); PCHKERRQ(x,ierr);
921 }
922 }
923 pdata.SyncBaseAndReset();
924 data.Reset();
925 if (read && !write) { ierr = VecLockReadPop(x); PCHKERRQ(x,ierr); }
926 if (usedev)
927 {
928#if defined(PETSC_HAVE_DEVICE)
929 ierr = VecDeviceResetArray(x); PCHKERRQ(x,ierr);
930#else
931 MFEM_VERIFY(false,"This should not happen");
932#endif
933 }
934 else
935 {
936 ierr = VecResetArray(x); PCHKERRQ(x,ierr);
937 }
938}
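// Illustrative pairing of PlaceMemory()/ResetMemory() (sketch), the
// device-aware analogue of PlaceArray()/ResetArray(); this is the pattern
// PetscParMatrix::Mult() below uses to wrap mfem::Vector data without copies:
//
//    mfem::Vector z(pv.Size());
//    pv.PlaceMemory(z.GetMemory(), /*rw=*/false); // write-only access
//    // ... let PETSc write into z through pv ...
//    pv.ResetMemory(); // copy back if needed, sync validity flags, detach z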
939
940void PetscParVector::Randomize(PetscInt seed)
941{
942 PetscRandom rctx = NULL;
943
944 if (seed)
945 {
946 ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
947 PCHKERRQ(x,ierr);
948 ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
949 ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
950 }
951 ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
952 ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
953}
954
955void PetscParVector::Print(const char *fname, bool binary) const
956{
957 if (fname)
958 {
959 PetscViewer view;
960
961 if (binary)
962 {
963 ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
964 FILE_MODE_WRITE,&view);
965 }
966 else
967 {
968 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
969 }
970 PCHKERRQ(x,ierr);
971 ierr = VecView(x,view); PCHKERRQ(x,ierr);
972 ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
973 }
974 else
975 {
976 ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
977 }
978}
979
980// PetscParMatrix methods
981
982PetscInt PetscParMatrix::GetRowStart() const
983{
984 PetscInt N;
985 ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
986 return N;
987}
988
989PetscInt PetscParMatrix::GetColStart() const
990{
991 PetscInt N;
992 ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
993 return N;
994}
995
996PetscInt PetscParMatrix::GetNumRows() const
997{
998 PetscInt N;
999 ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
1000 return N;
1001}
1002
1003PetscInt PetscParMatrix::GetNumCols() const
1004{
1005 PetscInt N;
1006 ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
1007 return N;
1008}
1009
1010PetscInt PetscParMatrix::M() const
1011{
1012 PetscInt N;
1013 ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
1014 return N;
1015}
1016
1017PetscInt PetscParMatrix::N() const
1018{
1019 PetscInt N;
1020 ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
1021 return N;
1022}
1023
1024PetscInt PetscParMatrix::NNZ() const
1025{
1026 MatInfo info;
1027 ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
1028 return (PetscInt)info.nz_used;
1029}
1030
1031void PetscParMatrix::SetBlockSizes(PetscInt rbs, PetscInt cbs)
1032{
1033 if (cbs < 0) { cbs = rbs; }
1034 ierr = MatSetBlockSizes(A,rbs,cbs); PCHKERRQ(A,ierr);
1035}
1036
1037void PetscParMatrix::Init()
1038{
1039 A = NULL;
1040 X = Y = NULL;
1041 height = width = 0;
1042}
1043
1044PetscParMatrix::PetscParMatrix()
1045{
1046 Init();
1047}
1048
1049PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
1050 const mfem::Array<PetscInt>& rows, const mfem::Array<PetscInt>& cols)
1051{
1052 Init();
1053
1054 Mat B = const_cast<PetscParMatrix&>(pB);
1055
1056 IS isr,isc;
1057 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
1058 rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
1059 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
1060 cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
1061 ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
1062 ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
1063 ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
1064
1065 height = GetNumRows();
1066 width = GetNumCols();
1067}
1068
1069PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
1070{
1071 Init();
1072 height = pa->Height();
1073 width = pa->Width();
1074 ConvertOperator(pa->GetComm(),*pa,&A,tid);
1075}
1076
1077PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
1078{
1079 Init();
1080 height = ha->Height();
1081 width = ha->Width();
1082 ConvertOperator(ha->GetComm(),*ha,&A,tid);
1083}
1084
1085PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
1086{
1087 Init();
1088 height = sa->Height();
1089 width = sa->Width();
1090 ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
1091}
1092
1093PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
1094 Operator::Type tid)
1095{
1096 Init();
1097 height = op->Height();
1098 width = op->Width();
1099 ConvertOperator(comm,*op,&A,tid);
1100}
1101
1102PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
1103 PetscInt *row_starts, SparseMatrix *diag,
1104 Operator::Type tid)
1105{
1106 Init();
1107 BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1108 tid==PETSC_MATAIJ,&A);
1109 SetUpForDevice();
1110 // update base class
1111 height = GetNumRows();
1112 width = GetNumCols();
1113}
1114
1115PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
1116 PetscInt global_num_cols, PetscInt *row_starts,
1117 PetscInt *col_starts, SparseMatrix *diag,
1118 Operator::Type tid)
1119{
1120 Init();
1121 BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1122 tid==PETSC_MATAIJ,&A);
1123 SetUpForDevice();
1124 // update base class
1125 height = GetNumRows();
1126 width = GetNumCols();
1127}
1128
1129PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
1130{
1131 if (A)
1132 {
1133 MPI_Comm comm = PetscObjectComm((PetscObject)A);
1134 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1135 if (X) { delete X; }
1136 if (Y) { delete Y; }
1137 X = Y = NULL;
1138 }
1139 height = B.Height();
1140 width = B.Width();
1141#if defined(PETSC_HAVE_HYPRE)
1142 ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
1143#else
1144 ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
1145#endif
1146 SetUpForDevice();
1147 return *this;
1148}
1149
1150PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
1151{
1152 if (A)
1153 {
1154 MPI_Comm comm = PetscObjectComm((PetscObject)A);
1155 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1156 if (X) { delete X; }
1157 if (Y) { delete Y; }
1158 X = Y = NULL;
1159 }
1160 height = B.Height();
1161 width = B.Width();
1162 ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1163 return *this;
1164}
1165
1166PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
1167{
1168 if (!A)
1169 {
1170 ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1171 }
1172 else
1173 {
1174 MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1175 MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1176 ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1177 }
1178 return *this;
1179}
1180
1181PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
1182{
1183 if (!A)
1184 {
1185 ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1186 ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
1187 }
1188 else
1189 {
1190 MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1191 MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1192 ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1193 }
1194 return *this;
1195}
1196
1197void PetscParMatrix::
1198BlockDiagonalConstructor(MPI_Comm comm,
1199 PetscInt *row_starts, PetscInt *col_starts,
1200 SparseMatrix *diag, bool assembled, Mat* Ad)
1201{
1202 Mat A;
1203 PetscInt lrsize,lcsize,rstart,cstart;
1204 PetscMPIInt myid = 0,commsize;
1205
1206 mpiierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,mpiierr);
1207 if (!HYPRE_AssumedPartitionCheck())
1208 {
1209 mpiierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,mpiierr);
1210 }
1211 lrsize = row_starts[myid+1]-row_starts[myid];
1212 rstart = row_starts[myid];
1213 lcsize = col_starts[myid+1]-col_starts[myid];
1214 cstart = col_starts[myid];
1215
1216 if (!assembled)
1217 {
1218 IS is;
1219 ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1220 ISLocalToGlobalMapping rl2g,cl2g;
1221 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1222 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1223 if (row_starts != col_starts)
1224 {
1225 ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
1226 CCHKERRQ(comm,ierr);
1227 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1228 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1229 }
1230 else
1231 {
1232 ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1233 cl2g = rl2g;
1234 }
1235
1236 // Create the PETSc object (MATIS format)
1237 ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1238 ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1239 PCHKERRQ(A,ierr);
1240 ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1241 ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1242 ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1243 ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
1244
1245 // Copy SparseMatrix into PETSc SeqAIJ format
1246 // pass through host for now
1247 Mat lA;
1248 ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1249 const int *II = diag->HostReadI();
1250 const int *JJ = diag->HostReadJ();
1251#if defined(PETSC_USE_64BIT_INDICES)
1252 PetscInt *pII,*pJJ;
1253 int m = diag->Height()+1, nnz = II[diag->Height()];
1254 ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1255 for (int i = 0; i < m; i++) { pII[i] = II[i]; }
1256 for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1257 ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1258 diag->HostReadData()); PCHKERRQ(lA,ierr);
1259 ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1260#else
1261 ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1262 diag->HostReadData()); PCHKERRQ(lA,ierr);
1263#endif
1264 }
1265 else
1266 {
1267 PetscScalar *da;
1268 PetscInt *dii,*djj,*oii,
1269 m = diag->Height()+1, nnz = diag->NumNonZeroElems();
1270
1271 diag->SortColumnIndices();
1272 // if we can take ownership of the SparseMatrix arrays, we can avoid this
1273 // step
1274 ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1275 ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1276 ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1277 if (sizeof(PetscInt) == sizeof(int))
1278 {
1279 ierr = PetscMemcpy(dii,diag->HostReadI(),m*sizeof(PetscInt));
1280 CCHKERRQ(PETSC_COMM_SELF,ierr);
1281 ierr = PetscMemcpy(djj,diag->HostReadJ(),nnz*sizeof(PetscInt));
1282 CCHKERRQ(PETSC_COMM_SELF,ierr);
1283 }
1284 else
1285 {
1286 const int *iii = diag->HostReadI();
1287 const int *jjj = diag->HostReadJ();
1288 for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
1289 for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1290 }
1291 ierr = PetscMemcpy(da,diag->HostReadData(),nnz*sizeof(PetscScalar));
1292 CCHKERRQ(PETSC_COMM_SELF,ierr);
1293 ierr = PetscCalloc1(m,&oii);
1294 CCHKERRQ(PETSC_COMM_SELF,ierr);
1295 if (commsize > 1)
1296 {
1297 ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1298 PETSC_DECIDE,
1299 dii,djj,da,oii,NULL,NULL,&A);
1300 CCHKERRQ(comm,ierr);
1301 }
1302 else
1303 {
1304 ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1305 CCHKERRQ(comm,ierr);
1306 }
1307
1308 void *ptrs[4] = {dii,djj,da,oii};
1309 const char *names[4] = {"_mfem_csr_dii",
1310 "_mfem_csr_djj",
1311 "_mfem_csr_da",
1312 "_mfem_csr_oii",
1313 };
1314 for (PetscInt i=0; i<4; i++)
1315 {
1316 PetscContainer c;
1317
1318 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1319 ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1320#if PETSC_VERSION_LT(3,23,0)
1321 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1322#else
1323 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
1324#endif
1325 CCHKERRQ(comm,ierr);
1326 ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
1327 CCHKERRQ(comm,ierr);
1328 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1329 }
1330 }
1331
1332 // Tell PETSc the matrix is ready to be used
1333 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1334 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1335
1336 *Ad = A;
1337}
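// Illustrative use of the square block-diagonal constructor above (sketch;
// nloc, first_row and glob_size are placeholders for the caller's data):
//
//    mfem::SparseMatrix diag(nloc, nloc);
//    // ... fill and Finalize() diag ...
//    PetscInt row_starts[2] = {first_row, first_row + nloc};
//    mfem::PetscParMatrix A(MPI_COMM_WORLD, glob_size, row_starts, &diag,
//                           mfem::Operator::PETSC_MATAIJ);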
1338
1339MPI_Comm PetscParMatrix::GetComm() const
1340{
1341 return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
1342}
1343
1344// TODO ADD THIS CONSTRUCTOR
1345//PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
1346// PetscInt glob_ncols, int *I, PetscInt *J,
1347// mfem::real_t *data, PetscInt *rows, PetscInt *cols)
1348//{
1349//}
1350
1351// TODO This should take a reference on op but how?
1352void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
1353{
1354 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1355 ierr = MatSetSizes(*A,op->Height(),op->Width(),
1356 PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1357 ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1358 ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
1359#if PETSC_VERSION_LT(3,24,0)
1360 ierr = MatShellSetOperation(*A,MATOP_MULT,
1361 (void (*)())__mfem_mat_shell_apply);
1362 PCHKERRQ(A,ierr);
1363 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1364 (void (*)())__mfem_mat_shell_apply_transpose);
1365 PCHKERRQ(A,ierr);
1366 ierr = MatShellSetOperation(*A,MATOP_COPY,
1367 (void (*)())__mfem_mat_shell_copy);
1368 PCHKERRQ(A,ierr);
1369 ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1370 (void (*)())__mfem_mat_shell_destroy);
1371#else
1372 ierr = MatShellSetOperation(*A,MATOP_MULT,
1373 (PetscErrorCodeFn*)__mfem_mat_shell_apply);
1374 PCHKERRQ(A,ierr);
1375 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1376 (PetscErrorCodeFn*)__mfem_mat_shell_apply_transpose);
1377 PCHKERRQ(A,ierr);
1378 ierr = MatShellSetOperation(*A,MATOP_COPY,
1379 (PetscErrorCodeFn*)__mfem_mat_shell_copy);
1380 PCHKERRQ(A,ierr);
1381 ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1382 (PetscErrorCodeFn*)__mfem_mat_shell_destroy);
1383#endif
1384#if defined(_USE_DEVICE)
1385 MemoryType mt = GetMemoryType(op->GetMemoryClass());
1386 if (mt == MemoryType::DEVICE || mt == MemoryType::MANAGED)
1387 {
1388 ierr = MatShellSetVecType(*A,PETSC_VECDEVICE); PCHKERRQ(A,ierr);
1389 ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1390 }
1391 else
1392 {
1393 ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1394 }
1395#endif
1396 PCHKERRQ(A,ierr);
1397 ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1398}
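// Illustrative effect of MakeWrapper() (sketch): any mfem::Operator can be
// exposed to PETSc as a matrix-free MATSHELL whose MatMult/MatMultTranspose
// call Operator::Mult()/MultTranspose(), e.g.
//
//    mfem::PetscParMatrix W(MPI_COMM_WORLD, &my_operator,
//                           mfem::Operator::PETSC_MATSHELL);
//
// where my_operator is any user-defined mfem::Operator; no matrix entries
// are ever formed.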
1399
1400void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
1401 Operator::Type tid)
1402{
1403 PetscParMatrix *pA = const_cast<PetscParMatrix *>
1404 (dynamic_cast<const PetscParMatrix *>(&op));
1405 HypreParMatrix *pH = const_cast<HypreParMatrix *>
1406 (dynamic_cast<const HypreParMatrix *>(&op));
1407 BlockOperator *pB = const_cast<BlockOperator *>
1408 (dynamic_cast<const BlockOperator *>(&op));
1409 IdentityOperator *pI = const_cast<IdentityOperator *>
1410 (dynamic_cast<const IdentityOperator *>(&op));
1411 SparseMatrix *pS = const_cast<SparseMatrix *>
1412 (dynamic_cast<const SparseMatrix *>(&op));
1413
1414 if (pA && tid == ANY_TYPE) // use same object and return
1415 {
1416 ierr = PetscObjectReference((PetscObject)(pA->A));
1417 CCHKERRQ(pA->GetComm(),ierr);
1418 *A = pA->A;
1419 return;
1420 }
1421
1422 PetscBool avoidmatconvert = PETSC_FALSE;
1423 if (pA) // we test for these types since MatConvert will fail
1424 {
1425 ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
1426 MATSHELL,"");
1427 CCHKERRQ(comm,ierr);
1428 }
1429 if (pA && !avoidmatconvert)
1430 {
1431 Mat At = NULL;
1432 PetscBool istrans;
1433#if PETSC_VERSION_LT(3,10,0)
1434 PetscBool ismatis;
1435#endif
1436
1437#if PETSC_VERSION_LT(3,18,0)
1438 ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
1439#else
1440 ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEVIRTUAL,
1441 &istrans);
1442#endif
1443 CCHKERRQ(pA->GetComm(),ierr);
1444 if (!istrans)
1445 {
1446 if (tid == pA->GetType()) // use same object and return
1447 {
1448 ierr = PetscObjectReference((PetscObject)(pA->A));
1449 CCHKERRQ(pA->GetComm(),ierr);
1450 *A = pA->A;
1451 return;
1452 }
1453#if PETSC_VERSION_LT(3,10,0)
1454 ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
1455 CCHKERRQ(pA->GetComm(),ierr);
1456#endif
1457 }
1458 else
1459 {
1460 ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
1461#if PETSC_VERSION_LT(3,10,0)
1462 ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
1463#endif
1464 CCHKERRQ(pA->GetComm(),ierr);
1465 }
1466
1467 // Try to convert
1468 if (tid == PETSC_MATAIJ)
1469 {
1470#if PETSC_VERSION_LT(3,10,0)
1471 if (ismatis)
1472 {
1473 if (istrans)
1474 {
1475 Mat B;
1476
1477 ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1478 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1479 ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1480 }
1481 else
1482 {
1483 ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
1484 PCHKERRQ(pA->A,ierr);
1485 }
1486 }
1487 else
1488#endif
1489 {
1490 PetscMPIInt size;
1491 mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
1492
1493 // call MatConvert and see if a converter is available
1494 if (istrans)
1495 {
1496 Mat B;
1497 ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1498 PCHKERRQ(pA->A,ierr);
1499 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1500 ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1501 }
1502 else
1503 {
1504 ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1505 PCHKERRQ(pA->A,ierr);
1506 }
1507 }
1508 }
1509 else if (tid == PETSC_MATIS)
1510 {
1511 if (istrans)
1512 {
1513 Mat B;
1514 ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1515 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1516 ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1517 }
1518 else
1519 {
1520 ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1521 }
1522 }
1523 else if (tid == PETSC_MATHYPRE)
1524 {
1525#if defined(PETSC_HAVE_HYPRE)
1526 if (istrans)
1527 {
1528 Mat B;
1529 ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1530 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1531 ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1532 }
1533 else
1534 {
1535 ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1536 }
1537#else
1538 MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1539#endif
1540 }
1541 else if (tid == PETSC_MATSHELL)
1542 {
1543 MakeWrapper(comm,&op,A);
1544 }
1545 else
1546 {
1547 MFEM_ABORT("Unsupported operator type conversion " << tid)
1548 }
1549 }
1550 else if (pH)
1551 {
1552 if (tid == PETSC_MATAIJ)
1553 {
1554#if defined(PETSC_HAVE_HYPRE)
1555 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1556 PETSC_USE_POINTER,A);
1557#else
1558 ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1559#endif
1560 CCHKERRQ(pH->GetComm(),ierr);
1561 }
1562 else if (tid == PETSC_MATIS)
1563 {
1564#if defined(PETSC_HAVE_HYPRE)
1565 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1566 PETSC_USE_POINTER,A);
1567#else
1568 ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1569#endif
1570 CCHKERRQ(pH->GetComm(),ierr);
1571 }
1572 else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
1573 {
1574#if defined(PETSC_HAVE_HYPRE)
1575 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1576 PETSC_USE_POINTER,A);
1577 CCHKERRQ(pH->GetComm(),ierr);
1578#else
1579 MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1580#endif
1581 }
1582 else if (tid == PETSC_MATSHELL)
1583 {
1584 MakeWrapper(comm,&op,A);
1585 }
1586 else
1587 {
1588 MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
1589 " is not implemented");
1590 }
1591 }
1592 else if (pB)
1593 {
1594 Mat *mats,*matsl2l = NULL;
1595 PetscInt i,j,nr,nc;
1596
1597 nr = pB->NumRowBlocks();
1598 nc = pB->NumColBlocks();
1599 ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1600 if (tid == PETSC_MATIS)
1601 {
1602 ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1603 }
1604 for (i=0; i<nr; i++)
1605 {
1606 PetscBool needl2l = PETSC_TRUE;
1607
1608 for (j=0; j<nc; j++)
1609 {
1610 if (!pB->IsZeroBlock(i,j))
1611 {
1612 ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
1613 if (tid == PETSC_MATIS && needl2l)
1614 {
1615 PetscContainer c;
1616 ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
1617 (PetscObject*)&c);
1618 PCHKERRQ(mats[i*nc+j],ierr);
1619 // special case for block operators: the local Vdofs should be
1620 // ordered as:
1621 // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
1622 // with m fields, Ni the number of Vdofs for the i-th field
1623 if (c)
1624 {
1625 Array<Mat> *l2l = NULL;
1626 ierr = PetscContainerGetPointer(c,(void**)&l2l);
1627 PCHKERRQ(c,ierr);
1628 MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
1629 << l2l->Size() << " for block row " << i );
1630 ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
1631 PCHKERRQ(c,ierr);
1632 matsl2l[i] = (*l2l)[0];
1633 needl2l = PETSC_FALSE;
1634 }
1635 }
1636 }
1637 }
1638 }
1639 ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1640 if (tid == PETSC_MATIS)
1641 {
1642 ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1643
1644 mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
1645 for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1646 ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1647
1648 PetscContainer c;
1649 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1650 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1651#if PETSC_VERSION_LT(3,23,0)
1652 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1653#else
1654 ierr = PetscContainerSetCtxDestroy(c,__mfem_matarray_container_destroy);
1655#endif
1656 PCHKERRQ(c,ierr);
1657 ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
1658 PCHKERRQ((*A),ierr);
1659 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1660 }
1661 for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1662 ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1663 }
1664 else if (pI && tid == PETSC_MATAIJ)
1665 {
1666 PetscInt rst;
1667
1668 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1669 ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
1670 PCHKERRQ(A,ierr);
1671 ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1672 ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1673 ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1674 ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1675 ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1676 for (PetscInt i = rst; i < rst+pI->Height(); i++)
1677 {
1678 ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1679 }
1680 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1681 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1682 }
1683 else if (pS)
1684 {
1685 if (tid == PETSC_MATSHELL)
1686 {
1687 MakeWrapper(comm,&op,A);
1688 }
1689 else
1690 {
1691 /* from SparseMatrix to SEQAIJ -> always pass through host for now */
1692 Mat B;
1693 PetscScalar *pdata;
1694 PetscInt *pii,*pjj,*oii;
1695 PetscMPIInt size;
1696
1697 int m = pS->Height();
1698 int n = pS->Width();
1699 const int *ii = pS->HostReadI();
1700 const int *jj = pS->HostReadJ();
1701 const mfem::real_t *data = pS->HostReadData();
1702
1703 ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1704 ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1705 ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1706 pii[0] = ii[0];
1707 for (int i = 0; i < m; i++)
1708 {
1709 bool issorted = true;
1710 pii[i+1] = ii[i+1];
1711 for (int j = ii[i]; j < ii[i+1]; j++)
1712 {
1713 pjj[j] = jj[j];
1714 if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1715 pdata[j] = data[j];
1716 }
1717 if (!issorted)
1718 {
1719 ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1720 CCHKERRQ(PETSC_COMM_SELF,ierr);
1721 }
1722 }
1723
1724 mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
1725 if (size == 1)
1726 {
1727 ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1728 CCHKERRQ(comm,ierr);
1729 oii = NULL;
1730 }
1731 else // block diagonal constructor
1732 {
1733 ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1734 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1735 PETSC_DECIDE,
1736 pii,pjj,pdata,oii,NULL,NULL,&B);
1737 CCHKERRQ(comm,ierr);
1738 }
1739 void *ptrs[4] = {pii,pjj,pdata,oii};
1740 const char *names[4] = {"_mfem_csr_pii",
1741 "_mfem_csr_pjj",
1742 "_mfem_csr_pdata",
1743 "_mfem_csr_oii"
1744 };
1745 for (int i=0; i<4; i++)
1746 {
1747 PetscContainer c;
1748
1749 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1750 ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1751#if PETSC_VERSION_LT(3,23,0)
1752 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1753#else
1754 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
1755#endif
1756 PCHKERRQ(B,ierr);
1757 ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
1758 PCHKERRQ(B,ierr);
1759 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1760 }
1761 if (tid == PETSC_MATAIJ)
1762 {
1763 *A = B;
1764 }
1765 else if (tid == PETSC_MATHYPRE)
1766 {
1767 ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1768 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1769 }
1770 else if (tid == PETSC_MATIS)
1771 {
1772 ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1773 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1774 }
1775 else
1776 {
1777 MFEM_ABORT("Unsupported operator type conversion " << tid)
1778 }
1779 }
1780 }
1781 else // fallback to general operator
1782 {
1783 MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
1784 "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1785 MakeWrapper(comm,&op,A);
1786 if (tid == PETSC_MATAIJ)
1787 {
1788 Mat B;
1789 PetscBool isaij;
1790
1791 ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1792 ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
1793 CCHKERRQ(comm,ierr);
1794 ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1795 if (!isaij)
1796 {
1797 ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1798 ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1799 }
1800 else
1801 {
1802 *A = B;
1803 }
1804 }
1805 }
1806 SetUpForDevice();
1807}
1808
1809void PetscParMatrix::Destroy()
1810{
1811 if (A != NULL)
1812 {
1813 MPI_Comm comm = MPI_COMM_NULL;
1814 ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
1815 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1816 }
1817 delete X;
1818 delete Y;
1819 X = Y = NULL;
1820}
1821
1822PetscParMatrix::PetscParMatrix(petsc::Mat a, bool ref)
1823{
1824 if (ref)
1825 {
1826 ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
1827 }
1828 Init();
1829 A = a;
1830 height = GetNumRows();
1831 width = GetNumCols();
1832}
1833
1834void PetscParMatrix::SetMat(petsc::Mat A_)
1835{
1836 if (A_ == A) { return; }
1837 Destroy();
1838 ierr = PetscObjectReference((PetscObject)A_); PCHKERRQ(A_,ierr);
1839 A = A_;
1840 height = GetNumRows();
1841 width = GetNumCols();
1842}
1843
1844void PetscParMatrix::SetUpForDevice()
1845{
1846#if !defined(_USE_DEVICE)
1847 return;
1848#else
1849 if (!A || (!Device::Allows(Backend::CUDA_MASK) &&
1850 !Device::Allows(Backend::HIP_MASK)))
1851 {
1852 if (A) { ierr = MatBindToCPU(A, PETSC_TRUE); PCHKERRQ(A,ierr); }
1853 return;
1854 }
1855 PetscBool ismatis,isnest,isaij;
1856 ierr = PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
1857 PCHKERRQ(A,ierr);
1858 ierr = PetscObjectTypeCompare((PetscObject)A,MATNEST,&isnest);
1859 PCHKERRQ(A,ierr);
1860 Mat tA = A;
1861 if (ismatis)
1862 {
1863 ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1864 ierr = PetscObjectTypeCompare((PetscObject)tA,MATNEST,&isnest);
1865 PCHKERRQ(tA,ierr);
1866 }
1867 if (isnest)
1868 {
1869 PetscInt n,m;
1870 Mat **sub;
1871 ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1872 bool dvec = false;
1873 for (PetscInt i = 0; i < n; i++)
1874 {
1875 for (PetscInt j = 0; j < m; j++)
1876 {
1877 if (sub[i][j])
1878 {
1879 bool expT = false;
1880 Mat sA = sub[i][j];
1881 ierr = PetscObjectTypeCompareAny((PetscObject)sA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
1882 PCHKERRQ(sA,ierr);
1883 if (isaij)
1884 {
1885 ierr = MatSetType(sA,PETSC_MATAIJDEVICE); PCHKERRQ(sA,ierr);
1886 dvec = true;
1887 expT = true;
1888 }
1889 if (expT)
1890 {
1891 ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1892 PETSC_TRUE); PCHKERRQ(sA,ierr);
1893 }
1894 }
1895 }
1896 }
1897 if (dvec)
1898 {
1899 ierr = MatSetVecType(tA,PETSC_VECDEVICE); PCHKERRQ(tA,ierr);
1900 }
1901 }
1902 else
1903 {
1904 bool expT = false;
1905 ierr = PetscObjectTypeCompareAny((PetscObject)tA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
1906 PCHKERRQ(tA,ierr);
1907 if (isaij)
1908 {
1909 ierr = MatSetType(tA,PETSC_MATAIJDEVICE); PCHKERRQ(tA,ierr);
1910 expT = true;
1911 }
1912 if (expT)
1913 {
1914 ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1915 PETSC_TRUE); PCHKERRQ(tA,ierr);
1916 }
1917 }
1918#endif
1919}
1920
1921// Computes y = alpha * A * x + beta * y
1922// or y = alpha * A^T* x + beta * y
1923static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
1924 bool transpose)
1925{
1926 PetscErrorCode (*f)(Mat,Vec,Vec);
1927 PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
1928 if (transpose)
1929 {
1930 f = MatMultTranspose;
1931 fadd = MatMultTransposeAdd;
1932 }
1933 else
1934 {
1935 f = MatMult;
1936 fadd = MatMultAdd;
1937 }
1938 if (a != 0.)
1939 {
1940 if (b != 0.)
1941 {
1942 ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1943 ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1944 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1945 }
1946 else
1947 {
1948 ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1949 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1950 }
1951 }
1952 else
1953 {
1954 if (b == 1.)
1955 {
1956 // do nothing
1957 }
1958 else if (b != 0.)
1959 {
1960 ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1961 }
1962 else
1963 {
1964 ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1965 }
1966 }
1967}
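// Editorial note on the scaling trick above: for a != 0 and b != 0 the kernel
// computes y <- a*(A*x + (b/a)*y) = a*A*x + b*y, so a single MatMultAdd() and
// two VecScale() calls suffice and no extra work vector is needed.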
1968
1969void PetscParMatrix::MakeRef(const PetscParMatrix &master)
1970{
1971 ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
1972 Destroy();
1973 Init();
1974 A = master.A;
1975 height = master.height;
1976 width = master.width;
1977}
1978
1979PetscParVector * PetscParMatrix::GetX() const
1980{
1981 if (!X)
1982 {
1983 MFEM_VERIFY(A,"Mat not present");
1984 X = new PetscParVector(*this,false,false); PCHKERRQ(A,ierr);
1985 }
1986 return X;
1987}
1988
1989PetscParVector * PetscParMatrix::GetY() const
1990{
1991 if (!Y)
1992 {
1993 MFEM_VERIFY(A,"Mat not present");
1994 Y = new PetscParVector(*this,true,false); PCHKERRQ(A,ierr);
1995 }
1996 return Y;
1997}
1998
1999PetscParMatrix * PetscParMatrix::Transpose(bool action)
2000{
2001 Mat B;
2002 if (action)
2003 {
2004 ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
2005 }
2006 else
2007 {
2008 ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
2009 }
2010 return new PetscParMatrix(B,false);
2011}
2012
2013void PetscParMatrix::operator*=(mfem::real_t s)
2014{
2015 ierr = MatScale(A,s); PCHKERRQ(A,ierr);
2016}
2017
2018void PetscParMatrix::Mult(mfem::real_t a, const Vector &x, mfem::real_t b,
2019 Vector &y) const
2020{
2021 MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
2022 << ", expected size = " << Width());
2023 MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
2024 << ", expected size = " << Height());
2025
2026 PetscParVector *XX = GetX();
2027 PetscParVector *YY = GetY();
2028 bool rw = (b != 0.0);
2029 XX->PlaceMemory(x.GetMemory());
2030 YY->PlaceMemory(y.GetMemory(),rw);
2031 MatMultKernel(A,a,XX->x,b,YY->x,false);
2032 XX->ResetMemory();
2033 YY->ResetMemory();
2034}
2035
2036void PetscParMatrix::MultTranspose(mfem::real_t a, const Vector &x,
2037 mfem::real_t b,
2038 Vector &y) const
2039{
2040 MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
2041 << ", expected size = " << Height());
2042 MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
2043 << ", expected size = " << Width());
2044
2045 PetscParVector *XX = GetX();
2046 PetscParVector *YY = GetY();
2047 bool rw = (b != 0.0);
2048 XX->PlaceMemory(y.GetMemory(),rw);
2049 YY->PlaceMemory(x.GetMemory());
2050 MatMultKernel(A,a,YY->x,b,XX->x,true);
2051 XX->ResetMemory();
2052 YY->ResetMemory();
2053}
2054
2055void PetscParMatrix::Print(const char *fname, bool binary) const
2056{
2057 if (fname)
2058 {
2059 PetscViewer view;
2060
2061 if (binary)
2062 {
2063 ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
2064 FILE_MODE_WRITE,&view);
2065 }
2066 else
2067 {
2068 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
2069 }
2070 PCHKERRQ(A,ierr);
2071 ierr = MatView(A,view); PCHKERRQ(A,ierr);
2072 ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
2073 }
2074 else
2075 {
2076 ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
2077 }
2078}
2079
2080void PetscParMatrix::ScaleRows(const Vector &s)
2081 {
2082 MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2083 << ", expected size = " << Height());
2084
2085 PetscParVector *YY = GetY();
2086 YY->PlaceMemory(s.GetMemory());
2087 ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
2088 YY->ResetMemory();
2089}
2090
2091void PetscParMatrix::ScaleCols(const Vector &s)
2092 {
2093 MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2094 << ", expected size = " << Width());
2095
2096 PetscParVector *XX = GetX();
2097 XX->PlaceMemory(s.GetMemory());
2098 ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
2099 XX->ResetMemory();
2100}
2101
2102void PetscParMatrix::Shift(mfem::real_t s)
2103 {
2104 ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
2105}
2106
2107void PetscParMatrix::Shift(const Vector &s)
2108 {
2109 // for matrices with square diagonal blocks only
2110 MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2111 << ", expected size = " << Height());
2112 MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2113 << ", expected size = " << Width());
2114
2115 PetscParVector *XX = GetX();
2116 XX->PlaceMemory(s.GetMemory());
2117 ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2118 XX->ResetMemory();
2119}
2120
2121PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
2122 PetscParMatrix *P)
2123{
2124 MFEM_VERIFY(A->Width() == P->Height(),
2125 "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
2126 " differs from number of local rows of P " << P->Height());
2127 MFEM_VERIFY(A->Height() == R->Width(),
2128 "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
2129 " differs from number of local cols of R " << R->Width());
2130 Mat B;
2131 ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2132 PCHKERRQ(*R,ierr);
2133 return new PetscParMatrix(B);
2134}
2135
2136PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
2137 {
2138 Mat pA = *A,pP = *P,pRt = *Rt;
2139 Mat B;
2140 PetscBool Aismatis,Pismatis,Rtismatis;
2141
2142 MFEM_VERIFY(A->Width() == P->Height(),
2143 "Petsc RAP: Number of local cols of A " << A->Width() <<
2144 " differs from number of local rows of P " << P->Height());
2145 MFEM_VERIFY(A->Height() == Rt->Height(),
2146 "Petsc RAP: Number of local rows of A " << A->Height() <<
2147 " differs from number of local rows of Rt " << Rt->Height());
2148 ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
2149 PCHKERRQ(pA,ierr);
2150 ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
2151 PCHKERRQ(pA,ierr);
2152 ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
2153 PCHKERRQ(pA,ierr);
2154 if (Aismatis &&
2155 Pismatis &&
2156 Rtismatis) // handle special case (this code will eventually go into PETSc)
2157 {
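 // For unassembled MATIS operators, compute the triple product on the
 // local subdomain matrices and keep the result in unassembled format.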
2158 Mat lA,lP,lB,lRt;
2159 ISLocalToGlobalMapping cl2gP,cl2gRt;
2160 PetscInt rlsize,clsize,rsize,csize;
2161
2162 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2163 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2164 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2165 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2166 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2167 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2168 ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
2169 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2170 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2171 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2172 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2173 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2174 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2175 if (lRt == lP)
2176 {
2177 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2178 PCHKERRQ(lA,ierr);
2179 }
2180 else
2181 {
2182 Mat lR;
2183 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2184 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2185 PCHKERRQ(lRt,ierr);
2186 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2187 }
2188
2189 // attach the lRt matrix to the subdomain local matrix;
2190 // it may be used when markers on vdofs have to be mapped onto
2191 // subdomain true dofs
2192 {
2193 mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
2194 ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
2195 (*vmatsl2l)[0] = lRt;
2196
2197 PetscContainer c;
2198 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
2199 PCHKERRQ(B,ierr);
2200 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2201#if PETSC_VERSION_LT(3,23,0)
2202 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2203#else
2204 ierr = PetscContainerSetCtxDestroy(c,__mfem_matarray_container_destroy);
2205#endif
2206 PCHKERRQ(c,ierr);
2207 ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
2208 PCHKERRQ(B,ierr);
2209 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2210 }
2211
2212 // Set local problem
2213 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2214 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2215 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2216 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2217 }
2218 else // PETSc raises an error if this triple product is not supported
2219 {
2220 if (pP == pRt)
2221 {
2222 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2223 PCHKERRQ(pA,ierr);
2224 }
2225 else
2226 {
2227 Mat pR;
2228 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2229 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2230 PCHKERRQ(pRt,ierr);
2231 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2232 }
2233 }
2234 return new PetscParMatrix(B);
2235}
2236
2237PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
2238 {
2239 PetscParMatrix *out = RAP(P,A,P);
2240 return out;
2241}
2242
2243PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
2244 {
2245 PetscParMatrix *out,*A;
2246#if defined(PETSC_HAVE_HYPRE)
2247 A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
2248#else
2249 A = new PetscParMatrix(hA);
2250#endif
2251 out = RAP(P,A,P);
2252 delete A;
2253 return out;
2254}
2255
2256
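// Computes the parallel matrix-matrix product A*B; the caller owns the
// returned matrix.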
2257PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
2258 {
2259 Mat AB;
2260
2261 ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2262 CCHKERRQ(A->GetComm(),ierr);
2263 return new PetscParMatrix(AB);
2264}
2265
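// Returns a new matrix Ae holding the eliminated rows and columns, so that
// the original operator equals the modified A plus Ae.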
2266PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
2267 {
2268 Mat Ae;
2269
2270 PetscParVector dummy(GetComm(),0);
2271 ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2272 EliminateRowsCols(rows_cols,dummy,dummy);
2273 ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2274 return new PetscParMatrix(Ae);
2275}
2276
2277void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2278 const HypreParVector &X,
2279 HypreParVector &B,
2280 mfem::real_t diag)
2281{
2282 MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2283}
2284
2285void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2286 const PetscParVector &X,
2287 PetscParVector &B,
2288 mfem::real_t diag)
2289{
2290 PetscInt M,N;
2291 ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2292 MFEM_VERIFY(M == N,"Rectangular case unsupported");
2293
2294 // TODO: what if a diagonal term is not present?
2295 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2296
2297 // rows need to be in global numbering
2298 PetscInt rst;
2299 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2300
2301 IS dir;
2302 ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2303 if (!X.GlobalSize() && !B.GlobalSize())
2304 {
2305 ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2306 }
2307 else
2308 {
2309 ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2310 }
2311 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2312}
2313
2314void PetscParMatrix::EliminateRows(const Array<int> &rows)
2315 {
2316 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2317
2318 // rows need to be in global numbering
2319 PetscInt rst;
2320 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2321
2322 IS dir;
2323 ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2324 ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2325 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2326}
2327
2328Mat PetscParMatrix::ReleaseMat(bool dereference)
2329{
2330
2331 Mat B = A;
2332 if (dereference)
2333 {
2334 MPI_Comm comm = GetComm();
2335 ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
2336 }
2337 A = NULL;
2338 return B;
2339}
2340
2341Operator::Type PetscParMatrix::GetType() const
2342 {
2343 PetscBool ok;
2344 MFEM_VERIFY(A, "no associated PETSc Mat object");
2345 PetscObject oA = (PetscObject)(this->A);
2346 // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to -> PETSC_MATAIJ
2347 ierr = PetscObjectBaseTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2348 if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2349 ierr = PetscObjectBaseTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2350 if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2351 ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2352 if (ok == PETSC_TRUE) { return PETSC_MATIS; }
2353 ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2354 if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
2355 ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2356 if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
2357#if defined(PETSC_HAVE_HYPRE)
2358 ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2359 if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
2360#endif
2361 return PETSC_MATGENERIC;
2362}
2363
2364void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
2365 const Array<int> &ess_dof_list,
2366 const Vector &X, Vector &B)
2367{
2368 const PetscScalar *array;
2369 Mat pA = const_cast<PetscParMatrix&>(A);
2370
2371 // B -= Ae*X
2372 Ae.Mult(-1.0, X, 1.0, B);
2373
2374 Vec diag = const_cast<PetscParVector&>((*A.GetX()));
2375 ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2376 ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
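 // after elimination, row r of A is diag(r)*e_r, so setting
 // B(r) = diag(r)*X(r) makes the linear system reproduce the prescribed
 // value X(r) at each essential dof r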
2377 for (int i = 0; i < ess_dof_list.Size(); i++)
2378 {
2379 int r = ess_dof_list[i];
2380 B(r) = array[r] * X(r);
2381 }
2382 ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2383}
2384
2385// PetscSolver methods
2386
2387PetscSolver::PetscSolver() : clcustom(false)
2388{
2389 obj = NULL;
2390 B = X = NULL;
2391 cid = -1;
2392 operatorset = false;
2393 bchandler = NULL;
2394 private_ctx = NULL;
2395}
2396
2397PetscSolver::~PetscSolver()
2398 {
2399 delete B;
2400 delete X;
2401 FreePrivateContext();
2402}
2403
2404void PetscSolver::SetTol(mfem::real_t tol)
2405 {
2406 SetRelTol(tol);
2407}
2408
2409void PetscSolver::SetRelTol(mfem::real_t tol)
2410 {
2411 if (cid == KSP_CLASSID)
2412 {
2413 KSP ksp = (KSP)obj;
2414 ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2415 }
2416 else if (cid == SNES_CLASSID)
2417 {
2418 SNES snes = (SNES)obj;
2419 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2420 PETSC_DEFAULT);
2421 }
2422 else if (cid == TS_CLASSID)
2423 {
2424 TS ts = (TS)obj;
2425 ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2426 }
2427 else
2428 {
2429 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2430 }
2431 PCHKERRQ(obj,ierr);
2432}
2433
2434void PetscSolver::SetAbsTol(mfem::real_t tol)
2435 {
2436 if (cid == KSP_CLASSID)
2437 {
2438 KSP ksp = (KSP)obj;
2439 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2440 }
2441 else if (cid == SNES_CLASSID)
2442 {
2443 SNES snes = (SNES)obj;
2444 ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2445 PETSC_DEFAULT);
2446 }
2447 else if (cid == TS_CLASSID)
2448 {
2449 TS ts = (TS)obj;
2450 ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2451 }
2452 else
2453 {
2454 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2455 }
2456 PCHKERRQ(obj,ierr);
2457}
2458
2459void PetscSolver::SetMaxIter(int max_iter)
2460{
2461 if (cid == KSP_CLASSID)
2462 {
2463 KSP ksp = (KSP)obj;
2464 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2465 max_iter);
2466 }
2467 else if (cid == SNES_CLASSID)
2468 {
2469 SNES snes = (SNES)obj;
2470 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2471 max_iter,PETSC_DEFAULT);
2472 }
2473 else if (cid == TS_CLASSID)
2474 {
2475 TS ts = (TS)obj;
2476 ierr = TSSetMaxSteps(ts,max_iter);
2477 }
2478 else
2479 {
2480 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2481 }
2482 PCHKERRQ(obj,ierr);
2483}
2484
2485
2486void PetscSolver::SetPrintLevel(int plev)
2487 {
2488 typedef PetscErrorCode (*myPetscFunc)(void**);
2489 PetscViewerAndFormat *vf = NULL;
2490 PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
2491
2492 if (plev > 0)
2493 {
2494 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2495 PCHKERRQ(obj,ierr);
2496 }
2497 if (cid == KSP_CLASSID)
2498 {
2499 // there are many other options, see the function KSPSetFromOptions() in
2500 // src/ksp/ksp/interface/itcl.c
2501 typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
2502 KSP ksp = (KSP)obj;
2503 if (plev >= 0)
2504 {
2505 ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2506 }
2507 if (plev == 1)
2508 {
2509#if PETSC_VERSION_LT(3,15,0)
2510 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2511#else
2512 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2513#endif
2514 (myPetscFunc)PetscViewerAndFormatDestroy);
2515 PCHKERRQ(ksp,ierr);
2516 }
2517 else if (plev > 1)
2518 {
2519 ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2520 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2521 (myPetscFunc)PetscViewerAndFormatDestroy);
2522 PCHKERRQ(ksp,ierr);
2523 if (plev > 2)
2524 {
2525 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2526 PCHKERRQ(viewer,ierr);
2527#if PETSC_VERSION_LT(3,15,0)
2528 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2529#else
2530 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2531#endif
2532 (myPetscFunc)PetscViewerAndFormatDestroy);
2533 PCHKERRQ(ksp,ierr);
2534 }
2535 }
2536 }
2537 else if (cid == SNES_CLASSID)
2538 {
2539 typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
2540 SNES snes = (SNES)obj;
2541 if (plev >= 0)
2542 {
2543 ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2544 }
2545 if (plev > 0)
2546 {
2547 ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2548 (myPetscFunc)PetscViewerAndFormatDestroy);
2549 PCHKERRQ(snes,ierr);
2550 }
2551 }
2552 else if (cid == TS_CLASSID)
2553 {
2554 TS ts = (TS)obj;
2555 if (plev >= 0)
2556 {
2557 ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2558 }
2559 }
2560 else
2561 {
2562 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2563 }
2564}
2565
2566MPI_Comm PetscSolver::GetComm() const
2567{
2568 return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
2569}
2570
2571void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
2572 {
2573 __mfem_monitor_ctx *monctx;
2574 ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2575 monctx->solver = this;
2576 monctx->monitor = ctx;
2577 if (cid == KSP_CLASSID)
2578 {
2579 ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
2580 __mfem_monitor_ctx_destroy);
2581 PCHKERRQ(obj,ierr);
2582 }
2583 else if (cid == SNES_CLASSID)
2584 {
2585 ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
2586 __mfem_monitor_ctx_destroy);
2587 PCHKERRQ(obj,ierr);
2588 }
2589 else if (cid == TS_CLASSID)
2590 {
2591 ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
2592 __mfem_monitor_ctx_destroy);
2593 PCHKERRQ(obj,ierr);
2594 }
2595 else
2596 {
2597 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2598 }
2599}
2600
2601void PetscSolver::SetBCHandler(PetscBCHandler *bch)
2602 {
2603 bchandler = bch;
2604 if (cid == SNES_CLASSID)
2605 {
2606 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
2607 snes_ctx->bchandler = bchandler;
2608 }
2609 else if (cid == TS_CLASSID)
2610 {
2611 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
2612 ts_ctx->bchandler = bchandler;
2613 }
2614 else
2615 {
2616 MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2617 }
2618}
2619
2620void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
2621 {
2622 PC pc = NULL;
2623 if (cid == TS_CLASSID)
2624 {
2625 SNES snes;
2626 KSP ksp;
2627
2628 ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
2629 ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2630 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2631 }
2632 else if (cid == SNES_CLASSID)
2633 {
2634 KSP ksp;
2635
2636 ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
2637 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2638 }
2639 else if (cid == KSP_CLASSID)
2640 {
2641 ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
2642 }
2643 else if (cid == PC_CLASSID)
2644 {
2645 pc = (PC)obj;
2646 }
2647 else
2648 {
2649 MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
2650 }
2651 if (factory)
2652 {
2653 ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2654 }
2655 else
2656 {
2657 ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2658 }
2659}
2660
2661void PetscSolver::Customize(bool customize) const
2662{
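 // command-line options are applied at most once; when customize is false,
 // they are marked as processed so *SetFromOptions is never called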
2663 if (!customize) { clcustom = true; }
2664 if (!clcustom)
2665 {
2666 if (cid == PC_CLASSID)
2667 {
2668 PC pc = (PC)obj;
2669 ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2670 }
2671 else if (cid == KSP_CLASSID)
2672 {
2673 KSP ksp = (KSP)obj;
2674 ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2675 }
2676 else if (cid == SNES_CLASSID)
2677 {
2678 SNES snes = (SNES)obj;
2679 ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2680 }
2681 else if (cid == TS_CLASSID)
2682 {
2683 TS ts = (TS)obj;
2684 ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2685 }
2686 else
2687 {
2688 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2689 }
2690 }
2691 clcustom = true;
2692}
2693
2694int PetscSolver::GetConverged()
2695 {
2696 if (cid == KSP_CLASSID)
2697 {
2698 KSP ksp = (KSP)obj;
2699 KSPConvergedReason reason;
2700 ierr = KSPGetConvergedReason(ksp,&reason);
2701 PCHKERRQ(ksp,ierr);
2702 return reason > 0 ? 1 : 0;
2703 }
2704 else if (cid == SNES_CLASSID)
2705 {
2706 SNES snes = (SNES)obj;
2707 SNESConvergedReason reason;
2708 ierr = SNESGetConvergedReason(snes,&reason);
2709 PCHKERRQ(snes,ierr);
2710 return reason > 0 ? 1 : 0;
2711 }
2712 else if (cid == TS_CLASSID)
2713 {
2714 TS ts = (TS)obj;
2715 TSConvergedReason reason;
2716 ierr = TSGetConvergedReason(ts,&reason);
2717 PCHKERRQ(ts,ierr);
2718 return reason > 0 ? 1 : 0;
2719 }
2720 else
2721 {
2722 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2723 return -1;
2724 }
2725}
2726
2727int PetscSolver::GetNumIterations()
2728 {
2729 if (cid == KSP_CLASSID)
2730 {
2731 KSP ksp = (KSP)obj;
2732 PetscInt its;
2733 ierr = KSPGetIterationNumber(ksp,&its);
2734 PCHKERRQ(ksp,ierr);
2735 return its;
2736 }
2737 else if (cid == SNES_CLASSID)
2738 {
2739 SNES snes = (SNES)obj;
2740 PetscInt its;
2741 ierr = SNESGetIterationNumber(snes,&its);
2742 PCHKERRQ(snes,ierr);
2743 return its;
2744 }
2745 else if (cid == TS_CLASSID)
2746 {
2747 TS ts = (TS)obj;
2748 PetscInt its;
2749 ierr = TSGetStepNumber(ts,&its);
2750 PCHKERRQ(ts,ierr);
2751 return its;
2752 }
2753 else
2754 {
2755 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2756 return -1;
2757 }
2758}
2759
2760mfem::real_t PetscSolver::GetFinalNorm()
2761 {
2762 if (cid == KSP_CLASSID)
2763 {
2764 KSP ksp = (KSP)obj;
2765 PetscReal norm;
2766 ierr = KSPGetResidualNorm(ksp,&norm);
2767 PCHKERRQ(ksp,ierr);
2768 return norm;
2769 }
2770 if (cid == SNES_CLASSID)
2771 {
2772 SNES snes = (SNES)obj;
2773 PetscReal norm;
2774 ierr = SNESGetFunctionNorm(snes,&norm);
2775 PCHKERRQ(snes,ierr);
2776 return norm;
2777 }
2778 else
2779 {
2780 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2781 return PETSC_MAX_REAL;
2782 }
2783}
2784
2785void PetscSolver::CreatePrivateContext()
2786 {
2787 FreePrivateContext();
2788 if (cid == SNES_CLASSID)
2789 {
2790 __mfem_snes_ctx *snes_ctx;
2791 ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2792 snes_ctx->op = NULL;
2793 snes_ctx->bchandler = NULL;
2794 snes_ctx->work = NULL;
2795 snes_ctx->jacType = Operator::PETSC_MATAIJ;
2796 private_ctx = (void*) snes_ctx;
2797 }
2798 else if (cid == TS_CLASSID)
2799 {
2800 __mfem_ts_ctx *ts_ctx;
2801 ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2802 ts_ctx->op = NULL;
2803 ts_ctx->bchandler = NULL;
2804 ts_ctx->work = NULL;
2805 ts_ctx->work2 = NULL;
2806 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2807 ts_ctx->cached_ijacstate = -1;
2808 ts_ctx->cached_rhsjacstate = -1;
2809 ts_ctx->cached_splits_xstate = -1;
2810 ts_ctx->cached_splits_xdotstate = -1;
2811 ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2812 ts_ctx->jacType = Operator::PETSC_MATAIJ;
2813 private_ctx = (void*) ts_ctx;
2814 }
2815}
2816
2817void PetscSolver::FreePrivateContext()
2818 {
2819 if (!private_ctx) { return; }
2820 // free private context's owned objects
2821 if (cid == SNES_CLASSID)
2822 {
2823 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2824 delete snes_ctx->work;
2825 }
2826 else if (cid == TS_CLASSID)
2827 {
2828 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2829 delete ts_ctx->work;
2830 delete ts_ctx->work2;
2831 }
2832 ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2833}
2834
2835// PetscBCHandler methods
2836
2837PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2838 enum PetscBCHandler::Type type_)
2839 : bctype(type_), setup(false), eval_t(0.0),
2840 eval_t_cached(std::numeric_limits<mfem::real_t>::min())
2841{
2842 SetTDofs(ess_tdof_list);
2843}
2844
2845void PetscBCHandler::SetTDofs(Array<int>& list)
2846 {
2847 ess_tdof_list.SetSize(list.Size());
2848 ess_tdof_list.Assign(list);
2849 setup = false;
2850}
2851
2852void PetscBCHandler::SetUp(PetscInt n)
2853 {
2854 if (setup) { return; }
2855 if (bctype == CONSTANT)
2856 {
2857 eval_g.SetSize(n);
2858 this->Eval(eval_t,eval_g);
2859 eval_t_cached = eval_t;
2860 }
2861 else if (bctype == TIME_DEPENDENT)
2862 {
2863 eval_g.SetSize(n);
2864 }
2865 setup = true;
2866}
2867
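// Applies the boundary values to y = x: essential dofs are overwritten with
// zero (ZERO type) or with the cached boundary function values, which are
// re-evaluated only when eval_t differs from the cached time.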
2868void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2869 {
2870 (*this).SetUp(x.Size());
2871 y = x;
2872 if (bctype == ZERO)
2873 {
2874 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2875 {
2876 y[ess_tdof_list[i]] = 0.0;
2877 }
2878 }
2879 else
2880 {
2881 if (bctype != CONSTANT && eval_t != eval_t_cached)
2882 {
2883 Eval(eval_t,eval_g);
2884 eval_t_cached = eval_t;
2885 }
2886 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2887 {
2888 y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2889 }
2890 }
2891}
2892
2893void PetscBCHandler::ApplyBC(Vector &x)
2894 {
2895 (*this).SetUp(x.Size());
2896 if (bctype == ZERO)
2897 {
2898 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2899 {
2900 x[ess_tdof_list[i]] = 0.0;
2901 }
2902 }
2903 else
2904 {
2905 if (bctype != CONSTANT && eval_t != eval_t_cached)
2906 {
2907 Eval(eval_t,eval_g);
2908 eval_t_cached = eval_t;
2909 }
2910 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2911 {
2912 x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2913 }
2914 }
2915}
2916
2917void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2918 {
2919 (*this).SetUp(x.Size());
2920 if (bctype == ZERO)
2921 {
2922 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2923 {
2924 y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2925 }
2926 }
2927 else
2928 {
2929 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2930 {
2931 y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2932 }
2933 }
2934}
2935
2936void PetscBCHandler::Zero(Vector &x)
2937 {
2938 (*this).SetUp(x.Size());
2939 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2940 {
2941 x[ess_tdof_list[i]] = 0.0;
2942 }
2943}
2944
2945void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2946 {
2947 (*this).SetUp(x.Size());
2948 y = x;
2949 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2950 {
2951 y[ess_tdof_list[i]] = 0.0;
2952 }
2953}
2954
2955// PetscLinearSolver methods
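//
// Minimal usage sketch (assumes an already assembled operator A and
// conforming vectors B and X; constructor defaults as declared in petsc.hpp):
//
//   PetscLinearSolver solver(MPI_COMM_WORLD);
//   solver.SetOperator(A);   // wraps or converts A for PETSc
//   solver.SetTol(1e-8);
//   solver.SetMaxIter(200);
//   solver.Mult(B, X);       // solves A X = B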
2956
2957PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2958 bool wrapin, bool iter_mode)
2959 : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2960{
2961 KSP ksp;
2962 ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2963 obj = (PetscObject)ksp;
2964 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2965 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2966 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2967 PCHKERRQ(ksp, ierr);
2968}
2969
2970PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2971 const std::string &prefix, bool iter_mode)
2972 : PetscSolver(), Solver(0,iter_mode), wrap(false)
2973{
2974 KSP ksp;
2975 ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2976 obj = (PetscObject)ksp;
2977 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2978 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2979 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2980 PCHKERRQ(ksp, ierr);
2981 SetOperator(A);
2982}
2983
2984PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2985 const std::string &prefix, bool iter_mode)
2986 : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2987{
2988 KSP ksp;
2989 ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2990 obj = (PetscObject)ksp;
2991 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2992 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2993 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2994 PCHKERRQ(ksp, ierr);
2995 SetOperator(A);
2996}
2997
2998void PetscLinearSolver::SetOperator(const Operator &op)
2999 {
3000 const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
3001 PetscParMatrix *pA = const_cast<PetscParMatrix *>
3002 (dynamic_cast<const PetscParMatrix *>(&op));
3003 const Operator *oA = dynamic_cast<const Operator *>(&op);
3004
3005 // update base classes: Operator, Solver, PetscLinearSolver
3006 bool delete_pA = false;
3007 if (!pA)
3008 {
3009 if (hA)
3010 {
3011 // Create a MATSHELL object or convert into a format suitable for constructing preconditioners
3012 pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3013 delete_pA = true;
3014 }
3015 else if (oA) // fallback to general operator
3016 {
3017 // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
3018 // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
3019 pA = new PetscParMatrix(PetscObjectComm(obj),oA,
3020 wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3021 delete_pA = true;
3022 }
3023 }
3024 MFEM_VERIFY(pA, "Unsupported operation!");
3025
3026 // Set operators into PETSc KSP
3027 KSP ksp = (KSP)obj;
3028 Mat A = pA->A;
3029 if (operatorset)
3030 {
3031 Mat C;
3032 PetscInt nheight,nwidth,oheight,owidth;
3033
3034 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3035 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3036 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3037 if (nheight != oheight || nwidth != owidth)
3038 {
3039 // reinit without destroying the KSP
3040 // communicator remains the same
3041 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3042 delete X;
3043 delete B;
3044 X = B = NULL;
3045 }
3046 }
3047 ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
3048
3049 // Update PetscSolver
3050 operatorset = true;
3051
3052 // Update the Operator fields.
3053 height = pA->Height();
3054 width = pA->Width();
3055
3056 if (delete_pA) { delete pA; }
3057}
3058
3059void PetscLinearSolver::SetOperator(const Operator &op, const Operator &pop)
3060 {
3061 const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
3062 PetscParMatrix *pA = const_cast<PetscParMatrix *>
3063 (dynamic_cast<const PetscParMatrix *>(&op));
3064 const Operator *oA = dynamic_cast<const Operator *>(&op);
3065
3066 PetscParMatrix *ppA = const_cast<PetscParMatrix *>
3067 (dynamic_cast<const PetscParMatrix *>(&pop));
3068 const Operator *poA = dynamic_cast<const Operator *>(&pop);
3069
3070 // Convert Operator for linear system
3071 bool delete_pA = false;
3072 if (!pA)
3073 {
3074 if (hA)
3075 {
3076 // Create a MATSHELL object or convert into a format suitable for constructing preconditioners
3077 pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3078 delete_pA = true;
3079 }
3080 else if (oA) // fallback to general operator
3081 {
3082 // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
3083 // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
3084 pA = new PetscParMatrix(PetscObjectComm(obj),oA,
3085 wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3086 delete_pA = true;
3087 }
3088 }
3089 MFEM_VERIFY(pA, "Unsupported operation!");
3090
3091 // Convert Operator to be preconditioned
3092 bool delete_ppA = false;
3093 if (!ppA)
3094 {
3095 if (oA == poA && !wrap) // Same operator, already converted
3096 {
3097 ppA = pA;
3098 }
3099 else
3100 {
3101 ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
3102 delete_ppA = true;
3103 }
3104 }
3105 MFEM_VERIFY(ppA, "Unsupported operation!");
3106
3107 // Set operators into PETSc KSP
3108 KSP ksp = (KSP)obj;
3109 Mat A = pA->A;
3110 Mat P = ppA->A;
3111 if (operatorset)
3112 {
3113 Mat C;
3114 PetscInt nheight,nwidth,oheight,owidth;
3115
3116 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3117 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3118 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3119 if (nheight != oheight || nwidth != owidth)
3120 {
3121 // reinit without destroying the KSP
3122 // communicator remains the same
3123 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3124 delete X;
3125 delete B;
3126 X = B = NULL;
3127 wrap = false;
3128 }
3129 }
3130 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3131
3132 // Update PetscSolver
3133 operatorset = true;
3134
3135 // Update the Operator fields.
3136 height = pA->Height();
3137 width = pA->Width();
3138
3139 if (delete_pA) { delete pA; }
3140 if (delete_ppA) { delete ppA; }
3141}
3142
3143void PetscLinearSolver::SetPreconditioner(Solver &precond)
3144 {
3145 KSP ksp = (KSP)obj;
3146
3147 // Preserve Amat if already set
3148 Mat A = NULL;
3149 PetscBool amat;
3150 ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3151 if (amat)
3152 {
3153 ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3154 ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
3155 }
3156 PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
3157 if (ppc)
3158 {
3159 ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3160 }
3161 else
3162 {
3163 // wrap the Solver action;
3164 // the Solver is assumed to be already set up and
3165 // ownership of precond is not transferred,
3166 // consistent with MFEM's other linear solvers
3167 PC pc;
3168 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3169 ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
3170 }
3171 if (A)
3172 {
3173 Mat P;
3174
3175 ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3176 ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
3177 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3178 ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3179 ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3180 }
3181}
3182
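// Lazily creates the PETSc work vectors B and X from the KSP operator, then
// wraps the memory of the user-supplied b and x; x's initial content is
// honored only when a nonzero initial guess was requested.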
3183void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
3184{
3185 KSP ksp = (KSP)obj;
3186
3187 if (!B || !X)
3188 {
3189 Mat pA = NULL;
3190 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
3191 if (!B)
3192 {
3193 PetscParMatrix A = PetscParMatrix(pA, true);
3194 B = new PetscParVector(A, true, false);
3195 }
3196 if (!X)
3197 {
3198 PetscParMatrix A = PetscParMatrix(pA, true);
3199 X = new PetscParVector(A, false, false);
3200 }
3201 }
3202 B->PlaceMemory(b.GetMemory());
3203
3204 Customize();
3205
3206 PetscBool flg;
3207 ierr = KSPGetInitialGuessNonzero(ksp, &flg);
3208 X->PlaceMemory(x.GetMemory(),flg);
3209
3210 // Solve the system.
3211 if (trans)
3212 {
3213 ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3214 }
3215 else
3216 {
3217 ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3218 }
3219 B->ResetMemory();
3220 X->ResetMemory();
3221}
3222
3223void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
3224 {
3225 (*this).MultKernel(b,x,false);
3226}
3227
3228void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
3229 {
3230 (*this).MultKernel(b,x,true);
3231}
3232
3233PetscLinearSolver::~PetscLinearSolver()
3234 {
3235 MPI_Comm comm;
3236 KSP ksp = (KSP)obj;
3237 ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3238 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3239}
3240
3241// PetscPCGSolver methods
3242
3243PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix,
3244 bool iter_mode)
3245 : PetscLinearSolver(comm,prefix,true,iter_mode)
3246{
3247 KSP ksp = (KSP)obj;
3248 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3249 // this is to obtain a textbook PCG
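 // (with KSP_NORM_NATURAL the convergence test uses sqrt(r^T B r), the
 // norm that is naturally available in the preconditioned CG recurrence)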
3250 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3251}
3252
3253PetscPCGSolver::PetscPCGSolver(PetscParMatrix &A, const std::string &prefix,
3254 bool iter_mode)
3255 : PetscLinearSolver(A,prefix,iter_mode)
3256{
3257 KSP ksp = (KSP)obj;
3258 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3259 // this is to obtain a textbook PCG
3260 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3261}
3262
3263PetscPCGSolver::PetscPCGSolver(HypreParMatrix &A, bool wrap,
3264 const std::string &prefix, bool iter_mode)
3265 : PetscLinearSolver(A,wrap,prefix,iter_mode)
3266{
3267 KSP ksp = (KSP)obj;
3268 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3269 // this is to obtain a textbook PCG
3270 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3271}
3272
3273// PetscPreconditioner methods
3274
3275PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
3276 const std::string &prefix)
3277 : PetscSolver(), Solver()
3278{
3279 PC pc;
3280 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3281 obj = (PetscObject)pc;
3282 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3283 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3284}
3285
3286PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
3287 const string &prefix)
3288 : PetscSolver(), Solver()
3289{
3290 PC pc;
3291 ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
3292 obj = (PetscObject)pc;
3293 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3294 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3295 SetOperator(A);
3296}
3297
3298PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
3299 const string &prefix)
3300 : PetscSolver(), Solver()
3301{
3302 PC pc;
3303 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3304 obj = (PetscObject)pc;
3305 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3306 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3307 SetOperator(op);
3308}
3309
3310void PetscPreconditioner::SetOperator(const Operator &op)
3311 {
3312 bool delete_pA = false;
3313 PetscParMatrix *pA = const_cast<PetscParMatrix *>
3314 (dynamic_cast<const PetscParMatrix *>(&op));
3315
3316 if (!pA)
3317 {
3318 const Operator *cop = dynamic_cast<const Operator *>(&op);
3319 pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
3320 delete_pA = true;
3321 }
3322
3323 // Set operators into PETSc PC
3324 PC pc = (PC)obj;
3325 Mat A = pA->A;
3326 if (operatorset)
3327 {
3328 Mat C;
3329 PetscInt nheight,nwidth,oheight,owidth;
3330
3331 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3332 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3333 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3334 if (nheight != oheight || nwidth != owidth)
3335 {
3336 // reinit without destroying the PC
3337 // communicator remains the same
3338 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3339 delete X;
3340 delete B;
3341 X = B = NULL;
3342 }
3343 }
3344 ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
3345
3346 // Update PetscSolver
3347 operatorset = true;
3348
3349 // Update the Operator fields.
3350 height = pA->Height();
3351 width = pA->Width();
3352
3353 if (delete_pA) { delete pA; }
3354}
3355
3356void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
3357 bool trans) const
3358{
3359 MFEM_VERIFY(!iterative_mode,
3360 "Iterative mode not supported for PetscPreconditioner");
3361 PC pc = (PC)obj;
3362
3363 if (!B || !X)
3364 {
3365 Mat pA = NULL;
3366 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
3367 if (!B)
3368 {
3369 PetscParMatrix A(pA, true);
3370 B = new PetscParVector(A, true, false);
3371 }
3372 if (!X)
3373 {
3374 PetscParMatrix A(pA, true);
3375 X = new PetscParVector(A, false, false);
3376 }
3377 }
3378 B->PlaceMemory(b.GetMemory());
3379 X->PlaceMemory(x.GetMemory());
3380
3381 Customize();
3382
3383 // Apply the preconditioner.
3384 if (trans)
3385 {
3386 ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3387 }
3388 else
3389 {
3390 ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3391 }
3392 B->ResetMemory();
3393 X->ResetMemory();
3394}
3395
3396void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
3397 {
3398 (*this).MultKernel(b,x,false);
3399}
3400
3401void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
3402 {
3403 (*this).MultKernel(b,x,true);
3404}
3405
3406PetscPreconditioner::~PetscPreconditioner()
3407 {
3408 MPI_Comm comm;
3409 PC pc = (PC)obj;
3410 ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3411 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3412}
3413
3414// PetscBDDCSolver methods
3415
3416// Coordinates sampling function
3417static void func_coords(const Vector &x, Vector &y)
3418{
3419 y = x;
3420}
3421
3422void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
3423{
3424 MPI_Comm comm = PetscObjectComm(obj);
3425
3426 // get PETSc object
3427 PC pc = (PC)obj;
3428 Mat pA;
3429 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3430
3431 // the matrix should be of type MATIS
3432 PetscBool ismatis;
3433 ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
3434 PCHKERRQ(pA,ierr);
3435 MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
3436
3437 // Check options
3438 ParFiniteElementSpace *fespace = opts.fespace;
3439 if (opts.netflux && !fespace)
3440 {
3441 MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3442 }
3443
3444 // Attach default near-null space to local matrices
3445 {
3446 MatNullSpace nnsp;
3447 Mat lA;
3448 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3449 ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
3450 &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3451 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3452 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3453 }
3454
3455 // set PETSc PC type to PCBDDC
3456 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
3457
3458 PetscInt rst,nl;
3459 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3460 ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3461
3462 // index sets for field splitting and coordinates for nodal spaces
3463 IS *fields = NULL;
3464 PetscInt nf = 0;
3465 PetscInt sdim = 0;
3466 PetscReal *coords = NULL;
3467 if (fespace)
3468 {
3469 int vdim = fespace->GetVDim();
3470
3471 // Ideally, the block size should be set at matrix creation,
3472 // but the MFEM assembly does not allow one to do so
3473 if (fespace->GetOrdering() == Ordering::byVDIM)
3474 {
3475 Mat lA;
3476 ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3477 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3478 ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3479 }
3480
3481 // fields
3482 if (vdim > 1)
3483 {
3484 PetscInt st = rst, bs, inc, nlf;
3485 nf = vdim;
3486 nlf = nl/nf;
3487 ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3488 if (fespace->GetOrdering() == Ordering::byVDIM)
3489 {
3490 inc = 1;
3491 bs = vdim;
3492 }
3493 else
3494 {
3495 inc = nlf;
3496 bs = 1;
3497 }
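 // byVDIM ordering: field i owns the interleaved dofs
 // {rst+i, rst+i+vdim, ...}; byNODES ordering: field i owns the
 // contiguous range of nlf dofs starting at rst + i*nlf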
3498 for (PetscInt i = 0; i < nf; i++)
3499 {
3500 ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3501 st += inc;
3502 }
3503 }
3504
3505 // coordinates
3506 const FiniteElementCollection *fec = fespace->FEColl();
3507 bool h1space = dynamic_cast<const H1_FECollection*>(fec);
3508 if (h1space)
3509 {
3510 ParFiniteElementSpace *fespace_coords = fespace;
3511
3512 sdim = fespace->GetParMesh()->SpaceDimension();
3513 if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
3514 {
3515 fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3516 Ordering::byVDIM);
3517 }
3518 VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3519 ParGridFunction gf_coords(fespace_coords);
3520 gf_coords.ProjectCoefficient(coeff_coords);
3521 HypreParVector *hvec_coords = gf_coords.ParallelProject();
3522 PetscScalar *data_coords = (PetscScalar*)mfem::Read(hvec_coords->GetMemory(),
3523 hvec_coords->Size(),false);
3524
3525 // likely elasticity: attach rigid-body modes as near-null space
3526 // information to the local matrices and to the global matrix
3527 if (vdim == sdim)
3528 {
3529 MatNullSpace nnsp;
3530 Mat lA;
3531 Vec pvec_coords,lvec_coords;
3532 ISLocalToGlobalMapping l2g;
3533 PetscSF sf;
3534 PetscLayout rmap;
3535 const PetscInt *gidxs;
3536 PetscInt nleaves;
3537
3538 ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3539 hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3540 CCHKERRQ(comm,ierr);
3541 ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3542 if (!nnsp)
3543 {
3544 ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3545 CCHKERRQ(comm,ierr);
3546 ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3547 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3548 }
3549 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3550 ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3551 ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3552 ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3553 ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3554 ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3555 ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3556 ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3557 ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3558 CCHKERRQ(comm,ierr);
3559 ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3560 {
3561 const PetscScalar *garray;
3562 PetscScalar *larray;
3563
3564 ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3565 ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3566#if PETSC_VERSION_LT(3,15,0)
3567 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3568 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3569#else
3570 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3571 CCHKERRQ(comm,ierr);
3572 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3573 CCHKERRQ(comm,ierr);
3574#endif
3575 ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3576 ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3577 }
3578 ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3579 ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3580 CCHKERRQ(PETSC_COMM_SELF,ierr);
3581 ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3582 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3583 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3584 ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3585 }
3586
3587 // each dof has an associated tuple of coordinates
3588 ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3589 if (nf > 0)
3590 {
3591 for (PetscInt i = 0; i < nf; i++)
3592 {
3593 const PetscInt *idxs;
3594 PetscInt nn;
3595
3596 // This also handles the case where fespace is not ordered byVDIM
3597 ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3598 ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3599 for (PetscInt j = 0; j < nn; j++)
3600 {
3601 PetscInt idx = idxs[j]-rst;
3602 for (PetscInt d = 0; d < sdim; d++)
3603 {
3604 coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3605 }
3606 }
3607 ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3608 }
3609 }
3610 else
3611 {
3612 for (PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3613 }
3614 if (fespace_coords != fespace)
3615 {
3616 delete fespace_coords;
3617 }
3618 delete hvec_coords;
3619 }
3620 }
3621
3622 // index sets for boundary dofs specification (Essential = dir, Natural = neu)
3623 IS dir = NULL, neu = NULL;
3624
3625 // Extract l2l matrices
3626 Array<Mat> *l2l = NULL;
3627 if (opts.ess_dof_local || opts.nat_dof_local)
3628 {
3629 PetscContainer c;
3630
3631 ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
3632 MFEM_VERIFY(c,"Local-to-local PETSc container not present");
3633 ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
3634 }
3635
3636 // check information about index sets (essential dofs, fields, etc.)
3637#ifdef MFEM_DEBUG
3638 {
3639 // make sure ess/nat_dof have been collectively set
3640 PetscBool lpr = PETSC_FALSE,pr;
3641 if (opts.ess_dof) { lpr = PETSC_TRUE; }
3642 mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3643 CCHKERRQ(comm,mpiierr);
3644 MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
3645 lpr = PETSC_FALSE;
3646 if (opts.nat_dof) { lpr = PETSC_TRUE; }
3647 mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3648 CCHKERRQ(comm,mpiierr);
3649 MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
3650 // make sure fields have been collectively set
3651 PetscInt ms[2],Ms[2];
3652 ms[0] = -nf; ms[1] = nf;
3653 mpiierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3654 CCHKERRQ(comm,mpiierr);
3655 MFEM_VERIFY(-Ms[0] == Ms[1],
3656 "number of fields should be the same across processes");
3657 }
3658#endif
3659
3660 // boundary sets
3661 if (opts.ess_dof)
3662 {
3663 PetscInt st = opts.ess_dof_local ? 0 : rst;
3664 if (!opts.ess_dof_local)
3665 {
3666 // need to compute the boundary dofs in global ordering
3667 ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
3668 CCHKERRQ(comm,ierr);
3669 ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3670 }
3671 else
3672 {
3673 // need to compute a list for the marked boundary dofs in local ordering
3674 ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3675 CCHKERRQ(comm,ierr);
3676 ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3677 }
3678 }
3679 if (opts.nat_dof)
3680 {
3681 PetscInt st = opts.nat_dof_local ? 0 : rst;
3682 if (!opts.nat_dof_local)
3683 {
3684 // need to compute the boundary dofs in global ordering
3685 ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3686 CCHKERRQ(comm,ierr);
3687 ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3688 }
3689 else
3690 {
3691 // need to compute a list for the marked boundary dofs in local ordering
3692 ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3693 CCHKERRQ(comm,ierr);
3694 ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3695 }
3696 }
3697
3698 // field splitting
3699 if (fields)
3700 {
3701 ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3702 }
3703 for (PetscInt i = 0; i < nf; i++)
3704 {
3705 ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3706 }
3707 ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3708
3709 // coordinates
3710 if (coords)
3711 {
3712 ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3713 }
3714 ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3715
3716 // code for block size is disabled since we cannot change the matrix
3717 // block size after it has been setup
3718 // int bs = 1;
3719
3720 // Customize using the finite element space (if any)
3721 if (fespace)
3722 {
3723 const FiniteElementCollection *fec = fespace->FEColl();
3724 bool edgespace, rtspace, h1space;
3725 bool needint = opts.netflux;
3726 bool tracespace, rt_tracespace, edge_tracespace;
3727 int vdim, dim, p;
3728 PetscBool B_is_Trans = PETSC_FALSE;
3729
3730 ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3731 dim = pmesh->Dimension();
3732 vdim = fespace->GetVDim();
3733 h1space = dynamic_cast<const H1_FECollection*>(fec);
3734 rtspace = dynamic_cast<const RT_FECollection*>(fec);
3735 edgespace = dynamic_cast<const ND_FECollection*>(fec);
3736 edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3737 rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3738 tracespace = edge_tracespace || rt_tracespace;
3739
3740 p = 1;
3741 if (fespace->GetNE() > 0)
3742 {
3743 if (!tracespace)
3744 {
3745 p = fespace->GetElementOrder(0);
3746 }
3747 else
3748 {
3749 p = fespace->GetFaceOrder(0);
3750 if (dim == 2) { p++; }
3751 }
3752 }
3753
3754 if (edgespace) // H(curl)
3755 {
3756 if (dim == 2)
3757 {
3758 needint = true;
3759 if (tracespace)
3760 {
3761 MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3762 " not using auxiliary quadrature");
3763 needint = false;
3764 }
3765 }
3766 else
3767 {
3768 FiniteElementCollection *vfec;
3769 if (tracespace)
3770 {
3771 vfec = new H1_Trace_FECollection(p,dim);
3772 }
3773 else
3774 {
3775 vfec = new H1_FECollection(p,dim);
3776 }
3777 ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3778 ParDiscreteLinearOperator *grad;
3779 grad = new ParDiscreteLinearOperator(vfespace,fespace);
3780 if (tracespace)
3781 {
3782 grad->AddTraceFaceInterpolator(new GradientInterpolator);
3783 }
3784 else
3785 {
3786 grad->AddDomainInterpolator(new GradientInterpolator);
3787 }
3788 grad->Assemble();
3789 grad->Finalize();
3790 HypreParMatrix *hG = grad->ParallelAssemble();
3791 PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3792 delete hG;
3793 delete grad;
3794
3795 PetscBool conforming = PETSC_TRUE;
3796 if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3797 ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3798 PCHKERRQ(pc,ierr);
3799 delete vfec;
3800 delete vfespace;
3801 delete G;
3802 }
3803 }
3804 else if (rtspace) // H(div)
3805 {
3806 needint = true;
3807 if (tracespace)
3808 {
3809 MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3810 " auxiliary quadrature");
3811 needint = false;
3812 }
3813 }
3814 else if (h1space) // H(grad), only for the vector case
3815 {
3816 if (vdim != dim) { needint = false; }
3817 }
3818
3819 PetscParMatrix *B = NULL;
3820 if (needint)
3821 {
3822 // Generate a bilinear form in unassembled format, used to compute the
3823 // net flux across subdomain boundaries for H(div) and Elasticity/Stokes,
3824 // and the line integral \int u x n of 2D H(curl) fields
3825 FiniteElementCollection *auxcoll;
3826 if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3827 else
3828 {
3829 if (h1space)
3830 {
3831 auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3832 }
3833 else
3834 {
3835 auxcoll = new L2_FECollection(p,dim);
3836 }
3837 }
3838 ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3839 ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3840
3841 if (edgespace)
3842 {
3843 if (tracespace)
3844 {
3845 b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3846 }
3847 else
3848 {
3849 b->AddDomainIntegrator(new VectorFECurlIntegrator);
3850 }
3851 }
3852 else if (rtspace)
3853 {
3854 if (tracespace)
3855 {
3856 b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3857 }
3858 else
3859 {
3860 b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3861 }
3862 }
3863 else
3864 {
3865 b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3866 }
3867 b->Assemble();
3868 b->Finalize();
3869 OperatorHandle Bh(Operator::PETSC_MATIS);
3870 b->ParallelAssemble(Bh);
3871 Bh.Get(B);
3872 Bh.SetOperatorOwner(false);
3873
3874 if (dir) // if essential dofs are present, we need to zero the columns
3875 {
3876 Mat pB = *B;
3877 ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3878 if (!opts.ess_dof_local)
3879 {
3880 ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3881 }
3882 else
3883 {
3884 ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3885 }
3886 B_is_Trans = PETSC_TRUE;
3887 }
3888 delete b;
3889 delete pspace;
3890 delete auxcoll;
3891 }
3892
3893 if (B)
3894 {
3895 ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3896 }
3897 delete B;
3898 }
3899 ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3900 ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3901}
3902
3903PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3904 const PetscBDDCSolverParams &opts,
3905 const std::string &prefix)
3906 : PetscPreconditioner(A,prefix)
3907{
3908 BDDCSolverConstructor(opts);
3909 Customize();
3910}
3911
3912PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3913 const PetscBDDCSolverParams &opts,
3914 const std::string &prefix)
3915 : PetscPreconditioner(comm,op,prefix)
3916{
3917 BDDCSolverConstructor(opts);
3918 Customize();
3919}
3920
3921PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3922 const string &prefix)
3923 : PetscPreconditioner(comm,op,prefix)
3924{
3925 PC pc = (PC)obj;
3926 ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3927
3928 Mat pA;
3929 ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3930
3931 // Check if pA is of type MATNEST
3932 PetscBool isnest;
3933 ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3934
3935 PetscInt nr = 0;
3936 IS *isrow = NULL;
3937 if (isnest) // we know the fields
3938 {
3939 ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3940 ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3941 ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3942 }
3943
3944 // We need to customize here, before setting the index sets.
3945 // This is because PCFieldSplitSetType customizes the function
3946 // pointers. SubSolver options will be processed during PCApply
3947 Customize();
3948
3949 for (PetscInt i=0; i<nr; i++)
3950 {
3951 ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3952 }
3953 ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3954}
3955
3956PetscH2Solver::PetscH2Solver(Operator &op,
3957 ParFiniteElementSpace *fes,
3958 const std::string &prefix)
3959 : PetscPreconditioner(fes->GetParMesh()->GetComm(),prefix)
3960{
3961 PetscParMatrix A(GetComm(),&op,Operator::ANY_TYPE);
3962 ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE); PCHKERRQ(A,ierr);
3963 ierr = MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE); PCHKERRQ(A,ierr);
3964 SetOperator(A);
3965 H2SolverConstructor(fes);
3966 Customize();
3967}
3968
3969void PetscH2Solver::H2SolverConstructor(ParFiniteElementSpace *fes)
3970{
3971#if defined(PETSC_HAVE_H2OPUS)
3972 int sdim = fes->GetParMesh()->SpaceDimension();
3973 int vdim = fes->GetVDim();
3974 const FiniteElementCollection *fec = fes->FEColl();
3975 ParFiniteElementSpace *fes_coords = NULL;
3976
3977 if (vdim != sdim || fes->GetOrdering() != Ordering::byVDIM)
3978 {
3979 fes_coords = new ParFiniteElementSpace(fes->GetParMesh(),fec,sdim,
3980 Ordering::byVDIM);
3981 fes = fes_coords;
3982 }
3983 VectorFunctionCoefficient ccoords(sdim, func_coords);
3984
3985 ParGridFunction coords(fes);
3986 coords.ProjectCoefficient(ccoords);
3987 Vector c(fes->GetTrueVSize());
3988 coords.ParallelProject(c);
3989 delete fes_coords;
3990
3991 PC pc = (PC)obj;
3992 ierr = PCSetType(pc,PCH2OPUS); PCHKERRQ(obj, ierr);
3993 ierr = PCSetCoordinates(pc,sdim,c.Size()/sdim,
3994 (PetscReal*)mfem::Read(c.GetMemory(),
3995 c.Size(),false));
3996 ierr = PCSetFromOptions(pc); PCHKERRQ(obj, ierr);
3997#else
3998 MFEM_ABORT("Need PETSc configured with --download-h2opus");
3999#endif
4000}
4001
4002// PetscNonlinearSolver methods
4003
4004PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
4005 const std::string &prefix)
4006 : PetscSolver(), Solver()
4007{
4008 // Create the actual solver object
4009 SNES snes;
4010 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
4011 obj = (PetscObject)snes;
4012 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
4013 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
4014
4015 // Allocate private solver context
4016 CreatePrivateContext();
4017}
4018
4019PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
4020 const std::string &prefix)
4021 : PetscSolver(), Solver()
4022{
4023 // Create the actual solver object
4024 SNES snes;
4025 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
4026 obj = (PetscObject)snes;
4027 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
4028 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
4029
4030 // Allocate private solver context
4031 CreatePrivateContext();
4032
4033 SetOperator(op);
4034}
4035
4036PetscNonlinearSolver::~PetscNonlinearSolver()
4037 {
4038 MPI_Comm comm;
4039 SNES snes = (SNES)obj;
4040 ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
4041 ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
4042}
4043
4044void PetscNonlinearSolver::SetOperator(const Operator &op)
4045 {
4046 SNES snes = (SNES)obj;
4047
4048 if (operatorset)
4049 {
4050 PetscBool ls,gs;
4051 void *fctx,*jctx;
4052
4053 ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
4054 PCHKERRQ(snes, ierr);
4055 ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
4056 PCHKERRQ(snes, ierr);
4057
4058 ls = (PetscBool)(height == op.Height() && width == op.Width() &&
4059 (void*)&op == fctx &&
4060 (void*)&op == jctx);
4061 mpiierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
4062 PetscObjectComm((PetscObject)snes));
4063 CCHKERRQ(PetscObjectComm((PetscObject)snes),mpiierr);
4064 if (!gs)
4065 {
4066 ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
4067 delete X;
4068 delete B;
4069 X = B = NULL;
4070 }
4071 }
4072 else
4073 {
4074 /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
4075 yet set. We default to backtracking */
4076 SNESLineSearch ls;
4077 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4078 ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
4079 }
4080
4081 // If we do not pass matrices in, the default matrix type for DMShell is MATDENSE
4082 // in 3.15, which may cause issues.
4083 Mat dummy;
4084 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)snes),op.Height(),
4085 op.Height(),&dummy);
4086
4087 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4088 snes_ctx->op = (Operator*)&op;
4089 ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
4090 PCHKERRQ(snes, ierr);
4091 ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
4092 (void *)snes_ctx);
4093 PCHKERRQ(snes, ierr);
4094
4095 ierr = MatDestroy(&dummy);
4096 PCHKERRQ(snes, ierr);
4097
4098 // Update PetscSolver
4099 operatorset = true;
4100
4101 // Update the Operator fields.
4102 height = op.Height();
4103 width = op.Width();
4104}
4105
4106void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
4107 {
4108 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4109 snes_ctx->jacType = jacType;
4110}
4111
4112void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *,const Vector&,
4113 mfem::real_t*))
4114{
4115 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4116 snes_ctx->objective = objfn;
4117
4118 SNES snes = (SNES)obj;
4119 ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
4120 PCHKERRQ(snes, ierr);
4121}
4122
4123void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *,const Vector&,
4124 Vector&, Vector&,
4125 bool&, bool&))
4126{
4127 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4128 snes_ctx->postcheck = post;
4129
4130 SNES snes = (SNES)obj;
4131 SNESLineSearch ls;
4132 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4133 ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
4134 PCHKERRQ(ls, ierr);
4135}
4136
4137void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *,int,
4138 const Vector&,
4139 const Vector&,
4140 const Vector&,
4141 const Vector&))
4142{
4143 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4144 snes_ctx->update = update;
4145
4146 SNES snes = (SNES)obj;
4147 ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4148}
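// The three setters above expect user callbacks with the following shapes
// (an illustrative sketch; the my_* names are hypothetical):
//
//    void my_objective(mfem::Operator *op, const mfem::Vector &x,
//                      mfem::real_t *f);                        // SetObjective()
//    void my_postcheck(mfem::Operator *op, const mfem::Vector &X,
//                      mfem::Vector &Y, mfem::Vector &W,
//                      bool &changed_y, bool &changed_w);       // SetPostCheck()
//    void my_update(mfem::Operator *op, int it,
//                   const mfem::Vector &F, const mfem::Vector &X,
//                   const mfem::Vector &D, const mfem::Vector &P); // SetUpdate()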
4149
4150void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
4151{
4152 SNES snes = (SNES)obj;
4153
4154 bool b_nonempty = b.Size();
4155 if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
4156 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
4157 X->PlaceMemory(x.GetMemory(),iterative_mode);
4158 if (b_nonempty) { B->PlaceMemory(b.GetMemory()); }
4159 else { *B = 0.0; }
4160
4161 Customize();
4162
4163 if (!iterative_mode) { *X = 0.; }
4164
4165 // Solve the system.
4166 ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4167 X->ResetMemory();
4168 if (b_nonempty) { B->ResetMemory(); }
4169}
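// Typical usage (an illustrative sketch; MyResidualOperator is a hypothetical
// user class implementing mfem::Operator::Mult() and GetGradient()):
//
//    MyResidualOperator F;                            // residual F(x)
//    mfem::PetscNonlinearSolver newton(MPI_COMM_WORLD, F);
//    newton.SetRelTol(1e-8);
//    mfem::Vector b, x(F.Width());                    // empty b solves F(x) = 0
//    x = 0.0;
//    newton.Mult(b, x);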
4170
4171// PetscODESolver methods
4172
4173PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
4174 : PetscSolver(), ODESolver()
4175{
4176 // Create the actual solver object
4177 TS ts;
4178 ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4179 obj = (PetscObject)ts;
4180 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
4181 ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4182
4183 // Allocate private solver context
4184 CreatePrivateContext();
4185
4186 // Default options, to comply with the current interface to ODESolver.
4187 ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4188 PCHKERRQ(ts,ierr);
4189 ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4190 PCHKERRQ(ts,ierr);
4191 TSAdapt tsad;
4192 ierr = TSGetAdapt(ts,&tsad);
4193 PCHKERRQ(ts,ierr);
4194 ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4195 PCHKERRQ(ts,ierr);
4196}
4197
4198PetscODESolver::~PetscODESolver()
4199{
4200 MPI_Comm comm;
4201 TS ts = (TS)obj;
4202 ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
4203 ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4204}
4205
4206void PetscODESolver::Init(TimeDependentOperator &f_,
4207 enum PetscODESolver::Type type)
4208{
4209 TS ts = (TS)obj;
4210
4211 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4212 if (operatorset)
4213 {
4214 ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4215 delete X;
4216 X = NULL;
4217 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4218 ts_ctx->cached_ijacstate = -1;
4219 ts_ctx->cached_rhsjacstate = -1;
4220 ts_ctx->cached_splits_xstate = -1;
4221 ts_ctx->cached_splits_xdotstate = -1;
4222 }
4223 f = &f_;
4224
4225 // Set functions in TS
4226 ts_ctx->op = &f_;
4227 if (f_.isImplicit())
4228 {
4229 Mat dummy;
4230 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4231 f_.Height(),&dummy);
4232 PCHKERRQ(ts, ierr);
4233 ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
4234 PCHKERRQ(ts, ierr);
4235 ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (void *)ts_ctx);
4236 PCHKERRQ(ts, ierr);
4237 ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4238 PCHKERRQ(ts, ierr);
4239 ierr = MatDestroy(&dummy);
4240 PCHKERRQ(ts, ierr);
4241 }
4242 if (!f_.isHomogeneous())
4243 {
4244 Mat dummy = NULL;
4245 if (!f_.isImplicit())
4246 {
4247 ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4248 PCHKERRQ(ts, ierr);
4249 }
4250 else
4251 {
4252 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4253 f_.Height(),&dummy);
4254 PCHKERRQ(ts, ierr);
4255 }
4256 ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
4257 PCHKERRQ(ts, ierr);
4258 ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4259 (void *)ts_ctx);
4260 PCHKERRQ(ts, ierr);
4261 ierr = MatDestroy(&dummy);
4262 PCHKERRQ(ts, ierr);
4263 }
4264 operatorset = true;
4265
4266 SetType(type);
4267
4268 // Set solution vector
4269 PetscParVector X(PetscObjectComm(obj),*f,false,true);
4270 ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4271
4272 // Compose special purpose function for PDE-constrained optimization
4273 PetscBool use = PETSC_TRUE;
4274 ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
4275 if (use && f_.isImplicit())
4276 {
4277 ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4278 __mfem_ts_computesplits);
4279 PCHKERRQ(ts,ierr);
4280 }
4281 else
4282 {
4283 ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4284 NULL);
4285 PCHKERRQ(ts,ierr);
4286 }
4287}
4288
4289void PetscODESolver::SetJacobianType(Operator::Type jacType)
4290{
4291 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4292 ts_ctx->jacType = jacType;
4293}
4294
4295PetscODESolver::Type PetscODESolver::GetType() const
4296{
4297 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4298 return ts_ctx->type;
4299}
4300
4301void PetscODESolver::SetType(PetscODESolver::Type type)
4302{
4303 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4304
4305 TS ts = (TS)obj;
4306 ts_ctx->type = type;
4307 if (type == ODE_SOLVER_LINEAR)
4308 {
4309 ierr = TSSetProblemType(ts, TS_LINEAR);
4310 PCHKERRQ(ts, ierr);
4311 }
4312 else
4313 {
4314 ierr = TSSetProblemType(ts, TS_NONLINEAR);
4315 PCHKERRQ(ts, ierr);
4316 }
4317}
4318
4319void PetscODESolver::Step(Vector &x, mfem::real_t &t, mfem::real_t &dt)
4320{
4321 // Pass the parameters to PETSc.
4322 TS ts = (TS)obj;
4323 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4324 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4325
4326 PetscInt i;
4327 ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4328
4329 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4330 X->PlaceMemory(x.GetMemory(),true);
4331
4332 Customize();
4333
4334 // Monitor initial step
4335 if (!i)
4336 {
4337 ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4338 }
4339
4340 // Take the step.
4341 ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4342 ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4343
4344 // Return the current time and the time step actually used to the caller.
4345 // We cannot use TSGetTimeStep() as it returns the next candidate step
4346 PetscReal pt;
4347 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4348 dt = pt - (PetscReal)t;
4349 t = pt;
4350
4351 // Monitor current step
4352 ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4353
4354 X->ResetMemory();
4355}
4356
4357void PetscODESolver::Run(Vector &x, mfem::real_t &t, mfem::real_t &dt,
4358 mfem::real_t t_final)
4359{
4360 // Give the parameters to PETSc.
4361 TS ts = (TS)obj;
4362 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4363 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4364 ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4365 ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4366 PCHKERRQ(ts, ierr);
4367
4368 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4369 X->PlaceMemory(x.GetMemory(),true);
4370
4371 Customize();
4372
4373 // Reset Jacobian caching since the user may have changed
4374 // the parameters of the solver
4375 // We don't do this in the Step method because two consecutive
4376 // Step() calls are done with the same operator
4377 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4378 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4379 ts_ctx->cached_ijacstate = -1;
4380 ts_ctx->cached_rhsjacstate = -1;
4381 ts_ctx->cached_splits_xstate = -1;
4382 ts_ctx->cached_splits_xdotstate = -1;
4383
4384 // Take the steps.
4385 ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4386 X->ResetMemory();
4387
4388 // Get back final time and time step to caller.
4389 PetscReal pt;
4390 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4391 t = pt;
4392 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4393 dt = pt;
4394}
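// Typical usage (an illustrative sketch; MyTDOperator is a hypothetical
// mfem::TimeDependentOperator and t_final a user-chosen final time):
//
//    MyTDOperator op;
//    mfem::PetscODESolver ode(MPI_COMM_WORLD);
//    ode.Init(op, mfem::PetscODESolver::ODE_SOLVER_GENERAL);
//    mfem::Vector x(op.Height());
//    mfem::real_t t = 0.0, dt = 1e-2;
//    while (t < t_final) { ode.Step(x, t, dt); }  // or: ode.Run(x, t, dt, t_final)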
4395
4396} // namespace mfem
4397
4398#include "petsc/private/petscimpl.h"
4399#include "petsc/private/matimpl.h"
4400
4401// auxiliary functions
4402static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
4403 void* ctx)
4404{
4405 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4406
4407 PetscFunctionBeginUser;
4408 if (!monctx)
4409 {
4410 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4411 }
4412 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4413 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4414 monctx->monitor);
4415
4416 if (user_monitor->mon_sol)
4417 {
4418 mfem::PetscParVector V(x,true);
4419 user_monitor->MonitorSolution(it,t,V);
4420 }
4421 user_monitor->MonitorSolver(solver);
4422 PetscFunctionReturn(PETSC_SUCCESS);
4423}
4424
4425static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
4426 Vec f,void *ctx)
4427{
4428 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4429
4430 PetscFunctionBeginUser;
4431 mfem::PetscParVector xx(x,true);
4432 mfem::PetscParVector yy(xp,true);
4433 mfem::PetscParVector ff(f,true);
4434
4435 mfem::TimeDependentOperator *op = ts_ctx->op;
4436 op->SetTime(t);
4437
4438 if (ts_ctx->bchandler)
4439 {
4440 // we evaluate the ImplicitMult method with the correct bc
4441 // this means the correct time derivative for essential boundary
4442 // dofs is zero
4443 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
4444 if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
4445 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4446 mfem::Vector* txx = ts_ctx->work;
4447 mfem::Vector* txp = ts_ctx->work2;
4448 bchandler->SetTime(t);
4449 bchandler->ApplyBC(xx,*txx);
4450 bchandler->ZeroBC(yy,*txp);
4451 op->ImplicitMult(*txx,*txp,ff);
4452 // and fix the residual (i.e. f_\partial\Omega = u - g(t))
4453 bchandler->FixResidualBC(xx,ff);
4454 }
4455 else
4456 {
4457 // use the ImplicitMult method of the class
4458 op->ImplicitMult(xx,yy,ff);
4459 }
4460 ff.UpdateVecFromFlags();
4461 PetscFunctionReturn(PETSC_SUCCESS);
4462}
4463
4464static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
4465 void *ctx)
4466{
4467 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4468
4469 PetscFunctionBeginUser;
4470 if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
4471 mfem::PetscParVector xx(x,true);
4472 mfem::PetscParVector ff(f,true);
4473 mfem::TimeDependentOperator *top = ts_ctx->op;
4474 top->SetTime(t);
4475
4476 // use the ExplicitMult method - compute the RHS function
4477 top->ExplicitMult(xx,ff);
4478
4479 ff.UpdateVecFromFlags();
4480 PetscFunctionReturn(PETSC_SUCCESS);
4481}
4482
4483static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
4484 Vec xp, PetscReal shift, Mat A, Mat P,
4485 void *ctx)
4486{
4487 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4488 mfem::Vector *xx;
4489 PetscScalar *array;
4490 PetscReal eps = 0.001; /* 0.1% difference */
4491 PetscInt n;
4492 PetscObjectState state;
4493 PetscErrorCode ierr;
4494
4495 PetscFunctionBeginUser;
4496 // Matrix-free case
4497 if (A && A != P)
4498 {
4499 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4500 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4501 }
4502
4503 // avoid recomputing a Jacobian if we have already done so; the relative
4504 // tolerance comparison should be fine, given that two consecutive
4505 // shifts should have similar magnitude
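 // For example, with eps = 0.001 a cached shift of 0.5 is reused when the
 // incoming shift is 0.5002 (|0.5/0.5002 - 1| ~ 4e-4 < eps), while halving
 // the time step roughly doubles the shift and forces a recompute.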
4506 ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4507 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4508 std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4509 state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4510
4511 // update time
4512 mfem::TimeDependentOperator *op = ts_ctx->op;
4513 op->SetTime(t);
4514
4515 // wrap Vecs with Vectors
4516 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4517 ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4518 mfem::Vector yy(array,n);
4519 ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4520 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4521 if (!ts_ctx->bchandler)
4522 {
4523 xx = new mfem::Vector(array,n);
4524 }
4525 else
4526 {
4527 // make sure we compute a Jacobian with the correct boundary values
4528 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4529 mfem::Vector txx(array,n);
4530 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4531 xx = ts_ctx->work;
4532 bchandler->SetTime(t);
4533 bchandler->ApplyBC(txx,*xx);
4534 }
4535 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4536
4537 // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
4538 mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
4539 if (!ts_ctx->bchandler) { delete xx; }
4540 ts_ctx->cached_shift = shift;
4541
4542 // Convert to the operator type requested if needed
4543 bool delete_pA = false;
4544 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4545 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4546 if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4547 pA->GetType() != ts_ctx->jacType))
4548 {
4549 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4550 ts_ctx->jacType);
4551 delete_pA = true;
4552 }
4553
4554 // Eliminate essential dofs
4555 if (ts_ctx->bchandler)
4556 {
4557 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4558 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4559 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4560 }
4561
4562 // Get nonzerostate
4563 PetscObjectState nonzerostate;
4564 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4565
4566 // Avoid an unneeded copy of the matrix by replacing the header of P
4567 Mat B;
4568 B = pA->ReleaseMat(false);
4569 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4570 if (delete_pA) { delete pA; }
4571
4572 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4573 // preconditioner fails because MatCreateSubMatrix_Nest does not
4574 // actually return a matrix. Instead, for efficiency reasons,
4575 // it returns a reference to the submatrix. The second time it
4576 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4577 // aborts since the two submatrices are actually different.
4578 // We circumvent this issue by incrementing the nonzero state
4579 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4580 // This does not impact performance in the case of MATNEST
4581 PetscBool isnest;
4582 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4583 CHKERRQ(ierr);
4584 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4585
4586 // Jacobian reuse
4587 ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
4588 CHKERRQ(ierr);
4589
4590 // Fool DM
4591 DM dm;
4592 MatType mtype;
4593 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4594 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4595 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4596 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4597 PetscFunctionReturn(PETSC_SUCCESS);
4598}
4599
4600static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
4601 Mat Ax,Mat Jx,
4602 Mat Axp,Mat Jxp)
4603{
4604 __mfem_ts_ctx* ts_ctx;
4605 mfem::Vector *xx;
4606 PetscScalar *array;
4607 PetscInt n;
4608 PetscObjectState state;
4609 PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4610 PetscBool assembled;
4611 PetscErrorCode ierr;
4612
4613 PetscFunctionBeginUser;
4614 // Matrix-free cases
4615 if (Ax && Ax != Jx)
4616 {
4617 ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4618 ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4619 }
4620 if (Axp && Axp != Jxp)
4621 {
4622 ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4623 ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4624 }
4625
4626 ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
4627
4628 // avoid recomputing the Jacobians if we have already done so
4629 ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
4630 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4631 state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4632 ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
4633 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4634 state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4635 if (!rx && !rxp) { PetscFunctionReturn(PETSC_SUCCESS); }
4636
4637 // update time
4638 mfem::TimeDependentOperator *op = ts_ctx->op;
4639 op->SetTime(t);
4640
4641 // wrap Vecs with Vectors
4642 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4643 ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4644 mfem::Vector yy(array,n);
4645 ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4646 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4647 if (!ts_ctx->bchandler)
4648 {
4649 xx = new mfem::Vector(array,n);
4650 }
4651 else
4652 {
4653 // make sure we compute a Jacobian with the correct boundary values
4654 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4655 mfem::Vector txx(array,n);
4656 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4657 xx = ts_ctx->work;
4658 bchandler->SetTime(t);
4659 bchandler->ApplyBC(txx,*xx);
4660 }
4661 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4662
4663 // We don't have a specialized interface, so we compute the split Jacobians
4664 // by evaluating the implicit gradient method twice with the correct shifts
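 // Since GetImplicitGradient(x,xdot,shift) returns J_x + shift*J_xdot, the
 // shift = 0 call below yields J_x directly, and subtracting it from the
 // shift = 1 result (via MatAXPY at the end) recovers J_xdot.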
4665
4666 // first we do the state jacobian
4667 mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
4668
4669 // Convert to the operator type requested if needed
4670 bool delete_mat = false;
4671 mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
4672 (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
4673 if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4674 pJx->GetType() != ts_ctx->jacType))
4675 {
4676 if (pJx)
4677 {
4678 Mat B = *pJx;
4679 ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4680 }
4681 pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
4682 ts_ctx->jacType);
4683 delete_mat = true;
4684 }
4685 if (rx)
4686 {
4687 ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4688 if (assembled)
4689 {
4690 ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4691 }
4692 else
4693 {
4694 Mat B;
4695 ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4696 ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4697 }
4698 }
4699 if (delete_mat) { delete pJx; }
4700 pJx = new mfem::PetscParMatrix(Jx,true);
4701
4702 // Eliminate essential dofs
4703 if (ts_ctx->bchandler)
4704 {
4705 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4706 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4707 pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4708 }
4709
4710 // Then we compute the Jacobian with respect to the time derivative of the
4711 // state. Note that this is usually the mass matrix
4712 mfem::PetscParMatrix *pJxp = NULL;
4713 if (rxp)
4714 {
4715 delete_mat = false;
4716 mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
4717 pJxp = const_cast<mfem::PetscParMatrix *>
4718 (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
4719 if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4720 pJxp->GetType() != ts_ctx->jacType))
4721 {
4722 if (pJxp)
4723 {
4724 Mat B = *pJxp;
4725 ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4726 }
4727 pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
4728 &oJxp,ts_ctx->jacType);
4729 delete_mat = true;
4730 }
4731
4732 ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4733 if (assembled)
4734 {
4735 ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4736 }
4737 else
4738 {
4739 Mat B;
4740 ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4741 ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4742 }
4743 if (delete_mat) { delete pJxp; }
4744 pJxp = new mfem::PetscParMatrix(Jxp,true);
4745
4746 // Eliminate essential dofs
4747 if (ts_ctx->bchandler)
4748 {
4749 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4750 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4751 pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
4752 }
4753
4754 // Obtain the time-dependent part of the Jacobian by subtracting
4755 // the state Jacobian
4756 // We don't do it with the class operator "-=" since we know that
4757 // the sparsity pattern of the two matrices is the same
4758 ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4759 }
4760
4761 // Jacobian reuse
4762 ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4763 CHKERRQ(ierr);
4764 ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4765 CHKERRQ(ierr);
4766
4767 delete pJx;
4768 delete pJxp;
4769 if (!ts_ctx->bchandler) { delete xx; }
4770 PetscFunctionReturn(PETSC_SUCCESS);
4771}
4772
4773static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4774 Mat A, Mat P, void *ctx)
4775{
4776 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4777 mfem::Vector *xx;
4778 PetscScalar *array;
4779 PetscInt n;
4780 PetscObjectState state;
4781 PetscErrorCode ierr;
4782
4783 PetscFunctionBeginUser;
4784 // Matrix-free case
4785 if (A && A != P)
4786 {
4787 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4788 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4789 }
4790
4791 // avoid recomputing the Jacobian if we have already done so
4792 ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4793 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4794 state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4795
4796 // update time
4797 mfem::TimeDependentOperator *op = ts_ctx->op;
4798 op->SetTime(t);
4799
4800 // wrap Vec with Vector
4801 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4802 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4803 if (!ts_ctx->bchandler)
4804 {
4805 xx = new mfem::Vector(array,n);
4806 }
4807 else
4808 {
4809 // make sure we compute a Jacobian with the correct boundary values
4810 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4811 mfem::Vector txx(array,n);
4812 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4813 xx = ts_ctx->work;
4814 bchandler->SetTime(t);
4815 bchandler->ApplyBC(txx,*xx);
4816 }
4817 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4818
4819 // Use TimeDependentOperator::GetExplicitGradient(x)
4820 mfem::Operator& J = op->GetExplicitGradient(*xx);
4821 if (!ts_ctx->bchandler) { delete xx; }
4822
4823 // Convert to the operator type requested if needed
4824 bool delete_pA = false;
4825 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4826 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4827 if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4828 pA->GetType() != ts_ctx->jacType))
4829 {
4830 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4831 ts_ctx->jacType);
4832 delete_pA = true;
4833 }
4834
4835 // Eliminate essential dofs
4836 if (ts_ctx->bchandler)
4837 {
4838 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4839 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4840 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4841 }
4842
4843 // Get nonzerostate
4844 PetscObjectState nonzerostate;
4845 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4846
4847 // Avoid an unneeded copy of the matrix by replacing the header of P
4848 Mat B;
4849 B = pA->ReleaseMat(false);
4850 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4851 if (delete_pA) { delete pA; }
4852
4853 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4854 // preconditioner fails because MatCreateSubMatrix_Nest does not
4855 // actually return a matrix. Instead, for efficiency reasons,
4856 // it returns a reference to the submatrix. The second time it
4857 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4858 // aborts since the two submatrices are actually different.
4859 // We circumvent this issue by incrementing the nonzero state
4860 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4861 // This does not impact performance in the case of MATNEST
4862 PetscBool isnest;
4863 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4864 CHKERRQ(ierr);
4865 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4866
4867 // Jacobian reuse
4868 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4869 {
4870 ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4871 }
4872 ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_rhsjacstate);
4873 CHKERRQ(ierr);
4874
4875 // Fool DM
4876 DM dm;
4877 MatType mtype;
4878 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4879 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4880 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4881 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4882 PetscFunctionReturn(PETSC_SUCCESS);
4883}
4884
4885static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4886 void* ctx)
4887{
4888 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4889
4890 PetscFunctionBeginUser;
4891 if (!monctx)
4892 {
4893 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4894 }
4895
4896 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4897 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4898 monctx->monitor);
4899 if (user_monitor->mon_sol)
4900 {
4901 Vec x;
4902 PetscErrorCode ierr;
4903
4904 ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4905 mfem::PetscParVector V(x,true);
4906 user_monitor->MonitorSolution(it,res,V);
4907 }
4908 if (user_monitor->mon_res)
4909 {
4910 Vec x;
4911 PetscErrorCode ierr;
4912
4913 ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4914 mfem::PetscParVector V(x,true);
4915 user_monitor->MonitorResidual(it,res,V);
4916 }
4917 user_monitor->MonitorSolver(solver);
4918 PetscFunctionReturn(PETSC_SUCCESS);
4919}
4920
4921static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4922 void *ctx)
4923{
4924 PetscScalar *array;
4925 PetscInt n;
4926 PetscErrorCode ierr;
4927 mfem::Vector *xx;
4928 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4929
4930 PetscFunctionBeginUser;
4931 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4932 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4933 if (!snes_ctx->bchandler)
4934 {
4935 xx = new mfem::Vector(array,n);
4936 }
4937 else
4938 {
4939 // make sure we compute a Jacobian with the correct boundary values
4940 if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4941 mfem::Vector txx(array,n);
4942 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4943 xx = snes_ctx->work;
4944 bchandler->ApplyBC(txx,*xx);
4945 }
4946
4947 // Use Operator::GetGradient(x)
4948 mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4949 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4950 if (!snes_ctx->bchandler) { delete xx; }
4951
4952 // Convert to the operator type requested if needed
4953 bool delete_pA = false;
4954 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4955 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4956 if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4957 pA->GetType() != snes_ctx->jacType))
4958 {
4959 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4960 snes_ctx->jacType);
4961 delete_pA = true;
4962 }
4963
4964 // Eliminate essential dofs
4965 if (snes_ctx->bchandler)
4966 {
4967 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4968 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4969 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4970 }
4971
4972 // Get nonzerostate
4973 PetscObjectState nonzerostate;
4974 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4975
4976 // Avoid an unneeded copy of the matrix by replacing the header of P
4977 Mat B = pA->ReleaseMat(false);
4978 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4979 if (delete_pA) { delete pA; }
4980
4981 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4982 // preconditioner fails because MatCreateSubMatrix_Nest does not
4983 // actually return a matrix. Instead, for efficiency reasons,
4984 // it returns a reference to the submatrix. The second time it
4985 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4986 // aborts since the two submatrices are actually different.
4987 // We circumvent this issue by incrementing the nonzero state
4988 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4989 // This does not impact performance in the case of MATNEST
4990 PetscBool isnest;
4991 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4992 CHKERRQ(ierr);
4993 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4994
4995 // Matrix-free case
4996 if (A && A != P)
4997 {
4998 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4999 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5000 }
5001
5002 // Fool DM
5003 DM dm;
5004 MatType mtype;
5005 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
5006 ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
5007 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
5008 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
5009 PetscFunctionReturn(PETSC_SUCCESS);
5010}
5011
5012static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
5013{
5014 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5015
5016 PetscFunctionBeginUser;
5017 mfem::PetscParVector xx(x,true);
5018 mfem::PetscParVector ff(f,true);
5019 if (snes_ctx->bchandler)
5020 {
5021 // we evaluate the Mult method with the correct bc
5022 if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
5023 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
5024 mfem::Vector* txx = snes_ctx->work;
5025 bchandler->ApplyBC(xx,*txx);
5026 snes_ctx->op->Mult(*txx,ff);
5027 // and fix the residual (i.e. f_\partial\Omega = u - g)
5028 bchandler->FixResidualBC(xx,ff);
5029 }
5030 else
5031 {
5032 // use the Mult method of the class
5033 snes_ctx->op->Mult(xx,ff);
5034 }
5035 ff.UpdateVecFromFlags();
5036 PetscFunctionReturn(PETSC_SUCCESS);
5037}
5038
5039static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
5040 void *ctx)
5041{
5042 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5043
5044 PetscFunctionBeginUser;
5045 if (!snes_ctx->objective)
5046 {
5047 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
5048 }
5049 mfem::PetscParVector xx(x,true);
5050 mfem::real_t lf;
5051 (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
5052 *f = (PetscReal)lf;
5053 PetscFunctionReturn(PETSC_SUCCESS);
5054}
5055
5056static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
5057 PetscBool *cy,PetscBool *cw, void* ctx)
5058{
5059 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5060 bool lcy = false,lcw = false;
5061
5062 PetscFunctionBeginUser;
5063 mfem::PetscParVector x(X,true);
5064 mfem::PetscParVector y(Y,true);
5065 mfem::PetscParVector w(W,true);
5066 (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
5067 if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
5068 if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
5069 PetscFunctionReturn(PETSC_SUCCESS);
5070}
5071
5072static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
5073{
5074 Vec F,X,dX,pX;
5075 __mfem_snes_ctx* snes_ctx;
5076
5077 PetscFunctionBeginUser;
5078 /* The update callback receives no context; retrieve it from the SNES function */
5079 ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
5080 ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
5081 if (!it)
5082 {
5083 ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
5084 ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
5085 CHKERRQ(ierr);
5086 ierr = VecDestroy(&pX); CHKERRQ(ierr);
5087 }
5088 ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
5089 CHKERRQ(ierr);
5090 if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
5091 "Missing previous solution");
5092 ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
5093 mfem::PetscParVector f(F,true);
5094 mfem::PetscParVector x(X,true);
5095 mfem::PetscParVector dx(dX,true);
5096 mfem::PetscParVector px(pX,true);
5097 (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
5098 /* Store previous solution */
5099 ierr = VecCopy(X,pX); CHKERRQ(ierr);
5100 PetscFunctionReturn(PETSC_SUCCESS);
5101}
5102
5103static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
5104 void* ctx)
5105{
5106 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
5107
5108 PetscFunctionBeginUser;
5109 if (!monctx)
5110 {
5111 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
5112 }
5113
5114 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
5115 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
5116 monctx->monitor);
5117 if (user_monitor->mon_sol)
5118 {
5119 Vec x;
5120 PetscErrorCode ierr;
5121
5122 ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
5123 mfem::PetscParVector V(x,true);
5124 user_monitor->MonitorSolution(it,res,V);
5125 }
5126 if (user_monitor->mon_res)
5127 {
5128 Vec x;
5129 PetscErrorCode ierr;
5130
5131 ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5132 mfem::PetscParVector V(x,true);
5133 user_monitor->MonitorResidual(it,res,V);
5134 }
5135 user_monitor->MonitorSolver(solver);
5136 PetscFunctionReturn(PETSC_SUCCESS);
5137}
5138
5139static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
5140{
5141 mfem::Operator *op;
5142 PetscErrorCode ierr;
5143
5144 PetscFunctionBeginUser;
5145 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5146 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5147 mfem::PetscParVector xx(x,true);
5148 mfem::PetscParVector yy(y,true);
5149 op->Mult(xx,yy);
5150 yy.UpdateVecFromFlags();
5151 PetscFunctionReturn(PETSC_SUCCESS);
5152}
5153
5154static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
5155{
5156 mfem::Operator *op;
5157 PetscErrorCode ierr;
5158 PetscBool flg,symm;
5159
5160 PetscFunctionBeginUser;
5161 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5162 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5163 mfem::PetscParVector xx(x,true);
5164 mfem::PetscParVector yy(y,true);
5165 ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5166 if (flg && symm)
5167 {
5168 op->Mult(xx,yy);
5169 }
5170 else
5171 {
5172 op->MultTranspose(xx,yy);
5173 }
5174 yy.UpdateVecFromFlags();
5175 PetscFunctionReturn(PETSC_SUCCESS);
5176}
5177
5178static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
5179{
5180 mfem::Operator *op;
5181 PetscErrorCode ierr;
5182
5183 PetscFunctionBeginUser;
5184 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5185 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5186 ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
5187 PetscFunctionReturn(PETSC_SUCCESS);
5188}
5189
5190static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
5191{
5192 PetscFunctionBeginUser;
5193 PetscFunctionReturn(PETSC_SUCCESS);
5194}
5195
5196static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
5197{
5198 __mfem_pc_shell_ctx *ctx;
5199 PetscErrorCode ierr;
5200
5201 PetscFunctionBeginUser;
5202 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5203 if (ctx->op)
5204 {
5205 PetscBool isascii;
5206 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5207 CHKERRQ(ierr);
5208
5209 mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
5210 (ctx->op);
5211 if (ppc)
5212 {
5213 ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5214 }
5215 else
5216 {
5217 if (isascii)
5218 {
5219 ierr = PetscViewerASCIIPrintf(viewer,
5220 "No information available on the mfem::Solver\n");
5221 CHKERRQ(ierr);
5222 }
5223 }
5224 if (isascii && ctx->factory)
5225 {
5226 ierr = PetscViewerASCIIPrintf(viewer,
5227 "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5228 CHKERRQ(ierr);
5229 }
5230 }
5231 PetscFunctionReturn(PETSC_SUCCESS);
5232}
5233
5234static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
5235{
5236 __mfem_pc_shell_ctx *ctx;
5237 PetscErrorCode ierr;
5238
5239 PetscFunctionBeginUser;
5240 mfem::PetscParVector xx(x,true);
5241 mfem::PetscParVector yy(y,true);
5242 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5243 if (ctx->op)
5244 {
5245 ctx->op->Mult(xx,yy);
5246 yy.UpdateVecFromFlags();
5247 }
5248 else // operator is not present, copy x
5249 {
5250 yy = xx;
5251 }
5252 PetscFunctionReturn(PETSC_SUCCESS);
5253}
5254
5255static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
5256{
5257 __mfem_pc_shell_ctx *ctx;
5258 PetscErrorCode ierr;
5259
5260 PetscFunctionBeginUser;
5261 mfem::PetscParVector xx(x,true);
5262 mfem::PetscParVector yy(y,true);
5263 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5264 if (ctx->op)
5265 {
5266 ctx->op->MultTranspose(xx,yy);
5267 yy.UpdateVecFromFlags();
5268 }
5269 else // operator is not present, copy x
5270 {
5271 yy = xx;
5272 }
5273 PetscFunctionReturn(PETSC_SUCCESS);
5274}
5275
5276static PetscErrorCode __mfem_pc_shell_setup(PC pc)
5277{
5278 __mfem_pc_shell_ctx *ctx;
5279
5280 PetscFunctionBeginUser;
5281 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5282 if (ctx->factory)
5283 {
5284 // Delete any owned operator
5285 if (ctx->ownsop)
5286 {
5287 delete ctx->op;
5288 }
5289
5290 // Get current preconditioning Mat
5291 Mat B;
5292 ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5293
5294 // Call user-defined setup
5295 mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
5296 mfem::PetscPreconditionerFactory *factory = ctx->factory;
5297 ctx->op = factory->NewPreconditioner(hB);
5298 ctx->ownsop = true;
5299 ctx->numprec++;
5300 }
5301 PetscFunctionReturn(PETSC_SUCCESS);
5302}
5303
5304static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
5305{
5306 __mfem_pc_shell_ctx *ctx;
5307 PetscErrorCode ierr;
5308
5309 PetscFunctionBeginUser;
5310 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5311 if (ctx->ownsop)
5312 {
5313 delete ctx->op;
5314 }
5315 delete ctx;
5316 PetscFunctionReturn(PETSC_SUCCESS);
5317}
5318
5319#if PETSC_VERSION_LT(3,23,0)
5320
5321static PetscErrorCode __mfem_array_container_destroy(void *ptr)
5322{
5323 PetscErrorCode ierr;
5324
5325 PetscFunctionBeginUser;
5326 ierr = PetscFree(ptr); CHKERRQ(ierr);
5327 PetscFunctionReturn(PETSC_SUCCESS);
5328}
5329
5330static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
5331{
5332 mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
5331{
5333 PetscErrorCode ierr;
5334
5335 PetscFunctionBeginUser;
5336 for (int i=0; i<a->Size(); i++)
5337 {
5338 Mat M = (*a)[i];
5339 MPI_Comm comm = PetscObjectComm((PetscObject)M);
5340 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5341 }
5342 delete a;
5343 PetscFunctionReturn(PETSC_SUCCESS);
5344}
5345
5346#else
5347
5348static PetscErrorCode __mfem_array_container_destroy(void **ptr)
5349{
5350 PetscErrorCode ierr;
5351
5352 PetscFunctionBeginUser;
5353 ierr = PetscFree(*ptr); CHKERRQ(ierr);
5354 PetscFunctionReturn(PETSC_SUCCESS);
5355}
5356
5357static PetscErrorCode __mfem_matarray_container_destroy(void **ptr)
5358{
5359 mfem::Array<Mat> *a = (mfem::Array<Mat>*)(*ptr);
5358{
5360 PetscErrorCode ierr;
5361
5362 PetscFunctionBeginUser;
5363 for (int i=0; i<a->Size(); i++)
5364 {
5365 Mat M = (*a)[i];
5366 MPI_Comm comm = PetscObjectComm((PetscObject)M);
5367 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5368 }
5369 delete a;
5370 PetscFunctionReturn(PETSC_SUCCESS);
5371}
5372
5373#endif
5374
5375static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
5376{
5377 PetscErrorCode ierr;
5378
5379 PetscFunctionBeginUser;
5380 ierr = PetscFree(*ctx); CHKERRQ(ierr);
5381 PetscFunctionReturn(PETSC_SUCCESS);
5382}
5383
5384// Sets the type of PC to PCSHELL and wraps the solver action
5385// if ownsop is true, ownership of precond is transferred to the PETSc object
5386PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
5387{
5388 PetscFunctionBeginUser;
5389 __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5390 ctx->op = &precond;
5391 ctx->ownsop = ownsop;
5392 ctx->factory = NULL;
5393 ctx->numprec = 0;
5394
5395 // In case the PC was already of type SHELL, this will destroy any
5396 // previous user-defined data structure
5397 // We cannot call PCReset as it will wipe out any operator already set
5398 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5399
5400 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5401 ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5402 ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5403 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5404 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5405 CHKERRQ(ierr);
5406 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5407 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5408 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5409 PetscFunctionReturn(PETSC_SUCCESS);
5410}
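// Example (an illustrative sketch; assumes a KSP object obtained elsewhere):
//
//    PC pc;
//    ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
//    mfem::HypreSmoother smoother;               // any mfem::Solver works
//    ierr = MakeShellPC(pc, smoother, false);    // PETSc does not take ownership
//    CHKERRQ(ierr);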
5411
5412// Sets the type of PC to PCSHELL. Uses a PetscPreconditionerFactory to construct the solver
5413// Takes ownership of the solver created by the factory
5414PetscErrorCode MakeShellPCWithFactory(PC pc,
5415 mfem::PetscPreconditionerFactory *factory)
5416{
5417 PetscFunctionBeginUser;
5418 __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5419 ctx->op = NULL;
5420 ctx->ownsop = true;
5421 ctx->factory = factory;
5422 ctx->numprec = 0;
5423
5424 // In case the PC was already of type SHELL, this will destroy any
5425 // previous user-defined data structure
5426 // We cannot call PCReset as it will wipe out any operator already set
5427 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5428
5429 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5430 ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
5431 ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5432 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5433 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5434 CHKERRQ(ierr);
5435 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5436 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5437 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5438 PetscFunctionReturn(PETSC_SUCCESS);
5439}
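// Example factory (an illustrative sketch; MyFactory and MySolver are
// hypothetical, assuming the PetscPreconditionerFactory interface declared
// in petsc.hpp):
//
//    class MyFactory : public mfem::PetscPreconditionerFactory
//    {
//    public:
//       MyFactory() : mfem::PetscPreconditionerFactory("my factory") {}
//       mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh) override
//       { return new MySolver(oh); }  // freshly allocated; owned by the PC
//    };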
5440
5441// Converts from a list (or a marked Array if islist is false) to an IS
5442// st indicates the offset at which to start numbering
5443static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
5444 const mfem::Array<int> *list,
5445 PetscInt st, IS* is)
5446{
5447 PetscInt n = list ? list->Size() : 0,*idxs;
5448 const int *data = list ? list->GetData() : NULL;
5449 PetscErrorCode ierr;
5450
5451 PetscFunctionBeginUser;
5452 ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5453 if (islist)
5454 {
5455 for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5456 }
5457 else
5458 {
5459 PetscInt cum = 0;
5460 for (PetscInt i=0; i<n; i++)
5461 {
5462 if (data[i]) { idxs[cum++] = i+st; }
5463 }
5464 n = cum;
5465 }
5466 ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5467 CHKERRQ(ierr);
5468 PetscFunctionReturn(PETSC_SUCCESS);
5469}
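// For example, with st = 10 the list {0,2,5} becomes the IS {10,12,15}, while
// the marker array {0,1,0,1} (islist == false) becomes the IS {11,13}.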
5470
5471// Converts from a marked Array of Vdofs to an IS
5472// st indicates the offset at which to start numbering
5473// pl2l is an array of local-to-local matrices generated during RAP
5474static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5475 mfem::Array<Mat> &pl2l,
5476 const mfem::Array<int> *mark,
5477 PetscInt st, IS* is)
5478{
5479 mfem::Array<int> sub_dof_marker;
5480 mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
5481 PetscInt nl;
5482 PetscErrorCode ierr;
5483
5484 PetscFunctionBeginUser;
5485 for (int i = 0; i < pl2l.Size(); i++)
5486 {
5487 PetscInt m,n,*ii,*jj;
5488 PetscBool done;
5489 ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
5490 (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5491 MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
5492 ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5493#if defined(PETSC_USE_64BIT_INDICES)
5494 int nnz = (int)ii[m];
5495 int *mii = new int[m+1];
5496 int *mjj = new int[nnz];
5497 for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5498 for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5499 l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
5500#else
5501 l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
5502#endif
5503 ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5504 (const PetscInt**)&ii,
5505 (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5506 MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
5507 << i << " l2l matrix");
5508 }
5509 nl = 0;
5510 for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5511 sub_dof_marker.SetSize(nl);
5512 const int* vdata = mark->GetData();
5513 int* sdata = sub_dof_marker.GetData();
5514 int cumh = 0, cumw = 0;
5515 for (int i = 0; i < l2l.Size(); i++)
5516 {
5517 const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
5518 l2l[i]->Height());
5519 mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
5520 l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5521 cumh += l2l[i]->Height();
5522 cumw += l2l[i]->Width();
5523 }
5524 ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5525 for (int i = 0; i < pl2l.Size(); i++)
5526 {
5527 delete l2l[i];
5528 }
5529 PetscFunctionReturn(PETSC_SUCCESS);
5530}
5531
5532#if !defined(PETSC_HAVE_HYPRE)
5533
5534#if defined(HYPRE_MIXEDINT)
5535#error "HYPRE_MIXEDINT not supported"
5536#endif
5537
5538#include "_hypre_parcsr_mv.h"
5539static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
5540{
5541 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5542 hypre_CSRMatrix *hdiag,*hoffd;
5543 PetscScalar *da,*oa,*aptr;
5544 PetscInt *dii,*djj,*oii,*ojj,*iptr;
5545 PetscInt i,dnnz,onnz,m,n;
5546 PetscMPIInt size;
5547 PetscErrorCode ierr;
5548
5549 PetscFunctionBeginUser;
5550 hdiag = hypre_ParCSRMatrixDiag(hA);
5551 hoffd = hypre_ParCSRMatrixOffd(hA);
5552 m = hypre_CSRMatrixNumRows(hdiag);
5553 n = hypre_CSRMatrixNumCols(hdiag);
5554 dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5555 onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5556 ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5557 ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5558 ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5559 ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
5560 CHKERRQ(ierr);
5561 ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
5562 CHKERRQ(ierr);
5563 ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
5564 CHKERRQ(ierr);
5565 iptr = djj;
5566 aptr = da;
5567 for (i=0; i<m; i++)
5568 {
5569 PetscInt nc = dii[i+1]-dii[i];
5570 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5571 iptr += nc;
5572 aptr += nc;
5573 }
5574 ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5575 if (size > 1)
5576 {
5577 PetscInt *offdj,*coffd;
5578
5579 ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5580 ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5581 ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5582 ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
5583 CHKERRQ(ierr);
5584 offdj = hypre_CSRMatrixJ(hoffd);
5585 coffd = hypre_ParCSRMatrixColMapOffd(hA);
5586 for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5587 ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
5588 CHKERRQ(ierr);
5589 iptr = ojj;
5590 aptr = oa;
5591 for (i=0; i<m; i++)
5592 {
5593 PetscInt nc = oii[i+1]-oii[i];
5594 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5595 iptr += nc;
5596 aptr += nc;
5597 }
5598 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5599 djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5600 }
5601 else
5602 {
5603 oii = ojj = NULL;
5604 oa = NULL;
5605 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5606 }
5607 /* We are responsible for freeing the CSR arrays. However, since we can
5608 take references to a PetscParMatrix but not to raw PETSc arrays, we
5609 create PetscContainer objects that keep these arrays alive and attach
5610 them to the matrix */
5611 void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5612 const char *names[6] = {"_mfem_csr_dii",
5613 "_mfem_csr_djj",
5614 "_mfem_csr_da",
5615 "_mfem_csr_oii",
5616 "_mfem_csr_ojj",
5617 "_mfem_csr_oa"
5618 };
5619 for (i=0; i<6; i++)
5620 {
5621 PetscContainer c;
5622
5623 ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5624 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5625#if PETSC_VERSION_LT(3,23,0)
5626 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5627#else
5628 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
5629#endif
5630 CHKERRQ(ierr);
5631 ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
5632 CHKERRQ(ierr);
5633 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5634 }
5635 PetscFunctionReturn(PETSC_SUCCESS);
5636}
5637
5638static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
5639{
5640 Mat lA;
5641 ISLocalToGlobalMapping rl2g,cl2g;
5642 IS is;
5643 hypre_CSRMatrix *hdiag,*hoffd;
5644 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5645 void *ptrs[2];
5646 const char *names[2] = {"_mfem_csr_aux",
5647 "_mfem_csr_data"
5648 };
5649 PetscScalar *hdd,*hod,*aa,*data;
5650 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5651 PetscInt *aux,*ii,*jj;
5652 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5653 PetscErrorCode ierr;
5654
5655 PetscFunctionBeginUser;
5656 /* access relevant information in ParCSR */
5657 str = hypre_ParCSRMatrixFirstRowIndex(hA);
5658 stc = hypre_ParCSRMatrixFirstColDiag(hA);
5659 hdiag = hypre_ParCSRMatrixDiag(hA);
5660 hoffd = hypre_ParCSRMatrixOffd(hA);
5661 dr = hypre_CSRMatrixNumRows(hdiag);
5662 dc = hypre_CSRMatrixNumCols(hdiag);
5663 nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5664 hdi = hypre_CSRMatrixI(hdiag);
5665 hdj = hypre_CSRMatrixJ(hdiag);
5666 hdd = hypre_CSRMatrixData(hdiag);
5667 oc = hypre_CSRMatrixNumCols(hoffd);
5668 nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5669 hoi = hypre_CSRMatrixI(hoffd);
5670 hoj = hypre_CSRMatrixJ(hoffd);
5671 hod = hypre_CSRMatrixData(hoffd);
5672
5673 /* generate l2g maps for rows and cols */
5674 ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5675 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5676 ierr = ISDestroy(&is); CHKERRQ(ierr);
5677 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5678 ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5679 for (i=0; i<dc; i++) { aux[i] = i+stc; }
5680 for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5681 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5682 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5683 ierr = ISDestroy(&is); CHKERRQ(ierr);
5684
5685 /* create MATIS object */
5686 ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5687 ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5688 ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5689 ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5690 ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5691 ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5692
5693 /* merge local matrices */
5694 ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5695 ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5696 ii = aux;
5697 jj = aux+dr+1;
5698 aa = data;
5699 *ii = *(hdi++) + *(hoi++);
5700 for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5701 {
5702 PetscScalar *aold = aa;
5703 PetscInt *jold = jj,nc = jd+jo;
5704 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5705 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5706 *(++ii) = *(hdi++) + *(hoi++);
5707 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5708 }
5709 for (; cum<dr; cum++) { *(++ii) = nnz; }
5710 ii = aux;
5711 jj = aux+dr+1;
5712 aa = data;
5713 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5714 CHKERRQ(ierr);
5715 ptrs[0] = aux;
5716 ptrs[1] = data;
5717 for (i=0; i<2; i++)
5718 {
5719 PetscContainer c;
5720
5721 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5722 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5723#if PETSC_VERSION_LT(3,23,0)
5724 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5725#else
5726 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
5727#endif
5728 CHKERRQ(ierr);
5729 ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
5730 CHKERRQ(ierr);
5731 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5732 }
5733 ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5734 ierr = MatDestroy(&lA); CHKERRQ(ierr);
5735 ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5736 ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5737 PetscFunctionReturn(PETSC_SUCCESS);
5738}
5739#endif
5740
5741#include <petsc/private/matimpl.h>
5742
5743static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm, PetscInt m,
5744 PetscInt n, Mat *A)
5745{
5746 PetscFunctionBegin;
5747 ierr = MatCreate(comm,A); CHKERRQ(ierr);
5748 ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5749 ierr = PetscObjectChangeTypeName((PetscObject)*A,"mfemdummy"); CHKERRQ(ierr);
5750 (*A)->preallocated = PETSC_TRUE;
5751 ierr = MatSetUp(*A); CHKERRQ(ierr);
5752 PetscFunctionReturn(PETSC_SUCCESS);
5753}
5754
5755#include <petsc/private/vecimpl.h>
5756
5757#if defined(PETSC_HAVE_DEVICE)
5758static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5759{
5760 PetscFunctionBegin;
5761 v->offloadmask = m;
5762 PetscFunctionReturn(PETSC_SUCCESS);
5763}
5764#endif
5765
5766static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5767{
5768 PetscFunctionBegin;
5769#if defined(PETSC_HAVE_DEVICE)
5770 *flg = v->boundtocpu;
5771#else
5772 *flg = PETSC_TRUE;
5773#endif
5774 PetscFunctionReturn(PETSC_SUCCESS);
5775}
5776
5777static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject o)
5778{
5779 PetscErrorCode ierr;
5780
5781 PetscFunctionBegin;
5782 ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5783 PetscFunctionReturn(PETSC_SUCCESS);
5784}
5785
5786#endif // MFEM_USE_PETSC
5787#endif // MFEM_USE_MPI
void Assign(const T *)
Copy data from a pointer. 'Size()' elements are copied.
Definition array.hpp:1112
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
Definition array.hpp:840
int Size() const
Return the logical size of the array.
Definition array.hpp:166
T * GetData()
Returns the data.
Definition array.hpp:140
A class to handle Block systems in a matrix-free implementation.
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
int NumRowBlocks() const
Return the number of row blocks.
int NumColBlocks() const
Return the number of column blocks.
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
Definition device.hpp:262
static int GetId()
Get the device ID of the configured device.
Definition device.hpp:253
static MemoryType GetDeviceMemoryType()
Get the current Device MemoryType. This is the MemoryType used by most MFEM classes when allocating m...
Definition device.hpp:277
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
Definition fe_coll.hpp:27
Ordering::Type GetOrdering() const
Return the ordering method.
Definition fespace.hpp:854
const FiniteElementCollection * FEColl() const
Definition fespace.hpp:856
int GetVDim() const
Returns the vector dimension of the finite element space.
Definition fespace.hpp:819
Wrapper for hypre's ParCSR matrix class.
Definition hypre.hpp:419
MPI_Comm GetComm() const
MPI communicator.
Definition hypre.hpp:609
Wrapper for hypre's parallel vector class.
Definition hypre.hpp:230
Identity Operator I: x -> x.
Definition operator.hpp:815
Class used by MFEM to store pointers to host and/or device memory.
bool DeviceIsValid() const
Return true if device pointer is valid.
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
bool HostIsValid() const
Return true if host pointer is valid.
bool UseDevice() const
Read the internal device flag.
void CopyToHost(T *dest, int size) const
Copy size entries from *this to the host pointer dest.
bool Empty() const
Return true if the Memory object is empty, see Reset().
void Sync(const Memory &other) const
Copy the host/device pointer validity flags from other to *this.
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void Wrap(T *ptr, int size, bool own)
Wrap an externally allocated host pointer, ptr with the current host memory type returned by MemoryMa...
void Delete()
Delete the owned pointers and reset the Memory object.
int SpaceDimension() const
Dimension of the physical space containing the mesh.
Definition mesh.hpp:1309
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
Definition ode.hpp:121
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Definition ode.hpp:124
Pointer to an Operator of a specified type.
Definition handle.hpp:34
Abstract operator.
Definition operator.hpp:25
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
Definition operator.hpp:86
int width
Dimension of the input / number of columns in the matrix.
Definition operator.hpp:28
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
Definition operator.hpp:66
int height
Dimension of the output / number of rows in the matrix.
Definition operator.hpp:27
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
Definition operator.hpp:72
Type
Enumeration defining IDs for some classes derived from Operator.
Definition operator.hpp:296
@ ANY_TYPE
ID for the base class Operator, i.e. any type.
Definition operator.hpp:297
@ PETSC_MATIS
ID for class PetscParMatrix, MATIS format.
Definition operator.hpp:301
@ PETSC_MATHYPRE
ID for class PetscParMatrix, MATHYPRE format.
Definition operator.hpp:304
@ PETSC_MATGENERIC
ID for class PetscParMatrix, unspecified format.
Definition operator.hpp:305
@ PETSC_MATAIJ
ID for class PetscParMatrix, MATAIJ format.
Definition operator.hpp:300
@ PETSC_MATNEST
ID for class PetscParMatrix, MATNEST format.
Definition operator.hpp:303
@ PETSC_MATSHELL
ID for class PetscParMatrix, MATSHELL format.
Definition operator.hpp:302
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an error.
Definition operator.hpp:100
virtual Operator & GetGradient(const Vector &x) const
Evaluate the gradient operator at the point x. The default behavior in class Operator is to generate an error.
Definition operator.hpp:134
Abstract parallel finite element space.
Definition pfespace.hpp:31
MPI_Comm GetComm() const
Definition pfespace.hpp:337
HYPRE_BigInt * GetTrueDofOffsets() const
Definition pfespace.hpp:346
int GetTrueVSize() const override
Return the number of local vector true dofs.
Definition pfespace.hpp:353
ParMesh * GetParMesh() const
Definition pfespace.hpp:341
Helper class for handling essential boundary conditions.
Definition petsc.hpp:592
PetscBCHandler(Type type_=ZERO)
Definition petsc.hpp:601
@ CONSTANT
Constant in time b.c.
Definition petsc.hpp:597
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
Definition petsc.cpp:2845
virtual void Eval(real_t t, Vector &g)
Boundary conditions evaluation.
Definition petsc.hpp:617
void SetTime(real_t t)
Sets the current time.
Definition petsc.hpp:627
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
Definition petsc.cpp:2852
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
Definition petsc.cpp:2945
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
Definition petsc.hpp:624
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
Definition petsc.cpp:2917
void ApplyBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = g (internally evaluated) on ess_tdof_list
Definition petsc.cpp:2868
void Zero(Vector &x)
Replace boundary dofs with 0.
Definition petsc.cpp:2936
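The handler above is typically subclassed to prescribe the boundary values; a minimal sketch follows (the class name and linear ramp are illustrative, and TIME_DEPENDENT is assumed to be a member of PetscBCHandler::Type alongside ZERO and CONSTANT):
class RampBC : public mfem::PetscBCHandler
{
public:
   RampBC(mfem::Array<int> &ess_tdofs)
      : mfem::PetscBCHandler(mfem::PetscBCHandler::TIME_DEPENDENT)
   { SetTDofs(ess_tdofs); }
   // Boundary values grow linearly in time (illustrative profile)
   void Eval(mfem::real_t t, mfem::Vector &g) override { g = t; }
};
An instance is then attached to any PETSc solver wrapper with PetscSolver::SetBCHandler().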
Auxiliary class for BDDC customization.
Definition petsc.hpp:831
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
Definition petsc.cpp:3912
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
Definition petsc.cpp:3921
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
Definition petsc.cpp:3956
Abstract class for PETSc's linear solvers.
Definition petsc.hpp:750
void SetOperator(const Operator &op) override
Definition petsc.cpp:2998
operator petsc::KSP() const
Conversion function to PETSc's KSP type.
Definition petsc.hpp:787
virtual ~PetscLinearSolver()
Definition petsc.cpp:3233
void MultTranspose(const Vector &b, Vector &x) const override
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an error.
Definition petsc.cpp:3228
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true, bool iter_mode=false)
Definition petsc.cpp:2957
void SetPreconditioner(Solver &precond)
Definition petsc.cpp:3143
void Mult(const Vector &b, Vector &x) const override
Application of the solver.
Definition petsc.cpp:3223
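A minimal usage sketch of the linear solver, assuming A is an assembled mfem::HypreParMatrix and B, X are conforming mfem::Vector objects (the option prefix is illustrative):
mfem::PetscLinearSolver solver(A.GetComm(), "main_");
solver.SetOperator(A);
solver.SetRelTol(1e-8);
solver.SetMaxIter(500);
solver.SetPrintLevel(1);
solver.Mult(B, X); // solve A*X = B; runtime options use the "main_" prefix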
bool DeviceRequested() const
Definition petsc.hpp:143
void SetHostInvalid() const
Definition petsc.hpp:98
void SetHostValid() const
Definition petsc.hpp:96
bool WriteRequested() const
Definition petsc.hpp:138
const real_t * GetDevicePointer() const
Definition petsc.cpp:261
const real_t * GetHostPointer() const
Definition petsc.cpp:252
void SyncBaseAndReset()
Definition petsc.hpp:127
bool IsAliasForSync() const
Definition petsc.hpp:100
void MakeAliasForSync(const Memory< real_t > &base_, int offset_, int size_, bool usedev_)
Definition petsc.hpp:102
void SetDeviceInvalid() const
Definition petsc.hpp:99
bool ReadRequested() const
Definition petsc.hpp:133
void SetDeviceValid() const
Definition petsc.hpp:97
void Mult(const Vector &b, Vector &x) const override
Application of the solver.
Definition petsc.cpp:4150
void SetPostCheck(void(*post)(Operator *op, const Vector &X, Vector &Y, Vector &W, bool &changed_y, bool &changed_w))
Definition petsc.cpp:4123
void SetObjective(void(*obj)(Operator *op, const Vector &x, real_t *f))
Specification of an objective function to be used for line search.
Definition petsc.cpp:4112
operator petsc::SNES() const
Conversion function to PETSc's SNES type.
Definition petsc.hpp:944
virtual ~PetscNonlinearSolver()
Definition petsc.cpp:4036
void SetJacobianType(Operator::Type type)
Definition petsc.cpp:4106
void SetUpdate(void(*update)(Operator *op, int it, const Vector &F, const Vector &X, const Vector &D, const Vector &P))
Definition petsc.cpp:4137
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:4004
void SetOperator(const Operator &op) override
Specification of the nonlinear operator.
Definition petsc.cpp:4044
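A minimal usage sketch of the nonlinear solver, assuming op is an mfem::Operator providing Mult() (the residual) and GetGradient() (the Jacobian), and x is the solution vector:
mfem::PetscNonlinearSolver newton(MPI_COMM_WORLD, "newton_");
newton.SetOperator(op);
newton.iterative_mode = true; // use x as the initial guess
mfem::Vector zero(x.Size());
zero = 0.0;
newton.Mult(zero, x); // solve F(x) = 0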
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
Definition petsc.cpp:4206
virtual void Run(Vector &x, real_t &t, real_t &dt, real_t t_final)
Perform time integration from time t [in] to the final time t_final [in].
Definition petsc.cpp:4357
virtual void Step(Vector &x, real_t &t, real_t &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in].
Definition petsc.cpp:4319
operator petsc::TS() const
Conversion function to PETSc's TS type.
Definition petsc.hpp:979
void SetType(PetscODESolver::Type)
Definition petsc.cpp:4301
PetscODESolver::Type GetType() const
Definition petsc.cpp:4295
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:4173
void SetJacobianType(Operator::Type type)
Definition petsc.cpp:4289
virtual ~PetscODESolver()
Definition petsc.cpp:4198
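A minimal usage sketch of the ODE solver, assuming oper is an mfem::TimeDependentOperator and u is the state vector (times and step sizes are illustrative):
mfem::PetscODESolver ode(MPI_COMM_WORLD, "ts_");
ode.Init(oper, mfem::PetscODESolver::ODE_SOLVER_GENERAL);
mfem::real_t t = 0.0, dt = 1e-2, t_final = 1.0;
while (t < t_final) { ode.Step(u, t, dt); } // or: ode.Run(u, t, dt, t_final)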
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool iter_mode=false)
Definition petsc.cpp:3243
Wrapper for PETSc's matrix class.
Definition petsc.hpp:319
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
Definition petsc.cpp:2055
PetscInt GetNumRows() const
Returns the local number of rows.
Definition petsc.cpp:996
void ScaleCols(const Vector &s)
Scale the local column i by s(i).
Definition petsc.cpp:2091
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
Definition petsc.cpp:1969
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
Definition petsc.cpp:2314
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
Definition petsc.cpp:1400
PetscParMatrix & operator-=(const PetscParMatrix &B)
Definition petsc.cpp:1181
void Mult(real_t a, const Vector &x, real_t b, Vector &y) const
Matvec: y = a A x + b y.
Definition petsc.cpp:2018
PetscInt GetColStart() const
Returns the global index of the first local column.
Definition petsc.cpp:989
PetscInt M() const
Returns the global number of rows.
Definition petsc.cpp:1010
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, real_t diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values in X.
Definition petsc.cpp:2285
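A minimal sketch of the elimination call above, assuming ess_tdofs is an mfem::Array<int> of constrained true dofs and X, B are mfem::PetscParVector objects, with X holding the prescribed values:
A.EliminateRowsCols(ess_tdofs, X, B); // rows/cols zeroed, unit diagonal (diag=1.)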
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
Definition petsc.cpp:1989
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:1339
Type GetType() const
Definition petsc.cpp:2341
PetscParMatrix & operator=(const PetscParMatrix &B)
Definition petsc.cpp:1150
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object.
Definition petsc.cpp:2328
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc's MATSHELL object and returns the Mat in B.
Definition petsc.cpp:1352
PetscInt GetNumCols() const
Returns the local number of columns.
Definition petsc.cpp:1003
PetscInt NNZ() const
Returns the number of nonzeros.
Definition petsc.cpp:1024
petsc::Mat A
The actual PETSc object.
Definition petsc.hpp:322
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
Definition petsc.cpp:1979
void Shift(real_t s)
Shift diagonal by a constant.
Definition petsc.cpp:2102
PetscParVector * Y
Definition petsc.hpp:325
PetscParVector * X
Auxiliary vectors for typecasting.
Definition petsc.hpp:325
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
Definition petsc.cpp:1044
operator petsc::Mat() const
Typecasting to PETSc's Mat type.
Definition petsc.hpp:465
void SetMat(petsc::Mat newA)
Replace the inner Mat object. The reference count of newA is increased.
Definition petsc.cpp:1834
void operator*=(real_t s)
Scale all entries by s: A_scaled = s*A.
Definition petsc.cpp:2013
void SetBlockSize(PetscInt rbs, PetscInt cbs=-1)
Set row and column block sizes of a matrix.
Definition petsc.cpp:1031
PetscInt N() const
Returns the global number of columns.
Definition petsc.cpp:1017
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
Definition petsc.cpp:1999
void Init()
Initialize with defaults. Does not initialize inherited members.
Definition petsc.cpp:1037
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
Definition petsc.cpp:1809
PetscInt GetRowStart() const
Returns the global index of the first local row.
Definition petsc.cpp:982
void MultTranspose(real_t a, const Vector &x, real_t b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
Definition petsc.cpp:2036
PetscParMatrix & operator+=(const PetscParMatrix &B)
Definition petsc.cpp:1166
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
Definition petsc.cpp:2080
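A minimal sketch of the extended matvec routines above, assuming A is an mfem::PetscParMatrix and x, y are conforming mfem::Vector objects:
mfem::Vector z(A.Width());
A.Mult(2.0, x, 1.0, y);          // y = 2*A*x + y
A.MultTranspose(1.0, y, 0.0, z); // z = A^T*y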
void ResetMemory()
Completes the operation started with PlaceMemory.
Definition petsc.cpp:896
void SetFlagsFromMask_() const
Definition petsc.cpp:328
void PlaceMemory(Memory< real_t > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding array used by the PETSc Vec object.
Definition petsc.cpp:809
void Randomize(PetscInt seed=0)
Set random values.
Definition petsc.cpp:940
void SetBlockSize(PetscInt bs)
Set block size of a vector.
Definition petsc.cpp:529
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
Definition petsc.cpp:514
real_t * HostReadWrite() override
Shortcut for mfem::ReadWrite(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:500
PetscMemory pdata
Definition petsc.hpp:162
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array, call ResetArray().
Definition petsc.cpp:799
PetscInt GlobalSize() const
Returns the global number of rows.
Definition petsc.cpp:522
PetscParVector & operator-=(const PetscParVector &y)
Definition petsc.cpp:778
real_t * Write(bool=true) override
Shortcut for mfem::Write(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:445
void UpdateVecFromFlags()
Update PETSc's Vec after having accessed its data via GetMemory()
Definition petsc.cpp:362
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
Definition petsc.cpp:955
real_t * ReadWrite(bool=true) override
Shortcut for mfem::ReadWrite(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:475
const real_t * HostRead() const override
Shortcut for mfem::Read(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:440
const real_t * Read(bool=true) const override
Shortcut for mfem::Read(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:416
real_t * HostWrite() override
Shortcut for mfem::Write(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:470
PetscParVector & operator+=(const PetscParVector &y)
Definition petsc.cpp:771
petsc::Vec x
The actual PETSc object.
Definition petsc.hpp:160
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:699
virtual ~PetscParVector()
Calls PETSc's destroy function.
Definition petsc.cpp:601
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray().
Definition petsc.cpp:804
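A minimal sketch of the PlaceArray()/ResetArray() pairing, assuming v is an mfem::PetscParVector and data points to at least v-local-size PetscScalar entries:
v.PlaceArray(data); // v temporarily wraps the external array
v *= 2.0;           // operations now act on the external data
v.ResetArray();     // restore the Vec's original storage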
Vector * GlobalVector() const
Returns the global vector in each processor.
Definition petsc.cpp:704
operator petsc::Vec() const
Typecasting to PETSc's Vec type.
Definition petsc.hpp:234
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
Definition petsc.cpp:750
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
Definition petsc.cpp:583
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
Definition petsc.cpp:736
PetscParVector & operator*=(PetscScalar d)
Definition petsc.cpp:785
PetscParVector & operator=(PetscScalar d)
Set constant values.
Definition petsc.cpp:729
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
Abstract class for PETSc's preconditioners.
Definition petsc.hpp:805
void Mult(const Vector &b, Vector &x) const override
Application of the preconditioner.
Definition petsc.cpp:3396
virtual ~PetscPreconditioner()
Definition petsc.cpp:3406
void SetOperator(const Operator &op) override
Set/update the solver for the given operator.
Definition petsc.cpp:3310
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:3275
operator petsc::PC() const
Conversion function to PETSc's PC type.
Definition petsc.hpp:825
void MultTranspose(const Vector &b, Vector &x) const override
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an error.
Definition petsc.cpp:3401
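A minimal sketch pairing a PETSc preconditioner with the Krylov solver wrapper, assuming A is an mfem::PetscParMatrix (option prefixes are illustrative):
mfem::PetscPreconditioner prec(A.GetComm(), "prec_");
prec.SetOperator(A);
mfem::PetscLinearSolver ksp(A.GetComm(), "ksp_");
ksp.SetOperator(A);
ksp.SetPreconditioner(prec);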
Abstract class for monitoring PETSc's solvers.
Definition petsc.hpp:985
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
Definition petsc.hpp:1000
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
Definition petsc.hpp:994
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Definition petsc.hpp:1006
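These hooks are meant to be overridden; a minimal sketch of a residual monitor, attached with PetscSolver::SetMonitor() (the (monitor_sol, monitor_res) constructor flags are assumed from petsc.hpp):
class NormMonitor : public mfem::PetscSolverMonitor
{
public:
   NormMonitor() : mfem::PetscSolverMonitor(false, true) {}
   void MonitorResidual(PetscInt it, PetscReal norm,
                        const mfem::Vector &r) override
   { mfem::out << "it " << it << ": ||r|| = " << norm << '\n'; }
};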
Abstract class for PETSc's solvers.
Definition petsc.hpp:676
PetscClassId cid
The class id of the actual PETSc object.
Definition petsc.hpp:685
void SetAbsTol(real_t tol)
Definition petsc.cpp:2434
void SetTol(real_t tol)
Definition petsc.cpp:2404
void * private_ctx
Private context for solver.
Definition petsc.hpp:694
void SetPrintLevel(int plev)
Definition petsc.cpp:2486
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Definition petsc.cpp:2601
void SetMaxIter(int max_iter)
Definition petsc.cpp:2459
void SetRelTol(real_t tol)
Definition petsc.cpp:2409
PetscParVector * X
Definition petsc.hpp:688
void CreatePrivateContext()
Definition petsc.cpp:2785
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
Definition petsc.cpp:2397
int GetNumIterations()
Definition petsc.cpp:2727
real_t GetFinalNorm()
Definition petsc.cpp:2760
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:2566
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
Definition petsc.cpp:2387
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
Definition petsc.hpp:682
bool clcustom
Boolean to handle SetFromOptions calls.
Definition petsc.hpp:679
bool operatorset
Boolean to handle SetOperator calls.
Definition petsc.hpp:697
void Customize(bool customize=true) const
Customize object with options set.
Definition petsc.cpp:2661
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
Definition petsc.cpp:2571
void FreePrivateContext()
Definition petsc.cpp:2817
PetscBCHandler * bchandler
Handler for boundary conditions.
Definition petsc.hpp:691
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
Definition petsc.cpp:2620
PetscParVector * B
Right-hand side and solution vector.
Definition petsc.hpp:688
Base class for solvers.
Definition operator.hpp:792
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
Definition operator.hpp:795
Data type sparse matrix.
Definition sparsemat.hpp:51
const int * HostReadJ() const
int NumNonZeroElems() const override
Returns the number of nonzero elements in the matrix.
const real_t * HostReadData() const
void SortColumnIndices()
Sort the column indices corresponding to each row.
const int * HostReadI() const
Base abstract class for first order time dependent operators.
Definition operator.hpp:344
bool isHomogeneous() const
True if type is HOMOGENEOUS.
Definition operator.hpp:413
virtual Operator & GetExplicitGradient(const Vector &u) const
Return an Operator representing dG/du at the given point u and the currently set time.
Definition operator.cpp:312
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
Definition operator.hpp:411
virtual void ExplicitMult(const Vector &u, Vector &v) const
Perform the action of the explicit part of the operator, G: v = G(u, t) where t is the current time.
Definition operator.cpp:282
virtual Operator & GetImplicitGradient(const Vector &u, const Vector &k, real_t shift) const
Return an Operator representing (dF/dk shift + dF/du) at the given u, k, and the currently set time.
Definition operator.cpp:304
virtual void SetTime(const real_t t_)
Set the current time.
Definition operator.hpp:406
virtual void ImplicitMult(const Vector &u, const Vector &k, Vector &v) const
Perform the action of the implicit part of the operator, F: v = F(u, k, t) where t is the current time.
Definition operator.cpp:287
Vector data type.
Definition vector.hpp:82
void MakeDataOwner() const
Set the Vector data (host pointer) ownership flag.
Definition vector.hpp:222
Memory< real_t > & GetMemory()
Return a reference to the Memory object used by the Vector.
Definition vector.hpp:265
Memory< real_t > data
Definition vector.hpp:85
int Size() const
Returns the size of the vector.
Definition vector.hpp:234
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
Definition vector.hpp:145
void SetSize(int s)
Resize the vector to size s.
Definition vector.hpp:584
int dim
Definition ex24.cpp:53
void trans(const Vector &u, Vector &x)
Definition ex27.cpp:412
HYPRE_Int HYPRE_BigInt
real_t b
Definition lissajous.cpp:42
real_t a
Definition lissajous.cpp:41
real_t f(const Vector &p)
void write(std::ostream &os, T value)
Write 'value' to stream.
Definition binaryio.hpp:30
T read(std::istream &is)
Read a value from the stream and return it.
Definition binaryio.hpp:37
MFEM_HOST_DEVICE constexpr auto type(const tuple< T... > &values)
A function intended to be used for extracting the i-th type from a tuple.
Definition tuple.hpp:347
struct ::_p_Vec * Vec
Definition petsc.hpp:72
struct ::_p_Mat * Mat
Definition petsc.hpp:73
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true,...
Definition device.hpp:348
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory<T> &mem, int size, false)
Definition device.hpp:389
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory<T> &mem, int size, false)
Definition device.hpp:355
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true,...
Definition device.hpp:365
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as std::cout.
Definition globals.hpp:66
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
Definition petsc.cpp:2121
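A minimal sketch, assuming R, A, P point to conforming mfem::PetscParMatrix objects; ownership of the product is assumed to pass to the caller:
mfem::PetscParMatrix *rap = mfem::TripleMatrixProduct(R, A, P);
// ... use *rap ...
delete rap;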
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device's DeviceMemoryClass,...
Definition device.hpp:382
void RAP(const DenseMatrix &A, const DenseMatrix &P, DenseMatrix &RAP)
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
Definition petsc.cpp:206
void MFEMFinalizePetsc()
Definition petsc.cpp:246
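A minimal sketch of the initialize/finalize pairing in an MPI program:
int main(int argc, char *argv[])
{
   mfem::Mpi::Init(argc, argv);
   mfem::MFEMInitializePetsc(&argc, &argv);
   // ... assemble systems and use the PETSc wrappers documented above ...
   mfem::MFEMFinalizePetsc();
   return 0;
}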
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory<T> &mem, int size, false)
Definition device.hpp:372
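A minimal sketch of these access shortcuts, assuming v is an mfem::Vector:
const mfem::real_t *d_v = mfem::Read(v.GetMemory(), v.Size()); // device read
mfem::real_t *h_v = mfem::HostWrite(v.GetMemory(), v.Size());  // host write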
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
Definition hypre.cpp:3021
float real_t
Definition config.hpp:46
MemoryType
Memory types supported by MFEM.
@ HOST
Host memory; using new[] and delete[].
@ DEVICE
Device memory; using CUDA or HIP *Malloc and *Free.
void EliminateBC(const HypreParMatrix &A, const HypreParMatrix &Ae, const Array< int > &ess_dof_list, const Vector &X, Vector &B)
Eliminate essential BC specified by ess_dof_list from the solution X to the r.h.s. B.
Definition hypre.cpp:3453
std::function< real_t(const Vector &)> f(real_t mass_coeff)
Definition lor_mms.hpp:30
struct s_NavierContext ctx
real_t p(const Vector &x, real_t t)
HYPRE_Int PetscInt
Definition petsc.hpp:50
struct _p_PetscObject * PetscObject
Definition petsc.hpp:54
real_t PetscScalar
Definition petsc.hpp:51
real_t PetscReal
Definition petsc.hpp:52
MFEM_HOST_DEVICE real_t norm(const Complex &z)
@ HIP_MASK
Bitwise-OR of all HIP backends.
Definition device.hpp:93
@ CUDA_MASK
Bitwise-OR of all CUDA backends.
Definition device.hpp:91