petsc.cpp
// Copyright (c) 2010-2025, Lawrence Livermore National Security, LLC. Produced
// at the Lawrence Livermore National Laboratory. All Rights reserved. See files
// LICENSE and NOTICE for details. LLNL-CODE-806117.
//
// This file is part of the MFEM library. For more information and source code
// availability visit https://mfem.org.
//
// MFEM is free software; you can redistribute it and/or modify it under the
// terms of the BSD-3 license. We welcome feedback and contributions, see file
// CONTRIBUTING.md for details.

// Author: Stefano Zampini <stefano.zampini@gmail.com>

#include "../config/config.hpp"

#ifdef MFEM_USE_MPI
#ifdef MFEM_USE_PETSC

#include "linalg.hpp"
#include "../fem/fem.hpp"

#include "petsc.h"
#if defined(PETSC_HAVE_HYPRE)
#include "petscmathypre.h"
#endif

// Backward compatibility
#if PETSC_VERSION_LT(3,11,0)
#define VecLockReadPush VecLockPush
#define VecLockReadPop VecLockPop
#endif
#if PETSC_VERSION_LT(3,12,0)
#define VecGetArrayWrite VecGetArray
#define VecRestoreArrayWrite VecRestoreArray
#define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
#define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
#endif
#if PETSC_VERSION_LT(3,19,0)
#define PETSC_SUCCESS 0
#endif
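
// Note: on older PETSc releases the macros above map the newer names used
// throughout this file onto the legacy API (e.g. VecLockReadPush falls back
// to VecLockPush before PETSc 3.11), so the rest of the code can use the
// current names unconditionally.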
41
42#include <fstream>
43#include <iomanip>
44#include <cmath>
45#include <cstdlib>
46
47// Note: there are additional #include statements below.
48
49#include "petscinternals.hpp"
50
51// Callback functions: these functions will be called by PETSc
52static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
53static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
54static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
55 void*);
56static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
57static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
58 PetscReal,Mat,
59 Mat,void*);
60static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
61 Mat,Mat,Mat,Mat);
62static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
63static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
64static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
65static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
66static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
67static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
68 PetscBool*,PetscBool*,void*);
69static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
70static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
71static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
72static PetscErrorCode __mfem_pc_shell_setup(PC);
73static PetscErrorCode __mfem_pc_shell_destroy(PC);
74static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
75static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
76static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
77static PetscErrorCode __mfem_mat_shell_destroy(Mat);
78static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
79#if PETSC_VERSION_LT(3,23,0)
80static PetscErrorCode __mfem_array_container_destroy(void*);
81static PetscErrorCode __mfem_matarray_container_destroy(void *);
82#else
83static PetscErrorCode __mfem_array_container_destroy(void**);
84static PetscErrorCode __mfem_matarray_container_destroy(void**);
85#endif
86static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
87
88// auxiliary functions
89static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
90 PetscInt,IS*);
91static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
92 const mfem::Array<int>*,PetscInt,IS*);
93static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
94static PetscErrorCode MakeShellPCWithFactory(PC,
96

// Equivalent functions are present in PETSc source code
// if PETSc has been compiled with hypre support
// We provide them here in case PETSC_HAVE_HYPRE is not defined
#if !defined(PETSC_HAVE_HYPRE)
static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
#endif

#if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
#if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
#define _USE_DEVICE
#define PETSC_VECDEVICE VECCUDA
#define PETSC_MATAIJDEVICE MATAIJCUSPARSE
#define VecDeviceGetArrayRead VecCUDAGetArrayRead
#define VecDeviceGetArrayWrite VecCUDAGetArrayWrite
#define VecDeviceGetArray VecCUDAGetArray
#define VecDeviceRestoreArrayRead VecCUDARestoreArrayRead
#define VecDeviceRestoreArrayWrite VecCUDARestoreArrayWrite
#define VecDeviceRestoreArray VecCUDARestoreArray
#define VecDevicePlaceArray VecCUDAPlaceArray
#define VecDeviceResetArray VecCUDAResetArray
#elif defined(MFEM_USE_HIP) && defined(PETSC_HAVE_HIP)
#define _USE_DEVICE
#define PETSC_VECDEVICE VECHIP
#define PETSC_MATAIJDEVICE MATAIJHIPSPARSE
#define VecDeviceGetArrayRead VecHIPGetArrayRead
#define VecDeviceGetArrayWrite VecHIPGetArrayWrite
#define VecDeviceGetArray VecHIPGetArray
#define VecDeviceRestoreArrayRead VecHIPRestoreArrayRead
#define VecDeviceRestoreArrayWrite VecHIPRestoreArrayWrite
#define VecDeviceRestoreArray VecHIPRestoreArray
#define VecDevicePlaceArray VecHIPPlaceArray
#define VecDeviceResetArray VecHIPResetArray
#else
#define VecDeviceGetArrayRead VecGetArrayRead
#define VecDeviceGetArrayWrite VecGetArrayWrite
#define VecDeviceGetArray VecGetArray
#define VecDeviceRestoreArrayRead VecRestoreArrayRead
#define VecDeviceRestoreArrayWrite VecRestoreArrayWrite
#define VecDeviceRestoreArray VecRestoreArray
#define VecDevicePlaceArray VecPlaceArray
#define VecDeviceResetArray VecResetArray
#endif
#endif

#if defined(PETSC_HAVE_DEVICE)
static PetscErrorCode __mfem_VecSetOffloadMask(Vec,PetscOffloadMask);
#endif
static PetscErrorCode __mfem_VecBoundToCPU(Vec,PetscBool*);
static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject);
static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm,PetscInt,PetscInt,Mat*);

// structs used by PETSc code
typedef struct
{
   mfem::Solver                     *op;
   mfem::PetscPreconditionerFactory *factory;
   bool                             ownsop;
   unsigned long int                numprec;
} __mfem_pc_shell_ctx;
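
// A __mfem_pc_shell_ctx instance is attached to a PCSHELL preconditioner
// (through the standard PETSc PCShellSetContext mechanism, see MakeShellPC
// and MakeShellPCWithFactory above) and retrieved by the __mfem_pc_shell_*
// callbacks to dispatch apply/setup/destroy to the wrapped mfem::Solver or
// to a user-provided preconditioner factory.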

typedef struct
{
   mfem::Operator       *op;        // The nonlinear operator
   mfem::PetscBCHandler *bchandler; // Handling of essential bc
   mfem::Vector         *work;      // Work vector
   mfem::Operator::Type jacType;    // OperatorType for the Jacobian
   // Objective for line search
   void (*objective)(mfem::Operator *op, const mfem::Vector&, mfem::real_t*);
   // PostCheck function (to be called after successful line search)
   void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
                     mfem::Vector&, bool&, bool&);
   // General purpose update function (to be called at the beginning of
   // each nonlinear step)
   void (*update)(mfem::Operator *op, int,
                  const mfem::Vector&, const mfem::Vector&,
                  const mfem::Vector&, const mfem::Vector&);
} __mfem_snes_ctx;

typedef struct
{
   mfem::TimeDependentOperator     *op;        // The time-dependent operator
   mfem::PetscBCHandler            *bchandler; // Handling of essential bc
   mfem::Vector                    *work;      // Work vector
   mfem::Vector                    *work2;     // Work vector
   mfem::Operator::Type            jacType;    // OperatorType for the Jacobian
   enum mfem::PetscODESolver::Type type;
   PetscReal                       cached_shift;
   PetscObjectState                cached_ijacstate;
   PetscObjectState                cached_rhsjacstate;
   PetscObjectState                cached_splits_xstate;
   PetscObjectState                cached_splits_xdotstate;
} __mfem_ts_ctx;
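
// The cached_* object states record the vectors last used to assemble a
// Jacobian, allowing the TS callbacks to skip reassembly when PETSc requests
// the same Jacobian again with unchanged inputs.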

typedef struct
{
   mfem::PetscSolver        *solver;  // The solver object
   mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
} __mfem_monitor_ctx;

// use global scope ierr to check PETSc errors inside mfem calls
static PetscErrorCode ierr;
static PetscMPIInt mpiierr;

using namespace std;

namespace mfem
{

void MFEMInitializePetsc()
{
   MFEMInitializePetsc(NULL,NULL,NULL,NULL);
}

void MFEMInitializePetsc(int *argc,char*** argv)
{
   MFEMInitializePetsc(argc,argv,NULL,NULL);
}

void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
                         const char help[])
{
   // Tell PETSc to use the same CUDA or HIP device as MFEM:
   if (mfem::Device::Allows(mfem::Backend::CUDA_MASK))
   {
#if PETSC_VERSION_LT(3,17,0)
      const char *opts = "-cuda_device";
#else
      const char *opts = "-device_select_cuda";
#endif
      ierr = PetscOptionsSetValue(NULL,opts,
                                  to_string(mfem::Device::GetId()).c_str());
      MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
   }
   if (mfem::Device::Allows(mfem::Backend::HIP_MASK))
   {
#if PETSC_VERSION_LT(3,17,0)
      const char *opts = "-hip_device";
#else
      const char *opts = "-device_select_hip";
#endif
      ierr = PetscOptionsSetValue(NULL,opts,
                                  to_string(mfem::Device::GetId()).c_str());
      MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
   }
   ierr = PetscInitialize(argc,argv,rc_file,help);
   MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
}
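
// Example (for illustration only): a typical MFEM+PETSc application wraps
// its solver phase with the initialization/finalization calls defined here:
//
//   mfem::MFEMInitializePetsc(&argc, &argv, petscrc_file, NULL);
//   // ... build PetscParMatrix/PetscParVector objects and solve ...
//   mfem::MFEMFinalizePetsc();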

void MFEMFinalizePetsc()
{
   ierr = PetscFinalize();
   MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
}

const mfem::real_t* PetscMemory::GetHostPointer() const
{
   int oflags = flags;
   SetHostValid();
   const mfem::real_t *v = mfem::Read(*this,Capacity(),false);
   flags = oflags;
   return v;
}

const mfem::real_t* PetscMemory::GetDevicePointer() const
{
   int oflags = flags;
   SetDeviceValid();
   const mfem::real_t *v = mfem::Read(*this,Capacity(),true);
   flags = oflags;
   return v;
}

// PetscParVector methods

void PetscParVector::SetDataAndSize_()
{
   PetscScalar *array;
   PetscInt    n;
   PetscBool   isnest;

   MFEM_VERIFY(x,"Missing Vec");
   ierr = VecSetUp(x); PCHKERRQ(x,ierr);
   ierr = PetscObjectTypeCompare((PetscObject)x,VECNEST,&isnest); PCHKERRQ(x,ierr);
   MFEM_VERIFY(!isnest,"Not for type nest");
   ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
   MFEM_VERIFY(n >= 0,"Invalid local size");
   size = n;
#if defined(PETSC_HAVE_DEVICE)
   PetscOffloadMask omask;
   PetscBool        isdevice;

   ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
   if (omask != PETSC_OFFLOAD_BOTH)
   {
      ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
   }
#endif
   ierr = VecGetArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
#if defined(PETSC_HAVE_DEVICE)
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (isdevice)
   {
      if (omask != PETSC_OFFLOAD_BOTH)
      {
         ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
      }
      PetscScalar *darray;
      ierr = VecDeviceGetArrayRead(x,(const PetscScalar**)&darray);
      PCHKERRQ(x,ierr);
      pdata.Wrap(array,darray,size,MemoryType::HOST,false);
      ierr = VecDeviceRestoreArrayRead(x,(const PetscScalar**)&darray);
      PCHKERRQ(x,ierr);
   }
   else
#endif
   {
      pdata.Wrap(array,size,MemoryType::HOST,false);
   }
   ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);

#if defined(PETSC_HAVE_DEVICE)
   if (omask == PETSC_OFFLOAD_UNALLOCATED && isdevice) { omask = PETSC_OFFLOAD_CPU; }
   ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
#endif
   data.MakeAlias(pdata,0,size);
   SetFlagsFromMask_();
}
329{
330 MFEM_VERIFY(x,"Missing Vec");
331#if defined(_USE_DEVICE)
332 PetscOffloadMask mask;
333 PetscBool isdevice;
334 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
335 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
336 ""); PCHKERRQ(x,ierr);
337 ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
338 if (isdevice)
339 {
340 switch (mask)
341 {
342 case PETSC_OFFLOAD_CPU:
345 break;
346 case PETSC_OFFLOAD_GPU:
349 break;
350 case PETSC_OFFLOAD_BOTH:
353 break;
354 default:
355 MFEM_ABORT("Unhandled case " << mask);
356 }
357 }
358#endif
359 data.Sync(pdata);
360}
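
// The switch above mirrors PETSc's offload mask onto MFEM's Memory validity
// flags (PETSC_OFFLOAD_CPU <-> host valid, PETSC_OFFLOAD_GPU <-> device
// valid, PETSC_OFFLOAD_BOTH <-> both valid); UpdateVecFromFlags() below
// performs the inverse translation.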

void PetscParVector::UpdateVecFromFlags()
{
   MFEM_VERIFY(x,"Missing Vec");
   ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
#if defined(_USE_DEVICE)
   PetscBool isdevice;
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (isdevice)
   {
      bool dv = pdata.DeviceIsValid();
      bool hv = pdata.HostIsValid();
      PetscOffloadMask mask;
      if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
      else if (dv) { mask = PETSC_OFFLOAD_GPU; }
      else { mask = PETSC_OFFLOAD_CPU; }
      ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
   }
   else
#endif
   {
      /* Just make sure we have an up-to-date copy on the CPU for PETSc */
      PetscScalar *v;
      ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
      pdata.CopyToHost(v,size);
      ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
   }
}

void PetscParVector::SetVecType_()
{
   VecType vectype;
   MFEM_VERIFY(x,"Missing Vec");
   ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
#if defined(_USE_DEVICE)
   switch (Device::GetDeviceMemoryType())
   {
      case MemoryType::DEVICE:
      case MemoryType::MANAGED:
         ierr = VecSetType(x,PETSC_VECDEVICE); PCHKERRQ(x,ierr);
         break;
      default:
         ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
         break;
   }
#else
   if (!vectype)
   {
      ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
   }
#endif
}
const mfem::real_t* PetscParVector::Read(bool on_dev) const
{
   const PetscScalar *dummy;
   MFEM_VERIFY(x,"Missing Vec");
#if defined(PETSC_HAVE_DEVICE)
   PetscBool isdevice;
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (on_dev && isdevice)
   {
      ierr = VecDeviceGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecDeviceRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
   }
   else
#endif
   {
      ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
   }
   SetFlagsFromMask_();
   return mfem::Read(pdata, size, on_dev);
}

const mfem::real_t* PetscParVector::HostRead() const
{
   return Read(false);
}

mfem::real_t* PetscParVector::Write(bool on_dev)
{
   PetscScalar *dummy;
   MFEM_VERIFY(x,"Missing Vec");
#if defined(PETSC_HAVE_DEVICE)
   PetscBool isdevice;
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (on_dev && isdevice)
   {
      ierr = VecDeviceGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecDeviceRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
   }
   else
#endif
   {
      ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
   }
   ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return mfem::Write(pdata, size, on_dev);
}

mfem::real_t* PetscParVector::HostWrite()
{
   return Write(false);
}

mfem::real_t* PetscParVector::ReadWrite(bool on_dev)
{
   PetscScalar *dummy;
   MFEM_VERIFY(x,"Missing Vec");
#if defined(PETSC_HAVE_DEVICE)
   PetscBool isdevice;
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (on_dev && isdevice)
   {
      ierr = VecDeviceGetArray(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecDeviceRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
   }
   else
#endif
   {
      ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
   }
   ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return mfem::ReadWrite(pdata, size, on_dev);
}

mfem::real_t* PetscParVector::HostReadWrite()
{
   return ReadWrite(false);
}

void PetscParVector::UseDevice(bool dev) const
{
   MFEM_VERIFY(x,"Missing Vec");
#if defined(PETSC_HAVE_DEVICE)
   ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
#endif
}

bool PetscParVector::UseDevice() const
{
   PetscBool flg;
   MFEM_VERIFY(x,"Missing Vec");
   ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
   return flg ? false : true;
}

PetscInt PetscParVector::GlobalSize() const
{
   PetscInt N;
   ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
   return N;
}

void PetscParVector::SetBlockSize(PetscInt bs)
{
   ierr = VecSetBlockSize(x,bs); PCHKERRQ(x,ierr);
}

PetscParVector::PetscParVector(MPI_Comm comm, const Vector &x_,
                               bool copy) : Vector()
{
   PetscBool isdevice;

   int n = x_.Size();
   ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
   ierr = VecSetSizes(x,n,PETSC_DECIDE); PCHKERRQ(x,ierr);
   SetVecType_();
   SetDataAndSize_();
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (copy)
   {
      /* we use PETSc accessors to flag valid memory location to PETSc */
      PetscErrorCode (*rest)(Vec,PetscScalar**);
      PetscScalar *array;
#if defined(PETSC_HAVE_DEVICE)
      if (isdevice && x_.UseDevice())
      {
         UseDevice(true);
         ierr = VecDeviceGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
         rest = VecDeviceRestoreArrayWrite;
      }
      else
#endif
      {
         UseDevice(false);
         ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
         rest = VecRestoreArrayWrite;
      }
      pdata.CopyFrom(x_.GetMemory(), n);
      ierr = (*rest)(x,&array); PCHKERRQ(x,ierr);
      SetFlagsFromMask_();
   }
   else // don't copy, just set the device flag
   {
      if (isdevice && x_.UseDevice())
      {
         UseDevice(true);
      }
      else
      {
         UseDevice(false);
      }
   }
}

PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
                               PetscInt *col) : Vector()
{
   ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
   if (col)
   {
      PetscMPIInt myid;
      mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
      ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
   }
   else
   {
      ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
   }
   SetVecType_();
   SetDataAndSize_();
}

PetscParVector::~PetscParVector()
{
   MPI_Comm comm = PetscObjectComm((PetscObject)x);
   ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
   pdata.Delete();
}
607
609 PetscScalar *data_, PetscInt *col) : Vector()
610{
611 MFEM_VERIFY(col,"Missing distribution");
612 PetscMPIInt myid;
613 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
614 ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
615 &x); CCHKERRQ(comm,ierr)
616 SetVecType_();
618}
619
621{
622 ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
624}
625
627 bool transpose, bool allocate) : Vector()
628{
629 PetscInt loc = transpose ? op.Height() : op.Width();
630
631 ierr = VecCreate(comm,&x);
632 CCHKERRQ(comm,ierr);
633 ierr = VecSetSizes(x,loc,PETSC_DECIDE);
634 PCHKERRQ(x,ierr);
635
636 SetVecType_();
637 if (allocate)
638 {
640 }
641 else /* Vector intended to be used with Place/ResetMemory calls */
642 {
643 size = loc;
644 }
645}
646
648 bool transpose, bool allocate) : Vector()
649{
650 Mat pA = const_cast<PetscParMatrix&>(A);
651 if (!transpose)
652 {
653 ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
654 }
655 else
656 {
657 ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
658 }
659 SetVecType_();
660 if (!allocate) /* Vector intended to be used with Place/ResetMemory calls */
661 {
662 PetscInt n;
663 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
664 size = n;
665 }
666 else
667 {
669 }
670}
671
673{
674 if (ref)
675 {
676 ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
677 }
678 x = y;
680}
681
683{
684 HYPRE_BigInt* offsets = pfes->GetTrueDofOffsets();
685 MPI_Comm comm = pfes->GetComm();
686 ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
687
688 PetscMPIInt myid = 0;
689 if (!HYPRE_AssumedPartitionCheck())
690 {
691 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
692 }
693 ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
694 PCHKERRQ(x,ierr);
695 SetVecType_();
697}

MPI_Comm PetscParVector::GetComm() const
{
   return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
}

Vector * PetscParVector::GlobalVector() const
{
   VecScatter        scctx;
   Vec               vout;
   const PetscScalar *array;
   PetscInt          size;

   ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
   ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
   PCHKERRQ(x,ierr);
   ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
   PCHKERRQ(x,ierr);
   ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
   ierr = VecGetArrayRead(vout,&array); PCHKERRQ(x,ierr);
   ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
   Array<PetscScalar> data(size);
   data.Assign(array);
   ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(x,ierr);
   ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
   Vector *v = new Vector(data, internal::to_int(size));
   v->MakeDataOwner();
   data.LoseData();
   return v;
}

PetscParVector& PetscParVector::operator=(PetscScalar d)
{
   ierr = VecSet(x,d); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return *this;
}

PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
                                          const Array<PetscScalar>& vals)
{
   MFEM_VERIFY(idx.Size() == vals.Size(),
               "Size mismatch between indices and values");
   PetscInt n = idx.Size();
   ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
   PCHKERRQ(x,ierr);
   ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
   ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return *this;
}

PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
                                          const Array<PetscScalar>& vals)
{
   MFEM_VERIFY(idx.Size() == vals.Size(),
               "Size mismatch between indices and values");
   PetscInt n = idx.Size();
   ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
   PCHKERRQ(x,ierr);
   ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
   ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return *this;
}

PetscParVector& PetscParVector::operator=(const PetscParVector &y)
{
   ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return *this;
}

PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
{
   ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return *this;
}

PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
{
   ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return *this;
}

PetscParVector& PetscParVector::operator*=(PetscScalar s)
{
   ierr = VecScale(x,s); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return *this;
}

PetscParVector& PetscParVector::operator+=(PetscScalar s)
{
   ierr = VecShift(x,s); PCHKERRQ(x,ierr);
   SetFlagsFromMask_();
   return *this;
}

void PetscParVector::PlaceArray(PetscScalar *temp_data)
{
   ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
}

void PetscParVector::ResetArray()
{
   ierr = VecResetArray(x); PCHKERRQ(x,ierr);
}

810{
811 PetscInt n;
812
813 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
814 MFEM_VERIFY(n <= mem.Capacity(),
815 "Memory size " << mem.Capacity() << " < " << n << " vector size!");
816 MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
817 MFEM_VERIFY(data.Empty(),"Vector data is not empty");
818#if defined(_USE_DEVICE)
819 PetscBool isdevice;
820 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
821 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
822 ""); PCHKERRQ(x,ierr);
823 if (isdevice)
824 {
825 bool usedev = mem.DeviceIsValid() || (!rw && mem.UseDevice());
826 pdata.MakeAliasForSync(mem,0,n,rw,true,usedev);
827 if (usedev)
828 {
829 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
830 ierr = VecDevicePlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
831 }
832 else
833 {
834 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
835 ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
836 }
837 }
838 else
839#endif
840 {
842 size);
843 pdata.MakeAliasForSync(mem,0,n,rw,true,false);
844#if defined(PETSC_HAVE_DEVICE)
845 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
846#endif
847 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
848 }
849 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
851}
852
854{
855 PetscInt n;
856
857 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
858 MFEM_VERIFY(n <= mem.Capacity(),
859 "Memory size " << mem.Capacity() << " < " << n << " vector size!");
860 MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
861 MFEM_VERIFY(data.Empty(),"Vector data is not empty");
862#if defined(_USE_DEVICE)
863 PetscBool isdevice;
864 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
865 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
866 ""); PCHKERRQ(x,ierr);
867 if (isdevice)
868 {
869 pdata.MakeAliasForSync(mem,0,n,mem.DeviceIsValid());
870 if (mem.DeviceIsValid())
871 {
872 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
873 ierr = VecDevicePlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
874 }
875 else
876 {
877 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
878 ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
879 }
880 }
881 else
882#endif
883 {
884 const mfem::real_t *w = mfem::HostRead(mem,size);
885 pdata.MakeAliasForSync(mem,0,n,false);
886#if defined(PETSC_HAVE_DEVICE)
887 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
888#endif
889 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
890 }
892 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
893 ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
894}

void PetscParVector::ResetMemory()
{
   MFEM_VERIFY(pdata.IsAliasForSync(),"Vector data is not an alias");
   MFEM_VERIFY(!pdata.Empty(),"Vector data is empty");
   bool read = pdata.ReadRequested();
   bool usedev = pdata.DeviceRequested();
   bool write = pdata.WriteRequested();
   /*
     check for strange corner cases
     - device memory used but somehow PETSc ended up putting up to date data on host
     - host memory used but somehow PETSc ended up putting up to date data on device
   */
   if (write)
   {
      const PetscScalar *v;
#if defined(PETSC_HAVE_DEVICE)
      PetscOffloadMask mask;
      ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
      if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
          (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
#endif
      {
         ierr = VecGetArrayRead(x,&v); PCHKERRQ(x,ierr);
         pdata.CopyFromHost(v, size);
         ierr = VecRestoreArrayRead(x,&v); PCHKERRQ(x,ierr);
      }
   }
   pdata.CompleteSync();
   data.Reset();
   if (read && !write) { ierr = VecLockReadPop(x); PCHKERRQ(x,ierr); }
   if (usedev)
   {
#if defined(PETSC_HAVE_DEVICE)
      ierr = VecDeviceResetArray(x); PCHKERRQ(x,ierr);
#else
      MFEM_VERIFY(false,"This should not happen");
#endif
   }
   else
   {
      ierr = VecResetArray(x); PCHKERRQ(x,ierr);
   }
}
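
// Example (for illustration only): PlaceMemory/ResetMemory let a
// PetscParVector temporarily wrap the memory of an existing mfem::Vector
// without copying:
//
//   mfem::Vector v(pv.Size());
//   pv.PlaceMemory(v.GetMemory(), true); // PETSc now operates on v's data
//   // ... use pv with PETSc routines ...
//   pv.ResetMemory();                    // detach and sync validity flags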
939
941{
942 PetscRandom rctx = NULL;
943
944 if (seed)
945 {
946 ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
947 PCHKERRQ(x,ierr);
948 ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
949 ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
950 }
951 ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
952 ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
953}
954
955void PetscParVector::Print(const char *fname, bool binary) const
956{
957 if (fname)
958 {
959 PetscViewer view;
960
961 if (binary)
962 {
963 ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
964 FILE_MODE_WRITE,&view);
965 }
966 else
967 {
968 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
969 }
970 PCHKERRQ(x,ierr);
971 ierr = VecView(x,view); PCHKERRQ(x,ierr);
972 ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
973 }
974 else
975 {
976 ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
977 }
978}
979

// PetscParMatrix methods

PetscInt PetscParMatrix::GetRowStart() const
{
   PetscInt N;
   ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::GetColStart() const
{
   PetscInt N;
   ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::GetNumRows() const
{
   PetscInt N;
   ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::GetNumCols() const
{
   PetscInt N;
   ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::M() const
{
   PetscInt N;
   ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::N() const
{
   PetscInt N;
   ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
   return N;
}

PetscInt PetscParMatrix::NNZ() const
{
   MatInfo info;
   ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
   return (PetscInt)info.nz_used;
}

void PetscParMatrix::SetBlockSizes(PetscInt rbs, PetscInt cbs)
{
   if (cbs < 0) { cbs = rbs; }
   ierr = MatSetBlockSizes(A,rbs,cbs); PCHKERRQ(A,ierr);
}

void PetscParMatrix::Init()
{
   A = NULL;
   X = Y = NULL;
   height = width = 0;
}

PetscParMatrix::PetscParMatrix()
{
   Init();
}

PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
                               const mfem::Array<PetscInt>& rows,
                               const mfem::Array<PetscInt>& cols)
{
   Init();

   Mat B = const_cast<PetscParMatrix&>(pB);

   IS isr,isc;
   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
                          rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
                          cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
   ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
   ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
   ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);

   height = GetNumRows();
   width = GetNumCols();
}

PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
{
   Init();
   height = pa->Height();
   width = pa->Width();
   ConvertOperator(pa->GetComm(),*pa,&A,tid);
}

PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
{
   Init();
   height = ha->Height();
   width = ha->Width();
   ConvertOperator(ha->GetComm(),*ha,&A,tid);
}

PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
{
   Init();
   height = sa->Height();
   width = sa->Width();
   ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
}

PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
                               Operator::Type tid)
{
   Init();
   height = op->Height();
   width = op->Width();
   ConvertOperator(comm,*op,&A,tid);
}

PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
                               PetscInt *row_starts, SparseMatrix *diag,
                               Operator::Type tid)
{
   Init();
   BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
                            tid==PETSC_MATAIJ,&A);
   SetUpForDevice();
   // update base class
   height = GetNumRows();
   width = GetNumCols();
}

PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
                               PetscInt global_num_cols, PetscInt *row_starts,
                               PetscInt *col_starts, SparseMatrix *diag,
                               Operator::Type tid)
{
   Init();
   BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
                            tid==PETSC_MATAIJ,&A);
   SetUpForDevice();
   // update base class
   height = GetNumRows();
   width = GetNumCols();
}

PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
{
   if (A)
   {
      MPI_Comm comm = PetscObjectComm((PetscObject)A);
      ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
      if (X) { delete X; }
      if (Y) { delete Y; }
      X = Y = NULL;
   }
   height = B.Height();
   width = B.Width();
#if defined(PETSC_HAVE_HYPRE)
   ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
#else
   ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
#endif
   SetUpForDevice();
   return *this;
}

PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
{
   if (A)
   {
      MPI_Comm comm = PetscObjectComm((PetscObject)A);
      ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
      if (X) { delete X; }
      if (Y) { delete Y; }
      X = Y = NULL;
   }
   height = B.Height();
   width = B.Width();
   ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
   return *this;
}

PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
{
   if (!A)
   {
      ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
   }
   else
   {
      MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
      MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
      ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
   }
   return *this;
}

PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
{
   if (!A)
   {
      ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
      ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
   }
   else
   {
      MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
      MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
      ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
   }
   return *this;
}

void PetscParMatrix::
BlockDiagonalConstructor(MPI_Comm comm,
                         PetscInt *row_starts, PetscInt *col_starts,
                         SparseMatrix *diag, bool assembled, Mat* Ad)
{
   Mat         A;
   PetscInt    lrsize,lcsize,rstart,cstart;
   PetscMPIInt myid = 0,commsize;

   mpiierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,mpiierr);
   if (!HYPRE_AssumedPartitionCheck())
   {
      mpiierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,mpiierr);
   }
   lrsize = row_starts[myid+1]-row_starts[myid];
   rstart = row_starts[myid];
   lcsize = col_starts[myid+1]-col_starts[myid];
   cstart = col_starts[myid];

   if (!assembled)
   {
      IS is;
      ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
      ISLocalToGlobalMapping rl2g,cl2g;
      ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
      ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
      if (row_starts != col_starts)
      {
         ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
         CCHKERRQ(comm,ierr);
         ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
         ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
      }
      else
      {
         ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
         cl2g = rl2g;
      }

      // Create the PETSc object (MATIS format)
      ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
      ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
      PCHKERRQ(A,ierr);
      ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
      ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
      ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
      ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)

      // Copy SparseMatrix into PETSc SeqAIJ format
      // pass through host for now
      Mat lA;
      ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
      const int *II = diag->HostReadI();
      const int *JJ = diag->HostReadJ();
#if defined(PETSC_USE_64BIT_INDICES)
      PetscInt *pII,*pJJ;
      int m = diag->Height()+1, nnz = II[diag->Height()];
      ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
      for (int i = 0; i < m; i++) { pII[i] = II[i]; }
      for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
      ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
                                          diag->HostReadData()); PCHKERRQ(lA,ierr);
      ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
#else
      ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
                                          diag->HostReadData()); PCHKERRQ(lA,ierr);
#endif
   }
   else
   {
      PetscScalar *da;
      PetscInt    *dii,*djj,*oii,
                  m = diag->Height()+1, nnz = diag->NumNonZeroElems();

      diag->SortColumnIndices();
      // if we can take ownership of the SparseMatrix arrays, we can avoid this
      // step
      ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
      ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
      ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
      if (sizeof(PetscInt) == sizeof(int))
      {
         ierr = PetscMemcpy(dii,diag->HostReadI(),m*sizeof(PetscInt));
         CCHKERRQ(PETSC_COMM_SELF,ierr);
         ierr = PetscMemcpy(djj,diag->HostReadJ(),nnz*sizeof(PetscInt));
         CCHKERRQ(PETSC_COMM_SELF,ierr);
      }
      else
      {
         const int *iii = diag->HostReadI();
         const int *jjj = diag->HostReadJ();
         for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
         for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
      }
      ierr = PetscMemcpy(da,diag->HostReadData(),nnz*sizeof(PetscScalar));
      CCHKERRQ(PETSC_COMM_SELF,ierr);
      ierr = PetscCalloc1(m,&oii);
      CCHKERRQ(PETSC_COMM_SELF,ierr);
      if (commsize > 1)
      {
         ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
                                               PETSC_DECIDE,
                                               dii,djj,da,oii,NULL,NULL,&A);
         CCHKERRQ(comm,ierr);
      }
      else
      {
         ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
         CCHKERRQ(comm,ierr);
      }

      void *ptrs[4] = {dii,djj,da,oii};
      const char *names[4] = {"_mfem_csr_dii",
                              "_mfem_csr_djj",
                              "_mfem_csr_da",
                              "_mfem_csr_oii",
                             };
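      // The PetscContainer objects created below transfer ownership of the
      // CSR arrays to the Mat: PETSc calls __mfem_array_container_destroy
      // to free each array when the matrix is destroyed.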
      for (PetscInt i=0; i<4; i++)
      {
         PetscContainer c;

         ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
         ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
#if PETSC_VERSION_LT(3,23,0)
         ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
#else
         ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
#endif
         CCHKERRQ(comm,ierr);
         ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
         CCHKERRQ(comm,ierr);
         ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
      }
   }

   // Tell PETSc the matrix is ready to be used
   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);

   *Ad = A;
}

MPI_Comm PetscParMatrix::GetComm() const
{
   return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
}

// TODO ADD THIS CONSTRUCTOR
//PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
//                               PetscInt glob_ncols, int *I, PetscInt *J,
//                               mfem::real_t *data, PetscInt *rows, PetscInt *cols)
//{
//}

// TODO This should take a reference on op but how?
void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
{
   ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
   ierr = MatSetSizes(*A,op->Height(),op->Width(),
                      PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
   ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
   ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
   ierr = MatShellSetOperation(*A,MATOP_MULT,
                               (void (*)())__mfem_mat_shell_apply);
   PCHKERRQ(A,ierr);
   ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
                               (void (*)())__mfem_mat_shell_apply_transpose);
   PCHKERRQ(A,ierr);
   ierr = MatShellSetOperation(*A,MATOP_COPY,
                               (void (*)())__mfem_mat_shell_copy);
   PCHKERRQ(A,ierr);
   ierr = MatShellSetOperation(*A,MATOP_DESTROY,
                               (void (*)())__mfem_mat_shell_destroy);
#if defined(_USE_DEVICE)
   MemoryType mt = GetMemoryType(op->GetMemoryClass());
   if (mt == MemoryType::DEVICE || mt == MemoryType::MANAGED)
   {
      ierr = MatShellSetVecType(*A,PETSC_VECDEVICE); PCHKERRQ(A,ierr);
      ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
   }
   else
   {
      ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
   }
#endif
   PCHKERRQ(A,ierr);
   ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
}
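
// MakeWrapper exposes a generic mfem::Operator to PETSc as a MATSHELL: the
// Mat stores the Operator pointer as its shell context, and the
// __mfem_mat_shell_* callbacks forward MatMult/MatMultTranspose to
// Operator::Mult/Operator::MultTranspose.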

void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
                                     Operator::Type tid)
{
   PetscParMatrix *pA = const_cast<PetscParMatrix *>
                        (dynamic_cast<const PetscParMatrix *>(&op));
   HypreParMatrix *pH = const_cast<HypreParMatrix *>
                        (dynamic_cast<const HypreParMatrix *>(&op));
   BlockOperator *pB = const_cast<BlockOperator *>
                       (dynamic_cast<const BlockOperator *>(&op));
   IdentityOperator *pI = const_cast<IdentityOperator *>
                          (dynamic_cast<const IdentityOperator *>(&op));
   SparseMatrix *pS = const_cast<SparseMatrix *>
                      (dynamic_cast<const SparseMatrix *>(&op));

   if (pA && tid == ANY_TYPE) // use same object and return
   {
      ierr = PetscObjectReference((PetscObject)(pA->A));
      CCHKERRQ(pA->GetComm(),ierr);
      *A = pA->A;
      return;
   }

   PetscBool avoidmatconvert = PETSC_FALSE;
   if (pA) // we test for these types since MatConvert will fail
   {
      ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
                                       MATSHELL,"");
      CCHKERRQ(comm,ierr);
   }
   if (pA && !avoidmatconvert)
   {
      Mat       At = NULL;
      PetscBool istrans;
#if PETSC_VERSION_LT(3,10,0)
      PetscBool ismatis;
#endif

#if PETSC_VERSION_LT(3,18,0)
      ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
#else
      ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEVIRTUAL,
                                    &istrans);
#endif
      CCHKERRQ(pA->GetComm(),ierr);
      if (!istrans)
      {
         if (tid == pA->GetType()) // use same object and return
         {
            ierr = PetscObjectReference((PetscObject)(pA->A));
            CCHKERRQ(pA->GetComm(),ierr);
            *A = pA->A;
            return;
         }
#if PETSC_VERSION_LT(3,10,0)
         ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
         CCHKERRQ(pA->GetComm(),ierr);
#endif
      }
      else
      {
         ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
#if PETSC_VERSION_LT(3,10,0)
         ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
#endif
         CCHKERRQ(pA->GetComm(),ierr);
      }

      // Try to convert
      if (tid == PETSC_MATAIJ)
      {
#if PETSC_VERSION_LT(3,10,0)
         if (ismatis)
         {
            if (istrans)
            {
               Mat B;

               ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
               ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
               ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
            }
            else
            {
               ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
               PCHKERRQ(pA->A,ierr);
            }
         }
         else
#endif
         {
            PetscMPIInt size;
            mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);

            // call MatConvert and see if a converter is available
            if (istrans)
            {
               Mat B;
               ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
               PCHKERRQ(pA->A,ierr);
               ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
               ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
            }
            else
            {
               ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
               PCHKERRQ(pA->A,ierr);
            }
         }
      }
      else if (tid == PETSC_MATIS)
      {
         if (istrans)
         {
            Mat B;
            ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
            ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
            ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
         }
         else
         {
            ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
         }
      }
      else if (tid == PETSC_MATHYPRE)
      {
#if defined(PETSC_HAVE_HYPRE)
         if (istrans)
         {
            Mat B;
            ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
            ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
            ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
         }
         else
         {
            ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
         }
#else
         MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
#endif
      }
      else if (tid == PETSC_MATSHELL)
      {
         MakeWrapper(comm,&op,A);
      }
      else
      {
         MFEM_ABORT("Unsupported operator type conversion " << tid)
      }
   }
   else if (pH)
   {
      if (tid == PETSC_MATAIJ)
      {
#if defined(PETSC_HAVE_HYPRE)
         ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
                                    PETSC_USE_POINTER,A);
#else
         ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
#endif
         CCHKERRQ(pH->GetComm(),ierr);
      }
      else if (tid == PETSC_MATIS)
      {
#if defined(PETSC_HAVE_HYPRE)
         ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
                                    PETSC_USE_POINTER,A);
#else
         ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
#endif
         CCHKERRQ(pH->GetComm(),ierr);
      }
      else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
      {
#if defined(PETSC_HAVE_HYPRE)
         ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
                                    PETSC_USE_POINTER,A);
         CCHKERRQ(pH->GetComm(),ierr);
#else
         MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
#endif
      }
      else if (tid == PETSC_MATSHELL)
      {
         MakeWrapper(comm,&op,A);
      }
      else
      {
         MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
                    " is not implemented");
      }
   }
   else if (pB)
   {
      Mat      *mats,*matsl2l = NULL;
      PetscInt i,j,nr,nc;

      nr = pB->NumRowBlocks();
      nc = pB->NumColBlocks();
      ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
      if (tid == PETSC_MATIS)
      {
         ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
      }
      for (i=0; i<nr; i++)
      {
         PetscBool needl2l = PETSC_TRUE;

         for (j=0; j<nc; j++)
         {
            if (!pB->IsZeroBlock(i,j))
            {
               ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
               if (tid == PETSC_MATIS && needl2l)
               {
                  PetscContainer c;
                  ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
                                          (PetscObject*)&c);
                  PCHKERRQ(mats[i*nc+j],ierr);
                  // special case for block operators: the local Vdofs should be
                  // ordered as:
                  // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
                  // with m fields, Ni the number of Vdofs for the i-th field
                  if (c)
                  {
                     Array<Mat> *l2l = NULL;
                     ierr = PetscContainerGetPointer(c,(void**)&l2l);
                     PCHKERRQ(c,ierr);
                     MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
                                 << l2l->Size() << " for block row " << i );
                     ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
                     PCHKERRQ(c,ierr);
                     matsl2l[i] = (*l2l)[0];
                     needl2l = PETSC_FALSE;
                  }
               }
            }
         }
      }
      ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
      if (tid == PETSC_MATIS)
      {
         ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);

         mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
         for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
         ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);

         PetscContainer c;
         ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
         ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
#if PETSC_VERSION_LT(3,23,0)
         ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
#else
         ierr = PetscContainerSetCtxDestroy(c,__mfem_matarray_container_destroy);
#endif
         PCHKERRQ(c,ierr);
         ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
         PCHKERRQ((*A),ierr);
         ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
      }
      for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
      ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
   }
   else if (pI && tid == PETSC_MATAIJ)
   {
      PetscInt rst;

      ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
      ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
      PCHKERRQ(A,ierr);
      ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
      ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
      ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
      ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
      ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
      for (PetscInt i = rst; i < rst+pI->Height(); i++)
      {
         ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
      }
      ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
      ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
   }
   else if (pS)
   {
      if (tid == PETSC_MATSHELL)
      {
         MakeWrapper(comm,&op,A);
      }
      else
      {
         /* from SparseMatrix to SEQAIJ -> always pass through host for now */
         Mat B;
         PetscScalar *pdata;
         PetscInt *pii,*pjj,*oii;
         PetscMPIInt size;

         int m = pS->Height();
         int n = pS->Width();
         const int *ii = pS->HostReadI();
         const int *jj = pS->HostReadJ();
         const mfem::real_t *data = pS->HostReadData();

         ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
         ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
         ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
         pii[0] = ii[0];
         for (int i = 0; i < m; i++)
         {
            bool issorted = true;
            pii[i+1] = ii[i+1];
            for (int j = ii[i]; j < ii[i+1]; j++)
            {
               pjj[j] = jj[j];
               if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
               pdata[j] = data[j];
            }
            if (!issorted)
            {
               ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
               CCHKERRQ(PETSC_COMM_SELF,ierr);
            }
         }

         mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
         if (size == 1)
         {
            ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
            CCHKERRQ(comm,ierr);
            oii = NULL;
         }
         else // block diagonal constructor
         {
            ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
            ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
                                                  PETSC_DECIDE,
                                                  pii,pjj,pdata,oii,NULL,NULL,&B);
            CCHKERRQ(comm,ierr);
         }
         void *ptrs[4] = {pii,pjj,pdata,oii};
         const char *names[4] = {"_mfem_csr_pii",
                                 "_mfem_csr_pjj",
                                 "_mfem_csr_pdata",
                                 "_mfem_csr_oii"
                                };
         for (int i=0; i<4; i++)
         {
            PetscContainer c;

            ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
            ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
#if PETSC_VERSION_LT(3,23,0)
            ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
#else
            ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
#endif
            PCHKERRQ(B,ierr);
            ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
            PCHKERRQ(B,ierr);
            ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
         }
         if (tid == PETSC_MATAIJ)
         {
            *A = B;
         }
         else if (tid == PETSC_MATHYPRE)
         {
            ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
            ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
         }
         else if (tid == PETSC_MATIS)
         {
            ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
            ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
         }
         else
         {
            MFEM_ABORT("Unsupported operator type conversion " << tid)
         }
      }
   }
   else // fallback to general operator
   {
      MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
                  "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
      MakeWrapper(comm,&op,A);
      if (tid == PETSC_MATAIJ)
      {
         Mat B;
         PetscBool isaij;

         ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
         ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
         CCHKERRQ(comm,ierr);
         ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
         if (!isaij)
         {
            ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
            ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
         }
         else
         {
            *A = B;
         }
      }
   }
   SetUpForDevice();
}
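
// Summary of the conversion paths above: an existing PetscParMatrix is
// reused or MatConvert'ed; a HypreParMatrix goes through MatCreateFromParCSR
// (or the MatConvert_hypreParCSR_* fallbacks declared above); a BlockOperator
// becomes a MATNEST (optionally converted to MATIS); an IdentityOperator is
// assembled as a diagonal AIJ matrix; any other Operator is wrapped as a
// MATSHELL and, for PETSC_MATAIJ, made explicit with MatComputeOperator.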

void PetscParMatrix::Destroy()
{
   if (A != NULL)
   {
      MPI_Comm comm = MPI_COMM_NULL;
      ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
      ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
   }
   delete X;
   delete Y;
   X = Y = NULL;
}

PetscParMatrix::PetscParMatrix(petsc::Mat a, bool ref)
{
   if (ref)
   {
      ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
   }
   Init();
   A = a;
   height = GetNumRows();
   width = GetNumCols();
}

void PetscParMatrix::SetMat(petsc::Mat A_)
{
   if (A_ == A) { return; }
   Destroy();
   ierr = PetscObjectReference((PetscObject)A_); PCHKERRQ(A_,ierr);
   A = A_;
   height = GetNumRows();
   width = GetNumCols();
}

void PetscParMatrix::SetUpForDevice()
{
#if !defined(_USE_DEVICE)
   return;
#else
   if (!A || (!Device::Allows(Backend::CUDA_MASK) &&
              !Device::Allows(Backend::HIP_MASK)))
   {
      if (A) { ierr = MatBindToCPU(A, PETSC_TRUE); PCHKERRQ(A,ierr); }
      return;
   }
   PetscBool ismatis,isnest,isaij;
   ierr = PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
   PCHKERRQ(A,ierr);
   ierr = PetscObjectTypeCompare((PetscObject)A,MATNEST,&isnest);
   PCHKERRQ(A,ierr);
   Mat tA = A;
   if (ismatis)
   {
      ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
      ierr = PetscObjectTypeCompare((PetscObject)tA,MATNEST,&isnest);
      PCHKERRQ(tA,ierr);
   }
   if (isnest)
   {
      PetscInt n,m;
      Mat **sub;
      ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
      bool dvec = false;
      for (PetscInt i = 0; i < n; i++)
      {
         for (PetscInt j = 0; j < m; j++)
         {
            if (sub[i][j])
            {
               bool expT = false;
               Mat sA = sub[i][j];
               ierr = PetscObjectTypeCompareAny((PetscObject)sA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
               PCHKERRQ(sA,ierr);
               if (isaij)
               {
                  ierr = MatSetType(sA,PETSC_MATAIJDEVICE); PCHKERRQ(sA,ierr);
                  dvec = true;
                  expT = true;
               }
               if (expT)
               {
                  ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
                                      PETSC_TRUE); PCHKERRQ(sA,ierr);
               }
            }
         }
      }
      if (dvec)
      {
         ierr = MatSetVecType(tA,PETSC_VECDEVICE); PCHKERRQ(tA,ierr);
      }
   }
   else
   {
      bool expT = false;
      ierr = PetscObjectTypeCompareAny((PetscObject)tA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
      PCHKERRQ(tA,ierr);
      if (isaij)
      {
         ierr = MatSetType(tA,PETSC_MATAIJDEVICE); PCHKERRQ(tA,ierr);
         expT = true;
      }
      if (expT)
      {
         ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
                             PETSC_TRUE); PCHKERRQ(tA,ierr);
      }
   }
#endif
}

// Computes y = alpha * A * x + beta * y
// or      y = alpha * A^T * x + beta * y
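// When both alpha and beta are nonzero, the kernel uses the identity
//   alpha * op(A) * x + beta * y = alpha * (op(A) * x + (beta/alpha) * y):
// y is scaled by beta/alpha, MatMult(Transpose)Add accumulates op(A)*x, and
// the result is scaled by alpha.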
static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
                          bool transpose)
{
   PetscErrorCode (*f)(Mat,Vec,Vec);
   PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
   if (transpose)
   {
      f = MatMultTranspose;
      fadd = MatMultTransposeAdd;
   }
   else
   {
      f = MatMult;
      fadd = MatMultAdd;
   }
   if (a != 0.)
   {
      if (b != 0.)
      {
         ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
         ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
         ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
      }
      else
      {
         ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
         ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
      }
   }
   else
   {
      if (b == 1.)
      {
         // do nothing
      }
      else if (b != 0.)
      {
         ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
      }
      else
      {
         ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
      }
   }
}

void PetscParMatrix::MakeRef(const PetscParMatrix &master)
{
   ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
   Destroy();
   Init();
   A = master.A;
   height = master.height;
   width = master.width;
}

PetscParVector * PetscParMatrix::GetX() const
{
   if (!X)
   {
      MFEM_VERIFY(A,"Mat not present");
      X = new PetscParVector(*this,false,false); PCHKERRQ(A,ierr);
   }
   return X;
}

PetscParVector * PetscParMatrix::GetY() const
{
   if (!Y)
   {
      MFEM_VERIFY(A,"Mat not present");
      Y = new PetscParVector(*this,true,false); PCHKERRQ(A,ierr);
   }
   return Y;
}

PetscParMatrix * PetscParMatrix::Transpose(bool action)
{
   Mat B;
   if (action)
   {
      ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
   }
   else
   {
      ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
   }
   return new PetscParMatrix(B,false);
}

void PetscParMatrix::operator*=(mfem::real_t s)
{
   ierr = MatScale(A,s); PCHKERRQ(A,ierr);
}

void PetscParMatrix::Mult(mfem::real_t a, const Vector &x, mfem::real_t b,
                          Vector &y) const
{
   MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
               << ", expected size = " << Width());
   MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
               << ", expected size = " << Height());

   PetscParVector *XX = GetX();
   PetscParVector *YY = GetY();
   bool rw = (b != 0.0);
   XX->PlaceMemory(x.GetMemory());
   YY->PlaceMemory(y.GetMemory(),rw);
   MatMultKernel(A,a,XX->x,b,YY->x,false);
   XX->ResetMemory();
   YY->ResetMemory();
}

void PetscParMatrix::MultTranspose(mfem::real_t a, const Vector &x,
                                   mfem::real_t b, Vector &y) const
{
   MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
               << ", expected size = " << Height());
   MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
               << ", expected size = " << Width());

   PetscParVector *XX = GetX();
   PetscParVector *YY = GetY();
   bool rw = (b != 0.0);
   XX->PlaceMemory(y.GetMemory(),rw);
   YY->PlaceMemory(x.GetMemory());
   MatMultKernel(A,a,YY->x,b,XX->x,true);
   XX->ResetMemory();
   YY->ResetMemory();
}

void PetscParMatrix::Print(const char *fname, bool binary) const
{
   if (fname)
   {
      PetscViewer view;

      if (binary)
      {
         ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
                                      FILE_MODE_WRITE,&view);
      }
      else
      {
         ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
      }
      PCHKERRQ(A,ierr);
      ierr = MatView(A,view); PCHKERRQ(A,ierr);
      ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
   }
   else
   {
      ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
   }
}

2067{
2068 MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2069 << ", expected size = " << Height());
2070
2071 PetscParVector *YY = GetY();
2072 YY->PlaceMemory(s.GetMemory());
2073 ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
2074 YY->ResetMemory();
2075}
2076
2078{
2079 MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2080 << ", expected size = " << Width());
2081
2082 PetscParVector *XX = GetX();
2083 XX->PlaceMemory(s.GetMemory());
2084 ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
2085 XX->ResetMemory();
2086}
2087
2088void PetscParMatrix::Shift(mfem::real_t s)
2089{
2090 ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
2091}
2092
2093void PetscParMatrix::Shift(const Vector &s)
2094{
2095 // for matrices with square diagonal blocks only
2096 MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2097 << ", expected size = " << Height());
2098 MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2099 << ", expected size = " << Width());
2100
2101 PetscParVector *XX = GetX();
2102 XX->PlaceMemory(s.GetMemory());
2103 ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2104 XX->ResetMemory();
2105}
2106
2107PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
2108 PetscParMatrix *P)
2109{
2110 MFEM_VERIFY(A->Width() == P->Height(),
2111 "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
2112 " differs from number of local rows of P " << P->Height());
2113 MFEM_VERIFY(A->Height() == R->Width(),
2114 "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
2115 " differs from number of local cols of R " << R->Width());
2116 Mat B;
2117 ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2118 PCHKERRQ(*R,ierr);
2119 return new PetscParMatrix(B);
2120}
2121
2122PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
2123{
2124 Mat pA = *A,pP = *P,pRt = *Rt;
2125 Mat B;
2126 PetscBool Aismatis,Pismatis,Rtismatis;
2127
2128 MFEM_VERIFY(A->Width() == P->Height(),
2129 "Petsc RAP: Number of local cols of A " << A->Width() <<
2130 " differs from number of local rows of P " << P->Height());
2131 MFEM_VERIFY(A->Height() == Rt->Height(),
2132 "Petsc RAP: Number of local rows of A " << A->Height() <<
2133 " differs from number of local rows of Rt " << Rt->Height());
2134 ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
2135 PCHKERRQ(pA,ierr);
2136 ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
2137 PCHKERRQ(pA,ierr);
2138 ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
2139 PCHKERRQ(pA,ierr);
2140 if (Aismatis &&
2141 Pismatis &&
2142 Rtismatis) // handle special case (this code will eventually go into PETSc)
2143 {
2144 Mat lA,lP,lB,lRt;
2145 ISLocalToGlobalMapping cl2gP,cl2gRt;
2146 PetscInt rlsize,clsize,rsize,csize;
2147
2148 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2149 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2150 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2151 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2152 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2153 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2154 ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
2155 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2156 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2157 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2158 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2159 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2160 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2161 if (lRt == lP)
2162 {
2163 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2164 PCHKERRQ(lA,ierr);
2165 }
2166 else
2167 {
2168 Mat lR;
2169 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2170 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2171 PCHKERRQ(lRt,ierr);
2172 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2173 }
2174
2175 // attach the lRt matrix to the subdomain local matrix;
2176 // it may be used if markers on vdofs have to be mapped onto
2177 // subdomain true dofs
2178 {
2179 mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
2180 ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
2181 (*vmatsl2l)[0] = lRt;
2182
2183 PetscContainer c;
2184 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
2185 PCHKERRQ(B,ierr);
2186 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2187#if PETSC_VERSION_LT(3,23,0)
2188 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2189#else
2190 ierr = PetscContainerSetCtxDestroy(c,__mfem_matarray_container_destroy);
2191#endif
2192 PCHKERRQ(c,ierr);
2193 ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
2194 PCHKERRQ(B,ierr);
2195 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2196 }
2197
2198 // Set local problem
2199 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2200 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2201 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2202 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2203 }
2204 else // raises an error if this PtAP combination is not supported by PETSc
2205 {
2206 if (pP == pRt)
2207 {
2208 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2209 PCHKERRQ(pA,ierr);
2210 }
2211 else
2212 {
2213 Mat pR;
2214 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2215 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2216 PCHKERRQ(pRt,ierr);
2217 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2218 }
2219 }
2220 return new PetscParMatrix(B);
2221}
2222
2223PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
2224{
2225 PetscParMatrix *out = RAP(P,A,P);
2226 return out;
2227}
2228
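// Example (editor's sketch, not part of the original source): forming the
// Galerkin coarse operator Ac = P^T A P with hypothetical matrices; A is
// n x n and P is n x m, so Ac is m x m.
//
//   mfem::PetscParMatrix *Ac = mfem::RAP(&A, &P);
//   // ... use Ac ...
//   delete Ac;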
2229PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
2230{
2231 PetscParMatrix *out,*A;
2232#if defined(PETSC_HAVE_HYPRE)
2233 A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
2234#else
2235 A = new PetscParMatrix(hA);
2236#endif
2237 out = RAP(P,A,P);
2238 delete A;
2239 return out;
2240}
2241
2242
2243PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
2244{
2245 Mat AB;
2246
2247 ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2248 CCHKERRQ(A->GetComm(),ierr);
2249 return new PetscParMatrix(AB);
2250}
2251
2252PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
2253{
2254 Mat Ae;
2255
2256 PetscParVector dummy(GetComm(),0);
2257 ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2258 EliminateRowsCols(rows_cols,dummy,dummy);
2259 ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2260 return new PetscParMatrix(Ae);
2261}
2262
2263void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2264 const HypreParVector &X,
2265 HypreParVector &B,
2266 mfem::real_t diag)
2267{
2268 MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2269}
2270
2271void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2272 const PetscParVector &X,
2273 PetscParVector &B,
2274 mfem::real_t diag)
2275{
2276 PetscInt M,N;
2277 ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2278 MFEM_VERIFY(M == N,"Rectangular case unsupported");
2279
2280 // TODO: what if a diagonal term is not present?
2281 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2282
2283 // rows need to be in global numbering
2284 PetscInt rst;
2285 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2286
2287 IS dir;
2288 ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2289 if (!X.GlobalSize() && !B.GlobalSize())
2290 {
2291 ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2292 }
2293 else
2294 {
2295 ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2296 }
2297 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2298}
2299
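// Example (editor's sketch, not part of the original source): the usual
// essential-dof elimination workflow; "fespace", "ess_bdr", "X", "B" are
// hypothetical caller-side objects.
//
//   mfem::Array<int> ess_tdof_list;
//   fespace->GetEssentialTrueDofs(ess_bdr, ess_tdof_list);
//   mfem::PetscParMatrix *Ae = A.EliminateRowsCols(ess_tdof_list);
//   mfem::EliminateBC(A, *Ae, ess_tdof_list, X, B); // fix up the rhs B
//   delete Ae;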
2300void PetscParMatrix::EliminateRows(const Array<int> &rows)
2301{
2302 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2303
2304 // rows need to be in global numbering
2305 PetscInt rst;
2306 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2307
2308 IS dir;
2309 ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2310 ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2311 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2312}
2313
2314Mat PetscParMatrix::ReleaseMat(bool dereference)
2315{
2316
2317 Mat B = A;
2318 if (dereference)
2319 {
2320 MPI_Comm comm = GetComm();
2321 ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
2322 }
2323 A = NULL;
2324 return B;
2325}
2326
2327Operator::Type PetscParMatrix::GetType() const
2328{
2329 PetscBool ok;
2330 MFEM_VERIFY(A, "no associated PETSc Mat object");
2331 PetscObject oA = (PetscObject)(this->A);
2332 // map MATSEQAIJ and MATMPIAIJ (i.e. all AIJ variants) to PETSC_MATAIJ
2333 ierr = PetscObjectBaseTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2334 if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2335 ierr = PetscObjectBaseTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2336 if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2337 ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2338 if (ok == PETSC_TRUE) { return PETSC_MATIS; }
2339 ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2340 if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
2341 ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2342 if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
2343#if defined(PETSC_HAVE_HYPRE)
2344 ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2345 if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
2346#endif
2347 return PETSC_MATGENERIC;
2348}
2349
2350void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
2351 const Array<int> &ess_dof_list,
2352 const Vector &X, Vector &B)
2353{
2354 const PetscScalar *array;
2355 Mat pA = const_cast<PetscParMatrix&>(A);
2356
2357 // B -= Ae*X
2358 Ae.Mult(-1.0, X, 1.0, B);
2359
2360 Vec diag = const_cast<PetscParVector&>((*A.GetX()));
2361 ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2362 ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2363 for (int i = 0; i < ess_dof_list.Size(); i++)
2364 {
2365 int r = ess_dof_list[i];
2366 B(r) = array[r] * X(r);
2367 }
2368 ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2369}
2370
2371// PetscSolver methods
2372
2373PetscSolver::PetscSolver() : clcustom(false)
2374{
2375 obj = NULL;
2376 B = X = NULL;
2377 cid = -1;
2378 operatorset = false;
2379 bchandler = NULL;
2380 private_ctx = NULL;
2381}
2382
2383PetscSolver::~PetscSolver()
2384{
2385 delete B;
2386 delete X;
2387 FreePrivateContext();
2388}
2389
2390void PetscSolver::SetTol(mfem::real_t tol)
2391{
2392 SetRelTol(tol);
2393}
2394
2395void PetscSolver::SetRelTol(mfem::real_t tol)
2396{
2397 if (cid == KSP_CLASSID)
2398 {
2399 KSP ksp = (KSP)obj;
2400 ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2401 }
2402 else if (cid == SNES_CLASSID)
2403 {
2404 SNES snes = (SNES)obj;
2405 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2406 PETSC_DEFAULT);
2407 }
2408 else if (cid == TS_CLASSID)
2409 {
2410 TS ts = (TS)obj;
2411 ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2412 }
2413 else
2414 {
2415 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2416 }
2417 PCHKERRQ(obj,ierr);
2418}
2419
2420void PetscSolver::SetAbsTol(mfem::real_t tol)
2421{
2422 if (cid == KSP_CLASSID)
2423 {
2424 KSP ksp = (KSP)obj;
2425 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2426 }
2427 else if (cid == SNES_CLASSID)
2428 {
2429 SNES snes = (SNES)obj;
2430 ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2431 PETSC_DEFAULT);
2432 }
2433 else if (cid == TS_CLASSID)
2434 {
2435 TS ts = (TS)obj;
2436 ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2437 }
2438 else
2439 {
2440 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2441 }
2442 PCHKERRQ(obj,ierr);
2443}
2444
2445void PetscSolver::SetMaxIter(int max_iter)
2446{
2447 if (cid == KSP_CLASSID)
2448 {
2449 KSP ksp = (KSP)obj;
2450 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2451 max_iter);
2452 }
2453 else if (cid == SNES_CLASSID)
2454 {
2455 SNES snes = (SNES)obj;
2456 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2457 max_iter,PETSC_DEFAULT);
2458 }
2459 else if (cid == TS_CLASSID)
2460 {
2461 TS ts = (TS)obj;
2462 ierr = TSSetMaxSteps(ts,max_iter);
2463 }
2464 else
2465 {
2466 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2467 }
2468 PCHKERRQ(obj,ierr);
2469}
2470
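// Example (editor's sketch, not part of the original source): the preceding
// tolerance/iteration setters dispatch on the wrapped PETSc object (KSP,
// SNES or TS); typical usage on a hypothetical "solver":
//
//   solver.SetRelTol(1e-8);
//   solver.SetAbsTol(1e-12);
//   solver.SetMaxIter(200);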
2471
2472void PetscSolver::SetPrintLevel(int plev)
2473{
2474 typedef PetscErrorCode (*myPetscFunc)(void**);
2475 PetscViewerAndFormat *vf = NULL;
2476 PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
2477
2478 if (plev > 0)
2479 {
2480 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2481 PCHKERRQ(obj,ierr);
2482 }
2483 if (cid == KSP_CLASSID)
2484 {
2485 // there are many other options, see the function KSPSetFromOptions() in
2486 // src/ksp/ksp/interface/itcl.c
2487 typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
2488 KSP ksp = (KSP)obj;
2489 if (plev >= 0)
2490 {
2491 ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2492 }
2493 if (plev == 1)
2494 {
2495#if PETSC_VERSION_LT(3,15,0)
2496 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2497#else
2498 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2499#endif
2500 (myPetscFunc)PetscViewerAndFormatDestroy);
2501 PCHKERRQ(ksp,ierr);
2502 }
2503 else if (plev > 1)
2504 {
2505 ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2506 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2507 (myPetscFunc)PetscViewerAndFormatDestroy);
2508 PCHKERRQ(ksp,ierr);
2509 if (plev > 2)
2510 {
2511 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2512 PCHKERRQ(viewer,ierr);
2513#if PETSC_VERSION_LT(3,15,0)
2514 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2515#else
2516 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2517#endif
2518 (myPetscFunc)PetscViewerAndFormatDestroy);
2519 PCHKERRQ(ksp,ierr);
2520 }
2521 }
2522 }
2523 else if (cid == SNES_CLASSID)
2524 {
2525 typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
2526 SNES snes = (SNES)obj;
2527 if (plev >= 0)
2528 {
2529 ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2530 }
2531 if (plev > 0)
2532 {
2533 ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2534 (myPetscFunc)PetscViewerAndFormatDestroy);
2535 PCHKERRQ(snes,ierr);
2536 }
2537 }
2538 else if (cid == TS_CLASSID)
2539 {
2540 TS ts = (TS)obj;
2541 if (plev >= 0)
2542 {
2543 ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2544 }
2545 }
2546 else
2547 {
2548 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2549 }
2550}
2551
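// Example (editor's sketch, not part of the original source): for a KSP,
// SetPrintLevel(0) cancels any monitor, 1 prints the (preconditioned)
// residual norm, >1 adds singular value estimates, and >2 also prints the
// true residual norm.
//
//   solver.SetPrintLevel(2); // residual history + extreme singular values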
2552MPI_Comm PetscSolver::GetComm() const
2553{
2554 return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
2555}
2556
2557void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
2558{
2559 __mfem_monitor_ctx *monctx;
2560 ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2561 monctx->solver = this;
2562 monctx->monitor = ctx;
2563 if (cid == KSP_CLASSID)
2564 {
2565 ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
2566 __mfem_monitor_ctx_destroy);
2567 PCHKERRQ(obj,ierr);
2568 }
2569 else if (cid == SNES_CLASSID)
2570 {
2571 ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
2572 __mfem_monitor_ctx_destroy);
2573 PCHKERRQ(obj,ierr);
2574 }
2575 else if (cid == TS_CLASSID)
2576 {
2577 ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
2578 __mfem_monitor_ctx_destroy);
2579 PCHKERRQ(obj,ierr);
2580 }
2581 else
2582 {
2583 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2584 }
2585}
2586
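// Example (editor's sketch, assuming the PetscSolverMonitor interface
// declared in petsc.hpp): a user-defined monitor passed to SetMonitor().
//
//   class MyMonitor : public mfem::PetscSolverMonitor
//   {
//   public:
//      MyMonitor() : PetscSolverMonitor(true, false) {} // monitor solution
//      void MonitorSolution(PetscInt it, PetscReal norm,
//                           const mfem::Vector &X) override
//      { /* inspect the current iterate X here */ }
//   };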
2587void PetscSolver::SetBCHandler(PetscBCHandler *bch)
2588{
2589 bchandler = bch;
2590 if (cid == SNES_CLASSID)
2591 {
2592 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
2593 snes_ctx->bchandler = bchandler;
2594 }
2595 else if (cid == TS_CLASSID)
2596 {
2597 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
2598 ts_ctx->bchandler = bchandler;
2599 }
2600 else
2601 {
2602 MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2603 }
2604}
2605
2606void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
2607{
2608 PC pc = NULL;
2609 if (cid == TS_CLASSID)
2610 {
2611 SNES snes;
2612 KSP ksp;
2613
2614 ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
2615 ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2616 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2617 }
2618 else if (cid == SNES_CLASSID)
2619 {
2620 KSP ksp;
2621
2622 ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
2623 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2624 }
2625 else if (cid == KSP_CLASSID)
2626 {
2627 ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
2628 }
2629 else if (cid == PC_CLASSID)
2630 {
2631 pc = (PC)obj;
2632 }
2633 else
2634 {
2635 MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
2636 }
2637 if (factory)
2638 {
2639 ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2640 }
2641 else
2642 {
2643 ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2644 }
2645}
2646
2647void PetscSolver::Customize(bool customize) const
2648{
2649 if (!customize) { clcustom = true; }
2650 if (!clcustom)
2651 {
2652 if (cid == PC_CLASSID)
2653 {
2654 PC pc = (PC)obj;
2655 ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2656 }
2657 else if (cid == KSP_CLASSID)
2658 {
2659 KSP ksp = (KSP)obj;
2660 ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2661 }
2662 else if (cid == SNES_CLASSID)
2663 {
2664 SNES snes = (SNES)obj;
2665 ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2666 }
2667 else if (cid == TS_CLASSID)
2668 {
2669 TS ts = (TS)obj;
2670 ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2671 }
2672 else
2673 {
2674 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2675 }
2676 }
2677 clcustom = true;
2678}
2679
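// Example (editor's sketch, not part of the original source): Customize()
// forwards to {PC,KSP,SNES,TS}SetFromOptions, so a solver constructed with a
// prefix can be driven from the PETSc options database:
//
//   mfem::PetscLinearSolver solver(MPI_COMM_WORLD, "my_");
//   // run with: ./app -my_ksp_type gmres -my_pc_type gamg -my_ksp_view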
2680int PetscSolver::GetConverged()
2681{
2682 if (cid == KSP_CLASSID)
2683 {
2684 KSP ksp = (KSP)obj;
2685 KSPConvergedReason reason;
2686 ierr = KSPGetConvergedReason(ksp,&reason);
2687 PCHKERRQ(ksp,ierr);
2688 return reason > 0 ? 1 : 0;
2689 }
2690 else if (cid == SNES_CLASSID)
2691 {
2692 SNES snes = (SNES)obj;
2693 SNESConvergedReason reason;
2694 ierr = SNESGetConvergedReason(snes,&reason);
2695 PCHKERRQ(snes,ierr);
2696 return reason > 0 ? 1 : 0;
2697 }
2698 else if (cid == TS_CLASSID)
2699 {
2700 TS ts = (TS)obj;
2701 TSConvergedReason reason;
2702 ierr = TSGetConvergedReason(ts,&reason);
2703 PCHKERRQ(ts,ierr);
2704 return reason > 0 ? 1 : 0;
2705 }
2706 else
2707 {
2708 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2709 return -1;
2710 }
2711}
2712
2713int PetscSolver::GetNumIterations()
2714{
2715 if (cid == KSP_CLASSID)
2716 {
2717 KSP ksp = (KSP)obj;
2718 PetscInt its;
2719 ierr = KSPGetIterationNumber(ksp,&its);
2720 PCHKERRQ(ksp,ierr);
2721 return its;
2722 }
2723 else if (cid == SNES_CLASSID)
2724 {
2725 SNES snes = (SNES)obj;
2726 PetscInt its;
2727 ierr = SNESGetIterationNumber(snes,&its);
2728 PCHKERRQ(snes,ierr);
2729 return its;
2730 }
2731 else if (cid == TS_CLASSID)
2732 {
2733 TS ts = (TS)obj;
2734 PetscInt its;
2735 ierr = TSGetStepNumber(ts,&its);
2736 PCHKERRQ(ts,ierr);
2737 return its;
2738 }
2739 else
2740 {
2741 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2742 return -1;
2743 }
2744}
2745
2746mfem::real_t PetscSolver::GetFinalNorm()
2747{
2748 if (cid == KSP_CLASSID)
2749 {
2750 KSP ksp = (KSP)obj;
2751 PetscReal norm;
2752 ierr = KSPGetResidualNorm(ksp,&norm);
2753 PCHKERRQ(ksp,ierr);
2754 return norm;
2755 }
2756 if (cid == SNES_CLASSID)
2757 {
2758 SNES snes = (SNES)obj;
2759 PetscReal norm;
2760 ierr = SNESGetFunctionNorm(snes,&norm);
2761 PCHKERRQ(snes,ierr);
2762 return norm;
2763 }
2764 else
2765 {
2766 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2767 return PETSC_MAX_REAL;
2768 }
2769}
2770
2771void PetscSolver::CreatePrivateContext()
2772{
2773 FreePrivateContext();
2774 if (cid == SNES_CLASSID)
2775 {
2776 __mfem_snes_ctx *snes_ctx;
2777 ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2778 snes_ctx->op = NULL;
2779 snes_ctx->bchandler = NULL;
2780 snes_ctx->work = NULL;
2781 snes_ctx->jacType = Operator::PETSC_MATAIJ;
2782 private_ctx = (void*) snes_ctx;
2783 }
2784 else if (cid == TS_CLASSID)
2785 {
2786 __mfem_ts_ctx *ts_ctx;
2787 ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2788 ts_ctx->op = NULL;
2789 ts_ctx->bchandler = NULL;
2790 ts_ctx->work = NULL;
2791 ts_ctx->work2 = NULL;
2792 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2793 ts_ctx->cached_ijacstate = -1;
2794 ts_ctx->cached_rhsjacstate = -1;
2795 ts_ctx->cached_splits_xstate = -1;
2796 ts_ctx->cached_splits_xdotstate = -1;
2797 ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2798 ts_ctx->jacType = Operator::PETSC_MATAIJ;
2799 private_ctx = (void*) ts_ctx;
2800 }
2801}
2802
2803void PetscSolver::FreePrivateContext()
2804{
2805 if (!private_ctx) { return; }
2806 // free private context's owned objects
2807 if (cid == SNES_CLASSID)
2808 {
2809 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2810 delete snes_ctx->work;
2811 }
2812 else if (cid == TS_CLASSID)
2813 {
2814 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2815 delete ts_ctx->work;
2816 delete ts_ctx->work2;
2817 }
2818 ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2819}
2820
2821// PetscBCHandler methods
2822
2823PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2824 enum PetscBCHandler::Type type_)
2825 : bctype(type_), setup(false), eval_t(0.0),
2826 eval_t_cached(std::numeric_limits<mfem::real_t>::min())
2827{
2828 SetTDofs(ess_tdof_list);
2829}
2830
2831void PetscBCHandler::SetTDofs(Array<int>& list)
2832{
2833 ess_tdof_list.SetSize(list.Size());
2834 ess_tdof_list.Assign(list);
2835 setup = false;
2836}
2837
2838void PetscBCHandler::SetUp(PetscInt n)
2839{
2840 if (setup) { return; }
2841 if (bctype == CONSTANT)
2842 {
2843 eval_g.SetSize(n);
2844 this->Eval(eval_t,eval_g);
2845 eval_t_cached = eval_t;
2846 }
2847 else if (bctype == TIME_DEPENDENT)
2848 {
2849 eval_g.SetSize(n);
2850 }
2851 setup = true;
2852}
2853
2854void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2855{
2856 (*this).SetUp(x.Size());
2857 y = x;
2858 if (bctype == ZERO)
2859 {
2860 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2861 {
2862 y[ess_tdof_list[i]] = 0.0;
2863 }
2864 }
2865 else
2866 {
2867 if (bctype != CONSTANT && eval_t != eval_t_cached)
2868 {
2869 Eval(eval_t,eval_g);
2870 eval_t_cached = eval_t;
2871 }
2872 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2873 {
2874 y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2875 }
2876 }
2877}
2878
2879void PetscBCHandler::ApplyBC(Vector &x)
2880{
2881 (*this).SetUp(x.Size());
2882 if (bctype == ZERO)
2883 {
2884 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2885 {
2886 x[ess_tdof_list[i]] = 0.0;
2887 }
2888 }
2889 else
2890 {
2891 if (bctype != CONSTANT && eval_t != eval_t_cached)
2892 {
2893 Eval(eval_t,eval_g);
2894 eval_t_cached = eval_t;
2895 }
2896 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2897 {
2898 x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2899 }
2900 }
2901}
2902
2903void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2904{
2905 (*this).SetUp(x.Size());
2906 if (bctype == ZERO)
2907 {
2908 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2909 {
2910 y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2911 }
2912 }
2913 else
2914 {
2915 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2916 {
2917 y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2918 }
2919 }
2920}
2921
2922void PetscBCHandler::Zero(Vector &x)
2923{
2924 (*this).SetUp(x.Size());
2925 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2926 {
2927 x[ess_tdof_list[i]] = 0.0;
2928 }
2929}
2930
2931void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2932{
2933 (*this).SetUp(x.Size());
2934 y = x;
2935 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2936 {
2937 y[ess_tdof_list[i]] = 0.0;
2938 }
2939}
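// Example (editor's sketch, assuming the PetscBCHandler interface declared
// in petsc.hpp): a time-dependent handler only needs to override Eval().
//
//   class MyBC : public mfem::PetscBCHandler
//   {
//   public:
//      MyBC(mfem::Array<int> &ess) : PetscBCHandler(ess, TIME_DEPENDENT) {}
//      void Eval(mfem::real_t t, mfem::Vector &g) override { g = t; }
//   };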
2940
2941// PetscLinearSolver methods
2942
2943PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2944 bool wrapin, bool iter_mode)
2945 : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2946{
2947 KSP ksp;
2948 ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2949 obj = (PetscObject)ksp;
2950 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2951 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2952 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2953 PCHKERRQ(ksp, ierr);
2954}
2955
2956PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2957 const std::string &prefix, bool iter_mode)
2958 : PetscSolver(), Solver(0,iter_mode), wrap(false)
2959{
2960 KSP ksp;
2961 ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2962 obj = (PetscObject)ksp;
2963 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2964 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2965 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2966 PCHKERRQ(ksp, ierr);
2967 SetOperator(A);
2968}
2969
2970PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2971 const std::string &prefix, bool iter_mode)
2972 : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2973{
2974 KSP ksp;
2975 ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2976 obj = (PetscObject)ksp;
2977 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2978 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2979 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2980 PCHKERRQ(ksp, ierr);
2981 SetOperator(A);
2982}
2983
2984void PetscLinearSolver::SetOperator(const Operator &op)
2985{
2986 const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2987 PetscParMatrix *pA = const_cast<PetscParMatrix *>
2988 (dynamic_cast<const PetscParMatrix *>(&op));
2989 const Operator *oA = dynamic_cast<const Operator *>(&op);
2990
2991 // update base classes: Operator, Solver, PetscLinearSolver
2992 bool delete_pA = false;
2993 if (!pA)
2994 {
2995 if (hA)
2996 {
2997 // Create a MATSHELL object, or convert into a format suitable for constructing preconditioners
2998 pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2999 delete_pA = true;
3000 }
3001 else if (oA) // fallback to general operator
3002 {
3003 // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
3004 // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
3005 pA = new PetscParMatrix(PetscObjectComm(obj),oA,
3006 wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3007 delete_pA = true;
3008 }
3009 }
3010 MFEM_VERIFY(pA, "Unsupported operation!");
3011
3012 // Set operators into PETSc KSP
3013 KSP ksp = (KSP)obj;
3014 Mat A = pA->A;
3015 if (operatorset)
3016 {
3017 Mat C;
3018 PetscInt nheight,nwidth,oheight,owidth;
3019
3020 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3021 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3022 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3023 if (nheight != oheight || nwidth != owidth)
3024 {
3025 // reinit without destroying the KSP
3026 // communicator remains the same
3027 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3028 delete X;
3029 delete B;
3030 X = B = NULL;
3031 }
3032 }
3033 ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
3034
3035 // Update PetscSolver
3036 operatorset = true;
3037
3038 // Update the Operator fields.
3039 height = pA->Height();
3040 width = pA->Width();
3041
3042 if (delete_pA) { delete pA; }
3043}
3044
3045void PetscLinearSolver::SetOperator(const Operator &op, const Operator &pop)
3046{
3047 const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
3048 PetscParMatrix *pA = const_cast<PetscParMatrix *>
3049 (dynamic_cast<const PetscParMatrix *>(&op));
3050 const Operator *oA = dynamic_cast<const Operator *>(&op);
3051
3052 PetscParMatrix *ppA = const_cast<PetscParMatrix *>
3053 (dynamic_cast<const PetscParMatrix *>(&pop));
3054 const Operator *poA = dynamic_cast<const Operator *>(&pop);
3055
3056 // Convert Operator for linear system
3057 bool delete_pA = false;
3058 if (!pA)
3059 {
3060 if (hA)
3061 {
3062 // Create a MATSHELL object, or convert into a format suitable for constructing preconditioners
3063 pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3064 delete_pA = true;
3065 }
3066 else if (oA) // fallback to general operator
3067 {
3068 // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
3069 // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
3070 pA = new PetscParMatrix(PetscObjectComm(obj),oA,
3071 wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3072 delete_pA = true;
3073 }
3074 }
3075 MFEM_VERIFY(pA, "Unsupported operation!");
3076
3077 // Convert Operator to be preconditioned
3078 bool delete_ppA = false;
3079 if (!ppA)
3080 {
3081 if (oA == poA && !wrap) // Same operator, already converted
3082 {
3083 ppA = pA;
3084 }
3085 else
3086 {
3087 ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
3088 delete_ppA = true;
3089 }
3090 }
3091 MFEM_VERIFY(ppA, "Unsupported operation!");
3092
3093 // Set operators into PETSc KSP
3094 KSP ksp = (KSP)obj;
3095 Mat A = pA->A;
3096 Mat P = ppA->A;
3097 if (operatorset)
3098 {
3099 Mat C;
3100 PetscInt nheight,nwidth,oheight,owidth;
3101
3102 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3103 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3104 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3105 if (nheight != oheight || nwidth != owidth)
3106 {
3107 // reinit without destroying the KSP
3108 // communicator remains the same
3109 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3110 delete X;
3111 delete B;
3112 X = B = NULL;
3113 wrap = false;
3114 }
3115 }
3116 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3117
3118 // Update PetscSolver
3119 operatorset = true;
3120
3121 // Update the Operator fields.
3122 height = pA->Height();
3123 width = pA->Width();
3124
3125 if (delete_pA) { delete pA; }
3126 if (delete_ppA) { delete ppA; }
3127}
3128
3129void PetscLinearSolver::SetPreconditioner(Solver &precond)
3130{
3131 KSP ksp = (KSP)obj;
3132
3133 // Preserve Amat if already set
3134 Mat A = NULL;
3135 PetscBool amat;
3136 ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3137 if (amat)
3138 {
3139 ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3140 ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
3141 }
3142 PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
3143 if (ppc)
3144 {
3145 ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3146 }
3147 else
3148 {
3149 // wrap the Solver action
3150 // Solver is assumed to be already setup
3151 // ownership of precond is not transferred,
3152 // consistently with other MFEM's linear solvers
3153 PC pc;
3154 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3155 ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
3156 }
3157 if (A)
3158 {
3159 Mat P;
3160
3161 ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3162 ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
3163 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3164 ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3165 ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3166 }
3167}
3168
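// Example (editor's sketch, not part of the original source): a complete
// linear solve; "A" is a hypothetical PetscParMatrix and "prec" any
// mfem::Solver (wrapped as a PCSHELL when it is not PETSc-based).
//
//   mfem::PetscLinearSolver solver(A.GetComm(), "ls_");
//   solver.SetOperator(A);
//   solver.SetPreconditioner(prec);
//   solver.SetRelTol(1e-8);
//   solver.Mult(b, x); // b, x: mfem::Vector of matching sizes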
3169void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
3170{
3171 KSP ksp = (KSP)obj;
3172
3173 if (!B || !X)
3174 {
3175 Mat pA = NULL;
3176 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
3177 if (!B)
3178 {
3179 PetscParMatrix A = PetscParMatrix(pA, true);
3180 B = new PetscParVector(A, true, false);
3181 }
3182 if (!X)
3183 {
3184 PetscParMatrix A = PetscParMatrix(pA, true);
3185 X = new PetscParVector(A, false, false);
3186 }
3187 }
3188 B->PlaceMemory(b.GetMemory());
3189
3190 Customize();
3191
3192 PetscBool flg;
3193 ierr = KSPGetInitialGuessNonzero(ksp, &flg); PCHKERRQ(ksp,ierr);
3194 X->PlaceMemory(x.GetMemory(),flg);
3195
3196 // Solve the system.
3197 if (trans)
3198 {
3199 ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3200 }
3201 else
3202 {
3203 ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3204 }
3205 B->ResetMemory();
3206 X->ResetMemory();
3207}
3208
3209void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
3210{
3211 (*this).MultKernel(b,x,false);
3212}
3213
3214void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
3215{
3216 (*this).MultKernel(b,x,true);
3217}
3218
3219PetscLinearSolver::~PetscLinearSolver()
3220{
3221 MPI_Comm comm;
3222 KSP ksp = (KSP)obj;
3223 ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3224 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3225}
3226
3227// PetscPCGSolver methods
3228
3229PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix,
3230 bool iter_mode)
3231 : PetscLinearSolver(comm,prefix,true,iter_mode)
3232{
3233 KSP ksp = (KSP)obj;
3234 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3235 // this is to obtain a textbook PCG
3236 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3237}
3238
3239PetscPCGSolver::PetscPCGSolver(PetscParMatrix &A, const std::string &prefix,
3240 bool iter_mode)
3241 : PetscLinearSolver(A,prefix,iter_mode)
3242{
3243 KSP ksp = (KSP)obj;
3244 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3245 // this is to obtain a textbook PCG
3246 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3247}
3248
3249PetscPCGSolver::PetscPCGSolver(HypreParMatrix &A, bool wrap,
3250 const std::string &prefix, bool iter_mode)
3251 : PetscLinearSolver(A,wrap,prefix,iter_mode)
3252{
3253 KSP ksp = (KSP)obj;
3254 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3255 // this is to obtain a textbook PCG
3256 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3257}
3258
3259// PetscPreconditioner methods
3260
3261PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
3262 const std::string &prefix)
3263 : PetscSolver(), Solver()
3264{
3265 PC pc;
3266 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3267 obj = (PetscObject)pc;
3268 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3269 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3270}
3271
3272PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
3273 const string &prefix)
3274 : PetscSolver(), Solver()
3275{
3276 PC pc;
3277 ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
3278 obj = (PetscObject)pc;
3279 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3280 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3281 SetOperator(A);
3282}
3283
3284PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
3285 const string &prefix)
3286 : PetscSolver(), Solver()
3287{
3288 PC pc;
3289 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3290 obj = (PetscObject)pc;
3291 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3292 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3293 SetOperator(op);
3294}
3295
3296void PetscPreconditioner::SetOperator(const Operator &op)
3297{
3298 bool delete_pA = false;
3299 PetscParMatrix *pA = const_cast<PetscParMatrix *>
3300 (dynamic_cast<const PetscParMatrix *>(&op));
3301
3302 if (!pA)
3303 {
3304 const Operator *cop = dynamic_cast<const Operator *>(&op);
3305 pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
3306 delete_pA = true;
3307 }
3308
3309 // Set operators into PETSc PC
3310 PC pc = (PC)obj;
3311 Mat A = pA->A;
3312 if (operatorset)
3313 {
3314 Mat C;
3315 PetscInt nheight,nwidth,oheight,owidth;
3316
3317 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3318 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3319 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3320 if (nheight != oheight || nwidth != owidth)
3321 {
3322 // reinit without destroying the PC
3323 // communicator remains the same
3324 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3325 delete X;
3326 delete B;
3327 X = B = NULL;
3328 }
3329 }
3330 ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
3331
3332 // Update PetscSolver
3333 operatorset = true;
3334
3335 // Update the Operator fields.
3336 height = pA->Height();
3337 width = pA->Width();
3338
3339 if (delete_pA) { delete pA; }
3340}
3341
3342void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
3343 bool trans) const
3344{
3345 MFEM_VERIFY(!iterative_mode,
3346 "Iterative mode not supported for PetscPreconditioner");
3347 PC pc = (PC)obj;
3348
3349 if (!B || !X)
3350 {
3351 Mat pA = NULL;
3352 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
3353 if (!B)
3354 {
3355 PetscParMatrix A(pA, true);
3356 B = new PetscParVector(A, true, false);
3357 }
3358 if (!X)
3359 {
3360 PetscParMatrix A(pA, true);
3361 X = new PetscParVector(A, false, false);
3362 }
3363 }
3364 B->PlaceMemory(b.GetMemory());
3365 X->PlaceMemory(x.GetMemory());
3366
3367 Customize();
3368
3369 // Apply the preconditioner.
3370 if (trans)
3371 {
3372 ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3373 }
3374 else
3375 {
3376 ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3377 }
3378 B->ResetMemory();
3379 X->ResetMemory();
3380}
3381
3382void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
3383{
3384 (*this).MultKernel(b,x,false);
3385}
3386
3387void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
3388{
3389 (*this).MultKernel(b,x,true);
3390}
3391
3392PetscPreconditioner::~PetscPreconditioner()
3393{
3394 MPI_Comm comm;
3395 PC pc = (PC)obj;
3396 ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3397 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3398}
3399
3400// PetscBDDCSolver methods
3401
3402// Coordinates sampling function
3403static void func_coords(const Vector &x, Vector &y)
3404{
3405 y = x;
3406}
3407
3408void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
3409{
3410 MPI_Comm comm = PetscObjectComm(obj);
3411
3412 // get PETSc object
3413 PC pc = (PC)obj;
3414 Mat pA;
3415 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3416
3417 // matrix type should be of type MATIS
3418 PetscBool ismatis;
3419 ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
3420 PCHKERRQ(pA,ierr);
3421 MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
3422
3423 // Check options
3424 ParFiniteElementSpace *fespace = opts.fespace;
3425 if (opts.netflux && !fespace)
3426 {
3427 MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3428 }
3429
3430 // Attach default near-null space to local matrices
3431 {
3432 MatNullSpace nnsp;
3433 Mat lA;
3434 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3435 ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
3436 &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3437 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3438 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3439 }
3440
3441 // set PETSc PC type to PCBDDC
3442 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
3443
3444 PetscInt rst,nl;
3445 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3446 ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3447
3448 // index sets for fields splitting and coordinates for nodal spaces
3449 IS *fields = NULL;
3450 PetscInt nf = 0;
3451 PetscInt sdim = 0;
3452 PetscReal *coords = NULL;
3453 if (fespace)
3454 {
3455 int vdim = fespace->GetVDim();
3456
3457 // Ideally, the block size should be set at matrix creation,
3458 // but the MFEM assembly interface does not allow this
3459 if (fespace->GetOrdering() == Ordering::byVDIM)
3460 {
3461 Mat lA;
3462 ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3463 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3464 ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3465 }
3466
3467 // fields
3468 if (vdim > 1)
3469 {
3470 PetscInt st = rst, bs, inc, nlf;
3471 nf = vdim;
3472 nlf = nl/nf;
3473 ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3474 if (fespace->GetOrdering() == Ordering::byVDIM)
3475 {
3476 inc = 1;
3477 bs = vdim;
3478 }
3479 else
3480 {
3481 inc = nlf;
3482 bs = 1;
3483 }
3484 for (PetscInt i = 0; i < nf; i++)
3485 {
3486 ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3487 st += inc;
3488 }
3489 }
3490
3491 // coordinates
3492 const FiniteElementCollection *fec = fespace->FEColl();
3493 bool h1space = dynamic_cast<const H1_FECollection*>(fec);
3494 if (h1space)
3495 {
3496 ParFiniteElementSpace *fespace_coords = fespace;
3497
3498 sdim = fespace->GetParMesh()->SpaceDimension();
3499 if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
3500 {
3501 fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3502 Ordering::byVDIM);
3503 }
3504 VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3505 ParGridFunction gf_coords(fespace_coords);
3506 gf_coords.ProjectCoefficient(coeff_coords);
3507 HypreParVector *hvec_coords = gf_coords.ParallelProject();
3508 PetscScalar *data_coords = (PetscScalar*)mfem::Read(hvec_coords->GetMemory(),
3509 hvec_coords->Size(),false);
3510
3511 // likely elasticity: attach rigid-body modes as near-null space information
3512 // to the local matrices and to the global matrix
3513 if (vdim == sdim)
3514 {
3515 MatNullSpace nnsp;
3516 Mat lA;
3517 Vec pvec_coords,lvec_coords;
3518 ISLocalToGlobalMapping l2g;
3519 PetscSF sf;
3520 PetscLayout rmap;
3521 const PetscInt *gidxs;
3522 PetscInt nleaves;
3523
3524 ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3525 hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3526 CCHKERRQ(comm,ierr);
3527 ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3528 if (!nnsp)
3529 {
3530 ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3531 CCHKERRQ(comm,ierr);
3532 ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3533 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3534 }
3535 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3536 ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3537 ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3538 ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3539 ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3540 ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3541 ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3542 ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3543 ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3544 CCHKERRQ(comm,ierr);
3545 ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3546 {
3547 const PetscScalar *garray;
3548 PetscScalar *larray;
3549
3550 ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3551 ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3552#if PETSC_VERSION_LT(3,15,0)
3553 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3554 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3555#else
3556 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3557 CCHKERRQ(comm,ierr);
3558 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3559 CCHKERRQ(comm,ierr);
3560#endif
3561 ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3562 ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3563 }
3564 ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3565 ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3566 CCHKERRQ(PETSC_COMM_SELF,ierr);
3567 ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3568 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3569 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3570 ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3571 }
3572
3573 // each dof has an associated tuple of coordinates
3574 ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3575 if (nf > 0)
3576 {
3577 for (PetscInt i = 0; i < nf; i++)
3578 {
3579 const PetscInt *idxs;
3580 PetscInt nn;
3581
3582 // this also handles the case of a fespace not ordered by VDIM
3583 ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3584 ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3585 for (PetscInt j = 0; j < nn; j++)
3586 {
3587 PetscInt idx = idxs[j]-rst;
3588 for (PetscInt d = 0; d < sdim; d++)
3589 {
3590 coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3591 }
3592 }
3593 ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3594 }
3595 }
3596 else
3597 {
3598 for (PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3599 }
3600 if (fespace_coords != fespace)
3601 {
3602 delete fespace_coords;
3603 }
3604 delete hvec_coords;
3605 }
3606 }
3607
3608 // index sets for boundary dofs specification (Essential = dir, Natural = neu)
3609 IS dir = NULL, neu = NULL;
3610
3611 // Extract l2l matrices
3612 Array<Mat> *l2l = NULL;
3613 if (opts.ess_dof_local || opts.nat_dof_local)
3614 {
3615 PetscContainer c;
3616
3617 ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
3618 MFEM_VERIFY(c,"Local-to-local PETSc container not present");
3619 ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
3620 }
3621
3622 // check information about index sets (essential dofs, fields, etc.)
3623#ifdef MFEM_DEBUG
3624 {
3625 // make sure ess/nat_dof have been collectively set
3626 PetscBool lpr = PETSC_FALSE,pr;
3627 if (opts.ess_dof) { lpr = PETSC_TRUE; }
3628 mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3629 CCHKERRQ(comm,mpiierr);
3630 MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
3631 lpr = PETSC_FALSE;
3632 if (opts.nat_dof) { lpr = PETSC_TRUE; }
3633 mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3634 CCHKERRQ(comm,mpiierr);
3635 MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
3636 // make sure fields have been collectively set
3637 PetscInt ms[2],Ms[2];
3638 ms[0] = -nf; ms[1] = nf;
3639 mpiierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3640 CCHKERRQ(comm,mpiierr);
3641 MFEM_VERIFY(-Ms[0] == Ms[1],
3642 "number of fields should be the same across processes");
3643 }
3644#endif
3645
3646 // boundary sets
3647 if (opts.ess_dof)
3648 {
3649 PetscInt st = opts.ess_dof_local ? 0 : rst;
3650 if (!opts.ess_dof_local)
3651 {
3652 // need to compute the boundary dofs in global ordering
3653 ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
3654 CCHKERRQ(comm,ierr);
3655 ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3656 }
3657 else
3658 {
3659 // need to compute a list for the marked boundary dofs in local ordering
3660 ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3661 CCHKERRQ(comm,ierr);
3662 ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3663 }
3664 }
3665 if (opts.nat_dof)
3666 {
3667 PetscInt st = opts.nat_dof_local ? 0 : rst;
3668 if (!opts.nat_dof_local)
3669 {
3670 // need to compute the boundary dofs in global ordering
3671 ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3672 CCHKERRQ(comm,ierr);
3673 ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3674 }
3675 else
3676 {
3677 // need to compute a list for the marked boundary dofs in local ordering
3678 ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3679 CCHKERRQ(comm,ierr);
3680 ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3681 }
3682 }
3683
3684 // field splitting
3685 if (fields)
3686 {
3687 ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3688 }
3689 for (PetscInt i = 0; i < nf; i++)
3690 {
3691 ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3692 }
3693 ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3694
3695 // coordinates
3696 if (coords)
3697 {
3698 ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3699 }
3700 ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3701
3702 // code for block size is disabled since we cannot change the matrix
3703 // block size after it has been set up
3704 // int bs = 1;
3705
3706 // Customize using the finite element space (if any)
3707 if (fespace)
3708 {
3709 const FiniteElementCollection *fec = fespace->FEColl();
3710 bool edgespace, rtspace, h1space;
3711 bool needint = opts.netflux;
3712 bool tracespace, rt_tracespace, edge_tracespace;
3713 int vdim, dim, p;
3714 PetscBool B_is_Trans = PETSC_FALSE;
3715
3716 ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3717 dim = pmesh->Dimension();
3718 vdim = fespace->GetVDim();
3719 h1space = dynamic_cast<const H1_FECollection*>(fec);
3720 rtspace = dynamic_cast<const RT_FECollection*>(fec);
3721 edgespace = dynamic_cast<const ND_FECollection*>(fec);
3722 edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3723 rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3724 tracespace = edge_tracespace || rt_tracespace;
3725
3726 p = 1;
3727 if (fespace->GetNE() > 0)
3728 {
3729 if (!tracespace)
3730 {
3731 p = fespace->GetElementOrder(0);
3732 }
3733 else
3734 {
3735 p = fespace->GetFaceOrder(0);
3736 if (dim == 2) { p++; }
3737 }
3738 }
3739
3740 if (edgespace) // H(curl)
3741 {
3742 if (dim == 2)
3743 {
3744 needint = true;
3745 if (tracespace)
3746 {
3747 MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3748 " not using auxiliary quadrature");
3749 needint = false;
3750 }
3751 }
3752 else
3753 {
3754 FiniteElementCollection *vfec;
3755 if (tracespace)
3756 {
3757 vfec = new H1_Trace_FECollection(p,dim);
3758 }
3759 else
3760 {
3761 vfec = new H1_FECollection(p,dim);
3762 }
3763 ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3764 ParDiscreteLinearOperator *grad;
3765 grad = new ParDiscreteLinearOperator(vfespace,fespace);
3766 if (tracespace)
3767 {
3768 grad->AddTraceFaceInterpolator(new GradientInterpolator);
3769 }
3770 else
3771 {
3772 grad->AddDomainInterpolator(new GradientInterpolator);
3773 }
3774 grad->Assemble();
3775 grad->Finalize();
3776 HypreParMatrix *hG = grad->ParallelAssemble();
3777 PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3778 delete hG;
3779 delete grad;
3780
3781 PetscBool conforming = PETSC_TRUE;
3782 if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3783 ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3784 PCHKERRQ(pc,ierr);
3785 delete vfec;
3786 delete vfespace;
3787 delete G;
3788 }
3789 }
3790 else if (rtspace) // H(div)
3791 {
3792 needint = true;
3793 if (tracespace)
3794 {
3795 MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3796 " auxiliary quadrature");
3797 needint = false;
3798 }
3799 }
3800 else if (h1space) // H(grad), only for the vector case
3801 {
3802 if (vdim != dim) { needint = false; }
3803 }
3804
3805 PetscParMatrix *B = NULL;
3806 if (needint)
3807 {
3808 // Generate a bilinear form in unassembled format which is used to
3809 // compute the net flux across subdomain boundaries for H(div) and
3810 // elasticity/Stokes, and the line integral \int u x n of 2D H(curl) fields
3811 FiniteElementCollection *auxcoll;
3812 if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3813 else
3814 {
3815 if (h1space)
3816 {
3817 auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3818 }
3819 else
3820 {
3821 auxcoll = new L2_FECollection(p,dim);
3822 }
3823 }
3824 ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3825 ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3826
3827 if (edgespace)
3828 {
3829 if (tracespace)
3830 {
3831 b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3832 }
3833 else
3834 {
3835 b->AddDomainIntegrator(new VectorFECurlIntegrator);
3836 }
3837 }
3838 else if (rtspace)
3839 {
3840 if (tracespace)
3841 {
3842 b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3843 }
3844 else
3845 {
3846 b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3847 }
3848 }
3849 else
3850 {
3851 b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3852 }
3853 b->Assemble();
3854 b->Finalize();
3855 OperatorHandle Bh(Operator::PETSC_MATIS);
3856 b->ParallelAssemble(Bh);
3857 Bh.Get(B);
3858 Bh.SetOperatorOwner(false);
3859
3860 if (dir) // if essential dofs are present, we need to zero the columns
3861 {
3862 Mat pB = *B;
3863 ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3864 if (!opts.ess_dof_local)
3865 {
3866 ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3867 }
3868 else
3869 {
3870 ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3871 }
3872 B_is_Trans = PETSC_TRUE;
3873 }
3874 delete b;
3875 delete pspace;
3876 delete auxcoll;
3877 }
3878
3879 if (B)
3880 {
3881 ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3882 }
3883 delete B;
3884 }
3885 ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3886 ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3887}
3888
3889PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3890 const PetscBDDCSolverParams &opts,
3891 const std::string &prefix)
3892 : PetscPreconditioner(A,prefix)
3893{
3894 BDDCSolverConstructor(opts);
3895 Customize();
3896}
3897
3898PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3899 const PetscBDDCSolverParams &opts,
3900 const std::string &prefix)
3901 : PetscPreconditioner(comm,op,prefix)
3902{
3903 BDDCSolverConstructor(opts);
3904 Customize();
3905}
3906
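// Example (editor's sketch, assuming the PetscBDDCSolverParams setters
// declared in petsc.hpp): BDDC requires the operator in unassembled (MATIS)
// format, e.g. assembled with Operator::PETSC_MATIS.
//
//   mfem::PetscBDDCSolverParams opts;
//   opts.SetSpace(fespace);              // enables fields/coordinates setup
//   opts.SetEssBdrDofs(&ess_tdof_list);  // Dirichlet dofs, global numbering
//   mfem::PetscBDDCSolver bddc(A, opts); // A: PetscParMatrix of type MATIS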
3907PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3908 const string &prefix)
3909 : PetscPreconditioner(comm,op,prefix)
3910{
3911 PC pc = (PC)obj;
3912 ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3913
3914 Mat pA;
3915 ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3916
3917 // Check if pA is of type MATNEST
3918 PetscBool isnest;
3919 ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3920 PCHKERRQ(pA,ierr);
3921 PetscInt nr = 0;
3922 IS *isrow = NULL;
3923 if (isnest) // we know the fields
3924 {
3925 ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3926 ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3927 ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3928 }
3929
3930 // We need to customize here, before setting the index sets.
3931 // This is because PCFieldSplitSetType customizes the function
3932 // pointers. SubSolver options will be processed during PCApply
3933 Customize();
3934
3935 for (PetscInt i=0; i<nr; i++)
3936 {
3937 ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3938 }
3939 ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3940}
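// Example (editor's sketch, not part of the original source): field-split
// preconditioning of a hypothetical 2x2 mfem::BlockOperator wrapped as a
// MATNEST, so the index sets are extracted automatically above.
//
//   mfem::PetscParMatrix pA(MPI_COMM_WORLD, &block_op,
//                           mfem::Operator::PETSC_MATNEST);
//   mfem::PetscFieldSplitSolver fs(MPI_COMM_WORLD, pA, "fs_");
//   // run with: ./app -fs_pc_fieldsplit_type schur ...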
3941
3942PetscH2Solver::PetscH2Solver(Operator &op,
3943 ParFiniteElementSpace *fes,
3944 const std::string &prefix)
3945 : PetscPreconditioner(fes->GetParMesh()->GetComm(),prefix)
3946{
3947 PetscParMatrix A(GetComm(),&op,Operator::ANY_TYPE);
3948 ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE); PCHKERRQ(A,ierr);
3949 ierr = MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE); PCHKERRQ(A,ierr);
3950 SetOperator(A);
3951 H2SolverConstructor(fes);
3952 Customize();
3953}
3954
3955void PetscH2Solver::H2SolverConstructor(ParFiniteElementSpace *fes)
3956{
3957#if defined(PETSC_HAVE_H2OPUS)
3958 int sdim = fes->GetParMesh()->SpaceDimension();
3959 int vdim = fes->GetVDim();
3960 const FiniteElementCollection *fec = fes->FEColl();
3961 ParFiniteElementSpace *fes_coords = NULL;
3962
3963 if (vdim != sdim || fes->GetOrdering() != Ordering::byVDIM)
3964 {
3965 fes_coords = new ParFiniteElementSpace(fes->GetParMesh(),fec,sdim,
3966 Ordering::byVDIM);
3967 fes = fes_coords;
3968 }
3969 VectorFunctionCoefficient ccoords(sdim, func_coords);
3970
3971 ParGridFunction coords(fes);
3972 coords.ProjectCoefficient(ccoords);
3973 Vector c(fes->GetTrueVSize());
3974 coords.ParallelProject(c);
3975 delete fes_coords;
3976
3977 PC pc = (PC)obj;
3978 ierr = PCSetType(pc,PCH2OPUS); PCHKERRQ(obj, ierr);
3979 ierr = PCSetCoordinates(pc,sdim,c.Size()/sdim,
3980 (PetscReal*)mfem::Read(c.GetMemory(),
3981 c.Size(),false));
3982 ierr = PCSetFromOptions(pc); PCHKERRQ(obj, ierr);
3983#else
3984 MFEM_ABORT("Need PETSc configured with --download-h2opus");
3985#endif
3986}
3987
3988// PetscNonlinearSolver methods
3989
3990PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3991 const std::string &prefix)
3992 : PetscSolver(), Solver()
3993{
3994 // Create the actual solver object
3995 SNES snes;
3996 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3997 obj = (PetscObject)snes;
3998 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3999 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
4000
4001 // Allocate private solver context
4002 CreatePrivateContext();
4003}
4004
4005PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
4006 const std::string &prefix)
4007 : PetscSolver(), Solver()
4008{
4009 // Create the actual solver object
4010 SNES snes;
4011 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
4012 obj = (PetscObject)snes;
4013 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
4014 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
4015
4016 // Allocate private solver context
4017 CreatePrivateContext();
4018
4019 SetOperator(op);
4020}
4021
4022PetscNonlinearSolver::~PetscNonlinearSolver()
4023{
4024 MPI_Comm comm;
4025 SNES snes = (SNES)obj;
4026 ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
4027 ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
4028}
4029
4030void PetscNonlinearSolver::SetOperator(const Operator &op)
4031{
4032 SNES snes = (SNES)obj;
4033
4034 if (operatorset)
4035 {
4036 PetscBool ls,gs;
4037 void *fctx,*jctx;
4038
4039 ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
4040 PCHKERRQ(snes, ierr);
4041 ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
4042 PCHKERRQ(snes, ierr);
4043
4044 ls = (PetscBool)(height == op.Height() && width == op.Width() &&
4045 (void*)&op == fctx &&
4046 (void*)&op == jctx);
4047 mpiierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
4048 PetscObjectComm((PetscObject)snes));
4049 CCHKERRQ(PetscObjectComm((PetscObject)snes),mpiierr);
4050 if (!gs)
4051 {
4052 ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
4053 delete X;
4054 delete B;
4055 X = B = NULL;
4056 }
4057 }
4058 else
4059 {
4060 /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
4061 yet set. We default to backtracking */
4062 SNESLineSearch ls;
4063 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4064 ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
4065 }
4066
4067 // If we do not pass matrices in, the default matrix type for DMShell is MATDENSE
4068 // in 3.15, which may cause issues.
4069 Mat dummy;
4070 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)snes),op.Height(),
4071 op.Height(),&dummy);
4072
4073 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4074 snes_ctx->op = (Operator*)&op;
4075 ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
4076 PCHKERRQ(snes, ierr);
4077 ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
4078 (void *)snes_ctx);
4079 PCHKERRQ(snes, ierr);
4080
4081 ierr = MatDestroy(&dummy);
4082 PCHKERRQ(snes, ierr);
4083
4084 // Update PetscSolver
4085 operatorset = true;
4086
4087 // Update the Operator fields.
4088 height = op.Height();
4089 width = op.Width();
4090}
4091
4092void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
4093{
4094 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4095 snes_ctx->jacType = jacType;
4096}
4097
4098void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator *op, const Vector&,
4099 mfem::real_t*))
4100{
4101 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4102 snes_ctx->objective = objfn;
4103
4104 SNES snes = (SNES)obj;
4105 ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
4106 PCHKERRQ(snes, ierr);
4107}
4108
4109void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *op, const Vector&,
4110 Vector&, Vector&,
4111 bool&, bool&))
4112{
4113 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4114 snes_ctx->postcheck = post;
4115
4116 SNES snes = (SNES)obj;
4117 SNESLineSearch ls;
4118 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4119 ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
4120 PCHKERRQ(ls, ierr);
4121}
4122
4123void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *op, int it,
4124 const Vector&,
4125 const Vector&,
4126 const Vector&,
4127 const Vector&))
4128{
4129 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4130 snes_ctx->update = update;
4131
4132 SNES snes = (SNES)obj;
4133 ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4134}
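// Example update callback (a sketch): SNES calls it at the beginning of each
// nonlinear iteration with the residual F, the current iterate X, the last
// update dX and the previous iterate pX:
//
//   static void my_update(mfem::Operator *op, int it, const mfem::Vector &F,
//                         const mfem::Vector &X, const mfem::Vector &dX,
//                         const mfem::Vector &pX)
//   {
//      if (mfem::Mpi::Root()) { mfem::out << "starting iteration " << it << '\n'; }
//   }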
4135
4136void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
4137{
4138 SNES snes = (SNES)obj;
4139
4140 bool b_nonempty = b.Size();
4141 if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
4142 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
4143 X->PlaceMemory(x.GetMemory(),iterative_mode);
4144 if (b_nonempty) { B->PlaceMemory(b.GetMemory()); }
4145 else { *B = 0.0; }
4146
4147 Customize();
4148
4149 if (!iterative_mode) { *X = 0.; }
4150
4151 // Solve the system.
4152 ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4153 X->ResetMemory();
4154 if (b_nonempty) { B->ResetMemory(); }
4155}
4156
4157// PetscODESolver methods
4158
4159PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
4160 : PetscSolver(), ODESolver()
4161{
4162 // Create the actual solver object
4163 TS ts;
4164 ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4165 obj = (PetscObject)ts;
4166 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
4167 ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4168
4169 // Allocate private solver context
4170 CreatePrivateContext();
4171
4172 // Default options, to comply with the current interface to ODESolver.
4173 ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4174 PCHKERRQ(ts,ierr);
4175 ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4176 PCHKERRQ(ts,ierr);
4177 TSAdapt tsad;
4178 ierr = TSGetAdapt(ts,&tsad);
4179 PCHKERRQ(ts,ierr);
4180 ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4181 PCHKERRQ(ts,ierr);
4182}
4183
4184PetscODESolver::~PetscODESolver()
4185{
4186 MPI_Comm comm;
4187 TS ts = (TS)obj;
4188 ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
4189 ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4190}
4191
4192void PetscODESolver::Init(TimeDependentOperator &f_,
4193 enum PetscODESolver::Type type)
4194{
4195 TS ts = (TS)obj;
4196
4197 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4198 if (operatorset)
4199 {
4200 ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4201 delete X;
4202 X = NULL;
4203 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4204 ts_ctx->cached_ijacstate = -1;
4205 ts_ctx->cached_rhsjacstate = -1;
4206 ts_ctx->cached_splits_xstate = -1;
4207 ts_ctx->cached_splits_xdotstate = -1;
4208 }
4209 f = &f_;
4210
4211 // Set functions in TS
4212 ts_ctx->op = &f_;
4213 if (f_.isImplicit())
4214 {
4215 Mat dummy;
4216 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4217 f_.Height(),&dummy);
4218 PCHKERRQ(ts, ierr);
4219 ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
4220 PCHKERRQ(ts, ierr);
4221 ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (void *)ts_ctx);
4222 PCHKERRQ(ts, ierr);
4223 ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4224 PCHKERRQ(ts, ierr);
4225 ierr = MatDestroy(&dummy);
4226 PCHKERRQ(ts, ierr);
4227 }
4228 if (!f_.isHomogeneous())
4229 {
4230 Mat dummy = NULL;
4231 if (!f_.isImplicit())
4232 {
4233 ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4234 PCHKERRQ(ts, ierr);
4235 }
4236 else
4237 {
4238 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4239 f_.Height(),&dummy);
4240 PCHKERRQ(ts, ierr);
4241 }
4242 ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
4243 PCHKERRQ(ts, ierr);
4244 ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4245 (void *)ts_ctx);
4246 PCHKERRQ(ts, ierr);
4247 ierr = MatDestroy(&dummy);
4248 PCHKERRQ(ts, ierr);
4249 }
4250 operatorset = true;
4251
4252 SetType(type);
4253
4254 // Set solution vector
4255 PetscParVector X(PetscObjectComm(obj),*f,false,true);
4256 ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4257
4258 // Compose a special-purpose function for PDE-constrained optimization
4259 PetscBool use = PETSC_TRUE;
4260 ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
4261 if (use && f_.isImplicit())
4262 {
4263 ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4264 __mfem_ts_computesplits);
4265 PCHKERRQ(ts,ierr);
4266 }
4267 else
4268 {
4269 ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4270 NULL);
4271 PCHKERRQ(ts,ierr);
4272 }
4273}
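// (The composed split-Jacobian function can be disabled at run time through
// the PETSc options database, e.g. by passing "-mfem_use_splitjac 0" on the
// command line.)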
4274
4275void PetscODESolver::SetJacobianType(Operator::Type jacType)
4276{
4277 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4278 ts_ctx->jacType = jacType;
4279}
4280
4281PetscODESolver::Type PetscODESolver::GetType() const
4282{
4283 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4284 return ts_ctx->type;
4285}
4286
4287void PetscODESolver::SetType(PetscODESolver::Type type)
4288{
4289 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4290
4291 TS ts = (TS)obj;
4292 ts_ctx->type = type;
4293 if (type == ODE_SOLVER_LINEAR)
4294 {
4295 ierr = TSSetProblemType(ts, TS_LINEAR);
4296 PCHKERRQ(ts, ierr);
4297 }
4298 else
4299 {
4300 ierr = TSSetProblemType(ts, TS_NONLINEAR);
4301 PCHKERRQ(ts, ierr);
4302 }
4303}
4304
4305void PetscODESolver::Step(Vector &x, mfem::real_t &t, mfem::real_t &dt)
4306{
4307 // Pass the parameters to PETSc.
4308 TS ts = (TS)obj;
4309 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4310 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4311
4312 PetscInt i;
4313 ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4314
4315 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4316 X->PlaceMemory(x.GetMemory(),true);
4317
4318 Customize();
4319
4320 // Monitor initial step
4321 if (!i)
4322 {
4323 ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4324 }
4325
4326 // Take the step.
4327 ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4328 ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4329
4330 // Return the current time and the time step actually used to the caller.
4331 // We cannot use TSGetTimeStep() since it returns the next candidate step
4332 PetscReal pt;
4333 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4334 dt = pt - (PetscReal)t;
4335 t = pt;
4336
4337 // Monitor current step
4338 ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4339
4340 X->ResetMemory();
4341}
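// A minimal time-stepping sketch (illustrative; my_tdo, u and t_final are
// assumed to exist): PetscODESolver implements the mfem::ODESolver
// interface, so it is a drop-in replacement for the native integrators:
//
//   mfem::PetscODESolver ode(MPI_COMM_WORLD, "ts_");
//   ode.Init(my_tdo, mfem::PetscODESolver::ODE_SOLVER_GENERAL);
//   mfem::real_t t = 0.0, dt = 1e-2;
//   while (t < t_final) { ode.Step(u, t, dt); }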
4342
4343void PetscODESolver::Run(Vector &x, mfem::real_t &t, mfem::real_t &dt,
4344 mfem::real_t t_final)
4345{
4346 // Give the parameters to PETSc.
4347 TS ts = (TS)obj;
4348 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4349 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4350 ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4351 ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4352 PCHKERRQ(ts, ierr);
4353
4354 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4355 X->PlaceMemory(x.GetMemory(),true);
4356
4357 Customize();
4358
4359 // Reset Jacobian caching since the user may have changed
4360 // the parameters of the solver
4361 // We don't do this in the Step method because two consecutive
4362 // Step() calls are done with the same operator
4363 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4364 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4365 ts_ctx->cached_ijacstate = -1;
4366 ts_ctx->cached_rhsjacstate = -1;
4367 ts_ctx->cached_splits_xstate = -1;
4368 ts_ctx->cached_splits_xdotstate = -1;
4369
4370 // Take the steps.
4371 ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4372 X->ResetMemory();
4373
4374 // Return the final time and time step to the caller.
4375 PetscReal pt;
4376 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4377 t = pt;
4378 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4379 dt = pt;
4380}
4381
4382} // namespace mfem
4383
4384#include "petsc/private/petscimpl.h"
4385#include "petsc/private/matimpl.h"
4386
4387// auxiliary functions
4388static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
4389 void* ctx)
4390{
4391 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4392
4393 PetscFunctionBeginUser;
4394 if (!monctx)
4395 {
4396 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4397 }
4398 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4399 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4400 monctx->monitor);
4401
4402 if (user_monitor->mon_sol)
4403 {
4404 mfem::PetscParVector V(x,true);
4405 user_monitor->MonitorSolution(it,t,V);
4406 }
4407 user_monitor->MonitorSolver(solver);
4408 PetscFunctionReturn(PETSC_SUCCESS);
4409}
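// A user monitor is any subclass of mfem::PetscSolverMonitor, attached with
// PetscSolver::SetMonitor() (a sketch; MyMonitor is an illustrative name).
// The two booleans passed to the base constructor select which callbacks
// fire:
//
//   class MyMonitor : public mfem::PetscSolverMonitor
//   {
//   public:
//      MyMonitor() : mfem::PetscSolverMonitor(true, false) {}
//      void MonitorSolution(PetscInt it, PetscReal t,
//                           const mfem::Vector &X) override
//      { mfem::out << "step " << it << ", time " << t << '\n'; }
//   };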
4410
4411static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
4412 Vec f,void *ctx)
4413{
4414 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4415
4416 PetscFunctionBeginUser;
4417 mfem::PetscParVector xx(x,true);
4418 mfem::PetscParVector yy(xp,true);
4419 mfem::PetscParVector ff(f,true);
4420
4421 mfem::TimeDependentOperator *op = ts_ctx->op;
4422 op->SetTime(t);
4423
4424 if (ts_ctx->bchandler)
4425 {
4426 // we evaluate the ImplicitMult method with the correct bc;
4427 // this means the time derivative at the essential boundary
4428 // dofs is zero
4429 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
4430 if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
4431 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4432 mfem::Vector* txx = ts_ctx->work;
4433 mfem::Vector* txp = ts_ctx->work2;
4434 bchandler->SetTime(t);
4435 bchandler->ApplyBC(xx,*txx);
4436 bchandler->ZeroBC(yy,*txp);
4437 op->ImplicitMult(*txx,*txp,ff);
4438 // and fix the residual (i.e. f_\partial\Omega = u - g(t))
4439 bchandler->FixResidualBC(xx,ff);
4440 }
4441 else
4442 {
4443 // use the ImplicitMult method of the class
4444 op->ImplicitMult(xx,yy,ff);
4445 }
4446 ff.UpdateVecFromFlags();
4447 PetscFunctionReturn(PETSC_SUCCESS);
4448}
4449
4450static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
4451 void *ctx)
4452{
4453 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4454
4455 PetscFunctionBeginUser;
4456 if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
4457 mfem::PetscParVector xx(x,true);
4458 mfem::PetscParVector ff(f,true);
4459 mfem::TimeDependentOperator *top = ts_ctx->op;
4460 top->SetTime(t);
4461
4462 // use the ExplicitMult method - compute the RHS function
4463 top->ExplicitMult(xx,ff);
4464
4465 ff.UpdateVecFromFlags();
4466 PetscFunctionReturn(PETSC_SUCCESS);
4467}
4468
4469static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
4470 Vec xp, PetscReal shift, Mat A, Mat P,
4471 void *ctx)
4472{
4473 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4474 mfem::Vector *xx;
4475 PetscScalar *array;
4476 PetscReal eps = 0.001; /* 0.1% difference */
4477 PetscInt n;
4478 PetscObjectState state;
4479 PetscErrorCode ierr;
4480
4481 PetscFunctionBeginUser;
4482 // Matrix-free case
4483 if (A && A != P)
4484 {
4485 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4486 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4487 }
4488
4489 // prevent recomputing the Jacobian if we have already done so;
4490 // the relative tolerance comparison should be fine since two
4491 // consecutive shifts should have similar magnitudes
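 // (e.g. for backward Euler the shift is 1/dt, so with a constant time step
 // consecutive shifts match exactly and the cached Jacobian is reused)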
4492 ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4493 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4494 std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4495 state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4496
4497 // update time
4498 mfem::TimeDependentOperator *op = ts_ctx->op;
4499 op->SetTime(t);
4500
4501 // wrap Vecs with Vectors
4502 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4503 ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4504 mfem::Vector yy(array,n);
4505 ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4506 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4507 if (!ts_ctx->bchandler)
4508 {
4509 xx = new mfem::Vector(array,n);
4510 }
4511 else
4512 {
4513 // make sure we compute a Jacobian with the correct boundary values
4514 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4515 mfem::Vector txx(array,n);
4516 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4517 xx = ts_ctx->work;
4518 bchandler->SetTime(t);
4519 bchandler->ApplyBC(txx,*xx);
4520 }
4521 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4522
4523 // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
4524 mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
4525 if (!ts_ctx->bchandler) { delete xx; }
4526 ts_ctx->cached_shift = shift;
4527
4528 // Convert to the operator type requested if needed
4529 bool delete_pA = false;
4530 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4531 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4532 if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4533 pA->GetType() != ts_ctx->jacType))
4534 {
4535 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4536 ts_ctx->jacType);
4537 delete_pA = true;
4538 }
4539
4540 // Eliminate essential dofs
4541 if (ts_ctx->bchandler)
4542 {
4543 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4544 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4545 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4546 }
4547
4548 // Get nonzerostate
4549 PetscObjectState nonzerostate;
4550 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4551
4552 // Avoid an unneeded copy of the matrix by swapping its header
4553 Mat B;
4554 B = pA->ReleaseMat(false);
4555 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4556 if (delete_pA) { delete pA; }
4557
4558 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4559 // preconditioner fails because MatCreateSubMatrix_Nest does not
4560 // actually return a matrix. Instead, for efficiency reasons,
4561 // it returns a reference to the submatrix. The second time it
4562 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4563 // aborts since the two submatrices are actually different.
4564 // We circumvent this issue by incrementing the nonzero state
4565 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4566 // This does not impact performance in the case of MATNEST
4567 PetscBool isnest;
4568 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4569 CHKERRQ(ierr);
4570 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4571
4572 // Jacobian reuse
4573 ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
4574 CHKERRQ(ierr);
4575
4576 // Fool the DM, so that it reuses P and its matrix type
4577 DM dm;
4578 MatType mtype;
4579 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4580 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4581 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4582 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4583 PetscFunctionReturn(PETSC_SUCCESS);
4584}
4585
4586static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
4587 Mat Ax,Mat Jx,
4588 Mat Axp,Mat Jxp)
4589{
4590 __mfem_ts_ctx* ts_ctx;
4591 mfem::Vector *xx;
4592 PetscScalar *array;
4593 PetscInt n;
4594 PetscObjectState state;
4595 PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4596 PetscBool assembled;
4597 PetscErrorCode ierr;
4598
4599 PetscFunctionBeginUser;
4600 // Matrix-free cases
4601 if (Ax && Ax != Jx)
4602 {
4603 ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4604 ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4605 }
4606 if (Axp && Axp != Jxp)
4607 {
4608 ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4609 ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4610 }
4611
4612 ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
4613
4614 // prevent recomputing the Jacobians if we have already done so
4615 ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
4616 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4617 state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4618 ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
4619 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4620 state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4621 if (!rx && !rxp) { PetscFunctionReturn(PETSC_SUCCESS); }
4622
4623 // update time
4624 mfem::TimeDependentOperator *op = ts_ctx->op;
4625 op->SetTime(t);
4626
4627 // wrap Vecs with Vectors
4628 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4629 ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4630 mfem::Vector yy(array,n);
4631 ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4632 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4633 if (!ts_ctx->bchandler)
4634 {
4635 xx = new mfem::Vector(array,n);
4636 }
4637 else
4638 {
4639 // make sure we compute a Jacobian with the correct boundary values
4640 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4641 mfem::Vector txx(array,n);
4642 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4643 xx = ts_ctx->work;
4644 bchandler->SetTime(t);
4645 bchandler->ApplyBC(txx,*xx);
4646 }
4647 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4648
4649 // We don't have a specialized interface, so we compute the split Jacobians
4650 // by evaluating the implicit gradient method twice with the correct shifts
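// (In symbols: GetImplicitGradient(x, xdot, s) returns J(s) = dF/dx + s*dF/dxdot,
// so J(0) = J_x and J(1) - J(0) = J_xdot; the subtraction is the MatAXPY below.)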
4651
4652 // first we compute the state Jacobian
4653 mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
4654
4655 // Convert to the operator type requested if needed
4656 bool delete_mat = false;
4657 mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
4658 (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
4659 if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4660 pJx->GetType() != ts_ctx->jacType))
4661 {
4662 if (pJx)
4663 {
4664 Mat B = *pJx;
4665 ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4666 }
4667 pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
4668 ts_ctx->jacType);
4669 delete_mat = true;
4670 }
4671 if (rx)
4672 {
4673 ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4674 if (assembled)
4675 {
4676 ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4677 }
4678 else
4679 {
4680 Mat B;
4681 ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4682 ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4683 }
4684 }
4685 if (delete_mat) { delete pJx; }
4686 pJx = new mfem::PetscParMatrix(Jx,true);
4687
4688 // Eliminate essential dofs
4689 if (ts_ctx->bchandler)
4690 {
4691 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4692 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4693 pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4694 }
4695
4696 // Then we compute the Jacobian wrt the time derivative of the state.
4697 // Note that this is usually the mass matrix
4698 mfem::PetscParMatrix *pJxp = NULL;
4699 if (rxp)
4700 {
4701 delete_mat = false;
4702 mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
4703 pJxp = const_cast<mfem::PetscParMatrix *>
4704 (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
4705 if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4706 pJxp->GetType() != ts_ctx->jacType))
4707 {
4708 if (pJxp)
4709 {
4710 Mat B = *pJxp;
4711 ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4712 }
4713 pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
4714 &oJxp,ts_ctx->jacType);
4715 delete_mat = true;
4716 }
4717
4718 ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4719 if (assembled)
4720 {
4721 ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4722 }
4723 else
4724 {
4725 Mat B;
4726 ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4727 ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4728 }
4729 if (delete_mat) { delete pJxp; }
4730 pJxp = new mfem::PetscParMatrix(Jxp,true);
4731
4732 // Eliminate essential dofs
4733 if (ts_ctx->bchandler)
4734 {
4735 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4736 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4737 pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
4738 }
4739
4740 // Obtain the time-dependent part of the Jacobian by subtracting
4741 // the state Jacobian.
4742 // We don't use the class operator "-=" since we know that the
4743 // sparsity patterns of the two matrices are the same
4744 ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4745 }
4746
4747 // Jacobian reuse
4748 ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4749 CHKERRQ(ierr);
4750 ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4751 CHKERRQ(ierr);
4752
4753 delete pJx;
4754 delete pJxp;
4755 if (!ts_ctx->bchandler) { delete xx; }
4756 PetscFunctionReturn(PETSC_SUCCESS);
4757}
4758
4759static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4760 Mat A, Mat P, void *ctx)
4761{
4762 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4763 mfem::Vector *xx;
4764 PetscScalar *array;
4765 PetscInt n;
4766 PetscObjectState state;
4767 PetscErrorCode ierr;
4768
4769 PetscFunctionBeginUser;
4770 // Matrix-free case
4771 if (A && A != P)
4772 {
4773 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4774 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4775 }
4776
4777 // prevent recomputing the Jacobian if we have already done so
4778 ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4779 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4780 state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4781
4782 // update time
4783 mfem::TimeDependentOperator *op = ts_ctx->op;
4784 op->SetTime(t);
4785
4786 // wrap Vec with Vector
4787 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4788 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4789 if (!ts_ctx->bchandler)
4790 {
4791 xx = new mfem::Vector(array,n);
4792 }
4793 else
4794 {
4795 // make sure we compute a Jacobian with the correct boundary values
4796 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4797 mfem::Vector txx(array,n);
4798 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4799 xx = ts_ctx->work;
4800 bchandler->SetTime(t);
4801 bchandler->ApplyBC(txx,*xx);
4802 }
4803 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4804
4805 // Use TimeDependentOperator::GetExplicitGradient(x)
4806 mfem::Operator& J = op->GetExplicitGradient(*xx);
4807 if (!ts_ctx->bchandler) { delete xx; }
4808
4809 // Convert to the operator type requested if needed
4810 bool delete_pA = false;
4811 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4812 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4813 if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4814 pA->GetType() != ts_ctx->jacType))
4815 {
4816 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4817 ts_ctx->jacType);
4818 delete_pA = true;
4819 }
4820
4821 // Eliminate essential dofs
4822 if (ts_ctx->bchandler)
4823 {
4824 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4825 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4826 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4827 }
4828
4829 // Get nonzerostate
4830 PetscObjectState nonzerostate;
4831 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4832
4833 // Avoid an unneeded copy of the matrix by swapping its header
4834 Mat B;
4835 B = pA->ReleaseMat(false);
4836 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4837 if (delete_pA) { delete pA; }
4838
4839 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4840 // preconditioner fails because MatCreateSubMatrix_Nest does not
4841 // actually return a matrix. Instead, for efficiency reasons,
4842 // it returns a reference to the submatrix. The second time it
4843 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4844 // aborts since the two submatrices are actually different.
4845 // We circumvent this issue by incrementing the nonzero state
4846 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4847 // This does not impact performance in the case of MATNEST
4848 PetscBool isnest;
4849 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4850 CHKERRQ(ierr);
4851 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4852
4853 // Jacobian reuse
4854 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4855 {
4856 ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4857 }
4858 ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_rhsjacstate);
4859 CHKERRQ(ierr);
4860
4861 // Fool the DM, so that it reuses P and its matrix type
4862 DM dm;
4863 MatType mtype;
4864 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4865 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4866 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4867 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4868 PetscFunctionReturn(PETSC_SUCCESS);
4869}
4870
4871static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4872 void* ctx)
4873{
4874 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4875
4876 PetscFunctionBeginUser;
4877 if (!monctx)
4878 {
4879 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4880 }
4881
4882 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4883 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4884 monctx->monitor);
4885 if (user_monitor->mon_sol)
4886 {
4887 Vec x;
4888 PetscErrorCode ierr;
4889
4890 ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4891 mfem::PetscParVector V(x,true);
4892 user_monitor->MonitorSolution(it,res,V);
4893 }
4894 if (user_monitor->mon_res)
4895 {
4896 Vec x;
4897 PetscErrorCode ierr;
4898
4899 ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4900 mfem::PetscParVector V(x,true);
4901 user_monitor->MonitorResidual(it,res,V);
4902 }
4903 user_monitor->MonitorSolver(solver);
4904 PetscFunctionReturn(PETSC_SUCCESS);
4905}
4906
4907static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4908 void *ctx)
4909{
4910 PetscScalar *array;
4911 PetscInt n;
4912 PetscErrorCode ierr;
4913 mfem::Vector *xx;
4914 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4915
4916 PetscFunctionBeginUser;
4917 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4918 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4919 if (!snes_ctx->bchandler)
4920 {
4921 xx = new mfem::Vector(array,n);
4922 }
4923 else
4924 {
4925 // make sure we compute a Jacobian with the correct boundary values
4926 if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4927 mfem::Vector txx(array,n);
4928 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4929 xx = snes_ctx->work;
4930 bchandler->ApplyBC(txx,*xx);
4931 }
4932
4933 // Use Operator::GetGradient(x)
4934 mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4935 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4936 if (!snes_ctx->bchandler) { delete xx; }
4937
4938 // Convert to the operator type requested if needed
4939 bool delete_pA = false;
4940 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4941 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4942 if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4943 pA->GetType() != snes_ctx->jacType))
4944 {
4945 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4946 snes_ctx->jacType);
4947 delete_pA = true;
4948 }
4949
4950 // Eliminate essential dofs
4951 if (snes_ctx->bchandler)
4952 {
4953 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4954 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4955 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4956 }
4957
4958 // Get nonzerostate
4959 PetscObjectState nonzerostate;
4960 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4961
4962 // Avoid an unneeded copy of the matrix by swapping its header
4963 Mat B = pA->ReleaseMat(false);
4964 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4965 if (delete_pA) { delete pA; }
4966
4967 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4968 // preconditioner fails because MatCreateSubMatrix_Nest does not
4969 // actually return a matrix. Instead, for efficiency reasons,
4970 // it returns a reference to the submatrix. The second time it
4971 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4972 // aborts since the two submatrices are actually different.
4973 // We circumvent this issue by incrementing the nonzero state
4974 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4975 // This does not impact performance in the case of MATNEST
4976 PetscBool isnest;
4977 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4978 CHKERRQ(ierr);
4979 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4980
4981 // Matrix-free case
4982 if (A && A != P)
4983 {
4984 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4985 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4986 }
4987
4988 // Fool the DM, so that it reuses P and its matrix type
4989 DM dm;
4990 MatType mtype;
4991 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4992 ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4993 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4994 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4995 PetscFunctionReturn(PETSC_SUCCESS);
4996}
4997
4998static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4999{
5000 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5001
5002 PetscFunctionBeginUser;
5003 mfem::PetscParVector xx(x,true);
5004 mfem::PetscParVector ff(f,true);
5005 if (snes_ctx->bchandler)
5006 {
5007 // we evaluate the Mult method with the correct bc
5008 if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
5009 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
5010 mfem::Vector* txx = snes_ctx->work;
5011 bchandler->ApplyBC(xx,*txx);
5012 snes_ctx->op->Mult(*txx,ff);
5013 // and fix the residual (i.e. f_\partial\Omega = u - g)
5014 bchandler->FixResidualBC(xx,ff);
5015 }
5016 else
5017 {
5018 // use the Mult method of the class
5019 snes_ctx->op->Mult(xx,ff);
5020 }
5021 ff.UpdateVecFromFlags();
5022 PetscFunctionReturn(PETSC_SUCCESS);
5023}
5024
5025static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
5026 void *ctx)
5027{
5028 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5029
5030 PetscFunctionBeginUser;
5031 if (!snes_ctx->objective)
5032 {
5033 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
5034 }
5035 mfem::PetscParVector xx(x,true);
5036 mfem::real_t lf;
5037 (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
5038 *f = (PetscReal)lf;
5039 PetscFunctionReturn(PETSC_SUCCESS);
5040}
5041
5042static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
5043 PetscBool *cy,PetscBool *cw, void* ctx)
5044{
5045 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5046 bool lcy = false,lcw = false;
5047
5048 PetscFunctionBeginUser;
5049 mfem::PetscParVector x(X,true);
5050 mfem::PetscParVector y(Y,true);
5051 mfem::PetscParVector w(W,true);
5052 (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
5053 if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
5054 if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
5055 PetscFunctionReturn(PETSC_SUCCESS);
5056}
5057
5058static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
5059{
5060 Vec F,X,dX,pX;
5061 __mfem_snes_ctx* snes_ctx;
5062
5063 PetscFunctionBeginUser;
5064 /* SNESSetUpdate() takes no user context, so retrieve it from the function */
5065 ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
5066 ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
5067 if (!it)
5068 {
5069 ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
5070 ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
5071 CHKERRQ(ierr);
5072 ierr = VecDestroy(&pX); CHKERRQ(ierr);
5073 }
5074 ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
5075 CHKERRQ(ierr);
5076 if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
5077 "Missing previous solution");
5078 ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
5079 mfem::PetscParVector f(F,true);
5080 mfem::PetscParVector x(X,true);
5081 mfem::PetscParVector dx(dX,true);
5082 mfem::PetscParVector px(pX,true);
5083 (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
5084 /* Store previous solution */
5085 ierr = VecCopy(X,pX); CHKERRQ(ierr);
5086 PetscFunctionReturn(PETSC_SUCCESS);
5087}
5088
5089static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
5090 void* ctx)
5091{
5092 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
5093
5094 PetscFunctionBeginUser;
5095 if (!monctx)
5096 {
5097 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
5098 }
5099
5100 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
5101 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
5102 monctx->monitor);
5103 if (user_monitor->mon_sol)
5104 {
5105 Vec x;
5106 PetscErrorCode ierr;
5107
5108 ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
5109 mfem::PetscParVector V(x,true);
5110 user_monitor->MonitorSolution(it,res,V);
5111 }
5112 if (user_monitor->mon_res)
5113 {
5114 Vec x;
5115 PetscErrorCode ierr;
5116
5117 ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5118 mfem::PetscParVector V(x,true);
5119 user_monitor->MonitorResidual(it,res,V);
5120 }
5121 user_monitor->MonitorSolver(solver);
5122 PetscFunctionReturn(PETSC_SUCCESS);
5123}
5124
5125static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
5126{
5127 mfem::Operator *op;
5128 PetscErrorCode ierr;
5129
5130 PetscFunctionBeginUser;
5131 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5132 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5133 mfem::PetscParVector xx(x,true);
5134 mfem::PetscParVector yy(y,true);
5135 op->Mult(xx,yy);
5136 yy.UpdateVecFromFlags();
5137 PetscFunctionReturn(PETSC_SUCCESS);
5138}
5139
5140static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
5141{
5142 mfem::Operator *op;
5143 PetscErrorCode ierr;
5144 PetscBool flg,symm;
5145
5146 PetscFunctionBeginUser;
5147 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5148 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5149 mfem::PetscParVector xx(x,true);
5150 mfem::PetscParVector yy(y,true);
5151 ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5152 if (flg && symm)
5153 {
5154 op->Mult(xx,yy);
5155 }
5156 else
5157 {
5158 op->MultTranspose(xx,yy);
5159 }
5160 yy.UpdateVecFromFlags();
5161 PetscFunctionReturn(PETSC_SUCCESS);
5162}
5163
5164static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
5165{
5166 mfem::Operator *op;
5167 PetscErrorCode ierr;
5168
5169 PetscFunctionBeginUser;
5170 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5171 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5172 ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
5173 PetscFunctionReturn(PETSC_SUCCESS);
5174}
5175
5176static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
5177{
5178 PetscFunctionBeginUser;
5179 PetscFunctionReturn(PETSC_SUCCESS);
5180}
5181
5182static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
5183{
5184 __mfem_pc_shell_ctx *ctx;
5185 PetscErrorCode ierr;
5186
5187 PetscFunctionBeginUser;
5188 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5189 if (ctx->op)
5190 {
5191 PetscBool isascii;
5192 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5193 CHKERRQ(ierr);
5194
5195 mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
5196 (ctx->op);
5197 if (ppc)
5198 {
5199 ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5200 }
5201 else
5202 {
5203 if (isascii)
5204 {
5205 ierr = PetscViewerASCIIPrintf(viewer,
5206 "No information available on the mfem::Solver\n");
5207 CHKERRQ(ierr);
5208 }
5209 }
5210 if (isascii && ctx->factory)
5211 {
5212 ierr = PetscViewerASCIIPrintf(viewer,
5213 "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5214 CHKERRQ(ierr);
5215 }
5216 }
5217 PetscFunctionReturn(PETSC_SUCCESS);
5218}
5219
5220static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
5221{
5222 __mfem_pc_shell_ctx *ctx;
5223 PetscErrorCode ierr;
5224
5225 PetscFunctionBeginUser;
5226 mfem::PetscParVector xx(x,true);
5227 mfem::PetscParVector yy(y,true);
5228 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5229 if (ctx->op)
5230 {
5231 ctx->op->Mult(xx,yy);
5232 yy.UpdateVecFromFlags();
5233 }
5234 else // operator is not present, copy x
5235 {
5236 yy = xx;
5237 }
5238 PetscFunctionReturn(PETSC_SUCCESS);
5239}
5240
5241static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
5242{
5243 __mfem_pc_shell_ctx *ctx;
5244 PetscErrorCode ierr;
5245
5246 PetscFunctionBeginUser;
5247 mfem::PetscParVector xx(x,true);
5248 mfem::PetscParVector yy(y,true);
5249 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5250 if (ctx->op)
5251 {
5252 ctx->op->MultTranspose(xx,yy);
5253 yy.UpdateVecFromFlags();
5254 }
5255 else // operator is not present, copy x
5256 {
5257 yy = xx;
5258 }
5259 PetscFunctionReturn(PETSC_SUCCESS);
5260}
5261
5262static PetscErrorCode __mfem_pc_shell_setup(PC pc)
5263{
5264 __mfem_pc_shell_ctx *ctx;
5265
5266 PetscFunctionBeginUser;
5267 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5268 if (ctx->factory)
5269 {
5270 // Delete any owned operator
5271 if (ctx->ownsop)
5272 {
5273 delete ctx->op;
5274 }
5275
5276 // Get current preconditioning Mat
5277 Mat B;
5278 ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5279
5280 // Call user-defined setup
5281 mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
5282 mfem::PetscPreconditionerFactory *factory = ctx->factory;
5283 ctx->op = factory->NewPreconditioner(hB);
5284 ctx->ownsop = true;
5285 ctx->numprec++;
5286 }
5287 PetscFunctionReturn(PETSC_SUCCESS);
5288}
5289
5290static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
5291{
5292 __mfem_pc_shell_ctx *ctx;
5293 PetscErrorCode ierr;
5294
5295 PetscFunctionBeginUser;
5296 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5297 if (ctx->ownsop)
5298 {
5299 delete ctx->op;
5300 }
5301 delete ctx;
5302 PetscFunctionReturn(PETSC_SUCCESS);
5303}
5304
5305#if PETSC_VERSION_LT(3,23,0)
5306
5307static PetscErrorCode __mfem_array_container_destroy(void *ptr)
5308{
5309 PetscErrorCode ierr;
5310
5311 PetscFunctionBeginUser;
5312 ierr = PetscFree(ptr); CHKERRQ(ierr);
5313 PetscFunctionReturn(PETSC_SUCCESS);
5314}
5315
5316static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
5317{
5318 mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
5319 PetscErrorCode ierr;
5320
5321 PetscFunctionBeginUser;
5322 for (int i=0; i<a->Size(); i++)
5323 {
5324 Mat M = (*a)[i];
5325 MPI_Comm comm = PetscObjectComm((PetscObject)M);
5326 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5327 }
5328 delete a;
5329 PetscFunctionReturn(PETSC_SUCCESS);
5330}
5331
5332#else
5333
5334static PetscErrorCode __mfem_array_container_destroy(void **ptr)
5335{
5336 PetscErrorCode ierr;
5337
5338 PetscFunctionBeginUser;
5339 ierr = PetscFree(*ptr); CHKERRQ(ierr);
5340 PetscFunctionReturn(PETSC_SUCCESS);
5341}
5342
5343static PetscErrorCode __mfem_matarray_container_destroy(void **ptr)
5344{
5345 mfem::Array<Mat> *a = (mfem::Array<Mat>*)(*ptr);
5346 PetscErrorCode ierr;
5347
5348 PetscFunctionBeginUser;
5349 for (int i=0; i<a->Size(); i++)
5350 {
5351 Mat M = (*a)[i];
5352 MPI_Comm comm = PetscObjectComm((PetscObject)M);
5353 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5354 }
5355 delete a;
5356 PetscFunctionReturn(PETSC_SUCCESS);
5357}
5358
5359#endif
5360
5361static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
5362{
5363 PetscErrorCode ierr;
5364
5365 PetscFunctionBeginUser;
5366 ierr = PetscFree(*ctx); CHKERRQ(ierr);
5367 PetscFunctionReturn(PETSC_SUCCESS);
5368}
5369
5370// Sets the type of PC to PCSHELL and wraps the solver action
5371// if ownsop is true, ownership of precond is transferred to the PETSc object
5372PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
5373{
5374 PetscFunctionBeginUser;
5375 __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5376 ctx->op = &precond;
5377 ctx->ownsop = ownsop;
5378 ctx->factory = NULL;
5379 ctx->numprec = 0;
5380
5381 // In case the PC was already of type SHELL, this will destroy any
5382 // previous user-defined data structure
5383 // We cannot call PCReset as it will wipe out any operator already set
5384 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5385
5386 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5387 ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5388 ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5389 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5390 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5391 CHKERRQ(ierr);
5392 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5393 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5394 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5395 PetscFunctionReturn(PETSC_SUCCESS);
5396}
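// Usage sketch (illustrative; ksp is assumed to be a KSP set up elsewhere,
// and AttachMfemPC is a hypothetical helper):
//
//   static PetscErrorCode AttachMfemPC(KSP ksp, mfem::Solver &prec)
//   {
//      PC pc;
//      PetscErrorCode ierr;
//      PetscFunctionBeginUser;
//      ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
//      ierr = MakeShellPC(pc, prec, false); CHKERRQ(ierr);
//      PetscFunctionReturn(PETSC_SUCCESS);
//   }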
5397
5398// Sets the type of PC to PCSHELL. Uses a PetscPreconditionerFactory to construct the solver
5399// Takes ownership of the solver created by the factory
5400PetscErrorCode MakeShellPCWithFactory(PC pc,
5401 mfem::PetscPreconditionerFactory *factory)
5402{
5403 PetscFunctionBeginUser;
5404 __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5405 ctx->op = NULL;
5406 ctx->ownsop = true;
5407 ctx->factory = factory;
5408 ctx->numprec = 0;
5409
5410 // In case the PC was already of type SHELL, this will destroy any
5411 // previous user-defined data structure
5412 // We cannot call PCReset as it will wipe out any operator already set
5413 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5414
5415 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5416 ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
5417 ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5418 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5419 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5420 CHKERRQ(ierr);
5421 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5422 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5423 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5424 PetscFunctionReturn(PETSC_SUCCESS);
5425}
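// A factory is any subclass of mfem::PetscPreconditionerFactory (a sketch;
// MyFactory is an illustrative name). NewPreconditioner() is invoked by
// __mfem_pc_shell_setup on every PC setup with the current Pmat:
//
//   class MyFactory : public mfem::PetscPreconditionerFactory
//   {
//   public:
//      MyFactory() : mfem::PetscPreconditionerFactory("my factory") {}
//      mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh) override
//      {
//         mfem::PetscParMatrix *pP;
//         oh.Get(pP);
//         return new mfem::PetscPreconditioner(*pP, "factory_");
//      }
//   };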
5426
5427// Converts from a list (or a marked Array if islist is false) to an IS
5428 // st indicates the offset at which to start numbering
5429static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
5430 const mfem::Array<int> *list,
5431 PetscInt st, IS* is)
5432{
5433 PetscInt n = list ? list->Size() : 0,*idxs;
5434 const int *data = list ? list->GetData() : NULL;
5435 PetscErrorCode ierr;
5436
5437 PetscFunctionBeginUser;
5438 ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5439 if (islist)
5440 {
5441 for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5442 }
5443 else
5444 {
5445 PetscInt cum = 0;
5446 for (PetscInt i=0; i<n; i++)
5447 {
5448 if (data[i]) { idxs[cum++] = i+st; }
5449 }
5450 n = cum;
5451 }
5452 ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5453 CHKERRQ(ierr);
5454 PetscFunctionReturn(PETSC_SUCCESS);
5455}
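// For example (a sketch of the two modes): with islist == true, list = {0,2,5}
// and st = 10 yield the IS {10,12,15}; with islist == false, the marker array
// {1,0,1} yields the IS {st, st+2}.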
5456
5457// Converts from a marked Array of Vdofs to an IS
5458 // st indicates the offset at which to start numbering
5459// l2l is a vector of matrices generated during RAP
5460static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5461 mfem::Array<Mat> &pl2l,
5462 const mfem::Array<int> *mark,
5463 PetscInt st, IS* is)
5464{
5465 mfem::Array<int> sub_dof_marker;
5466 mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
5467 PetscInt nl;
5468 PetscErrorCode ierr;
5469
5470 PetscFunctionBeginUser;
5471 for (int i = 0; i < pl2l.Size(); i++)
5472 {
5473 PetscInt m,n,*ii,*jj;
5474 PetscBool done;
5475 ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
5476 (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5477 MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
5478 ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5479#if defined(PETSC_USE_64BIT_INDICES)
5480 int nnz = (int)ii[m];
5481 int *mii = new int[m+1];
5482 int *mjj = new int[nnz];
5483 for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5484 for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5485 l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
5486#else
5487 l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
5488#endif
5489 ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5490 (const PetscInt**)&ii,
5491 (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5492 MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
5493 << i << " l2l matrix");
5494 }
5495 nl = 0;
5496 for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5497 sub_dof_marker.SetSize(nl);
5498 const int* vdata = mark->GetData();
5499 int* sdata = sub_dof_marker.GetData();
5500 int cumh = 0, cumw = 0;
5501 for (int i = 0; i < l2l.Size(); i++)
5502 {
5503 const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
5504 l2l[i]->Height());
5505 mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
5506 l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5507 cumh += l2l[i]->Height();
5508 cumw += l2l[i]->Width();
5509 }
5510 ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5511 for (int i = 0; i < pl2l.Size(); i++)
5512 {
5513 delete l2l[i];
5514 }
5515 PetscFunctionReturn(PETSC_SUCCESS);
5516}
5517
5518#if !defined(PETSC_HAVE_HYPRE)
5519
5520#if defined(HYPRE_MIXEDINT)
5521#error "HYPRE_MIXEDINT not supported"
5522#endif
5523
5524#include "_hypre_parcsr_mv.h"
5525static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
5526{
5527 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5528 hypre_CSRMatrix *hdiag,*hoffd;
5529 PetscScalar *da,*oa,*aptr;
5530 PetscInt *dii,*djj,*oii,*ojj,*iptr;
5531 PetscInt i,dnnz,onnz,m,n;
5532 PetscMPIInt size;
5533 PetscErrorCode ierr;
5534
5535 PetscFunctionBeginUser;
5536 hdiag = hypre_ParCSRMatrixDiag(hA);
5537 hoffd = hypre_ParCSRMatrixOffd(hA);
5538 m = hypre_CSRMatrixNumRows(hdiag);
5539 n = hypre_CSRMatrixNumCols(hdiag);
5540 dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5541 onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5542 ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5543 ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5544 ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5545 ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
5546 CHKERRQ(ierr);
5547 ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
5548 CHKERRQ(ierr);
5549 ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
5550 CHKERRQ(ierr);
5551 iptr = djj;
5552 aptr = da;
5553 for (i=0; i<m; i++)
5554 {
5555 PetscInt nc = dii[i+1]-dii[i];
5556 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5557 iptr += nc;
5558 aptr += nc;
5559 }
5560 ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5561 if (size > 1)
5562 {
5563 PetscInt *offdj,*coffd;
5564
5565 ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5566 ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5567 ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5568 ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
5569 CHKERRQ(ierr);
5570 offdj = hypre_CSRMatrixJ(hoffd);
5571 coffd = hypre_ParCSRMatrixColMapOffd(hA);
5572 for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5573 ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
5574 CHKERRQ(ierr);
5575 iptr = ojj;
5576 aptr = oa;
5577 for (i=0; i<m; i++)
5578 {
5579 PetscInt nc = oii[i+1]-oii[i];
5580 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5581 iptr += nc;
5582 aptr += nc;
5583 }
5584 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5585 djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5586 }
5587 else
5588 {
5589 oii = ojj = NULL;
5590 oa = NULL;
5591 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5592 }
5593 /* We are responsible for freeing the CSR arrays. However, since we can
5594 take references to a PetscParMatrix but not to raw PETSc arrays, we
5595 create PetscContainer objects that hold references to these arrays and
5596 free them when the matrix is destroyed */
5597 void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5598 const char *names[6] = {"_mfem_csr_dii",
5599 "_mfem_csr_djj",
5600 "_mfem_csr_da",
5601 "_mfem_csr_oii",
5602 "_mfem_csr_ojj",
5603 "_mfem_csr_oa"
5604 };
5605 for (i=0; i<6; i++)
5606 {
5607 PetscContainer c;
5608
5609 ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5610 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5611#if PETSC_VERSION_LT(3,23,0)
5612 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5613#else
5614 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
5615#endif
5616 CHKERRQ(ierr);
5617 ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
5618 CHKERRQ(ierr);
5619 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5620 }
5621 PetscFunctionReturn(PETSC_SUCCESS);
5622}
5623
5624static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
5625{
5626 Mat lA;
5627 ISLocalToGlobalMapping rl2g,cl2g;
5628 IS is;
5629 hypre_CSRMatrix *hdiag,*hoffd;
5630 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5631 void *ptrs[2];
5632 const char *names[2] = {"_mfem_csr_aux",
5633 "_mfem_csr_data"
5634 };
5635 PetscScalar *hdd,*hod,*aa,*data;
5636 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5637 PetscInt *aux,*ii,*jj;
5638 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5639 PetscErrorCode ierr;
5640
5641 PetscFunctionBeginUser;
5642 /* access relevant information in ParCSR */
5643 str = hypre_ParCSRMatrixFirstRowIndex(hA);
5644 stc = hypre_ParCSRMatrixFirstColDiag(hA);
5645 hdiag = hypre_ParCSRMatrixDiag(hA);
5646 hoffd = hypre_ParCSRMatrixOffd(hA);
5647 dr = hypre_CSRMatrixNumRows(hdiag);
5648 dc = hypre_CSRMatrixNumCols(hdiag);
5649 nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5650 hdi = hypre_CSRMatrixI(hdiag);
5651 hdj = hypre_CSRMatrixJ(hdiag);
5652 hdd = hypre_CSRMatrixData(hdiag);
5653 oc = hypre_CSRMatrixNumCols(hoffd);
5654 nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5655 hoi = hypre_CSRMatrixI(hoffd);
5656 hoj = hypre_CSRMatrixJ(hoffd);
5657 hod = hypre_CSRMatrixData(hoffd);
5658
5659 /* generate l2g maps for rows and cols */
5660 ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5661 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5662 ierr = ISDestroy(&is); CHKERRQ(ierr);
5663 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5664 ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5665 for (i=0; i<dc; i++) { aux[i] = i+stc; }
5666 for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5667 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5668 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5669 ierr = ISDestroy(&is); CHKERRQ(ierr);
5670
5671 /* create MATIS object */
5672 ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5673 ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5674 ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5675 ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5676 ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5677 ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5678
5679 /* merge local matrices */
5680 ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5681 ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5682 ii = aux;
5683 jj = aux+dr+1;
5684 aa = data;
5685 *ii = *(hdi++) + *(hoi++);
5686 for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5687 {
5688 PetscScalar *aold = aa;
5689 PetscInt *jold = jj,nc = jd+jo;
5690 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5691 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5692 *(++ii) = *(hdi++) + *(hoi++);
5693 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5694 }
5695 for (; cum<dr; cum++) { *(++ii) = nnz; }
5696 ii = aux;
5697 jj = aux+dr+1;
5698 aa = data;
5699 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5700 CHKERRQ(ierr);
5701 ptrs[0] = aux;
5702 ptrs[1] = data;
5703 for (i=0; i<2; i++)
5704 {
5705 PetscContainer c;
5706
5707 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5708 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5709#if PETSC_VERSION_LT(3,23,0)
5710 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5711#else
5712 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
5713#endif
5714 CHKERRQ(ierr);
5715 ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
5716 CHKERRQ(ierr);
5717 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5718 }
5719 ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5720 ierr = MatDestroy(&lA); CHKERRQ(ierr);
5721 ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5722 ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5723 PetscFunctionReturn(PETSC_SUCCESS);
5724}
5725#endif
5726
5727#include <petsc/private/matimpl.h>
5728
5729static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm, PetscInt m,
5730 PetscInt n, Mat *A)
5731{
5732 PetscFunctionBegin;
5733 ierr = MatCreate(comm,A); CHKERRQ(ierr);
5734 ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5735 ierr = PetscObjectChangeTypeName((PetscObject)*A,"mfemdummy"); CHKERRQ(ierr);
5736 (*A)->preallocated = PETSC_TRUE;
5737 ierr = MatSetUp(*A); CHKERRQ(ierr);
5738 PetscFunctionReturn(PETSC_SUCCESS);
5739}
5740
5741#include <petsc/private/vecimpl.h>
5742
5743#if defined(PETSC_HAVE_DEVICE)
5744static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5745{
5746 PetscFunctionBegin;
5747 v->offloadmask = m;
5748 PetscFunctionReturn(PETSC_SUCCESS);
5749}
5750#endif
5751
5752static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5753{
5754 PetscFunctionBegin;
5755#if defined(PETSC_HAVE_DEVICE)
5756 *flg = v->boundtocpu;
5757#else
5758 *flg = PETSC_TRUE;
5759#endif
5760 PetscFunctionReturn(PETSC_SUCCESS);
5761}
5762
5763static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject o)
5764{
5765 PetscErrorCode ierr;
5766
5767 PetscFunctionBegin;
5768 ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5769 PetscFunctionReturn(PETSC_SUCCESS);
5770}
5771
5772#endif // MFEM_USE_PETSC
5773#endif // MFEM_USE_MPI
void Assign(const T *)
Copy data from a pointer. 'Size()' elements are copied.
Definition array.hpp:986
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
Definition array.hpp:758
int Size() const
Return the logical size of the array.
Definition array.hpp:147
T * GetData()
Returns the data.
Definition array.hpp:121
A class to handle Block systems in a matrix-free implementation.
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
int NumRowBlocks() const
Return the number of row blocks.
int NumColBlocks() const
Return the number of column blocks.
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
Definition device.hpp:259
static int GetId()
Get the device id of the configured device.
Definition device.hpp:253
static MemoryType GetDeviceMemoryType()
Get the current Device MemoryType. This is the MemoryType used by most MFEM classes when allocating m...
Definition device.hpp:274
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
Definition fe_coll.hpp:27
Ordering::Type GetOrdering() const
Return the ordering method.
Definition fespace.hpp:876
const FiniteElementCollection * FEColl() const
Definition fespace.hpp:878
int GetVDim() const
Returns the vector dimension of the finite element space.
Definition fespace.hpp:841
Wrapper for hypre's ParCSR matrix class.
Definition hypre.hpp:408
MPI_Comm GetComm() const
MPI communicator.
Definition hypre.hpp:598
Wrapper for hypre's parallel vector class.
Definition hypre.hpp:219
Identity Operator I: x -> x.
Definition operator.hpp:803
Class used by MFEM to store pointers to host and/or device memory.
bool DeviceIsValid() const
Return true if device pointer is valid.
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
bool HostIsValid() const
Return true if host pointer is valid.
bool UseDevice() const
Read the internal device flag.
void CopyToHost(T *dest, int size) const
Copy size entries from *this to the host pointer dest.
bool Empty() const
Return true if the Memory object is empty, see Reset().
void Sync(const Memory &other) const
Copy the host/device pointer validity flags from other to *this.
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void Wrap(T *ptr, int size, bool own)
Wrap an externally allocated host pointer, ptr with the current host memory type returned by MemoryMa...
void Delete()
Delete the owned pointers and reset the Memory object.
int SpaceDimension() const
Dimension of the physical space containing the mesh.
Definition mesh.hpp:1219
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
Definition ode.hpp:110
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Definition ode.hpp:113
Pointer to an Operator of a specified type.
Definition handle.hpp:34
Abstract operator.
Definition operator.hpp:25
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
Definition operator.hpp:86
int width
Dimension of the input / number of columns in the matrix.
Definition operator.hpp:28
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
Definition operator.hpp:66
int height
Dimension of the output / number of rows in the matrix.
Definition operator.hpp:27
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
Definition operator.hpp:72
Type
Enumeration defining IDs for some classes derived from Operator.
Definition operator.hpp:284
@ ANY_TYPE
ID for the base class Operator, i.e. any type.
Definition operator.hpp:285
@ PETSC_MATIS
ID for class PetscParMatrix, MATIS format.
Definition operator.hpp:289
@ PETSC_MATHYPRE
ID for class PetscParMatrix, MATHYPRE format.
Definition operator.hpp:292
@ PETSC_MATGENERIC
ID for class PetscParMatrix, unspecified format.
Definition operator.hpp:293
@ PETSC_MATAIJ
ID for class PetscParMatrix, MATAIJ format.
Definition operator.hpp:288
@ PETSC_MATNEST
ID for class PetscParMatrix, MATNEST format.
Definition operator.hpp:291
@ PETSC_MATSHELL
ID for class PetscParMatrix, MATSHELL format.
Definition operator.hpp:290
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition operator.hpp:93
virtual Operator & GetGradient(const Vector &x) const
Evaluate the gradient operator at the point x. The default behavior in class Operator is to generate ...
Definition operator.hpp:122
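The entries above summarize the mfem::Operator interface that petsc.cpp wraps through PETSc's MATSHELL mechanism. As a minimal sketch (the class name and the scaling it performs are illustrative; this and the sketches below assume #include "mfem.hpp" with MPI and PETSc already initialized), a user-defined Operator only needs to override Mult, while MultTranspose is optional:

   class ScaleOperator : public mfem::Operator
   {
      mfem::real_t s;
   public:
      ScaleOperator(int n, mfem::real_t s_) : Operator(n), s(s_) { }
      // y = s*x
      void Mult(const mfem::Vector &x, mfem::Vector &y) const override
      { y = x; y *= s; }
      // The operator is symmetric, so the transpose action is the same.
      void MultTranspose(const mfem::Vector &x, mfem::Vector &y) const override
      { Mult(x, y); }
   };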
Abstract parallel finite element space.
Definition pfespace.hpp:29
MPI_Comm GetComm() const
Definition pfespace.hpp:334
HYPRE_BigInt * GetTrueDofOffsets() const
Definition pfespace.hpp:343
int GetTrueVSize() const override
Return the number of local vector true dofs.
Definition pfespace.hpp:350
ParMesh * GetParMesh() const
Definition pfespace.hpp:338
Helper class for handling essential boundary conditions.
Definition petsc.hpp:592
PetscBCHandler(Type type_=ZERO)
Definition petsc.hpp:601
@ CONSTANT
Constant-in-time boundary condition.
Definition petsc.hpp:597
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
Definition petsc.cpp:2831
virtual void Eval(real_t t, Vector &g)
Boundary conditions evaluation.
Definition petsc.hpp:617
void SetTime(real_t t)
Sets the current time.
Definition petsc.hpp:627
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
Definition petsc.cpp:2838
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
Definition petsc.cpp:2931
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
Definition petsc.hpp:624
void FixResidualBC(const Vector &x, Vector &y)
y = x - g on ess_tdof_list; the rest of y is unchanged
Definition petsc.cpp:2903
void ApplyBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = g (internally evaluated) on ess_tdof_list
Definition petsc.cpp:2854
void Zero(Vector &x)
Replace boundary dofs with 0.
Definition petsc.cpp:2922
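A hedged sketch of a user-defined handler built from the entries above: RampBC is an illustrative name, and the TIME_DEPENDENT enumerator is assumed from petsc.hpp (only CONSTANT and the ZERO default appear in this listing).

   class RampBC : public mfem::PetscBCHandler
   {
   public:
      RampBC(mfem::Array<int> &ess_tdofs)
         : PetscBCHandler(TIME_DEPENDENT) // assumed enumerator
      { SetTDofs(ess_tdofs); }
      // Boundary values ramp linearly in time.
      void Eval(mfem::real_t t, mfem::Vector &g) override { g = t; }
   };
   // Attach to any PETSc solver wrapper with solver.SetBCHandler(&bc).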
Auxiliary class for BDDC customization.
Definition petsc.hpp:831
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
Definition petsc.cpp:3898
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
Definition petsc.cpp:3907
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
Definition petsc.cpp:3942
Abstract class for PETSc's linear solvers.
Definition petsc.hpp:750
void SetOperator(const Operator &op) override
Definition petsc.cpp:2984
operator petsc::KSP() const
Conversion function to PETSc's KSP type.
Definition petsc.hpp:787
virtual ~PetscLinearSolver()
Definition petsc.cpp:3219
void MultTranspose(const Vector &b, Vector &x) const override
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition petsc.cpp:3214
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true, bool iter_mode=false)
Definition petsc.cpp:2943
void SetPreconditioner(Solver &precond)
Definition petsc.cpp:3129
void Mult(const Vector &b, Vector &x) const override
Application of the solver.
Definition petsc.cpp:3209
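Putting the PetscLinearSolver entries together, a typical solve might look like the following sketch, where A, prec, b and x are assumed to exist (A an assembled operator, prec any mfem::Solver, which is applied through a PCSHELL):

   mfem::PetscLinearSolver ksp(MPI_COMM_WORLD, "ls_"); // "ls_" options prefix
   ksp.SetOperator(A);          // wraps or converts A as needed
   ksp.SetPreconditioner(prec); // MFEM Solver used as a shell preconditioner
   ksp.Mult(b, x);              // solve A x = b
   mfem::out << "iterations: " << ksp.GetNumIterations() << std::endl;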
bool DeviceRequested() const
Definition petsc.hpp:143
void SetHostInvalid() const
Definition petsc.hpp:98
void SetHostValid() const
Definition petsc.hpp:96
bool WriteRequested() const
Definition petsc.hpp:138
const real_t * GetDevicePointer() const
Definition petsc.cpp:261
const real_t * GetHostPointer() const
Definition petsc.cpp:252
void SyncBaseAndReset()
Definition petsc.hpp:127
bool IsAliasForSync() const
Definition petsc.hpp:100
void MakeAliasForSync(const Memory< real_t > &base_, int offset_, int size_, bool usedev_)
Definition petsc.hpp:102
void SetDeviceInvalid() const
Definition petsc.hpp:99
bool ReadRequested() const
Definition petsc.hpp:133
void SetDeviceValid() const
Definition petsc.hpp:97
void Mult(const Vector &b, Vector &x) const override
Application of the solver.
Definition petsc.cpp:4136
void SetPostCheck(void(*post)(Operator *op, const Vector &X, Vector &Y, Vector &W, bool &changed_y, bool &changed_w))
Definition petsc.cpp:4109
void SetObjective(void(*obj)(Operator *op, const Vector &x, real_t *f))
Specification of an objective function to be used for line search.
Definition petsc.cpp:4098
operator petsc::SNES() const
Conversion function to PETSc's SNES type.
Definition petsc.hpp:944
virtual ~PetscNonlinearSolver()
Definition petsc.cpp:4022
void SetJacobianType(Operator::Type type)
Definition petsc.cpp:4092
void SetUpdate(void(*update)(Operator *op, int it, const Vector &F, const Vector &X, const Vector &D, const Vector &P))
Definition petsc.cpp:4123
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:3990
void SetOperator(const Operator &op) override
Specification of the nonlinear operator.
Definition petsc.cpp:4030
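The nonlinear entries follow the same pattern: SetOperator expects an mfem::Operator whose Mult evaluates the residual and whose GetGradient returns the Jacobian. A minimal sketch for the pointwise system x_i^3 = 1 (all names illustrative; it is assumed here that requesting PETSC_MATAIJ via SetJacobianType makes the wrapper assemble the generic gradient Operator into an AIJ matrix):

   class CubicResidual : public mfem::Operator
   {
      mutable mfem::DenseMatrix J; // toy dense Jacobian
   public:
      CubicResidual(int n) : Operator(n), J(n) { }
      void Mult(const mfem::Vector &x, mfem::Vector &y) const override
      {
         for (int i = 0; i < Width(); i++) { y(i) = x(i)*x(i)*x(i) - 1.0; }
      }
      mfem::Operator &GetGradient(const mfem::Vector &x) const override
      {
         J = 0.0;
         for (int i = 0; i < Width(); i++) { J(i,i) = 3.0*x(i)*x(i); }
         return J;
      }
   };

   CubicResidual F(10);
   mfem::PetscNonlinearSolver newton(MPI_COMM_WORLD);
   newton.SetOperator(F);
   newton.SetJacobianType(mfem::Operator::PETSC_MATAIJ);
   mfem::Vector b(10), x(10);
   b = 0.0; x = 2.0;   // solve F(x) = b = 0 from the initial guess x = 2
   newton.Mult(b, x);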
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
Definition petsc.cpp:4192
virtual void Run(Vector &x, real_t &t, real_t &dt, real_t t_final)
Perform time integration from time t [in] to time t_final [in].
Definition petsc.cpp:4343
virtual void Step(Vector &x, real_t &t, real_t &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in].
Definition petsc.cpp:4305
operator petsc::TS() const
Conversion function to PETSc's TS type.
Definition petsc.hpp:979
void SetType(PetscODESolver::Type)
Definition petsc.cpp:4287
PetscODESolver::Type GetType() const
Definition petsc.cpp:4281
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:4159
void SetJacobianType(Operator::Type type)
Definition petsc.cpp:4275
virtual ~PetscODESolver()
Definition petsc.cpp:4184
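Combining Init, Step and Run from the entries above, a sketch (op is a TimeDependentOperator such as the Decay class sketched after the TimeDependentOperator entries further down; ODE_SOLVER_GENERAL is assumed to be the general nonlinear TS mode from petsc.hpp):

   mfem::PetscODESolver ode(MPI_COMM_WORLD, "ts_");
   ode.Init(op, mfem::PetscODESolver::ODE_SOLVER_GENERAL);
   mfem::real_t t = 0.0, dt = 0.01, t_final = 1.0;
   mfem::Vector x(op.Width());
   x = 1.0;
   ode.Run(x, t, dt, t_final); // or call Step(x, t, dt) in a loop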
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool iter_mode=false)
Definition petsc.cpp:3229
Wrapper for PETSc's matrix class.
Definition petsc.hpp:319
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
Definition petsc.cpp:2041
PetscInt GetNumRows() const
Returns the local number of rows.
Definition petsc.cpp:996
void ScaleCols(const Vector &s)
Scale the local column i by s(i).
Definition petsc.cpp:2077
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
Definition petsc.cpp:1955
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
Definition petsc.cpp:2300
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
Definition petsc.cpp:1386
PetscParMatrix & operator-=(const PetscParMatrix &B)
Definition petsc.cpp:1181
void Mult(real_t a, const Vector &x, real_t b, Vector &y) const
Matvec: y = a A x + b y.
Definition petsc.cpp:2004
PetscInt GetColStart() const
Returns the global index of the first local column.
Definition petsc.cpp:989
PetscInt M() const
Returns the global number of rows.
Definition petsc.cpp:1010
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, real_t diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
Definition petsc.cpp:2271
PetscParVector * GetY() const
Returns the inner vector in the range of A (creating it if needed)
Definition petsc.cpp:1975
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:1339
Type GetType() const
Definition petsc.cpp:2327
PetscParMatrix & operator=(const PetscParMatrix &B)
Definition petsc.cpp:1150
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, its reference count is decremented.
Definition petsc.cpp:2314
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around an mfem::Operator op using PETSc's MATSHELL object and returns the Mat in B.
Definition petsc.cpp:1352
PetscInt GetNumCols() const
Returns the local number of columns.
Definition petsc.cpp:1003
PetscInt NNZ() const
Returns the number of nonzeros.
Definition petsc.cpp:1024
petsc::Mat A
The actual PETSc object.
Definition petsc.hpp:322
PetscParVector * GetX() const
Returns the inner vector in the domain of A (creating it if needed)
Definition petsc.cpp:1965
void Shift(real_t s)
Shift diagonal by a constant.
Definition petsc.cpp:2088
PetscParVector * Y
Definition petsc.hpp:325
PetscParVector * X
Auxiliary vectors for typecasting.
Definition petsc.hpp:325
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
Definition petsc.cpp:1044
operator petsc::Mat() const
Typecasting to PETSc's Mat type.
Definition petsc.hpp:465
void SetMat(petsc::Mat newA)
Replace the inner Mat object. The reference count of newA is increased.
Definition petsc.cpp:1820
void operator*=(real_t s)
Scale all entries by s: A_scaled = s*A.
Definition petsc.cpp:1999
void SetBlockSize(PetscInt rbs, PetscInt cbs=-1)
Set row and column block sizes of a matrix.
Definition petsc.cpp:1031
PetscInt N() const
Returns the global number of columns.
Definition petsc.cpp:1017
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
Definition petsc.cpp:1985
void Init()
Initialize with defaults. Does not initialize inherited members.
Definition petsc.cpp:1037
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
Definition petsc.cpp:1795
PetscInt GetRowStart() const
Returns the global index of the first local row.
Definition petsc.cpp:982
void MultTranspose(real_t a, const Vector &x, real_t b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
Definition petsc.cpp:2022
PetscParMatrix & operator+=(const PetscParMatrix &B)
Definition petsc.cpp:1166
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
Definition petsc.cpp:2066
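As a usage sketch for the matrix entries above: hA is assumed to be an existing HypreParMatrix, and the constructor that converts it to MATAIJ format is an assumption based on petsc.hpp.

   mfem::PetscParMatrix A(&hA, mfem::Operator::PETSC_MATAIJ); // assumed ctor
   mfem::PetscParVector x(A.GetComm(), A.N()), y(A.GetComm(), A.M());
   x.Randomize();
   A.Mult(1.0, x, 0.0, y); // y = A x
   mfem::out << "global size " << A.M() << " x " << A.N()
             << ", nnz = " << A.NNZ() << std::endl;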
void ResetMemory()
Completes the operation started with PlaceMemory.
Definition petsc.cpp:896
void SetFlagsFromMask_() const
Definition petsc.cpp:328
void PlaceMemory(Memory< real_t > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding ...
Definition petsc.cpp:809
void Randomize(PetscInt seed=0)
Set random values.
Definition petsc.cpp:940
void SetBlockSize(PetscInt bs)
Set block size of a vector.
Definition petsc.cpp:529
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
Definition petsc.cpp:514
real_t * HostReadWrite() override
Shortcut for mfem::ReadWrite(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:500
PetscMemory pdata
Definition petsc.hpp:162
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array,...
Definition petsc.cpp:799
PetscInt GlobalSize() const
Returns the global number of rows.
Definition petsc.cpp:522
PetscParVector & operator-=(const PetscParVector &y)
Definition petsc.cpp:778
real_t * Write(bool=true) override
Shortcut for mfem::Write(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:445
void UpdateVecFromFlags()
Update PETSc's Vec after having accessed its data via GetMemory()
Definition petsc.cpp:362
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
Definition petsc.cpp:955
real_t * ReadWrite(bool=true) override
Shortcut for mfem::ReadWrite(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:475
const real_t * HostRead() const override
Shortcut for mfem::Read(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:440
const real_t * Read(bool=true) const override
Shortcut for mfem::Read(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:416
real_t * HostWrite() override
Shortcut for mfem::Write(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:470
PetscParVector & operator+=(const PetscParVector &y)
Definition petsc.cpp:771
petsc::Vec x
The actual PETSc object.
Definition petsc.hpp:160
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:699
virtual ~PetscParVector()
Calls PETSc's destroy function.
Definition petsc.cpp:601
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray().
Definition petsc.cpp:804
Vector * GlobalVector() const
Returns the global vector, replicated on each processor.
Definition petsc.cpp:704
operator petsc::Vec() const
Typecasting to PETSc's Vec type.
Definition petsc.hpp:234
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
Definition petsc.cpp:750
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
Definition petsc.cpp:583
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
Definition petsc.cpp:736
PetscParVector & operator*=(PetscScalar d)
Definition petsc.cpp:785
PetscParVector & operator=(PetscScalar d)
Set constant values.
Definition petsc.cpp:729
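And a short sketch for the vector entries above (the global size and the file name are illustrative):

   mfem::PetscParVector v(MPI_COMM_WORLD, 100); // 100 global rows, PETSc layout
   v = 1.0;                 // operator=(PetscScalar): set all entries
   v *= 2.0;                // scale in place
   mfem::out << "global size: " << v.GlobalSize() << std::endl;
   v.Print();               // stdout; v.Print("v.dat", true) writes binary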
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
Abstract class for PETSc's preconditioners.
Definition petsc.hpp:805
void Mult(const Vector &b, Vector &x) const override
Application of the preconditioner.
Definition petsc.cpp:3382
virtual ~PetscPreconditioner()
Definition petsc.cpp:3392
void SetOperator(const Operator &op) override
Set/update the solver for the given operator.
Definition petsc.cpp:3296
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:3261
operator petsc::PC() const
Conversion function to PETSc's PC type.
Definition petsc.hpp:825
void MultTranspose(const Vector &b, Vector &x) const override
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition petsc.cpp:3387
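PetscPreconditioner can also be used standalone as a run-time-configurable preconditioner; a sketch (A, r, z assumed to exist; the "mypc_" prefix is illustrative, so e.g. -mypc_pc_type gamg would select the method at run time):

   mfem::PetscPreconditioner pc(MPI_COMM_WORLD, "mypc_");
   pc.SetOperator(A); // assembled operators work best for algebraic PCs
   pc.Mult(r, z);     // apply the preconditioner: z = P^{-1} r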
Abstract class for monitoring PETSc's solvers.
Definition petsc.hpp:985
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
Definition petsc.hpp:1000
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
Definition petsc.hpp:994
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Definition petsc.hpp:1006
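A monitor sketch (the base-class constructor flags selecting solution/residual monitoring are assumed from petsc.hpp):

   class ResidualMonitor : public mfem::PetscSolverMonitor
   {
   public:
      ResidualMonitor() : PetscSolverMonitor(false, true) { } // residual only
      void MonitorResidual(PetscInt it, PetscReal norm,
                           const mfem::Vector &) override
      {
         mfem::out << "it " << it << ": ||r|| = " << norm << std::endl;
      }
   };
   // Attach with solver.SetMonitor(&mon) (see the PetscSolver entries below).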
Abstract class for PETSc's solvers.
Definition petsc.hpp:676
PetscClassId cid
The class id of the actual PETSc object.
Definition petsc.hpp:685
void SetAbsTol(real_t tol)
Definition petsc.cpp:2420
void SetTol(real_t tol)
Definition petsc.cpp:2390
void * private_ctx
Private context for solver.
Definition petsc.hpp:694
void SetPrintLevel(int plev)
Definition petsc.cpp:2472
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Definition petsc.cpp:2587
void SetMaxIter(int max_iter)
Definition petsc.cpp:2445
void SetRelTol(real_t tol)
Definition petsc.cpp:2395
PetscParVector * X
Definition petsc.hpp:688
void CreatePrivateContext()
Definition petsc.cpp:2771
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
Definition petsc.cpp:2383
int GetNumIterations()
Definition petsc.cpp:2713
real_t GetFinalNorm()
Definition petsc.cpp:2746
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:2552
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
Definition petsc.cpp:2373
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
Definition petsc.hpp:682
bool clcustom
Boolean to handle SetFromOptions calls.
Definition petsc.hpp:679
bool operatorset
Boolean to handle SetOperator calls.
Definition petsc.hpp:697
void Customize(bool customize=true) const
Customize the object with the options set in the PETSc options database.
Definition petsc.cpp:2647
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
Definition petsc.cpp:2557
void FreePrivateContext()
Definition petsc.cpp:2803
PetscBCHandler * bchandler
Handler for boundary conditions.
Definition petsc.hpp:691
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
Definition petsc.cpp:2606
PetscParVector * B
Right-hand side and solution vector.
Definition petsc.hpp:688
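The convergence knobs above are shared by all of the PETSc solver wrappers, and Customize means that command-line options carrying the solver's prefix override the programmatic defaults; a sketch (the "sys_" prefix and option names are illustrative):

   mfem::PetscLinearSolver solver(MPI_COMM_WORLD, "sys_");
   solver.SetRelTol(1e-8);
   solver.SetMaxIter(500);
   solver.SetPrintLevel(1);
   // Run-time override, picked up in Customize():
   //   ./app -sys_ksp_type bcgs -sys_ksp_max_it 1000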
Base class for solvers.
Definition operator.hpp:780
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
Definition operator.hpp:783
Data type sparse matrix.
Definition sparsemat.hpp:51
const int * HostReadJ() const
int NumNonZeroElems() const override
Returns the number of nonzero elements in the matrix.
const real_t * HostReadData() const
void SortColumnIndices()
Sort the column indices corresponding to each row.
const int * HostReadI() const
Base abstract class for first order time dependent operators.
Definition operator.hpp:332
bool isHomogeneous() const
True if type is HOMOGENEOUS.
Definition operator.hpp:401
virtual Operator & GetExplicitGradient(const Vector &u) const
Return an Operator representing dG/du at the given point u and the currently set time.
Definition operator.cpp:312
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
Definition operator.hpp:399
virtual void ExplicitMult(const Vector &u, Vector &v) const
Perform the action of the explicit part of the operator, G: v = G(u, t) where t is the current time.
Definition operator.cpp:282
virtual Operator & GetImplicitGradient(const Vector &u, const Vector &k, real_t shift) const
Return an Operator representing (shift*dF/dk + dF/du) at the given u, k, and the currently set time.
Definition operator.cpp:304
virtual void SetTime(const real_t t_)
Set the current time.
Definition operator.hpp:394
virtual void ImplicitMult(const Vector &u, const Vector &k, Vector &v) const
Perform the action of the implicit part of the operator, F: v = F(u, k, t) where t is the current tim...
Definition operator.cpp:287
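For the implicit interface above, here is a hedged sketch of dx/dt = -x written as F(u, k, t) = k + u = 0, whose implicit gradient is shift*dF/dk + dF/du = (shift + 1) I (the class name is illustrative; DenseMatrix::Diag is assumed to resize and set a constant diagonal):

   class Decay : public mfem::TimeDependentOperator
   {
      mutable mfem::DenseMatrix grad;
   public:
      Decay(int n) : TimeDependentOperator(n, 0.0, IMPLICIT), grad(n) { }
      void ImplicitMult(const mfem::Vector &u, const mfem::Vector &k,
                        mfem::Vector &v) const override
      { mfem::add(k, u, v); } // v = k + u
      mfem::Operator &GetImplicitGradient(const mfem::Vector &,
                                          const mfem::Vector &,
                                          mfem::real_t shift) const override
      {
         grad.Diag(shift + 1.0, grad.Height()); // (shift + 1) I
         return grad;
      }
   };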
Vector data type.
Definition vector.hpp:82
void MakeDataOwner() const
Set the Vector data (host pointer) ownership flag.
Definition vector.hpp:214
Memory< real_t > & GetMemory()
Return a reference to the Memory object used by the Vector.
Definition vector.hpp:257
Memory< real_t > data
Definition vector.hpp:85
int Size() const
Returns the size of the vector.
Definition vector.hpp:226
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
Definition vector.hpp:144
void SetSize(int s)
Resize the vector to size s.
Definition vector.hpp:558
HYPRE_Int HYPRE_BigInt
void write(std::ostream &os, T value)
Write 'value' to stream.
Definition binaryio.hpp:30
T read(std::istream &is)
Read a value from the stream and return it.
Definition binaryio.hpp:37
struct ::_p_Vec * Vec
Definition petsc.hpp:72
struct ::_p_Mat * Mat
Definition petsc.hpp:73
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true,...
Definition device.hpp:341
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory<T> &mem, int size, false)
Definition device.hpp:382
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory<T> &mem, int size, false)
Definition device.hpp:348
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true,...
Definition device.hpp:358
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as s...
Definition globals.hpp:66
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
Definition petsc.cpp:2107
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device's DeviceMemoryClass,...
Definition device.hpp:375
void RAP(const DenseMatrix &A, const DenseMatrix &P, DenseMatrix &RAP)
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
Definition petsc.cpp:206
void MFEMFinalizePetsc()
Definition petsc.cpp:246
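Typical lifecycle around these two calls (a sketch following MFEM's PETSc-enabled examples; the rc-file name is illustrative and may be NULL):

   int main(int argc, char *argv[])
   {
      mfem::Mpi::Init(argc, argv);
      mfem::MFEMInitializePetsc(&argc, &argv, "petscopts.rc", NULL);
      {
         // ... set up and solve with the PETSc wrappers ...
      }
      mfem::MFEMFinalizePetsc();
      return 0;
   }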
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(Memory<T> &mem, int size, false)
Definition device.hpp:365
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
Definition hypre.cpp:3007
float real_t
Definition config.hpp:43
MemoryType
Memory types supported by MFEM.
@ HOST
Host memory; using new[] and delete[].
@ DEVICE
Device memory; using CUDA or HIP *Malloc and *Free.
void EliminateBC(const HypreParMatrix &A, const HypreParMatrix &Ae, const Array< int > &ess_dof_list, const Vector &X, Vector &B)
Eliminate essential BC specified by ess_dof_list from the solution X to the r.h.s....
Definition hypre.cpp:3437
HYPRE_Int PetscInt
Definition petsc.hpp:50
struct _p_PetscObject * PetscObject
Definition petsc.hpp:54
real_t PetscScalar
Definition petsc.hpp:51
real_t PetscReal
Definition petsc.hpp:52
@ HIP_MASK
Bitwise-OR of all HIP backends.
Definition device.hpp:91
@ CUDA_MASK
Bitwise-OR of all CUDA backends.
Definition device.hpp:89