23#if defined(PETSC_HAVE_HYPRE)
24#include "petscmathypre.h"
28#if PETSC_VERSION_LT(3,11,0)
29#define VecLockReadPush VecLockPush
30#define VecLockReadPop VecLockPop
32#if PETSC_VERSION_LT(3,12,0)
33#define VecGetArrayWrite VecGetArray
34#define VecRestoreArrayWrite VecRestoreArray
35#define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36#define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
38#if PETSC_VERSION_LT(3,19,0)
39#define PETSC_SUCCESS 0
53static PetscErrorCode __mfem_ts_rhsfunction(TS,
PetscReal,Vec,Vec,
void*);
54static PetscErrorCode __mfem_ts_rhsjacobian(TS,
PetscReal,Vec,Mat,Mat,
56static PetscErrorCode __mfem_ts_ifunction(TS,
PetscReal,Vec,Vec,Vec,
void*);
57static PetscErrorCode __mfem_ts_ijacobian(TS,
PetscReal,Vec,Vec,
60static PetscErrorCode __mfem_ts_computesplits(TS,
PetscReal,Vec,Vec,
63static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,
void*);
64static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,
void*);
65static PetscErrorCode __mfem_snes_objective(SNES,Vec,
PetscReal*,
void*);
66static PetscErrorCode __mfem_snes_update(SNES,
PetscInt);
67static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
68 PetscBool*,PetscBool*,
void*);
70static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
71static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
72static PetscErrorCode __mfem_pc_shell_setup(PC);
73static PetscErrorCode __mfem_pc_shell_destroy(PC);
74static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
75static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
76static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
77static PetscErrorCode __mfem_mat_shell_destroy(Mat);
78static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
79#if PETSC_VERSION_LT(3,23,0)
80static PetscErrorCode __mfem_array_container_destroy(
void*);
81static PetscErrorCode __mfem_matarray_container_destroy(
void *);
83static PetscErrorCode __mfem_array_container_destroy(
void**);
84static PetscErrorCode __mfem_matarray_container_destroy(
void**);
86static PetscErrorCode __mfem_monitor_ctx_destroy(
void**);
94static PetscErrorCode MakeShellPCWithFactory(PC,
100#if !defined(PETSC_HAVE_HYPRE)
101static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
102static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
105#if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
106#if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
108#define PETSC_VECDEVICE VECCUDA
109#define PETSC_MATAIJDEVICE MATAIJCUSPARSE
110#define VecDeviceGetArrayRead VecCUDAGetArrayRead
111#define VecDeviceGetArrayWrite VecCUDAGetArrayWrite
112#define VecDeviceGetArray VecCUDAGetArray
113#define VecDeviceRestoreArrayRead VecCUDARestoreArrayRead
114#define VecDeviceRestoreArrayWrite VecCUDARestoreArrayWrite
115#define VecDeviceRestoreArray VecCUDARestoreArray
116#define VecDevicePlaceArray VecCUDAPlaceArray
117#define VecDeviceResetArray VecCUDAResetArray
118#elif defined(MFEM_USE_HIP) && defined(PETSC_HAVE_HIP)
120#define PETSC_VECDEVICE VECHIP
121#define PETSC_MATAIJDEVICE MATAIJHIPSPARSE
122#define VecDeviceGetArrayRead VecHIPGetArrayRead
123#define VecDeviceGetArrayWrite VecHIPGetArrayWrite
124#define VecDeviceGetArray VecHIPGetArray
125#define VecDeviceRestoreArrayRead VecHIPRestoreArrayRead
126#define VecDeviceRestoreArrayWrite VecHIPRestoreArrayWrite
127#define VecDeviceRestoreArray VecHIPRestoreArray
128#define VecDevicePlaceArray VecHIPPlaceArray
129#define VecDeviceResetArray VecHIPResetArray
131#define VecDeviceGetArrayRead VecGetArrayRead
132#define VecDeviceGetArrayWrite VecGetArrayWrite
133#define VecDeviceGetArray VecGetArray
134#define VecDeviceRestoreArrayRead VecRestoreArrayRead
135#define VecDeviceRestoreArrayWrite VecRestoreArrayWrite
136#define VecDeviceRestoreArray VecRestoreArray
137#define VecDevicePlaceArray VecPlaceArray
138#define VecDeviceResetArray VecResetArray
142#if defined(PETSC_HAVE_DEVICE)
143static PetscErrorCode __mfem_VecSetOffloadMask(Vec,PetscOffloadMask);
145static PetscErrorCode __mfem_VecBoundToCPU(Vec,PetscBool*);
146static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject);
147static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm,
PetscInt,
PetscInt,Mat*);
155 unsigned long int numprec;
156} __mfem_pc_shell_ctx;
185 PetscObjectState cached_ijacstate;
186 PetscObjectState cached_rhsjacstate;
187 PetscObjectState cached_splits_xstate;
188 PetscObjectState cached_splits_xdotstate;
198static PetscErrorCode ierr;
199static PetscMPIInt mpiierr;
222#if PETSC_VERSION_LT(3,17,0)
223 const char *opts =
"-cuda_device";
225 const char *opts =
"-device_select_cuda";
227 ierr = PetscOptionsSetValue(NULL,opts,
229 MFEM_VERIFY(!ierr,
"Unable to set initial option value to PETSc");
233#if PETSC_VERSION_LT(3,17,0)
234 const char *opts =
"-hip_device";
236 const char *opts =
"-device_select_hip";
238 ierr = PetscOptionsSetValue(NULL,opts,
240 MFEM_VERIFY(!ierr,
"Unable to set initial option value to PETSc");
242 ierr = PetscInitialize(argc,argv,rc_file,help);
243 MFEM_VERIFY(!ierr,
"Unable to initialize PETSc");
248 ierr = PetscFinalize();
249 MFEM_VERIFY(!ierr,
"Unable to finalize PETSc");
278 MFEM_VERIFY(
x,
"Missing Vec");
279 ierr = VecSetUp(
x); PCHKERRQ(
x,ierr);
280 ierr = PetscObjectTypeCompare((
PetscObject)
x,VECNEST,&isnest); PCHKERRQ(
x,ierr);
281 MFEM_VERIFY(!isnest,
"Not for type nest");
282 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
283 MFEM_VERIFY(n >= 0,
"Invalid local size");
285#if defined(PETSC_HAVE_DEVICE)
286 PetscOffloadMask omask;
289 ierr = VecGetOffloadMask(
x,&omask); PCHKERRQ(
x,ierr);
290 if (omask != PETSC_OFFLOAD_BOTH)
292 ierr = __mfem_VecSetOffloadMask(
x,PETSC_OFFLOAD_CPU); PCHKERRQ(
x,ierr);
295 ierr = VecGetArrayRead(
x,(
const PetscScalar**)&array); PCHKERRQ(
x,ierr);
296#if defined(PETSC_HAVE_DEVICE)
297 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
298 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
299 ""); PCHKERRQ(
x,ierr);
302 if (omask != PETSC_OFFLOAD_BOTH)
304 ierr = __mfem_VecSetOffloadMask(
x,PETSC_OFFLOAD_GPU); PCHKERRQ(
x,ierr);
307 ierr = VecDeviceGetArrayRead(
x,(
const PetscScalar**)&darray);
310 ierr = VecDeviceRestoreArrayRead(
x,(
const PetscScalar**)&darray);
318 ierr = VecRestoreArrayRead(
x,(
const PetscScalar**)&array); PCHKERRQ(
x,ierr);
320#if defined(PETSC_HAVE_DEVICE)
321 if (omask == PETSC_OFFLOAD_UNALLOCATED && isdevice) { omask = PETSC_OFFLOAD_CPU; }
322 ierr = __mfem_VecSetOffloadMask(
x,omask); PCHKERRQ(
x,ierr);
330 MFEM_VERIFY(
x,
"Missing Vec");
331#if defined(_USE_DEVICE)
332 PetscOffloadMask mask;
334 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
335 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
336 ""); PCHKERRQ(
x,ierr);
337 ierr = VecGetOffloadMask(
x,&mask); PCHKERRQ(
x,ierr);
342 case PETSC_OFFLOAD_CPU:
346 case PETSC_OFFLOAD_GPU:
350 case PETSC_OFFLOAD_BOTH:
355 MFEM_ABORT(
"Unhandled case " << mask);
364 MFEM_VERIFY(
x,
"Missing Vec");
365 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)
x); PCHKERRQ(
x,ierr);
366#if defined(_USE_DEVICE)
368 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
369 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
370 ""); PCHKERRQ(
x,ierr);
375 PetscOffloadMask mask;
376 if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
377 else if (dv) { mask = PETSC_OFFLOAD_GPU; }
378 else { mask = PETSC_OFFLOAD_CPU; }
379 ierr = __mfem_VecSetOffloadMask(
x,mask); PCHKERRQ(
x,ierr);
386 ierr = VecGetArrayWrite(
x,&v); PCHKERRQ(
x,ierr);
388 ierr = VecRestoreArrayWrite(
x,&v); PCHKERRQ(
x,ierr);
395 MFEM_VERIFY(
x,
"Missing Vec");
396 ierr = VecGetType(
x,&vectype); PCHKERRQ(
x,ierr);
397#if defined(_USE_DEVICE)
402 ierr = VecSetType(
x,PETSC_VECDEVICE); PCHKERRQ(
x,ierr);
405 ierr = VecSetType(
x,VECSTANDARD); PCHKERRQ(
x,ierr);
411 ierr = VecSetType(
x,VECSTANDARD); PCHKERRQ(
x,ierr);
419 MFEM_VERIFY(
x,
"Missing Vec");
420#if defined(PETSC_HAVE_DEVICE)
422 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
423 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
424 ""); PCHKERRQ(
x,ierr);
425 if (on_dev && isdevice)
427 ierr = VecDeviceGetArrayRead(
x,&dummy); PCHKERRQ(
x,ierr);
428 ierr = VecDeviceRestoreArrayRead(
x,&dummy); PCHKERRQ(
x,ierr);
433 ierr = VecGetArrayRead(
x,&dummy); PCHKERRQ(
x,ierr);
434 ierr = VecRestoreArrayRead(
x,&dummy); PCHKERRQ(
x,ierr);
448 MFEM_VERIFY(
x,
"Missing Vec");
449#if defined(PETSC_HAVE_DEVICE)
451 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
452 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
453 ""); PCHKERRQ(
x,ierr);
454 if (on_dev && isdevice)
456 ierr = VecDeviceGetArrayWrite(
x,&dummy); PCHKERRQ(
x,ierr);
457 ierr = VecDeviceRestoreArrayWrite(
x,&dummy); PCHKERRQ(
x,ierr);
462 ierr = VecGetArrayWrite(
x,&dummy); PCHKERRQ(
x,ierr);
463 ierr = VecRestoreArrayWrite(
x,&dummy); PCHKERRQ(
x,ierr);
465 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)
x); PCHKERRQ(
x,ierr);
478 MFEM_VERIFY(
x,
"Missing Vec");
479#if defined(PETSC_HAVE_DEVICE)
481 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
482 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
483 ""); PCHKERRQ(
x,ierr);
484 if (on_dev && isdevice)
486 ierr = VecDeviceGetArray(
x,&dummy); PCHKERRQ(
x,ierr);
487 ierr = VecDeviceRestoreArray(
x,&dummy); PCHKERRQ(
x,ierr);
492 ierr = VecGetArray(
x,&dummy); PCHKERRQ(
x,ierr);
493 ierr = VecRestoreArray(
x,&dummy); PCHKERRQ(
x,ierr);
495 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)
x); PCHKERRQ(
x,ierr);
507 MFEM_VERIFY(
x,
"Missing Vec");
508#if defined(PETSC_HAVE_DEVICE)
509 ierr = VecBindToCPU(
x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(
x,ierr);
517 MFEM_VERIFY(
x,
"Missing Vec");
518 ierr = __mfem_VecBoundToCPU(
x,&flg); PCHKERRQ(
x,ierr);
519 return flg ? false :
true;
525 ierr = VecGetSize(
x,&N); PCHKERRQ(
x,ierr);
531 ierr = VecSetBlockSize(
x,bs); PCHKERRQ(
x,ierr);
540 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
541 ierr = VecSetSizes(
x,n,PETSC_DECIDE); PCHKERRQ(
x,ierr);
544 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
545 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
546 ""); PCHKERRQ(
x,ierr);
552#if defined(PETSC_HAVE_DEVICE)
556 ierr = VecDeviceGetArrayWrite(
x,&array); PCHKERRQ(
x,ierr);
557 rest = VecDeviceRestoreArrayWrite;
563 ierr = VecGetArrayWrite(
x,&array); PCHKERRQ(
x,ierr);
564 rest = VecRestoreArrayWrite;
567 ierr = (*rest)(
x,&array); PCHKERRQ(
x,ierr);
586 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
590 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
591 ierr = VecSetSizes(
x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(
x,ierr);
595 ierr = VecSetSizes(
x,PETSC_DECIDE,glob_size); PCHKERRQ(
x,ierr);
604 ierr = VecDestroy(&
x); CCHKERRQ(comm,ierr);
611 MFEM_VERIFY(col,
"Missing distribution");
613 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
614 ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
615 &
x); CCHKERRQ(comm,ierr)
622 ierr = VecDuplicate(y.
x,&
x); PCHKERRQ(
x,ierr);
627 bool transpose,
bool allocate) :
Vector()
631 ierr = VecCreate(comm,&
x);
633 ierr = VecSetSizes(
x,loc,PETSC_DECIDE);
648 bool transpose,
bool allocate) :
Vector()
653 ierr = MatCreateVecs(pA,&
x,NULL); PCHKERRQ(pA,ierr);
657 ierr = MatCreateVecs(pA,NULL,&
x); PCHKERRQ(pA,ierr);
663 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
676 ierr = PetscObjectReference((
PetscObject)y); PCHKERRQ(y,ierr);
685 MPI_Comm comm = pfes->
GetComm();
686 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
688 PetscMPIInt myid = 0;
689 if (!HYPRE_AssumedPartitionCheck())
691 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
693 ierr = VecSetSizes(
x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
711 ierr = VecScatterCreateToAll(
x,&scctx,&vout); PCHKERRQ(
x,ierr);
712 ierr = VecScatterBegin(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
714 ierr = VecScatterEnd(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
716 ierr = VecScatterDestroy(&scctx); PCHKERRQ(
x,ierr);
717 ierr = VecGetArrayRead(vout,&array); PCHKERRQ(
x,ierr);
718 ierr = VecGetLocalSize(vout,&
size); PCHKERRQ(
x,ierr);
721 ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(
x,ierr);
722 ierr = VecDestroy(&vout); PCHKERRQ(
x,ierr);
731 ierr = VecSet(
x,d); PCHKERRQ(
x,ierr);
739 MFEM_VERIFY(idx.
Size() == vals.
Size(),
740 "Size mismatch between indices and values");
744 ierr = VecAssemblyBegin(
x); PCHKERRQ(
x,ierr);
745 ierr = VecAssemblyEnd(
x); PCHKERRQ(
x,ierr);
753 MFEM_VERIFY(idx.
Size() == vals.
Size(),
754 "Size mismatch between indices and values");
758 ierr = VecAssemblyBegin(
x); PCHKERRQ(
x,ierr);
759 ierr = VecAssemblyEnd(
x); PCHKERRQ(
x,ierr);
766 ierr = VecCopy(y.
x,
x); PCHKERRQ(
x,ierr);
773 ierr = VecAXPY(
x,1.0,y.
x); PCHKERRQ(
x,ierr);
780 ierr = VecAXPY(
x,-1.0,y.
x); PCHKERRQ(
x,ierr);
787 ierr = VecScale(
x,s); PCHKERRQ(
x,ierr);
794 ierr = VecShift(
x,s); PCHKERRQ(
x,ierr);
801 ierr = VecPlaceArray(
x,temp_data); PCHKERRQ(
x,ierr);
806 ierr = VecResetArray(
x); PCHKERRQ(
x,ierr);
813 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
815 "Memory size " << mem.
Capacity() <<
" < " << n <<
" vector size!");
816 MFEM_VERIFY(
pdata.
Empty(),
"Vector data is not empty");
817 MFEM_VERIFY(
data.
Empty(),
"Vector data is not empty");
818#if defined(_USE_DEVICE)
820 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
821 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
822 ""); PCHKERRQ(
x,ierr);
829 ierr = __mfem_VecSetOffloadMask(
x,PETSC_OFFLOAD_GPU); PCHKERRQ(
x,ierr);
834 ierr = __mfem_VecSetOffloadMask(
x,PETSC_OFFLOAD_CPU); PCHKERRQ(
x,ierr);
844#if defined(PETSC_HAVE_DEVICE)
845 ierr = __mfem_VecSetOffloadMask(
x,PETSC_OFFLOAD_CPU); PCHKERRQ(
x,ierr);
847 ierr = VecPlaceArray(
x,w); PCHKERRQ(
x,ierr);
849 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)
x); PCHKERRQ(
x,ierr);
857 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
859 "Memory size " << mem.
Capacity() <<
" < " << n <<
" vector size!");
860 MFEM_VERIFY(
pdata.
Empty(),
"Vector data is not empty");
861 MFEM_VERIFY(
data.
Empty(),
"Vector data is not empty");
862#if defined(_USE_DEVICE)
864 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&isdevice,
865 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
866 ""); PCHKERRQ(
x,ierr);
872 ierr = __mfem_VecSetOffloadMask(
x,PETSC_OFFLOAD_GPU); PCHKERRQ(
x,ierr);
877 ierr = __mfem_VecSetOffloadMask(
x,PETSC_OFFLOAD_CPU); PCHKERRQ(
x,ierr);
886#if defined(PETSC_HAVE_DEVICE)
887 ierr = __mfem_VecSetOffloadMask(
x,PETSC_OFFLOAD_CPU); PCHKERRQ(
x,ierr);
889 ierr = VecPlaceArray(
x,w); PCHKERRQ(
x,ierr);
892 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)
x); PCHKERRQ(
x,ierr);
893 ierr = VecLockReadPush(
x); PCHKERRQ(
x,ierr);
899 MFEM_VERIFY(!
pdata.
Empty(),
"Vector data is empty");
911#if defined(PETSC_HAVE_DEVICE)
912 PetscOffloadMask mask;
913 ierr = VecGetOffloadMask(
x,&mask); PCHKERRQ(
x,ierr);
914 if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
915 (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
918 ierr = VecGetArrayRead(
x,&v); PCHKERRQ(
x,ierr);
920 ierr = VecRestoreArrayRead(
x,&v); PCHKERRQ(
x,ierr);
925 if (
read && !
write) { ierr = VecLockReadPop(
x); PCHKERRQ(
x,ierr); }
928#if defined(PETSC_HAVE_DEVICE)
929 ierr = VecDeviceResetArray(
x); PCHKERRQ(
x,ierr);
931 MFEM_VERIFY(
false,
"This should not happen");
936 ierr = VecResetArray(
x); PCHKERRQ(
x,ierr);
942 PetscRandom rctx = NULL;
946 ierr = PetscRandomCreate(PetscObjectComm((
PetscObject)
x),&rctx);
948 ierr = PetscRandomSetSeed(rctx,(
unsigned long)seed); PCHKERRQ(
x,ierr);
949 ierr = PetscRandomSeed(rctx); PCHKERRQ(
x,ierr);
951 ierr = VecSetRandom(
x,rctx); PCHKERRQ(
x,ierr);
952 ierr = PetscRandomDestroy(&rctx); PCHKERRQ(
x,ierr);
963 ierr = PetscViewerBinaryOpen(PetscObjectComm((
PetscObject)
x),fname,
964 FILE_MODE_WRITE,&view);
968 ierr = PetscViewerASCIIOpen(PetscObjectComm((
PetscObject)
x),fname,&view);
971 ierr = VecView(
x,view); PCHKERRQ(
x,ierr);
972 ierr = PetscViewerDestroy(&view); PCHKERRQ(
x,ierr);
976 ierr = VecView(
x,NULL); PCHKERRQ(
x,ierr);
985 ierr = MatGetOwnershipRange(
A,&
N,NULL); PCHKERRQ(
A,ierr);
992 ierr = MatGetOwnershipRangeColumn(
A,&
N,NULL); PCHKERRQ(
A,ierr);
999 ierr = MatGetLocalSize(
A,&
N,NULL); PCHKERRQ(
A,ierr);
1006 ierr = MatGetLocalSize(
A,NULL,&
N); PCHKERRQ(
A,ierr);
1013 ierr = MatGetSize(
A,&
N,NULL); PCHKERRQ(
A,ierr);
1020 ierr = MatGetSize(
A,NULL,&
N); PCHKERRQ(
A,ierr);
1027 ierr = MatGetInfo(
A,MAT_GLOBAL_SUM,&info); PCHKERRQ(
A,ierr);
1033 if (cbs < 0) { cbs = rbs; }
1034 ierr = MatSetBlockSizes(
A,rbs,cbs); PCHKERRQ(
A,ierr);
1058 rows.
GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
1060 cols.
GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
1061 ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&
A); PCHKERRQ(B,ierr);
1062 ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
1063 ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
1107 BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1121 BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1134 ierr = MatDestroy(&
A); CCHKERRQ(comm,ierr);
1135 if (
X) {
delete X; }
1136 if (
Y) {
delete Y; }
1141#if defined(PETSC_HAVE_HYPRE)
1142 ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&
A);
1144 ierr = MatConvert_hypreParCSR_AIJ(B,&
A); CCHKERRQ(B.
GetComm(),ierr);
1155 ierr = MatDestroy(&
A); CCHKERRQ(comm,ierr);
1156 if (
X) {
delete X; }
1157 if (
Y) {
delete Y; }
1162 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1170 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1174 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
1175 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
1176 ierr = MatAXPY(
A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),ierr);
1185 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1186 ierr = MatScale(
A,-1.0); PCHKERRQ(
A,ierr);
1190 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
1191 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
1192 ierr = MatAXPY(
A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),ierr);
1197void PetscParMatrix::
1198BlockDiagonalConstructor(MPI_Comm comm,
1203 PetscInt lrsize,lcsize,rstart,cstart;
1204 PetscMPIInt myid = 0,commsize;
1206 mpiierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,mpiierr);
1207 if (!HYPRE_AssumedPartitionCheck())
1209 mpiierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,mpiierr);
1211 lrsize = row_starts[myid+1]-row_starts[myid];
1212 rstart = row_starts[myid];
1213 lcsize = col_starts[myid+1]-col_starts[myid];
1214 cstart = col_starts[myid];
1219 ierr = ISCreateStride(comm,diag->
Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1220 ISLocalToGlobalMapping rl2g,cl2g;
1221 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1222 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1223 if (row_starts != col_starts)
1225 ierr = ISCreateStride(comm,diag->
Width(),cstart,1,&is);
1226 CCHKERRQ(comm,ierr);
1227 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1228 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1232 ierr = PetscObjectReference((
PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1237 ierr = MatCreate(comm,&
A); CCHKERRQ(comm,ierr);
1238 ierr = MatSetSizes(
A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1240 ierr = MatSetType(
A,MATIS); PCHKERRQ(
A,ierr);
1241 ierr = MatSetLocalToGlobalMapping(
A,rl2g,cl2g); PCHKERRQ(
A,ierr);
1242 ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(
A,ierr)
1243 ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(
A,ierr)
1248 ierr = MatISGetLocalMat(
A,&lA); PCHKERRQ(
A,ierr);
1251#if defined(PETSC_USE_64BIT_INDICES)
1254 ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1255 for (
int i = 0; i < m; i++) { pII[i] = II[i]; }
1256 for (
int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1257 ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1259 ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1261 ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1274 ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1275 ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1276 ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1277 if (
sizeof(
PetscInt) ==
sizeof(
int))
1280 CCHKERRQ(PETSC_COMM_SELF,ierr);
1282 CCHKERRQ(PETSC_COMM_SELF,ierr);
1288 for (
int i = 0; i < m; i++) { dii[i] = iii[i]; }
1289 for (
int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1292 CCHKERRQ(PETSC_COMM_SELF,ierr);
1293 ierr = PetscCalloc1(m,&oii);
1294 CCHKERRQ(PETSC_COMM_SELF,ierr);
1297 ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1299 dii,djj,da,oii,NULL,NULL,&
A);
1300 CCHKERRQ(comm,ierr);
1304 ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&
A);
1305 CCHKERRQ(comm,ierr);
1308 void *ptrs[4] = {dii,djj,da,oii};
1309 const char *names[4] = {
"_mfem_csr_dii",
1318 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1319 ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1320#if PETSC_VERSION_LT(3,23,0)
1321 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1323 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
1325 CCHKERRQ(comm,ierr);
1327 CCHKERRQ(comm,ierr);
1328 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1333 ierr = MatAssemblyBegin(
A,MAT_FINAL_ASSEMBLY); PCHKERRQ(
A,ierr);
1334 ierr = MatAssemblyEnd(
A,MAT_FINAL_ASSEMBLY); PCHKERRQ(
A,ierr);
1354 ierr = MatCreate(comm,
A); CCHKERRQ(comm,ierr);
1356 PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(
A,ierr);
1357 ierr = MatSetType(*
A,MATSHELL); PCHKERRQ(
A,ierr);
1358 ierr = MatShellSetContext(*
A,(
void *)op); PCHKERRQ(
A,ierr);
1359 ierr = MatShellSetOperation(*
A,MATOP_MULT,
1360 (
void (*)())__mfem_mat_shell_apply);
1362 ierr = MatShellSetOperation(*
A,MATOP_MULT_TRANSPOSE,
1363 (
void (*)())__mfem_mat_shell_apply_transpose);
1365 ierr = MatShellSetOperation(*
A,MATOP_COPY,
1366 (
void (*)())__mfem_mat_shell_copy);
1368 ierr = MatShellSetOperation(*
A,MATOP_DESTROY,
1369 (
void (*)())__mfem_mat_shell_destroy);
1370#if defined(_USE_DEVICE)
1374 ierr = MatShellSetVecType(*
A,PETSC_VECDEVICE); PCHKERRQ(
A,ierr);
1375 ierr = MatBindToCPU(*
A,PETSC_FALSE); PCHKERRQ(
A,ierr);
1379 ierr = MatBindToCPU(*
A,PETSC_TRUE); PCHKERRQ(
A,ierr);
1383 ierr = MatSetUp(*
A); PCHKERRQ(*
A,ierr);
1408 PetscBool avoidmatconvert = PETSC_FALSE;
1411 ierr = PetscObjectTypeCompareAny((
PetscObject)(pA->
A),&avoidmatconvert,MATMFFD,
1413 CCHKERRQ(comm,ierr);
1415 if (pA && !avoidmatconvert)
1419#if PETSC_VERSION_LT(3,10,0)
1423#if PETSC_VERSION_LT(3,18,0)
1424 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATTRANSPOSEMAT,&istrans);
1426 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATTRANSPOSEVIRTUAL,
1439#if PETSC_VERSION_LT(3,10,0)
1440 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATIS,&ismatis);
1446 ierr = MatTransposeGetMat(pA->
A,&At); CCHKERRQ(pA->
GetComm(),ierr);
1447#if PETSC_VERSION_LT(3,10,0)
1448 ierr = PetscObjectTypeCompare((
PetscObject)(At),MATIS,&ismatis);
1456#if PETSC_VERSION_LT(3,10,0)
1463 ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1464 ierr = MatCreateTranspose(B,
A); PCHKERRQ(pA->
A,ierr);
1465 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1469 ierr = MatISGetMPIXAIJ(pA->
A,MAT_INITIAL_MATRIX,
A);
1470 PCHKERRQ(pA->
A,ierr);
1477 mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
1483 ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1484 PCHKERRQ(pA->
A,ierr);
1485 ierr = MatCreateTranspose(B,
A); PCHKERRQ(pA->
A,ierr);
1486 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1490 ierr = MatConvert(pA->
A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,
A);
1491 PCHKERRQ(pA->
A,ierr);
1500 ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1501 ierr = MatCreateTranspose(B,
A); PCHKERRQ(pA->
A,ierr);
1502 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1506 ierr = MatConvert(pA->
A,MATIS,MAT_INITIAL_MATRIX,
A); PCHKERRQ(pA->
A,ierr);
1511#if defined(PETSC_HAVE_HYPRE)
1515 ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1516 ierr = MatCreateTranspose(B,
A); PCHKERRQ(pA->
A,ierr);
1517 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1521 ierr = MatConvert(pA->
A,MATHYPRE,MAT_INITIAL_MATRIX,
A); PCHKERRQ(pA->
A,ierr);
1524 MFEM_ABORT(
"Reconfigure PETSc with --download-hypre or --with-hypre")
1533 MFEM_ABORT(
"Unsupported operator type conversion " << tid)
1540#if defined(PETSC_HAVE_HYPRE)
1541 ierr = MatCreateFromParCSR(
const_cast<HypreParMatrix&
>(*pH),MATAIJ,
1542 PETSC_USE_POINTER,
A);
1544 ierr = MatConvert_hypreParCSR_AIJ(
const_cast<HypreParMatrix&
>(*pH),
A);
1550#if defined(PETSC_HAVE_HYPRE)
1551 ierr = MatCreateFromParCSR(
const_cast<HypreParMatrix&
>(*pH),MATIS,
1552 PETSC_USE_POINTER,
A);
1560#if defined(PETSC_HAVE_HYPRE)
1561 ierr = MatCreateFromParCSR(
const_cast<HypreParMatrix&
>(*pH),MATHYPRE,
1562 PETSC_USE_POINTER,
A);
1565 MFEM_ABORT(
"Reconfigure PETSc with --download-hypre or --with-hypre")
1574 MFEM_ABORT(
"Conversion from HypreParCSR to operator type = " << tid <<
1575 " is not implemented");
1580 Mat *mats,*matsl2l = NULL;
1585 ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1588 ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1590 for (i=0; i<nr; i++)
1592 PetscBool needl2l = PETSC_TRUE;
1594 for (j=0; j<nc; j++)
1602 ierr = PetscObjectQuery((
PetscObject)mats[i*nc+j],
"_MatIS_PtAP_l2l",
1604 PCHKERRQ(mats[i*nc+j],ierr);
1612 ierr = PetscContainerGetPointer(c,(
void**)&l2l);
1614 MFEM_VERIFY(l2l->
Size() == 1,
"Unexpected size "
1615 << l2l->
Size() <<
" for block row " << i );
1616 ierr = PetscObjectReference((
PetscObject)(*l2l)[0]);
1618 matsl2l[i] = (*l2l)[0];
1619 needl2l = PETSC_FALSE;
1625 ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,
A); CCHKERRQ(comm,ierr);
1628 ierr = MatConvert(*
A,MATIS,MAT_INPLACE_MATRIX,
A); CCHKERRQ(comm,ierr);
1631 for (
int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1632 ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1635 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1636 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1637#if PETSC_VERSION_LT(3,23,0)
1638 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1640 ierr = PetscContainerSetCtxDestroy(c,__mfem_matarray_container_destroy);
1644 PCHKERRQ((*
A),ierr);
1645 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1647 for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1648 ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1654 ierr = MatCreate(comm,
A); CCHKERRQ(comm,ierr);
1655 ierr = MatSetSizes(*
A,pI->
Height(),pI->
Width(),PETSC_DECIDE,PETSC_DECIDE);
1657 ierr = MatSetType(*
A,MATAIJ); PCHKERRQ(*
A,ierr);
1658 ierr = MatMPIAIJSetPreallocation(*
A,1,NULL,0,NULL); PCHKERRQ(*
A,ierr);
1659 ierr = MatSeqAIJSetPreallocation(*
A,1,NULL); PCHKERRQ(*
A,ierr);
1660 ierr = MatSetOption(*
A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*
A,ierr);
1661 ierr = MatGetOwnershipRange(*
A,&rst,NULL); PCHKERRQ(*
A,ierr);
1664 ierr = MatSetValue(*
A,i,i,1.,INSERT_VALUES); PCHKERRQ(*
A,ierr);
1666 ierr = MatAssemblyBegin(*
A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*
A,ierr);
1667 ierr = MatAssemblyEnd(*
A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*
A,ierr);
1684 int n = pS->
Width();
1689 ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1690 ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1691 ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1693 for (
int i = 0; i < m; i++)
1695 bool issorted =
true;
1697 for (
int j = ii[i]; j < ii[i+1]; j++)
1700 if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1705 ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1706 CCHKERRQ(PETSC_COMM_SELF,ierr);
1710 mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
1713 ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1714 CCHKERRQ(comm,ierr);
1719 ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1720 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1722 pii,pjj,pdata,oii,NULL,NULL,&B);
1723 CCHKERRQ(comm,ierr);
1725 void *ptrs[4] = {pii,pjj,pdata,oii};
1726 const char *names[4] = {
"_mfem_csr_pii",
1731 for (
int i=0; i<4; i++)
1735 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1736 ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1737#if PETSC_VERSION_LT(3,23,0)
1738 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1740 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
1745 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1753 ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,
A); PCHKERRQ(B,ierr);
1754 ierr = MatDestroy(&B); PCHKERRQ(*
A,ierr);
1758 ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,
A); PCHKERRQ(B,ierr);
1759 ierr = MatDestroy(&B); PCHKERRQ(*
A,ierr);
1763 MFEM_ABORT(
"Unsupported operator type conversion " << tid)
1770 "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1777 ierr = MatComputeOperator(*
A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1778 ierr = PetscObjectTypeCompare((
PetscObject)B,MATMPIAIJ,&isaij);
1779 CCHKERRQ(comm,ierr);
1780 ierr = MatDestroy(
A); CCHKERRQ(comm,ierr);
1783 ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,
A); CCHKERRQ(comm,ierr);
1784 ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1799 MPI_Comm comm = MPI_COMM_NULL;
1800 ierr = PetscObjectGetComm((
PetscObject)
A,&comm); PCHKERRQ(
A,ierr);
1801 ierr = MatDestroy(&
A); CCHKERRQ(comm,ierr);
1812 ierr = PetscObjectReference((
PetscObject)
a); PCHKERRQ(
a,ierr);
1822 if (A_ ==
A) {
return; }
1824 ierr = PetscObjectReference((
PetscObject)A_); PCHKERRQ(A_,ierr);
1830void PetscParMatrix::SetUpForDevice()
1832#if !defined(_USE_DEVICE)
1838 if (
A) { ierr = MatBindToCPU(
A, PETSC_TRUE); PCHKERRQ(
A,ierr); }
1841 PetscBool ismatis,isnest,isaij;
1842 ierr = PetscObjectTypeCompare((
PetscObject)
A,MATIS,&ismatis);
1844 ierr = PetscObjectTypeCompare((
PetscObject)
A,MATNEST,&isnest);
1849 ierr = MatISGetLocalMat(
A,&tA); PCHKERRQ(
A,ierr);
1850 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATNEST,&isnest);
1857 ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1867 ierr = PetscObjectTypeCompareAny((
PetscObject)sA,&isaij,MATSEQAIJ,MATMPIAIJ,
"");
1871 ierr = MatSetType(sA,PETSC_MATAIJDEVICE); PCHKERRQ(sA,ierr);
1877 ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1878 PETSC_TRUE); PCHKERRQ(sA,ierr);
1885 ierr = MatSetVecType(tA,PETSC_VECDEVICE); PCHKERRQ(tA,ierr);
1891 ierr = PetscObjectTypeCompareAny((
PetscObject)tA,&isaij,MATSEQAIJ,MATMPIAIJ,
"");
1895 ierr = MatSetType(tA,PETSC_MATAIJDEVICE); PCHKERRQ(tA,ierr);
1900 ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1901 PETSC_TRUE); PCHKERRQ(tA,ierr);
1916 f = MatMultTranspose;
1917 fadd = MatMultTransposeAdd;
1928 ierr = VecScale(Y,
b/
a); PCHKERRQ(A,ierr);
1929 ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1930 ierr = VecScale(Y,
a); PCHKERRQ(A,ierr);
1934 ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1935 ierr = VecScale(Y,
a); PCHKERRQ(A,ierr);
1946 ierr = VecScale(Y,
b); PCHKERRQ(A,ierr);
1950 ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1957 ierr = PetscObjectReference((
PetscObject)master.
A); PCHKERRQ(master.
A,ierr);
1969 MFEM_VERIFY(
A,
"Mat not present");
1979 MFEM_VERIFY(
A,
"Mat not present");
1990 ierr = MatCreateTranspose(
A,&B); PCHKERRQ(
A,ierr);
1994 ierr = MatTranspose(
A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(
A,ierr);
2001 ierr = MatScale(
A,s); PCHKERRQ(
A,ierr);
2007 MFEM_ASSERT(x.
Size() ==
Width(),
"invalid x.Size() = " << x.
Size()
2008 <<
", expected size = " <<
Width());
2009 MFEM_ASSERT(y.
Size() ==
Height(),
"invalid y.Size() = " << y.
Size()
2010 <<
", expected size = " <<
Height());
2014 bool rw = (
b != 0.0);
2017 MatMultKernel(
A,
a,XX->
x,
b,YY->
x,
false);
2026 MFEM_ASSERT(x.
Size() ==
Height(),
"invalid x.Size() = " << x.
Size()
2027 <<
", expected size = " <<
Height());
2028 MFEM_ASSERT(y.
Size() ==
Width(),
"invalid y.Size() = " << y.
Size()
2029 <<
", expected size = " <<
Width());
2033 bool rw = (
b != 0.0);
2036 MatMultKernel(
A,
a,YY->
x,
b,XX->
x,
true);
2049 ierr = PetscViewerBinaryOpen(PetscObjectComm((
PetscObject)
A),fname,
2050 FILE_MODE_WRITE,&view);
2054 ierr = PetscViewerASCIIOpen(PetscObjectComm((
PetscObject)
A),fname,&view);
2057 ierr = MatView(
A,view); PCHKERRQ(
A,ierr);
2058 ierr = PetscViewerDestroy(&view); PCHKERRQ(
A,ierr);
2062 ierr = MatView(
A,NULL); PCHKERRQ(
A,ierr);
2068 MFEM_ASSERT(s.
Size() ==
Height(),
"invalid s.Size() = " << s.
Size()
2069 <<
", expected size = " <<
Height());
2073 ierr = MatDiagonalScale(
A,*YY,NULL); PCHKERRQ(
A,ierr);
2079 MFEM_ASSERT(s.
Size() ==
Width(),
"invalid s.Size() = " << s.
Size()
2080 <<
", expected size = " <<
Width());
2084 ierr = MatDiagonalScale(
A,NULL,*XX); PCHKERRQ(
A,ierr);
2096 MFEM_ASSERT(s.
Size() ==
Height(),
"invalid s.Size() = " << s.
Size()
2097 <<
", expected size = " <<
Height());
2098 MFEM_ASSERT(s.
Size() ==
Width(),
"invalid s.Size() = " << s.
Size()
2099 <<
", expected size = " <<
Width());
2103 ierr = MatDiagonalSet(
A,*XX,ADD_VALUES); PCHKERRQ(
A,ierr);
2111 "Petsc TripleMatrixProduct: Number of local cols of A " << A->
Width() <<
2112 " differs from number of local rows of P " << P->
Height());
2114 "Petsc TripleMatrixProduct: Number of local rows of A " << A->
Height() <<
2115 " differs from number of local cols of R " << R->
Width());
2117 ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2124 Mat pA = *A,pP = *P,pRt = *Rt;
2126 PetscBool Aismatis,Pismatis,Rtismatis;
2129 "Petsc RAP: Number of local cols of A " << A->
Width() <<
2130 " differs from number of local rows of P " << P->
Height());
2132 "Petsc RAP: Number of local rows of A " << A->
Height() <<
2133 " differs from number of local rows of Rt " << Rt->
Height());
2134 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&Aismatis);
2136 ierr = PetscObjectTypeCompare((
PetscObject)pP,MATIS,&Pismatis);
2138 ierr = PetscObjectTypeCompare((
PetscObject)pRt,MATIS,&Rtismatis);
2145 ISLocalToGlobalMapping cl2gP,cl2gRt;
2146 PetscInt rlsize,clsize,rsize,csize;
2148 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2149 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2150 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2151 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2152 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2153 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2154 ierr = MatCreate(A->
GetComm(),&B); PCHKERRQ(pA,ierr);
2155 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2156 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2157 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2158 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2159 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2160 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2163 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2169 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2170 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2172 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2180 ierr = PetscObjectReference((
PetscObject)lRt); PCHKERRQ(lRt,ierr);
2181 (*vmatsl2l)[0] = lRt;
2184 ierr = PetscContainerCreate(PetscObjectComm((
PetscObject)B),&c);
2186 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2187#if PETSC_VERSION_LT(3,23,0)
2188 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2190 ierr = PetscContainerSetCtxDestroy(c,__mfem_matarray_container_destroy);
2195 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2199 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2200 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2201 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2202 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2208 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2214 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2215 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2217 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2232#if defined(PETSC_HAVE_HYPRE)
2247 ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2257 ierr = MatDuplicate(
A,MAT_COPY_VALUES,&Ae); PCHKERRQ(
A,ierr);
2259 ierr = MatAXPY(Ae,-1.,
A,SAME_NONZERO_PATTERN); PCHKERRQ(
A,ierr);
2268 MFEM_ABORT(
"Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2277 ierr = MatGetSize(
A,&
M,&
N); PCHKERRQ(
A,ierr);
2278 MFEM_VERIFY(
M ==
N,
"Rectangular case unsupported");
2281 ierr = MatSetOption(
A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(
A,ierr);
2285 ierr = MatGetOwnershipRange(
A,&rst,NULL); PCHKERRQ(
A,ierr);
2288 ierr = Convert_Array_IS(
GetComm(),
true,&rows_cols,rst,&dir); PCHKERRQ(
A,ierr);
2291 ierr = MatZeroRowsColumnsIS(
A,dir,diag,NULL,NULL); PCHKERRQ(
A,ierr);
2295 ierr = MatZeroRowsColumnsIS(
A,dir,diag,
X,B); PCHKERRQ(
A,ierr);
2297 ierr = ISDestroy(&dir); PCHKERRQ(
A,ierr);
2302 ierr = MatSetOption(
A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(
A,ierr);
2306 ierr = MatGetOwnershipRange(
A,&rst,NULL); PCHKERRQ(
A,ierr);
2309 ierr = Convert_Array_IS(
GetComm(),
true,&rows,rst,&dir); PCHKERRQ(
A,ierr);
2310 ierr = MatZeroRowsIS(
A,dir,0.0,NULL,NULL); PCHKERRQ(
A,ierr);
2311 ierr = ISDestroy(&dir); PCHKERRQ(
A,ierr);
2321 ierr = PetscObjectDereference((
PetscObject)
A); CCHKERRQ(comm,ierr);
2330 MFEM_VERIFY(
A,
"no associated PETSc Mat object");
2333 ierr = PetscObjectBaseTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(
A,ierr);
2335 ierr = PetscObjectBaseTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(
A,ierr);
2337 ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(
A,ierr);
2339 ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(
A,ierr);
2341 ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(
A,ierr);
2343#if defined(PETSC_HAVE_HYPRE)
2344 ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(
A,ierr);
2358 Ae.
Mult(-1.0, X, 1.0, B);
2361 ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2362 ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2363 for (
int i = 0; i < ess_dof_list.
Size(); i++)
2365 int r = ess_dof_list[i];
2366 B(r) = array[r] * X(r);
2368 ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2397 if (
cid == KSP_CLASSID)
2400 ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2402 else if (
cid == SNES_CLASSID)
2404 SNES snes = (SNES)
obj;
2405 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2408 else if (
cid == TS_CLASSID)
2411 ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2415 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2422 if (
cid == KSP_CLASSID)
2425 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2427 else if (
cid == SNES_CLASSID)
2429 SNES snes = (SNES)
obj;
2430 ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2433 else if (
cid == TS_CLASSID)
2436 ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2440 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2447 if (
cid == KSP_CLASSID)
2450 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2453 else if (
cid == SNES_CLASSID)
2455 SNES snes = (SNES)
obj;
2456 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2457 max_iter,PETSC_DEFAULT);
2459 else if (
cid == TS_CLASSID)
2462 ierr = TSSetMaxSteps(ts,max_iter);
2466 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2474 typedef PetscErrorCode (*myPetscFunc)(
void**);
2475 PetscViewerAndFormat *vf = NULL;
2476 PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(
obj));
2480 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2483 if (
cid == KSP_CLASSID)
2491 ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2495#if PETSC_VERSION_LT(3,15,0)
2496 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2498 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2500 (myPetscFunc)PetscViewerAndFormatDestroy);
2505 ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2506 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2507 (myPetscFunc)PetscViewerAndFormatDestroy);
2511 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2512 PCHKERRQ(viewer,ierr);
2513#if PETSC_VERSION_LT(3,15,0)
2514 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2516 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2518 (myPetscFunc)PetscViewerAndFormatDestroy);
2523 else if (
cid == SNES_CLASSID)
2526 SNES snes = (SNES)
obj;
2529 ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2533 ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2534 (myPetscFunc)PetscViewerAndFormatDestroy);
2535 PCHKERRQ(snes,ierr);
2538 else if (
cid == TS_CLASSID)
2543 ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2548 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2554 return obj ? PetscObjectComm(
obj) : MPI_COMM_NULL;
2559 __mfem_monitor_ctx *monctx;
2560 ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2561 monctx->solver =
this;
2562 monctx->monitor =
ctx;
2563 if (
cid == KSP_CLASSID)
2565 ierr = KSPMonitorSet((KSP)
obj,__mfem_ksp_monitor,monctx,
2566 __mfem_monitor_ctx_destroy);
2569 else if (
cid == SNES_CLASSID)
2571 ierr = SNESMonitorSet((SNES)
obj,__mfem_snes_monitor,monctx,
2572 __mfem_monitor_ctx_destroy);
2575 else if (
cid == TS_CLASSID)
2577 ierr = TSMonitorSet((TS)
obj,__mfem_ts_monitor,monctx,
2578 __mfem_monitor_ctx_destroy);
2583 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2590 if (
cid == SNES_CLASSID)
2592 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)
private_ctx;
2595 else if (
cid == TS_CLASSID)
2597 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)
private_ctx;
2602 MFEM_ABORT(
"Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2609 if (
cid == TS_CLASSID)
2614 ierr = TSGetSNES((TS)
obj,&snes); PCHKERRQ(
obj,ierr);
2615 ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(
obj,ierr);
2616 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(
obj,ierr);
2618 else if (
cid == SNES_CLASSID)
2622 ierr = SNESGetKSP((SNES)
obj,&ksp); PCHKERRQ(
obj,ierr);
2623 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(
obj,ierr);
2625 else if (
cid == KSP_CLASSID)
2627 ierr = KSPGetPC((KSP)
obj,&pc); PCHKERRQ(
obj,ierr);
2629 else if (
cid == PC_CLASSID)
2635 MFEM_ABORT(
"No support for PetscPreconditionerFactory for this object");
2639 ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2643 ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2649 if (!customize) {
clcustom =
true; }
2652 if (
cid == PC_CLASSID)
2655 ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2657 else if (
cid == KSP_CLASSID)
2660 ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2662 else if (
cid == SNES_CLASSID)
2664 SNES snes = (SNES)
obj;
2665 ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2667 else if (
cid == TS_CLASSID)
2670 ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2674 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2682 if (
cid == KSP_CLASSID)
2685 KSPConvergedReason reason;
2686 ierr = KSPGetConvergedReason(ksp,&reason);
2688 return reason > 0 ? 1 : 0;
2690 else if (
cid == SNES_CLASSID)
2692 SNES snes = (SNES)
obj;
2693 SNESConvergedReason reason;
2694 ierr = SNESGetConvergedReason(snes,&reason);
2695 PCHKERRQ(snes,ierr);
2696 return reason > 0 ? 1 : 0;
2698 else if (
cid == TS_CLASSID)
2701 TSConvergedReason reason;
2702 ierr = TSGetConvergedReason(ts,&reason);
2704 return reason > 0 ? 1 : 0;
2708 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2715 if (
cid == KSP_CLASSID)
2719 ierr = KSPGetIterationNumber(ksp,&its);
2723 else if (
cid == SNES_CLASSID)
2725 SNES snes = (SNES)
obj;
2727 ierr = SNESGetIterationNumber(snes,&its);
2728 PCHKERRQ(snes,ierr);
2731 else if (
cid == TS_CLASSID)
2735 ierr = TSGetStepNumber(ts,&its);
2741 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2748 if (
cid == KSP_CLASSID)
2752 ierr = KSPGetResidualNorm(ksp,&norm);
2756 if (
cid == SNES_CLASSID)
2758 SNES snes = (SNES)
obj;
2760 ierr = SNESGetFunctionNorm(snes,&norm);
2761 PCHKERRQ(snes,ierr);
2766 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2767 return PETSC_MAX_REAL;
2774 if (
cid == SNES_CLASSID)
2776 __mfem_snes_ctx *snes_ctx;
2777 ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2778 snes_ctx->op = NULL;
2779 snes_ctx->bchandler = NULL;
2780 snes_ctx->work = NULL;
2784 else if (
cid == TS_CLASSID)
2786 __mfem_ts_ctx *ts_ctx;
2787 ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2789 ts_ctx->bchandler = NULL;
2790 ts_ctx->work = NULL;
2791 ts_ctx->work2 = NULL;
2792 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2793 ts_ctx->cached_ijacstate = -1;
2794 ts_ctx->cached_rhsjacstate = -1;
2795 ts_ctx->cached_splits_xstate = -1;
2796 ts_ctx->cached_splits_xdotstate = -1;
2807 if (
cid == SNES_CLASSID)
2809 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)
private_ctx;
2810 delete snes_ctx->work;
2812 else if (
cid == TS_CLASSID)
2814 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)
private_ctx;
2815 delete ts_ctx->work;
2816 delete ts_ctx->work2;
2818 ierr = PetscFree(
private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2825 : bctype(type_), setup(false), eval_t(0.0),
2826 eval_t_cached(std::numeric_limits<
mfem::
real_t>::min())
2833 ess_tdof_list.
SetSize(list.Size());
2834 ess_tdof_list.
Assign(list);
2840 if (setup) {
return; }
2844 this->
Eval(eval_t,eval_g);
2845 eval_t_cached = eval_t;
2856 (*this).SetUp(x.
Size());
2860 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2862 y[ess_tdof_list[i]] = 0.0;
2867 if (bctype !=
CONSTANT && eval_t != eval_t_cached)
2869 Eval(eval_t,eval_g);
2870 eval_t_cached = eval_t;
2872 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2874 y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2881 (*this).SetUp(x.
Size());
2884 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2886 x[ess_tdof_list[i]] = 0.0;
2891 if (bctype !=
CONSTANT && eval_t != eval_t_cached)
2893 Eval(eval_t,eval_g);
2894 eval_t_cached = eval_t;
2896 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2898 x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2905 (*this).SetUp(x.
Size());
2908 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2910 y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2915 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2917 y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2924 (*this).SetUp(x.
Size());
2925 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2927 x[ess_tdof_list[i]] = 0.0;
2933 (*this).SetUp(x.
Size());
2935 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2937 y[ess_tdof_list[i]] = 0.0;
2944 bool wrapin,
bool iter_mode)
2948 ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2950 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
2951 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2952 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
2953 PCHKERRQ(ksp, ierr);
2957 const std::string &prefix,
bool iter_mode)
2963 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
2964 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2965 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
2966 PCHKERRQ(ksp, ierr);
2971 const std::string &prefix,
bool iter_mode)
2977 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
2978 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2979 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
2980 PCHKERRQ(ksp, ierr);
2992 bool delete_pA =
false;
3010 MFEM_VERIFY(pA,
"Unsupported operation!");
3018 PetscInt nheight,nwidth,oheight,owidth;
3020 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3021 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3022 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3023 if (nheight != oheight || nwidth != owidth)
3027 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3033 ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
3042 if (delete_pA) {
delete pA; }
3057 bool delete_pA =
false;
3075 MFEM_VERIFY(pA,
"Unsupported operation!");
3078 bool delete_ppA =
false;
3081 if (oA == poA && !wrap)
3091 MFEM_VERIFY(ppA,
"Unsupported operation!");
3100 PetscInt nheight,nwidth,oheight,owidth;
3102 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3103 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3104 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3105 if (nheight != oheight || nwidth != owidth)
3109 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3116 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3125 if (delete_pA) {
delete pA; }
3126 if (delete_ppA) {
delete ppA; }
3136 ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3139 ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3140 ierr = PetscObjectReference((
PetscObject)A); PCHKERRQ(ksp,ierr);
3145 ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3154 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3155 ierr = MakeShellPC(pc,precond,
false); PCHKERRQ(ksp,ierr);
3161 ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3162 ierr = PetscObjectReference((
PetscObject)P); PCHKERRQ(ksp,ierr);
3163 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3164 ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3165 ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3176 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(
obj, ierr);
3184 PetscParMatrix A = PetscParMatrix(pA,
true);
3185 X =
new PetscParVector(A,
false,
false);
3193 ierr = KSPGetInitialGuessNonzero(ksp, &flg);
3199 ierr = KSPSolveTranspose(ksp,
B->
x,
X->
x); PCHKERRQ(ksp,ierr);
3203 ierr = KSPSolve(ksp,
B->
x,
X->
x); PCHKERRQ(ksp,ierr);
3211 (*this).MultKernel(
b,x,
false);
3216 (*this).MultKernel(
b,x,
true);
3223 ierr = PetscObjectGetComm((
PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3224 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3234 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3236 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3244 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3246 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3250 const std::string &prefix,
bool iter_mode)
3254 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3256 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3262 const std::string &prefix)
3266 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3268 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3269 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3273 const string &prefix)
3279 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3280 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3285 const string &prefix)
3289 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3291 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3292 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3298 bool delete_pA =
false;
3315 PetscInt nheight,nwidth,oheight,owidth;
3317 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3318 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3319 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3320 if (nheight != oheight || nwidth != owidth)
3324 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3330 ierr = PCSetOperators(pc,pA->
A,pA->
A); PCHKERRQ(
obj,ierr);
3339 if (delete_pA) {
delete pA; };
3342void PetscPreconditioner::MultKernel(
const Vector &
b,
Vector &x,
3346 "Iterative mode not supported for PetscPreconditioner");
3352 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(
obj, ierr);
3360 PetscParMatrix A(pA,
true);
3361 X =
new PetscParVector(A,
false,
false);
3372 ierr = PCApplyTranspose(pc,
B->
x,
X->
x); PCHKERRQ(pc, ierr);
3376 ierr = PCApply(pc,
B->
x,
X->
x); PCHKERRQ(pc, ierr);
3384 (*this).MultKernel(
b,x,
false);
3389 (*this).MultKernel(
b,x,
true);
3396 ierr = PetscObjectGetComm((
PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3397 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3408void PetscBDDCSolver::BDDCSolverConstructor(
const PetscBDDCSolverParams &opts)
3410 MPI_Comm comm = PetscObjectComm(
obj);
3415 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3419 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&ismatis);
3421 MFEM_VERIFY(ismatis,
"PetscBDDCSolver needs the matrix in unassembled format");
3424 ParFiniteElementSpace *fespace = opts.fespace;
3425 if (opts.netflux && !fespace)
3427 MFEM_WARNING(
"Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3434 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3435 ierr = MatNullSpaceCreate(PetscObjectComm((
PetscObject)lA),PETSC_TRUE,0,NULL,
3436 &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3437 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3438 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3442 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(
obj,ierr);
3445 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3446 ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3455 int vdim = fespace->GetVDim();
3462 ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3463 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3464 ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3473 ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3486 ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3492 const FiniteElementCollection *fec = fespace->FEColl();
3493 bool h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3496 ParFiniteElementSpace *fespace_coords = fespace;
3498 sdim = fespace->GetParMesh()->SpaceDimension();
3501 fespace_coords =
new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3504 VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3505 ParGridFunction gf_coords(fespace_coords);
3506 gf_coords.ProjectCoefficient(coeff_coords);
3507 HypreParVector *hvec_coords = gf_coords.ParallelProject();
3509 hvec_coords->Size(),
false);
3517 Vec pvec_coords,lvec_coords;
3518 ISLocalToGlobalMapping l2g;
3524 ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3525 hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3526 CCHKERRQ(comm,ierr);
3527 ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3530 ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3531 CCHKERRQ(comm,ierr);
3532 ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3533 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3535 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3536 ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3537 ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3538 ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3539 ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3540 ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3541 ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3542 ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3543 ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3544 CCHKERRQ(comm,ierr);
3545 ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3550 ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3551 ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3552#if PETSC_VERSION_LT(3,15,0)
3553 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3554 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3556 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3557 CCHKERRQ(comm,ierr);
3558 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3559 CCHKERRQ(comm,ierr);
3561 ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3562 ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3564 ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3565 ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3566 CCHKERRQ(PETSC_COMM_SELF,ierr);
3567 ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3568 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3569 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3570 ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3574 ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3583 ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3584 ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3588 for (
PetscInt d = 0; d < sdim; d++)
3590 coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3593 ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3598 for (
PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3600 if (fespace_coords != fespace)
3602 delete fespace_coords;
3609 IS dir = NULL, neu = NULL;
3612 Array<Mat> *l2l = NULL;
3613 if (opts.ess_dof_local || opts.nat_dof_local)
3618 MFEM_VERIFY(c,
"Local-to-local PETSc container not present");
3619 ierr = PetscContainerGetPointer(c,(
void**)&l2l); PCHKERRQ(c,ierr);
3626 PetscBool lpr = PETSC_FALSE,pr;
3627 if (opts.ess_dof) { lpr = PETSC_TRUE; }
3628 mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3629 CCHKERRQ(comm,mpiierr);
3630 MFEM_VERIFY(lpr == pr,
"ess_dof should be collectively set");
3632 if (opts.nat_dof) { lpr = PETSC_TRUE; }
3633 mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3634 CCHKERRQ(comm,mpiierr);
3635 MFEM_VERIFY(lpr == pr,
"nat_dof should be collectively set");
3638 ms[0] = -nf; ms[1] = nf;
3639 mpiierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3640 CCHKERRQ(comm,mpiierr);
3641 MFEM_VERIFY(-Ms[0] == Ms[1],
3642 "number of fields should be the same across processes");
3649 PetscInt st = opts.ess_dof_local ? 0 : rst;
3650 if (!opts.ess_dof_local)
3653 ierr = Convert_Array_IS(comm,
true,opts.ess_dof,st,&dir);
3654 CCHKERRQ(comm,ierr);
3655 ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3660 ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3661 CCHKERRQ(comm,ierr);
3662 ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3667 PetscInt st = opts.nat_dof_local ? 0 : rst;
3668 if (!opts.nat_dof_local)
3671 ierr = Convert_Array_IS(comm,
true,opts.nat_dof,st,&neu);
3672 CCHKERRQ(comm,ierr);
3673 ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3678 ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3679 CCHKERRQ(comm,ierr);
3680 ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3687 ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3691 ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3693 ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3698 ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3700 ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3709 const FiniteElementCollection *fec = fespace->FEColl();
3710 bool edgespace, rtspace, h1space;
3711 bool needint = opts.netflux;
3712 bool tracespace, rt_tracespace, edge_tracespace;
3714 PetscBool B_is_Trans = PETSC_FALSE;
3716 ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3717 dim = pmesh->Dimension();
3718 vdim = fespace->GetVDim();
3719 h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3720 rtspace =
dynamic_cast<const RT_FECollection*
>(fec);
3721 edgespace =
dynamic_cast<const ND_FECollection*
>(fec);
3722 edge_tracespace =
dynamic_cast<const ND_Trace_FECollection*
>(fec);
3723 rt_tracespace =
dynamic_cast<const RT_Trace_FECollection*
>(fec);
3724 tracespace = edge_tracespace || rt_tracespace;
3727 if (fespace->GetNE() > 0)
3731 p = fespace->GetElementOrder(0);
3735 p = fespace->GetFaceOrder(0);
3736 if (
dim == 2) {
p++; }
3747 MFEM_WARNING(
"Tracespace case doesn't work for H(curl) and p=2,"
3748 " not using auxiliary quadrature");
3754 FiniteElementCollection *vfec;
3757 vfec =
new H1_Trace_FECollection(
p,
dim);
3761 vfec =
new H1_FECollection(
p,
dim);
3763 ParFiniteElementSpace *vfespace =
new ParFiniteElementSpace(pmesh,vfec);
3764 ParDiscreteLinearOperator *grad;
3765 grad =
new ParDiscreteLinearOperator(vfespace,fespace);
3768 grad->AddTraceFaceInterpolator(
new GradientInterpolator);
3772 grad->AddDomainInterpolator(
new GradientInterpolator);
3776 HypreParMatrix *hG = grad->ParallelAssemble();
3777 PetscParMatrix *G =
new PetscParMatrix(hG,
PETSC_MATAIJ);
3781 PetscBool conforming = PETSC_TRUE;
3782 if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3783 ierr = PCBDDCSetDiscreteGradient(pc,*G,
p,0,PETSC_TRUE,conforming);
3795 MFEM_WARNING(
"Tracespace case doesn't work for H(div), not using"
3796 " auxiliary quadrature");
3802 if (vdim !=
dim) { needint =
false; }
3805 PetscParMatrix *
B = NULL;
3811 FiniteElementCollection *auxcoll;
3812 if (tracespace) { auxcoll =
new RT_Trace_FECollection(
p,
dim); }
3817 auxcoll =
new H1_FECollection(std::max(
p-1,1),
dim);
3821 auxcoll =
new L2_FECollection(
p,
dim);
3824 ParFiniteElementSpace *pspace =
new ParFiniteElementSpace(pmesh,auxcoll);
3825 ParMixedBilinearForm *
b =
new ParMixedBilinearForm(fespace,pspace);
3831 b->AddTraceFaceIntegrator(
new VectorFECurlIntegrator);
3835 b->AddDomainIntegrator(
new VectorFECurlIntegrator);
3842 b->AddTraceFaceIntegrator(
new VectorFEDivergenceIntegrator);
3846 b->AddDomainIntegrator(
new VectorFEDivergenceIntegrator);
3851 b->AddDomainIntegrator(
new VectorDivergenceIntegrator);
3856 b->ParallelAssemble(Bh);
3858 Bh.SetOperatorOwner(
false);
3863 ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3864 if (!opts.ess_dof_local)
3866 ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3870 ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3872 B_is_Trans = PETSC_TRUE;
3881 ierr = PCBDDCSetDivergenceMat(pc,*
B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3885 ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3886 ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3891 const std::string &prefix)
3894 BDDCSolverConstructor(opts);
3900 const std::string &prefix)
3903 BDDCSolverConstructor(opts);
3908 const string &prefix)
3912 ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3915 ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3919 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATNEST,&isnest);
3925 ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3926 ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3927 ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3937 ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3939 ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3944 const std::string &prefix)
3948 ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE); PCHKERRQ(A,ierr);
3949 ierr = MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE); PCHKERRQ(A,ierr);
3951 H2SolverConstructor(fes);
3957#if defined(PETSC_HAVE_H2OPUS)
3969 VectorFunctionCoefficient ccoords(sdim, func_coords);
3971 ParGridFunction coords(fes);
3972 coords.ProjectCoefficient(ccoords);
3974 coords.ParallelProject(c);
3978 ierr = PCSetType(pc,PCH2OPUS); PCHKERRQ(
obj, ierr);
3979 ierr = PCSetCoordinates(pc,sdim,c.Size()/sdim,
3982 ierr = PCSetFromOptions(pc); PCHKERRQ(
obj, ierr);
3984 MFEM_ABORT(
"Need PETSc configured with --download-h2opus");
3991 const std::string &prefix)
3996 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3998 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
3999 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
4006 const std::string &prefix)
4011 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
4013 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
4014 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
4026 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj, ierr);
4027 ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
4039 ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
4040 PCHKERRQ(snes, ierr);
4041 ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
4042 PCHKERRQ(snes, ierr);
4045 (
void*)&op == fctx &&
4046 (
void*)&op == jctx);
4047 mpiierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
4049 CCHKERRQ(PetscObjectComm((
PetscObject)snes),mpiierr);
4052 ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
4063 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4064 ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
4073 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4075 ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (
void *)snes_ctx);
4076 PCHKERRQ(snes, ierr);
4077 ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
4079 PCHKERRQ(snes, ierr);
4081 ierr = MatDestroy(&dummy);
4082 PCHKERRQ(snes, ierr);
4094 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4095 snes_ctx->jacType = jacType;
4101 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4102 snes_ctx->objective = objfn;
4105 ierr = SNESSetObjective(snes, __mfem_snes_objective, (
void *)snes_ctx);
4106 PCHKERRQ(snes, ierr);
4113 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4114 snes_ctx->postcheck = post;
4118 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4119 ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (
void *)snes_ctx);
4129 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4130 snes_ctx->update = update;
4133 ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4140 bool b_nonempty =
b.Size();
4152 ierr = SNESSolve(snes,
B->
x,
X->
x); PCHKERRQ(snes, ierr);
4164 ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4166 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
4167 ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4173 ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4175 ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4178 ierr = TSGetAdapt(ts,&tsad);
4180 ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4188 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj,ierr);
4189 ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4197 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4200 ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4203 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4204 ts_ctx->cached_ijacstate = -1;
4205 ts_ctx->cached_rhsjacstate = -1;
4206 ts_ctx->cached_splits_xstate = -1;
4207 ts_ctx->cached_splits_xdotstate = -1;
4219 ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (
void *)ts_ctx);
4221 ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (
void *)ts_ctx);
4223 ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4225 ierr = MatDestroy(&dummy);
4233 ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4242 ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (
void *)ts_ctx);
4244 ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4247 ierr = MatDestroy(&dummy);
4256 ierr = TSSetSolution(ts,
X); PCHKERRQ(ts,ierr);
4259 PetscBool use = PETSC_TRUE;
4260 ierr = PetscOptionsGetBool(NULL,NULL,
"-mfem_use_splitjac",&use,NULL);
4263 ierr = PetscObjectComposeFunction((
PetscObject)ts,
"TSComputeSplitJacobians_C",
4264 __mfem_ts_computesplits);
4269 ierr = PetscObjectComposeFunction((
PetscObject)ts,
"TSComputeSplitJacobians_C",
4277 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4278 ts_ctx->jacType = jacType;
4283 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4284 return ts_ctx->type;
4289 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4292 ts_ctx->type = type;
4295 ierr = TSSetProblemType(ts, TS_LINEAR);
4300 ierr = TSSetProblemType(ts, TS_NONLINEAR);
4309 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4310 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4313 ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4323 ierr = TSMonitor(ts, i, t, *
X); PCHKERRQ(ts,ierr);
4327 ierr = TSSetSolution(ts, *
X); PCHKERRQ(ts, ierr);
4328 ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4333 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4338 ierr = TSMonitor(ts, i+1, pt, *
X); PCHKERRQ(ts,ierr);
4348 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4349 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4350 ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4351 ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4363 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4364 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4365 ts_ctx->cached_ijacstate = -1;
4366 ts_ctx->cached_rhsjacstate = -1;
4367 ts_ctx->cached_splits_xstate = -1;
4368 ts_ctx->cached_splits_xdotstate = -1;
4371 ierr = TSSolve(ts,
X->
x); PCHKERRQ(ts, ierr);
4376 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4378 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4384#include "petsc/private/petscimpl.h"
4385#include "petsc/private/matimpl.h"
4391 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)
ctx;
4393 PetscFunctionBeginUser;
4396 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4408 PetscFunctionReturn(PETSC_SUCCESS);
4411static PetscErrorCode __mfem_ts_ifunction(TS ts,
PetscReal t, Vec x, Vec xp,
4414 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)
ctx;
4416 PetscFunctionBeginUser;
4424 if (ts_ctx->bchandler)
4429 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(xx.Size()); }
4430 if (!ts_ctx->work2) { ts_ctx->work2 =
new mfem::Vector(xx.Size()); }
4436 bchandler->
ZeroBC(yy,*txp);
4446 ff.UpdateVecFromFlags();
4447 PetscFunctionReturn(PETSC_SUCCESS);
4450static PetscErrorCode __mfem_ts_rhsfunction(TS ts,
PetscReal t, Vec x, Vec
f,
4453 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)
ctx;
4455 PetscFunctionBeginUser;
4456 if (ts_ctx->bchandler) { MFEM_ABORT(
"RHS evaluation with bc not implemented"); }
4465 ff.UpdateVecFromFlags();
4466 PetscFunctionReturn(PETSC_SUCCESS);
4469static PetscErrorCode __mfem_ts_ijacobian(TS ts,
PetscReal t, Vec x,
4473 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)
ctx;
4478 PetscObjectState state;
4479 PetscErrorCode ierr;
4481 PetscFunctionBeginUser;
4485 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4486 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4492 ierr = PetscObjectStateGet((
PetscObject)P,&state); CHKERRQ(ierr);
4494 std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4495 state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4502 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4503 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4505 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4506 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4507 if (!ts_ctx->bchandler)
4514 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4521 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4525 if (!ts_ctx->bchandler) {
delete xx; }
4526 ts_ctx->cached_shift = shift;
4529 bool delete_pA =
false;
4533 pA->
GetType() != ts_ctx->jacType))
4541 if (ts_ctx->bchandler)
4549 PetscObjectState nonzerostate;
4550 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4555 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4556 if (delete_pA) {
delete pA; }
4568 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4570 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4573 ierr = PetscObjectStateGet((
PetscObject)P,&ts_ctx->cached_ijacstate);
4579 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4580 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4581 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4582 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4583 PetscFunctionReturn(PETSC_SUCCESS);
4586static PetscErrorCode __mfem_ts_computesplits(TS ts,
PetscReal t,Vec x,Vec xp,
4590 __mfem_ts_ctx* ts_ctx;
4594 PetscObjectState state;
4595 PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4596 PetscBool assembled;
4597 PetscErrorCode ierr;
4599 PetscFunctionBeginUser;
4603 ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4604 ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4606 if (Axp && Axp != Jxp)
4608 ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4609 ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4612 ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(
void**)&ts_ctx); CHKERRQ(ierr);
4615 ierr = PetscObjectStateGet((
PetscObject)Jx,&state); CHKERRQ(ierr);
4617 state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4618 ierr = PetscObjectStateGet((
PetscObject)Jxp,&state); CHKERRQ(ierr);
4620 state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4621 if (!rx && !rxp) { PetscFunctionReturn(PETSC_SUCCESS); }
4628 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4629 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4631 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4632 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4633 if (!ts_ctx->bchandler)
4640 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4647 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4656 bool delete_mat =
false;
4660 pJx->
GetType() != ts_ctx->jacType))
4665 ierr = PetscObjectReference((
PetscObject)B); CHKERRQ(ierr);
4673 ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4676 ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4681 ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4682 ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4685 if (delete_mat) {
delete pJx; }
4689 if (ts_ctx->bchandler)
4706 pJxp->
GetType() != ts_ctx->jacType))
4711 ierr = PetscObjectReference((
PetscObject)B); CHKERRQ(ierr);
4714 &oJxp,ts_ctx->jacType);
4718 ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4721 ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4726 ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4727 ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4729 if (delete_mat) {
delete pJxp; }
4733 if (ts_ctx->bchandler)
4744 ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4748 ierr = PetscObjectStateGet((
PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4750 ierr = PetscObjectStateGet((
PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4755 if (!ts_ctx->bchandler) {
delete xx; }
4756 PetscFunctionReturn(PETSC_SUCCESS);
4759static PetscErrorCode __mfem_ts_rhsjacobian(TS ts,
PetscReal t, Vec x,
4760 Mat A, Mat P,
void *
ctx)
4762 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)
ctx;
4766 PetscObjectState state;
4767 PetscErrorCode ierr;
4769 PetscFunctionBeginUser;
4773 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4774 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4778 ierr = PetscObjectStateGet((
PetscObject)P,&state); CHKERRQ(ierr);
4780 state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4787 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4788 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4789 if (!ts_ctx->bchandler)
4796 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4803 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4807 if (!ts_ctx->bchandler) {
delete xx; }
4810 bool delete_pA =
false;
4814 pA->
GetType() != ts_ctx->jacType))
4822 if (ts_ctx->bchandler)
4830 PetscObjectState nonzerostate;
4831 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4836 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4837 if (delete_pA) {
delete pA; }
4849 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4851 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4856 ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4858 ierr = PetscObjectStateGet((
PetscObject)P,&ts_ctx->cached_rhsjacstate);
4864 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4865 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4866 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4867 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4868 PetscFunctionReturn(PETSC_SUCCESS);
4874 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)
ctx;
4876 PetscFunctionBeginUser;
4879 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4888 PetscErrorCode ierr;
4890 ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4897 PetscErrorCode ierr;
4899 ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4904 PetscFunctionReturn(PETSC_SUCCESS);
4907static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4912 PetscErrorCode ierr;
4914 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
ctx;
4916 PetscFunctionBeginUser;
4917 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4918 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4919 if (!snes_ctx->bchandler)
4926 if (!snes_ctx->work) { snes_ctx->work =
new mfem::Vector(n); }
4929 xx = snes_ctx->work;
4935 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4936 if (!snes_ctx->bchandler) {
delete xx; }
4939 bool delete_pA =
false;
4943 pA->
GetType() != snes_ctx->jacType))
4951 if (snes_ctx->bchandler)
4959 PetscObjectState nonzerostate;
4960 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4964 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4965 if (delete_pA) {
delete pA; }
4977 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4979 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4984 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4985 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4991 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4992 ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4993 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4994 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4995 PetscFunctionReturn(PETSC_SUCCESS);
4998static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec
f,
void *
ctx)
5000 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)
ctx;
5002 PetscFunctionBeginUser;
5005 if (snes_ctx->bchandler)
5012 snes_ctx->op->
Mult(*txx,ff);
5019 snes_ctx->op->
Mult(xx,ff);
5021 ff.UpdateVecFromFlags();
5022 PetscFunctionReturn(PETSC_SUCCESS);
5025static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x,
PetscReal *
f,
5028 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)
ctx;
5030 PetscFunctionBeginUser;
5031 if (!snes_ctx->objective)
5033 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing objective function");
5037 (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
5039 PetscFunctionReturn(PETSC_SUCCESS);
5042static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
5043 PetscBool *cy,PetscBool *cw,
void*
ctx)
5045 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)
ctx;
5046 bool lcy =
false,lcw =
false;
5048 PetscFunctionBeginUser;
5052 (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
5053 if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
5054 if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
5055 PetscFunctionReturn(PETSC_SUCCESS);
5058static PetscErrorCode __mfem_snes_update(SNES snes,
PetscInt it)
5061 __mfem_snes_ctx* snes_ctx;
5063 PetscFunctionBeginUser;
5065 ierr = SNESGetFunction(snes,&F,NULL,(
void **)&snes_ctx); CHKERRQ(ierr);
5066 ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
5069 ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
5072 ierr = VecDestroy(&pX); CHKERRQ(ierr);
5076 if (!pX) SETERRQ(PetscObjectComm((
PetscObject)snes),PETSC_ERR_USER,
5077 "Missing previous solution");
5078 ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
5083 (*snes_ctx->update)(snes_ctx->op,it,
f,x,dx,px);
5085 ierr = VecCopy(X,pX); CHKERRQ(ierr);
5086 PetscFunctionReturn(PETSC_SUCCESS);
5092 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)
ctx;
5094 PetscFunctionBeginUser;
5097 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
5106 PetscErrorCode ierr;
5108 ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
5115 PetscErrorCode ierr;
5117 ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5122 PetscFunctionReturn(PETSC_SUCCESS);
5125static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
5128 PetscErrorCode ierr;
5130 PetscFunctionBeginUser;
5131 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5132 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5136 yy.UpdateVecFromFlags();
5137 PetscFunctionReturn(PETSC_SUCCESS);
5140static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
5143 PetscErrorCode ierr;
5146 PetscFunctionBeginUser;
5147 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5148 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5151 ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5160 yy.UpdateVecFromFlags();
5161 PetscFunctionReturn(PETSC_SUCCESS);
5164static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
5167 PetscErrorCode ierr;
5169 PetscFunctionBeginUser;
5170 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5171 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5172 ierr = MatShellSetContext(B,(
void *)op); CHKERRQ(ierr);
5173 PetscFunctionReturn(PETSC_SUCCESS);
5176static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
5178 PetscFunctionBeginUser;
5179 PetscFunctionReturn(PETSC_SUCCESS);
5182static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
5184 __mfem_pc_shell_ctx *
ctx;
5185 PetscErrorCode ierr;
5187 PetscFunctionBeginUser;
5188 ierr = PCShellGetContext(pc,(
void **)&
ctx); CHKERRQ(ierr);
5192 ierr = PetscObjectTypeCompare((
PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5199 ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5205 ierr = PetscViewerASCIIPrintf(viewer,
5206 "No information available on the mfem::Solver\n");
5210 if (isascii &&
ctx->factory)
5212 ierr = PetscViewerASCIIPrintf(viewer,
5213 "Number of preconditioners created by the factory %lu\n",
ctx->numprec);
5217 PetscFunctionReturn(PETSC_SUCCESS);
5220static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
5222 __mfem_pc_shell_ctx *
ctx;
5223 PetscErrorCode ierr;
5225 PetscFunctionBeginUser;
5228 ierr = PCShellGetContext(pc,(
void **)&
ctx); CHKERRQ(ierr);
5231 ctx->op->Mult(xx,yy);
5232 yy.UpdateVecFromFlags();
5238 PetscFunctionReturn(PETSC_SUCCESS);
5241static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
5243 __mfem_pc_shell_ctx *
ctx;
5244 PetscErrorCode ierr;
5246 PetscFunctionBeginUser;
5249 ierr = PCShellGetContext(pc,(
void **)&
ctx); CHKERRQ(ierr);
5252 ctx->op->MultTranspose(xx,yy);
5253 yy.UpdateVecFromFlags();
5259 PetscFunctionReturn(PETSC_SUCCESS);
5262static PetscErrorCode __mfem_pc_shell_setup(PC pc)
5264 __mfem_pc_shell_ctx *
ctx;
5266 PetscFunctionBeginUser;
5267 ierr = PCShellGetContext(pc,(
void **)&
ctx); CHKERRQ(ierr);
5278 ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5287 PetscFunctionReturn(PETSC_SUCCESS);
5290static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
5292 __mfem_pc_shell_ctx *
ctx;
5293 PetscErrorCode ierr;
5295 PetscFunctionBeginUser;
5296 ierr = PCShellGetContext(pc,(
void **)&
ctx); CHKERRQ(ierr);
5302 PetscFunctionReturn(PETSC_SUCCESS);
5305#if PETSC_VERSION_LT(3,23,0)
5307static PetscErrorCode __mfem_array_container_destroy(
void *ptr)
5309 PetscErrorCode ierr;
5311 PetscFunctionBeginUser;
5312 ierr = PetscFree(ptr); CHKERRQ(ierr);
5313 PetscFunctionReturn(PETSC_SUCCESS);
5316static PetscErrorCode __mfem_matarray_container_destroy(
void *ptr)
5319 PetscErrorCode ierr;
5321 PetscFunctionBeginUser;
5322 for (
int i=0; i<
a->Size(); i++)
5326 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5329 PetscFunctionReturn(PETSC_SUCCESS);
5334static PetscErrorCode __mfem_array_container_destroy(
void **ptr)
5336 PetscErrorCode ierr;
5338 PetscFunctionBeginUser;
5339 ierr = PetscFree(*ptr); CHKERRQ(ierr);
5340 PetscFunctionReturn(PETSC_SUCCESS);
5343static PetscErrorCode __mfem_matarray_container_destroy(
void **ptr)
5346 PetscErrorCode ierr;
5348 PetscFunctionBeginUser;
5349 for (
int i=0; i<
a->Size(); i++)
5353 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5356 PetscFunctionReturn(PETSC_SUCCESS);
5361static PetscErrorCode __mfem_monitor_ctx_destroy(
void **
ctx)
5363 PetscErrorCode ierr;
5365 PetscFunctionBeginUser;
5366 ierr = PetscFree(*
ctx); CHKERRQ(ierr);
5367 PetscFunctionReturn(PETSC_SUCCESS);
5372PetscErrorCode MakeShellPC(PC pc,
mfem::Solver &precond,
bool ownsop)
5374 PetscFunctionBeginUser;
5375 __mfem_pc_shell_ctx *
ctx =
new __mfem_pc_shell_ctx;
5377 ctx->ownsop = ownsop;
5378 ctx->factory = NULL;
5384 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5386 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5387 ierr = PCShellSetName(pc,
"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5388 ierr = PCShellSetContext(pc,(
void *)
ctx); CHKERRQ(ierr);
5389 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5390 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5392 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5393 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5394 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5395 PetscFunctionReturn(PETSC_SUCCESS);
5400PetscErrorCode MakeShellPCWithFactory(PC pc,
5403 PetscFunctionBeginUser;
5404 __mfem_pc_shell_ctx *
ctx =
new __mfem_pc_shell_ctx;
5407 ctx->factory = factory;
5413 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5415 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5416 ierr = PCShellSetName(pc,factory->
GetName()); CHKERRQ(ierr);
5417 ierr = PCShellSetContext(pc,(
void *)
ctx); CHKERRQ(ierr);
5418 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5419 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5421 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5422 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5423 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5424 PetscFunctionReturn(PETSC_SUCCESS);
5429static PetscErrorCode Convert_Array_IS(MPI_Comm comm,
bool islist,
5433 PetscInt n = list ? list->Size() : 0,*idxs;
5434 const int *data = list ? list->GetData() : NULL;
5435 PetscErrorCode ierr;
5437 PetscFunctionBeginUser;
5438 ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5441 for (
PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5448 if (data[i]) { idxs[cum++] = i+st; }
5452 ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5454 PetscFunctionReturn(PETSC_SUCCESS);
5460static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5468 PetscErrorCode ierr;
5470 PetscFunctionBeginUser;
5471 for (
int i = 0; i < pl2l.
Size(); i++)
5475 ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(
const PetscInt**)&ii,
5476 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
5477 MFEM_VERIFY(done,
"Unable to perform MatGetRowIJ on " << i <<
" l2l matrix");
5478 ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5479#if defined(PETSC_USE_64BIT_INDICES)
5480 int nnz = (int)ii[m];
5481 int *mii =
new int[m+1];
5482 int *mjj =
new int[nnz];
5483 for (
int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5484 for (
int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5489 ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5491 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
5492 MFEM_VERIFY(done,
"Unable to perform MatRestoreRowIJ on "
5493 << i <<
" l2l matrix");
5496 for (
int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5498 const int* vdata = mark->
GetData();
5499 int* sdata = sub_dof_marker.
GetData();
5500 int cumh = 0, cumw = 0;
5501 for (
int i = 0; i < l2l.Size(); i++)
5506 l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5507 cumh += l2l[i]->Height();
5508 cumw += l2l[i]->Width();
5510 ierr = Convert_Array_IS(comm,
false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5511 for (
int i = 0; i < pl2l.
Size(); i++)
5515 PetscFunctionReturn(PETSC_SUCCESS);
5518#if !defined(PETSC_HAVE_HYPRE)
5520#if defined(HYPRE_MIXEDINT)
5521#error "HYPRE_MIXEDINT not supported"
5524#include "_hypre_parcsr_mv.h"
5525static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
5527 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5528 hypre_CSRMatrix *hdiag,*hoffd;
5530 PetscInt *dii,*djj,*oii,*ojj,*iptr;
5533 PetscErrorCode ierr;
5535 PetscFunctionBeginUser;
5536 hdiag = hypre_ParCSRMatrixDiag(hA);
5537 hoffd = hypre_ParCSRMatrixOffd(hA);
5538 m = hypre_CSRMatrixNumRows(hdiag);
5539 n = hypre_CSRMatrixNumCols(hdiag);
5540 dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5541 onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5542 ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5543 ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5544 ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5545 ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*
sizeof(
PetscInt));
5547 ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*
sizeof(
PetscInt));
5549 ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*
sizeof(
PetscScalar));
5556 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5560 ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5565 ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5566 ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5567 ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5568 ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*
sizeof(
PetscInt));
5570 offdj = hypre_CSRMatrixJ(hoffd);
5571 coffd = hypre_ParCSRMatrixColMapOffd(hA);
5572 for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5573 ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*
sizeof(
PetscScalar));
5580 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5584 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5585 djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5591 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5597 void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5598 const char *names[6] = {
"_mfem_csr_dii",
5609 ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5610 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5611#if PETSC_VERSION_LT(3,23,0)
5612 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5614 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
5619 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5621 PetscFunctionReturn(PETSC_SUCCESS);
5624static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
5627 ISLocalToGlobalMapping rl2g,cl2g;
5629 hypre_CSRMatrix *hdiag,*hoffd;
5630 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5632 const char *names[2] = {
"_mfem_csr_aux",
5636 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5638 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5639 PetscErrorCode ierr;
5641 PetscFunctionBeginUser;
5643 str = hypre_ParCSRMatrixFirstRowIndex(hA);
5644 stc = hypre_ParCSRMatrixFirstColDiag(hA);
5645 hdiag = hypre_ParCSRMatrixDiag(hA);
5646 hoffd = hypre_ParCSRMatrixOffd(hA);
5647 dr = hypre_CSRMatrixNumRows(hdiag);
5648 dc = hypre_CSRMatrixNumCols(hdiag);
5649 nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5650 hdi = hypre_CSRMatrixI(hdiag);
5651 hdj = hypre_CSRMatrixJ(hdiag);
5652 hdd = hypre_CSRMatrixData(hdiag);
5653 oc = hypre_CSRMatrixNumCols(hoffd);
5654 nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5655 hoi = hypre_CSRMatrixI(hoffd);
5656 hoj = hypre_CSRMatrixJ(hoffd);
5657 hod = hypre_CSRMatrixData(hoffd);
5660 ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5661 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5662 ierr = ISDestroy(&is); CHKERRQ(ierr);
5663 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5664 ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5665 for (i=0; i<dc; i++) { aux[i] = i+stc; }
5666 for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5667 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5668 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5669 ierr = ISDestroy(&is); CHKERRQ(ierr);
5672 ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5673 ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5674 ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5675 ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5676 ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5677 ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5680 ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5681 ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5685 *ii = *(hdi++) + *(hoi++);
5686 for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5690 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5691 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5692 *(++ii) = *(hdi++) + *(hoi++);
5693 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5695 for (; cum<dr; cum++) { *(++ii) = nnz; }
5699 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5707 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5708 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5709#if PETSC_VERSION_LT(3,23,0)
5710 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5712 ierr = PetscContainerSetCtxDestroy(c,__mfem_array_container_destroy);
5717 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5719 ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5720 ierr = MatDestroy(&lA); CHKERRQ(ierr);
5721 ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5722 ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5723 PetscFunctionReturn(PETSC_SUCCESS);
5727#include <petsc/private/matimpl.h>
5729static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm,
PetscInt m,
5733 ierr = MatCreate(comm,A); CHKERRQ(ierr);
5734 ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5735 ierr = PetscObjectChangeTypeName((
PetscObject)*A,
"mfemdummy"); CHKERRQ(ierr);
5736 (*A)->preallocated = PETSC_TRUE;
5737 ierr = MatSetUp(*A); CHKERRQ(ierr);
5738 PetscFunctionReturn(PETSC_SUCCESS);
5741#include <petsc/private/vecimpl.h>
5743#if defined(PETSC_HAVE_DEVICE)
5744static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5748 PetscFunctionReturn(PETSC_SUCCESS);
5752static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5755#if defined(PETSC_HAVE_DEVICE)
5756 *flg = v->boundtocpu;
5760 PetscFunctionReturn(PETSC_SUCCESS);
5763static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject o)
5765 PetscErrorCode ierr;
5768 ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5769 PetscFunctionReturn(PETSC_SUCCESS);
void Assign(const T *)
Copy data from a pointer. 'Size()' elements are copied.
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
int Size() const
Return the logical size of the array.
T * GetData()
Returns the data.
A class to handle Block systems in a matrix-free implementation.
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
int NumRowBlocks() const
Return the number of row blocks.
int NumColBlocks() const
Return the number of column blocks.
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
static int GetId()
Get the device id of the configured device.
static MemoryType GetDeviceMemoryType()
Get the current Device MemoryType. This is the MemoryType used by most MFEM classes when allocating m...
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
Ordering::Type GetOrdering() const
Return the ordering method.
const FiniteElementCollection * FEColl() const
int GetVDim() const
Returns the vector dimension of the finite element space.
Wrapper for hypre's ParCSR matrix class.
MPI_Comm GetComm() const
MPI communicator.
Wrapper for hypre's parallel vector class.
Identity Operator I: x -> x.
Class used by MFEM to store pointers to host and/or device memory.
bool DeviceIsValid() const
Return true if device pointer is valid.
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
bool HostIsValid() const
Return true if host pointer is valid.
bool UseDevice() const
Read the internal device flag.
void CopyToHost(T *dest, int size) const
Copy size entries from *this to the host pointer dest.
bool Empty() const
Return true if the Memory object is empty, see Reset().
void Sync(const Memory &other) const
Copy the host/device pointer validity flags from other to *this.
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void Wrap(T *ptr, int size, bool own)
Wrap an externally allocated host pointer, ptr with the current host memory type returned by MemoryMa...
void Delete()
Delete the owned pointers and reset the Memory object.
int SpaceDimension() const
Dimension of the physical space containing the mesh.
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Pointer to an Operator of a specified type.
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
int width
Dimension of the input / number of columns in the matrix.
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
int height
Dimension of the output / number of rows in the matrix.
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
Type
Enumeration defining IDs for some classes derived from Operator.
@ ANY_TYPE
ID for the base class Operator, i.e. any type.
@ PETSC_MATIS
ID for class PetscParMatrix, MATIS format.
@ PETSC_MATHYPRE
ID for class PetscParMatrix, MATHYPRE format.
@ PETSC_MATGENERIC
ID for class PetscParMatrix, unspecified format.
@ PETSC_MATAIJ
ID for class PetscParMatrix, MATAIJ format.
@ PETSC_MATNEST
ID for class PetscParMatrix, MATNEST format.
@ PETSC_MATSHELL
ID for class PetscParMatrix, MATSHELL format.
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an error.
virtual Operator & GetGradient(const Vector &x) const
Evaluate the gradient operator at the point x. The default behavior in class Operator is to generate an error.
Abstract parallel finite element space.
HYPRE_BigInt * GetTrueDofOffsets() const
int GetTrueVSize() const override
Return the number of local vector true dofs.
ParMesh * GetParMesh() const
Helper class for handling essential boundary conditions.
PetscBCHandler(Type type_=ZERO)
@ CONSTANT
Constant in time b.c.
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
virtual void Eval(real_t t, Vector &g)
Boundary conditions evaluation.
void SetTime(real_t t)
Sets the current time.
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
void ApplyBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = g (internally evaluated) on ess_tdof_list
void Zero(Vector &x)
Replace boundary dofs with 0.
Auxiliary class for BDDC customization.
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
Abstract class for PETSc's linear solvers.
void SetOperator(const Operator &op) override
operator petsc::KSP() const
Conversion function to PETSc's KSP type.
virtual ~PetscLinearSolver()
void MultTranspose(const Vector &b, Vector &x) const override
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an error.
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true, bool iter_mode=false)
void SetPreconditioner(Solver &precond)
void Mult(const Vector &b, Vector &x) const override
Application of the solver.
bool DeviceRequested() const
void SetHostInvalid() const
void SetHostValid() const
bool WriteRequested() const
const real_t * GetDevicePointer() const
const real_t * GetHostPointer() const
bool IsAliasForSync() const
void MakeAliasForSync(const Memory< real_t > &base_, int offset_, int size_, bool usedev_)
void SetDeviceInvalid() const
bool ReadRequested() const
void SetDeviceValid() const
void Mult(const Vector &b, Vector &x) const override
Application of the solver.
void SetPostCheck(void(*post)(Operator *op, const Vector &X, Vector &Y, Vector &W, bool &changed_y, bool &changed_w))
void SetObjective(void(*obj)(Operator *op, const Vector &x, real_t *f))
Specification of an objective function to be used for line search.
operator petsc::SNES() const
Conversion function to PETSc's SNES type.
virtual ~PetscNonlinearSolver()
void SetJacobianType(Operator::Type type)
void SetUpdate(void(*update)(Operator *op, int it, const Vector &F, const Vector &X, const Vector &D, const Vector &P))
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
void SetOperator(const Operator &op) override
Specification of the nonlinear operator.
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
virtual void Run(Vector &x, real_t &t, real_t &dt, real_t t_final)
Perform time integration from time t [in] to time tf [in].
virtual void Step(Vector &x, real_t &t, real_t &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in].
operator petsc::TS() const
Conversion function to PETSc's TS type.
void SetType(PetscODESolver::Type)
PetscODESolver::Type GetType() const
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
void SetJacobianType(Operator::Type type)
virtual ~PetscODESolver()
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool iter_mode=false)
Wrapper for PETSc's matrix class.
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
PetscInt GetNumRows() const
Returns the local number of rows.
void ScaleCols(const Vector &s)
Scale the local col i by s(i).
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
PetscParMatrix & operator-=(const PetscParMatrix &B)
void Mult(real_t a, const Vector &x, real_t b, Vector &y) const
Matvec: y = a A x + b y.
PetscInt GetColStart() const
Returns the global index of the first local column.
PetscInt M() const
Returns the global number of rows.
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, real_t diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
MPI_Comm GetComm() const
Get the associated MPI communicator.
PetscParMatrix & operator=(const PetscParMatrix &B)
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object.
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc's MATSHELL object and returns the Mat in B.
PetscInt GetNumCols() const
Returns the local number of columns.
PetscInt NNZ() const
Returns the number of nonzeros.
petsc::Mat A
The actual PETSc object.
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
void Shift(real_t s)
Shift diagonal by a constant.
PetscParVector * X
Auxiliary vectors for typecasting.
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
operator petsc::Mat() const
Typecasting to PETSc's Mat type.
void SetMat(petsc::Mat newA)
Replace the inner Mat Object. The reference count of newA is increased.
void operator*=(real_t s)
Scale all entries by s: A_scaled = s*A.
void SetBlockSize(PetscInt rbs, PetscInt cbs=-1)
Set row and column block sizes of a matrix.
PetscInt N() const
Returns the global number of columns.
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
void Init()
Initialize with defaults. Does not initialize inherited members.
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
PetscInt GetRowStart() const
Returns the global index of the first local row.
void MultTranspose(real_t a, const Vector &x, real_t b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
PetscParMatrix & operator+=(const PetscParMatrix &B)
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
void ResetMemory()
Completes the operation started with PlaceMemory.
void SetFlagsFromMask_() const
void PlaceMemory(Memory< real_t > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding ...
void Randomize(PetscInt seed=0)
Set random values.
void SetBlockSize(PetscInt bs)
Set block size of a vector.
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
real_t * HostReadWrite() override
Shortcut for mfem::ReadWrite(vec.GetMemory(), vec.Size(), false).
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array,...
PetscInt GlobalSize() const
Returns the global number of rows.
PetscParVector & operator-=(const PetscParVector &y)
real_t * Write(bool=true) override
Shortcut for mfem::Write(vec.GetMemory(), vec.Size(), on_dev).
void UpdateVecFromFlags()
Update PETSc's Vec after having accessed its data via GetMemory()
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
real_t * ReadWrite(bool=true) override
Shortcut for mfem::ReadWrite(vec.GetMemory(), vec.Size(), on_dev).
const real_t * HostRead() const override
Shortcut for mfem::Read(vec.GetMemory(), vec.Size(), false).
const real_t * Read(bool=true) const override
Shortcut for mfem::Read(vec.GetMemory(), vec.Size(), on_dev).
real_t * HostWrite() override
Shortcut for mfem::Write(vec.GetMemory(), vec.Size(), false).
PetscParVector & operator+=(const PetscParVector &y)
petsc::Vec x
The actual PETSc object.
MPI_Comm GetComm() const
Get the associated MPI communicator.
virtual ~PetscParVector()
Calls PETSc's destroy function.
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray().
Vector * GlobalVector() const
Returns the global vector in each processor.
operator petsc::Vec() const
Typecasting to PETSc's Vec type.
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
PetscParVector & operator*=(PetscScalar d)
PetscParVector & operator=(PetscScalar d)
Set constant values.
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
Abstract class for PETSc's preconditioners.
void Mult(const Vector &b, Vector &x) const override
Application of the preconditioner.
virtual ~PetscPreconditioner()
void SetOperator(const Operator &op) override
Set/update the solver for the given operator.
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
operator petsc::PC() const
Conversion function to PETSc's PC type.
void MultTranspose(const Vector &b, Vector &x) const override
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an error.
Abstract class for monitoring PETSc's solvers.
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Abstract class for PETSc's solvers.
PetscClassId cid
The class id of the actual PETSc object.
void SetAbsTol(real_t tol)
void * private_ctx
Private context for solver.
void SetPrintLevel(int plev)
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
void SetMaxIter(int max_iter)
void SetRelTol(real_t tol)
void CreatePrivateContext()
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
MPI_Comm GetComm() const
Get the associated MPI communicator.
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
bool clcustom
Boolean to handle SetFromOptions calls.
bool operatorset
Boolean to handle SetOperator calls.
void Customize(bool customize=true) const
Customize object with options set.
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
void FreePrivateContext()
PetscBCHandler * bchandler
Handler for boundary conditions.
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
PetscParVector * B
Right-hand side and solution vector.
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
const int * HostReadJ() const
int NumNonZeroElems() const override
Returns the number of the nonzero elements in the matrix.
const real_t * HostReadData() const
void SortColumnIndices()
Sort the column indices corresponding to each row.
const int * HostReadI() const
Base abstract class for first order time dependent operators.
bool isHomogeneous() const
True if type is HOMOGENEOUS.
virtual Operator & GetExplicitGradient(const Vector &u) const
Return an Operator representing dG/du at the given point u and the currently set time.
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
virtual void ExplicitMult(const Vector &u, Vector &v) const
Perform the action of the explicit part of the operator, G: v = G(u, t) where t is the current time.
virtual Operator & GetImplicitGradient(const Vector &u, const Vector &k, real_t shift) const
Return an Operator representing (dF/dk shift + dF/du) at the given u, k, and the currently set time.
virtual void SetTime(const real_t t_)
Set the current time.
virtual void ImplicitMult(const Vector &u, const Vector &k, Vector &v) const
Perform the action of the implicit part of the operator, F: v = F(u, k, t) where t is the current tim...
void MakeDataOwner() const
Set the Vector data (host pointer) ownership flag.
Memory< real_t > & GetMemory()
Return a reference to the Memory object used by the Vector.
int Size() const
Returns the size of the vector.
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
void SetSize(int s)
Resize the vector to size s.
void trans(const Vector &u, Vector &x)
real_t f(const Vector &p)
void write(std::ostream &os, T value)
Write 'value' to stream.
T read(std::istream &is)
Read a value from the stream and return it.
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true,...
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory<T> &mem, int size, false)
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory<T> &mem, int size, false)
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true,...
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as s...
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device's DeviceMemoryClass,...
void RAP(const DenseMatrix &A, const DenseMatrix &P, DenseMatrix &RAP)
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory<T> &mem, int size, false)
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
MemoryType
Memory types supported by MFEM.
@ HOST
Host memory; using new[] and delete[].
@ DEVICE
Device memory; using CUDA or HIP *Malloc and *Free.
void EliminateBC(const HypreParMatrix &A, const HypreParMatrix &Ae, const Array< int > &ess_dof_list, const Vector &X, Vector &B)
Eliminate essential BC specified by ess_dof_list from the solution X to the r.h.s....
std::function< real_t(const Vector &)> f(real_t mass_coeff)
struct s_NavierContext ctx
real_t p(const Vector &x, real_t t)
struct _p_PetscObject * PetscObject
@ HIP_MASK
Bitwise-OR of all HIP backends.
@ CUDA_MASK
Bitwise-OR of all CUDA backends.