14 #include "../config/config.hpp"
20 #include "../fem/fem.hpp"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
// Backward-compatibility shims for older PETSc releases: map names
// introduced in newer PETSc versions onto their older equivalents so the
// rest of this file can use the modern API unconditionally.
// NOTE(review): the matching #endif lines are not visible in this sampled
// view of the file.
28 #if PETSC_VERSION_LT(3,11,0)
// VecLockReadPush/Pop were called VecLockPush/Pop before PETSc 3.11.
29 #define VecLockReadPush VecLockPush
30 #define VecLockReadPop VecLockPop
32 #if PETSC_VERSION_LT(3,12,0)
// Write-access array getters and MatComputeOperator* appeared in 3.12;
// fall back to the pre-3.12 names (the extra type argument B is dropped).
33 #define VecGetArrayWrite VecGetArray
34 #define VecRestoreArrayWrite VecRestoreArray
35 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
// Forward declarations of file-local (static) C callbacks handed to PETSc.
// PETSc invokes these through function pointers; each one unpacks its
// user context and dispatches into the corresponding MFEM object.
// SNES (nonlinear solver) callbacks: Jacobian and residual evaluation.
60 static PetscErrorCode __mfem_snes_jacobian(
SNES,
Vec,
Mat,
Mat,
void*);
61 static PetscErrorCode __mfem_snes_function(
SNES,
Vec,
Vec,
void*);
// Line-search post-check hook (allows MFEM to modify the step).
64 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,
Vec,
Vec,
Vec,
65 PetscBool*,PetscBool*,
void*);
// PCSHELL callbacks: wrap an mfem::Solver as a PETSc preconditioner.
67 static PetscErrorCode __mfem_pc_shell_apply(
PC,
Vec,
Vec);
68 static PetscErrorCode __mfem_pc_shell_apply_transpose(
PC,
Vec,
Vec);
69 static PetscErrorCode __mfem_pc_shell_setup(
PC);
70 static PetscErrorCode __mfem_pc_shell_destroy(
PC);
71 static PetscErrorCode __mfem_pc_shell_view(
PC,PetscViewer);
// MATSHELL callbacks: wrap an mfem::Operator as a PETSc matrix.
72 static PetscErrorCode __mfem_mat_shell_apply(
Mat,
Vec,
Vec);
73 static PetscErrorCode __mfem_mat_shell_apply_transpose(
Mat,
Vec,
Vec);
74 static PetscErrorCode __mfem_mat_shell_destroy(
Mat);
75 static PetscErrorCode __mfem_mat_shell_copy(
Mat,
Mat,MatStructure);
// Destructors attached to PetscContainer objects so that auxiliary
// arrays/matrices are freed when the owning PETSc object is destroyed.
76 static PetscErrorCode __mfem_array_container_destroy(
void*);
77 static PetscErrorCode __mfem_matarray_container_destroy(
void*);
78 static PetscErrorCode __mfem_monitor_ctx_destroy(
void**);
// Auxiliary conversion/setup helpers (file-local).
// NOTE(review): this region of the sampled file is truncated -- the
// remaining parameters of Convert_Array_IS and MakeShellPCWithFactory,
// and several #endif lines, are not visible here; code is kept verbatim.
// Converts an mfem::Array<int> into a PETSc index set (IS).
81 static PetscErrorCode Convert_Array_IS(MPI_Comm,
bool,
const mfem::Array<int>*,
// Builds a PCSHELL whose solver is produced by an MFEM factory.
86 static PetscErrorCode MakeShellPCWithFactory(
PC,
// Fallback converters used only when PETSc itself was built without
// hypre support (otherwise MatCreateFromParCSR is used directly).
92 #if !defined(PETSC_HAVE_HYPRE)
93 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,
Mat*);
94 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,
Mat*);
// Device (GPU) support helpers, compiled only for recent PETSc with
// device support; the CUDA branch additionally requires MFEM_USE_CUDA.
97 #if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
98 #if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
105 #if defined(PETSC_HAVE_DEVICE)
// Force a Vec's host/device validity mask (no data movement implied here).
106 static PetscErrorCode __mfem_VecSetOffloadMask(
Vec,PetscOffloadMask);
// Query whether a Vec is currently bound to the CPU.
108 static PetscErrorCode __mfem_VecBoundToCPU(
Vec,PetscBool*);
// Bump a PetscObject's state counter to invalidate cached data.
109 static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject);
118 unsigned long int numprec;
119 } __mfem_pc_shell_ctx;
130 void (*postcheck)(
mfem::Operator *op,
const mfem::Vector&, mfem::Vector&,
131 mfem::Vector&,
bool&,
bool&);
135 const mfem::Vector&,
const mfem::Vector&,
136 const mfem::Vector&,
const mfem::Vector&);
148 PetscObjectState cached_ijacstate;
149 PetscObjectState cached_rhsjacstate;
150 PetscObjectState cached_splits_xstate;
151 PetscObjectState cached_splits_xdotstate;
158 } __mfem_monitor_ctx;
// File-scope error code reused by every PETSc call in this translation
// unit and checked via the PCHKERRQ/CCHKERRQ macros.
161 static PetscErrorCode ierr;
184 ierr = PetscOptionsSetValue(NULL,
"-cuda_device",
186 MFEM_VERIFY(!ierr,
"Unable to set initial option value to PETSc");
188 ierr = PetscInitialize(argc,argv,rc_file,help);
189 MFEM_VERIFY(!ierr,
"Unable to initialize PETSc");
194 ierr = PetscFinalize();
195 MFEM_VERIFY(!ierr,
"Unable to finalize PETSc");
// Returns a read-only host pointer to this memory block, triggering a
// host-side read (mfem::Read with on_dev=false) over the full capacity.
// NOTE(review): interior lines (braces, return statement) are missing
// from this sampled view; visible code kept verbatim.
198 const double* PetscMemory::GetHostPointer()
const
202 const double *v =
mfem::Read(*
this,Capacity(),
false);
// Returns a read-only device pointer to this memory block, triggering a
// device-side read (mfem::Read with on_dev=true) over the full capacity.
// NOTE(review): interior lines are missing from this sampled view.
207 const double* PetscMemory::GetDevicePointer()
const
211 const double *v =
mfem::Read(*
this,Capacity(),
true);
// Synchronizes this mfem::Vector wrapper with the underlying PETSc Vec x:
// queries the local size, wraps the Vec's host (and, for CUDA Vec types,
// device) arrays into pdata without taking ownership, and aliases data
// onto pdata. Read-only array accessors are used so the Vec's contents
// are not modified; the original offload mask is restored at the end.
// NOTE(review): fragment of a sampled file -- several interior lines
// (braces, declarations of n/array/darray/iscuda) are not visible here.
218 void PetscParVector::SetDataAndSize_()
223 MFEM_VERIFY(x,
"Missing Vec");
224 ierr = VecSetUp(x); PCHKERRQ(x,ierr);
225 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
226 MFEM_VERIFY(n >= 0,
"Invalid local size");
228 #if defined(PETSC_HAVE_DEVICE)
229 PetscOffloadMask omask;
// Remember where PETSc currently considers the data valid, and
// temporarily mark the host side valid before reading the host array.
232 ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
233 if (omask != PETSC_OFFLOAD_BOTH)
235 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
238 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); PCHKERRQ(x,ierr);
239 #if defined(PETSC_HAVE_DEVICE)
// Detect CUDA Vec types to decide whether a device array also exists.
240 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
241 ""); PCHKERRQ(x,ierr);
244 if (omask != PETSC_OFFLOAD_BOTH)
246 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
// Wrap both host and device pointers; 'false' => pdata does not own them.
249 ierr = VecCUDAGetArrayRead(x,(
const PetscScalar**)&darray); PCHKERRQ(x,ierr);
250 pdata.Wrap(array,darray,size,MemoryType::HOST,
false);
251 ierr = VecCUDARestoreArrayRead(x,(
const PetscScalar**)&darray);
// Host-only path: wrap just the host pointer (non-owning).
257 pdata.Wrap(array,size,MemoryType::HOST,
false);
259 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); PCHKERRQ(x,ierr);
261 #if defined(PETSC_HAVE_DEVICE)
// Restore the Vec's original host/device validity mask.
262 ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
264 data.MakeAlias(pdata,0,size);
// Propagates the PETSc Vec's offload mask into the mfem memory validity
// flags of pdata (host valid / device valid), so MFEM's memory manager
// agrees with PETSc about where the up-to-date copy of the data lives.
// NOTE(review): the switch statement's header and break statements are
// missing from this sampled view; visible code kept verbatim.
268 void PetscParVector::SetFlagsFromMask_()
const
270 MFEM_VERIFY(x,
"Missing Vec");
271 #if defined(_USE_DEVICE)
272 PetscOffloadMask mask;
274 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
275 ""); PCHKERRQ(x,ierr);
276 ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
// CPU-only valid: host copy is current, device copy is stale.
281 case PETSC_OFFLOAD_CPU:
282 pdata.SetHostValid();
283 pdata.SetDeviceInvalid();
// GPU-only valid: device copy is current, host copy is stale.
285 case PETSC_OFFLOAD_GPU:
286 pdata.SetHostInvalid();
287 pdata.SetDeviceValid();
// Both copies are in sync.
289 case PETSC_OFFLOAD_BOTH:
290 pdata.SetHostValid();
291 pdata.SetDeviceValid();
294 MFEM_ABORT(
"Unhandled case " << mask);
// Inverse of SetFlagsFromMask_: pushes MFEM's host/device validity flags
// back into the PETSc Vec. Always increments the object's state counter
// so PETSc drops any cached norms/derived data. For CUDA Vec types the
// offload mask is set directly; otherwise the data is copied to the
// Vec's host array via a write-access getter.
// NOTE(review): fragment -- branch structure/braces are partly missing
// from this sampled view; visible code kept verbatim.
301 void PetscParVector::UpdateVecFromFlags()
303 MFEM_VERIFY(x,
"Missing Vec");
304 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
305 #if defined(_USE_DEVICE)
307 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
308 ""); PCHKERRQ(x,ierr);
// Translate MFEM's two validity bits into a PETSc offload mask.
311 bool dv = pdata.DeviceIsValid();
312 bool hv = pdata.HostIsValid();
313 PetscOffloadMask mask;
314 if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
315 else if (dv) { mask = PETSC_OFFLOAD_GPU; }
316 else { mask = PETSC_OFFLOAD_CPU; }
317 ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
// Non-CUDA path: copy the data into the Vec's host array.
324 ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
325 pdata.CopyToHost(v,size);
326 ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
// Chooses the PETSc Vec type to match MFEM's active device memory type:
// VECCUDA when MFEM uses DEVICE/MANAGED memory, VECSTANDARD otherwise
// (and unconditionally VECSTANDARD when built without device support).
// NOTE(review): switch braces/default label are missing from this
// sampled view; visible code kept verbatim.
330 void PetscParVector::SetVecType_()
333 MFEM_VERIFY(x,
"Missing Vec");
334 ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
335 #if defined(_USE_DEVICE)
336 switch (Device::GetDeviceMemoryType())
338 case MemoryType::DEVICE:
339 case MemoryType::MANAGED:
340 ierr = VecSetType(x,VECCUDA); PCHKERRQ(x,ierr);
343 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
// No-device build: plain MPI/seq vector.
349 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
357 MFEM_VERIFY(x,
"Missing Vec");
358 #if defined(PETSC_HAVE_DEVICE)
360 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
361 ""); PCHKERRQ(x,ierr);
362 if (on_dev && iscuda)
364 ierr = VecCUDAGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
365 ierr = VecCUDARestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
370 ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
371 ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
385 MFEM_VERIFY(x,
"Missing Vec");
386 #if defined(PETSC_HAVE_DEVICE)
388 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
389 ""); PCHKERRQ(x,ierr);
390 if (on_dev && iscuda)
392 ierr = VecCUDAGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
393 ierr = VecCUDARestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
398 ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
399 ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
401 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
414 MFEM_VERIFY(x,
"Missing Vec");
415 #if defined(PETSC_HAVE_DEVICE)
417 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
418 ""); PCHKERRQ(x,ierr);
419 if (on_dev && iscuda)
421 ierr = VecCUDAGetArray(x,&dummy); PCHKERRQ(x,ierr);
422 ierr = VecCUDARestoreArray(x,&dummy); PCHKERRQ(x,ierr);
427 ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
428 ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
430 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
// Requests device (dev=true) or host (dev=false) execution for this
// vector by binding/unbinding the PETSc Vec to the CPU; a no-op body
// when PETSc lacks device support.
440 void PetscParVector::UseDevice(
bool dev)
const
442 MFEM_VERIFY(x,
"Missing Vec");
443 #if defined(PETSC_HAVE_DEVICE)
// VecBindToCPU(x, PETSC_TRUE) forces host execution, hence the negation.
444 ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
// Returns true when the PETSc Vec is NOT bound to the CPU, i.e. device
// execution is allowed for this vector.
448 bool PetscParVector::UseDevice()
const
451 MFEM_VERIFY(x,
"Missing Vec");
452 ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
// flg==PETSC_TRUE means bound-to-CPU, so invert for "uses device".
453 return flg ?
false :
true;
459 ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
465 ierr = VecSetBlockSize(x,bs); PCHKERRQ(x,ierr);
468 PetscParVector::PetscParVector(MPI_Comm comm,
const Vector &x_,
472 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
473 ierr = VecSetSizes(
x,n,PETSC_DECIDE); PCHKERRQ(
x,ierr);
482 #if defined(PETSC_HAVE_DEVICE)
484 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
485 ""); PCHKERRQ(x,ierr);
489 ierr = VecCUDAGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
490 rest = VecCUDARestoreArrayWrite;
496 ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
497 rest = VecRestoreArrayWrite;
500 ierr = (*rest)(
x,&array); PCHKERRQ(x,ierr);
508 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
512 MPI_Comm_rank(comm, &myid);
513 ierr = VecSetSizes(
x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(
x,ierr);
517 ierr = VecSetSizes(
x,PETSC_DECIDE,glob_size); PCHKERRQ(
x,ierr);
526 ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
533 MFEM_VERIFY(col,
"Missing distribution");
535 MPI_Comm_rank(comm, &myid);
536 ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
537 &
x); CCHKERRQ(comm,ierr)
544 ierr = VecDuplicate(y.
x,&
x); PCHKERRQ(
x,ierr);
549 bool transpose,
bool allocate) :
Vector()
553 ierr = VecCreate(comm,&
x);
555 ierr = VecSetSizes(
x,loc,PETSC_DECIDE);
570 bool transpose,
bool allocate) :
Vector()
575 ierr = MatCreateVecs(pA,&
x,NULL); PCHKERRQ(pA,ierr);
579 ierr = MatCreateVecs(pA,NULL,&
x); PCHKERRQ(pA,ierr);
585 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
598 ierr = PetscObjectReference((
PetscObject)y); PCHKERRQ(y,ierr);
607 MPI_Comm comm = pfes->
GetComm();
608 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
610 PetscMPIInt myid = 0;
611 if (!HYPRE_AssumedPartitionCheck())
613 MPI_Comm_rank(comm,&myid);
615 ierr = VecSetSizes(
x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
633 ierr = VecScatterCreateToAll(
x,&scctx,&vout); PCHKERRQ(
x,ierr);
634 ierr = VecScatterBegin(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
636 ierr = VecScatterEnd(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
638 ierr = VecScatterDestroy(&scctx); PCHKERRQ(
x,ierr);
639 ierr = VecGetArrayRead(vout,&array); PCHKERRQ(
x,ierr);
640 ierr = VecGetLocalSize(vout,&size); PCHKERRQ(
x,ierr);
643 ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(
x,ierr);
644 ierr = VecDestroy(&vout); PCHKERRQ(
x,ierr);
653 ierr = VecSet(
x,d); PCHKERRQ(
x,ierr);
661 MFEM_VERIFY(idx.
Size() == vals.
Size(),
662 "Size mismatch between indices and values");
666 ierr = VecAssemblyBegin(
x); PCHKERRQ(
x,ierr);
667 ierr = VecAssemblyEnd(
x); PCHKERRQ(
x,ierr);
675 MFEM_VERIFY(idx.
Size() == vals.
Size(),
676 "Size mismatch between indices and values");
680 ierr = VecAssemblyBegin(
x); PCHKERRQ(
x,ierr);
681 ierr = VecAssemblyEnd(
x); PCHKERRQ(
x,ierr);
688 ierr = VecCopy(y.
x,
x); PCHKERRQ(
x,ierr);
695 ierr = VecAXPY(
x,1.0,y.
x); PCHKERRQ(
x,ierr);
702 ierr = VecAXPY(
x,-1.0,y.
x); PCHKERRQ(
x,ierr);
709 ierr = VecScale(
x,s); PCHKERRQ(
x,ierr);
716 ierr = VecShift(
x,s); PCHKERRQ(
x,ierr);
723 ierr = VecPlaceArray(
x,temp_data); PCHKERRQ(
x,ierr);
728 ierr = VecResetArray(
x); PCHKERRQ(
x,ierr);
735 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
737 "Memory size " << mem.
Capacity() <<
" < " << n <<
" vector size!");
738 MFEM_VERIFY(
pdata.
Empty(),
"Vector data is not empty");
739 MFEM_VERIFY(
data.
Empty(),
"Vector data is not empty");
740 #if defined(_USE_DEVICE)
742 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
743 ""); PCHKERRQ(x,ierr);
750 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
755 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
764 #if defined(PETSC_HAVE_DEVICE)
765 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
767 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
769 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
777 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
779 "Memory size " << mem.
Capacity() <<
" < " << n <<
" vector size!");
780 MFEM_VERIFY(
pdata.
Empty(),
"Vector data is not empty");
781 MFEM_VERIFY(
data.
Empty(),
"Vector data is not empty");
782 #if defined(_USE_DEVICE)
784 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
785 ""); PCHKERRQ(x,ierr);
791 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
796 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
805 #if defined(PETSC_HAVE_DEVICE)
806 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
808 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
811 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
812 ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
818 MFEM_VERIFY(!
pdata.
Empty(),
"Vector data is empty");
830 #if defined(PETSC_HAVE_DEVICE)
831 PetscOffloadMask mask;
832 ierr = VecGetOffloadMask(
x,&mask); PCHKERRQ(
x,ierr);
833 if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
834 (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
837 ierr = VecGetArrayRead(
x,&v); PCHKERRQ(
x,ierr);
839 ierr = VecRestoreArrayRead(
x,&v); PCHKERRQ(
x,ierr);
844 if (read && !write) { ierr = VecLockReadPop(
x); PCHKERRQ(
x,ierr); }
847 #if defined(PETSC_HAVE_DEVICE)
848 ierr = VecCUDAResetArray(
x); PCHKERRQ(
x,ierr);
850 MFEM_VERIFY(
false,
"This should not happen");
855 ierr = VecResetArray(
x); PCHKERRQ(
x,ierr);
861 PetscRandom rctx = NULL;
865 ierr = PetscRandomCreate(PetscObjectComm((
PetscObject)
x),&rctx);
867 ierr = PetscRandomSetSeed(rctx,(
unsigned long)seed); PCHKERRQ(x,ierr);
868 ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
870 ierr = VecSetRandom(
x,rctx); PCHKERRQ(
x,ierr);
871 ierr = PetscRandomDestroy(&rctx); PCHKERRQ(
x,ierr);
882 ierr = PetscViewerBinaryOpen(PetscObjectComm((
PetscObject)
x),fname,
883 FILE_MODE_WRITE,&view);
887 ierr = PetscViewerASCIIOpen(PetscObjectComm((
PetscObject)
x),fname,&view);
890 ierr = VecView(
x,view); PCHKERRQ(
x,ierr);
891 ierr = PetscViewerDestroy(&view); PCHKERRQ(
x,ierr);
895 ierr = VecView(
x,NULL); PCHKERRQ(
x,ierr);
904 ierr = MatGetOwnershipRange(
A,&N,NULL); PCHKERRQ(
A,ierr);
911 ierr = MatGetOwnershipRangeColumn(
A,&N,NULL); PCHKERRQ(
A,ierr);
918 ierr = MatGetLocalSize(
A,&N,NULL); PCHKERRQ(
A,ierr);
925 ierr = MatGetLocalSize(
A,NULL,&N); PCHKERRQ(
A,ierr);
932 ierr = MatGetSize(
A,&N,NULL); PCHKERRQ(
A,ierr);
939 ierr = MatGetSize(
A,NULL,&N); PCHKERRQ(
A,ierr);
946 ierr = MatGetInfo(
A,MAT_GLOBAL_SUM,&info); PCHKERRQ(
A,ierr);
952 if (cbs < 0) { cbs = rbs; }
953 ierr = MatSetBlockSizes(
A,rbs,cbs); PCHKERRQ(
A,ierr);
977 rows.
GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
979 cols.
GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
980 ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&
A); PCHKERRQ(B,ierr);
981 ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
982 ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
1026 BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1040 BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1053 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1054 if (
X) {
delete X; }
1055 if (
Y) {
delete Y; }
1060 #if defined(PETSC_HAVE_HYPRE)
1061 ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&
A);
1063 ierr = MatConvert_hypreParCSR_AIJ(B,&
A); CCHKERRQ(B.
GetComm(),ierr);
1074 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1075 if (
X) {
delete X; }
1076 if (
Y) {
delete Y; }
1081 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1089 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1093 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
1094 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
1095 ierr = MatAXPY(
A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),ierr);
1104 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1105 ierr = MatScale(
A,-1.0); PCHKERRQ(
A,ierr);
1109 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
1110 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
1111 ierr = MatAXPY(
A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),ierr);
1116 void PetscParMatrix::
1117 BlockDiagonalConstructor(MPI_Comm comm,
1122 PetscInt lrsize,lcsize,rstart,cstart;
1123 PetscMPIInt myid = 0,commsize;
1125 ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
1126 if (!HYPRE_AssumedPartitionCheck())
1128 ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
1130 lrsize = row_starts[myid+1]-row_starts[myid];
1131 rstart = row_starts[myid];
1132 lcsize = col_starts[myid+1]-col_starts[myid];
1133 cstart = col_starts[myid];
1138 ierr = ISCreateStride(comm,diag->
Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1139 ISLocalToGlobalMapping rl2g,cl2g;
1140 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1141 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1142 if (row_starts != col_starts)
1144 ierr = ISCreateStride(comm,diag->
Width(),cstart,1,&is);
1145 CCHKERRQ(comm,ierr);
1146 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1147 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1151 ierr = PetscObjectReference((
PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1156 ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1157 ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1159 ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1160 ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1161 ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1162 ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
1167 ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1168 const
int *II = diag->HostReadI();
1169 const
int *JJ = diag->HostReadJ();
1170 #if defined(PETSC_USE_64BIT_INDICES)
1173 ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1174 for (
int i = 0; i < m; i++) { pII[i] = II[i]; }
1175 for (
int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1176 ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1178 ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1180 ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1193 ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1194 ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1195 ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1196 if (
sizeof(
PetscInt) ==
sizeof(
int))
1199 CCHKERRQ(PETSC_COMM_SELF,ierr);
1201 CCHKERRQ(PETSC_COMM_SELF,ierr);
1207 for (
int i = 0; i < m; i++) { dii[i] = iii[i]; }
1208 for (
int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1211 CCHKERRQ(PETSC_COMM_SELF,ierr);
1212 ierr = PetscCalloc1(m,&oii);
1213 CCHKERRQ(PETSC_COMM_SELF,ierr);
1216 ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1218 dii,djj,da,oii,NULL,NULL,&A);
1219 CCHKERRQ(comm,ierr);
1223 ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1224 CCHKERRQ(comm,ierr);
1227 void *ptrs[4] = {dii,djj,da,oii};
1228 const char *names[4] = {
"_mfem_csr_dii",
1237 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1238 ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1239 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1240 CCHKERRQ(comm,ierr);
1242 CCHKERRQ(comm,ierr);
1243 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1248 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1249 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1256 return A ? PetscObjectComm((
PetscObject)A) : MPI_COMM_NULL;
1269 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1271 PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1272 ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1273 ierr = MatShellSetContext(*A,(
void *)op); PCHKERRQ(A,ierr);
1274 ierr = MatShellSetOperation(*A,MATOP_MULT,
1275 (
void (*)())__mfem_mat_shell_apply);
1277 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1278 (
void (*)())__mfem_mat_shell_apply_transpose);
1280 ierr = MatShellSetOperation(*A,MATOP_COPY,
1281 (
void (*)())__mfem_mat_shell_copy);
1283 ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1284 (
void (*)())__mfem_mat_shell_destroy);
1285 #if defined(_USE_DEVICE)
1289 ierr = MatShellSetVecType(*A,VECCUDA); PCHKERRQ(A,ierr);
1290 ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1294 ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1298 ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1323 PetscBool avoidmatconvert = PETSC_FALSE;
1326 ierr = PetscObjectTypeCompareAny((
PetscObject)(pA->
A),&avoidmatconvert,MATMFFD,
1328 CCHKERRQ(comm,ierr);
1330 if (pA && !avoidmatconvert)
1334 #if PETSC_VERSION_LT(3,10,0)
1338 #if PETSC_VERSION_LT(3,18,0)
1339 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATTRANSPOSEMAT,&istrans);
1341 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATTRANSPOSEVIRTUAL,
1354 #if PETSC_VERSION_LT(3,10,0)
1355 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATIS,&ismatis);
1361 ierr = MatTransposeGetMat(pA->
A,&At); CCHKERRQ(pA->
GetComm(),ierr);
1362 #if PETSC_VERSION_LT(3,10,0)
1363 ierr = PetscObjectTypeCompare((
PetscObject)(At),MATIS,&ismatis);
1371 #if PETSC_VERSION_LT(3,10,0)
1378 ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1379 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1380 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1384 ierr = MatISGetMPIXAIJ(pA->
A,MAT_INITIAL_MATRIX,A);
1385 PCHKERRQ(pA->
A,ierr);
1392 ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1398 ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1399 PCHKERRQ(pA->
A,ierr);
1400 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1401 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1405 ierr = MatConvert(pA->
A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1406 PCHKERRQ(pA->
A,ierr);
1415 ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1416 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1417 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1421 ierr = MatConvert(pA->
A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->
A,ierr);
1426 #if defined(PETSC_HAVE_HYPRE)
1430 ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1431 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1432 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1436 ierr = MatConvert(pA->
A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->
A,ierr);
1439 MFEM_ABORT(
"Reconfigure PETSc with --download-hypre or --with-hypre")
1448 MFEM_ABORT(
"Unsupported operator type conversion " << tid)
1455 #if defined(PETSC_HAVE_HYPRE)
1456 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1457 PETSC_USE_POINTER,A);
1459 ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1465 #if defined(PETSC_HAVE_HYPRE)
1466 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1467 PETSC_USE_POINTER,A);
1469 ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1475 #if defined(PETSC_HAVE_HYPRE)
1476 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1477 PETSC_USE_POINTER,A);
1480 MFEM_ABORT(
"Reconfigure PETSc with --download-hypre or --with-hypre")
1489 MFEM_ABORT(
"Conversion from HypreParCSR to operator type = " << tid <<
1490 " is not implemented");
1495 Mat *mats,*matsl2l = NULL;
1500 ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1503 ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1505 for (i=0; i<nr; i++)
1507 PetscBool needl2l = PETSC_TRUE;
1509 for (j=0; j<nc; j++)
1517 ierr = PetscObjectQuery((
PetscObject)mats[i*nc+j],
"_MatIS_PtAP_l2l",
1519 PCHKERRQ(mats[i*nc+j],ierr);
1527 ierr = PetscContainerGetPointer(c,(
void**)&l2l);
1529 MFEM_VERIFY(l2l->
Size() == 1,
"Unexpected size "
1530 << l2l->
Size() <<
" for block row " << i );
1531 ierr = PetscObjectReference((
PetscObject)(*l2l)[0]);
1533 matsl2l[i] = (*l2l)[0];
1534 needl2l = PETSC_FALSE;
1540 ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1543 ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1546 for (
int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1547 ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1550 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1551 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1552 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1555 PCHKERRQ((*A),ierr);
1556 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1558 for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1559 ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1565 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1566 ierr = MatSetSizes(*A,pI->
Height(),pI->
Width(),PETSC_DECIDE,PETSC_DECIDE);
1568 ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1569 ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1570 ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1571 ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1572 ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1575 ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1577 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1578 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1595 int n = pS->
Width();
1600 ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1601 ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1602 ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1604 for (
int i = 0; i < m; i++)
1606 bool issorted =
true;
1608 for (
int j = ii[i]; j < ii[i+1]; j++)
1611 if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1616 ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1617 CCHKERRQ(PETSC_COMM_SELF,ierr);
1621 ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1624 ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1625 CCHKERRQ(comm,ierr);
1630 ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1631 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1633 pii,pjj,pdata,oii,NULL,NULL,&B);
1634 CCHKERRQ(comm,ierr);
1636 void *ptrs[4] = {pii,pjj,pdata,oii};
1637 const char *names[4] = {
"_mfem_csr_pii",
1642 for (
int i=0; i<4; i++)
1646 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1647 ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1648 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1652 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1660 ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1661 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1665 ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1666 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1670 MFEM_ABORT(
"Unsupported operator type conversion " << tid)
1677 "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1684 ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1685 ierr = PetscObjectTypeCompare((
PetscObject)B,MATMPIAIJ,&isaij);
1686 CCHKERRQ(comm,ierr);
1687 ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1690 ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1691 ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1706 MPI_Comm comm = MPI_COMM_NULL;
1707 ierr = PetscObjectGetComm((
PetscObject)A,&comm); PCHKERRQ(A,ierr);
1708 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1719 ierr = PetscObjectReference((
PetscObject)a); PCHKERRQ(a,ierr);
1729 if (A_ == A) {
return; }
1731 ierr = PetscObjectReference((
PetscObject)A_); PCHKERRQ(A_,ierr);
// Converts the wrapped Mat (and, for MATIS/MATNEST, its local/nested
// sub-matrices) to the CUSPARSE AIJ types so matrix-vector products run
// on the device, and requests an explicit transpose for efficiency.
// Compiled to an early-out when _USE_DEVICE is not defined.
// NOTE(review): fragment of a sampled file -- loop headers, braces and
// several declarations (tA, sA, n, m, sub) are not visible here; visible
// code kept verbatim.
1737 void PetscParMatrix::SetUpForDevice()
1739 #if !defined(_USE_DEVICE)
1744 PetscBool ismatis,isnest,isseqaij,ismpiaij;
1745 ierr = PetscObjectTypeCompare((
PetscObject)A,MATIS,&ismatis);
1747 ierr = PetscObjectTypeCompare((
PetscObject)A,MATNEST,&isnest);
// For MATIS, operate on the local matrix (which may itself be MATNEST).
1752 ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1753 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATNEST,&isnest);
// MATNEST: convert every AIJ sub-block to its CUSPARSE counterpart.
1760 ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1770 ierr = PetscObjectTypeCompare((
PetscObject)sA,MATSEQAIJ,&isseqaij);
1772 ierr = PetscObjectTypeCompare((
PetscObject)sA,MATMPIAIJ,&ismpiaij);
1776 ierr = MatSetType(sA,MATSEQAIJCUSPARSE); PCHKERRQ(sA,ierr);
1782 ierr = MatSetType(sA,MATMPIAIJCUSPARSE); PCHKERRQ(sA,ierr);
// Form the transpose explicitly so MatMultTranspose is fast on device.
1788 ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1789 PETSC_TRUE); PCHKERRQ(sA,ierr);
1796 ierr = MatSetVecType(tA,VECCUDA); PCHKERRQ(tA,ierr);
// Non-nested case: convert the matrix itself.
1802 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATSEQAIJ,&isseqaij);
1804 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATMPIAIJ,&ismpiaij);
1808 ierr = MatSetType(tA,MATSEQAIJCUSPARSE); PCHKERRQ(tA,ierr);
1813 ierr = MatSetType(tA,MATMPIAIJCUSPARSE); PCHKERRQ(tA,ierr);
1818 ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1819 PETSC_TRUE); PCHKERRQ(tA,ierr);
1834 f = MatMultTranspose;
1835 fadd = MatMultTransposeAdd;
1846 ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1847 ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1848 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1852 ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1853 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1864 ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1868 ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1875 ierr = PetscObjectReference((
PetscObject)master.
A); PCHKERRQ(master.
A,ierr);
1887 MFEM_VERIFY(A,
"Mat not present");
1897 MFEM_VERIFY(A,
"Mat not present");
1908 ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1912 ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1919 ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1924 MFEM_ASSERT(x.
Size() ==
Width(),
"invalid x.Size() = " << x.
Size()
1925 <<
", expected size = " <<
Width());
1926 MFEM_ASSERT(y.
Size() ==
Height(),
"invalid y.Size() = " << y.
Size()
1927 <<
", expected size = " <<
Height());
1931 bool rw = (b != 0.0);
1934 MatMultKernel(A,a,XX->x,b,YY->
x,
false);
1942 MFEM_ASSERT(x.
Size() ==
Height(),
"invalid x.Size() = " << x.
Size()
1943 <<
", expected size = " <<
Height());
1944 MFEM_ASSERT(y.
Size() ==
Width(),
"invalid y.Size() = " << y.
Size()
1945 <<
", expected size = " <<
Width());
1949 bool rw = (b != 0.0);
1952 MatMultKernel(A,a,YY->
x,b,XX->x,
true);
1965 ierr = PetscViewerBinaryOpen(PetscObjectComm((
PetscObject)A),fname,
1966 FILE_MODE_WRITE,&view);
1970 ierr = PetscViewerASCIIOpen(PetscObjectComm((
PetscObject)A),fname,&view);
1973 ierr = MatView(A,view); PCHKERRQ(A,ierr);
1974 ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
1978 ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1984 MFEM_ASSERT(s.
Size() ==
Height(),
"invalid s.Size() = " << s.
Size()
1985 <<
", expected size = " <<
Height());
1989 ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
1995 MFEM_ASSERT(s.
Size() ==
Width(),
"invalid s.Size() = " << s.
Size()
1996 <<
", expected size = " <<
Width());
2000 ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
2006 ierr = MatShift(A,(
PetscScalar)s); PCHKERRQ(A,ierr);
2012 MFEM_ASSERT(s.
Size() ==
Height(),
"invalid s.Size() = " << s.
Size()
2013 <<
", expected size = " <<
Height());
2014 MFEM_ASSERT(s.
Size() ==
Width(),
"invalid s.Size() = " << s.
Size()
2015 <<
", expected size = " <<
Width());
2019 ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2027 "Petsc TripleMatrixProduct: Number of local cols of A " << A->
Width() <<
2028 " differs from number of local rows of P " << P->
Height());
2030 "Petsc TripleMatrixProduct: Number of local rows of A " << A->
Height() <<
2031 " differs from number of local cols of R " << R->
Width());
2033 ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2040 Mat pA = *A,pP = *P,pRt = *Rt;
2042 PetscBool Aismatis,Pismatis,Rtismatis;
2045 "Petsc RAP: Number of local cols of A " << A->
Width() <<
2046 " differs from number of local rows of P " << P->
Height());
2048 "Petsc RAP: Number of local rows of A " << A->
Height() <<
2049 " differs from number of local rows of Rt " << Rt->
Height());
2050 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&Aismatis);
2052 ierr = PetscObjectTypeCompare((
PetscObject)pP,MATIS,&Pismatis);
2054 ierr = PetscObjectTypeCompare((
PetscObject)pRt,MATIS,&Rtismatis);
2061 ISLocalToGlobalMapping cl2gP,cl2gRt;
2062 PetscInt rlsize,clsize,rsize,csize;
2064 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2065 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2066 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2067 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2068 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2069 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2070 ierr = MatCreate(A->
GetComm(),&B); PCHKERRQ(pA,ierr);
2071 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2072 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2073 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2074 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2075 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2076 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2079 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2085 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2086 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2088 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2096 ierr = PetscObjectReference((
PetscObject)lRt); PCHKERRQ(lRt,ierr);
2097 (*vmatsl2l)[0] = lRt;
2100 ierr = PetscContainerCreate(PetscObjectComm((
PetscObject)B),&c);
2102 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2103 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2107 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2111 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2112 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2113 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2114 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2120 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2126 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2127 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2129 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2144 #if defined(PETSC_HAVE_HYPRE)
2159 ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2169 ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2171 ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2180 MFEM_ABORT(
"Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2189 ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2190 MFEM_VERIFY(M == N,
"Rectangular case unsupported");
2193 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2197 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2200 ierr = Convert_Array_IS(
GetComm(),
true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2203 ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2207 ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2209 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2214 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2218 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2221 ierr = Convert_Array_IS(
GetComm(),
true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2222 ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2223 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2233 ierr = PetscObjectDereference((
PetscObject)A); CCHKERRQ(comm,ierr);
2242 MFEM_VERIFY(A,
"no associated PETSc Mat object");
2245 ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
2247 ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2249 ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2251 ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2253 ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2255 ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2257 #if defined(PETSC_HAVE_HYPRE)
2258 ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2272 Ae.
Mult(-1.0, X, 1.0, B);
2275 ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2276 ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2277 for (
int i = 0; i < ess_dof_list.
Size(); i++)
2279 int r = ess_dof_list[i];
2280 B(r) = array[r] * X(r);
2282 ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2311 if (
cid == KSP_CLASSID)
2314 ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2316 else if (
cid == SNES_CLASSID)
2319 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2322 else if (
cid == TS_CLASSID)
2325 ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2329 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2336 if (
cid == KSP_CLASSID)
2339 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2341 else if (
cid == SNES_CLASSID)
2344 ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2347 else if (
cid == TS_CLASSID)
2350 ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2354 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2361 if (
cid == KSP_CLASSID)
2364 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2367 else if (
cid == SNES_CLASSID)
2370 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2371 max_iter,PETSC_DEFAULT);
2373 else if (
cid == TS_CLASSID)
2376 ierr = TSSetMaxSteps(ts,max_iter);
2380 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2388 typedef PetscErrorCode (*myPetscFunc)(
void**);
2389 PetscViewerAndFormat *vf = NULL;
2390 PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(
obj));
2394 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2397 if (
cid == KSP_CLASSID)
2405 ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2409 #if PETSC_VERSION_LT(3,15,0)
2410 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2412 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2414 (myPetscFunc)PetscViewerAndFormatDestroy);
2419 ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2420 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2421 (myPetscFunc)PetscViewerAndFormatDestroy);
2425 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2426 PCHKERRQ(viewer,ierr);
2427 #if PETSC_VERSION_LT(3,15,0)
2428 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2430 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2432 (myPetscFunc)PetscViewerAndFormatDestroy);
2437 else if (
cid == SNES_CLASSID)
2443 ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2447 ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2448 (myPetscFunc)PetscViewerAndFormatDestroy);
2449 PCHKERRQ(snes,ierr);
2452 else if (
cid == TS_CLASSID)
2457 ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2462 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2468 return obj ? PetscObjectComm(
obj) : MPI_COMM_NULL;
2473 __mfem_monitor_ctx *monctx;
2474 ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2475 monctx->solver =
this;
2476 monctx->monitor =
ctx;
2477 if (
cid == KSP_CLASSID)
2479 ierr = KSPMonitorSet((
KSP)
obj,__mfem_ksp_monitor,monctx,
2480 __mfem_monitor_ctx_destroy);
2483 else if (
cid == SNES_CLASSID)
2485 ierr = SNESMonitorSet((
SNES)
obj,__mfem_snes_monitor,monctx,
2486 __mfem_monitor_ctx_destroy);
2489 else if (
cid == TS_CLASSID)
2491 ierr = TSMonitorSet((
TS)
obj,__mfem_ts_monitor,monctx,
2492 __mfem_monitor_ctx_destroy);
2497 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2504 if (
cid == SNES_CLASSID)
2506 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)
private_ctx;
2509 else if (
cid == TS_CLASSID)
2511 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)
private_ctx;
2516 MFEM_ABORT(
"Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2523 if (
cid == TS_CLASSID)
2528 ierr = TSGetSNES((
TS)
obj,&snes); PCHKERRQ(obj,ierr);
2529 ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2530 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2532 else if (
cid == SNES_CLASSID)
2536 ierr = SNESGetKSP((
SNES)
obj,&ksp); PCHKERRQ(obj,ierr);
2537 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2539 else if (
cid == KSP_CLASSID)
2541 ierr = KSPGetPC((
KSP)
obj,&pc); PCHKERRQ(obj,ierr);
2543 else if (
cid == PC_CLASSID)
2549 MFEM_ABORT(
"No support for PetscPreconditionerFactory for this object");
2553 ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2557 ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2563 if (!customize) {
clcustom =
true; }
2566 if (
cid == PC_CLASSID)
2569 ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2571 else if (
cid == KSP_CLASSID)
2574 ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2576 else if (
cid == SNES_CLASSID)
2579 ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2581 else if (
cid == TS_CLASSID)
2584 ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2588 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2596 if (
cid == KSP_CLASSID)
2599 KSPConvergedReason reason;
2600 ierr = KSPGetConvergedReason(ksp,&reason);
2602 return reason > 0 ? 1 : 0;
2604 else if (
cid == SNES_CLASSID)
2607 SNESConvergedReason reason;
2608 ierr = SNESGetConvergedReason(snes,&reason);
2609 PCHKERRQ(snes,ierr);
2610 return reason > 0 ? 1 : 0;
2612 else if (
cid == TS_CLASSID)
2615 TSConvergedReason reason;
2616 ierr = TSGetConvergedReason(ts,&reason);
2618 return reason > 0 ? 1 : 0;
2622 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2629 if (
cid == KSP_CLASSID)
2633 ierr = KSPGetIterationNumber(ksp,&its);
2637 else if (
cid == SNES_CLASSID)
2641 ierr = SNESGetIterationNumber(snes,&its);
2642 PCHKERRQ(snes,ierr);
2645 else if (
cid == TS_CLASSID)
2649 ierr = TSGetStepNumber(ts,&its);
2655 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2662 if (
cid == KSP_CLASSID)
2666 ierr = KSPGetResidualNorm(ksp,&norm);
2670 if (
cid == SNES_CLASSID)
2674 ierr = SNESGetFunctionNorm(snes,&norm);
2675 PCHKERRQ(snes,ierr);
2680 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2681 return PETSC_MAX_REAL;
2688 if (
cid == SNES_CLASSID)
2690 __mfem_snes_ctx *snes_ctx;
2691 ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2692 snes_ctx->op = NULL;
2693 snes_ctx->bchandler = NULL;
2694 snes_ctx->work = NULL;
2698 else if (
cid == TS_CLASSID)
2700 __mfem_ts_ctx *ts_ctx;
2701 ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2703 ts_ctx->bchandler = NULL;
2704 ts_ctx->work = NULL;
2705 ts_ctx->work2 = NULL;
2706 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2707 ts_ctx->cached_ijacstate = -1;
2708 ts_ctx->cached_rhsjacstate = -1;
2709 ts_ctx->cached_splits_xstate = -1;
2710 ts_ctx->cached_splits_xdotstate = -1;
2721 if (
cid == SNES_CLASSID)
2723 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)
private_ctx;
2724 delete snes_ctx->work;
2726 else if (
cid == TS_CLASSID)
2728 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)
private_ctx;
2729 delete ts_ctx->work;
2730 delete ts_ctx->work2;
2732 ierr = PetscFree(
private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2739 : bctype(type_), setup(false), eval_t(0.0),
2740 eval_t_cached(std::numeric_limits<double>::min())
2748 ess_tdof_list.
Assign(list);
2754 if (setup) {
return; }
2758 this->
Eval(eval_t,eval_g);
2759 eval_t_cached = eval_t;
2770 (*this).SetUp(x.
Size());
2774 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2776 y[ess_tdof_list[i]] = 0.0;
2781 if (bctype !=
CONSTANT && eval_t != eval_t_cached)
2783 Eval(eval_t,eval_g);
2784 eval_t_cached = eval_t;
2786 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2788 y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2795 (*this).SetUp(x.
Size());
2798 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2800 x[ess_tdof_list[i]] = 0.0;
2805 if (bctype !=
CONSTANT && eval_t != eval_t_cached)
2807 Eval(eval_t,eval_g);
2808 eval_t_cached = eval_t;
2810 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2812 x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2819 (*this).SetUp(x.
Size());
2822 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2824 y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2829 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2831 y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2838 (*this).SetUp(x.
Size());
2839 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2841 x[ess_tdof_list[i]] = 0.0;
2847 (*this).SetUp(x.
Size());
2849 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2851 y[ess_tdof_list[i]] = 0.0;
2858 bool wrapin,
bool iter_mode)
2862 ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2864 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
2865 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2866 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
2867 PCHKERRQ(ksp, ierr);
2871 const std::string &prefix,
bool iter_mode)
2877 ierr = PetscObjectGetClassId(obj,&
cid); PCHKERRQ(obj,ierr);
2878 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2879 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
2880 PCHKERRQ(ksp, ierr);
2885 const std::string &prefix,
bool iter_mode)
2891 ierr = PetscObjectGetClassId(obj, &
cid); PCHKERRQ(obj, ierr);
2892 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2893 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
2894 PCHKERRQ(ksp, ierr);
2906 bool delete_pA =
false;
2924 MFEM_VERIFY(pA,
"Unsupported operation!");
2932 PetscInt nheight,nwidth,oheight,owidth;
2934 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2935 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2936 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2937 if (nheight != oheight || nwidth != owidth)
2941 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2947 ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2956 if (delete_pA) {
delete pA; }
2971 bool delete_pA =
false;
2989 MFEM_VERIFY(pA,
"Unsupported operation!");
2992 bool delete_ppA =
false;
2995 if (oA == poA && !wrap)
3005 MFEM_VERIFY(ppA,
"Unsupported operation!");
3014 PetscInt nheight,nwidth,oheight,owidth;
3016 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3017 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3018 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3019 if (nheight != oheight || nwidth != owidth)
3023 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3030 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3039 if (delete_pA) {
delete pA; }
3040 if (delete_ppA) {
delete ppA; }
3050 ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3053 ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3054 ierr = PetscObjectReference((
PetscObject)A); PCHKERRQ(ksp,ierr);
3059 ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3068 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3069 ierr = MakeShellPC(pc,precond,
false); PCHKERRQ(ksp,ierr);
3075 ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3076 ierr = PetscObjectReference((
PetscObject)P); PCHKERRQ(ksp,ierr);
3077 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3078 ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3079 ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3083 void PetscLinearSolver::MultKernel(
const Vector &b,
Vector &x,
bool trans)
const
3090 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(
obj, ierr);
3098 PetscParMatrix A = PetscParMatrix(pA,
true);
3099 X =
new PetscParVector(A,
false,
false);
3107 ierr = KSPGetInitialGuessNonzero(ksp, &flg);
3113 ierr = KSPSolveTranspose(ksp,
B->
x, X->x); PCHKERRQ(ksp,ierr);
3117 ierr = KSPSolve(ksp,
B->
x, X->x); PCHKERRQ(ksp,ierr);
3125 (*this).MultKernel(b,x,
false);
3130 (*this).MultKernel(b,x,
true);
3137 ierr = PetscObjectGetComm((
PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3138 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3148 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3150 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3158 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3160 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3164 const std::string &prefix,
bool iter_mode)
3168 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3170 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3176 const std::string &prefix)
3180 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3182 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3183 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3187 const string &prefix)
3193 ierr = PetscObjectGetClassId(obj,&
cid); PCHKERRQ(obj,ierr);
3194 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3199 const string &prefix)
3203 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3205 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3206 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3212 bool delete_pA =
false;
3229 PetscInt nheight,nwidth,oheight,owidth;
3231 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3232 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3233 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3234 if (nheight != oheight || nwidth != owidth)
3238 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3244 ierr = PCSetOperators(pc,pA->
A,pA->
A); PCHKERRQ(
obj,ierr);
3253 if (delete_pA) {
delete pA; };
3256 void PetscPreconditioner::MultKernel(
const Vector &b,
Vector &x,
3260 "Iterative mode not supported for PetscPreconditioner");
3266 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(
obj, ierr);
3274 PetscParMatrix A(pA,
true);
3275 X =
new PetscParVector(A,
false,
false);
3286 ierr = PCApplyTranspose(pc,
B->
x, X->x); PCHKERRQ(pc, ierr);
3290 ierr = PCApply(pc,
B->
x, X->x); PCHKERRQ(pc, ierr);
3298 (*this).MultKernel(b,x,
false);
3303 (*this).MultKernel(b,x,
true);
3310 ierr = PetscObjectGetComm((
PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3311 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3322 void PetscBDDCSolver::BDDCSolverConstructor(
const PetscBDDCSolverParams &opts)
3324 MPI_Comm comm = PetscObjectComm(
obj);
3329 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3333 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&ismatis);
3335 MFEM_VERIFY(ismatis,
"PetscBDDCSolver needs the matrix in unassembled format");
3338 ParFiniteElementSpace *fespace = opts.fespace;
3339 if (opts.netflux && !fespace)
3341 MFEM_WARNING(
"Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3348 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3349 ierr = MatNullSpaceCreate(PetscObjectComm((
PetscObject)lA),PETSC_TRUE,0,NULL,
3350 &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3351 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3352 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3356 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(
obj,ierr);
3359 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3360 ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3369 int vdim = fespace->GetVDim();
3376 ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3377 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3378 ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3387 ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3400 ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3406 const FiniteElementCollection *fec = fespace->FEColl();
3407 bool h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3410 ParFiniteElementSpace *fespace_coords = fespace;
3412 sdim = fespace->GetParMesh()->SpaceDimension();
3415 fespace_coords =
new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3418 VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3419 ParGridFunction gf_coords(fespace_coords);
3420 gf_coords.ProjectCoefficient(coeff_coords);
3421 HypreParVector *hvec_coords = gf_coords.ParallelProject();
3423 hvec_coords->Size(),
false);
3431 Vec pvec_coords,lvec_coords;
3432 ISLocalToGlobalMapping l2g;
3438 ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3439 hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3440 CCHKERRQ(comm,ierr);
3441 ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3444 ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3445 CCHKERRQ(comm,ierr);
3446 ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3447 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3449 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3450 ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3451 ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3452 ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3453 ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3454 ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3455 ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3456 ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3457 ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3458 CCHKERRQ(comm,ierr);
3459 ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3464 ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3465 ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3466 #if PETSC_VERSION_LT(3,15,0)
3467 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3468 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3470 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3471 CCHKERRQ(comm,ierr);
3472 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3473 CCHKERRQ(comm,ierr);
3475 ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3476 ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3478 ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3479 ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3480 CCHKERRQ(PETSC_COMM_SELF,ierr);
3481 ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3482 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3483 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3484 ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3488 ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3497 ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3498 ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3502 for (
PetscInt d = 0; d < sdim; d++)
3504 coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3507 ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3512 for (
PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3514 if (fespace_coords != fespace)
3516 delete fespace_coords;
3523 IS dir = NULL, neu = NULL;
3526 Array<Mat> *l2l = NULL;
3527 if (opts.ess_dof_local || opts.nat_dof_local)
3532 MFEM_VERIFY(c,
"Local-to-local PETSc container not present");
3533 ierr = PetscContainerGetPointer(c,(
void**)&l2l); PCHKERRQ(c,ierr);
3540 PetscBool lpr = PETSC_FALSE,pr;
3541 if (opts.ess_dof) { lpr = PETSC_TRUE; }
3542 ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3544 MFEM_VERIFY(lpr == pr,
"ess_dof should be collectively set");
3546 if (opts.nat_dof) { lpr = PETSC_TRUE; }
3547 ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3549 MFEM_VERIFY(lpr == pr,
"nat_dof should be collectively set");
3552 ms[0] = -nf; ms[1] = nf;
3553 ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3555 MFEM_VERIFY(-Ms[0] == Ms[1],
3556 "number of fields should be the same across processes");
3563 PetscInt st = opts.ess_dof_local ? 0 : rst;
3564 if (!opts.ess_dof_local)
3567 ierr = Convert_Array_IS(comm,
true,opts.ess_dof,st,&dir);
3568 CCHKERRQ(comm,ierr);
3569 ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3574 ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3575 CCHKERRQ(comm,ierr);
3576 ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3581 PetscInt st = opts.nat_dof_local ? 0 : rst;
3582 if (!opts.nat_dof_local)
3585 ierr = Convert_Array_IS(comm,
true,opts.nat_dof,st,&neu);
3586 CCHKERRQ(comm,ierr);
3587 ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3592 ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3593 CCHKERRQ(comm,ierr);
3594 ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3601 ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3605 ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3607 ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3612 ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3614 ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3623 const FiniteElementCollection *fec = fespace->FEColl();
3624 bool edgespace, rtspace, h1space;
3625 bool needint = opts.netflux;
3626 bool tracespace, rt_tracespace, edge_tracespace;
3628 PetscBool B_is_Trans = PETSC_FALSE;
3630 ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3631 dim = pmesh->Dimension();
3632 vdim = fespace->GetVDim();
3633 h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3634 rtspace =
dynamic_cast<const RT_FECollection*
>(fec);
3635 edgespace =
dynamic_cast<const ND_FECollection*
>(fec);
3636 edge_tracespace =
dynamic_cast<const ND_Trace_FECollection*
>(fec);
3637 rt_tracespace =
dynamic_cast<const RT_Trace_FECollection*
>(fec);
3638 tracespace = edge_tracespace || rt_tracespace;
3641 if (fespace->GetNE() > 0)
3645 p = fespace->GetElementOrder(0);
3649 p = fespace->GetFaceOrder(0);
3650 if (dim == 2) { p++; }
3661 MFEM_WARNING(
"Tracespace case doesn't work for H(curl) and p=2,"
3662 " not using auxiliary quadrature");
3668 FiniteElementCollection *vfec;
3671 vfec =
new H1_Trace_FECollection(p,dim);
3675 vfec =
new H1_FECollection(p,dim);
3677 ParFiniteElementSpace *vfespace =
new ParFiniteElementSpace(pmesh,vfec);
3678 ParDiscreteLinearOperator *grad;
3679 grad =
new ParDiscreteLinearOperator(vfespace,fespace);
3682 grad->AddTraceFaceInterpolator(
new GradientInterpolator);
3686 grad->AddDomainInterpolator(
new GradientInterpolator);
3690 HypreParMatrix *hG = grad->ParallelAssemble();
3691 PetscParMatrix *G =
new PetscParMatrix(hG,
PETSC_MATAIJ);
3695 PetscBool conforming = PETSC_TRUE;
3696 if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3697 ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3709 MFEM_WARNING(
"Tracespace case doesn't work for H(div), not using"
3710 " auxiliary quadrature");
3716 if (vdim != dim) { needint =
false; }
3719 PetscParMatrix *
B = NULL;
3725 FiniteElementCollection *auxcoll;
3726 if (tracespace) { auxcoll =
new RT_Trace_FECollection(p,dim); }
3731 auxcoll =
new H1_FECollection(std::max(p-1,1),dim);
3735 auxcoll =
new L2_FECollection(p,dim);
3738 ParFiniteElementSpace *pspace =
new ParFiniteElementSpace(pmesh,auxcoll);
3739 ParMixedBilinearForm *b =
new ParMixedBilinearForm(fespace,pspace);
3745 b->AddTraceFaceIntegrator(
new VectorFECurlIntegrator);
3749 b->AddDomainIntegrator(
new VectorFECurlIntegrator);
3756 b->AddTraceFaceIntegrator(
new VectorFEDivergenceIntegrator);
3760 b->AddDomainIntegrator(
new VectorFEDivergenceIntegrator);
3765 b->AddDomainIntegrator(
new VectorDivergenceIntegrator);
3770 b->ParallelAssemble(Bh);
3772 Bh.SetOperatorOwner(
false);
3777 ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3778 if (!opts.ess_dof_local)
3780 ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3784 ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3786 B_is_Trans = PETSC_TRUE;
3795 ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3799 ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3800 ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3805 const std::string &prefix)
3808 BDDCSolverConstructor(opts);
3814 const std::string &prefix)
3817 BDDCSolverConstructor(opts);
3822 const string &prefix)
3826 ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3829 ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3833 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATNEST,&isnest);
3839 ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3840 ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3841 ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3851 ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3853 ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3858 const std::string &prefix)
3862 MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
3863 MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE);
3865 H2SolverConstructor(fes);
3871 #if defined(PETSC_HAVE_H2OPUS)
3883 VectorFunctionCoefficient ccoords(sdim, func_coords);
3885 ParGridFunction coords(fes);
3886 coords.ProjectCoefficient(ccoords);
3888 coords.ParallelProject(c);
3890 PCSetType(*
this,PCH2OPUS);
3893 PCSetFromOptions(*
this);
3895 MFEM_ABORT(
"Need PETSc configured with --download-h2opus");
3902 const std::string &prefix)
3907 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3909 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
3910 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3917 const std::string &prefix)
3922 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3924 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
3925 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3937 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj, ierr);
3938 ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3950 ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3951 PCHKERRQ(snes, ierr);
3952 ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3953 PCHKERRQ(snes, ierr);
3956 (
void*)&op == fctx &&
3957 (
void*)&op == jctx);
3958 ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3960 PCHKERRQ(snes,ierr);
3963 ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3974 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3975 ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3984 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
3986 ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (
void *)snes_ctx);
3987 PCHKERRQ(snes, ierr);
3988 ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
3990 PCHKERRQ(snes, ierr);
3992 ierr = MatDestroy(&dummy);
3993 PCHKERRQ(snes, ierr);
4005 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4006 snes_ctx->jacType = jacType;
4012 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4013 snes_ctx->objective = objfn;
4016 ierr = SNESSetObjective(snes, __mfem_snes_objective, (
void *)snes_ctx);
4017 PCHKERRQ(snes, ierr);
4024 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4025 snes_ctx->postcheck = post;
4029 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4030 ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (
void *)snes_ctx);
4040 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4041 snes_ctx->update = update;
4044 ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4051 bool b_nonempty = b.
Size();
4055 if (b_nonempty) { B->PlaceMemory(b.
GetMemory()); }
4063 ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4065 if (b_nonempty) { B->ResetMemory(); }
4075 ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4077 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
4078 ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4084 ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4086 ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4089 ierr = TSGetAdapt(ts,&tsad);
4091 ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4099 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj,ierr);
4100 ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4108 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4111 ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4114 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4115 ts_ctx->cached_ijacstate = -1;
4116 ts_ctx->cached_rhsjacstate = -1;
4117 ts_ctx->cached_splits_xstate = -1;
4118 ts_ctx->cached_splits_xdotstate = -1;
4130 ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (
void *)ts_ctx);
4132 ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (
void *)ts_ctx);
4134 ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4136 ierr = MatDestroy(&dummy);
4144 ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4153 ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (
void *)ts_ctx);
4155 ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4158 ierr = MatDestroy(&dummy);
4167 ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4170 PetscBool use = PETSC_TRUE;
4171 ierr = PetscOptionsGetBool(NULL,NULL,
"-mfem_use_splitjac",&use,NULL);
4174 ierr = PetscObjectComposeFunction((
PetscObject)ts,
"TSComputeSplitJacobians_C",
4175 __mfem_ts_computesplits);
4180 ierr = PetscObjectComposeFunction((
PetscObject)ts,
"TSComputeSplitJacobians_C",
4188 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4189 ts_ctx->jacType = jacType;
4194 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4195 return ts_ctx->type;
4200 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4203 ts_ctx->type = type;
4206 ierr = TSSetProblemType(ts, TS_LINEAR);
4211 ierr = TSSetProblemType(ts, TS_NONLINEAR);
4220 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4221 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4224 ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4234 ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4238 ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4239 ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4244 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4249 ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4258 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4259 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4260 ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4261 ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4273 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4274 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4275 ts_ctx->cached_ijacstate = -1;
4276 ts_ctx->cached_rhsjacstate = -1;
4277 ts_ctx->cached_splits_xstate = -1;
4278 ts_ctx->cached_splits_xdotstate = -1;
4281 ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4286 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4288 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4294 #include "petsc/private/petscimpl.h"
4295 #include "petsc/private/matimpl.h"
4301 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4303 PetscFunctionBeginUser;
4306 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4318 PetscFunctionReturn(0);
4321 static PetscErrorCode __mfem_ts_ifunction(
TS ts,
PetscReal t, Vec x, Vec xp,
4324 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4326 PetscFunctionBeginUser;
4334 if (ts_ctx->bchandler)
4339 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(xx.Size()); }
4340 if (!ts_ctx->work2) { ts_ctx->work2 =
new mfem::Vector(xx.Size()); }
4346 bchandler->
ZeroBC(yy,*txp);
4356 ff.UpdateVecFromFlags();
4357 PetscFunctionReturn(0);
4360 static PetscErrorCode __mfem_ts_rhsfunction(
TS ts,
PetscReal t, Vec x, Vec f,
4363 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4365 PetscFunctionBeginUser;
4366 if (ts_ctx->bchandler) { MFEM_ABORT(
"RHS evaluation with bc not implemented"); }
4375 ff.UpdateVecFromFlags();
4376 PetscFunctionReturn(0);
4379 static PetscErrorCode __mfem_ts_ijacobian(
TS ts,
PetscReal t, Vec x,
4383 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4388 PetscObjectState state;
4389 PetscErrorCode ierr;
4391 PetscFunctionBeginUser;
4395 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4396 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4402 ierr = PetscObjectStateGet((
PetscObject)P,&state); CHKERRQ(ierr);
4404 std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4405 state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
4412 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4413 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4415 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4416 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4417 if (!ts_ctx->bchandler)
4424 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4431 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4435 if (!ts_ctx->bchandler) {
delete xx; }
4436 ts_ctx->cached_shift = shift;
4439 bool delete_pA =
false;
4443 pA->
GetType() != ts_ctx->jacType))
4451 if (ts_ctx->bchandler)
4459 PetscObjectState nonzerostate;
4460 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4465 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4466 if (delete_pA) {
delete pA; }
4478 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4480 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4483 ierr = PetscObjectStateGet((
PetscObject)P,&ts_ctx->cached_ijacstate);
4489 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4490 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4491 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4492 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4493 PetscFunctionReturn(0);
4496 static PetscErrorCode __mfem_ts_computesplits(
TS ts,
PetscReal t,Vec x,Vec xp,
4500 __mfem_ts_ctx* ts_ctx;
4504 PetscObjectState state;
4505 PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4506 PetscBool assembled;
4507 PetscErrorCode ierr;
4509 PetscFunctionBeginUser;
4513 ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4514 ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4516 if (Axp && Axp != Jxp)
4518 ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4519 ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4522 ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(
void**)&ts_ctx); CHKERRQ(ierr);
4525 ierr = PetscObjectStateGet((
PetscObject)Jx,&state); CHKERRQ(ierr);
4527 state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4528 ierr = PetscObjectStateGet((
PetscObject)Jxp,&state); CHKERRQ(ierr);
4530 state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4531 if (!rx && !rxp) { PetscFunctionReturn(0); }
4538 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4539 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4541 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4542 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4543 if (!ts_ctx->bchandler)
4550 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4557 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4566 bool delete_mat =
false;
4570 pJx->
GetType() != ts_ctx->jacType))
4575 ierr = PetscObjectReference((
PetscObject)B); CHKERRQ(ierr);
4583 ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4586 ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4591 ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4592 ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4595 if (delete_mat) {
delete pJx; }
4599 if (ts_ctx->bchandler)
4616 pJxp->
GetType() != ts_ctx->jacType))
4621 ierr = PetscObjectReference((
PetscObject)B); CHKERRQ(ierr);
4624 &oJxp,ts_ctx->jacType);
4628 ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4631 ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4636 ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4637 ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4639 if (delete_mat) {
delete pJxp; }
4643 if (ts_ctx->bchandler)
4654 ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4658 ierr = PetscObjectStateGet((
PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4660 ierr = PetscObjectStateGet((
PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4665 if (!ts_ctx->bchandler) {
delete xx; }
4666 PetscFunctionReturn(0);
4669 static PetscErrorCode __mfem_ts_rhsjacobian(
TS ts,
PetscReal t, Vec x,
4672 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4676 PetscObjectState state;
4677 PetscErrorCode ierr;
4679 PetscFunctionBeginUser;
4683 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4684 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4688 ierr = PetscObjectStateGet((
PetscObject)P,&state); CHKERRQ(ierr);
4690 state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
4697 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4698 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4699 if (!ts_ctx->bchandler)
4706 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4713 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4717 if (!ts_ctx->bchandler) {
delete xx; }
4720 bool delete_pA =
false;
4724 pA->
GetType() != ts_ctx->jacType))
4732 if (ts_ctx->bchandler)
4740 PetscObjectState nonzerostate;
4741 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4746 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4747 if (delete_pA) {
delete pA; }
4759 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4761 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4766 ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4768 ierr = PetscObjectStateGet((
PetscObject)P,&ts_ctx->cached_rhsjacstate);
4774 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4775 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4776 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4777 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4778 PetscFunctionReturn(0);
4784 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4786 PetscFunctionBeginUser;
4789 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4798 PetscErrorCode ierr;
4800 ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4807 PetscErrorCode ierr;
4809 ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4814 PetscFunctionReturn(0);
4817 static PetscErrorCode __mfem_snes_jacobian(
SNES snes, Vec x,
Mat A,
Mat P,
4822 PetscErrorCode ierr;
4824 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4826 PetscFunctionBeginUser;
4827 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4828 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4829 if (!snes_ctx->bchandler)
4836 if (!snes_ctx->work) { snes_ctx->work =
new mfem::Vector(n); }
4839 xx = snes_ctx->work;
4845 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4846 if (!snes_ctx->bchandler) {
delete xx; }
4849 bool delete_pA =
false;
4853 pA->
GetType() != snes_ctx->jacType))
4861 if (snes_ctx->bchandler)
4869 PetscObjectState nonzerostate;
4870 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4874 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4875 if (delete_pA) {
delete pA; }
4887 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4889 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4894 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4895 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4901 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4902 ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4903 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4904 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4905 PetscFunctionReturn(0);
4908 static PetscErrorCode __mfem_snes_function(
SNES snes, Vec x, Vec f,
void *ctx)
4910 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4912 PetscFunctionBeginUser;
4915 if (snes_ctx->bchandler)
4922 snes_ctx->op->Mult(*txx,ff);
4929 snes_ctx->op->Mult(xx,ff);
4931 ff.UpdateVecFromFlags();
4932 PetscFunctionReturn(0);
4935 static PetscErrorCode __mfem_snes_objective(
SNES snes, Vec x,
PetscReal *f,
4938 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4940 PetscFunctionBeginUser;
4941 if (!snes_ctx->objective)
4943 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing objective function");
4947 (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4949 PetscFunctionReturn(0);
4952 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4953 PetscBool *cy,PetscBool *cw,
void* ctx)
4955 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4956 bool lcy =
false,lcw =
false;
4958 PetscFunctionBeginUser;
4962 (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4963 if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
4964 if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
4965 PetscFunctionReturn(0);
4968 static PetscErrorCode __mfem_snes_update(
SNES snes,
PetscInt it)
4971 __mfem_snes_ctx* snes_ctx;
4973 PetscFunctionBeginUser;
4975 ierr = SNESGetFunction(snes,&F,NULL,(
void **)&snes_ctx); CHKERRQ(ierr);
4976 ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4979 ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4982 ierr = VecDestroy(&pX); CHKERRQ(ierr);
4986 if (!pX) SETERRQ(PetscObjectComm((
PetscObject)snes),PETSC_ERR_USER,
4987 "Missing previous solution");
4988 ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4993 (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4995 ierr = VecCopy(X,pX); CHKERRQ(ierr);
4996 PetscFunctionReturn(0);
5002 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
5004 PetscFunctionBeginUser;
5007 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
5016 PetscErrorCode ierr;
5018 ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
5025 PetscErrorCode ierr;
5027 ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5032 PetscFunctionReturn(0);
5035 static PetscErrorCode __mfem_mat_shell_apply(
Mat A, Vec x, Vec y)
5038 PetscErrorCode ierr;
5040 PetscFunctionBeginUser;
5041 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5042 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5046 yy.UpdateVecFromFlags();
5047 PetscFunctionReturn(0);
5050 static PetscErrorCode __mfem_mat_shell_apply_transpose(
Mat A, Vec x, Vec y)
5053 PetscErrorCode ierr;
5056 PetscFunctionBeginUser;
5057 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5058 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5061 ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5070 yy.UpdateVecFromFlags();
5071 PetscFunctionReturn(0);
5074 static PetscErrorCode __mfem_mat_shell_copy(
Mat A,
Mat B, MatStructure str)
5077 PetscErrorCode ierr;
5079 PetscFunctionBeginUser;
5080 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5081 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5082 ierr = MatShellSetContext(B,(
void *)op); CHKERRQ(ierr);
5083 PetscFunctionReturn(0);
5086 static PetscErrorCode __mfem_mat_shell_destroy(
Mat A)
5088 PetscFunctionBeginUser;
5089 PetscFunctionReturn(0);
5092 static PetscErrorCode __mfem_pc_shell_view(
PC pc, PetscViewer viewer)
5094 __mfem_pc_shell_ctx *
ctx;
5095 PetscErrorCode ierr;
5097 PetscFunctionBeginUser;
5098 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5102 ierr = PetscObjectTypeCompare((
PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5109 ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5115 ierr = PetscViewerASCIIPrintf(viewer,
5116 "No information available on the mfem::Solver\n");
5120 if (isascii && ctx->factory)
5122 ierr = PetscViewerASCIIPrintf(viewer,
5123 "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5127 PetscFunctionReturn(0);
5130 static PetscErrorCode __mfem_pc_shell_apply(
PC pc, Vec x, Vec y)
5132 __mfem_pc_shell_ctx *
ctx;
5133 PetscErrorCode ierr;
5135 PetscFunctionBeginUser;
5138 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5141 ctx->op->Mult(xx,yy);
5142 yy.UpdateVecFromFlags();
5148 PetscFunctionReturn(0);
5151 static PetscErrorCode __mfem_pc_shell_apply_transpose(
PC pc, Vec x, Vec y)
5153 __mfem_pc_shell_ctx *
ctx;
5154 PetscErrorCode ierr;
5156 PetscFunctionBeginUser;
5159 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5162 ctx->op->MultTranspose(xx,yy);
5163 yy.UpdateVecFromFlags();
5169 PetscFunctionReturn(0);
5172 static PetscErrorCode __mfem_pc_shell_setup(
PC pc)
5174 __mfem_pc_shell_ctx *
ctx;
5176 PetscFunctionBeginUser;
5177 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5188 ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5197 PetscFunctionReturn(0);
5200 static PetscErrorCode __mfem_pc_shell_destroy(
PC pc)
5202 __mfem_pc_shell_ctx *
ctx;
5203 PetscErrorCode ierr;
5205 PetscFunctionBeginUser;
5206 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5212 PetscFunctionReturn(0);
5215 static PetscErrorCode __mfem_array_container_destroy(
void *ptr)
5217 PetscErrorCode ierr;
5219 PetscFunctionBeginUser;
5220 ierr = PetscFree(ptr); CHKERRQ(ierr);
5221 PetscFunctionReturn(0);
5224 static PetscErrorCode __mfem_matarray_container_destroy(
void *ptr)
5227 PetscErrorCode ierr;
5229 PetscFunctionBeginUser;
5230 for (
int i=0; i<a->
Size(); i++)
5234 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5237 PetscFunctionReturn(0);
5240 static PetscErrorCode __mfem_monitor_ctx_destroy(
void **ctx)
5242 PetscErrorCode ierr;
5244 PetscFunctionBeginUser;
5245 ierr = PetscFree(*ctx); CHKERRQ(ierr);
5246 PetscFunctionReturn(0);
5251 PetscErrorCode MakeShellPC(
PC pc,
mfem::Solver &precond,
bool ownsop)
5253 PetscFunctionBeginUser;
5254 __mfem_pc_shell_ctx *ctx =
new __mfem_pc_shell_ctx;
5256 ctx->ownsop = ownsop;
5257 ctx->factory = NULL;
5263 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5265 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5266 ierr = PCShellSetName(pc,
"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5267 ierr = PCShellSetContext(pc,(
void *)ctx); CHKERRQ(ierr);
5268 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5269 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5271 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5272 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5273 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5274 PetscFunctionReturn(0);
5279 PetscErrorCode MakeShellPCWithFactory(
PC pc,
5282 PetscFunctionBeginUser;
5283 __mfem_pc_shell_ctx *ctx =
new __mfem_pc_shell_ctx;
5286 ctx->factory = factory;
5292 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5294 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5295 ierr = PCShellSetName(pc,factory->
GetName()); CHKERRQ(ierr);
5296 ierr = PCShellSetContext(pc,(
void *)ctx); CHKERRQ(ierr);
5297 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5298 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5300 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5301 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5302 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5303 PetscFunctionReturn(0);
5308 static PetscErrorCode Convert_Array_IS(MPI_Comm comm,
bool islist,
5313 const int *data = list ? list->
GetData() : NULL;
5314 PetscErrorCode ierr;
5316 PetscFunctionBeginUser;
5317 ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5320 for (
PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5327 if (data[i]) { idxs[cum++] = i+st; }
5331 ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5333 PetscFunctionReturn(0);
5339 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5347 PetscErrorCode ierr;
5349 PetscFunctionBeginUser;
5350 for (
int i = 0; i < pl2l.
Size(); i++)
5354 ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(
const PetscInt**)&ii,
5355 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
5356 MFEM_VERIFY(done,
"Unable to perform MatGetRowIJ on " << i <<
" l2l matrix");
5357 ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5358 #if defined(PETSC_USE_64BIT_INDICES)
5359 int nnz = (int)ii[m];
5360 int *mii =
new int[m+1];
5361 int *mjj =
new int[nnz];
5362 for (
int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5363 for (
int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5368 ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5370 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
5371 MFEM_VERIFY(done,
"Unable to perform MatRestoreRowIJ on "
5372 << i <<
" l2l matrix");
5375 for (
int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5377 const int* vdata = mark->
GetData();
5378 int* sdata = sub_dof_marker.
GetData();
5379 int cumh = 0, cumw = 0;
5380 for (
int i = 0; i < l2l.Size(); i++)
5385 l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5386 cumh += l2l[i]->Height();
5387 cumw += l2l[i]->Width();
5389 ierr = Convert_Array_IS(comm,
false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5390 for (
int i = 0; i < pl2l.
Size(); i++)
5394 PetscFunctionReturn(0);
5397 #if !defined(PETSC_HAVE_HYPRE)
5399 #if defined(HYPRE_MIXEDINT)
5400 #error "HYPRE_MIXEDINT not supported"
5403 #include "_hypre_parcsr_mv.h"
5404 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,
Mat* pA)
5406 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5407 hypre_CSRMatrix *hdiag,*hoffd;
5409 PetscInt *dii,*djj,*oii,*ojj,*iptr;
5412 PetscErrorCode ierr;
5414 PetscFunctionBeginUser;
5415 hdiag = hypre_ParCSRMatrixDiag(hA);
5416 hoffd = hypre_ParCSRMatrixOffd(hA);
5417 m = hypre_CSRMatrixNumRows(hdiag);
5418 n = hypre_CSRMatrixNumCols(hdiag);
5419 dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5420 onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5421 ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5422 ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5423 ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5424 ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*
sizeof(
PetscInt));
5426 ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*
sizeof(
PetscInt));
5428 ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*
sizeof(
PetscScalar));
5435 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5439 ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5444 ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5445 ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5446 ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5447 ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*
sizeof(
PetscInt));
5449 offdj = hypre_CSRMatrixJ(hoffd);
5450 coffd = hypre_ParCSRMatrixColMapOffd(hA);
5451 for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5452 ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*
sizeof(
PetscScalar));
5459 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5463 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5464 djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5470 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5476 void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5477 const char *names[6] = {
"_mfem_csr_dii",
5488 ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5489 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5490 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5494 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5496 PetscFunctionReturn(0);
5499 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,
Mat* pA)
5502 ISLocalToGlobalMapping rl2g,cl2g;
5504 hypre_CSRMatrix *hdiag,*hoffd;
5505 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5507 const char *names[2] = {
"_mfem_csr_aux",
5511 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5513 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5514 PetscErrorCode ierr;
5516 PetscFunctionBeginUser;
5518 str = hypre_ParCSRMatrixFirstRowIndex(hA);
5519 stc = hypre_ParCSRMatrixFirstColDiag(hA);
5520 hdiag = hypre_ParCSRMatrixDiag(hA);
5521 hoffd = hypre_ParCSRMatrixOffd(hA);
5522 dr = hypre_CSRMatrixNumRows(hdiag);
5523 dc = hypre_CSRMatrixNumCols(hdiag);
5524 nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5525 hdi = hypre_CSRMatrixI(hdiag);
5526 hdj = hypre_CSRMatrixJ(hdiag);
5527 hdd = hypre_CSRMatrixData(hdiag);
5528 oc = hypre_CSRMatrixNumCols(hoffd);
5529 nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5530 hoi = hypre_CSRMatrixI(hoffd);
5531 hoj = hypre_CSRMatrixJ(hoffd);
5532 hod = hypre_CSRMatrixData(hoffd);
5535 ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5536 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5537 ierr = ISDestroy(&is); CHKERRQ(ierr);
5538 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5539 ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5540 for (i=0; i<dc; i++) { aux[i] = i+stc; }
5541 for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5542 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5543 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5544 ierr = ISDestroy(&is); CHKERRQ(ierr);
5547 ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5548 ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5549 ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5550 ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5551 ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5552 ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5555 ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5556 ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5560 *ii = *(hdi++) + *(hoi++);
5561 for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5565 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5566 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5567 *(++ii) = *(hdi++) + *(hoi++);
5568 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5570 for (; cum<dr; cum++) { *(++ii) = nnz; }
5574 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5582 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5583 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5584 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5588 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5590 ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5591 ierr = MatDestroy(&lA); CHKERRQ(ierr);
5592 ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5593 ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5594 PetscFunctionReturn(0);
5598 #include <petsc/private/matimpl.h>
5600 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm,
PetscInt m,
5604 ierr = MatCreate(comm,A); CHKERRQ(ierr);
5605 ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5606 ierr = PetscObjectChangeTypeName((
PetscObject)*A,
"mfemdummy"); CHKERRQ(ierr);
5607 (*A)->preallocated = PETSC_TRUE;
5608 ierr = MatSetUp(*A); CHKERRQ(ierr);
5609 PetscFunctionReturn(0);
5612 #include <petsc/private/vecimpl.h>
5614 #if defined(PETSC_HAVE_DEVICE)
5615 static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5619 PetscFunctionReturn(0);
5623 static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5626 #if defined(PETSC_HAVE_DEVICE)
5627 *flg = v->boundtocpu;
5631 PetscFunctionReturn(0);
5634 static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject o)
5636 PetscErrorCode ierr;
5639 ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5640 PetscFunctionReturn(0);
5643 #endif // MFEM_USE_PETSC
5644 #endif // MFEM_USE_MPI
Ordering::Type GetOrdering() const
Return the ordering method.
int Size() const
Return the logical size of the array.
void SetPreconditioner(Solver &precond)
void SetPrintLevel(int plev)
static int GetId()
Get the device id of the configured device.
virtual int NumNonZeroElems() const
Returns the number of the nonzero elements in the matrix.
PetscParVector * B
Right-hand side and solution vector.
void CreatePrivateContext()
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
void trans(const Vector &u, Vector &x)
MPI_Comm GetComm() const
MPI communicator.
void EliminateBC(const HypreParMatrix &A, const HypreParMatrix &Ae, const Array< int > &ess_dof_list, const Vector &X, Vector &B)
Eliminate essential BC specified by ess_dof_list from the solution X to the r.h.s. B.
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
void SetUpdate(void(*update)(Operator *op, int it, const Vector &F, const Vector &X, const Vector &D, const Vector &P))
bool isHomogeneous() const
True if type is HOMOGENEOUS.
virtual void SetOperator(const Operator &op)
Specification of the nonlinear operator.
void MakeDataOwner() const
Set the Vector data (host pointer) ownership flag.
Abstract class for PETSc's preconditioners.
PetscParMatrix & operator+=(const PetscParMatrix &B)
ParMesh * GetParMesh() const
petsc::Vec x
The actual PETSc object.
virtual void SetOperator(const Operator &op)
bool clcustom
Boolean to handle SetFromOptions calls.
void SetObjective(void(*obj)(Operator *op, const Vector &x, double *f))
Specification of an objective function to be used for line search.
Device memory; using CUDA or HIP *Malloc and *Free.
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
petsc::Mat A
The actual PETSc object.
Wrapper for PETSc's matrix class.
PetscParVector & operator+=(const PetscParVector &y)
void ApplyBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = g (internally evaluated) on ess_tdof_list
Abstract class for PETSc's linear solvers.
void SetSize(int s)
Resize the vector to size s.
Abstract class for PETSc's solvers.
PetscODESolver::Type GetType() const
virtual void Run(Vector &x, double &t, double &dt, double t_final)
Perform time integration from time t [in] to time tf [in].
void SetPostCheck(void(*post)(Operator *op, const Vector &X, Vector &Y, Vector &W, bool &changed_y, bool &changed_w))
PetscBCHandler(Type type_=ZERO)
void Delete()
Delete the owned pointers and reset the Memory object.
Base abstract class for first order time dependent operators.
PetscInt GetNumCols() const
Returns the local number of columns.
void MakeAliasForSync(const Memory< double > &base_, int offset_, int size_, bool usedev_)
virtual Operator & GetGradient(const Vector &x) const
Evaluate the gradient operator at the point x. The default behavior in class Operator is to generate ...
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
void SetType(PetscODESolver::Type)
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Pointer to an Operator of a specified type.
void Mult(double a, const Vector &x, double b, Vector &y) const
Matvec: y = a A x + b y.
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
bool DeviceRequested() const
T * GetData()
Returns the data.
virtual Operator & GetExplicitGradient(const Vector &x) const
Return an Operator representing dG/dx at the given point x and the currently set time.
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true...
int Size() const
Returns the size of the vector.
void SetBlockSize(PetscInt rbs, PetscInt cbs=-1)
Set row and column block sizes of a matrix.
virtual void Step(Vector &x, double &t, double &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in]...
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
void Init()
Initialize with defaults. Does not initialize inherited members.
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
PetscClassId cid
The class id of the actual PETSc object.
ID for class PetscParMatrix, MATHYPRE format.
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
virtual void SetTime(const double t_)
Set the current time.
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
Abstract parallel finite element space.
void SetRelTol(double tol)
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Randomize(PetscInt seed=0)
Set random values.
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
void SetMaxIter(int max_iter)
PetscParVector & operator*=(PetscScalar d)
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
void SortColumnIndices()
Sort the column indices corresponding to each row.
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
int Capacity() const
Return the size of the allocated memory.
Memory< double > & GetMemory()
Return a reference to the Memory object used by the Vector.
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, double diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
void Customize(bool customize=true) const
Customize object with options set.
PetscInt M() const
Returns the global number of rows.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
virtual ~PetscLinearSolver()
PetscInt GetNumRows() const
Returns the local number of rows.
virtual int GetTrueVSize() const
Return the number of local vector true dofs.
virtual void ExplicitMult(const Vector &x, Vector &y) const
Perform the action of the explicit part of the operator, G: y = G(x, t) where t is the current time...
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray()...
const double * GetHostPointer() const
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
ID for the base class Operator, i.e. any type.
double f(const Vector &xvec)
void write(std::ostream &os, T value)
Write 'value' to stream.
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
Abstract class for monitoring PETSc's solvers.
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true, bool iter_mode=false)
struct s_NavierContext ctx
void SetJacobianType(Operator::Type type)
int to_int(const std::string &str)
Convert a string to an int.
const double * HostReadData() const
Auxiliary class for BDDC customization.
virtual ~PetscPreconditioner()
ID for class PetscParMatrix, unspecified format.
MPI_Comm GetComm() const
Get the associated MPI communicator.
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
void LoseData()
NULL-ifies the data.
void SetFlagsFromMask_() const
bool DeviceIsValid() const
Return true if device pointer is valid.
const int * HostReadJ() const
PetscParMatrix & operator=(const PetscParMatrix &B)
bool ReadRequested() const
PetscInt GetColStart() const
Returns the global index of the first local column.
virtual ~PetscParVector()
Calls PETSc's destroy function.
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
PetscBCHandler * bchandler
Handler for boundary conditions.
PetscParMatrix & operator-=(const PetscParMatrix &B)
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
void Zero(Vector &x)
Replace boundary dofs with 0.
void Assign(const T *)
Copy data from a pointer. 'Size()' elements are copied.
MPI_Comm GetComm() const
Get the associated MPI communicator.
T read(std::istream &is)
Read a value from the stream and return it.
bool WriteRequested() const
void Shift(double s)
Shift diagonal by a constant.
ID for class PetscParMatrix, MATNEST format.
int GetVDim() const
Returns vector dimension.
void RAP(const DenseMatrix &A, const DenseMatrix &P, DenseMatrix &RAP)
void SetTime(double t)
Sets the current time.
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true...
ID for class PetscParMatrix, MATSHELL format.
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void SetMat(petsc::Mat newA)
Replace the inner Mat Object. The reference count of newA is increased.
Bitwise-OR of all CUDA backends.
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc's MATSHELL object and returns the Mat in B...
int SpaceDimension() const
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory<T> &mem, int size, false)
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Wrapper for hypre's parallel vector class.
HYPRE_BigInt * GetTrueDofOffsets() const
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
virtual void Eval(double t, Vector &g)
Boundary conditions evaluation.
Type
Enumeration defining IDs for some classes derived from Operator.
double p(const Vector &x, double t)
PetscInt NNZ() const
Returns the number of nonzeros.
virtual void Mult(const Vector &b, Vector &x) const
Application of the preconditioner.
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
MemoryType
Memory types supported by MFEM.
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
virtual Operator & GetImplicitGradient(const Vector &x, const Vector &k, double shift) const
Return an Operator representing (dF/dk shift + dF/dx) at the given x, k, and the currently set time...
PetscInt N() const
Returns the global number of columns.
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
void SetJacobianType(Operator::Type type)
bool operatorset
Boolean to handle SetOperator calls.
PetscParVector * X
Auxiliary vectors for typecasting.
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
void MultTranspose(double a, const Vector &x, double b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
bool IsAliasForSync() const
int height
Dimension of the output / number of rows in the matrix.
PetscParVector & operator-=(const PetscParVector &y)
virtual void ImplicitMult(const Vector &x, const Vector &k, Vector &y) const
Perform the action of the implicit part of the operator, F: y = F(x, k, t) where t is the current tim...
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory<T> &mem, int size, false)
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true, or the mfem::Device's HostMemoryClass, otherwise.
virtual ~PetscNonlinearSolver()
virtual ~PetscODESolver()
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object...
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
bool Empty() const
Return true if the Memory object is empty, see Reset().
ID for class PetscParMatrix, MATAIJ format.
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
MPI_Comm GetComm() const
Get the associated MPI communicator.
PetscInt GlobalSize() const
Returns the global number of rows.
PetscInt GetRowStart() const
Returns the global index of the first local row.
struct _p_PetscObject * PetscObject
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
void ResetMemory()
Completes the operation started with PlaceMemory.
void SetAbsTol(double tol)
Helper class for handling essential boundary conditions.
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
PetscParVector & operator=(PetscScalar d)
Set constant values.
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
int NumRowBlocks() const
Return the number of row blocks.
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
const FiniteElementCollection * FEColl() const
const int * HostReadI() const
Identity Operator I: x -> x.
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array...
bool UseDevice() const
Read the internal device flag.
void FreePrivateContext()
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Vector * GlobalVector() const
Returns the global vector in each processor.
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool iter_mode=false)
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as s...
Wrapper for hypre's ParCSR matrix class.
A class to handle Block systems in a matrix-free implementation.
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory<T> &mem, int size, false)
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
void operator*=(double s)
Scale all entries by s: A_scaled = s*A.
int NumColBlocks() const
Return the number of column blocks.
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
ID for class PetscParMatrix, MATIS format.
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
void PlaceMemory(Memory< double > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding ...
int width
Dimension of the input / number of columns in the matrix.
void * private_ctx
Private context for solver.
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
const double * GetDevicePointer() const
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
void ScaleCols(const Vector &s)
Scale the local col i by s(i).
double f(const Vector &p)