14 #include "../config/config.hpp"
20 #include "../fem/fem.hpp"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
// Backward-compatibility shims for older PETSc releases: the modern API
// names used throughout this file are mapped onto their pre-3.11 / pre-3.12
// equivalents so the rest of the code can use the newer spellings
// unconditionally.
28 #if PETSC_VERSION_LT(3,11,0)
// Vec read-lock API was renamed in PETSc 3.11 (VecLockPush -> VecLockReadPush).
29 #define VecLockReadPush VecLockPush
30 #define VecLockReadPop VecLockPop
32 #if PETSC_VERSION_LT(3,12,0)
// Write-only array accessors and MatComputeOperator* appeared in PETSc 3.12;
// fall back to the generic accessors / explicit-operator routines before that.
33 #define VecGetArrayWrite VecGetArray
34 #define VecRestoreArrayWrite VecRestoreArray
35 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
// Forward declarations of the file-local (static) callbacks that this file
// registers with PETSc objects: SNES residual/Jacobian hooks, PC shell hooks,
// Mat shell hooks, PetscContainer destructors, and device helpers.
// NOTE(review): several declarations below are visibly truncated mid-argument
// list by the extraction of this chunk; the missing parameters are not shown.
// Nonlinear-solver (SNES) callbacks.
60 static PetscErrorCode __mfem_snes_jacobian(
SNES,
Vec,
Mat,
Mat,
void*);
61 static PetscErrorCode __mfem_snes_function(
SNES,
Vec,
Vec,
void*);
64 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,
Vec,
Vec,
Vec,
65 PetscBool*,PetscBool*,
void*);
// PC shell callbacks: forward/transpose application, setup, teardown, viewing.
67 static PetscErrorCode __mfem_pc_shell_apply(
PC,
Vec,
Vec);
68 static PetscErrorCode __mfem_pc_shell_apply_transpose(
PC,
Vec,
Vec);
69 static PetscErrorCode __mfem_pc_shell_setup(
PC);
70 static PetscErrorCode __mfem_pc_shell_destroy(
PC);
71 static PetscErrorCode __mfem_pc_shell_view(
PC,PetscViewer);
// Mat shell callbacks wrapping an mfem::Operator (see the MATSHELL setup code
// later in the file, which registers these with MatShellSetOperation).
72 static PetscErrorCode __mfem_mat_shell_apply(
Mat,
Vec,
Vec);
73 static PetscErrorCode __mfem_mat_shell_apply_transpose(
Mat,
Vec,
Vec);
74 static PetscErrorCode __mfem_mat_shell_destroy(
Mat);
75 static PetscErrorCode __mfem_mat_shell_copy(
Mat,
Mat,MatStructure);
// PetscContainer destructors used to release arrays/Mat arrays attached to
// PETSc objects via PetscContainerSetUserDestroy.
76 static PetscErrorCode __mfem_array_container_destroy(
void*);
77 static PetscErrorCode __mfem_matarray_container_destroy(
void*);
78 static PetscErrorCode __mfem_monitor_ctx_destroy(
void**);
// Helper converting an mfem::Array<int> into a PETSc index set (declaration
// truncated by the extraction).
81 static PetscErrorCode Convert_Array_IS(MPI_Comm,
bool,
const mfem::Array<int>*,
86 static PetscErrorCode MakeShellPCWithFactory(
PC,
// Fallback hypre_ParCSRMatrix -> PETSc Mat converters, compiled only when
// PETSc itself was built without hypre support.
92 #if !defined(PETSC_HAVE_HYPRE)
93 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,
Mat*);
94 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,
Mat*);
// Device (GPU) helpers, only meaningful for device-enabled PETSc builds.
97 #if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
98 #if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
105 #if defined(PETSC_HAVE_DEVICE)
106 static PetscErrorCode __mfem_VecSetOffloadMask(
Vec,PetscOffloadMask);
108 static PetscErrorCode __mfem_VecBoundToCPU(
Vec,PetscBool*);
109 static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject);
// Fragments of the file-local context structs passed as the void* user data
// to the callbacks declared above. NOTE(review): the struct headers and many
// members are missing from this extraction; only the visible tail members of
// the PC-shell context, a solver context, and the monitor context remain.
// Tail of the PC shell context: numprec presumably counts preconditioner
// (re)builds — TODO confirm against the full source.
118 unsigned long int numprec;
119 } __mfem_pc_shell_ctx;
// Function-pointer members mirroring the SNES line-search postcheck and
// update hooks (argument lists truncated by the extraction).
130 void (*postcheck)(
mfem::Operator *op,
const mfem::Vector&, mfem::Vector&,
131 mfem::Vector&,
bool&,
bool&);
135 const mfem::Vector&,
const mfem::Vector&,
136 const mfem::Vector&,
const mfem::Vector&);
// Cached PETSc object states used to skip redundant Jacobian/split
// reassembly when the underlying objects have not changed.
148 PetscObjectState cached_ijacstate;
149 PetscObjectState cached_rhsjacstate;
150 PetscObjectState cached_splits_xstate;
151 PetscObjectState cached_splits_xdotstate;
158 } __mfem_monitor_ctx;
// File-global PETSc error code reused by every call site below via the
// PCHKERRQ/CCHKERRQ macros. Being file-global, it is NOT thread-safe —
// consistent with PETSc's own (non-thread-safe) usage model.
161 static PetscErrorCode ierr;
// Fragments of MFEM's PETSc initialization/finalization wrappers.
// Forward the CUDA device selection to PETSc before PetscInitialize
// (option value argument lost in the extraction).
184 ierr = PetscOptionsSetValue(NULL,
"-cuda_device",
186 MFEM_VERIFY(!ierr,
"Unable to set initial option value to PETSc");
188 ierr = PetscInitialize(argc,argv,rc_file,help);
189 MFEM_VERIFY(!ierr,
"Unable to initialize PETSc");
194 ierr = PetscFinalize();
195 MFEM_VERIFY(!ierr,
"Unable to finalize PETSc");
// PetscMemory accessors (bodies partially missing). Both use mfem::Read on
// *this to obtain a pointer valid on the requested memory space; the final
// boolean selects device (true) vs host (false) access.
198 const double* PetscMemory::GetHostPointer()
const
202 const double *v =
mfem::Read(*
this,Capacity(),
false);
207 const double* PetscMemory::GetDevicePointer()
const
211 const double *v =
mfem::Read(*
this,Capacity(),
true);
// Synchronize this mfem Vector's data/size with the wrapped PETSc Vec x:
// queries the local size, wraps the Vec's host (and, for CUDA Vec types,
// device) arrays into pdata without taking ownership, and aliases data onto
// pdata. The Vec's offload mask is saved and restored around the accesses so
// the Get/Restore pairs here do not change where PETSc thinks the valid copy
// lives. NOTE(review): several lines of the original body are missing from
// this extraction.
218 void PetscParVector::SetDataAndSize_()
223 MFEM_VERIFY(x,
"Missing Vec");
224 ierr = VecSetUp(x); PCHKERRQ(x,ierr);
225 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
226 MFEM_VERIFY(n >= 0,
"Invalid local size");
228 #if defined(PETSC_HAVE_DEVICE)
// Remember the current offload state; temporarily force CPU so the host
// array read below does not trigger a device-to-host copy bookkeeping change.
229 PetscOffloadMask omask;
232 ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
233 if (omask != PETSC_OFFLOAD_BOTH)
235 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
238 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); PCHKERRQ(x,ierr);
239 #if defined(PETSC_HAVE_DEVICE)
// Detect CUDA Vec types; only then is there a device array to wrap.
240 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
241 ""); PCHKERRQ(x,ierr);
244 if (omask != PETSC_OFFLOAD_BOTH)
246 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
249 ierr = VecCUDAGetArrayRead(x,(
const PetscScalar**)&darray); PCHKERRQ(x,ierr);
// Wrap both host and device pointers; final 'false' = do not own the memory.
250 pdata.Wrap(array,darray,size,MemoryType::HOST,
false);
251 ierr = VecCUDARestoreArrayRead(x,(
const PetscScalar**)&darray);
// Host-only path: wrap just the host array (again non-owning).
257 pdata.Wrap(array,size,MemoryType::HOST,
false);
259 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); PCHKERRQ(x,ierr);
261 #if defined(PETSC_HAVE_DEVICE)
// Restore the offload mask saved on entry.
262 ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
// Expose pdata through the base-class data member as a zero-offset alias.
264 data.MakeAlias(pdata,0,size);
// Propagate the PETSc Vec's offload mask into the mfem-side host/device
// validity flags of pdata, so mfem's memory manager agrees with PETSc about
// which copy of the data is current. NOTE(review): the switch statement's
// header and break statements are missing from this extraction.
268 void PetscParVector::SetFlagsFromMask_()
const
270 MFEM_VERIFY(x,
"Missing Vec");
271 #if defined(_USE_DEVICE)
272 PetscOffloadMask mask;
274 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
275 ""); PCHKERRQ(x,ierr);
276 ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
// CPU copy valid, device copy stale.
281 case PETSC_OFFLOAD_CPU:
282 pdata.SetHostValid();
283 pdata.SetDeviceInvalid();
// GPU copy valid, host copy stale.
285 case PETSC_OFFLOAD_GPU:
286 pdata.SetHostInvalid();
287 pdata.SetDeviceValid();
// Both copies in sync.
289 case PETSC_OFFLOAD_BOTH:
290 pdata.SetHostValid();
291 pdata.SetDeviceValid();
294 MFEM_ABORT(
"Unhandled case " << mask);
// Inverse of SetFlagsFromMask_: push mfem's host/device validity flags back
// into the PETSc Vec. Always bumps the Vec's object state so PETSc-side
// caches (norms, etc.) are invalidated after mfem modified the data.
301 void PetscParVector::UpdateVecFromFlags()
303 MFEM_VERIFY(x,
"Missing Vec");
304 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
305 #if defined(_USE_DEVICE)
307 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
308 ""); PCHKERRQ(x,ierr);
// Translate mfem validity flags into the equivalent PETSc offload mask.
311 bool dv = pdata.DeviceIsValid();
312 bool hv = pdata.HostIsValid();
313 PetscOffloadMask mask;
314 if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
315 else if (dv) { mask = PETSC_OFFLOAD_GPU; }
316 else { mask = PETSC_OFFLOAD_CPU; }
317 ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
// Non-CUDA (or host-only) path: copy the data into the Vec's host array.
// Write-only access is sufficient since the whole array is overwritten.
324 ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
325 pdata.CopyToHost(v,size);
326 ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
// Choose the PETSc Vec type to match MFEM's active device configuration:
// CUDA Vec for DEVICE/MANAGED memory types, standard (host) Vec otherwise.
// NOTE(review): the switch header/braces and default label are partially
// missing from this extraction.
330 void PetscParVector::SetVecType_()
333 MFEM_VERIFY(x,
"Missing Vec");
334 ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
335 #if defined(_USE_DEVICE)
336 switch (Device::GetDeviceMemoryType())
338 case MemoryType::DEVICE:
339 case MemoryType::MANAGED:
340 ierr = VecSetType(x,VECCUDA); PCHKERRQ(x,ierr);
343 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
// Non-device build: always a standard host vector.
349 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
// Fragments of the read / write / read-write data accessors plus the
// UseDevice getter/setter. Each accessor performs a matched Get/Restore pair
// on the Vec's array (CUDA variant when on_dev is requested and the Vec is a
// CUDA type, host variant otherwise); the Get/Restore is presumably used for
// its side effect of migrating/validating the requested copy — TODO confirm
// against the full source, since the returned pointer ('dummy') is unused.
// Read-only accessor fragment: no state increase, data is not modified.
357 MFEM_VERIFY(x,
"Missing Vec");
358 #if defined(PETSC_HAVE_DEVICE)
360 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
361 ""); PCHKERRQ(x,ierr);
362 if (on_dev && iscuda)
364 ierr = VecCUDAGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
365 ierr = VecCUDARestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
370 ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
371 ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
// Write-only accessor fragment: ends with an object-state increase because
// the caller will overwrite the data.
385 MFEM_VERIFY(x,
"Missing Vec");
386 #if defined(PETSC_HAVE_DEVICE)
388 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
389 ""); PCHKERRQ(x,ierr);
390 if (on_dev && iscuda)
392 ierr = VecCUDAGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
393 ierr = VecCUDARestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
398 ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
399 ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
401 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
// Read-write accessor fragment: generic Get/Restore plus state increase.
414 MFEM_VERIFY(x,
"Missing Vec");
415 #if defined(PETSC_HAVE_DEVICE)
417 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
418 ""); PCHKERRQ(x,ierr);
419 if (on_dev && iscuda)
421 ierr = VecCUDAGetArray(x,&dummy); PCHKERRQ(x,ierr);
422 ierr = VecCUDARestoreArray(x,&dummy); PCHKERRQ(x,ierr);
427 ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
428 ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
430 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
// Setter: note the inversion — VecBindToCPU(PETSC_TRUE) means "stay on CPU",
// so dev==true binds with PETSC_FALSE (allow device execution).
440 void PetscParVector::UseDevice(
bool dev)
const
442 MFEM_VERIFY(x,
"Missing Vec");
443 #if defined(PETSC_HAVE_DEVICE)
444 ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
// Getter: the Vec "uses the device" exactly when it is NOT bound to the CPU.
448 bool PetscParVector::UseDevice()
const
451 MFEM_VERIFY(x,
"Missing Vec");
452 ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
453 return flg ?
false :
true;
459 ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
463 PetscParVector::PetscParVector(MPI_Comm comm,
const Vector &x_,
467 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
468 ierr = VecSetSizes(
x,n,PETSC_DECIDE); PCHKERRQ(
x,ierr);
477 #if defined(PETSC_HAVE_DEVICE)
479 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
480 ""); PCHKERRQ(x,ierr);
484 ierr = VecCUDAGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
485 rest = VecCUDARestoreArrayWrite;
491 ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
492 rest = VecRestoreArrayWrite;
495 ierr = (*rest)(
x,&array); PCHKERRQ(x,ierr);
503 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
507 MPI_Comm_rank(comm, &myid);
508 ierr = VecSetSizes(
x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(
x,ierr);
512 ierr = VecSetSizes(
x,PETSC_DECIDE,glob_size); PCHKERRQ(
x,ierr);
521 ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
528 MFEM_VERIFY(col,
"Missing distribution");
530 MPI_Comm_rank(comm, &myid);
531 ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
532 &
x); CCHKERRQ(comm,ierr)
539 ierr = VecDuplicate(y.
x,&
x); PCHKERRQ(
x,ierr);
544 bool transpose,
bool allocate) :
Vector()
548 ierr = VecCreate(comm,&
x);
550 ierr = VecSetSizes(
x,loc,PETSC_DECIDE);
567 bool transpose,
bool allocate) :
Vector()
572 ierr = MatCreateVecs(pA,&
x,NULL); PCHKERRQ(pA,ierr);
576 ierr = MatCreateVecs(pA,NULL,&
x); PCHKERRQ(pA,ierr);
582 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
597 ierr = PetscObjectReference((
PetscObject)y); PCHKERRQ(y,ierr);
606 MPI_Comm comm = pfes->
GetComm();
607 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
609 PetscMPIInt myid = 0;
610 if (!HYPRE_AssumedPartitionCheck())
612 MPI_Comm_rank(comm,&myid);
614 ierr = VecSetSizes(
x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
632 ierr = VecScatterCreateToAll(
x,&scctx,&vout); PCHKERRQ(
x,ierr);
633 ierr = VecScatterBegin(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
635 ierr = VecScatterEnd(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
637 ierr = VecScatterDestroy(&scctx); PCHKERRQ(
x,ierr);
638 ierr = VecGetArrayRead(vout,&array); PCHKERRQ(
x,ierr);
639 ierr = VecGetLocalSize(vout,&size); PCHKERRQ(
x,ierr);
642 ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(
x,ierr);
643 ierr = VecDestroy(&vout); PCHKERRQ(
x,ierr);
652 ierr = VecSet(
x,d); PCHKERRQ(
x,ierr);
660 MFEM_VERIFY(idx.
Size() == vals.
Size(),
661 "Size mismatch between indices and values");
665 ierr = VecAssemblyBegin(
x); PCHKERRQ(
x,ierr);
666 ierr = VecAssemblyEnd(
x); PCHKERRQ(
x,ierr);
674 MFEM_VERIFY(idx.
Size() == vals.
Size(),
675 "Size mismatch between indices and values");
679 ierr = VecAssemblyBegin(
x); PCHKERRQ(
x,ierr);
680 ierr = VecAssemblyEnd(
x); PCHKERRQ(
x,ierr);
687 ierr = VecCopy(y.
x,
x); PCHKERRQ(
x,ierr);
694 ierr = VecAXPY(
x,1.0,y.
x); PCHKERRQ(
x,ierr);
701 ierr = VecAXPY(
x,-1.0,y.
x); PCHKERRQ(
x,ierr);
708 ierr = VecScale(
x,s); PCHKERRQ(
x,ierr);
715 ierr = VecShift(
x,s); PCHKERRQ(
x,ierr);
722 ierr = VecPlaceArray(
x,temp_data); PCHKERRQ(
x,ierr);
727 ierr = VecResetArray(
x); PCHKERRQ(
x,ierr);
734 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
736 "Memory size " << mem.
Capacity() <<
" != " << n <<
" vector size!");
737 MFEM_VERIFY(
pdata.
Empty(),
"Vector data is not empty");
738 MFEM_VERIFY(
data.
Empty(),
"Vector data is not empty");
739 #if defined(_USE_DEVICE)
741 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
742 ""); PCHKERRQ(x,ierr);
749 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
754 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
763 #if defined(PETSC_HAVE_DEVICE)
764 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
766 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
768 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
776 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
778 "Memory size " << mem.
Capacity() <<
" != " << n <<
" vector size!");
779 MFEM_VERIFY(
pdata.
Empty(),
"Vector data is not empty");
780 MFEM_VERIFY(
data.
Empty(),
"Vector data is not empty");
781 #if defined(_USE_DEVICE)
783 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
784 ""); PCHKERRQ(x,ierr);
790 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
795 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
804 #if defined(PETSC_HAVE_DEVICE)
805 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
807 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
810 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
811 ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
817 MFEM_VERIFY(!
pdata.
Empty(),
"Vector data is empty");
829 #if defined(PETSC_HAVE_DEVICE)
830 PetscOffloadMask mask;
831 ierr = VecGetOffloadMask(
x,&mask); PCHKERRQ(
x,ierr);
832 if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
833 (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
836 ierr = VecGetArrayRead(
x,&v); PCHKERRQ(
x,ierr);
838 ierr = VecRestoreArrayRead(
x,&v); PCHKERRQ(
x,ierr);
843 if (read && !write) { ierr = VecLockReadPop(
x); PCHKERRQ(
x,ierr); }
846 #if defined(PETSC_HAVE_DEVICE)
847 ierr = VecCUDAResetArray(
x); PCHKERRQ(
x,ierr);
849 MFEM_VERIFY(
false,
"This should not happen");
854 ierr = VecResetArray(
x); PCHKERRQ(
x,ierr);
860 PetscRandom rctx = NULL;
864 ierr = PetscRandomCreate(PetscObjectComm((
PetscObject)
x),&rctx);
866 ierr = PetscRandomSetSeed(rctx,(
unsigned long)seed); PCHKERRQ(x,ierr);
867 ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
869 ierr = VecSetRandom(
x,rctx); PCHKERRQ(
x,ierr);
870 ierr = PetscRandomDestroy(&rctx); PCHKERRQ(
x,ierr);
881 ierr = PetscViewerBinaryOpen(PetscObjectComm((
PetscObject)
x),fname,
882 FILE_MODE_WRITE,&view);
886 ierr = PetscViewerASCIIOpen(PetscObjectComm((
PetscObject)
x),fname,&view);
889 ierr = VecView(
x,view); PCHKERRQ(
x,ierr);
890 ierr = PetscViewerDestroy(&view); PCHKERRQ(
x,ierr);
894 ierr = VecView(
x,NULL); PCHKERRQ(
x,ierr);
903 ierr = MatGetOwnershipRange(
A,&N,NULL); PCHKERRQ(
A,ierr);
910 ierr = MatGetOwnershipRangeColumn(
A,&N,NULL); PCHKERRQ(
A,ierr);
917 ierr = MatGetLocalSize(
A,&N,NULL); PCHKERRQ(
A,ierr);
924 ierr = MatGetLocalSize(
A,NULL,&N); PCHKERRQ(
A,ierr);
931 ierr = MatGetSize(
A,&N,NULL); PCHKERRQ(
A,ierr);
938 ierr = MatGetSize(
A,NULL,&N); PCHKERRQ(
A,ierr);
945 ierr = MatGetInfo(
A,MAT_GLOBAL_SUM,&info); PCHKERRQ(
A,ierr);
970 rows.
GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
972 cols.
GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
973 ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&
A); PCHKERRQ(B,ierr);
974 ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
975 ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
1019 BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1033 BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1046 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1047 if (
X) {
delete X; }
1048 if (
Y) {
delete Y; }
1053 #if defined(PETSC_HAVE_HYPRE)
1054 ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&
A);
1056 ierr = MatConvert_hypreParCSR_AIJ(B,&
A); CCHKERRQ(B.
GetComm(),ierr);
1067 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1068 if (
X) {
delete X; }
1069 if (
Y) {
delete Y; }
1074 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1082 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1086 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
1087 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
1088 ierr = MatAXPY(
A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),ierr);
1097 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1098 ierr = MatScale(
A,-1.0); PCHKERRQ(
A,ierr);
1102 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
1103 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
1104 ierr = MatAXPY(
A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),ierr);
// Build a parallel PETSc Mat whose diagonal blocks are copies of the local
// SparseMatrix 'diag'. Two paths are visible: a MATIS path (local-to-global
// maps built from strided index sets, local CSR preallocated on the local
// Mat) and an AIJ path (CSR arrays deep-copied/converted to PetscInt/
// PetscScalar, then handed to MatCreate{MPI,Seq}AIJWithArrays with ownership
// transferred via PetscContainers). NOTE(review): large parts of the body —
// including the branch that selects between the two paths and the
// preallocation tails — are missing from this extraction.
1109 void PetscParMatrix::
1110 BlockDiagonalConstructor(MPI_Comm comm,
1115 PetscInt lrsize,lcsize,rstart,cstart;
1116 PetscMPIInt myid = 0,commsize;
1118 ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
// With hypre's assumed-partition mode the *_starts arrays have only local
// entries; otherwise index them by the true MPI rank.
1119 if (!HYPRE_AssumedPartitionCheck())
1121 ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
1123 lrsize = row_starts[myid+1]-row_starts[myid];
1124 rstart = row_starts[myid];
1125 lcsize = col_starts[myid+1]-col_starts[myid];
1126 cstart = col_starts[myid];
// --- MATIS path: strided (contiguous) local-to-global row map...
1131 ierr = ISCreateStride(comm,diag->
Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1132 ISLocalToGlobalMapping rl2g,cl2g;
1133 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1134 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
// ...and a separate column map only when rows and columns are distributed
// differently; otherwise the row map is reused (reference taken below).
1135 if (row_starts != col_starts)
1137 ierr = ISCreateStride(comm,diag->
Width(),cstart,1,&is);
1138 CCHKERRQ(comm,ierr);
1139 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1140 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1144 ierr = PetscObjectReference((
PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1149 ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1150 ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1152 ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1153 ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1154 ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1155 ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
// Preallocate the local Mat from diag's CSR structure; with 64-bit PETSc
// indices the int arrays must first be widened into PetscInt buffers.
1160 ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1161 const
int *II = diag->HostReadI();
1162 const
int *JJ = diag->HostReadJ();
1163 #if defined(PETSC_USE_64BIT_INDICES)
1166 ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1167 for (
int i = 0; i < m; i++) { pII[i] = II[i]; }
1168 for (
int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1169 ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1171 ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1173 ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
// --- AIJ path: deep-copy the CSR arrays into PETSc-allocated buffers so the
// resulting Mat can own them (freed later by __mfem_array_container_destroy).
1186 ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1187 ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1188 ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
// Fast memcpy when PetscInt is a plain int, element-wise widening otherwise.
1189 if (
sizeof(
PetscInt) ==
sizeof(
int))
1192 CCHKERRQ(PETSC_COMM_SELF,ierr);
1194 CCHKERRQ(PETSC_COMM_SELF,ierr);
1200 for (
int i = 0; i < m; i++) { dii[i] = iii[i]; }
1201 for (
int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1204 CCHKERRQ(PETSC_COMM_SELF,ierr);
// Off-diagonal row pointers are all-zero: the matrix is block diagonal.
1205 ierr = PetscCalloc1(m,&oii);
1206 CCHKERRQ(PETSC_COMM_SELF,ierr);
1209 ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1211 dii,djj,da,oii,NULL,NULL,&A);
1212 CCHKERRQ(comm,ierr);
// Single-process fall-back: a sequential AIJ matrix over the same arrays.
1216 ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1217 CCHKERRQ(comm,ierr);
// Attach the four malloc'd arrays to the Mat through PetscContainers so they
// are released when the Mat is destroyed (ownership transfer).
1220 void *ptrs[4] = {dii,djj,da,oii};
1221 const char *names[4] = {
"_mfem_csr_dii",
1230 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1231 ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1232 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1233 CCHKERRQ(comm,ierr);
1235 CCHKERRQ(comm,ierr);
1236 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
// Final assembly makes the Mat usable by PETSc operations.
1241 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1242 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1249 return A ? PetscObjectComm((
PetscObject)A) : MPI_COMM_NULL;
// Fragment that wraps a generic mfem::Operator as a PETSc MATSHELL: the
// operator pointer becomes the shell context and the static __mfem_mat_shell_*
// callbacks (declared near the top of the file) implement mult, transpose
// mult, copy and destroy. NOTE(review): the enclosing function signature is
// missing from this extraction.
1262 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1264 PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1265 ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1266 ierr = MatShellSetContext(*A,(
void *)op); PCHKERRQ(A,ierr);
1267 ierr = MatShellSetOperation(*A,MATOP_MULT,
1268 (
void (*)())__mfem_mat_shell_apply);
1270 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1271 (
void (*)())__mfem_mat_shell_apply_transpose);
1273 ierr = MatShellSetOperation(*A,MATOP_COPY,
1274 (
void (*)())__mfem_mat_shell_copy);
1276 ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1277 (
void (*)())__mfem_mat_shell_destroy);
// Device builds: give the shell CUDA vector types and leave it free to run
// on the device; otherwise pin it to the CPU.
1278 #if defined(_USE_DEVICE)
1282 ierr = MatShellSetVecType(*A,VECCUDA); PCHKERRQ(A,ierr);
1283 ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1287 ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1291 ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1316 PetscBool avoidmatconvert = PETSC_FALSE;
1319 ierr = PetscObjectTypeCompareAny((
PetscObject)(pA->
A),&avoidmatconvert,MATMFFD,
1321 CCHKERRQ(comm,ierr);
1323 if (pA && !avoidmatconvert)
1327 #if PETSC_VERSION_LT(3,10,0)
1331 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATTRANSPOSEMAT,&istrans);
1342 #if PETSC_VERSION_LT(3,10,0)
1343 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATIS,&ismatis);
1349 ierr = MatTransposeGetMat(pA->
A,&At); CCHKERRQ(pA->
GetComm(),ierr);
1350 #if PETSC_VERSION_LT(3,10,0)
1351 ierr = PetscObjectTypeCompare((
PetscObject)(At),MATIS,&ismatis);
1359 #if PETSC_VERSION_LT(3,10,0)
1366 ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1367 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1368 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1372 ierr = MatISGetMPIXAIJ(pA->
A,MAT_INITIAL_MATRIX,A);
1373 PCHKERRQ(pA->
A,ierr);
1380 ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1386 ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1387 PCHKERRQ(pA->
A,ierr);
1388 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1389 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1393 ierr = MatConvert(pA->
A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1394 PCHKERRQ(pA->
A,ierr);
1403 ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1404 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1405 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1409 ierr = MatConvert(pA->
A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->
A,ierr);
1414 #if defined(PETSC_HAVE_HYPRE)
1418 ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1419 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1420 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1424 ierr = MatConvert(pA->
A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->
A,ierr);
1427 MFEM_ABORT(
"Reconfigure PETSc with --download-hypre or --with-hypre")
1436 MFEM_ABORT(
"Unsupported operator type conversion " << tid)
1443 #if defined(PETSC_HAVE_HYPRE)
1444 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1445 PETSC_USE_POINTER,A);
1447 ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1453 #if defined(PETSC_HAVE_HYPRE)
1454 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1455 PETSC_USE_POINTER,A);
1457 ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1463 #if defined(PETSC_HAVE_HYPRE)
1464 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1465 PETSC_USE_POINTER,A);
1468 MFEM_ABORT(
"Reconfigure PETSc with --download-hypre or --with-hypre")
1477 MFEM_ABORT(
"Conversion from HypreParCSR to operator type = " << tid <<
1478 " is not implemented");
1483 Mat *mats,*matsl2l = NULL;
1488 ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1491 ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1493 for (i=0; i<nr; i++)
1495 PetscBool needl2l = PETSC_TRUE;
1497 for (j=0; j<nc; j++)
1505 ierr = PetscObjectQuery((
PetscObject)mats[i*nc+j],
"_MatIS_PtAP_l2l",
1507 PCHKERRQ(mats[i*nc+j],ierr);
1515 ierr = PetscContainerGetPointer(c,(
void**)&l2l);
1517 MFEM_VERIFY(l2l->
Size() == 1,
"Unexpected size "
1518 << l2l->
Size() <<
" for block row " << i );
1519 ierr = PetscObjectReference((
PetscObject)(*l2l)[0]);
1521 matsl2l[i] = (*l2l)[0];
1522 needl2l = PETSC_FALSE;
1528 ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1531 ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1534 for (
int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1535 ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1538 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1539 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1540 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1543 PCHKERRQ((*A),ierr);
1544 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1546 for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1547 ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1553 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1554 ierr = MatSetSizes(*A,pI->
Height(),pI->
Width(),PETSC_DECIDE,PETSC_DECIDE);
1556 ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1557 ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1558 ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1559 ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1560 ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1563 ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1565 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1566 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1583 int n = pS->
Width();
1588 ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1589 ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1590 ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1592 for (
int i = 0; i < m; i++)
1594 bool issorted =
true;
1596 for (
int j = ii[i]; j < ii[i+1]; j++)
1599 if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1604 ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1605 CCHKERRQ(PETSC_COMM_SELF,ierr);
1609 ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1612 ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1613 CCHKERRQ(comm,ierr);
1618 ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1619 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1621 pii,pjj,pdata,oii,NULL,NULL,&B);
1622 CCHKERRQ(comm,ierr);
1624 void *ptrs[4] = {pii,pjj,pdata,oii};
1625 const char *names[4] = {
"_mfem_csr_pii",
1630 for (
int i=0; i<4; i++)
1634 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1635 ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1636 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1640 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1648 ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1649 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1653 ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1654 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1658 MFEM_ABORT(
"Unsupported operator type conversion " << tid)
1665 "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1672 ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1673 ierr = PetscObjectTypeCompare((
PetscObject)B,MATMPIAIJ,&isaij);
1674 CCHKERRQ(comm,ierr);
1675 ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1678 ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1679 ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1694 MPI_Comm comm = MPI_COMM_NULL;
1695 ierr = PetscObjectGetComm((
PetscObject)A,&comm); PCHKERRQ(A,ierr);
1696 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1707 ierr = PetscObjectReference((
PetscObject)a); PCHKERRQ(a,ierr);
1717 if (A_ == A) {
return; }
1719 ierr = PetscObjectReference((
PetscObject)A_); PCHKERRQ(A_,ierr);
// Convert the wrapped Mat (including the local Mat of a MATIS and each
// sub-Mat of a MATNEST) from SEQ/MPI AIJ to the corresponding CUSPARSE type
// so matrix operations can run on the GPU; also request explicit-transpose
// storage for efficient transposed products. No-op in non-device builds.
// NOTE(review): loop headers and several branches are missing from this
// extraction.
1725 void PetscParMatrix::SetUpForDevice()
1727 #if !defined(_USE_DEVICE)
1732 PetscBool ismatis,isnest,isseqaij,ismpiaij;
1733 ierr = PetscObjectTypeCompare((
PetscObject)A,MATIS,&ismatis);
1735 ierr = PetscObjectTypeCompare((
PetscObject)A,MATNEST,&isnest);
// For MATIS, the conversion is applied to the local matrix.
1740 ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1741 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATNEST,&isnest);
// Nest case: walk every sub-matrix and convert AIJ blocks individually.
1748 ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1758 ierr = PetscObjectTypeCompare((
PetscObject)sA,MATSEQAIJ,&isseqaij);
1760 ierr = PetscObjectTypeCompare((
PetscObject)sA,MATMPIAIJ,&ismpiaij);
1764 ierr = MatSetType(sA,MATSEQAIJCUSPARSE); PCHKERRQ(sA,ierr);
1770 ierr = MatSetType(sA,MATMPIAIJCUSPARSE); PCHKERRQ(sA,ierr);
1776 ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1777 PETSC_TRUE); PCHKERRQ(sA,ierr);
// The nest itself only needs CUDA vector types for its work vectors.
1784 ierr = MatSetVecType(tA,VECCUDA); PCHKERRQ(tA,ierr);
// Plain (non-nest) case: convert the matrix itself.
1790 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATSEQAIJ,&isseqaij);
1792 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATMPIAIJ,&ismpiaij);
1796 ierr = MatSetType(tA,MATSEQAIJCUSPARSE); PCHKERRQ(tA,ierr);
1801 ierr = MatSetType(tA,MATMPIAIJCUSPARSE); PCHKERRQ(tA,ierr);
1806 ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1807 PETSC_TRUE); PCHKERRQ(tA,ierr);
1822 f = MatMultTranspose;
1823 fadd = MatMultTransposeAdd;
1834 ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1835 ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1836 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1840 ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1841 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1852 ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1856 ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1863 ierr = PetscObjectReference((
PetscObject)master.
A); PCHKERRQ(master.
A,ierr);
1875 MFEM_VERIFY(A,
"Mat not present");
1885 MFEM_VERIFY(A,
"Mat not present");
1896 ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1900 ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1907 ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1912 MFEM_ASSERT(x.
Size() ==
Width(),
"invalid x.Size() = " << x.
Size()
1913 <<
", expected size = " <<
Width());
1914 MFEM_ASSERT(y.
Size() ==
Height(),
"invalid y.Size() = " << y.
Size()
1915 <<
", expected size = " <<
Height());
1919 bool rw = (b != 0.0);
1922 MatMultKernel(A,a,XX->x,b,YY->
x,
false);
1930 MFEM_ASSERT(x.
Size() ==
Height(),
"invalid x.Size() = " << x.
Size()
1931 <<
", expected size = " <<
Height());
1932 MFEM_ASSERT(y.
Size() ==
Width(),
"invalid y.Size() = " << y.
Size()
1933 <<
", expected size = " <<
Width());
1937 bool rw = (b != 0.0);
1940 MatMultKernel(A,a,YY->
x,b,XX->x,
true);
1953 ierr = PetscViewerBinaryOpen(PetscObjectComm((
PetscObject)A),fname,
1954 FILE_MODE_WRITE,&view);
1958 ierr = PetscViewerASCIIOpen(PetscObjectComm((
PetscObject)A),fname,&view);
1961 ierr = MatView(A,view); PCHKERRQ(A,ierr);
1962 ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
1966 ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1972 MFEM_ASSERT(s.
Size() ==
Height(),
"invalid s.Size() = " << s.
Size()
1973 <<
", expected size = " <<
Height());
1977 ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
1983 MFEM_ASSERT(s.
Size() ==
Width(),
"invalid s.Size() = " << s.
Size()
1984 <<
", expected size = " <<
Width());
1988 ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
1994 ierr = MatShift(A,(
PetscScalar)s); PCHKERRQ(A,ierr);
2000 MFEM_ASSERT(s.
Size() ==
Height(),
"invalid s.Size() = " << s.
Size()
2001 <<
", expected size = " <<
Height());
2002 MFEM_ASSERT(s.
Size() ==
Width(),
"invalid s.Size() = " << s.
Size()
2003 <<
", expected size = " <<
Width());
2007 ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2015 "Petsc TripleMatrixProduct: Number of local cols of A " << A->
Width() <<
2016 " differs from number of local rows of P " << P->
Height());
2018 "Petsc TripleMatrixProduct: Number of local rows of A " << A->
Height() <<
2019 " differs from number of local cols of R " << R->
Width());
2021 ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2028 Mat pA = *A,pP = *P,pRt = *Rt;
2030 PetscBool Aismatis,Pismatis,Rtismatis;
2033 "Petsc RAP: Number of local cols of A " << A->
Width() <<
2034 " differs from number of local rows of P " << P->
Height());
2036 "Petsc RAP: Number of local rows of A " << A->
Height() <<
2037 " differs from number of local rows of Rt " << Rt->
Height());
2038 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&Aismatis);
2040 ierr = PetscObjectTypeCompare((
PetscObject)pP,MATIS,&Pismatis);
2042 ierr = PetscObjectTypeCompare((
PetscObject)pRt,MATIS,&Rtismatis);
2049 ISLocalToGlobalMapping cl2gP,cl2gRt;
2050 PetscInt rlsize,clsize,rsize,csize;
2052 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2053 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2054 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2055 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2056 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2057 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2058 ierr = MatCreate(A->
GetComm(),&B); PCHKERRQ(pA,ierr);
2059 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2060 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2061 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2062 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2063 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2064 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2067 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2073 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2074 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2076 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2084 ierr = PetscObjectReference((
PetscObject)lRt); PCHKERRQ(lRt,ierr);
2085 (*vmatsl2l)[0] = lRt;
2088 ierr = PetscContainerCreate(PetscObjectComm((
PetscObject)B),&c);
2090 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2091 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2095 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2099 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2100 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2101 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2102 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2108 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2114 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2115 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2117 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2132 #if defined(PETSC_HAVE_HYPRE)
2147 ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2157 ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2159 ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2168 MFEM_ABORT(
"Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2177 ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2178 MFEM_VERIFY(M == N,
"Rectangular case unsupported");
2181 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2185 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2188 ierr = Convert_Array_IS(
GetComm(),
true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2191 ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2195 ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2197 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2202 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2206 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2209 ierr = Convert_Array_IS(
GetComm(),
true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2210 ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2211 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2221 ierr = PetscObjectDereference((
PetscObject)A); CCHKERRQ(comm,ierr);
2230 MFEM_VERIFY(A,
"no associated PETSc Mat object");
2233 ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
2235 ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2237 ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2239 ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2241 ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2243 ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2245 #if defined(PETSC_HAVE_HYPRE)
2246 ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2260 Ae.
Mult(-1.0, X, 1.0, B);
2263 ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2264 ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2265 for (
int i = 0; i < ess_dof_list.
Size(); i++)
2267 int r = ess_dof_list[i];
2268 B(r) = array[r] * X(r);
2270 ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2299 if (
cid == KSP_CLASSID)
2302 ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2304 else if (
cid == SNES_CLASSID)
2307 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2310 else if (
cid == TS_CLASSID)
2313 ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2317 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2324 if (
cid == KSP_CLASSID)
2327 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2329 else if (
cid == SNES_CLASSID)
2332 ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2335 else if (
cid == TS_CLASSID)
2338 ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2342 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2349 if (
cid == KSP_CLASSID)
2352 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2355 else if (
cid == SNES_CLASSID)
2358 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2359 max_iter,PETSC_DEFAULT);
2361 else if (
cid == TS_CLASSID)
2364 ierr = TSSetMaxSteps(ts,max_iter);
2368 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2376 typedef PetscErrorCode (*myPetscFunc)(
void**);
2377 PetscViewerAndFormat *vf = NULL;
2378 PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(
obj));
2382 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2385 if (
cid == KSP_CLASSID)
2393 ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2397 #if PETSC_VERSION_LT(3,15,0)
2398 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2400 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2402 (myPetscFunc)PetscViewerAndFormatDestroy);
2407 ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2408 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2409 (myPetscFunc)PetscViewerAndFormatDestroy);
2413 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2414 PCHKERRQ(viewer,ierr);
2415 #if PETSC_VERSION_LT(3,15,0)
2416 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2418 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2420 (myPetscFunc)PetscViewerAndFormatDestroy);
2425 else if (
cid == SNES_CLASSID)
2431 ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2435 ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2436 (myPetscFunc)PetscViewerAndFormatDestroy);
2437 PCHKERRQ(snes,ierr);
2440 else if (
cid == TS_CLASSID)
2445 ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2450 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2456 return obj ? PetscObjectComm(
obj) : MPI_COMM_NULL;
2461 __mfem_monitor_ctx *monctx;
2462 ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2463 monctx->solver =
this;
2464 monctx->monitor =
ctx;
2465 if (
cid == KSP_CLASSID)
2467 ierr = KSPMonitorSet((
KSP)
obj,__mfem_ksp_monitor,monctx,
2468 __mfem_monitor_ctx_destroy);
2471 else if (
cid == SNES_CLASSID)
2473 ierr = SNESMonitorSet((
SNES)
obj,__mfem_snes_monitor,monctx,
2474 __mfem_monitor_ctx_destroy);
2477 else if (
cid == TS_CLASSID)
2479 ierr = TSMonitorSet((
TS)
obj,__mfem_ts_monitor,monctx,
2480 __mfem_monitor_ctx_destroy);
2485 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2492 if (
cid == SNES_CLASSID)
2494 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)
private_ctx;
2497 else if (
cid == TS_CLASSID)
2499 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)
private_ctx;
2504 MFEM_ABORT(
"Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2511 if (
cid == TS_CLASSID)
2516 ierr = TSGetSNES((
TS)
obj,&snes); PCHKERRQ(obj,ierr);
2517 ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2518 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2520 else if (
cid == SNES_CLASSID)
2524 ierr = SNESGetKSP((
SNES)
obj,&ksp); PCHKERRQ(obj,ierr);
2525 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2527 else if (
cid == KSP_CLASSID)
2529 ierr = KSPGetPC((
KSP)
obj,&pc); PCHKERRQ(obj,ierr);
2531 else if (
cid == PC_CLASSID)
2537 MFEM_ABORT(
"No support for PetscPreconditionerFactory for this object");
2541 ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2545 ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2551 if (!customize) {
clcustom =
true; }
2554 if (
cid == PC_CLASSID)
2557 ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2559 else if (
cid == KSP_CLASSID)
2562 ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2564 else if (
cid == SNES_CLASSID)
2567 ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2569 else if (
cid == TS_CLASSID)
2572 ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2576 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2584 if (
cid == KSP_CLASSID)
2587 KSPConvergedReason reason;
2588 ierr = KSPGetConvergedReason(ksp,&reason);
2590 return reason > 0 ? 1 : 0;
2592 else if (
cid == SNES_CLASSID)
2595 SNESConvergedReason reason;
2596 ierr = SNESGetConvergedReason(snes,&reason);
2597 PCHKERRQ(snes,ierr);
2598 return reason > 0 ? 1 : 0;
2600 else if (
cid == TS_CLASSID)
2603 TSConvergedReason reason;
2604 ierr = TSGetConvergedReason(ts,&reason);
2606 return reason > 0 ? 1 : 0;
2610 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2617 if (
cid == KSP_CLASSID)
2621 ierr = KSPGetIterationNumber(ksp,&its);
2625 else if (
cid == SNES_CLASSID)
2629 ierr = SNESGetIterationNumber(snes,&its);
2630 PCHKERRQ(snes,ierr);
2633 else if (
cid == TS_CLASSID)
2637 ierr = TSGetStepNumber(ts,&its);
2643 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2650 if (
cid == KSP_CLASSID)
2654 ierr = KSPGetResidualNorm(ksp,&norm);
2658 if (
cid == SNES_CLASSID)
2662 ierr = SNESGetFunctionNorm(snes,&norm);
2663 PCHKERRQ(snes,ierr);
2668 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2669 return PETSC_MAX_REAL;
2676 if (
cid == SNES_CLASSID)
2678 __mfem_snes_ctx *snes_ctx;
2679 ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2680 snes_ctx->op = NULL;
2681 snes_ctx->bchandler = NULL;
2682 snes_ctx->work = NULL;
2686 else if (
cid == TS_CLASSID)
2688 __mfem_ts_ctx *ts_ctx;
2689 ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2691 ts_ctx->bchandler = NULL;
2692 ts_ctx->work = NULL;
2693 ts_ctx->work2 = NULL;
2694 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2695 ts_ctx->cached_ijacstate = -1;
2696 ts_ctx->cached_rhsjacstate = -1;
2697 ts_ctx->cached_splits_xstate = -1;
2698 ts_ctx->cached_splits_xdotstate = -1;
2709 if (
cid == SNES_CLASSID)
2711 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)
private_ctx;
2712 delete snes_ctx->work;
2714 else if (
cid == TS_CLASSID)
2716 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)
private_ctx;
2717 delete ts_ctx->work;
2718 delete ts_ctx->work2;
2720 ierr = PetscFree(
private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2727 : bctype(type_), setup(false), eval_t(0.0),
2728 eval_t_cached(std::numeric_limits<double>::min())
2736 ess_tdof_list.
Assign(list);
2742 if (setup) {
return; }
2746 this->
Eval(eval_t,eval_g);
2747 eval_t_cached = eval_t;
2758 (*this).SetUp(x.
Size());
2762 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2764 y[ess_tdof_list[i]] = 0.0;
2769 if (bctype !=
CONSTANT && eval_t != eval_t_cached)
2771 Eval(eval_t,eval_g);
2772 eval_t_cached = eval_t;
2774 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2776 y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2783 (*this).SetUp(x.
Size());
2786 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2788 x[ess_tdof_list[i]] = 0.0;
2793 if (bctype !=
CONSTANT && eval_t != eval_t_cached)
2795 Eval(eval_t,eval_g);
2796 eval_t_cached = eval_t;
2798 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2800 x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2807 (*this).SetUp(x.
Size());
2810 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2812 y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2817 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2819 y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2826 (*this).SetUp(x.
Size());
2827 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2829 x[ess_tdof_list[i]] = 0.0;
2835 (*this).SetUp(x.
Size());
2837 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2839 y[ess_tdof_list[i]] = 0.0;
2850 ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2852 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
2853 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2857 const std::string &prefix)
2863 ierr = PetscObjectGetClassId(obj,&
cid); PCHKERRQ(obj,ierr);
2864 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2869 const std::string &prefix)
2875 ierr = PetscObjectGetClassId(obj, &
cid); PCHKERRQ(obj, ierr);
2876 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2888 bool delete_pA =
false;
2906 MFEM_VERIFY(pA,
"Unsupported operation!");
2914 PetscInt nheight,nwidth,oheight,owidth;
2916 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2917 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2918 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2919 if (nheight != oheight || nwidth != owidth)
2923 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2929 ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2938 if (delete_pA) {
delete pA; }
2953 bool delete_pA =
false;
2971 MFEM_VERIFY(pA,
"Unsupported operation!");
2974 bool delete_ppA =
false;
2977 if (oA == poA && !wrap)
2987 MFEM_VERIFY(ppA,
"Unsupported operation!");
2996 PetscInt nheight,nwidth,oheight,owidth;
2998 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2999 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3000 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3001 if (nheight != oheight || nwidth != owidth)
3005 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3012 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3021 if (delete_pA) {
delete pA; }
3022 if (delete_ppA) {
delete ppA; }
3032 ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3035 ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3036 ierr = PetscObjectReference((
PetscObject)A); PCHKERRQ(ksp,ierr);
3041 ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3050 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3051 ierr = MakeShellPC(pc,precond,
false); PCHKERRQ(ksp,ierr);
3057 ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3058 ierr = PetscObjectReference((
PetscObject)P); PCHKERRQ(ksp,ierr);
3059 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3060 ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3061 ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3065 void PetscLinearSolver::MultKernel(
const Vector &b,
Vector &x,
bool trans)
const
3072 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(
obj, ierr);
3080 PetscParMatrix A = PetscParMatrix(pA,
true);
3081 X =
new PetscParVector(A,
false,
false);
3089 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
3090 PCHKERRQ(ksp, ierr);
3095 ierr = KSPSolveTranspose(ksp,
B->
x, X->x); PCHKERRQ(ksp,ierr);
3099 ierr = KSPSolve(ksp,
B->
x, X->x); PCHKERRQ(ksp,ierr);
3107 (*this).MultKernel(b,x,
false);
3112 (*this).MultKernel(b,x,
true);
3119 ierr = PetscObjectGetComm((
PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3120 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3129 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3131 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3138 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3140 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3144 const std::string &prefix)
3148 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3150 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3156 const std::string &prefix)
3160 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3162 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3163 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3167 const string &prefix)
3173 ierr = PetscObjectGetClassId(obj,&
cid); PCHKERRQ(obj,ierr);
3174 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3179 const string &prefix)
3183 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3185 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3186 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3192 bool delete_pA =
false;
3209 PetscInt nheight,nwidth,oheight,owidth;
3211 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3212 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3213 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3214 if (nheight != oheight || nwidth != owidth)
3218 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3224 ierr = PCSetOperators(pc,pA->
A,pA->
A); PCHKERRQ(
obj,ierr);
3233 if (delete_pA) {
delete pA; };
3236 void PetscPreconditioner::MultKernel(
const Vector &b,
Vector &x,
3240 "Iterative mode not supported for PetscPreconditioner");
3246 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(
obj, ierr);
3254 PetscParMatrix A(pA,
true);
3255 X =
new PetscParVector(A,
false,
false);
3266 ierr = PCApplyTranspose(pc,
B->
x, X->x); PCHKERRQ(pc, ierr);
3270 ierr = PCApply(pc,
B->
x, X->x); PCHKERRQ(pc, ierr);
3278 (*this).MultKernel(b,x,
false);
3283 (*this).MultKernel(b,x,
true);
3290 ierr = PetscObjectGetComm((
PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3291 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3302 void PetscBDDCSolver::BDDCSolverConstructor(
const PetscBDDCSolverParams &opts)
3304 MPI_Comm comm = PetscObjectComm(
obj);
3309 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3313 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&ismatis);
3315 MFEM_VERIFY(ismatis,
"PetscBDDCSolver needs the matrix in unassembled format");
3318 ParFiniteElementSpace *fespace = opts.fespace;
3319 if (opts.netflux && !fespace)
3321 MFEM_WARNING(
"Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3328 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3329 ierr = MatNullSpaceCreate(PetscObjectComm((
PetscObject)lA),PETSC_TRUE,0,NULL,
3330 &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3331 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3332 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3336 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(
obj,ierr);
3339 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3340 ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3349 int vdim = fespace->GetVDim();
3356 ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3357 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3358 ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3367 ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3380 ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3386 const FiniteElementCollection *fec = fespace->FEColl();
3387 bool h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3390 ParFiniteElementSpace *fespace_coords = fespace;
3392 sdim = fespace->GetParMesh()->SpaceDimension();
3395 fespace_coords =
new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3398 VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3399 ParGridFunction gf_coords(fespace_coords);
3400 gf_coords.ProjectCoefficient(coeff_coords);
3401 HypreParVector *hvec_coords = gf_coords.ParallelProject();
3403 hvec_coords->Size(),
false);
3410 Vec pvec_coords,lvec_coords;
3411 ISLocalToGlobalMapping l2g;
3417 ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3418 hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3419 CCHKERRQ(comm,ierr);
3420 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3421 ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3422 ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3423 ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3424 ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3425 ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3426 ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3427 ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3428 ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3429 CCHKERRQ(comm,ierr);
3430 ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3435 ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3436 ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3437 #if PETSC_VERSION_LT(3,15,0)
3438 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3439 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3441 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3442 CCHKERRQ(comm,ierr);
3443 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3444 CCHKERRQ(comm,ierr);
3446 ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3447 ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3449 ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3450 ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3451 CCHKERRQ(PETSC_COMM_SELF,ierr);
3452 ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3453 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3454 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3455 ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3459 ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3468 ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3469 ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3473 for (
PetscInt d = 0; d < sdim; d++)
3475 coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3478 ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3483 for (
PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3485 if (fespace_coords != fespace)
3487 delete fespace_coords;
3494 IS dir = NULL, neu = NULL;
3497 Array<Mat> *l2l = NULL;
3498 if (opts.ess_dof_local || opts.nat_dof_local)
3503 MFEM_VERIFY(c,
"Local-to-local PETSc container not present");
3504 ierr = PetscContainerGetPointer(c,(
void**)&l2l); PCHKERRQ(c,ierr);
3511 PetscBool lpr = PETSC_FALSE,pr;
3512 if (opts.ess_dof) { lpr = PETSC_TRUE; }
3513 ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3515 MFEM_VERIFY(lpr == pr,
"ess_dof should be collectively set");
3517 if (opts.nat_dof) { lpr = PETSC_TRUE; }
3518 ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3520 MFEM_VERIFY(lpr == pr,
"nat_dof should be collectively set");
3523 ms[0] = -nf; ms[1] = nf;
3524 ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3526 MFEM_VERIFY(-Ms[0] == Ms[1],
3527 "number of fields should be the same across processes");
3534 PetscInt st = opts.ess_dof_local ? 0 : rst;
3535 if (!opts.ess_dof_local)
3538 ierr = Convert_Array_IS(comm,
true,opts.ess_dof,st,&dir);
3539 CCHKERRQ(comm,ierr);
3540 ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3545 ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3546 CCHKERRQ(comm,ierr);
3547 ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3552 PetscInt st = opts.nat_dof_local ? 0 : rst;
3553 if (!opts.nat_dof_local)
3556 ierr = Convert_Array_IS(comm,
true,opts.nat_dof,st,&neu);
3557 CCHKERRQ(comm,ierr);
3558 ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3563 ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3564 CCHKERRQ(comm,ierr);
3565 ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3572 ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3576 ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3578 ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3583 ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3585 ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3594 const FiniteElementCollection *fec = fespace->FEColl();
3595 bool edgespace, rtspace, h1space;
3596 bool needint = opts.netflux;
3597 bool tracespace, rt_tracespace, edge_tracespace;
3599 PetscBool B_is_Trans = PETSC_FALSE;
3601 ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3602 dim = pmesh->Dimension();
3603 vdim = fespace->GetVDim();
3604 h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3605 rtspace =
dynamic_cast<const RT_FECollection*
>(fec);
3606 edgespace =
dynamic_cast<const ND_FECollection*
>(fec);
3607 edge_tracespace =
dynamic_cast<const ND_Trace_FECollection*
>(fec);
3608 rt_tracespace =
dynamic_cast<const RT_Trace_FECollection*
>(fec);
3609 tracespace = edge_tracespace || rt_tracespace;
3612 if (fespace->GetNE() > 0)
3616 p = fespace->GetElementOrder(0);
3620 p = fespace->GetFaceOrder(0);
3621 if (dim == 2) { p++; }
3632 MFEM_WARNING(
"Tracespace case doesn't work for H(curl) and p=2,"
3633 " not using auxiliary quadrature");
3639 FiniteElementCollection *vfec;
3642 vfec =
new H1_Trace_FECollection(p,dim);
3646 vfec =
new H1_FECollection(p,dim);
3648 ParFiniteElementSpace *vfespace =
new ParFiniteElementSpace(pmesh,vfec);
3649 ParDiscreteLinearOperator *grad;
3650 grad =
new ParDiscreteLinearOperator(vfespace,fespace);
3653 grad->AddTraceFaceInterpolator(
new GradientInterpolator);
3657 grad->AddDomainInterpolator(
new GradientInterpolator);
3661 HypreParMatrix *hG = grad->ParallelAssemble();
3662 PetscParMatrix *G =
new PetscParMatrix(hG,
PETSC_MATAIJ);
3666 PetscBool conforming = PETSC_TRUE;
3667 if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3668 ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3680 MFEM_WARNING(
"Tracespace case doesn't work for H(div), not using"
3681 " auxiliary quadrature");
3687 if (vdim != dim) { needint =
false; }
3690 PetscParMatrix *
B = NULL;
3696 FiniteElementCollection *auxcoll;
3697 if (tracespace) { auxcoll =
new RT_Trace_FECollection(p,dim); }
3702 auxcoll =
new H1_FECollection(std::max(p-1,1),dim);
3706 auxcoll =
new L2_FECollection(p,dim);
3709 ParFiniteElementSpace *pspace =
new ParFiniteElementSpace(pmesh,auxcoll);
3710 ParMixedBilinearForm *b =
new ParMixedBilinearForm(fespace,pspace);
3716 b->AddTraceFaceIntegrator(
new VectorFECurlIntegrator);
3720 b->AddDomainIntegrator(
new VectorFECurlIntegrator);
3727 b->AddTraceFaceIntegrator(
new VectorFEDivergenceIntegrator);
3731 b->AddDomainIntegrator(
new VectorFEDivergenceIntegrator);
3736 b->AddDomainIntegrator(
new VectorDivergenceIntegrator);
3741 b->ParallelAssemble(Bh);
3743 Bh.SetOperatorOwner(
false);
3748 ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3749 if (!opts.ess_dof_local)
3751 ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3755 ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3757 B_is_Trans = PETSC_TRUE;
3766 ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3770 ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3771 ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3776 const std::string &prefix)
3779 BDDCSolverConstructor(opts);
3785 const std::string &prefix)
3788 BDDCSolverConstructor(opts);
3793 const string &prefix)
3797 ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3800 ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3804 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATNEST,&isnest);
3810 ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3811 ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3812 ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3822 ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3824 ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3829 const std::string &prefix)
3833 MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
3834 MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE);
3836 H2SolverConstructor(fes);
3842 #if defined(PETSC_HAVE_H2OPUS)
3854 VectorFunctionCoefficient ccoords(sdim, func_coords);
3856 ParGridFunction coords(fes);
3857 coords.ProjectCoefficient(ccoords);
3859 coords.ParallelProject(c);
3861 PCSetType(*
this,PCH2OPUS);
3864 PCSetFromOptions(*
this);
3866 MFEM_ABORT(
"Need PETSc configured with --download-h2opus");
3873 const std::string &prefix)
3878 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3880 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
3881 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3888 const std::string &prefix)
3893 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3895 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
3896 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3908 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj, ierr);
3909 ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3921 ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3922 PCHKERRQ(snes, ierr);
3923 ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3924 PCHKERRQ(snes, ierr);
3927 (
void*)&op == fctx &&
3928 (
void*)&op == jctx);
3929 ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3931 PCHKERRQ(snes,ierr);
3934 ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3945 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3946 ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3955 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
3957 ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (
void *)snes_ctx);
3958 PCHKERRQ(snes, ierr);
3959 ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
3961 PCHKERRQ(snes, ierr);
3963 ierr = MatDestroy(&dummy);
3964 PCHKERRQ(snes, ierr);
3976 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
3977 snes_ctx->jacType = jacType;
3983 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
3984 snes_ctx->objective = objfn;
3987 ierr = SNESSetObjective(snes, __mfem_snes_objective, (
void *)snes_ctx);
3988 PCHKERRQ(snes, ierr);
3995 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
3996 snes_ctx->postcheck = post;
4000 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4001 ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (
void *)snes_ctx);
4011 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4012 snes_ctx->update = update;
4015 ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4022 bool b_nonempty = b.
Size();
4026 if (b_nonempty) { B->PlaceMemory(b.
GetMemory()); }
4034 ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4036 if (b_nonempty) { B->ResetMemory(); }
4046 ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4048 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
4049 ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4055 ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4057 ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4060 ierr = TSGetAdapt(ts,&tsad);
4062 ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4070 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj,ierr);
4071 ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4079 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4082 ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4085 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4086 ts_ctx->cached_ijacstate = -1;
4087 ts_ctx->cached_rhsjacstate = -1;
4088 ts_ctx->cached_splits_xstate = -1;
4089 ts_ctx->cached_splits_xdotstate = -1;
4101 ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (
void *)ts_ctx);
4103 ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (
void *)ts_ctx);
4105 ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4107 ierr = MatDestroy(&dummy);
4115 ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4124 ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (
void *)ts_ctx);
4126 ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4129 ierr = MatDestroy(&dummy);
4138 ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4141 PetscBool use = PETSC_TRUE;
4142 ierr = PetscOptionsGetBool(NULL,NULL,
"-mfem_use_splitjac",&use,NULL);
4145 ierr = PetscObjectComposeFunction((
PetscObject)ts,
"TSComputeSplitJacobians_C",
4146 __mfem_ts_computesplits);
4151 ierr = PetscObjectComposeFunction((
PetscObject)ts,
"TSComputeSplitJacobians_C",
4159 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4160 ts_ctx->jacType = jacType;
4165 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4166 return ts_ctx->type;
4171 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4174 ts_ctx->type = type;
4177 ierr = TSSetProblemType(ts, TS_LINEAR);
4182 ierr = TSSetProblemType(ts, TS_NONLINEAR);
4191 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4192 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4195 ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4205 ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4209 ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4210 ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4215 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4220 ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4229 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4230 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4231 ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4232 ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4244 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4245 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4246 ts_ctx->cached_ijacstate = -1;
4247 ts_ctx->cached_rhsjacstate = -1;
4248 ts_ctx->cached_splits_xstate = -1;
4249 ts_ctx->cached_splits_xdotstate = -1;
4252 ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4257 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4259 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4265 #include "petsc/private/petscimpl.h"
4266 #include "petsc/private/matimpl.h"
4272 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4274 PetscFunctionBeginUser;
4277 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4289 PetscFunctionReturn(0);
4292 static PetscErrorCode __mfem_ts_ifunction(
TS ts,
PetscReal t, Vec x, Vec xp,
4295 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4297 PetscFunctionBeginUser;
4305 if (ts_ctx->bchandler)
4310 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(xx.Size()); }
4311 if (!ts_ctx->work2) { ts_ctx->work2 =
new mfem::Vector(xx.Size()); }
4317 bchandler->
ZeroBC(yy,*txp);
4327 ff.UpdateVecFromFlags();
4328 PetscFunctionReturn(0);
4331 static PetscErrorCode __mfem_ts_rhsfunction(
TS ts,
PetscReal t, Vec x, Vec f,
4334 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4336 PetscFunctionBeginUser;
4337 if (ts_ctx->bchandler) { MFEM_ABORT(
"RHS evaluation with bc not implemented"); }
4346 ff.UpdateVecFromFlags();
4347 PetscFunctionReturn(0);
4350 static PetscErrorCode __mfem_ts_ijacobian(
TS ts,
PetscReal t, Vec x,
4354 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4359 PetscObjectState state;
4360 PetscErrorCode ierr;
4362 PetscFunctionBeginUser;
4366 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4367 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4373 ierr = PetscObjectStateGet((
PetscObject)P,&state); CHKERRQ(ierr);
4375 std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4376 state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
4383 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4384 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4386 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4387 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4388 if (!ts_ctx->bchandler)
4395 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4402 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4406 if (!ts_ctx->bchandler) {
delete xx; }
4407 ts_ctx->cached_shift = shift;
4410 bool delete_pA =
false;
4414 pA->
GetType() != ts_ctx->jacType))
4422 if (ts_ctx->bchandler)
4430 PetscObjectState nonzerostate;
4431 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4436 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4437 if (delete_pA) {
delete pA; }
4449 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4451 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4454 ierr = PetscObjectStateGet((
PetscObject)P,&ts_ctx->cached_ijacstate);
4460 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4461 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4462 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4463 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4464 PetscFunctionReturn(0);
4467 static PetscErrorCode __mfem_ts_computesplits(
TS ts,
PetscReal t,Vec x,Vec xp,
4471 __mfem_ts_ctx* ts_ctx;
4475 PetscObjectState state;
4476 PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4477 PetscBool assembled;
4478 PetscErrorCode ierr;
4480 PetscFunctionBeginUser;
4484 ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4485 ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4487 if (Axp && Axp != Jxp)
4489 ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4490 ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4493 ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(
void**)&ts_ctx); CHKERRQ(ierr);
4496 ierr = PetscObjectStateGet((
PetscObject)Jx,&state); CHKERRQ(ierr);
4498 state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4499 ierr = PetscObjectStateGet((
PetscObject)Jxp,&state); CHKERRQ(ierr);
4501 state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4502 if (!rx && !rxp) { PetscFunctionReturn(0); }
4509 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4510 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4512 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4513 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4514 if (!ts_ctx->bchandler)
4521 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4528 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4537 bool delete_mat =
false;
4541 pJx->
GetType() != ts_ctx->jacType))
4546 ierr = PetscObjectReference((
PetscObject)B); CHKERRQ(ierr);
4554 ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4557 ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4562 ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4563 ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4566 if (delete_mat) {
delete pJx; }
4570 if (ts_ctx->bchandler)
4587 pJxp->
GetType() != ts_ctx->jacType))
4592 ierr = PetscObjectReference((
PetscObject)B); CHKERRQ(ierr);
4595 &oJxp,ts_ctx->jacType);
4599 ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4602 ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4607 ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4608 ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4610 if (delete_mat) {
delete pJxp; }
4614 if (ts_ctx->bchandler)
4625 ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4629 ierr = PetscObjectStateGet((
PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4631 ierr = PetscObjectStateGet((
PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4636 if (!ts_ctx->bchandler) {
delete xx; }
4637 PetscFunctionReturn(0);
4640 static PetscErrorCode __mfem_ts_rhsjacobian(
TS ts,
PetscReal t, Vec x,
4643 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4647 PetscObjectState state;
4648 PetscErrorCode ierr;
4650 PetscFunctionBeginUser;
4654 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4655 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4659 ierr = PetscObjectStateGet((
PetscObject)P,&state); CHKERRQ(ierr);
4661 state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
4668 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4669 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4670 if (!ts_ctx->bchandler)
4677 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4684 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4688 if (!ts_ctx->bchandler) {
delete xx; }
4691 bool delete_pA =
false;
4695 pA->
GetType() != ts_ctx->jacType))
4703 if (ts_ctx->bchandler)
4711 PetscObjectState nonzerostate;
4712 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4717 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4718 if (delete_pA) {
delete pA; }
4730 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4732 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4737 ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4739 ierr = PetscObjectStateGet((
PetscObject)P,&ts_ctx->cached_rhsjacstate);
4745 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4746 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4747 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4748 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4749 PetscFunctionReturn(0);
4755 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4757 PetscFunctionBeginUser;
4760 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4769 PetscErrorCode ierr;
4771 ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4778 PetscErrorCode ierr;
4780 ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4785 PetscFunctionReturn(0);
4788 static PetscErrorCode __mfem_snes_jacobian(
SNES snes, Vec x,
Mat A,
Mat P,
4793 PetscErrorCode ierr;
4795 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4797 PetscFunctionBeginUser;
4798 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4799 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4800 if (!snes_ctx->bchandler)
4807 if (!snes_ctx->work) { snes_ctx->work =
new mfem::Vector(n); }
4810 xx = snes_ctx->work;
4816 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4817 if (!snes_ctx->bchandler) {
delete xx; }
4820 bool delete_pA =
false;
4824 pA->
GetType() != snes_ctx->jacType))
4832 if (snes_ctx->bchandler)
4840 PetscObjectState nonzerostate;
4841 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4845 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4846 if (delete_pA) {
delete pA; }
4858 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4860 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4865 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4866 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4872 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4873 ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4874 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4875 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4876 PetscFunctionReturn(0);
4879 static PetscErrorCode __mfem_snes_function(
SNES snes, Vec x, Vec f,
void *ctx)
4881 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4883 PetscFunctionBeginUser;
4886 if (snes_ctx->bchandler)
4893 snes_ctx->op->Mult(*txx,ff);
4900 snes_ctx->op->Mult(xx,ff);
4902 ff.UpdateVecFromFlags();
4903 PetscFunctionReturn(0);
4906 static PetscErrorCode __mfem_snes_objective(
SNES snes, Vec x,
PetscReal *f,
4909 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4911 PetscFunctionBeginUser;
4912 if (!snes_ctx->objective)
4914 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing objective function");
4918 (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4920 PetscFunctionReturn(0);
4923 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4924 PetscBool *cy,PetscBool *cw,
void* ctx)
4926 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4927 bool lcy =
false,lcw =
false;
4929 PetscFunctionBeginUser;
4933 (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4934 if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
4935 if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
4936 PetscFunctionReturn(0);
4939 static PetscErrorCode __mfem_snes_update(
SNES snes,
PetscInt it)
4942 __mfem_snes_ctx* snes_ctx;
4944 PetscFunctionBeginUser;
4946 ierr = SNESGetFunction(snes,&F,NULL,(
void **)&snes_ctx); CHKERRQ(ierr);
4947 ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4950 ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4953 ierr = VecDestroy(&pX); CHKERRQ(ierr);
4957 if (!pX) SETERRQ(PetscObjectComm((
PetscObject)snes),PETSC_ERR_USER,
4958 "Missing previous solution");
4959 ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4964 (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4966 ierr = VecCopy(X,pX); CHKERRQ(ierr);
4967 PetscFunctionReturn(0);
4973 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4975 PetscFunctionBeginUser;
4978 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4987 PetscErrorCode ierr;
4989 ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
4996 PetscErrorCode ierr;
4998 ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5003 PetscFunctionReturn(0);
5006 static PetscErrorCode __mfem_mat_shell_apply(
Mat A, Vec x, Vec y)
5009 PetscErrorCode ierr;
5011 PetscFunctionBeginUser;
5012 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5013 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5017 yy.UpdateVecFromFlags();
5018 PetscFunctionReturn(0);
5021 static PetscErrorCode __mfem_mat_shell_apply_transpose(
Mat A, Vec x, Vec y)
5024 PetscErrorCode ierr;
5027 PetscFunctionBeginUser;
5028 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5029 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5032 ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5041 yy.UpdateVecFromFlags();
5042 PetscFunctionReturn(0);
5045 static PetscErrorCode __mfem_mat_shell_copy(
Mat A,
Mat B, MatStructure str)
5048 PetscErrorCode ierr;
5050 PetscFunctionBeginUser;
5051 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5052 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5053 ierr = MatShellSetContext(B,(
void *)op); CHKERRQ(ierr);
5054 PetscFunctionReturn(0);
5057 static PetscErrorCode __mfem_mat_shell_destroy(
Mat A)
5059 PetscFunctionBeginUser;
5060 PetscFunctionReturn(0);
5063 static PetscErrorCode __mfem_pc_shell_view(
PC pc, PetscViewer viewer)
5065 __mfem_pc_shell_ctx *
ctx;
5066 PetscErrorCode ierr;
5068 PetscFunctionBeginUser;
5069 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5073 ierr = PetscObjectTypeCompare((
PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5080 ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5086 ierr = PetscViewerASCIIPrintf(viewer,
5087 "No information available on the mfem::Solver\n");
5091 if (isascii && ctx->factory)
5093 ierr = PetscViewerASCIIPrintf(viewer,
5094 "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5098 PetscFunctionReturn(0);
5101 static PetscErrorCode __mfem_pc_shell_apply(
PC pc, Vec x, Vec y)
5103 __mfem_pc_shell_ctx *
ctx;
5104 PetscErrorCode ierr;
5106 PetscFunctionBeginUser;
5109 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5112 ctx->op->Mult(xx,yy);
5113 yy.UpdateVecFromFlags();
5119 PetscFunctionReturn(0);
5122 static PetscErrorCode __mfem_pc_shell_apply_transpose(
PC pc, Vec x, Vec y)
5124 __mfem_pc_shell_ctx *
ctx;
5125 PetscErrorCode ierr;
5127 PetscFunctionBeginUser;
5130 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5133 ctx->op->MultTranspose(xx,yy);
5134 yy.UpdateVecFromFlags();
5140 PetscFunctionReturn(0);
5143 static PetscErrorCode __mfem_pc_shell_setup(
PC pc)
5145 __mfem_pc_shell_ctx *
ctx;
5147 PetscFunctionBeginUser;
5148 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5159 ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5168 PetscFunctionReturn(0);
5171 static PetscErrorCode __mfem_pc_shell_destroy(
PC pc)
5173 __mfem_pc_shell_ctx *
ctx;
5174 PetscErrorCode ierr;
5176 PetscFunctionBeginUser;
5177 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5183 PetscFunctionReturn(0);
5186 static PetscErrorCode __mfem_array_container_destroy(
void *ptr)
5188 PetscErrorCode ierr;
5190 PetscFunctionBeginUser;
5191 ierr = PetscFree(ptr); CHKERRQ(ierr);
5192 PetscFunctionReturn(0);
5195 static PetscErrorCode __mfem_matarray_container_destroy(
void *ptr)
5198 PetscErrorCode ierr;
5200 PetscFunctionBeginUser;
5201 for (
int i=0; i<a->
Size(); i++)
5205 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5208 PetscFunctionReturn(0);
5211 static PetscErrorCode __mfem_monitor_ctx_destroy(
void **ctx)
5213 PetscErrorCode ierr;
5215 PetscFunctionBeginUser;
5216 ierr = PetscFree(*ctx); CHKERRQ(ierr);
5217 PetscFunctionReturn(0);
5222 PetscErrorCode MakeShellPC(
PC pc,
mfem::Solver &precond,
bool ownsop)
5224 PetscFunctionBeginUser;
5225 __mfem_pc_shell_ctx *ctx =
new __mfem_pc_shell_ctx;
5227 ctx->ownsop = ownsop;
5228 ctx->factory = NULL;
5234 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5236 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5237 ierr = PCShellSetName(pc,
"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5238 ierr = PCShellSetContext(pc,(
void *)ctx); CHKERRQ(ierr);
5239 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5240 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5242 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5243 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5244 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5245 PetscFunctionReturn(0);
5250 PetscErrorCode MakeShellPCWithFactory(
PC pc,
5253 PetscFunctionBeginUser;
5254 __mfem_pc_shell_ctx *ctx =
new __mfem_pc_shell_ctx;
5257 ctx->factory = factory;
5263 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5265 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5266 ierr = PCShellSetName(pc,factory->
GetName()); CHKERRQ(ierr);
5267 ierr = PCShellSetContext(pc,(
void *)ctx); CHKERRQ(ierr);
5268 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5269 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5271 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5272 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5273 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5274 PetscFunctionReturn(0);
5279 static PetscErrorCode Convert_Array_IS(MPI_Comm comm,
bool islist,
5284 const int *data = list ? list->
GetData() : NULL;
5285 PetscErrorCode ierr;
5287 PetscFunctionBeginUser;
5288 ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5291 for (
PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5298 if (data[i]) { idxs[cum++] = i+st; }
5302 ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5304 PetscFunctionReturn(0);
5310 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5318 PetscErrorCode ierr;
5320 PetscFunctionBeginUser;
5321 for (
int i = 0; i < pl2l.
Size(); i++)
5325 ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(
const PetscInt**)&ii,
5326 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
5327 MFEM_VERIFY(done,
"Unable to perform MatGetRowIJ on " << i <<
" l2l matrix");
5328 ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5329 #if defined(PETSC_USE_64BIT_INDICES)
5330 int nnz = (int)ii[m];
5331 int *mii =
new int[m+1];
5332 int *mjj =
new int[nnz];
5333 for (
int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5334 for (
int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5339 ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5341 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
5342 MFEM_VERIFY(done,
"Unable to perform MatRestoreRowIJ on "
5343 << i <<
" l2l matrix");
5346 for (
int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5348 const int* vdata = mark->
GetData();
5349 int* sdata = sub_dof_marker.
GetData();
5350 int cumh = 0, cumw = 0;
5351 for (
int i = 0; i < l2l.Size(); i++)
5356 l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5357 cumh += l2l[i]->Height();
5358 cumw += l2l[i]->Width();
5360 ierr = Convert_Array_IS(comm,
false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5361 for (
int i = 0; i < pl2l.
Size(); i++)
5365 PetscFunctionReturn(0);
5368 #if !defined(PETSC_HAVE_HYPRE)
5370 #if defined(HYPRE_MIXEDINT)
5371 #error "HYPRE_MIXEDINT not supported"
5374 #include "_hypre_parcsr_mv.h"
5375 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,
Mat* pA)
5377 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5378 hypre_CSRMatrix *hdiag,*hoffd;
5380 PetscInt *dii,*djj,*oii,*ojj,*iptr;
5383 PetscErrorCode ierr;
5385 PetscFunctionBeginUser;
5386 hdiag = hypre_ParCSRMatrixDiag(hA);
5387 hoffd = hypre_ParCSRMatrixOffd(hA);
5388 m = hypre_CSRMatrixNumRows(hdiag);
5389 n = hypre_CSRMatrixNumCols(hdiag);
5390 dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5391 onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5392 ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5393 ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5394 ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5395 ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*
sizeof(
PetscInt));
5397 ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*
sizeof(
PetscInt));
5399 ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*
sizeof(
PetscScalar));
5406 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5410 ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5415 ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5416 ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5417 ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5418 ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*
sizeof(
PetscInt));
5420 offdj = hypre_CSRMatrixJ(hoffd);
5421 coffd = hypre_ParCSRMatrixColMapOffd(hA);
5422 for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5423 ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*
sizeof(
PetscScalar));
5430 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5434 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5435 djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5441 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5447 void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5448 const char *names[6] = {
"_mfem_csr_dii",
5459 ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5460 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5461 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5465 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5467 PetscFunctionReturn(0);
5470 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,
Mat* pA)
5473 ISLocalToGlobalMapping rl2g,cl2g;
5475 hypre_CSRMatrix *hdiag,*hoffd;
5476 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5478 const char *names[2] = {
"_mfem_csr_aux",
5482 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5484 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5485 PetscErrorCode ierr;
5487 PetscFunctionBeginUser;
5489 str = hypre_ParCSRMatrixFirstRowIndex(hA);
5490 stc = hypre_ParCSRMatrixFirstColDiag(hA);
5491 hdiag = hypre_ParCSRMatrixDiag(hA);
5492 hoffd = hypre_ParCSRMatrixOffd(hA);
5493 dr = hypre_CSRMatrixNumRows(hdiag);
5494 dc = hypre_CSRMatrixNumCols(hdiag);
5495 nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5496 hdi = hypre_CSRMatrixI(hdiag);
5497 hdj = hypre_CSRMatrixJ(hdiag);
5498 hdd = hypre_CSRMatrixData(hdiag);
5499 oc = hypre_CSRMatrixNumCols(hoffd);
5500 nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5501 hoi = hypre_CSRMatrixI(hoffd);
5502 hoj = hypre_CSRMatrixJ(hoffd);
5503 hod = hypre_CSRMatrixData(hoffd);
5506 ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5507 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5508 ierr = ISDestroy(&is); CHKERRQ(ierr);
5509 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5510 ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5511 for (i=0; i<dc; i++) { aux[i] = i+stc; }
5512 for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5513 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5514 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5515 ierr = ISDestroy(&is); CHKERRQ(ierr);
5518 ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5519 ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5520 ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5521 ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5522 ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5523 ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5526 ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5527 ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5531 *ii = *(hdi++) + *(hoi++);
5532 for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5536 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5537 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5538 *(++ii) = *(hdi++) + *(hoi++);
5539 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5541 for (; cum<dr; cum++) { *(++ii) = nnz; }
5545 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5553 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5554 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5555 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5559 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5561 ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5562 ierr = MatDestroy(&lA); CHKERRQ(ierr);
5563 ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5564 ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5565 PetscFunctionReturn(0);
5569 #include <petsc/private/matimpl.h>
5571 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm,
PetscInt m,
5575 ierr = MatCreate(comm,A); CHKERRQ(ierr);
5576 ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5577 ierr = PetscObjectChangeTypeName((
PetscObject)*A,
"mfemdummy"); CHKERRQ(ierr);
5578 (*A)->preallocated = PETSC_TRUE;
5579 ierr = MatSetUp(*A); CHKERRQ(ierr);
5580 PetscFunctionReturn(0);
5583 #include <petsc/private/vecimpl.h>
5585 #if defined(PETSC_HAVE_DEVICE)
5586 static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5590 PetscFunctionReturn(0);
5594 static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5597 #if defined(PETSC_HAVE_DEVICE)
5598 *flg = v->boundtocpu;
5602 PetscFunctionReturn(0);
5605 static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject o)
5607 PetscErrorCode ierr;
5610 ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5611 PetscFunctionReturn(0);
5614 #endif // MFEM_USE_PETSC
5615 #endif // MFEM_USE_MPI
Ordering::Type GetOrdering() const
Return the ordering method.
int Size() const
Return the logical size of the array.
void SetPreconditioner(Solver &precond)
void SetPrintLevel(int plev)
static int GetId()
Get the device id of the configured device.
virtual int NumNonZeroElems() const
Returns the number of the nonzero elements in the matrix.
PetscParVector * B
Right-hand side and solution vector.
void CreatePrivateContext()
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
Vector()
Default constructor for Vector. Sets size = 0 and data = NULL.
void trans(const Vector &u, Vector &x)
MPI_Comm GetComm() const
MPI communicator.
void EliminateBC(const HypreParMatrix &A, const HypreParMatrix &Ae, const Array< int > &ess_dof_list, const Vector &X, Vector &B)
Eliminate essential BC specified by ess_dof_list from the solution X to the r.h.s. B.
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
void SetUpdate(void(*update)(Operator *op, int it, const Vector &F, const Vector &X, const Vector &D, const Vector &P))
bool isHomogeneous() const
True if type is HOMOGENEOUS.
virtual void SetOperator(const Operator &op)
Specification of the nonlinear operator.
void MakeDataOwner() const
Set the Vector data (host pointer) ownership flag.
Abstract class for PETSc's preconditioners.
PetscParMatrix & operator+=(const PetscParMatrix &B)
ParMesh * GetParMesh() const
petsc::Vec x
The actual PETSc object.
virtual void SetOperator(const Operator &op)
HypreParMatrix * RAP(const HypreParMatrix *A, const HypreParMatrix *P)
Returns the matrix P^t * A * P.
bool clcustom
Boolean to handle SetFromOptions calls.
void SetObjective(void(*obj)(Operator *op, const Vector &x, double *f))
Specification of an objective function to be used for line search.
Device memory; using CUDA or HIP *Malloc and *Free.
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
petsc::Mat A
The actual PETSc object.
Wrapper for PETSc's matrix class.
PetscParVector & operator+=(const PetscParVector &y)
void ApplyBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = g (internally evaluated) on ess_tdof_list
Abstract class for PETSc's linear solvers.
void SetSize(int s)
Resize the vector to size s.
Abstract class for PETSc's solvers.
PetscODESolver::Type GetType() const
virtual void Run(Vector &x, double &t, double &dt, double t_final)
Perform time integration from time t [in] to time tf [in].
void SetPostCheck(void(*post)(Operator *op, const Vector &X, Vector &Y, Vector &W, bool &changed_y, bool &changed_w))
PetscBCHandler(Type type_=ZERO)
void Delete()
Delete the owned pointers. The Memory is not reset by this method, i.e. it will, generally, not be Empty() after this call.
Base abstract class for first order time dependent operators.
PetscInt GetNumCols() const
Returns the local number of columns.
void MakeAliasForSync(const Memory< double > &base_, int offset_, int size_, bool usedev_)
virtual Operator & GetGradient(const Vector &x) const
Evaluate the gradient operator at the point x. The default behavior in class Operator is to generate ...
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
void SetType(PetscODESolver::Type)
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Pointer to an Operator of a specified type.
void Mult(double a, const Vector &x, double b, Vector &y) const
Matvec: y = a A x + b y.
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
bool DeviceRequested() const
T * GetData()
Returns the data.
virtual Operator & GetExplicitGradient(const Vector &x) const
Return an Operator representing dG/dx at the given point x and the currently set time.
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true...
int Size() const
Returns the size of the vector.
virtual void Step(Vector &x, double &t, double &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in]...
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
void Init()
Initialize with defaults. Does not initialize inherited members.
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
PetscClassId cid
The class id of the actual PETSc object.
ID for class PetscParMatrix, MATHYPRE format.
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
virtual void SetTime(const double t_)
Set the current time.
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
Abstract parallel finite element space.
void SetRelTol(double tol)
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Randomize(PetscInt seed=0)
Set random values.
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string())
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
void SetMaxIter(int max_iter)
PetscParVector & operator*=(PetscScalar d)
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
void SortColumnIndices()
Sort the column indices corresponding to each row.
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
int Capacity() const
Return the size of the allocated memory.
Memory< double > & GetMemory()
Return a reference to the Memory object used by the Vector.
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, double diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
void Customize(bool customize=true) const
Customize object with options set.
PetscInt M() const
Returns the global number of rows.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
virtual ~PetscLinearSolver()
PetscInt GetNumRows() const
Returns the local number of rows.
virtual int GetTrueVSize() const
Return the number of local vector true dofs.
virtual void ExplicitMult(const Vector &x, Vector &y) const
Perform the action of the explicit part of the operator, G: y = G(x, t) where t is the current time...
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray()...
const double * GetHostPointer() const
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
ID for the base class Operator, i.e. any type.
double f(const Vector &xvec)
void write(std::ostream &os, T value)
Write 'value' to stream.
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
Abstract class for monitoring PETSc's solvers.
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
struct s_NavierContext ctx
void SetJacobianType(Operator::Type type)
int to_int(const std::string &str)
Convert a string to an int.
const double * HostReadData() const
Auxiliary class for BDDC customization.
virtual ~PetscPreconditioner()
ID for class PetscParMatrix, unspecified format.
MPI_Comm GetComm() const
Get the associated MPI communicator.
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
void LoseData()
NULL-ifies the data.
void SetFlagsFromMask_() const
bool DeviceIsValid() const
Return true if device pointer is valid.
const int * HostReadJ() const
PetscParMatrix & operator=(const PetscParMatrix &B)
bool ReadRequested() const
PetscInt GetColStart() const
Returns the global index of the first local column.
virtual ~PetscParVector()
Calls PETSc's destroy function.
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
PetscBCHandler * bchandler
Handler for boundary conditions.
PetscParMatrix & operator-=(const PetscParMatrix &B)
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
void Zero(Vector &x)
Replace boundary dofs with 0.
void Assign(const T *)
Copy data from a pointer. 'Size()' elements are copied.
MPI_Comm GetComm() const
Get the associated MPI communicator.
T read(std::istream &is)
Read a value from the stream and return it.
bool WriteRequested() const
void Shift(double s)
Shift diagonal by a constant.
ID for class PetscParMatrix, MATNEST format.
int GetVDim() const
Returns vector dimension.
void SetTime(double t)
Sets the current time.
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true...
ID for class PetscParMatrix, MATSHELL format.
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void SetMat(petsc::Mat newA)
Replace the inner Mat Object. The reference count of newA is increased.
Bitwise-OR of all CUDA backends.
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc's MATSHELL object and returns the Mat in B...
int SpaceDimension() const
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory<T> &mem, int size, false)
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Wrapper for hypre's parallel vector class.
HYPRE_BigInt * GetTrueDofOffsets() const
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
virtual void Eval(double t, Vector &g)
Boundary conditions evaluation.
Type
Enumeration defining IDs for some classes derived from Operator.
double p(const Vector &x, double t)
PetscInt NNZ() const
Returns the number of nonzeros.
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true)
virtual void Mult(const Vector &b, Vector &x) const
Application of the preconditioner.
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
MemoryType
Memory types supported by MFEM.
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
virtual Operator & GetImplicitGradient(const Vector &x, const Vector &k, double shift) const
Return an Operator representing (dF/dk shift + dF/dx) at the given x, k, and the currently set time...
PetscInt N() const
Returns the global number of columns.
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
void SetJacobianType(Operator::Type type)
bool operatorset
Boolean to handle SetOperator calls.
PetscParVector * X
Auxiliary vectors for typecasting.
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
void MultTranspose(double a, const Vector &x, double b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
bool IsAliasForSync() const
int height
Dimension of the output / number of rows in the matrix.
PetscParVector & operator-=(const PetscParVector &y)
virtual void ImplicitMult(const Vector &x, const Vector &k, Vector &y) const
Perform the action of the implicit part of the operator, F: y = F(x, k, t) where t is the current tim...
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory<T> &mem, int size, false)
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true, or the mfem::Device's HostMemoryClass, otherwise.
virtual ~PetscNonlinearSolver()
virtual ~PetscODESolver()
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object...
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
bool Empty() const
Return true if the Memory object is empty, see Reset().
ID for class PetscParMatrix, MATAIJ format.
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
MPI_Comm GetComm() const
Get the associated MPI communicator.
PetscInt GlobalSize() const
Returns the global number of rows.
PetscInt GetRowStart() const
Returns the global index of the first local row.
struct _p_PetscObject * PetscObject
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
void ResetMemory()
Completes the operation started with PlaceMemory.
void SetAbsTol(double tol)
Helper class for handling essential boundary conditions.
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
PetscParVector & operator=(PetscScalar d)
Set constant values.
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
int NumRowBlocks() const
Return the number of row blocks.
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
const FiniteElementCollection * FEColl() const
const int * HostReadI() const
Identity Operator I: x -> x.
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array...
bool UseDevice() const
Read the internal device flag.
void FreePrivateContext()
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Vector * GlobalVector() const
Returns the global vector in each processor.
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as s...
Wrapper for hypre's ParCSR matrix class.
A class to handle Block systems in a matrix-free implementation.
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory<T> &mem, int size, false)
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
void operator*=(double s)
Scale all entries by s: A_scaled = s*A.
int NumColBlocks() const
Return the number of column blocks.
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
ID for class PetscParMatrix, MATIS format.
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
void PlaceMemory(Memory< double > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding ...
int width
Dimension of the input / number of columns in the matrix.
void * private_ctx
Private context for solver.
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
const double * GetDevicePointer() const
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
void ScaleCols(const Vector &s)
Scale the local col i by s(i).
double f(const Vector &p)