14 #include "../config/config.hpp"
20 #include "../fem/fem.hpp"
23 #if defined(PETSC_HAVE_HYPRE)
24 #include "petscmathypre.h"
28 #if PETSC_VERSION_LT(3,11,0)
29 #define VecLockReadPush VecLockPush
30 #define VecLockReadPop VecLockPop
32 #if PETSC_VERSION_LT(3,12,0)
33 #define VecGetArrayWrite VecGetArray
34 #define VecRestoreArrayWrite VecRestoreArray
35 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
// NOTE(review): this chunk is a mangled extraction — the leading integers on
// many lines (e.g. "60 static ...") look like original source line numbers
// fused into the text, and several declarations below are truncated
// mid-parameter-list. They are not valid C++ as-is; confirm against the
// original file before compiling.
//
// File-local (static) callback prototypes handed to PETSc. The __mfem_snes_*
// functions are SNES (nonlinear solver) callbacks; __mfem_pc_shell_* are
// PCSHELL (user-defined preconditioner) hooks; __mfem_mat_shell_* are
// MATSHELL (matrix-free operator) hooks; the *_container_destroy functions
// are PetscContainer destructors for memory attached to PETSc objects.
60 static PetscErrorCode __mfem_snes_jacobian(
SNES,
Vec,
Mat,
Mat,
void*);
61 static PetscErrorCode __mfem_snes_function(
SNES,
Vec,
Vec,
void*);
// Line-search post-check hook: may modify the step and flag changes via the
// two PetscBool* outputs.
64 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,
Vec,
Vec,
Vec,
65 PetscBool*,PetscBool*,
void*);
// PCSHELL hooks: apply, transposed apply, setup, destroy, view.
67 static PetscErrorCode __mfem_pc_shell_apply(
PC,
Vec,
Vec);
68 static PetscErrorCode __mfem_pc_shell_apply_transpose(
PC,
Vec,
Vec);
69 static PetscErrorCode __mfem_pc_shell_setup(
PC);
70 static PetscErrorCode __mfem_pc_shell_destroy(
PC);
71 static PetscErrorCode __mfem_pc_shell_view(
PC,PetscViewer);
// MATSHELL hooks wrapping an mfem::Operator stored as the shell context.
72 static PetscErrorCode __mfem_mat_shell_apply(
Mat,
Vec,
Vec);
73 static PetscErrorCode __mfem_mat_shell_apply_transpose(
Mat,
Vec,
Vec);
74 static PetscErrorCode __mfem_mat_shell_destroy(
Mat);
75 static PetscErrorCode __mfem_mat_shell_copy(
Mat,
Mat,MatStructure);
// PetscContainer destroy callbacks used to free arrays/Mat arrays attached
// to PETSc objects (see the PetscContainerSetUserDestroy calls later on).
76 static PetscErrorCode __mfem_array_container_destroy(
void*);
77 static PetscErrorCode __mfem_matarray_container_destroy(
void*);
78 static PetscErrorCode __mfem_monitor_ctx_destroy(
void**);
// Converts an mfem::Array<int> into a PETSc IS (declaration truncated here).
81 static PetscErrorCode Convert_Array_IS(MPI_Comm,
bool,
const mfem::Array<int>*,
// Declaration truncated in this extraction.
86 static PetscErrorCode MakeShellPCWithFactory(
PC,
// Fallback HYPRE->PETSc converters, only compiled when PETSc itself was not
// configured with hypre (otherwise MatCreateFromParCSR is used instead).
92 #if !defined(PETSC_HAVE_HYPRE)
93 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,
Mat*);
94 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,
Mat*);
// Device support: _USE_DEVICE is presumably defined inside this nested
// guard (PETSc >= 3.15 with device support and matching CUDA builds) —
// the #define itself is elided from this view; confirm in the original.
97 #if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
98 #if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
105 #if defined(PETSC_HAVE_DEVICE)
// Helpers to poke at a Vec's host/device offload state.
106 static PetscErrorCode __mfem_VecSetOffloadMask(
Vec,PetscOffloadMask);
108 static PetscErrorCode __mfem_VecBoundToCPU(
Vec,PetscBool*);
// Bumps the PetscObject state counter so PETSc notices external data changes.
109 static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject);
// Tail of the PCSHELL context struct: numprec presumably counts how many
// times the preconditioner has been (re)built — struct head is elided here.
118 unsigned long int numprec;
119 } __mfem_pc_shell_ctx;
// SNES context fragment: optional user post-check callback mirroring the
// SNESLineSearchPostCheck signature on mfem types (Operator + Vectors,
// plus two bool& "changed" flags).
130 void (*postcheck)(
mfem::Operator *op,
const mfem::Vector&, mfem::Vector&,
131 mfem::Vector&,
bool&,
bool&);
// Another callback parameter list (owner/return type elided in this view).
135 const mfem::Vector&,
const mfem::Vector&,
136 const mfem::Vector&,
const mfem::Vector&);
// Cached PetscObjectState values, used to skip recomputation when the
// relevant PETSc vectors have not changed since the last callback.
148 PetscObjectState cached_ijacstate;
149 PetscObjectState cached_rhsjacstate;
150 PetscObjectState cached_splits_xstate;
151 PetscObjectState cached_splits_xdotstate;
158 } __mfem_monitor_ctx;
// File-global error code reused by every PETSc call below; each call is
// immediately checked via PCHKERRQ/CCHKERRQ, so sharing one static is safe
// in serial use (not thread-safe — a review point if threading is added).
161 static PetscErrorCode ierr;
// Fragments of MFEM's PETSc initialize/finalize wrappers (enclosing function
// signatures are elided in this extraction).
// Pre-seed the "-cuda_device" option before PetscInitialize so PETSc picks
// the same device MFEM uses; failure is fatal via MFEM_VERIFY.
184 ierr = PetscOptionsSetValue(NULL,
"-cuda_device",
186 MFEM_VERIFY(!ierr,
"Unable to set initial option value to PETSc");
// Standard PETSc startup with optional argc/argv, rc file and help string.
188 ierr = PetscInitialize(argc,argv,rc_file,help);
189 MFEM_VERIFY(!ierr,
"Unable to initialize PETSc");
// Matching teardown.
194 ierr = PetscFinalize();
195 MFEM_VERIFY(!ierr,
"Unable to finalize PETSc");
// PetscMemory accessors: both route through mfem::Read on *this with the full
// Capacity(); the boolean selects host (false) vs device (true) address space.
// The surrounding braces/return statements are elided in this extraction —
// presumably each returns v after a const_cast; confirm in the original.
198 const double* PetscMemory::GetHostPointer()
const
202 const double *v =
mfem::Read(*
this,Capacity(),
false);
207 const double* PetscMemory::GetDevicePointer()
const
211 const double *v =
mfem::Read(*
this,Capacity(),
true);
// Rebuilds this vector's mfem-side memory (pdata/data) as wrappers around the
// PETSc Vec's own storage. Several interior lines (braces, declarations of
// array/darray/iscuda, #else/#endif) are elided in this extraction.
218 void PetscParVector::SetDataAndSize_()
223 MFEM_VERIFY(x,
"Missing Vec");
224 ierr = VecSetUp(x); PCHKERRQ(x,ierr);
225 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
226 MFEM_VERIFY(n >= 0,
"Invalid local size");
// Remember the current offload state, then temporarily force CPU residency
// so VecGetArrayRead below does not trigger a device->host copy.
228 #if defined(PETSC_HAVE_DEVICE)
229 PetscOffloadMask omask;
232 ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
233 if (omask != PETSC_OFFLOAD_BOTH)
235 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
238 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); PCHKERRQ(x,ierr);
// For CUDA vector types, also grab the device pointer (again masking the
// offload state to avoid an implicit host->device sync).
239 #if defined(PETSC_HAVE_DEVICE)
240 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
241 ""); PCHKERRQ(x,ierr);
244 if (omask != PETSC_OFFLOAD_BOTH)
246 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
249 ierr = VecCUDAGetArrayRead(x,(
const PetscScalar**)&darray); PCHKERRQ(x,ierr);
// Wrap both host and device pointers without taking ownership (own=false).
250 pdata.Wrap(array,darray,size,MemoryType::HOST,
false);
251 ierr = VecCUDARestoreArrayRead(x,(
const PetscScalar**)&darray);
// Host-only path: wrap just the host pointer, again non-owning.
257 pdata.Wrap(array,size,MemoryType::HOST,
false);
259 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); PCHKERRQ(x,ierr);
// Restore the offload mask we saved at entry — the wrapping itself must not
// change what PETSc believes about data residency.
261 #if defined(PETSC_HAVE_DEVICE)
262 ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
// Expose pdata through the base-class Vector 'data' member as an alias.
264 data.MakeAlias(pdata,0,size);
// Mirrors the Vec's PETSc offload mask into the mfem Memory validity flags
// (host/device valid bits) on pdata. The switch statement header, break
// statements and #endif are elided in this extraction.
268 void PetscParVector::SetFlagsFromMask_()
const
270 MFEM_VERIFY(x,
"Missing Vec");
271 #if defined(_USE_DEVICE)
272 PetscOffloadMask mask;
274 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
275 ""); PCHKERRQ(x,ierr);
276 ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
// CPU-resident: host copy is authoritative, device copy stale.
281 case PETSC_OFFLOAD_CPU:
282 pdata.SetHostValid();
283 pdata.SetDeviceInvalid();
// GPU-resident: the opposite.
285 case PETSC_OFFLOAD_GPU:
286 pdata.SetHostInvalid();
287 pdata.SetDeviceValid();
// Both copies in sync.
289 case PETSC_OFFLOAD_BOTH:
290 pdata.SetHostValid();
291 pdata.SetDeviceValid();
// Any other mask value (e.g. unallocated) is a hard error.
294 MFEM_ABORT(
"Unhandled case " << mask);
// Inverse of SetFlagsFromMask_: pushes mfem's host/device validity flags back
// into the PETSc Vec (offload mask on device builds, or an explicit copy to
// the host array otherwise). Interior braces/#else/#endif and the declaration
// of v are elided in this extraction.
301 void PetscParVector::UpdateVecFromFlags()
303 MFEM_VERIFY(x,
"Missing Vec");
// Tell PETSc the data changed behind its back so cached norms etc. are
// invalidated.
304 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
305 #if defined(_USE_DEVICE)
307 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
308 ""); PCHKERRQ(x,ierr);
// Translate the two validity bits into the corresponding offload mask.
311 bool dv = pdata.DeviceIsValid();
312 bool hv = pdata.HostIsValid();
313 PetscOffloadMask mask;
314 if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
315 else if (dv) { mask = PETSC_OFFLOAD_GPU; }
316 else { mask = PETSC_OFFLOAD_CPU; }
317 ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
// Non-CUDA path: materialize the data into the Vec's host array.
324 ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
325 pdata.CopyToHost(v,size);
326 ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
// Chooses the PETSc vector type to match MFEM's active device memory type:
// DEVICE/MANAGED -> VECCUDA, everything else -> VECSTANDARD. Switch braces,
// breaks, default label and #else/#endif are elided in this extraction.
330 void PetscParVector::SetVecType_()
333 MFEM_VERIFY(x,
"Missing Vec");
334 ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
335 #if defined(_USE_DEVICE)
336 switch (Device::GetDeviceMemoryType())
338 case MemoryType::DEVICE:
339 case MemoryType::MANAGED:
340 ierr = VecSetType(x,VECCUDA); PCHKERRQ(x,ierr);
343 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
// Host-only build: always a standard (CPU) vector.
349 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
// Three sibling accessors (their signatures are elided in this extraction —
// presumably HostRead/HostWrite/HostReadWrite-style sync helpers). Each uses
// the PETSc get/restore pair purely for its side effect: forcing PETSc to
// migrate/validate data on the requested side. The 'dummy' pointer is thrown
// away.
// Read-only variant: VecCUDAGetArrayRead / VecGetArrayRead.
357 MFEM_VERIFY(x,
"Missing Vec");
358 #if defined(PETSC_HAVE_DEVICE)
360 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
361 ""); PCHKERRQ(x,ierr);
362 if (on_dev && iscuda)
364 ierr = VecCUDAGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
365 ierr = VecCUDARestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
370 ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
371 ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
// Write-only variant: uses the *Write calls (no data migration needed) and
// bumps the object state afterwards since the caller will overwrite the data.
385 MFEM_VERIFY(x,
"Missing Vec");
386 #if defined(PETSC_HAVE_DEVICE)
388 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
389 ""); PCHKERRQ(x,ierr);
390 if (on_dev && iscuda)
392 ierr = VecCUDAGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
393 ierr = VecCUDARestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
398 ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
399 ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
401 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
// Read-write variant: plain VecGetArray/VecCUDAGetArray (migrates AND marks
// modified), plus the state bump.
414 MFEM_VERIFY(x,
"Missing Vec");
415 #if defined(PETSC_HAVE_DEVICE)
417 ierr = PetscObjectTypeCompareAny((
PetscObject)x,&iscuda,VECSEQCUDA,VECMPICUDA,
418 ""); PCHKERRQ(x,ierr);
419 if (on_dev && iscuda)
421 ierr = VecCUDAGetArray(x,&dummy); PCHKERRQ(x,ierr);
422 ierr = VecCUDARestoreArray(x,&dummy); PCHKERRQ(x,ierr);
427 ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
428 ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
430 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
// Setter: UseDevice(true) UNbinds the Vec from the CPU (note the inversion —
// VecBindToCPU takes "bind to CPU?", so dev==true maps to PETSC_FALSE).
// No-op on builds without PETSC_HAVE_DEVICE (the #endif is elided here).
440 void PetscParVector::UseDevice(
bool dev)
const
442 MFEM_VERIFY(x,
"Missing Vec");
443 #if defined(PETSC_HAVE_DEVICE)
444 ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
// Getter: returns true iff the Vec is NOT bound to the CPU (flg declaration
// is elided in this extraction).
448 bool PetscParVector::UseDevice()
const
451 MFEM_VERIFY(x,
"Missing Vec");
452 ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
453 return flg ?
false :
true;
459 ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
463 PetscParVector::PetscParVector(MPI_Comm comm,
const Vector &x_,
467 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
468 ierr = VecSetSizes(
x,n,PETSC_DECIDE); PCHKERRQ(
x,ierr);
477 #if defined(PETSC_HAVE_DEVICE)
479 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
480 ""); PCHKERRQ(x,ierr);
484 ierr = VecCUDAGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
485 rest = VecCUDARestoreArrayWrite;
491 ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
492 rest = VecRestoreArrayWrite;
495 ierr = (*rest)(
x,&array); PCHKERRQ(x,ierr);
503 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
507 MPI_Comm_rank(comm, &myid);
508 ierr = VecSetSizes(
x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(
x,ierr);
512 ierr = VecSetSizes(
x,PETSC_DECIDE,glob_size); PCHKERRQ(
x,ierr);
521 ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
528 MFEM_VERIFY(col,
"Missing distribution");
530 MPI_Comm_rank(comm, &myid);
531 ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
532 &
x); CCHKERRQ(comm,ierr)
539 ierr = VecDuplicate(y.
x,&
x); PCHKERRQ(
x,ierr);
544 bool transpose,
bool allocate) :
Vector()
548 ierr = VecCreate(comm,&
x);
550 ierr = VecSetSizes(
x,loc,PETSC_DECIDE);
565 bool transpose,
bool allocate) :
Vector()
570 ierr = MatCreateVecs(pA,&
x,NULL); PCHKERRQ(pA,ierr);
574 ierr = MatCreateVecs(pA,NULL,&
x); PCHKERRQ(pA,ierr);
580 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
593 ierr = PetscObjectReference((
PetscObject)y); PCHKERRQ(y,ierr);
602 MPI_Comm comm = pfes->
GetComm();
603 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,ierr);
605 PetscMPIInt myid = 0;
606 if (!HYPRE_AssumedPartitionCheck())
608 MPI_Comm_rank(comm,&myid);
610 ierr = VecSetSizes(
x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
628 ierr = VecScatterCreateToAll(
x,&scctx,&vout); PCHKERRQ(
x,ierr);
629 ierr = VecScatterBegin(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
631 ierr = VecScatterEnd(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
633 ierr = VecScatterDestroy(&scctx); PCHKERRQ(
x,ierr);
634 ierr = VecGetArrayRead(vout,&array); PCHKERRQ(
x,ierr);
635 ierr = VecGetLocalSize(vout,&size); PCHKERRQ(
x,ierr);
638 ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(
x,ierr);
639 ierr = VecDestroy(&vout); PCHKERRQ(
x,ierr);
648 ierr = VecSet(
x,d); PCHKERRQ(
x,ierr);
656 MFEM_VERIFY(idx.
Size() == vals.
Size(),
657 "Size mismatch between indices and values");
661 ierr = VecAssemblyBegin(
x); PCHKERRQ(
x,ierr);
662 ierr = VecAssemblyEnd(
x); PCHKERRQ(
x,ierr);
670 MFEM_VERIFY(idx.
Size() == vals.
Size(),
671 "Size mismatch between indices and values");
675 ierr = VecAssemblyBegin(
x); PCHKERRQ(
x,ierr);
676 ierr = VecAssemblyEnd(
x); PCHKERRQ(
x,ierr);
683 ierr = VecCopy(y.
x,
x); PCHKERRQ(
x,ierr);
690 ierr = VecAXPY(
x,1.0,y.
x); PCHKERRQ(
x,ierr);
697 ierr = VecAXPY(
x,-1.0,y.
x); PCHKERRQ(
x,ierr);
704 ierr = VecScale(
x,s); PCHKERRQ(
x,ierr);
711 ierr = VecShift(
x,s); PCHKERRQ(
x,ierr);
718 ierr = VecPlaceArray(
x,temp_data); PCHKERRQ(
x,ierr);
723 ierr = VecResetArray(
x); PCHKERRQ(
x,ierr);
730 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
732 "Memory size " << mem.
Capacity() <<
" < " << n <<
" vector size!");
733 MFEM_VERIFY(
pdata.
Empty(),
"Vector data is not empty");
734 MFEM_VERIFY(
data.
Empty(),
"Vector data is not empty");
735 #if defined(_USE_DEVICE)
737 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
738 ""); PCHKERRQ(x,ierr);
745 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
750 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
759 #if defined(PETSC_HAVE_DEVICE)
760 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
762 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
764 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
772 ierr = VecGetLocalSize(
x,&n); PCHKERRQ(
x,ierr);
774 "Memory size " << mem.
Capacity() <<
" < " << n <<
" vector size!");
775 MFEM_VERIFY(
pdata.
Empty(),
"Vector data is not empty");
776 MFEM_VERIFY(
data.
Empty(),
"Vector data is not empty");
777 #if defined(_USE_DEVICE)
779 ierr = PetscObjectTypeCompareAny((
PetscObject)
x,&iscuda,VECSEQCUDA,VECMPICUDA,
780 ""); PCHKERRQ(x,ierr);
786 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
791 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
800 #if defined(PETSC_HAVE_DEVICE)
801 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
803 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
806 ierr = __mfem_PetscObjectStateIncrease((
PetscObject)x); PCHKERRQ(x,ierr);
807 ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
813 MFEM_VERIFY(!
pdata.
Empty(),
"Vector data is empty");
825 #if defined(PETSC_HAVE_DEVICE)
826 PetscOffloadMask mask;
827 ierr = VecGetOffloadMask(
x,&mask); PCHKERRQ(
x,ierr);
828 if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
829 (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
832 ierr = VecGetArrayRead(
x,&v); PCHKERRQ(
x,ierr);
834 ierr = VecRestoreArrayRead(
x,&v); PCHKERRQ(
x,ierr);
839 if (read && !write) { ierr = VecLockReadPop(
x); PCHKERRQ(
x,ierr); }
842 #if defined(PETSC_HAVE_DEVICE)
843 ierr = VecCUDAResetArray(
x); PCHKERRQ(
x,ierr);
845 MFEM_VERIFY(
false,
"This should not happen");
850 ierr = VecResetArray(
x); PCHKERRQ(
x,ierr);
856 PetscRandom rctx = NULL;
860 ierr = PetscRandomCreate(PetscObjectComm((
PetscObject)
x),&rctx);
862 ierr = PetscRandomSetSeed(rctx,(
unsigned long)seed); PCHKERRQ(x,ierr);
863 ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
865 ierr = VecSetRandom(
x,rctx); PCHKERRQ(
x,ierr);
866 ierr = PetscRandomDestroy(&rctx); PCHKERRQ(
x,ierr);
877 ierr = PetscViewerBinaryOpen(PetscObjectComm((
PetscObject)
x),fname,
878 FILE_MODE_WRITE,&view);
882 ierr = PetscViewerASCIIOpen(PetscObjectComm((
PetscObject)
x),fname,&view);
885 ierr = VecView(
x,view); PCHKERRQ(
x,ierr);
886 ierr = PetscViewerDestroy(&view); PCHKERRQ(
x,ierr);
890 ierr = VecView(
x,NULL); PCHKERRQ(
x,ierr);
899 ierr = MatGetOwnershipRange(
A,&N,NULL); PCHKERRQ(
A,ierr);
906 ierr = MatGetOwnershipRangeColumn(
A,&N,NULL); PCHKERRQ(
A,ierr);
913 ierr = MatGetLocalSize(
A,&N,NULL); PCHKERRQ(
A,ierr);
920 ierr = MatGetLocalSize(
A,NULL,&N); PCHKERRQ(
A,ierr);
927 ierr = MatGetSize(
A,&N,NULL); PCHKERRQ(
A,ierr);
934 ierr = MatGetSize(
A,NULL,&N); PCHKERRQ(
A,ierr);
941 ierr = MatGetInfo(
A,MAT_GLOBAL_SUM,&info); PCHKERRQ(
A,ierr);
966 rows.
GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
968 cols.
GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
969 ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&
A); PCHKERRQ(B,ierr);
970 ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
971 ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
1015 BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1029 BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1042 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1043 if (
X) {
delete X; }
1044 if (
Y) {
delete Y; }
1049 #if defined(PETSC_HAVE_HYPRE)
1050 ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&
A);
1052 ierr = MatConvert_hypreParCSR_AIJ(B,&
A); CCHKERRQ(B.
GetComm(),ierr);
1063 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1064 if (
X) {
delete X; }
1065 if (
Y) {
delete Y; }
1070 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1078 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1082 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
1083 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
1084 ierr = MatAXPY(
A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),ierr);
1093 ierr = MatDuplicate(B,MAT_COPY_VALUES,&
A); CCHKERRQ(B.
GetComm(),ierr);
1094 ierr = MatScale(
A,-1.0); PCHKERRQ(
A,ierr);
1098 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
1099 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
1100 ierr = MatAXPY(
A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),ierr);
1105 void PetscParMatrix::
1106 BlockDiagonalConstructor(MPI_Comm comm,
1111 PetscInt lrsize,lcsize,rstart,cstart;
1112 PetscMPIInt myid = 0,commsize;
1114 ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,ierr);
1115 if (!HYPRE_AssumedPartitionCheck())
1117 ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,ierr);
1119 lrsize = row_starts[myid+1]-row_starts[myid];
1120 rstart = row_starts[myid];
1121 lcsize = col_starts[myid+1]-col_starts[myid];
1122 cstart = col_starts[myid];
1127 ierr = ISCreateStride(comm,diag->
Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1128 ISLocalToGlobalMapping rl2g,cl2g;
1129 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1130 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1131 if (row_starts != col_starts)
1133 ierr = ISCreateStride(comm,diag->
Width(),cstart,1,&is);
1134 CCHKERRQ(comm,ierr);
1135 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1136 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1140 ierr = PetscObjectReference((
PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1145 ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1146 ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1148 ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1149 ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1150 ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1151 ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
1156 ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1157 const
int *II = diag->HostReadI();
1158 const
int *JJ = diag->HostReadJ();
1159 #if defined(PETSC_USE_64BIT_INDICES)
1162 ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1163 for (
int i = 0; i < m; i++) { pII[i] = II[i]; }
1164 for (
int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1165 ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1167 ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1169 ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1182 ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1183 ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1184 ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1185 if (
sizeof(
PetscInt) ==
sizeof(
int))
1188 CCHKERRQ(PETSC_COMM_SELF,ierr);
1190 CCHKERRQ(PETSC_COMM_SELF,ierr);
1196 for (
int i = 0; i < m; i++) { dii[i] = iii[i]; }
1197 for (
int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1200 CCHKERRQ(PETSC_COMM_SELF,ierr);
1201 ierr = PetscCalloc1(m,&oii);
1202 CCHKERRQ(PETSC_COMM_SELF,ierr);
1205 ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1207 dii,djj,da,oii,NULL,NULL,&A);
1208 CCHKERRQ(comm,ierr);
1212 ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1213 CCHKERRQ(comm,ierr);
1216 void *ptrs[4] = {dii,djj,da,oii};
1217 const char *names[4] = {
"_mfem_csr_dii",
1226 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1227 ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1228 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1229 CCHKERRQ(comm,ierr);
1231 CCHKERRQ(comm,ierr);
1232 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1237 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1238 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1245 return A ? PetscObjectComm((
PetscObject)A) : MPI_COMM_NULL;
1258 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1260 PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1261 ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1262 ierr = MatShellSetContext(*A,(
void *)op); PCHKERRQ(A,ierr);
1263 ierr = MatShellSetOperation(*A,MATOP_MULT,
1264 (
void (*)())__mfem_mat_shell_apply);
1266 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1267 (
void (*)())__mfem_mat_shell_apply_transpose);
1269 ierr = MatShellSetOperation(*A,MATOP_COPY,
1270 (
void (*)())__mfem_mat_shell_copy);
1272 ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1273 (
void (*)())__mfem_mat_shell_destroy);
1274 #if defined(_USE_DEVICE)
1278 ierr = MatShellSetVecType(*A,VECCUDA); PCHKERRQ(A,ierr);
1279 ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1283 ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1287 ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1312 PetscBool avoidmatconvert = PETSC_FALSE;
1315 ierr = PetscObjectTypeCompareAny((
PetscObject)(pA->
A),&avoidmatconvert,MATMFFD,
1317 CCHKERRQ(comm,ierr);
1319 if (pA && !avoidmatconvert)
1323 #if PETSC_VERSION_LT(3,10,0)
1327 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATTRANSPOSEMAT,&istrans);
1338 #if PETSC_VERSION_LT(3,10,0)
1339 ierr = PetscObjectTypeCompare((
PetscObject)(pA->
A),MATIS,&ismatis);
1345 ierr = MatTransposeGetMat(pA->
A,&At); CCHKERRQ(pA->
GetComm(),ierr);
1346 #if PETSC_VERSION_LT(3,10,0)
1347 ierr = PetscObjectTypeCompare((
PetscObject)(At),MATIS,&ismatis);
1355 #if PETSC_VERSION_LT(3,10,0)
1362 ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1363 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1364 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1368 ierr = MatISGetMPIXAIJ(pA->
A,MAT_INITIAL_MATRIX,A);
1369 PCHKERRQ(pA->
A,ierr);
1376 ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1382 ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1383 PCHKERRQ(pA->
A,ierr);
1384 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1385 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1389 ierr = MatConvert(pA->
A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1390 PCHKERRQ(pA->
A,ierr);
1399 ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1400 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1401 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1405 ierr = MatConvert(pA->
A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->
A,ierr);
1410 #if defined(PETSC_HAVE_HYPRE)
1414 ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
1415 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
1416 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
1420 ierr = MatConvert(pA->
A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->
A,ierr);
1423 MFEM_ABORT(
"Reconfigure PETSc with --download-hypre or --with-hypre")
1432 MFEM_ABORT(
"Unsupported operator type conversion " << tid)
1439 #if defined(PETSC_HAVE_HYPRE)
1440 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1441 PETSC_USE_POINTER,A);
1443 ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1449 #if defined(PETSC_HAVE_HYPRE)
1450 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1451 PETSC_USE_POINTER,A);
1453 ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1459 #if defined(PETSC_HAVE_HYPRE)
1460 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1461 PETSC_USE_POINTER,A);
1464 MFEM_ABORT(
"Reconfigure PETSc with --download-hypre or --with-hypre")
1473 MFEM_ABORT(
"Conversion from HypreParCSR to operator type = " << tid <<
1474 " is not implemented");
1479 Mat *mats,*matsl2l = NULL;
1484 ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1487 ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1489 for (i=0; i<nr; i++)
1491 PetscBool needl2l = PETSC_TRUE;
1493 for (j=0; j<nc; j++)
1501 ierr = PetscObjectQuery((
PetscObject)mats[i*nc+j],
"_MatIS_PtAP_l2l",
1503 PCHKERRQ(mats[i*nc+j],ierr);
1511 ierr = PetscContainerGetPointer(c,(
void**)&l2l);
1513 MFEM_VERIFY(l2l->
Size() == 1,
"Unexpected size "
1514 << l2l->
Size() <<
" for block row " << i );
1515 ierr = PetscObjectReference((
PetscObject)(*l2l)[0]);
1517 matsl2l[i] = (*l2l)[0];
1518 needl2l = PETSC_FALSE;
1524 ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1527 ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1530 for (
int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1531 ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1534 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1535 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1536 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1539 PCHKERRQ((*A),ierr);
1540 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1542 for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1543 ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1549 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1550 ierr = MatSetSizes(*A,pI->
Height(),pI->
Width(),PETSC_DECIDE,PETSC_DECIDE);
1552 ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1553 ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1554 ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1555 ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1556 ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1559 ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1561 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1562 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1579 int n = pS->
Width();
1584 ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1585 ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1586 ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1588 for (
int i = 0; i < m; i++)
1590 bool issorted =
true;
1592 for (
int j = ii[i]; j < ii[i+1]; j++)
1595 if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1600 ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1601 CCHKERRQ(PETSC_COMM_SELF,ierr);
1605 ierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,ierr);
1608 ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1609 CCHKERRQ(comm,ierr);
1614 ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1615 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1617 pii,pjj,pdata,oii,NULL,NULL,&B);
1618 CCHKERRQ(comm,ierr);
1620 void *ptrs[4] = {pii,pjj,pdata,oii};
1621 const char *names[4] = {
"_mfem_csr_pii",
1626 for (
int i=0; i<4; i++)
1630 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1631 ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1632 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1636 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1644 ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1645 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1649 ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1650 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1654 MFEM_ABORT(
"Unsupported operator type conversion " << tid)
1661 "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1668 ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1669 ierr = PetscObjectTypeCompare((
PetscObject)B,MATMPIAIJ,&isaij);
1670 CCHKERRQ(comm,ierr);
1671 ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1674 ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1675 ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1690 MPI_Comm comm = MPI_COMM_NULL;
1691 ierr = PetscObjectGetComm((
PetscObject)A,&comm); PCHKERRQ(A,ierr);
1692 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1703 ierr = PetscObjectReference((
PetscObject)a); PCHKERRQ(a,ierr);
1713 if (A_ == A) {
return; }
1715 ierr = PetscObjectReference((
PetscObject)A_); PCHKERRQ(A_,ierr);
// Converts the wrapped Mat's AIJ (sub)blocks to their CUSPARSE equivalents so
// matvecs run on the device. Handles MATIS (via its local mat) and MATNEST
// (iterating the n-by-m submatrix array) as well as plain SEQAIJ/MPIAIJ.
// Early-return on non-device builds. Loop headers, braces and several
// declarations (tA, sA, n, m, sub) are elided in this extraction.
1721 void PetscParMatrix::SetUpForDevice()
1723 #if !defined(_USE_DEVICE)
1728 PetscBool ismatis,isnest,isseqaij,ismpiaij;
1729 ierr = PetscObjectTypeCompare((
PetscObject)A,MATIS,&ismatis);
1731 ierr = PetscObjectTypeCompare((
PetscObject)A,MATNEST,&isnest);
// MATIS: operate on the local matrix, which may itself be a MATNEST.
1736 ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1737 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATNEST,&isnest);
// MATNEST: walk every submatrix and retype AIJ blocks to CUSPARSE.
1744 ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1754 ierr = PetscObjectTypeCompare((
PetscObject)sA,MATSEQAIJ,&isseqaij);
1756 ierr = PetscObjectTypeCompare((
PetscObject)sA,MATMPIAIJ,&ismpiaij);
1760 ierr = MatSetType(sA,MATSEQAIJCUSPARSE); PCHKERRQ(sA,ierr);
1766 ierr = MatSetType(sA,MATMPIAIJCUSPARSE); PCHKERRQ(sA,ierr);
// Ask PETSc to keep an explicit transpose for faster MatMultTranspose.
1772 ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1773 PETSC_TRUE); PCHKERRQ(sA,ierr);
// Make the nest create CUDA work vectors.
1780 ierr = MatSetVecType(tA,VECCUDA); PCHKERRQ(tA,ierr);
// Non-nest case: retype the matrix itself.
1786 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATSEQAIJ,&isseqaij);
1788 ierr = PetscObjectTypeCompare((
PetscObject)tA,MATMPIAIJ,&ismpiaij);
1792 ierr = MatSetType(tA,MATSEQAIJCUSPARSE); PCHKERRQ(tA,ierr);
1797 ierr = MatSetType(tA,MATMPIAIJCUSPARSE); PCHKERRQ(tA,ierr);
1802 ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1803 PETSC_TRUE); PCHKERRQ(tA,ierr);
1818 f = MatMultTranspose;
1819 fadd = MatMultTransposeAdd;
1830 ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1831 ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1832 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1836 ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1837 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1848 ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1852 ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1859 ierr = PetscObjectReference((
PetscObject)master.
A); PCHKERRQ(master.
A,ierr);
1871 MFEM_VERIFY(A,
"Mat not present");
1881 MFEM_VERIFY(A,
"Mat not present");
1892 ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1896 ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1903 ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1908 MFEM_ASSERT(x.
Size() ==
Width(),
"invalid x.Size() = " << x.
Size()
1909 <<
", expected size = " <<
Width());
1910 MFEM_ASSERT(y.
Size() ==
Height(),
"invalid y.Size() = " << y.
Size()
1911 <<
", expected size = " <<
Height());
1915 bool rw = (b != 0.0);
1918 MatMultKernel(A,a,XX->x,b,YY->
x,
false);
1926 MFEM_ASSERT(x.
Size() ==
Height(),
"invalid x.Size() = " << x.
Size()
1927 <<
", expected size = " <<
Height());
1928 MFEM_ASSERT(y.
Size() ==
Width(),
"invalid y.Size() = " << y.
Size()
1929 <<
", expected size = " <<
Width());
1933 bool rw = (b != 0.0);
1936 MatMultKernel(A,a,YY->
x,b,XX->x,
true);
1949 ierr = PetscViewerBinaryOpen(PetscObjectComm((
PetscObject)A),fname,
1950 FILE_MODE_WRITE,&view);
1954 ierr = PetscViewerASCIIOpen(PetscObjectComm((
PetscObject)A),fname,&view);
1957 ierr = MatView(A,view); PCHKERRQ(A,ierr);
1958 ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
1962 ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1968 MFEM_ASSERT(s.
Size() ==
Height(),
"invalid s.Size() = " << s.
Size()
1969 <<
", expected size = " <<
Height());
1973 ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
1979 MFEM_ASSERT(s.
Size() ==
Width(),
"invalid s.Size() = " << s.
Size()
1980 <<
", expected size = " <<
Width());
1984 ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
1990 ierr = MatShift(A,(
PetscScalar)s); PCHKERRQ(A,ierr);
1996 MFEM_ASSERT(s.
Size() ==
Height(),
"invalid s.Size() = " << s.
Size()
1997 <<
", expected size = " <<
Height());
1998 MFEM_ASSERT(s.
Size() ==
Width(),
"invalid s.Size() = " << s.
Size()
1999 <<
", expected size = " <<
Width());
2003 ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2011 "Petsc TripleMatrixProduct: Number of local cols of A " << A->
Width() <<
2012 " differs from number of local rows of P " << P->
Height());
2014 "Petsc TripleMatrixProduct: Number of local rows of A " << A->
Height() <<
2015 " differs from number of local cols of R " << R->
Width());
2017 ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2024 Mat pA = *A,pP = *P,pRt = *Rt;
2026 PetscBool Aismatis,Pismatis,Rtismatis;
2029 "Petsc RAP: Number of local cols of A " << A->
Width() <<
2030 " differs from number of local rows of P " << P->
Height());
2032 "Petsc RAP: Number of local rows of A " << A->
Height() <<
2033 " differs from number of local rows of Rt " << Rt->
Height());
2034 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&Aismatis);
2036 ierr = PetscObjectTypeCompare((
PetscObject)pP,MATIS,&Pismatis);
2038 ierr = PetscObjectTypeCompare((
PetscObject)pRt,MATIS,&Rtismatis);
2045 ISLocalToGlobalMapping cl2gP,cl2gRt;
2046 PetscInt rlsize,clsize,rsize,csize;
2048 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2049 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2050 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2051 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2052 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2053 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2054 ierr = MatCreate(A->
GetComm(),&B); PCHKERRQ(pA,ierr);
2055 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2056 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2057 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2058 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2059 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2060 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2063 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2069 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2070 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2072 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2080 ierr = PetscObjectReference((
PetscObject)lRt); PCHKERRQ(lRt,ierr);
2081 (*vmatsl2l)[0] = lRt;
2084 ierr = PetscContainerCreate(PetscObjectComm((
PetscObject)B),&c);
2086 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2087 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2091 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2095 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2096 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2097 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2098 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2104 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2110 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2111 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2113 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2128 #if defined(PETSC_HAVE_HYPRE)
2143 ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2153 ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2155 ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2164 MFEM_ABORT(
"Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2173 ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2174 MFEM_VERIFY(M == N,
"Rectangular case unsupported");
2177 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2181 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2184 ierr = Convert_Array_IS(
GetComm(),
true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2187 ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2191 ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2193 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2198 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2202 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2205 ierr = Convert_Array_IS(
GetComm(),
true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2206 ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2207 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2217 ierr = PetscObjectDereference((
PetscObject)A); CCHKERRQ(comm,ierr);
2226 MFEM_VERIFY(A,
"no associated PETSc Mat object");
2229 ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
2231 ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2233 ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2235 ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2237 ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2239 ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2241 #if defined(PETSC_HAVE_HYPRE)
2242 ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2256 Ae.
Mult(-1.0, X, 1.0, B);
2259 ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2260 ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2261 for (
int i = 0; i < ess_dof_list.
Size(); i++)
2263 int r = ess_dof_list[i];
2264 B(r) = array[r] * X(r);
2266 ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2295 if (
cid == KSP_CLASSID)
2298 ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2300 else if (
cid == SNES_CLASSID)
2303 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2306 else if (
cid == TS_CLASSID)
2309 ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2313 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2320 if (
cid == KSP_CLASSID)
2323 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2325 else if (
cid == SNES_CLASSID)
2328 ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2331 else if (
cid == TS_CLASSID)
2334 ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2338 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2345 if (
cid == KSP_CLASSID)
2348 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2351 else if (
cid == SNES_CLASSID)
2354 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2355 max_iter,PETSC_DEFAULT);
2357 else if (
cid == TS_CLASSID)
2360 ierr = TSSetMaxSteps(ts,max_iter);
2364 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2372 typedef PetscErrorCode (*myPetscFunc)(
void**);
2373 PetscViewerAndFormat *vf = NULL;
2374 PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(
obj));
2378 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2381 if (
cid == KSP_CLASSID)
2389 ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2393 #if PETSC_VERSION_LT(3,15,0)
2394 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2396 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2398 (myPetscFunc)PetscViewerAndFormatDestroy);
2403 ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2404 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2405 (myPetscFunc)PetscViewerAndFormatDestroy);
2409 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2410 PCHKERRQ(viewer,ierr);
2411 #if PETSC_VERSION_LT(3,15,0)
2412 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2414 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2416 (myPetscFunc)PetscViewerAndFormatDestroy);
2421 else if (
cid == SNES_CLASSID)
2427 ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2431 ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2432 (myPetscFunc)PetscViewerAndFormatDestroy);
2433 PCHKERRQ(snes,ierr);
2436 else if (
cid == TS_CLASSID)
2441 ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2446 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2452 return obj ? PetscObjectComm(
obj) : MPI_COMM_NULL;
2457 __mfem_monitor_ctx *monctx;
2458 ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2459 monctx->solver =
this;
2460 monctx->monitor =
ctx;
2461 if (
cid == KSP_CLASSID)
2463 ierr = KSPMonitorSet((
KSP)
obj,__mfem_ksp_monitor,monctx,
2464 __mfem_monitor_ctx_destroy);
2467 else if (
cid == SNES_CLASSID)
2469 ierr = SNESMonitorSet((
SNES)
obj,__mfem_snes_monitor,monctx,
2470 __mfem_monitor_ctx_destroy);
2473 else if (
cid == TS_CLASSID)
2475 ierr = TSMonitorSet((
TS)
obj,__mfem_ts_monitor,monctx,
2476 __mfem_monitor_ctx_destroy);
2481 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2488 if (
cid == SNES_CLASSID)
2490 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)
private_ctx;
2493 else if (
cid == TS_CLASSID)
2495 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)
private_ctx;
2500 MFEM_ABORT(
"Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2507 if (
cid == TS_CLASSID)
2512 ierr = TSGetSNES((
TS)
obj,&snes); PCHKERRQ(obj,ierr);
2513 ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2514 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2516 else if (
cid == SNES_CLASSID)
2520 ierr = SNESGetKSP((
SNES)
obj,&ksp); PCHKERRQ(obj,ierr);
2521 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2523 else if (
cid == KSP_CLASSID)
2525 ierr = KSPGetPC((
KSP)
obj,&pc); PCHKERRQ(obj,ierr);
2527 else if (
cid == PC_CLASSID)
2533 MFEM_ABORT(
"No support for PetscPreconditionerFactory for this object");
2537 ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2541 ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2547 if (!customize) {
clcustom =
true; }
2550 if (
cid == PC_CLASSID)
2553 ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2555 else if (
cid == KSP_CLASSID)
2558 ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2560 else if (
cid == SNES_CLASSID)
2563 ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2565 else if (
cid == TS_CLASSID)
2568 ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2572 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2580 if (
cid == KSP_CLASSID)
2583 KSPConvergedReason reason;
2584 ierr = KSPGetConvergedReason(ksp,&reason);
2586 return reason > 0 ? 1 : 0;
2588 else if (
cid == SNES_CLASSID)
2591 SNESConvergedReason reason;
2592 ierr = SNESGetConvergedReason(snes,&reason);
2593 PCHKERRQ(snes,ierr);
2594 return reason > 0 ? 1 : 0;
2596 else if (
cid == TS_CLASSID)
2599 TSConvergedReason reason;
2600 ierr = TSGetConvergedReason(ts,&reason);
2602 return reason > 0 ? 1 : 0;
2606 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2613 if (
cid == KSP_CLASSID)
2617 ierr = KSPGetIterationNumber(ksp,&its);
2621 else if (
cid == SNES_CLASSID)
2625 ierr = SNESGetIterationNumber(snes,&its);
2626 PCHKERRQ(snes,ierr);
2629 else if (
cid == TS_CLASSID)
2633 ierr = TSGetStepNumber(ts,&its);
2639 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2646 if (
cid == KSP_CLASSID)
2650 ierr = KSPGetResidualNorm(ksp,&norm);
2654 if (
cid == SNES_CLASSID)
2658 ierr = SNESGetFunctionNorm(snes,&norm);
2659 PCHKERRQ(snes,ierr);
2664 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
2665 return PETSC_MAX_REAL;
2672 if (
cid == SNES_CLASSID)
2674 __mfem_snes_ctx *snes_ctx;
2675 ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2676 snes_ctx->op = NULL;
2677 snes_ctx->bchandler = NULL;
2678 snes_ctx->work = NULL;
2682 else if (
cid == TS_CLASSID)
2684 __mfem_ts_ctx *ts_ctx;
2685 ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2687 ts_ctx->bchandler = NULL;
2688 ts_ctx->work = NULL;
2689 ts_ctx->work2 = NULL;
2690 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2691 ts_ctx->cached_ijacstate = -1;
2692 ts_ctx->cached_rhsjacstate = -1;
2693 ts_ctx->cached_splits_xstate = -1;
2694 ts_ctx->cached_splits_xdotstate = -1;
2705 if (
cid == SNES_CLASSID)
2707 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)
private_ctx;
2708 delete snes_ctx->work;
2710 else if (
cid == TS_CLASSID)
2712 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)
private_ctx;
2713 delete ts_ctx->work;
2714 delete ts_ctx->work2;
2716 ierr = PetscFree(
private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2723 : bctype(type_), setup(false), eval_t(0.0),
2724 eval_t_cached(std::numeric_limits<double>::min())
2732 ess_tdof_list.
Assign(list);
2738 if (setup) {
return; }
2742 this->
Eval(eval_t,eval_g);
2743 eval_t_cached = eval_t;
2754 (*this).SetUp(x.
Size());
2758 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2760 y[ess_tdof_list[i]] = 0.0;
2765 if (bctype !=
CONSTANT && eval_t != eval_t_cached)
2767 Eval(eval_t,eval_g);
2768 eval_t_cached = eval_t;
2770 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2772 y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2779 (*this).SetUp(x.
Size());
2782 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2784 x[ess_tdof_list[i]] = 0.0;
2789 if (bctype !=
CONSTANT && eval_t != eval_t_cached)
2791 Eval(eval_t,eval_g);
2792 eval_t_cached = eval_t;
2794 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2796 x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2803 (*this).SetUp(x.
Size());
2806 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2808 y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2813 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2815 y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2822 (*this).SetUp(x.
Size());
2823 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2825 x[ess_tdof_list[i]] = 0.0;
2831 (*this).SetUp(x.
Size());
2833 for (
int i = 0; i < ess_tdof_list.
Size(); ++i)
2835 y[ess_tdof_list[i]] = 0.0;
2846 ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2848 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
2849 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2853 const std::string &prefix)
2859 ierr = PetscObjectGetClassId(obj,&
cid); PCHKERRQ(obj,ierr);
2860 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2865 const std::string &prefix)
2871 ierr = PetscObjectGetClassId(obj, &
cid); PCHKERRQ(obj, ierr);
2872 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2884 bool delete_pA =
false;
2902 MFEM_VERIFY(pA,
"Unsupported operation!");
2910 PetscInt nheight,nwidth,oheight,owidth;
2912 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2913 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2914 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2915 if (nheight != oheight || nwidth != owidth)
2919 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
2925 ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
2934 if (delete_pA) {
delete pA; }
2949 bool delete_pA =
false;
2967 MFEM_VERIFY(pA,
"Unsupported operation!");
2970 bool delete_ppA =
false;
2973 if (oA == poA && !wrap)
2983 MFEM_VERIFY(ppA,
"Unsupported operation!");
2992 PetscInt nheight,nwidth,oheight,owidth;
2994 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
2995 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
2996 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
2997 if (nheight != oheight || nwidth != owidth)
3001 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3008 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3017 if (delete_pA) {
delete pA; }
3018 if (delete_ppA) {
delete ppA; }
3028 ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3031 ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3032 ierr = PetscObjectReference((
PetscObject)A); PCHKERRQ(ksp,ierr);
3037 ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3046 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3047 ierr = MakeShellPC(pc,precond,
false); PCHKERRQ(ksp,ierr);
3053 ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3054 ierr = PetscObjectReference((
PetscObject)P); PCHKERRQ(ksp,ierr);
3055 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3056 ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3057 ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3061 void PetscLinearSolver::MultKernel(
const Vector &b,
Vector &x,
bool trans)
const
3068 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(
obj, ierr);
3076 PetscParMatrix A = PetscParMatrix(pA,
true);
3077 X =
new PetscParVector(A,
false,
false);
3085 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
3086 PCHKERRQ(ksp, ierr);
3091 ierr = KSPSolveTranspose(ksp,
B->
x, X->x); PCHKERRQ(ksp,ierr);
3095 ierr = KSPSolve(ksp,
B->
x, X->x); PCHKERRQ(ksp,ierr);
3103 (*this).MultKernel(b,x,
false);
3108 (*this).MultKernel(b,x,
true);
3115 ierr = PetscObjectGetComm((
PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3116 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3125 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3127 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3134 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3136 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3140 const std::string &prefix)
3144 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3146 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3152 const std::string &prefix)
3156 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3158 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3159 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3163 const string &prefix)
3169 ierr = PetscObjectGetClassId(obj,&
cid); PCHKERRQ(obj,ierr);
3170 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3175 const string &prefix)
3179 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3181 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3182 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3188 bool delete_pA =
false;
3205 PetscInt nheight,nwidth,oheight,owidth;
3207 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3208 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3209 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3210 if (nheight != oheight || nwidth != owidth)
3214 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3220 ierr = PCSetOperators(pc,pA->
A,pA->
A); PCHKERRQ(
obj,ierr);
3229 if (delete_pA) {
delete pA; };
3232 void PetscPreconditioner::MultKernel(
const Vector &b,
Vector &x,
3236 "Iterative mode not supported for PetscPreconditioner");
3242 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(
obj, ierr);
3250 PetscParMatrix A(pA,
true);
3251 X =
new PetscParVector(A,
false,
false);
3262 ierr = PCApplyTranspose(pc,
B->
x, X->x); PCHKERRQ(pc, ierr);
3266 ierr = PCApply(pc,
B->
x, X->x); PCHKERRQ(pc, ierr);
3274 (*this).MultKernel(b,x,
false);
3279 (*this).MultKernel(b,x,
true);
3286 ierr = PetscObjectGetComm((
PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3287 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3298 void PetscBDDCSolver::BDDCSolverConstructor(
const PetscBDDCSolverParams &opts)
3300 MPI_Comm comm = PetscObjectComm(
obj);
3305 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3309 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&ismatis);
3311 MFEM_VERIFY(ismatis,
"PetscBDDCSolver needs the matrix in unassembled format");
3314 ParFiniteElementSpace *fespace = opts.fespace;
3315 if (opts.netflux && !fespace)
3317 MFEM_WARNING(
"Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3324 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3325 ierr = MatNullSpaceCreate(PetscObjectComm((
PetscObject)lA),PETSC_TRUE,0,NULL,
3326 &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3327 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3328 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3332 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(
obj,ierr);
3335 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3336 ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3345 int vdim = fespace->GetVDim();
3352 ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3353 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3354 ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3363 ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3376 ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3382 const FiniteElementCollection *fec = fespace->FEColl();
3383 bool h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3386 ParFiniteElementSpace *fespace_coords = fespace;
3388 sdim = fespace->GetParMesh()->SpaceDimension();
3391 fespace_coords =
new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3394 VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3395 ParGridFunction gf_coords(fespace_coords);
3396 gf_coords.ProjectCoefficient(coeff_coords);
3397 HypreParVector *hvec_coords = gf_coords.ParallelProject();
3399 hvec_coords->Size(),
false);
3407 Vec pvec_coords,lvec_coords;
3408 ISLocalToGlobalMapping l2g;
3414 ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3415 hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3416 CCHKERRQ(comm,ierr);
3417 ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3420 ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3421 CCHKERRQ(comm,ierr);
3422 ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3423 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3425 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3426 ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3427 ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3428 ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3429 ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3430 ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3431 ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3432 ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3433 ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3434 CCHKERRQ(comm,ierr);
3435 ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3440 ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3441 ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3442 #if PETSC_VERSION_LT(3,15,0)
3443 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3444 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3446 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3447 CCHKERRQ(comm,ierr);
3448 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3449 CCHKERRQ(comm,ierr);
3451 ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3452 ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3454 ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3455 ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3456 CCHKERRQ(PETSC_COMM_SELF,ierr);
3457 ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3458 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3459 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3460 ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3464 ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3473 ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3474 ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3478 for (
PetscInt d = 0; d < sdim; d++)
3480 coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3483 ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3488 for (
PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3490 if (fespace_coords != fespace)
3492 delete fespace_coords;
3499 IS dir = NULL, neu = NULL;
3502 Array<Mat> *l2l = NULL;
3503 if (opts.ess_dof_local || opts.nat_dof_local)
3508 MFEM_VERIFY(c,
"Local-to-local PETSc container not present");
3509 ierr = PetscContainerGetPointer(c,(
void**)&l2l); PCHKERRQ(c,ierr);
3516 PetscBool lpr = PETSC_FALSE,pr;
3517 if (opts.ess_dof) { lpr = PETSC_TRUE; }
3518 ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3520 MFEM_VERIFY(lpr == pr,
"ess_dof should be collectively set");
3522 if (opts.nat_dof) { lpr = PETSC_TRUE; }
3523 ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3525 MFEM_VERIFY(lpr == pr,
"nat_dof should be collectively set");
3528 ms[0] = -nf; ms[1] = nf;
3529 ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3531 MFEM_VERIFY(-Ms[0] == Ms[1],
3532 "number of fields should be the same across processes");
3539 PetscInt st = opts.ess_dof_local ? 0 : rst;
3540 if (!opts.ess_dof_local)
3543 ierr = Convert_Array_IS(comm,
true,opts.ess_dof,st,&dir);
3544 CCHKERRQ(comm,ierr);
3545 ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3550 ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3551 CCHKERRQ(comm,ierr);
3552 ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3557 PetscInt st = opts.nat_dof_local ? 0 : rst;
3558 if (!opts.nat_dof_local)
3561 ierr = Convert_Array_IS(comm,
true,opts.nat_dof,st,&neu);
3562 CCHKERRQ(comm,ierr);
3563 ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3568 ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3569 CCHKERRQ(comm,ierr);
3570 ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3577 ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3581 ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3583 ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3588 ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3590 ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3599 const FiniteElementCollection *fec = fespace->FEColl();
3600 bool edgespace, rtspace, h1space;
3601 bool needint = opts.netflux;
3602 bool tracespace, rt_tracespace, edge_tracespace;
3604 PetscBool B_is_Trans = PETSC_FALSE;
3606 ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3607 dim = pmesh->Dimension();
3608 vdim = fespace->GetVDim();
3609 h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3610 rtspace =
dynamic_cast<const RT_FECollection*
>(fec);
3611 edgespace =
dynamic_cast<const ND_FECollection*
>(fec);
3612 edge_tracespace =
dynamic_cast<const ND_Trace_FECollection*
>(fec);
3613 rt_tracespace =
dynamic_cast<const RT_Trace_FECollection*
>(fec);
3614 tracespace = edge_tracespace || rt_tracespace;
3617 if (fespace->GetNE() > 0)
3621 p = fespace->GetElementOrder(0);
3625 p = fespace->GetFaceOrder(0);
3626 if (dim == 2) { p++; }
3637 MFEM_WARNING(
"Tracespace case doesn't work for H(curl) and p=2,"
3638 " not using auxiliary quadrature");
3644 FiniteElementCollection *vfec;
3647 vfec =
new H1_Trace_FECollection(p,dim);
3651 vfec =
new H1_FECollection(p,dim);
3653 ParFiniteElementSpace *vfespace =
new ParFiniteElementSpace(pmesh,vfec);
3654 ParDiscreteLinearOperator *grad;
3655 grad =
new ParDiscreteLinearOperator(vfespace,fespace);
3658 grad->AddTraceFaceInterpolator(
new GradientInterpolator);
3662 grad->AddDomainInterpolator(
new GradientInterpolator);
3666 HypreParMatrix *hG = grad->ParallelAssemble();
3667 PetscParMatrix *G =
new PetscParMatrix(hG,
PETSC_MATAIJ);
3671 PetscBool conforming = PETSC_TRUE;
3672 if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3673 ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3685 MFEM_WARNING(
"Tracespace case doesn't work for H(div), not using"
3686 " auxiliary quadrature");
3692 if (vdim != dim) { needint =
false; }
3695 PetscParMatrix *
B = NULL;
3701 FiniteElementCollection *auxcoll;
3702 if (tracespace) { auxcoll =
new RT_Trace_FECollection(p,dim); }
3707 auxcoll =
new H1_FECollection(std::max(p-1,1),dim);
3711 auxcoll =
new L2_FECollection(p,dim);
3714 ParFiniteElementSpace *pspace =
new ParFiniteElementSpace(pmesh,auxcoll);
3715 ParMixedBilinearForm *b =
new ParMixedBilinearForm(fespace,pspace);
3721 b->AddTraceFaceIntegrator(
new VectorFECurlIntegrator);
3725 b->AddDomainIntegrator(
new VectorFECurlIntegrator);
3732 b->AddTraceFaceIntegrator(
new VectorFEDivergenceIntegrator);
3736 b->AddDomainIntegrator(
new VectorFEDivergenceIntegrator);
3741 b->AddDomainIntegrator(
new VectorDivergenceIntegrator);
3746 b->ParallelAssemble(Bh);
3748 Bh.SetOperatorOwner(
false);
3753 ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3754 if (!opts.ess_dof_local)
3756 ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3760 ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3762 B_is_Trans = PETSC_TRUE;
3771 ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3775 ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3776 ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3781 const std::string &prefix)
3784 BDDCSolverConstructor(opts);
3790 const std::string &prefix)
3793 BDDCSolverConstructor(opts);
3798 const string &prefix)
3802 ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3805 ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3809 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATNEST,&isnest);
3815 ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3816 ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3817 ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3827 ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3829 ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3834 const std::string &prefix)
3838 MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
3839 MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE);
3841 H2SolverConstructor(fes);
3847 #if defined(PETSC_HAVE_H2OPUS)
3859 VectorFunctionCoefficient ccoords(sdim, func_coords);
3861 ParGridFunction coords(fes);
3862 coords.ProjectCoefficient(ccoords);
3864 coords.ParallelProject(c);
3866 PCSetType(*
this,PCH2OPUS);
3869 PCSetFromOptions(*
this);
3871 MFEM_ABORT(
"Need PETSc configured with --download-h2opus");
3878 const std::string &prefix)
3883 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3885 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
3886 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3893 const std::string &prefix)
3898 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3900 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
3901 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3913 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj, ierr);
3914 ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
3926 ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
3927 PCHKERRQ(snes, ierr);
3928 ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
3929 PCHKERRQ(snes, ierr);
3932 (
void*)&op == fctx &&
3933 (
void*)&op == jctx);
3934 ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
3936 PCHKERRQ(snes,ierr);
3939 ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
3950 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
3951 ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
3960 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
3962 ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (
void *)snes_ctx);
3963 PCHKERRQ(snes, ierr);
3964 ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
3966 PCHKERRQ(snes, ierr);
3968 ierr = MatDestroy(&dummy);
3969 PCHKERRQ(snes, ierr);
3981 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
3982 snes_ctx->jacType = jacType;
3988 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
3989 snes_ctx->objective = objfn;
3992 ierr = SNESSetObjective(snes, __mfem_snes_objective, (
void *)snes_ctx);
3993 PCHKERRQ(snes, ierr);
4000 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4001 snes_ctx->postcheck = post;
4005 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4006 ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (
void *)snes_ctx);
4016 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)
private_ctx;
4017 snes_ctx->update = update;
4020 ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4027 bool b_nonempty = b.
Size();
4031 if (b_nonempty) { B->PlaceMemory(b.
GetMemory()); }
4039 ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4041 if (b_nonempty) { B->ResetMemory(); }
4051 ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4053 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
4054 ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4060 ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4062 ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4065 ierr = TSGetAdapt(ts,&tsad);
4067 ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4075 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj,ierr);
4076 ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4084 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4087 ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4090 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4091 ts_ctx->cached_ijacstate = -1;
4092 ts_ctx->cached_rhsjacstate = -1;
4093 ts_ctx->cached_splits_xstate = -1;
4094 ts_ctx->cached_splits_xdotstate = -1;
4106 ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (
void *)ts_ctx);
4108 ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (
void *)ts_ctx);
4110 ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4112 ierr = MatDestroy(&dummy);
4120 ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4129 ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (
void *)ts_ctx);
4131 ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4134 ierr = MatDestroy(&dummy);
4143 ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4146 PetscBool use = PETSC_TRUE;
4147 ierr = PetscOptionsGetBool(NULL,NULL,
"-mfem_use_splitjac",&use,NULL);
4150 ierr = PetscObjectComposeFunction((
PetscObject)ts,
"TSComputeSplitJacobians_C",
4151 __mfem_ts_computesplits);
4156 ierr = PetscObjectComposeFunction((
PetscObject)ts,
"TSComputeSplitJacobians_C",
4164 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4165 ts_ctx->jacType = jacType;
4170 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4171 return ts_ctx->type;
4176 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4179 ts_ctx->type = type;
4182 ierr = TSSetProblemType(ts, TS_LINEAR);
4187 ierr = TSSetProblemType(ts, TS_NONLINEAR);
4196 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4197 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4200 ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4210 ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4214 ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4215 ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4220 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4225 ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4234 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4235 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4236 ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4237 ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4249 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)
private_ctx;
4250 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4251 ts_ctx->cached_ijacstate = -1;
4252 ts_ctx->cached_rhsjacstate = -1;
4253 ts_ctx->cached_splits_xstate = -1;
4254 ts_ctx->cached_splits_xdotstate = -1;
4257 ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4262 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4264 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4270 #include "petsc/private/petscimpl.h"
4271 #include "petsc/private/matimpl.h"
4277 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4279 PetscFunctionBeginUser;
4282 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4294 PetscFunctionReturn(0);
4297 static PetscErrorCode __mfem_ts_ifunction(
TS ts,
PetscReal t, Vec x, Vec xp,
4300 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4302 PetscFunctionBeginUser;
4310 if (ts_ctx->bchandler)
4315 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(xx.Size()); }
4316 if (!ts_ctx->work2) { ts_ctx->work2 =
new mfem::Vector(xx.Size()); }
4322 bchandler->
ZeroBC(yy,*txp);
4332 ff.UpdateVecFromFlags();
4333 PetscFunctionReturn(0);
4336 static PetscErrorCode __mfem_ts_rhsfunction(
TS ts,
PetscReal t, Vec x, Vec f,
4339 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4341 PetscFunctionBeginUser;
4342 if (ts_ctx->bchandler) { MFEM_ABORT(
"RHS evaluation with bc not implemented"); }
4351 ff.UpdateVecFromFlags();
4352 PetscFunctionReturn(0);
4355 static PetscErrorCode __mfem_ts_ijacobian(
TS ts,
PetscReal t, Vec x,
4359 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4364 PetscObjectState state;
4365 PetscErrorCode ierr;
4367 PetscFunctionBeginUser;
4371 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4372 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4378 ierr = PetscObjectStateGet((
PetscObject)P,&state); CHKERRQ(ierr);
4380 std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4381 state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(0); }
4388 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4389 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4391 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4392 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4393 if (!ts_ctx->bchandler)
4400 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4407 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4411 if (!ts_ctx->bchandler) {
delete xx; }
4412 ts_ctx->cached_shift = shift;
4415 bool delete_pA =
false;
4419 pA->
GetType() != ts_ctx->jacType))
4427 if (ts_ctx->bchandler)
4435 PetscObjectState nonzerostate;
4436 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4441 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4442 if (delete_pA) {
delete pA; }
4454 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4456 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4459 ierr = PetscObjectStateGet((
PetscObject)P,&ts_ctx->cached_ijacstate);
4465 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4466 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4467 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4468 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4469 PetscFunctionReturn(0);
4472 static PetscErrorCode __mfem_ts_computesplits(
TS ts,
PetscReal t,Vec x,Vec xp,
4476 __mfem_ts_ctx* ts_ctx;
4480 PetscObjectState state;
4481 PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4482 PetscBool assembled;
4483 PetscErrorCode ierr;
4485 PetscFunctionBeginUser;
4489 ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4490 ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4492 if (Axp && Axp != Jxp)
4494 ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4495 ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4498 ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(
void**)&ts_ctx); CHKERRQ(ierr);
4501 ierr = PetscObjectStateGet((
PetscObject)Jx,&state); CHKERRQ(ierr);
4503 state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4504 ierr = PetscObjectStateGet((
PetscObject)Jxp,&state); CHKERRQ(ierr);
4506 state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4507 if (!rx && !rxp) { PetscFunctionReturn(0); }
4514 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4515 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4517 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
4518 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4519 if (!ts_ctx->bchandler)
4526 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4533 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4542 bool delete_mat =
false;
4546 pJx->
GetType() != ts_ctx->jacType))
4551 ierr = PetscObjectReference((
PetscObject)B); CHKERRQ(ierr);
4559 ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4562 ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4567 ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4568 ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4571 if (delete_mat) {
delete pJx; }
4575 if (ts_ctx->bchandler)
4592 pJxp->
GetType() != ts_ctx->jacType))
4597 ierr = PetscObjectReference((
PetscObject)B); CHKERRQ(ierr);
4600 &oJxp,ts_ctx->jacType);
4604 ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4607 ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4612 ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4613 ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4615 if (delete_mat) {
delete pJxp; }
4619 if (ts_ctx->bchandler)
4630 ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4634 ierr = PetscObjectStateGet((
PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4636 ierr = PetscObjectStateGet((
PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4641 if (!ts_ctx->bchandler) {
delete xx; }
4642 PetscFunctionReturn(0);
4645 static PetscErrorCode __mfem_ts_rhsjacobian(
TS ts,
PetscReal t, Vec x,
4648 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4652 PetscObjectState state;
4653 PetscErrorCode ierr;
4655 PetscFunctionBeginUser;
4659 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4660 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4664 ierr = PetscObjectStateGet((
PetscObject)P,&state); CHKERRQ(ierr);
4666 state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(0); }
4673 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4674 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4675 if (!ts_ctx->bchandler)
4682 if (!ts_ctx->work) { ts_ctx->work =
new mfem::Vector(n); }
4689 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4693 if (!ts_ctx->bchandler) {
delete xx; }
4696 bool delete_pA =
false;
4700 pA->
GetType() != ts_ctx->jacType))
4708 if (ts_ctx->bchandler)
4716 PetscObjectState nonzerostate;
4717 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4722 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4723 if (delete_pA) {
delete pA; }
4735 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4737 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4742 ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4744 ierr = PetscObjectStateGet((
PetscObject)P,&ts_ctx->cached_rhsjacstate);
4750 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4751 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4752 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4753 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4754 PetscFunctionReturn(0);
4760 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4762 PetscFunctionBeginUser;
4765 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4774 PetscErrorCode ierr;
4776 ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4783 PetscErrorCode ierr;
4785 ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4790 PetscFunctionReturn(0);
4793 static PetscErrorCode __mfem_snes_jacobian(
SNES snes, Vec x,
Mat A,
Mat P,
4798 PetscErrorCode ierr;
4800 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4802 PetscFunctionBeginUser;
4803 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4804 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4805 if (!snes_ctx->bchandler)
4812 if (!snes_ctx->work) { snes_ctx->work =
new mfem::Vector(n); }
4815 xx = snes_ctx->work;
4821 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
4822 if (!snes_ctx->bchandler) {
delete xx; }
4825 bool delete_pA =
false;
4829 pA->
GetType() != snes_ctx->jacType))
4837 if (snes_ctx->bchandler)
4845 PetscObjectState nonzerostate;
4846 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4850 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4851 if (delete_pA) {
delete pA; }
4863 ierr = PetscObjectTypeCompare((
PetscObject)P,MATNEST,&isnest);
4865 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4870 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4871 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4877 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4878 ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4879 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4880 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4881 PetscFunctionReturn(0);
4884 static PetscErrorCode __mfem_snes_function(
SNES snes, Vec x, Vec f,
void *ctx)
4886 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4888 PetscFunctionBeginUser;
4891 if (snes_ctx->bchandler)
4898 snes_ctx->op->Mult(*txx,ff);
4905 snes_ctx->op->Mult(xx,ff);
4907 ff.UpdateVecFromFlags();
4908 PetscFunctionReturn(0);
4911 static PetscErrorCode __mfem_snes_objective(
SNES snes, Vec x,
PetscReal *f,
4914 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4916 PetscFunctionBeginUser;
4917 if (!snes_ctx->objective)
4919 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing objective function");
4923 (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
4925 PetscFunctionReturn(0);
4928 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
4929 PetscBool *cy,PetscBool *cw,
void* ctx)
4931 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4932 bool lcy =
false,lcw =
false;
4934 PetscFunctionBeginUser;
4938 (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
4939 if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
4940 if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
4941 PetscFunctionReturn(0);
4944 static PetscErrorCode __mfem_snes_update(
SNES snes,
PetscInt it)
4947 __mfem_snes_ctx* snes_ctx;
4949 PetscFunctionBeginUser;
4951 ierr = SNESGetFunction(snes,&F,NULL,(
void **)&snes_ctx); CHKERRQ(ierr);
4952 ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
4955 ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
4958 ierr = VecDestroy(&pX); CHKERRQ(ierr);
4962 if (!pX) SETERRQ(PetscObjectComm((
PetscObject)snes),PETSC_ERR_USER,
4963 "Missing previous solution");
4964 ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
4969 (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
4971 ierr = VecCopy(X,pX); CHKERRQ(ierr);
4972 PetscFunctionReturn(0);
4978 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4980 PetscFunctionBeginUser;
4983 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"Missing monitor context");
4992 PetscErrorCode ierr;
4994 ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
5001 PetscErrorCode ierr;
5003 ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5008 PetscFunctionReturn(0);
5011 static PetscErrorCode __mfem_mat_shell_apply(
Mat A, Vec x, Vec y)
5014 PetscErrorCode ierr;
5016 PetscFunctionBeginUser;
5017 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5018 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5022 yy.UpdateVecFromFlags();
5023 PetscFunctionReturn(0);
5026 static PetscErrorCode __mfem_mat_shell_apply_transpose(
Mat A, Vec x, Vec y)
5029 PetscErrorCode ierr;
5032 PetscFunctionBeginUser;
5033 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5034 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5037 ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5046 yy.UpdateVecFromFlags();
5047 PetscFunctionReturn(0);
5050 static PetscErrorCode __mfem_mat_shell_copy(
Mat A,
Mat B, MatStructure str)
5053 PetscErrorCode ierr;
5055 PetscFunctionBeginUser;
5056 ierr = MatShellGetContext(A,(
void **)&op); CHKERRQ(ierr);
5057 if (!op) { SETERRQ(PetscObjectComm((
PetscObject)A),PETSC_ERR_LIB,
"Missing operator"); }
5058 ierr = MatShellSetContext(B,(
void *)op); CHKERRQ(ierr);
5059 PetscFunctionReturn(0);
5062 static PetscErrorCode __mfem_mat_shell_destroy(
Mat A)
5064 PetscFunctionBeginUser;
5065 PetscFunctionReturn(0);
5068 static PetscErrorCode __mfem_pc_shell_view(
PC pc, PetscViewer viewer)
5070 __mfem_pc_shell_ctx *
ctx;
5071 PetscErrorCode ierr;
5073 PetscFunctionBeginUser;
5074 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5078 ierr = PetscObjectTypeCompare((
PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5085 ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5091 ierr = PetscViewerASCIIPrintf(viewer,
5092 "No information available on the mfem::Solver\n");
5096 if (isascii && ctx->factory)
5098 ierr = PetscViewerASCIIPrintf(viewer,
5099 "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5103 PetscFunctionReturn(0);
5106 static PetscErrorCode __mfem_pc_shell_apply(
PC pc, Vec x, Vec y)
5108 __mfem_pc_shell_ctx *
ctx;
5109 PetscErrorCode ierr;
5111 PetscFunctionBeginUser;
5114 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5117 ctx->op->Mult(xx,yy);
5118 yy.UpdateVecFromFlags();
5124 PetscFunctionReturn(0);
5127 static PetscErrorCode __mfem_pc_shell_apply_transpose(
PC pc, Vec x, Vec y)
5129 __mfem_pc_shell_ctx *
ctx;
5130 PetscErrorCode ierr;
5132 PetscFunctionBeginUser;
5135 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5138 ctx->op->MultTranspose(xx,yy);
5139 yy.UpdateVecFromFlags();
5145 PetscFunctionReturn(0);
5148 static PetscErrorCode __mfem_pc_shell_setup(
PC pc)
5150 __mfem_pc_shell_ctx *
ctx;
5152 PetscFunctionBeginUser;
5153 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5164 ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5173 PetscFunctionReturn(0);
5176 static PetscErrorCode __mfem_pc_shell_destroy(
PC pc)
5178 __mfem_pc_shell_ctx *
ctx;
5179 PetscErrorCode ierr;
5181 PetscFunctionBeginUser;
5182 ierr = PCShellGetContext(pc,(
void **)&ctx); CHKERRQ(ierr);
5188 PetscFunctionReturn(0);
5191 static PetscErrorCode __mfem_array_container_destroy(
void *ptr)
5193 PetscErrorCode ierr;
5195 PetscFunctionBeginUser;
5196 ierr = PetscFree(ptr); CHKERRQ(ierr);
5197 PetscFunctionReturn(0);
5200 static PetscErrorCode __mfem_matarray_container_destroy(
void *ptr)
5203 PetscErrorCode ierr;
5205 PetscFunctionBeginUser;
5206 for (
int i=0; i<a->
Size(); i++)
5210 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5213 PetscFunctionReturn(0);
5216 static PetscErrorCode __mfem_monitor_ctx_destroy(
void **ctx)
5218 PetscErrorCode ierr;
5220 PetscFunctionBeginUser;
5221 ierr = PetscFree(*ctx); CHKERRQ(ierr);
5222 PetscFunctionReturn(0);
5227 PetscErrorCode MakeShellPC(
PC pc,
mfem::Solver &precond,
bool ownsop)
5229 PetscFunctionBeginUser;
5230 __mfem_pc_shell_ctx *ctx =
new __mfem_pc_shell_ctx;
5232 ctx->ownsop = ownsop;
5233 ctx->factory = NULL;
5239 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5241 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5242 ierr = PCShellSetName(pc,
"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5243 ierr = PCShellSetContext(pc,(
void *)ctx); CHKERRQ(ierr);
5244 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5245 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5247 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5248 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5249 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5250 PetscFunctionReturn(0);
5255 PetscErrorCode MakeShellPCWithFactory(
PC pc,
5258 PetscFunctionBeginUser;
5259 __mfem_pc_shell_ctx *ctx =
new __mfem_pc_shell_ctx;
5262 ctx->factory = factory;
5268 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5270 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5271 ierr = PCShellSetName(pc,factory->
GetName()); CHKERRQ(ierr);
5272 ierr = PCShellSetContext(pc,(
void *)ctx); CHKERRQ(ierr);
5273 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5274 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5276 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5277 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5278 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5279 PetscFunctionReturn(0);
5284 static PetscErrorCode Convert_Array_IS(MPI_Comm comm,
bool islist,
5289 const int *data = list ? list->
GetData() : NULL;
5290 PetscErrorCode ierr;
5292 PetscFunctionBeginUser;
5293 ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5296 for (
PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5303 if (data[i]) { idxs[cum++] = i+st; }
5307 ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5309 PetscFunctionReturn(0);
5315 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5323 PetscErrorCode ierr;
5325 PetscFunctionBeginUser;
5326 for (
int i = 0; i < pl2l.
Size(); i++)
5330 ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(
const PetscInt**)&ii,
5331 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
5332 MFEM_VERIFY(done,
"Unable to perform MatGetRowIJ on " << i <<
" l2l matrix");
5333 ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5334 #if defined(PETSC_USE_64BIT_INDICES)
5335 int nnz = (int)ii[m];
5336 int *mii =
new int[m+1];
5337 int *mjj =
new int[nnz];
5338 for (
int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5339 for (
int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5344 ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5346 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
5347 MFEM_VERIFY(done,
"Unable to perform MatRestoreRowIJ on "
5348 << i <<
" l2l matrix");
5351 for (
int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5353 const int* vdata = mark->
GetData();
5354 int* sdata = sub_dof_marker.
GetData();
5355 int cumh = 0, cumw = 0;
5356 for (
int i = 0; i < l2l.Size(); i++)
5361 l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5362 cumh += l2l[i]->Height();
5363 cumw += l2l[i]->Width();
5365 ierr = Convert_Array_IS(comm,
false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5366 for (
int i = 0; i < pl2l.
Size(); i++)
5370 PetscFunctionReturn(0);
5373 #if !defined(PETSC_HAVE_HYPRE)
5375 #if defined(HYPRE_MIXEDINT)
5376 #error "HYPRE_MIXEDINT not supported"
5379 #include "_hypre_parcsr_mv.h"
5380 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,
Mat* pA)
5382 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5383 hypre_CSRMatrix *hdiag,*hoffd;
5385 PetscInt *dii,*djj,*oii,*ojj,*iptr;
5388 PetscErrorCode ierr;
5390 PetscFunctionBeginUser;
5391 hdiag = hypre_ParCSRMatrixDiag(hA);
5392 hoffd = hypre_ParCSRMatrixOffd(hA);
5393 m = hypre_CSRMatrixNumRows(hdiag);
5394 n = hypre_CSRMatrixNumCols(hdiag);
5395 dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5396 onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5397 ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5398 ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5399 ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5400 ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*
sizeof(
PetscInt));
5402 ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*
sizeof(
PetscInt));
5404 ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*
sizeof(
PetscScalar));
5411 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5415 ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5420 ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5421 ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5422 ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5423 ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*
sizeof(
PetscInt));
5425 offdj = hypre_CSRMatrixJ(hoffd);
5426 coffd = hypre_ParCSRMatrixColMapOffd(hA);
5427 for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5428 ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*
sizeof(
PetscScalar));
5435 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5439 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5440 djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5446 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5452 void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5453 const char *names[6] = {
"_mfem_csr_dii",
5464 ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5465 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5466 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5470 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5472 PetscFunctionReturn(0);
5475 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,
Mat* pA)
5478 ISLocalToGlobalMapping rl2g,cl2g;
5480 hypre_CSRMatrix *hdiag,*hoffd;
5481 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5483 const char *names[2] = {
"_mfem_csr_aux",
5487 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5489 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5490 PetscErrorCode ierr;
5492 PetscFunctionBeginUser;
5494 str = hypre_ParCSRMatrixFirstRowIndex(hA);
5495 stc = hypre_ParCSRMatrixFirstColDiag(hA);
5496 hdiag = hypre_ParCSRMatrixDiag(hA);
5497 hoffd = hypre_ParCSRMatrixOffd(hA);
5498 dr = hypre_CSRMatrixNumRows(hdiag);
5499 dc = hypre_CSRMatrixNumCols(hdiag);
5500 nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5501 hdi = hypre_CSRMatrixI(hdiag);
5502 hdj = hypre_CSRMatrixJ(hdiag);
5503 hdd = hypre_CSRMatrixData(hdiag);
5504 oc = hypre_CSRMatrixNumCols(hoffd);
5505 nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5506 hoi = hypre_CSRMatrixI(hoffd);
5507 hoj = hypre_CSRMatrixJ(hoffd);
5508 hod = hypre_CSRMatrixData(hoffd);
5511 ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5512 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5513 ierr = ISDestroy(&is); CHKERRQ(ierr);
5514 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5515 ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5516 for (i=0; i<dc; i++) { aux[i] = i+stc; }
5517 for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5518 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5519 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5520 ierr = ISDestroy(&is); CHKERRQ(ierr);
5523 ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5524 ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5525 ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5526 ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5527 ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5528 ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5531 ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5532 ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5536 *ii = *(hdi++) + *(hoi++);
5537 for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5541 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5542 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5543 *(++ii) = *(hdi++) + *(hoi++);
5544 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5546 for (; cum<dr; cum++) { *(++ii) = nnz; }
5550 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5558 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5559 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5560 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5564 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5566 ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5567 ierr = MatDestroy(&lA); CHKERRQ(ierr);
5568 ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5569 ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5570 PetscFunctionReturn(0);
5574 #include <petsc/private/matimpl.h>
5576 static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm,
PetscInt m,
5580 ierr = MatCreate(comm,A); CHKERRQ(ierr);
5581 ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5582 ierr = PetscObjectChangeTypeName((
PetscObject)*A,
"mfemdummy"); CHKERRQ(ierr);
5583 (*A)->preallocated = PETSC_TRUE;
5584 ierr = MatSetUp(*A); CHKERRQ(ierr);
5585 PetscFunctionReturn(0);
5588 #include <petsc/private/vecimpl.h>
5590 #if defined(PETSC_HAVE_DEVICE)
5591 static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5595 PetscFunctionReturn(0);
5599 static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5602 #if defined(PETSC_HAVE_DEVICE)
5603 *flg = v->boundtocpu;
5607 PetscFunctionReturn(0);
5610 static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject o)
5612 PetscErrorCode ierr;
5615 ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5616 PetscFunctionReturn(0);
5619 #endif // MFEM_USE_PETSC
5620 #endif // MFEM_USE_MPI
Ordering::Type GetOrdering() const
Return the ordering method.
int Size() const
Return the logical size of the array.
void SetPreconditioner(Solver &precond)
void SetPrintLevel(int plev)
static int GetId()
Get the device id of the configured device.
virtual int NumNonZeroElems() const
Returns the number of the nonzero elements in the matrix.
PetscParVector * B
Right-hand side and solution vector.
void CreatePrivateContext()
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
void trans(const Vector &u, Vector &x)
MPI_Comm GetComm() const
MPI communicator.
void EliminateBC(const HypreParMatrix &A, const HypreParMatrix &Ae, const Array< int > &ess_dof_list, const Vector &X, Vector &B)
Eliminate essential BC specified by ess_dof_list from the solution X to the r.h.s. B.
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
void SetUpdate(void(*update)(Operator *op, int it, const Vector &F, const Vector &X, const Vector &D, const Vector &P))
bool isHomogeneous() const
True if type is HOMOGENEOUS.
virtual void SetOperator(const Operator &op)
Specification of the nonlinear operator.
void MakeDataOwner() const
Set the Vector data (host pointer) ownership flag.
Abstract class for PETSc's preconditioners.
PetscParMatrix & operator+=(const PetscParMatrix &B)
ParMesh * GetParMesh() const
petsc::Vec x
The actual PETSc object.
virtual void SetOperator(const Operator &op)
HypreParMatrix * RAP(const HypreParMatrix *A, const HypreParMatrix *P)
Returns the matrix P^t * A * P.
bool clcustom
Boolean to handle SetFromOptions calls.
void SetObjective(void(*obj)(Operator *op, const Vector &x, double *f))
Specification of an objective function to be used for line search.
Device memory; using CUDA or HIP *Malloc and *Free.
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
petsc::Mat A
The actual PETSc object.
Wrapper for PETSc's matrix class.
PetscParVector & operator+=(const PetscParVector &y)
void ApplyBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = g (internally evaluated) on ess_tdof_list
Abstract class for PETSc's linear solvers.
void SetSize(int s)
Resize the vector to size s.
Abstract class for PETSc's solvers.
PetscODESolver::Type GetType() const
virtual void Run(Vector &x, double &t, double &dt, double t_final)
Perform time integration from time t [in] to time tf [in].
void SetPostCheck(void(*post)(Operator *op, const Vector &X, Vector &Y, Vector &W, bool &changed_y, bool &changed_w))
PetscBCHandler(Type type_=ZERO)
void Delete()
Delete the owned pointers and reset the Memory object.
Base abstract class for first order time dependent operators.
PetscInt GetNumCols() const
Returns the local number of columns.
void MakeAliasForSync(const Memory< double > &base_, int offset_, int size_, bool usedev_)
virtual Operator & GetGradient(const Vector &x) const
Evaluate the gradient operator at the point x. The default behavior in class Operator is to generate ...
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
void SetType(PetscODESolver::Type)
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor to take access to the solver.
Pointer to an Operator of a specified type.
void Mult(double a, const Vector &x, double b, Vector &y) const
Matvec: y = a A x + b y.
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
bool DeviceRequested() const
T * GetData()
Returns the data.
virtual Operator & GetExplicitGradient(const Vector &x) const
Return an Operator representing dG/dx at the given point x and the currently set time.
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true...
int Size() const
Returns the size of the vector.
virtual void Step(Vector &x, double &t, double &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in]...
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
void Init()
Initialize with defaults. Does not initialize inherited members.
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
PetscClassId cid
The class id of the actual PETSc object.
ID for class PetscParMatrix, MATHYPRE format.
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
virtual void SetTime(const double t_)
Set the current time.
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
Abstract parallel finite element space.
void SetRelTol(double tol)
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Randomize(PetscInt seed=0)
Set random values.
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string())
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
void SetMaxIter(int max_iter)
PetscParVector & operator*=(PetscScalar d)
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
void SortColumnIndices()
Sort the column indices corresponding to each row.
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
int Capacity() const
Return the size of the allocated memory.
Memory< double > & GetMemory()
Return a reference to the Memory object used by the Vector.
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, double diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
void Customize(bool customize=true) const
Customize object with options set.
PetscInt M() const
Returns the global number of rows.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
virtual ~PetscLinearSolver()
PetscInt GetNumRows() const
Returns the local number of rows.
virtual int GetTrueVSize() const
Return the number of local vector true dofs.
virtual void ExplicitMult(const Vector &x, Vector &y) const
Perform the action of the explicit part of the operator, G: y = G(x, t) where t is the current time...
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray()...
const double * GetHostPointer() const
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
ID for the base class Operator, i.e. any type.
double f(const Vector &xvec)
void write(std::ostream &os, T value)
Write 'value' to stream.
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
Abstract class for monitoring PETSc's solvers.
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
struct s_NavierContext ctx
void SetJacobianType(Operator::Type type)
int to_int(const std::string &str)
Convert a string to an int.
const double * HostReadData() const
Auxiliary class for BDDC customization.
virtual ~PetscPreconditioner()
ID for class PetscParMatrix, unspecified format.
MPI_Comm GetComm() const
Get the associated MPI communicator.
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
void LoseData()
NULL-ifies the data.
void SetFlagsFromMask_() const
bool DeviceIsValid() const
Return true if device pointer is valid.
const int * HostReadJ() const
PetscParMatrix & operator=(const PetscParMatrix &B)
bool ReadRequested() const
PetscInt GetColStart() const
Returns the global index of the first local column.
virtual ~PetscParVector()
Calls PETSc's destroy function.
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
PetscBCHandler * bchandler
Handler for boundary conditions.
PetscParMatrix & operator-=(const PetscParMatrix &B)
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
void Zero(Vector &x)
Replace boundary dofs with 0.
void Assign(const T *)
Copy data from a pointer. 'Size()' elements are copied.
MPI_Comm GetComm() const
Get the associated MPI communicator.
T read(std::istream &is)
Read a value from the stream and return it.
bool WriteRequested() const
void Shift(double s)
Shift diagonal by a constant.
ID for class PetscParMatrix, MATNEST format.
int GetVDim() const
Returns vector dimension.
void SetTime(double t)
Sets the current time.
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true...
ID for class PetscParMatrix, MATSHELL format.
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void SetMat(petsc::Mat newA)
Replace the inner Mat Object. The reference count of newA is increased.
Bitwise-OR of all CUDA backends.
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc's MATSHELL object and returns the Mat in B...
int SpaceDimension() const
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory<T> &mem, int size, false)
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Wrapper for hypre's parallel vector class.
HYPRE_BigInt * GetTrueDofOffsets() const
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
virtual void Eval(double t, Vector &g)
Boundary conditions evaluation.
Type
Enumeration defining IDs for some classes derived from Operator.
double p(const Vector &x, double t)
PetscInt NNZ() const
Returns the number of nonzeros.
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true)
virtual void Mult(const Vector &b, Vector &x) const
Application of the preconditioner.
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
MemoryType
Memory types supported by MFEM.
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
virtual Operator & GetImplicitGradient(const Vector &x, const Vector &k, double shift) const
Return an Operator representing (dF/dk shift + dF/dx) at the given x, k, and the currently set time...
PetscInt N() const
Returns the global number of columns.
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
void SetJacobianType(Operator::Type type)
bool operatorset
Boolean to handle SetOperator calls.
PetscParVector * X
Auxiliary vectors for typecasting.
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
void MultTranspose(double a, const Vector &x, double b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
bool IsAliasForSync() const
int height
Dimension of the output / number of rows in the matrix.
PetscParVector & operator-=(const PetscParVector &y)
virtual void ImplicitMult(const Vector &x, const Vector &k, Vector &y) const
Perform the action of the implicit part of the operator, F: y = F(x, k, t) where t is the current tim...
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory<T> &mem, int size, false)
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true, or the mfem::Device's HostMemoryClass, otherwise.
virtual ~PetscNonlinearSolver()
virtual ~PetscODESolver()
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object...
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
bool Empty() const
Return true if the Memory object is empty, see Reset().
ID for class PetscParMatrix, MATAIJ format.
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
MPI_Comm GetComm() const
Get the associated MPI communicator.
PetscInt GlobalSize() const
Returns the global number of rows.
PetscInt GetRowStart() const
Returns the global index of the first local row.
struct _p_PetscObject * PetscObject
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
void ResetMemory()
Completes the operation started with PlaceMemory.
void SetAbsTol(double tol)
Helper class for handling essential boundary conditions.
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
PetscParVector & operator=(PetscScalar d)
Set constant values.
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
int NumRowBlocks() const
Return the number of row blocks.
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
const FiniteElementCollection * FEColl() const
const int * HostReadI() const
Identity Operator I: x -> x.
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array...
bool UseDevice() const
Read the internal device flag.
void FreePrivateContext()
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Vector * GlobalVector() const
Returns the global vector in each processor.
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as s...
Wrapper for hypre's ParCSR matrix class.
A class to handle Block systems in a matrix-free implementation.
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory<T> &mem, int size, false)
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
void operator*=(double s)
Scale all entries by s: A_scaled = s*A.
int NumColBlocks() const
Return the number of column blocks.
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
ID for class PetscParMatrix, MATIS format.
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
void PlaceMemory(Memory< double > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding ...
int width
Dimension of the input / number of columns in the matrix.
void * private_ctx
Private context for solver.
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
const double * GetDevicePointer() const
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
void ScaleCols(const Vector &s)
Scale the local col i by s(i).
double f(const Vector &p)