14 #include "../config/config.hpp" 20 #include "../fem/fem.hpp" 23 #if defined(PETSC_HAVE_HYPRE) 24 #include "petscmathypre.h" 28 #if PETSC_VERSION_LT(3,11,0) 29 #define VecLockReadPush VecLockPush 30 #define VecLockReadPop VecLockPop 32 #if PETSC_VERSION_LT(3,12,0) 33 #define VecGetArrayWrite VecGetArray 34 #define VecRestoreArrayWrite VecRestoreArray 35 #define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C) 36 #define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C) 38 #if PETSC_VERSION_LT(3,19,0) 39 #define PETSC_SUCCESS 0 63 static PetscErrorCode __mfem_snes_jacobian(
SNES,
Vec,
Mat,
Mat,
void*);
64 static PetscErrorCode __mfem_snes_function(
SNES,
Vec,
Vec,
void*);
67 static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,
Vec,
Vec,
Vec,
68 PetscBool*,PetscBool*,
void*);
70 static PetscErrorCode __mfem_pc_shell_apply(
PC,
Vec,
Vec);
71 static PetscErrorCode __mfem_pc_shell_apply_transpose(
PC,
Vec,
Vec);
72 static PetscErrorCode __mfem_pc_shell_setup(
PC);
73 static PetscErrorCode __mfem_pc_shell_destroy(
PC);
74 static PetscErrorCode __mfem_pc_shell_view(
PC,PetscViewer);
75 static PetscErrorCode __mfem_mat_shell_apply(
Mat,
Vec,
Vec);
76 static PetscErrorCode __mfem_mat_shell_apply_transpose(
Mat,
Vec,
Vec);
77 static PetscErrorCode __mfem_mat_shell_destroy(
Mat);
78 static PetscErrorCode __mfem_mat_shell_copy(
Mat,
Mat,MatStructure);
79 static PetscErrorCode __mfem_array_container_destroy(
void*);
80 static PetscErrorCode __mfem_matarray_container_destroy(
void*);
81 static PetscErrorCode __mfem_monitor_ctx_destroy(
void**);
84 static PetscErrorCode Convert_Array_IS(MPI_Comm,
bool,
const mfem::Array<int>*,
89 static PetscErrorCode MakeShellPCWithFactory(
PC,
95 #if !defined(PETSC_HAVE_HYPRE) 96 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,
Mat*);
97 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,
Mat*);
100 #if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE) 101 #if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA) 103 #define PETSC_VECDEVICE VECCUDA 104 #define PETSC_MATAIJDEVICE MATAIJCUSPARSE 105 #define VecDeviceGetArrayRead VecCUDAGetArrayRead 106 #define VecDeviceGetArrayWrite VecCUDAGetArrayWrite 107 #define VecDeviceGetArray VecCUDAGetArray 108 #define VecDeviceRestoreArrayRead VecCUDARestoreArrayRead 109 #define VecDeviceRestoreArrayWrite VecCUDARestoreArrayWrite 110 #define VecDeviceRestoreArray VecCUDARestoreArray 111 #define VecDevicePlaceArray VecCUDAPlaceArray 112 #define VecDeviceResetArray VecCUDAResetArray 113 #elif defined(MFEM_USE_HIP) && defined(PETSC_HAVE_HIP) 115 #define PETSC_VECDEVICE VECHIP 116 #define PETSC_MATAIJDEVICE MATAIJHIPSPARSE 117 #define VecDeviceGetArrayRead VecHIPGetArrayRead 118 #define VecDeviceGetArrayWrite VecHIPGetArrayWrite 119 #define VecDeviceGetArray VecHIPGetArray 120 #define VecDeviceRestoreArrayRead VecHIPRestoreArrayRead 121 #define VecDeviceRestoreArrayWrite VecHIPRestoreArrayWrite 122 #define VecDeviceRestoreArray VecHIPRestoreArray 123 #define VecDevicePlaceArray VecHIPPlaceArray 124 #define VecDeviceResetArray VecHIPResetArray 126 #define VecDeviceGetArrayRead VecGetArrayRead 127 #define VecDeviceGetArrayWrite VecGetArrayWrite 128 #define VecDeviceGetArray VecGetArray 129 #define VecDeviceRestoreArrayRead VecRestoreArrayRead 130 #define VecDeviceRestoreArrayWrite VecRestoreArrayWrite 131 #define VecDeviceRestoreArray VecRestoreArray 132 #define VecDevicePlaceArray VecPlaceArray 133 #define VecDeviceResetArray VecResetArray 137 #if defined(PETSC_HAVE_DEVICE) 138 static PetscErrorCode __mfem_VecSetOffloadMask(
Vec,PetscOffloadMask);
140 static PetscErrorCode __mfem_VecBoundToCPU(
Vec,PetscBool*);
141 static PetscErrorCode __mfem_PetscObjectStateIncrease(
PetscObject);
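// Private context structs handed to the PETSc callbacks declared above:
// the shell-preconditioner context (numprec), the time-stepper context with
// its cached Jacobian/split states, and the monitor context.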
   unsigned long int numprec;
} __mfem_pc_shell_ctx;

   PetscObjectState cached_ijacstate;
   PetscObjectState cached_rhsjacstate;
   PetscObjectState cached_splits_xstate;
   PetscObjectState cached_splits_xdotstate;

} __mfem_monitor_ctx;

static PetscErrorCode ierr;
static PetscMPIInt mpiierr;
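// MFEMInitializePetsc: when MFEM runs on a CUDA or HIP device, the device
// selection is forwarded to PETSc (the option name changed in PETSc 3.17)
// before PetscInitialize is called; MFEMFinalizePetsc calls PetscFinalize.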
#if PETSC_VERSION_LT(3,17,0)
   const char *opts = "-cuda_device";
#else
   const char *opts = "-device_select_cuda";
#endif
   ierr = PetscOptionsSetValue(NULL,opts,
                               std::to_string(Device::GetId()).c_str()); // assumed: MFEM device id
   MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");

#if PETSC_VERSION_LT(3,17,0)
   const char *opts = "-hip_device";
#else
   const char *opts = "-device_select_hip";
#endif
   ierr = PetscOptionsSetValue(NULL,opts,
                               std::to_string(Device::GetId()).c_str()); // assumed: MFEM device id
   MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");

   ierr = PetscInitialize(argc,argv,rc_file,help);
   MFEM_VERIFY(!ierr,"Unable to initialize PETSc");

   ierr = PetscFinalize();
   MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
const double* PetscMemory::GetHostPointer() const
   const double *v = mfem::Read(*this,Capacity(),false);

const double* PetscMemory::GetDevicePointer() const
   const double *v = mfem::Read(*this,Capacity(),true);
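// SetDataAndSize_ wraps the Vec's host (and, for CUDA/HIP vector types, device)
// arrays into the pdata Memory object so that MFEM and PETSc agree on where the
// data currently lives; 'data' is then made an alias of 'pdata'.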
void PetscParVector::SetDataAndSize_()
   MFEM_VERIFY(x,"Missing Vec");
   ierr = VecSetUp(x); PCHKERRQ(x,ierr);
   ierr = PetscObjectTypeCompare((PetscObject)x,VECNEST,&isnest); PCHKERRQ(x,ierr);
   MFEM_VERIFY(!isnest,"Not for type nest");
   ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
   MFEM_VERIFY(n >= 0,"Invalid local size");
#if defined(PETSC_HAVE_DEVICE)
   PetscOffloadMask omask;
   ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
   if (omask != PETSC_OFFLOAD_BOTH)
   {
      ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
   }
#endif
   ierr = VecGetArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
#if defined(PETSC_HAVE_DEVICE)
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (omask != PETSC_OFFLOAD_BOTH)
   {
      ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
   }
   ierr = VecDeviceGetArrayRead(x,(const PetscScalar**)&darray);
   pdata.Wrap(array,darray,size,MemoryType::HOST,false);
   ierr = VecDeviceRestoreArrayRead(x,(const PetscScalar**)&darray);

   pdata.Wrap(array,size,MemoryType::HOST,false);
   ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
#if defined(PETSC_HAVE_DEVICE)
   if (omask == PETSC_OFFLOAD_UNALLOCATED && isdevice) { omask = PETSC_OFFLOAD_CPU; }
   ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
#endif
   data.MakeAlias(pdata,0,size);
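// SetFlagsFromMask_ translates the PETSc offload mask into MFEM host/device
// validity flags; UpdateVecFromFlags does the reverse and bumps the PETSc
// object state so PETSc notices the modification.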
void PetscParVector::SetFlagsFromMask_() const
   MFEM_VERIFY(x,"Missing Vec");
#if defined(_USE_DEVICE)
   PetscOffloadMask mask;
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
   switch (mask)
   {
      case PETSC_OFFLOAD_CPU:
         pdata.SetHostValid();
         pdata.SetDeviceInvalid();
         break;
      case PETSC_OFFLOAD_GPU:
         pdata.SetHostInvalid();
         pdata.SetDeviceValid();
         break;
      case PETSC_OFFLOAD_BOTH:
         pdata.SetHostValid();
         pdata.SetDeviceValid();
         break;
      default:
         MFEM_ABORT("Unhandled case " << mask);
   }
#endif
void PetscParVector::UpdateVecFromFlags()
   MFEM_VERIFY(x,"Missing Vec");
   ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
#if defined(_USE_DEVICE)
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   bool dv = pdata.DeviceIsValid();
   bool hv = pdata.HostIsValid();
   PetscOffloadMask mask;
   if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
   else if (dv) { mask = PETSC_OFFLOAD_GPU; }
   else { mask = PETSC_OFFLOAD_CPU; }
   ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
#endif
   ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
   pdata.CopyToHost(v,size);
   ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
void PetscParVector::SetVecType_()
   MFEM_VERIFY(x,"Missing Vec");
   ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
#if defined(_USE_DEVICE)
   switch (Device::GetDeviceMemoryType())
   {
      case MemoryType::DEVICE:
      case MemoryType::MANAGED:
         ierr = VecSetType(x,PETSC_VECDEVICE); PCHKERRQ(x,ierr);
         break;
      default:
         ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
         break;
   }
#else
   ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
#endif
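// The three access helpers below touch the Vec's array on the requested memory
// space (device when on_dev && isdevice, host otherwise); the write and
// read-write variants also increase the PETSc object state afterwards.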
   MFEM_VERIFY(x,"Missing Vec");
#if defined(PETSC_HAVE_DEVICE)
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (on_dev && isdevice)
   {
      ierr = VecDeviceGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecDeviceRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
   }
   else
#endif
   {
      ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
   }

   MFEM_VERIFY(x,"Missing Vec");
#if defined(PETSC_HAVE_DEVICE)
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (on_dev && isdevice)
   {
      ierr = VecDeviceGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecDeviceRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
   }
   else
#endif
   {
      ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
   }
   ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);

   MFEM_VERIFY(x,"Missing Vec");
#if defined(PETSC_HAVE_DEVICE)
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   if (on_dev && isdevice)
   {
      ierr = VecDeviceGetArray(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecDeviceRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
   }
   else
#endif
   {
      ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
      ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
   }
   ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
void PetscParVector::UseDevice(bool dev) const
   MFEM_VERIFY(x,"Missing Vec");
#if defined(PETSC_HAVE_DEVICE)
   ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
#endif

bool PetscParVector::UseDevice() const
   MFEM_VERIFY(x,"Missing Vec");
   ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
   return flg ? false : true;

   ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);

   ierr = VecSetBlockSize(x,bs); PCHKERRQ(x,ierr);
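// PetscParVector constructors: create the Vec on the given communicator and
// size it from a local size, from a row distribution 'col', from an existing
// Vec or Mat, or from a ParFiniteElementSpace.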
PetscParVector::PetscParVector(MPI_Comm comm, const Vector &x_,
                               bool copy) : Vector()
   ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
   ierr = VecSetSizes(x,n,PETSC_DECIDE); PCHKERRQ(x,ierr);
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
#if defined(PETSC_HAVE_DEVICE)
   ierr = VecDeviceGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
   rest = VecDeviceRestoreArrayWrite;

   ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
   rest = VecRestoreArrayWrite;

   ierr = (*rest)(x,&array); PCHKERRQ(x,ierr);
   ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
   mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
   ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);

   ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);

   ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);

   MFEM_VERIFY(col,"Missing distribution");
   mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
   ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
                                &x); CCHKERRQ(comm,ierr);

   ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
                               bool transpose, bool allocate) : Vector()
   ierr = VecCreate(comm,&x);
   ierr = VecSetSizes(x,loc,PETSC_DECIDE);

                               bool transpose, bool allocate) : Vector()
   ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
   ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
   ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);

   ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);

   MPI_Comm comm = pfes->GetComm();
   ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
   PetscMPIInt myid = 0;
   if (!HYPRE_AssumedPartitionCheck())
   {
      mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
   }
   ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);

   ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
   ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
   ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
   ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
   ierr = VecGetArrayRead(vout,&array); PCHKERRQ(x,ierr);
   ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
   ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(x,ierr);
   ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
   ierr = VecSet(x,d); PCHKERRQ(x,ierr);

   MFEM_VERIFY(idx.Size() == vals.Size(),
               "Size mismatch between indices and values");
   ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
   ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);

   MFEM_VERIFY(idx.Size() == vals.Size(),
               "Size mismatch between indices and values");
   ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
   ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);

   ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);

   ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);

   ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);

   ierr = VecScale(x,s); PCHKERRQ(x,ierr);

   ierr = VecShift(x,s); PCHKERRQ(x,ierr);

   ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);

   ierr = VecResetArray(x); PCHKERRQ(x,ierr);
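// The two blocks below hand externally owned memory to the Vec with
// VecPlaceArray, setting the offload mask to match where the data is valid;
// the read-only variant additionally locks the Vec with VecLockReadPush.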
   ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
   MFEM_VERIFY(n <= (PetscInt)mem.Capacity(),
               "Memory size " << mem.Capacity() << " < " << n << " vector size!");
   MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
   MFEM_VERIFY(data.Empty(),"Vector data is not empty");
#if defined(_USE_DEVICE)
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);

   ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);

#if defined(PETSC_HAVE_DEVICE)
   ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
   ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
   ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);

   ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
   MFEM_VERIFY(n <= (PetscInt)mem.Capacity(),
               "Memory size " << mem.Capacity() << " < " << n << " vector size!");
   MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
   MFEM_VERIFY(data.Empty(),"Vector data is not empty");
#if defined(_USE_DEVICE)
   ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
                                    VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
                                    ""); PCHKERRQ(x,ierr);
   ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);

   ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);

#if defined(PETSC_HAVE_DEVICE)
   ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
   ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
   ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
   ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
   MFEM_VERIFY(!pdata.Empty(),"Vector data is empty");
#if defined(PETSC_HAVE_DEVICE)
   PetscOffloadMask mask;
   ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
   if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
       (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
   {
      ierr = VecGetArrayRead(x,&v); PCHKERRQ(x,ierr);
      ierr = VecRestoreArrayRead(x,&v); PCHKERRQ(x,ierr);
   }
   if (read && !write) { ierr = VecLockReadPop(x); PCHKERRQ(x,ierr); }
#if defined(PETSC_HAVE_DEVICE)
   ierr = VecDeviceResetArray(x); PCHKERRQ(x,ierr);
   MFEM_VERIFY(false,"This should not happen");
   ierr = VecResetArray(x); PCHKERRQ(x,ierr);
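// Randomize seeds a PetscRandom and fills the Vec; Print writes the Vec either
// to a binary/ASCII viewer opened on 'fname' or to the default viewer.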
   PetscRandom rctx = NULL;
   ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
   ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
   ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
   ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
   ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);

   ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
                                FILE_MODE_WRITE,&view);
   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
   ierr = VecView(x,view); PCHKERRQ(x,ierr);
   ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);

   ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
   ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);

   ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);

   ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);

   ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);

   ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);

   ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);

   ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);

   if (cbs < 0) { cbs = rbs; }
   ierr = MatSetBlockSizes(A,rbs,cbs); PCHKERRQ(A,ierr);

   ierr = ISCreateGeneral(comm,rows.Size(), // assumed: IS built from the row/col index arrays
                          rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
   ierr = ISCreateGeneral(comm,cols.Size(),
                          cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
   ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
   ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
   ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
   BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
   BlockDiagonalConstructor(comm,row_starts,col_starts,diag,

   ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
   if (X) { delete X; }
   if (Y) { delete Y; }

#if defined(PETSC_HAVE_HYPRE)
   ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
   ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);

   ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
   if (X) { delete X; }
   if (Y) { delete Y; }

   ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);

   ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);

   MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
   MFEM_VERIFY(width == B.Width(),"Invalid number of local columns");
   ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);

   ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
   ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);

   MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
   MFEM_VERIFY(width == B.Width(),"Invalid number of local columns");
   ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
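// BlockDiagonalConstructor builds either a MATIS matrix (local CSR data plus a
// local-to-global mapping) or an assembled AIJ matrix from per-process CSR
// arrays, registering the arrays in PetscContainers so they are freed with the Mat.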
void PetscParMatrix::BlockDiagonalConstructor(MPI_Comm comm,
   PetscInt lrsize,lcsize,rstart,cstart;
   PetscMPIInt myid = 0,commsize;
   mpiierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,mpiierr);
   if (!HYPRE_AssumedPartitionCheck())
   {
      mpiierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,mpiierr);
   }
   lrsize = row_starts[myid+1]-row_starts[myid];
   rstart = row_starts[myid];
   lcsize = col_starts[myid+1]-col_starts[myid];
   cstart = col_starts[myid];

   ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
   ISLocalToGlobalMapping rl2g,cl2g;
   ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
   ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
   if (row_starts != col_starts)
   {
      ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
      CCHKERRQ(comm,ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
      ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
   }
   ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);

   ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
   ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
   ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
   ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
   ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr);
   ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr);

   ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
#if defined(PETSC_USE_64BIT_INDICES)
   ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
   for (int i = 0; i < m; i++) { pII[i] = II[i]; }
   for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
   ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
   ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
   ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,

   ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
   if (sizeof(PetscInt) == sizeof(int))
      CCHKERRQ(PETSC_COMM_SELF,ierr);
      CCHKERRQ(PETSC_COMM_SELF,ierr);
   for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
   for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
   CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = PetscCalloc1(m,&oii);
   CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
                                         dii,djj,da,oii,NULL,NULL,&A);
   CCHKERRQ(comm,ierr);

   ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
   CCHKERRQ(comm,ierr);

   void *ptrs[4] = {dii,djj,da,oii};
   const char *names[4] = {"_mfem_csr_dii",

   ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
   ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
   ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
   CCHKERRQ(comm,ierr);
   CCHKERRQ(comm,ierr);
   ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);

   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
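// Wrapping a generic mfem::Operator as a MATSHELL: the shell's MULT,
// MULT_TRANSPOSE, COPY and DESTROY operations are routed to the
// __mfem_mat_shell_* callbacks declared at the top of the file, and the matrix
// is bound to the CPU or to the device vector type depending on the MFEM
// device configuration.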
   ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
   ierr = MatSetSizes(*A,op->Height(),op->Width(), // assumed: sizes from the wrapped Operator
                      PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
   ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
   ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
   ierr = MatShellSetOperation(*A,MATOP_MULT,
                               (void (*)())__mfem_mat_shell_apply);
   ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
                               (void (*)())__mfem_mat_shell_apply_transpose);
   ierr = MatShellSetOperation(*A,MATOP_COPY,
                               (void (*)())__mfem_mat_shell_copy);
   ierr = MatShellSetOperation(*A,MATOP_DESTROY,
                               (void (*)())__mfem_mat_shell_destroy);
#if defined(_USE_DEVICE)
   ierr = MatShellSetVecType(*A,PETSC_VECDEVICE); PCHKERRQ(A,ierr);
   ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);

   ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);

   ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
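// ConvertOperator: turn an mfem::Operator (PetscParMatrix, HypreParMatrix,
// BlockOperator, IdentityOperator, SparseMatrix or generic Operator) into a
// PETSc Mat of the requested type id, converting through MATAIJ/MATIS/MATHYPRE
// as needed and handling MATTRANSPOSE(VIRTUAL) inputs via explicit transposes.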
   PetscBool avoidmatconvert = PETSC_FALSE;
   ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
   CCHKERRQ(comm,ierr);
   if (pA && !avoidmatconvert)
#if PETSC_VERSION_LT(3,10,0)
#if PETSC_VERSION_LT(3,18,0)
   ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
   ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEVIRTUAL,
#if PETSC_VERSION_LT(3,10,0)
   ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
   ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
#if PETSC_VERSION_LT(3,10,0)
   ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
#if PETSC_VERSION_LT(3,10,0)
   ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
   ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
   ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);

   ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
   PCHKERRQ(pA->A,ierr);

   mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
   ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
   PCHKERRQ(pA->A,ierr);
   ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
   ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);

   ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
   PCHKERRQ(pA->A,ierr);

   ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
   ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
   ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);

   ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);

#if defined(PETSC_HAVE_HYPRE)
   ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
   ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
   ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);

   ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);

   MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")

   MFEM_ABORT("Unsupported operator type conversion " << tid)
#if defined(PETSC_HAVE_HYPRE)
   ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
                              PETSC_USE_POINTER,A);
   ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);

#if defined(PETSC_HAVE_HYPRE)
   ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
                              PETSC_USE_POINTER,A);
   ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);

#if defined(PETSC_HAVE_HYPRE)
   ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
                              PETSC_USE_POINTER,A);

   MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")

   MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
              " is not implemented");
   Mat *mats,*matsl2l = NULL;
   ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
   for (i=0; i<nr; i++)
   {
      PetscBool needl2l = PETSC_TRUE;
      for (j=0; j<nc; j++)
      {
         ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
         PCHKERRQ(mats[i*nc+j],ierr);
         ierr = PetscContainerGetPointer(c,(void**)&l2l);
         MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
                     << l2l->Size() << " for block row " << i );
         ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
         matsl2l[i] = (*l2l)[0];
         needl2l = PETSC_FALSE;
      }
   }
   ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
   ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
   for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
   ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);

   ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
   ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
   ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
   PCHKERRQ((*A),ierr);
   ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
   for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
   ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
   ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
   ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
   ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
   ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
   ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
   ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
   ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
   ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
   ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
   int n = pS->Width();
   ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
   for (int i = 0; i < m; i++)
   {
      bool issorted = true;
      for (int j = ii[i]; j < ii[i+1]; j++)
      {
         if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
      }
      ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
      CCHKERRQ(PETSC_COMM_SELF,ierr);
   }

   mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
   ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
   CCHKERRQ(comm,ierr);

   ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
                                         pii,pjj,pdata,oii,NULL,NULL,&B);
   CCHKERRQ(comm,ierr);

   void *ptrs[4] = {pii,pjj,pdata,oii};
   const char *names[4] = {"_mfem_csr_pii",
   for (int i=0; i<4; i++)
   {
      ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
      ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
      ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
      ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
   }

   ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
   ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);

   ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
   ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);

   MFEM_ABORT("Unsupported operator type conversion " << tid)

               "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");

   ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
   ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
   CCHKERRQ(comm,ierr);
   ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
   ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
   ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
   MPI_Comm comm = MPI_COMM_NULL;
   ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
   ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);

   ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);

   if (A_ == A) { return; }
   ierr = PetscObjectReference((PetscObject)A_); PCHKERRQ(A_,ierr);

void PetscParMatrix::SetUpForDevice()
#if !defined(_USE_DEVICE)
   if (A) { ierr = MatBindToCPU(A, PETSC_TRUE); PCHKERRQ(A,ierr); }

   PetscBool ismatis,isnest,isaij;
   ierr = PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
   ierr = PetscObjectTypeCompare((PetscObject)A,MATNEST,&isnest);
   ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
   ierr = PetscObjectTypeCompare((PetscObject)tA,MATNEST,&isnest);
   ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
   ierr = PetscObjectTypeCompareAny((PetscObject)sA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
   ierr = MatSetType(sA,PETSC_MATAIJDEVICE); PCHKERRQ(sA,ierr);
   ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
                       PETSC_TRUE); PCHKERRQ(sA,ierr);
   ierr = MatSetVecType(tA,PETSC_VECDEVICE); PCHKERRQ(tA,ierr);
   ierr = PetscObjectTypeCompareAny((PetscObject)tA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
   ierr = MatSetType(tA,PETSC_MATAIJDEVICE); PCHKERRQ(tA,ierr);
   ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
                       PETSC_TRUE); PCHKERRQ(tA,ierr);

   f = MatMultTranspose;
   fadd = MatMultTransposeAdd;

   ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
   ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
   ierr = VecScale(Y,a); PCHKERRQ(A,ierr);

   ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
   ierr = VecScale(Y,a); PCHKERRQ(A,ierr);

   ierr = VecScale(Y,b); PCHKERRQ(A,ierr);

   ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
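// MatMultKernel evaluates Y = a*op(A)*X + b*Y using plain MatMult/MatMultAdd:
// for b != 0 it rescales Y by b/a, calls the *Add variant and scales back by a;
// for b == 0 it applies op(A) and scales by a; when the matrix is not applied
// it only scales Y by b (or zeroes it).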
   ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);

   MFEM_VERIFY(A,"Mat not present");

   MFEM_VERIFY(A,"Mat not present");

   ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
   ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);

   ierr = MatScale(A,s); PCHKERRQ(A,ierr);

   MFEM_ASSERT(x.Size() == Width(),"invalid x.Size() = " << x.Size()
               << ", expected size = " << Width());
   MFEM_ASSERT(y.Size() == Height(),"invalid y.Size() = " << y.Size()
               << ", expected size = " << Height());
   bool rw = (b != 0.0);
   MatMultKernel(A,a,XX->x,b,YY->x,false);

   MFEM_ASSERT(x.Size() == Height(),"invalid x.Size() = " << x.Size()
               << ", expected size = " << Height());
   MFEM_ASSERT(y.Size() == Width(),"invalid y.Size() = " << y.Size()
               << ", expected size = " << Width());
   bool rw = (b != 0.0);
   MatMultKernel(A,a,YY->x,b,XX->x,true);

   ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
                                FILE_MODE_WRITE,&view);
   ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
   ierr = MatView(A,view); PCHKERRQ(A,ierr);
   ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);

   ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
   MFEM_ASSERT(s.Size() == Height(),"invalid s.Size() = " << s.Size()
               << ", expected size = " << Height());
   YY->PlaceMemory(s.GetMemory());
   ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);

   MFEM_ASSERT(s.Size() == Width(),"invalid s.Size() = " << s.Size()
               << ", expected size = " << Width());
   XX->PlaceMemory(s.GetMemory());
   ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);

   MFEM_ASSERT(s.Size() == Height(),"invalid s.Size() = " << s.Size()
               << ", expected size = " << Height());
   MFEM_ASSERT(s.Size() == Width(),"invalid s.Size() = " << s.Size()
               << ", expected size = " << Width());
   XX->PlaceMemory(s.GetMemory());
   ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2091 "Petsc TripleMatrixProduct: Number of local cols of A " << A->
Width() <<
2092 " differs from number of local rows of P " << P->
Height());
2094 "Petsc TripleMatrixProduct: Number of local rows of A " << A->
Height() <<
2095 " differs from number of local cols of R " << R->
Width());
2097 ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2104 Mat pA = *A,pP = *P,pRt = *Rt;
2106 PetscBool Aismatis,Pismatis,Rtismatis;
2109 "Petsc RAP: Number of local cols of A " << A->
Width() <<
2110 " differs from number of local rows of P " << P->
Height());
2112 "Petsc RAP: Number of local rows of A " << A->
Height() <<
2113 " differs from number of local rows of Rt " << Rt->
Height());
2114 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&Aismatis);
2116 ierr = PetscObjectTypeCompare((
PetscObject)pP,MATIS,&Pismatis);
2118 ierr = PetscObjectTypeCompare((
PetscObject)pRt,MATIS,&Rtismatis);
2125 ISLocalToGlobalMapping cl2gP,cl2gRt;
2126 PetscInt rlsize,clsize,rsize,csize;
2128 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2129 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2130 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2131 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2132 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2133 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2134 ierr = MatCreate(A->
GetComm(),&B); PCHKERRQ(pA,ierr);
2135 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2136 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2137 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2138 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2139 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2140 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2143 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2149 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2150 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2152 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2160 ierr = PetscObjectReference((
PetscObject)lRt); PCHKERRQ(lRt,ierr);
2161 (*vmatsl2l)[0] = lRt;
2164 ierr = PetscContainerCreate(PetscObjectComm((
PetscObject)B),&c);
2166 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2167 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2171 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2175 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2176 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2177 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2178 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2184 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2190 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2191 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2193 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
#if defined(PETSC_HAVE_HYPRE)
   ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);

   ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
   ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);

   MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");

   ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
   MFEM_VERIFY(M == N,"Rectangular case unsupported");
   ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
   ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
   ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
   ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
   ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
   ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);

   ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
   ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
   ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
   ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
   ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);

   ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);

   MFEM_VERIFY(A,"no associated PETSc Mat object");
   ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
   ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
   ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
   ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
   ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
   ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
#if defined(PETSC_HAVE_HYPRE)
   ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);

   Ae.Mult(-1.0, X, 1.0, B);
   ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
   ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
   for (int i = 0; i < ess_dof_list.Size(); i++)
   {
      int r = ess_dof_list[i];
      B(r) = array[r] * X(r);
   }
   ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
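// The PetscSolver settings below dispatch on the PETSc class id of the wrapped
// object: KSP, SNES and TS each get their native SetTolerances/monitor calls,
// and unsupported class ids abort.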
   if (cid == KSP_CLASSID)
      ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
   else if (cid == SNES_CLASSID)
      ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
   else if (cid == TS_CLASSID)
      ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
   else
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");

   if (cid == KSP_CLASSID)
      ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
   else if (cid == SNES_CLASSID)
      ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
   else if (cid == TS_CLASSID)
      ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
   else
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");

   if (cid == KSP_CLASSID)
      ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
   else if (cid == SNES_CLASSID)
      ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
                               max_iter,PETSC_DEFAULT);
   else if (cid == TS_CLASSID)
      ierr = TSSetMaxSteps(ts,max_iter);
   else
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   typedef PetscErrorCode (*myPetscFunc)(void**);
   PetscViewerAndFormat *vf = NULL;
   PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
   ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
   if (cid == KSP_CLASSID)
   {
      ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
#if PETSC_VERSION_LT(3,15,0)
      ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
#else
      ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
#endif
                           (myPetscFunc)PetscViewerAndFormatDestroy);
      ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
      ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
                           (myPetscFunc)PetscViewerAndFormatDestroy);
      ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
      PCHKERRQ(viewer,ierr);
#if PETSC_VERSION_LT(3,15,0)
      ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
#else
      ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
#endif
                           (myPetscFunc)PetscViewerAndFormatDestroy);
   }
   else if (cid == SNES_CLASSID)
   {
      ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
      ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
                            (myPetscFunc)PetscViewerAndFormatDestroy);
      PCHKERRQ(snes,ierr);
   }
   else if (cid == TS_CLASSID)
   {
      ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
   }
   else
   {
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   }
   return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;

   __mfem_monitor_ctx *monctx;
   ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
   monctx->solver = this;
   monctx->monitor = ctx;
   if (cid == KSP_CLASSID)
      ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
                           __mfem_monitor_ctx_destroy);
   else if (cid == SNES_CLASSID)
      ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
                            __mfem_monitor_ctx_destroy);
   else if (cid == TS_CLASSID)
      ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
                          __mfem_monitor_ctx_destroy);
   else
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");

   if (cid == SNES_CLASSID)
      __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
   else if (cid == TS_CLASSID)
      __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
   else
      MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");

   if (cid == TS_CLASSID)
   {
      ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
      ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
      ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
   }
   else if (cid == SNES_CLASSID)
   {
      ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
      ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
   }
   else if (cid == KSP_CLASSID)
   {
      ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
   }
   else if (cid == PC_CLASSID)

      MFEM_ABORT("No support for PetscPreconditionerFactory for this object");

   ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);

   ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
   if (!customize) { clcustom = true; }
   if (cid == PC_CLASSID)
      ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
   else if (cid == KSP_CLASSID)
      ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
   else if (cid == SNES_CLASSID)
      ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
   else if (cid == TS_CLASSID)
      ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
   else
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");

   if (cid == KSP_CLASSID)
   {
      KSPConvergedReason reason;
      ierr = KSPGetConvergedReason(ksp,&reason);
      return reason > 0 ? 1 : 0;
   }
   else if (cid == SNES_CLASSID)
   {
      SNESConvergedReason reason;
      ierr = SNESGetConvergedReason(snes,&reason);
      PCHKERRQ(snes,ierr);
      return reason > 0 ? 1 : 0;
   }
   else if (cid == TS_CLASSID)
   {
      TSConvergedReason reason;
      ierr = TSGetConvergedReason(ts,&reason);
      return reason > 0 ? 1 : 0;
   }
   else
   {
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   }
   if (cid == KSP_CLASSID)
   {
      ierr = KSPGetIterationNumber(ksp,&its);
   }
   else if (cid == SNES_CLASSID)
   {
      ierr = SNESGetIterationNumber(snes,&its);
      PCHKERRQ(snes,ierr);
   }
   else if (cid == TS_CLASSID)
   {
      ierr = TSGetStepNumber(ts,&its);
   }
   else
   {
      MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   }

   if (cid == KSP_CLASSID)
   {
      ierr = KSPGetResidualNorm(ksp,&norm);
   }
   if (cid == SNES_CLASSID)
   {
      ierr = SNESGetFunctionNorm(snes,&norm);
      PCHKERRQ(snes,ierr);
   }
   MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
   return PETSC_MAX_REAL;

   if (cid == SNES_CLASSID)
   {
      __mfem_snes_ctx *snes_ctx;
      ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
      snes_ctx->op = NULL;
      snes_ctx->bchandler = NULL;
      snes_ctx->work = NULL;
   }
   else if (cid == TS_CLASSID)
   {
      __mfem_ts_ctx *ts_ctx;
      ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
      ts_ctx->bchandler = NULL;
      ts_ctx->work = NULL;
      ts_ctx->work2 = NULL;
      ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
      ts_ctx->cached_ijacstate = -1;
      ts_ctx->cached_rhsjacstate = -1;
      ts_ctx->cached_splits_xstate = -1;
      ts_ctx->cached_splits_xdotstate = -1;
   }

   if (cid == SNES_CLASSID)
   {
      __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
      delete snes_ctx->work;
   }
   else if (cid == TS_CLASSID)
   {
      __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
      delete ts_ctx->work;
      delete ts_ctx->work2;
   }
   ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
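// PetscBCHandler applies essential boundary values to true-dof vectors,
// re-evaluating the boundary function only when the evaluation time changed
// (for non-CONSTANT boundary types).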
   : bctype(type_), setup(false), eval_t(0.0),
     eval_t_cached(std::numeric_limits<double>::min())

   ess_tdof_list.Assign(list);

   if (setup) { return; }
   this->Eval(eval_t,eval_g);
   eval_t_cached = eval_t;

   (*this).SetUp(x.Size());
   for (int i = 0; i < ess_tdof_list.Size(); ++i)
   {
      y[ess_tdof_list[i]] = 0.0;
   }
   if (bctype != CONSTANT && eval_t != eval_t_cached)
   {
      Eval(eval_t,eval_g);
      eval_t_cached = eval_t;
   }
   for (int i = 0; i < ess_tdof_list.Size(); ++i)
   {
      y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
   }

   (*this).SetUp(x.Size());
   for (int i = 0; i < ess_tdof_list.Size(); ++i)
   {
      x[ess_tdof_list[i]] = 0.0;
   }
   if (bctype != CONSTANT && eval_t != eval_t_cached)
   {
      Eval(eval_t,eval_g);
      eval_t_cached = eval_t;
   }
   for (int i = 0; i < ess_tdof_list.Size(); ++i)
   {
      x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
   }

   (*this).SetUp(x.Size());
   for (int i = 0; i < ess_tdof_list.Size(); ++i)
   {
      y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
   }
   for (int i = 0; i < ess_tdof_list.Size(); ++i)
   {
      y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
   }

   (*this).SetUp(x.Size());
   for (int i = 0; i < ess_tdof_list.Size(); ++i)
   {
      x[ess_tdof_list[i]] = 0.0;
   }

   (*this).SetUp(x.Size());
   for (int i = 0; i < ess_tdof_list.Size(); ++i)
   {
      y[ess_tdof_list[i]] = 0.0;
   }
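// PetscLinearSolver: each constructor creates or adopts a KSP, records the
// class id and options prefix, and maps MFEM's iterative_mode to
// KSPSetInitialGuessNonzero; SetOperator resets the KSP when the operator
// sizes change.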
                                     bool wrapin, bool iter_mode)
   ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
   ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
   ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
   ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
   PCHKERRQ(ksp, ierr);

                                     const std::string &prefix, bool iter_mode)
   ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
   ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
   ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
   PCHKERRQ(ksp, ierr);

                                     const std::string &prefix, bool iter_mode)
   ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
   ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
   ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
   PCHKERRQ(ksp, ierr);

   bool delete_pA = false;
   MFEM_VERIFY(pA,"Unsupported operation!");
   PetscInt nheight,nwidth,oheight,owidth;
   ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
   ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
   ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
   if (nheight != oheight || nwidth != owidth)
   {
      ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
   }
   ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
   if (delete_pA) { delete pA; }

   bool delete_pA = false;
   MFEM_VERIFY(pA,"Unsupported operation!");
   bool delete_ppA = false;
   if (oA == poA && !wrap)
   MFEM_VERIFY(ppA,"Unsupported operation!");
   PetscInt nheight,nwidth,oheight,owidth;
   ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
   ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
   ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
   if (nheight != oheight || nwidth != owidth)
   {
      ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
   }
   ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
   if (delete_pA) { delete pA; }
   if (delete_ppA) { delete ppA; }

   ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
   ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
   ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
   ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);

   ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
   ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);

   ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
   ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
   ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
   ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
   ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3154 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(
obj, ierr);
3162 PetscParMatrix A = PetscParMatrix(pA,
true);
3163 X =
new PetscParVector(A,
false,
false);
3171 ierr = KSPGetInitialGuessNonzero(ksp, &flg);
3177 ierr = KSPSolveTranspose(ksp,
B->
x,
X->
x); PCHKERRQ(ksp,ierr);
3181 ierr = KSPSolve(ksp,
B->
x,
X->
x); PCHKERRQ(ksp,ierr);
3189 (*this).MultKernel(
b,x,
false);
3194 (*this).MultKernel(
b,x,
true);
3201 ierr = PetscObjectGetComm((
PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3202 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3212 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3214 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3222 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3224 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3228 const std::string &prefix,
bool iter_mode)
3232 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3234 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3240 const std::string &prefix)
3244 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3246 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3247 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3251 const string &prefix)
3257 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3258 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3263 const string &prefix)
3267 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3269 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
3270 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3276 bool delete_pA =
false;
3293 PetscInt nheight,nwidth,oheight,owidth;
3295 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3296 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3297 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3298 if (nheight != oheight || nwidth != owidth)
3302 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3308 ierr = PCSetOperators(pc,pA->
A,pA->
A); PCHKERRQ(
obj,ierr);
3317 if (delete_pA) {
delete pA; };
3320 void PetscPreconditioner::MultKernel(
const Vector &
b,
Vector &x,
3324 "Iterative mode not supported for PetscPreconditioner");
3330 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(
obj, ierr);
3338 PetscParMatrix A(pA,
true);
3339 X =
new PetscParVector(A,
false,
false);
3350 ierr = PCApplyTranspose(pc,
B->
x,
X->
x); PCHKERRQ(pc, ierr);
3354 ierr = PCApply(pc,
B->
x,
X->
x); PCHKERRQ(pc, ierr);
3362 (*this).MultKernel(
b,x,
false);
3367 (*this).MultKernel(
b,x,
true);
3374 ierr = PetscObjectGetComm((
PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3375 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
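// PetscBDDCSolver requires a MATIS operator; BDDCSolverConstructor sets
// PCBDDC, builds field splits and (for H1 spaces) vertex coordinates and
// rigid-body near-null spaces, and converts the essential/natural dof lists
// into PETSc IS objects for PCBDDCSetDirichletBoundaries/SetNeumannBoundaries.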
3386 void PetscBDDCSolver::BDDCSolverConstructor(
const PetscBDDCSolverParams &opts)
3388 MPI_Comm comm = PetscObjectComm(
obj);
3393 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3397 ierr = PetscObjectTypeCompare((
PetscObject)pA,MATIS,&ismatis);
3399 MFEM_VERIFY(ismatis,
"PetscBDDCSolver needs the matrix in unassembled format");
3402 ParFiniteElementSpace *fespace = opts.fespace;
3403 if (opts.netflux && !fespace)
3405 MFEM_WARNING(
"Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3412 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3413 ierr = MatNullSpaceCreate(PetscObjectComm((
PetscObject)lA),PETSC_TRUE,0,NULL,
3414 &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3415 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3416 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3420 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(
obj,ierr);
3423 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3424 ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3433 int vdim = fespace->GetVDim();
3440 ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3441 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3442 ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3451 ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3464 ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3470 const FiniteElementCollection *fec = fespace->FEColl();
3471 bool h1space =
dynamic_cast<const H1_FECollection*
>(fec);
3474 ParFiniteElementSpace *fespace_coords = fespace;
3476 sdim = fespace->GetParMesh()->SpaceDimension();
3479 fespace_coords =
new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3482 VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3483 ParGridFunction gf_coords(fespace_coords);
3484 gf_coords.ProjectCoefficient(coeff_coords);
3485 HypreParVector *hvec_coords = gf_coords.ParallelProject();
3487 hvec_coords->Size(),
false);
3495 Vec pvec_coords,lvec_coords;
3496 ISLocalToGlobalMapping l2g;
3502 ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3503 hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3504 CCHKERRQ(comm,ierr);
3505 ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3508 ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3509 CCHKERRQ(comm,ierr);
3510 ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3511 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3513 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3514 ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3515 ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3516 ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3517 ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3518 ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3519 ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3520 ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3521 ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3522 CCHKERRQ(comm,ierr);
3523 ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3528 ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3529 ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3530 #if PETSC_VERSION_LT(3,15,0) 3531 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3532 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3534 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3535 CCHKERRQ(comm,ierr);
3536 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3537 CCHKERRQ(comm,ierr);
3539 ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3540 ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3542 ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3543 ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3544 CCHKERRQ(PETSC_COMM_SELF,ierr);
3545 ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3546 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3547 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3548 ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
for (PetscInt d = 0; d < sdim; d++)
{
   coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
}
ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
for (PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
if (fespace_coords != fespace)
{
   delete fespace_coords;
}
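// Annotation: PCSetCoordinates (called further below) expects sdim real
// values per local row, stored interlaced; the loops above convert the PETSc
// scalars into that layout.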
IS dir = NULL, neu = NULL;

Array<Mat> *l2l = NULL;
if (opts.ess_dof_local || opts.nat_dof_local)
{
   MFEM_VERIFY(c,"Local-to-local PETSc container not present");
   ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
}

PetscBool lpr = PETSC_FALSE,pr;
if (opts.ess_dof) { lpr = PETSC_TRUE; }
mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
CCHKERRQ(comm,mpiierr);
MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
lpr = PETSC_FALSE;
if (opts.nat_dof) { lpr = PETSC_TRUE; }
mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
CCHKERRQ(comm,mpiierr);
MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");

ms[0] = -nf; ms[1] = nf;
mpiierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
CCHKERRQ(comm,mpiierr);
MFEM_VERIFY(-Ms[0] == Ms[1],
            "number of fields should be the same across processes");
if (opts.ess_dof)
{
   PetscInt st = opts.ess_dof_local ? 0 : rst;
   if (!opts.ess_dof_local)
   {
      ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
      CCHKERRQ(comm,ierr);
      ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
   }
   else
   {
      ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
      CCHKERRQ(comm,ierr);
      ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
   }
}
if (opts.nat_dof)
{
   PetscInt st = opts.nat_dof_local ? 0 : rst;
   if (!opts.nat_dof_local)
   {
      ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
      CCHKERRQ(comm,ierr);
      ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
   }
   else
   {
      ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
      CCHKERRQ(comm,ierr);
      ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
   }
}

ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
for (int i = 0; i < nf; i++)
{
   ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
}
ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
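// Annotation: the BDDC customization above forwards the essential (Dirichlet)
// and natural (Neumann) dof sets, the per-field dof splitting and, when
// available, the dof coordinates to PCBDDC.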
const FiniteElementCollection *fec = fespace->FEColl();
bool edgespace, rtspace, h1space;
bool needint = opts.netflux;
bool tracespace, rt_tracespace, edge_tracespace;
PetscBool B_is_Trans = PETSC_FALSE;

ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
dim = pmesh->Dimension();
vdim = fespace->GetVDim();
h1space         = dynamic_cast<const H1_FECollection*>(fec);
rtspace         = dynamic_cast<const RT_FECollection*>(fec);
edgespace       = dynamic_cast<const ND_FECollection*>(fec);
edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
rt_tracespace   = dynamic_cast<const RT_Trace_FECollection*>(fec);
tracespace = edge_tracespace || rt_tracespace;

if (fespace->GetNE() > 0)
{
   if (!tracespace)
   {
      p = fespace->GetElementOrder(0);
   }
   else
   {
      p = fespace->GetFaceOrder(0);
      if (dim == 2) { p++; }
   }
}

MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
             " not using auxiliary quadrature");
FiniteElementCollection *vfec;
if (tracespace)
{
   vfec = new H1_Trace_FECollection(p,dim);
}
else
{
   vfec = new H1_FECollection(p,dim);
}
ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
ParDiscreteLinearOperator *grad;
grad = new ParDiscreteLinearOperator(vfespace,fespace);
if (tracespace)
{
   grad->AddTraceFaceInterpolator(new GradientInterpolator);
}
else
{
   grad->AddDomainInterpolator(new GradientInterpolator);
}
HypreParMatrix *hG = grad->ParallelAssemble();
PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);

PetscBool conforming = PETSC_TRUE;
if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
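// Annotation: for H(curl) problems PCBDDC can exploit the discrete gradient;
// the auxiliary H1 (or H1 trace) space and the ParDiscreteLinearOperator
// above assemble that gradient and hand it to PCBDDCSetDiscreteGradient
// together with the polynomial order and the conformity flag.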
MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
             " auxiliary quadrature");
if (vdim != dim) { needint = false; }

PetscParMatrix *B = NULL;

FiniteElementCollection *auxcoll;
if (tracespace)
{
   auxcoll = new RT_Trace_FECollection(p,dim);
}
else
{
   if (h1space)
   {
      auxcoll = new H1_FECollection(std::max(p-1,1),dim);
   }
   else
   {
      auxcoll = new L2_FECollection(p,dim);
   }
}
ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);

if (edgespace)
{
   if (tracespace)
   {
      b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
   }
   else
   {
      b->AddDomainIntegrator(new VectorFECurlIntegrator);
   }
}
else if (rtspace)
{
   if (tracespace)
   {
      b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
   }
   else
   {
      b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
   }
}
else
{
   b->AddDomainIntegrator(new VectorDivergenceIntegrator);
}
b->ParallelAssemble(Bh);
Bh.SetOperatorOwner(false);

ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
if (!opts.ess_dof_local)
{
   ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
}
else
{
   ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
}
B_is_Trans = PETSC_TRUE;

ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);

ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
}
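// Annotation: the mixed bilinear form assembled above provides the
// divergence/curl constraint matrix B that PCBDDCSetDivergenceMat uses for
// no-net-flux coarse corrections; rows of essential dofs are zeroed first.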
PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
                                 const PetscBDDCSolverParams &opts,
                                 const std::string &prefix)
   : PetscPreconditioner(comm,op,prefix)
{
   BDDCSolverConstructor(opts);
}

PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
                                 const PetscBDDCSolverParams &opts,
                                 const std::string &prefix)
   : PetscPreconditioner(A,prefix)
{
   BDDCSolverConstructor(opts);
}

PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
                                             const string &prefix)
   : PetscPreconditioner(comm,op,prefix)
{
   PC pc = (PC)obj;
   Mat pA;
   PetscBool isnest;
   PetscInt nr;
   IS *isrow;

   ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
   ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
   ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
   ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
   ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
   ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
   for (PetscInt i=0; i<nr; i++)
   {
      ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
   }
   ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
}
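// Rough usage sketch (hedged; 'block_op' and the prefix are placeholders, not
// taken from this file): wrap a block operator as a MATNEST PetscParMatrix
// and let PetscFieldSplitSolver register one field per diagonal block:
//
//    mfem::PetscParMatrix A(MPI_COMM_WORLD, &block_op,
//                           mfem::Operator::PETSC_MATNEST);
//    mfem::PetscFieldSplitSolver prec(MPI_COMM_WORLD, A, "fs_");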
PetscH2Solver::PetscH2Solver(Operator &op, ParFiniteElementSpace *fes,
                             const std::string &prefix)
{
   ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE); PCHKERRQ(A,ierr);
   ierr = MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE); PCHKERRQ(A,ierr);
   H2SolverConstructor(fes);
}

void PetscH2Solver::H2SolverConstructor(ParFiniteElementSpace *fes)
{
#if defined(PETSC_HAVE_H2OPUS)
   VectorFunctionCoefficient ccoords(sdim, func_coords);
   ParGridFunction coords(fes);
   coords.ProjectCoefficient(ccoords);
   coords.ParallelProject(c);
   PCSetType(*this,PCH2OPUS);
   PCSetFromOptions(*this);
#else
   MFEM_ABORT("Need PETSc configured with --download-h2opus");
#endif
}
PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
                                           const std::string &prefix)
{
   SNES snes;
   ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
   obj  = (PetscObject)snes;
   ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
   ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
}

PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
                                           const std::string &prefix)
{
   SNES snes;
   ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
   obj  = (PetscObject)snes;
   ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
   ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
}

PetscNonlinearSolver::~PetscNonlinearSolver()
{
   MPI_Comm comm;
   SNES snes = (SNES)obj;
   ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
   ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
}

void PetscNonlinearSolver::SetOperator(const Operator &op)
{
   SNES snes = (SNES)obj;

   ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
   PCHKERRQ(snes, ierr);
   ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
   PCHKERRQ(snes, ierr);
   ls = (PetscBool)((void*)&op == fctx &&
                    (void*)&op == jctx);
   mpiierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
                           PetscObjectComm((PetscObject)snes));
   CCHKERRQ(PetscObjectComm((PetscObject)snes),mpiierr);
   ierr = SNESReset(snes); PCHKERRQ(snes,ierr);

   ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
   ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);

   __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
   ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
   PCHKERRQ(snes, ierr);
   ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
                          (void *)snes_ctx);
   PCHKERRQ(snes, ierr);

   ierr = MatDestroy(&dummy);
   PCHKERRQ(snes, ierr);
}
void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
{
   __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
   snes_ctx->jacType = jacType;
}

void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator*, const Vector&,
                                                      double*))
{
   __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
   snes_ctx->objective = objfn;

   SNES snes = (SNES)obj;
   ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
   PCHKERRQ(snes, ierr);
}

void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator*, const Vector&,
                                                     Vector&, Vector&,
                                                     bool&, bool&))
{
   __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
   snes_ctx->postcheck = post;

   SNES snes = (SNES)obj;
   SNESLineSearch ls;
   ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
   ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
}

void PetscNonlinearSolver::SetUpdate(void (*update)(Operator*, int,
                                                    const Vector&, const Vector&,
                                                    const Vector&, const Vector&))
{
   __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
   snes_ctx->update = update;

   SNES snes = (SNES)obj;
   ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
}

void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
{
   SNES snes = (SNES)obj;
   bool b_nonempty = b.Size();

   ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
}
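// Rough usage sketch (hedged; 'op' stands for any mfem::Operator providing
// Mult() and GetGradient(), names are placeholders):
//
//    mfem::PetscNonlinearSolver newton(MPI_COMM_WORLD, op, "newton_");
//    newton.SetJacobianType(mfem::Operator::PETSC_MATAIJ);
//    mfem::Vector zero, x(op.Width());
//    newton.Mult(zero, x);   // solve op(x) = 0, x used as initial guess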
PetscODESolver::PetscODESolver(MPI_Comm comm, const std::string &prefix)
{
   TS ts;
   TSAdapt tsad;
   ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
   obj  = (PetscObject)ts;
   ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
   ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);

   ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
   ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
   ierr = TSGetAdapt(ts,&tsad);
   ierr = TSAdaptSetType(tsad,TSADAPTNONE);
}

PetscODESolver::~PetscODESolver()
{
   MPI_Comm comm;
   TS ts = (TS)obj;
   ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
   ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
}

void PetscODESolver::Init(TimeDependentOperator &f_,
                          enum PetscODESolver::Type type)
{
   TS ts = (TS)obj;
   __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;

   ierr = TSReset(ts); PCHKERRQ(ts,ierr);
   ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
   ts_ctx->cached_ijacstate = -1;
   ts_ctx->cached_rhsjacstate = -1;
   ts_ctx->cached_splits_xstate = -1;
   ts_ctx->cached_splits_xdotstate = -1;

   ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
   ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (void *)ts_ctx);
   ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
   ierr = MatDestroy(&dummy);

   ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
   ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
   ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
                           (void *)ts_ctx);
   ierr = MatDestroy(&dummy);

   ierr = TSSetSolution(ts, X); PCHKERRQ(ts,ierr);

   PetscBool use = PETSC_TRUE;
   ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
   if (use)
   {
      ierr = PetscObjectComposeFunction((PetscObject)ts,
                                        "TSComputeSplitJacobians_C",
                                        __mfem_ts_computesplits);
   }
   else
   {
      ierr = PetscObjectComposeFunction((PetscObject)ts,
                                        "TSComputeSplitJacobians_C",
                                        NULL);
   }
}
void PetscODESolver::SetJacobianType(Operator::Type jacType)
{
   __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
   ts_ctx->jacType = jacType;
}

PetscODESolver::Type PetscODESolver::GetType() const
{
   __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
   return ts_ctx->type;
}

void PetscODESolver::SetType(PetscODESolver::Type type)
{
   __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;

   TS ts = (TS)obj;
   ts_ctx->type = type;
   if (type == ODE_SOLVER_LINEAR)
   {
      ierr = TSSetProblemType(ts, TS_LINEAR);
   }
   else
   {
      ierr = TSSetProblemType(ts, TS_NONLINEAR);
   }
}

void PetscODESolver::Step(Vector &x, double &t, double &dt)
{
   TS ts = (TS)obj;

   ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
   ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
   ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
   ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);

   ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
   ierr = TSStep(ts); PCHKERRQ(ts, ierr);

   ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
   ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
}

void PetscODESolver::Run(Vector &x, double &t, double &dt, double t_final)
{
   TS ts = (TS)obj;

   ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
   ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
   ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
   ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);

   __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
   ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
   ts_ctx->cached_ijacstate = -1;
   ts_ctx->cached_rhsjacstate = -1;
   ts_ctx->cached_splits_xstate = -1;
   ts_ctx->cached_splits_xdotstate = -1;

   ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);

   ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
   ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
}
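// Rough usage sketch (hedged; 'tdo' is a placeholder TimeDependentOperator):
//
//    mfem::PetscODESolver ode(MPI_COMM_WORLD, "ts_");
//    ode.Init(tdo, mfem::PetscODESolver::ODE_SOLVER_GENERAL);
//    double t = 0.0, dt = 1e-2;
//    ode.Step(x, t, dt);          // or ode.Run(x, t, dt, t_final);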
#include "petsc/private/petscimpl.h"
#include "petsc/private/matimpl.h"

static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
                                        void* ctx)
{
   __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;

   PetscFunctionBeginUser;
   if (!monctx)
   {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
   }
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
                                          Vec f, void *ctx)
{
   __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;

   PetscFunctionBeginUser;
   if (ts_ctx->bchandler)
   {
      if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
      if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
      bchandler->ZeroBC(yy,*txp);
   }
   ff.UpdateVecFromFlags();
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
                                            void *ctx)
{
   __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;

   PetscFunctionBeginUser;
   if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); }
   ff.UpdateVecFromFlags();
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x, Vec xp,
                                          PetscReal shift, Mat A, Mat P,
                                          void *ctx)
{
   __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
   PetscObjectState state;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);

   ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
   if (std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
       state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(PETSC_SUCCESS); }

   ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
   ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
   ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
   ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
   if (!ts_ctx->bchandler)
   {
      xx = new mfem::Vector(array,n);
   }
   else
   {
      if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
      xx = ts_ctx->work;
   }
   ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);

   if (!ts_ctx->bchandler) { delete xx; }
   ts_ctx->cached_shift = shift;

   bool delete_pA = false;
   if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
               pA->GetType() != ts_ctx->jacType))
   {
      delete_pA = true;
   }
   if (ts_ctx->bchandler)
   {
   }

   PetscObjectState nonzerostate;
   ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
   ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
   if (delete_pA) { delete pA; }

   ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
   if (isnest) { P->nonzerostate = nonzerostate + 1; }

   ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);

   ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
   ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
   ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
   ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
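// Annotation: the IJacobian callback caches the PetscObjectState of the
// Jacobian and the last shift value, so repeated calls with an unchanged
// operator return early instead of reassembling the matrix.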
static PetscErrorCode __mfem_ts_computesplits(TS ts, PetscReal t, Vec x, Vec xp,
                                              Mat Ax, Mat Jx,
                                              Mat Axp, Mat Jxp)
{
   __mfem_ts_ctx* ts_ctx;
   PetscObjectState state;
   PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
   PetscBool assembled;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
   ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
   if (Axp && Axp != Jxp)
   {
      ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
      ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
   }

   ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);

   ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
   if (state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
   ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
   if (state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
   if (!rx && !rxp) { PetscFunctionReturn(PETSC_SUCCESS); }

   ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
   ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
   ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
   ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
   if (!ts_ctx->bchandler)
   {
      xx = new mfem::Vector(array,n);
   }
   else
   {
      if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
      xx = ts_ctx->work;
   }
   ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);

   bool delete_mat = false;
   if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
                pJx->GetType() != ts_ctx->jacType))
   {
      delete_mat = true;
   }
   ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
   ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
   if (assembled)
   {
      ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
   }
   else
   {
      ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
      ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
   }
   if (delete_mat) { delete pJx; }
   if (ts_ctx->bchandler)
   {
   }

   if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
                 pJxp->GetType() != ts_ctx->jacType))
   {
      delete_mat = true;
   }
   ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
   pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
                                   &oJxp,ts_ctx->jacType);
   ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
   if (assembled)
   {
      ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
   }
   else
   {
      ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
      ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
   }
   if (delete_mat) { delete pJxp; }
   if (ts_ctx->bchandler)
   {
   }

   ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);

   ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
   ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);

   if (!ts_ctx->bchandler) { delete xx; }
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
                                            Mat A, Mat P, void *ctx)
{
   __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
   PetscObjectState state;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);

   ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
   if (state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(PETSC_SUCCESS); }

   ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
   ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
   if (!ts_ctx->bchandler)
   {
      xx = new mfem::Vector(array,n);
   }
   else
   {
      if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
      xx = ts_ctx->work;
   }
   ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);

   if (!ts_ctx->bchandler) { delete xx; }

   bool delete_pA = false;
   if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
               pA->GetType() != ts_ctx->jacType))
   {
      delete_pA = true;
   }
   if (ts_ctx->bchandler)
   {
   }

   PetscObjectState nonzerostate;
   ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
   ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
   if (delete_pA) { delete pA; }

   ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
   if (isnest) { P->nonzerostate = nonzerostate + 1; }

   ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
   ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_rhsjacstate);

   ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
   ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
   ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
   ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
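// Annotation: the RHS Jacobian follows the same state-caching pattern;
// TSRHSJacobianSetReuse additionally tells PETSc it may reuse the assembled
// matrix between calls.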
static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
                                          void* ctx)
{
   __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;

   PetscFunctionBeginUser;
   if (!monctx)
   {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
   }
   {
      PetscErrorCode ierr;
      ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
   }
   {
      PetscErrorCode ierr;
      ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
   }
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
                                           void *ctx)
{
   PetscErrorCode ierr;
   __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;

   PetscFunctionBeginUser;
   ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
   ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
   if (!snes_ctx->bchandler)
   {
      xx = new mfem::Vector(array,n);
   }
   else
   {
      if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
      xx = snes_ctx->work;
   }
   ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
   if (!snes_ctx->bchandler) { delete xx; }

   bool delete_pA = false;
   if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
               pA->GetType() != snes_ctx->jacType))
   {
      delete_pA = true;
   }
   if (snes_ctx->bchandler)
   {
   }

   PetscObjectState nonzerostate;
   ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
   ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
   if (delete_pA) { delete pA; }

   ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
   if (isnest) { P->nonzerostate = nonzerostate + 1; }

   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);

   ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
   ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
   ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
   ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
{
   __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;

   PetscFunctionBeginUser;
   if (snes_ctx->bchandler)
   {
      snes_ctx->op->Mult(*txx,ff);
   }
   else
   {
      snes_ctx->op->Mult(xx,ff);
   }
   ff.UpdateVecFromFlags();
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
                                            void *ctx)
{
   __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;

   PetscFunctionBeginUser;
   if (!snes_ctx->objective)
   {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
   }
   (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls, Vec X, Vec Y, Vec W,
                                            PetscBool *cy, PetscBool *cw,
                                            void* ctx)
{
   __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
   bool lcy = false, lcw = false;

   PetscFunctionBeginUser;
   (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
   if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
   if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
{
   Vec F,X,dX,pX;
   __mfem_snes_ctx* snes_ctx;

   PetscFunctionBeginUser;
   ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
   ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
   ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
   ierr = VecDestroy(&pX); CHKERRQ(ierr);
   if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
                    "Missing previous solution");
   ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
   (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
   ierr = VecCopy(X,pX); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
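// Annotation: the SNES callbacks above translate PETSc's function, Jacobian
// and line-search hooks into calls on the user's mfem::Operator and the
// optional objective, post-check and update callbacks stored in
// __mfem_snes_ctx.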
static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
                                         void* ctx)
{
   __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;

   PetscFunctionBeginUser;
   if (!monctx)
   {
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
   }
   {
      PetscErrorCode ierr;
      ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
   }
   {
      PetscErrorCode ierr;
      ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
   }
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
{
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
   if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,
                      "Missing operator"); }
   yy.UpdateVecFromFlags();
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
{
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
   if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,
                      "Missing operator"); }
   ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
   yy.UpdateVecFromFlags();
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
{
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
   if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,
                      "Missing operator"); }
   ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
{
   PetscFunctionBeginUser;
   PetscFunctionReturn(PETSC_SUCCESS);
}
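// Annotation: the MATSHELL wrappers store a pointer to an mfem::Operator as
// the shell context, so MatMult/MatMultTranspose on the PETSc side dispatch
// to Operator::Mult and Operator::MultTranspose.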
static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
{
   __mfem_pc_shell_ctx *ctx;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
   ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
   ierr = PetscViewerASCIIPrintf(viewer,
                                 "No information available on the mfem::Solver\n");
   if (isascii && ctx->factory)
   {
      ierr = PetscViewerASCIIPrintf(viewer,
                                    "Number of preconditioners created by the factory %lu\n",
                                    ctx->numprec);
   }
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
{
   __mfem_pc_shell_ctx *ctx;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
   ctx->op->Mult(xx,yy);
   yy.UpdateVecFromFlags();
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
{
   __mfem_pc_shell_ctx *ctx;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
   ctx->op->MultTranspose(xx,yy);
   yy.UpdateVecFromFlags();
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_pc_shell_setup(PC pc)
{
   __mfem_pc_shell_ctx *ctx;

   PetscFunctionBeginUser;
   ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
   ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
{
   __mfem_pc_shell_ctx *ctx;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode __mfem_array_container_destroy(void *ptr)
{
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = PetscFree(ptr); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
{
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   for (int i=0; i<a->Size(); i++)
   {
      ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
   }
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
{
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = PetscFree(*ctx); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
{
   PetscFunctionBeginUser;
   __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
   ctx->ownsop = ownsop;
   ctx->factory = NULL;

   ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
   ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
   ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
   ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
   ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
   ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
   ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
   ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
   ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
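// Rough usage sketch (hedged; 'ksp' and 'my_solver' are placeholders): wrap
// any mfem::Solver as a PCSHELL so PETSc can use it as a preconditioner.
//
//    PC pc;
//    KSPGetPC(ksp, &pc);
//    MakeShellPC(pc, my_solver, /*ownsop=*/false);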
PetscErrorCode MakeShellPCWithFactory(PC pc,
                                      mfem::PetscPreconditionerFactory *factory)
{
   PetscFunctionBeginUser;
   __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
   ctx->factory = factory;

   ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
   ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
   ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
   ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
   ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
   ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
   ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
   ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
   ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
                                       const mfem::Array<int> *list,
                                       PetscInt st, IS* is)
{
   const int *data = list ? list->GetData() : NULL;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
   for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
   if (data[i]) { idxs[cum++] = i+st; }
   ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
   PetscFunctionReturn(PETSC_SUCCESS);
}
static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
                                        mfem::Array<Mat> &pl2l,
                                        const mfem::Array<int> *mark,
                                        PetscInt st, IS* is)
{
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   for (int i = 0; i < pl2l.Size(); i++)
   {
      ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
                         (const PetscInt**)&jj,&done); CHKERRQ(ierr);
      MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
      ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
#if defined(PETSC_USE_64BIT_INDICES)
      int nnz = (int)ii[m];
      int *mii = new int[m+1];
      int *mjj = new int[nnz];
      for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
      for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
#endif
      ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
                             (const PetscInt**)&ii,
                             (const PetscInt**)&jj,&done); CHKERRQ(ierr);
      MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
                  << i << " l2l matrix");
   }
   for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
   const int* vdata = mark->GetData();
   int* sdata = sub_dof_marker.GetData();
   int cumh = 0, cumw = 0;
   for (int i = 0; i < l2l.Size(); i++)
   {
      l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
      cumh += l2l[i]->Height();
      cumw += l2l[i]->Width();
   }
   ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
   for (int i = 0; i < pl2l.Size(); i++)
   {
   }
   PetscFunctionReturn(PETSC_SUCCESS);
}
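// Annotation: Convert_Vmarks_IS appears to map a dof marker given in the
// true-dof numbering through the local-to-local matrices into the subdomain
// dof numbering, and then reuses Convert_Array_IS to build the index set.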
#if !defined(PETSC_HAVE_HYPRE)

#if defined(HYPRE_MIXEDINT)
#error "HYPRE_MIXEDINT not supported"
#endif

#include "_hypre_parcsr_mv.h"
static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,
                                                 Mat* pA)
{
   MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
   hypre_CSRMatrix *hdiag,*hoffd;
   PetscInt *dii,*djj,*oii,*ojj,*iptr;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   hdiag = hypre_ParCSRMatrixDiag(hA);
   hoffd = hypre_ParCSRMatrixOffd(hA);
   m     = hypre_CSRMatrixNumRows(hdiag);
   n     = hypre_CSRMatrixNumCols(hdiag);
   dnnz  = hypre_CSRMatrixNumNonzeros(hdiag);
   onnz  = hypre_CSRMatrixNumNonzeros(hoffd);
   ierr  = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
   ierr  = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
   ierr  = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
   ierr  = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
   ierr  = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
   ierr  = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
   ierr  = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
   ierr  = MPI_Comm_size(comm,&size); CHKERRQ(ierr);

   ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
   ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
   ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
   ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
   offdj = hypre_CSRMatrixJ(hoffd);
   coffd = hypre_ParCSRMatrixColMapOffd(hA);
   for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
   ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
   ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);

   if (size > 1)
   {
      ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
                                            djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
   }
   else
   {
      ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
   }

   void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
   const char *names[6] = {"_mfem_csr_dii",
                           "_mfem_csr_djj",
                           "_mfem_csr_da",
                           "_mfem_csr_oii",
                           "_mfem_csr_ojj",
                           "_mfem_csr_oa"};
   ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
   ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
   ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
   ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
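// Annotation: when PETSc was not built with hypre support, this fallback
// copies the diagonal and off-diagonal CSR blocks of the hypre matrix into a
// PETSc (MPI)AIJ matrix; the PetscContainer objects attached above keep the
// copied arrays alive until the Mat is destroyed.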
static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,
                                                Mat* pA)
{
   ISLocalToGlobalMapping rl2g,cl2g;
   hypre_CSRMatrix *hdiag,*hoffd;
   MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
   const char *names[2] = {"_mfem_csr_aux",
                           "_mfem_csr_data"};
   PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
   PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   str   = hypre_ParCSRMatrixFirstRowIndex(hA);
   stc   = hypre_ParCSRMatrixFirstColDiag(hA);
   hdiag = hypre_ParCSRMatrixDiag(hA);
   hoffd = hypre_ParCSRMatrixOffd(hA);
   dr    = hypre_CSRMatrixNumRows(hdiag);
   dc    = hypre_CSRMatrixNumCols(hdiag);
   nnz   = hypre_CSRMatrixNumNonzeros(hdiag);
   hdi   = hypre_CSRMatrixI(hdiag);
   hdj   = hypre_CSRMatrixJ(hdiag);
   hdd   = hypre_CSRMatrixData(hdiag);
   oc    = hypre_CSRMatrixNumCols(hoffd);
   nnz  += hypre_CSRMatrixNumNonzeros(hoffd);
   hoi   = hypre_CSRMatrixI(hoffd);
   hoj   = hypre_CSRMatrixJ(hoffd);
   hod   = hypre_CSRMatrixData(hoffd);

   ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
   ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
   ierr = ISDestroy(&is); CHKERRQ(ierr);
   col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
   ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
   for (i=0; i<dc; i++) { aux[i] = i+stc; }
   for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
   ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
   ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
   ierr = ISDestroy(&is); CHKERRQ(ierr);

   ierr = MatCreate(comm,pA); CHKERRQ(ierr);
   ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
   ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
   ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
   ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
   ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);

   ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
   ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
   *ii = *(hdi++) + *(hoi++);
   for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
   {
      for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
      for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
      *(++ii) = *(hdi++) + *(hoi++);
      ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
   }
   for (; cum<dr; cum++) { *(++ii) = nnz; }

   ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
   CHKERRQ(ierr);
   ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
   ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
   ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
   ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
   ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
   ierr = MatDestroy(&lA); CHKERRQ(ierr);
   ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
   ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}
#endif
#include <petsc/private/matimpl.h>

static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm, PetscInt m,
                                            PetscInt n, Mat *A)
{
   PetscFunctionBeginUser;
   ierr = MatCreate(comm,A); CHKERRQ(ierr);
   ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
   ierr = PetscObjectChangeTypeName((PetscObject)*A,"mfemdummy"); CHKERRQ(ierr);
   (*A)->preallocated = PETSC_TRUE;
   ierr = MatSetUp(*A); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}

#include <petsc/private/vecimpl.h>

#if defined(PETSC_HAVE_DEVICE)
static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
{
   PetscFunctionBeginUser;
   v->offloadmask = m;
   PetscFunctionReturn(PETSC_SUCCESS);
}
#endif

static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
{
   PetscFunctionBeginUser;
#if defined(PETSC_HAVE_DEVICE)
   *flg = v->boundtocpu;
#else
   *flg = PETSC_TRUE;
#endif
   PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject o)
{
   PetscErrorCode ierr;

   PetscFunctionBeginUser;
   ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
   PetscFunctionReturn(PETSC_SUCCESS);
}

#endif  // MFEM_USE_PETSC
#endif  // MFEM_USE_MPI