14 #include "../config/config.hpp"
20 #if defined(PETSC_HAVE_HYPRE)
21 #include "petscmathypre.h"
23 #include "../fem/fem.hpp"
// Abort-on-error check for a PETSc call made on a PETSc object `obj`:
// report the error through PetscError using the object's communicator,
// then hard-abort via MFEM_ABORT (PETSc errors are treated as fatal here).
// NOTE(review): this excerpt is missing the macro's interior lines —
// presumably an `if ((err))` guard and the closing `} while(0)` — TODO
// confirm against the full file before editing.
34 #define PCHKERRQ(obj,err) do { \
37 PetscError(PetscObjectComm((PetscObject)obj),__LINE__,_MFEM_FUNC_NAME, \
38 __FILE__,err,PETSC_ERROR_REPEAT,NULL); \
39 MFEM_ABORT("Error in PETSc. See stacktrace above."); \
// Same as PCHKERRQ, but for PETSc calls where no PETSc object is at hand:
// the caller supplies the MPI communicator `comm` explicitly for the
// PetscError report, then execution aborts via MFEM_ABORT.
// NOTE(review): interior macro lines (error guard, `} while(0)` tail) are
// not visible in this excerpt — confirm against the full file.
42 #define CCHKERRQ(comm,err) do { \
45 PetscError(comm,__LINE__,_MFEM_FUNC_NAME, \
46 __FILE__,err,PETSC_ERROR_REPEAT,NULL); \
47 MFEM_ABORT("Error in PETSc. See stacktrace above."); \
52 static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,
void*);
53 static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,
void*);
54 static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
56 static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,
void*);
57 static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
60 static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,
void*);
61 static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,
void*);
62 static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,
void*);
63 static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
64 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
65 static PetscErrorCode __mfem_pc_shell_setup(PC);
66 static PetscErrorCode __mfem_pc_shell_destroy(PC);
67 static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
68 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
69 static PetscErrorCode __mfem_mat_shell_destroy(Mat);
70 static PetscErrorCode __mfem_array_container_destroy(
void*);
71 static PetscErrorCode __mfem_matarray_container_destroy(
void*);
74 static PetscErrorCode Convert_Array_IS(MPI_Comm,
bool,
const mfem::Array<int>*,
82 #if !defined(PETSC_HAVE_HYPRE)
83 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
84 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
// Synchronize the mfem::Vector base class (data pointer and local size)
// with the wrapped PETSc Vec `x`.
// NOTE(review): this excerpt drops interior lines (opening brace, the
// declaration of `n`, closing brace); the statements below are verbatim.
108 void PetscParVector::_SetDataAndSize_()
110 const PetscScalar *array;
// Borrow PETSc's array read-only just to learn the current data pointer
// and local length.
113 ierr = VecGetArrayRead(x,&array); PCHKERRQ(x,
ierr);
114 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,
ierr);
// The const is cast away and the pointer is kept after the read access is
// returned below — this assumes the Vec keeps its buffer in place (no
// copy-on-access Vec type); TODO confirm against PETSc Vec semantics.
115 SetDataAndSize((PetscScalar*)array,n);
116 ierr = VecRestoreArrayRead(x,&array); PCHKERRQ(x,
ierr);
// Return the global (summed across all MPI ranks) length of the wrapped
// PETSc Vec, as reported by VecGetSize.
// NOTE(review): the declaration of `N` and the `return N;` statement are
// not visible in this excerpt.
119 PetscInt PetscParVector::GlobalSize()
const
122 ierr = VecGetSize(x,&N); PCHKERRQ(x,
ierr);
126 PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
129 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,
ierr);
133 MPI_Comm_rank(comm, &myid);
134 ierr = VecSetSizes(
x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(
x,
ierr);
138 ierr = VecSetSizes(
x,PETSC_DECIDE,glob_size); PCHKERRQ(
x,
ierr);
140 ierr = VecSetType(
x,VECSTANDARD); PCHKERRQ(
x,
ierr);
146 MPI_Comm comm = PetscObjectComm((PetscObject)
x);
147 ierr = VecDestroy(&x); CCHKERRQ(comm,
ierr);
151 PetscScalar *_data, PetscInt *col) :
Vector()
153 MFEM_VERIFY(col,
"Missing distribution");
155 MPI_Comm_rank(comm, &myid);
156 ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,_data,
157 &
x); CCHKERRQ(comm,
ierr)
168 bool transpose,
bool allocate) :
Vector()
170 PetscInt loc = transpose ? op.
Height() : op.
Width();
173 ierr = VecCreate(comm,&
x);
175 ierr = VecSetSizes(
x,loc,PETSC_DECIDE);
177 ierr = VecSetType(
x,VECSTANDARD);
184 ierr = VecCreateMPIWithArray(comm,1,loc,PETSC_DECIDE,NULL,
185 &
x); CCHKERRQ(comm,
ierr);
191 bool transpose,
bool allocate) :
Vector()
196 ierr = MatCreateVecs(pA,&
x,NULL);
200 ierr = MatCreateVecs(pA,NULL,&
x);
204 ierr = VecReplaceArray(
x,NULL); PCHKERRQ(
x,
ierr);
214 ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,
ierr);
224 MPI_Comm comm = pfes->
GetComm();
225 ierr = VecCreate(comm,&
x); CCHKERRQ(comm,
ierr);
227 PetscMPIInt myid = 0;
228 if (!HYPRE_AssumedPartitionCheck())
230 MPI_Comm_rank(comm,&myid);
232 ierr = VecSetSizes(
x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
234 ierr = VecSetType(
x,VECSTANDARD); PCHKERRQ(
x,
ierr);
245 ierr = VecScatterCreateToAll(
x,&scctx,&vout); PCHKERRQ(
x,
ierr);
246 ierr = VecScatterBegin(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
248 ierr = VecScatterEnd(scctx,
x,vout,INSERT_VALUES,SCATTER_FORWARD);
250 ierr = VecScatterDestroy(&scctx); PCHKERRQ(
x,
ierr);
251 ierr = VecGetArray(vout,&array); PCHKERRQ(
x,
ierr);
252 ierr = VecGetLocalSize(vout,&size); PCHKERRQ(
x,
ierr);
255 ierr = VecRestoreArray(vout,&array); PCHKERRQ(
x,
ierr);
256 ierr = VecDestroy(&vout); PCHKERRQ(
x,
ierr);
277 ierr = VecPlaceArray(
x,temp_data); PCHKERRQ(
x,
ierr);
// Fill the wrapped Vec with random values via a throwaway PETSc random
// context: create it on the Vec's communicator, seed it with the
// caller-provided `seed`, apply it, then destroy it.
// NOTE(review): fragment — the enclosing function signature (presumably
// PetscParVector::Randomize(seed)) is not visible in this excerpt.
289 ierr = PetscRandomCreate(PetscObjectComm((PetscObject)
x),&rctx);
// Deterministic seeding: PetscRandomSetSeed records the seed,
// PetscRandomSeed actually (re)initializes the generator from it.
291 ierr = PetscRandomSetSeed(rctx,(
unsigned long)seed); PCHKERRQ(x,
ierr);
292 ierr = PetscRandomSeed(rctx); PCHKERRQ(x,
ierr);
293 ierr = VecSetRandom(x,rctx); PCHKERRQ(x,
ierr);
294 ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,
ierr);
305 ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)
x),fname,
306 FILE_MODE_WRITE,&view);
310 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)
x),fname,&view);
314 ierr = PetscViewerDestroy(&view); PCHKERRQ(
x,
ierr);
327 ierr = MatGetLocalSize(
A,&N,NULL); PCHKERRQ(
A,
ierr);
334 ierr = MatGetLocalSize(
A,NULL,&N); PCHKERRQ(
A,
ierr);
341 ierr = MatGetSize(
A,&N,NULL); PCHKERRQ(
A,
ierr);
348 ierr = MatGetSize(
A,NULL,&N); PCHKERRQ(
A,
ierr);
355 ierr = MatGetInfo(
A,MAT_GLOBAL_SUM,&info); PCHKERRQ(
A,
ierr);
356 return (PetscInt)info.nz_used;
386 MFEM_ABORT(
"unsupported PETSc format: type id = " << tid);
407 BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
415 PetscInt global_num_cols, PetscInt *row_starts,
420 BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
431 MPI_Comm comm = PetscObjectComm((PetscObject)
A);
432 ierr = MatDestroy(&A); CCHKERRQ(comm,
ierr);
439 #if defined(PETSC_HAVE_HYPRE)
440 ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&
A);
451 MPI_Comm comm = PetscObjectComm((PetscObject)
A);
452 ierr = MatDestroy(&A); CCHKERRQ(comm,
ierr);
471 MFEM_VERIFY(
height == B.
Height(),
"Invalid number of local rows");
472 MFEM_VERIFY(
width == B.
Width(),
"Invalid number of local columns");
473 ierr = MatAXPY(
A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.
GetComm(),
ierr);
478 void PetscParMatrix::
479 BlockDiagonalConstructor(MPI_Comm comm,
480 PetscInt *row_starts, PetscInt *col_starts,
484 PetscInt lrsize,lcsize,rstart,cstart;
485 PetscMPIInt myid = 0,commsize;
487 ierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,
ierr);
488 if (!HYPRE_AssumedPartitionCheck())
490 ierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,
ierr);
492 lrsize = row_starts[myid+1]-row_starts[myid];
493 rstart = row_starts[myid];
494 lcsize = col_starts[myid+1]-col_starts[myid];
495 cstart = col_starts[myid];
500 ierr = ISCreateStride(comm,diag->
Height(),rstart,1,&is); CCHKERRQ(comm,
ierr);
501 ISLocalToGlobalMapping rl2g,cl2g;
502 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,
ierr);
503 ierr = ISDestroy(&is); CCHKERRQ(comm,
ierr);
504 if (row_starts != col_starts)
506 ierr = ISCreateStride(comm,diag->
Width(),cstart,1,&is);
508 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,
ierr);
509 ierr = ISDestroy(&is); CCHKERRQ(comm,
ierr);
513 ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,
ierr);
518 ierr = MatCreate(comm,&A); CCHKERRQ(comm,
ierr);
519 ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
521 ierr = MatSetType(A,MATIS); PCHKERRQ(A,
ierr);
522 ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,
ierr);
523 ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,
ierr)
524 ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,
ierr)
528 ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,
ierr);
529 if (sizeof(PetscInt) == sizeof(
int))
531 ierr = MatSeqAIJSetPreallocationCSR(lA,diag->
GetI(),diag->
GetJ(),
532 diag->
GetData()); PCHKERRQ(lA,ierr);
536 MFEM_ABORT(
"64bit indices not yet supported");
542 PetscInt *dii,*djj,*oii,
548 ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
549 ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
550 ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
551 if (
sizeof(PetscInt) ==
sizeof(
int))
553 ierr = PetscMemcpy(dii,diag->
GetI(),m*
sizeof(PetscInt));
554 CCHKERRQ(PETSC_COMM_SELF,ierr);
555 ierr = PetscMemcpy(djj,diag->
GetJ(),nnz*
sizeof(PetscInt));
556 CCHKERRQ(PETSC_COMM_SELF,ierr);
557 ierr = PetscMemcpy(da,diag->
GetData(),nnz*
sizeof(PetscScalar));
558 CCHKERRQ(PETSC_COMM_SELF,ierr);
562 MFEM_ABORT(
"64bit indices not yet supported");
564 ierr = PetscCalloc1(m,&oii);
565 CCHKERRQ(PETSC_COMM_SELF,ierr);
568 ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
570 dii,djj,da,oii,NULL,NULL,&A);
575 ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
579 void *ptrs[4] = {dii,djj,da,oii};
580 const char *names[4] = {
"_mfem_csr_dii",
585 for (PetscInt i=0; i<4; i++)
589 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
590 ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
591 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
593 ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
595 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
600 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
601 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
616 mat_shell_ctx *ctx =
new mat_shell_ctx;
617 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
619 PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
620 ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
621 ierr = MatShellSetContext(*A,(
void *)ctx); PCHKERRQ(A,ierr);
622 ierr = MatShellSetOperation(*A,MATOP_MULT,
623 (
void (*)())__mfem_mat_shell_apply);
625 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
626 (
void (*)())__mfem_mat_shell_apply_transpose);
628 ierr = MatShellSetOperation(*A,MATOP_DESTROY,
629 (
void (*)())__mfem_mat_shell_destroy);
631 ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
632 ctx->op =
const_cast<Operator *
>(op);
649 PetscBool ismatis,istrans;
651 ierr = PetscObjectTypeCompare((PetscObject)(pA->
A),MATTRANSPOSEMAT,&istrans);
655 ierr = PetscObjectTypeCompare((PetscObject)(pA->
A),MATIS,&ismatis);
660 ierr = MatTransposeGetMat(pA->
A,&At); CCHKERRQ(pA->
GetComm(),
ierr);
661 ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
664 if (assembled && ismatis)
670 ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->
A,ierr);
671 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->
A,ierr);
672 ierr = MatDestroy(&B); PCHKERRQ(pA->
A,ierr);
676 ierr = MatISGetMPIXAIJ(pA->
A,MAT_INITIAL_MATRIX,A);
677 PCHKERRQ(pA->
A,ierr);
682 if ((!assembled && !ismatis))
685 ierr = MatConvert(pA->
A,MATIS,MAT_INITIAL_MATRIX,A);
690 ierr = PetscObjectReference((PetscObject)(pA->
A));
700 #if defined(PETSC_HAVE_HYPRE)
701 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
702 PETSC_USE_POINTER,A);
704 ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
709 #if defined(PETSC_HAVE_HYPRE)
710 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
711 PETSC_USE_POINTER,A);
713 ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
720 Mat *mats,*matsl2l = NULL;
725 ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
728 ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
732 PetscBool needl2l = PETSC_TRUE;
739 if (!assembled && needl2l)
742 ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],
"__mfem_l2l",
744 PCHKERRQ(mats[i*nc+j],ierr);
752 ierr = PetscContainerGetPointer(c,(
void**)&l2l);
754 MFEM_VERIFY(l2l->
Size() == 1,
"Unexpected size "
755 << l2l->
Size() <<
" for block row " << i );
756 ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
758 matsl2l[i] = (*l2l)[0];
759 needl2l = PETSC_FALSE;
765 ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
768 ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
771 for (PetscInt i=0; i<nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
772 ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
775 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
776 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
777 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
779 ierr = PetscObjectCompose((PetscObject)(*A),
"__mfem_l2l",(PetscObject)c);
781 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
783 for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
784 ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
788 MFEM_VERIFY(assembled,
"Unsupported operation");
791 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
792 ierr = MatSetSizes(*A,pI->
Height(),pI->
Width(),PETSC_DECIDE,PETSC_DECIDE);
794 ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
795 ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
796 ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
797 ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
798 ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
799 for (PetscInt i = rst; i < rst+pI->
Height(); i++)
801 ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
803 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
804 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
817 MPI_Comm comm = MPI_COMM_NULL;
818 ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
819 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
830 ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
// Helper for PetscParMatrix::Mult/MultTranspose: computes
// Y = a*op(A)*X + b*Y, where op is either MatMult or MatMultTranspose
// (selected by a flag whose test is not visible in this excerpt).
// NOTE(review): the if/else scaffolding between the statements below was
// dropped by the excerpting; statements are kept verbatim.
840 static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
843 PetscErrorCode (*f)(Mat,Vec,Vec);
844 PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
// Transpose branch: route through the transposed PETSc operations.
847 f = MatMultTranspose;
848 fadd = MatMultTransposeAdd;
// Accumulating branch (b == 1, presumably): temporarily scale X by a,
// do Y += op(A)*X, then undo the scaling so the caller's X is unchanged.
// NOTE(review): divides by a — a path for a == 0 must exist elsewhere in
// the dropped control flow; confirm against the full file.
859 ierr = VecScale(X,a); PCHKERRQ(A,ierr);
860 ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
861 ierr = VecScale(X,1./a); PCHKERRQ(A,ierr);
// General branch: scale Y by b first, then accumulate a*op(A)*X as above.
865 ierr = VecScale(X,a); PCHKERRQ(A,ierr);
866 ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
867 ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
868 ierr = VecScale(X,1./a); PCHKERRQ(A,ierr);
// Overwriting branch (b == 0, presumably): Y = op(A)*X, scaled by a when
// a != 1.
872 ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
875 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
// a == 0 cases: Y = b*Y, or Y = 0 when b is also 0 (per the dropped
// conditionals — confirm against the full file).
887 ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
891 ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
898 ierr = PetscObjectReference((PetscObject)master.
A); PCHKERRQ(master.
A,ierr);
910 MFEM_VERIFY(A,
"Mat not present");
920 MFEM_VERIFY(A,
"Mat not present");
931 ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
935 ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
942 ierr = MatScale(A,s); PCHKERRQ(A,ierr);
947 MFEM_ASSERT(x.
Size() ==
Width(),
"invalid x.Size() = " << x.
Size()
948 <<
", expected size = " <<
Width());
949 MFEM_ASSERT(y.
Size() ==
Height(),
"invalid y.Size() = " << y.
Size()
950 <<
", expected size = " <<
Height());
956 MatMultKernel(A,a,XX->x,b,YY->
x,
false);
964 MFEM_ASSERT(x.
Size() ==
Height(),
"invalid x.Size() = " << x.
Size()
965 <<
", expected size = " <<
Height());
966 MFEM_ASSERT(y.
Size() ==
Width(),
"invalid y.Size() = " << y.
Size()
967 <<
", expected size = " <<
Width());
973 MatMultKernel(A,a,YY->
x,b,XX->x,
true);
986 ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
987 FILE_MODE_WRITE,&view);
991 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
994 ierr = MatView(A,view); PCHKERRQ(A,ierr);
995 ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
999 ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
1006 Mat pA = *A,pP = *P,pRt = *Rt;
1008 PetscBool Aismatis,Pismatis,Rtismatis;
1011 "Petsc RAP: Number of local cols of A " << A->
Width() <<
1012 " differs from number of local rows of P " << P->
Height());
1014 "Petsc RAP: Number of local rows of A " << A->
Height() <<
1015 " differs from number of local rows of Rt " << Rt->
Height());
1016 ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
1018 ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
1020 ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
1027 ISLocalToGlobalMapping cl2gP,cl2gRt;
1028 PetscInt rlsize,clsize,rsize,csize;
1030 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
1031 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
1032 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
1033 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
1034 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
1035 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
1036 ierr = MatCreate(A->
GetComm(),&B); PCHKERRQ(pA,ierr);
1037 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
1038 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
1039 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
1040 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
1041 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
1042 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
1045 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
1051 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
1052 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
1054 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
1062 ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
1063 (*vmatsl2l)[0] = lRt;
1066 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
1068 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1069 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1071 ierr = PetscObjectCompose((PetscObject)B,
"__mfem_l2l",(PetscObject)c);
1073 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1077 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
1078 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
1079 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
1080 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
1086 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
1092 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
1093 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
1095 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
1111 PetscInt
M,
N,i,n,*idxs,rst;
1113 ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
1114 MFEM_VERIFY(M == N,
"Rectangular case unsupported");
1115 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
1116 ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
1117 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
1119 n = rows_cols.
Size();
1121 ierr = PetscMalloc1(n,&idxs); PCHKERRQ(A,ierr);
1122 for (i=0; i<n; i++) { idxs[i] = data[i] + rst; }
1123 ierr = MatZeroRowsColumns(A,n,idxs,1.,NULL,NULL); PCHKERRQ(A,ierr);
1124 ierr = PetscFree(idxs); PCHKERRQ(A,ierr);
1125 ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
1133 MFEM_ABORT(
"To be implemented");
1143 ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
1152 MFEM_VERIFY(A,
"no associated PETSc Mat object");
1153 PetscObject oA = (PetscObject)(this->A);
1155 ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
1157 ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
1159 ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
1161 ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
1163 ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
1165 ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
1168 ierr = MatGetType(A, &mat_type); PCHKERRQ(A,ierr);
1169 MFEM_ABORT(
"PETSc matrix type = '" << mat_type <<
"' is not implemented");
1177 const PetscScalar *array;
1181 Ae.
Mult(-1.0, X, 1.0, B);
1184 ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
1185 ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
1186 for (
int i = 0; i < ess_dof_list.
Size(); i++)
1188 int r = ess_dof_list[i];
1189 B(r) = array[r] * X(r);
1191 ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
1218 if (
cid == KSP_CLASSID)
1221 ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
1223 else if (
cid == SNES_CLASSID)
1225 SNES snes = (SNES)
obj;
1226 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
1229 else if (
cid == TS_CLASSID)
1232 ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
1236 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
1243 if (
cid == KSP_CLASSID)
1246 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
1248 else if (
cid == SNES_CLASSID)
1250 SNES snes = (SNES)
obj;
1251 ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1254 else if (
cid == TS_CLASSID)
1257 ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
1261 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
1268 if (
cid == KSP_CLASSID)
1271 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1274 else if (
cid == SNES_CLASSID)
1276 SNES snes = (SNES)
obj;
1277 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
1278 max_iter,PETSC_DEFAULT);
1280 else if (
cid == TS_CLASSID)
1283 ierr = TSSetDuration(ts,max_iter,PETSC_DEFAULT);
1287 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
1295 typedef PetscErrorCode (*myPetscFunc)(
void**);
1296 PetscViewerAndFormat *vf;
1297 PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(
obj));
1299 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
1301 if (
cid == KSP_CLASSID)
1305 typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,
void*);
1309 ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
1313 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
1314 (myPetscFunc)PetscViewerAndFormatDestroy);
1319 ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
1320 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
1321 (myPetscFunc)PetscViewerAndFormatDestroy);
1325 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
1326 PCHKERRQ(viewer,ierr);
1327 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
1328 (myPetscFunc)PetscViewerAndFormatDestroy);
1335 ierr = KSPMonitorSet(ksp,__mfem_ksp_monitor,
monitor_ctx,NULL);
1339 else if (
cid == SNES_CLASSID)
1341 typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,
void*);
1342 SNES snes = (SNES)
obj;
1345 ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
1349 ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
1350 (myPetscFunc)PetscViewerAndFormatDestroy);
1351 PCHKERRQ(snes,ierr);
1354 else if (
cid == TS_CLASSID)
1359 ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
1364 ierr = TSMonitorSet(ts,__mfem_ts_monitor,
monitor_ctx,NULL);
1370 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
1382 if (!customize) {
clcustom =
true; }
1385 if (
cid == PC_CLASSID)
1388 ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
1390 else if (
cid == KSP_CLASSID)
1393 ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
1395 else if (
cid == SNES_CLASSID)
1397 SNES snes = (SNES)
obj;
1398 ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
1400 else if (
cid == TS_CLASSID)
1403 ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
1407 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
1415 if (
cid == KSP_CLASSID)
1418 KSPConvergedReason reason;
1419 ierr = KSPGetConvergedReason(ksp,&reason);
1421 return reason > 0 ? 1 : 0;
1423 else if (
cid == SNES_CLASSID)
1425 SNES snes = (SNES)
obj;
1426 SNESConvergedReason reason;
1427 ierr = SNESGetConvergedReason(snes,&reason);
1428 PCHKERRQ(snes,ierr);
1429 return reason > 0 ? 1 : 0;
1431 else if (
cid == TS_CLASSID)
1434 TSConvergedReason reason;
1435 ierr = TSGetConvergedReason(ts,&reason);
1437 return reason > 0 ? 1 : 0;
1441 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
1448 if (
cid == KSP_CLASSID)
1452 ierr = KSPGetIterationNumber(ksp,&its);
1456 else if (
cid == SNES_CLASSID)
1458 SNES snes = (SNES)
obj;
1460 ierr = SNESGetIterationNumber(snes,&its);
1461 PCHKERRQ(snes,ierr);
1464 else if (
cid == TS_CLASSID)
1468 ierr = TSGetTotalSteps(ts,&its);
1474 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
1481 if (
cid == KSP_CLASSID)
1485 ierr = KSPGetResidualNorm(ksp,&norm);
1489 if (
cid == SNES_CLASSID)
1491 SNES snes = (SNES)
obj;
1493 ierr = SNESGetFunctionNorm(snes,&norm);
1494 PCHKERRQ(snes,ierr);
1499 MFEM_ABORT(
"CLASSID = " <<
cid <<
" is not implemented!");
1500 return PETSC_MAX_REAL;
1510 ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
1511 obj = (PetscObject)ksp;
1512 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
1513 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
1517 const std::string &prefix)
1522 obj = (PetscObject)ksp;
1523 ierr = PetscObjectGetClassId(obj,&
cid); PCHKERRQ(obj,ierr);
1524 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
1529 const std::string &prefix)
1534 obj = (PetscObject)ksp;
1535 ierr = PetscObjectGetClassId(obj, &
cid); PCHKERRQ(obj, ierr);
1536 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
1547 bool delete_pA =
false;
1564 MFEM_VERIFY(pA,
"Unsupported operation!");
1572 PetscInt nheight,nwidth,oheight,owidth;
1574 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
1575 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
1576 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
1577 if (nheight != oheight || nwidth != owidth)
1581 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
1588 ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
1597 if (delete_pA) {
delete pA; }
1606 ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
1611 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
1612 ierr = PCSetType(pc,PCSHELL); PCHKERRQ(pc,ierr);
1613 solver_shell_ctx *ctx =
new solver_shell_ctx;
1615 ierr = PCShellSetContext(pc,(
void *)ctx); PCHKERRQ(pc,ierr);
1616 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); PCHKERRQ(pc,ierr);
1617 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
1619 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); PCHKERRQ(pc,ierr);
1620 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); PCHKERRQ(pc,ierr);
1631 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(
obj, ierr);
1648 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)
iterative_mode);
1649 PCHKERRQ(ksp, ierr);
1652 ierr = KSPSolve(ksp,
B->
x, X->x); PCHKERRQ(ksp,ierr);
1661 ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
1662 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
1671 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
1673 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
1680 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
1682 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
1686 const std::string &prefix)
1690 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
1692 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
1698 const std::string &prefix)
1702 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
1703 obj = (PetscObject)pc;
1704 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
1705 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
1709 const string &prefix)
1714 obj = (PetscObject)pc;
1715 ierr = PetscObjectGetClassId(obj,&
cid); PCHKERRQ(obj,ierr);
1716 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
1721 const string &prefix)
1725 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
1726 obj = (PetscObject)pc;
1727 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
1728 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
1734 bool delete_pA =
false;
1749 PetscInt nheight,nwidth,oheight,owidth;
1751 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
1752 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
1753 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
1754 if (nheight != oheight || nwidth != owidth)
1758 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
1764 ierr = PCSetOperators(pc,pA->
A,pA->
A); PCHKERRQ(
obj,ierr);
1773 if (delete_pA) {
delete pA; };
1783 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(
obj, ierr);
1801 ierr = PCApply(pc,
B->
x, X->x); PCHKERRQ(pc, ierr);
1810 ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
1811 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
1818 MPI_Comm comm = PetscObjectComm(
obj);
1823 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
1827 ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
1829 MFEM_VERIFY(ismatis,
"PetscBDDCSolver needs the matrix in unassembled format");
1832 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(
obj,ierr);
1839 IS dir = NULL, neu = NULL;
1848 ierr = PetscObjectQuery((PetscObject)pA,
"__mfem_l2l",(PetscObject*)&c);
1849 MFEM_VERIFY(c,
"Local-to-local PETSc container not present");
1850 ierr = PetscContainerGetPointer(c,(
void**)&l2l); PCHKERRQ(c,ierr);
1857 PetscBool lpr = PETSC_FALSE,pr;
1858 if (opts.
ess_dof) { lpr = PETSC_TRUE; }
1859 ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
1861 MFEM_VERIFY(lpr == pr,
"ess_dof should be collectively set");
1863 if (opts.
nat_dof) { lpr = PETSC_TRUE; }
1864 ierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
1866 MFEM_VERIFY(lpr == pr,
"nat_dof should be collectively set");
1868 PetscInt ms[2],Ms[2];
1869 ms[0] = -nf; ms[1] = nf;
1870 ierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
1872 MFEM_VERIFY(-Ms[0] == Ms[1],
1873 "number of fields should be the same across processes");
1878 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
1885 ierr = Convert_Array_IS(comm,
true,opts.
ess_dof,st,&dir);
1886 CCHKERRQ(comm,ierr);
1887 ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
1892 ierr = Convert_Vmarks_IS(comm,*l2l,opts.
ess_dof,st,&dir);
1893 CCHKERRQ(comm,ierr);
1894 ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
1903 ierr = Convert_Array_IS(comm,
true,opts.
nat_dof,st,&neu);
1904 CCHKERRQ(comm,ierr);
1905 ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
1910 ierr = Convert_Vmarks_IS(comm,*l2l,opts.
nat_dof,st,&neu);
1911 CCHKERRQ(comm,ierr);
1912 ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
1919 ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
1920 for (
int i = 0; i < nf; i++)
1922 ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
1924 ierr = PetscFree(fields); PCHKERRQ(pc,ierr);
1932 ParFiniteElementSpace *fespace = opts.
fespace;
1935 const FiniteElementCollection *fec = fespace->
FEColl();
1936 bool edgespace, rtspace;
1937 bool needint =
false;
1938 bool tracespace, rt_tracespace, edge_tracespace;
1940 PetscBool B_is_Trans = PETSC_FALSE;
1942 ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
1943 dim = pmesh->Dimension();
1946 rtspace =
dynamic_cast<const RT_FECollection*
>(fec);
1947 edgespace =
dynamic_cast<const ND_FECollection*
>(fec);
1948 edge_tracespace =
dynamic_cast<const ND_Trace_FECollection*
>(fec);
1949 rt_tracespace =
dynamic_cast<const RT_Trace_FECollection*
>(fec);
1950 tracespace = edge_tracespace || rt_tracespace;
1953 if (fespace->GetNE() > 0)
1957 p = fespace->GetOrder(0);
1961 p = fespace->GetFaceOrder(0);
1962 if (dim == 2) { p++; }
1973 MFEM_WARNING(
"Tracespace case doesn't work for H(curl) and p=2,"
1974 " not using auxiliary quadrature");
1980 FiniteElementCollection *vfec;
1983 vfec =
new H1_Trace_FECollection(p,dim);
1987 vfec =
new H1_FECollection(p,dim);
1989 ParFiniteElementSpace *vfespace =
new ParFiniteElementSpace(pmesh,vfec);
1990 ParDiscreteLinearOperator *grad;
1991 grad =
new ParDiscreteLinearOperator(vfespace,fespace);
1994 grad->AddTraceFaceInterpolator(
new GradientInterpolator);
1998 grad->AddDomainInterpolator(
new GradientInterpolator);
2002 HypreParMatrix *hG = grad->ParallelAssemble();
2003 PetscParMatrix *G =
new PetscParMatrix(hG,
PETSC_MATAIJ);
2007 PetscBool conforming = PETSC_TRUE;
2008 if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
2009 ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
2021 MFEM_WARNING(
"Tracespace case doesn't work for H(div), not using"
2022 " auxiliary quadrature");
2031 PetscParMatrix *
B = NULL;
2037 FiniteElementCollection *auxcoll;
2038 if (tracespace) { auxcoll =
new RT_Trace_FECollection(p,dim); }
2039 else { auxcoll =
new L2_FECollection(p,dim); };
2040 ParFiniteElementSpace *pspace =
new ParFiniteElementSpace(pmesh,auxcoll);
2041 ParMixedBilinearForm *b =
new ParMixedBilinearForm(fespace,pspace);
2047 b->AddTraceFaceIntegrator(
new VectorFECurlIntegrator);
2051 b->AddDomainIntegrator(
new VectorFECurlIntegrator);
2058 b->AddTraceFaceIntegrator(
new VectorFEDivergenceIntegrator);
2062 b->AddDomainIntegrator(
new VectorFEDivergenceIntegrator);
2068 b->ParallelAssemble(Bh);
2070 Bh.SetOperatorOwner(
false);
2075 ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
2078 ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
2082 ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
2084 B_is_Trans = PETSC_TRUE;
2093 ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
2097 ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
2098 ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
2103 const std::string &prefix)
2106 BDDCSolverConstructor(opts);
2112 const std::string &prefix)
2115 BDDCSolverConstructor(opts);
2120 const string &prefix)
2126 ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
2131 ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
2134 "PetscFieldSplitSolver needs the matrix in nested format.");
2138 ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
2139 ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
2140 ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
2141 ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
2146 for (PetscInt i=0; i<nr; i++)
2148 ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
2150 ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
2156 const std::string &prefix)
2160 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
2161 obj = (PetscObject)snes;
2162 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
2163 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
2167 const std::string &prefix)
2171 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
2172 obj = (PetscObject)snes;
2173 ierr = PetscObjectGetClassId(
obj, &
cid); PCHKERRQ(
obj, ierr);
2174 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
2181 SNES snes = (SNES)
obj;
2182 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj, ierr);
2183 ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
2188 SNES snes = (SNES)
obj;
2195 ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
2196 PCHKERRQ(snes, ierr);
2197 ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
2198 PCHKERRQ(snes, ierr);
2201 (
void*)&op == fctx && (
void*)&op == jctx);
2202 ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
2203 PetscObjectComm((PetscObject)snes));
2204 PCHKERRQ(snes,ierr);
2207 ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
2214 ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (
void *)&op);
2215 PCHKERRQ(snes, ierr);
2216 ierr = SNESSetJacobian(snes, NULL, NULL, __mfem_snes_jacobian, (
void *)&op);
2217 PCHKERRQ(snes, ierr);
2229 SNES snes = (SNES)
obj;
2231 bool b_nonempty = b.
Size();
2235 if (b_nonempty) { B->PlaceArray(b.
GetData()); }
2243 ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
2245 if (b_nonempty) { B->ResetArray(); }
2254 ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
2255 obj = (PetscObject)ts;
2256 ierr = PetscObjectGetClassId(
obj,&
cid); PCHKERRQ(
obj,ierr);
2257 ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
2260 ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
2263 ierr = TSGetAdapt(ts,&tsad);
2265 ierr = TSAdaptSetType(tsad,TSADAPTNONE);
2273 ierr = PetscObjectGetComm(
obj,&comm); PCHKERRQ(
obj,ierr);
2274 ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
2284 void *fctx = NULL,*jctx = NULL,*rfctx = NULL,*rjctx = NULL;
2288 ierr = TSGetIFunction(ts, NULL, NULL, &fctx);
2290 ierr = TSGetIJacobian(ts, NULL, NULL, NULL, &jctx);
2295 ierr = TSGetRHSFunction(ts, NULL, NULL, &rfctx);
2297 ierr = TSGetRHSJacobian(ts, NULL, NULL, NULL, &rjctx);
2306 ls = (PetscBool)(ls && (
void*)&f_ == fctx && (
void*)&f_ == jctx);
2310 ls = (PetscBool)(ls && (
void*)&f_ == rfctx && (
void*)&f_ == rjctx);
2312 ierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
2313 PetscObjectComm((PetscObject)ts));
2317 ierr = TSReset(ts); PCHKERRQ(ts,ierr);
2326 ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (
void *)
f);
2328 ierr = TSSetIJacobian(ts, NULL, NULL, __mfem_ts_ijacobian, (
void *)
f);
2330 ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
2335 ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (
void *)
f);
2337 ierr = TSSetRHSJacobian(ts, NULL, NULL, __mfem_ts_rhsjacobian, (
void *)
f);
2347 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
2348 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
2356 ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
2357 ierr = TSStep(ts); PCHKERRQ(ts, ierr);
2362 ierr = TSGetTime(ts,&pt); PCHKERRQ(ts,ierr);
2364 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
2372 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
2373 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
2374 ierr = TSSetDuration(ts, PETSC_DECIDE, t_final); PCHKERRQ(ts, ierr);
2375 ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
2384 ierr = VecCopy(X->x, X->x); PCHKERRQ(ts, ierr);
2385 ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
2390 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
2392 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
2398 #include "petsc/private/petscimpl.h"
2402 #define __FUNCT__ "__mfem_ts_monitor"
2403 static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
2408 PetscFunctionBeginUser;
2411 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"No monitor context provided");
2420 SETERRQ(PetscObjectComm((PetscObject)ts),PETSC_ERR_SUP,
2421 "Cannot monitor the residual with TS");
2423 PetscFunctionReturn(0);
2427 #define __FUNCT__ "__mfem_ksp_monitor"
2428 static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
2433 PetscErrorCode
ierr;
2435 PetscFunctionBeginUser;
2438 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,
"No monitor context provided");
2442 ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
2448 ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
2452 PetscFunctionReturn(0);
2456 #define __FUNCT__ "__mfem_ts_ifunction"
2457 static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
2460 PetscErrorCode
ierr;
2462 PetscFunctionBeginUser;
2474 ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
2475 PetscFunctionReturn(0);
2479 #define __FUNCT__ "__mfem_ts_rhsfunction"
2480 static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
2483 PetscErrorCode
ierr;
2485 PetscFunctionBeginUser;
2496 ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
2497 PetscFunctionReturn(0);
2501 #define __FUNCT__ "__mfem_ts_ijacobian"
2502 static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
2503 Vec xp, PetscReal shift, Mat A, Mat P,
2508 PetscErrorCode
ierr;
2510 PetscFunctionBeginUser;
2512 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
2513 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
2515 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
2516 ierr = VecGetArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
2518 ierr = VecRestoreArrayRead(xp,(
const PetscScalar**)&array); CHKERRQ(ierr);
2539 B = p2A.ReleaseMat(
false);
2541 ierr = MatHeaderReplace(A,&B); CHKERRQ(ierr);
2542 PetscFunctionReturn(0);
2546 #define __FUNCT__ "__mfem_ts_rhsjacobian"
2547 static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
2548 Mat A, Mat P,
void *ctx)
2552 PetscErrorCode
ierr;
2554 PetscFunctionBeginUser;
2556 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
2557 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
2559 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
2579 B = p2A.ReleaseMat(
false);
2581 ierr = MatHeaderReplace(A,&B); CHKERRQ(ierr);
2582 PetscFunctionReturn(0);
2586 #define __FUNCT__ "__mfem_snes_jacobian"
2587 static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
2592 PetscErrorCode
ierr;
2594 PetscFunctionBeginUser;
2596 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
2597 ierr = VecGetArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
2599 ierr = VecRestoreArrayRead(x,(
const PetscScalar**)&array); CHKERRQ(ierr);
2617 B = p2A.ReleaseMat(
false);
2619 ierr = MatHeaderReplace(A,&B); CHKERRQ(ierr);
2624 PetscFunctionReturn(0);
2628 #define __FUNCT__ "__mfem_snes_function"
2629 static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f,
void *ctx)
2631 PetscFunctionBeginUser;
2637 ierr = PetscObjectStateIncrease((PetscObject)f); CHKERRQ(ierr);
2638 PetscFunctionReturn(0);
2642 #define __FUNCT__ "__mfem_mat_shell_apply"
2643 static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
2646 PetscErrorCode
ierr;
2648 PetscFunctionBeginUser;
2649 ierr = MatShellGetContext(A,(
void **)&ctx); PCHKERRQ(A,ierr);
2652 ctx->op->Mult(xx,yy);
2654 ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
2655 PetscFunctionReturn(0);
2659 #define __FUNCT__ "__mfem_mat_shell_apply_transpose"
2660 static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
2663 PetscErrorCode
ierr;
2665 PetscFunctionBeginUser;
2666 ierr = MatShellGetContext(A,(
void **)&ctx); PCHKERRQ(A,ierr);
2669 ctx->op->MultTranspose(xx,yy);
2671 ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
2672 PetscFunctionReturn(0);
2676 #define __FUNCT__ "__mfem_mat_shell_destroy"
2677 static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
2680 PetscErrorCode
ierr;
2682 PetscFunctionBeginUser;
2683 ierr = MatShellGetContext(A,(
void **)&ctx); PCHKERRQ(A,ierr);
2685 PetscFunctionReturn(0);
2689 #define __FUNCT__ "__mfem_pc_shell_apply"
2690 static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
2692 solver_shell_ctx *ctx;
2693 PetscErrorCode
ierr;
2695 PetscFunctionBeginUser;
2696 ierr = PCShellGetContext(pc,(
void **)&ctx); PCHKERRQ(pc,ierr);
2699 ctx->op->Mult(xx,yy);
2701 ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
2702 PetscFunctionReturn(0);
2706 #define __FUNCT__ "__mfem_pc_shell_apply_transpose"
2707 static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
2709 solver_shell_ctx *ctx;
2710 PetscErrorCode
ierr;
2712 PetscFunctionBeginUser;
2713 ierr = PCShellGetContext(pc,(
void **)&ctx); PCHKERRQ(pc,ierr);
2716 ctx->op->MultTranspose(xx,yy);
2718 ierr = PetscObjectStateIncrease((PetscObject)y); CHKERRQ(ierr);
2719 PetscFunctionReturn(0);
2723 #define __FUNCT__ "__mfem_pc_shell_setup"
2724 static PetscErrorCode __mfem_pc_shell_setup(PC pc)
2726 PetscFunctionBeginUser;
2727 PetscFunctionReturn(0);
2731 #define __FUNCT__ "__mfem_pc_shell_destroy"
2732 static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
2734 solver_shell_ctx *ctx;
2735 PetscErrorCode
ierr;
2737 PetscFunctionBeginUser;
2738 ierr = PCShellGetContext(pc,(
void **)&ctx); PCHKERRQ(pc,ierr);
2740 PetscFunctionReturn(0);
2744 #define __FUNCT__ "__mfem_array_container_destroy"
2745 static PetscErrorCode __mfem_array_container_destroy(
void *ptr)
2747 PetscErrorCode
ierr;
2749 PetscFunctionBeginUser;
2750 ierr = PetscFree(ptr); CHKERRQ(ierr);
2751 PetscFunctionReturn(0);
2755 #define __FUNCT__ "__mfem_matarray_container_destroy"
2756 static PetscErrorCode __mfem_matarray_container_destroy(
void *ptr)
2759 PetscErrorCode
ierr;
2761 PetscFunctionBeginUser;
2762 for (
int i=0; i<a->
Size(); i++)
2765 MPI_Comm comm = PetscObjectComm((PetscObject)M);
2766 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
2769 PetscFunctionReturn(0);
2775 #define __FUNCT__ "Convert_Array_IS"
2776 static PetscErrorCode Convert_Array_IS(MPI_Comm comm,
bool islist,
2778 PetscInt st, IS* is)
2780 PetscInt n = list->
Size(),*idxs;
2781 const int *data = list->
GetData();
2782 PetscErrorCode
ierr;
2784 PetscFunctionBeginUser;
2785 ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
2788 for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
2793 for (PetscInt i=0; i<n; i++)
2795 if (data[i]) { idxs[cum++] = i+st; }
2799 ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
2801 PetscFunctionReturn(0);
2808 #define __FUNCT__ "Convert_Vmarks_IS"
2809 static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
2812 PetscInt st, IS* is)
2817 PetscErrorCode
ierr;
2819 PetscFunctionBeginUser;
2820 for (
int i = 0; i < pl2l.
Size(); i++)
2822 PetscInt m,n,*ii,*jj;
2824 ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(
const PetscInt**)&ii,
2825 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
2826 MFEM_VERIFY(done,
"Unable to perform MatGetRowIJ on " << i <<
" l2l matrix");
2827 ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
2831 for (
int i = 0; i < l2l.
Size(); i++) { nl += l2l[i]->Width(); }
2833 const int* vdata = mark->
GetData();
2834 int* sdata = sub_dof_marker.
GetData();
2835 int cumh = 0, cumw = 0;
2836 for (
int i = 0; i < l2l.
Size(); i++)
2841 l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
2842 cumh += l2l[i]->Height();
2843 cumw += l2l[i]->Width();
2845 ierr = Convert_Array_IS(comm,
false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
2846 for (
int i = 0; i < pl2l.
Size(); i++)
2848 PetscInt m = l2l[i]->Height();
2849 PetscInt *ii = l2l[i]->GetI(),*jj = l2l[i]->GetJ();
2851 ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
2852 (
const PetscInt**)&ii,
2853 (
const PetscInt**)&jj,&done); CHKERRQ(ierr);
2854 MFEM_VERIFY(done,
"Unable to perform MatRestoreRowIJ on "
2855 << i <<
" l2l matrix");
2858 PetscFunctionReturn(0);
2861 #if !defined(PETSC_HAVE_HYPRE)
2863 #include "_hypre_parcsr_mv.h"
2865 #define __FUNCT__ "MatConvert_hypreParCSR_AIJ"
2866 static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
2868 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
2869 hypre_CSRMatrix *hdiag,*hoffd;
2870 PetscScalar *da,*oa,*aptr;
2871 PetscInt *dii,*djj,*oii,*ojj,*iptr;
2872 PetscInt i,dnnz,onnz,m,n;
2874 PetscErrorCode
ierr;
2876 PetscFunctionBeginUser;
2877 hdiag = hypre_ParCSRMatrixDiag(hA);
2878 hoffd = hypre_ParCSRMatrixOffd(hA);
2879 m = hypre_CSRMatrixNumRows(hdiag);
2880 n = hypre_CSRMatrixNumCols(hdiag);
2881 dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
2882 onnz = hypre_CSRMatrixNumNonzeros(hoffd);
2883 ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
2884 ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
2885 ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
2886 ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*
sizeof(PetscInt));
2888 ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*
sizeof(PetscInt));
2890 ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*
sizeof(PetscScalar));
2896 PetscInt nc = dii[i+1]-dii[i];
2897 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
2901 ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
2904 PetscInt *offdj,*coffd;
2906 ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
2907 ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
2908 ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
2909 ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*
sizeof(PetscInt));
2911 offdj = hypre_CSRMatrixJ(hoffd);
2912 coffd = hypre_ParCSRMatrixColMapOffd(hA);
2913 for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
2914 ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*
sizeof(PetscScalar));
2920 PetscInt nc = oii[i+1]-oii[i];
2921 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
2925 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
2926 djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
2932 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
2938 void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
2939 const char *names[6] = {
"_mfem_csr_dii",
2950 ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
2951 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
2952 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
2954 ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
2956 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
2958 PetscFunctionReturn(0);
2962 #define __FUNCT__ "MatConvert_hypreParCSR_IS"
2963 static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
2966 ISLocalToGlobalMapping rl2g,cl2g;
2968 hypre_CSRMatrix *hdiag,*hoffd;
2969 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
2971 const char *names[2] = {
"_mfem_csr_aux",
2974 PetscScalar *hdd,*hod,*aa,*data;
2975 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
2976 PetscInt *aux,*ii,*jj;
2977 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
2978 PetscErrorCode
ierr;
2980 PetscFunctionBeginUser;
2982 str = hypre_ParCSRMatrixFirstRowIndex(hA);
2983 stc = hypre_ParCSRMatrixFirstColDiag(hA);
2984 hdiag = hypre_ParCSRMatrixDiag(hA);
2985 hoffd = hypre_ParCSRMatrixOffd(hA);
2986 dr = hypre_CSRMatrixNumRows(hdiag);
2987 dc = hypre_CSRMatrixNumCols(hdiag);
2988 nnz = hypre_CSRMatrixNumNonzeros(hdiag);
2989 hdi = hypre_CSRMatrixI(hdiag);
2990 hdj = hypre_CSRMatrixJ(hdiag);
2991 hdd = hypre_CSRMatrixData(hdiag);
2992 oc = hypre_CSRMatrixNumCols(hoffd);
2993 nnz += hypre_CSRMatrixNumNonzeros(hoffd);
2994 hoi = hypre_CSRMatrixI(hoffd);
2995 hoj = hypre_CSRMatrixJ(hoffd);
2996 hod = hypre_CSRMatrixData(hoffd);
2999 ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
3000 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
3001 ierr = ISDestroy(&is); CHKERRQ(ierr);
3002 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
3003 ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
3004 for (i=0; i<dc; i++) { aux[i] = i+stc; }
3005 for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
3006 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
3007 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
3008 ierr = ISDestroy(&is); CHKERRQ(ierr);
3011 ierr = MatCreate(comm,pA); CHKERRQ(ierr);
3012 ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
3013 ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
3014 ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
3015 ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
3016 ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
3019 ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
3020 ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
3024 *ii = *(hdi++) + *(hoi++);
3025 for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
3027 PetscScalar *aold = aa;
3028 PetscInt *jold = jj,nc = jd+jo;
3029 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
3030 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
3031 *(++ii) = *(hdi++) + *(hoi++);
3032 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
3034 for (; cum<dr; cum++) { *(++ii) = nnz; }
3038 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
3046 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
3047 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
3048 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
3050 ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
3052 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
3054 ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
3055 ierr = MatDestroy(&lA); CHKERRQ(ierr);
3056 ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3057 ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
3058 PetscFunctionReturn(0);
3062 #endif // MFEM_USE_PETSC
3063 #endif // MFEM_USE_MPI
int Size() const
Logical size of the array.
virtual void SetPreconditioner(Solver &precond)
void SetPrintLevel(int plev)
virtual int NumNonZeroElems() const
Returns the number of the nonzero elements in the matrix.
PetscParVector * B
Right-hand side and solution vector.
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
Vector()
Default constructor for Vector. Sets size = 0 and data = NULL.
MPI_Comm GetComm() const
MPI communicator.
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
bool isHomogeneous() const
True if type is HOMOGENEOUS.
virtual void SetOperator(const Operator &op)
Specification of the nonlinear operator.
Abstract class for PETSc's preconditioners.
PetscParMatrix & operator+=(const PetscParMatrix &B)
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col)
Creates vector with given global size and partitioning of the columns.
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
bool clcustom
Boolean to handle SetFromOptions calls.
Wrapper for PETSc's matrix class.
const Array< int > * nat_dof
Abstract class for PETSc's linear solvers.
Vec x
The actual PETSc object.
Abstract class for PETSc's solvers.
virtual void Run(Vector &x, double &t, double &dt, double t_final)
Perform time integration from time t [in] to time tf [in].
ParFiniteElementSpace * fespace
Base abstract class for time dependent operators.
PetscInt GetNumCols() const
Returns the local number of columns.
virtual Operator & GetGradient(const Vector &x) const
Evaluate the gradient operator at the point x. The default behavior in class Operator is to generate ...
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
void Mult(double a, const Vector &x, double b, Vector &y) const
Matvec: y = a A x + b y.
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
T * GetData()
Returns the data.
virtual void SetTime(const double _t)
Set the current time.
int Size() const
Returns the size of the vector.
virtual void Step(Vector &x, double &t, double &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in]...
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
void Init()
Initialize with defaults. Does not initialize inherited members.
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
PetscClassId cid
The class id of the actual PETSc object.
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
Abstract parallel finite element space.
void SetRelTol(double tol)
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string())
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
virtual Operator & GetExplicitGradient(const Vector &x) const
Return an Operator representing dG/dx at the given point x and the currently set time.
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
void SetMaxIter(int max_iter)
void SortColumnIndices()
Sort the column indices corresponding to each row.
Wrapper for PETSc's vector class.
void Customize(bool customize=true) const
Customize object with options set.
PetscInt M() const
Returns the global number of rows.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
virtual ~PetscLinearSolver()
HYPRE_Int * GetTrueDofOffsets()
PetscInt GetNumRows() const
Returns the local number of rows.
void Randomize(PetscInt seed)
Set random values.
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
void EliminateBC(HypreParMatrix &A, HypreParMatrix &Ae, const Array< int > &ess_dof_list, const Vector &X, Vector &B)
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray()...
virtual Operator & GetImplicitGradient(const Vector &x, const Vector &k, double shift) const
Return an Operator representing (dF/dk shift + dF/dx) at the given x, k, and the currently set time...
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
Abstract class for monitoring PETSc's solvers.
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
int to_int(const std::string &str)
Auxiliary class for BDDC customization.
virtual ~PetscPreconditioner()
MPI_Comm GetComm() const
Get the associated MPI communicator.
void LoseData()
NULL-ifies the data.
HypreParMatrix * RAP(HypreParMatrix *A, HypreParMatrix *P)
Returns the matrix P^t * A * P.
PetscParMatrix & operator=(const PetscParMatrix &B)
virtual ~PetscParVector()
Calls PETSc's destroy function.
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
void Assign(const T *)
Copy data from a pointer. Size() elements are copied.
double * GetData() const
Return element data, i.e. array A.
int * GetI() const
Return the array I.
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
Wrapper for hypre's parallel vector class.
Type
Enumeration defining IDs for some classes derived from Operator.
PetscInt NNZ() const
Returns the number of nonzeros.
virtual void ExplicitMult(const Vector &x, Vector &y) const
Perform the action of the explicit part of the operator, G: y = G(x, t) where t is the current time...
virtual void Mult(const Vector &b, Vector &x) const
Application of the preconditioner.
void SetSize(int nsize)
Change logical size of the array, keep existing entries.
Mat A
The actual PETSc object.
PetscInt N() const
Returns the global number of columns.
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
void MakeWrapper(MPI_Comm comm, const Operator *op, Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc's MATSHELL object and returns the Mat in B...
bool operatorset
Boolean to handle SetOperator calls.
PetscParVector * X
Auxiliary vectors for typecasting.
void MultTranspose(double a, const Vector &x, double b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
virtual void Init(TimeDependentOperator &f_)
Associate a TimeDependentOperator with the ODE solver.
int height
Dimension of the output / number of rows in the matrix.
virtual ~PetscNonlinearSolver()
virtual ~PetscODESolver()
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object...
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
const Array< int > * ess_dof
void SetAbsTol(double tol)
void EliminateRowsCols(const Array< int > &rows_cols, const HypreParVector &X, HypreParVector &B)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values i...
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
PetscParVector & operator=(PetscScalar d)
Set constant values.
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
int NumRowBlocks() const
Return the number of row blocks.
const FiniteElementCollection * FEColl() const
Identity Operator I: x -> x.
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array...
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Vector * GlobalVector() const
Returns the global vector in each processor.
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
PetscSolverMonitor * monitor_ctx
Monitor context.
Wrapper for hypre's ParCSR matrix class.
A class to handle Block systems in a matrix-free implementation.
int * GetJ() const
Return the array J.
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
void operator*=(double s)
Scale all entries by s: A_scaled = s*A.
int NumColBlocks() const
Return the number of column blocks.
void ConvertOperator(MPI_Comm comm, const Operator &op, Mat *B, bool assembled)
Convert an mfem::Operator into a Mat B; op can be destroyed.
int width
Dimension of the input / number of columns in the matrix.
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
virtual void ImplicitMult(const Vector &x, const Vector &k, Vector &y) const
Perform the action of the implicit part of the operator, F: y = F(x, k, t) where t is the current tim...