petsc.cpp (MFEM v4.7.0)
1// Copyright (c) 2010-2024, Lawrence Livermore National Security, LLC. Produced
2// at the Lawrence Livermore National Laboratory. All Rights reserved. See files
3// LICENSE and NOTICE for details. LLNL-CODE-806117.
4//
5// This file is part of the MFEM library. For more information and source code
6// availability visit https://mfem.org.
7//
8// MFEM is free software; you can redistribute it and/or modify it under the
9// terms of the BSD-3 license. We welcome feedback and contributions, see file
10// CONTRIBUTING.md for details.
11
12// Author: Stefano Zampini <stefano.zampini@gmail.com>
13
14#include "../config/config.hpp"
15
16#ifdef MFEM_USE_MPI
17#ifdef MFEM_USE_PETSC
18
19#include "linalg.hpp"
20#include "../fem/fem.hpp"
21
22#include "petsc.h"
23#if defined(PETSC_HAVE_HYPRE)
24#include "petscmathypre.h"
25#endif
26
27// Backward compatibility
28#if PETSC_VERSION_LT(3,11,0)
29#define VecLockReadPush VecLockPush
30#define VecLockReadPop VecLockPop
31#endif
32#if PETSC_VERSION_LT(3,12,0)
33#define VecGetArrayWrite VecGetArray
34#define VecRestoreArrayWrite VecRestoreArray
35#define MatComputeOperator(A,B,C) MatComputeExplicitOperator(A,C)
36#define MatComputeOperatorTranspose(A,B,C) MatComputeExplicitOperatorTranspose(A,C)
37#endif
38#if PETSC_VERSION_LT(3,19,0)
39#define PETSC_SUCCESS 0
40#endif
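// With these shims the rest of the file can target the newest PETSc API
// unconditionally; e.g., a call such as
//   ierr = VecGetArrayWrite(x,&v);
// compiles against pre-3.12 PETSc too, where the macro expands it to
// VecGetArray, and PETSC_SUCCESS maps to 0 before 3.19.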
41
42#include <fstream>
43#include <iomanip>
44#include <cmath>
45#include <cstdlib>
46
47// Note: there are additional #include statements below.
48
49#include "petscinternals.hpp"
50
51// Callback functions: these functions will be called by PETSc
52static PetscErrorCode __mfem_ts_monitor(TS,PetscInt,PetscReal,Vec,void*);
53static PetscErrorCode __mfem_ts_rhsfunction(TS,PetscReal,Vec,Vec,void*);
54static PetscErrorCode __mfem_ts_rhsjacobian(TS,PetscReal,Vec,Mat,Mat,
55 void*);
56static PetscErrorCode __mfem_ts_ifunction(TS,PetscReal,Vec,Vec,Vec,void*);
57static PetscErrorCode __mfem_ts_ijacobian(TS,PetscReal,Vec,Vec,
58 PetscReal,Mat,
59 Mat,void*);
60static PetscErrorCode __mfem_ts_computesplits(TS,PetscReal,Vec,Vec,
61 Mat,Mat,Mat,Mat);
62static PetscErrorCode __mfem_snes_monitor(SNES,PetscInt,PetscReal,void*);
63static PetscErrorCode __mfem_snes_jacobian(SNES,Vec,Mat,Mat,void*);
64static PetscErrorCode __mfem_snes_function(SNES,Vec,Vec,void*);
65static PetscErrorCode __mfem_snes_objective(SNES,Vec,PetscReal*,void*);
66static PetscErrorCode __mfem_snes_update(SNES,PetscInt);
67static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch,Vec,Vec,Vec,
68 PetscBool*,PetscBool*,void*);
69static PetscErrorCode __mfem_ksp_monitor(KSP,PetscInt,PetscReal,void*);
70static PetscErrorCode __mfem_pc_shell_apply(PC,Vec,Vec);
71static PetscErrorCode __mfem_pc_shell_apply_transpose(PC,Vec,Vec);
72static PetscErrorCode __mfem_pc_shell_setup(PC);
73static PetscErrorCode __mfem_pc_shell_destroy(PC);
74static PetscErrorCode __mfem_pc_shell_view(PC,PetscViewer);
75static PetscErrorCode __mfem_mat_shell_apply(Mat,Vec,Vec);
76static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat,Vec,Vec);
77static PetscErrorCode __mfem_mat_shell_destroy(Mat);
78static PetscErrorCode __mfem_mat_shell_copy(Mat,Mat,MatStructure);
79static PetscErrorCode __mfem_array_container_destroy(void*);
80static PetscErrorCode __mfem_matarray_container_destroy(void*);
81static PetscErrorCode __mfem_monitor_ctx_destroy(void**);
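// The void* argument of each callback above carries one of the
// __mfem_*_ctx structs defined further below; the callback casts it back
// and forwards the call to the wrapped MFEM object (operator, solver,
// preconditioner, or monitor).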
82
83// auxiliary functions
84static PetscErrorCode Convert_Array_IS(MPI_Comm,bool,const mfem::Array<int>*,
85 PetscInt,IS*);
86static PetscErrorCode Convert_Vmarks_IS(MPI_Comm,mfem::Array<Mat>&,
87 const mfem::Array<int>*,PetscInt,IS*);
88static PetscErrorCode MakeShellPC(PC,mfem::Solver&,bool);
89static PetscErrorCode MakeShellPCWithFactory(PC,
90 mfem::PetscPreconditionerFactory*);
91
92// Equivalent functions are present in the PETSc source code if PETSc has
93// been compiled with hypre support. We provide them here in case
94// PETSC_HAVE_HYPRE is not defined.
95#if !defined(PETSC_HAVE_HYPRE)
96static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix*,Mat*);
97static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix*,Mat*);
98#endif
99
100#if PETSC_VERSION_GE(3,15,0) && defined(PETSC_HAVE_DEVICE)
101#if defined(MFEM_USE_CUDA) && defined(PETSC_HAVE_CUDA)
102#define _USE_DEVICE
103#define PETSC_VECDEVICE VECCUDA
104#define PETSC_MATAIJDEVICE MATAIJCUSPARSE
105#define VecDeviceGetArrayRead VecCUDAGetArrayRead
106#define VecDeviceGetArrayWrite VecCUDAGetArrayWrite
107#define VecDeviceGetArray VecCUDAGetArray
108#define VecDeviceRestoreArrayRead VecCUDARestoreArrayRead
109#define VecDeviceRestoreArrayWrite VecCUDARestoreArrayWrite
110#define VecDeviceRestoreArray VecCUDARestoreArray
111#define VecDevicePlaceArray VecCUDAPlaceArray
112#define VecDeviceResetArray VecCUDAResetArray
113#elif defined(MFEM_USE_HIP) && defined(PETSC_HAVE_HIP)
114#define _USE_DEVICE
115#define PETSC_VECDEVICE VECHIP
116#define PETSC_MATAIJDEVICE MATAIJHIPSPARSE
117#define VecDeviceGetArrayRead VecHIPGetArrayRead
118#define VecDeviceGetArrayWrite VecHIPGetArrayWrite
119#define VecDeviceGetArray VecHIPGetArray
120#define VecDeviceRestoreArrayRead VecHIPRestoreArrayRead
121#define VecDeviceRestoreArrayWrite VecHIPRestoreArrayWrite
122#define VecDeviceRestoreArray VecHIPRestoreArray
123#define VecDevicePlaceArray VecHIPPlaceArray
124#define VecDeviceResetArray VecHIPResetArray
125#else
126#define VecDeviceGetArrayRead VecGetArrayRead
127#define VecDeviceGetArrayWrite VecGetArrayWrite
128#define VecDeviceGetArray VecGetArray
129#define VecDeviceRestoreArrayRead VecRestoreArrayRead
130#define VecDeviceRestoreArrayWrite VecRestoreArrayWrite
131#define VecDeviceRestoreArray VecRestoreArray
132#define VecDevicePlaceArray VecPlaceArray
133#define VecDeviceResetArray VecResetArray
134#endif
135#endif
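// With the aliases above, the rest of the file can call, e.g.,
// VecDeviceGetArrayRead() and transparently get the CUDA, HIP, or plain
// host implementation, depending on how MFEM and PETSc were configured.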
136
137#if defined(PETSC_HAVE_DEVICE)
138static PetscErrorCode __mfem_VecSetOffloadMask(Vec,PetscOffloadMask);
139#endif
140static PetscErrorCode __mfem_VecBoundToCPU(Vec,PetscBool*);
141static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject);
142static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm,PetscInt,PetscInt,Mat*);
143
144// structs used by PETSc code
145typedef struct
146{
147 mfem::Solver *op;
148 mfem::PetscPreconditionerFactory *factory;
149 bool ownsop;
150 unsigned long int numprec;
151} __mfem_pc_shell_ctx;
152
153typedef struct
154{
155 mfem::Operator *op; // The nonlinear operator
156 mfem::PetscBCHandler *bchandler; // Handling of essential bc
157 mfem::Vector *work; // Work vector
158 mfem::Operator::Type jacType; // OperatorType for the Jacobian
159 // Objective for line search
160 void (*objective)(mfem::Operator *op, const mfem::Vector&, mfem::real_t*);
161 // PostCheck function (to be called after successful line search)
162 void (*postcheck)(mfem::Operator *op, const mfem::Vector&, mfem::Vector&,
163 mfem::Vector&, bool&, bool&);
164 // General purpose update function (to be called at the beginning of
165 // each nonlinear step)
166 void (*update)(mfem::Operator *op, int,
167 const mfem::Vector&, const mfem::Vector&,
168 const mfem::Vector&, const mfem::Vector&);
169} __mfem_snes_ctx;
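// A __mfem_snes_ctx is what the SNES callbacks above receive as user
// context: op is the nonlinear residual operator, bchandler imposes the
// essential boundary conditions, and the optional objective/postcheck/
// update hooks customize the line search and the nonlinear iteration.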
170
171typedef struct
172{
173 mfem::TimeDependentOperator *op; // The time-dependent operator
174 mfem::PetscBCHandler *bchandler; // Handling of essential bc
175 mfem::Vector *work; // Work vector
176 mfem::Vector *work2; // Second work vector
177 mfem::Operator::Type jacType; // OperatorType for the Jacobian
178 enum mfem::PetscODESolver::Type type;
179 PetscReal cached_shift;
180 PetscObjectState cached_ijacstate;
181 PetscObjectState cached_rhsjacstate;
182 PetscObjectState cached_splits_xstate;
183 PetscObjectState cached_splits_xdotstate;
184} __mfem_ts_ctx;
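// The cached_* PetscObjectState fields record the state of the vectors a
// Jacobian (or its split components) was last assembled from, so the TS
// callbacks can skip reassembly when nothing has changed; cached_shift
// plays the same role for the shift parameter of the IJacobian.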
185
186typedef struct
187{
188 mfem::PetscSolver *solver; // The solver object
189 mfem::PetscSolverMonitor *monitor; // The user-defined monitor class
190} __mfem_monitor_ctx;
191
192// use global scope ierr and mpiierr to check PETSc/MPI errors inside mfem calls
193static PetscErrorCode ierr;
194static PetscMPIInt mpiierr;
195
196using namespace std;
197
198namespace mfem
199{
200
201void MFEMInitializePetsc()
202{
203 MFEMInitializePetsc(NULL,NULL,NULL,NULL);
204}
205
206void MFEMInitializePetsc(int *argc,char*** argv)
207{
208 MFEMInitializePetsc(argc,argv,NULL,NULL);
209}
210
211void MFEMInitializePetsc(int *argc,char ***argv,const char rc_file[],
212 const char help[])
213{
214 // Tell PETSc to use the same CUDA or HIP device as MFEM:
215 if (mfem::Device::Allows(mfem::Backend::CUDA_MASK))
216 {
217#if PETSC_VERSION_LT(3,17,0)
218 const char *opts = "-cuda_device";
219#else
220 const char *opts = "-device_select_cuda";
221#endif
222 ierr = PetscOptionsSetValue(NULL,opts,
223 to_string(mfem::Device::GetId()).c_str());
224 MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
225 }
226 if (mfem::Device::Allows(mfem::Backend::HIP_MASK))
227 {
228#if PETSC_VERSION_LT(3,17,0)
229 const char *opts = "-hip_device";
230#else
231 const char *opts = "-device_select_hip";
232#endif
233 ierr = PetscOptionsSetValue(NULL,opts,
234 to_string(mfem::Device::GetId()).c_str());
235 MFEM_VERIFY(!ierr,"Unable to set initial option value to PETSc");
236 }
237 ierr = PetscInitialize(argc,argv,rc_file,help);
238 MFEM_VERIFY(!ierr,"Unable to initialize PETSc");
239}
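// Minimal usage sketch (hypothetical driver; MPI must already be
// initialized, e.g. with mfem::Mpi::Init):
//
//   mfem::MFEMInitializePetsc(&argc,&argv);
//   // ... create PetscParVector/PetscParMatrix objects and solve ...
//   mfem::MFEMFinalizePetsc();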
240
241void MFEMFinalizePetsc()
242{
243 ierr = PetscFinalize();
244 MFEM_VERIFY(!ierr,"Unable to finalize PETSc");
245}
246
247const mfem::real_t* PetscMemory::GetHostPointer() const
248{
249 int oflags = flags;
250 SetHostValid();
251 const mfem::real_t *v = mfem::Read(*this,Capacity(),false);
252 flags = oflags;
253 return v;
254}
255
256const mfem::real_t* PetscMemory::GetDevicePointer() const
257{
258 int oflags = flags;
259 SetDeviceValid();
260 const mfem::real_t *v = mfem::Read(*this,Capacity(),true);
261 flags = oflags;
262 return v;
263}
264
265// PetscParVector methods
266
267void PetscParVector::SetDataAndSize_()
268{
269 PetscScalar *array;
270 PetscInt n;
271 PetscBool isnest;
272
273 MFEM_VERIFY(x,"Missing Vec");
274 ierr = VecSetUp(x); PCHKERRQ(x,ierr);
275 ierr = PetscObjectTypeCompare((PetscObject)x,VECNEST,&isnest); PCHKERRQ(x,ierr);
276 MFEM_VERIFY(!isnest,"Not for type nest");
277 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
278 MFEM_VERIFY(n >= 0,"Invalid local size");
279 size = n;
280#if defined(PETSC_HAVE_DEVICE)
281 PetscOffloadMask omask;
282 PetscBool isdevice;
283
284 ierr = VecGetOffloadMask(x,&omask); PCHKERRQ(x,ierr);
285 if (omask != PETSC_OFFLOAD_BOTH)
286 {
287 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
288 }
289#endif
290 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
291#if defined(PETSC_HAVE_DEVICE)
292 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
293 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
294 ""); PCHKERRQ(x,ierr);
295 if (isdevice)
296 {
297 if (omask != PETSC_OFFLOAD_BOTH)
298 {
299 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
300 }
301 PetscScalar *darray;
302 ierr = VecDeviceGetArrayRead(x,(const PetscScalar**)&darray);
303 PCHKERRQ(x,ierr);
304 pdata.Wrap(array,darray,size,MemoryType::HOST,false);
305 ierr = VecDeviceRestoreArrayRead(x,(const PetscScalar**)&darray);
306 PCHKERRQ(x,ierr);
307 }
308 else
309#endif
310 {
311 pdata.Wrap(array,size,MemoryType::HOST,false);
312 }
313 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); PCHKERRQ(x,ierr);
314
315#if defined(PETSC_HAVE_DEVICE)
316 if (omask == PETSC_OFFLOAD_UNALLOCATED && isdevice) { omask = PETSC_OFFLOAD_CPU; }
317 ierr = __mfem_VecSetOffloadMask(x,omask); PCHKERRQ(x,ierr);
318#endif
319 data.MakeAlias(pdata,0,size);
320 SetFlagsFromMask_();
321}
322
323void PetscParVector::SetFlagsFromMask_() const
324{
325 MFEM_VERIFY(x,"Missing Vec");
326#if defined(_USE_DEVICE)
327 PetscOffloadMask mask;
328 PetscBool isdevice;
329 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
330 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
331 ""); PCHKERRQ(x,ierr);
332 ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
333 if (isdevice)
334 {
335 switch (mask)
336 {
337 case PETSC_OFFLOAD_CPU:
338 pdata.SetHostValid();
339 pdata.SetDeviceInvalid();
340 break;
341 case PETSC_OFFLOAD_GPU:
342 pdata.SetHostInvalid();
343 pdata.SetDeviceValid();
344 break;
345 case PETSC_OFFLOAD_BOTH:
346 pdata.SetHostValid();
347 pdata.SetDeviceValid();
348 break;
349 default:
350 MFEM_ABORT("Unhandled case " << mask);
351 }
352 }
353#endif
354 data.Sync(pdata);
355}
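// SetFlagsFromMask_ mirrors PETSc's offload mask into the MFEM Memory
// validity flags of pdata/data, so data that PETSc considers up to date
// on the device is also seen as device-valid by MFEM kernels;
// UpdateVecFromFlags below performs the synchronization in the opposite
// direction.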
356
357void PetscParVector::UpdateVecFromFlags()
358{
359 MFEM_VERIFY(x,"Missing Vec");
360 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
361#if defined(_USE_DEVICE)
362 PetscBool isdevice;
363 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
364 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
365 ""); PCHKERRQ(x,ierr);
366 if (isdevice)
367 {
368 bool dv = pdata.DeviceIsValid();
369 bool hv = pdata.HostIsValid();
370 PetscOffloadMask mask;
371 if (dv && hv) { mask = PETSC_OFFLOAD_BOTH; }
372 else if (dv) { mask = PETSC_OFFLOAD_GPU; }
373 else { mask = PETSC_OFFLOAD_CPU; }
374 ierr = __mfem_VecSetOffloadMask(x,mask); PCHKERRQ(x,ierr);
375 }
376 else
377#endif
378 {
379 /* Just make sure we have an up-to-date copy on the CPU for PETSc */
380 PetscScalar *v;
381 ierr = VecGetArrayWrite(x,&v); PCHKERRQ(x,ierr);
382 pdata.CopyToHost(v,size);
383 ierr = VecRestoreArrayWrite(x,&v); PCHKERRQ(x,ierr);
384 }
385}
386
387void PetscParVector::SetVecType_()
388{
389 VecType vectype;
390 MFEM_VERIFY(x,"Missing Vec");
391 ierr = VecGetType(x,&vectype); PCHKERRQ(x,ierr);
392#if defined(_USE_DEVICE)
393 switch (mfem::Device::GetDeviceMemoryType())
394 {
395 case MemoryType::DEVICE:
396 case MemoryType::MANAGED:
397 ierr = VecSetType(x,PETSC_VECDEVICE); PCHKERRQ(x,ierr);
398 break;
399 default:
400 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
401 break;
402 }
403#else
404 if (!vectype)
405 {
406 ierr = VecSetType(x,VECSTANDARD); PCHKERRQ(x,ierr);
407 }
408#endif
409}
410
411const mfem::real_t* PetscParVector::Read(bool on_dev) const
412{
413 const PetscScalar *dummy;
414 MFEM_VERIFY(x,"Missing Vec");
415#if defined(PETSC_HAVE_DEVICE)
416 PetscBool isdevice;
417 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
418 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
419 ""); PCHKERRQ(x,ierr);
420 if (on_dev && isdevice)
421 {
422 ierr = VecDeviceGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
423 ierr = VecDeviceRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
424 }
425 else
426#endif
427 {
428 ierr = VecGetArrayRead(x,&dummy); PCHKERRQ(x,ierr);
429 ierr = VecRestoreArrayRead(x,&dummy); PCHKERRQ(x,ierr);
430 }
431 SetFlagsFromMask_();
432 return mfem::Read(pdata, size, on_dev);
433}
434
435const mfem::real_t* PetscParVector::HostRead() const
436{
437 return Read(false);
438}
439
440mfem::real_t* PetscParVector::Write(bool on_dev)
441{
442 PetscScalar *dummy;
443 MFEM_VERIFY(x,"Missing Vec");
444#if defined(PETSC_HAVE_DEVICE)
445 PetscBool isdevice;
446 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
447 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
448 ""); PCHKERRQ(x,ierr);
449 if (on_dev && isdevice)
450 {
451 ierr = VecDeviceGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
452 ierr = VecDeviceRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
453 }
454 else
455#endif
456 {
457 ierr = VecGetArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
458 ierr = VecRestoreArrayWrite(x,&dummy); PCHKERRQ(x,ierr);
459 }
460 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
461 SetFlagsFromMask_();
462 return mfem::Write(pdata, size, on_dev);
463}
464
465mfem::real_t* PetscParVector::HostWrite()
466{
467 return Write(false);
468}
469
470mfem::real_t* PetscParVector::ReadWrite(bool on_dev)
471{
472 PetscScalar *dummy;
473 MFEM_VERIFY(x,"Missing Vec");
474#if defined(PETSC_HAVE_DEVICE)
475 PetscBool isdevice;
476 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
477 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
478 ""); PCHKERRQ(x,ierr);
479 if (on_dev && isdevice)
480 {
481 ierr = VecDeviceGetArray(x,&dummy); PCHKERRQ(x,ierr);
482 ierr = VecDeviceRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
483 }
484 else
485#endif
486 {
487 ierr = VecGetArray(x,&dummy); PCHKERRQ(x,ierr);
488 ierr = VecRestoreArray(x,&dummy); PCHKERRQ(x,ierr);
489 }
490 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
491 SetFlagsFromMask_();
492 return mfem::ReadWrite(pdata, size, on_dev);
493}
494
495mfem::real_t* PetscParVector::HostReadWrite()
496{
497 return ReadWrite(false);
498}
499
500void PetscParVector::UseDevice(bool dev) const
501{
502 MFEM_VERIFY(x,"Missing Vec");
503#if defined(PETSC_HAVE_DEVICE)
504 ierr = VecBindToCPU(x,!dev ? PETSC_TRUE : PETSC_FALSE); PCHKERRQ(x,ierr);
505 SetFlagsFromMask_();
506#endif
507}
508
509bool PetscParVector::UseDevice() const
510{
511 PetscBool flg;
512 MFEM_VERIFY(x,"Missing Vec");
513 ierr = __mfem_VecBoundToCPU(x,&flg); PCHKERRQ(x,ierr);
514 return flg ? false : true;
515}
516
517PetscInt PetscParVector::GlobalSize() const
518{
519 PetscInt N;
520 ierr = VecGetSize(x,&N); PCHKERRQ(x,ierr);
521 return N;
522}
523
524void PetscParVector::SetBlockSize(PetscInt bs)
525{
526 ierr = VecSetBlockSize(x,bs); PCHKERRQ(x,ierr);
527}
528
529PetscParVector::PetscParVector(MPI_Comm comm, const Vector &x_,
530 bool copy) : Vector()
531{
532 PetscBool isdevice;
533
534 int n = x_.Size();
535 ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
536 ierr = VecSetSizes(x,n,PETSC_DECIDE); PCHKERRQ(x,ierr);
537 SetVecType_();
538 SetDataAndSize_();
539 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
540 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
541 ""); PCHKERRQ(x,ierr);
542 if (copy)
543 {
544 /* we use PETSc accessors to flag valid memory location to PETSc */
545 PetscErrorCode (*rest)(Vec,PetscScalar**);
546 PetscScalar *array;
547#if defined(PETSC_HAVE_DEVICE)
548 if (isdevice && x_.UseDevice())
549 {
550 UseDevice(true);
551 ierr = VecDeviceGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
552 rest = VecDeviceRestoreArrayWrite;
553 }
554 else
555#endif
556 {
557 UseDevice(false);
558 ierr = VecGetArrayWrite(x,&array); PCHKERRQ(x,ierr);
559 rest = VecRestoreArrayWrite;
560 }
561 pdata.CopyFrom(x_.GetMemory(), n);
562 ierr = (*rest)(x,&array); PCHKERRQ(x,ierr);
563 SetFlagsFromMask_();
564 }
565 else // don't copy, just set the device flag
566 {
567 if (isdevice && x_.UseDevice())
568 {
569 UseDevice(true);
570 }
571 else
572 {
573 UseDevice(false);
574 }
575 }
576}
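// Usage sketch (hypothetical): wrap an existing mfem::Vector as a PETSc
// vector, either copying its entries or only matching its device
// placement:
//
//   mfem::Vector v(100);
//   mfem::PetscParVector pv(MPI_COMM_WORLD, v, /*copy=*/true);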
577
578PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
579 PetscInt *col) : Vector()
580{
581 ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
582 if (col)
583 {
584 PetscMPIInt myid;
585 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
586 ierr = VecSetSizes(x,col[myid+1]-col[myid],PETSC_DECIDE); PCHKERRQ(x,ierr);
587 }
588 else
589 {
590 ierr = VecSetSizes(x,PETSC_DECIDE,glob_size); PCHKERRQ(x,ierr);
591 }
592 SetVecType_();
593 SetDataAndSize_();
594}
595
596PetscParVector::~PetscParVector()
597{
598 MPI_Comm comm = PetscObjectComm((PetscObject)x);
599 ierr = VecDestroy(&x); CCHKERRQ(comm,ierr);
600 pdata.Delete();
601}
602
603PetscParVector::PetscParVector(MPI_Comm comm, PetscInt glob_size,
604 PetscScalar *data_, PetscInt *col) : Vector()
605{
606 MFEM_VERIFY(col,"Missing distribution");
607 PetscMPIInt myid;
608 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
609 ierr = VecCreateMPIWithArray(comm,1,col[myid+1]-col[myid],glob_size,data_,
610 &x); CCHKERRQ(comm,ierr)
611 SetVecType_();
612 SetDataAndSize_();
613}
614
615PetscParVector::PetscParVector(const PetscParVector &y) : Vector()
616{
617 ierr = VecDuplicate(y.x,&x); PCHKERRQ(x,ierr);
618 SetDataAndSize_();
619}
620
621PetscParVector::PetscParVector(MPI_Comm comm, const Operator &op,
622 bool transpose, bool allocate) : Vector()
623{
624 PetscInt loc = transpose ? op.Height() : op.Width();
625
626 ierr = VecCreate(comm,&x);
627 CCHKERRQ(comm,ierr);
628 ierr = VecSetSizes(x,loc,PETSC_DECIDE);
629 PCHKERRQ(x,ierr);
630
631 SetVecType_();
632 if (allocate)
633 {
634 SetDataAndSize_();
635 }
636 else /* Vector intended to be used with Place/ResetMemory calls */
637 {
638 size = loc;
639 }
640}
641
642PetscParVector::PetscParVector(const PetscParMatrix &A,
643 bool transpose, bool allocate) : Vector()
644{
645 Mat pA = const_cast<PetscParMatrix&>(A);
646 if (!transpose)
647 {
648 ierr = MatCreateVecs(pA,&x,NULL); PCHKERRQ(pA,ierr);
649 }
650 else
651 {
652 ierr = MatCreateVecs(pA,NULL,&x); PCHKERRQ(pA,ierr);
653 }
654 SetVecType_();
655 if (!allocate) /* Vector intended to be used with Place/ResetMemory calls */
656 {
657 PetscInt n;
658 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
659 size = n;
660 }
661 else
662 {
663 SetDataAndSize_();
664 }
665}
666
666
667PetscParVector::PetscParVector(petsc::Vec y, bool ref) : Vector()
668{
669 if (ref)
670 {
671 ierr = PetscObjectReference((PetscObject)y); PCHKERRQ(y,ierr);
672 }
673 x = y;
674 SetDataAndSize_();
675}
676
677PetscParVector::PetscParVector(ParFiniteElementSpace *pfes) : Vector()
678{
679 HYPRE_BigInt* offsets = pfes->GetTrueDofOffsets();
680 MPI_Comm comm = pfes->GetComm();
681 ierr = VecCreate(comm,&x); CCHKERRQ(comm,ierr);
682
683 PetscMPIInt myid = 0;
684 if (!HYPRE_AssumedPartitionCheck())
685 {
686 mpiierr = MPI_Comm_rank(comm, &myid); CCHKERRQ(comm, mpiierr);
687 }
688 ierr = VecSetSizes(x,offsets[myid+1]-offsets[myid],PETSC_DECIDE);
689 PCHKERRQ(x,ierr);
690 SetVecType_();
691 SetDataAndSize_();
692}
693
694MPI_Comm PetscParVector::GetComm() const
695{
696 return x ? PetscObjectComm((PetscObject)x) : MPI_COMM_NULL;
697}
698
699Vector * PetscParVector::GlobalVector() const
700{
701 VecScatter scctx;
702 Vec vout;
703 const PetscScalar *array;
704 PetscInt size;
705
706 ierr = VecScatterCreateToAll(x,&scctx,&vout); PCHKERRQ(x,ierr);
707 ierr = VecScatterBegin(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
708 PCHKERRQ(x,ierr);
709 ierr = VecScatterEnd(scctx,x,vout,INSERT_VALUES,SCATTER_FORWARD);
710 PCHKERRQ(x,ierr);
711 ierr = VecScatterDestroy(&scctx); PCHKERRQ(x,ierr);
712 ierr = VecGetArrayRead(vout,&array); PCHKERRQ(x,ierr);
713 ierr = VecGetLocalSize(vout,&size); PCHKERRQ(x,ierr);
714 Array<PetscScalar> data(size);
715 data.Assign(array);
716 ierr = VecRestoreArrayRead(vout,&array); PCHKERRQ(x,ierr);
717 ierr = VecDestroy(&vout); PCHKERRQ(x,ierr);
718 Vector *v = new Vector(data, internal::to_int(size));
719 v->MakeDataOwner();
720 data.LoseData();
721 return v;
722}
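// Note: GlobalVector() is collective over the vector's communicator;
// every rank gathers the entire distributed vector and returns a serial
// Vector that owns its data, so the caller is responsible for deleting
// it.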
723
724PetscParVector& PetscParVector::operator=(PetscScalar d)
725{
726 ierr = VecSet(x,d); PCHKERRQ(x,ierr);
727 SetFlagsFromMask_();
728 return *this;
729}
730
731PetscParVector& PetscParVector::SetValues(const Array<PetscInt>& idx,
732 const Array<PetscScalar>& vals)
733{
734 MFEM_VERIFY(idx.Size() == vals.Size(),
735 "Size mismatch between indices and values");
736 PetscInt n = idx.Size();
737 ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),INSERT_VALUES);
738 PCHKERRQ(x,ierr);
739 ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
740 ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
741 SetFlagsFromMask_();
742 return *this;
743}
744
745PetscParVector& PetscParVector::AddValues(const Array<PetscInt>& idx,
746 const Array<PetscScalar>& vals)
747{
748 MFEM_VERIFY(idx.Size() == vals.Size(),
749 "Size mismatch between indices and values");
750 PetscInt n = idx.Size();
751 ierr = VecSetValues(x,n,idx.GetData(),vals.GetData(),ADD_VALUES);
752 PCHKERRQ(x,ierr);
753 ierr = VecAssemblyBegin(x); PCHKERRQ(x,ierr);
754 ierr = VecAssemblyEnd(x); PCHKERRQ(x,ierr);
755 SetFlagsFromMask_();
756 return *this;
757}
758
759PetscParVector& PetscParVector::operator=(const PetscParVector &y)
760{
761 ierr = VecCopy(y.x,x); PCHKERRQ(x,ierr);
762 SetFlagsFromMask_();
763 return *this;
764}
765
766PetscParVector& PetscParVector::operator+=(const PetscParVector &y)
767{
768 ierr = VecAXPY(x,1.0,y.x); PCHKERRQ(x,ierr);
769 SetFlagsFromMask_();
770 return *this;
771}
772
773PetscParVector& PetscParVector::operator-=(const PetscParVector &y)
774{
775 ierr = VecAXPY(x,-1.0,y.x); PCHKERRQ(x,ierr);
776 SetFlagsFromMask_();
777 return *this;
778}
779
780PetscParVector& PetscParVector::operator*=(PetscScalar s)
781{
782 ierr = VecScale(x,s); PCHKERRQ(x,ierr);
783 SetFlagsFromMask_();
784 return *this;
785}
786
787PetscParVector& PetscParVector::operator+=(PetscScalar s)
788{
789 ierr = VecShift(x,s); PCHKERRQ(x,ierr);
790 SetFlagsFromMask_();
791 return *this;
792}
793
794void PetscParVector::PlaceArray(PetscScalar *temp_data)
795{
796 ierr = VecPlaceArray(x,temp_data); PCHKERRQ(x,ierr);
797}
798
799void PetscParVector::ResetArray()
800{
801 ierr = VecResetArray(x); PCHKERRQ(x,ierr);
802}
803
804void PetscParVector::PlaceMemory(Memory<mfem::real_t>& mem, bool rw)
805{
806 PetscInt n;
807
808 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
809 MFEM_VERIFY(n <= mem.Capacity(),
810 "Memory size " << mem.Capacity() << " < " << n << " vector size!");
811 MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
812 MFEM_VERIFY(data.Empty(),"Vector data is not empty");
813#if defined(_USE_DEVICE)
814 PetscBool isdevice;
815 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
816 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
817 ""); PCHKERRQ(x,ierr);
818 if (isdevice)
819 {
820 bool usedev = mem.DeviceIsValid() || (!rw && mem.UseDevice());
821 pdata.MakeAliasForSync(mem,0,n,rw,true,usedev);
822 if (usedev)
823 {
824 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
825 ierr = VecDevicePlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
826 }
827 else
828 {
829 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
830 ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
831 }
832 }
833 else
834#endif
835 {
836 mfem::real_t *w = rw ? mfem::HostReadWrite(mem,size) : mfem::HostWrite(mem,
837 size);
838 pdata.MakeAliasForSync(mem,0,n,rw,true,false);
839#if defined(PETSC_HAVE_DEVICE)
840 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
841#endif
842 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
843 }
844 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
846}
847
848void PetscParVector::PlaceMemory(const Memory<mfem::real_t>& mem)
849{
850 PetscInt n;
851
852 ierr = VecGetLocalSize(x,&n); PCHKERRQ(x,ierr);
853 MFEM_VERIFY(n <= mem.Capacity(),
854 "Memory size " << mem.Capacity() << " < " << n << " vector size!");
855 MFEM_VERIFY(pdata.Empty(),"Vector data is not empty");
856 MFEM_VERIFY(data.Empty(),"Vector data is not empty");
857#if defined(_USE_DEVICE)
858 PetscBool isdevice;
859 ierr = PetscObjectTypeCompareAny((PetscObject)x,&isdevice,
860 VECSEQCUDA,VECMPICUDA,VECSEQHIP,VECMPIHIP,
861 ""); PCHKERRQ(x,ierr);
862 if (isdevice)
863 {
864 pdata.MakeAliasForSync(mem,0,n,mem.DeviceIsValid());
865 if (mem.DeviceIsValid())
866 {
867 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_GPU); PCHKERRQ(x,ierr);
868 ierr = VecDevicePlaceArray(x,pdata.GetDevicePointer()); PCHKERRQ(x,ierr);
869 }
870 else
871 {
872 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
873 ierr = VecPlaceArray(x,pdata.GetHostPointer()); PCHKERRQ(x,ierr);
874 }
875 }
876 else
877#endif
878 {
879 const mfem::real_t *w = mfem::HostRead(mem,size);
880 pdata.MakeAliasForSync(mem,0,n,false);
881#if defined(PETSC_HAVE_DEVICE)
882 ierr = __mfem_VecSetOffloadMask(x,PETSC_OFFLOAD_CPU); PCHKERRQ(x,ierr);
883#endif
884 ierr = VecPlaceArray(x,w); PCHKERRQ(x,ierr);
885 }
887 ierr = __mfem_PetscObjectStateIncrease((PetscObject)x); PCHKERRQ(x,ierr);
888 ierr = VecLockReadPush(x); PCHKERRQ(x,ierr);
889}
890
891void PetscParVector::ResetMemory()
892{
893 MFEM_VERIFY(pdata.IsAliasForSync(),"Vector data is not an alias");
894 MFEM_VERIFY(!pdata.Empty(),"Vector data is empty");
895 bool read = pdata.ReadRequested();
896 bool usedev = pdata.DeviceRequested();
897 bool write = pdata.WriteRequested();
898 /*
899 check for strange corner cases
900 - device memory used but somehow PETSc ended up putting up-to-date data on the host
901 - host memory used but somehow PETSc ended up putting up-to-date data on the device
902 */
903 if (write)
904 {
905 const PetscScalar *v;
906#if defined(PETSC_HAVE_DEVICE)
907 PetscOffloadMask mask;
908 ierr = VecGetOffloadMask(x,&mask); PCHKERRQ(x,ierr);
909 if ((usedev && (mask != PETSC_OFFLOAD_GPU && mask != PETSC_OFFLOAD_BOTH)) ||
910 (!usedev && (mask != PETSC_OFFLOAD_CPU && mask != PETSC_OFFLOAD_BOTH)))
911#endif
912 {
913 ierr = VecGetArrayRead(x,&v); PCHKERRQ(x,ierr);
914 pdata.CopyFromHost(v, size);
915 ierr = VecRestoreArrayRead(x,&v); PCHKERRQ(x,ierr);
916 }
917 }
918 pdata.Reset();
919 data.Reset();
920 if (read && !write) { ierr = VecLockReadPop(x); PCHKERRQ(x,ierr); }
921 if (usedev)
922 {
923#if defined(PETSC_HAVE_DEVICE)
924 ierr = VecDeviceResetArray(x); PCHKERRQ(x,ierr);
925#else
926 MFEM_VERIFY(false,"This should not happen");
927#endif
928 }
929 else
930 {
931 ierr = VecResetArray(x); PCHKERRQ(x,ierr);
932 }
933}
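// PlaceMemory/ResetMemory implement a zero-copy bridge between MFEM and
// PETSc: the Mat/Vec operations below (see e.g. PetscParMatrix::Mult)
// temporarily expose an mfem::Memory buffer to the PETSc Vec, operate on
// it, and then detach it. Sketch of the pattern (hypothetical names):
//
//   XX->PlaceMemory(x.GetMemory());        // read-only placement
//   YY->PlaceMemory(y.GetMemory(),true);   // read-write placement
//   // ... use the wrapped Vecs ...
//   XX->ResetMemory(); YY->ResetMemory();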
934
935void PetscParVector::Randomize(PetscInt seed)
936{
937 PetscRandom rctx = NULL;
938
939 if (seed)
940 {
941 ierr = PetscRandomCreate(PetscObjectComm((PetscObject)x),&rctx);
942 PCHKERRQ(x,ierr);
943 ierr = PetscRandomSetSeed(rctx,(unsigned long)seed); PCHKERRQ(x,ierr);
944 ierr = PetscRandomSeed(rctx); PCHKERRQ(x,ierr);
945 }
946 ierr = VecSetRandom(x,rctx); PCHKERRQ(x,ierr);
947 ierr = PetscRandomDestroy(&rctx); PCHKERRQ(x,ierr);
948}
949
950void PetscParVector::Print(const char *fname, bool binary) const
951{
952 if (fname)
953 {
954 PetscViewer view;
955
956 if (binary)
957 {
958 ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)x),fname,
959 FILE_MODE_WRITE,&view);
960 }
961 else
962 {
963 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)x),fname,&view);
964 }
965 PCHKERRQ(x,ierr);
966 ierr = VecView(x,view); PCHKERRQ(x,ierr);
967 ierr = PetscViewerDestroy(&view); PCHKERRQ(x,ierr);
968 }
969 else
970 {
971 ierr = VecView(x,NULL); PCHKERRQ(x,ierr);
972 }
973}
974
975// PetscParMatrix methods
976
977PetscInt PetscParMatrix::GetRowStart() const
978{
979 PetscInt N;
980 ierr = MatGetOwnershipRange(A,&N,NULL); PCHKERRQ(A,ierr);
981 return N;
982}
983
984PetscInt PetscParMatrix::GetColStart() const
985{
986 PetscInt N;
987 ierr = MatGetOwnershipRangeColumn(A,&N,NULL); PCHKERRQ(A,ierr);
988 return N;
989}
990
991PetscInt PetscParMatrix::GetNumRows() const
992{
993 PetscInt N;
994 ierr = MatGetLocalSize(A,&N,NULL); PCHKERRQ(A,ierr);
995 return N;
996}
997
998PetscInt PetscParMatrix::GetNumCols() const
999{
1000 PetscInt N;
1001 ierr = MatGetLocalSize(A,NULL,&N); PCHKERRQ(A,ierr);
1002 return N;
1003}
1004
1005PetscInt PetscParMatrix::M() const
1006{
1007 PetscInt N;
1008 ierr = MatGetSize(A,&N,NULL); PCHKERRQ(A,ierr);
1009 return N;
1010}
1011
1012PetscInt PetscParMatrix::N() const
1013{
1014 PetscInt N;
1015 ierr = MatGetSize(A,NULL,&N); PCHKERRQ(A,ierr);
1016 return N;
1017}
1018
1019PetscInt PetscParMatrix::NNZ() const
1020{
1021 MatInfo info;
1022 ierr = MatGetInfo(A,MAT_GLOBAL_SUM,&info); PCHKERRQ(A,ierr);
1023 return (PetscInt)info.nz_used;
1024}
1025
1026void PetscParMatrix::SetBlockSizes(PetscInt rbs, PetscInt cbs)
1027{
1028 if (cbs < 0) { cbs = rbs; }
1029 ierr = MatSetBlockSizes(A,rbs,cbs); PCHKERRQ(A,ierr);
1030}
1031
1032void PetscParMatrix::Init()
1033{
1034 A = NULL;
1035 X = Y = NULL;
1036 height = width = 0;
1037}
1038
1039PetscParMatrix::PetscParMatrix()
1040{
1041 Init();
1042}
1043
1044PetscParMatrix::PetscParMatrix(const PetscParMatrix& pB,
1045 const mfem::Array<PetscInt>& rows, const mfem::Array<PetscInt>& cols)
1046{
1047 Init();
1048
1049 Mat B = const_cast<PetscParMatrix&>(pB);
1050
1051 IS isr,isc;
1052 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),rows.Size(),
1053 rows.GetData(),PETSC_USE_POINTER,&isr); PCHKERRQ(B,ierr);
1054 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)B),cols.Size(),
1055 cols.GetData(),PETSC_USE_POINTER,&isc); PCHKERRQ(B,ierr);
1056 ierr = MatCreateSubMatrix(B,isr,isc,MAT_INITIAL_MATRIX,&A); PCHKERRQ(B,ierr);
1057 ierr = ISDestroy(&isr); PCHKERRQ(B,ierr);
1058 ierr = ISDestroy(&isc); PCHKERRQ(B,ierr);
1059
1060 height = GetNumRows();
1061 width = GetNumCols();
1062}
1063
1064PetscParMatrix::PetscParMatrix(const PetscParMatrix *pa, Operator::Type tid)
1065{
1066 Init();
1067 height = pa->Height();
1068 width = pa->Width();
1069 ConvertOperator(pa->GetComm(),*pa,&A,tid);
1070}
1071
1072PetscParMatrix::PetscParMatrix(const HypreParMatrix *ha, Operator::Type tid)
1073{
1074 Init();
1075 height = ha->Height();
1076 width = ha->Width();
1077 ConvertOperator(ha->GetComm(),*ha,&A,tid);
1078}
1079
1080PetscParMatrix::PetscParMatrix(const SparseMatrix *sa, Operator::Type tid)
1081{
1082 Init();
1083 height = sa->Height();
1084 width = sa->Width();
1085 ConvertOperator(PETSC_COMM_SELF,*sa,&A,tid);
1086}
1087
1088PetscParMatrix::PetscParMatrix(MPI_Comm comm, const Operator *op,
1089 Operator::Type tid)
1090{
1091 Init();
1092 height = op->Height();
1093 width = op->Width();
1094 ConvertOperator(comm,*op,&A,tid);
1095}
1096
1097PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt glob_size,
1098 PetscInt *row_starts, SparseMatrix *diag,
1099 Operator::Type tid)
1100{
1101 Init();
1102 BlockDiagonalConstructor(comm,row_starts,row_starts,diag,
1103 tid==PETSC_MATAIJ,&A);
1104 SetUpForDevice();
1105 // update base class
1106 height = GetNumRows();
1107 width = GetNumCols();
1108}
1109
1110PetscParMatrix::PetscParMatrix(MPI_Comm comm, PetscInt global_num_rows,
1111 PetscInt global_num_cols, PetscInt *row_starts,
1112 PetscInt *col_starts, SparseMatrix *diag,
1113 Operator::Type tid)
1114{
1115 Init();
1116 BlockDiagonalConstructor(comm,row_starts,col_starts,diag,
1117 tid==PETSC_MATAIJ,&A);
1118 SetUpForDevice();
1119 // update base class
1120 height = GetNumRows();
1121 width = GetNumCols();
1122}
1123
1124PetscParMatrix& PetscParMatrix::operator=(const HypreParMatrix& B)
1125{
1126 if (A)
1127 {
1128 MPI_Comm comm = PetscObjectComm((PetscObject)A);
1129 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1130 if (X) { delete X; }
1131 if (Y) { delete Y; }
1132 X = Y = NULL;
1133 }
1134 height = B.Height();
1135 width = B.Width();
1136#if defined(PETSC_HAVE_HYPRE)
1137 ierr = MatCreateFromParCSR(B,MATAIJ,PETSC_USE_POINTER,&A);
1138#else
1139 ierr = MatConvert_hypreParCSR_AIJ(B,&A); CCHKERRQ(B.GetComm(),ierr);
1140#endif
1141 SetUpForDevice();
1142 return *this;
1143}
1144
1145PetscParMatrix& PetscParMatrix::operator=(const PetscParMatrix& B)
1146{
1147 if (A)
1148 {
1149 MPI_Comm comm = PetscObjectComm((PetscObject)A);
1150 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1151 if (X) { delete X; }
1152 if (Y) { delete Y; }
1153 X = Y = NULL;
1154 }
1155 height = B.Height();
1156 width = B.Width();
1157 ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1158 return *this;
1159}
1160
1161PetscParMatrix& PetscParMatrix::operator+=(const PetscParMatrix& B)
1162{
1163 if (!A)
1164 {
1165 ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1166 }
1167 else
1168 {
1169 MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1170 MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1171 ierr = MatAXPY(A,1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1172 }
1173 return *this;
1174}
1175
1176PetscParMatrix& PetscParMatrix::operator-=(const PetscParMatrix& B)
1177{
1178 if (!A)
1179 {
1180 ierr = MatDuplicate(B,MAT_COPY_VALUES,&A); CCHKERRQ(B.GetComm(),ierr);
1181 ierr = MatScale(A,-1.0); PCHKERRQ(A,ierr);
1182 }
1183 else
1184 {
1185 MFEM_VERIFY(height == B.Height(),"Invalid number of local rows");
1186 MFEM_VERIFY(width == B.Width(), "Invalid number of local columns");
1187 ierr = MatAXPY(A,-1.0,B,DIFFERENT_NONZERO_PATTERN); CCHKERRQ(B.GetComm(),ierr);
1188 }
1189 return *this;
1190}
1191
1192void PetscParMatrix::
1193BlockDiagonalConstructor(MPI_Comm comm,
1194 PetscInt *row_starts, PetscInt *col_starts,
1195 SparseMatrix *diag, bool assembled, Mat* Ad)
1196{
1197 Mat A;
1198 PetscInt lrsize,lcsize,rstart,cstart;
1199 PetscMPIInt myid = 0,commsize;
1200
1201 mpiierr = MPI_Comm_size(comm,&commsize); CCHKERRQ(comm,mpiierr);
1202 if (!HYPRE_AssumedPartitionCheck())
1203 {
1204 mpiierr = MPI_Comm_rank(comm,&myid); CCHKERRQ(comm,mpiierr);
1205 }
1206 lrsize = row_starts[myid+1]-row_starts[myid];
1207 rstart = row_starts[myid];
1208 lcsize = col_starts[myid+1]-col_starts[myid];
1209 cstart = col_starts[myid];
1210
1211 if (!assembled)
1212 {
1213 IS is;
1214 ierr = ISCreateStride(comm,diag->Height(),rstart,1,&is); CCHKERRQ(comm,ierr);
1215 ISLocalToGlobalMapping rl2g,cl2g;
1216 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); PCHKERRQ(is,ierr);
1217 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1218 if (row_starts != col_starts)
1219 {
1220 ierr = ISCreateStride(comm,diag->Width(),cstart,1,&is);
1221 CCHKERRQ(comm,ierr);
1222 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); PCHKERRQ(is,ierr);
1223 ierr = ISDestroy(&is); CCHKERRQ(comm,ierr);
1224 }
1225 else
1226 {
1227 ierr = PetscObjectReference((PetscObject)rl2g); PCHKERRQ(rl2g,ierr);
1228 cl2g = rl2g;
1229 }
1230
1231 // Create the PETSc object (MATIS format)
1232 ierr = MatCreate(comm,&A); CCHKERRQ(comm,ierr);
1233 ierr = MatSetSizes(A,lrsize,lcsize,PETSC_DECIDE,PETSC_DECIDE);
1234 PCHKERRQ(A,ierr);
1235 ierr = MatSetType(A,MATIS); PCHKERRQ(A,ierr);
1236 ierr = MatSetLocalToGlobalMapping(A,rl2g,cl2g); PCHKERRQ(A,ierr);
1237 ierr = ISLocalToGlobalMappingDestroy(&rl2g); PCHKERRQ(A,ierr)
1238 ierr = ISLocalToGlobalMappingDestroy(&cl2g); PCHKERRQ(A,ierr)
1239
1240 // Copy SparseMatrix into PETSc SeqAIJ format
1241 // pass through host for now
1242 Mat lA;
1243 ierr = MatISGetLocalMat(A,&lA); PCHKERRQ(A,ierr);
1244 const int *II = diag->HostReadI();
1245 const int *JJ = diag->HostReadJ();
1246#if defined(PETSC_USE_64BIT_INDICES)
1247 PetscInt *pII,*pJJ;
1248 int m = diag->Height()+1, nnz = II[diag->Height()];
1249 ierr = PetscMalloc2(m,&pII,nnz,&pJJ); PCHKERRQ(lA,ierr);
1250 for (int i = 0; i < m; i++) { pII[i] = II[i]; }
1251 for (int i = 0; i < nnz; i++) { pJJ[i] = JJ[i]; }
1252 ierr = MatSeqAIJSetPreallocationCSR(lA,pII,pJJ,
1253 diag->HostReadData()); PCHKERRQ(lA,ierr);
1254 ierr = PetscFree2(pII,pJJ); PCHKERRQ(lA,ierr);
1255#else
1256 ierr = MatSeqAIJSetPreallocationCSR(lA,II,JJ,
1257 diag->HostReadData()); PCHKERRQ(lA,ierr);
1258#endif
1259 }
1260 else
1261 {
1262 PetscScalar *da;
1263 PetscInt *dii,*djj,*oii,
1264 m = diag->Height()+1, nnz = diag->NumNonZeroElems();
1265
1266 diag->SortColumnIndices();
1267 // if we can take ownership of the SparseMatrix arrays, we can avoid this
1268 // step
1269 ierr = PetscMalloc1(m,&dii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1270 ierr = PetscMalloc1(nnz,&djj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1271 ierr = PetscMalloc1(nnz,&da); CCHKERRQ(PETSC_COMM_SELF,ierr);
1272 if (sizeof(PetscInt) == sizeof(int))
1273 {
1274 ierr = PetscMemcpy(dii,diag->HostReadI(),m*sizeof(PetscInt));
1275 CCHKERRQ(PETSC_COMM_SELF,ierr);
1276 ierr = PetscMemcpy(djj,diag->HostReadJ(),nnz*sizeof(PetscInt));
1277 CCHKERRQ(PETSC_COMM_SELF,ierr);
1278 }
1279 else
1280 {
1281 const int *iii = diag->HostReadI();
1282 const int *jjj = diag->HostReadJ();
1283 for (int i = 0; i < m; i++) { dii[i] = iii[i]; }
1284 for (int i = 0; i < nnz; i++) { djj[i] = jjj[i]; }
1285 }
1286 ierr = PetscMemcpy(da,diag->HostReadData(),nnz*sizeof(PetscScalar));
1287 CCHKERRQ(PETSC_COMM_SELF,ierr);
1288 ierr = PetscCalloc1(m,&oii);
1289 CCHKERRQ(PETSC_COMM_SELF,ierr);
1290 if (commsize > 1)
1291 {
1292 ierr = MatCreateMPIAIJWithSplitArrays(comm,lrsize,lcsize,PETSC_DECIDE,
1293 PETSC_DECIDE,
1294 dii,djj,da,oii,NULL,NULL,&A);
1295 CCHKERRQ(comm,ierr);
1296 }
1297 else
1298 {
1299 ierr = MatCreateSeqAIJWithArrays(comm,lrsize,lcsize,dii,djj,da,&A);
1300 CCHKERRQ(comm,ierr);
1301 }
1302
1303 void *ptrs[4] = {dii,djj,da,oii};
1304 const char *names[4] = {"_mfem_csr_dii",
1305 "_mfem_csr_djj",
1306 "_mfem_csr_da",
1307 "_mfem_csr_oii",
1308 };
1309 for (PetscInt i=0; i<4; i++)
1310 {
1311 PetscContainer c;
1312
1313 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1314 ierr = PetscContainerSetPointer(c,ptrs[i]); CCHKERRQ(comm,ierr);
1315 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1316 CCHKERRQ(comm,ierr);
1317 ierr = PetscObjectCompose((PetscObject)A,names[i],(PetscObject)c);
1318 CCHKERRQ(comm,ierr);
1319 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1320 }
1321 }
1322
1323 // Tell PETSc the matrix is ready to be used
1324 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1325 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); PCHKERRQ(A,ierr);
1326
1327 *Ad = A;
1328}
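// The PetscContainer idiom above ties the lifetime of the manually
// allocated CSR arrays (dii, djj, da, oii) to the Mat: when PETSc
// destroys the matrix it calls __mfem_array_container_destroy on each
// composed container, which frees the corresponding array.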
1329
1330MPI_Comm PetscParMatrix::GetComm() const
1331{
1332 return A ? PetscObjectComm((PetscObject)A) : MPI_COMM_NULL;
1333}
1334
1335// TODO ADD THIS CONSTRUCTOR
1336//PetscParMatrix::PetscParMatrix(MPI_Comm comm, int nrows, PetscInt glob_nrows,
1337// PetscInt glob_ncols, int *I, PetscInt *J,
1338// mfem::real_t *data, PetscInt *rows, PetscInt *cols)
1339//{
1340//}
1341
1342// TODO This should take a reference on op but how?
1343void PetscParMatrix::MakeWrapper(MPI_Comm comm, const Operator* op, Mat *A)
1344{
1345 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1346 ierr = MatSetSizes(*A,op->Height(),op->Width(),
1347 PETSC_DECIDE,PETSC_DECIDE); PCHKERRQ(A,ierr);
1348 ierr = MatSetType(*A,MATSHELL); PCHKERRQ(A,ierr);
1349 ierr = MatShellSetContext(*A,(void *)op); PCHKERRQ(A,ierr);
1350 ierr = MatShellSetOperation(*A,MATOP_MULT,
1351 (void (*)())__mfem_mat_shell_apply);
1352 PCHKERRQ(A,ierr);
1353 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,
1354 (void (*)())__mfem_mat_shell_apply_transpose);
1355 PCHKERRQ(A,ierr);
1356 ierr = MatShellSetOperation(*A,MATOP_COPY,
1357 (void (*)())__mfem_mat_shell_copy);
1358 PCHKERRQ(A,ierr);
1359 ierr = MatShellSetOperation(*A,MATOP_DESTROY,
1360 (void (*)())__mfem_mat_shell_destroy);
1361#if defined(_USE_DEVICE)
1362 MemoryType mt = GetMemoryType(op->GetMemoryClass());
1363 if (mt == MemoryType::DEVICE || mt == MemoryType::MANAGED)
1364 {
1365 ierr = MatShellSetVecType(*A,PETSC_VECDEVICE); PCHKERRQ(A,ierr);
1366 ierr = MatBindToCPU(*A,PETSC_FALSE); PCHKERRQ(A,ierr);
1367 }
1368 else
1369 {
1370 ierr = MatBindToCPU(*A,PETSC_TRUE); PCHKERRQ(A,ierr);
1371 }
1372#endif
1373 PCHKERRQ(A,ierr);
1374 ierr = MatSetUp(*A); PCHKERRQ(*A,ierr);
1375}
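// MakeWrapper exposes any (possibly matrix-free) mfem::Operator to PETSc
// as a MATSHELL whose MatMult/MatMultTranspose dispatch back to
// Operator::Mult/MultTranspose via the __mfem_mat_shell_* callbacks.
// Usage sketch (hypothetical operator class):
//
//   MyOperator op; // derives from mfem::Operator
//   mfem::PetscParMatrix A(MPI_COMM_WORLD, &op,
//                          mfem::Operator::PETSC_MATSHELL);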
1376
1377void PetscParMatrix::ConvertOperator(MPI_Comm comm, const Operator &op, Mat* A,
1378 Operator::Type tid)
1379{
1380 PetscParMatrix *pA = const_cast<PetscParMatrix *>
1381 (dynamic_cast<const PetscParMatrix *>(&op));
1382 HypreParMatrix *pH = const_cast<HypreParMatrix *>
1383 (dynamic_cast<const HypreParMatrix *>(&op));
1384 BlockOperator *pB = const_cast<BlockOperator *>
1385 (dynamic_cast<const BlockOperator *>(&op));
1386 IdentityOperator *pI = const_cast<IdentityOperator *>
1387 (dynamic_cast<const IdentityOperator *>(&op));
1388 SparseMatrix *pS = const_cast<SparseMatrix *>
1389 (dynamic_cast<const SparseMatrix *>(&op));
1390
1391 if (pA && tid == ANY_TYPE) // use same object and return
1392 {
1393 ierr = PetscObjectReference((PetscObject)(pA->A));
1394 CCHKERRQ(pA->GetComm(),ierr);
1395 *A = pA->A;
1396 return;
1397 }
1398
1399 PetscBool avoidmatconvert = PETSC_FALSE;
1400 if (pA) // we test for these types since MatConvert will fail
1401 {
1402 ierr = PetscObjectTypeCompareAny((PetscObject)(pA->A),&avoidmatconvert,MATMFFD,
1403 MATSHELL,"");
1404 CCHKERRQ(comm,ierr);
1405 }
1406 if (pA && !avoidmatconvert)
1407 {
1408 Mat At = NULL;
1409 PetscBool istrans;
1410#if PETSC_VERSION_LT(3,10,0)
1411 PetscBool ismatis;
1412#endif
1413
1414#if PETSC_VERSION_LT(3,18,0)
1415 ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEMAT,&istrans);
1416#else
1417 ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATTRANSPOSEVIRTUAL,
1418 &istrans);
1419#endif
1420 CCHKERRQ(pA->GetComm(),ierr);
1421 if (!istrans)
1422 {
1423 if (tid == pA->GetType()) // use same object and return
1424 {
1425 ierr = PetscObjectReference((PetscObject)(pA->A));
1426 CCHKERRQ(pA->GetComm(),ierr);
1427 *A = pA->A;
1428 return;
1429 }
1430#if PETSC_VERSION_LT(3,10,0)
1431 ierr = PetscObjectTypeCompare((PetscObject)(pA->A),MATIS,&ismatis);
1432 CCHKERRQ(pA->GetComm(),ierr);
1433#endif
1434 }
1435 else
1436 {
1437 ierr = MatTransposeGetMat(pA->A,&At); CCHKERRQ(pA->GetComm(),ierr);
1438#if PETSC_VERSION_LT(3,10,0)
1439 ierr = PetscObjectTypeCompare((PetscObject)(At),MATIS,&ismatis);
1440#endif
1441 CCHKERRQ(pA->GetComm(),ierr);
1442 }
1443
1444 // Try to convert
1445 if (tid == PETSC_MATAIJ)
1446 {
1447#if PETSC_VERSION_LT(3,10,0)
1448 if (ismatis)
1449 {
1450 if (istrans)
1451 {
1452 Mat B;
1453
1454 ierr = MatISGetMPIXAIJ(At,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1455 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1456 ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1457 }
1458 else
1459 {
1460 ierr = MatISGetMPIXAIJ(pA->A,MAT_INITIAL_MATRIX,A);
1461 PCHKERRQ(pA->A,ierr);
1462 }
1463 }
1464 else
1465#endif
1466 {
1467 PetscMPIInt size;
1468 mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
1469
1470 // call MatConvert and see if a converter is available
1471 if (istrans)
1472 {
1473 Mat B;
1474 ierr = MatConvert(At,size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,&B);
1475 PCHKERRQ(pA->A,ierr);
1476 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1477 ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1478 }
1479 else
1480 {
1481 ierr = MatConvert(pA->A, size > 1 ? MATMPIAIJ : MATSEQAIJ,MAT_INITIAL_MATRIX,A);
1482 PCHKERRQ(pA->A,ierr);
1483 }
1484 }
1485 }
1486 else if (tid == PETSC_MATIS)
1487 {
1488 if (istrans)
1489 {
1490 Mat B;
1491 ierr = MatConvert(At,MATIS,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1492 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1493 ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1494 }
1495 else
1496 {
1497 ierr = MatConvert(pA->A,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1498 }
1499 }
1500 else if (tid == PETSC_MATHYPRE)
1501 {
1502#if defined(PETSC_HAVE_HYPRE)
1503 if (istrans)
1504 {
1505 Mat B;
1506 ierr = MatConvert(At,MATHYPRE,MAT_INITIAL_MATRIX,&B); PCHKERRQ(pA->A,ierr);
1507 ierr = MatCreateTranspose(B,A); PCHKERRQ(pA->A,ierr);
1508 ierr = MatDestroy(&B); PCHKERRQ(pA->A,ierr);
1509 }
1510 else
1511 {
1512 ierr = MatConvert(pA->A,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(pA->A,ierr);
1513 }
1514#else
1515 MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1516#endif
1517 }
1518 else if (tid == PETSC_MATSHELL)
1519 {
1520 MakeWrapper(comm,&op,A);
1521 }
1522 else
1523 {
1524 MFEM_ABORT("Unsupported operator type conversion " << tid)
1525 }
1526 }
1527 else if (pH)
1528 {
1529 if (tid == PETSC_MATAIJ)
1530 {
1531#if defined(PETSC_HAVE_HYPRE)
1532 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATAIJ,
1533 PETSC_USE_POINTER,A);
1534#else
1535 ierr = MatConvert_hypreParCSR_AIJ(const_cast<HypreParMatrix&>(*pH),A);
1536#endif
1537 CCHKERRQ(pH->GetComm(),ierr);
1538 }
1539 else if (tid == PETSC_MATIS)
1540 {
1541#if defined(PETSC_HAVE_HYPRE)
1542 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATIS,
1543 PETSC_USE_POINTER,A);
1544#else
1545 ierr = MatConvert_hypreParCSR_IS(const_cast<HypreParMatrix&>(*pH),A);
1546#endif
1547 CCHKERRQ(pH->GetComm(),ierr);
1548 }
1549 else if (tid == PETSC_MATHYPRE || tid == ANY_TYPE)
1550 {
1551#if defined(PETSC_HAVE_HYPRE)
1552 ierr = MatCreateFromParCSR(const_cast<HypreParMatrix&>(*pH),MATHYPRE,
1553 PETSC_USE_POINTER,A);
1554 CCHKERRQ(pH->GetComm(),ierr);
1555#else
1556 MFEM_ABORT("Reconfigure PETSc with --download-hypre or --with-hypre")
1557#endif
1558 }
1559 else if (tid == PETSC_MATSHELL)
1560 {
1561 MakeWrapper(comm,&op,A);
1562 }
1563 else
1564 {
1565 MFEM_ABORT("Conversion from HypreParCSR to operator type = " << tid <<
1566 " is not implemented");
1567 }
1568 }
1569 else if (pB)
1570 {
1571 Mat *mats,*matsl2l = NULL;
1572 PetscInt i,j,nr,nc;
1573
1574 nr = pB->NumRowBlocks();
1575 nc = pB->NumColBlocks();
1576 ierr = PetscCalloc1(nr*nc,&mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1577 if (tid == PETSC_MATIS)
1578 {
1579 ierr = PetscCalloc1(nr,&matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1580 }
1581 for (i=0; i<nr; i++)
1582 {
1583 PetscBool needl2l = PETSC_TRUE;
1584
1585 for (j=0; j<nc; j++)
1586 {
1587 if (!pB->IsZeroBlock(i,j))
1588 {
1589 ConvertOperator(comm,pB->GetBlock(i,j),&mats[i*nc+j],tid);
1590 if (tid == PETSC_MATIS && needl2l)
1591 {
1592 PetscContainer c;
1593 ierr = PetscObjectQuery((PetscObject)mats[i*nc+j],"_MatIS_PtAP_l2l",
1594 (PetscObject*)&c);
1595 PCHKERRQ(mats[i*nc+j],ierr);
1596 // special case for block operators: the local Vdofs should be
1597 // ordered as:
1598 // [f1_1,...f1_N1,f2_1,...,f2_N2,...,fm_1,...,fm_Nm]
1599 // with m fields, Ni the number of Vdofs for the i-th field
1600 if (c)
1601 {
1602 Array<Mat> *l2l = NULL;
1603 ierr = PetscContainerGetPointer(c,(void**)&l2l);
1604 PCHKERRQ(c,ierr);
1605 MFEM_VERIFY(l2l->Size() == 1,"Unexpected size "
1606 << l2l->Size() << " for block row " << i );
1607 ierr = PetscObjectReference((PetscObject)(*l2l)[0]);
1608 PCHKERRQ(c,ierr);
1609 matsl2l[i] = (*l2l)[0];
1610 needl2l = PETSC_FALSE;
1611 }
1612 }
1613 }
1614 }
1615 }
1616 ierr = MatCreateNest(comm,nr,NULL,nc,NULL,mats,A); CCHKERRQ(comm,ierr);
1617 if (tid == PETSC_MATIS)
1618 {
1619 ierr = MatConvert(*A,MATIS,MAT_INPLACE_MATRIX,A); CCHKERRQ(comm,ierr);
1620
1621 mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(nr);
1622 for (int i=0; i<(int)nr; i++) { (*vmatsl2l)[i] = matsl2l[i]; }
1623 ierr = PetscFree(matsl2l); CCHKERRQ(PETSC_COMM_SELF,ierr);
1624
1625 PetscContainer c;
1626 ierr = PetscContainerCreate(comm,&c); CCHKERRQ(comm,ierr);
1627 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
1628 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
1629 PCHKERRQ(c,ierr);
1630 ierr = PetscObjectCompose((PetscObject)(*A),"_MatIS_PtAP_l2l",(PetscObject)c);
1631 PCHKERRQ((*A),ierr);
1632 ierr = PetscContainerDestroy(&c); CCHKERRQ(comm,ierr);
1633 }
1634 for (i=0; i<nr*nc; i++) { ierr = MatDestroy(&mats[i]); CCHKERRQ(comm,ierr); }
1635 ierr = PetscFree(mats); CCHKERRQ(PETSC_COMM_SELF,ierr);
1636 }
1637 else if (pI && tid == PETSC_MATAIJ)
1638 {
1639 PetscInt rst;
1640
1641 ierr = MatCreate(comm,A); CCHKERRQ(comm,ierr);
1642 ierr = MatSetSizes(*A,pI->Height(),pI->Width(),PETSC_DECIDE,PETSC_DECIDE);
1643 PCHKERRQ(A,ierr);
1644 ierr = MatSetType(*A,MATAIJ); PCHKERRQ(*A,ierr);
1645 ierr = MatMPIAIJSetPreallocation(*A,1,NULL,0,NULL); PCHKERRQ(*A,ierr);
1646 ierr = MatSeqAIJSetPreallocation(*A,1,NULL); PCHKERRQ(*A,ierr);
1647 ierr = MatSetOption(*A,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE); PCHKERRQ(*A,ierr);
1648 ierr = MatGetOwnershipRange(*A,&rst,NULL); PCHKERRQ(*A,ierr);
1649 for (PetscInt i = rst; i < rst+pI->Height(); i++)
1650 {
1651 ierr = MatSetValue(*A,i,i,1.,INSERT_VALUES); PCHKERRQ(*A,ierr);
1652 }
1653 ierr = MatAssemblyBegin(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1654 ierr = MatAssemblyEnd(*A,MAT_FINAL_ASSEMBLY); PCHKERRQ(*A,ierr);
1655 }
1656 else if (pS)
1657 {
1658 if (tid == PETSC_MATSHELL)
1659 {
1660 MakeWrapper(comm,&op,A);
1661 }
1662 else
1663 {
1664 /* from SparseMatrix to SEQAIJ -> always pass through host for now */
1665 Mat B;
1666 PetscScalar *pdata;
1667 PetscInt *pii,*pjj,*oii;
1668 PetscMPIInt size;
1669
1670 int m = pS->Height();
1671 int n = pS->Width();
1672 const int *ii = pS->HostReadI();
1673 const int *jj = pS->HostReadJ();
1674 const mfem::real_t *data = pS->HostReadData();
1675
1676 ierr = PetscMalloc1(m+1,&pii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1677 ierr = PetscMalloc1(ii[m],&pjj); CCHKERRQ(PETSC_COMM_SELF,ierr);
1678 ierr = PetscMalloc1(ii[m],&pdata); CCHKERRQ(PETSC_COMM_SELF,ierr);
1679 pii[0] = ii[0];
1680 for (int i = 0; i < m; i++)
1681 {
1682 bool issorted = true;
1683 pii[i+1] = ii[i+1];
1684 for (int j = ii[i]; j < ii[i+1]; j++)
1685 {
1686 pjj[j] = jj[j];
1687 if (issorted && j != ii[i]) { issorted = (pjj[j] > pjj[j-1]); }
1688 pdata[j] = data[j];
1689 }
1690 if (!issorted)
1691 {
1692 ierr = PetscSortIntWithScalarArray(pii[i+1]-pii[i],pjj + pii[i],pdata + pii[i]);
1693 CCHKERRQ(PETSC_COMM_SELF,ierr);
1694 }
1695 }
1696
1697 mpiierr = MPI_Comm_size(comm,&size); CCHKERRQ(comm,mpiierr);
1698 if (size == 1)
1699 {
1700 ierr = MatCreateSeqAIJWithArrays(comm,m,n,pii,pjj,pdata,&B);
1701 CCHKERRQ(comm,ierr);
1702 oii = NULL;
1703 }
1704 else // block diagonal constructor
1705 {
1706 ierr = PetscCalloc1(m+1,&oii); CCHKERRQ(PETSC_COMM_SELF,ierr);
1707 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,
1708 PETSC_DECIDE,
1709 pii,pjj,pdata,oii,NULL,NULL,&B);
1710 CCHKERRQ(comm,ierr);
1711 }
1712 void *ptrs[4] = {pii,pjj,pdata,oii};
1713 const char *names[4] = {"_mfem_csr_pii",
1714 "_mfem_csr_pjj",
1715 "_mfem_csr_pdata",
1716 "_mfem_csr_oii"
1717 };
1718 for (int i=0; i<4; i++)
1719 {
1720 PetscContainer c;
1721
1722 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); PCHKERRQ(B,ierr);
1723 ierr = PetscContainerSetPointer(c,ptrs[i]); PCHKERRQ(B,ierr);
1724 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
1725 PCHKERRQ(B,ierr);
1726 ierr = PetscObjectCompose((PetscObject)(B),names[i],(PetscObject)c);
1727 PCHKERRQ(B,ierr);
1728 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
1729 }
1730 if (tid == PETSC_MATAIJ)
1731 {
1732 *A = B;
1733 }
1734 else if (tid == PETSC_MATHYPRE)
1735 {
1736 ierr = MatConvert(B,MATHYPRE,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1737 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1738 }
1739 else if (tid == PETSC_MATIS)
1740 {
1741 ierr = MatConvert(B,MATIS,MAT_INITIAL_MATRIX,A); PCHKERRQ(B,ierr);
1742 ierr = MatDestroy(&B); PCHKERRQ(*A,ierr);
1743 }
1744 else
1745 {
1746 MFEM_ABORT("Unsupported operator type conversion " << tid)
1747 }
1748 }
1749 }
1750 else // fallback to general operator
1751 {
1752 MFEM_VERIFY(tid == PETSC_MATSHELL || tid == PETSC_MATAIJ || tid == ANY_TYPE,
1753 "Supported types are ANY_TYPE, PETSC_MATSHELL or PETSC_MATAIJ");
1754 MakeWrapper(comm,&op,A);
1755 if (tid == PETSC_MATAIJ)
1756 {
1757 Mat B;
1758 PetscBool isaij;
1759
1760 ierr = MatComputeOperator(*A,MATMPIAIJ,&B); CCHKERRQ(comm,ierr);
1761 ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIAIJ,&isaij);
1762 CCHKERRQ(comm,ierr);
1763 ierr = MatDestroy(A); CCHKERRQ(comm,ierr);
1764 if (!isaij)
1765 {
1766 ierr = MatConvert(B,MATAIJ,MAT_INITIAL_MATRIX,A); CCHKERRQ(comm,ierr);
1767 ierr = MatDestroy(&B); CCHKERRQ(comm,ierr);
1768 }
1769 else
1770 {
1771 *A = B;
1772 }
1773 }
1774 }
1775 SetUpForDevice();
1776}
1777
1778void PetscParMatrix::Destroy()
1779{
1780 if (A != NULL)
1781 {
1782 MPI_Comm comm = MPI_COMM_NULL;
1783 ierr = PetscObjectGetComm((PetscObject)A,&comm); PCHKERRQ(A,ierr);
1784 ierr = MatDestroy(&A); CCHKERRQ(comm,ierr);
1785 }
1786 delete X;
1787 delete Y;
1788 X = Y = NULL;
1789}
1790
1791PetscParMatrix::PetscParMatrix(petsc::Mat a, bool ref)
1792{
1793 if (ref)
1794 {
1795 ierr = PetscObjectReference((PetscObject)a); PCHKERRQ(a,ierr);
1796 }
1797 Init();
1798 A = a;
1799 height = GetNumRows();
1800 width = GetNumCols();
1801}
1802
1803void PetscParMatrix::SetMat(petsc::Mat A_)
1804{
1805 if (A_ == A) { return; }
1806 Destroy();
1807 ierr = PetscObjectReference((PetscObject)A_); PCHKERRQ(A_,ierr);
1808 A = A_;
1809 height = GetNumRows();
1810 width = GetNumCols();
1811}
1812
1813void PetscParMatrix::SetUpForDevice()
1814{
1815#if !defined(_USE_DEVICE)
1816 return;
1817#else
1818 if (!A || (!Device::Allows(Backend::CUDA_MASK) &&
1819 !Device::Allows(Backend::HIP_MASK)))
1820 {
1821 if (A) { ierr = MatBindToCPU(A, PETSC_TRUE); PCHKERRQ(A,ierr); }
1822 return;
1823 }
1824 PetscBool ismatis,isnest,isaij;
1825 ierr = PetscObjectTypeCompare((PetscObject)A,MATIS,&ismatis);
1826 PCHKERRQ(A,ierr);
1827 ierr = PetscObjectTypeCompare((PetscObject)A,MATNEST,&isnest);
1828 PCHKERRQ(A,ierr);
1829 Mat tA = A;
1830 if (ismatis)
1831 {
1832 ierr = MatISGetLocalMat(A,&tA); PCHKERRQ(A,ierr);
1833 ierr = PetscObjectTypeCompare((PetscObject)tA,MATNEST,&isnest);
1834 PCHKERRQ(tA,ierr);
1835 }
1836 if (isnest)
1837 {
1838 PetscInt n,m;
1839 Mat **sub;
1840 ierr = MatNestGetSubMats(tA,&n,&m,&sub); PCHKERRQ(tA,ierr);
1841 bool dvec = false;
1842 for (PetscInt i = 0; i < n; i++)
1843 {
1844 for (PetscInt j = 0; j < m; j++)
1845 {
1846 if (sub[i][j])
1847 {
1848 bool expT = false;
1849 Mat sA = sub[i][j];
1850 ierr = PetscObjectTypeCompareAny((PetscObject)sA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
1851 PCHKERRQ(sA,ierr);
1852 if (isaij)
1853 {
1854 ierr = MatSetType(sA,PETSC_MATAIJDEVICE); PCHKERRQ(sA,ierr);
1855 dvec = true;
1856 expT = true;
1857 }
1858 if (expT)
1859 {
1860 ierr = MatSetOption(sA,MAT_FORM_EXPLICIT_TRANSPOSE,
1861 PETSC_TRUE); PCHKERRQ(sA,ierr);
1862 }
1863 }
1864 }
1865 }
1866 if (dvec)
1867 {
1868 ierr = MatSetVecType(tA,PETSC_VECDEVICE); PCHKERRQ(tA,ierr);
1869 }
1870 }
1871 else
1872 {
1873 bool expT = false;
1874 ierr = PetscObjectTypeCompareAny((PetscObject)tA,&isaij,MATSEQAIJ,MATMPIAIJ,"");
1875 PCHKERRQ(tA,ierr);
1876 if (isaij)
1877 {
1878 ierr = MatSetType(tA,PETSC_MATAIJDEVICE); PCHKERRQ(tA,ierr);
1879 expT = true;
1880 }
1881 if (expT)
1882 {
1883 ierr = MatSetOption(tA,MAT_FORM_EXPLICIT_TRANSPOSE,
1884 PETSC_TRUE); PCHKERRQ(tA,ierr);
1885 }
1886 }
1887#endif
1888}
1889
1890// Computes y = alpha * A * x + beta * y
1891// or y = alpha * A^T * x + beta * y
1892static void MatMultKernel(Mat A,PetscScalar a,Vec X,PetscScalar b,Vec Y,
1893 bool transpose)
1894{
1895 PetscErrorCode (*f)(Mat,Vec,Vec);
1896 PetscErrorCode (*fadd)(Mat,Vec,Vec,Vec);
1897 if (transpose)
1898 {
1899 f = MatMultTranspose;
1900 fadd = MatMultTransposeAdd;
1901 }
1902 else
1903 {
1904 f = MatMult;
1905 fadd = MatMultAdd;
1906 }
1907 if (a != 0.)
1908 {
1909 if (b != 0.)
1910 {
1911 ierr = VecScale(Y,b/a); PCHKERRQ(A,ierr);
1912 ierr = (*fadd)(A,X,Y,Y); PCHKERRQ(A,ierr);
1913 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1914 }
1915 else
1916 {
1917 ierr = (*f)(A,X,Y); PCHKERRQ(A,ierr);
1918 ierr = VecScale(Y,a); PCHKERRQ(A,ierr);
1919 }
1920 }
1921 else
1922 {
1923 if (b == 1.)
1924 {
1925 // do nothing
1926 }
1927 else if (b != 0.)
1928 {
1929 ierr = VecScale(Y,b); PCHKERRQ(A,ierr);
1930 }
1931 else
1932 {
1933 ierr = VecSet(Y,0.); PCHKERRQ(A,ierr);
1934 }
1935 }
1936}
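// When both a and b are nonzero, the kernel above relies on the identity
//   y <- a*(A*x + (b/a)*y) = a*A*x + b*y,
// i.e., it pre-scales y by b/a, applies MatMultAdd, and post-scales by a,
// which avoids allocating a temporary vector.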
1937
1938void PetscParMatrix::MakeRef(const PetscParMatrix &master)
1939{
1940 ierr = PetscObjectReference((PetscObject)master.A); PCHKERRQ(master.A,ierr);
1941 Destroy();
1942 Init();
1943 A = master.A;
1944 height = master.height;
1945 width = master.width;
1946}
1947
1948PetscParVector * PetscParMatrix::GetX() const
1949{
1950 if (!X)
1951 {
1952 MFEM_VERIFY(A,"Mat not present");
1953 X = new PetscParVector(*this,false,false); PCHKERRQ(A,ierr);
1954 }
1955 return X;
1956}
1957
1958PetscParVector * PetscParMatrix::GetY() const
1959{
1960 if (!Y)
1961 {
1962 MFEM_VERIFY(A,"Mat not present");
1963 Y = new PetscParVector(*this,true,false); PCHKERRQ(A,ierr);
1964 }
1965 return Y;
1966}
1967
1968PetscParMatrix * PetscParMatrix::Transpose(bool action)
1969{
1970 Mat B;
1971 if (action)
1972 {
1973 ierr = MatCreateTranspose(A,&B); PCHKERRQ(A,ierr);
1974 }
1975 else
1976 {
1977 ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&B); PCHKERRQ(A,ierr);
1978 }
1979 return new PetscParMatrix(B,false);
1980}
1981
1982void PetscParMatrix::operator*=(mfem::real_t s)
1983{
1984 ierr = MatScale(A,s); PCHKERRQ(A,ierr);
1985}
1986
1987void PetscParMatrix::Mult(mfem::real_t a, const Vector &x, mfem::real_t b,
1988 Vector &y) const
1989{
1990 MFEM_ASSERT(x.Size() == Width(), "invalid x.Size() = " << x.Size()
1991 << ", expected size = " << Width());
1992 MFEM_ASSERT(y.Size() == Height(), "invalid y.Size() = " << y.Size()
1993 << ", expected size = " << Height());
1994
1995 PetscParVector *XX = GetX();
1996 PetscParVector *YY = GetY();
1997 bool rw = (b != 0.0);
1998 XX->PlaceMemory(x.GetMemory());
1999 YY->PlaceMemory(y.GetMemory(),rw);
2000 MatMultKernel(A,a,XX->x,b,YY->x,false);
2001 XX->ResetMemory();
2002 YY->ResetMemory();
2003}
2004
2005void PetscParMatrix::MultTranspose(mfem::real_t a, const Vector &x,
2006 mfem::real_t b,
2007 Vector &y) const
2008{
2009 MFEM_ASSERT(x.Size() == Height(), "invalid x.Size() = " << x.Size()
2010 << ", expected size = " << Height());
2011 MFEM_ASSERT(y.Size() == Width(), "invalid y.Size() = " << y.Size()
2012 << ", expected size = " << Width());
2013
2014 PetscParVector *XX = GetX();
2015 PetscParVector *YY = GetY();
2016 bool rw = (b != 0.0);
2017 XX->PlaceMemory(y.GetMemory(),rw);
2018 YY->PlaceMemory(x.GetMemory());
2019 MatMultKernel(A,a,YY->x,b,XX->x,true);
2020 XX->ResetMemory();
2021 YY->ResetMemory();
2022}
2023
2024void PetscParMatrix::Print(const char *fname, bool binary) const
2025{
2026 if (fname)
2027 {
2028 PetscViewer view;
2029
2030 if (binary)
2031 {
2032 ierr = PetscViewerBinaryOpen(PetscObjectComm((PetscObject)A),fname,
2033 FILE_MODE_WRITE,&view);
2034 }
2035 else
2036 {
2037 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)A),fname,&view);
2038 }
2039 PCHKERRQ(A,ierr);
2040 ierr = MatView(A,view); PCHKERRQ(A,ierr);
2041 ierr = PetscViewerDestroy(&view); PCHKERRQ(A,ierr);
2042 }
2043 else
2044 {
2045 ierr = MatView(A,NULL); PCHKERRQ(A,ierr);
2046 }
2047}
2048
2049void PetscParMatrix::ScaleRows(const Vector & s)
2050{
2051 MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2052 << ", expected size = " << Height());
2053
2054 PetscParVector *YY = GetY();
2055 YY->PlaceMemory(s.GetMemory());
2056 ierr = MatDiagonalScale(A,*YY,NULL); PCHKERRQ(A,ierr);
2057 YY->ResetMemory();
2058}
2059
2060void PetscParMatrix::ScaleCols(const Vector & s)
2061{
2062 MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2063 << ", expected size = " << Width());
2064
2065 PetscParVector *XX = GetX();
2066 XX->PlaceMemory(s.GetMemory());
2067 ierr = MatDiagonalScale(A,NULL,*XX); PCHKERRQ(A,ierr);
2068 XX->ResetMemory();
2069}
2070
2071void PetscParMatrix::Shift(mfem::real_t s)
2072{
2073 ierr = MatShift(A,(PetscScalar)s); PCHKERRQ(A,ierr);
2074}
2075
2076void PetscParMatrix::Shift(const Vector & s)
2077{
2078 // for matrices with square diagonal blocks only
2079 MFEM_ASSERT(s.Size() == Height(), "invalid s.Size() = " << s.Size()
2080 << ", expected size = " << Height());
2081 MFEM_ASSERT(s.Size() == Width(), "invalid s.Size() = " << s.Size()
2082 << ", expected size = " << Width());
2083
2084 PetscParVector *XX = GetX();
2085 XX->PlaceMemory(s.GetMemory());
2086 ierr = MatDiagonalSet(A,*XX,ADD_VALUES); PCHKERRQ(A,ierr);
2087 XX->ResetMemory();
2088}
2089
2090PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A,
2091                                     PetscParMatrix *P)
2092{
2093 MFEM_VERIFY(A->Width() == P->Height(),
2094 "Petsc TripleMatrixProduct: Number of local cols of A " << A->Width() <<
2095 " differs from number of local rows of P " << P->Height());
2096 MFEM_VERIFY(A->Height() == R->Width(),
2097 "Petsc TripleMatrixProduct: Number of local rows of A " << A->Height() <<
2098 " differs from number of local cols of R " << R->Width());
2099 Mat B;
2100 ierr = MatMatMatMult(*R,*A,*P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2101 PCHKERRQ(*R,ierr);
2102 return new PetscParMatrix(B);
2103}
2104
2105PetscParMatrix * RAP(PetscParMatrix *Rt, PetscParMatrix *A, PetscParMatrix *P)
2106{
2107 Mat pA = *A,pP = *P,pRt = *Rt;
2108 Mat B;
2109 PetscBool Aismatis,Pismatis,Rtismatis;
2110
2111 MFEM_VERIFY(A->Width() == P->Height(),
2112 "Petsc RAP: Number of local cols of A " << A->Width() <<
2113 " differs from number of local rows of P " << P->Height());
2114 MFEM_VERIFY(A->Height() == Rt->Height(),
2115 "Petsc RAP: Number of local rows of A " << A->Height() <<
2116 " differs from number of local rows of Rt " << Rt->Height());
2117 ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&Aismatis);
2118 PCHKERRQ(pA,ierr);
2119 ierr = PetscObjectTypeCompare((PetscObject)pP,MATIS,&Pismatis);
2120 PCHKERRQ(pA,ierr);
2121 ierr = PetscObjectTypeCompare((PetscObject)pRt,MATIS,&Rtismatis);
2122 PCHKERRQ(pA,ierr);
2123 if (Aismatis &&
2124 Pismatis &&
2125 Rtismatis) // handle special case (this code will eventually go into PETSc)
2126 {
2127 Mat lA,lP,lB,lRt;
2128 ISLocalToGlobalMapping cl2gP,cl2gRt;
2129 PetscInt rlsize,clsize,rsize,csize;
2130
2131 ierr = MatGetLocalToGlobalMapping(pP,NULL,&cl2gP); PCHKERRQ(pA,ierr);
2132 ierr = MatGetLocalToGlobalMapping(pRt,NULL,&cl2gRt); PCHKERRQ(pA,ierr);
2133 ierr = MatGetLocalSize(pP,NULL,&clsize); PCHKERRQ(pP,ierr);
2134 ierr = MatGetLocalSize(pRt,NULL,&rlsize); PCHKERRQ(pRt,ierr);
2135 ierr = MatGetSize(pP,NULL,&csize); PCHKERRQ(pP,ierr);
2136 ierr = MatGetSize(pRt,NULL,&rsize); PCHKERRQ(pRt,ierr);
2137 ierr = MatCreate(A->GetComm(),&B); PCHKERRQ(pA,ierr);
2138 ierr = MatSetSizes(B,rlsize,clsize,rsize,csize); PCHKERRQ(B,ierr);
2139 ierr = MatSetType(B,MATIS); PCHKERRQ(B,ierr);
2140 ierr = MatSetLocalToGlobalMapping(B,cl2gRt,cl2gP); PCHKERRQ(B,ierr);
2141 ierr = MatISGetLocalMat(pA,&lA); PCHKERRQ(pA,ierr);
2142 ierr = MatISGetLocalMat(pP,&lP); PCHKERRQ(pA,ierr);
2143 ierr = MatISGetLocalMat(pRt,&lRt); PCHKERRQ(pA,ierr);
2144 if (lRt == lP)
2145 {
2146 ierr = MatPtAP(lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2147 PCHKERRQ(lA,ierr);
2148 }
2149 else
2150 {
2151 Mat lR;
2152 ierr = MatTranspose(lRt,MAT_INITIAL_MATRIX,&lR); PCHKERRQ(lRt,ierr);
2153 ierr = MatMatMatMult(lR,lA,lP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lB);
2154 PCHKERRQ(lRt,ierr);
2155 ierr = MatDestroy(&lR); PCHKERRQ(lRt,ierr);
2156 }
2157
2158      // attach the lRt matrix to the subdomain local matrix;
2159      // it may be used if markers on vdofs have to be mapped onto
2160      // subdomain true dofs
2161 {
2162 mfem::Array<Mat> *vmatsl2l = new mfem::Array<Mat>(1);
2163 ierr = PetscObjectReference((PetscObject)lRt); PCHKERRQ(lRt,ierr);
2164 (*vmatsl2l)[0] = lRt;
2165
2166 PetscContainer c;
2167 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)B),&c);
2168 PCHKERRQ(B,ierr);
2169 ierr = PetscContainerSetPointer(c,vmatsl2l); PCHKERRQ(c,ierr);
2170 ierr = PetscContainerSetUserDestroy(c,__mfem_matarray_container_destroy);
2171 PCHKERRQ(c,ierr);
2172 ierr = PetscObjectCompose((PetscObject)B,"_MatIS_PtAP_l2l",(PetscObject)c);
2173 PCHKERRQ(B,ierr);
2174 ierr = PetscContainerDestroy(&c); PCHKERRQ(B,ierr);
2175 }
2176
2177 // Set local problem
2178 ierr = MatISSetLocalMat(B,lB); PCHKERRQ(lB,ierr);
2179 ierr = MatDestroy(&lB); PCHKERRQ(lA,ierr);
2180 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2181 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY); PCHKERRQ(B,ierr);
2182 }
2183   else // raises an error if this PtAP combination is not supported in PETSc
2184 {
2185 if (pP == pRt)
2186 {
2187 ierr = MatPtAP(pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2188 PCHKERRQ(pA,ierr);
2189 }
2190 else
2191 {
2192 Mat pR;
2193 ierr = MatTranspose(pRt,MAT_INITIAL_MATRIX,&pR); PCHKERRQ(Rt,ierr);
2194 ierr = MatMatMatMult(pR,pA,pP,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&B);
2195 PCHKERRQ(pRt,ierr);
2196 ierr = MatDestroy(&pR); PCHKERRQ(pRt,ierr);
2197 }
2198 }
2199 return new PetscParMatrix(B);
2200}
2201
2202PetscParMatrix * RAP(PetscParMatrix *A, PetscParMatrix *P)
2203{
2204 PetscParMatrix *out = RAP(P,A,P);
2205 return out;
2206}
2207
2208PetscParMatrix * RAP(HypreParMatrix *hA, PetscParMatrix *P)
2209{
2210 PetscParMatrix *out,*A;
2211#if defined(PETSC_HAVE_HYPRE)
2212   A = new PetscParMatrix(hA,Operator::PETSC_MATHYPRE);
2213#else
2214 A = new PetscParMatrix(hA);
2215#endif
2216 out = RAP(P,A,P);
2217 delete A;
2218 return out;
2219}
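// Note: a minimal usage sketch for the RAP() overloads above (illustrative
// names; assuming `K` is an assembled HypreParMatrix and `P` a PetscParMatrix
// interpolation). The caller owns the returned triple product:
//
//   mfem::PetscParMatrix *Kc = mfem::RAP(&K, &P); // Kc = P^T K P
//   // ... use Kc ...
//   delete Kc;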
2220
2221
2222PetscParMatrix * ParMult(const PetscParMatrix *A, const PetscParMatrix *B)
2223{
2224 Mat AB;
2225
2226 ierr = MatMatMult(*A,*B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AB);
2227 CCHKERRQ(A->GetComm(),ierr);
2228 return new PetscParMatrix(AB);
2229}
2230
2231PetscParMatrix * PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols)
2232{
2233 Mat Ae;
2234
2235 PetscParVector dummy(GetComm(),0);
2236 ierr = MatDuplicate(A,MAT_COPY_VALUES,&Ae); PCHKERRQ(A,ierr);
2237 EliminateRowsCols(rows_cols,dummy,dummy);
2238 ierr = MatAXPY(Ae,-1.,A,SAME_NONZERO_PATTERN); PCHKERRQ(A,ierr);
2239 return new PetscParMatrix(Ae);
2240}
2241
2242void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2243                                       const HypreParVector &X,
2244 HypreParVector &B,
2245 mfem::real_t diag)
2246{
2247 MFEM_ABORT("Missing PetscParMatrix::EliminateRowsCols() with HypreParVectors");
2248}
2249
2250void PetscParMatrix::EliminateRowsCols(const Array<int> &rows_cols,
2251                                       const PetscParVector &X,
2252 PetscParVector &B,
2253 mfem::real_t diag)
2254{
2255 PetscInt M,N;
2256 ierr = MatGetSize(A,&M,&N); PCHKERRQ(A,ierr);
2257 MFEM_VERIFY(M == N,"Rectangular case unsupported");
2258
2259 // TODO: what if a diagonal term is not present?
2260 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2261
2262 // rows need to be in global numbering
2263 PetscInt rst;
2264 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2265
2266 IS dir;
2267 ierr = Convert_Array_IS(GetComm(),true,&rows_cols,rst,&dir); PCHKERRQ(A,ierr);
2268 if (!X.GlobalSize() && !B.GlobalSize())
2269 {
2270 ierr = MatZeroRowsColumnsIS(A,dir,diag,NULL,NULL); PCHKERRQ(A,ierr);
2271 }
2272 else
2273 {
2274 ierr = MatZeroRowsColumnsIS(A,dir,diag,X,B); PCHKERRQ(A,ierr);
2275 }
2276 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2277}
2278
2279void PetscParMatrix::EliminateRows(const Array<int> &rows)
2280{
2281 ierr = MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE); PCHKERRQ(A,ierr);
2282
2283 // rows need to be in global numbering
2284 PetscInt rst;
2285 ierr = MatGetOwnershipRange(A,&rst,NULL); PCHKERRQ(A,ierr);
2286
2287 IS dir;
2288 ierr = Convert_Array_IS(GetComm(),true,&rows,rst,&dir); PCHKERRQ(A,ierr);
2289 ierr = MatZeroRowsIS(A,dir,0.0,NULL,NULL); PCHKERRQ(A,ierr);
2290 ierr = ISDestroy(&dir); PCHKERRQ(A,ierr);
2291}
2292
2293Mat PetscParMatrix::ReleaseMat(bool dereference)
2294{
2295
2296 Mat B = A;
2297 if (dereference)
2298 {
2299 MPI_Comm comm = GetComm();
2300 ierr = PetscObjectDereference((PetscObject)A); CCHKERRQ(comm,ierr);
2301 }
2302 A = NULL;
2303 return B;
2304}
2305
2306Operator::Type PetscParMatrix::GetType() const
2307{
2308 PetscBool ok;
2309 MFEM_VERIFY(A, "no associated PETSc Mat object");
2310 PetscObject oA = (PetscObject)(this->A);
2311   // map all of MATAIJ, MATSEQAIJ, and MATMPIAIJ to PETSC_MATAIJ
2312 ierr = PetscObjectTypeCompare(oA, MATAIJ, &ok); PCHKERRQ(A,ierr);
2313 if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2314 ierr = PetscObjectTypeCompare(oA, MATSEQAIJ, &ok); PCHKERRQ(A,ierr);
2315 if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2316 ierr = PetscObjectTypeCompare(oA, MATMPIAIJ, &ok); PCHKERRQ(A,ierr);
2317 if (ok == PETSC_TRUE) { return PETSC_MATAIJ; }
2318 ierr = PetscObjectTypeCompare(oA, MATIS, &ok); PCHKERRQ(A,ierr);
2319 if (ok == PETSC_TRUE) { return PETSC_MATIS; }
2320 ierr = PetscObjectTypeCompare(oA, MATSHELL, &ok); PCHKERRQ(A,ierr);
2321 if (ok == PETSC_TRUE) { return PETSC_MATSHELL; }
2322 ierr = PetscObjectTypeCompare(oA, MATNEST, &ok); PCHKERRQ(A,ierr);
2323 if (ok == PETSC_TRUE) { return PETSC_MATNEST; }
2324#if defined(PETSC_HAVE_HYPRE)
2325 ierr = PetscObjectTypeCompare(oA, MATHYPRE, &ok); PCHKERRQ(A,ierr);
2326 if (ok == PETSC_TRUE) { return PETSC_MATHYPRE; }
2327#endif
2328 return PETSC_MATGENERIC;
2329}
2330
2331void EliminateBC(PetscParMatrix &A, PetscParMatrix &Ae,
2332                 const Array<int> &ess_dof_list,
2333 const Vector &X, Vector &B)
2334{
2335 const PetscScalar *array;
2336 Mat pA = const_cast<PetscParMatrix&>(A);
2337
2338 // B -= Ae*X
2339 Ae.Mult(-1.0, X, 1.0, B);
2340
2341 Vec diag = const_cast<PetscParVector&>((*A.GetX()));
2342 ierr = MatGetDiagonal(pA,diag); PCHKERRQ(pA,ierr);
2343 ierr = VecGetArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2344 for (int i = 0; i < ess_dof_list.Size(); i++)
2345 {
2346 int r = ess_dof_list[i];
2347 B(r) = array[r] * X(r);
2348 }
2349 ierr = VecRestoreArrayRead(diag,&array); PCHKERRQ(diag,ierr);
2350}
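// Note: a sketch of the typical elimination workflow with the two methods
// above (illustrative names; `A` is a square PetscParMatrix, `ess_tdof_list`
// the essential true dofs, `X`/`B` the solution/rhs vectors):
//
//   mfem::PetscParMatrix *Ae = A.EliminateRowsCols(ess_tdof_list);
//   mfem::EliminateBC(A, *Ae, ess_tdof_list, X, B); // fix up B for bc values
//   delete Ae;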
2351
2352// PetscSolver methods
2353
2354PetscSolver::PetscSolver() : clcustom(false)
2355{
2356 obj = NULL;
2357 B = X = NULL;
2358 cid = -1;
2359 operatorset = false;
2360 bchandler = NULL;
2361 private_ctx = NULL;
2362}
2363
2364PetscSolver::~PetscSolver()
2365{
2366 delete B;
2367 delete X;
2368   FreePrivateContext();
2369}
2370
2371void PetscSolver::SetTol(mfem::real_t tol)
2372{
2373 SetRelTol(tol);
2374}
2375
2376void PetscSolver::SetRelTol(mfem::real_t tol)
2377{
2378 if (cid == KSP_CLASSID)
2379 {
2380 KSP ksp = (KSP)obj;
2381 ierr = KSPSetTolerances(ksp,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
2382 }
2383 else if (cid == SNES_CLASSID)
2384 {
2385 SNES snes = (SNES)obj;
2386 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT,
2387 PETSC_DEFAULT);
2388 }
2389 else if (cid == TS_CLASSID)
2390 {
2391 TS ts = (TS)obj;
2392 ierr = TSSetTolerances(ts,PETSC_DECIDE,NULL,tol,NULL);
2393 }
2394 else
2395 {
2396 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2397 }
2398 PCHKERRQ(obj,ierr);
2399}
2400
2401void PetscSolver::SetAbsTol(mfem::real_t tol)
2402{
2403 if (cid == KSP_CLASSID)
2404 {
2405 KSP ksp = (KSP)obj;
2406 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,tol,PETSC_DEFAULT,PETSC_DEFAULT);
2407 }
2408 else if (cid == SNES_CLASSID)
2409 {
2410 SNES snes = (SNES)obj;
2411 ierr = SNESSetTolerances(snes,tol,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2412 PETSC_DEFAULT);
2413 }
2414 else if (cid == TS_CLASSID)
2415 {
2416 TS ts = (TS)obj;
2417 ierr = TSSetTolerances(ts,tol,NULL,PETSC_DECIDE,NULL);
2418 }
2419 else
2420 {
2421 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2422 }
2423 PCHKERRQ(obj,ierr);
2424}
2425
2426void PetscSolver::SetMaxIter(int max_iter)
2427{
2428 if (cid == KSP_CLASSID)
2429 {
2430 KSP ksp = (KSP)obj;
2431 ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2432 max_iter);
2433 }
2434 else if (cid == SNES_CLASSID)
2435 {
2436 SNES snes = (SNES)obj;
2437 ierr = SNESSetTolerances(snes,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,
2438 max_iter,PETSC_DEFAULT);
2439 }
2440 else if (cid == TS_CLASSID)
2441 {
2442 TS ts = (TS)obj;
2443 ierr = TSSetMaxSteps(ts,max_iter);
2444 }
2445 else
2446 {
2447 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2448 }
2449 PCHKERRQ(obj,ierr);
2450}
2451
2452
2453void PetscSolver::SetPrintLevel(int plev)
2454{
2455 typedef PetscErrorCode (*myPetscFunc)(void**);
2456 PetscViewerAndFormat *vf = NULL;
2457 PetscViewer viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm(obj));
2458
2459 if (plev > 0)
2460 {
2461 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2462 PCHKERRQ(obj,ierr);
2463 }
2464 if (cid == KSP_CLASSID)
2465 {
2466 // there are many other options, see the function KSPSetFromOptions() in
2467 // src/ksp/ksp/interface/itcl.c
2468 typedef PetscErrorCode (*myMonitor)(KSP,PetscInt,PetscReal,void*);
2469 KSP ksp = (KSP)obj;
2470 if (plev >= 0)
2471 {
2472 ierr = KSPMonitorCancel(ksp); PCHKERRQ(ksp,ierr);
2473 }
2474 if (plev == 1)
2475 {
2476#if PETSC_VERSION_LT(3,15,0)
2477 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorDefault,vf,
2478#else
2479 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorResidual,vf,
2480#endif
2481 (myPetscFunc)PetscViewerAndFormatDestroy);
2482 PCHKERRQ(ksp,ierr);
2483 }
2484 else if (plev > 1)
2485 {
2486 ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE); PCHKERRQ(ksp,ierr);
2487 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorSingularValue,vf,
2488 (myPetscFunc)PetscViewerAndFormatDestroy);
2489 PCHKERRQ(ksp,ierr);
2490 if (plev > 2)
2491 {
2492 ierr = PetscViewerAndFormatCreate(viewer,PETSC_VIEWER_DEFAULT,&vf);
2493 PCHKERRQ(viewer,ierr);
2494#if PETSC_VERSION_LT(3,15,0)
2495 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidualNorm,vf,
2496#else
2497 ierr = KSPMonitorSet(ksp,(myMonitor)KSPMonitorTrueResidual,vf,
2498#endif
2499 (myPetscFunc)PetscViewerAndFormatDestroy);
2500 PCHKERRQ(ksp,ierr);
2501 }
2502 }
2503 }
2504 else if (cid == SNES_CLASSID)
2505 {
2506 typedef PetscErrorCode (*myMonitor)(SNES,PetscInt,PetscReal,void*);
2507 SNES snes = (SNES)obj;
2508 if (plev >= 0)
2509 {
2510 ierr = SNESMonitorCancel(snes); PCHKERRQ(snes,ierr);
2511 }
2512 if (plev > 0)
2513 {
2514 ierr = SNESMonitorSet(snes,(myMonitor)SNESMonitorDefault,vf,
2515 (myPetscFunc)PetscViewerAndFormatDestroy);
2516 PCHKERRQ(snes,ierr);
2517 }
2518 }
2519 else if (cid == TS_CLASSID)
2520 {
2521 TS ts = (TS)obj;
2522 if (plev >= 0)
2523 {
2524 ierr = TSMonitorCancel(ts); PCHKERRQ(ts,ierr);
2525 }
2526 }
2527 else
2528 {
2529 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2530 }
2531}
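// Note on the print levels handled above: any plev >= 0 first cancels the
// installed monitors; for KSP, plev == 1 prints the preconditioned residual,
// plev > 1 also prints singular value estimates, and plev > 2 additionally
// prints the true residual norm. For SNES, any plev > 0 installs the default
// monitor.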
2532
2533MPI_Comm PetscSolver::GetComm() const
2534{
2535 return obj ? PetscObjectComm(obj) : MPI_COMM_NULL;
2536}
2537
2538void PetscSolver::SetMonitor(PetscSolverMonitor *ctx)
2539{
2540 __mfem_monitor_ctx *monctx;
2541 ierr = PetscNew(&monctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2542 monctx->solver = this;
2543 monctx->monitor = ctx;
2544 if (cid == KSP_CLASSID)
2545 {
2546 ierr = KSPMonitorSet((KSP)obj,__mfem_ksp_monitor,monctx,
2547 __mfem_monitor_ctx_destroy);
2548 PCHKERRQ(obj,ierr);
2549 }
2550 else if (cid == SNES_CLASSID)
2551 {
2552 ierr = SNESMonitorSet((SNES)obj,__mfem_snes_monitor,monctx,
2553 __mfem_monitor_ctx_destroy);
2554 PCHKERRQ(obj,ierr);
2555 }
2556 else if (cid == TS_CLASSID)
2557 {
2558 ierr = TSMonitorSet((TS)obj,__mfem_ts_monitor,monctx,
2559 __mfem_monitor_ctx_destroy);
2560 PCHKERRQ(obj,ierr);
2561 }
2562 else
2563 {
2564 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2565 }
2566}
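// Note: a minimal sketch of a user-defined monitor, assuming the
// PetscSolverMonitor interface declared in petsc.hpp (class name and output
// are illustrative); the context created above then dispatches to it:
//
//   class MyMonitor : public mfem::PetscSolverMonitor
//   {
//   public:
//      MyMonitor() : mfem::PetscSolverMonitor(true,false) {}
//      void MonitorSolution(PetscInt it, PetscReal norm,
//                           const mfem::Vector &X) override
//      { mfem::out << "iteration " << it << ", norm " << norm << "\n"; }
//   };
//   // hypothetical usage: MyMonitor mon; solver.SetMonitor(&mon);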
2567
2568void PetscSolver::SetBCHandler(PetscBCHandler *bch)
2569{
2570 bchandler = bch;
2571 if (cid == SNES_CLASSID)
2572 {
2573 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)private_ctx;
2574 snes_ctx->bchandler = bchandler;
2575 }
2576 else if (cid == TS_CLASSID)
2577 {
2578 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)private_ctx;
2579 ts_ctx->bchandler = bchandler;
2580 }
2581 else
2582 {
2583 MFEM_ABORT("Handling of essential bc only implemented for nonlinear and time-dependent solvers");
2584 }
2585}
2586
2587void PetscSolver::SetPreconditionerFactory(PetscPreconditionerFactory *factory)
2588{
2589 PC pc = NULL;
2590 if (cid == TS_CLASSID)
2591 {
2592 SNES snes;
2593 KSP ksp;
2594
2595 ierr = TSGetSNES((TS)obj,&snes); PCHKERRQ(obj,ierr);
2596 ierr = SNESGetKSP(snes,&ksp); PCHKERRQ(obj,ierr);
2597 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2598 }
2599 else if (cid == SNES_CLASSID)
2600 {
2601 KSP ksp;
2602
2603 ierr = SNESGetKSP((SNES)obj,&ksp); PCHKERRQ(obj,ierr);
2604 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(obj,ierr);
2605 }
2606 else if (cid == KSP_CLASSID)
2607 {
2608 ierr = KSPGetPC((KSP)obj,&pc); PCHKERRQ(obj,ierr);
2609 }
2610 else if (cid == PC_CLASSID)
2611 {
2612 pc = (PC)obj;
2613 }
2614 else
2615 {
2616 MFEM_ABORT("No support for PetscPreconditionerFactory for this object");
2617 }
2618 if (factory)
2619 {
2620 ierr = MakeShellPCWithFactory(pc,factory); PCHKERRQ(pc,ierr);
2621 }
2622 else
2623 {
2624 ierr = PCSetType(pc, PCNONE); PCHKERRQ(pc,ierr);
2625 }
2626}
2627
2628void PetscSolver::Customize(bool customize) const
2629{
2630 if (!customize) { clcustom = true; }
2631 if (!clcustom)
2632 {
2633 if (cid == PC_CLASSID)
2634 {
2635 PC pc = (PC)obj;
2636 ierr = PCSetFromOptions(pc); PCHKERRQ(pc, ierr);
2637 }
2638 else if (cid == KSP_CLASSID)
2639 {
2640 KSP ksp = (KSP)obj;
2641 ierr = KSPSetFromOptions(ksp); PCHKERRQ(ksp, ierr);
2642 }
2643 else if (cid == SNES_CLASSID)
2644 {
2645 SNES snes = (SNES)obj;
2646 ierr = SNESSetFromOptions(snes); PCHKERRQ(snes, ierr);
2647 }
2648 else if (cid == TS_CLASSID)
2649 {
2650 TS ts = (TS)obj;
2651 ierr = TSSetFromOptions(ts); PCHKERRQ(ts, ierr);
2652 }
2653 else
2654 {
2655 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2656 }
2657 }
2658 clcustom = true;
2659}
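// Note: as implemented above, Customize(false) latches clcustom without
// calling the *SetFromOptions routines, permanently disabling command-line
// customization for this object; after the first successful call the options
// database is not consulted again.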
2660
2661int PetscSolver::GetConverged()
2662{
2663 if (cid == KSP_CLASSID)
2664 {
2665 KSP ksp = (KSP)obj;
2666 KSPConvergedReason reason;
2667 ierr = KSPGetConvergedReason(ksp,&reason);
2668 PCHKERRQ(ksp,ierr);
2669 return reason > 0 ? 1 : 0;
2670 }
2671 else if (cid == SNES_CLASSID)
2672 {
2673 SNES snes = (SNES)obj;
2674 SNESConvergedReason reason;
2675 ierr = SNESGetConvergedReason(snes,&reason);
2676 PCHKERRQ(snes,ierr);
2677 return reason > 0 ? 1 : 0;
2678 }
2679 else if (cid == TS_CLASSID)
2680 {
2681 TS ts = (TS)obj;
2682 TSConvergedReason reason;
2683 ierr = TSGetConvergedReason(ts,&reason);
2684 PCHKERRQ(ts,ierr);
2685 return reason > 0 ? 1 : 0;
2686 }
2687 else
2688 {
2689 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2690 return -1;
2691 }
2692}
2693
2694int PetscSolver::GetNumIterations()
2695{
2696 if (cid == KSP_CLASSID)
2697 {
2698 KSP ksp = (KSP)obj;
2699 PetscInt its;
2700 ierr = KSPGetIterationNumber(ksp,&its);
2701 PCHKERRQ(ksp,ierr);
2702 return its;
2703 }
2704 else if (cid == SNES_CLASSID)
2705 {
2706 SNES snes = (SNES)obj;
2707 PetscInt its;
2708 ierr = SNESGetIterationNumber(snes,&its);
2709 PCHKERRQ(snes,ierr);
2710 return its;
2711 }
2712 else if (cid == TS_CLASSID)
2713 {
2714 TS ts = (TS)obj;
2715 PetscInt its;
2716 ierr = TSGetStepNumber(ts,&its);
2717 PCHKERRQ(ts,ierr);
2718 return its;
2719 }
2720 else
2721 {
2722 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2723 return -1;
2724 }
2725}
2726
2727mfem::real_t PetscSolver::GetFinalNorm()
2728{
2729 if (cid == KSP_CLASSID)
2730 {
2731 KSP ksp = (KSP)obj;
2732 PetscReal norm;
2733 ierr = KSPGetResidualNorm(ksp,&norm);
2734 PCHKERRQ(ksp,ierr);
2735 return norm;
2736 }
2737 if (cid == SNES_CLASSID)
2738 {
2739 SNES snes = (SNES)obj;
2740 PetscReal norm;
2741 ierr = SNESGetFunctionNorm(snes,&norm);
2742 PCHKERRQ(snes,ierr);
2743 return norm;
2744 }
2745 else
2746 {
2747 MFEM_ABORT("CLASSID = " << cid << " is not implemented!");
2748 return PETSC_MAX_REAL;
2749 }
2750}
2751
2752void PetscSolver::CreatePrivateContext()
2753{
2754   FreePrivateContext();
2755   if (cid == SNES_CLASSID)
2756 {
2757 __mfem_snes_ctx *snes_ctx;
2758 ierr = PetscNew(&snes_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2759 snes_ctx->op = NULL;
2760 snes_ctx->bchandler = NULL;
2761 snes_ctx->work = NULL;
2762 snes_ctx->jacType = Operator::PETSC_MATAIJ;
2763 private_ctx = (void*) snes_ctx;
2764 }
2765 else if (cid == TS_CLASSID)
2766 {
2767 __mfem_ts_ctx *ts_ctx;
2768 ierr = PetscNew(&ts_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2769 ts_ctx->op = NULL;
2770 ts_ctx->bchandler = NULL;
2771 ts_ctx->work = NULL;
2772 ts_ctx->work2 = NULL;
2773 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
2774 ts_ctx->cached_ijacstate = -1;
2775 ts_ctx->cached_rhsjacstate = -1;
2776 ts_ctx->cached_splits_xstate = -1;
2777 ts_ctx->cached_splits_xdotstate = -1;
2778      ts_ctx->type = PetscODESolver::ODE_SOLVER_GENERAL;
2779      ts_ctx->jacType = Operator::PETSC_MATAIJ;
2780 private_ctx = (void*) ts_ctx;
2781 }
2782}
2783
2784void PetscSolver::FreePrivateContext()
2785{
2786 if (!private_ctx) { return; }
2787 // free private context's owned objects
2788 if (cid == SNES_CLASSID)
2789 {
2790 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx *)private_ctx;
2791 delete snes_ctx->work;
2792 }
2793 else if (cid == TS_CLASSID)
2794 {
2795 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx *)private_ctx;
2796 delete ts_ctx->work;
2797 delete ts_ctx->work2;
2798 }
2799 ierr = PetscFree(private_ctx); CCHKERRQ(PETSC_COMM_SELF,ierr);
2800}
2801
2802// PetscBCHandler methods
2803
2804PetscBCHandler::PetscBCHandler(Array<int>& ess_tdof_list,
2805                               enum PetscBCHandler::Type type_)
2806 : bctype(type_), setup(false), eval_t(0.0),
2807 eval_t_cached(std::numeric_limits<mfem::real_t>::min())
2808{
2809 SetTDofs(ess_tdof_list);
2810}
2811
2812void PetscBCHandler::SetTDofs(Array<int>& list)
2813{
2814 ess_tdof_list.SetSize(list.Size());
2815 ess_tdof_list.Assign(list);
2816 setup = false;
2817}
2818
2819void PetscBCHandler::SetUp(PetscInt n)
2820{
2821 if (setup) { return; }
2822 if (bctype == CONSTANT)
2823 {
2824 eval_g.SetSize(n);
2825 this->Eval(eval_t,eval_g);
2826 eval_t_cached = eval_t;
2827 }
2828 else if (bctype == TIME_DEPENDENT)
2829 {
2830 eval_g.SetSize(n);
2831 }
2832 setup = true;
2833}
2834
2835void PetscBCHandler::ApplyBC(const Vector &x, Vector &y)
2836{
2837 (*this).SetUp(x.Size());
2838 y = x;
2839 if (bctype == ZERO)
2840 {
2841 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2842 {
2843 y[ess_tdof_list[i]] = 0.0;
2844 }
2845 }
2846 else
2847 {
2848 if (bctype != CONSTANT && eval_t != eval_t_cached)
2849 {
2850 Eval(eval_t,eval_g);
2851 eval_t_cached = eval_t;
2852 }
2853 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2854 {
2855 y[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2856 }
2857 }
2858}
2859
2860void PetscBCHandler::ApplyBC(Vector &x)
2861{
2862 (*this).SetUp(x.Size());
2863 if (bctype == ZERO)
2864 {
2865 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2866 {
2867 x[ess_tdof_list[i]] = 0.0;
2868 }
2869 }
2870 else
2871 {
2872 if (bctype != CONSTANT && eval_t != eval_t_cached)
2873 {
2874 Eval(eval_t,eval_g);
2875 eval_t_cached = eval_t;
2876 }
2877 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2878 {
2879 x[ess_tdof_list[i]] = eval_g[ess_tdof_list[i]];
2880 }
2881 }
2882}
2883
2884void PetscBCHandler::FixResidualBC(const Vector& x, Vector& y)
2885{
2886 (*this).SetUp(x.Size());
2887 if (bctype == ZERO)
2888 {
2889 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2890 {
2891 y[ess_tdof_list[i]] = x[ess_tdof_list[i]];
2892 }
2893 }
2894 else
2895 {
2896 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2897 {
2898 y[ess_tdof_list[i]] = x[ess_tdof_list[i]] - eval_g[ess_tdof_list[i]];
2899 }
2900 }
2901}
2902
2903void PetscBCHandler::Zero(Vector &x)
2904{
2905 (*this).SetUp(x.Size());
2906 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2907 {
2908 x[ess_tdof_list[i]] = 0.0;
2909 }
2910}
2911
2912void PetscBCHandler::ZeroBC(const Vector &x, Vector &y)
2913{
2914 (*this).SetUp(x.Size());
2915 y = x;
2916 for (int i = 0; i < ess_tdof_list.Size(); ++i)
2917 {
2918 y[ess_tdof_list[i]] = 0.0;
2919 }
2920}
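// Note: a minimal time-dependent handler sketch, assuming the PetscBCHandler
// interface declared in petsc.hpp (class name and ramp value are
// illustrative); Eval() is called by ApplyBC() whenever eval_t changes:
//
//   class RampBC : public mfem::PetscBCHandler
//   {
//   public:
//      RampBC(mfem::Array<int> &ess_tdofs)
//         : mfem::PetscBCHandler(ess_tdofs, TIME_DEPENDENT) {}
//      void Eval(mfem::real_t t, mfem::Vector &g) override { g = t; }
//   };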
2921
2922// PetscLinearSolver methods
2923
2924PetscLinearSolver::PetscLinearSolver(MPI_Comm comm, const std::string &prefix,
2925 bool wrapin, bool iter_mode)
2926 : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2927{
2928 KSP ksp;
2929 ierr = KSPCreate(comm,&ksp); CCHKERRQ(comm,ierr);
2930 obj = (PetscObject)ksp;
2931 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2932 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2933 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2934 PCHKERRQ(ksp, ierr);
2935}
2936
2937PetscLinearSolver::PetscLinearSolver(const PetscParMatrix &A,
2938                                     const std::string &prefix, bool iter_mode)
2939 : PetscSolver(), Solver(0,iter_mode), wrap(false)
2940{
2941 KSP ksp;
2942 ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2943 obj = (PetscObject)ksp;
2944 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
2945 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2946 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2947 PCHKERRQ(ksp, ierr);
2948 SetOperator(A);
2949}
2950
2951PetscLinearSolver::PetscLinearSolver(const HypreParMatrix &A, bool wrapin,
2952                                     const std::string &prefix, bool iter_mode)
2953 : PetscSolver(), Solver(0,iter_mode), wrap(wrapin)
2954{
2955 KSP ksp;
2956 ierr = KSPCreate(A.GetComm(),&ksp); CCHKERRQ(A.GetComm(),ierr);
2957 obj = (PetscObject)ksp;
2958 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
2959 ierr = KSPSetOptionsPrefix(ksp, prefix.c_str()); PCHKERRQ(ksp, ierr);
2960 ierr = KSPSetInitialGuessNonzero(ksp, (PetscBool)iterative_mode);
2961 PCHKERRQ(ksp, ierr);
2962 SetOperator(A);
2963}
2964
2965void PetscLinearSolver::SetOperator(const Operator &op)
2966{
2967 const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
2968 PetscParMatrix *pA = const_cast<PetscParMatrix *>
2969 (dynamic_cast<const PetscParMatrix *>(&op));
2970 const Operator *oA = dynamic_cast<const Operator *>(&op);
2971
2972 // update base classes: Operator, Solver, PetscLinearSolver
2973 bool delete_pA = false;
2974 if (!pA)
2975 {
2976 if (hA)
2977 {
2978         // Create a MATSHELL object or convert to a format suitable for constructing preconditioners
2979 pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2980 delete_pA = true;
2981 }
2982 else if (oA) // fallback to general operator
2983 {
2984 // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
2985 // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
2986 pA = new PetscParMatrix(PetscObjectComm(obj),oA,
2987 wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
2988 delete_pA = true;
2989 }
2990 }
2991 MFEM_VERIFY(pA, "Unsupported operation!");
2992
2993 // Set operators into PETSc KSP
2994 KSP ksp = (KSP)obj;
2995 Mat A = pA->A;
2996 if (operatorset)
2997 {
2998 Mat C;
2999 PetscInt nheight,nwidth,oheight,owidth;
3000
3001 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3002 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3003 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3004 if (nheight != oheight || nwidth != owidth)
3005 {
3006 // reinit without destroying the KSP
3007 // communicator remains the same
3008 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3009 delete X;
3010 delete B;
3011 X = B = NULL;
3012 }
3013 }
3014 ierr = KSPSetOperators(ksp,A,A); PCHKERRQ(ksp,ierr);
3015
3016 // Update PetscSolver
3017 operatorset = true;
3018
3019 // Update the Operator fields.
3020 height = pA->Height();
3021 width = pA->Width();
3022
3023 if (delete_pA) { delete pA; }
3024}
3025
3026void PetscLinearSolver::SetOperator(const Operator &op, const Operator &pop)
3027{
3028 const HypreParMatrix *hA = dynamic_cast<const HypreParMatrix *>(&op);
3029 PetscParMatrix *pA = const_cast<PetscParMatrix *>
3030 (dynamic_cast<const PetscParMatrix *>(&op));
3031 const Operator *oA = dynamic_cast<const Operator *>(&op);
3032
3033 PetscParMatrix *ppA = const_cast<PetscParMatrix *>
3034 (dynamic_cast<const PetscParMatrix *>(&pop));
3035 const Operator *poA = dynamic_cast<const Operator *>(&pop);
3036
3037 // Convert Operator for linear system
3038 bool delete_pA = false;
3039 if (!pA)
3040 {
3041 if (hA)
3042 {
3043         // Create a MATSHELL object or convert to a format suitable for constructing preconditioners
3044 pA = new PetscParMatrix(hA, wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3045 delete_pA = true;
3046 }
3047 else if (oA) // fallback to general operator
3048 {
3049 // Create MATSHELL or MATNEST (if oA is a BlockOperator) object
3050 // If oA is a BlockOperator, Operator::Type is relevant to the subblocks
3051 pA = new PetscParMatrix(PetscObjectComm(obj),oA,
3052 wrap ? PETSC_MATSHELL : PETSC_MATAIJ);
3053 delete_pA = true;
3054 }
3055 }
3056 MFEM_VERIFY(pA, "Unsupported operation!");
3057
3058 // Convert Operator to be preconditioned
3059 bool delete_ppA = false;
3060 if (!ppA)
3061 {
3062 if (oA == poA && !wrap) // Same operator, already converted
3063 {
3064 ppA = pA;
3065 }
3066 else
3067 {
3068 ppA = new PetscParMatrix(PetscObjectComm(obj), poA, PETSC_MATAIJ);
3069 delete_ppA = true;
3070 }
3071 }
3072 MFEM_VERIFY(ppA, "Unsupported operation!");
3073
3074 // Set operators into PETSc KSP
3075 KSP ksp = (KSP)obj;
3076 Mat A = pA->A;
3077 Mat P = ppA->A;
3078 if (operatorset)
3079 {
3080 Mat C;
3081 PetscInt nheight,nwidth,oheight,owidth;
3082
3083 ierr = KSPGetOperators(ksp,&C,NULL); PCHKERRQ(ksp,ierr);
3084 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3085 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3086 if (nheight != oheight || nwidth != owidth)
3087 {
3088 // reinit without destroying the KSP
3089 // communicator remains the same
3090 ierr = KSPReset(ksp); PCHKERRQ(ksp,ierr);
3091 delete X;
3092 delete B;
3093 X = B = NULL;
3094 wrap = false;
3095 }
3096 }
3097 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3098
3099 // Update PetscSolver
3100 operatorset = true;
3101
3102 // Update the Operator fields.
3103 height = pA->Height();
3104 width = pA->Width();
3105
3106 if (delete_pA) { delete pA; }
3107 if (delete_ppA) { delete ppA; }
3108}
3109
3110void PetscLinearSolver::SetPreconditioner(Solver &precond)
3111{
3112 KSP ksp = (KSP)obj;
3113
3114 // Preserve Amat if already set
3115 Mat A = NULL;
3116 PetscBool amat;
3117 ierr = KSPGetOperatorsSet(ksp,&amat,NULL); PCHKERRQ(ksp,ierr);
3118 if (amat)
3119 {
3120 ierr = KSPGetOperators(ksp,&A,NULL); PCHKERRQ(ksp,ierr);
3121 ierr = PetscObjectReference((PetscObject)A); PCHKERRQ(ksp,ierr);
3122 }
3123 PetscPreconditioner *ppc = dynamic_cast<PetscPreconditioner *>(&precond);
3124 if (ppc)
3125 {
3126 ierr = KSPSetPC(ksp,*ppc); PCHKERRQ(ksp,ierr);
3127 }
3128 else
3129 {
3130      // wrap the Solver action;
3131      // the Solver is assumed to be already set up;
3132      // ownership of precond is not transferred,
3133      // consistent with MFEM's other linear solvers
3134 PC pc;
3135 ierr = KSPGetPC(ksp,&pc); PCHKERRQ(ksp,ierr);
3136 ierr = MakeShellPC(pc,precond,false); PCHKERRQ(ksp,ierr);
3137 }
3138 if (A)
3139 {
3140 Mat P;
3141
3142 ierr = KSPGetOperators(ksp,NULL,&P); PCHKERRQ(ksp,ierr);
3143 ierr = PetscObjectReference((PetscObject)P); PCHKERRQ(ksp,ierr);
3144 ierr = KSPSetOperators(ksp,A,P); PCHKERRQ(ksp,ierr);
3145 ierr = MatDestroy(&A); PCHKERRQ(ksp,ierr);
3146 ierr = MatDestroy(&P); PCHKERRQ(ksp,ierr);
3147 }
3148}
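// Note: a small usage sketch (illustrative names; `solver` is a
// PetscLinearSolver with its operator already set). A PetscPreconditioner is
// attached directly via KSPSetPC(), while any other mfem::Solver, e.g. a
// HypreBoomerAMG instance, is wrapped through a shell PC without ownership
// transfer:
//
//   mfem::HypreBoomerAMG amg;
//   solver.SetPreconditioner(amg);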
3149
3150void PetscLinearSolver::MultKernel(const Vector &b, Vector &x, bool trans) const
3151{
3152 KSP ksp = (KSP)obj;
3153
3154 if (!B || !X)
3155 {
3156 Mat pA = NULL;
3157 ierr = KSPGetOperators(ksp, &pA, NULL); PCHKERRQ(obj, ierr);
3158 if (!B)
3159 {
3160 PetscParMatrix A = PetscParMatrix(pA, true);
3161 B = new PetscParVector(A, true, false);
3162 }
3163 if (!X)
3164 {
3165 PetscParMatrix A = PetscParMatrix(pA, true);
3166 X = new PetscParVector(A, false, false);
3167 }
3168 }
3169 B->PlaceMemory(b.GetMemory());
3170
3171 Customize();
3172
3173 PetscBool flg;
3174 ierr = KSPGetInitialGuessNonzero(ksp, &flg);
3175 X->PlaceMemory(x.GetMemory(),flg);
3176
3177 // Solve the system.
3178 if (trans)
3179 {
3180 ierr = KSPSolveTranspose(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3181 }
3182 else
3183 {
3184 ierr = KSPSolve(ksp, B->x, X->x); PCHKERRQ(ksp,ierr);
3185 }
3186 B->ResetMemory();
3187 X->ResetMemory();
3188}
3189
3190void PetscLinearSolver::Mult(const Vector &b, Vector &x) const
3191{
3192 (*this).MultKernel(b,x,false);
3193}
3194
3195void PetscLinearSolver::MultTranspose(const Vector &b, Vector &x) const
3196{
3197 (*this).MultKernel(b,x,true);
3198}
3199
3200PetscLinearSolver::~PetscLinearSolver()
3201{
3202 MPI_Comm comm;
3203 KSP ksp = (KSP)obj;
3204 ierr = PetscObjectGetComm((PetscObject)ksp,&comm); PCHKERRQ(ksp,ierr);
3205 ierr = KSPDestroy(&ksp); CCHKERRQ(comm,ierr);
3206}
3207
3208// PetscPCGSolver methods
3209
3210PetscPCGSolver::PetscPCGSolver(MPI_Comm comm, const std::string &prefix,
3211 bool iter_mode)
3212 : PetscLinearSolver(comm,prefix,true,iter_mode)
3213{
3214 KSP ksp = (KSP)obj;
3215 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3216 // this is to obtain a textbook PCG
3217 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3218}
3219
3220PetscPCGSolver::PetscPCGSolver(PetscParMatrix &A, const std::string &prefix,
3221                               bool iter_mode)
3222 : PetscLinearSolver(A,prefix,iter_mode)
3223{
3224 KSP ksp = (KSP)obj;
3225 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3226 // this is to obtain a textbook PCG
3227 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3228}
3229
3230PetscPCGSolver::PetscPCGSolver(HypreParMatrix &A, bool wrap,
3231                               const std::string &prefix, bool iter_mode)
3232 : PetscLinearSolver(A,wrap,prefix,iter_mode)
3233{
3234 KSP ksp = (KSP)obj;
3235 ierr = KSPSetType(ksp,KSPCG); PCHKERRQ(ksp,ierr);
3236 // this is to obtain a textbook PCG
3237 ierr = KSPSetNormType(ksp,KSP_NORM_NATURAL); PCHKERRQ(ksp,ierr);
3238}
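// Note: the KSP type and norm chosen in these constructors can still be
// overridden at run time through the PETSc options database (Customize()
// calls KSPSetFromOptions()), e.g. with "-ksp_type cg -ksp_norm_type natural"
// prefixed by the constructor's options prefix, if any.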
3239
3240// PetscPreconditioner methods
3241
3242PetscPreconditioner::PetscPreconditioner(MPI_Comm comm,
3243                                         const std::string &prefix)
3244 : PetscSolver(), Solver()
3245{
3246 PC pc;
3247 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3248 obj = (PetscObject)pc;
3249 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3250 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3251}
3252
3253PetscPreconditioner::PetscPreconditioner(PetscParMatrix &A,
3254                                         const string &prefix)
3255 : PetscSolver(), Solver()
3256{
3257 PC pc;
3258 ierr = PCCreate(A.GetComm(),&pc); CCHKERRQ(A.GetComm(),ierr);
3259 obj = (PetscObject)pc;
3260 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3261 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3262 SetOperator(A);
3263}
3264
3265PetscPreconditioner::PetscPreconditioner(MPI_Comm comm, Operator &op,
3266                                         const string &prefix)
3267 : PetscSolver(), Solver()
3268{
3269 PC pc;
3270 ierr = PCCreate(comm,&pc); CCHKERRQ(comm,ierr);
3271 obj = (PetscObject)pc;
3272 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
3273 ierr = PCSetOptionsPrefix(pc, prefix.c_str()); PCHKERRQ(pc, ierr);
3274 SetOperator(op);
3275}
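// Note: a usage sketch for an options-driven preconditioner (illustrative
// names; `pA` is a PetscParMatrix). With the "jac_" prefix, the PC type can
// be selected from the command line, e.g. "-jac_pc_type jacobi":
//
//   mfem::PetscPreconditioner prec(pA, "jac_");
//   // prec.Mult(b, x) applies the PC configured via the options database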
3276
3277void PetscPreconditioner::SetOperator(const Operator &op)
3278{
3279 bool delete_pA = false;
3280 PetscParMatrix *pA = const_cast<PetscParMatrix *>
3281 (dynamic_cast<const PetscParMatrix *>(&op));
3282
3283 if (!pA)
3284 {
3285 const Operator *cop = dynamic_cast<const Operator *>(&op);
3286 pA = new PetscParMatrix(PetscObjectComm(obj),cop,PETSC_MATAIJ);
3287 delete_pA = true;
3288 }
3289
3290 // Set operators into PETSc PC
3291 PC pc = (PC)obj;
3292 Mat A = pA->A;
3293 if (operatorset)
3294 {
3295 Mat C;
3296 PetscInt nheight,nwidth,oheight,owidth;
3297
3298 ierr = PCGetOperators(pc,&C,NULL); PCHKERRQ(pc,ierr);
3299 ierr = MatGetSize(A,&nheight,&nwidth); PCHKERRQ(A,ierr);
3300 ierr = MatGetSize(C,&oheight,&owidth); PCHKERRQ(A,ierr);
3301 if (nheight != oheight || nwidth != owidth)
3302 {
3303 // reinit without destroying the PC
3304 // communicator remains the same
3305 ierr = PCReset(pc); PCHKERRQ(pc,ierr);
3306 delete X;
3307 delete B;
3308 X = B = NULL;
3309 }
3310 }
3311 ierr = PCSetOperators(pc,pA->A,pA->A); PCHKERRQ(obj,ierr);
3312
3313 // Update PetscSolver
3314 operatorset = true;
3315
3316 // Update the Operator fields.
3317 height = pA->Height();
3318 width = pA->Width();
3319
3320 if (delete_pA) { delete pA; };
3321}
3322
3323void PetscPreconditioner::MultKernel(const Vector &b, Vector &x,
3324 bool trans) const
3325{
3326 MFEM_VERIFY(!iterative_mode,
3327 "Iterative mode not supported for PetscPreconditioner");
3328 PC pc = (PC)obj;
3329
3330 if (!B || !X)
3331 {
3332 Mat pA = NULL;
3333 ierr = PCGetOperators(pc, NULL, &pA); PCHKERRQ(obj, ierr);
3334 if (!B)
3335 {
3336 PetscParMatrix A(pA, true);
3337 B = new PetscParVector(A, true, false);
3338 }
3339 if (!X)
3340 {
3341 PetscParMatrix A(pA, true);
3342 X = new PetscParVector(A, false, false);
3343 }
3344 }
3345 B->PlaceMemory(b.GetMemory());
3346 X->PlaceMemory(x.GetMemory());
3347
3348 Customize();
3349
3350 // Apply the preconditioner.
3351 if (trans)
3352 {
3353 ierr = PCApplyTranspose(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3354 }
3355 else
3356 {
3357 ierr = PCApply(pc, B->x, X->x); PCHKERRQ(pc, ierr);
3358 }
3359 B->ResetMemory();
3360 X->ResetMemory();
3361}
3362
3363void PetscPreconditioner::Mult(const Vector &b, Vector &x) const
3364{
3365 (*this).MultKernel(b,x,false);
3366}
3367
3368void PetscPreconditioner::MultTranspose(const Vector &b, Vector &x) const
3369{
3370 (*this).MultKernel(b,x,true);
3371}
3372
3373PetscPreconditioner::~PetscPreconditioner()
3374{
3375 MPI_Comm comm;
3376 PC pc = (PC)obj;
3377 ierr = PetscObjectGetComm((PetscObject)pc,&comm); PCHKERRQ(pc,ierr);
3378 ierr = PCDestroy(&pc); CCHKERRQ(comm,ierr);
3379}
3380
3381// PetscBDDCSolver methods
3382
3383// Coordinates sampling function
3384static void func_coords(const Vector &x, Vector &y)
3385{
3386 y = x;
3387}
3388
3389void PetscBDDCSolver::BDDCSolverConstructor(const PetscBDDCSolverParams &opts)
3390{
3391 MPI_Comm comm = PetscObjectComm(obj);
3392
3393 // get PETSc object
3394 PC pc = (PC)obj;
3395 Mat pA;
3396 ierr = PCGetOperators(pc,NULL,&pA); PCHKERRQ(pc,ierr);
3397
3398   // the matrix must be of type MATIS
3399 PetscBool ismatis;
3400 ierr = PetscObjectTypeCompare((PetscObject)pA,MATIS,&ismatis);
3401 PCHKERRQ(pA,ierr);
3402 MFEM_VERIFY(ismatis,"PetscBDDCSolver needs the matrix in unassembled format");
3403
3404 // Check options
3405 ParFiniteElementSpace *fespace = opts.fespace;
3406 if (opts.netflux && !fespace)
3407 {
3408 MFEM_WARNING("Don't know how to compute an auxiliary quadrature form without a ParFiniteElementSpace");
3409 }
3410
3411 // Attach default near-null space to local matrices
3412 {
3413 MatNullSpace nnsp;
3414 Mat lA;
3415 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3416 ierr = MatNullSpaceCreate(PetscObjectComm((PetscObject)lA),PETSC_TRUE,0,NULL,
3417 &nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3418 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3419 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3420 }
3421
3422 // set PETSc PC type to PCBDDC
3423 ierr = PCSetType(pc,PCBDDC); PCHKERRQ(obj,ierr);
3424
3425 PetscInt rst,nl;
3426 ierr = MatGetOwnershipRange(pA,&rst,NULL); PCHKERRQ(pA,ierr);
3427 ierr = MatGetLocalSize(pA,&nl,NULL); PCHKERRQ(pA,ierr);
3428
3429 // index sets for fields splitting and coordinates for nodal spaces
3430 IS *fields = NULL;
3431 PetscInt nf = 0;
3432 PetscInt sdim = 0;
3433 PetscReal *coords = NULL;
3434 if (fespace)
3435 {
3436 int vdim = fespace->GetVDim();
3437
3438      // Ideally, the block size should be set at matrix creation,
3439      // but the MFEM assembly does not allow us to do so
3440 if (fespace->GetOrdering() == Ordering::byVDIM)
3441 {
3442 Mat lA;
3443 ierr = MatSetBlockSize(pA,vdim); PCHKERRQ(pA,ierr);
3444 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(PETSC_COMM_SELF,ierr);
3445 ierr = MatSetBlockSize(lA,vdim); PCHKERRQ(pA,ierr);
3446 }
3447
3448 // fields
3449 if (vdim > 1)
3450 {
3451 PetscInt st = rst, bs, inc, nlf;
3452 nf = vdim;
3453 nlf = nl/nf;
3454 ierr = PetscMalloc1(nf,&fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3455 if (fespace->GetOrdering() == Ordering::byVDIM)
3456 {
3457 inc = 1;
3458 bs = vdim;
3459 }
3460 else
3461 {
3462 inc = nlf;
3463 bs = 1;
3464 }
3465 for (PetscInt i = 0; i < nf; i++)
3466 {
3467 ierr = ISCreateStride(comm,nlf,st,bs,&fields[i]); CCHKERRQ(comm,ierr);
3468 st += inc;
3469 }
3470 }
3471
3472 // coordinates
3473 const FiniteElementCollection *fec = fespace->FEColl();
3474 bool h1space = dynamic_cast<const H1_FECollection*>(fec);
3475 if (h1space)
3476 {
3477 ParFiniteElementSpace *fespace_coords = fespace;
3478
3479 sdim = fespace->GetParMesh()->SpaceDimension();
3480 if (vdim != sdim || fespace->GetOrdering() != Ordering::byVDIM)
3481 {
3482 fespace_coords = new ParFiniteElementSpace(fespace->GetParMesh(),fec,sdim,
3483                                                    Ordering::byVDIM);
3484      }
3485 VectorFunctionCoefficient coeff_coords(sdim, func_coords);
3486 ParGridFunction gf_coords(fespace_coords);
3487 gf_coords.ProjectCoefficient(coeff_coords);
3488 HypreParVector *hvec_coords = gf_coords.ParallelProject();
3489 PetscScalar *data_coords = (PetscScalar*)mfem::Read(hvec_coords->GetMemory(),
3490 hvec_coords->Size(),false);
3491
3492 // likely elasticity -> we attach rigid-body modes as near-null space information to the local matrices
3493 // and to the global matrix
3494 if (vdim == sdim)
3495 {
3496 MatNullSpace nnsp;
3497 Mat lA;
3498 Vec pvec_coords,lvec_coords;
3499 ISLocalToGlobalMapping l2g;
3500 PetscSF sf;
3501 PetscLayout rmap;
3502 const PetscInt *gidxs;
3503 PetscInt nleaves;
3504
3505 ierr = VecCreateMPIWithArray(comm,sdim,hvec_coords->Size(),
3506 hvec_coords->GlobalSize(),data_coords,&pvec_coords);
3507 CCHKERRQ(comm,ierr);
3508 ierr = MatGetNearNullSpace(pA,&nnsp); CCHKERRQ(comm,ierr);
3509 if (!nnsp)
3510 {
3511 ierr = MatNullSpaceCreateRigidBody(pvec_coords,&nnsp);
3512 CCHKERRQ(comm,ierr);
3513 ierr = MatSetNearNullSpace(pA,nnsp); CCHKERRQ(comm,ierr);
3514 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(comm,ierr);
3515 }
3516 ierr = MatISGetLocalMat(pA,&lA); CCHKERRQ(comm,ierr);
3517 ierr = MatCreateVecs(lA,&lvec_coords,NULL); CCHKERRQ(PETSC_COMM_SELF,ierr);
3518 ierr = VecSetBlockSize(lvec_coords,sdim); CCHKERRQ(PETSC_COMM_SELF,ierr);
3519 ierr = MatGetLocalToGlobalMapping(pA,&l2g,NULL); CCHKERRQ(comm,ierr);
3520 ierr = MatGetLayouts(pA,&rmap,NULL); CCHKERRQ(comm,ierr);
3521 ierr = PetscSFCreate(comm,&sf); CCHKERRQ(comm,ierr);
3522 ierr = ISLocalToGlobalMappingGetIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3523 ierr = ISLocalToGlobalMappingGetSize(l2g,&nleaves); CCHKERRQ(comm,ierr);
3524 ierr = PetscSFSetGraphLayout(sf,rmap,nleaves,NULL,PETSC_OWN_POINTER,gidxs);
3525 CCHKERRQ(comm,ierr);
3526 ierr = ISLocalToGlobalMappingRestoreIndices(l2g,&gidxs); CCHKERRQ(comm,ierr);
3527 {
3528 const PetscScalar *garray;
3529 PetscScalar *larray;
3530
3531 ierr = VecGetArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3532 ierr = VecGetArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3533#if PETSC_VERSION_LT(3,15,0)
3534 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3535 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray); CCHKERRQ(comm,ierr);
3536#else
3537 ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3538 CCHKERRQ(comm,ierr);
3539 ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,garray,larray,MPI_REPLACE);
3540 CCHKERRQ(comm,ierr);
3541#endif
3542 ierr = VecRestoreArrayRead(pvec_coords,&garray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3543 ierr = VecRestoreArray(lvec_coords,&larray); CCHKERRQ(PETSC_COMM_SELF,ierr);
3544 }
3545 ierr = VecDestroy(&pvec_coords); CCHKERRQ(comm,ierr);
3546 ierr = MatNullSpaceCreateRigidBody(lvec_coords,&nnsp);
3547 CCHKERRQ(PETSC_COMM_SELF,ierr);
3548 ierr = VecDestroy(&lvec_coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3549 ierr = MatSetNearNullSpace(lA,nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3550 ierr = MatNullSpaceDestroy(&nnsp); CCHKERRQ(PETSC_COMM_SELF,ierr);
3551 ierr = PetscSFDestroy(&sf); CCHKERRQ(PETSC_COMM_SELF,ierr);
3552 }
3553
3554      // each dof has an associated tuple of coordinates
3555 ierr = PetscMalloc1(nl*sdim,&coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3556 if (nf > 0)
3557 {
3558 for (PetscInt i = 0; i < nf; i++)
3559 {
3560 const PetscInt *idxs;
3561 PetscInt nn;
3562
3563            // This also handles the case where the fespace is not ordered by VDIM
3564 ierr = ISGetLocalSize(fields[i],&nn); CCHKERRQ(comm,ierr);
3565 ierr = ISGetIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3566 for (PetscInt j = 0; j < nn; j++)
3567 {
3568 PetscInt idx = idxs[j]-rst;
3569 for (PetscInt d = 0; d < sdim; d++)
3570 {
3571 coords[sdim*idx+d] = PetscRealPart(data_coords[sdim*j+d]);
3572 }
3573 }
3574 ierr = ISRestoreIndices(fields[i],&idxs); CCHKERRQ(comm,ierr);
3575 }
3576 }
3577 else
3578 {
3579 for (PetscInt j = 0; j < nl*sdim; j++) { coords[j] = PetscRealPart(data_coords[j]); }
3580 }
3581 if (fespace_coords != fespace)
3582 {
3583 delete fespace_coords;
3584 }
3585 delete hvec_coords;
3586 }
3587 }
3588
3589 // index sets for boundary dofs specification (Essential = dir, Natural = neu)
3590 IS dir = NULL, neu = NULL;
3591
3592 // Extract l2l matrices
3593 Array<Mat> *l2l = NULL;
3594 if (opts.ess_dof_local || opts.nat_dof_local)
3595 {
3596 PetscContainer c;
3597
3598 ierr = PetscObjectQuery((PetscObject)pA,"_MatIS_PtAP_l2l",(PetscObject*)&c);
3599 MFEM_VERIFY(c,"Local-to-local PETSc container not present");
3600 ierr = PetscContainerGetPointer(c,(void**)&l2l); PCHKERRQ(c,ierr);
3601 }
3602
3603 // check information about index sets (essential dofs, fields, etc.)
3604#ifdef MFEM_DEBUG
3605 {
3606 // make sure ess/nat_dof have been collectively set
3607 PetscBool lpr = PETSC_FALSE,pr;
3608 if (opts.ess_dof) { lpr = PETSC_TRUE; }
3609 mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3610 CCHKERRQ(comm,mpiierr);
3611 MFEM_VERIFY(lpr == pr,"ess_dof should be collectively set");
3612 lpr = PETSC_FALSE;
3613 if (opts.nat_dof) { lpr = PETSC_TRUE; }
3614 mpiierr = MPI_Allreduce(&lpr,&pr,1,MPIU_BOOL,MPI_LOR,comm);
3615 CCHKERRQ(comm,mpiierr);
3616 MFEM_VERIFY(lpr == pr,"nat_dof should be collectively set");
3617 // make sure fields have been collectively set
3618 PetscInt ms[2],Ms[2];
3619 ms[0] = -nf; ms[1] = nf;
3620 mpiierr = MPI_Allreduce(&ms,&Ms,2,MPIU_INT,MPI_MAX,comm);
3621 CCHKERRQ(comm,mpiierr);
3622 MFEM_VERIFY(-Ms[0] == Ms[1],
3623 "number of fields should be the same across processes");
3624 }
3625#endif
3626
3627 // boundary sets
3628 if (opts.ess_dof)
3629 {
3630 PetscInt st = opts.ess_dof_local ? 0 : rst;
3631 if (!opts.ess_dof_local)
3632 {
3633 // need to compute the boundary dofs in global ordering
3634 ierr = Convert_Array_IS(comm,true,opts.ess_dof,st,&dir);
3635 CCHKERRQ(comm,ierr);
3636 ierr = PCBDDCSetDirichletBoundaries(pc,dir); PCHKERRQ(pc,ierr);
3637 }
3638 else
3639 {
3640 // need to compute a list for the marked boundary dofs in local ordering
3641 ierr = Convert_Vmarks_IS(comm,*l2l,opts.ess_dof,st,&dir);
3642 CCHKERRQ(comm,ierr);
3643 ierr = PCBDDCSetDirichletBoundariesLocal(pc,dir); PCHKERRQ(pc,ierr);
3644 }
3645 }
3646 if (opts.nat_dof)
3647 {
3648 PetscInt st = opts.nat_dof_local ? 0 : rst;
3649 if (!opts.nat_dof_local)
3650 {
3651 // need to compute the boundary dofs in global ordering
3652 ierr = Convert_Array_IS(comm,true,opts.nat_dof,st,&neu);
3653 CCHKERRQ(comm,ierr);
3654 ierr = PCBDDCSetNeumannBoundaries(pc,neu); PCHKERRQ(pc,ierr);
3655 }
3656 else
3657 {
3658 // need to compute a list for the marked boundary dofs in local ordering
3659 ierr = Convert_Vmarks_IS(comm,*l2l,opts.nat_dof,st,&neu);
3660 CCHKERRQ(comm,ierr);
3661 ierr = PCBDDCSetNeumannBoundariesLocal(pc,neu); PCHKERRQ(pc,ierr);
3662 }
3663 }
3664
3665 // field splitting
3666 if (fields)
3667 {
3668 ierr = PCBDDCSetDofsSplitting(pc,nf,fields); PCHKERRQ(pc,ierr);
3669 }
3670 for (PetscInt i = 0; i < nf; i++)
3671 {
3672 ierr = ISDestroy(&fields[i]); CCHKERRQ(comm,ierr);
3673 }
3674 ierr = PetscFree(fields); CCHKERRQ(PETSC_COMM_SELF,ierr);
3675
3676 // coordinates
3677 if (coords)
3678 {
3679 ierr = PCSetCoordinates(pc,sdim,nl,coords); PCHKERRQ(pc,ierr);
3680 }
3681 ierr = PetscFree(coords); CCHKERRQ(PETSC_COMM_SELF,ierr);
3682
3683   // code for block size is disabled since we cannot change the matrix
3684   // block size after it has been set up
3685 // int bs = 1;
3686
3687 // Customize using the finite element space (if any)
3688 if (fespace)
3689 {
3690 const FiniteElementCollection *fec = fespace->FEColl();
3691 bool edgespace, rtspace, h1space;
3692 bool needint = opts.netflux;
3693 bool tracespace, rt_tracespace, edge_tracespace;
3694 int vdim, dim, p;
3695 PetscBool B_is_Trans = PETSC_FALSE;
3696
3697 ParMesh *pmesh = (ParMesh *) fespace->GetMesh();
3698 dim = pmesh->Dimension();
3699 vdim = fespace->GetVDim();
3700 h1space = dynamic_cast<const H1_FECollection*>(fec);
3701 rtspace = dynamic_cast<const RT_FECollection*>(fec);
3702 edgespace = dynamic_cast<const ND_FECollection*>(fec);
3703 edge_tracespace = dynamic_cast<const ND_Trace_FECollection*>(fec);
3704 rt_tracespace = dynamic_cast<const RT_Trace_FECollection*>(fec);
3705 tracespace = edge_tracespace || rt_tracespace;
3706
3707 p = 1;
3708 if (fespace->GetNE() > 0)
3709 {
3710 if (!tracespace)
3711 {
3712 p = fespace->GetElementOrder(0);
3713 }
3714 else
3715 {
3716 p = fespace->GetFaceOrder(0);
3717 if (dim == 2) { p++; }
3718 }
3719 }
3720
3721 if (edgespace) // H(curl)
3722 {
3723 if (dim == 2)
3724 {
3725 needint = true;
3726 if (tracespace)
3727 {
3728 MFEM_WARNING("Tracespace case doesn't work for H(curl) and p=2,"
3729 " not using auxiliary quadrature");
3730 needint = false;
3731 }
3732 }
3733 else
3734 {
3735 FiniteElementCollection *vfec;
3736 if (tracespace)
3737 {
3738 vfec = new H1_Trace_FECollection(p,dim);
3739 }
3740 else
3741 {
3742 vfec = new H1_FECollection(p,dim);
3743 }
3744 ParFiniteElementSpace *vfespace = new ParFiniteElementSpace(pmesh,vfec);
3745 ParDiscreteLinearOperator *grad;
3746 grad = new ParDiscreteLinearOperator(vfespace,fespace);
3747 if (tracespace)
3748 {
3749 grad->AddTraceFaceInterpolator(new GradientInterpolator);
3750 }
3751 else
3752 {
3753 grad->AddDomainInterpolator(new GradientInterpolator);
3754 }
3755 grad->Assemble();
3756 grad->Finalize();
3757 HypreParMatrix *hG = grad->ParallelAssemble();
3758 PetscParMatrix *G = new PetscParMatrix(hG,PETSC_MATAIJ);
3759 delete hG;
3760 delete grad;
3761
3762 PetscBool conforming = PETSC_TRUE;
3763 if (pmesh->Nonconforming()) { conforming = PETSC_FALSE; }
3764 ierr = PCBDDCSetDiscreteGradient(pc,*G,p,0,PETSC_TRUE,conforming);
3765 PCHKERRQ(pc,ierr);
3766 delete vfec;
3767 delete vfespace;
3768 delete G;
3769 }
3770 }
3771 else if (rtspace) // H(div)
3772 {
3773 needint = true;
3774 if (tracespace)
3775 {
3776 MFEM_WARNING("Tracespace case doesn't work for H(div), not using"
3777 " auxiliary quadrature");
3778 needint = false;
3779 }
3780 }
3781 else if (h1space) // H(grad), only for the vector case
3782 {
3783 if (vdim != dim) { needint = false; }
3784 }
3785
3786 PetscParMatrix *B = NULL;
3787 if (needint)
3788 {
3789      // Generate a bilinear form in unassembled format; it is used to
3790      // compute the net flux across subdomain boundaries for H(div) and
3791      // Elasticity/Stokes, and the line integral \int u x n of 2D H(curl) fields
3792 FiniteElementCollection *auxcoll;
3793 if (tracespace) { auxcoll = new RT_Trace_FECollection(p,dim); }
3794 else
3795 {
3796 if (h1space)
3797 {
3798 auxcoll = new H1_FECollection(std::max(p-1,1),dim);
3799 }
3800 else
3801 {
3802 auxcoll = new L2_FECollection(p,dim);
3803 }
3804 }
3805 ParFiniteElementSpace *pspace = new ParFiniteElementSpace(pmesh,auxcoll);
3806 ParMixedBilinearForm *b = new ParMixedBilinearForm(fespace,pspace);
3807
3808 if (edgespace)
3809 {
3810 if (tracespace)
3811 {
3812 b->AddTraceFaceIntegrator(new VectorFECurlIntegrator);
3813 }
3814 else
3815 {
3816 b->AddDomainIntegrator(new VectorFECurlIntegrator);
3817 }
3818 }
3819 else if (rtspace)
3820 {
3821 if (tracespace)
3822 {
3823 b->AddTraceFaceIntegrator(new VectorFEDivergenceIntegrator);
3824 }
3825 else
3826 {
3827 b->AddDomainIntegrator(new VectorFEDivergenceIntegrator);
3828 }
3829 }
3830 else
3831 {
3832 b->AddDomainIntegrator(new VectorDivergenceIntegrator);
3833 }
3834 b->Assemble();
3835 b->Finalize();
3836 OperatorHandle Bh(Operator::PETSC_MATIS);
3837 b->ParallelAssemble(Bh);
3838 Bh.Get(B);
3839 Bh.SetOperatorOwner(false);
3840
3841 if (dir) // if essential dofs are present, we need to zero the columns
3842 {
3843 Mat pB = *B;
3844 ierr = MatTranspose(pB,MAT_INPLACE_MATRIX,&pB); PCHKERRQ(pA,ierr);
3845 if (!opts.ess_dof_local)
3846 {
3847 ierr = MatZeroRowsIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3848 }
3849 else
3850 {
3851 ierr = MatZeroRowsLocalIS(pB,dir,0.,NULL,NULL); PCHKERRQ(pA,ierr);
3852 }
3853 B_is_Trans = PETSC_TRUE;
3854 }
3855 delete b;
3856 delete pspace;
3857 delete auxcoll;
3858 }
3859
3860 if (B)
3861 {
3862 ierr = PCBDDCSetDivergenceMat(pc,*B,B_is_Trans,NULL); PCHKERRQ(pc,ierr);
3863 }
3864 delete B;
3865 }
3866 ierr = ISDestroy(&dir); PCHKERRQ(pc,ierr);
3867 ierr = ISDestroy(&neu); PCHKERRQ(pc,ierr);
3868}
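// Note: a minimal setup sketch, assuming the PetscBDDCSolverParams interface
// from petsc.hpp (illustrative names; `A` must be a PetscParMatrix in
// unassembled MATIS format, e.g. assembled with Operator::PETSC_MATIS, and
// `ess_tdof` marks the essential dofs):
//
//   mfem::PetscBDDCSolverParams opts;
//   opts.SetSpace(fespace);        // enables fields/coordinates/nnsp setup
//   opts.SetEssBdrDofs(&ess_tdof); // Dirichlet boundary specification
//   mfem::PetscBDDCSolver bddc(A, opts);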
3869
3870PetscBDDCSolver::PetscBDDCSolver(PetscParMatrix &A,
3871                                 const PetscBDDCSolverParams &opts,
3872 const std::string &prefix)
3873 : PetscPreconditioner(A,prefix)
3874{
3875 BDDCSolverConstructor(opts);
3876 Customize();
3877}
3878
3879PetscBDDCSolver::PetscBDDCSolver(MPI_Comm comm, Operator &op,
3880                                 const PetscBDDCSolverParams &opts,
3881 const std::string &prefix)
3882 : PetscPreconditioner(comm,op,prefix)
3883{
3884 BDDCSolverConstructor(opts);
3885 Customize();
3886}
3887
3888PetscFieldSplitSolver::PetscFieldSplitSolver(MPI_Comm comm, Operator &op,
3889                                             const string &prefix)
3890 : PetscPreconditioner(comm,op,prefix)
3891{
3892 PC pc = (PC)obj;
3893 ierr = PCSetType(pc,PCFIELDSPLIT); PCHKERRQ(pc,ierr);
3894
3895 Mat pA;
3896 ierr = PCGetOperators(pc,&pA,NULL); PCHKERRQ(pc,ierr);
3897
3898 // Check if pA is of type MATNEST
3899 PetscBool isnest;
3900 ierr = PetscObjectTypeCompare((PetscObject)pA,MATNEST,&isnest);
3901
3902 PetscInt nr = 0;
3903 IS *isrow = NULL;
3904 if (isnest) // we know the fields
3905 {
3906 ierr = MatNestGetSize(pA,&nr,NULL); PCHKERRQ(pc,ierr);
3907 ierr = PetscCalloc1(nr,&isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3908 ierr = MatNestGetISs(pA,isrow,NULL); PCHKERRQ(pc,ierr);
3909 }
3910
3911   // We need to customize here, before setting the index sets,
3912   // because PCFieldSplitSetType overrides the function pointers.
3913   // Subsolver options will be processed during PCApply
3914 Customize();
3915
3916 for (PetscInt i=0; i<nr; i++)
3917 {
3918 ierr = PCFieldSplitSetIS(pc,NULL,isrow[i]); PCHKERRQ(pc,ierr);
3919 }
3920 ierr = PetscFree(isrow); CCHKERRQ(PETSC_COMM_SELF,ierr);
3921}
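// Note: the individual split solvers are configured through the PETSc options
// database using the standard PCFIELDSPLIT option names, e.g.
// "-fieldsplit_0_ksp_type preonly -fieldsplit_0_pc_type lu" for the first
// field (prepend the constructor's prefix, if any).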
3922
3923PetscH2Solver::PetscH2Solver(Operator &op,
3924                             ParFiniteElementSpace *fes,
3925                             const std::string &prefix)
3926 : PetscPreconditioner(fes->GetParMesh()->GetComm(),prefix)
3927{
3928   PetscParMatrix A(GetComm(),&op,Operator::ANY_TYPE);
3929   ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE); PCHKERRQ(A,ierr);
3930 ierr = MatSetOption(A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE); PCHKERRQ(A,ierr);
3931 SetOperator(A);
3932 H2SolverConstructor(fes);
3933 Customize();
3934}
3935
3936void PetscH2Solver::H2SolverConstructor(ParFiniteElementSpace *fes)
3937{
3938#if defined(PETSC_HAVE_H2OPUS)
3939 int sdim = fes->GetParMesh()->SpaceDimension();
3940 int vdim = fes->GetVDim();
3941 const FiniteElementCollection *fec = fes->FEColl();
3942 ParFiniteElementSpace *fes_coords = NULL;
3943
3944 if (vdim != sdim || fes->GetOrdering() != Ordering::byVDIM)
3945 {
3946 fes_coords = new ParFiniteElementSpace(fes->GetParMesh(),fec,sdim,
3947                                             Ordering::byVDIM);
3948      fes = fes_coords;
3949 }
3950 VectorFunctionCoefficient ccoords(sdim, func_coords);
3951
3952 ParGridFunction coords(fes);
3953 coords.ProjectCoefficient(ccoords);
3954 Vector c(fes->GetTrueVSize());
3955 coords.ParallelProject(c);
3956 delete fes_coords;
3957
3958 PC pc = (PC)obj;
3959 ierr = PCSetType(pc,PCH2OPUS); PCHKERRQ(obj, ierr);
3960 ierr = PCSetCoordinates(pc,sdim,c.Size()/sdim,
3961 (PetscReal*)mfem::Read(c.GetMemory(),
3962 c.Size(),false));
3963 ierr = PCSetFromOptions(pc); PCHKERRQ(obj, ierr);
3964#else
3965 MFEM_ABORT("Need PETSc configured with --download-h2opus");
3966#endif
3967}
3968
3969// PetscNonlinearSolver methods
3970
3971PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm,
3972                                           const std::string &prefix)
3973 : PetscSolver(), Solver()
3974{
3975 // Create the actual solver object
3976 SNES snes;
3977 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3978 obj = (PetscObject)snes;
3979 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3980 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3981
3982 // Allocate private solver context
3983   CreatePrivateContext();
3984}
3985
3986PetscNonlinearSolver::PetscNonlinearSolver(MPI_Comm comm, Operator &op,
3987                                           const std::string &prefix)
3988 : PetscSolver(), Solver()
3989{
3990 // Create the actual solver object
3991 SNES snes;
3992 ierr = SNESCreate(comm, &snes); CCHKERRQ(comm, ierr);
3993 obj = (PetscObject)snes;
3994 ierr = PetscObjectGetClassId(obj, &cid); PCHKERRQ(obj, ierr);
3995 ierr = SNESSetOptionsPrefix(snes, prefix.c_str()); PCHKERRQ(snes, ierr);
3996
3997 // Allocate private solver context
3998   CreatePrivateContext();
3999
4000 SetOperator(op);
4001}
4002
4003PetscNonlinearSolver::~PetscNonlinearSolver()
4004{
4005 MPI_Comm comm;
4006 SNES snes = (SNES)obj;
4007 ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj, ierr);
4008 ierr = SNESDestroy(&snes); CCHKERRQ(comm, ierr);
4009}
4010
4011void PetscNonlinearSolver::SetOperator(const Operator &op)
4012{
4013 SNES snes = (SNES)obj;
4014
4015 if (operatorset)
4016 {
4017 PetscBool ls,gs;
4018 void *fctx,*jctx;
4019
4020 ierr = SNESGetFunction(snes, NULL, NULL, &fctx);
4021 PCHKERRQ(snes, ierr);
4022 ierr = SNESGetJacobian(snes, NULL, NULL, NULL, &jctx);
4023 PCHKERRQ(snes, ierr);
4024
4025 ls = (PetscBool)(height == op.Height() && width == op.Width() &&
4026 (void*)&op == fctx &&
4027 (void*)&op == jctx);
4028 mpiierr = MPI_Allreduce(&ls,&gs,1,MPIU_BOOL,MPI_LAND,
4029 PetscObjectComm((PetscObject)snes));
4030 CCHKERRQ(PetscObjectComm((PetscObject)snes),mpiierr);
4031 if (!gs)
4032 {
4033 ierr = SNESReset(snes); PCHKERRQ(snes,ierr);
4034 delete X;
4035 delete B;
4036 X = B = NULL;
4037 }
4038 }
4039 else
4040 {
4041 /* PETSc sets the linesearch type to basic (i.e. no linesearch) if not
4042 yet set. We default to backtracking */
4043 SNESLineSearch ls;
4044 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4045 ierr = SNESLineSearchSetType(ls, SNESLINESEARCHBT); PCHKERRQ(snes,ierr);
4046 }
4047
4048 // If we do not pass matrices in, the default matrix type for DMShell is MATDENSE
4049 // in 3.15, which may cause issues.
4050 Mat dummy;
4051 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)snes),op.Height(),
4052 op.Height(),&dummy);
4053
4054 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4055 snes_ctx->op = (Operator*)&op;
4056 ierr = SNESSetFunction(snes, NULL, __mfem_snes_function, (void *)snes_ctx);
4057 PCHKERRQ(snes, ierr);
4058 ierr = SNESSetJacobian(snes, dummy, dummy, __mfem_snes_jacobian,
4059 (void *)snes_ctx);
4060 PCHKERRQ(snes, ierr);
4061
4062 ierr = MatDestroy(&dummy);
4063 PCHKERRQ(snes, ierr);
4064
4065 // Update PetscSolver
4066 operatorset = true;
4067
4068 // Update the Operator fields.
4069 height = op.Height();
4070 width = op.Width();
4071}
4072
4073void PetscNonlinearSolver::SetJacobianType(Operator::Type jacType)
4074{
4075 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4076 snes_ctx->jacType = jacType;
4077}
4078
4079void PetscNonlinearSolver::SetObjective(void (*objfn)(Operator*, const Vector&,
4080                                                      mfem::real_t*))
4081{
4082 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4083 snes_ctx->objective = objfn;
4084
4085 SNES snes = (SNES)obj;
4086 ierr = SNESSetObjective(snes, __mfem_snes_objective, (void *)snes_ctx);
4087 PCHKERRQ(snes, ierr);
4088}
4089
4090void PetscNonlinearSolver::SetPostCheck(void (*post)(Operator *op, const Vector&,
4091                                                     Vector&, Vector&,
4092 bool&, bool&))
4093{
4094 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4095 snes_ctx->postcheck = post;
4096
4097 SNES snes = (SNES)obj;
4098 SNESLineSearch ls;
4099 ierr = SNESGetLineSearch(snes, &ls); PCHKERRQ(snes,ierr);
4100 ierr = SNESLineSearchSetPostCheck(ls, __mfem_snes_postcheck, (void *)snes_ctx);
4101 PCHKERRQ(ls, ierr);
4102}
4103
4104void PetscNonlinearSolver::SetUpdate(void (*update)(Operator *op, int it,
4105                                                    const Vector&,
4106 const Vector&,
4107 const Vector&,
4108 const Vector&))
4109{
4110 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)private_ctx;
4111 snes_ctx->update = update;
4112
4113 SNES snes = (SNES)obj;
4114 ierr = SNESSetUpdate(snes, __mfem_snes_update); PCHKERRQ(snes, ierr);
4115}
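// Note: sketches of free functions matching the callback signatures
// registered above (names and bodies are illustrative):
//
//   void MyObjective(mfem::Operator *op, const mfem::Vector &x,
//                    mfem::real_t *f) { *f = x.Norml2(); }
//   void MyPostCheck(mfem::Operator *op, const mfem::Vector &X,
//                    mfem::Vector &Y, mfem::Vector &W, bool &cy, bool &cw)
//   { cy = false; cw = false; } // leave the step and solution unchanged
//   void MyUpdate(mfem::Operator *op, int it, const mfem::Vector &F,
//                 const mfem::Vector &X, const mfem::Vector &D,
//                 const mfem::Vector &P) { /* inspect current iterate */ }
//
//   // hypothetical registration on a PetscNonlinearSolver `newton`:
//   // newton.SetObjective(MyObjective);
//   // newton.SetPostCheck(MyPostCheck);
//   // newton.SetUpdate(MyUpdate);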
4116
4117void PetscNonlinearSolver::Mult(const Vector &b, Vector &x) const
4118{
4119 SNES snes = (SNES)obj;
4120
4121 bool b_nonempty = b.Size();
4122 if (!B) { B = new PetscParVector(PetscObjectComm(obj), *this, true); }
4123 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *this, false, false); }
4124   X->PlaceMemory(x.GetMemory(),iterative_mode);
4125   if (b_nonempty) { B->PlaceMemory(b.GetMemory()); }
4126 else { *B = 0.0; }
4127
4128 Customize();
4129
4130 if (!iterative_mode) { *X = 0.; }
4131
4132 // Solve the system.
4133 ierr = SNESSolve(snes, B->x, X->x); PCHKERRQ(snes, ierr);
4134 X->ResetMemory();
4135 if (b_nonempty) { B->ResetMemory(); }
4136}
4137
4138// PetscODESolver methods
4139
4140PetscODESolver::PetscODESolver(MPI_Comm comm, const string &prefix)
4141 : PetscSolver(), ODESolver()
4142{
4143 // Create the actual solver object
4144 TS ts;
4145 ierr = TSCreate(comm,&ts); CCHKERRQ(comm,ierr);
4146 obj = (PetscObject)ts;
4147 ierr = PetscObjectGetClassId(obj,&cid); PCHKERRQ(obj,ierr);
4148 ierr = TSSetOptionsPrefix(ts, prefix.c_str()); PCHKERRQ(ts, ierr);
4149
4150 // Allocate private solver context
4151 CreatePrivateContext();
4152
4153 // Default options, to comply with the current interface to ODESolver.
4154 ierr = TSSetMaxSteps(ts,PETSC_MAX_INT-1);
4155 PCHKERRQ(ts,ierr);
4156 ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
4157 PCHKERRQ(ts,ierr);
4158 TSAdapt tsad;
4159 ierr = TSGetAdapt(ts,&tsad);
4160 PCHKERRQ(ts,ierr);
4161 ierr = TSAdaptSetType(tsad,TSADAPTNONE);
4162 PCHKERRQ(ts,ierr);
4163}
4164
4165PetscODESolver::~PetscODESolver()
4166{
4167 MPI_Comm comm;
4168 TS ts = (TS)obj;
4169 ierr = PetscObjectGetComm(obj,&comm); PCHKERRQ(obj,ierr);
4170 ierr = TSDestroy(&ts); CCHKERRQ(comm,ierr);
4171}
4172
4173void PetscODESolver::Init(TimeDependentOperator &f_,
4174                          enum PetscODESolver::Type type)
4175{
4176 TS ts = (TS)obj;
4177
4178 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4179 if (operatorset)
4180 {
4181 ierr = TSReset(ts); PCHKERRQ(ts,ierr);
4182 delete X;
4183 X = NULL;
4184 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4185 ts_ctx->cached_ijacstate = -1;
4186 ts_ctx->cached_rhsjacstate = -1;
4187 ts_ctx->cached_splits_xstate = -1;
4188 ts_ctx->cached_splits_xdotstate = -1;
4189 }
4190 f = &f_;
4191
4192 // Set functions in TS
4193 ts_ctx->op = &f_;
4194 if (f_.isImplicit())
4195 {
4196 Mat dummy;
4197 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4198 f_.Height(),&dummy);
4199 PCHKERRQ(ts, ierr);
4200 ierr = TSSetIFunction(ts, NULL, __mfem_ts_ifunction, (void *)ts_ctx);
4201 PCHKERRQ(ts, ierr);
4202 ierr = TSSetIJacobian(ts, dummy, dummy, __mfem_ts_ijacobian, (void *)ts_ctx);
4203 PCHKERRQ(ts, ierr);
4204 ierr = TSSetEquationType(ts, TS_EQ_IMPLICIT);
4205 PCHKERRQ(ts, ierr);
4206 ierr = MatDestroy(&dummy);
4207 PCHKERRQ(ts, ierr);
4208 }
4209 if (!f_.isHomogeneous())
4210 {
4211 Mat dummy = NULL;
4212 if (!f_.isImplicit())
4213 {
4214 ierr = TSSetEquationType(ts, TS_EQ_EXPLICIT);
4215 PCHKERRQ(ts, ierr);
4216 }
4217 else
4218 {
4219 ierr = __mfem_MatCreateDummy(PetscObjectComm((PetscObject)ts),f_.Height(),
4220 f_.Height(),&dummy);
4221 PCHKERRQ(ts, ierr);
4222 }
4223 ierr = TSSetRHSFunction(ts, NULL, __mfem_ts_rhsfunction, (void *)ts_ctx);
4224 PCHKERRQ(ts, ierr);
4225 ierr = TSSetRHSJacobian(ts, dummy, dummy, __mfem_ts_rhsjacobian,
4226 (void *)ts_ctx);
4227 PCHKERRQ(ts, ierr);
4228 ierr = MatDestroy(&dummy);
4229 PCHKERRQ(ts, ierr);
4230 }
4231 operatorset = true;
4232
4233 SetType(type);
4234
4235 // Set solution vector
4236 PetscParVector X(PetscObjectComm(obj),*f,false,true);
4237 ierr = TSSetSolution(ts,X); PCHKERRQ(ts,ierr);
4238
4239 // Compose special purpose function for PDE-constrained optimization
4240 PetscBool use = PETSC_TRUE;
4241 ierr = PetscOptionsGetBool(NULL,NULL,"-mfem_use_splitjac",&use,NULL);
4242 if (use && f_.isImplicit())
4243 {
4244 ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4245 __mfem_ts_computesplits);
4246 PCHKERRQ(ts,ierr);
4247 }
4248 else
4249 {
4250 ierr = PetscObjectComposeFunction((PetscObject)ts,"TSComputeSplitJacobians_C",
4251 NULL);
4252 PCHKERRQ(ts,ierr);
4253 }
4254}
4255
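// Usage sketch, assuming `heat` is a TimeDependentOperator supporting
// implicit time stepping: after Init(), the actual scheme can be selected
// at runtime, e.g. with -ts_type beuler.
//
//   mfem::PetscODESolver ode(MPI_COMM_WORLD);
//   ode.Init(heat, mfem::PetscODESolver::ODE_SOLVER_LINEAR);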
4256void PetscODESolver::SetJacobianType(Operator::Type jacType)
4257{
4258 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4259 ts_ctx->jacType = jacType;
4260}
4261
4262PetscODESolver::Type PetscODESolver::GetType() const
4263{
4264 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4265 return ts_ctx->type;
4266}
4267
4268void PetscODESolver::SetType(PetscODESolver::Type type)
4269{
4270 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4271
4272 TS ts = (TS)obj;
4273 ts_ctx->type = type;
4274 if (type == ODE_SOLVER_LINEAR)
4275 {
4276 ierr = TSSetProblemType(ts, TS_LINEAR);
4277 PCHKERRQ(ts, ierr);
4278 }
4279 else
4280 {
4281 ierr = TSSetProblemType(ts, TS_NONLINEAR);
4282 PCHKERRQ(ts, ierr);
4283 }
4284}
4285
4286void PetscODESolver::Step(Vector &x, mfem::real_t &t, mfem::real_t &dt)
4287{
4288 // Pass the parameters to PETSc.
4289 TS ts = (TS)obj;
4290 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4291 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4292
4293 PetscInt i;
4294 ierr = TSGetStepNumber(ts, &i); PCHKERRQ(ts,ierr);
4295
4296 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4297 X->PlaceMemory(x.GetMemory(),true);
4298
4299 Customize();
4300
4301 // Monitor initial step
4302 if (!i)
4303 {
4304 ierr = TSMonitor(ts, i, t, *X); PCHKERRQ(ts,ierr);
4305 }
4306
4307 // Take the step.
4308 ierr = TSSetSolution(ts, *X); PCHKERRQ(ts, ierr);
4309 ierr = TSStep(ts); PCHKERRQ(ts, ierr);
4310
4311 // Return the current time and the time step actually used to the caller.
4312 // We cannot use TSGetTimeStep(), as it returns the next candidate step.
4313 PetscReal pt;
4314 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4315 dt = pt - (PetscReal)t;
4316 t = pt;
4317
4318 // Monitor current step
4319 ierr = TSMonitor(ts, i+1, pt, *X); PCHKERRQ(ts,ierr);
4320
4321 X->ResetMemory();
4322}
4323
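// A minimal time loop sketch using Step(); `u` (the mfem::Vector of unknowns)
// and `t_final` are assumed. Both t and dt are updated in place with the
// values actually used by PETSc:
//
//   mfem::real_t t = 0.0, dt = 1e-2;
//   while (t < t_final)
//   {
//      ode.Step(u, t, dt);
//   }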
4324void PetscODESolver::Run(Vector &x, mfem::real_t &t, mfem::real_t &dt,
4325                         mfem::real_t t_final)
4326{
4327 // Give the parameters to PETSc.
4328 TS ts = (TS)obj;
4329 ierr = TSSetTime(ts, t); PCHKERRQ(ts, ierr);
4330 ierr = TSSetTimeStep(ts, dt); PCHKERRQ(ts, ierr);
4331 ierr = TSSetMaxTime(ts, t_final); PCHKERRQ(ts, ierr);
4332 ierr = TSSetExactFinalTime(ts, TS_EXACTFINALTIME_MATCHSTEP);
4333 PCHKERRQ(ts, ierr);
4334
4335 if (!X) { X = new PetscParVector(PetscObjectComm(obj), *f, false, false); }
4336 X->PlaceMemory(x.GetMemory(),true);
4337
4338 Customize();
4339
4340 // Reset Jacobian caching since the user may have changed
4341 // the parameters of the solver.
4342 // We don't do this in the Step method, because two consecutive
4343 // Step() calls are done with the same operator.
4344 __mfem_ts_ctx *ts_ctx = (__mfem_ts_ctx*)private_ctx;
4345 ts_ctx->cached_shift = std::numeric_limits<PetscReal>::min();
4346 ts_ctx->cached_ijacstate = -1;
4347 ts_ctx->cached_rhsjacstate = -1;
4348 ts_ctx->cached_splits_xstate = -1;
4349 ts_ctx->cached_splits_xdotstate = -1;
4350
4351 // Take the steps.
4352 ierr = TSSolve(ts, X->x); PCHKERRQ(ts, ierr);
4353 X->ResetMemory();
4354
4355 // Return the final time and time step to the caller.
4356 PetscReal pt;
4357 ierr = TSGetTime(ts, &pt); PCHKERRQ(ts,ierr);
4358 t = pt;
4359 ierr = TSGetTimeStep(ts,&pt); PCHKERRQ(ts,ierr);
4360 dt = pt;
4361}
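// Alternatively to the Step() loop above, a single Run() call advances the
// solution all the way to t_final, with TS matching the final time exactly
// (TS_EXACTFINALTIME_MATCHSTEP):
//
//   ode.Run(u, t, dt, t_final);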
4362
4363} // namespace mfem
4364
4365#include "petsc/private/petscimpl.h"
4366#include "petsc/private/matimpl.h"
4367
4368// auxiliary functions
4369static PetscErrorCode __mfem_ts_monitor(TS ts, PetscInt it, PetscReal t, Vec x,
4370 void* ctx)
4371{
4372 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4373
4374 PetscFunctionBeginUser;
4375 if (!monctx)
4376 {
4377 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4378 }
4379 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4380 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4381                                             monctx->monitor);
4382
4383 if (user_monitor->mon_sol)
4384 {
4385 mfem::PetscParVector V(x,true);
4386 user_monitor->MonitorSolution(it,t,V);
4387 }
4388 user_monitor->MonitorSolver(solver);
4389 PetscFunctionReturn(PETSC_SUCCESS);
4390}
4391
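// The monitor context wraps a user-defined mfem::PetscSolverMonitor. A
// minimal sketch of such a monitor (hypothetical class name), attached to a
// solver with PetscSolver::SetMonitor():
//
//   class MyMonitor : public mfem::PetscSolverMonitor
//   {
//   public:
//      MyMonitor() : mfem::PetscSolverMonitor(true, false) {}
//      void MonitorSolution(PetscInt it, PetscReal t,
//                           const mfem::Vector &x) override
//      { mfem::out << "step " << it << ", t = " << t << '\n'; }
//   };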
4392static PetscErrorCode __mfem_ts_ifunction(TS ts, PetscReal t, Vec x, Vec xp,
4393 Vec f,void *ctx)
4394{
4395 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4396
4397 PetscFunctionBeginUser;
4398 mfem::PetscParVector xx(x,true);
4399 mfem::PetscParVector yy(xp,true);
4400 mfem::PetscParVector ff(f,true);
4401
4402 mfem::TimeDependentOperator *op = ts_ctx->op;
4403 op->SetTime(t);
4404
4405 if (ts_ctx->bchandler)
4406 {
4407 // We evaluate the ImplicitMult method with the correct bc;
4408 // this means the correct time derivative for essential boundary
4409 // dofs is zero.
4410 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(xx.Size()); }
4411 if (!ts_ctx->work2) { ts_ctx->work2 = new mfem::Vector(xx.Size()); }
4412 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4413 mfem::Vector* txx = ts_ctx->work;
4414 mfem::Vector* txp = ts_ctx->work2;
4415 bchandler->SetTime(t);
4416 bchandler->ApplyBC(xx,*txx);
4417 bchandler->ZeroBC(yy,*txp);
4418 op->ImplicitMult(*txx,*txp,ff);
4419 // and fix the residual (i.e. f_\partial\Omega = u - g(t))
4420 bchandler->FixResidualBC(xx,ff);
4421 }
4422 else
4423 {
4424 // use the ImplicitMult method of the class
4425 op->ImplicitMult(xx,yy,ff);
4426 }
4427 ff.UpdateVecFromFlags();
4428 PetscFunctionReturn(PETSC_SUCCESS);
4429}
4430
4431static PetscErrorCode __mfem_ts_rhsfunction(TS ts, PetscReal t, Vec x, Vec f,
4432 void *ctx)
4433{
4434 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4435
4436 PetscFunctionBeginUser;
4437 if (ts_ctx->bchandler) { MFEM_ABORT("RHS evaluation with bc not implemented"); } // TODO
4438 mfem::PetscParVector xx(x,true);
4439 mfem::PetscParVector ff(f,true);
4440 mfem::TimeDependentOperator *top = ts_ctx->op;
4441 top->SetTime(t);
4442
4443 // use the ExplicitMult method - compute the RHS function
4444 top->ExplicitMult(xx,ff);
4445
4446 ff.UpdateVecFromFlags();
4447 PetscFunctionReturn(PETSC_SUCCESS);
4448}
4449
4450static PetscErrorCode __mfem_ts_ijacobian(TS ts, PetscReal t, Vec x,
4451 Vec xp, PetscReal shift, Mat A, Mat P,
4452 void *ctx)
4453{
4454 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4455 mfem::Vector *xx;
4456 PetscScalar *array;
4457 PetscReal eps = 0.001; /* 0.1% difference */
4458 PetscInt n;
4459 PetscObjectState state;
4460 PetscErrorCode ierr;
4461
4462 PetscFunctionBeginUser;
4463 // Matrix-free case
4464 if (A && A != P)
4465 {
4466 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4467 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4468 }
4469
4470 // Avoid recomputing the Jacobian if we have already done so.
4471 // The relative tolerance comparison should be fine, given that
4472 // two consecutive shifts should have similar magnitude.
4473 ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4474 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4475 std::abs(ts_ctx->cached_shift/shift - 1.0) < eps &&
4476 state == ts_ctx->cached_ijacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4477
4478 // update time
4479 mfem::TimeDependentOperator *op = ts_ctx->op;
4480 op->SetTime(t);
4481
4482 // wrap Vecs with Vectors
4483 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4484 ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4485 mfem::Vector yy(array,n);
4486 ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4487 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4488 if (!ts_ctx->bchandler)
4489 {
4490 xx = new mfem::Vector(array,n);
4491 }
4492 else
4493 {
4494 // make sure we compute a Jacobian with the correct boundary values
4495 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4496 mfem::Vector txx(array,n);
4497 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4498 xx = ts_ctx->work;
4499 bchandler->SetTime(t);
4500 bchandler->ApplyBC(txx,*xx);
4501 }
4502 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4503
4504 // Use TimeDependentOperator::GetImplicitGradient(x,y,s)
4505 mfem::Operator& J = op->GetImplicitGradient(*xx,yy,shift);
4506 if (!ts_ctx->bchandler) { delete xx; }
4507 ts_ctx->cached_shift = shift;
4508
4509 // Convert to the operator type requested if needed
4510 bool delete_pA = false;
4511 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4512 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4513 if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4514 pA->GetType() != ts_ctx->jacType))
4515 {
4516 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4517 ts_ctx->jacType);
4518 delete_pA = true;
4519 }
4520
4521 // Eliminate essential dofs
4522 if (ts_ctx->bchandler)
4523 {
4524 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4525 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4526 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4527 }
4528
4529 // Get nonzerostate
4530 PetscObjectState nonzerostate;
4531 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4532
4533 // Avoid an unneeded copy of the matrix by replacing the Mat header
4534 Mat B;
4535 B = pA->ReleaseMat(false);
4536 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4537 if (delete_pA) { delete pA; }
4538
4539 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4540 // preconditioner fails because MatCreateSubMatrix_Nest does not
4541 // actually return a matrix. Instead, for efficiency reasons,
4542 // it returns a reference to the submatrix. The second time it
4543 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4544 // aborts since the two submatrices are actually different.
4545 // We circumvent this issue by incrementing the nonzero state
4546 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4547 // This does not impact performance in the case of MATNEST
4548 PetscBool isnest;
4549 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4550 CHKERRQ(ierr);
4551 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4552
4553 // Jacobian reuse
4554 ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_ijacstate);
4555 CHKERRQ(ierr);
4556
4557 // Fool DM
4558 DM dm;
4559 MatType mtype;
4560 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4561 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4562 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4563 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4564 PetscFunctionReturn(PETSC_SUCCESS);
4565}
4566
4567static PetscErrorCode __mfem_ts_computesplits(TS ts,PetscReal t,Vec x,Vec xp,
4568 Mat Ax,Mat Jx,
4569 Mat Axp,Mat Jxp)
4570{
4571 __mfem_ts_ctx* ts_ctx;
4572 mfem::Vector *xx;
4573 PetscScalar *array;
4574 PetscInt n;
4575 PetscObjectState state;
4576 PetscBool rx = PETSC_TRUE, rxp = PETSC_TRUE;
4577 PetscBool assembled;
4578 PetscErrorCode ierr;
4579
4580 PetscFunctionBeginUser;
4581 // Matrix-free cases
4582 if (Ax && Ax != Jx)
4583 {
4584 ierr = MatAssemblyBegin(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4585 ierr = MatAssemblyEnd(Ax,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4586 }
4587 if (Axp && Axp != Jxp)
4588 {
4589 ierr = MatAssemblyBegin(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4590 ierr = MatAssemblyEnd(Axp,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4591 }
4592
4593 ierr = TSGetIJacobian(ts,NULL,NULL,NULL,(void**)&ts_ctx); CHKERRQ(ierr);
4594
4595 // Avoid recomputing the Jacobians if we have already done so
4596 ierr = PetscObjectStateGet((PetscObject)Jx,&state); CHKERRQ(ierr);
4597 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4598 state == ts_ctx->cached_splits_xstate) { rx = PETSC_FALSE; }
4599 ierr = PetscObjectStateGet((PetscObject)Jxp,&state); CHKERRQ(ierr);
4600 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4601 state == ts_ctx->cached_splits_xdotstate) { rxp = PETSC_FALSE; }
4602 if (!rx && !rxp) { PetscFunctionReturn(PETSC_SUCCESS); }
4603
4604 // update time
4605 mfem::TimeDependentOperator *op = ts_ctx->op;
4606 op->SetTime(t);
4607
4608 // wrap Vecs with Vectors
4609 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4610 ierr = VecGetArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4611 mfem::Vector yy(array,n);
4612 ierr = VecRestoreArrayRead(xp,(const PetscScalar**)&array); CHKERRQ(ierr);
4613 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4614 if (!ts_ctx->bchandler)
4615 {
4616 xx = new mfem::Vector(array,n);
4617 }
4618 else
4619 {
4620 // make sure we compute a Jacobian with the correct boundary values
4621 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4622 mfem::Vector txx(array,n);
4623 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4624 xx = ts_ctx->work;
4625 bchandler->SetTime(t);
4626 bchandler->ApplyBC(txx,*xx);
4627 }
4628 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4629
4630 // We don't have a specialized interface, so we compute the split Jacobians
4631 // by evaluating the implicit gradient method twice with the correct shifts
4632
4633 // First we compute the state Jacobian
4634 mfem::Operator& oJx = op->GetImplicitGradient(*xx,yy,0.0);
4635
4636 // Convert to the operator type requested if needed
4637 bool delete_mat = false;
4638 mfem::PetscParMatrix *pJx = const_cast<mfem::PetscParMatrix *>
4639 (dynamic_cast<const mfem::PetscParMatrix *>(&oJx));
4640 if (!pJx || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4641 pJx->GetType() != ts_ctx->jacType))
4642 {
4643 if (pJx)
4644 {
4645 Mat B = *pJx;
4646 ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4647 }
4648 pJx = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&oJx,
4649 ts_ctx->jacType);
4650 delete_mat = true;
4651 }
4652 if (rx)
4653 {
4654 ierr = MatAssembled(Jx,&assembled); CHKERRQ(ierr);
4655 if (assembled)
4656 {
4657 ierr = MatCopy(*pJx,Jx,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4658 }
4659 else
4660 {
4661 Mat B;
4662 ierr = MatDuplicate(*pJx,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4663 ierr = MatHeaderReplace(Jx,&B); CHKERRQ(ierr);
4664 }
4665 }
4666 if (delete_mat) { delete pJx; }
4667 pJx = new mfem::PetscParMatrix(Jx,true);
4668
4669 // Eliminate essential dofs
4670 if (ts_ctx->bchandler)
4671 {
4672 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4673 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4674 pJx->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4675 }
4676
4677 // Then we compute the Jacobian wrt the time derivative of the state.
4678 // Note that this is usually the mass matrix.
4679 mfem::PetscParMatrix *pJxp = NULL;
4680 if (rxp)
4681 {
4682 delete_mat = false;
4683 mfem::Operator& oJxp = op->GetImplicitGradient(*xx,yy,1.0);
4684 pJxp = const_cast<mfem::PetscParMatrix *>
4685 (dynamic_cast<const mfem::PetscParMatrix *>(&oJxp));
4686 if (!pJxp || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4687 pJxp->GetType() != ts_ctx->jacType))
4688 {
4689 if (pJxp)
4690 {
4691 Mat B = *pJxp;
4692 ierr = PetscObjectReference((PetscObject)B); CHKERRQ(ierr);
4693 }
4694 pJxp = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),
4695 &oJxp,ts_ctx->jacType);
4696 delete_mat = true;
4697 }
4698
4699 ierr = MatAssembled(Jxp,&assembled); CHKERRQ(ierr);
4700 if (assembled)
4701 {
4702 ierr = MatCopy(*pJxp,Jxp,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
4703 }
4704 else
4705 {
4706 Mat B;
4707 ierr = MatDuplicate(*pJxp,MAT_COPY_VALUES,&B); CHKERRQ(ierr);
4708 ierr = MatHeaderReplace(Jxp,&B); CHKERRQ(ierr);
4709 }
4710 if (delete_mat) { delete pJxp; }
4711 pJxp = new mfem::PetscParMatrix(Jxp,true);
4712
4713 // Eliminate essential dofs
4714 if (ts_ctx->bchandler)
4715 {
4716 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4717 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4718 pJxp->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy,2.0);
4719 }
4720
4721 // Obtain the time-dependent part of the Jacobian by subtracting
4722 // the state Jacobian.
4723 // We don't use the class operator "-=", since we know that
4724 // the sparsity pattern of the two matrices is the same.
4725 ierr = MatAXPY(*pJxp,-1.0,*pJx,SAME_NONZERO_PATTERN); PCHKERRQ(ts,ierr);
4726 }
4727
4728 // Jacobian reuse
4729 ierr = PetscObjectStateGet((PetscObject)Jx,&ts_ctx->cached_splits_xstate);
4730 CHKERRQ(ierr);
4731 ierr = PetscObjectStateGet((PetscObject)Jxp,&ts_ctx->cached_splits_xdotstate);
4732 CHKERRQ(ierr);
4733
4734 delete pJx;
4735 delete pJxp;
4736 if (!ts_ctx->bchandler) { delete xx; }
4737 PetscFunctionReturn(PETSC_SUCCESS);
4738}
4739
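// For reference: the implicit Jacobian returned by GetImplicitGradient() is
// J(shift) = shift*Jxdot + Jx, so the shift = 0.0 evaluation above yields the
// state Jacobian Jx, while subtracting it from the shift = 1.0 evaluation
// (the MatAXPY call) isolates Jxdot, the Jacobian wrt the time derivative.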
4740static PetscErrorCode __mfem_ts_rhsjacobian(TS ts, PetscReal t, Vec x,
4741 Mat A, Mat P, void *ctx)
4742{
4743 __mfem_ts_ctx* ts_ctx = (__mfem_ts_ctx*)ctx;
4744 mfem::Vector *xx;
4745 PetscScalar *array;
4746 PetscInt n;
4747 PetscObjectState state;
4748 PetscErrorCode ierr;
4749
4750 PetscFunctionBeginUser;
4751 // Matrix-free case
4752 if (A && A != P)
4753 {
4754 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4755 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4756 }
4757
4758 // Avoid recomputing the Jacobian if we have already done so
4759 ierr = PetscObjectStateGet((PetscObject)P,&state); CHKERRQ(ierr);
4760 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR &&
4761 state == ts_ctx->cached_rhsjacstate) { PetscFunctionReturn(PETSC_SUCCESS); }
4762
4763 // update time
4764 mfem::TimeDependentOperator *op = ts_ctx->op;
4765 op->SetTime(t);
4766
4767 // wrap Vec with Vector
4768 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4769 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4770 if (!ts_ctx->bchandler)
4771 {
4772 xx = new mfem::Vector(array,n);
4773 }
4774 else
4775 {
4776 // make sure we compute a Jacobian with the correct boundary values
4777 if (!ts_ctx->work) { ts_ctx->work = new mfem::Vector(n); }
4778 mfem::Vector txx(array,n);
4779 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4780 xx = ts_ctx->work;
4781 bchandler->SetTime(t);
4782 bchandler->ApplyBC(txx,*xx);
4783 }
4784 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4785
4786 // Use TimeDependentOperator::GetExplicitGradient(x)
4787 mfem::Operator& J = op->GetExplicitGradient(*xx);
4788 if (!ts_ctx->bchandler) { delete xx; }
4789
4790 // Convert to the operator type requested if needed
4791 bool delete_pA = false;
4792 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4793 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4794 if (!pA || (ts_ctx->jacType != mfem::Operator::ANY_TYPE &&
4795 pA->GetType() != ts_ctx->jacType))
4796 {
4797 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)ts),&J,
4798 ts_ctx->jacType);
4799 delete_pA = true;
4800 }
4801
4802 // Eliminate essential dofs
4803 if (ts_ctx->bchandler)
4804 {
4805 mfem::PetscBCHandler *bchandler = ts_ctx->bchandler;
4806 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)ts),0);
4807 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4808 }
4809
4810 // Get nonzerostate
4811 PetscObjectState nonzerostate;
4812 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4813
4814 // Avoid an unneeded copy of the matrix by replacing the Mat header
4815 Mat B;
4816 B = pA->ReleaseMat(false);
4817 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4818 if (delete_pA) { delete pA; }
4819
4820 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4821 // preconditioner fails because MatCreateSubMatrix_Nest does not
4822 // actually return a matrix. Instead, for efficiency reasons,
4823 // it returns a reference to the submatrix. The second time it
4824 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4825 // aborts since the two submatrices are actually different.
4826 // We circumvent this issue by incrementing the nonzero state
4827 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4828 // This does not impact performance in the case of MATNEST
4829 PetscBool isnest;
4830 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4831 CHKERRQ(ierr);
4832 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4833
4834 // Jacobian reuse
4835 if (ts_ctx->type == mfem::PetscODESolver::ODE_SOLVER_LINEAR)
4836 {
4837 ierr = TSRHSJacobianSetReuse(ts,PETSC_TRUE); PCHKERRQ(ts,ierr);
4838 }
4839 ierr = PetscObjectStateGet((PetscObject)P,&ts_ctx->cached_rhsjacstate);
4840 CHKERRQ(ierr);
4841
4842 // Fool DM
4843 DM dm;
4844 MatType mtype;
4845 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4846 ierr = TSGetDM(ts,&dm); CHKERRQ(ierr);
4847 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4848 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4849 PetscFunctionReturn(PETSC_SUCCESS);
4850}
4851
4852static PetscErrorCode __mfem_snes_monitor(SNES snes, PetscInt it, PetscReal res,
4853 void* ctx)
4854{
4855 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
4856
4857 PetscFunctionBeginUser;
4858 if (!monctx)
4859 {
4860 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
4861 }
4862
4863 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
4864 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
4865                                             monctx->monitor);
4866 if (user_monitor->mon_sol)
4867 {
4868 Vec x;
4869 PetscErrorCode ierr;
4870
4871 ierr = SNESGetSolution(snes,&x); CHKERRQ(ierr);
4872 mfem::PetscParVector V(x,true);
4873 user_monitor->MonitorSolution(it,res,V);
4874 }
4875 if (user_monitor->mon_res)
4876 {
4877 Vec x;
4878 PetscErrorCode ierr;
4879
4880 ierr = SNESGetFunction(snes,&x,NULL,NULL); CHKERRQ(ierr);
4881 mfem::PetscParVector V(x,true);
4882 user_monitor->MonitorResidual(it,res,V);
4883 }
4884 user_monitor->MonitorSolver(solver);
4885 PetscFunctionReturn(PETSC_SUCCESS);
4886}
4887
4888static PetscErrorCode __mfem_snes_jacobian(SNES snes, Vec x, Mat A, Mat P,
4889 void *ctx)
4890{
4891 PetscScalar *array;
4892 PetscInt n;
4893 PetscErrorCode ierr;
4894 mfem::Vector *xx;
4895 __mfem_snes_ctx *snes_ctx = (__mfem_snes_ctx*)ctx;
4896
4897 PetscFunctionBeginUser;
4898 ierr = VecGetArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4899 ierr = VecGetLocalSize(x,&n); CHKERRQ(ierr);
4900 if (!snes_ctx->bchandler)
4901 {
4902 xx = new mfem::Vector(array,n);
4903 }
4904 else
4905 {
4906 // make sure we compute a Jacobian with the correct boundary values
4907 if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(n); }
4908 mfem::Vector txx(array,n);
4909 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4910 xx = snes_ctx->work;
4911 bchandler->ApplyBC(txx,*xx);
4912 }
4913
4914 // Use Operator::GetGradient(x)
4915 mfem::Operator& J = snes_ctx->op->GetGradient(*xx);
4916 ierr = VecRestoreArrayRead(x,(const PetscScalar**)&array); CHKERRQ(ierr);
4917 if (!snes_ctx->bchandler) { delete xx; }
4918
4919 // Convert to the operator type requested if needed
4920 bool delete_pA = false;
4921 mfem::PetscParMatrix *pA = const_cast<mfem::PetscParMatrix *>
4922 (dynamic_cast<const mfem::PetscParMatrix *>(&J));
4923 if (!pA || (snes_ctx->jacType != mfem::Operator::ANY_TYPE &&
4924 pA->GetType() != snes_ctx->jacType))
4925 {
4926 pA = new mfem::PetscParMatrix(PetscObjectComm((PetscObject)snes),&J,
4927 snes_ctx->jacType);
4928 delete_pA = true;
4929 }
4930
4931 // Eliminate essential dofs
4932 if (snes_ctx->bchandler)
4933 {
4934 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4935 mfem::PetscParVector dummy(PetscObjectComm((PetscObject)snes),0);
4936 pA->EliminateRowsCols(bchandler->GetTDofs(),dummy,dummy);
4937 }
4938
4939 // Get nonzerostate
4940 PetscObjectState nonzerostate;
4941 ierr = MatGetNonzeroState(P,&nonzerostate); CHKERRQ(ierr);
4942
4943 // Avoid an unneeded copy of the matrix by replacing the Mat header
4944 Mat B = pA->ReleaseMat(false);
4945 ierr = MatHeaderReplace(P,&B); CHKERRQ(ierr);
4946 if (delete_pA) { delete pA; }
4947
4948 // When using MATNEST and PCFIELDSPLIT, the second setup of the
4949 // preconditioner fails because MatCreateSubMatrix_Nest does not
4950 // actually return a matrix. Instead, for efficiency reasons,
4951 // it returns a reference to the submatrix. The second time it
4952 // is called, MAT_REUSE_MATRIX is used and MatCreateSubMatrix_Nest
4953 // aborts since the two submatrices are actually different.
4954 // We circumvent this issue by incrementing the nonzero state
4955 // (i.e. PETSc thinks the operator sparsity pattern has changed)
4956 // This does not impact performance in the case of MATNEST
4957 PetscBool isnest;
4958 ierr = PetscObjectTypeCompare((PetscObject)P,MATNEST,&isnest);
4959 CHKERRQ(ierr);
4960 if (isnest) { P->nonzerostate = nonzerostate + 1; }
4961
4962 // Matrix-free case
4963 if (A && A != P)
4964 {
4965 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4966 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
4967 }
4968
4969 // Fool DM
4970 DM dm;
4971 MatType mtype;
4972 ierr = MatGetType(P,&mtype); CHKERRQ(ierr);
4973 ierr = SNESGetDM(snes,&dm); CHKERRQ(ierr);
4974 ierr = DMSetMatType(dm,mtype); CHKERRQ(ierr);
4975 ierr = DMShellSetMatrix(dm,P); CHKERRQ(ierr);
4976 PetscFunctionReturn(PETSC_SUCCESS);
4977}
4978
4979static PetscErrorCode __mfem_snes_function(SNES snes, Vec x, Vec f, void *ctx)
4980{
4981 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
4982
4983 PetscFunctionBeginUser;
4984 mfem::PetscParVector xx(x,true);
4985 mfem::PetscParVector ff(f,true);
4986 if (snes_ctx->bchandler)
4987 {
4988 // we evaluate the Mult method with the correct bc
4989 if (!snes_ctx->work) { snes_ctx->work = new mfem::Vector(xx.Size()); }
4990 mfem::PetscBCHandler *bchandler = snes_ctx->bchandler;
4991 mfem::Vector* txx = snes_ctx->work;
4992 bchandler->ApplyBC(xx,*txx);
4993 snes_ctx->op->Mult(*txx,ff);
4994 // and fix the residual (i.e. f_\partial\Omega = u - g)
4995 bchandler->FixResidualBC(xx,ff);
4996 }
4997 else
4998 {
4999 // use the Mult method of the class
5000 snes_ctx->op->Mult(xx,ff);
5001 }
5002 ff.UpdateVecFromFlags();
5003 PetscFunctionReturn(PETSC_SUCCESS);
5004}
5005
5006static PetscErrorCode __mfem_snes_objective(SNES snes, Vec x, PetscReal *f,
5007 void *ctx)
5008{
5009 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5010
5011 PetscFunctionBeginUser;
5012 if (!snes_ctx->objective)
5013 {
5014 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing objective function");
5015 }
5016 mfem::PetscParVector xx(x,true);
5017 mfem::real_t lf;
5018 (*snes_ctx->objective)(snes_ctx->op,xx,&lf);
5019 *f = (PetscReal)lf;
5020 PetscFunctionReturn(PETSC_SUCCESS);
5021}
5022
5023static PetscErrorCode __mfem_snes_postcheck(SNESLineSearch ls,Vec X,Vec Y,Vec W,
5024 PetscBool *cy,PetscBool *cw, void* ctx)
5025{
5026 __mfem_snes_ctx* snes_ctx = (__mfem_snes_ctx*)ctx;
5027 bool lcy = false,lcw = false;
5028
5029 PetscFunctionBeginUser;
5030 mfem::PetscParVector x(X,true);
5031 mfem::PetscParVector y(Y,true);
5032 mfem::PetscParVector w(W,true);
5033 (*snes_ctx->postcheck)(snes_ctx->op,x,y,w,lcy,lcw);
5034 if (lcy) { y.UpdateVecFromFlags(); *cy = PETSC_TRUE; }
5035 if (lcw) { w.UpdateVecFromFlags(); *cw = PETSC_TRUE; }
5036 PetscFunctionReturn(PETSC_SUCCESS);
5037}
5038
5039static PetscErrorCode __mfem_snes_update(SNES snes, PetscInt it)
5040{
5041 Vec F,X,dX,pX;
5042 __mfem_snes_ctx* snes_ctx;
5043
5044 PetscFunctionBeginUser;
5045 /* Update callback does not use the context */
5046 ierr = SNESGetFunction(snes,&F,NULL,(void **)&snes_ctx); CHKERRQ(ierr);
5047 ierr = SNESGetSolution(snes,&X); CHKERRQ(ierr);
5048 if (!it)
5049 {
5050 ierr = VecDuplicate(X,&pX); CHKERRQ(ierr);
5051 ierr = PetscObjectCompose((PetscObject)snes,"_mfem_snes_xp",(PetscObject)pX);
5052 CHKERRQ(ierr);
5053 ierr = VecDestroy(&pX); CHKERRQ(ierr);
5054 }
5055 ierr = PetscObjectQuery((PetscObject)snes,"_mfem_snes_xp",(PetscObject*)&pX);
5056 CHKERRQ(ierr);
5057 if (!pX) SETERRQ(PetscObjectComm((PetscObject)snes),PETSC_ERR_USER,
5058 "Missing previous solution");
5059 ierr = SNESGetSolutionUpdate(snes,&dX); CHKERRQ(ierr);
5060 mfem::PetscParVector f(F,true);
5061 mfem::PetscParVector x(X,true);
5062 mfem::PetscParVector dx(dX,true);
5063 mfem::PetscParVector px(pX,true);
5064 (*snes_ctx->update)(snes_ctx->op,it,f,x,dx,px);
5065 /* Store previous solution */
5066 ierr = VecCopy(X,pX); CHKERRQ(ierr);
5067 PetscFunctionReturn(PETSC_SUCCESS);
5068}
5069
5070static PetscErrorCode __mfem_ksp_monitor(KSP ksp, PetscInt it, PetscReal res,
5071 void* ctx)
5072{
5073 __mfem_monitor_ctx *monctx = (__mfem_monitor_ctx*)ctx;
5074
5075 PetscFunctionBeginUser;
5076 if (!monctx)
5077 {
5078 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Missing monitor context");
5079 }
5080
5081 mfem::PetscSolver *solver = (mfem::PetscSolver*)(monctx->solver);
5082 mfem::PetscSolverMonitor *user_monitor = (mfem::PetscSolverMonitor *)(
5083                                             monctx->monitor);
5084 if (user_monitor->mon_sol)
5085 {
5086 Vec x;
5087 PetscErrorCode ierr;
5088
5089 ierr = KSPBuildSolution(ksp,NULL,&x); CHKERRQ(ierr);
5090 mfem::PetscParVector V(x,true);
5091 user_monitor->MonitorSolution(it,res,V);
5092 }
5093 if (user_monitor->mon_res)
5094 {
5095 Vec x;
5096 PetscErrorCode ierr;
5097
5098 ierr = KSPBuildResidual(ksp,NULL,NULL,&x); CHKERRQ(ierr);
5099 mfem::PetscParVector V(x,true);
5100 user_monitor->MonitorResidual(it,res,V);
5101 }
5102 user_monitor->MonitorSolver(solver);
5103 PetscFunctionReturn(PETSC_SUCCESS);
5104}
5105
5106static PetscErrorCode __mfem_mat_shell_apply(Mat A, Vec x, Vec y)
5107{
5108 mfem::Operator *op;
5109 PetscErrorCode ierr;
5110
5111 PetscFunctionBeginUser;
5112 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5113 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5114 mfem::PetscParVector xx(x,true);
5115 mfem::PetscParVector yy(y,true);
5116 op->Mult(xx,yy);
5117 yy.UpdateVecFromFlags();
5118 PetscFunctionReturn(PETSC_SUCCESS);
5119}
5120
5121static PetscErrorCode __mfem_mat_shell_apply_transpose(Mat A, Vec x, Vec y)
5122{
5123 mfem::Operator *op;
5124 PetscErrorCode ierr;
5125 PetscBool flg,symm;
5126
5127 PetscFunctionBeginUser;
5128 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5129 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5130 mfem::PetscParVector xx(x,true);
5131 mfem::PetscParVector yy(y,true);
5132 ierr = MatIsSymmetricKnown(A,&flg,&symm); CHKERRQ(ierr);
5133 if (flg && symm)
5134 {
5135 op->Mult(xx,yy);
5136 }
5137 else
5138 {
5139 op->MultTranspose(xx,yy);
5140 }
5141 yy.UpdateVecFromFlags();
5142 PetscFunctionReturn(PETSC_SUCCESS);
5143}
5144
5145static PetscErrorCode __mfem_mat_shell_copy(Mat A, Mat B, MatStructure str)
5146{
5147 mfem::Operator *op;
5148 PetscErrorCode ierr;
5149
5150 PetscFunctionBeginUser;
5151 ierr = MatShellGetContext(A,(void **)&op); CHKERRQ(ierr);
5152 if (!op) { SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_LIB,"Missing operator"); }
5153 ierr = MatShellSetContext(B,(void *)op); CHKERRQ(ierr);
5154 PetscFunctionReturn(PETSC_SUCCESS);
5155}
5156
5157static PetscErrorCode __mfem_mat_shell_destroy(Mat A)
5158{
5159 PetscFunctionBeginUser;
5160 PetscFunctionReturn(PETSC_SUCCESS);
5161}
5162
5163static PetscErrorCode __mfem_pc_shell_view(PC pc, PetscViewer viewer)
5164{
5165 __mfem_pc_shell_ctx *ctx;
5166 PetscErrorCode ierr;
5167
5168 PetscFunctionBeginUser;
5169 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5170 if (ctx->op)
5171 {
5172 PetscBool isascii;
5173 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);
5174 CHKERRQ(ierr);
5175
5176 mfem::PetscPreconditioner *ppc = dynamic_cast<mfem::PetscPreconditioner *>
5177                                  (ctx->op);
5178 if (ppc)
5179 {
5180 ierr = PCView(*ppc,viewer); CHKERRQ(ierr);
5181 }
5182 else
5183 {
5184 if (isascii)
5185 {
5186 ierr = PetscViewerASCIIPrintf(viewer,
5187 "No information available on the mfem::Solver\n");
5188 CHKERRQ(ierr);
5189 }
5190 }
5191 if (isascii && ctx->factory)
5192 {
5193 ierr = PetscViewerASCIIPrintf(viewer,
5194 "Number of preconditioners created by the factory %lu\n",ctx->numprec);
5195 CHKERRQ(ierr);
5196 }
5197 }
5198 PetscFunctionReturn(PETSC_SUCCESS);
5199}
5200
5201static PetscErrorCode __mfem_pc_shell_apply(PC pc, Vec x, Vec y)
5202{
5203 __mfem_pc_shell_ctx *ctx;
5204 PetscErrorCode ierr;
5205
5206 PetscFunctionBeginUser;
5207 mfem::PetscParVector xx(x,true);
5208 mfem::PetscParVector yy(y,true);
5209 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5210 if (ctx->op)
5211 {
5212 ctx->op->Mult(xx,yy);
5213 yy.UpdateVecFromFlags();
5214 }
5215 else // operator is not present, copy x
5216 {
5217 yy = xx;
5218 }
5219 PetscFunctionReturn(PETSC_SUCCESS);
5220}
5221
5222static PetscErrorCode __mfem_pc_shell_apply_transpose(PC pc, Vec x, Vec y)
5223{
5224 __mfem_pc_shell_ctx *ctx;
5225 PetscErrorCode ierr;
5226
5227 PetscFunctionBeginUser;
5228 mfem::PetscParVector xx(x,true);
5229 mfem::PetscParVector yy(y,true);
5230 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5231 if (ctx->op)
5232 {
5233 ctx->op->MultTranspose(xx,yy);
5234 yy.UpdateVecFromFlags();
5235 }
5236 else // operator is not present, copy x
5237 {
5238 yy = xx;
5239 }
5240 PetscFunctionReturn(PETSC_SUCCESS);
5241}
5242
5243static PetscErrorCode __mfem_pc_shell_setup(PC pc)
5244{
5245 __mfem_pc_shell_ctx *ctx;
5246
5247 PetscFunctionBeginUser;
5248 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5249 if (ctx->factory)
5250 {
5251 // Delete any owned operator
5252 if (ctx->ownsop)
5253 {
5254 delete ctx->op;
5255 }
5256
5257 // Get current preconditioning Mat
5258 Mat B;
5259 ierr = PCGetOperators(pc,NULL,&B); CHKERRQ(ierr);
5260
5261 // Call user-defined setup
5262 mfem::OperatorHandle hB(new mfem::PetscParMatrix(B,true),true);
5263 mfem::PetscPreconditionerFactory *factory = ctx->factory;
5264 ctx->op = factory->NewPreconditioner(hB);
5265 ctx->ownsop = true;
5266 ctx->numprec++;
5267 }
5268 PetscFunctionReturn(PETSC_SUCCESS);
5269}
5270
5271static PetscErrorCode __mfem_pc_shell_destroy(PC pc)
5272{
5273 __mfem_pc_shell_ctx *ctx;
5274 PetscErrorCode ierr;
5275
5276 PetscFunctionBeginUser;
5277 ierr = PCShellGetContext(pc,(void **)&ctx); CHKERRQ(ierr);
5278 if (ctx->ownsop)
5279 {
5280 delete ctx->op;
5281 }
5282 delete ctx;
5283 PetscFunctionReturn(PETSC_SUCCESS);
5284}
5285
5286static PetscErrorCode __mfem_array_container_destroy(void *ptr)
5287{
5288 PetscErrorCode ierr;
5289
5290 PetscFunctionBeginUser;
5291 ierr = PetscFree(ptr); CHKERRQ(ierr);
5292 PetscFunctionReturn(PETSC_SUCCESS);
5293}
5294
5295static PetscErrorCode __mfem_matarray_container_destroy(void *ptr)
5296{
5297 mfem::Array<Mat> *a = (mfem::Array<Mat>*)ptr;
5298 PetscErrorCode ierr;
5299
5300 PetscFunctionBeginUser;
5301 for (int i=0; i<a->Size(); i++)
5302 {
5303 Mat M = (*a)[i];
5304 MPI_Comm comm = PetscObjectComm((PetscObject)M);
5305 ierr = MatDestroy(&M); CCHKERRQ(comm,ierr);
5306 }
5307 delete a;
5308 PetscFunctionReturn(PETSC_SUCCESS);
5309}
5310
5311static PetscErrorCode __mfem_monitor_ctx_destroy(void **ctx)
5312{
5313 PetscErrorCode ierr;
5314
5315 PetscFunctionBeginUser;
5316 ierr = PetscFree(*ctx); CHKERRQ(ierr);
5317 PetscFunctionReturn(PETSC_SUCCESS);
5318}
5319
5320// Sets the type of PC to PCSHELL and wraps the solver action.
5321// If ownsop is true, ownership of precond is transferred to the PETSc object.
5322PetscErrorCode MakeShellPC(PC pc, mfem::Solver &precond, bool ownsop)
5323{
5324 PetscFunctionBeginUser;
5325 __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5326 ctx->op = &precond;
5327 ctx->ownsop = ownsop;
5328 ctx->factory = NULL;
5329 ctx->numprec = 0;
5330
5331 // In case the PC was already of type SHELL, this will destroy any
5332 // previous user-defined data structure.
5333 // We cannot call PCReset, as it would wipe out any operator already set.
5334 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5335
5336 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5337 ierr = PCShellSetName(pc,"MFEM Solver (unknown Pmat)"); CHKERRQ(ierr);
5338 ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5339 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5340 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5341 CHKERRQ(ierr);
5342 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5343 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5344 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5345 PetscFunctionReturn(PETSC_SUCCESS);
5346}
5347
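// Usage sketch: wrap an existing mfem::Solver (here a hypothetical `amg`
// instance) as a shell preconditioner of a KSP. With ownsop = false the
// caller keeps ownership of the solver:
//
//   PC pc;
//   ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
//   ierr = MakeShellPC(pc, amg, false); CHKERRQ(ierr);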
5348// Sets the type of PC to PCSHELL. Uses a PetscPreconditionerFactory to construct the solver.
5349// Takes ownership of the solver created by the factory.
5350PetscErrorCode MakeShellPCWithFactory(PC pc,
5351                                      mfem::PetscPreconditionerFactory *factory)
5352{
5353 PetscFunctionBeginUser;
5354 __mfem_pc_shell_ctx *ctx = new __mfem_pc_shell_ctx;
5355 ctx->op = NULL;
5356 ctx->ownsop = true;
5357 ctx->factory = factory;
5358 ctx->numprec = 0;
5359
5360 // In case the PC was already of type SHELL, this will destroy any
5361 // previous user-defined data structure.
5362 // We cannot call PCReset, as it would wipe out any operator already set.
5363 ierr = PCSetType(pc,PCNONE); CHKERRQ(ierr);
5364
5365 ierr = PCSetType(pc,PCSHELL); CHKERRQ(ierr);
5366 ierr = PCShellSetName(pc,factory->GetName()); CHKERRQ(ierr);
5367 ierr = PCShellSetContext(pc,(void *)ctx); CHKERRQ(ierr);
5368 ierr = PCShellSetApply(pc,__mfem_pc_shell_apply); CHKERRQ(ierr);
5369 ierr = PCShellSetApplyTranspose(pc,__mfem_pc_shell_apply_transpose);
5370 CHKERRQ(ierr);
5371 ierr = PCShellSetSetUp(pc,__mfem_pc_shell_setup); CHKERRQ(ierr);
5372 ierr = PCShellSetView(pc,__mfem_pc_shell_view); CHKERRQ(ierr);
5373 ierr = PCShellSetDestroy(pc,__mfem_pc_shell_destroy); CHKERRQ(ierr);
5374 PetscFunctionReturn(PETSC_SUCCESS);
5375}
5376
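// A minimal factory sketch (hypothetical class names): NewPreconditioner() is
// invoked on every PC setup with a handle to the current preconditioning
// matrix, so the preconditioner tracks Jacobian changes automatically.
//
//   class MyFactory : public mfem::PetscPreconditionerFactory
//   {
//   public:
//      MyFactory() : mfem::PetscPreconditionerFactory("my factory") {}
//      mfem::Solver *NewPreconditioner(const mfem::OperatorHandle &oh) override
//      { return new MySolver(oh); } // MySolver is hypothetical
//   };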
5377// Converts from a list (or a marked Array if islist is false) to an IS
5378// st indicates the offset where to start numbering
5379static PetscErrorCode Convert_Array_IS(MPI_Comm comm, bool islist,
5380 const mfem::Array<int> *list,
5381 PetscInt st, IS* is)
5382{
5383 PetscInt n = list ? list->Size() : 0,*idxs;
5384 const int *data = list ? list->GetData() : NULL;
5385 PetscErrorCode ierr;
5386
5387 PetscFunctionBeginUser;
5388 ierr = PetscMalloc1(n,&idxs); CHKERRQ(ierr);
5389 if (islist)
5390 {
5391 for (PetscInt i=0; i<n; i++) { idxs[i] = data[i] + st; }
5392 }
5393 else
5394 {
5395 PetscInt cum = 0;
5396 for (PetscInt i=0; i<n; i++)
5397 {
5398 if (data[i]) { idxs[cum++] = i+st; }
5399 }
5400 n = cum;
5401 }
5402 ierr = ISCreateGeneral(comm,n,idxs,PETSC_OWN_POINTER,is);
5403 CHKERRQ(ierr);
5404 PetscFunctionReturn(PETSC_SUCCESS);
5405}
5406
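// Example of the two input conventions (with st = 0): as a list, {3, 5, 9}
// becomes the IS {3, 5, 9}; as a marker array, {0, 1, 1, 0} becomes the
// IS {1, 2}, i.e. the indices of the nonzero entries.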
5407// Converts from a marked Array of Vdofs to an IS
5408// st indicates the offset where to start numbering
5409// l2l is a vector of matrices generated during RAP
5410static PetscErrorCode Convert_Vmarks_IS(MPI_Comm comm,
5411 mfem::Array<Mat> &pl2l,
5412 const mfem::Array<int> *mark,
5413 PetscInt st, IS* is)
5414{
5415 mfem::Array<int> sub_dof_marker;
5416 mfem::Array<mfem::SparseMatrix*> l2l(pl2l.Size());
5417 PetscInt nl;
5418 PetscErrorCode ierr;
5419
5420 PetscFunctionBeginUser;
5421 for (int i = 0; i < pl2l.Size(); i++)
5422 {
5423 PetscInt m,n,*ii,*jj;
5424 PetscBool done;
5425 ierr = MatGetRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,(const PetscInt**)&ii,
5426 (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5427 MFEM_VERIFY(done,"Unable to perform MatGetRowIJ on " << i << " l2l matrix");
5428 ierr = MatGetSize(pl2l[i],NULL,&n); CHKERRQ(ierr);
5429#if defined(PETSC_USE_64BIT_INDICES)
5430 int nnz = (int)ii[m];
5431 int *mii = new int[m+1];
5432 int *mjj = new int[nnz];
5433 for (int j = 0; j < m+1; j++) { mii[j] = (int)ii[j]; }
5434 for (int j = 0; j < nnz; j++) { mjj[j] = (int)jj[j]; }
5435 l2l[i] = new mfem::SparseMatrix(mii,mjj,NULL,m,n,true,true,true);
5436#else
5437 l2l[i] = new mfem::SparseMatrix(ii,jj,NULL,m,n,false,true,true);
5438#endif
5439 ierr = MatRestoreRowIJ(pl2l[i],0,PETSC_FALSE,PETSC_FALSE,&m,
5440 (const PetscInt**)&ii,
5441 (const PetscInt**)&jj,&done); CHKERRQ(ierr);
5442 MFEM_VERIFY(done,"Unable to perform MatRestoreRowIJ on "
5443 << i << " l2l matrix");
5444 }
5445 nl = 0;
5446 for (int i = 0; i < l2l.Size(); i++) { nl += l2l[i]->Width(); }
5447 sub_dof_marker.SetSize(nl);
5448 const int* vdata = mark->GetData();
5449 int* sdata = sub_dof_marker.GetData();
5450 int cumh = 0, cumw = 0;
5451 for (int i = 0; i < l2l.Size(); i++)
5452 {
5453 const mfem::Array<int> vf_marker(const_cast<int*>(vdata)+cumh,
5454 l2l[i]->Height());
5455 mfem::Array<int> sf_marker(sdata+cumw,l2l[i]->Width());
5456 l2l[i]->BooleanMultTranspose(vf_marker,sf_marker);
5457 cumh += l2l[i]->Height();
5458 cumw += l2l[i]->Width();
5459 }
5460 ierr = Convert_Array_IS(comm,false,&sub_dof_marker,st,is); CCHKERRQ(comm,ierr);
5461 for (int i = 0; i < pl2l.Size(); i++)
5462 {
5463 delete l2l[i];
5464 }
5465 PetscFunctionReturn(PETSC_SUCCESS);
5466}
5467
5468#if !defined(PETSC_HAVE_HYPRE)
5469
5470#if defined(HYPRE_MIXEDINT)
5471#error "HYPRE_MIXEDINT not supported"
5472#endif
5473
5474#include "_hypre_parcsr_mv.h"
5475static PetscErrorCode MatConvert_hypreParCSR_AIJ(hypre_ParCSRMatrix* hA,Mat* pA)
5476{
5477 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5478 hypre_CSRMatrix *hdiag,*hoffd;
5479 PetscScalar *da,*oa,*aptr;
5480 PetscInt *dii,*djj,*oii,*ojj,*iptr;
5481 PetscInt i,dnnz,onnz,m,n;
5482 PetscMPIInt size;
5483 PetscErrorCode ierr;
5484
5485 PetscFunctionBeginUser;
5486 hdiag = hypre_ParCSRMatrixDiag(hA);
5487 hoffd = hypre_ParCSRMatrixOffd(hA);
5488 m = hypre_CSRMatrixNumRows(hdiag);
5489 n = hypre_CSRMatrixNumCols(hdiag);
5490 dnnz = hypre_CSRMatrixNumNonzeros(hdiag);
5491 onnz = hypre_CSRMatrixNumNonzeros(hoffd);
5492 ierr = PetscMalloc1(m+1,&dii); CHKERRQ(ierr);
5493 ierr = PetscMalloc1(dnnz,&djj); CHKERRQ(ierr);
5494 ierr = PetscMalloc1(dnnz,&da); CHKERRQ(ierr);
5495 ierr = PetscMemcpy(dii,hypre_CSRMatrixI(hdiag),(m+1)*sizeof(PetscInt));
5496 CHKERRQ(ierr);
5497 ierr = PetscMemcpy(djj,hypre_CSRMatrixJ(hdiag),dnnz*sizeof(PetscInt));
5498 CHKERRQ(ierr);
5499 ierr = PetscMemcpy(da,hypre_CSRMatrixData(hdiag),dnnz*sizeof(PetscScalar));
5500 CHKERRQ(ierr);
5501 iptr = djj;
5502 aptr = da;
5503 for (i=0; i<m; i++)
5504 {
5505 PetscInt nc = dii[i+1]-dii[i];
5506 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5507 iptr += nc;
5508 aptr += nc;
5509 }
5510 ierr = MPI_Comm_size(comm,&size); CHKERRQ(ierr);
5511 if (size > 1)
5512 {
5513 PetscInt *offdj,*coffd;
5514
5515 ierr = PetscMalloc1(m+1,&oii); CHKERRQ(ierr);
5516 ierr = PetscMalloc1(onnz,&ojj); CHKERRQ(ierr);
5517 ierr = PetscMalloc1(onnz,&oa); CHKERRQ(ierr);
5518 ierr = PetscMemcpy(oii,hypre_CSRMatrixI(hoffd),(m+1)*sizeof(PetscInt));
5519 CHKERRQ(ierr);
5520 offdj = hypre_CSRMatrixJ(hoffd);
5521 coffd = hypre_ParCSRMatrixColMapOffd(hA);
5522 for (i=0; i<onnz; i++) { ojj[i] = coffd[offdj[i]]; }
5523 ierr = PetscMemcpy(oa,hypre_CSRMatrixData(hoffd),onnz*sizeof(PetscScalar));
5524 CHKERRQ(ierr);
5525 iptr = ojj;
5526 aptr = oa;
5527 for (i=0; i<m; i++)
5528 {
5529 PetscInt nc = oii[i+1]-oii[i];
5530 ierr = PetscSortIntWithScalarArray(nc,iptr,aptr); CHKERRQ(ierr);
5531 iptr += nc;
5532 aptr += nc;
5533 }
5534 ierr = MatCreateMPIAIJWithSplitArrays(comm,m,n,PETSC_DECIDE,PETSC_DECIDE,dii,
5535 djj,da,oii,ojj,oa,pA); CHKERRQ(ierr);
5536 }
5537 else
5538 {
5539 oii = ojj = NULL;
5540 oa = NULL;
5541 ierr = MatCreateSeqAIJWithArrays(comm,m,n,dii,djj,da,pA); CHKERRQ(ierr);
5542 }
5543 /* We are responsible for freeing the CSR arrays. However, since we can
5544 take references of a PetscParMatrix but we cannot take references of
5545 raw PETSc arrays, we create PetscContainer objects to hold references
5546 to these arrays */
5547 void *ptrs[6] = {dii,djj,da,oii,ojj,oa};
5548 const char *names[6] = {"_mfem_csr_dii",
5549 "_mfem_csr_djj",
5550 "_mfem_csr_da",
5551 "_mfem_csr_oii",
5552 "_mfem_csr_ojj",
5553 "_mfem_csr_oa"
5554 };
5555 for (i=0; i<6; i++)
5556 {
5557 PetscContainer c;
5558
5559 ierr = PetscContainerCreate(comm,&c); CHKERRQ(ierr);
5560 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5561 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5562 CHKERRQ(ierr);
5563 ierr = PetscObjectCompose((PetscObject)(*pA),names[i],(PetscObject)c);
5564 CHKERRQ(ierr);
5565 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5566 }
5567 PetscFunctionReturn(PETSC_SUCCESS);
5568}
5569
5570static PetscErrorCode MatConvert_hypreParCSR_IS(hypre_ParCSRMatrix* hA,Mat* pA)
5571{
5572 Mat lA;
5573 ISLocalToGlobalMapping rl2g,cl2g;
5574 IS is;
5575 hypre_CSRMatrix *hdiag,*hoffd;
5576 MPI_Comm comm = hypre_ParCSRMatrixComm(hA);
5577 void *ptrs[2];
5578 const char *names[2] = {"_mfem_csr_aux",
5579 "_mfem_csr_data"
5580 };
5581 PetscScalar *hdd,*hod,*aa,*data;
5582 PetscInt *col_map_offd,*hdi,*hdj,*hoi,*hoj;
5583 PetscInt *aux,*ii,*jj;
5584 PetscInt cum,dr,dc,oc,str,stc,nnz,i,jd,jo;
5585 PetscErrorCode ierr;
5586
5587 PetscFunctionBeginUser;
5588 /* access relevant information in ParCSR */
5589 str = hypre_ParCSRMatrixFirstRowIndex(hA);
5590 stc = hypre_ParCSRMatrixFirstColDiag(hA);
5591 hdiag = hypre_ParCSRMatrixDiag(hA);
5592 hoffd = hypre_ParCSRMatrixOffd(hA);
5593 dr = hypre_CSRMatrixNumRows(hdiag);
5594 dc = hypre_CSRMatrixNumCols(hdiag);
5595 nnz = hypre_CSRMatrixNumNonzeros(hdiag);
5596 hdi = hypre_CSRMatrixI(hdiag);
5597 hdj = hypre_CSRMatrixJ(hdiag);
5598 hdd = hypre_CSRMatrixData(hdiag);
5599 oc = hypre_CSRMatrixNumCols(hoffd);
5600 nnz += hypre_CSRMatrixNumNonzeros(hoffd);
5601 hoi = hypre_CSRMatrixI(hoffd);
5602 hoj = hypre_CSRMatrixJ(hoffd);
5603 hod = hypre_CSRMatrixData(hoffd);
5604
5605 /* generate l2g maps for rows and cols */
5606 ierr = ISCreateStride(comm,dr,str,1,&is); CHKERRQ(ierr);
5607 ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g); CHKERRQ(ierr);
5608 ierr = ISDestroy(&is); CHKERRQ(ierr);
5609 col_map_offd = hypre_ParCSRMatrixColMapOffd(hA);
5610 ierr = PetscMalloc1(dc+oc,&aux); CHKERRQ(ierr);
5611 for (i=0; i<dc; i++) { aux[i] = i+stc; }
5612 for (i=0; i<oc; i++) { aux[i+dc] = col_map_offd[i]; }
5613 ierr = ISCreateGeneral(comm,dc+oc,aux,PETSC_OWN_POINTER,&is); CHKERRQ(ierr);
5614 ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g); CHKERRQ(ierr);
5615 ierr = ISDestroy(&is); CHKERRQ(ierr);
5616
5617 /* create MATIS object */
5618 ierr = MatCreate(comm,pA); CHKERRQ(ierr);
5619 ierr = MatSetSizes(*pA,dr,dc,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5620 ierr = MatSetType(*pA,MATIS); CHKERRQ(ierr);
5621 ierr = MatSetLocalToGlobalMapping(*pA,rl2g,cl2g); CHKERRQ(ierr);
5622 ierr = ISLocalToGlobalMappingDestroy(&rl2g); CHKERRQ(ierr);
5623 ierr = ISLocalToGlobalMappingDestroy(&cl2g); CHKERRQ(ierr);
5624
5625 /* merge local matrices */
5626 ierr = PetscMalloc1(nnz+dr+1,&aux); CHKERRQ(ierr);
5627 ierr = PetscMalloc1(nnz,&data); CHKERRQ(ierr);
5628 ii = aux;
5629 jj = aux+dr+1;
5630 aa = data;
5631 *ii = *(hdi++) + *(hoi++);
5632 for (jd=0,jo=0,cum=0; *ii<nnz; cum++)
5633 {
5634 PetscScalar *aold = aa;
5635 PetscInt *jold = jj,nc = jd+jo;
5636 for (; jd<*hdi; jd++) { *jj++ = *hdj++; *aa++ = *hdd++; }
5637 for (; jo<*hoi; jo++) { *jj++ = *hoj++ + dc; *aa++ = *hod++; }
5638 *(++ii) = *(hdi++) + *(hoi++);
5639 ierr = PetscSortIntWithScalarArray(jd+jo-nc,jold,aold); CHKERRQ(ierr);
5640 }
5641 for (; cum<dr; cum++) { *(++ii) = nnz; }
5642 ii = aux;
5643 jj = aux+dr+1;
5644 aa = data;
5645 ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,dr,dc+oc,ii,jj,aa,&lA);
5646 CHKERRQ(ierr);
5647 ptrs[0] = aux;
5648 ptrs[1] = data;
5649 for (i=0; i<2; i++)
5650 {
5651 PetscContainer c;
5652
5653 ierr = PetscContainerCreate(PETSC_COMM_SELF,&c); CHKERRQ(ierr);
5654 ierr = PetscContainerSetPointer(c,ptrs[i]); CHKERRQ(ierr);
5655 ierr = PetscContainerSetUserDestroy(c,__mfem_array_container_destroy);
5656 CHKERRQ(ierr);
5657 ierr = PetscObjectCompose((PetscObject)lA,names[i],(PetscObject)c);
5658 CHKERRQ(ierr);
5659 ierr = PetscContainerDestroy(&c); CHKERRQ(ierr);
5660 }
5661 ierr = MatISSetLocalMat(*pA,lA); CHKERRQ(ierr);
5662 ierr = MatDestroy(&lA); CHKERRQ(ierr);
5663 ierr = MatAssemblyBegin(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5664 ierr = MatAssemblyEnd(*pA,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
5665 PetscFunctionReturn(PETSC_SUCCESS);
5666}
5667#endif
5668
5669#include <petsc/private/matimpl.h>
5670
5671static PetscErrorCode __mfem_MatCreateDummy(MPI_Comm comm, PetscInt m,
5672 PetscInt n, Mat *A)
5673{
5674 PetscFunctionBegin;
5675 ierr = MatCreate(comm,A); CHKERRQ(ierr);
5676 ierr = MatSetSizes(*A,m,n,PETSC_DECIDE,PETSC_DECIDE); CHKERRQ(ierr);
5677 ierr = PetscObjectChangeTypeName((PetscObject)*A,"mfemdummy"); CHKERRQ(ierr);
5678 (*A)->preallocated = PETSC_TRUE;
5679 ierr = MatSetUp(*A); CHKERRQ(ierr);
5680 PetscFunctionReturn(PETSC_SUCCESS);
5681}
5682
5683#include <petsc/private/vecimpl.h>
5684
5685#if defined(PETSC_HAVE_DEVICE)
5686static PetscErrorCode __mfem_VecSetOffloadMask(Vec v, PetscOffloadMask m)
5687{
5688 PetscFunctionBegin;
5689 v->offloadmask = m;
5690 PetscFunctionReturn(PETSC_SUCCESS);
5691}
5692#endif
5693
5694static PetscErrorCode __mfem_VecBoundToCPU(Vec v, PetscBool *flg)
5695{
5696 PetscFunctionBegin;
5697#if defined(PETSC_HAVE_DEVICE)
5698 *flg = v->boundtocpu;
5699#else
5700 *flg = PETSC_TRUE;
5701#endif
5702 PetscFunctionReturn(PETSC_SUCCESS);
5703}
5704
5705static PetscErrorCode __mfem_PetscObjectStateIncrease(PetscObject o)
5706{
5707 PetscErrorCode ierr;
5708
5709 PetscFunctionBegin;
5710 ierr = PetscObjectStateIncrease(o); CHKERRQ(ierr);
5711 PetscFunctionReturn(PETSC_SUCCESS);
5712}
5713
5714#endif // MFEM_USE_PETSC
5715#endif // MFEM_USE_MPI
void Assign(const T *)
Copy data from a pointer. 'Size()' elements are copied.
Definition array.hpp:925
void SetSize(int nsize)
Change the logical size of the array, keep existing entries.
Definition array.hpp:697
int Size() const
Return the logical size of the array.
Definition array.hpp:144
T * GetData()
Returns the data.
Definition array.hpp:118
A class to handle Block systems in a matrix-free implementation.
int IsZeroBlock(int i, int j) const
Check if block (i,j) is a zero block.
Operator & GetBlock(int i, int j)
Return a reference to block i,j.
int NumRowBlocks() const
Return the number of row blocks.
int NumColBlocks() const
Return the number of column blocks.
static bool Allows(unsigned long b_mask)
Return true if any of the backends in the backend mask, b_mask, are allowed.
Definition device.hpp:259
static int GetId()
Get the device id of the configured device.
Definition device.hpp:253
static MemoryType GetDeviceMemoryType()
Get the current Device MemoryType. This is the MemoryType used by most MFEM classes when allocating m...
Definition device.hpp:274
Collection of finite elements from the same family in multiple dimensions. This class is used to matc...
Definition fe_coll.hpp:27
Ordering::Type GetOrdering() const
Return the ordering method.
Definition fespace.hpp:725
const FiniteElementCollection * FEColl() const
Definition fespace.hpp:727
int GetVDim() const
Returns vector dimension.
Definition fespace.hpp:706
Wrapper for hypre's ParCSR matrix class.
Definition hypre.hpp:388
MPI_Comm GetComm() const
MPI communicator.
Definition hypre.hpp:578
Wrapper for hypre's parallel vector class.
Definition hypre.hpp:206
Identity Operator I: x -> x.
Definition operator.hpp:706
Class used by MFEM to store pointers to host and/or device memory.
bool DeviceIsValid() const
Return true if device pointer is valid.
void CopyFromHost(const T *src, int size)
Copy size entries from the host pointer src to *this.
void MakeAlias(const Memory &base, int offset, int size)
Create a memory object that points inside the memory object base.
bool HostIsValid() const
Return true if host pointer is valid.
bool UseDevice() const
Read the internal device flag.
void CopyToHost(T *dest, int size) const
Copy size entries from *this to the host pointer dest.
bool Empty() const
Return true if the Memory object is empty, see Reset().
void Sync(const Memory &other) const
Copy the host/device pointer validity flags from other to *this.
void CopyFrom(const Memory &src, int size)
Copy size entries from src to *this.
void Reset()
Reset the memory to be empty, ensuring that Delete() will be a no-op.
void Wrap(T *ptr, int size, bool own)
Wrap an externally allocated host pointer, ptr with the current host memory type returned by MemoryMa...
void Delete()
Delete the owned pointers and reset the Memory object.
int SpaceDimension() const
Dimension of the physical space containing the mesh.
Definition mesh.hpp:1163
Abstract class for solving systems of ODEs: dx/dt = f(x,t)
Definition ode.hpp:24
TimeDependentOperator * f
Pointer to the associated TimeDependentOperator.
Definition ode.hpp:27
Pointer to an Operator of a specified type.
Definition handle.hpp:34
Abstract operator.
Definition operator.hpp:25
virtual MemoryClass GetMemoryClass() const
Return the MemoryClass preferred by the Operator.
Definition operator.hpp:86
int width
Dimension of the input / number of columns in the matrix.
Definition operator.hpp:28
int Height() const
Get the height (size of output) of the Operator. Synonym with NumRows().
Definition operator.hpp:66
int height
Dimension of the output / number of rows in the matrix.
Definition operator.hpp:27
virtual void Mult(const Vector &x, Vector &y) const =0
Operator application: y=A(x).
int Width() const
Get the width (size of input) of the Operator. Synonym with NumCols().
Definition operator.hpp:72
Type
Enumeration defining IDs for some classes derived from Operator.
Definition operator.hpp:284
@ ANY_TYPE
ID for the base class Operator, i.e. any type.
Definition operator.hpp:285
@ PETSC_MATIS
ID for class PetscParMatrix, MATIS format.
Definition operator.hpp:289
@ PETSC_MATHYPRE
ID for class PetscParMatrix, MATHYPRE format.
Definition operator.hpp:292
@ PETSC_MATGENERIC
ID for class PetscParMatrix, unspecified format.
Definition operator.hpp:293
@ PETSC_MATAIJ
ID for class PetscParMatrix, MATAIJ format.
Definition operator.hpp:288
@ PETSC_MATNEST
ID for class PetscParMatrix, MATNEST format.
Definition operator.hpp:291
@ PETSC_MATSHELL
ID for class PetscParMatrix, MATSHELL format.
Definition operator.hpp:290
virtual void MultTranspose(const Vector &x, Vector &y) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an ...
Definition operator.hpp:93
virtual Operator & GetGradient(const Vector &x) const
Evaluate the gradient operator at the point x. The default behavior in class Operator is to generate ...
Definition operator.hpp:122
Abstract parallel finite element space.
Definition pfespace.hpp:29
MPI_Comm GetComm() const
Definition pfespace.hpp:273
HYPRE_BigInt * GetTrueDofOffsets() const
Definition pfespace.hpp:282
int GetTrueVSize() const override
Return the number of local vector true dofs.
Definition pfespace.hpp:289
ParMesh * GetParMesh() const
Definition pfespace.hpp:277
Helper class for handling essential boundary conditions.
Definition petsc.hpp:592
PetscBCHandler(Type type_=ZERO)
Definition petsc.hpp:601
@ CONSTANT
Constant in time b.c.
Definition petsc.hpp:597
void SetTDofs(Array< int > &list)
Sets essential dofs (local, per-process numbering)
Definition petsc.cpp:2812
virtual void Eval(real_t t, Vector &g)
Boundary conditions evaluation.
Definition petsc.hpp:617
void SetTime(real_t t)
Sets the current time.
Definition petsc.hpp:627
void SetUp(PetscInt n)
SetUp the helper object, where n is the size of the solution vector.
Definition petsc.cpp:2819
void ZeroBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = 0 on ess_tdof_list
Definition petsc.cpp:2912
Array< int > & GetTDofs()
Gets essential dofs (local, per-process numbering)
Definition petsc.hpp:624
void FixResidualBC(const Vector &x, Vector &y)
y = x-g on ess_tdof_list, the rest of y is unchanged
Definition petsc.cpp:2884
void ApplyBC(const Vector &x, Vector &y)
y = x on ess_tdof_list_c and y = g (internally evaluated) on ess_tdof_list
Definition petsc.cpp:2835
void Zero(Vector &x)
Replace boundary dofs with 0.
Definition petsc.cpp:2903
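A sketch of a time-dependent boundary-condition handler built on PetscBCHandler (the class name RampBC and the linear ramp are illustrative assumptions); attach it to a solver with SetBCHandler:
class RampBC : public mfem::PetscBCHandler
{
public:
   RampBC(mfem::Array<int> &ess_tdofs)
      : mfem::PetscBCHandler(mfem::PetscBCHandler::TIME_DEPENDENT)
   { SetTDofs(ess_tdofs); }
   void Eval(mfem::real_t t, mfem::Vector &g) override
   { g = t; } // boundary values ramp linearly in time
};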
Auxiliary class for BDDC customization.
Definition petsc.hpp:831
PetscBDDCSolver(MPI_Comm comm, Operator &op, const PetscBDDCSolverParams &opts=PetscBDDCSolverParams(), const std::string &prefix=std::string())
Definition petsc.cpp:3879
PetscFieldSplitSolver(MPI_Comm comm, Operator &op, const std::string &prefix=std::string())
Definition petsc.cpp:3888
PetscH2Solver(Operator &op, ParFiniteElementSpace *fes, const std::string &prefix=std::string())
Definition petsc.cpp:3923
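A BDDC setup sketch, assuming A is a PetscParMatrix in MATIS format and fespace an existing ParFiniteElementSpace (the SetSpace call reflects the PetscBDDCSolverParams API as declared in petsc.hpp):
mfem::PetscBDDCSolverParams opts;
opts.SetSpace(&fespace);                      // pass FE space information
mfem::PetscBDDCSolver bddc(MPI_COMM_WORLD, A, opts);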
Abstract class for PETSc's linear solvers.
Definition petsc.hpp:750
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an error.
Definition petsc.cpp:3195
operator petsc::KSP() const
Conversion function to PETSc's KSP type.
Definition petsc.hpp:787
virtual ~PetscLinearSolver()
Definition petsc.cpp:3200
virtual void SetOperator(const Operator &op)
Definition petsc.cpp:2965
PetscLinearSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool wrap=true, bool iter_mode=false)
Definition petsc.cpp:2924
void SetPreconditioner(Solver &precond)
Definition petsc.cpp:3110
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
Definition petsc.cpp:3190
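A usage sketch for the linear solver wrapper (A, B, X are assumed to exist: an operator and two vectors):
mfem::PetscLinearSolver ksp(MPI_COMM_WORLD);
ksp.SetOperator(A);       // wrap (or convert) the operator for PETSc
ksp.SetRelTol(1e-8);
ksp.SetMaxIter(200);
ksp.SetPrintLevel(1);
ksp.Mult(B, X);           // solve A X = B; X holds the solution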
bool DeviceRequested() const
Definition petsc.hpp:143
void SetHostInvalid() const
Definition petsc.hpp:98
void SetHostValid() const
Definition petsc.hpp:96
bool WriteRequested() const
Definition petsc.hpp:138
const real_t * GetDevicePointer() const
Definition petsc.cpp:256
const real_t * GetHostPointer() const
Definition petsc.cpp:247
void SyncBaseAndReset()
Definition petsc.hpp:127
bool IsAliasForSync() const
Definition petsc.hpp:100
void MakeAliasForSync(const Memory< real_t > &base_, int offset_, int size_, bool usedev_)
Definition petsc.hpp:102
void SetDeviceInvalid() const
Definition petsc.hpp:99
bool ReadRequested() const
Definition petsc.hpp:133
void SetDeviceValid() const
Definition petsc.hpp:97
void SetPostCheck(void(*post)(Operator *op, const Vector &X, Vector &Y, Vector &W, bool &changed_y, bool &changed_w))
Definition petsc.cpp:4090
virtual void Mult(const Vector &b, Vector &x) const
Application of the solver.
Definition petsc.cpp:4117
void SetObjective(void(*obj)(Operator *op, const Vector &x, real_t *f))
Specification of an objective function to be used for line search.
Definition petsc.cpp:4079
operator petsc::SNES() const
Conversion function to PETSc's SNES type.
Definition petsc.hpp:944
virtual ~PetscNonlinearSolver()
Definition petsc.cpp:4003
void SetJacobianType(Operator::Type type)
Definition petsc.cpp:4073
void SetUpdate(void(*update)(Operator *op, int it, const Vector &F, const Vector &X, const Vector &D, const Vector &P))
Definition petsc.cpp:4104
virtual void SetOperator(const Operator &op)
Specification of the nonlinear operator.
Definition petsc.cpp:4011
PetscNonlinearSolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:3971
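A usage sketch for the nonlinear solver wrapper (nl_op is assumed to be an mfem::Operator implementing Mult() and GetGradient(); the empty right-hand side convention mirrors mfem::NewtonSolver):
mfem::PetscNonlinearSolver snes(MPI_COMM_WORLD);
snes.SetOperator(nl_op);
snes.SetJacobianType(mfem::Operator::PETSC_MATAIJ);
mfem::Vector zero;        // empty vector: solve nl_op(X) = 0
snes.Mult(zero, X);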
virtual void Init(TimeDependentOperator &f_, enum PetscODESolver::Type type)
Initialize the ODE solver.
Definition petsc.cpp:4173
virtual void Run(Vector &x, real_t &t, real_t &dt, real_t t_final)
Perform time integration from time t [in] to time t_final [in].
Definition petsc.cpp:4324
virtual void Step(Vector &x, real_t &t, real_t &dt)
Perform a time step from time t [in] to time t [out] based on the requested step size dt [in].
Definition petsc.cpp:4286
operator petsc::TS() const
Conversion function to PETSc's TS type.
Definition petsc.hpp:979
void SetType(PetscODESolver::Type)
Definition petsc.cpp:4268
PetscODESolver::Type GetType() const
Definition petsc.cpp:4262
PetscODESolver(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:4140
void SetJacobianType(Operator::Type type)
Definition petsc.cpp:4256
virtual ~PetscODESolver()
Definition petsc.cpp:4165
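A usage sketch for the ODE solver wrapper (op is assumed to be a TimeDependentOperator and x the state vector):
mfem::PetscODESolver ts(MPI_COMM_WORLD);
ts.Init(op, mfem::PetscODESolver::ODE_SOLVER_GENERAL);
mfem::real_t t = 0.0, dt = 1e-2;
ts.Step(x, t, dt);        // one TS step; PETSc may adjust t and dt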
PetscPCGSolver(MPI_Comm comm, const std::string &prefix=std::string(), bool iter_mode=false)
Definition petsc.cpp:3210
Wrapper for PETSc's matrix class.
Definition petsc.hpp:319
void Print(const char *fname=NULL, bool binary=false) const
Prints the matrix (to stdout if fname is NULL)
Definition petsc.cpp:2024
PetscInt GetNumRows() const
Returns the local number of rows.
Definition petsc.cpp:991
void ScaleCols(const Vector &s)
Scale the local col i by s(i).
Definition petsc.cpp:2060
void MakeRef(const PetscParMatrix &master)
Makes this object a reference to another PetscParMatrix.
Definition petsc.cpp:1938
void EliminateRows(const Array< int > &rows)
Eliminate only the rows from the matrix.
Definition petsc.cpp:2279
void ConvertOperator(MPI_Comm comm, const Operator &op, petsc::Mat *B, Operator::Type tid)
Definition petsc.cpp:1377
PetscParMatrix & operator-=(const PetscParMatrix &B)
Definition petsc.cpp:1176
void Mult(real_t a, const Vector &x, real_t b, Vector &y) const
Matvec: y = a A x + b y.
Definition petsc.cpp:1987
PetscInt GetColStart() const
Returns the global index of the first local column.
Definition petsc.cpp:984
PetscInt M() const
Returns the global number of rows.
Definition petsc.cpp:1005
void EliminateRowsCols(const Array< int > &rows_cols, const PetscParVector &X, PetscParVector &B, real_t diag=1.)
Eliminate rows and columns from the matrix, and rows from the vector B. Modify B with the BC values in X.
Definition petsc.cpp:2250
PetscParVector * GetY() const
Returns the inner vector in the range of A (it creates it if needed)
Definition petsc.cpp:1958
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:1330
Type GetType() const
Definition petsc.cpp:2306
PetscParMatrix & operator=(const PetscParMatrix &B)
Definition petsc.cpp:1145
petsc::Mat ReleaseMat(bool dereference)
Release the PETSc Mat object. If dereference is true, decrement the refcount of the Mat object.
Definition petsc.cpp:2293
void MakeWrapper(MPI_Comm comm, const Operator *op, petsc::Mat *B)
Creates a wrapper around a mfem::Operator op using PETSc's MATSHELL object and returns the Mat in B.
Definition petsc.cpp:1343
PetscInt GetNumCols() const
Returns the local number of columns.
Definition petsc.cpp:998
PetscInt NNZ() const
Returns the number of nonzeros.
Definition petsc.cpp:1019
petsc::Mat A
The actual PETSc object.
Definition petsc.hpp:322
PetscParVector * GetX() const
Returns the inner vector in the domain of A (it creates it if needed)
Definition petsc.cpp:1948
void Shift(real_t s)
Shift diagonal by a constant.
Definition petsc.cpp:2071
PetscParVector * Y
Definition petsc.hpp:325
PetscParVector * X
Auxiliary vectors for typecasting.
Definition petsc.hpp:325
PetscParMatrix()
Create an empty matrix to be used as a reference to an existing matrix.
Definition petsc.cpp:1039
operator petsc::Mat() const
Typecasting to PETSc's Mat type.
Definition petsc.hpp:465
void SetMat(petsc::Mat newA)
Replace the inner Mat Object. The reference count of newA is increased.
Definition petsc.cpp:1803
void operator*=(real_t s)
Scale all entries by s: A_scaled = s*A.
Definition petsc.cpp:1982
void SetBlockSize(PetscInt rbs, PetscInt cbs=-1)
Set row and column block sizes of a matrix.
Definition petsc.cpp:1026
PetscInt N() const
Returns the global number of columns.
Definition petsc.cpp:1012
PetscParMatrix * Transpose(bool action=false)
Returns the transpose of the PetscParMatrix.
Definition petsc.cpp:1968
void Init()
Initialize with defaults. Does not initialize inherited members.
Definition petsc.cpp:1032
void Destroy()
Delete all owned data. Does not perform re-initialization with defaults.
Definition petsc.cpp:1778
PetscInt GetRowStart() const
Returns the global index of the first local row.
Definition petsc.cpp:977
void MultTranspose(real_t a, const Vector &x, real_t b, Vector &y) const
Matvec transpose: y = a A^T x + b y.
Definition petsc.cpp:2005
PetscParMatrix & operator+=(const PetscParMatrix &B)
Definition petsc.cpp:1161
void ScaleRows(const Vector &s)
Scale the local row i by s(i).
Definition petsc.cpp:2049
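A usage sketch: convert an existing HypreParMatrix to PETSc AIJ format and apply it (hypre_A is assumed to exist; the converting constructor taking a HypreParMatrix pointer is the one declared in petsc.hpp):
mfem::PetscParMatrix pA(&hypre_A, mfem::Operator::PETSC_MATAIJ);
mfem::Vector x(pA.Width()), y(pA.Height());
x = 1.0;
pA.Mult(x, y);            // y = A x
pA.Print();               // print to stdout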
void ResetMemory()
Completes the operation started with PlaceMemory.
Definition petsc.cpp:891
void SetFlagsFromMask_() const
Definition petsc.cpp:323
void PlaceMemory(Memory< real_t > &, bool=false)
This requests write access from where the memory is valid and temporarily replaces the corresponding array used by the PETSc Vec object.
Definition petsc.cpp:804
void Randomize(PetscInt seed=0)
Set random values.
Definition petsc.cpp:935
void SetBlockSize(PetscInt bs)
Set block size of a vector.
Definition petsc.cpp:524
bool UseDevice() const override
Return the device flag of the Memory object used by the Vector.
Definition petsc.cpp:509
real_t * HostReadWrite() override
Shortcut for mfem::ReadWrite(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:495
PetscMemory pdata
Definition petsc.hpp:162
void PlaceArray(PetscScalar *temp_data)
Temporarily replace the data of the PETSc Vec object. To return to the original data array,...
Definition petsc.cpp:794
PetscInt GlobalSize() const
Returns the global number of rows.
Definition petsc.cpp:517
PetscParVector & operator-=(const PetscParVector &y)
Definition petsc.cpp:773
real_t * Write(bool=true) override
Shortcut for mfem::Write(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:440
void UpdateVecFromFlags()
Update PETSc's Vec after having accessed its data via GetMemory()
Definition petsc.cpp:357
void Print(const char *fname=NULL, bool binary=false) const
Prints the vector (to stdout if fname is NULL)
Definition petsc.cpp:950
real_t * ReadWrite(bool=true) override
Shortcut for mfem::ReadWrite(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:470
const real_t * HostRead() const override
Shortcut for mfem::Read(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:435
const real_t * Read(bool=true) const override
Shortcut for mfem::Read(vec.GetMemory(), vec.Size(), on_dev).
Definition petsc.cpp:411
real_t * HostWrite() override
Shortcut for mfem::Write(vec.GetMemory(), vec.Size(), false).
Definition petsc.cpp:465
PetscParVector & operator+=(const PetscParVector &y)
Definition petsc.cpp:766
petsc::Vec x
The actual PETSc object.
Definition petsc.hpp:160
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:694
virtual ~PetscParVector()
Calls PETSc's destroy function.
Definition petsc.cpp:596
void ResetArray()
Reset the PETSc Vec object to use its default data. Call this method after the use of PlaceArray().
Definition petsc.cpp:799
Vector * GlobalVector() const
Returns a copy of the global vector on each processor.
Definition petsc.cpp:699
operator petsc::Vec() const
Typecasting to PETSc's Vec type.
Definition petsc.hpp:234
PetscParVector & AddValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Add values in a vector.
Definition petsc.cpp:745
PetscParVector(MPI_Comm comm, PetscInt glob_size, PetscInt *col=NULL)
Creates vector with given global size and partitioning of the columns.
Definition petsc.cpp:578
PetscParVector & SetValues(const Array< PetscInt > &, const Array< PetscScalar > &)
Set values in a vector.
Definition petsc.cpp:731
PetscParVector & operator*=(PetscScalar d)
Definition petsc.cpp:780
PetscParVector & operator=(PetscScalar d)
Set constant values.
Definition petsc.cpp:724
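A usage sketch for the vector wrapper (the global size 100 is illustrative; passing col=NULL lets PETSc choose the parallel layout):
mfem::PetscParVector v(MPI_COMM_WORLD, 100);
v = 1.0;         // set constant values via operator=(PetscScalar)
v.Randomize();   // overwrite with random values
v.Print();       // print to stdout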
virtual Solver * NewPreconditioner(const OperatorHandle &oh)=0
Abstract class for PETSc's preconditioners.
Definition petsc.hpp:805
virtual void MultTranspose(const Vector &b, Vector &x) const
Action of the transpose operator: y=A^t(x). The default behavior in class Operator is to generate an error.
Definition petsc.cpp:3368
virtual void Mult(const Vector &b, Vector &x) const
Application of the preconditioner.
Definition petsc.cpp:3363
virtual void SetOperator(const Operator &op)
Set/update the solver for the given operator.
Definition petsc.cpp:3277
virtual ~PetscPreconditioner()
Definition petsc.cpp:3373
PetscPreconditioner(MPI_Comm comm, const std::string &prefix=std::string())
Definition petsc.cpp:3242
operator petsc::PC() const
Conversion function to PETSc's PC type.
Definition petsc.hpp:825
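A usage sketch: a PETSc PC configured from the command line through an options prefix (the prefix string "mypc_" is illustrative; ksp is a PetscLinearSolver as in the earlier sketch):
mfem::PetscPreconditioner pc(MPI_COMM_WORLD, "mypc_");
pc.SetOperator(A);
ksp.SetPreconditioner(pc);   // attach to the Krylov solver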
Abstract class for monitoring PETSc's solvers.
Definition petsc.hpp:985
virtual void MonitorResidual(PetscInt it, PetscReal norm, const Vector &r)
Monitor the residual vector r.
Definition petsc.hpp:1000
virtual void MonitorSolution(PetscInt it, PetscReal norm, const Vector &x)
Monitor the solution vector x.
Definition petsc.hpp:994
virtual void MonitorSolver(PetscSolver *solver)
Generic monitor that provides access to the solver.
Definition petsc.hpp:1006
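A monitor sketch (the class name ResMonitor is hypothetical; the constructor flags follow the PetscSolverMonitor(bool monitor_sol, bool monitor_res) convention); attach it with solver.SetMonitor(&mon):
class ResMonitor : public mfem::PetscSolverMonitor
{
public:
   ResMonitor() : mfem::PetscSolverMonitor(false, true) {}
   void MonitorResidual(PetscInt it, PetscReal norm,
                        const mfem::Vector &r) override
   { mfem::out << "iter " << it << ": ||r|| = " << norm << '\n'; }
};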
Abstract class for PETSc's solvers.
Definition petsc.hpp:676
PetscClassId cid
The class id of the actual PETSc object.
Definition petsc.hpp:685
void SetAbsTol(real_t tol)
Definition petsc.cpp:2401
void SetTol(real_t tol)
Definition petsc.cpp:2371
void * private_ctx
Private context for solver.
Definition petsc.hpp:694
void SetPrintLevel(int plev)
Definition petsc.cpp:2453
void SetBCHandler(PetscBCHandler *bch)
Sets the object to handle essential boundary conditions.
Definition petsc.cpp:2568
void SetMaxIter(int max_iter)
Definition petsc.cpp:2426
void SetRelTol(real_t tol)
Definition petsc.cpp:2376
PetscParVector * X
Definition petsc.hpp:688
void CreatePrivateContext()
Definition petsc.cpp:2752
virtual ~PetscSolver()
Destroy the PetscParVectors allocated (if any).
Definition petsc.cpp:2364
int GetNumIterations()
Definition petsc.cpp:2694
real_t GetFinalNorm()
Definition petsc.cpp:2727
MPI_Comm GetComm() const
Get the associated MPI communicator.
Definition petsc.cpp:2533
PetscSolver()
Construct an empty PetscSolver. Initialize protected objects to NULL.
Definition petsc.cpp:2354
PetscObject obj
The actual PETSc object (KSP, PC, SNES or TS).
Definition petsc.hpp:682
bool clcustom
Boolean to handle SetFromOptions calls.
Definition petsc.hpp:679
bool operatorset
Boolean to handle SetOperator calls.
Definition petsc.hpp:697
void Customize(bool customize=true) const
Customize object with options set.
Definition petsc.cpp:2628
void SetMonitor(PetscSolverMonitor *ctx)
Sets user-defined monitoring routine.
Definition petsc.cpp:2538
void FreePrivateContext()
Definition petsc.cpp:2784
PetscBCHandler * bchandler
Handler for boundary conditions.
Definition petsc.hpp:691
void SetPreconditionerFactory(PetscPreconditionerFactory *factory)
Sets the object for the creation of the preconditioner.
Definition petsc.cpp:2587
PetscParVector * B
Right-hand side and solution vector.
Definition petsc.hpp:688
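The controls above are shared by all the wrappers (KSP, PC, SNES, TS); a typical configuration sketch, with solver standing for any of them:
solver.SetRelTol(1e-8);
solver.SetAbsTol(1e-12);
solver.SetMaxIter(500);
solver.SetPrintLevel(1);
solver.SetBCHandler(&bc);   // optional essential-BC handling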
Base class for solvers.
Definition operator.hpp:683
bool iterative_mode
If true, use the second argument of Mult() as an initial guess.
Definition operator.hpp:686
Data type sparse matrix.
Definition sparsemat.hpp:51
const int * HostReadJ() const
const real_t * HostReadData() const
void SortColumnIndices()
Sort the column indices corresponding to each row.
virtual int NumNonZeroElems() const
Returns the number of the nonzero elements in the matrix.
const int * HostReadI() const
Base abstract class for first order time dependent operators.
Definition operator.hpp:317
bool isHomogeneous() const
True if type is HOMOGENEOUS.
Definition operator.hpp:367
virtual Operator & GetExplicitGradient(const Vector &x) const
Return an Operator representing dG/dx at the given point x and the currently set time.
Definition operator.cpp:312
virtual void ImplicitMult(const Vector &x, const Vector &k, Vector &y) const
Perform the action of the implicit part of the operator, F: y = F(x, k, t) where t is the current time.
Definition operator.cpp:287
bool isImplicit() const
True if type is IMPLICIT or HOMOGENEOUS.
Definition operator.hpp:365
virtual void ExplicitMult(const Vector &x, Vector &y) const
Perform the action of the explicit part of the operator, G: y = G(x, t) where t is the current time.
Definition operator.cpp:282
virtual void SetTime(const real_t t_)
Set the current time.
Definition operator.hpp:360
virtual Operator & GetImplicitGradient(const Vector &x, const Vector &k, real_t shift) const
Return an Operator representing (dF/dk shift + dF/dx) at the given x, k, and the currently set time.
Definition operator.cpp:304
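A sketch of an explicit first-order system dx/dt = -x, suitable for an ODE solver such as PetscODESolver (the class name Decay is hypothetical):
class Decay : public mfem::TimeDependentOperator
{
public:
   Decay(int n) : mfem::TimeDependentOperator(n) {}
   void Mult(const mfem::Vector &x, mfem::Vector &y) const override
   { y = x; y *= -1.0; }   // f(x,t) = -x
};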
Vector data type.
Definition vector.hpp:80
void MakeDataOwner() const
Set the Vector data (host pointer) ownership flag.
Definition vector.hpp:206
Memory< real_t > & GetMemory()
Return a reference to the Memory object used by the Vector.
Definition vector.hpp:249
Memory< real_t > data
Definition vector.hpp:83
int Size() const
Returns the size of the vector.
Definition vector.hpp:218
virtual void UseDevice(bool use_dev) const
Enable execution of Vector operations using the mfem::Device.
Definition vector.hpp:136
void SetSize(int s)
Resize the vector to size s.
Definition vector.hpp:538
int dim
Definition ex24.cpp:53
void trans(const Vector &u, Vector &x)
Definition ex27.cpp:412
HYPRE_Int HYPRE_BigInt
real_t b
Definition lissajous.cpp:42
real_t a
Definition lissajous.cpp:41
real_t f(const Vector &p)
void write(std::ostream &os, T value)
Write 'value' to stream.
Definition binaryio.hpp:30
T read(std::istream &is)
Read a value from the stream and return it.
Definition binaryio.hpp:37
struct ::_p_Vec * Vec
Definition petsc.hpp:72
struct ::_p_Mat * Mat
Definition petsc.hpp:73
const T * Read(const Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true, or MemoryClass::HOST, otherwise.
Definition device.hpp:320
T * HostReadWrite(Memory< T > &mem, int size)
Shortcut to ReadWrite(Memory<T> &mem, int size, false)
Definition device.hpp:361
const T * HostRead(const Memory< T > &mem, int size)
Shortcut to Read(const Memory<T> &mem, int size, false)
Definition device.hpp:327
T * Write(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true, or MemoryClass::HOST, otherwise.
Definition device.hpp:337
OutStream out(std::cout)
Global stream used by the library for standard output. Initially it uses the same std::streambuf as std::cout.
Definition globals.hpp:66
PetscParMatrix * TripleMatrixProduct(PetscParMatrix *R, PetscParMatrix *A, PetscParMatrix *P)
Returns the matrix R * A * P.
Definition petsc.cpp:2090
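A usage sketch (R, A, P are assumed to be compatible PetscParMatrix pointers; ownership of the returned product by the caller is an assumption):
mfem::PetscParMatrix *RtAP = mfem::TripleMatrixProduct(R, A, P);
// ... use RtAP ...
delete RtAP;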
T * ReadWrite(Memory< T > &mem, int size, bool on_dev=true)
Get a pointer for read+write access to mem with the mfem::Device's DeviceMemoryClass, if on_dev = true, or MemoryClass::HOST, otherwise.
Definition device.hpp:354
void RAP(const DenseMatrix &A, const DenseMatrix &P, DenseMatrix &RAP)
void MFEMInitializePetsc()
Convenience functions to initialize/finalize PETSc.
Definition petsc.cpp:201
void MFEMFinalizePetsc()
Definition petsc.cpp:241
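A typical lifecycle sketch for a PETSc-enabled MFEM program (the argc/argv overload forwards the command line to PETSc's option database):
int main(int argc, char *argv[])
{
   mfem::Mpi::Init(argc, argv);
   mfem::MFEMInitializePetsc(&argc, &argv);
   // ... set up and solve ...
   mfem::MFEMFinalizePetsc();
   return 0;
}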
MemoryType GetMemoryType(MemoryClass mc)
Return a suitable MemoryType for a given MemoryClass.
T * HostWrite(Memory< T > &mem, int size)
Shortcut to Write(const Memory<T> &mem, int size, false)
Definition device.hpp:344
HypreParMatrix * ParMult(const HypreParMatrix *A, const HypreParMatrix *B, bool own_matrix)
Definition hypre.cpp:2949
float real_t
Definition config.hpp:43
MemoryType
Memory types supported by MFEM.
@ HOST
Host memory; using new[] and delete[].
@ DEVICE
Device memory; using CUDA or HIP *Malloc and *Free.
void EliminateBC(const HypreParMatrix &A, const HypreParMatrix &Ae, const Array< int > &ess_dof_list, const Vector &X, Vector &B)
Eliminate essential BC specified by ess_dof_list from the solution X to the r.h.s. B.
Definition hypre.cpp:3379
std::function< real_t(const Vector &)> f(real_t mass_coeff)
Definition lor_mms.hpp:30
struct s_NavierContext ctx
real_t p(const Vector &x, real_t t)
RefCoord t[3]
RefCoord s[3]
HYPRE_Int PetscInt
Definition petsc.hpp:50
struct _p_PetscObject * PetscObject
Definition petsc.hpp:54
real_t PetscScalar
Definition petsc.hpp:51
real_t PetscReal
Definition petsc.hpp:52
@ HIP_MASK
Bitwise-OR of all HIP backends.
Definition device.hpp:91
@ CUDA_MASK
Bitwise-OR of all CUDA backends.
Definition device.hpp:89