16static char help[] =
"...\n\n";
25 IntegrationType::GAUSS;
41 static boost::shared_ptr<SetUpSchur>
52int main(
int argc,
char *argv[]) {
59 DMType dm_name =
"DMMOFEM";
61 DMType dm_name_mg =
"DMMOFEM_MG";
65 moab::Core mb_instance;
66 moab::Interface &moab = mb_instance;
69 auto core_log = logging::core::get();
85 simple->getAddBoundaryFE() =
true;
88 auto add_shared_entities_on_skeleton = [&]() {
90 auto boundary_meshset =
simple->getBoundaryMeshSet();
91 auto skeleton_meshset =
simple->getSkeletonMeshSet();
97 0,
simple->getDim() - 1, skeleton_ents,
true);
98 skeleton_ents = subtract(skeleton_ents, bdy_ents);
100 CHKERR m_field.
get_moab().add_entities(skeleton_meshset, skeleton_ents);
104 CHKERR add_shared_entities_on_skeleton();
114 const char *list_bases[] = {
"ainsworth",
"ainsworth_lobatto",
"demkowicz",
117 PetscInt choice_base_value = AINSWORTH;
119 LASBASETOP, &choice_base_value, &flg);
121 if (flg != PETSC_TRUE)
124 if (choice_base_value == AINSWORTH)
126 if (choice_base_value == AINSWORTH_LOBATTO)
128 else if (choice_base_value == DEMKOWICZ)
130 else if (choice_base_value == BERNSTEIN)
133 enum spaces { hdiv, hcurl, last_space };
134 const char *list_spaces[] = {
"hdiv",
"hcurl"};
135 PetscInt choice_space_value = hdiv;
137 last_space, &choice_space_value, &flg);
138 if (flg != PETSC_TRUE)
141 if (choice_space_value == hdiv)
143 else if (choice_space_value == hcurl)
149 CHKERR simple->addDomainBrokenField(
"BROKEN", space, base, 1);
165 auto assemble_domain_lhs = [&](
auto &pip) {
172 IT>::OpMixDivTimesScalar<SPACE_DIM>;
178 pip.push_back(
new OpHdivHdiv(
"BROKEN",
"BROKEN", beta));
179 auto unity = []()
constexpr {
return 1; };
180 pip.push_back(
new OpHdivU(
"BROKEN",
"U", unity,
true));
188 op_loop_skeleton_side->getOpPtrVector(), {});
192 auto broken_data_ptr =
193 boost::make_shared<std::vector<BrokenBaseSideData>>();
198 op_loop_domain_side->getOpPtrVector(), {HDIV});
199 op_loop_domain_side->getOpPtrVector().push_back(
202 op_loop_skeleton_side->getOpPtrVector().push_back(op_loop_domain_side);
204 IT>::OpBrokenSpaceConstrain<1>;
205 op_loop_skeleton_side->getOpPtrVector().push_back(
206 new OpC(
"HYBRID", broken_data_ptr, 1.,
true,
false));
210 constexpr int partition = 1;
212 op_print->doWorkRhsHook = [&](
DataOperator *base_op_ptr,
int side,
216 if (
auto op_ptr =
dynamic_cast<BdyEleOp *
>(base_op_ptr)) {
217 auto fe_method = op_ptr->getFEMethod();
218 auto num_fe = fe_method->numeredEntFiniteElementPtr;
221 if (num_fe->getPStatus() & PSTATUS_SHARED)
222 MOFEM_LOG(
"SELF", Sev::inform) <<
"Num FE: " << *num_fe;
227 op_loop_skeleton_side->getOpPtrVector().push_back(op_print);
230 pip.push_back(op_loop_skeleton_side);
235 auto assemble_domain_rhs = [&](
auto &pip) {
240 auto source = [&](
const double x,
const double y,
241 const double z)
constexpr {
250 CHKERR assemble_domain_lhs(pip_mng->getOpDomainLhsPipeline());
251 CHKERR assemble_domain_rhs(pip_mng->getOpDomainRhsPipeline());
258 TetPolynomialBase::switchCacheBaseOn<HDIV>(
259 {pip_mng->getDomainLhsFE().get(), pip_mng->getDomainRhsFE().get()});
260 TetPolynomialBase::switchCacheBaseOn<L2>(
261 {pip_mng->getDomainLhsFE().get(), pip_mng->getDomainRhsFE().get()});
267 auto ksp = pip_mng->createKSP();
269 CHKERR KSPSetFromOptions(ksp);
270 BOOST_LOG_SCOPED_THREAD_ATTR(
"Timeline", attrs::timer());
271 MOFEM_LOG(
"TIMER", Sev::inform) <<
"KSPSetUp";
273 MOFEM_LOG(
"TIMER", Sev::inform) <<
"KSPSetUp <= Done";
275 MOFEM_LOG(
"TIMER", Sev::inform) <<
"KSPSolve";
276 CHKERR KSPSolve(ksp, f, x);
277 MOFEM_LOG(
"TIMER", Sev::inform) <<
"KSPSolve <= Done";
279 CHKERR VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD);
280 CHKERR VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD);
284 auto ksp = pip_mng->createKSP();
286 BOOST_LOG_SCOPED_THREAD_ATTR(
"Timeline", attrs::timer());
287 MOFEM_LOG(
"TIMER", Sev::inform) <<
"KSPSetUp";
288 CHKERR schur_ptr->setUp(ksp);
289 MOFEM_LOG(
"TIMER", Sev::inform) <<
"KSPSetUp <= Done";
291 MOFEM_LOG(
"TIMER", Sev::inform) <<
"KSPSolve";
292 CHKERR KSPSolve(ksp, f, x);
293 MOFEM_LOG(
"TIMER", Sev::inform) <<
"KSPSolve <= Done";
295 CHKERR VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD);
296 CHKERR VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD);
301 auto check_residual = [&](
auto x,
auto f) {
307 auto &domain_rhs = pip_mng->getOpDomainRhsPipeline();
313 auto div_flux_ptr = boost::make_shared<VectorDouble>();
315 "BROKEN", div_flux_ptr));
319 domain_rhs.push_back(
new OpUDivFlux(
"U", div_flux_ptr, beta));
320 auto source = [&](
const double x,
const double y,
321 const double z)
constexpr {
return 1; };
327 IT>::OpMixDivTimesU<3, 1, SPACE_DIM>;
330 auto flux_ptr = boost::make_shared<MatrixDouble>();
331 domain_rhs.push_back(
333 boost::shared_ptr<VectorDouble> u_ptr =
334 boost::make_shared<VectorDouble>();
336 domain_rhs.push_back(
new OpHDivH(
"BROKEN", u_ptr, beta));
337 domain_rhs.push_back(
new OpHdivFlux(
"BROKEN", flux_ptr, beta));
345 op_loop_skeleton_side->getOpPtrVector(), {});
349 auto broken_data_ptr =
350 boost::make_shared<std::vector<BrokenBaseSideData>>();
355 op_loop_domain_side->getOpPtrVector(), {HDIV});
356 op_loop_domain_side->getOpPtrVector().push_back(
358 auto flux_mat_ptr = boost::make_shared<MatrixDouble>();
359 op_loop_domain_side->getOpPtrVector().push_back(
361 op_loop_domain_side->getOpPtrVector().push_back(
365 op_loop_skeleton_side->getOpPtrVector().push_back(op_loop_domain_side);
367 IT>::OpBrokenSpaceConstrainDHybrid<1>;
369 IT>::OpBrokenSpaceConstrainDFlux<1>;
370 op_loop_skeleton_side->getOpPtrVector().push_back(
371 new OpC_dHybrid(
"HYBRID", broken_data_ptr, 1.));
372 auto hybrid_ptr = boost::make_shared<MatrixDouble>();
373 op_loop_skeleton_side->getOpPtrVector().push_back(
375 op_loop_skeleton_side->getOpPtrVector().push_back(
376 new OpC_dBroken(broken_data_ptr, hybrid_ptr, 1.));
379 domain_rhs.push_back(op_loop_skeleton_side);
382 CHKERR VecGhostUpdateBegin(f, INSERT_VALUES, SCATTER_FORWARD);
383 CHKERR VecGhostUpdateEnd(f, INSERT_VALUES, SCATTER_FORWARD);
385 pip_mng->getDomainRhsFE()->f = f;
386 pip_mng->getSkeletonRhsFE()->f = f;
387 pip_mng->getDomainRhsFE()->x = x;
388 pip_mng->getSkeletonRhsFE()->x = x;
391 simple->getDomainFEName(),
392 pip_mng->getDomainRhsFE());
394 CHKERR VecGhostUpdateBegin(f, ADD_VALUES, SCATTER_REVERSE);
395 CHKERR VecGhostUpdateEnd(f, ADD_VALUES, SCATTER_REVERSE);
396 CHKERR VecAssemblyBegin(f);
400 CHKERR VecNorm(f, NORM_2, &fnrm);
401 MOFEM_LOG_C(
"AT", Sev::inform,
"Residual %3.4e", fnrm);
403 constexpr double eps = 1e-8;
406 "Residual norm larger than accepted");
411 auto calculate_error = [&]() {
415 auto &domain_rhs = pip_mng->getOpDomainRhsPipeline();
421 auto u_grad_ptr = boost::make_shared<MatrixDouble>();
422 auto flux_val_ptr = boost::make_shared<MatrixDouble>();
423 auto div_val_ptr = boost::make_shared<VectorDouble>();
424 auto source_ptr = boost::make_shared<VectorDouble>();
426 domain_rhs.push_back(
428 domain_rhs.push_back(
431 "BROKEN", div_val_ptr));
432 auto source = [&](
const double x,
const double y,
433 const double z)
constexpr {
return -1; };
436 enum { DIV, GRAD,
LAST };
439 domain_rhs.push_back(
442 u_grad_ptr, mpi_vec, GRAD, flux_val_ptr));
445 simple->getDomainFEName(),
446 pip_mng->getDomainRhsFE());
447 CHKERR VecAssemblyBegin(mpi_vec);
448 CHKERR VecAssemblyEnd(mpi_vec);
451 const double *error_ind;
452 CHKERR VecGetArrayRead(mpi_vec, &error_ind);
454 <<
"Approximation error ||div flux - source||: "
455 << std::sqrt(error_ind[DIV]);
456 MOFEM_LOG(
"AT", Sev::inform) <<
"Approximation error ||grad-flux||: "
457 << std::sqrt(error_ind[GRAD]);
458 CHKERR VecRestoreArrayRead(mpi_vec, &error_ind);
464 auto get_post_proc_fe = [&]() {
467 auto post_proc_fe = boost::make_shared<PostProcEle>(m_field);
472 ForcesAndSourcesCore::UserDataOperator::AdjCache>());
473 post_proc_fe->getOpPtrVector().push_back(op_loop_side);
476 op_loop_side->getOpPtrVector(), {HDIV});
477 auto u_vec_ptr = boost::make_shared<VectorDouble>();
478 auto flux_mat_ptr = boost::make_shared<MatrixDouble>();
479 op_loop_side->getOpPtrVector().push_back(
481 op_loop_side->getOpPtrVector().push_back(
483 op_loop_side->getOpPtrVector().push_back(
487 post_proc_fe->getPostProcMesh(),
489 post_proc_fe->getMapGaussPts(),
493 {{
"BROKEN", flux_mat_ptr}},
502 auto post_proc_fe = get_post_proc_fe();
504 simple->getBoundaryFEName(), post_proc_fe);
505 CHKERR post_proc_fe->writeFile(
"out_result.h5m");
508 CHKERR check_residual(x, f);
533 CHKERR KSPSetFromOptions(ksp);
535 CHKERR KSPGetPC(ksp, &pc);
537 PetscBool is_pcfs = PETSC_FALSE;
538 PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);
541 MOFEM_LOG(
"AT", Sev::inform) <<
"Setup Schur pc";
543 auto create_sub_dm = [&]() {
546 auto create_dm = [&](
548 std::string problem_name,
549 std::vector<std::string> fe_names,
550 std::vector<std::string> fields,
556 auto create_dm_imp = [&]() {
560 for (
auto fe : fe_names) {
564 for (
auto field : fields) {
573 "Error in creating schurDM. It is possible that schurDM is "
578 auto schur_dm = create_dm(
582 {
simple->getDomainFEName(),
simple->getSkeletonFEName()},
588 auto block_dm = create_dm(
592 {
simple->getDomainFEName(),
simple->getSkeletonFEName()},
598 return std::make_tuple(schur_dm, block_dm);
601 auto get_nested_mat_data = [&](
auto schur_dm,
auto block_dm) {
609 simple->getDomainFEName(),
613 {
"BROKEN",
"BROKEN"},
624 simple->getSkeletonFEName(),
628 {
"BROKEN",
"HYBRID"}, {
"HYBRID",
"BROKEN"}
640 {schur_dm, block_dm}, block_mat_data,
642 {
"BROKEN",
"U"}, {
nullptr,
nullptr}, true
647 auto set_ops = [&](
auto schur_dm) {
652 boost::shared_ptr<BlockStructure> block_data;
655 pip_mng->getOpDomainLhsPipeline().push_front(
657 pip_mng->getOpDomainLhsPipeline().push_back(
664 auto pre_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
665 auto post_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
667 pre_proc_schur_lhs_ptr->preProcessHook = [
this]() {
670 MOFEM_LOG(
"AT", Sev::verbose) <<
"Lhs Assemble Begin";
674 post_proc_schur_lhs_ptr->postProcessHook = [
this, ao_up,
675 post_proc_schur_lhs_ptr]() {
677 MOFEM_LOG(
"AT", Sev::verbose) <<
"Lhs Assemble End";
678 CHKERR MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
679 CHKERR MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);
680 MOFEM_LOG(
"AT", Sev::verbose) <<
"Lhs Assemble Finish";
685 ksp_ctx_ptr->getPreProcSetOperators().push_front(pre_proc_schur_lhs_ptr);
686 ksp_ctx_ptr->getPostProcSetOperators().push_back(post_proc_schur_lhs_ptr);
691 auto set_pc = [&](
auto pc,
auto block_dm) {
693 auto block_is =
getDMSubData(block_dm)->getSmartRowIs();
694 CHKERR PCFieldSplitSetIS(pc, NULL, block_is);
695 CHKERR PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, S);
699 auto set_diagonal_pc = [&](
auto pc,
auto schur_dm) {
705 CHKERR PCSetOperators(pc, A, P);
709 CHKERR PCFieldSplitSchurGetSubKSP(pc, PETSC_NULL, &subksp);
710 auto get_pc = [](
auto ksp) {
712 CHKERR KSPGetPC(ksp, &pc_raw);
717 auto set_pc_p_mg = [&](
auto dm,
auto pc) {
721 PetscBool same = PETSC_FALSE;
722 PetscObjectTypeCompare((PetscObject)pc, PCMG, &same);
727 CHKERR PCSetFromOptions(pc);
732 CHKERR set_pc_p_mg(schur_dm, get_pc(subksp[1]));
738 auto [schur_dm, block_dm] = create_sub_dm();
740 auto nested_mat_data = get_nested_mat_data(schur_dm, block_dm);
744 CHKERR MatSetDM(S, PETSC_NULL);
747 CHKERR MatSetBlockSize(S, bs);
750 CHKERR set_pc(pc, block_dm);
752 CHKERR KSPGetDM(ksp, &solver_dm);
754 CHKERR DMSetMatType(solver_dm, MATSHELL);
758 CHKERR set_diagonal_pc(pc, schur_dm);
762 "PC is not set to PCFIELDSPLIT");
767boost::shared_ptr<SetUpSchur>
769 return boost::shared_ptr<SetUpSchur>(
new SetUpSchurImpl(m_field));
#define MOFEM_LOG_C(channel, severity, format,...)
void simple(double P1[], double P2[], double P3[], double c[], const int N)
FormsIntegrators< DomainEleOp >::Assembly< PETSC >::LinearForm< GAUSS >::OpSource< 1, FIELD_DIM > OpDomainSource
ElementsAndOps< SPACE_DIM >::BoundaryEle BoundaryEle
ElementsAndOps< SPACE_DIM >::DomainEle DomainEle
[Define dimension]
#define CATCH_ERRORS
Catch errors.
FieldApproximationBase
approximation base
@ AINSWORTH_LEGENDRE_BASE
Ainsworth Cole (Legendre) approx. base nme:nme847.
@ AINSWORTH_BERNSTEIN_BEZIER_BASE
#define CHK_THROW_MESSAGE(err, msg)
Check and throw MoFEM exception.
FieldSpace
approximation spaces
@ L2
field with C-1 continuity
@ HCURL
field with continuous tangents
@ HDIV
field with continuous normal traction
#define MoFEMFunctionBegin
First executable line of each MoFEM function, used for error handling. Final line of MoFEM functions ...
@ MOFEM_ATOM_TEST_INVALID
#define MoFEMFunctionReturn(a)
Last executable line of each PETSc function used for error handling. Replaces return()
#define CHKERR
Inline error check.
PetscErrorCode DMMoFEMCreateSubDM(DM subdm, DM dm, const char problem_name[])
Must be called by user to set Sub DM MoFEM data structures.
PetscErrorCode DMMoFEMAddElement(DM dm, std::string fe_name)
add element to dm
PetscErrorCode DMMoFEMSetSquareProblem(DM dm, PetscBool square_problem)
set squared problem
PetscErrorCode DMoFEMMeshToLocalVector(DM dm, Vec l, InsertMode mode, ScatterMode scatter_mode)
set local (or ghosted) vector values on mesh for partition only
PetscErrorCode DMMoFEMAddSubFieldRow(DM dm, const char field_name[])
PetscErrorCode DMRegister_MoFEM(const char sname[])
Register MoFEM problem.
MoFEMErrorCode DMRegister_MGViaApproxOrders(const char sname[])
Register DM for Multi-Grid via approximation orders.
PetscErrorCode DMoFEMLoopFiniteElements(DM dm, const char fe_name[], MoFEM::FEMethod *method, CacheTupleWeakPtr cache_ptr=CacheTupleSharedPtr())
Executes FEMethod for finite elements in DM.
auto createDMVector(DM dm)
Get smart vector from DM.
PetscErrorCode DMMoFEMAddSubFieldCol(DM dm, const char field_name[])
static LoggerType & setLog(const std::string channel)
Set and reset channel logger.
#define MOFEM_LOG(channel, severity)
Log.
#define MOFEM_LOG_TAG(channel, tag)
Tag channel.
#define NBFACETRI_L2(P)
Number of base functions on triangle for L2 space.
#define NBEDGE_L2(P)
Number of base functions on edge from L2 space.
FormsIntegrators< DomainEleOp >::Assembly< PETSC >::BiLinearForm< GAUSS >::OpMixDivTimesScalar< 2 > OpHdivU
PetscErrorCode MoFEMErrorCode
MoFEM/PETSc error code.
implementation of Data Operators for Forces and Sources
OpSchurAssembleBase * createOpSchurAssembleEnd(std::vector< std::string > fields_name, std::vector< boost::shared_ptr< Range > > field_ents, SmartPetscObj< AO > ao, SmartPetscObj< Mat > schur, bool sym_schur, bool symm_op)
Construct a new Op Schur Assemble End object.
PetscErrorCode PetscOptionsGetInt(PetscOptions *, const char pre[], const char name[], PetscInt *ivalue, PetscBool *set)
MoFEMErrorCode DMMoFEMGetBlocMatData(DM dm, boost::shared_ptr< BlockStructure > &)
Get data for block mat.
MoFEMErrorCode setSchurA00MatSolvePC(SmartPetscObj< PC > pc)
Set PC for A00 block.
SmartPetscObj< Vec > vectorDuplicate(Vec vec)
Create duplicate vector of smart vector.
boost::shared_ptr< PCMGSetUpViaApproxOrdersCtx > createPCMGSetUpViaApproxOrdersCtx(DM dm, Mat A, bool use_shell_mat)
createPCMGSetUpViaApproxOrdersCtx
auto createVectorMPI(MPI_Comm comm, PetscInt n, PetscInt N)
Create MPI Vector.
auto getDMKspCtx(DM dm)
Get KSP context data structure used by DM.
auto createDMHybridisedL2Matrix(DM dm)
Get smart hybridised L2 matrix from DM.
auto getDMSubData(DM dm)
Get sub problem data structure.
MoFEMErrorCode PCMGSetUpViaApproxOrders(PC pc, boost::shared_ptr< PCMGSetUpViaApproxOrdersCtx > ctx, int verb)
Function build MG structure.
boost::shared_ptr< BlockStructure > createBlockMatStructure(DM dm, SchurFEOpsFEandFields schur_fe_op_vec)
Create a Mat Diag Blocks object.
boost::shared_ptr< NestSchurData > createSchurNestedMatrixStruture(std::pair< SmartPetscObj< DM >, SmartPetscObj< DM > > dms, boost::shared_ptr< BlockStructure > block_mat_data_ptr, std::vector< std::string > fields_names, std::vector< boost::shared_ptr< Range > > field_ents, bool add_preconditioner_block)
Get the Schur Nest Mat Array object.
auto createAOMappingIS(IS isapp, IS ispetsc)
Creates an application mapping using two index sets.
PetscErrorCode PetscOptionsGetEList(PetscOptions *, const char pre[], const char name[], const char *const *list, PetscInt next, PetscInt *value, PetscBool *set)
MoFEMErrorCode DMMoFEMSetNestSchurData(DM dm, boost::shared_ptr< NestSchurData >)
auto createDMNestSchurMat(DM dm)
auto createDM(MPI_Comm comm, const std::string dm_type_name)
Creates smart DM object.
OpSchurAssembleBase * createOpSchurAssembleBegin()
auto createDMBlockMat(DM dm)
OpPostProcMapInMoab< SPACE_DIM, SPACE_DIM > OpPPMap
#define EXECUTABLE_DIMENSION
PipelineManager::ElementsAndOpsByDim< SPACE_DIM >::FaceSideEle EleOnSide
FormsIntegrators< DomainEleOp >::Assembly< PETSC >::LinearForm< GAUSS >::OpMixDivTimesU< 3, 1, 2 > OpHDivH
Integrate Rhs div flux base times temperature (T)
Add operators pushing bases from local to physical configuration.
Simple interface for fast problem set-up.
MoFEMErrorCode removeSideDOFs(const std::string problem_name, const std::string block_name, const std::string field_name, int bridge_dim, int lo, int hi, bool is_distributed_mesh=true)
Remove side DOFs.
virtual moab::Interface & get_moab()=0
virtual MPI_Comm & get_comm() const =0
virtual int get_comm_rank() const =0
static MoFEMErrorCode Initialize(int *argc, char ***args, const char file[], const char help[])
Initializes the MoFEM database PETSc, MOAB and MPI.
static MoFEMErrorCode Finalize()
Checks for options to be called at the conclusion of the program.
base operator to do operations at Gauss Pt. level
Deprecated interface functions.
Data on single entity (This is passed as argument to DataOperator::doWork)
static boost::shared_ptr< SinkType > createSink(boost::shared_ptr< std::ostream > stream_ptr, std::string comm_filter)
Create a sink object.
static boost::shared_ptr< std::ostream > getStrmWorld()
Get the strm world object.
Get norm of input VectorDouble for Tensor0.
Get norm of input MatrixDouble for Tensor1.
Calculate tenor field using vectorial base, i.e. Hdiv/Hcurl.
Get vector field for H-div approximation.
Calculate divergence of vector field.
Get field gradients at integration pts for scalar field rank 0, i.e. vector field.
Get value at integration points for scalar field.
Get values at integration pts for tensor field rank 1, i.e. vector field.
Get values from scalar function at integration points and save them to VectorDouble for Tensor0.
Element used to execute operators on side of the element.
Post post-proc data at points from hash maps.
PipelineManager interface.
Simple interface for fast problem set-up.
intrusive_ptr for managing petsc objects
MoFEMErrorCode getInterface(IFACE *&iface) const
Get interface reference to pointer of interface.
[Push operators to pipeline]
virtual MoFEMErrorCode setUp(SmartPetscObj< KSP >)=0
virtual ~SetUpSchur()=default
static boost::shared_ptr< SetUpSchur > createSetUpSchur(MoFEM::Interface &m_field)
virtual ~SetUpSchurImpl()=default
SetUpSchurImpl(MoFEM::Interface &m_field)
MoFEMErrorCode setUp(SmartPetscObj< KSP >)
MoFEM::Interface & mField
constexpr AssemblyType AT
constexpr IntegrationType IT
BoundaryEle::UserDataOperator BdyEleOp
FormsIntegrators< DomainEleOp >::Assembly< PETSC >::LinearForm< GAUSS >::OpBaseTimesVector< 3, SPACE_DIM, 1 > OpHdivFlux
Integrating Rhs flux base (1/k) flux (FLUX)
FormsIntegrators< DomainEleOp >::Assembly< PETSC >::BiLinearForm< GAUSS >::OpMass< 3, SPACE_DIM > OpHdivHdiv
[Linear elastic problem]