constexpr AssemblyType A = (SCHUR_ASSEMBLE) ? AssemblyType::BLOCK_SCHUR
                                            : AssemblyType::PETSC; // assembly type picked at compile time

constexpr IntegrationType I = IntegrationType::GAUSS;

using OpK = FormsIntegrators<DomainEleOp>::Assembly<A>::BiLinearForm<
    I>::OpGradSymTensorGrad<BASE_DIM, SPACE_DIM, SPACE_DIM, 0>;
using OpInternalForce = FormsIntegrators<DomainEleOp>::Assembly<A>::LinearForm<
    I>::OpGradTimesSymTensor<BASE_DIM, SPACE_DIM, SPACE_DIM>;
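As a reading aid, OpK assembles the stiffness bilinear form of linear elasticity and
OpInternalForce the corresponding internal-force linear form,

\[
  a(\mathbf u,\mathbf v)=\int_\Omega \nabla^{s}\mathbf v:\mathbb D:\nabla^{s}\mathbf u\,\mathrm{d}V,
  \qquad
  f_{\mathrm{int}}(\mathbf v)=\int_\Omega \nabla^{s}\mathbf v:\boldsymbol\sigma\,\mathrm{d}V,
\]

where \(\nabla^{s}\) is the symmetric gradient and \(\mathbb D\) the elasticity tensor
assembled by addMatBlockOps below.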
  MoFEMErrorCode
  addMatBlockOps(boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
                 std::string field_name, std::string block_name,
                 boost::shared_ptr<MatrixDouble> mat_D_Ptr, Sev sev);

// ...

MoFEMErrorCode Example::addMatBlockOps(
    boost::ptr_deque<ForcesAndSourcesCore::UserDataOperator> &pipeline,
    std::string field_name, std::string block_name,
    boost::shared_ptr<MatrixDouble> mat_D_Ptr, Sev sev) {
  struct OpMatBlocks : public DomainEleOp {
    OpMatBlocks(std::string field_name, boost::shared_ptr<MatrixDouble> m,
                /* ... */
                std::vector<const CubitMeshSets *> meshset_vec_ptr)
        /* : ... member initialisers ... */ {
      std::fill(&(doEntities[MBEDGE]), &(doEntities[MBMAXTYPE]), false);
      CHK_THROW_MESSAGE(extractBlockData(m_field, meshset_vec_ptr, sev),
                        "Can not get data from block");
    }

    MoFEMErrorCode doWork(int side, EntityType type,
                          EntitiesFieldData::EntData &data) {
      // ...
      for (auto &b : blockData) {
        if (b.blockEnts.find(getFEEntityHandle()) != b.blockEnts.end()) {
          CHKERR getMatDPtr(matDPtr, b.bulkModulusK, b.shearModulusG);
          // ...
        }
      }
      CHKERR getMatDPtr(matDPtr, bulkModulusKDefault, shearModulusGDefault);
      // ...
    }

    boost::shared_ptr<MatrixDouble> matDPtr;
    // ...
    double shearModulusG;
    // ...
    double bulkModulusKDefault;
    double shearModulusGDefault;
    std::vector<BlockData> blockData;
    MoFEMErrorCode
    extractBlockData(MoFEM::Interface &m_field,
                     std::vector<const CubitMeshSets *> meshset_vec_ptr,
                     Sev sev) {
      for (auto m : meshset_vec_ptr) {
        std::vector<double> block_data;
        CHKERR m->getAttributes(block_data);
        if (block_data.size() < 2) {
          SETERRQ(PETSC_COMM_SELF, MOFEM_DATA_INCONSISTENCY,
                  "Expected that block has two attributes");
        }
        auto get_block_ents = [&]() {
          Range ents;
          CHKERR
          m_field.get_moab().get_entities_by_handle(m->meshset, ents, true);
          return ents;
        };
        // ...
      }
    }

    MoFEMErrorCode getMatDPtr(boost::shared_ptr<MatrixDouble> mat_D_ptr,
                              double bulk_modulus_K, double shear_modulus_G) {
      auto set_material_stiffness = [&]() {
        // ... fill *mat_D_ptr with the isotropic elasticity tensor ...
      };
      // ...
      set_material_stiffness();
    }
  };
  pipeline.push_back(new OpMatBlocks(
      // ... constructor arguments; blocksets are selected with a regular
      //     expression built from the block name:
      (boost::format("%s(.*)") % block_name).str()
      // ...
      ));
  enum bases { AINSWORTH, DEMKOWICZ, LASBASETOPT };
  const char *list_bases[LASBASETOPT] = {"ainsworth", "demkowicz"};
  PetscInt choice_base_value = AINSWORTH;
  CHKERR PetscOptionsGetEList(PETSC_NULL, NULL, "-base", list_bases,
                              LASBASETOPT, &choice_base_value, PETSC_NULL);

  switch (choice_base_value) {
  case AINSWORTH:
    // ...
    MOFEM_LOG("WORLD", Sev::inform)
        << "Set AINSWORTH_LEGENDRE_BASE for displacements";
    break;
  case DEMKOWICZ:
    // ...
    MOFEM_LOG("WORLD", Sev::inform)
        << "Set DEMKOWICZ_JACOBI_BASE for displacements";
    break;
  }
  auto project_ho_geometry = [&]() {
    // ...
  };
  CHKERR project_ho_geometry();

  // ...

  auto no_rule = [](int, int, int) { return -1; };
  field_eval_fe_ptr->getRuleHook = no_rule;
  field_eval_fe_ptr->getOpPtrVector().push_back(
      /* ... */);
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_X",
                                           "U", 0, 0);
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Y",
                                           "U", 1, 1);
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Z",
                                           "U", 2, 2);
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(),
                                           "REMOVE_ALL", "U", 0, 3);

  // Mark essential (displacement) boundary conditions
  CHKERR bc_mng->pushMarkDOFsOnEntities<DisplacementCubitBcData>(
      simple->getProblemName(), "U");

  // Multi-point constraints
  CHKERR bc_mng->addBlockDOFsToMPCs(simple->getProblemName(), "U");
  auto integration_rule_bc = [](int, int, int approx_order) {
    return 2 * approx_order + 1;
  };
  // ...
  CHKERR pip->setBoundaryRhsIntegrationRule(integration_rule_bc);
  CHKERR pip->setBoundaryLhsIntegrationRule(integration_rule_bc);

  // Push operators mapping base functions to the physical (higher-order) geometry
  CHKERR AddHOOps</* ... */>::add(
      pip->getOpDomainLhsPipeline(), {H1}, "GEOMETRY");
  CHKERR AddHOOps</* ... */>::add(
      pip->getOpDomainRhsPipeline(), {H1}, "GEOMETRY");
  CHKERR AddHOOps</* ... */>::add(
      pip->getOpBoundaryRhsPipeline(), {NOSPACE}, "GEOMETRY");
  CHKERR AddHOOps</* ... */>::add(
      pip->getOpBoundaryLhsPipeline(), {NOSPACE}, "GEOMETRY");
  auto mat_D_ptr = boost::make_shared<MatrixDouble>();
  // ...
  CHKERR addMatBlockOps(pip->getOpDomainLhsPipeline(), "U", "MAT_ELASTIC",
                        mat_D_ptr, Sev::verbose);
  pip->getOpDomainLhsPipeline().push_back(new OpK("U", "U", mat_D_ptr));

  auto mat_grad_ptr = boost::make_shared<MatrixDouble>();
  auto mat_strain_ptr = boost::make_shared<MatrixDouble>();
  auto mat_stress_ptr = boost::make_shared<MatrixDouble>();

  pip->getOpDomainRhsPipeline().push_back(
      /* ... gradient of "U" into mat_grad_ptr ... */);
  CHKERR addMatBlockOps(pip->getOpDomainRhsPipeline(), "U", "MAT_ELASTIC",
                        mat_D_ptr, Sev::inform);
  pip->getOpDomainRhsPipeline().push_back(
      /* ... symmetrise the gradient into mat_strain_ptr ... */);
  pip->getOpDomainRhsPipeline().push_back(
      /* ... compute stress from strain via mat_D_ptr: */
      mat_strain_ptr, mat_stress_ptr, mat_D_ptr));
  pip->getOpDomainRhsPipeline().push_back(
      /* ... internal force operator with constant scale factor: */
      [](double, double, double) constexpr { return -1; }));
  // Body forces (domain natural bc)
  CHKERR /* ... */(
      pip->getOpDomainRhsPipeline(), mField, "U", Sev::inform);
  // Boundary tractions, fluid pressure and spring bc (boundary natural bc, rhs)
  CHKERR /* ... */(
      pip->getOpBoundaryRhsPipeline(), mField, "U", -1, Sev::inform);
  // Spring stiffness contributions (boundary natural bc, lhs)
  CHKERR /* ... */(
      pip->getOpBoundaryLhsPipeline(), mField, "U", Sev::verbose);
  static boost::shared_ptr<SetUpSchur>
  createSetUpSchur(MoFEM::Interface &m_field);
  auto solver = pip->createKSP();
  CHKERR KSPSetFromOptions(solver);
  // ...
  auto dm = simple->getDM();
  auto set_essential_bc = [&]() {
    // ...
    auto pre_proc_rhs = boost::make_shared<FEMethod>();
    auto post_proc_rhs = boost::make_shared<FEMethod>();
    auto post_proc_lhs = boost::make_shared<FEMethod>();

    auto get_pre_proc_hook = [&]() {
      // ...
    };
    pre_proc_rhs->preProcessHook = get_pre_proc_hook();

    auto get_post_proc_hook_rhs = [this, post_proc_rhs]() {
      // ... enforce essential bc on the assembled rhs:
      CHKERR /* ... */ (mField, post_proc_rhs, 1.)();
      // ...
    };
    auto get_post_proc_hook_lhs = [this, post_proc_lhs]() {
      // ... enforce essential bc on the lhs diagonal:
      CHKERR /* ... */ (mField, post_proc_lhs, 1.)();
      // ...
    };

    post_proc_rhs->postProcessHook = get_post_proc_hook_rhs;
    post_proc_lhs->postProcessHook = get_post_proc_hook_lhs;

    ksp_ctx_ptr->getPreProcComputeRhs().push_front(pre_proc_rhs);
    ksp_ctx_ptr->getPostProcComputeRhs().push_back(post_proc_rhs);
    ksp_ctx_ptr->getPostProcSetOperators().push_back(post_proc_lhs);
  };
  auto setup_and_solve = [&]() {
    // ...
    BOOST_LOG_SCOPED_THREAD_ATTR("Timeline", attrs::timer());
    MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp";
    CHKERR KSPSetUp(solver);
    MOFEM_LOG("TIMER", Sev::inform) << "KSPSetUp <= Done";
    MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve";
    CHKERR KSPSolve(solver, F, D);
    MOFEM_LOG("TIMER", Sev::inform) << "KSPSolve <= Done";
    // ...
  };
  CHKERR set_essential_bc();

  if (A == AssemblyType::BLOCK_SCHUR || A == AssemblyType::SCHUR) {
    // ...
    CHKERR schur_ptr->setUp(solver);
    // ...
  }
  // ...

  CHKERR VecGhostUpdateBegin(D, INSERT_VALUES, SCATTER_FORWARD);
  CHKERR VecGhostUpdateEnd(D, INSERT_VALUES, SCATTER_FORWARD);
    // log evaluated displacements (2D case):
    MOFEM_LOG(/* ... */)
        << "U_X: " << t_disp(0) << " U_Y: " << t_disp(1);
    // ... and in 3D:
    MOFEM_LOG(/* ... */)
        << "U_X: " << t_disp(0) << " U_Y: " << t_disp(1)
        << " U_Z: " << t_disp(2);
  auto det_ptr = boost::make_shared<VectorDouble>();
  auto jac_ptr = boost::make_shared<MatrixDouble>();
  auto inv_jac_ptr = boost::make_shared<MatrixDouble>();
  // ...
  pip->getDomainRhsFE().reset();
  pip->getDomainLhsFE().reset();
  pip->getBoundaryRhsFE().reset();
  pip->getBoundaryLhsFE().reset();
  // ...
  auto post_proc_mesh = boost::make_shared<moab::Core>();
  auto post_proc_begin = boost::make_shared<PostProcBrokenMeshInMoabBaseBegin>(
      mField, post_proc_mesh);
  auto post_proc_end = boost::make_shared<PostProcBrokenMeshInMoabBaseEnd>(
      mField, post_proc_mesh);
  auto calculate_stress_ops = [&](auto &pip) {
    // ...
    auto mat_grad_ptr = boost::make_shared<MatrixDouble>();
    auto mat_strain_ptr = boost::make_shared<MatrixDouble>();
    auto mat_stress_ptr = boost::make_shared<MatrixDouble>();
    // ...
    auto mat_D_ptr = boost::make_shared<MatrixDouble>();
    // ...
    pip.push_back(
        /* ... stress from strain via the elasticity tensor: */
        mat_strain_ptr, mat_stress_ptr, mat_D_ptr));
    auto u_ptr = boost::make_shared<MatrixDouble>();
    // ...
    auto x_ptr = boost::make_shared<MatrixDouble>();
    // ...
    return boost::make_tuple(u_ptr, x_ptr, mat_strain_ptr, mat_stress_ptr);
  };
  auto get_tag_id_on_pmesh = [&](bool post_proc_skin) {
    int def_val_int = 0;
    Tag tag_mat;
    CHKERR post_proc_mesh->tag_get_handle(
        "MAT_ELASTIC", 1, MB_TYPE_INTEGER, tag_mat,
        MB_TAG_CREAT | MB_TAG_SPARSE, &def_val_int);
    auto meshset_vec_ptr =
        mField.getInterface<MeshsetsManager>()->getCubitMeshsetPtr(
            std::regex((boost::format("%s(.*)") % "MAT_ELASTIC").str()));
    // ...
    std::unique_ptr<Skinner> skin_ptr;
    if (post_proc_skin) {
      // ...
      auto boundary_meshset = simple->getBoundaryMeshSet();
      // ...
    }

    for (auto m : meshset_vec_ptr) {
      // ...
      int const id = m->getMeshsetId();
      // ...
      ents_3d = ents_3d.subset_by_dimension(SPACE_DIM);
      if (post_proc_skin) {
        // ...
        CHKERR skin_ptr->find_skin(0, ents_3d, false, skin_faces);
        ents_3d = intersect(skin_ents, skin_faces);
        // ...
      }
      // ...
    }
    // ...
  };
  auto post_proc_domain = [&](auto post_proc_mesh) {
    auto post_proc_fe =
        boost::make_shared<PostProcEleDomain>(mField, post_proc_mesh);
    // ...
    auto [u_ptr, x_ptr, mat_strain_ptr, mat_stress_ptr] =
        calculate_stress_ops(post_proc_fe->getOpPtrVector());

    post_proc_fe->getOpPtrVector().push_back(
        new OpPPMap(
            post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
            // ... (scalar field map)
            {{"U", u_ptr}, {"GEOMETRY", x_ptr}},
            // ... (general tensor field map)
            {{"STRAIN", mat_strain_ptr}, {"STRESS", mat_stress_ptr}}
            // ...
            ));

    post_proc_fe->setTagsToTransfer({get_tag_id_on_pmesh(false)});
    return post_proc_fe;
  };
  auto post_proc_boundary = [&](auto post_proc_mesh) {
    auto post_proc_fe =
        boost::make_shared<PostProcEleBdy>(mField, post_proc_mesh);
    CHKERR AddHOOps</* ... */>::add(
        post_proc_fe->getOpPtrVector(), {}, "GEOMETRY");
    // ... create op_loop_side, which runs the domain operators on the side of
    //     each boundary face ...
    auto [u_ptr, x_ptr, mat_strain_ptr, mat_stress_ptr] =
        calculate_stress_ops(op_loop_side->getOpPtrVector());
    post_proc_fe->getOpPtrVector().push_back(op_loop_side);

    auto mat_traction_ptr = boost::make_shared<MatrixDouble>();
    post_proc_fe->getOpPtrVector().push_back(
        /* ... evaluate boundary traction into mat_traction_ptr ... */);

    post_proc_fe->getOpPtrVector().push_back(
        new OpPPMap(
            post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
            // ... (scalar field map)
            {{"U", u_ptr}, {"GEOMETRY", x_ptr}, {"T", mat_traction_ptr}},
            // ... (general tensor field map)
            {{"STRAIN", mat_strain_ptr}, {"STRESS", mat_stress_ptr}}
            // ...
            ));

    post_proc_fe->setTagsToTransfer({get_tag_id_on_pmesh(true)});
    return post_proc_fe;
  };
  PetscBool post_proc_skin_only = PETSC_FALSE;
  // ...
  post_proc_skin_only = PETSC_TRUE;
  CHKERR PetscOptionsGetBool(/* ... */,
                             &post_proc_skin_only, PETSC_NULL);
  // ...
  if (post_proc_skin_only == PETSC_FALSE) {
    pip->getDomainRhsFE() = post_proc_domain(post_proc_mesh);
  } else {
    pip->getBoundaryRhsFE() = post_proc_boundary(post_proc_mesh);
  }

  CHKERR DMoFEMPreProcessFiniteElements(simple->getDM(),
                                        post_proc_begin->getFEMethod());
  CHKERR pip->loopFiniteElements();
  CHKERR DMoFEMPostProcessFiniteElements(simple->getDM(),
                                         post_proc_end->getFEMethod());
  CHKERR post_proc_end->writeFile("out_elastic.h5m");
  pip->getDomainRhsFE().reset();
  pip->getDomainLhsFE().reset();
  pip->getBoundaryRhsFE().reset();
  pip->getBoundaryLhsFE().reset();
  // ...
  auto integration_rule = [](int, int, int p_data) { return 2 * p_data + 1; };
  CHKERR AddHOOps</* ... */>::add(
      pip->getOpDomainRhsPipeline(), {H1}, "GEOMETRY");
  CHKERR AddHOOps</* ... */>::add(
      pip->getOpBoundaryRhsPipeline(), {}, "GEOMETRY");

  auto mat_grad_ptr = boost::make_shared<MatrixDouble>();
  auto mat_strain_ptr = boost::make_shared<MatrixDouble>();
  auto mat_stress_ptr = boost::make_shared<MatrixDouble>();

  pip->getOpDomainRhsPipeline().push_back(
      /* ... gradient of "U" into mat_grad_ptr ... */);
  pip->getOpDomainRhsPipeline().push_back(
      /* ... symmetrise the gradient into mat_strain_ptr ... */);

  auto mat_D_ptr = boost::make_shared<MatrixDouble>();
  CHKERR addMatBlockOps(pip->getOpDomainRhsPipeline(), "U", "MAT_ELASTIC",
                        mat_D_ptr, Sev::verbose);
  pip->getOpDomainRhsPipeline().push_back(
      /* ... stress from strain: */
      mat_strain_ptr, mat_stress_ptr, mat_D_ptr));
  pip->getOpDomainRhsPipeline().push_back(
      /* ... internal force ... */);
  pip->getOpBoundaryRhsPipeline().push_back(
      /* ... */
      mat_strain_ptr, mat_stress_ptr, mat_D_ptr));

  // Natural bc contributions to the residual
  CHKERR /* ... */(
      pip->getOpDomainRhsPipeline(), mField, "U", Sev::verbose);
  CHKERR /* ... */(
      pip->getOpBoundaryRhsPipeline(), mField, "U", -1, Sev::verbose);
  auto dm = simple->getDM();
  auto res = createDMVector(dm);
  CHKERR VecSetDM(res, PETSC_NULL);

  pip->getDomainRhsFE()->f = res;
  pip->getBoundaryRhsFE()->f = res;
  // ...
  CHKERR VecZeroEntries(res);
  // ...
  CHKERR pip->loopFiniteElements();
  // ...
  CHKERR VecGhostUpdateBegin(res, ADD_VALUES, SCATTER_REVERSE);
  CHKERR VecGhostUpdateEnd(res, ADD_VALUES, SCATTER_REVERSE);
  CHKERR VecAssemblyBegin(res);
  CHKERR VecAssemblyEnd(res);
  auto zero_residual_at_constrains = [&]() {
    // ...
    auto fe_post_proc_ptr = boost::make_shared<FEMethod>();
    auto get_post_proc_hook_rhs = [&]() {
      // ... zero residual entries at essential bc and MPC constraints:
      CHKERR /* ... */ (mField, fe_post_proc_ptr, res)();
      CHKERR /* ... */ (mField, fe_post_proc_ptr, 0, res)();
      // ...
    };
    fe_post_proc_ptr->postProcessHook = get_post_proc_hook_rhs;
    // ...
  };
  // ...
  CHKERR zero_residual_at_constrains();

  double nrm2;
  CHKERR VecNorm(res, NORM_2, &nrm2);
  // ...
  MOFEM_LOG_C("WORLD", Sev::inform, "residual = %3.4e\n", nrm2);
  PetscBool test = PETSC_FALSE;
  // ...
  if (test == PETSC_TRUE) {

    auto post_proc_residual = [&](auto dm, auto f_res, auto out_name) {
      auto post_proc_fe =
          boost::make_shared<PostProcBrokenMeshInMoab<DomainEle>>(mField);
      // ...
      auto u_vec = boost::make_shared<MatrixDouble>();
      post_proc_fe->getOpPtrVector().push_back(
          /* ... evaluate "U" values of f_res into u_vec ... */);
      post_proc_fe->getOpPtrVector().push_back(
          new OpPPMap(
              post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),
              // ...
              ));
      // ...
      post_proc_fe->writeFile(out_name);
      // ...
    };

    CHKERR post_proc_residual(simple->getDM(), res, "res.h5m");

    constexpr double eps = 1e-8;
    if (nrm2 > eps)
      SETERRQ(PETSC_COMM_WORLD, /* ... */, "Residual is not zero");
  }
int main(int argc, char *argv[]) {

  // Initialise MoFEM, PETSc and MOAB databases
  const char param_file[] = "param_file.petsc";
  MoFEM::Core::Initialize(&argc, &argv, param_file, help);

  // Add logging channels
  auto core_log = logging::core::get();
  // ...

  try {

    // Register MoFEM discrete managers in PETSc
    DMType dm_name = "DMMOFEM";
    CHKERR DMRegister_MoFEM(dm_name);
    DMType dm_name_mg = "DMMOFEM_MG";
    CHKERR DMRegister_MGViaApproxOrders(dm_name_mg);

    // Create MOAB instance
    moab::Core mb_instance;
    moab::Interface &moab = mb_instance;
    // ...
931 "Is expected that schur matrix is not allocated. This is "
932 "possible only is PC is set up twice");
961 CHKERR KSPGetPC(solver, &pc);
962 PetscBool is_pcfs = PETSC_FALSE;
963 PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);
968 "Is expected that schur matrix is not allocated. This is "
969 "possible only is PC is set up twice");
976 CHKERR MatSetDM(
S, PETSC_NULL);
978 CHKERR MatSetOption(
S, MAT_SYMMETRIC, PETSC_TRUE);
983 if constexpr (A == AssemblyType::BLOCK_SCHUR) {
986 CHKERR KSPGetDM(solver, &solver_dm);
987 CHKERR DMSetMatType(solver_dm, MATSHELL);
  auto create_dm = [&](const char *name, auto &ents, auto dm_type) {
    auto create_dm_imp = [&]() {
      // ...
      auto sub_ents_ptr = boost::make_shared<Range>(ents);
      // ...
    };
    CHK_THROW_MESSAGE(
        create_dm_imp(),
        "Error in creating schurDM. It is possible that schurDM is "
        /* ... */);
    // ...
  };
  if constexpr (A == AssemblyType::BLOCK_SCHUR) {

    auto get_nested_mat_data = [&]() -> boost::shared_ptr<NestSchurData> {
      auto block_mat_data = createBlockMatStructure(
          // ...
          simple->getDomainFEName(),
          // ...
          );
      return createSchurNestedMatrixStruture(
          // ...
          {"U"}, {boost::make_shared<Range>(volEnts)}
          // ...
          );
    };

    auto nested_mat_data = get_nested_mat_data();
    // ...
  }
  // ...

  // Schur complement assembly is closed at the end of the lhs pipelines
  /* ... */.push_back(createOpSchurAssembleEnd(
      {"U"}, {boost::make_shared<Range>(volEnts)}, ao_up, S, true, true));
  /* ... */.push_back(createOpSchurAssembleEnd(
      {"U"}, {boost::make_shared<Range>(volEnts)}, ao_up, S, true, true));
  auto pre_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
  auto post_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();

  pre_proc_schur_lhs_ptr->preProcessHook = [this]() {
    // ...
    MOFEM_LOG("TIMER", Sev::inform) << "Lhs Assemble Begin";
    // ...
  };

  post_proc_schur_lhs_ptr->postProcessHook = [this, post_proc_schur_lhs_ptr,
                                              /* ... */]() {
    // ...
    CHKERR MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
    CHKERR MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);
    // ... apply essential bc to the assembled Schur matrix:
    CHKERR /* ... */ (mField, post_proc_schur_lhs_ptr, 1, S, ao_up)();
    MOFEM_LOG("TIMER", Sev::inform) << "Lhs Assemble End";
    // ...
  };

  ksp_ctx_ptr->getPreProcSetOperators().push_front(pre_proc_schur_lhs_ptr);
  ksp_ctx_ptr->getPostProcSetOperators().push_back(post_proc_schur_lhs_ptr);
  CHKERR PCFieldSplitSetIS(pc, NULL, vol_is);
  CHKERR PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, S);
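For orientation: with the degrees of freedom split into two blocks, the Schur
complement on the second block is

\[
  S \;=\; A_{11} - A_{10}\,A_{00}^{-1}A_{01},
\]

and the matrix S registered above with PC_FIELDSPLIT_SCHUR_PRE_USER is the
preconditioning matrix PCFIELDSPLIT uses for that reduced block; here it is the
matrix assembled by the createOpSchurAssembleEnd operators pushed earlier.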
  auto get_pc = [](auto ksp) {
    PC pc_raw;
    CHKERR KSPGetPC(ksp, &pc_raw);
    // ...
  };
  // ...
  if constexpr (A == AssemblyType::BLOCK_SCHUR) {
    // ...
    CHKERR PCSetOperators(pc, A, P);
    // ...
  }
  // ...
  CHKERR PCFieldSplitSchurGetSubKSP(pc, PETSC_NULL, &subksp);
  // ...
  auto set_pc_p_mg = [](auto dm, auto pc, auto S) {
    // ...
    PetscBool same = PETSC_FALSE;
    PetscObjectTypeCompare((PetscObject)pc, PCMG, &same);
    // ...
    CHKERR PCSetFromOptions(pc);
    // ...
  };
  // ...
  CHKERR PetscFree(subksp);
boost::shared_ptr<SetUpSchur>
SetUpSchur::createSetUpSchur(MoFEM::Interface &m_field) {
  return boost::shared_ptr<SetUpSchur>(new SetUpSchurImpl(m_field));
}