incompressible_elasticity.cpp
/**
 * @file incompressible_elasticity.cpp
 * @brief Incompressible elasticity problem
 */

#include <MoFEM.hpp>

using namespace MoFEM;
constexpr int SPACE_DIM =
    EXECUTABLE_DIMENSION; //< Space dimension of problem, mesh

constexpr AssemblyType AT =
    (SCHUR_ASSEMBLE) ? AssemblyType::SCHUR
                     : AssemblyType::PETSC; //< selected assembly type

constexpr IntegrationType IT =
    IntegrationType::GAUSS; //< selected integration type

using DomainEle = PipelineManager::ElementsAndOps<SPACE_DIM>::DomainEle;
using DomainEleOp = DomainEle::UserDataOperator;
using BoundaryEle = PipelineManager::ElementsAndOps<SPACE_DIM>::BoundaryEle;
using BoundaryEleOp = BoundaryEle::UserDataOperator;
// struct DomainBCs {};
// struct BoundaryBCs {};

// using DomainRhsBCs = NaturalBC<DomainEleOp>::Assembly<AT>::LinearForm<IT>;
// using OpDomainRhsBCs = DomainRhsBCs::OpFlux<DomainBCs, 1, SPACE_DIM>;
// using BoundaryRhsBCs = NaturalBC<BoundaryEleOp>::Assembly<AT>::LinearForm<IT>;
// using OpBoundaryRhsBCs = BoundaryRhsBCs::OpFlux<BoundaryBCs, 1, SPACE_DIM>;
// using BoundaryLhsBCs = NaturalBC<BoundaryEleOp>::Assembly<AT>::BiLinearForm<IT>;
// using OpBoundaryLhsBCs = BoundaryLhsBCs::OpFlux<BoundaryBCs, 1, SPACE_DIM>;

PetscBool doEvalField;
struct MonitorIncompressible : public FEMethod {

  MonitorIncompressible(
      SmartPetscObj<DM> dm,
      std::pair<boost::shared_ptr<PostProcEle>,
                boost::shared_ptr<SkinPostProcEle>>
          pair_post_proc_fe,
      std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> ux_scatter,
      std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> uy_scatter,
      std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> uz_scatter,
      std::array<double, SPACE_DIM> pass_field_eval_coords,
      boost::shared_ptr<SetPtsData> pass_field_eval_data,
      boost::shared_ptr<MatrixDouble> vec_field_ptr)
      : dM(dm), uXScatter(ux_scatter), uYScatter(uy_scatter),
        uZScatter(uz_scatter), fieldEvalCoords(pass_field_eval_coords),
        fieldEvalData(pass_field_eval_data), vecFieldPtr(vec_field_ptr) {
    postProcFe = pair_post_proc_fe.first;
    skinPostProcFe = pair_post_proc_fe.second;
  };

  MoFEMErrorCode preProcess() { return 0; }
  MoFEMErrorCode operator()() { return 0; }

  MoFEMErrorCode postProcess() {
    MoFEMFunctionBegin;

    MoFEM::Interface *m_field_ptr;
    CHKERR DMoFEMGetInterfacePtr(dM, &m_field_ptr);
    auto *simple = m_field_ptr->getInterface<Simple>();

    if (doEvalField) {
      if (SPACE_DIM == 3) {
        CHKERR m_field_ptr->getInterface<FieldEvaluatorInterface>()
            ->evalFEAtThePoint3D(
                fieldEvalCoords.data(), 1e-12, simple->getProblemName(),
                simple->getDomainFEName(), fieldEvalData,
                m_field_ptr->get_comm_rank(), m_field_ptr->get_comm_rank(),
                getCacheWeakPtr().lock(), MF_EXIST, QUIET);
      } else {
        CHKERR m_field_ptr->getInterface<FieldEvaluatorInterface>()
            ->evalFEAtThePoint2D(
                fieldEvalCoords.data(), 1e-12, simple->getProblemName(),
                simple->getDomainFEName(), fieldEvalData,
                m_field_ptr->get_comm_rank(), m_field_ptr->get_comm_rank(),
                getCacheWeakPtr().lock(), MF_EXIST, QUIET);
      }

      if (vecFieldPtr->size1()) {
        auto t_disp = getFTensor1FromMat<SPACE_DIM>(*vecFieldPtr);
        if constexpr (SPACE_DIM == 2)
          MOFEM_LOG("SYNC", Sev::inform)
              << "U_X: " << t_disp(0) << " U_Y: " << t_disp(1);
        else
          MOFEM_LOG("SYNC", Sev::inform)
              << "U_X: " << t_disp(0) << " U_Y: " << t_disp(1)
              << " U_Z: " << t_disp(2);
      }
    }
    MOFEM_LOG_SEVERITY_SYNC(m_field_ptr->get_comm(), Sev::inform);

    auto make_vtk = [&]() {
      MoFEMFunctionBegin;
      if (postProcFe) {
        CHKERR DMoFEMLoopFiniteElements(dM, simple->getDomainFEName(),
                                        postProcFe, getCacheWeakPtr());
        CHKERR postProcFe->writeFile("out_incomp_elasticity" +
                                     boost::lexical_cast<std::string>(ts_step) +
                                     ".h5m");
      }
      if (skinPostProcFe) {
        CHKERR DMoFEMLoopFiniteElements(dM, simple->getBoundaryFEName(),
                                        skinPostProcFe, getCacheWeakPtr());
        CHKERR skinPostProcFe->writeFile(
            "out_skin_incomp_elasticity_" +
            boost::lexical_cast<std::string>(ts_step) + ".h5m");
      }
      MoFEMFunctionReturn(0);
    };

    auto print_max_min = [&](auto &tuple, const std::string msg) {
      MoFEMFunctionBegin;
      CHKERR VecScatterBegin(std::get<1>(tuple), ts_u, std::get<0>(tuple),
                             INSERT_VALUES, SCATTER_FORWARD);
      CHKERR VecScatterEnd(std::get<1>(tuple), ts_u, std::get<0>(tuple),
                           INSERT_VALUES, SCATTER_FORWARD);
      double max, min;
      CHKERR VecMax(std::get<0>(tuple), PETSC_NULL, &max);
      CHKERR VecMin(std::get<0>(tuple), PETSC_NULL, &min);
      MOFEM_LOG_C("INCOMP_ELASTICITY", Sev::inform,
                  "%s time %3.4e min %3.4e max %3.4e", msg.c_str(), ts_t, min,
                  max);
      MoFEMFunctionReturn(0);
    };

    CHKERR make_vtk();
    CHKERR print_max_min(uXScatter, "Ux");
    CHKERR print_max_min(uYScatter, "Uy");
    if constexpr (SPACE_DIM == 3)
      CHKERR print_max_min(uZScatter, "Uz");

    MoFEMFunctionReturn(0);
  }

private:
  SmartPetscObj<DM> dM;
  boost::shared_ptr<PostProcEle> postProcFe;
  boost::shared_ptr<SkinPostProcEle> skinPostProcFe;
  std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> uXScatter;
  std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> uYScatter;
  std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> uZScatter;
  std::array<double, SPACE_DIM> fieldEvalCoords;
  boost::shared_ptr<SetPtsData> fieldEvalData;
  boost::shared_ptr<MatrixDouble> vecFieldPtr;
};

// Assemble to the A matrix by default; some terms, however, are assembled only
// to the matrix used for preconditioning.

template <>
typename MoFEM::OpBaseImpl<AT, DomainEleOp>::MatSetValuesHook
    MoFEM::OpBaseImpl<AT, DomainEleOp>::matSetValuesHook =
        [](ForcesAndSourcesCore::UserDataOperator *op_ptr,
           const EntitiesFieldData::EntData &row_data,
           const EntitiesFieldData::EntData &col_data, MatrixDouble &m) {
          return MatSetValues<AssemblyTypeSelector<AT>>(
              op_ptr->getKSPA(), row_data, col_data, m, ADD_VALUES);
        };

/**
 * @brief Element used to specialise assembly
 *
 */
struct DomainEleOpStab : public DomainEleOp {
  using DomainEleOp::DomainEleOp;
};

/**
 * @brief Specialise assembly for Stabilised matrix
 *
 * @tparam
 */
template <>
typename MoFEM::OpBaseImpl<AT, DomainEleOpStab>::MatSetValuesHook
    MoFEM::OpBaseImpl<AT, DomainEleOpStab>::matSetValuesHook =
        [](ForcesAndSourcesCore::UserDataOperator *op_ptr,
           const EntitiesFieldData::EntData &row_data,
           const EntitiesFieldData::EntData &col_data, MatrixDouble &m) {
          return MatSetValues<AssemblyTypeSelector<AT>>(
              op_ptr->getKSPB(), row_data, col_data, m, ADD_VALUES);
        };
//! [Specialisation for assembly]
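// A short summary of the mechanism above (as far as it can be read from the
// hooks): operators built on DomainEleOp assemble through getKSPA() into the
// system matrix A, while operators built on DomainEleOpStab assemble through
// getKSPB() into the matrix B from which the preconditioner is built. This is
// what later allows the pressure stabilisation term to be added to the
// preconditioner only, without perturbing the solved equations.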

int order = 2;
int geom_order = 1;
inline static double young_modulus = 100;
inline static double poisson_ratio = 0.25;
inline static double mu;
inline static double lambda;

PetscBool isDiscontinuousPressure = PETSC_FALSE;

struct Incompressible {

  Incompressible(MoFEM::Interface &m_field) : mField(m_field) {}

  MoFEMErrorCode runProblem();

private:
  MoFEM::Interface &mField;

  MoFEMErrorCode setupProblem();
  MoFEMErrorCode createCommonData();
  MoFEMErrorCode bC();
  MoFEMErrorCode OPs();
  MoFEMErrorCode tsSolve();
  MoFEMErrorCode checkResults();

  std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> uXScatter;
  std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> uYScatter;
  std::tuple<SmartPetscObj<Vec>, SmartPetscObj<VecScatter>> uZScatter;
};

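/**
 * @brief Evaluate the mixed-formulation stress at integration points
 *
 * The operator below computes, for every integration point,
 * \f$ \sigma_{ij} = p\,\delta_{ij} + 2\mu\,\varepsilon_{ij} \f$,
 * i.e. it recombines the pressure field "P" with the term built from the
 * symmetric displacement gradient. It is used only for post-processing of
 * STRESS.
 */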
template <int DIM>
struct OpCalculateLameStress : public ForcesAndSourcesCore::UserDataOperator {
  OpCalculateLameStress(double m_u, boost::shared_ptr<MatrixDouble> stress_ptr,
                        boost::shared_ptr<MatrixDouble> strain_ptr,
                        boost::shared_ptr<VectorDouble> pressure_ptr)
      : ForcesAndSourcesCore::UserDataOperator(NOSPACE, OPSPACE), mU(m_u),
        stressPtr(stress_ptr), strainPtr(strain_ptr),
        pressurePtr(pressure_ptr) {}

  MoFEMErrorCode doWork(int side, EntityType type,
                        EntitiesFieldData::EntData &data) {
    MoFEMFunctionBegin;

    // Define indices
    FTensor::Index<'i', DIM> i;
    FTensor::Index<'j', DIM> j;

    // Define Kronecker delta
    constexpr auto t_kd = FTensor::Kronecker_Delta_symmetric<int>();

    // Number of Gauss points
    const size_t nb_gauss_pts = getGaussPts().size2();

    stressPtr->resize((DIM * (DIM + 1)) / 2, nb_gauss_pts);
    auto t_stress = getFTensor2SymmetricFromMat<DIM>(*(stressPtr));
    auto t_strain = getFTensor2SymmetricFromMat<DIM>(*(strainPtr));
    auto t_pressure = getFTensor0FromVec(*(pressurePtr));

    const double l_mu = mU;
    // Evaluate stress at each integration point
    for (auto gg = 0; gg != nb_gauss_pts; ++gg) {

      t_stress(i, j) = t_pressure * t_kd(i, j) + 2. * l_mu * t_strain(i, j);

      ++t_strain;
      ++t_stress;
      ++t_pressure;
    }

    MoFEMFunctionReturn(0);
  }

private:
  double mU;
  boost::shared_ptr<MatrixDouble> stressPtr;
  boost::shared_ptr<MatrixDouble> strainPtr;
  boost::shared_ptr<VectorDouble> pressurePtr;
};

//! [Run problem]
MoFEMErrorCode Incompressible::runProblem() {
  MoFEMFunctionBegin;
  CHKERR setupProblem();
  CHKERR createCommonData();
  CHKERR bC();
  CHKERR OPs();
  CHKERR tsSolve();
  CHKERR checkResults();
  MoFEMFunctionReturn(0);
}
//! [Run problem]

//! [Set up problem]
MoFEMErrorCode Incompressible::setupProblem() {
  MoFEMFunctionBegin;
  Simple *simple = mField.getInterface<Simple>();

  CHKERR PetscOptionsGetInt(PETSC_NULL, "", "-order", &order, PETSC_NULL);
  CHKERR PetscOptionsGetInt(PETSC_NULL, "", "-geom_order", &geom_order,
                            PETSC_NULL);
  CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-is_discontinuous_pressure",
                             &isDiscontinuousPressure, PETSC_NULL);

  MOFEM_LOG("INCOMP_ELASTICITY", Sev::inform) << "Order " << order;
  MOFEM_LOG("INCOMP_ELASTICITY", Sev::inform) << "Geom order " << geom_order;

  // Select base
  enum bases { AINSWORTH, DEMKOWICZ, LASBASETOPT };
  const char *list_bases[LASBASETOPT] = {"ainsworth", "demkowicz"};
  PetscInt choice_base_value = AINSWORTH;
  CHKERR PetscOptionsGetEList(PETSC_NULL, NULL, "-base", list_bases,
                              LASBASETOPT, &choice_base_value, PETSC_NULL);

  FieldApproximationBase base;
  switch (choice_base_value) {
  case AINSWORTH:
    base = AINSWORTH_LEGENDRE_BASE;
    MOFEM_LOG("INCOMP_ELASTICITY", Sev::inform)
        << "Set AINSWORTH_LEGENDRE_BASE for displacements";
    break;
  case DEMKOWICZ:
    base = DEMKOWICZ_JACOBI_BASE;
    MOFEM_LOG("INCOMP_ELASTICITY", Sev::inform)
        << "Set DEMKOWICZ_JACOBI_BASE for displacements";
    break;
  default:
    base = LASTBASE;
    break;
  }

  // Note: For tets we have only H1 Ainsworth base, for Hex we have only H1
  // Demkowicz base. We need to implement Demkowicz H1 base on tet.
  CHKERR simple->addDataField("GEOMETRY", H1, base, SPACE_DIM);
  CHKERR simple->setFieldOrder("GEOMETRY", geom_order);

  // Adding fields related to incompressible elasticity
  // Add displacement domain and boundary fields
  CHKERR simple->addDomainField("U", H1, base, SPACE_DIM);
  CHKERR simple->addBoundaryField("U", H1, base, SPACE_DIM);
  CHKERR simple->setFieldOrder("U", order);

  // Add pressure domain and boundary fields
  // Choose either Crouzeix-Raviart element:
  if (isDiscontinuousPressure) {
    CHKERR simple->addDomainField("P", L2, base, 1);
    CHKERR simple->setFieldOrder("P", order - 2);
  } else {
    // ... or Taylor-Hood element:
    CHKERR simple->addDomainField("P", H1, base, 1);
    CHKERR simple->setFieldOrder("P", order - 1);
  }
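  // In both variants the pressure space is kept below the displacement order:
  // a discontinuous L2 pressure of order (order - 2) gives a
  // Crouzeix-Raviart-type pair, while the continuous H1 pressure of order
  // (order - 1) is the classical Taylor-Hood pair.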

  // Add geometry data field
  CHKERR simple->addDataField("GEOMETRY", H1, base, SPACE_DIM);
  CHKERR simple->setFieldOrder("GEOMETRY", geom_order);

  CHKERR simple->setUp();

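  // Project mesh node coordinates (including mid-edge nodes of higher-order
  // elements) onto the "GEOMETRY" field, so that geometry of order geom_order
  // is available to the operators pushed later.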
  auto project_ho_geometry = [&]() {
    Projection10NodeCoordsOnField ent_method(mField, "GEOMETRY");
    return mField.loop_dofs("GEOMETRY", ent_method);
  };
  CHKERR project_ho_geometry();

  MoFEMFunctionReturn(0);
} //! [Set up problem]

//! [Create common data]
MoFEMErrorCode Incompressible::createCommonData() {
  MoFEMFunctionBegin;

  auto get_options = [&]() {
    MoFEMFunctionBegin;
    CHKERR PetscOptionsGetScalar(PETSC_NULL, "", "-young_modulus",
                                 &young_modulus, PETSC_NULL);
    CHKERR PetscOptionsGetScalar(PETSC_NULL, "", "-poisson_ratio",
                                 &poisson_ratio, PETSC_NULL);

    MOFEM_LOG("INCOMP_ELASTICITY", Sev::inform)
        << "Young modulus " << young_modulus;
    MOFEM_LOG("INCOMP_ELASTICITY", Sev::inform)
        << "Poisson ratio " << poisson_ratio;

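    // Lame parameters from the engineering constants:
    //   mu     = E / (2 (1 + nu))
    //   lambda = E nu / ((1 + nu) (1 - 2 nu))
    // Note that lambda -> infinity as nu -> 0.5, which is why the weak form
    // below keeps 1/lambda and simply drops that term in the incompressible
    // limit.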
    mu = young_modulus / (2. + 2. * poisson_ratio);
    const double lambda_denom =
        (1. + poisson_ratio) * (1. - 2. * poisson_ratio);
    lambda = young_modulus * poisson_ratio / lambda_denom;

    MoFEMFunctionReturn(0);
  };

  CHKERR get_options();

  MoFEMFunctionReturn(0);
}
//! [Create common data]

//! [Boundary condition]
MoFEMErrorCode Incompressible::bC() {
  MoFEMFunctionBegin;

  auto pipeline_mng = mField.getInterface<PipelineManager>();
  auto simple = mField.getInterface<Simple>();
  auto bc_mng = mField.getInterface<BcManager>();
  auto time_scale = boost::make_shared<TimeScale>();

  auto integration_rule = [](int, int, int approx_order) {
    return 2 * (approx_order - 1);
  };

  using DomainNaturalBC =
      NaturalBC<DomainEleOp>::Assembly<AT>::LinearForm<IT>;
  using OpBodyForce =
      DomainNaturalBC::OpFlux<NaturalMeshsetType<BLOCKSET>, 1, SPACE_DIM>;
  using BoundaryNaturalBC =
      NaturalBC<BoundaryEleOp>::Assembly<AT>::LinearForm<IT>;
  using OpForce = BoundaryNaturalBC::OpFlux<NaturalForceMeshsets, 1, SPACE_DIM>;

  CHKERR pipeline_mng->setBoundaryRhsIntegrationRule(integration_rule);
  CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
      pipeline_mng->getOpBoundaryRhsPipeline(), {NOSPACE}, "GEOMETRY");
  //! [Natural boundary condition]
  CHKERR BoundaryNaturalBC::AddFluxToPipeline<OpForce>::add(
      pipeline_mng->getOpBoundaryRhsPipeline(), mField, "U", {time_scale},
      "FORCE", Sev::inform);

  //! [Define gravity vector]
  CHKERR DomainNaturalBC::AddFluxToPipeline<OpBodyForce>::add(
      pipeline_mng->getOpDomainRhsPipeline(), mField, "U", {time_scale},
      "BODY_FORCE", Sev::inform);

  // Essential BC
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_X",
                                           "U", 0, 0);
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Y",
                                           "U", 1, 1);
  CHKERR bc_mng->removeBlockDOFsOnEntities(simple->getProblemName(), "REMOVE_Z",
                                           "U", 2, 2);
  CHKERR bc_mng->pushMarkDOFsOnEntities<DisplacementCubitBcData>(
      simple->getProblemName(), "U");

  MoFEMFunctionReturn(0);
}
//! [Boundary condition]

//! [Push operators to pip]
MoFEMErrorCode Incompressible::OPs() {
  MoFEMFunctionBegin;
  auto pip_mng = mField.getInterface<PipelineManager>();
  auto bc_mng = mField.getInterface<BcManager>();

  auto integration_rule_vol = [](int, int, int approx_order) {
    return 2 * approx_order + geom_order - 1;
  };

  auto add_domain_base_ops = [&](auto &pip) {
    MoFEMFunctionBegin;
    CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(pip, {H1},
                                                          "GEOMETRY");
    MoFEMFunctionReturn(0);
  };

  auto add_domain_ops_lhs = [&](auto &pip) {
    MoFEMFunctionBegin;

    // This assembles the A-matrix
    using OpMassPressure = FormsIntegrators<DomainEleOp>::Assembly<
        AT>::BiLinearForm<IT>::OpMass<1, 1>;
    // This assembles the B-matrix (used for preconditioning)
    using OpMassPressureStab = FormsIntegrators<DomainEleOpStab>::Assembly<
        AT>::BiLinearForm<IT>::OpMass<1, 1>;
    //! [Operators used for RHS incompressible elasticity]

    //! [Operators used for incompressible elasticity]
    using OpGradSymTensorGrad = FormsIntegrators<DomainEleOp>::Assembly<
        AT>::BiLinearForm<IT>::OpGradSymTensorGrad<1, SPACE_DIM, SPACE_DIM, 0>;
    using OpMixScalarTimesDiv = FormsIntegrators<DomainEleOp>::Assembly<
        AT>::BiLinearForm<IT>::OpMixScalarTimesDiv<SPACE_DIM>;
    //! [Operators used for incompressible elasticity]

    auto mat_D_ptr = boost::make_shared<MatrixDouble>();
    constexpr auto size_symm = (SPACE_DIM * (SPACE_DIM + 1)) / 2;
    mat_D_ptr->resize(size_symm * size_symm, 1);

    FTensor::Index<'i', SPACE_DIM> i;
    FTensor::Index<'j', SPACE_DIM> j;
    FTensor::Index<'k', SPACE_DIM> k;
    FTensor::Index<'l', SPACE_DIM> l;

    constexpr auto t_kd = FTensor::Kronecker_Delta_symmetric<int>();
    auto t_mat = getFTensor4DdgFromMat<SPACE_DIM, SPACE_DIM, 0>(*mat_D_ptr);
    t_mat(i, j, k, l) = -2. * mu * ((t_kd(i, k) ^ t_kd(j, l)) / 4.);

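    // The two operators pushed below assemble the saddle-point blocks of the
    // mixed problem: OpMixScalarTimesDiv couples the pressure test functions
    // with div(u) (scaled by -1), and OpGradSymTensorGrad assembles the
    // displacement block with the constant tensor D = -2 mu I_sym prepared
    // above.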
    pip.push_back(new OpMixScalarTimesDiv(
        "P", "U",
        [](const double, const double, const double) constexpr { return -1.; },
        true, false));
    pip.push_back(new OpGradSymTensorGrad("U", "U", mat_D_ptr));

    auto get_lambda_reciprocal = [](const double, const double, const double) {
      return 1. / lambda;
    };
    if (poisson_ratio < 0.5)
      pip.push_back(new OpMassPressure("P", "P", get_lambda_reciprocal));

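    // Optional pressure-mass stabilisation: if -eps_stab is given and
    // positive, the term eps_stab * (q, p) is added through DomainEleOpStab,
    // i.e. it ends up only in the preconditioner matrix B and leaves the
    // assembled system A untouched.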
    double eps_stab = 0;
    CHKERR PetscOptionsGetScalar(PETSC_NULL, "", "-eps_stab", &eps_stab,
                                 PETSC_NULL);
    if (eps_stab > 0)
      pip.push_back(new OpMassPressureStab(
          "P", "P", [eps_stab](double, double, double) { return eps_stab; }));

    MoFEMFunctionReturn(0);
  };

  auto add_domain_ops_rhs = [&](auto &pip) {
    MoFEMFunctionBegin;

    //! [Operators used for RHS incompressible elasticity]
    using OpDomainGradTimesTensor = FormsIntegrators<DomainEleOp>::Assembly<
        AT>::LinearForm<IT>::OpGradTimesSymTensor<1, SPACE_DIM, SPACE_DIM>;

    using OpDivDeltaUTimesP = FormsIntegrators<DomainEleOp>::Assembly<
        AT>::LinearForm<IT>::OpMixDivTimesU<3, SPACE_DIM, SPACE_DIM>;

    using OpBaseTimesScalarValues = FormsIntegrators<DomainEleOp>::Assembly<
        AT>::LinearForm<IT>::OpBaseTimesScalar<1>;

    //! [Operators used for RHS incompressible elasticity]

    auto pressure_ptr = boost::make_shared<VectorDouble>();
    pip.push_back(new OpCalculateScalarFieldValues("P", pressure_ptr));

    auto div_u_ptr = boost::make_shared<VectorDouble>();
    pip.push_back(new OpCalculateDivergenceVectorFieldValues<SPACE_DIM>(
        "U", div_u_ptr));

    auto grad_u_ptr = boost::make_shared<MatrixDouble>();
    pip.push_back(new OpCalculateVectorFieldGradient<SPACE_DIM, SPACE_DIM>(
        "U", grad_u_ptr));

    auto strain_ptr = boost::make_shared<MatrixDouble>();
    pip.push_back(new OpSymmetrizeTensor<SPACE_DIM>(grad_u_ptr, strain_ptr));

    auto get_four_mu = [](const double, const double, const double) {
      return -2. * mu;
    };
    auto minus_one = [](const double, const double, const double) constexpr {
      return -1.;
    };

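    // Residual terms mirroring the left-hand side: grad(delta u) : (-2 mu
    // strain), div(delta u) * (-p), q * (-div u) and, away from the
    // incompressible limit, q * p / lambda.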
    pip.push_back(new OpDomainGradTimesTensor("U", strain_ptr, get_four_mu));

    pip.push_back(new OpDivDeltaUTimesP("U", pressure_ptr, minus_one));

    pip.push_back(new OpBaseTimesScalarValues("P", div_u_ptr, minus_one));

    auto get_lambda_reciprocal = [](const double, const double, const double) {
      return 1. / lambda;
    };
    if (poisson_ratio < 0.5) {
      pip.push_back(new OpBaseTimesScalarValues("P", pressure_ptr,
                                                get_lambda_reciprocal));
    }

    MoFEMFunctionReturn(0);
  };

  CHKERR add_domain_base_ops(pip_mng->getOpDomainLhsPipeline());
  CHKERR add_domain_base_ops(pip_mng->getOpDomainRhsPipeline());
  CHKERR add_domain_ops_lhs(pip_mng->getOpDomainLhsPipeline());
  CHKERR add_domain_ops_rhs(pip_mng->getOpDomainRhsPipeline());

  CHKERR pip_mng->setDomainRhsIntegrationRule(integration_rule_vol);
  CHKERR pip_mng->setDomainLhsIntegrationRule(integration_rule_vol);

  MoFEMFunctionReturn(0);
}
//! [Push operators to pip]

//! [Solve]
struct SetUpSchur {
  static boost::shared_ptr<SetUpSchur>
  createSetUpSchur(MoFEM::Interface &m_field);
  virtual MoFEMErrorCode setUp(SmartPetscObj<TS> solver) = 0;

protected:
  SetUpSchur() = default;
};

MoFEMErrorCode Incompressible::tsSolve() {
  MoFEMFunctionBegin;

  auto simple = mField.getInterface<Simple>();
  auto pip_mng = mField.getInterface<PipelineManager>();
  auto is_manager = mField.getInterface<ISManager>();

  auto set_section_monitor = [&](auto solver) {
    MoFEMFunctionBegin;
    SNES snes;
    CHKERR TSGetSNES(solver, &snes);
    PetscViewerAndFormat *vf;
    CHKERR PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,
                                      PETSC_VIEWER_DEFAULT, &vf);
    CHKERR SNESMonitorSet(
        snes,
        (MoFEMErrorCode(*)(SNES, PetscInt, PetscReal, void *))SNESMonitorFields,
        vf, (MoFEMErrorCode(*)(void **))PetscViewerAndFormatDestroy);
    MoFEMFunctionReturn(0);
  };

  auto scatter_create = [&](auto D, auto coeff) {
    SmartPetscObj<IS> is;
    CHKERR is_manager->isCreateProblemFieldAndRank(simple->getProblemName(),
                                                   ROW, "U", coeff, coeff, is);
    int loc_size;
    CHKERR ISGetLocalSize(is, &loc_size);
    Vec v;
    CHKERR VecCreateMPI(mField.get_comm(), loc_size, PETSC_DETERMINE, &v);
    VecScatter scatter;
    CHKERR VecScatterCreate(D, is, v, PETSC_NULL, &scatter);
    return std::make_tuple(SmartPetscObj<Vec>(v),
                           SmartPetscObj<VecScatter>(scatter));
  };

  auto create_post_process_elements = [&]() {
    auto pp_fe = boost::make_shared<PostProcEle>(mField);
    auto &pip = pp_fe->getOpPtrVector();

    auto push_vol_ops = [this](auto &pip) {
      MoFEMFunctionBegin;
      CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(pip, {H1},
                                                            "GEOMETRY");

      MoFEMFunctionReturn(0);
    };

    auto push_vol_post_proc_ops = [this](auto &pp_fe, auto &&p) {
      MoFEMFunctionBegin;

      auto &pip = pp_fe->getOpPtrVector();

      using OpPPMap = OpPostProcMapInMoab<SPACE_DIM, SPACE_DIM>;

      auto x_ptr = boost::make_shared<MatrixDouble>();
      pip.push_back(
          new OpCalculateVectorFieldValues<SPACE_DIM>("GEOMETRY", x_ptr));
      auto u_ptr = boost::make_shared<MatrixDouble>();
      pip.push_back(new OpCalculateVectorFieldValues<SPACE_DIM>("U", u_ptr));

      auto pressure_ptr = boost::make_shared<VectorDouble>();
      pip.push_back(new OpCalculateScalarFieldValues("P", pressure_ptr));

      auto div_u_ptr = boost::make_shared<VectorDouble>();
      pip.push_back(new OpCalculateDivergenceVectorFieldValues<SPACE_DIM>(
          "U", div_u_ptr));

      auto grad_u_ptr = boost::make_shared<MatrixDouble>();
      pip.push_back(new OpCalculateVectorFieldGradient<SPACE_DIM, SPACE_DIM>(
          "U", grad_u_ptr));

      auto strain_ptr = boost::make_shared<MatrixDouble>();
      pip.push_back(new OpSymmetrizeTensor<SPACE_DIM>(grad_u_ptr, strain_ptr));

      auto stress_ptr = boost::make_shared<MatrixDouble>();
      pip.push_back(new OpCalculateLameStress<SPACE_DIM>(
          mu, stress_ptr, strain_ptr, pressure_ptr));

      pip.push_back(

          new OpPPMap(

              pp_fe->getPostProcMesh(), pp_fe->getMapGaussPts(),

              {{"P", pressure_ptr}},

              {{"U", u_ptr}, {"GEOMETRY", x_ptr}},

              {},

              {{"STRAIN", strain_ptr}, {"STRESS", stress_ptr}}

              )

      );

      MoFEMFunctionReturn(0);
    };

    auto vol_post_proc = [this, push_vol_post_proc_ops, push_vol_ops]() {
      PetscBool post_proc_vol = PETSC_FALSE;
      CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-post_proc_vol",
                                 &post_proc_vol, PETSC_NULL);
      if (post_proc_vol == PETSC_FALSE)
        return boost::shared_ptr<PostProcEle>();
      auto pp_fe = boost::make_shared<PostProcEle>(mField);
      CHK_MOAB_THROW(
          push_vol_post_proc_ops(pp_fe, push_vol_ops(pp_fe->getOpPtrVector())),
          "push_vol_post_proc_ops");
      return pp_fe;
    };

    auto skin_post_proc = [this, push_vol_post_proc_ops, push_vol_ops]() {
      PetscBool post_proc_skin = PETSC_TRUE;
      CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-post_proc_skin",
                                 &post_proc_skin, PETSC_NULL);
      if (post_proc_skin == PETSC_FALSE)
        return boost::shared_ptr<SkinPostProcEle>();

      auto simple = mField.getInterface<Simple>();
      auto pp_fe = boost::make_shared<SkinPostProcEle>(mField);
      auto op_side = new OpLoopSide<DomainEle>(
          mField, simple->getDomainFEName(), SPACE_DIM, Sev::verbose);
      pp_fe->getOpPtrVector().push_back(op_side);
      CHK_MOAB_THROW(push_vol_post_proc_ops(
                         pp_fe, push_vol_ops(op_side->getOpPtrVector())),
                     "push_vol_post_proc_ops");
      return pp_fe;
    };

    return std::make_pair(vol_post_proc(), skin_post_proc());
  };

  boost::shared_ptr<SetPtsData> field_eval_data;
  boost::shared_ptr<MatrixDouble> vector_field_ptr;

  std::array<double, SPACE_DIM> field_eval_coords;
  int coords_dim = SPACE_DIM;
  CHKERR PetscOptionsGetRealArray(NULL, NULL, "-field_eval_coords",
                                  field_eval_coords.data(), &coords_dim,
                                  &doEvalField);

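  // If -field_eval_coords is given, set up a point evaluator: a spatial tree
  // over the domain elements is built and the monitor reports the
  // displacement "U" interpolated at that point after every time step.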
  if (doEvalField) {
    vector_field_ptr = boost::make_shared<MatrixDouble>();
    field_eval_data =
        mField.getInterface<FieldEvaluatorInterface>()->getData<DomainEle>();
    if constexpr (SPACE_DIM == 3) {
      CHKERR mField.getInterface<FieldEvaluatorInterface>()->buildTree3D(
          field_eval_data, simple->getDomainFEName());
    } else {
      CHKERR mField.getInterface<FieldEvaluatorInterface>()->buildTree2D(
          field_eval_data, simple->getDomainFEName());
    }

    field_eval_data->setEvalPoints(field_eval_coords.data(), 1);
    auto no_rule = [](int, int, int) { return -1; };
    auto field_eval_fe_ptr = field_eval_data->feMethodPtr.lock();
    field_eval_fe_ptr->getRuleHook = no_rule;
    field_eval_fe_ptr->getOpPtrVector().push_back(
        new OpCalculateVectorFieldValues<SPACE_DIM>("U", vector_field_ptr));
  }

  auto set_time_monitor = [&](auto dm, auto solver) {
    MoFEMFunctionBegin;
    boost::shared_ptr<MonitorIncompressible> monitor_ptr(
        new MonitorIncompressible(dm,
                                  create_post_process_elements(), uXScatter,
                                  uYScatter, uZScatter, field_eval_coords,
                                  field_eval_data, vector_field_ptr));
    boost::shared_ptr<ForcesAndSourcesCore> null;
    CHKERR DMMoFEMTSSetMonitor(dm, solver, simple->getDomainFEName(),
                               monitor_ptr, null, null);
    MoFEMFunctionReturn(0);
  };

  auto set_essential_bc = [&]() {
    MoFEMFunctionBegin;
    // This is low level pushing finite elements (pipelines) to solver
    auto ts_ctx_ptr = getDMTsCtx(simple->getDM());
    auto pre_proc_ptr = boost::make_shared<FEMethod>();
    auto post_proc_rhs_ptr = boost::make_shared<FEMethod>();

    // Add boundary condition scaling
    auto time_scale = boost::make_shared<TimeScale>();

    pre_proc_ptr->preProcessHook = EssentialPreProc<DisplacementCubitBcData>(
        mField, pre_proc_ptr, {time_scale}, false);
    post_proc_rhs_ptr->postProcessHook =
        EssentialPostProcRhs<DisplacementCubitBcData>(mField, post_proc_rhs_ptr,
                                                      1.);
    ts_ctx_ptr->getPreProcessIFunction().push_front(pre_proc_ptr);
    ts_ctx_ptr->getPreProcessIJacobian().push_front(pre_proc_ptr);
    ts_ctx_ptr->getPostProcessIFunction().push_back(post_proc_rhs_ptr);
    MoFEMFunctionReturn(0);
  };

  auto set_schur_pc = [&](auto solver) {
    SNES snes;
    CHKERR TSGetSNES(solver, &snes);
    KSP ksp;
    CHKERR SNESGetKSP(snes, &ksp);
    PC pc;
    CHKERR KSPGetPC(ksp, &pc);
    PetscBool is_pcfs = PETSC_FALSE;
    PetscObjectTypeCompare((PetscObject)pc, PCFIELDSPLIT, &is_pcfs);
    boost::shared_ptr<SetUpSchur> schur_ptr;
    auto ts_ctx_ptr = getDMTsCtx(simple->getDM());

    auto A = createDMMatrix(simple->getDM());
    auto B = matDuplicate(A, MAT_DO_NOT_COPY_VALUES);

    CHK_THROW_MESSAGE(
        TSSetIJacobian(solver, A, B, TsSetIJacobian, ts_ctx_ptr.get()),
        "set operators");
    auto pre_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
    auto post_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
    pre_proc_schur_lhs_ptr->preProcessHook = [pre_proc_schur_lhs_ptr]() {
      MoFEMFunctionBegin;
      MOFEM_LOG("INCOMP_ELASTICITY", Sev::verbose) << "Lhs Zero matrices";
      CHKERR MatZeroEntries(pre_proc_schur_lhs_ptr->A);
      CHKERR MatZeroEntries(pre_proc_schur_lhs_ptr->B);
      MoFEMFunctionReturn(0);
    };
    post_proc_schur_lhs_ptr->postProcessHook = [this,
                                                post_proc_schur_lhs_ptr]() {
      MoFEMFunctionBegin;
      MOFEM_LOG("INCOMP_ELASTICITY", Sev::verbose) << "Lhs Assemble Begin";
      *(post_proc_schur_lhs_ptr->matAssembleSwitch) = false;
      CHKERR MatAssemblyBegin(post_proc_schur_lhs_ptr->A, MAT_FINAL_ASSEMBLY);
      CHKERR MatAssemblyEnd(post_proc_schur_lhs_ptr->A, MAT_FINAL_ASSEMBLY);
      CHKERR EssentialPostProcLhs<DisplacementCubitBcData>(
          mField, post_proc_schur_lhs_ptr, 1.,
          SmartPetscObj<Mat>(post_proc_schur_lhs_ptr->A, true))();

      CHKERR MatAssemblyBegin(post_proc_schur_lhs_ptr->B, MAT_FINAL_ASSEMBLY);
      CHKERR MatAssemblyEnd(post_proc_schur_lhs_ptr->B, MAT_FINAL_ASSEMBLY);
      CHKERR MatAXPY(post_proc_schur_lhs_ptr->B, 1, post_proc_schur_lhs_ptr->A,
                     SAME_NONZERO_PATTERN);
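      // After the two assemblies above, B holds the terms assembled directly
      // to the preconditioner (e.g. the eps_stab pressure mass) and, through
      // MatAXPY, a copy of A, so the preconditioner is built from A augmented
      // by the stabilisation.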
      MOFEM_LOG("INCOMP_ELASTICITY", Sev::verbose) << "Lhs Assemble End";
      MoFEMFunctionReturn(0);
    };
    ts_ctx_ptr->getPreProcessIJacobian().push_front(pre_proc_schur_lhs_ptr);
    ts_ctx_ptr->getPostProcessIJacobian().push_back(post_proc_schur_lhs_ptr);

    if (is_pcfs == PETSC_TRUE) {
      if (AT == AssemblyType::SCHUR) {
        schur_ptr = SetUpSchur::createSetUpSchur(mField);
        CHK_MOAB_THROW(schur_ptr->setUp(solver), "setup schur preconditioner");
      } else {
        auto set_pcfieldsplit_preconditioned_ts = [&](auto solver) {
          MoFEMFunctionBegin;
          auto bc_mng = mField.getInterface<BcManager>();
          auto name_prb = simple->getProblemName();
          SmartPetscObj<IS> is_p;
          CHKERR mField.getInterface<ISManager>()->isCreateProblemFieldAndRank(
              name_prb, ROW, "P", 0, 1, is_p);
          CHKERR PCFieldSplitSetIS(pc, PETSC_NULL, is_p);
          MoFEMFunctionReturn(0);
        };
        CHK_MOAB_THROW(set_pcfieldsplit_preconditioned_ts(solver),
                       "set pcfieldsplit preconditioned");
      }
      return boost::make_tuple(schur_ptr, A, B);
    }

    return boost::make_tuple(schur_ptr, A, B);
  };

  auto dm = simple->getDM();
  auto D = createDMVector(dm);

  uXScatter = scatter_create(D, 0);
  uYScatter = scatter_create(D, 1);
  if (SPACE_DIM == 3)
    uZScatter = scatter_create(D, 2);

  // Add extra finite elements to SNES solver pipelines to resolve essential
  // boundary conditions
  CHKERR set_essential_bc();

  auto solver = pip_mng->createTSIM();
  CHKERR TSSetFromOptions(solver);

  CHKERR set_section_monitor(solver);
  CHKERR set_time_monitor(dm, solver);
  auto [schur_pc_ptr, A, B] = set_schur_pc(solver);

  CHKERR TSSetSolution(solver, D);
  CHKERR TSSetUp(solver);
  CHKERR TSSolve(solver, NULL);

  MoFEMFunctionReturn(0);
}
//! [Solve]

//! [Check]
MoFEMErrorCode Incompressible::checkResults() { return 0; }
//! [Check]

static char help[] = "...\n\n";

int main(int argc, char *argv[]) {

  // Initialisation of MoFEM/PETSc and MOAB data structures
  const char param_file[] = "param_file.petsc";
  MoFEM::Core::Initialize(&argc, &argv, param_file, help);

  // Add logging channel for INCOMP_ELASTICITY
  auto core_log = logging::core::get();
  core_log->add_sink(
      LogManager::createSink(LogManager::getStrmWorld(), "INCOMP_ELASTICITY"));
  LogManager::setLog("INCOMP_ELASTICITY");
  MOFEM_LOG_TAG("INCOMP_ELASTICITY", "Indent");

  try {

    //! [Register MoFEM discrete manager in PETSc]
    DMType dm_name = "DMMOFEM";
    CHKERR DMRegister_MoFEM(dm_name);
    //! [Register MoFEM discrete manager in PETSc]

    //! [Create MoAB]
    moab::Core mb_instance;              ///< mesh database
    moab::Interface &moab = mb_instance; ///< mesh database interface
    //! [Create MoAB]

    //! [Create MoFEM]
    MoFEM::Core core(moab);           ///< finite element database
    MoFEM::Interface &m_field = core; ///< finite element database interface
    //! [Create MoFEM]

    //! [Load mesh]
    Simple *simple = m_field.getInterface<Simple>();
    CHKERR simple->getOptions();
    CHKERR simple->loadFile("");
    //! [Load mesh]

    //! [CONTACT]
    Incompressible ex(m_field);
    CHKERR ex.runProblem();
    //! [CONTACT]
  }
  CATCH_ERRORS;

  CHKERR MoFEM::Core::Finalize();

  return 0;
}

struct SetUpSchurImpl : public SetUpSchur {

  SetUpSchurImpl(MoFEM::Interface &m_field) : mField(m_field) {}
  virtual ~SetUpSchurImpl() { S.reset(); }

  MoFEMErrorCode setUp(SmartPetscObj<TS> solver);

private:

  SmartPetscObj<Mat> S;

  MoFEM::Interface &mField;

  SmartPetscObj<DM> subDM;
};

MoFEMErrorCode SetUpSchurImpl::setUp(SmartPetscObj<TS> solver) {
  MoFEMFunctionBegin;
  auto simple = mField.getInterface<Simple>();
  auto pip = mField.getInterface<PipelineManager>();
  auto dm = simple->getDM();

  SNES snes;
  CHKERR TSGetSNES(solver, &snes);
  KSP ksp;
  CHKERR SNESGetKSP(snes, &ksp);
  PC pc;
  CHKERR KSPGetPC(ksp, &pc);

  MOFEM_LOG("INCOMP_ELASTICITY", Sev::inform) << "Setup Schur pc";

  if (S) {
    CHK_THROW_MESSAGE(MOFEM_DATA_INCONSISTENCY,
                      "Is expected that schur matrix is not allocated. This is "
                      "possible only if PC is set up twice");
  }

  auto create_sub_dm = [&]() {
    auto sub_dm = createDM(mField.get_comm(), "DMMOFEM");
    auto set_up = [&]() {
      MoFEMFunctionBegin;
      CHKERR DMMoFEMCreateSubDM(sub_dm, simple->getDM(), "SUB");
      CHKERR DMMoFEMSetSquareProblem(sub_dm, PETSC_TRUE);
      CHKERR DMMoFEMAddElement(sub_dm, simple->getDomainFEName());
      CHKERR DMMoFEMAddSubFieldRow(sub_dm, "U");
      CHKERR DMSetUp(sub_dm);
      MoFEMFunctionReturn(0);
    };
    CHK_THROW_MESSAGE(set_up(), "set up dm");
    return sub_dm;
  };

  subDM = create_sub_dm();
  S = createDMMatrix(subDM);
  CHKERR MatSetBlockSize(S, SPACE_DIM);

  auto dm_is = getDMSubData(subDM)->getSmartRowIs();
  auto ao_up = createAOMappingIS(dm_is, PETSC_NULL);
  // Domain
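  // The pair of operators pushed below brackets the domain LHS pipeline:
  // OpSchurAssembleBegin caches element blocks, and OpSchurAssembleEnd
  // eliminates the "P" block element-wise and assembles an approximation of
  // the Schur complement into S on the displacement-only "SUB" problem, using
  // ao_up to map the indices. S is then handed to PCFIELDSPLIT as the
  // user-provided Schur preconditioner matrix.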
  pip->getOpDomainLhsPipeline().push_front(createOpSchurAssembleBegin());
  pip->getOpDomainLhsPipeline().push_back(
      createOpSchurAssembleEnd({"P"}, {nullptr}, ao_up, S, false, false));

  auto pre_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();
  auto post_proc_schur_lhs_ptr = boost::make_shared<FEMethod>();

  pre_proc_schur_lhs_ptr->preProcessHook = [this]() {
    MoFEMFunctionBegin;
    CHKERR MatZeroEntries(S);
    MoFEMFunctionReturn(0);
  };

  post_proc_schur_lhs_ptr->postProcessHook = [this, ao_up,
                                              post_proc_schur_lhs_ptr]() {
    MoFEMFunctionBegin;

    auto print_mat_norm = [this](auto a, std::string prefix) {
      MoFEMFunctionBegin;
      double nrm;
      CHKERR MatNorm(a, NORM_FROBENIUS, &nrm);
      MOFEM_LOG("INCOMP_ELASTICITY", Sev::verbose)
          << prefix << " norm = " << nrm;
      MoFEMFunctionReturn(0);
    };

    CHKERR MatAssemblyBegin(S, MAT_FINAL_ASSEMBLY);
    CHKERR MatAssemblyEnd(S, MAT_FINAL_ASSEMBLY);
    CHKERR EssentialPostProcLhs<DisplacementCubitBcData>(
        mField, post_proc_schur_lhs_ptr, 1, S, ao_up)();

#ifndef NDEBUG
    CHKERR print_mat_norm(S, "S");
#endif // NDEBUG
    MoFEMFunctionReturn(0);
  };

  auto ts_ctx_ptr = getDMTsCtx(simple->getDM());
  ts_ctx_ptr->getPreProcessIJacobian().push_front(pre_proc_schur_lhs_ptr);
  ts_ctx_ptr->getPostProcessIJacobian().push_back(post_proc_schur_lhs_ptr);

  SmartPetscObj<IS> is_p;
  CHKERR mField.getInterface<ISManager>()->isCreateProblemFieldAndRank(
      simple->getProblemName(), ROW, "P", 0, 1, is_p);
  CHKERR PCFieldSplitSetIS(pc, NULL, is_p);
  CHKERR PCFieldSplitSetSchurPre(pc, PC_FIELDSPLIT_SCHUR_PRE_USER, S);

  MoFEMFunctionReturn(0);
}

boost::shared_ptr<SetUpSchur>
SetUpSchur::createSetUpSchur(MoFEM::Interface &m_field) {
  return boost::shared_ptr<SetUpSchur>(new SetUpSchurImpl(m_field));
}