electrostatics.cpp
/**
 * @file Electrostatics.cpp
 * \example Electrostatics.cpp
 */

#ifndef EXECUTABLE_DIMENSION
#define EXECUTABLE_DIMENSION 3
#endif

#include <electrostatics.hpp>
static char help[] = "...\n\n";
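
// Note: EXECUTABLE_DIMENSION selects whether the example is compiled as a 2D
// or a 3D problem. SPACE_DIM and the element/operator aliases used below
// (DomainEleOp, IntEleOp, OpDomainLhsMatrixK, OpInterfaceRhsVectorF,
// OpBodySourceVectorb, ...) are presumably defined in electrostatics.hpp,
// which is not reproduced in this listing.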
struct Electrostatics {
public:
  Electrostatics(MoFEM::Interface &m_field);

  // Declaration of the main function to run analysis
  MoFEMErrorCode runProgram();

private:
  // Declaration of other main functions called in runProgram()
  MoFEMErrorCode readMesh();
  MoFEMErrorCode setupProblem();
  MoFEMErrorCode boundaryCondition();
  MoFEMErrorCode setIntegrationRules();
  MoFEMErrorCode assembleSystem();
  MoFEMErrorCode solveSystem();
  MoFEMErrorCode outputResults();
  MoFEMErrorCode getTotalEnergy();
  MoFEMErrorCode getElectrodeCharge();

  int oRder = 2;     // default approximation order
  int geomOrder = 1; // default geometric order

  MoFEM::Interface &mField;
  Simple *simpleInterface;
  std::string domainField;
  boost::shared_ptr<std::map<int, BlockData>> permBlockSetsPtr;
  boost::shared_ptr<std::map<int, BlockData>> intBlockSetsPtr;
  boost::shared_ptr<std::map<int, BlockData>> electrodeBlockSetsPtr;
  boost::shared_ptr<DataAtIntegrationPts> commonDataPtr;

  boost::shared_ptr<ForcesAndSourcesCore> interFaceRhsFe;
  boost::shared_ptr<ForcesAndSourcesCore> electrodeRhsFe;

  double aLpha = 0.0; // total charge on the first electrode
  double bEta = 0.0;  // total charge on the second electrode
  SmartPetscObj<Vec> petscVec;       // PETSc vector for the charge solution
  SmartPetscObj<Vec> petscVecEnergy; // PETSc vector for the energy solution
  PetscBool out_skin = PETSC_FALSE;  // flag to post-process the skin (3D only)
  PetscBool is_partitioned = PETSC_FALSE;
  enum VecElements { ZERO = 0, ONE = 1, LAST_ELEMENT };
  int atomTest = 0;
};

Electrostatics::Electrostatics(MoFEM::Interface &m_field)
    : domainField("POTENTIAL"), mField(m_field) {}

//! [Read mesh]
MoFEMErrorCode Electrostatics::readMesh() {
  MoFEMFunctionBegin;
  CHKERR mField.getInterface(simpleInterface);
  CHKERR simpleInterface->getOptions();
  simpleInterface->getAddSkeletonFE() =
      true; // create lower-dimensional elements to ensure sharing of
            // partitioned entities at the boundaries
  CHKERR simpleInterface->loadFile();
  MoFEMFunctionReturn(0);
}
//! [Read mesh]

//! [Setup problem]
MoFEMErrorCode Electrostatics::setupProblem() {
  MoFEMFunctionBegin;

  Range domain_ents;
  CHKERR mField.get_moab().get_entities_by_dimension(0, SPACE_DIM, domain_ents,
                                                     true);
  auto get_ents_by_dim = [&](const auto dim) {
    if (dim == SPACE_DIM) {
      return domain_ents;
    } else {
      Range ents;
      if (dim == 0)
        CHKERR mField.get_moab().get_connectivity(domain_ents, ents, true);
      else
        CHKERR mField.get_moab().get_entities_by_dimension(0, dim, ents, true);
      return ents;
    }
  };

  // Select base for the field based on the element type
  auto get_base = [&]() {
    auto domain_ents = get_ents_by_dim(SPACE_DIM);
    if (domain_ents.empty())
      CHK_THROW_MESSAGE(MOFEM_NOT_FOUND, "Empty mesh");
    const auto type = type_from_handle(domain_ents[0]);
    switch (type) {
    case MBQUAD:
      return DEMKOWICZ_JACOBI_BASE;
    case MBHEX:
      return DEMKOWICZ_JACOBI_BASE;
    case MBTRI:
      return AINSWORTH_LEGENDRE_BASE;
    case MBTET:
      return AINSWORTH_LEGENDRE_BASE;
    default:
      CHK_THROW_MESSAGE(MOFEM_NOT_FOUND, "Element type is not handled");
    }
    return NOBASE;
  };

  auto base = get_base();
  CHKERR simpleInterface->addDomainField(domainField, H1, base, 1);
  CHKERR simpleInterface->addBoundaryField(domainField, H1, base, 1);

  CHKERR PetscOptionsGetInt(PETSC_NULL, "", "-order", &oRder, PETSC_NULL);

  CHKERR PetscOptionsGetInt(PETSC_NULL, "", "-geom_order", &geomOrder,
                            PETSC_NULL);

  CHKERR PetscOptionsGetInt(PETSC_NULL, "", "-atom_test", &atomTest,
                            PETSC_NULL);
  CHKERR simpleInterface->setFieldOrder(domainField, oRder);
  CHKERR simpleInterface->addDataField("GEOMETRY", H1, base, SPACE_DIM);
  CHKERR simpleInterface->setFieldOrder("GEOMETRY", geomOrder);

  auto project_ho_geometry = [&]() {
    Projection10NodeCoordsOnField ent_method(mField, "GEOMETRY");
    return mField.loop_dofs("GEOMETRY", ent_method);
  };

  CHKERR simpleInterface->setUp();
  CHKERR project_ho_geometry();

  commonDataPtr = boost::make_shared<DataAtIntegrationPts>(mField);

  // gets the map of the permittivity attributes and the block sets
  permBlockSetsPtr = boost::make_shared<std::map<int, BlockData>>();
  Range mat_electr_ents; // range of entities with the permittivity
  for (_IT_CUBITMESHSETS_BY_SET_TYPE_FOR_LOOP_(mField, BLOCKSET, bit)) {
    if (bit->getName().compare(0, 12, "MAT_ELECTRIC") == 0) {
      const int id = bit->getMeshsetId();
      auto &block_data = (*permBlockSetsPtr)[id];

      CHKERR mField.get_moab().get_entities_by_dimension(
          bit->getMeshset(), SPACE_DIM, block_data.domainEnts, true);
      mat_electr_ents.merge(block_data.domainEnts);

      std::vector<double> attributes;
      bit->getAttributes(attributes);
      if (attributes.size() < 1) {
        SETERRQ1(PETSC_COMM_WORLD, MOFEM_DATA_INCONSISTENCY,
                 "At least one permittivity attribute should be given but "
                 "found %d",
                 attributes.size());
      }
      block_data.iD = id;                   // id of the block
      block_data.epsPermit = attributes[0]; // permittivity value of the block
    }
  }

  // gets the map of the charge attributes and the block sets
  intBlockSetsPtr = boost::make_shared<std::map<int, BlockData>>();
  Range int_electr_ents; // range of entities with the charge
  for (_IT_CUBITMESHSETS_BY_SET_TYPE_FOR_LOOP_(mField, BLOCKSET, bit)) {
    if (bit->getName().compare(0, 12, "INT_ELECTRIC") == 0) {
      const int id = bit->getMeshsetId();
      auto &block_data = (*intBlockSetsPtr)[id];

      CHKERR mField.get_moab().get_entities_by_dimension(
          bit->getMeshset(), SPACE_DIM - 1, block_data.interfaceEnts, true);
      int_electr_ents.merge(block_data.interfaceEnts);

      std::vector<double> attributes;
      bit->getAttributes(attributes);
      if (attributes.size() < 1) {
        SETERRQ1(PETSC_COMM_WORLD, MOFEM_DATA_INCONSISTENCY,
                 "At least one charge attribute should be given but found %d",
                 attributes.size());
      }

      block_data.iD = id;                       // block ID
      block_data.chargeDensity = attributes[0]; // block charge attribute
    }
  }
  // gets the map of the electrode entity range in the block sets
  electrodeBlockSetsPtr = boost::make_shared<std::map<int, BlockData>>();
  Range electrode_ents; // range of entities with the electrode
  int electrodeCount = 0;
  for (_IT_CUBITMESHSETS_BY_SET_TYPE_FOR_LOOP_(mField, BLOCKSET, bit)) {
    if (bit->getName().compare(0, 9, "ELECTRODE") == 0) {
      const int id = bit->getMeshsetId();
      auto &block_data = (*electrodeBlockSetsPtr)[id];
      ++electrodeCount;

      CHKERR mField.get_moab().get_entities_by_dimension(
          bit->getMeshset(), SPACE_DIM - 1, block_data.electrodeEnts, true);
      electrode_ents.merge(block_data.electrodeEnts);

      if (electrodeCount > 2) {
        SETERRQ(PETSC_COMM_SELF, MOFEM_INVALID_DATA,
                "Three or more electrode blocksets found");
      }
    }
  }

  // sync entities
  CHKERR mField.getInterface<CommInterface>()->synchroniseEntities(
      mat_electr_ents);
  CHKERR mField.getInterface<CommInterface>()->synchroniseEntities(
      int_electr_ents);
  CHKERR mField.getInterface<CommInterface>()->synchroniseEntities(
      electrode_ents);

  CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-out_skin", &out_skin,
                             PETSC_NULL);
  CHKERR PetscOptionsGetBool(PETSC_NULL, "", "-is_partitioned", &is_partitioned,
                             PETSC_NULL);
  // get the skin entities
  Skinner skinner(&mField.get_moab());
  Range skin_tris;
  CHKERR skinner.find_skin(0, mat_electr_ents, false, skin_tris);
  Range proc_skin;
  ParallelComm *pcomm =
      ParallelComm::get_pcomm(&mField.get_moab(), MYPCOMM_INDEX);
  if (is_partitioned) {
    CHKERR pcomm->filter_pstatus(skin_tris,
                                 PSTATUS_SHARED | PSTATUS_MULTISHARED,
                                 PSTATUS_NOT, -1, &proc_skin);
  } else {
    proc_skin = skin_tris;
  }
  // add the skin entities to the field
  CHKERR mField.add_finite_element("SKIN");
  CHKERR mField.add_ents_to_finite_element_by_dim(proc_skin, SPACE_DIM - 1,
                                                  "SKIN");
  CHKERR mField.modify_finite_element_add_field_row("SKIN", domainField);
  CHKERR mField.modify_finite_element_add_field_col("SKIN", domainField);
  CHKERR mField.modify_finite_element_add_field_data("SKIN", domainField);
  // add the interface entities to the field
  CHKERR mField.add_finite_element("INTERFACE");
  CHKERR mField.modify_finite_element_add_field_row("INTERFACE", domainField);
  CHKERR mField.modify_finite_element_add_field_col("INTERFACE", domainField);
  CHKERR mField.modify_finite_element_add_field_data("INTERFACE", domainField);
  CHKERR mField.modify_finite_element_add_field_data("INTERFACE", "GEOMETRY");

  CHKERR mField.add_ents_to_finite_element_by_dim(int_electr_ents,
                                                  SPACE_DIM - 1, "INTERFACE");
  // add the electrode entities to the field
  CHKERR mField.add_finite_element("ELECTRODE");
  CHKERR mField.modify_finite_element_add_field_row("ELECTRODE", domainField);
  CHKERR mField.modify_finite_element_add_field_col("ELECTRODE", domainField);
  CHKERR mField.modify_finite_element_add_field_data("ELECTRODE", domainField);
  CHKERR mField.modify_finite_element_add_field_data("ELECTRODE", "GEOMETRY");

  CHKERR mField.add_ents_to_finite_element_by_dim(electrode_ents,
                                                  SPACE_DIM - 1, "ELECTRODE");

  // sync field entities
  mField.getInterface<CommInterface>()->synchroniseFieldEntities(domainField);
  mField.getInterface<CommInterface>()->synchroniseFieldEntities("GEOMETRY");
  // ... (several lines elided in this listing)

  DMType dm_name = "DMMOFEM";
  CHKERR DMRegister_MoFEM(dm_name);

  SmartPetscObj<DM> dm;
  dm = createDM(mField.get_comm(), dm_name);

  // create dm instance
  CHKERR DMSetType(dm, dm_name);

  // initialise the PETSc vectors on the required processor
  int local_size;
  if (mField.get_comm_rank() == 0) // get_comm_rank() gets the processor rank
    local_size = LAST_ELEMENT;     // last element gives the size of the vector
  else
    local_size = 0; // local size of the vector is zero on the other processors

  petscVec = createVectorMPI(mField.get_comm(), local_size, LAST_ELEMENT);
  petscVecEnergy = createVectorMPI(mField.get_comm(), local_size, LAST_ELEMENT);

  MoFEMFunctionReturn(0);
}
//! [Setup problem]

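// The setup above relies on a naming convention for the mesh block sets: each
// "MAT_ELECTRIC" block carries one attribute (the permittivity of that
// material region), each "INT_ELECTRIC" block carries one attribute (the
// surface charge density on that internal interface), at most two "ELECTRODE"
// blocks mark the electrode surfaces, and a "DOMAIN_INT" block (used further
// below in getTotalEnergy()) marks the region over which the total energy is
// integrated.
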
//! [Boundary condition]
MoFEMErrorCode Electrostatics::boundaryCondition() {
  MoFEMFunctionBegin;

  auto bc_mng = mField.getInterface<BcManager>();

  // Remove DOFs of the blockset named "BOUNDARY_CONDITION" from the problem
  CHKERR bc_mng->removeBlockDOFsOnEntities(
      simpleInterface->getProblemName(), "BOUNDARY_CONDITION",
      std::string(domainField), true);

  MoFEMFunctionReturn(0);
}
//! [Boundary condition]

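// Note: removing the "BOUNDARY_CONDITION" DOFs from the problem is only one
// half of the essential (Dirichlet) condition; the prescribed potential values
// are applied later, in assembleSystem(), via the pre-process hook installed
// by set_values_to_bc_dofs and the residual correction pushed right after it.
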
//! [Set integration rules]
MoFEMErrorCode Electrostatics::setIntegrationRules() {
  MoFEMFunctionBegin;

  auto rule_lhs = [this](int, int, int p) -> int {
    return 2 * p + geomOrder - 1;
  };
  auto rule_rhs = [this](int, int, int p) -> int {
    return 2 * p + geomOrder - 1;
  };

  auto pipeline_mng = mField.getInterface<PipelineManager>();
  CHKERR pipeline_mng->setDomainLhsIntegrationRule(rule_lhs);
  CHKERR pipeline_mng->setDomainRhsIntegrationRule(rule_rhs);

  MoFEMFunctionReturn(0);
}
//! [Set integration rules]

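// The rule hook returns the polynomial order that the quadrature has to
// integrate exactly, here 2 * p + geomOrder - 1 to cover the product of two
// gradients of the order-p field on a geometry of order geomOrder. With the
// defaults oRder = 2 and geomOrder = 1 this evaluates to 2 * 2 + 1 - 1 = 4,
// i.e. a Gauss rule exact for quartic integrands.
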
//! [Assemble system]
MoFEMErrorCode Electrostatics::assembleSystem() {
  MoFEMFunctionBegin;

  auto pipeline_mng = mField.getInterface<PipelineManager>();
  commonDataPtr = boost::make_shared<DataAtIntegrationPts>(mField);

  auto add_domain_base_ops = [&](auto &pipeline) {
    CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(pipeline, {H1},
                                                          "GEOMETRY");

    pipeline.push_back(
        /* operator reading the block permittivity into commonDataPtr is
           elided in this listing */);
  };

  add_domain_base_ops(pipeline_mng->getOpDomainLhsPipeline());
  auto epsilon = [&](const double, const double, const double) {
    return commonDataPtr->blockPermittivity;
  };

  { // Push operators to the Pipeline that is responsible for calculating LHS
    pipeline_mng->getOpDomainLhsPipeline().push_back(
        new OpDomainLhsMatrixK(domainField, domainField, epsilon));
  }

  { // Push operators to the Pipeline that is responsible for calculating RHS
    auto set_values_to_bc_dofs = [&](auto &fe) {
      auto get_bc_hook = [&]() {
        // (construction of the essential-BC pre-processing hook is elided in
        // this listing)
        return hook;
      };
      fe->preProcessHook = get_bc_hook();
    };
    // Set essential BC
    auto calculate_residual_from_set_values_on_bc = [&](auto &pipeline) {
      using OpInternal =
          FormsIntegrators<DomainEleOp>::Assembly<PETSC>::LinearForm<GAUSS>::
              OpGradTimesTensor<BASE_DIM, FIELD_DIM, SPACE_DIM>;

      add_domain_base_ops(pipeline_mng->getOpDomainRhsPipeline());

      auto grad_u_ptr = boost::make_shared<MatrixDouble>();
      pipeline_mng->getOpDomainRhsPipeline().push_back(
          new OpCalculateScalarFieldGradient<SPACE_DIM>(domainField,
                                                        grad_u_ptr));
      auto minus_epsilon = [&](double, double, double) constexpr {
        return -commonDataPtr->blockPermittivity;
      };
      pipeline_mng->getOpDomainRhsPipeline().push_back(
          new OpInternal(domainField, grad_u_ptr, minus_epsilon));
    };

    set_values_to_bc_dofs(pipeline_mng->getDomainRhsFE());
    calculate_residual_from_set_values_on_bc(
        pipeline_mng->getOpDomainRhsPipeline());

    auto bodySourceTerm = [&](const double, const double, const double) {
      return bodySource;
    };
    pipeline_mng->getOpDomainRhsPipeline().push_back(
        new OpBodySourceVectorb(domainField, bodySourceTerm));
  }

  interFaceRhsFe = boost::shared_ptr<ForcesAndSourcesCore>(
      new IntElementForcesAndSourcesCore(mField));
  interFaceRhsFe->getRuleHook = [this](int, int, int p) {
    return 2 * p + geomOrder - 1;
  };

  {
    CHKERR AddHOOps<SPACE_DIM - 1, SPACE_DIM, SPACE_DIM>::add(
        interFaceRhsFe->getOpPtrVector(), {NOSPACE}, "GEOMETRY");

    interFaceRhsFe->getOpPtrVector().push_back(
        /* operator reading the block charge density into commonDataPtr is
           elided in this listing */);

    auto sIgma = [&](const double, const double, const double) {
      return commonDataPtr->blockChrgDens;
    };

    interFaceRhsFe->getOpPtrVector().push_back(
        new OpInterfaceRhsVectorF(domainField, sIgma));
  }

  MoFEMFunctionReturn(0);
}
//! [Assemble system]

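// What is assembled above, in weak form: find the potential u such that
//   integral( eps * grad(v) . grad(u) ) dV
//     = integral( v * bodySource ) dV + integral( v * sigma ) dS
// for every test function v, where eps is the block permittivity, sigma the
// surface charge density on the INT_ELECTRIC interfaces, and the prescribed
// potential on the BOUNDARY_CONDITION entities is enforced through the
// pre-process hook together with the -eps * grad(u) residual term.
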
//! [Solve system]
MoFEMErrorCode Electrostatics::solveSystem() {
  MoFEMFunctionBegin;

  auto pipeline_mng = mField.getInterface<PipelineManager>();

  auto ksp_solver = pipeline_mng->createKSP();

  boost::shared_ptr<ForcesAndSourcesCore> null; ///< null element (does nothing)
  DM dm;
  CHKERR simpleInterface->getDM(&dm);

  CHKERR DMMoFEMKSPSetComputeRHS(dm, "INTERFACE", interFaceRhsFe, null, null);

  CHKERR KSPSetFromOptions(ksp_solver);

  // Create RHS and solution vectors
  auto F = createDMVector(dm);
  auto D = vectorDuplicate(F);
  // Solve the system
  CHKERR KSPSetUp(ksp_solver);
  CHKERR KSPSolve(ksp_solver, F, D);

  CHKERR VecGhostUpdateBegin(F, INSERT_VALUES, SCATTER_FORWARD);
  CHKERR VecGhostUpdateEnd(F, INSERT_VALUES, SCATTER_FORWARD);

  double fnorm;
  CHKERR VecNorm(F, NORM_2, &fnorm);
  CHKERR PetscPrintf(PETSC_COMM_WORLD, "F norm = %9.8e\n", fnorm);

  // Scatter result data on the mesh
  CHKERR VecGhostUpdateBegin(D, INSERT_VALUES, SCATTER_FORWARD);
  CHKERR VecGhostUpdateEnd(D, INSERT_VALUES, SCATTER_FORWARD);

  double dnorm;
  CHKERR VecNorm(D, NORM_2, &dnorm);
  CHKERR PetscPrintf(PETSC_COMM_WORLD, "D norm = %9.8e\n", dnorm);

  CHKERR DMoFEMMeshToLocalVector(dm, D, INSERT_VALUES, SCATTER_REVERSE);

  MoFEMFunctionReturn(0);
}
//! [Solve system]

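// Because KSPSetFromOptions() is called, the linear solver can be configured
// entirely from the command line with the usual PETSc options, e.g.
// -ksp_type cg -pc_type gamg -ksp_monitor (these particular choices are only
// an illustration; any valid PETSc KSP/PC options may be used).
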
//! [Output results]
MoFEMErrorCode Electrostatics::outputResults() {
  MoFEMFunctionBegin;
  auto pipeline_mng = mField.getInterface<PipelineManager>();
  pipeline_mng->getDomainRhsFE().reset();
  pipeline_mng->getBoundaryLhsFE().reset();
  pipeline_mng->getBoundaryRhsFE().reset();
  pipeline_mng->getDomainLhsFE().reset();
  auto post_proc_fe = boost::make_shared<PostProcEle>(mField);

  // lambda function to calculate the electric field
  auto calculate_e_field = [&](auto &pipeline) {
    auto u_ptr = boost::make_shared<VectorDouble>();
    auto x_ptr = boost::make_shared<MatrixDouble>();
    auto grad_u_ptr = boost::make_shared<MatrixDouble>();
    auto e_field_ptr = boost::make_shared<MatrixDouble>();
    // add higher order operators
    CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(pipeline, {H1},
                                                          "GEOMETRY");
    // calculate field values
    pipeline.push_back(new OpCalculateScalarFieldValues(domainField, u_ptr));
    pipeline.push_back(
        new OpCalculateVectorFieldValues<SPACE_DIM>("GEOMETRY", x_ptr));

    // calculate gradient
    pipeline.push_back(
        new OpCalculateScalarFieldGradient<SPACE_DIM>(domainField, grad_u_ptr));
    // calculate electric field
    pipeline.push_back(new OpElectricField(e_field_ptr, grad_u_ptr));
    return boost::make_tuple(u_ptr, e_field_ptr, x_ptr);
  };

  auto [u_ptr, e_field_ptr, x_ptr] =
      calculate_e_field(post_proc_fe->getOpPtrVector());

  auto e_field_times_perm_ptr = boost::make_shared<MatrixDouble>();
  auto energy_density_ptr = boost::make_shared<VectorDouble>();

  post_proc_fe->getOpPtrVector().push_back(
      new OpGradTimesPerm(domainField, e_field_ptr, e_field_times_perm_ptr,
                          permBlockSetsPtr, commonDataPtr));
  post_proc_fe->getOpPtrVector().push_back(
      new OpEnergyDensity(domainField, e_field_ptr, energy_density_ptr,
                          permBlockSetsPtr, commonDataPtr));

  post_proc_fe->getOpPtrVector().push_back(new OpPPMap(
      post_proc_fe->getPostProcMesh(), post_proc_fe->getMapGaussPts(),

      OpPPMap::DataMapVec{{"POTENTIAL", u_ptr},
                          {"ENERGY_DENSITY", energy_density_ptr}},
      OpPPMap::DataMapMat{
          {"GEOMETRY", x_ptr},
          {"ELECTRIC_FIELD", e_field_ptr},
          {"ELECTRIC_DISPLACEMENT", e_field_times_perm_ptr},
      },

      OpPPMap::DataMapMat{},

      OpPPMap::DataMapMat{}

      ));

  pipeline_mng->getDomainRhsFE() = post_proc_fe;
  CHKERR pipeline_mng->loopFiniteElements();
  CHKERR post_proc_fe->writeFile("out.h5m");

  if (out_skin && SPACE_DIM == 3) {

    auto post_proc_skin = boost::make_shared<PostProcFaceEle>(mField);
    auto op_loop_skin = new OpLoopSide<SideEle>(
        mField, simpleInterface->getDomainFEName(), SPACE_DIM);

    auto [u_ptr, e_field_ptr, x_ptr] =
        calculate_e_field(op_loop_skin->getOpPtrVector());

    op_loop_skin->getOpPtrVector().push_back(
        new OpGradTimesPerm(domainField, e_field_ptr, e_field_times_perm_ptr,
                            permBlockSetsPtr, commonDataPtr));
    op_loop_skin->getOpPtrVector().push_back(
        new OpEnergyDensity(domainField, e_field_ptr, energy_density_ptr,
                            permBlockSetsPtr, commonDataPtr));

    // push op to boundary element
    post_proc_skin->getOpPtrVector().push_back(op_loop_skin);

    post_proc_skin->getOpPtrVector().push_back(new OpPPMap(
        post_proc_skin->getPostProcMesh(), post_proc_skin->getMapGaussPts(),
        OpPPMap::DataMapVec{{"POTENTIAL", u_ptr},
                            {"ENERGY_DENSITY", energy_density_ptr}},
        OpPPMap::DataMapMat{{"ELECTRIC_FIELD", e_field_ptr},
                            {"GEOMETRY", x_ptr},
                            {"ELECTRIC_DISPLACEMENT", e_field_times_perm_ptr}},
        OpPPMap::DataMapMat{}, OpPPMap::DataMapMat{}));

    CHKERR DMoFEMLoopFiniteElements(simpleInterface->getDM(), "SKIN",
                                    post_proc_skin);
    CHKERR post_proc_skin->writeFile("out_skin.h5m");
  }
  MoFEMFunctionReturn(0);
}
//! [Output results]

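// The post-processing output is written in the MOAB .h5m format; it can be
// converted for visualisation (e.g. in ParaView) with the MOAB conversion
// tool, typically: mbconvert out.h5m out.vtk (and likewise for out_skin.h5m).
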
//! [Get Total Energy]
MoFEMErrorCode Electrostatics::getTotalEnergy() {
  MoFEMFunctionBegin;
  auto pip_energy = mField.getInterface<PipelineManager>();
  pip_energy->getDomainRhsFE().reset();
  pip_energy->getDomainLhsFE().reset();
  pip_energy->getBoundaryLhsFE().reset();
  pip_energy->getBoundaryRhsFE().reset();
  pip_energy->getOpDomainRhsPipeline().clear();
  pip_energy->getOpDomainLhsPipeline().clear();

  // gets the map of the internal domain entity range to get the total energy

  boost::shared_ptr<std::map<int, BlockData>> intrnlDomnBlckSetPtr =
      boost::make_shared<std::map<int, BlockData>>();
  Range internal_domain; // range of entities marking the internal domain
  for (_IT_CUBITMESHSETS_BY_SET_TYPE_FOR_LOOP_(mField, BLOCKSET, bit)) {
    if (bit->getName().compare(0, 10, "DOMAIN_INT") == 0) {
      const int id = bit->getMeshsetId();
      auto &block_data = (*intrnlDomnBlckSetPtr)[id];

      CHKERR mField.get_moab().get_entities_by_dimension(
          bit->getMeshset(), SPACE_DIM, block_data.internalDomainEnts, true);
      internal_domain.merge(block_data.internalDomainEnts);
      block_data.iD = id;
    }
  }
  CHKERR mField.getInterface<CommInterface>()->synchroniseEntities(
      internal_domain);

  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
      pip_energy->getOpDomainRhsPipeline(), {H1}, "GEOMETRY");

  auto grad_u_ptr = boost::make_shared<MatrixDouble>();
  auto e_field_ptr = boost::make_shared<MatrixDouble>();

  pip_energy->getOpDomainRhsPipeline().push_back(
      new OpCalculateScalarFieldGradient<SPACE_DIM>(domainField, grad_u_ptr));
  pip_energy->getOpDomainRhsPipeline().push_back(
      new OpElectricField(e_field_ptr, grad_u_ptr));

  commonDataPtr = boost::make_shared<DataAtIntegrationPts>(mField);

  pip_energy->getOpDomainRhsPipeline().push_back(
      // (the energy-accumulating operator and its leading arguments are
      // elided in this listing)
      intrnlDomnBlckSetPtr, commonDataPtr, petscVecEnergy));

  CHKERR pip_energy->loopFiniteElements();
  CHKERR VecAssemblyBegin(petscVecEnergy);
  CHKERR VecAssemblyEnd(petscVecEnergy);

  double total_energy = 0.0; // declaration for total energy
  if (!mField.get_comm_rank()) {
    const double *array;

    CHKERR VecGetArrayRead(petscVecEnergy, &array);
    total_energy = array[ZERO];
    MOFEM_LOG_CHANNEL("SELF");
    MOFEM_LOG_C("SELF", Sev::inform, "Total Energy: %6.15f", total_energy);
    CHKERR VecRestoreArrayRead(petscVecEnergy, &array);
  }

  MoFEMFunctionReturn(0);
}
//! [Get Total Energy]

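// The value accumulated into petscVecEnergy is the electrostatic field energy
// of the DOMAIN_INT region, i.e. (up to the operator implementation kept in
// electrostatics.hpp) W = 1/2 * integral( eps * |grad(u)|^2 ) dV over that
// region, which rank 0 then reads back from entry ZERO and logs.
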
//! [Get Charges]
MoFEMErrorCode Electrostatics::getElectrodeCharge() {
  MoFEMFunctionBegin;
  auto op_loop_side = new OpLoopSide<SideEle>(
      mField, simpleInterface->getDomainFEName(), SPACE_DIM);

  CHKERR AddHOOps<SPACE_DIM, SPACE_DIM, SPACE_DIM>::add(
      op_loop_side->getOpPtrVector(), {H1}, "GEOMETRY");

  auto grad_u_ptr_charge = boost::make_shared<MatrixDouble>();
  auto e_ptr_charge = boost::make_shared<MatrixDouble>();

  op_loop_side->getOpPtrVector().push_back(
      new OpCalculateScalarFieldGradient<SPACE_DIM>(domainField,
                                                    grad_u_ptr_charge));

  op_loop_side->getOpPtrVector().push_back(
      new OpElectricField(e_ptr_charge, grad_u_ptr_charge));
  auto d_jump = boost::make_shared<MatrixDouble>();
  op_loop_side->getOpPtrVector().push_back(new OpElectricDispJump<SPACE_DIM>(
      domainField, e_ptr_charge, d_jump, commonDataPtr, permBlockSetsPtr));

  electrodeRhsFe = boost::shared_ptr<ForcesAndSourcesCore>(
      new IntElementForcesAndSourcesCore(mField));
  electrodeRhsFe->getRuleHook = [this](int, int, int p) {
    return 2 * p + geomOrder - 1;
  };

  // push all the operators on the side element to the electrodeRhsFe
  electrodeRhsFe->getOpPtrVector().push_back(op_loop_side);

  electrodeRhsFe->getOpPtrVector().push_back(new OpElectrodeCharge<SPACE_DIM>(
      /* constructor arguments elided in this listing */));
  CHKERR VecZeroEntries(petscVec);
  CHKERR DMoFEMLoopFiniteElementsUpAndLowRank(simpleInterface->getDM(),
                                              "ELECTRODE", electrodeRhsFe, 0,
                                              mField.get_comm_size());
  CHKERR VecAssemblyBegin(petscVec);
  CHKERR VecAssemblyEnd(petscVec);

  if (!mField.get_comm_rank()) {
    const double *array;

    CHKERR(VecGetArrayRead(petscVec, &array));
    double aLpha = array[0]; // use explicit index instead of ZERO
    double bEta = array[1];  // use explicit index instead of ONE
    MOFEM_LOG_CHANNEL("SELF");
    MOFEM_LOG_C("SELF", Sev::inform,
                "CHARGE_ELEC_1: %6.15f , CHARGE_ELEC_2: %6.15f", aLpha, bEta);

    CHKERR(VecRestoreArrayRead(petscVec, &array));
  }
  if (atomTest && !mField.get_comm_rank()) {
    double cal_charge_elec1;
    double cal_charge_elec2;
    double cal_total_energy;
    const double *c_ptr, *te_ptr;

    // Get a pointer to the PETSc vector data
    CHKERR(VecGetArrayRead(petscVec, &c_ptr));
    CHKERR(VecGetArrayRead(petscVecEnergy, &te_ptr));

    // Expected charges at the electrodes
    double ref_charge_elec1;
    double ref_charge_elec2;
    // Expected total energy of the system
    double ref_tot_energy;
    double tol;
    cal_charge_elec1 = c_ptr[0];  // read charge at the first electrode
    cal_charge_elec2 = c_ptr[1];  // read charge at the second electrode
    cal_total_energy = te_ptr[0]; // read total energy of the system
    if (std::isnan(cal_charge_elec1) || std::isnan(cal_charge_elec2) ||
        std::isnan(cal_total_energy)) {
      SETERRQ(PETSC_COMM_SELF, MOFEM_ATOM_TEST_INVALID,
              "Atom test failed! NaN detected in calculated values.");
    }
    switch (atomTest) {
    case 1: // 2D & 3D test
      // Expected charges at the electrodes
      ref_charge_elec1 = 50.0;
      ref_charge_elec2 = -50.0;
      // Expected total energy of the system
      ref_tot_energy = 500.0;
      tol = 1e-10;
      break;
    case 2: // wavy 3D test
      ref_charge_elec1 = 10.00968352472943;
      ref_charge_elec2 = 0.0; // no electrode
      ref_tot_energy = 50.5978;
      tol = 1e-4;
      break;
    default:
      SETERRQ1(PETSC_COMM_SELF, MOFEM_INVALID_DATA,
               "atom test %d does not exist", atomTest);
    }

    // Validate the results
    if (std::abs(ref_charge_elec1 - cal_charge_elec1) > tol ||
        std::abs(ref_charge_elec2 - cal_charge_elec2) > tol ||
        std::abs(ref_tot_energy - cal_total_energy) > tol) {
      SETERRQ1(
          PETSC_COMM_SELF, MOFEM_ATOM_TEST_INVALID,
          "atom test %d failed! Calculated values do not match expected values",
          atomTest);
    }

    CHKERR(VecRestoreArrayRead(petscVec, &c_ptr)); // restore the vector array
    CHKERR(VecRestoreArrayRead(petscVecEnergy, &te_ptr));
  }

  MoFEMFunctionReturn(0);
}
//! [Get Charges]

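// The electrode charges are computed from the jump of the electric
// displacement across the electrode surfaces: OpElectricDispJump evaluates the
// displacement d = eps * E on the side elements, and the charge operator
// (whose constructor arguments are elided above) integrates that jump over the
// ELECTRODE faces, so each charge is effectively the surface integral of D.n.
// The two results end up in entries ZERO and ONE of petscVec.
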
//! [Run program]
//! [Run program]

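// The body of runProgram() is elided in this listing. Judging from the member
// functions defined above it simply chains the individual steps; a minimal
// sketch (the exact body may differ) would be:
//
//   MoFEMErrorCode Electrostatics::runProgram() {
//     MoFEMFunctionBegin;
//     CHKERR readMesh();
//     CHKERR setupProblem();
//     CHKERR boundaryCondition();
//     CHKERR setIntegrationRules();
//     CHKERR assembleSystem();
//     CHKERR solveSystem();
//     CHKERR outputResults();
//     CHKERR getTotalEnergy();
//     CHKERR getElectrodeCharge();
//     MoFEMFunctionReturn(0);
//   }
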
//! [Main]
int main(int argc, char *argv[]) {
  // Initialisation of MoFEM/PETSc and MOAB data structures
  const char param_file[] = "param_file.petsc";
  MoFEM::Core::Initialize(&argc, &argv, param_file, help);

  // Error handling
  try {
    // Register MoFEM discrete manager in PETSc
    DMType dm_name = "DMMOFEM";
    CHKERR DMRegister_MoFEM(dm_name);

    // Create MOAB instance
    moab::Core mb_instance;              // mesh database
    moab::Interface &moab = mb_instance; // mesh database interface

    // Create MoFEM instance
    MoFEM::Core core(moab);           // finite element database
    MoFEM::Interface &m_field = core; // finite element interface

    // Run the main analysis
    Electrostatics Electrostatics_problem(m_field);
    CHKERR Electrostatics_problem.runProgram();
  }
  CATCH_ERRORS;

  // Finish work: cleaning memory, getting statistics, etc.
  MoFEM::Core::Finalize();

  return 0;
}
//! [Main]
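
// Example invocation (the executable name and mesh file are illustrative only;
// the mesh is passed through the standard Simple-interface -file_name option,
// which is parsed by getOptions() and not shown in this listing):
//
//   ./electrostatics -file_name electrode_mesh.h5m -order 2 -geom_order 1 \
//       -out_skin
//
// -order, -geom_order, -atom_test, -out_skin and -is_partitioned are the
// options read in setupProblem(); any remaining options are handled by PETSc.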