Commit 0c380f40 authored by Baptiste Durand's avatar Baptiste Durand

Merge branch 'update-demo-scripts' into 'master'

Update demo scripts

See merge request !2
parents 7330b4d9 e0da4a1e
......@@ -12,24 +12,9 @@ import matplotlib.pyplot as plt
from ho_homog import homog2d as hom
import dolfin as fe
from ho_homog import geometry as geo
from pathlib import Path
import numpy as np
import logging
from logging.handlers import RotatingFileHandler
plt.ioff()
#* Logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s', "%H:%M")
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
geo.init_geo_tools()
geo.set_gmsh_option("General.Verbosity", 2)
......@@ -41,26 +26,31 @@ fe.set_log_level(20)
# * PROGRESS = 16, // what's happening (broadly)
# * TRACE = 13, // what's happening (in detail)
# * DBG = 10 // sundry
# * https://fenicsproject.org/qa/810/how-to-disable-message-solving-linear-variational-problem/
# * https://fenicsproject.org/qa/810/how-to-disable-message-solving-linear-variational-problem/ #noqa
# * Step 1 : Generating the mesh file
a = 1
b, k = a, a/3
panto_test = mesh_generate_2D.Gmsh2DRVE.pantograph(a, b, k, 0.01, nb_cells=(2, 2), soft_mat=True, name='panto_with_soft')
panto_test.main_mesh_refinement((0.1, 0.5), (0.1, 0.3), False)
panto_test.soft_mesh_refinement((0.1, 0.5), (0.1, 0.3), False)
b, k = a, a / 3
r = a / 1e3
panto_test = mesh_generate_2D.Gmsh2DRVE.pantograph(
a, b, k, r, nb_cells=(1, 1), soft_mat=True, name="panto_with_soft"
)
panto_test.main_mesh_refinement((3 * r, a / 2), (r / 6, a / 6), True)
panto_test.soft_mesh_refinement((3 * r, a / 2), (r / 6, a / 6), True)
panto_test.mesh_generate()
# * Step 2 : Defining the material mechanical properties for each subdomain
E1, nu1 = 1., 0.3
E2, nu2 = E1/100., nu1
E1, nu1 = 1.0, 0.3
E2, nu2 = E1 / 100.0, nu1
E_nu_tuples = [(E1, nu1), (E2, nu2)]
subdo_tags = tuple([subdo.tag for subdo in panto_test.phy_surf]) # Here: soft_mat = True => tags = (1, 2)
subdo_tags = tuple(
[subdo.tag for subdo in panto_test.phy_surf]
) # Here: soft_mat = True => tags = (1, 2)
material_dict = dict()
for coeff, tag in zip(E_nu_tuples, subdo_tags):
material_dict[tag] = mat.Material(coeff[0], coeff[1], 'cp')
material_dict[tag] = mat.Material(coeff[0], coeff[1], "cp")
# * Step 3 : Creating the Python object that represents the RVE and is suitable for FEniCS
# * Two alternatives :
......@@ -69,7 +59,8 @@ rve = part.Fenics2DRVE.gmsh_2_Fenics_2DRVE(panto_test, material_dict)
# ! OR:
# * Step 3.2 : Initialization of the Fenics2DRVE instance from a mesh file + generating vectors
# * Step 3.2 : Initialization of the Fenics2DRVE instance from
# * a mesh file + generating vectors
# mesh_path = panto_test.mesh_abs_path
# gen_vect = panto_test.gen_vect #or explicitely : np.array([[4., 0.], [0., 8.]])
# rve = part.Fenics2DRVE.file_2_Fenics_2DRVE(mesh_path, gen_vect, material_dict)
......@@ -78,25 +69,28 @@ rve = part.Fenics2DRVE.gmsh_2_Fenics_2DRVE(panto_test, material_dict)
hom_model = hom.Fenics2DHomogenization(rve)
# * Step 5 : Computing the homogenized constitutive tensors
DictOfLocalizationsU, DictOfLocalizationsSigma, DictOfLocalizationsEpsilon, DictOfConstitutiveTensors = hom_model.homogenizationScheme('EG')
*localization_dicts, constitutive_tensors = hom_model.homogenizationScheme("EG")
# * Step 6 : Postprocessing
print(DictOfConstitutiveTensors)
print(DictOfConstitutiveTensors['E']['E'])
# * [[ 0.0726 0.0379 -0. ]
# * [ 0.0379 0.1638 0. ]
# * [-0. 0. 0.0906]]
print(DictOfConstitutiveTensors['EGbis']['EGbis'])
# * [[ 0.3799 0.1405 -0. 0. 0. 0.0401]
# * [ 0.1405 0.14 -0. 0. 0. 0.0451]
# * [-0. -0. 0.1428 0.0393 0.039 -0. ]
# * [ 0. 0. 0.0393 0.292 0.1822 -0. ]
# * [ 0. 0. 0.039 0.1822 0.2401 -0. ]
# * [ 0.0401 0.0451 -0. -0. -0. 0.0676]]
print(constitutive_tensors)
print(constitutive_tensors["E"]["E"])
# *[[0.041 0.0156 0. ]
# * [0.0156 0.0688 0. ]
# * [0. 0. 0.0307]]
print(constitutive_tensors["EGbis"]["EGbis"])
# * [[ 0.2831 0.078 0. 0. 0. 0.0336]
# * [ 0.078 0.0664 -0. 0. -0. 0.0282]
# * [ 0. -0. 0.0756 0.0343 0.0243 -0. ]
# * [ 0. 0. 0.0343 0.2289 0.1113 0. ]
# * [ 0. -0. 0.0243 0.1113 0.1419 0. ]
# * [ 0.0336 0.0282 -0. 0. 0. 0.0541]]
plt.figure()
fe.plot(fe.project(0.1*hom_model.localization['E']['U'][2],hom_model.V), mode='displacement')
plt.savefig("loc_EU.pdf")
fe.plot(
fe.project(0.1 * hom_model.localization["E"]["U"][2], hom_model.V),
mode="displacement",
)
plt.savefig("loc_E12_u.pdf")
plt.show()
......@@ -6,6 +6,7 @@ Created on 18/04/2018
Compare the higher-order homogenized solution with the full-scale solution.
Example : 2D, pantograph microstructure.
"""
import logging
from pathlib import Path
......@@ -25,6 +26,7 @@ from ho_homog import (
pckg_logger,
periodicity,
toolbox_FEniCS,
toolbox_gmsh
)
logger = logging.getLogger("demo_full_compare")
......@@ -41,46 +43,15 @@ formatter = logging.Formatter(
"%(asctime)s :: %(levelname)s :: %(name)s :: %(message)s", "%H:%M:%S"
)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.DEBUG)
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)
logger_root.addHandler(stream_handler)
gmsh_logger = logging.getLogger("gmsh")
gmsh_logger.setLevel(logging.INFO)
logging.getLogger("UFL").setLevel(logging.DEBUG)
logging.getLogger("FFC").setLevel(logging.DEBUG)
fe.set_log_level(20)
def process_gmsh_log(gmsh_log: list, detect_error=True):
    """Treatment of log messages gathered with gmsh.logger.get()

    Sorts the Gmsh messages into errors and warnings, reports a summary
    plus the details through the module-level ``gmsh_logger`` and, when
    ``detect_error`` is true, raises ``AssertionError`` if at least one
    error message was found.
    """
    lowered = [(message, message.lower()) for message in gmsh_log]
    errors = [message for message, low in lowered if "error" in low]
    warnings = [message for message, low in lowered if "warning" in low]
    gmsh_logger.info("**********")
    gmsh_logger.info(
        f"{len(gmsh_log)} logging messages got from Gmsh : "
        f"{len(errors)} errors, {len(warnings)} warnings."
    )
    if errors:
        gmsh_logger.error("Gmsh errors details :")
        for message in errors:
            gmsh_logger.error(message)
    if warnings:
        gmsh_logger.warning("Gmsh warnings details :")
        for message in warnings:
            gmsh_logger.warning(message)
    gmsh_logger.debug("All gmsh messages :")
    gmsh_logger.debug(gmsh_log)
    gmsh_logger.info("**********")
    if detect_error and errors:
        raise AssertionError("Gmsh logging messages signal errors.")
# * Step 1 : Modeling the geometry of the part and the RVE
geometry.init_geo_tools()
geometry.set_gmsh_option("Mesh.MaxNumThreads2D", 4)
......@@ -94,13 +65,13 @@ part_geo = mesh_generate_2D.Gmsh2DRVE.pantograph(
a, b, k, r, nb_cells=(20, 1), soft_mat=False, name="panto_part"
)
# ! Utiliser autre chose que Gmsh2DRVE
process_gmsh_log(gmsh.logger.get())
toolbox_gmsh.process_gmsh_log(gmsh.logger.get())
gmsh.logger.stop()
gmsh.logger.start()
rve_geo = mesh_generate_2D.Gmsh2DRVE.pantograph(
a, b, k, r, nb_cells=(1, 1), soft_mat=False, name="panto_rve"
)
process_gmsh_log(gmsh.logger.get())
toolbox_gmsh.process_gmsh_log(gmsh.logger.get())
gmsh.logger.stop()
# * Step 2 : Generating the meshes
......@@ -111,7 +82,7 @@ for geo_model in [part_geo, rve_geo]:
geo_model.main_mesh_refinement(d_min_max, lc_min_max, False)
gmsh.logger.start()
geo_model.mesh_generate()
process_gmsh_log(gmsh.logger.get())
toolbox_gmsh.process_gmsh_log(gmsh.logger.get())
gmsh.logger.stop()
# * Step 2 : Defining the material properties
......@@ -123,7 +94,7 @@ material_dict_rve = {rve_geo.phy_surf[0].tag: materials.Material(E, nu, "cp")}
# * Step 3 : Calculation of the full-scale solution
# * Step 3.1 : Create a part object
panto_part = part.Fenics2DRVE.file_2_FenicsPart(
panto_part = part.FenicsPart.file_2_FenicsPart(
part_geo.mesh_abs_path,
material_dict_part,
global_dimensions=part_geo.gen_vect,
......@@ -232,71 +203,11 @@ macro_fields = {
"EGG": [U_hom[3]] + [0] * 11,
}
per_scalar_fnct_cpp_code = """
#include <pybind11/pybind11.h>
#include <pybind11/eigen.h>
namespace py = pybind11;
#include <dolfin/function/Expression.h>
#include <dolfin/function/Function.h>
class PeriodicExpr : public dolfin::Expression
{
public:
PeriodicExpr() : dolfin::Expression() {}
void eval(
Eigen::Ref<Eigen::VectorXd> values,
Eigen::Ref<const Eigen::VectorXd> x, const ufc::cell& cell
) const override
{
Eigen::Vector2d coord_equi;
coord_equi[0] = x[0] -per_x*floor(x[0]/per_x);
coord_equi[1] = x[1] -per_y*floor(x[1]/per_y);
f->eval(values, coord_equi);
//passer par un pointeur ? *f->eval(values, coord_equi);
}
std::shared_ptr<dolfin::Function> f;
double per_x;
double per_y;
};
PYBIND11_MODULE(SIGNATURE, m)
{
py::class_<PeriodicExpr, std::shared_ptr<PeriodicExpr>, dolfin::Expression>
(m, "PeriodicExpr", py::dynamic_attr())
.def(py::init<>())
.def_readwrite("f", &PeriodicExpr::f)
.def_readwrite("per_x", &PeriodicExpr::per_x)
.def_readwrite("per_y", &PeriodicExpr::per_y);
}
"""
# TODO Organisation du code à revoir
# TODO Peut-on s'éviter la compilation du code à chaque fois qu'une instance de PeriodicExpr est crée ?
# TODO la solution est peut-être dans un __init__ personnalisé :
# TODO Idée : reprendre le init de la classe mère sans l'écraser totalement et simplement lui rajouter la partie :
# TODO : __init(self, periodicity_vectors, local_function)
# TODO : self._cpp_object = fe.compile_cpp_code(per_scalar_fnct_cpp_code).PeriodicExpr()
# TODO self._cpp_object.per_x = per_x(periodicity_vectors) #per_x à définir, ajuster pour des cas où ce n'est pas un rectangle ?
# TODO self._cpp_object.per_y = per_y(periodicity_vectors)
class PeriodicExpr(fe.UserExpression):
    """Python-side wrapper for the compiled periodic scalar expression.

    Instances get their ``_cpp_object`` attribute set to a compiled
    C++ ``PeriodicExpr`` (see ``per_scalar_fnct_cpp_code`` above), which
    performs the actual periodic evaluation of the wrapped function.
    """

    def value_shape(self):
        # Empty shape: this UserExpression evaluates to a scalar.
        return ()
# TODO : Regarder si on peut faire un __init__ particulier,
# TODO tel qu'on conserve
# periodic_cppcode = fe.compile_cpp_code(per_scalar_fnct_cpp_code).PeriodicExpr() #? Peut-on le sortir de la boucle ci-dessous ?
key_conversion = {"U": "u", "Epsilon": "eps", "Sigma": "sigma"}
localztn_expr = dict()
for key, scd_dict in hom_model.localization.items():
if key == "EGGbis": # ! Je passe avec une clé EGG ?! Pourquoi ?
if key == "EGGbis":
continue
localztn_expr[key] = dict()
for scd_key, localztn_fields in scd_dict.items():
......@@ -307,16 +218,9 @@ for key, scd_dict in hom_model.localization.items():
for i, field in enumerate(localztn_fields):
new_fields = list() # 1 field per component of U, Sigma and Epsilon
for component in field.split():
per_field = PeriodicExpr(degree=3)
periodic_cppcode = fe.compile_cpp_code(
per_scalar_fnct_cpp_code
).PeriodicExpr()
periodic_cppcode.per_x = rve_geo.gen_vect[0, 0]
periodic_cppcode.per_y = rve_geo.gen_vect[1, 1]
component.set_allow_extrapolation(True)
periodic_cppcode.f = component.cpp_object()
# https://bitbucket.org/fenics-project/dolfin/issues/1041/compiledexpression-cant-be-initialized
per_field._cpp_object = periodic_cppcode
per_field = periodicity.PeriodicExpr(
component, rve_geo.gen_vect, degree=3
)
new_fields.append(per_field)
localztn_expr[key][updated_key2].append(new_fields)
function_spaces = {"u": model.displ_fspace, "eps": model.strain_fspace}
......@@ -364,4 +268,3 @@ for f_name in reconstr_sol.keys():
errors[f_name] = error
print(f_name, error, difference, exact_norm)
logger.info(f"Relative error for {f_name} = {error}")
......@@ -10,7 +10,14 @@ from pathlib import Path
import dolfin as fe
import gmsh
from ho_homog import *
import ho_homog.geometry as geo
from ho_homog import GEO_TOLERANCE, pckg_logger
from ho_homog.mesh_generate_2D import Gmsh2DRVE
from ho_homog.full_scale_pb import FullScaleModel
from ho_homog.materials import Material
from ho_homog.part import FenicsPart
from ho_homog.periodicity import PeriodicDomain
from ho_homog.toolbox_gmsh import process_gmsh_log
logger = logging.getLogger("demo_full_scale")
logger_root = logging.getLogger()
......@@ -29,44 +36,14 @@ stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)
logger_root.addHandler(stream_handler)
gmsh_logger = logging.getLogger("gmsh")
gmsh_logger.setLevel(logging.INFO)
def process_gmsh_log(gmsh_log: list, detect_error=True):
    """Treatment of log messages gathered with gmsh.logger.get()

    Splits the Gmsh log lines into errors and warnings, emits a summary
    and the details via the module-level ``gmsh_logger``, and optionally
    raises ``AssertionError`` when any error message is present.
    """
    errors, warnings = [], []
    for message in gmsh_log:
        low = message.lower()
        if "error" in low:
            errors.append(message)
        if "warning" in low:
            warnings.append(message)
    gmsh_logger.info("**********")
    gmsh_logger.info(
        f"{len(gmsh_log)} logging messages got from Gmsh : {len(errors)} errors, {len(warnings)} warnings."
    )
    if errors:
        gmsh_logger.error("Gmsh errors details :")
        for message in errors:
            gmsh_logger.error(message)
    if warnings:
        gmsh_logger.warning("Gmsh warnings details :")
        for message in warnings:
            gmsh_logger.warning(message)
    gmsh_logger.debug("All gmsh messages :")
    gmsh_logger.debug(gmsh_log)
    gmsh_logger.info("**********")
    if detect_error and errors:
        raise AssertionError("Gmsh logging messages signal errors.")
# * Step 1 : Modeling the geometry of the part
geometry.init_geo_tools()
geo.init_geo_tools()
a = 1
b, k = a, a / 3
r = 0.05
gmsh.logger.start()
panto_geo = mesh_generate_2D.Gmsh2DRVE.pantograph(
panto_geo = Gmsh2DRVE.pantograph(
a, b, k, 0.1, nb_cells=(10, 1), soft_mat=True, name="panto_with_soft"
)
process_gmsh_log(gmsh.logger.get())
......@@ -93,16 +70,12 @@ E_nu_tuples = [(E1, nu1), (E2, nu2)]
subdo_tags = tuple([subdo.tag for subdo in panto_geo.phy_surf])
material_dict = dict()
for coeff, tag in zip(E_nu_tuples, subdo_tags):
material_dict[tag] = materials.Material(coeff[0], coeff[1], "cp")
material_dict[tag] = Material(coeff[0], coeff[1], "cp")
# * Step 3 : Create part object
panto_part = part.Fenics2DRVE.file_2_FenicsPart(
panto_geo.mesh_abs_path,
material_dict,
global_dimensions=panto_geo.gen_vect,
subdomains_import=True,
panto_part = FenicsPart.file_2_FenicsPart(
panto_geo.mesh_abs_path, material_dict, panto_geo.gen_vect, subdomains_import=True
)
LX = panto_part.global_dimensions[0, 0]
......@@ -120,12 +93,12 @@ class RightBorder(fe.SubDomain):
l_border = LeftBorder()
r_border = RightBorder()
boundary_markers = fe.MeshFunction("size_t", panto_part.mesh, dim=panto_part.dim-1)
boundary_markers = fe.MeshFunction("size_t", panto_part.mesh, dim=panto_part.dim - 1)
boundary_markers.set_all(9999)
l_border.mark(boundary_markers, 11)
r_border.mark(boundary_markers, 99)
pbc = full_scale_pb.PeriodicDomain.pbc_dual_base(panto_part.global_dimensions, "Y")
pbc = PeriodicDomain.pbc_dual_base(panto_part.global_dimensions, "Y")
boundary_conditions = [
{"type": "Periodic", "constraint": pbc},
{
......@@ -154,9 +127,8 @@ load = (2, s_load, indicator_fctn)
element = ("Lagrange", 2)
# * Step 7 : Gathering all data in a model
model = full_scale_pb.FullScaleModel(panto_part, [load], boundary_conditions, element)
model.set_solver("LU", "mumps")
model = FullScaleModel(panto_part, [load], boundary_conditions, element)
model.set_solver("mumps")
# * Step 8 : Solving problem
model.solve(results_file=Path("demo_results/demo_pantograph_full_scale.xdmf"))
......@@ -9,12 +9,10 @@ import ho_homog.geometry as geo
import ho_homog.mesh_tools as msh
import numpy as np
import logging
from logging.handlers import RotatingFileHandler
import gmsh
import os
import matplotlib.pyplot as plt
from more_itertools import flatten
from pathlib import Path
from subprocess import run
# nice shortcuts
model = gmsh.model
......@@ -23,29 +21,24 @@ factory = model.occ
# * Logging
logger = logging.getLogger(__name__) # http://sametmax.com/ecrire-des-logs-en-python/
logger.setLevel(logging.INFO)
if __name__ == "__main__":
logger_root = logging.getLogger()
logger_root.setLevel(logging.INFO)
formatter = logging.Formatter(
"%(asctime)s :: %(levelname)s :: %(name)s :: %(message)s", "%Y-%m-%d %H:%M:%S"
)
log_path = Path.home().joinpath("Desktop/activity.log")
file_handler = RotatingFileHandler(str(log_path), "a", 1000000, 10)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
logger_root.addHandler(file_handler) # Pour écriture d'un fichier log
formatter = logging.Formatter(
"%(asctime)s :: %(levelname)s :: %(message)s", "%H:%M"
)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)
logger_root.addHandler(stream_handler)
formatter = logging.Formatter("%(asctime)s :: %(levelname)s :: %(message)s", "%H:%M")
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
geo.init_geo_tools()
name = "pantograph"
model.add(name)
root = Path(__file__).resolve().parent.joinpath(name)
geo_file = root.with_suffix(".brep")
mesh_file = root.with_suffix(".msh")
os.path.realpath(__file__)
logger.info("Start defining the pantograph geometry")
a = 1
b = 1
......@@ -65,11 +58,11 @@ E1 = geo.Point(e1)
E2 = geo.Point(e2)
E1m = geo.Point(-1 * e1)
E2m = geo.Point(-1 * e2)
O = np.zeros((3,))
O_pt = np.zeros((3,))
L = geo.Point(2 * (e1 + e2))
Lm = geo.Point(2 * (e1 - e2))
M = geo.Point(e1 + 1.5 * e2 + b / 2)
I = geo.Point(2 * (e1 + 1.5 * e2 + b / 2))
N = geo.Point(2 * (e1 + 1.5 * e2 + b / 2))
contours = list()
contours.append([E1, E2, E1m, E2m])
......@@ -87,26 +80,25 @@ contours.append(
pattern_ll = [geo.LineLoop(pt_list, explicit=False) for pt_list in contours]
pattern_ll += [geo.point_reflection(ll, M) for ll in pattern_ll]
sym_ll = [geo.plane_reflection(ll, I, e1) for ll in pattern_ll]
sym_ll = [geo.plane_reflection(ll, N, e1) for ll in pattern_ll]
for ll in sym_ll:
ll.reverse()
pattern_ll += sym_ll
sym_ll = [geo.plane_reflection(ll, I, e2) for ll in pattern_ll]
sym_ll = [geo.plane_reflection(ll, N, e2) for ll in pattern_ll]
for ll in sym_ll:
ll.reverse()
pattern_ll += sym_ll
pattern_ll = geo.remove_duplicates(pattern_ll)
logger.info(
f"Done removing of the line-loops duplicates. pattern_ll length : {len(pattern_ll)}"
)
# 13 no-redundant LineLoop to define the pantographe microstructure geometry in one cell.
logger.info("Removing of the line-loops duplicates : Done")
logger.info(f"pattern_ll length : {len(pattern_ll)}")
# * 13 no-redundant LineLoops to define the pantograph microstructure geometry in 1 cell.
for ll in pattern_ll:
ll.round_corner_incircle(r)
logger.info("Done rounding all corners of pattern line-loops")
logger.info("Rounding all corners of pattern line-loops : Done")
macro_vtcs = [O, gen_vect[0], gen_vect[0] + gen_vect[1], gen_vect[1]]
macro_vtcs = [O_pt, gen_vect[0], gen_vect[0] + gen_vect[1], gen_vect[1]]
macro_ll = geo.LineLoop([geo.Point(c) for c in macro_vtcs])
macro_s = geo.PlaneSurface(macro_ll)
......@@ -114,22 +106,20 @@ logger.info("Start boolean operations on surfaces")
pattern_s = [geo.PlaneSurface(ll) for ll in pattern_ll]
rve_s = geo.surface_bool_cut(macro_s, pattern_s)
rve_s = rve_s[0]
logger.info("Done boolean operations on surfaces")
logger.info("Boolean operations on surfaces : Done")
rve_s_phy = geo.PhysicalGroup([rve_s], 2, "partition_plein")
factory.synchronize()
rve_s_phy.add_gmsh()
factory.synchronize()
data = model.getPhysicalGroups()
logger.info(
"All physical groups in the model "
+ repr(data)
+ " Names : "
+ repr([model.getPhysicalName(*dimtag) for dimtag in data])
)
logger.info("Done generating the gmsh geometrical model")
gmsh.write("%s.brep" % name)
os.system("gmsh %s.brep &" % name)
logger.info(f"All physical groups in the model : \n {data}")
names = [model.getPhysicalName(*dimtag) for dimtag in data]
logger.info(f"Physical group names: \n {names}")
logger.info("Generate geometry model : Done")
gmsh.write(str(geo_file))
run(f"gmsh {str(geo_file)} &", shell=True, check=True)
logger.info("Start defining a mesh refinement constraint")
constr_pts = [pt for ll in pattern_ll for pt in ll.vertices]
......@@ -143,21 +133,12 @@ for pt in fine_pts:
pt.add_gmsh()
factory.synchronize()
f = msh.set_mesh_refinement(
[r, a], [r / 4, a / 3], attractors={"points": fine_pts}, sigmoid_interpol=True
(r, a), (r / 4, a / 3), attractors={"points": fine_pts}, sigmoid_interpol=True
)
msh.set_background_mesh(f)
# logger.info('Test de rafinement du maillage autour des lignes du bord de la microstructure')
# fine_lns = list(flatten([ll.sides for ll in pattern_ll]))
# g = msh.set_mesh_refinement([r, a/3], [r/3, a/3], attractors={'curves':fine_lns}, sigmoid_interpol=True)
# msh.set_background_mesh(g)
if not gmsh.option.getNumber("Mesh.CharacteristicLengthExtendFromBoundary") == 0:
pre_val = gmsh.option.getNumber("Mesh.CharacteristicLengthExtendFromBoundary")
val = 0
gmsh.option.setNumber("Mesh.CharacteristicLengthExtendFromBoundary", val)
logging.info(
f"Option Mesh.CharacteristicLengthExtendFromBoundary set to {val}. Initial value : {pre_val}."
)
logger.info("Done defining a mesh refinement constraint")
logger.info("Mesh refinement constraint Done")
logger.info("Start defining a periodicity constraint for the mesh")
macro_bndry = macro_ll.sides
rve_s.get_boundary(recursive=True)
micro_bndry = [geo.macro_line_fragments(rve_s.boundary, M_ln) for M_ln in macro_bndry]
......@@ -169,16 +150,18 @@ logger.debug("length of micro_bndry list : " + str(len(micro_bndry)))
msh.set_periodicity_pairs(micro_bndry[0], micro_bndry[2])
msh.set_periodicity_pairs(micro_bndry[1], micro_bndry[3])
logger.info("Periodicity constraint : Done")
logger.info("Cleaning model")
factory.remove([(1, l.tag) for l in macro_ll.sides])
for l in macro_ll.sides:
l.tag = None
pre_val = gmsh.option.getNumber("Mesh.SaveAll")
val = 0
gmsh.option.setNumber("Mesh.SaveAll", val)
logging.info(f"Option Mesh.SaveAll set to {val}. Initial value : {pre_val}.")
factory.synchronize()
factory.removeAllDuplicates()
factory.synchronize()
geo.set_gmsh_option("Mesh.CharacteristicLengthExtendFromBoundary", 0)
geo.set_gmsh_option("Mesh.SaveAll", 0)
geo.PhysicalGroup.set_group_mesh(1)
gmsh.model.mesh.generate(2)
gmsh.write("%s.msh" % name)
os.system("gmsh %s.msh &" % name)
gmsh.write(str(mesh_file))
run(f"gmsh {str(mesh_file)} &", shell=True, check=True)
gmsh.fltk.run()