Commit 0c380f40 authored by Baptiste Durand

Merge branch 'update-demo-scripts' into 'master'

Update demo scripts

See merge request !2
parents 7330b4d9 e0da4a1e
@@ -12,24 +12,9 @@ import matplotlib.pyplot as plt
 from ho_homog import homog2d as hom
 import dolfin as fe
 from ho_homog import geometry as geo
-from pathlib import Path
 import numpy as np
-import logging
-from logging.handlers import RotatingFileHandler
-plt.ioff()
-# * Logging
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.INFO)
-formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s', "%H:%M")
-stream_handler = logging.StreamHandler()
-stream_handler.setLevel(logging.INFO)
-stream_handler.setFormatter(formatter)
-logger.addHandler(stream_handler)
 geo.init_geo_tools()
 geo.set_gmsh_option("General.Verbosity", 2)
@@ -41,26 +26,31 @@ fe.set_log_level(20)
 # * PROGRESS = 16, // what's happening (broadly)
 # * TRACE = 13, // what's happening (in detail)
 # * DBG = 10 // sundry
-# * https://fenicsproject.org/qa/810/how-to-disable-message-solving-linear-variational-problem/
+# * https://fenicsproject.org/qa/810/how-to-disable-message-solving-linear-variational-problem/ #noqa
 # * Step 1 : Generating the mesh file
 a = 1
-b, k = a, a/3
-panto_test = mesh_generate_2D.Gmsh2DRVE.pantograph(a, b, k, 0.01, nb_cells=(2, 2), soft_mat=True, name='panto_with_soft')
-panto_test.main_mesh_refinement((0.1, 0.5), (0.1, 0.3), False)
-panto_test.soft_mesh_refinement((0.1, 0.5), (0.1, 0.3), False)
+b, k = a, a / 3
+r = a / 1e3
+panto_test = mesh_generate_2D.Gmsh2DRVE.pantograph(
+    a, b, k, r, nb_cells=(1, 1), soft_mat=True, name="panto_with_soft"
+)
+panto_test.main_mesh_refinement((3 * r, a / 2), (r / 6, a / 6), True)
+panto_test.soft_mesh_refinement((3 * r, a / 2), (r / 6, a / 6), True)
 panto_test.mesh_generate()
 # * Step 2 : Defining the material mechanical properties for each subdomain
-E1, nu1 = 1., 0.3
-E2, nu2 = E1/100., nu1
+E1, nu1 = 1.0, 0.3
+E2, nu2 = E1 / 100.0, nu1
 E_nu_tuples = [(E1, nu1), (E2, nu2)]
-subdo_tags = tuple([subdo.tag for subdo in panto_test.phy_surf])  # Here: soft_mat = True => tags = (1, 2)
+subdo_tags = tuple(
+    [subdo.tag for subdo in panto_test.phy_surf]
+)  # Here: soft_mat = True => tags = (1, 2)
 material_dict = dict()
 for coeff, tag in zip(E_nu_tuples, subdo_tags):
-    material_dict[tag] = mat.Material(coeff[0], coeff[1], 'cp')
+    material_dict[tag] = mat.Material(coeff[0], coeff[1], "cp")
 # * Step 3 : Creating the Python object that represents the RVE and is suitable for FEniCS
 # * Two alternatives :
@@ -69,7 +59,8 @@ rve = part.Fenics2DRVE.gmsh_2_Fenics_2DRVE(panto_test, material_dict)
 # ! OR:
-# * Step 3.2 : Initialization of the Fenics2DRVE instance from a mesh file + generating vectors
+# * Step 3.2 : Initialization of the Fenics2DRVE instance from
+# * a mesh file + generating vectors
 # mesh_path = panto_test.mesh_abs_path
 # gen_vect = panto_test.gen_vect #or explicitely : np.array([[4., 0.], [0., 8.]])
 # rve = part.Fenics2DRVE.file_2_Fenics_2DRVE(mesh_path, gen_vect, material_dict)
@@ -78,25 +69,28 @@ rve = part.Fenics2DRVE.gmsh_2_Fenics_2DRVE(panto_test, material_dict)
 hom_model = hom.Fenics2DHomogenization(rve)
 # * Step 5 : Computing the homogenized consitutive tensors
-DictOfLocalizationsU, DictOfLocalizationsSigma, DictOfLocalizationsEpsilon, DictOfConstitutiveTensors = hom_model.homogenizationScheme('EG')
+*localization_dicts, constitutive_tensors = hom_model.homogenizationScheme("EG")
 # * Step 6 : Postprocessing
-print(DictOfConstitutiveTensors)
-print(DictOfConstitutiveTensors['E']['E'])
-# * [[ 0.0726  0.0379 -0.    ]
-# *  [ 0.0379  0.1638  0.    ]
-# *  [-0.      0.      0.0906]]
-print(DictOfConstitutiveTensors['EGbis']['EGbis'])
-# * [[ 0.3799  0.1405 -0.      0.      0.      0.0401]
-# *  [ 0.1405  0.14   -0.      0.      0.      0.0451]
-# *  [-0.     -0.      0.1428  0.0393  0.039  -0.    ]
-# *  [ 0.      0.      0.0393  0.292   0.1822 -0.    ]
-# *  [ 0.      0.      0.039   0.1822  0.2401 -0.    ]
-# *  [ 0.0401  0.0451 -0.     -0.     -0.      0.0676]]
+print(constitutive_tensors)
+print(constitutive_tensors["E"]["E"])
+# * [[0.041  0.0156 0.    ]
+# *  [0.0156 0.0688 0.    ]
+# *  [0.     0.     0.0307]]
+print(constitutive_tensors["EGbis"]["EGbis"])
+# * [[ 0.2831  0.078   0.      0.      0.      0.0336]
+# *  [ 0.078   0.0664 -0.      0.     -0.      0.0282]
+# *  [ 0.     -0.      0.0756  0.0343  0.0243 -0.    ]
+# *  [ 0.      0.      0.0343  0.2289  0.1113  0.    ]
+# *  [ 0.     -0.      0.0243  0.1113  0.1419  0.    ]
+# *  [ 0.0336  0.0282 -0.      0.      0.      0.0541]]
 plt.figure()
-fe.plot(fe.project(0.1*hom_model.localization['E']['U'][2],hom_model.V), mode='displacement')
-plt.savefig("loc_EU.pdf")
+fe.plot(
+    fe.project(0.1 * hom_model.localization["E"]["U"][2], hom_model.V),
+    mode="displacement",
+)
+plt.savefig("loc_E12_u.pdf")
 plt.show()
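Note on Step 5 above: the pre-change line shows that homogenizationScheme("EG") returns the three localization dictionaries (displacement, stress, strain) followed by the dictionary of constitutive tensors, which the new star-unpacking gathers into localization_dicts. A minimal sketch of recovering the individual dictionaries, assuming the return order is unchanged (illustrative only):

loc_u_dict, loc_sigma_dict, loc_eps_dict = localization_dicts  # same order as the old 4-tuple
print(constitutive_tensors["E"]["E"])  # classical stiffness block, as printed above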
@@ -6,6 +6,7 @@ Created on 18/04/2018
 Compare the higher-order homogenized solution with the full-scale solution.
 Example : 2D, pantograph microstructure.
 """
 import logging
 from pathlib import Path
@@ -25,6 +26,7 @@ from ho_homog import (
     pckg_logger,
     periodicity,
     toolbox_FEniCS,
+    toolbox_gmsh,
 )
 logger = logging.getLogger("demo_full_compare")
@@ -41,46 +43,15 @@ formatter = logging.Formatter(
     "%(asctime)s :: %(levelname)s :: %(name)s :: %(message)s", "%H:%M:%S"
 )
 stream_handler = logging.StreamHandler()
-stream_handler.setLevel(logging.DEBUG)
+stream_handler.setLevel(logging.INFO)
 stream_handler.setFormatter(formatter)
 logger_root.addHandler(stream_handler)
-gmsh_logger = logging.getLogger("gmsh")
-gmsh_logger.setLevel(logging.INFO)
 logging.getLogger("UFL").setLevel(logging.DEBUG)
 logging.getLogger("FFC").setLevel(logging.DEBUG)
 fe.set_log_level(20)
-def process_gmsh_log(gmsh_log: list, detect_error=True):
-    """Treatment of log messages gathered with gmsh.logger.get()"""
-    err_msg, warn_msg = list(), list()
-    for line in gmsh_log:
-        if "error" in line.lower():
-            err_msg.append(line)
-        if "warning" in line.lower():
-            warn_msg.append(line)
-    gmsh_logger.info("**********")
-    gmsh_logger.info(
-        f"{len(gmsh_log)} logging messages got from Gmsh : "
-        f"{len(err_msg)} errors, {len(warn_msg)} warnings."
-    )
-    if err_msg:
-        gmsh_logger.error("Gmsh errors details :")
-        for line in err_msg:
-            gmsh_logger.error(line)
-    if warn_msg:
-        gmsh_logger.warning("Gmsh warnings details :")
-        for line in warn_msg:
-            gmsh_logger.warning(line)
-    gmsh_logger.debug("All gmsh messages :")
-    gmsh_logger.debug(gmsh_log)
-    gmsh_logger.info("**********")
-    if detect_error and err_msg:
-        raise AssertionError("Gmsh logging messages signal errors.")
 # * Step 1 : Modeling the geometry of the part and the RVE
 geometry.init_geo_tools()
 geometry.set_gmsh_option("Mesh.MaxNumThreads2D", 4)
@@ -94,13 +65,13 @@ part_geo = mesh_generate_2D.Gmsh2DRVE.pantograph(
     a, b, k, r, nb_cells=(20, 1), soft_mat=False, name="panto_part"
 )
 # ! Utiliser autre chose que Gmsh2DRVE
-process_gmsh_log(gmsh.logger.get())
+toolbox_gmsh.process_gmsh_log(gmsh.logger.get())
 gmsh.logger.stop()
 gmsh.logger.start()
 rve_geo = mesh_generate_2D.Gmsh2DRVE.pantograph(
     a, b, k, r, nb_cells=(1, 1), soft_mat=False, name="panto_rve"
 )
-process_gmsh_log(gmsh.logger.get())
+toolbox_gmsh.process_gmsh_log(gmsh.logger.get())
 gmsh.logger.stop()
 # * Step 2 : Generating the meshes
@@ -111,7 +82,7 @@ for geo_model in [part_geo, rve_geo]:
     geo_model.main_mesh_refinement(d_min_max, lc_min_max, False)
     gmsh.logger.start()
     geo_model.mesh_generate()
-    process_gmsh_log(gmsh.logger.get())
+    toolbox_gmsh.process_gmsh_log(gmsh.logger.get())
     gmsh.logger.stop()
 # * Step 2 : Defining the material properties
@@ -123,7 +94,7 @@ material_dict_rve = {rve_geo.phy_surf[0].tag: materials.Material(E, nu, "cp")}
 # * Step 3 : Calculation of the full-scale solution
 # * Step 3.1 : Create a part object
-panto_part = part.Fenics2DRVE.file_2_FenicsPart(
+panto_part = part.FenicsPart.file_2_FenicsPart(
     part_geo.mesh_abs_path,
     material_dict_part,
     global_dimensions=part_geo.gen_vect,
@@ -232,71 +203,11 @@ macro_fields = {
     "EGG": [U_hom[3]] + [0] * 11,
 }
-per_scalar_fnct_cpp_code = """
-#include <pybind11/pybind11.h>
-#include <pybind11/eigen.h>
-namespace py = pybind11;
-#include <dolfin/function/Expression.h>
-#include <dolfin/function/Function.h>
-class PeriodicExpr : public dolfin::Expression
-{
-public:
-  PeriodicExpr() : dolfin::Expression() {}
-  void eval(
-    Eigen::Ref<Eigen::VectorXd> values,
-    Eigen::Ref<const Eigen::VectorXd> x, const ufc::cell& cell
-  ) const override
-  {
-    Eigen::Vector2d coord_equi;
-    coord_equi[0] = x[0] -per_x*floor(x[0]/per_x);
-    coord_equi[1] = x[1] -per_y*floor(x[1]/per_y);
-    f->eval(values, coord_equi);
-    //passer par un pointeur ? *f->eval(values, coord_equi);
-  }
-  std::shared_ptr<dolfin::Function> f;
-  double per_x;
-  double per_y;
-};
-PYBIND11_MODULE(SIGNATURE, m)
-{
-  py::class_<PeriodicExpr, std::shared_ptr<PeriodicExpr>, dolfin::Expression>
-  (m, "PeriodicExpr", py::dynamic_attr())
-  .def(py::init<>())
-  .def_readwrite("f", &PeriodicExpr::f)
-  .def_readwrite("per_x", &PeriodicExpr::per_x)
-  .def_readwrite("per_y", &PeriodicExpr::per_y);
-}
-"""
-# TODO Organisation du code à revoir
-# TODO Peut-on s'éviter la compilation du code à chaque fois qu'une instance de PeriodicExpr est crée ?
-# TODO la solution est peut-être dans un __init__ personnalisé :
-# TODO Idée : reprendre le init de la classe mère sans l'écraser totalement et simplement lui rajouter la partie :
-# TODO : __init(self, periodicity_vectors, local_function)
-# TODO : self._cpp_object = fe.compile_cpp_code(per_scalar_fnct_cpp_code).PeriodicExpr()
-# TODO self._cpp_object.per_x = per_x(periodicity_vectors) #per_x à définir, ajuster pour des cas où ce n'est pas un rectangle ?
-# TODO self._cpp_object.per_y = per_y(periodicity_vectors)
-class PeriodicExpr(fe.UserExpression):
-    def value_shape(self):
-        return ()
-    # TODO : Regarder si on peut faire un __init__ particulier,
-    # TODO tel qu'on conserve
-# periodic_cppcode = fe.compile_cpp_code(per_scalar_fnct_cpp_code).PeriodicExpr() #? Peut-on le sortir de la boucle ci-dessous ?
 key_conversion = {"U": "u", "Epsilon": "eps", "Sigma": "sigma"}
 localztn_expr = dict()
 for key, scd_dict in hom_model.localization.items():
-    if key == "EGGbis":  # ! Je passe avec une clé EGG ?! Pourquoi ?
+    if key == "EGGbis":
         continue
     localztn_expr[key] = dict()
     for scd_key, localztn_fields in scd_dict.items():
@@ -307,16 +218,9 @@ for key, scd_dict in hom_model.localization.items():
         for i, field in enumerate(localztn_fields):
             new_fields = list()  # 1 field per component of U, Sigma and Epsilon
             for component in field.split():
-                per_field = PeriodicExpr(degree=3)
-                periodic_cppcode = fe.compile_cpp_code(
-                    per_scalar_fnct_cpp_code
-                ).PeriodicExpr()
-                periodic_cppcode.per_x = rve_geo.gen_vect[0, 0]
-                periodic_cppcode.per_y = rve_geo.gen_vect[1, 1]
-                component.set_allow_extrapolation(True)
-                periodic_cppcode.f = component.cpp_object()
-                # https://bitbucket.org/fenics-project/dolfin/issues/1041/compiledexpression-cant-be-initialized
-                per_field._cpp_object = periodic_cppcode
+                per_field = periodicity.PeriodicExpr(
+                    component, rve_geo.gen_vect, degree=3
+                )
                 new_fields.append(per_field)
             localztn_expr[key][updated_key2].append(new_fields)
 function_spaces = {"u": model.displ_fspace, "eps": model.strain_fspace}
@@ -364,4 +268,3 @@ for f_name in reconstr_sol.keys():
     errors[f_name] = error
     print(f_name, error, difference, exact_norm)
     logger.info(f"Relative error for {f_name} = {error}")
@@ -10,7 +10,14 @@ from pathlib import Path
 import dolfin as fe
 import gmsh
-from ho_homog import *
+import ho_homog.geometry as geo
+from ho_homog import GEO_TOLERANCE, pckg_logger
+from ho_homog.mesh_generate_2D import Gmsh2DRVE
+from ho_homog.full_scale_pb import FullScaleModel
+from ho_homog.materials import Material
+from ho_homog.part import FenicsPart
+from ho_homog.periodicity import PeriodicDomain
+from ho_homog.toolbox_gmsh import process_gmsh_log
 logger = logging.getLogger("demo_full_scale")
 logger_root = logging.getLogger()
@@ -29,44 +36,14 @@ stream_handler.setLevel(logging.INFO)
 stream_handler.setFormatter(formatter)
 logger_root.addHandler(stream_handler)
-gmsh_logger = logging.getLogger("gmsh")
-gmsh_logger.setLevel(logging.INFO)
-def process_gmsh_log(gmsh_log: list, detect_error=True):
-    """Treatment of log messages gathered with gmsh.logger.get()"""
-    err_msg, warn_msg = list(), list()
-    for line in gmsh_log:
-        if "error" in line.lower():
-            err_msg.append(line)
-        if "warning" in line.lower():
-            warn_msg.append(line)
-    gmsh_logger.info("**********")
-    gmsh_logger.info(
-        f"{len(gmsh_log)} logging messages got from Gmsh : {len(err_msg)} errors, {len(warn_msg)} warnings."
-    )
-    if err_msg:
-        gmsh_logger.error("Gmsh errors details :")
-        for line in err_msg:
-            gmsh_logger.error(line)
-    if warn_msg:
-        gmsh_logger.warning("Gmsh warnings details :")
-        for line in warn_msg:
-            gmsh_logger.warning(line)
-    gmsh_logger.debug("All gmsh messages :")
-    gmsh_logger.debug(gmsh_log)
-    gmsh_logger.info("**********")
-    if detect_error and err_msg:
-        raise AssertionError("Gmsh logging messages signal errors.")
 # * Step 1 : Modeling the geometry of the part
-geometry.init_geo_tools()
+geo.init_geo_tools()
 a = 1
 b, k = a, a / 3
 r = 0.05
 gmsh.logger.start()
-panto_geo = mesh_generate_2D.Gmsh2DRVE.pantograph(
+panto_geo = Gmsh2DRVE.pantograph(
     a, b, k, 0.1, nb_cells=(10, 1), soft_mat=True, name="panto_with_soft"
 )
 process_gmsh_log(gmsh.logger.get())
@@ -93,16 +70,12 @@ E_nu_tuples = [(E1, nu1), (E2, nu2)]
 subdo_tags = tuple([subdo.tag for subdo in panto_geo.phy_surf])
 material_dict = dict()
 for coeff, tag in zip(E_nu_tuples, subdo_tags):
-    material_dict[tag] = materials.Material(coeff[0], coeff[1], "cp")
+    material_dict[tag] = Material(coeff[0], coeff[1], "cp")
 # * Step 3 : Create part object
-panto_part = part.Fenics2DRVE.file_2_FenicsPart(
-    panto_geo.mesh_abs_path,
-    material_dict,
-    global_dimensions=panto_geo.gen_vect,
-    subdomains_import=True,
+panto_part = FenicsPart.file_2_FenicsPart(
+    panto_geo.mesh_abs_path, material_dict, panto_geo.gen_vect, subdomains_import=True
 )
 LX = panto_part.global_dimensions[0, 0]
@@ -120,12 +93,12 @@ class RightBorder(fe.SubDomain):
 l_border = LeftBorder()
 r_border = RightBorder()
-boundary_markers = fe.MeshFunction("size_t", panto_part.mesh, dim=panto_part.dim-1)
+boundary_markers = fe.MeshFunction("size_t", panto_part.mesh, dim=panto_part.dim - 1)
 boundary_markers.set_all(9999)
 l_border.mark(boundary_markers, 11)
 r_border.mark(boundary_markers, 99)
-pbc = full_scale_pb.PeriodicDomain.pbc_dual_base(panto_part.global_dimensions, "Y")
+pbc = PeriodicDomain.pbc_dual_base(panto_part.global_dimensions, "Y")
 boundary_conditions = [
     {"type": "Periodic", "constraint": pbc},
     {
@@ -154,9 +127,8 @@ load = (2, s_load, indicator_fctn)
 element = ("Lagrange", 2)
 # * Step 7 : Gathering all data in a model
-model = full_scale_pb.FullScaleModel(panto_part, [load], boundary_conditions, element)
-model.set_solver("LU", "mumps")
+model = FullScaleModel(panto_part, [load], boundary_conditions, element)
+model.set_solver("mumps")
 # * Step 8 : Solving problem
 model.solve(results_file=Path("demo_results/demo_pantograph_full_scale.xdmf"))
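Note on the boundary markers above: l_border and r_border come from fe.SubDomain subclasses defined earlier in this script, outside the hunks shown (the hunk header only reveals class RightBorder(fe.SubDomain):). A minimal sketch of what such definitions typically look like, assuming the part spans [0, LX] along x and reusing the GEO_TOLERANCE imported above; the actual bodies in the repository may differ:

class LeftBorder(fe.SubDomain):
    def inside(self, x, on_boundary):
        # facets lying on the x = 0 edge of the part
        return on_boundary and fe.near(x[0], 0.0, GEO_TOLERANCE)

class RightBorder(fe.SubDomain):
    def inside(self, x, on_boundary):
        # facets lying on the x = LX edge of the part
        return on_boundary and fe.near(x[0], LX, GEO_TOLERANCE)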
@@ -9,12 +9,10 @@ import ho_homog.geometry as geo
 import ho_homog.mesh_tools as msh
 import numpy as np
 import logging
-from logging.handlers import RotatingFileHandler
 import gmsh
 import os
-import matplotlib.pyplot as plt
-from more_itertools import flatten
 from pathlib import Path
-from subprocess import run
 # nice shortcuts
 model = gmsh.model
@@ -23,29 +21,24 @@ factory = model.occ
 # * Logging
 logger = logging.getLogger(__name__)  # http://sametmax.com/ecrire-des-logs-en-python/
 logger.setLevel(logging.INFO)
-if __name__ == "__main__":
-    logger_root = logging.getLogger()
-    logger_root.setLevel(logging.INFO)
-    formatter = logging.Formatter(
-        "%(asctime)s :: %(levelname)s :: %(name)s :: %(message)s", "%Y-%m-%d %H:%M:%S"
-    )
-    log_path = Path.home().joinpath("Desktop/activity.log")
-    file_handler = RotatingFileHandler(str(log_path), "a", 1000000, 10)
-    file_handler.setLevel(logging.DEBUG)
-    file_handler.setFormatter(formatter)
-    logger_root.addHandler(file_handler)  # Pour écriture d'un fichier log
-    formatter = logging.Formatter(
-        "%(asctime)s :: %(levelname)s :: %(message)s", "%H:%M"
-    )
-    stream_handler = logging.StreamHandler()
-    stream_handler.setLevel(logging.INFO)
-    stream_handler.setFormatter(formatter)
-    logger_root.addHandler(stream_handler)
+formatter = logging.Formatter("%(asctime)s :: %(levelname)s :: %(message)s", "%H:%M")
+stream_handler = logging.StreamHandler()
+stream_handler.setLevel(logging.INFO)
+stream_handler.setFormatter(formatter)
+logger.addHandler(stream_handler)
 geo.init_geo_tools()
 name = "pantograph"
 model.add(name)
+root = Path(__file__).resolve().parent.joinpath(name)
+geo_file = root.with_suffix(".brep")
+mesh_file = root.with_suffix(".msh")
+os.path.realpath(__file__)
 logger.info("Start defining the pantograph geometry")
 a = 1
 b = 1
@@ -65,11 +58,11 @@ E1 = geo.Point(e1)
 E2 = geo.Point(e2)
 E1m = geo.Point(-1 * e1)
 E2m = geo.Point(-1 * e2)
-O = np.zeros((3,))
-L = geo.Point(2 * (e1 + e2))
+O_pt = np.zeros((3,))
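Note on the path definitions added near the top of this script: root, geo_file and mesh_file are typically consumed once the geometry and the mesh are complete, further down in the file (outside the hunks shown). A minimal sketch of that final step with the standard gmsh Python API, given here only as an assumption about how these paths are used:

factory.synchronize()        # push the OCC entities to the gmsh model
gmsh.write(str(geo_file))    # export the geometry (.brep)
model.mesh.generate(2)       # build the 2D mesh
gmsh.write(str(mesh_file))   # export the mesh (.msh)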