diff --git a/README.md b/README.md index 00d18102..f4eccf82 100644 --- a/README.md +++ b/README.md @@ -1,61 +1,72 @@ # Crack Propagation in Brittle Materials ## 2024 -From little or nothing, to experimental verification of a complex fracture experiment. -We solve the following (difficult) problem: +`Irrevolutions` is a computational stability analysis toolkit designed to solve nonlinear and nonconvex evolutionary problems using advanced numerical methods. It provides efficient algorithms for computing solutions to constrained minimisation problems, with applications to irreversible evolutions (hence its name). In particular, this framework is relevant in the context of fracture and damage mechanics. -**Evolution of Damage (irreversible)** +**Irreversible Evolution of Damage** Let $y=(\alpha, u)$ be an admissible state of a brittle system where $\alpha: \Omega \mapsto [0, 1]$ is a smooth damage field which identifies cracks (where $\alpha =1$) and $u$ is a displacement field. Provided a material model (an energy) $E_\ell$, given a time horizon $T$, let's find a map $t \in [0, T]\mapsto y_t$ such that: damage is non-decreasing and the observed state $y_t$ is energy-minimal, among admissible variations. +## How to contribute -#### Feature branch workflow +### Reporting bugs +If you find a bug in `irrevolutions`, please report it on the GitHub issue tracker. -For each new feature you wish to implement, create a branch named ```{yourname}-{feature}```, -as in ```andres-meshes```. +### Suggesting enhancements +If you wish to suggest a new feature or an improvement of an existing feature, you can submit it on the issue tracker. -https://docs.gitlab.com/ee/gitlab-basics/feature_branch_workflow.html +### Contributing code (submitting a pull request) +To contribute code to `irrevolutions`, create a pull request. If you want to contribute but are unsure where to start, get in touch with the authors. - - Create your feature branch:`git checkout -b username-feature` - - To push your branch: `git push -u origin feature_branch_name` - - Create a pull request on the main branch for merging. Somebody should approve the pull-request. - +On opening a pull request, unit tests will run on GitHub Continuous Integration. You can click on these in the pull request to see where (if anywhere) the tests are failing. -### Weekly updates (merge from main) +For more details on the pull request workflow, see +https://docs.godotengine.org/en/3.1/community/contributing/pr_workflow.html + + + +### Installation + +Before installing `irrevolutions`, ensure you have `dolfinx` and other dependencies installed. You can install `dolfinx` using one of the following methods: + +- Using conda ``` -git checkout main -git pull -git checkout yourname-branch -git merge main +conda create -n fenicsx-env -c conda-forge fenics-dolfinx=0.7.2 mpich pyvista +conda activate fenicsx-env ``` -Asymmetrically, feature-work is `rebased`. +- Using Spack +see https://github.com/FEniCS/dolfinx/blob/main/README.md#spack -### To run the code (on Docker) +- Using Apt (Ubuntu) +``` +add-apt-repository ppa:fenics-packages/fenics +apt update +apt install fenicsx=1:0.7.3-3~ppa1~lunar1 +``` + +For detailed instructions, see https://github.com/FEniCS/dolfinx/blob/main/README.md#installation -First, run the container, attaching an interactive session and sharing data space -(the current dir) between the host and the container (the syntax is origin:target).
+- Using a Docker container -On an ARM-based machine: +For an ARM-based machine: ``` docker run --rm -ti -v "$(pwd)":/home/numerix -w /home/numerix kumiori3/numerix:stable ``` -On an AMD64 machine: +For an AMD64 machine: ``` docker run --rm -ti -v "$(pwd)":/home/numerix -w /home/numerix kumiori3/numerix:stable-amd64 ``` -On a windows box: +For a Windows box: ``` docker run --rm -ti -v "C:/...":/home/numerix" -w /home/numerix kumiori3\numerix:stable-amd64 ``` -### Installation - -To install the software, run DOLFINx through the aforementioned docker container and install the irrevolutions-package with +Finally, to install `irrevolutions`, head to the package root directory and run ```python3 -m pip install .``` -from the root of this repository ---- @@ -66,8 +77,11 @@ This code was initially conceived as a support for the teaching course MEC647, ### Acknowledgements + To all the students for their effort, participation, and motivation. +This project contains code from the DOLFINy project (https://github.com/fenics-dolfiny/dolfiny), which is licensed under the LGPLv3 license. We acknowledge and thank the DOLFINy contributors for their work. + See paper.md ### License @@ -76,9 +90,7 @@ See `LICENSE` file. Each file should have at least the "copyright" line and a pointer to where the full notice is found. - + Copyright or copyLeft (C) <~0> @@ -97,7 +109,7 @@ Each file should have at least the "copyright" line and a pointer to where the f book for further abstraction. You should have received a copy of the GNU General Public License - along with this program. If not, see . + along with `irrevolutions`. If not, see . diff --git a/contributed/DIC_CT_35/export_msh.py b/contributed/DIC_CT_35/export_msh.py index 194702fd..f388e3a9 100644 --- a/contributed/DIC_CT_35/export_msh.py +++ b/contributed/DIC_CT_35/export_msh.py @@ -2,9 +2,10 @@ # coding: utf-8 -from dolfin import * import os +from dolfin import * + filename = "mesh/DIC_running" @@ -13,7 +14,7 @@ os.remove(filename + ".msh") # xml to h5 (1-3) mesh = Mesh(filename + ".xml") -#boundaries = MeshFunction("size_t", mesh, "mesh4_facet_region.xml") +# boundaries = MeshFunction("size_t", mesh, "mesh4_facet_region.xml") subdomains = MeshFunction("size_t", mesh, filename + "_physical_region.xml") boundaries = MeshFunction("size_t", mesh, filename + "_facet_region.xml") @@ -30,13 +31,13 @@ os.remove(filename + ".xml") -#mesh = Mesh() -#hdf = HDF5File(mesh.mpi_comm(), filename + ".h5", "r") -#hdf.read(mesh, "/mesh", False) -#ndim = mesh.topology().dim() +# mesh = Mesh() +# hdf = HDF5File(mesh.mpi_comm(), filename + ".h5", "r") +# hdf.read(mesh, "/mesh", False) +# ndim = mesh.topology().dim() -#boundaries = MeshFunction("size_t", mesh,1) -#hdf.read(boundaries, "/boundaries") +# boundaries = MeshFunction("size_t", mesh,1) +# hdf.read(boundaries, "/boundaries") -#subdomains = MeshFunction("size_t", mesh,2) -#hdf.read(subdomains, "/subdomains") +# subdomains = MeshFunction("size_t", mesh,2) +# hdf.read(subdomains, "/subdomains") diff --git a/contributed/NOTCH/Notch_problem.py b/contributed/NOTCH/Notch_problem.py index 4ffbf24b..04d46779 100644 --- a/contributed/NOTCH/Notch_problem.py +++ b/contributed/NOTCH/Notch_problem.py @@ -1,56 +1,39 @@ # Numpy -> numerical library for Python. We'll use it for all array operations.
# It's written in C and it's faster (than traditional Python) -from algorithms import am -import algorithms -from models import DamageElasticityModel as Brittle -import models -from pyvista.utilities import xvfb -import pyvista -from utils.viz import plot_mesh, plot_vector, plot_scalar -from irrevolutions.utils import viz -from meshes import primitives -import meshes -import matplotlib.pyplot as plt -import gmsh -from dolfinx.io import XDMFFile -import ufl -from dolfinx.fem import ( - Constant, - Function, - FunctionSpace, - assemble_scalar, - dirichletbc, - form, - locate_dofs_geometrical, - set_bc, -) -import dolfinx.io import logging -from dolfinx import log -import dolfinx.plot -import dolfinx -from petsc4py import PETSc -import petsc4py -from mpi4py import MPI -import pdb -from pathlib import Path -import os -import numpy as np # Yaml (Yet another markup language) -> We'll use it to pass, read and structure # light text data in .yml files. -import yaml - # Json -> Another form to work with data. It comes from JavaScript. Similar functions # that Yaml. Used speacily with API request, when we need data "fetch". -import json - # Communication with the machine: # Sys -> allows to acess the system and launch commandes. # Os - > allows to acess the operation system. import sys + +import dolfinx +import dolfinx.io +import dolfinx.plot +import gmsh +import matplotlib.pyplot as plt +import meshes +import numpy as np +import pyvista +import ufl +from algorithms import am +from dolfinx.fem import ( + assemble_scalar, + dirichletbc, + locate_dofs_geometrical, + set_bc, +) +from models import DamageElasticityModel as Brittle +from petsc4py import PETSc +from pyvista.utilities import xvfb +from utils.viz import plot_mesh, plot_scalar, plot_vector + # -> this serves to add a path to the code search for things -sys.path.append('../') +sys.path.append("../") # pdb -> usefull for debugging, it can stop a code operation and allows to read # variables and do calculations @@ -85,65 +68,60 @@ parameters = { # In case of evolution (nonlinear) problems, it's necessary to define a max # and a min. For the elastic solution, just one value in needed. 
- 'loading': { - 'type': 'ID', # ID -> Imposed Displacement | IF -> Imposed Force - 'min': 0, - 'max': 1.5, - 'steps': 20 + "loading": { + "type": "ID", # ID -> Imposed Displacement | IF -> Imposed Force + "min": 0, + "max": 1.5, + "steps": 20, }, - 'geometry': { - 'geom_type': 'bar', - 'Lx': 1., - 'Ly': 0.01 + "geometry": {"geom_type": "bar", "Lx": 1.0, "Ly": 0.01}, + "model": { + "E": 1.0, + "nu": 0.3, + "mu": 0, # don't change it -> calculated later + "lmbda": 0, # don't change it -> calculated later + "w1": 1.0, + "ell": 0.01, + "k_res": 1.0e-8, }, - 'model': { - 'E': 1.0, - 'nu': 0.3, - 'mu': 0, # don't change it -> calculated later - 'lmbda': 0, # don't change it -> calculated later - 'w1': 1., - 'ell': 0.01, - 'k_res': 1.e-8 - }, - 'solvers': { - 'elasticity': { - 'snes': { - 'snes_type': 'newtontr', - 'snes_stol': 1e-8, - 'snes_atol': 1e-8, - 'snes_rtol': 1e-8, - 'snes_max_it': 100, - 'snes_monitor': "", - 'ksp_type': 'preonly', - 'pc_type': 'lu', - 'pc_factor_mat_solver_type': 'mumps' + "solvers": { + "elasticity": { + "snes": { + "snes_type": "newtontr", + "snes_stol": 1e-8, + "snes_atol": 1e-8, + "snes_rtol": 1e-8, + "snes_max_it": 100, + "snes_monitor": "", + "ksp_type": "preonly", + "pc_type": "lu", + "pc_factor_mat_solver_type": "mumps", } }, - 'damage': { - 'snes': { - 'snes_type': 'vinewtonrsls', - 'snes_stol': 1e-5, - 'snes_atol': 1e-5, - 'snes_rtol': 1e-8, - 'snes_max_it': 100, - 'snes_monitor': "", - 'ksp_type': 'preonly', - 'pc_type': 'lu', - 'pc_factor_mat_solver_type': 'mumps' + "damage": { + "snes": { + "snes_type": "vinewtonrsls", + "snes_stol": 1e-5, + "snes_atol": 1e-5, + "snes_rtol": 1e-8, + "snes_max_it": 100, + "snes_monitor": "", + "ksp_type": "preonly", + "pc_type": "lu", + "pc_factor_mat_solver_type": "mumps", }, }, - 'damage_elasticity': { + "damage_elasticity": { "max_it": 100, "alpha_rtol": 1.0e-5, - "criterion": "alpha_H1" - } - } + "criterion": "alpha_H1", + }, + }, } E = parameters["model"]["E"] poisson = parameters["model"]["nu"] -parameters['model']['lmbda'] = E * poisson / \ - ((1 + poisson) * (1 - 2 * poisson)) -parameters['model']['mu'] = E / (2 * (1 + poisson)) +parameters["model"]["lmbda"] = E * poisson / ((1 + poisson) * (1 - 2 * poisson)) +parameters["model"]["mu"] = E / (2 * (1 + poisson)) # parameters.get('loading') -> this parameters can be defined and obtained from # a external file. In the first exemple (mec647_VI_1), the parameters were # read from a .yml file. 
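# A minimal sketch (illustrative, not part of this patch) of reading the
# `parameters` dictionary above from an external YAML file, as the preceding
# comment describes for the first example (mec647_VI_1). The file name
# "parameters.yml" and its layout (mirroring the dictionary above) are assumptions.
import yaml

with open("parameters.yml") as f:
    parameters = yaml.safe_load(f)

# Derived elastic constants, recomputed exactly as in the lines just above.
E = parameters["model"]["E"]
poisson = parameters["model"]["nu"]
parameters["model"]["lmbda"] = E * poisson / ((1 + poisson) * (1 - 2 * poisson))
parameters["model"]["mu"] = E / (2 * (1 + poisson))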
@@ -159,7 +137,7 @@ def mesh_V( de2, key=0, show=False, - filename='mesh.unv', + filename="mesh.unv", order=1, ): """ @@ -187,8 +165,8 @@ def mesh_V( tdim = 2 model = gmsh.model() - model.add('TPB') - model.setCurrent('TPB') + model.add("TPB") + model.setCurrent("TPB") # Generating the points of the geometrie p0 = model.geo.addPoint(0.0, a, 0.0, de2, tag=0) p1 = model.geo.addPoint(hopen, 0.0, 0.0, de, tag=1) @@ -208,7 +186,7 @@ def mesh_V( notch_right = model.geo.addLine(p0, p1, tag=8) bot_right = model.geo.addLine(p1, p2, tag=9) right = model.geo.addLine(p2, p3, tag=10) - #top_right = model.geo.addLine(p3, p4, tag=11) + # top_right = model.geo.addLine(p3, p4, tag=11) if key == 0: top_right = model.geo.addLine(p3, p21, tag=11) top_left = model.geo.addLine(p22, p5, tag=12) @@ -224,19 +202,24 @@ def mesh_V( fissure = model.geo.addLine(p20, p0, tag=22) # Creating the surface using the lines created if key == 0: - perimeter = model.geo.addCurveLoop([notch_right, - bot_right, - right, - top_right, - load_right, - load_left, - top_left, - left, - bot_left, - notch_left]) + perimeter = model.geo.addCurveLoop( + [ + notch_right, + bot_right, + right, + top_right, + load_right, + load_left, + top_left, + left, + bot_left, + notch_left, + ] + ) elif key == 1: perimeter = model.geo.addCurveLoop( - [notch_right, bot_right, right, top_right, sym_plan, fissure]) + [notch_right, bot_right, right, top_right, sym_plan, fissure] + ) surface = model.geo.addPlaneSurface([perimeter]) # model.geo.addSurfaceLoop([surface,16]) model.mesh.setOrder(order) @@ -244,52 +227,52 @@ def mesh_V( # Creating Physical Groups to extract data from the geometrie if key == 0: gmsh.model.addPhysicalGroup(tdim - 1, [left], tag=101) - gmsh.model.setPhysicalName(tdim - 1, 101, 'Left') + gmsh.model.setPhysicalName(tdim - 1, 101, "Left") gmsh.model.addPhysicalGroup(tdim - 1, [right], tag=102) - gmsh.model.setPhysicalName(tdim - 1, 102, 'Right') + gmsh.model.setPhysicalName(tdim - 1, 102, "Right") gmsh.model.addPhysicalGroup(tdim - 2, [p6], tag=103) - gmsh.model.setPhysicalName(tdim - 2, 103, 'Left_point') + gmsh.model.setPhysicalName(tdim - 2, 103, "Left_point") gmsh.model.addPhysicalGroup(tdim - 2, [p2], tag=104) - gmsh.model.setPhysicalName(tdim - 2, 104, 'Right_point') + gmsh.model.setPhysicalName(tdim - 2, 104, "Right_point") gmsh.model.addPhysicalGroup(tdim - 2, [p4], tag=105) - gmsh.model.setPhysicalName(tdim - 2, 105, 'Load_point') + gmsh.model.setPhysicalName(tdim - 2, 105, "Load_point") gmsh.model.addPhysicalGroup(tdim - 2, [p0], tag=106) - gmsh.model.setPhysicalName(tdim - 2, 106, 'Notch_point') + gmsh.model.setPhysicalName(tdim - 2, 106, "Notch_point") gmsh.model.addPhysicalGroup(tdim - 1, [load_right], tag=107) - gmsh.model.setPhysicalName(tdim - 1, 107, 'load_right') + gmsh.model.setPhysicalName(tdim - 1, 107, "load_right") gmsh.model.addPhysicalGroup(tdim - 1, [load_left], tag=108) - gmsh.model.setPhysicalName(tdim - 1, 108, 'load_left') + gmsh.model.setPhysicalName(tdim - 1, 108, "load_left") gmsh.model.addPhysicalGroup(tdim, [surface], tag=110) - gmsh.model.setPhysicalName(tdim, 110, 'mesh_surface') + gmsh.model.setPhysicalName(tdim, 110, "mesh_surface") # Cast3M can't read Physical Groups of points (dim = 0). Instead, we check the number in the mesh and input in manually in the code. 
# The number of a node doesn't change if it's in a point of the geometry if key == 1: gmsh.model.addPhysicalGroup(tdim, [surface], tag=110) - gmsh.model.setPhysicalName(tdim, 110, 'mesh_surface') + gmsh.model.setPhysicalName(tdim, 110, "mesh_surface") gmsh.model.addPhysicalGroup(tdim - 1, [fissure], tag=111) - gmsh.model.setPhysicalName(tdim - 1, 111, 'fissure') + gmsh.model.setPhysicalName(tdim - 1, 111, "fissure") gmsh.model.addPhysicalGroup(tdim - 1, [sym_plan], tag=112) - gmsh.model.setPhysicalName(tdim - 1, 112, 'sym_plan') + gmsh.model.setPhysicalName(tdim - 1, 112, "sym_plan") - #gmsh.model.addPhysicalGroup(tdim-2, [p20], tag=113) - #gmsh.model.setPhysicalName(tdim-2, 113, 'Crack_tip') + # gmsh.model.addPhysicalGroup(tdim-2, [p20], tag=113) + # gmsh.model.setPhysicalName(tdim-2, 113, 'Crack_tip') - #gmsh.model.addPhysicalGroup(tdim-2, [p4], tag=114) - #gmsh.model.setPhysicalName(tdim-2, 114, 'Load_point') + # gmsh.model.addPhysicalGroup(tdim-2, [p4], tag=114) + # gmsh.model.setPhysicalName(tdim-2, 114, 'Load_point') - #gmsh.model.addPhysicalGroup(tdim-2, [p2], tag=115) - #gmsh.model.setPhysicalName(tdim-2, 115,'Right_point') + # gmsh.model.addPhysicalGroup(tdim-2, [p2], tag=115) + # gmsh.model.setPhysicalName(tdim-2, 115,'Right_point') # Generating the mesh model.geo.synchronize() model.mesh.generate(tdim) @@ -313,17 +296,16 @@ def mesh_V( # in order to apply Newman Bondary conditions, which means that is a condition # applied not in the displacement (variable of interest), but in the correspond # variable (in this case, force/pressure) -mesh, facet_tags = meshes.gmsh_model_to_mesh(gmsh_model, - cell_data=False, - facet_data=True, - gdim=2) +mesh, facet_tags = meshes.gmsh_model_to_mesh( + gmsh_model, cell_data=False, facet_data=True, gdim=2 +) # Plot mesh plt.figure() ax = plot_mesh(mesh) fig = ax.get_figure() -fig.savefig(f"mesh.png") +fig.savefig("mesh.png") # Functional setting # 'u' represents the displacement in this problem. In order to solve it, the @@ -343,10 +325,8 @@ def mesh_V( # The Lagrange elements are going to be defined in the mesh as such we take the # geometry of elements present in the mesh. -element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), - degree=1, dim=2) -element_alpha = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), - degree=1) +element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=2) +element_alpha = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), degree=1) # After defining the Finite Element in ufl, a association with dolfinx is made. # To inputs are necessary, the mesh and the element type created. In some sense, @@ -384,36 +364,34 @@ def mesh_V( # define the integral measures, as such one is a integral. dx = ufl.Measure("dx", domain=mesh) # -> volume measure # We include here the subdomain data generated at the gmsh file. -ds = ufl.Measure("ds", subdomain_data=facet_tags, - domain=mesh) # -> surface measure +ds = ufl.Measure("ds", subdomain_data=facet_tags, domain=mesh) # -> surface measure # ds() # dS = ufl.Measure("dS", domain = mesh) - inner boundaries of the mesh -> # not usefull -model = Brittle(parameters.get('model')) -state = {'u': u, 'alpha': alpha} +model = Brittle(parameters.get("model")) +state = {"u": u, "alpha": alpha} # The total energy density is calculated this time using a already written # function of the "model". This return the elasticity energy (with the a(alpha)) # and the damage energy term. 
To count for externals forces, it need to substract it # from the total energy # - ufl.dot(force,u)*ds(107) - ufl.dot(force,u)*ds(108) total_energy = model.total_energy_density(state) * dx -if parameters['loading']['type'] == 'ID': +if parameters["loading"]["type"] == "ID": total_energy = model.total_energy_density(state) * dx -if parameters['loading']['type'] == 'IF': +if parameters["loading"]["type"] == "IF": # Getting load parameters force = dolfinx.fem.Function(V_u, name="Contact_force") - loading_force = -1 * parameters['loading']['max'] + loading_force = -1 * parameters["loading"]["max"] force.interpolate( - lambda x: ( - np.zeros_like( - x[0]), - loading_force * - np.ones_like( - x[1]))) - total_energy = model.total_energy_density( - state) * dx - ufl.dot(force, u) * ds(107) - ufl.dot(force, u) * ds(108) + lambda x: (np.zeros_like(x[0]), loading_force * np.ones_like(x[1])) + ) + total_energy = ( + model.total_energy_density(state) * dx + - ufl.dot(force, u) * ds(107) + - ufl.dot(force, u) * ds(108) + ) # Boundary sets # Function that returns 'TRUE' if the point of the mesh is in the region you want @@ -425,7 +403,8 @@ def BC_points(x): # Y-coordinate return np.logical_and( np.logical_or(np.isclose(x[0], -L / 2), np.isclose(x[0], L / 2)), - np.isclose(x[1], 0)) + np.isclose(x[1], 0), + ) BC_entities = dolfinx.mesh.locate_entities_boundary(mesh, 0, BC_points) @@ -433,82 +412,77 @@ def BC_points(x): u_.interpolate(lambda x: (np.zeros_like(x[0]), np.zeros_like(x[1]))) # FOR IMPOSED FORCE : -if parameters['loading']['type'] == 'IF': +if parameters["loading"]["type"] == "IF": bcs_u = [dirichletbc(u_, BC_dofs)] # FOR IMPOSED DISPLACEMENT : -if parameters['loading']['type'] == 'ID': +if parameters["loading"]["type"] == "ID": + def ID_points(x): - return np.logical_and(np.equal(x[1], h), - np.logical_and(np.greater_equal(x[0], -1 * n), - np.less_equal(x[0], n) - )) + return np.logical_and( + np.equal(x[1], h), + np.logical_and(np.greater_equal(x[0], -1 * n), np.less_equal(x[0], n)), + ) + ID_entities = dolfinx.mesh.locate_entities_boundary(mesh, 0, ID_points) ID_dofs = dolfinx.fem.locate_dofs_topological(V_u, 0, ID_entities) - u_imposed.interpolate(lambda x: (np.zeros_like( - x[0]), -1 * parameters['loading']['max'] * np.ones_like(x[1]))) + u_imposed.interpolate( + lambda x: ( + np.zeros_like(x[0]), + -1 * parameters["loading"]["max"] * np.ones_like(x[1]), + ) + ) bcs_u = [dirichletbc(u_, BC_dofs), dirichletbc(u_imposed, ID_dofs)] -dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], -L / 2)) -dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], L / 2)) +dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], -L / 2)) +dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], L / 2)) BC_dofs_alpha = dolfinx.fem.locate_dofs_topological(V_alpha, 0, BC_entities) -if parameters['loading']['type'] == 'IF': +if parameters["loading"]["type"] == "IF": bcs_alpha = [ - dirichletbc(np.array(0., dtype=PETSc.ScalarType), - BC_dofs_alpha, - V_alpha) + dirichletbc(np.array(0.0, dtype=PETSc.ScalarType), BC_dofs_alpha, V_alpha) ] -if parameters['loading']['type'] == 'ID': - ID_dofs_alpha = dolfinx.fem.locate_dofs_topological( - V_alpha, 0, ID_entities) +if parameters["loading"]["type"] == "ID": + ID_dofs_alpha = dolfinx.fem.locate_dofs_topological(V_alpha, 0, ID_entities) bcs_alpha = [ - dirichletbc(np.array(0., dtype=PETSc.ScalarType), - np.concatenate( - [dofs_alpha_left, dofs_alpha_right, BC_dofs_alpha, 
ID_dofs_alpha]), - V_alpha) + dirichletbc( + np.array(0.0, dtype=PETSc.ScalarType), + np.concatenate( + [dofs_alpha_left, dofs_alpha_right, BC_dofs_alpha, ID_dofs_alpha] + ), + V_alpha, + ) ] bcs_alpha = [] -#dofs_alpha_left, dofs_alpha_right +# dofs_alpha_left, dofs_alpha_right bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} # Update the bounds set_bc(alpha_ub.vector, bcs_alpha) set_bc(alpha_lb.vector, bcs_alpha) -solve_it = am.AlternateMinimisation(total_energy, - state, - bcs, - parameters.get("solvers"), - bounds=(alpha_lb, alpha_ub)) +solve_it = am.AlternateMinimisation( + total_energy, state, bcs, parameters.get("solvers"), bounds=(alpha_lb, alpha_ub) +) # solve_it.elasticity # Loop for evolution -Loads = np.linspace(parameters.get("loading").get("min"), - parameters.get("loading").get("max"), - parameters.get("loading").get("steps")) - -data = { - 'elastic': [], - 'surface': [], - 'total': [], - 'load': [] -} +Loads = np.linspace( + parameters.get("loading").get("min"), + parameters.get("loading").get("max"), + parameters.get("loading").get("steps"), +) + +data = {"elastic": [], "surface": [], "total": [], "load": []} -for (i_t, t) in enumerate(Loads): +for i_t, t in enumerate(Loads): # update bondary conditions - if parameters['loading']['type'] == 'ID': - u_imposed.interpolate(lambda x: ( - np.zeros_like(x[0]), -1 * t * np.ones_like(x[1]))) - if parameters['loading']['type'] == 'IF': + if parameters["loading"]["type"] == "ID": + u_imposed.interpolate( + lambda x: (np.zeros_like(x[0]), -1 * t * np.ones_like(x[1])) + ) + if parameters["loading"]["type"] == "IF": force.interpolate( - lambda x: ( - np.zeros_like( - x[0]), - loading_force * - t * - np.ones_like( - x[1]))) + lambda x: (np.zeros_like(x[0]), loading_force * t * np.ones_like(x[1])) + ) # update lower bound for damage alpha.vector.copy(alpha_lb.vector) # solve for current load step @@ -516,31 +490,31 @@ def ID_points(x): # postprocessing # global surface_energy = assemble_scalar( - dolfinx.fem.form( - model.damage_energy_density(state) * dx)) - elastic_energy = assemble_scalar(dolfinx.fem.form( - model.elastic_energy_density(state) * dx)) + dolfinx.fem.form(model.damage_energy_density(state) * dx) + ) + elastic_energy = assemble_scalar( + dolfinx.fem.form(model.elastic_energy_density(state) * dx) + ) - data.get('elastic').append(elastic_energy) - data.get('surface').append(surface_energy) - data.get('total').append(surface_energy + elastic_energy) - data.get('load').append(t) + data.get("elastic").append(elastic_energy) + data.get("surface").append(surface_energy) + data.get("total").append(surface_energy + elastic_energy) + data.get("load").append(t) - print(f'Solved timestep {i_t}, load {t}') - print( - f'Elastic energy {elastic_energy:.3g}, Surface energy {surface_energy:.3g}') + print(f"Solved timestep {i_t}, load {t}") + print(f"Elastic energy {elastic_energy:.3g}, Surface energy {surface_energy:.3g}") # saving -plt.plot(data.get('load'), data.get('surface'), label='surface') -plt.plot(data.get('load'), data.get('elastic'), label='elastic') -#plt.plot(data.get('load'), [1./2. * t**2*L for t in data.get('load')], label='anal elast', ls=':', c='k') +plt.plot(data.get("load"), data.get("surface"), label="surface") +plt.plot(data.get("load"), data.get("elastic"), label="elastic") +# plt.plot(data.get('load'), [1./2. 
* t**2*L for t in data.get('load')], label='anal elast', ls=':', c='k') -plt.title('My specimen') +plt.title("My specimen") plt.legend() -#plt.yticks([0, 1/20], [0, '$1/2.\sigma_c^2/E_0$']) -#plt.xticks([0, 1], [0, 1]) +# plt.yticks([0, 1/20], [0, '$1/2.\sigma_c^2/E_0$']) +# plt.xticks([0, 1], [0, 1]) try: from dolfinx.plot import create_vtk_mesh as compute_topology @@ -577,7 +551,7 @@ def plot_scalar(alpha, plotter, subplot=None, lineproperties={}): shape=(1, 2), ) _plt = plot_scalar(alpha, plotter, subplot=(0, 0)) -_plt.screenshot(f"alpha.png") +_plt.screenshot("alpha.png") xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -589,14 +563,14 @@ def plot_scalar(alpha, plotter, subplot=None, lineproperties={}): # plt = plot_scalar(u.sub(0), plotter, subplot=(0, 0)) _plt = plot_vector(u, plotter, subplot=(0, 0)) -_plt.screenshot(f"displacement_MPI.png") +_plt.screenshot("displacement_MPI.png") plt.figure() -plt.plot(data.get('load'), data.get('surface'), label='surface') -plt.plot(data.get('load'), data.get('elastic'), label='elastic') -#plt.plot(data.get('load'), [1./2. * t**2*L for t in data.get('load')], label='anal elast', ls=':', c='k') +plt.plot(data.get("load"), data.get("surface"), label="surface") +plt.plot(data.get("load"), data.get("elastic"), label="elastic") +# plt.plot(data.get('load'), [1./2. * t**2*L for t in data.get('load')], label='anal elast', ls=':', c='k') -plt.title('My specimen') +plt.title("My specimen") plt.legend() -plt.savefig('energy.png') +plt.savefig("energy.png") diff --git a/contributed/solveModel/solveEP2.py b/contributed/solveModel/solveEP2.py index c292691a..9b7c2e5b 100644 --- a/contributed/solveModel/solveEP2.py +++ b/contributed/solveModel/solveEP2.py @@ -1,51 +1,32 @@ # library include -import pyvista -from solvers import SNESSolver -from utils.viz import plot_mesh, plot_vector, plot_scalar -from irrevolutions.utils import viz -from meshes import primitives -import meshes -from pyvista.utilities import xvfb +import logging +import sys + +import dolfinx +import dolfinx.io +import dolfinx.plot import matplotlib.pyplot as plt +import meshes +import numpy as np +import pyvista +import ufl +import yaml from dolfinx.fem import ( - Constant, Function, - FunctionSpace, - assemble_scalar, dirichletbc, - form, - locate_dofs_geometrical, - set_bc, ) -import dolfinx.io -import numpy as np -import yaml -import json -import sys -import os -from pathlib import Path - -from mpi4py import MPI - -import petsc4py +from meshes import primitives from petsc4py import PETSc - -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl - - -from dolfinx.io import XDMFFile - -import logging +from pyvista.utilities import xvfb +from solvers import SNESSolver +from utils.viz import plot_mesh, plot_scalar, plot_vector logging.basicConfig(level=logging.INFO) -sys.path.append('./') +sys.path.append("./") # meshes @@ -57,8 +38,7 @@ def plot_vector(u, plotter, subplot=None): plotter.subplot(subplot[0], subplot[1]) V = u.function_space mesh = V.mesh - topology, cell_types = dolfinx.plot.create_vtk_topology( - mesh, mesh.topology.dim) + topology, cell_types = dolfinx.plot.create_vtk_topology(mesh, mesh.topology.dim) num_dofs_local = u.function_space.dofmap.index_map.size_local geometry = u.function_space.tabulate_dof_coordinates()[:num_dofs_local] values = np.zeros((V.dofmap.index_map.size_local, 3), dtype=np.float64) @@ -84,8 +64,7 @@ def plot_scalar(alpha, plotter, subplot=None, lineproperties={}): plotter.subplot(subplot[0], subplot[1]) V = alpha.function_space 
mesh = V.mesh - topology, cell_types, _ = dolfinx.plot.create_vtk_mesh( - mesh, mesh.topology.dim) + topology, cell_types, _ = dolfinx.plot.create_vtk_mesh(mesh, mesh.topology.dim) grid = pyvista.UnstructuredGrid(topology, cell_types, mesh.geometry.x) plotter.subplot(0, 0) @@ -95,36 +74,27 @@ def plot_scalar(alpha, plotter, subplot=None, lineproperties={}): plotter.view_xy() return plotter + # Parameters parameters = { - 'loading': { - 'min': 0, - 'max': 1 - }, - 'geometry': { - 'geom_type': 'bar', - 'Lx': 5., - 'Ly': 15 - }, - 'model': { - 'mu': 1., - 'lmbda': 0. - }, - 'solvers': { - 'snes': { - 'snes_type': 'newtontr', - 'snes_stol': 1e-8, - 'snes_atol': 1e-8, - 'snes_rtol': 1e-8, - 'snes_max_it': 100, - 'snes_monitor': "", - 'ksp_type': 'preonly', - 'pc_type': 'lu', - 'pc_factor_mat_solver_type': 'mumps' + "loading": {"min": 0, "max": 1}, + "geometry": {"geom_type": "bar", "Lx": 5.0, "Ly": 15}, + "model": {"mu": 1.0, "lmbda": 0.0}, + "solvers": { + "snes": { + "snes_type": "newtontr", + "snes_stol": 1e-8, + "snes_atol": 1e-8, + "snes_rtol": 1e-8, + "snes_max_it": 100, + "snes_monitor": "", + "ksp_type": "preonly", + "pc_type": "lu", + "pc_factor_mat_solver_type": "mumps", } - } + }, } # parameters.get('loading') @@ -137,20 +107,16 @@ def plot_scalar(alpha, plotter, subplot=None, lineproperties={}): geom_type = parameters["geometry"]["geom_type"] -gmsh_model, tdim = primitives.mesh_ep_gmshapi(geom_type, - Lx, - Ly, - 1, - 0.5, - 0.3, - tdim=2) +gmsh_model, tdim = primitives.mesh_ep_gmshapi(geom_type, Lx, Ly, 1, 0.5, 0.3, tdim=2) -mesh, mts = meshes.gmsh_model_to_mesh(gmsh_model, - cell_data=False, - facet_data=True, - gdim=2, - exportMesh=True, - fileName="epTestMesh.msh") +mesh, mts = meshes.gmsh_model_to_mesh( + gmsh_model, + cell_data=False, + facet_data=True, + gdim=2, + exportMesh=True, + fileName="epTestMesh.msh", +) # TODO: Plot mesh @@ -158,16 +124,18 @@ def plot_scalar(alpha, plotter, subplot=None, lineproperties={}): plt.figure() ax = plot_mesh(mesh) fig = ax.get_figure() -fig.savefig(f"mesh.png") +fig.savefig("mesh.png") -boundaries = [(1, lambda x: np.isclose(x[0], 0)), - (2, lambda x: np.isclose(x[0], Lx)), - (3, lambda x: np.isclose(x[1], 0)), - (4, lambda x: np.isclose(x[1], Ly))] +boundaries = [ + (1, lambda x: np.isclose(x[0], 0)), + (2, lambda x: np.isclose(x[0], Lx)), + (3, lambda x: np.isclose(x[1], 0)), + (4, lambda x: np.isclose(x[1], Ly)), +] facet_indices, facet_markers = [], [] fdim = mesh.topology.dim - 1 -for (marker, locator) in boundaries: +for marker, locator in boundaries: facets = dolfinx.mesh.locate_entities(mesh, fdim, locator) facet_indices.append(facets) facet_markers.append(np.full(len(facets), marker)) @@ -175,15 +143,12 @@ def plot_scalar(alpha, plotter, subplot=None, lineproperties={}): facet_markers = np.array(np.hstack(facet_markers), dtype=np.int32) sorted_facets = np.argsort(facet_indices) facet_tag = dolfinx.mesh.MeshTags( - mesh, - fdim, - facet_indices[sorted_facets], - facet_markers[sorted_facets]) + mesh, fdim, facet_indices[sorted_facets], facet_markers[sorted_facets] +) # Functional setting -element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), - degree=1, dim=2) +element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=2) V_u = dolfinx.fem.FunctionSpace(mesh, element_u) u = dolfinx.fem.Function(V_u, name="Displacement") @@ -215,18 +180,16 @@ def plot_scalar(alpha, plotter, subplot=None, lineproperties={}): loc.set(1.0/100.) 
""" -#x = ufl.SpatialCoordinate(mesh) -#g = dolfinx.Expression ('4 *x[1]') +# x = ufl.SpatialCoordinate(mesh) +# g = dolfinx.Expression ('4 *x[1]') # boundary conditions g.interpolate(lambda x: (np.zeros_like(x[0]), np.ones_like(x[1]))) -g.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) +g.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) def left(x): - return np.isclose(x[0], 0.) + return np.isclose(x[0], 0.0) def right(x): @@ -234,34 +197,35 @@ def right(x): def bottom(x): - return np.isclose(x[1], 0.) + return np.isclose(x[1], 0.0) def top(x): return np.isclose(x[1], Ly) + # left side left_facets = dolfinx.mesh.locate_entities_boundary(mesh, 1, left) -left_dofs = dolfinx.fem.locate_dofs_topological(V_u, mesh.topology.dim - 1, - left_facets) +left_dofs = dolfinx.fem.locate_dofs_topological(V_u, mesh.topology.dim - 1, left_facets) # right side right_facets = dolfinx.mesh.locate_entities_boundary(mesh, 1, right) -right_dofs = dolfinx.fem.locate_dofs_topological(V_u, mesh.topology.dim - 1, - right_facets) +right_dofs = dolfinx.fem.locate_dofs_topological( + V_u, mesh.topology.dim - 1, right_facets +) top_facets = dolfinx.mesh.locate_entities_boundary(mesh, 1, top) -top_dofs = dolfinx.fem.locate_dofs_topological(V_u, mesh.topology.dim - 1, - top_facets) +top_dofs = dolfinx.fem.locate_dofs_topological(V_u, mesh.topology.dim - 1, top_facets) bottom_facets = dolfinx.mesh.locate_entities_boundary(mesh, 1, bottom) -bottom_dofs = dolfinx.fem.locate_dofs_topological(V_u, mesh.topology.dim - 1, - bottom_facets) +bottom_dofs = dolfinx.fem.locate_dofs_topological( + V_u, mesh.topology.dim - 1, bottom_facets +) # energy mu = parameters["model"]["mu"] @@ -272,11 +236,10 @@ def _e(u): return ufl.sym(ufl.grad(u)) -en_density = 1 / 2 * (2 * mu * ufl.inner(_e(u), _e(u))) + \ - lmbda * ufl.tr(_e(u))**2 +en_density = 1 / 2 * (2 * mu * ufl.inner(_e(u), _e(u))) + lmbda * ufl.tr(_e(u)) ** 2 energy = en_density * dx - ufl.inner(u, g) * dS(4) -#bcs = [dirichletbc(zero, bottom_dofs), dirichletbc(one, top_dofs)] +# bcs = [dirichletbc(zero, bottom_dofs), dirichletbc(one, top_dofs)] bcs = [dirichletbc(zero, bottom_dofs)] # solving @@ -313,4 +276,4 @@ def _e(u): # _plt = plot_scalar(u_.sub(0), plotter, subplot=(0, 0)) _plt = plot_vector(u, plotter, subplot=(0, 1)) -_plt.screenshot(f"displacement_MPI.png") +_plt.screenshot("displacement_MPI.png") diff --git a/demo/demo_bifurcation.py b/demo/demo_bifurcation.py index 80529b7f..605341d6 100644 --- a/demo/demo_bifurcation.py +++ b/demo/demo_bifurcation.py @@ -1,12 +1,19 @@ #!/usr/bin/env python3 -import pandas as pd -from irrevolutions.utils import ColorPrint -from irrevolutions.utils.plots import plot_energies -from irrevolutions.meshes.primitives import mesh_bar_gmshapi -from irrevolutions.algorithms.so import BifurcationSolver -from irrevolutions.algorithms.am import AlternateMinimisation -from irrevolutions.models import DamageElasticityModel as Brittle +import json +import logging +import os +import sys +from pathlib import Path + +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import petsc4py +import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, @@ -18,22 +25,14 @@ set_bc, ) from dolfinx.io import XDMFFile, gmshio -import numpy as np -import yaml -import json -from pathlib import Path -import sys -import os +from irrevolutions.algorithms.am import AlternateMinimisation +from 
irrevolutions.algorithms.so import BifurcationSolver +from irrevolutions.meshes.primitives import mesh_bar_gmshapi +from irrevolutions.models import DamageElasticityModel as Brittle +from irrevolutions.utils import ColorPrint +from irrevolutions.utils.plots import plot_energies from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -import ufl -import numpy as np -from dolfinx.common import list_timings - -import logging logging.basicConfig(level=logging.INFO) diff --git a/demo/demo_elasticity.py b/demo/demo_elasticity.py index 6d73f688..a4daf9ca 100644 --- a/demo/demo_elasticity.py +++ b/demo/demo_elasticity.py @@ -1,30 +1,27 @@ #!/usr/bin/env python3 -import pyvista -import dolfinx.mesh -from dolfinx.io import XDMFFile, gmshio -import numpy as np -import yaml import json -from pathlib import Path -import sys +import logging import os -from mpi4py import MPI -import petsc4py -from petsc4py import PETSc +import sys +from pathlib import Path + import dolfinx +import dolfinx.mesh import dolfinx.plot -from dolfinx import log +import numpy as np +import petsc4py +import pyvista import ufl - -from pyvista.utilities import xvfb - +import yaml +from dolfinx import log +from dolfinx.io import XDMFFile, gmshio +from irrevolutions.meshes.primitives import mesh_bar_gmshapi from irrevolutions.models import ElasticityModel from irrevolutions.solvers import SNESSolver as ElasticitySolver -from irrevolutions.meshes.primitives import mesh_bar_gmshapi from irrevolutions.utils.viz import plot_vector - -import numpy as np -import logging +from mpi4py import MPI +from petsc4py import PETSc +from pyvista.utilities import xvfb logging.basicConfig(level=logging.INFO) diff --git a/demo/demo_traction.py b/demo/demo_traction.py index cfdeb5ed..e3d6072b 100644 --- a/demo/demo_traction.py +++ b/demo/demo_traction.py @@ -1,13 +1,20 @@ #!/usr/bin/env python3 +import json +import logging +import os +import sys +from pathlib import Path + +import dolfinx +import dolfinx.mesh +import dolfinx.plot +import numpy as np import pandas as pd -from irrevolutions.utils.plots import plot_energies, plot_force_displacement -from irrevolutions.utils.viz import plot_scalar, plot_vector -from irrevolutions.meshes.primitives import mesh_bar_gmshapi -from irrevolutions.algorithms.am import AlternateMinimisation, HybridSolver -from irrevolutions.models import DamageElasticityModel as Brittle +import petsc4py import pyvista -from pyvista.utilities import xvfb -import dolfinx.mesh +import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, @@ -19,23 +26,14 @@ set_bc, ) from dolfinx.io import XDMFFile, gmshio -import numpy as np -import yaml -import json -from pathlib import Path -import sys -import os +from irrevolutions.algorithms.am import AlternateMinimisation, HybridSolver +from irrevolutions.meshes.primitives import mesh_bar_gmshapi +from irrevolutions.models import DamageElasticityModel as Brittle +from irrevolutions.utils.plots import plot_energies, plot_force_displacement +from irrevolutions.utils.viz import plot_scalar, plot_vector from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -import ufl -import numpy as np - -from dolfinx.common import list_timings - -import logging +from pyvista.utilities import xvfb logging.basicConfig(level=logging.INFO) diff --git a/demo/demo_vector_info.py b/demo/demo_vector_info.py index 789ee8dd..b4903ba8 100644 --- a/demo/demo_vector_info.py +++ 
b/demo/demo_vector_info.py @@ -1,5 +1,5 @@ -from petsc4py import PETSc import numpy as np +from petsc4py import PETSc def display_vector_info(v): diff --git a/demo/demo_vi.py b/demo/demo_vi.py index 76c1c7a0..9851c0ac 100644 --- a/demo/demo_vi.py +++ b/demo/demo_vi.py @@ -1,33 +1,28 @@ -from dolfinx.fem.assemble import assemble_scalar import logging -import numpy as np +import os +import sys +from pathlib import Path import dolfinx -import dolfinx.plot import dolfinx.io +import dolfinx.mesh +import dolfinx.plot +import numpy as np +import petsc4py +import pyvista +import ufl +import yaml from dolfinx.fem import ( Function, FunctionSpace, - assemble_scalar, dirichletbc, ) -import dolfinx.mesh +from dolfinx.fem.assemble import assemble_scalar from dolfinx.mesh import CellType -import ufl - -from mpi4py import MPI -import petsc4py -import sys -import yaml -import os -from pathlib import Path -import pyvista -from pyvista.utilities import xvfb - -import dolfinx.plot - from irrevolutions.solvers import SNESSolver from irrevolutions.utils.viz import plot_profile, plot_scalar +from mpi4py import MPI +from pyvista.utilities import xvfb petsc4py.init(sys.argv) diff --git a/paper/paper.bib b/paper/paper.bib index 50850885..446fa623 100644 --- a/paper/paper.bib +++ b/paper/paper.bib @@ -1,9 +1,6 @@ %% This BibTeX bibliography file was created using BibDesk. %% http://bibdesk.sourceforge.net/ -%% Created for pierluigi at 2024-08-02 17:21:35 +0900 - - %% Saved with string encoding Unicode (UTF-8) @@ -26,11 +23,9 @@ @article{SICSIC @article{bazant, abstract = {We consider a wide class of gradient damage models which are characterized by two constitutive functions after a normalization of the scalar damage parameter. The evolution problem is formulated following a variational approach based on the principles of irreversibility, stability and energy balance. Applied to a monotonically increasing traction test of a one-dimensional bar, we consider the homogeneous response where both the strain and the damage fields are uniform in space. In the case of a softening behavior, we show that the homogeneous state of the bar at a given time is stable provided that the length of the bar is less than a state dependent critical value and unstable otherwise. However, we also show that bifurcations can appear even if the homogeneous state is stable. All these results are obtained in a closed form. Finally, we propose a practical method to identify the two constitutive functions. This method is based on the measure of the homogeneous response in a situation where this response is stable without possibility of bifurcation, and on a procedure which gives the opportunity to detect its loss of stability. All the theoretical analyses are illustrated by examples.}, author = {Zden\v{e}k P. 
Ba\v{z}ant}, - date-added = {2024-08-02 17:17:28 +0900}, - date-modified = {2024-08-02 17:18:29 +0900}, doi = {10.1061/(ASCE)0733-9399(1988)114:12(2013)}, journal = {Journal of Engineering Mechanics}, - title = {Stable States and Paths of Stmuctures with Plasticity or Damage}, + title = {{Stable States and Paths of Structures with Plasticity or Damage}}, volume = {114}, year = {1988}, bdsk-url-1 = {https://www.sciencedirect.com/science/article/pii/S002250961100055X}, @@ -104,7 +99,7 @@ @unpublished{camilla date-added = {2024-08-02 16:57:06 +0900}, date-modified = {2024-08-02 16:58:32 +0900}, journal = {preprint, \url{https://hal.sorbonne-universite.fr/hal-04552309}}, - title = {Stability and crack nucleation in variational phase-field models of fracture: effects of length-scales and stress multi-axiality}, + title = {{Stability and crack nucleation in variational phase-field models of fracture: effects of length-scales and stress multi-axiality}}, year = {}} @article{Pham2013aa, @@ -112,14 +107,13 @@ @article{Pham2013aa author = {Pham, K. and Marigo, J. -J.}, date = {2013/01/01}, date-added = {2024-08-02 14:24:09 +0900}, - date-modified = {2024-08-02 14:24:22 +0900}, doi = {10.1007/s10659-012-9382-5}, id = {Pham2013}, isbn = {1573-2681}, journal = {Journal of Elasticity}, number = {1}, pages = {63--93}, - title = {Stability of Homogeneous States with Gradient Damage Models: Size Effects and Shape Effects in the Three-Dimensional Setting}, + title = {{Stability of Homogeneous States with Gradient Damage Models: Size Effects and Shape Effects in the Three-Dimensional Setting}}, volume = {110}, year = {2013}, bdsk-url-1 = {https://doi.org/10.1007/s10659-012-9382-5}} @@ -176,14 +170,13 @@ @article{FRANCFORT @article{dalcinpazklercosimo2011, author = {Lisandro D. Dalcin and Rodrigo R. Paz and Pablo A. Kler and Alejandro Cosimo}, date-added = {2024-08-02 11:42:21 +0900}, - date-modified = {2024-08-02 11:42:21 +0900}, doi = {10.1016/j.advwatres.2011.04.013}, issn = {0309-1708}, journal = {Advances in Water Resources}, note = {New Computational Methods and Software Tools}, number = {9}, pages = {1124 - 1139}, - title = {Parallel distributed computing using Python}, + title = {{Parallel distributed computing using Python}}, volume = {34}, year = {2011}, bdsk-url-1 = {https://doi.org/10.1016/j.advwatres.2011.04.013}} @@ -191,14 +184,13 @@ @article{dalcinpazklercosimo2011 @article{moreau:hal-01867187, author = {Moreau, Jean Jacques}, date-added = {2024-07-31 15:03:36 +0900}, - date-modified = {2024-07-31 15:03:36 +0900}, hal_id = {hal-01867187}, hal_version = {v1}, journal = {{Comptes rendus hebdomadaires des s{\'e}ances de l'Acad{\'e}mie des sciences}}, pages = {238-240}, - pdf = {https://hal.science/hal-01867187/file/D%C3%A9composition_orthogonale_espace_hilbertien_Moreau_CRAS_1962.pdf}, publisher = {{Gauthier-Villars}}, title = {{D{\'e}composition orthogonale d'un espace hilbertien selon deux c{\^o}nes mutuellement polaires}}, + pdf = {https://hal.science/hal-01867187/file/D%C3%A9composition_orthogonale_espace_hilbertien_Moreau_CRAS_1962.pdf}, url = {https://hal.science/hal-01867187}, volume = {255}, year = {1962}, @@ -208,7 +200,6 @@ @article{pham:2011-the-issues abstract = {We consider a wide class of gradient damage models which are characterized by two constitutive functions after a normalization of the scalar damage parameter. The evolution problem is formulated following a variational approach based on the principles of irreversibility, stability and energy balance. 
Applied to a monotonically increasing traction test of a one-dimensional bar, we consider the homogeneous response where both the strain and the damage fields are uniform in space. In the case of a softening behavior, we show that the homogeneous state of the bar at a given time is stable provided that the length of the bar is less than a state dependent critical value and unstable otherwise. However, we also show that bifurcations can appear even if the homogeneous state is stable. All these results are obtained in a closed form. Finally, we propose a practical method to identify the two constitutive functions. This method is based on the measure of the homogeneous response in a situation where this response is stable without possibility of bifurcation, and on a procedure which gives the opportunity to detect its loss of stability. All the theoretical analyses are illustrated by examples.}, author = {Kim Pham and Jean-Jacques Marigo and Corrado Maurini}, date-added = {2024-07-31 14:57:29 +0900}, - date-modified = {2024-07-31 14:57:29 +0900}, doi = {10.1016/j.jmps.2011.03.010}, issn = {0022-5096}, journal = {Journal of the Mechanics and Physics of Solids}, @@ -216,7 +207,6 @@ @article{pham:2011-the-issues number = {6}, pages = {1163-1190}, title = {The issues of the uniqueness and the stability of the homogeneous response in uniaxial tests with gradient damage models}, - url = {https://www.sciencedirect.com/science/article/pii/S002250961100055X}, volume = {59}, year = {2011}, bdsk-url-1 = {https://www.sciencedirect.com/science/article/pii/S002250961100055X}, @@ -268,7 +258,7 @@ @article{marigo:2023-la-mecanique @url{Habera:aa, author = {Michal Habera and Andreas Zilian}, date-added = {2024-03-04 16:19:39 +0100}, - date-modified = {2024-03-04 16:21:00 +0100}, + date-modified = {2024-03-05 00:21:00 +0900}, title = {dolfiny: Python wrappers for DOLFINx}, url = {https://github.com/michalhabera/dolfiny}, year = {2024}, @@ -311,7 +301,7 @@ @article{leon-baldelli:2021-numerical author = {Le{\'o}n Baldelli, Andr{\'e}s A. 
and Maurini, Corrado}, da = {2021/07/01/}, date-added = {2021-05-16 16:45:21 +0200}, - date-modified = {2024-07-29 23:10:50 +0000}, + date-modified = {2024-08-16 14:51:57 +0900}, doi = {10.1016/j.jmps.2021.104424}, isbn = {0022-5096}, journal = {Journal of the Mechanics and Physics of Solids}, @@ -319,7 +309,6 @@ @article{leon-baldelli:2021-numerical pages = {104424}, title = {Numerical bifurcation and stability analysis of variational gradient-damage models for phase-field fracture}, ty = {JOUR}, - url = {https://www.sciencedirect.com/science/article/pii/S0022509621001010}, volume = {152}, year = {2021}, bdsk-file-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhbxBcAC4ALgAvAC4ALgAvAC4ALgAvAC4ALgAvAC4ALgAvAEwAaQBiAHIAYQByAHkALwBNAG8AYgBpAGwAZQAgAEQAbwBjAHUAbQBlAG4AdABzAC8AYwBvAG0AfgBhAHAAcABsAGUAfgBDAGwAbwB1AGQARABvAGMAcwAvAFAAYQBwAGUAcgBzAC8AbABlAG8DAQBuACAAYgBhAGwAZABlAGwAbABpAC0AMgAwADIAMQAgADIALgBwAGQAZk8RAcwAAAAAAcwAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAOGUkbtCRAAB/////xhsZZduIGJhbGRlbGxpLTIwMjEgMi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////3xvc9wAAAAAAAAAAAAUABQAACiBjdQAAAAAAAAAAAAAAAAAGUGFwZXJzAAIAXy86VXNlcnM6a3VtaW9yaTM6TGlicmFyeTpNb2JpbGUgRG9jdW1lbnRzOmNvbX5hcHBsZX5DbG91ZERvY3M6UGFwZXJzOmxlb8yBbiBiYWxkZWxsaS0yMDIxIDIucGRmAAAOADQAGQBsAGUAbwMBAG4AIABiAGEAbABkAGUAbABsAGkALQAyADAAMgAxACAAMgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAXVVzZXJzL2t1bWlvcmkzL0xpYnJhcnkvTW9iaWxlIERvY3VtZW50cy9jb21+YXBwbGV+Q2xvdWREb2NzL1BhcGVycy9sZW/MgW4gYmFsZGVsbGktMjAyMSAyLnBkZgAAEwABLwAAFQACAA///wAAAAgADQAaACQA3wAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAKv}, @@ -352,15 +341,14 @@ @article{pinto-da-costa:2010-cone-constrained author = {Pinto da Costa, A. and Seeger, A.}, date = {2010/01/01}, date-added = {2023-02-07 16:15:01 +0100}, - date-modified = {2023-02-07 16:15:01 +0100}, + date-modified = {2024-08-16 14:54:37 +0900}, doi = {10.1007/s10589-008-9167-8}, id = {Pinto da Costa2010}, isbn = {1573-2894}, journal = {Computational Optimization and Applications}, number = {1}, pages = {25--57}, - title = {Cone-constrained eigenvalue problems: theory and algorithms}, - url = {https://doi.org/10.1007/s10589-008-9167-8}, + title = {{Cone-constrained eigenvalue problems: theory and algorithms}}, volume = {45}, year = {2010}, bdsk-file-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxBaLi4vLi4vLi4vLi4vLi4vTGlicmFyeS9Nb2JpbGUgRG9jdW1lbnRzL2NvbX5hcHBsZX5DbG91ZERvY3MvUGFwZXJzL3BpbnRvIGRhIGNvc3RhLTIwMTAucGRmTxEBwAAAAAABwAACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAA4ZSRu0JEAAH/////F3BpbnRvIGRhIGNvc3RhLTIwMTAucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////fKsmAAAAAAAAAAAAABQAFAAAKIGN1AAAAAAAAAAAAAAAAAAZQYXBlcnMAAgBcLzpVc2VyczprdW1pb3JpMzpMaWJyYXJ5Ok1vYmlsZSBEb2N1bWVudHM6Y29tfmFwcGxlfkNsb3VkRG9jczpQYXBlcnM6cGludG8gZGEgY29zdGEtMjAxMC5wZGYADgAwABcAcABpAG4AdABvACAAZABhACAAYwBvAHMAdABhAC0AMgAwADEAMAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAWlVzZXJzL2t1bWlvcmkzL0xpYnJhcnkvTW9iaWxlIERvY3VtZW50cy9jb21+YXBwbGV+Q2xvdWREb2NzL1BhcGVycy9waW50byBkYSBjb3N0YS0yMDEwLnBkZgATAAEvAAAVAAIAD///AAAACAANABoAJACBAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAkU=}, diff --git a/paper/paper.md b/paper/paper.md index 1df40b4b..a1a78b87 100644 --- a/paper/paper.md +++ b/paper/paper.md @@ -37,7 +37,7 @@ We study irreversible evolutionary processes with a general energetic notion of # Statement of need -Quasi-static evolution problems arising in fracture are strongly nonlinear [@marigo:2023-la-mecanique], [@bourdin:2008-the-variational]. 
They can admit multiple solutions, or none [@leon-baldelli:2021-numerical]. This demands both a functional theoretical framework and practical computational tools for real case scenarios. Due to the lack of uniqueness of solutions, it is fundamental to leverage the full variational structure of the problem and investigate up to second order, to detect nucleation of stable modes and transitions of unstable states. The stability of a multiscale system along its nontrivial evolutionary paths in phase space is a key property that is difficult to check: numerically, for real case scenarios with several length scales involved, and analytically, in the infinite-dimensional setting. ~~The current literature in computational fracture mechanics predominantly focuses on unilateral first-order criteria, systematically neglecting the exploration of higher-order information for critical points.~~ **Despite the concept of unilateral stability is classical in the variational theory of irreversible systems [@mielke] and the mechanics of fracture [@FRANCFORT] (see also [@bazant, @PETRYK, @Quoc, @Quoc2002]), few studies have explored second-order criteria for crack nucleation and evolution. Although sporadic, these studies are significant, including [@pham:2011-the-issues], [@Pham2013aa], [@SICSIC], [@leon-baldelli:2021-numerical], and [@camilla].** The current literature in computational fracture mechanics predominantly focuses on unilateral first-order criteria, systematically neglecting the exploration of higher-order information for critical points. **To the best of our knowledge, no general numerical tools are available to address second-order criteria in evolutionary nonlinear irreversible systems and fracture mechanics.** +Quasi-static evolution problems arising in fracture are strongly nonlinear [@marigo:2023-la-mecanique], [@bourdin:2008-the-variational]. They can admit multiple solutions, or none [@leon-baldelli:2021-numerical]. This demands both a functional theoretical framework and practical computational tools for real case scenarios. Due to the lack of uniqueness of solutions, it is fundamental to leverage the full variational structure of the problem and investigate up to second order, to detect nucleation of stable modes and transitions of unstable states. The stability of a multiscale system along its nontrivial evolutionary paths in phase space is a key property that is difficult to check: numerically, for real case scenarios with several length scales involved, and analytically, in the infinite-dimensional setting. ~~The current literature in computational fracture mechanics predominantly focuses on unilateral first-order criteria, systematically neglecting the exploration of higher-order information for critical points.~~ **Despite the concept of unilateral stability is classical in the variational theory of irreversible systems [@mielke] and the mechanics of fracture [@FRANCFORT] (see also [@bazant], [ @PETRYK], [@Quoc], [@Quoc2002]), few studies have explored second-order criteria for crack nucleation and evolution. Although sporadic, these studies are significant, including [@pham:2011-the-issues], [@Pham2013aa], [@SICSIC], [@leon-baldelli:2021-numerical], and [@camilla].** The current literature in computational fracture mechanics predominantly focuses on unilateral first-order criteria, systematically neglecting the exploration of higher-order information for critical points. 
**To the best of our knowledge, no general numerical tools are available to address second-order criteria in evolutionary nonlinear irreversible systems and fracture mechanics.** To fill this gap, our nonlinear solvers offer a flexible toolkit for advanced stability analysis of systems which evolve with constraints. @@ -155,7 +155,7 @@ where $A=0$ if ${bc}^2<\pi^2 {a}, C=0$ if ${bc}^2>\pi^2 {a}$. $A$ and $C$ are ar $$ \beta^*(x)=\left\{ \begin{aligned} -C>0,\qquad & \text{ if }\pi^2 {a}>{bc}^2 \\ +C,\qquad & \text{ if }\pi^2 {a}>{bc}^2 \\ C+A \cos (\pi x),\qquad & \text{ if } \pi^2 {a}={bc}^2 \text{, with }C>0 \text{ and }|A| \leq C\\ C\left(1+\cos (\pi \frac{x}{{D}})\right), \qquad & \text{ if } \pi^2 {a}<{bc}^2, \text { for } x \in(0, {D}) \end{aligned}\right. diff --git a/playground/benchmark-umut-at2/vs_analytics_at2.py b/playground/benchmark-umut-at2/vs_analytics_at2.py index 22820452..e984ea7c 100644 --- a/playground/benchmark-umut-at2/vs_analytics_at2.py +++ b/playground/benchmark-umut-at2/vs_analytics_at2.py @@ -17,22 +17,30 @@ import ufl import yaml from dolfinx.common import list_timings -from dolfinx.fem import (Constant, Function, assemble_scalar, dirichletbc, - form, locate_dofs_geometrical, set_bc) -from dolfinx.fem.petsc import assemble_vector, set_bc +from dolfinx.fem import ( + Constant, + Function, + assemble_scalar, + dirichletbc, + form, + locate_dofs_geometrical, +) from dolfinx.io import XDMFFile from irrevolutions.algorithms.am import HybridSolver from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver -from irrevolutions.solvers import SNESSolver -from irrevolutions.solvers.function import vec_to_functions -from irrevolutions.test.test_1d import _AlternateMinimisation1D as am1d -from irrevolutions.utils import (ColorPrint, ResultsStorage, Visualization, - _logger, _write_history_data, history_data, - norm_H1, norm_L2) -from irrevolutions.utils.plots import (plot_AMit_load, plot_energies, - plot_force_displacement) -from irrevolutions.utils.viz import (plot_mesh, plot_profile, plot_scalar, - plot_vector) +from irrevolutions.utils import ( + ColorPrint, + Visualization, + _logger, + _write_history_data, + history_data, +) +from irrevolutions.utils.plots import ( + plot_AMit_load, + plot_energies, + plot_force_displacement, +) +from irrevolutions.utils.viz import plot_profile, plot_scalar from mpi4py import MPI from petsc4py import PETSc from pyvista.utilities import xvfb @@ -48,6 +56,7 @@ def a(alpha): # k_res = parameters["model"]['k_res'] return (1 - alpha) ** 2 + def w(alpha): """ Return the homogeneous damage energy term, @@ -58,8 +67,10 @@ def w(alpha): return alpha**2 # return alpha -def elastic_energy_density(state, - u_zero: Optional[dolfinx.fem.function.Function] = None): + +def elastic_energy_density( + state, u_zero: Optional[dolfinx.fem.function.Function] = None +): """ Returns the elastic energy density of the state. """ @@ -70,10 +81,10 @@ def elastic_energy_density(state, _mu = parameters["model"]["E"] _kappa = parameters["model"].get("kappa", 1.0) - + # energy_density = _mu / 2.0 * ufl.inner(eps, eps) energy_density = _mu / 2.0 * a(alpha) * ufl.inner(eps, eps) - + if u_zero is None: u_zero = Constant(u.function_space.mesh, 0.0) @@ -81,6 +92,7 @@ def elastic_energy_density(state, return energy_density + substrate_density + def damage_energy_density(state): """ Return the damage energy density of the state. 
@@ -93,11 +105,13 @@ def damage_energy_density(state): grad_alpha = ufl.grad(alpha) # Compute the damage dissipation density - damage_density = _w1 * w(alpha) + \ - _w1 * _ell**2 / 2. * ufl.dot(grad_alpha, grad_alpha) + damage_density = _w1 * w(alpha) + _w1 * _ell**2 / 2.0 * ufl.dot( + grad_alpha, grad_alpha + ) return damage_density + def stress(state): """ Return the one-dimensional stress @@ -108,6 +122,7 @@ def stress(state): return parameters["model"]["E"] * a(alpha) * u.dx() * dx + def run_computation(parameters, storage=None): Lx = parameters["geometry"]["Lx"] _nameExp = parameters["geometry"]["geom_type"] @@ -121,7 +136,7 @@ def run_computation(parameters, storage=None): outdir = os.path.join(os.path.dirname(__file__), "output") if storage is None: - prefix = os.path.join(outdir, f"thin-film-at2") + prefix = os.path.join(outdir, "thin-film-at2") else: prefix = storage @@ -186,24 +201,25 @@ def run_computation(parameters, storage=None): alpha_lb.interpolate(lambda x: np.zeros_like(x[0])) alpha_ub.interpolate(lambda x: np.ones_like(x[0])) - eps_t = dolfinx.fem.Constant(mesh, np.array(1., dtype=PETSc.ScalarType)) - u_zero.interpolate(lambda x: eps_t/2. * (2*x[0] - Lx)) - + eps_t = dolfinx.fem.Constant(mesh, np.array(1.0, dtype=PETSc.ScalarType)) + u_zero.interpolate(lambda x: eps_t / 2.0 * (2 * x[0] - Lx)) + for f in [zero_u, u_zero, alpha_lb, alpha_ub]: f.vector.ghostUpdate( addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) - bcs_u = [dirichletbc(u_zero, dofs_u_right), - dirichletbc(u_zero, dofs_u_left)] + bcs_u = [dirichletbc(u_zero, dofs_u_right), dirichletbc(u_zero, dofs_u_left)] # bcs_u = [] bcs_alpha = [] - + bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} - - total_energy = (elastic_energy_density(state, u_zero) + damage_energy_density(state)) * dx - + + total_energy = ( + elastic_energy_density(state, u_zero) + damage_energy_density(state) + ) * dx + load_par = parameters["loading"] loads = np.linspace(load_par["min"], load_par["max"], load_par["steps"]) @@ -227,12 +243,10 @@ def run_computation(parameters, storage=None): logging.basicConfig(level=logging.INFO) - for i_t, t in enumerate(loads): - eps_t.value = t - - u_zero.interpolate(lambda x: eps_t/2. 
* (2*x[0] - Lx)) + + u_zero.interpolate(lambda x: eps_t / 2.0 * (2 * x[0] - Lx)) u_zero.vector.ghostUpdate( addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) @@ -262,7 +276,7 @@ def run_computation(parameters, storage=None): stable = stability.solve(alpha_lb, eig0=z0, inertia=inertia) - with dolfinx.common.Timer(f"~Postprocessing and Vis") as timer: + with dolfinx.common.Timer("~Postprocessing and Vis") as timer: if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") @@ -270,7 +284,6 @@ def run_computation(parameters, storage=None): history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf" ) - xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -300,7 +313,7 @@ def run_computation(parameters, storage=None): plotter, lineproperties={ "c": "k", - "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}" + "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}", }, ) ax = _plt.gca() @@ -315,28 +328,20 @@ def run_computation(parameters, storage=None): plotter, fig=_plt, ax=ax, - lineproperties={ - "c": "r", - "label": "$u_0$" - }, + lineproperties={"c": "r", "label": "$u_0$"}, ) - _plt, data = plot_profile( u, points, plotter, fig=_plt, ax=ax, - lineproperties={ - "c": "g", - "label": "$u$" - }, + lineproperties={"c": "g", "label": "$u$"}, ) _plt.savefig(f"{prefix}/damage_profile-{i_t}.png") - fracture_energy = comm.allreduce( assemble_scalar(form(damage_energy_density(state) * dx)), op=MPI.SUM, @@ -358,7 +363,7 @@ def run_computation(parameters, storage=None): [elastic_energy, fracture_energy], ) history_data["F"].append(_F) - + with XDMFFile( comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 ) as file: @@ -370,12 +375,12 @@ def run_computation(parameters, storage=None): json.dump(history_data, a_file) a_file.close() - # df = pd.DataFrame(history_data) print(pd.DataFrame(history_data)) - + return history_data, stability.data, state + def load_parameters(file_path, ndofs, model="at2"): """ Load parameters from a YAML file. 
@@ -416,34 +421,36 @@ def load_parameters(file_path, ndofs, model="at2"): parameters["model"]["ell"] = 0.158114 parameters["model"]["k_res"] = 0.0 parameters["model"]["mu"] = 1 - parameters["model"]["kappa"] = (.34)**(-2) + parameters["model"]["kappa"] = (0.34) ** (-2) signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature + if __name__ == "__main__": # Set the logging level logging.basicConfig(level=logging.INFO) # Load parameters parameters, signature = load_parameters( - os.path.join(os.path.dirname(__file__), "parameters.yaml"), - ndofs=100, - model="at2") - + os.path.join(os.path.dirname(__file__), "parameters.yaml"), + ndofs=100, + model="at2", + ) + # Run computation _storage = f"output/thinfilm-1d/MPI-{MPI.COMM_WORLD.Get_size()}/{signature}" visualization = Visualization(_storage) - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, stability_data, state = run_computation(parameters, _storage) - + from irrevolutions.utils import table_timing_data + _timings = table_timing_data() visualization.save_table(_timings, "timing_data") list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) ColorPrint.print_bold(f"===================- {signature} -=================") ColorPrint.print_bold(f"===================- {_storage} -=================") - diff --git a/playground/benchmark-umut-at2/vs_analytics_at2_2d.py b/playground/benchmark-umut-at2/vs_analytics_at2_2d.py index b5438f0b..fcf8df1b 100644 --- a/playground/benchmark-umut-at2/vs_analytics_at2_2d.py +++ b/playground/benchmark-umut-at2/vs_analytics_at2_2d.py @@ -5,7 +5,6 @@ import os import sys from pathlib import Path -from typing import Optional import dolfinx import dolfinx.mesh @@ -17,24 +16,31 @@ import ufl import yaml from dolfinx.common import list_timings -from dolfinx.fem import (Constant, Function, assemble_scalar, dirichletbc, - form, locate_dofs_geometrical, set_bc) -from dolfinx.fem.petsc import assemble_vector, set_bc +from dolfinx.fem import ( + Constant, + Function, + assemble_scalar, + form, + locate_dofs_geometrical, +) from dolfinx.io import XDMFFile, gmshio from irrevolutions.algorithms.am import HybridSolver from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver from irrevolutions.meshes.primitives import mesh_bar_gmshapi -from irrevolutions.models import \ - BrittleMembraneOverElasticFoundation as ThinFilm -from irrevolutions.solvers.function import vec_to_functions -from irrevolutions.test.test_1d import _AlternateMinimisation1D as am1d -from irrevolutions.utils import (ColorPrint, ResultsStorage, Visualization, - _logger, _write_history_data, history_data, - norm_H1, norm_L2) -from irrevolutions.utils.plots import (plot_AMit_load, plot_energies, - plot_force_displacement) -from irrevolutions.utils.viz import (plot_mesh, plot_profile, plot_scalar, - plot_vector) +from irrevolutions.models import BrittleMembraneOverElasticFoundation as ThinFilm +from irrevolutions.utils import ( + ColorPrint, + Visualization, + _logger, + _write_history_data, + history_data, +) +from irrevolutions.utils.plots import ( + plot_AMit_load, + plot_energies, + plot_force_displacement, +) +from irrevolutions.utils.viz import plot_profile, plot_scalar, plot_vector from mpi4py import MPI from petsc4py import PETSc from pyvista.utilities import xvfb @@ -45,8 +51,8 @@ # Mesh on node model_rank and then distribute model_rank = 0 + class ThinFilmAT2(ThinFilm): - def w(self, alpha): """ 
Return the dissipated energy function as a function of the state @@ -55,6 +61,7 @@ def w(self, alpha): # Return w(alpha) function return alpha**2 + def stress(state): """ Return the one-dimensional stress @@ -65,13 +72,13 @@ def stress(state): return parameters["model"]["E"] * a(alpha) * u.dx() * dx -def run_computation(parameters, storage=None): +def run_computation(parameters, storage=None): Lx = parameters["geometry"]["Lx"] Ly = parameters["geometry"]["Ly"] geom_type = parameters["geometry"]["geom_type"] tdim = parameters["geometry"]["geometric_dimension"] - lc = parameters["model"]["ell"] / parameters["geometry"]["mesh_size_factor"] + lc = parameters["model"]["ell"] / parameters["geometry"]["mesh_size_factor"] _nameExp = parameters["geometry"]["geom_type"] @@ -79,7 +86,7 @@ def run_computation(parameters, storage=None): outdir = os.path.join(os.path.dirname(__file__), "output") if storage is None: - prefix = os.path.join(outdir, f"thin-film-at2-2d") + prefix = os.path.join(outdir, "thin-film-at2-2d") else: prefix = storage @@ -94,7 +101,7 @@ def run_computation(parameters, storage=None): with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) - + gmsh_model, tdim = mesh_bar_gmshapi(geom_type, Lx, Ly, lc, tdim) model_rank = 0 mesh, mts, fts = gmshio.model_to_mesh(gmsh_model, comm, model_rank, tdim) @@ -147,22 +154,22 @@ def run_computation(parameters, storage=None): alpha_ub.interpolate(lambda x: np.ones_like(x[0])) # eps_t = dolfinx.fem.Constant(mesh, np.array(1., dtype=PETSc.ScalarType)) - + for f in [zero_u, u_zero, alpha_lb, alpha_ub]: f.vector.ghostUpdate( addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) - # bcs_u = [dirichletbc(u_zero, dofs_u_right), + # bcs_u = [dirichletbc(u_zero, dofs_u_right), # dirichletbc(u_zero, dofs_u_left)] bcs_u = [] bcs_alpha = [] - + bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} - - tau = Constant(mesh, np.array(0., dtype=PETSc.ScalarType)) - eps_t = tau * ufl.as_tensor([[1., 0], [0, 0]]) + + tau = Constant(mesh, np.array(0.0, dtype=PETSc.ScalarType)) + eps_t = tau * ufl.as_tensor([[1.0, 0], [0, 0]]) model = ThinFilmAT2(parameters["model"], eps_0=eps_t) @@ -196,7 +203,7 @@ def run_computation(parameters, storage=None): for i_t, t in enumerate(loads): tau.value = t - + # update the lower bound alpha.vector.copy(alpha_lb.vector) alpha_lb.vector.ghostUpdate( @@ -224,7 +231,7 @@ def run_computation(parameters, storage=None): _logger.critical(f"-- Solving Stability (Stability) for t = {t:3.2f} --") stable = stability.solve(alpha_lb, eig0=z0, inertia=inertia) - with dolfinx.common.Timer(f"~Postprocessing and Vis") as timer: + with dolfinx.common.Timer("~Postprocessing and Vis") as timer: if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") @@ -232,7 +239,6 @@ def run_computation(parameters, storage=None): history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf" ) - xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -262,7 +268,7 @@ def run_computation(parameters, storage=None): plotter, lineproperties={ "c": "k", - "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}" + "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}", }, ) ax = _plt.gca() @@ -277,23 +283,16 @@ def run_computation(parameters, storage=None): plotter, fig=_plt, ax=ax, - lineproperties={ - "c": "r", - "label": "$u_0$" - }, + lineproperties={"c": "r", "label": "$u_0$"}, ) - _plt, data = plot_profile( u, points, plotter, 
fig=_plt, ax=ax, - lineproperties={ - "c": "g", - "label": "$u$" - }, + lineproperties={"c": "g", "label": "$u$"}, ) _plt.savefig(f"{prefix}/damage_profile-{i_t}.png") @@ -324,7 +323,7 @@ def run_computation(parameters, storage=None): [elastic_energy, fracture_energy], ) history_data["F"].append(stress) - + with XDMFFile( comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 ) as file: @@ -337,7 +336,7 @@ def run_computation(parameters, storage=None): a_file.close() xvfb.start_xvfb(wait=0.05) - + pyvista.OFF_SCREEN = True plotter = pyvista.Plotter( title="Thin Film", @@ -349,12 +348,13 @@ def run_computation(parameters, storage=None): _plt.screenshot(f"{prefix}/traction-state.png") _plt.close() - + # df = pd.DataFrame(history_data) print(pd.DataFrame(history_data)) - + return history_data, stability.data, state + def load_parameters(file_path, ndofs, model="at2"): """ Load parameters from a YAML file. @@ -385,46 +385,48 @@ def load_parameters(file_path, ndofs, model="at2"): parameters["geometry"]["mesh_size_factor"] = 3 parameters["geometry"]["Lx"] = 3 parameters["geometry"]["Ly"] = 5e-2 - + parameters["stability"]["cone"]["cone_max_it"] = 400000 parameters["stability"]["cone"]["cone_atol"] = 1e-6 parameters["stability"]["cone"]["cone_rtol"] = 1e-6 parameters["stability"]["cone"]["scaling"] = 1e-2 parameters["model"]["w1"] = 1 - parameters["model"]["ell"] = (0.158114)**2 / 2 + parameters["model"]["ell"] = (0.158114) ** 2 / 2 # parameters["model"]["ell"] = .1 parameters["model"]["k_res"] = 0.0 parameters["model"]["E"] = 1 - parameters["model"]["ell_e"] = .34 + parameters["model"]["ell_e"] = 0.34 signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature + if __name__ == "__main__": # Set the logging level logging.basicConfig(level=logging.INFO) # Load parameters parameters, signature = load_parameters( - os.path.join(os.path.dirname(__file__), "parameters.yaml"), - ndofs=100, - model="at2") - + os.path.join(os.path.dirname(__file__), "parameters.yaml"), + ndofs=100, + model="at2", + ) + # Run computation _storage = f"output/thinfilm-bar/MPI-{MPI.COMM_WORLD.Get_size()}/{signature}" visualization = Visualization(_storage) ColorPrint.print_bold(f"===================- {_storage} -=================") - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, stability_data, state = run_computation(parameters, _storage) - + from irrevolutions.utils import table_timing_data + _timings = table_timing_data() visualization.save_table(_timings, "timing_data") list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) ColorPrint.print_bold(f"===================- {signature} -=================") ColorPrint.print_bold(f"===================- {_storage} -=================") - diff --git a/playground/nb/postprocess.py b/playground/nb/postprocess.py index ed0cc788..a520b889 100644 --- a/playground/nb/postprocess.py +++ b/playground/nb/postprocess.py @@ -1,59 +1,51 @@ -from matplotlib.ticker import FormatStrFormatter -from matplotlib.ticker import ScalarFormatter -import matplotlib.pyplot as plt -import matplotlib.patches as patches -import numpy as np -import sympy as sp -import sys -import os -import sympy -import shutil -import math +import hashlib + # import xmltodict # import pickle import json +import os +import os.path + +import matplotlib.patches as patches +import matplotlib.pyplot as plt +import numpy as np + # import pandas -import pylab -from os import 
listdir import pandas as pd -import visuals -import hashlib import yaml +from matplotlib.ticker import FormatStrFormatter -import os.path - -print('postproc') +print("postproc") -elastic = 'C0' -homogen = 'C1' -localis = 'C2' -unstabl = 'C3' +elastic = "C0" +homogen = "C1" +localis = "C2" +unstabl = "C3" def load_data(rootdir): - # with open(rootdir + '/parameters.pkl', 'r') as f: # params = json.load(f) - with open(rootdir + '/parameters.yaml') as f: + with open(rootdir + "/parameters.yaml") as f: params = yaml.load(f, Loader=yaml.FullLoader) try: - with open(rootdir + '/time_data.json', 'r') as f: + with open(rootdir + "/time_data.json", "r") as f: data = json.load(f) - dataf = pd.DataFrame(data).sort_values('load') + dataf = pd.DataFrame(data).sort_values("load") # Continue with your code using the dataf DataFrame except FileNotFoundError: print("File 'time_data.json' not found. Handle this case accordingly.") dataf = pd.DataFrame() - if os.path.isfile(rootdir + '/signature.md5'): + if os.path.isfile(rootdir + "/signature.md5"): # print('sig file found') - with open(rootdir + '/signature.md5', 'r') as f: + with open(rootdir + "/signature.md5", "r") as f: signature = f.read() else: - print('no sig file found') - signature = hashlib.md5(str(params).encode('utf-8')).hexdigest() + print("no sig file found") + signature = hashlib.md5(str(params).encode("utf-8")).hexdigest() return params, dataf, signature @@ -61,10 +53,10 @@ def load_data(rootdir): def t_stab(ell, q=2): # coeff = 2.*np.pi*q/(q+1)**(3./2.)*np.sqrt(2) coeff_bif = 2 * np.pi * np.sqrt(1 / 6) - coeff = coeff_bif / ((q + 1) / (2. * q)) + coeff = coeff_bif / ((q + 1) / (2.0 * q)) if 1 / ell > coeff: # print(1/ell, coeff) - return 1. + return 1.0 else: return coeff * ell @@ -74,111 +66,97 @@ def t_bif(ell, q=2): coeff = 2 * np.pi * np.sqrt(1 / 6) if 1 / ell > coeff: # print(1/ell, coeff) - return 1. 
+ return 1.0 else: return coeff * ell / 1 def plot_loadticks(ax, tc, ell): - if t_stab(ell) - tc < .1: + if t_stab(ell) - tc < 0.1: # label = '$t_c=t_b=t_s$' ax.set_xticks([0, tc]) - ax.set_xticklabels(['0', '$t_c$=$t_b$=$t_s$']) + ax.set_xticklabels(["0", "$t_c$=$t_b$=$t_s$"]) else: ax.set_xticks([0, tc, t_bif(ell), t_stab(ell)]) - ax.set_xticklabels(['0', '$t_c$', '$t_b$', '$t_s$']) + ax.set_xticklabels(["0", "$t_c$", "$t_b$", "$t_s$"]) return ax def plot_fills(ax, ell, tc): ax.add_patch( - patches.Rectangle( - (0, - 0), - 1, - 10, - facecolor=elastic, - fill=True, - alpha=.3)) + patches.Rectangle((0, 0), 1, 10, facecolor=elastic, fill=True, alpha=0.3) + ) ax.add_patch( patches.Rectangle( - (tc, - 0), - t_bif(ell) - 1, - 10, - facecolor=homogen, - fill=True, - alpha=.3)) + (tc, 0), t_bif(ell) - 1, 10, facecolor=homogen, fill=True, alpha=0.3 + ) + ) ax.add_patch( patches.Rectangle( - (t_bif(ell), - 0), - t_stab(ell) - - t_bif(ell), + (t_bif(ell), 0), + t_stab(ell) - t_bif(ell), 10, - facecolor='w', + facecolor="w", fill=True, - alpha=.3)) + alpha=0.3, + ) + ) ax.add_patch( patches.Rectangle( - (t_stab(ell), - 0), - 10, - 10, - facecolor=localis, - fill=True, - alpha=.3)) + (t_stab(ell), 0), 10, 10, facecolor=localis, fill=True, alpha=0.3 + ) + ) return ax def plot_spectrum(params, data, tc, ax=None, tol=1e-12): - E0 = params['model']['E'] - w1 = params['model']['sigma_D0']**2 / E0 - ell = params['model']['ell'] + E0 = params["model"]["E"] + w1 = params["model"]["sigma_D0"] ** 2 / E0 + ell = params["model"]["ell"] fig = plt.figure() - for i, d in enumerate(data['eigs']): + for i, d in enumerate(data["eigs"]): if d is not (None and np.inf and np.nan): lend = len(d) if isinstance(d, list) else 1 - plt.scatter([(data['load'].values)[i]] * lend, d, - c=np.where(np.array(d) < tol, 'red', 'C2')) + plt.scatter( + [(data["load"].values)[i]] * lend, + d, + c=np.where(np.array(d) < tol, "red", "C2"), + ) # c=np.where(np.array(d) tol)[0]].values + loads = df["load"][np.where(np.array(mineig) < 0)[0]].values + plt.plot(loads, [1 / ell] * len(loads), c="k", marker="X") + loads = df["load"][np.where(np.array(mineig) > tol)[0]].values # plt.plot(loads, [1/ell]*len(loads), c='C2', marker='.') - elasticloads = np.where(df['load'] <= 1)[0] + elasticloads = np.where(df["load"] <= 1)[0] # plt.plot(loads, [1/ell]*len(loads), c='C0', marker='.') - plt.plot(loads[elasticloads[-1]::], [1 / ell] * - len(loads[elasticloads[-1]::]), c=homogen, marker='.') - plt.plot(loads[elasticloads], [1 / ell] * - len(loads[elasticloads]), c=elastic, marker='.') + plt.plot( + loads[elasticloads[-1] : :], + [1 / ell] * len(loads[elasticloads[-1] : :]), + c=homogen, + marker=".", + ) + plt.plot( + loads[elasticloads], + [1 / ell] * len(loads[elasticloads]), + c=elastic, + marker=".", + ) if debug: - print('1/ell, mineog', 1 / ell, mineig) - print('nonunique loads') + print("1/ell, mineog", 1 / ell, mineig) + print("nonunique loads") print(1 / ell, np.where(np.array(mineig) < tol)[0]) - print('unstable') + print("unstable") print(1 / ell, np.where(np.array(mineig) < 0)[0]) # plt.plot((20, 20), (20, 20), ls='-', c='C0', marker='+', label='$\\lambda_0<{}$'.format(tol)) - plt.plot((20, 20), (20, 20), ls='', c='k', - marker='X', label='incr. unstable') - plt.plot((20, 20), (20, 20), ls='', c=elastic, marker='.', label='elastic') - plt.plot((20, 20), (20, 20), ls='', c=homogen, - marker='.', label='incr. \\& state stable ') + plt.plot((20, 20), (20, 20), ls="", c="k", marker="X", label="incr. 
unstable") + plt.plot((20, 20), (20, 20), ls="", c=elastic, marker=".", label="elastic") + plt.plot( + (20, 20), + (20, 20), + ls="", + c=homogen, + marker=".", + label="incr. \\& state stable ", + ) q = 2 - coeff_sta = 2. * np.pi * q / (q + 1)**(3. / 2.) * np.sqrt(2) - coeff_bif = coeff_sta * (q + 1) / (2. * q) - loads = np.linspace(1., 10., 100) + coeff_sta = 2.0 * np.pi * q / (q + 1) ** (3.0 / 2.0) * np.sqrt(2) + coeff_bif = coeff_sta * (q + 1) / (2.0 * q) + loads = np.linspace(1.0, 10.0, 100) ax = plt.gca() # ax.plot(loads, [2.*2.*np.pi*q/(q+1)**(3./2.)*np.sqrt(2)/i for i in # loads], lw=3, c='k's) - ax.plot(loads, [coeff_sta / i for i in loads], - '-', c='k', label='$$t_s(L/\\ell)$$') - ax.plot(loads, [coeff_bif / i for i in loads], - '-.', c='k', label='$$t_b(L/\\ell)$$') + ax.plot(loads, [coeff_sta / i for i in loads], "-", c="k", label="$$t_s(L/\\ell)$$") + ax.plot( + loads, [coeff_bif / i for i in loads], "-.", c="k", label="$$t_b(L/\\ell)$$" + ) # plt.axvline(1.0, c='k', lw=1) - ax.fill_betweenx([coeff_sta / i for i in loads], loads, - 20., alpha=.3, facecolor=localis) + ax.fill_betweenx( + [coeff_sta / i for i in loads], loads, 20.0, alpha=0.3, facecolor=localis + ) # ax.fill_betweenx([coeff_bif/i for i in loads], 0, loads, alpha=.3, facecolor='C1') - ax.fill_betweenx([coeff_bif / i for i in loads], 1, - loads, alpha=.3, facecolor=homogen) + ax.fill_betweenx( + [coeff_bif / i for i in loads], 1, loads, alpha=0.3, facecolor=homogen + ) # ax.add_patch(patches.Rectangle((0, coeff_bif), 1, 10, facecolor = 'C0',fill=True, alpha=.3)) ax.add_patch( - patches.Rectangle( - (0, - 0), - 1, - 10, - facecolor=elastic, - fill=True, - alpha=.3)) + patches.Rectangle((0, 0), 1, 10, facecolor=elastic, fill=True, alpha=0.3) + ) ax.add_patch( patches.Rectangle( - (1, - coeff_sta), - 10, - 10, - facecolor=localis, - fill=True, - alpha=.3)) + (1, coeff_sta), 10, 10, facecolor=localis, fill=True, alpha=0.3 + ) + ) x1, y1 = [1, 1], [coeff_bif, 20] - plt.plot(x1, y1, lw=2, c='k') + plt.plot(x1, y1, lw=2, c="k") - plt.legend(loc='upper right') - plt.xlabel('$t$') - plt.ylabel('$$L/\\ell$$') - plt.ylim(0., 1.5 * coeff_sta) - plt.xlim(0., max(loads)) + plt.legend(loc="upper right") + plt.xlabel("$t$") + plt.ylabel("$$L/\\ell$$") + plt.ylim(0.0, 1.5 * coeff_sta) + plt.xlim(0.0, max(loads)) - ax.set_yticks([0, 1, 1 / .5, 1 / .25, coeff_sta, coeff_bif]) - ax.set_yticklabels(['0', '1', '2', '4', '$$\\ell_s$$', '$$\\ell_b$$']) + ax.set_yticks([0, 1, 1 / 0.5, 1 / 0.25, coeff_sta, coeff_bif]) + ax.set_yticklabels(["0", "1", "2", "4", "$$\\ell_s$$", "$$\\ell_b$$"]) # plt.loglog() plt.ylim(0.5, 3 * coeff_sta) @@ -414,54 +406,58 @@ def plot_stability(prefix, tol=1e-5): def load_cont(prefix): - with open(prefix + '/continuation_data.json', 'r') as f: + with open(prefix + "/continuation_data.json", "r") as f: data = json.load(f) - dataf = pd.DataFrame(data).sort_values('iteration') + dataf = pd.DataFrame(data).sort_values("iteration") return dataf def format_params(params): - return '$$\\ell = {:.2f}, \\nu = {:.1f}, \\sigma_c = {:.1f}, ' \ - 'E = {:.1f}$$'.format(params['material']['ell'], params['material']['nu'], - params['material']['sigma_D0'], params['material']['E']) + return ( + "$$\\ell = {:.2f}, \\nu = {:.1f}, \\sigma_c = {:.1f}, " "E = {:.1f}$$".format( + params["material"]["ell"], + params["material"]["nu"], + params["material"]["sigma_D0"], + params["material"]["E"], + ) + ) def _plot_spectrum(data): - def _stab_cnd(data): return [ - 0 if data["cone-stable"][i] == True else 1 for i in range(len(data))] + 
def _stab_cnd(data): + return [0 if data["cone-stable"][i] == True else 1 for i in range(len(data))] + # _uniq_cnd = [.3 if d[0]>0 else .7 for d in data['eigs']] """docstring for plotSpaceVsCone""" figure, axis = plt.subplots(1, 1) - _lambda_0 = [ - np.nan if isinstance( - a, list) else a for a in data["cone-eig"]] + _lambda_0 = [np.nan if isinstance(a, list) else a for a in data["cone-eig"]] # __lambda_0 = [e[0] for e in data['eigs']] - __lambda_0 = [e[0] if len(e) > 0 else np.nan for e in data['eigs']] + __lambda_0 = [e[0] if len(e) > 0 else np.nan for e in data["eigs"]] scale = __lambda_0[0] _colormap = _stab_cnd(data) axis.scatter( data.load, - np.array(_lambda_0) / - scale, + np.array(_lambda_0) / scale, c=_colormap, - cmap='RdYlGn_r', - alpha=.8, + cmap="RdYlGn_r", + alpha=0.8, s=200, - label='cone') + label="cone", + ) # axis.scatter(data.load, np.array(__lambda_0)/scale, c=_uniq_cnd, cmap = 'seismic', alpha=.8, label='space') axis.scatter( data.load, - np.array(__lambda_0) / - scale, - cmap='seismic', - alpha=.8, - label='space') - axis.axhline(0., c='k') - axis.set_xlabel('load') + np.array(__lambda_0) / scale, + cmap="seismic", + alpha=0.8, + label="space", + ) + axis.axhline(0.0, c="k") + axis.set_xlabel("load") axis.set_yticks([0, 1, -3]) - axis.set_ylabel('$min \\lambda / \\Lambda_0$') - axis.set_title('Min eig in cone vs. space') + axis.set_ylabel("$min \\lambda / \\Lambda_0$") + axis.set_title("Min eig in cone vs. space") axis.legend() return figure, axis @@ -482,29 +478,28 @@ def read_mode_data_from_npz(npz_file, time_step, num_points=-1, num_modes=1): """ mode_data = {} mode_data["mesh"] = npz_file["mesh"] - if 'time_steps' not in npz_file or time_step not in npz_file['time_steps']: + if "time_steps" not in npz_file or time_step not in npz_file["time_steps"]: print(f"No data available for timestep {time_step}.") return None - index = np.where(npz_file['time_steps'] == time_step)[0][0] + index = np.where(npz_file["time_steps"] == time_step)[0][0] for mode in range(1, num_modes + 1): - mode_key = f'mode_{mode}' + mode_key = f"mode_{mode}" - if mode_key not in npz_file['point_values'].item(): - print( - f"No data available for mode {mode} at timestep {time_step}.") + if mode_key not in npz_file["point_values"].item(): + print(f"No data available for mode {mode} at timestep {time_step}.") continue - fields = npz_file['point_values'].item()[mode_key] - if 'bifurcation_β' not in fields or 'stability_β' not in fields: + fields = npz_file["point_values"].item()[mode_key] + if "bifurcation_β" not in fields or "stability_β" not in fields: print(f"Incomplete data for mode {mode} at timestep {time_step}.") continue - field_β_bif_values = np.array(fields['bifurcation_β'][index]) - field_v_bif_values = np.array(fields['bifurcation_v'][index]) - field_β_stab_values = np.array(fields['stability_β'][index]) - field_v_stab_values = np.array(fields['stability_v'][index]) + field_β_bif_values = np.array(fields["bifurcation_β"][index]) + field_v_bif_values = np.array(fields["bifurcation_v"][index]) + field_β_stab_values = np.array(fields["stability_β"][index]) + field_v_stab_values = np.array(fields["stability_v"][index]) # Assuming x_values is known or can be obtained if num_points == -1: @@ -513,10 +508,10 @@ def read_mode_data_from_npz(npz_file, time_step, num_points=-1, num_modes=1): x_values = np.linspace(0, 1, num_points) mode_data["fields"] = { - 'bifurcation_β': field_β_bif_values, - 'bifurcation_v': field_v_bif_values, - 'stability_β': field_β_stab_values, - 'stability_v': 
field_v_stab_values, + "bifurcation_β": field_β_bif_values, + "bifurcation_v": field_v_bif_values, + "stability_β": field_β_stab_values, + "stability_v": field_v_stab_values, } mode_data["time_step"] = time_step @@ -529,83 +524,68 @@ def read_mode_data_from_npz(npz_file, time_step, num_points=-1, num_modes=1): def plot_fields_for_time_step(mode_shapes_data): x_values = mode_shapes_data["mesh"] fields = mode_shapes_data["fields"] - if 'bifurcation_β' in fields and 'stability_β' in fields: - bifurcation_values = np.array(fields['bifurcation_β']) - bifurcation_values_v = np.array(fields['bifurcation_v']) - stability_values = np.array(fields['stability_β']) - stability_values_v = np.array(fields['stability_v']) + if "bifurcation_β" in fields and "stability_β" in fields: + bifurcation_values = np.array(fields["bifurcation_β"]) + bifurcation_values_v = np.array(fields["bifurcation_v"]) + stability_values = np.array(fields["stability_β"]) + stability_values_v = np.array(fields["stability_v"]) fig, axes = plt.subplots(1, 2, figsize=(10, 5)) + axes[0].plot(x_values, bifurcation_values, label="numerical value", marker="o") axes[0].plot( - x_values, - bifurcation_values, - label='numerical value', - marker='o') - axes[0].plot( - x_values, - bifurcation_values_v, - label='numerical value', - marker='o') - axes[0].set_title(f'Bifurcation') - - axes[1].plot( - x_values, - stability_values, - label='numerical value', - marker='o') - axes[1].plot( - x_values, - stability_values_v, - label='numerical value', - marker='o') - axes[1].set_title(f'Stability') + x_values, bifurcation_values_v, label="numerical value", marker="o" + ) + axes[0].set_title("Bifurcation") + + axes[1].plot(x_values, stability_values, label="numerical value", marker="o") + axes[1].plot(x_values, stability_values_v, label="numerical value", marker="o") + axes[1].set_title("Stability") for axis in axes: - axis.axhline(0., c='k') + axis.axhline(0.0, c="k") return fig, axes def plot_operator_spectrum(data, parameters): figure, axis = plt.subplots(1, 1) - scale = data['eigs_ball'].values[0][0] + scale = data["eigs_ball"].values[0][0] tol = parameters["stability"]["cone"]["cone_rtol"] - colour = np.where(data['eigs_cone'] > tol, 'green', 'red') + colour = np.where(data["eigs_cone"] > tol, "green", "red") # Concatenate data for all load steps load_steps_all = np.concatenate( [ - np.full_like( - eigenvalues, - load_step) for load_step, - eigenvalues in zip( - data['load'], - data['eigs_ball'])]) - eigenvalues_all = np.concatenate(data['eigs_ball']) + np.full_like(eigenvalues, load_step) + for load_step, eigenvalues in zip(data["load"], data["eigs_ball"]) + ] + ) + eigenvalues_all = np.concatenate(data["eigs_ball"]) # Create a scatter plot with vertical alignment axis.scatter( load_steps_all, eigenvalues_all / scale, - marker='o', - c='C0', - label='Eigenvalues in vector space') + marker="o", + c="C0", + label="Eigenvalues in vector space", + ) axis.scatter( data.load, - data['eigs_cone'], - marker='d', + data["eigs_cone"], + marker="d", c=colour, s=60, - label='Eigenvalues in cone') + label="Eigenvalues in cone", + ) - axis.set_xlabel(r'Load $t$') - axis.set_ylabel('Eigenvalues') - axis.set_title( - 'Spectrum of Nonlinear Operator $H_{\\ell}:=E_\\ell\'\'(y_t)$') + axis.set_xlabel(r"Load $t$") + axis.set_ylabel("Eigenvalues") + axis.set_title("Spectrum of Nonlinear Operator $H_{\\ell}:=E_\\ell''(y_t)$") axis.set_yticks([0, 1, -3]) # axis.axhline(0., c='k') - axis.axhline(tol, c='k') - axis.set_ylim(-.5, 1.5) + axis.axhline(tol, c="k") + 
axis.set_ylim(-0.5, 1.5) axis.grid(True) axis.legend() diff --git a/playground/nb/visuals.py b/playground/nb/visuals.py index 9e949107..336f511d 100644 --- a/playground/nb/visuals.py +++ b/playground/nb/visuals.py @@ -1,128 +1,189 @@ - # coding: utf-8 # In[ ]: -def matplotlibdefaults(palette='medium',useTex=False): - from matplotlib import rcParams, cycler - lightgrey = '#CBCBCB' - grey = '#8C8C8C' - darkgrey = '#4D4D4D' - if palette == 'pastel': - cs = ['#fbb4ae', '#b3cde3', '#ccebc5', '#decbe4', '#fed9a6', '#ffffcc', '#e5d8bd', '#fddaec'] - elif palette == 'light': - cs = ['#8abde6', 'fbb258', '90cd97', 'f6aac9' , 'bfa554' , 'bc99c7' , 'eddd46' , 'f07e6e'] - elif palette == 'medium': - cs = ['#5da5da', 'faa43a' , '60bd68' , 'f17cb0' , 'b2912f' , 'b276b2' , 'decf3f' , 'f15854'] - elif palette == 'dark': - cs = ['#265dab', '#df5dab', '#059748' , '#e5120b' , '#9d722a' , '#7b3a96' , '#c7b42e' , '#cb201e'] + +def matplotlibdefaults(palette="medium", useTex=False): + from matplotlib import cycler, rcParams + + lightgrey = "#CBCBCB" + grey = "#8C8C8C" + darkgrey = "#4D4D4D" + if palette == "pastel": + cs = [ + "#fbb4ae", + "#b3cde3", + "#ccebc5", + "#decbe4", + "#fed9a6", + "#ffffcc", + "#e5d8bd", + "#fddaec", + ] + elif palette == "light": + cs = [ + "#8abde6", + "fbb258", + "90cd97", + "f6aac9", + "bfa554", + "bc99c7", + "eddd46", + "f07e6e", + ] + elif palette == "medium": + cs = [ + "#5da5da", + "faa43a", + "60bd68", + "f17cb0", + "b2912f", + "b276b2", + "decf3f", + "f15854", + ] + elif palette == "dark": + cs = [ + "#265dab", + "#df5dab", + "#059748", + "#e5120b", + "#9d722a", + "#7b3a96", + "#c7b42e", + "#cb201e", + ] else: - print('Unknown palette: Using medium') - cs = ['#5da5da', 'faa43a' , '60bd68' , 'f17cb0' , 'b2912f' , 'b276b2' , 'decf3f' , 'f15854'] - - rcParams['axes.labelsize'] = 18 - rcParams['axes.facecolor'] = 'none' # axes background color - rcParams['axes.edgecolor'] = grey # axes edge color - rcParams['axes.labelcolor'] = darkgrey - rcParams['axes.prop_cycle'] = cycler(color=cs) - - rcParams['xtick.labelsize'] = 18 - rcParams['ytick.labelsize'] = 18 - rcParams['xtick.color'] = grey - rcParams['ytick.color'] = grey - rcParams['xtick.direction'] = 'out' - rcParams['ytick.direction'] = 'out' - rcParams['xtick.major.width'] = 2 - rcParams['ytick.major.width'] = 2 - rcParams['xtick.major.size'] = 8 - rcParams['ytick.major.size'] = 8 - - rcParams['legend.fontsize'] = 12 - rcParams['font.family'] = 'serif' - rcParams['text.usetex'] = useTex + print("Unknown palette: Using medium") + cs = [ + "#5da5da", + "faa43a", + "60bd68", + "f17cb0", + "b2912f", + "b276b2", + "decf3f", + "f15854", + ] + + rcParams["axes.labelsize"] = 18 + rcParams["axes.facecolor"] = "none" # axes background color + rcParams["axes.edgecolor"] = grey # axes edge color + rcParams["axes.labelcolor"] = darkgrey + rcParams["axes.prop_cycle"] = cycler(color=cs) + + rcParams["xtick.labelsize"] = 18 + rcParams["ytick.labelsize"] = 18 + rcParams["xtick.color"] = grey + rcParams["ytick.color"] = grey + rcParams["xtick.direction"] = "out" + rcParams["ytick.direction"] = "out" + rcParams["xtick.major.width"] = 2 + rcParams["ytick.major.width"] = 2 + rcParams["xtick.major.size"] = 8 + rcParams["ytick.major.size"] = 8 + + rcParams["legend.fontsize"] = 12 + rcParams["font.family"] = "serif" + rcParams["text.usetex"] = useTex if useTex: - rcParams['font.serif'] = 'Computer Modern Roman' + rcParams["font.serif"] = "Computer Modern Roman" else: - rcParams['font.serif'] = 'Times' + rcParams["font.serif"] = "Times" + + 
rcParams["lines.linewidth"] = 2.0 + rcParams["lines.markersize"] = 8 + rcParams["lines.markeredgewidth"] = 0 + rcParams["lines.solid_joinstyle"] = "round" - rcParams['lines.linewidth'] = 2.0 - rcParams['lines.markersize'] = 8 - rcParams['lines.markeredgewidth'] = 0 - rcParams['lines.solid_joinstyle'] = 'round' + rcParams["figure.facecolor"] = "#FFFFFF" # figure facecolor; 0.75 is scalar gray - rcParams['figure.facecolor'] = '#FFFFFF' # figure facecolor; 0.75 is scalar gray - - rcParams['axes.linewidth'] = 2.0 - rcParams['axes.titlesize'] = 12 - rcParams['text.color'] = darkgrey + rcParams["axes.linewidth"] = 2.0 + rcParams["axes.titlesize"] = 12 + rcParams["text.color"] = darkgrey - rcParams['grid.color'] = lightgrey - rcParams['grid.linestyle'] = '-' - rcParams['grid.linewidth'] = 0.25 # in points - rcParams['grid.alpha'] = .5 # transparency, between 0.0 and 1.0 + rcParams["grid.color"] = lightgrey + rcParams["grid.linestyle"] = "-" + rcParams["grid.linewidth"] = 0.25 # in points + rcParams["grid.alpha"] = 0.5 # transparency, between 0.0 and 1.0 - rcParams['legend.frameon'] = False - rcParams['legend.labelspacing'] = 0.25 + rcParams["legend.frameon"] = False + rcParams["legend.labelspacing"] = 0.25 def setspines(): import matplotlib.pylab + for i in matplotlib.pylab.get_fignums(): for j in matplotlib.pylab.figure(i).get_axes(): - j.spines['top'].set_color('none') - j.spines['right'].set_color('none') - j.tick_params(axis='both',top='off',right='off',which='both',colors='#8C8C8C') - #j.spines['left'].set_position(('outward',10)) - #j.spines['bottom'].set_position(('outward',10)) - #j.spines['left'].set_position(('axes', -0.05)) - #j.spines['bottom'].set_position(('axes', -0.05)) + j.spines["top"].set_color("none") + j.spines["right"].set_color("none") + j.tick_params( + axis="both", top="off", right="off", which="both", colors="#8C8C8C" + ) + # j.spines['left'].set_position(('outward',10)) + # j.spines['bottom'].set_position(('outward',10)) + # j.spines['left'].set_position(('axes', -0.05)) + # j.spines['bottom'].set_position(('axes', -0.05)) return 0 + + matplotlibdefaults(useTex=True) + def setspines4(): import matplotlib.pylab + for i in matplotlib.pylab.get_fignums(): for j in matplotlib.pylab.figure(i).get_axes(): # j.spines['top'].set_color('none') # j.spines['right'].set_color('none') - j.tick_params(axis='both',top='on',right='on',which='both',colors='#8C8C8C') - #j.spines['left'].set_position(('outward',10)) - #j.spines['bottom'].set_position(('outward',10)) - #j.spines['left'].set_position(('axes', -0.05)) - #j.spines['bottom'].set_position(('axes', -0.05)) + j.tick_params( + axis="both", top="on", right="on", which="both", colors="#8C8C8C" + ) + # j.spines['left'].set_position(('outward',10)) + # j.spines['bottom'].set_position(('outward',10)) + # j.spines['left'].set_position(('axes', -0.05)) + # j.spines['bottom'].set_position(('axes', -0.05)) return 0 -# setspines() -import json +# setspines() def setspines2(): import matplotlib.pylab + for i in matplotlib.pylab.get_fignums(): for j in matplotlib.pylab.figure(i).get_axes(): - j.spines['top'].set_color('none') + j.spines["top"].set_color("none") # j.spines['right'].set_color('none') - j.tick_params(axis='both',top='off',right='on',which='both',colors='#8C8C8C') + j.tick_params( + axis="both", top="off", right="on", which="both", colors="#8C8C8C" + ) # j.spines['left'].set_position(('outward',10)) # j.spines['bottom'].set_position(('outward',10)) # j.spines['right'].set_position(('outward',10)) # 
j.spines['top'].set_position(('outward',10)) -# j.spines['left'].set_position(('axes', -0.05)) - #j.spines['bottom'].set_position(('axes', -0.05)) + # j.spines['left'].set_position(('axes', -0.05)) + # j.spines['bottom'].set_position(('axes', -0.05)) return 0 + def setspines0(): import matplotlib.pylab + for i in matplotlib.pylab.get_fignums(): for j in matplotlib.pylab.figure(i).get_axes(): - j.spines['top'].set_color('none') -# j.spines['bottom'].set_color('none') - j.spines['right'].set_color('none') - j.spines['left'].set_color('none') - j.tick_params(axis='both',top='off',right='off',which='both',colors='#8C8C8C') - #j.spines['left'].set_position(('outward',10)) - #j.spines['bottom'].set_position(('outward',10)) - #j.spines['left'].set_position(('axes', -0.05)) - #j.spines['bottom'].set_position(('axes', -0.05)) + j.spines["top"].set_color("none") + # j.spines['bottom'].set_color('none') + j.spines["right"].set_color("none") + j.spines["left"].set_color("none") + j.tick_params( + axis="both", top="off", right="off", which="both", colors="#8C8C8C" + ) + # j.spines['left'].set_position(('outward',10)) + # j.spines['bottom'].set_position(('outward',10)) + # j.spines['left'].set_position(('axes', -0.05)) + # j.spines['bottom'].set_position(('axes', -0.05)) return 0 diff --git a/playground/pizza-notch/pizza-notch.py b/playground/pizza-notch/pizza-notch.py index 79834bad..2e1da815 100644 --- a/playground/pizza-notch/pizza-notch.py +++ b/playground/pizza-notch/pizza-notch.py @@ -1,53 +1,47 @@ #!/usr/bin/env python3 -import pandas as pd -import numpy as np -import yaml +import hashlib import json -from pathlib import Path -import sys +import logging import os -import hashlib +import sys +from pathlib import Path -from dolfinx.fem import dirichletbc +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import petsc4py +import pyvista +import ufl +import yaml from dolfinx.fem import ( Function, FunctionSpace, assemble_scalar, dirichletbc, form, + locate_dofs_topological, set_bc, ) -from mpi4py import MPI -import petsc4py -from petsc4py import PETSc -import dolfinx -import dolfinx.plot -import ufl - -from dolfinx.fem.petsc import set_bc from dolfinx.io import XDMFFile, gmshio -import logging - -import pyvista -from pyvista.utilities import xvfb from dolfinx.mesh import locate_entities_boundary -from dolfinx.fem import locate_dofs_topological - -from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver from irrevolutions.algorithms.am import HybridSolver -from irrevolutions.models import DamageElasticityModel as Brittle +from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver from irrevolutions.meshes.pacman import mesh_pacman +from irrevolutions.models import DamageElasticityModel as Brittle from irrevolutions.utils import ( ColorPrint, + _logger, _write_history_data, history_data, set_vector_to_constant, ) -from irrevolutions.utils import _logger from irrevolutions.utils.lib import _local_notch_asymptotic -from irrevolutions.utils.viz import plot_mesh from irrevolutions.utils.viz import plot_mesh, plot_scalar, plot_vector +from mpi4py import MPI +from petsc4py import PETSc +from pyvista.utilities import xvfb description = """We solve here a basic 2d of a notched specimen. 
Imagine a dinner a pizza which is missing a slice, and lots of hungry friends @@ -221,7 +215,6 @@ def run_computation(parameters, storage): _logger.setLevel(level=logging.CRITICAL) for i_t, t in enumerate(loads): - uD.interpolate( lambda x: _local_notch_asymptotic( x, ω=np.deg2rad(_omega / 2.0), t=t, par=parameters["material"] @@ -244,8 +237,7 @@ def run_computation(parameters, storage): stable = stability.solve(alpha_lb, eig0=bifurcation._spectrum, inertia=inertia) - with dolfinx.common.Timer(f"~Postprocessing and Vis") as timer: - + with dolfinx.common.Timer("~Postprocessing and Vis") as timer: fracture_energy = comm.allreduce( assemble_scalar(form(model.damage_energy_density(state) * dx)), op=MPI.SUM, @@ -299,6 +291,7 @@ def run_computation(parameters, storage): return history_data, stability.data, state + def load_parameters(file_path, ndofs, model="at1"): """ Load parameters from a YAML file. @@ -351,6 +344,7 @@ def load_parameters(file_path, ndofs, model="at1"): return parameters, signature + def test_2d(): # import argparse from mpi4py import MPI @@ -368,7 +362,7 @@ def test_2d(): ) ColorPrint.print_bold(f"===================-{_storage}-=================") - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, stability_data, state = run_computation(parameters, _storage) ColorPrint.print_bold(history_data["eigs-cone"]) @@ -391,4 +385,4 @@ def test_2d(): if __name__ == "__main__": - test_2d() \ No newline at end of file + test_2d() diff --git a/pyproject.toml b/pyproject.toml index 465a597e..2ada7543 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [project] -name = "irreversible-variational-solvers" +name = "irrevolutions" authors = [ {name = "Andrés A León Baldelli", email = "leon.baldelli@cnrs.fr"}, ] @@ -32,7 +32,5 @@ dependencies = [ [project.optional-dependencies] test = ["pytest"] -# "numpy>=1.26.0", -# "mpi4py>=3.1.5", -# "slepc4py==3.20.1", -# "petsc4py>=3.20.5", +[tool.ruff.lint.isort.sections] +"mpi" = ["mpi4py", "petsc4py"] \ No newline at end of file diff --git a/setup.py b/setup.py index 54bc29c0..3fb1d724 100644 --- a/setup.py +++ b/setup.py @@ -1,12 +1,12 @@ -from setuptools import setup, find_packages +from setuptools import find_packages, setup setup( - name='irrevolutions', - version='2024.0.1', - package_dir={'': 'src'}, # The root package is under the 'src' directory - packages=find_packages('src'), # Find packages under the 'src' directory + name="irrevolutions", + version="2024.0.1", + package_dir={"": "src"}, # The root package is under the 'src' directory + packages=find_packages("src"), # Find packages under the 'src' directory include_package_data=True, package_data={ - 'irrevolutions.models': ['default_parameters.yml'], + "irrevolutions.models": ["default_parameters.yml"], }, ) diff --git a/src/irrevolutions/__init__.py b/src/irrevolutions/__init__.py index 0bb4310f..451cc6f6 100644 --- a/src/irrevolutions/__init__.py +++ b/src/irrevolutions/__init__.py @@ -6,16 +6,17 @@ """ -import dolfinx.io -from mpi4py import MPI +import logging import dolfinx +import dolfinx.io import dolfinx.plot +from mpi4py import MPI -import logging logging.basicConfig(level=logging.INFO) logging.critical( f"DOLFINx version: {dolfinx.__version__} based on GIT commit: \ - {dolfinx.git_commit_hash} of https://github.com/FEniCS/dolfinx/") + {dolfinx.git_commit_hash} of https://github.com/FEniCS/dolfinx/" +) comm = MPI.COMM_WORLD diff --git a/src/irrevolutions/algorithms/am.py 
b/src/irrevolutions/algorithms/am.py index 35b3d5d9..44de69ca 100644 --- a/src/irrevolutions/algorithms/am.py +++ b/src/irrevolutions/algorithms/am.py @@ -1,28 +1,27 @@ -from irrevolutions.utils import norm_H1, norm_L2, set_vector_to_constant, ColorPrint import logging -import dolfinx -from dolfinx.io import XDMFFile - -from irrevolutions.solvers import SNESSolver -from irrevolutions.solvers.snesblockproblem import SNESBlockProblem -from irrevolutions.solvers.function import functions_to_vec +import dolfinx +import numpy as np +import ufl from dolfinx.fem import ( Function, - form, assemble_scalar, + form, ) +from dolfinx.io import XDMFFile +from mpi4py import MPI from petsc4py import PETSc -import ufl -import numpy as np -from mpi4py import MPI +from irrevolutions.solvers import SNESSolver +from irrevolutions.solvers.function import functions_to_vec +from irrevolutions.solvers.snesblockproblem import SNESBlockProblem +from irrevolutions.utils import ColorPrint, norm_H1, norm_L2, set_vector_to_constant comm = MPI.COMM_WORLD from dolfinx.fem.petsc import ( - set_bc, assemble_vector, + set_bc, ) logging.basicConfig() diff --git a/src/irrevolutions/algorithms/ls.py b/src/irrevolutions/algorithms/ls.py index d6e49484..a291d30d 100644 --- a/src/irrevolutions/algorithms/ls.py +++ b/src/irrevolutions/algorithms/ls.py @@ -1,19 +1,17 @@ import logging -from irrevolutions.utils import norm_H1 +import random +import mpi4py +import numpy as np +from dolfinx.cpp.log import LogLevel, log from dolfinx.fem import ( Function, - form, assemble_scalar, + form, ) from petsc4py import PETSc -from dolfinx.cpp.log import log, LogLevel -import numpy as np -import mpi4py -import numpy as np -import random -import logging +from irrevolutions.utils import norm_H1 # Set up logging configuration logging.basicConfig(level=logging.INFO) @@ -249,7 +247,7 @@ def __iter__(self): def __next__(self): logger.info(f"\n\nCalled next, can time be stopped? 
{self.stop_time}") - + if self.stop_time: self.stop_time = False index = self.i @@ -261,8 +259,8 @@ def __next__(self): index = self.i else: raise StopIteration - - return index + + return index def pause_time(self): self.stop_time = True diff --git a/src/irrevolutions/algorithms/so.py b/src/irrevolutions/algorithms/so.py index 3d0f9864..11e5c997 100644 --- a/src/irrevolutions/algorithms/so.py +++ b/src/irrevolutions/algorithms/so.py @@ -381,7 +381,7 @@ def solve(self, alpha_old: dolfinx.fem.function.Function): # Check if the system is damage-critical and log it self.log_critical_state() - with dolfinx.common.Timer(f"~Second Order: Bifurcation") as timer: + with dolfinx.common.Timer("~Second Order: Bifurcation") as timer: # Set up constraints constraints = self.setup_constraints(alpha_old) self.inertia_setup(constraints) @@ -666,8 +666,8 @@ def __init__( self.solution = {"lambda_t": np.nan, "xt": [], "yt": []} - with dolfinx.common.Timer(f"~Second Order: Stability"): - with dolfinx.common.Timer(f"~Second Order: Cone Project"): + with dolfinx.common.Timer("~Second Order: Stability"): + with dolfinx.common.Timer("~Second Order: Cone Project"): # self._converged = False self._v = create_vector_block(self.F) @@ -736,7 +736,7 @@ def solve(self, alpha_old: dolfinx.fem.function.Function, eig0=None, inertia=Non self._converged = False errors.append(1) - with dolfinx.common.Timer(f"~Second Order: Stability"): + with dolfinx.common.Timer("~Second Order: Stability"): constraints = self.setup_constraints(alpha_old) self.constraints = constraints @@ -760,7 +760,7 @@ def solve(self, alpha_old: dolfinx.fem.function.Function, eig0=None, inertia=Non # ... extend ... self._extend_vector(_yr, _y) self._extend_vector(_xk, _x) - + y = self.normalise_eigenmode(_y, mode="functional") xk = self.normalise_eigenmode(_x, mode="functional") @@ -787,7 +787,7 @@ def convergence_loop(self, errors, _Ar, _xk): - _xk (petsc4py.PETSc.Vec): Updated solution vector after convergence. - _lmbda_k (float): Updated Lagrange multiplier corresponding to the final solution. """ - + _s = float(self.parameters.get("cone").get("scaling")) while self.iterate(_xk, errors): @@ -1076,7 +1076,7 @@ def _cone_project_restricted(self, v): Returns: Vector: The projected vector. 
""" - with dolfinx.common.Timer(f"~Second Order: Cone Project"): + with dolfinx.common.Timer("~Second Order: Cone Project"): maps = [ (V.dofmap.index_map, V.dofmap.index_map_bs) for V in self.constraints.function_spaces @@ -1093,8 +1093,8 @@ def _cone_project_restricted(self, v): x_local.array[_dofs] = np.maximum(x_local.array[_dofs], 0) _logger.debug(f"Local dofs: {_dofs}") - _logger.debug(f"x_local") - _logger.debug(f"x_local truncated") + _logger.debug("x_local") + _logger.debug("x_local truncated") _x.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) diff --git a/src/irrevolutions/meshes/V_notch_2D.py b/src/irrevolutions/meshes/V_notch_2D.py index a949e154..67a99deb 100644 --- a/src/irrevolutions/meshes/V_notch_2D.py +++ b/src/irrevolutions/meshes/V_notch_2D.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 -from mpi4py import MPI import numpy as np +from mpi4py import MPI def mesh_V_notch( @@ -22,7 +22,6 @@ def mesh_V_notch( # Perform Gmsh work only on rank = 0 if comm.rank == 0: - import gmsh # Initialise gmsh and set options @@ -60,8 +59,6 @@ def mesh_V_notch( notch_top = model.geo.addLine(p6, p4, tag=12) notch_bottom = model.geo.addLine(p4, p5, tag=13) - - cell_tag_names = {"Domain": 15} cloop = model.geo.addCurveLoop( @@ -91,13 +88,9 @@ def mesh_V_notch( model.mesh.generate(tdim) - - facet_tag_names = { - 'extboundary': 100 - } + facet_tag_names = {"extboundary": 100} tag_names = {"facets": facet_tag_names, "cells": cell_tag_names} - - + # Optional: Write msh file if msh_file is not None: gmsh.write(msh_file) diff --git a/src/irrevolutions/meshes/__init__.py b/src/irrevolutions/meshes/__init__.py index c2059709..00eebe2c 100644 --- a/src/irrevolutions/meshes/__init__.py +++ b/src/irrevolutions/meshes/__init__.py @@ -7,40 +7,35 @@ # ========================================= from functools import wraps -import numpy -import gmsh -from mpi4py import MPI - -from dolfinx.cpp.io import perm_gmsh, distribute_entity_data -from dolfinx.cpp.mesh import to_type, cell_entity_type -from dolfinx.cpp.graph import AdjacencyList_int32 # from dolfinx.mesh import create_meshtags, create_mesh - -from .pacman import mesh_pacman - from gmsh import model -def mesh_bounding_box(mesh, i): return ( - min(mesh.geometry.x[:, i]), max(mesh.geometry.x[:, i])) +def mesh_bounding_box(mesh, i): + return (min(mesh.geometry.x[:, i]), max(mesh.geometry.x[:, i])) def get_tag(kwargs): - return '' if (kwargs.get('tag') == None or kwargs.get('tag') == -1) else f"({kwargs.get('tag')})" + return ( + "" + if (kwargs.get("tag") == None or kwargs.get("tag") == -1) + else f"({kwargs.get('tag')})" + ) def geo_decorate_point(func): @wraps(func) def wrapper(*args, **kwargs): _tag = get_tag(kwargs) - if kwargs.get('meshSize'): + if kwargs.get("meshSize"): _str = f"Point {_tag} = {{ {args[0]}, {args[1]}, {args[2]}, {kwargs.get('meshSize')} }};" else: _str = f"Point {_tag} = {{ {args[0]}, {args[1]}, {args[2]}, {args[3]} }};" print(_str) return func(*args, **kwargs) + return wrapper @@ -48,9 +43,9 @@ def geo_decorate_line(func): @wraps(func) def wrapper(*args, **kwargs): _tag = get_tag(kwargs) - print( - f"Line {_tag} = {{ {args[0]}, {args[1]} }};") + print(f"Line {_tag} = {{ {args[0]}, {args[1]} }};") return func(*args, **kwargs) + return wrapper @@ -58,9 +53,9 @@ def geo_decorate_circle(func): @wraps(func) def wrapper(*args, **kwargs): _tag = get_tag(kwargs) - print( - f"Circle {_tag} = {{ {args[0]}, {args[1]}, {args[2]} }};") + print(f"Circle {_tag} = {{ {args[0]}, {args[1]}, {args[2]} }};") return func(*args, 
**kwargs) + return wrapper @@ -68,9 +63,9 @@ def geo_decorate_loop(func): @wraps(func) def wrapper(*args, **kwargs): _tag = get_tag(kwargs) - print( - f"Line Loop {_tag} = {{ {', '.join(map(str, args[0]))} }};") + print(f"Line Loop {_tag} = {{ {', '.join(map(str, args[0]))} }};") return func(*args, **kwargs) + return wrapper @@ -79,9 +74,9 @@ def geo_decorate_surface(func): def wrapper(*args, **kwargs): _str = [", ".join(map(str, arg)) for arg in args] _tag = get_tag(kwargs) - print( - f"Plane Surface {_tag} = {{ {', '.join(map(str, _str))} }};") + print(f"Plane Surface {_tag} = {{ {', '.join(map(str, _str))} }};") return func(*args, **kwargs) + return wrapper @@ -91,13 +86,12 @@ def wrapper(*args, **kwargs): _tag = get_tag(kwargs) _str = " ".join(map(str, args[1])) if args[0] == 1: - print( - f"Physical Line {_tag} = {{ {_str} }};") + print(f"Physical Line {_tag} = {{ {_str} }};") elif args[0] == 2: - print( - f"Physical Surface {_tag} = {{ {_str} }};") + print(f"Physical Surface {_tag} = {{ {_str} }};") return func(*args, **kwargs) + return wrapper diff --git a/src/irrevolutions/meshes/boolean.py b/src/irrevolutions/meshes/boolean.py index 435d6e74..c0b1a227 100644 --- a/src/irrevolutions/meshes/boolean.py +++ b/src/irrevolutions/meshes/boolean.py @@ -1,9 +1,7 @@ from mpi4py import MPI -def mesh_bar_gmshapi(name, - msh_file=None, - comm=MPI.COMM_WORLD): +def mesh_bar_gmshapi(name, msh_file=None, comm=MPI.COMM_WORLD): """ Create mesh. """ @@ -15,9 +13,10 @@ def mesh_bar_gmshapi(name, # Perform Gmsh work only on rank = 0 if comm.rank == 0: + import warnings import gmsh - import warnings + warnings.filterwarnings("ignore") # Initialise gmsh and set options gmsh.initialize() @@ -25,18 +24,19 @@ def mesh_bar_gmshapi(name, # gmsh.option.setNumber("Mesh.Algorithm", 6) # model = gmsh.model() gmsh.model.add("Bool 2D") - L, H, r = 1., 1., .1 - hole = gmsh.model.occ.addCircle(L / 2, L / 2, 0., r, tag=1) - domain = gmsh.model.occ.addRectangle(0, 0, 0., L, H, tag=2, roundedRadius=.1) + L, H, r = 1.0, 1.0, 0.1 + hole = gmsh.model.occ.addCircle(L / 2, L / 2, 0.0, r, tag=1) + domain = gmsh.model.occ.addRectangle(0, 0, 0.0, L, H, tag=2, roundedRadius=0.1) boolean = gmsh.model.occ.cut([(2, hole)], [(2, domain)], tag=3) + def mesh_moonslice_gmshapi( name, geom_parameters, lc, tdim=2, order=1, - msh_file='bool', + msh_file="bool", comm=MPI.COMM_WORLD, ): """ @@ -49,8 +49,10 @@ def mesh_moonslice_gmshapi( # Perform Gmsh work only on rank = 0 if comm.rank == 0: - import gmsh import warnings + + import gmsh + warnings.filterwarnings("ignore") gmsh.initialize() gmsh.option.setNumber("General.Terminal", 1) @@ -66,7 +68,7 @@ def mesh_moonslice_gmshapi( # R1=1.; R2=2.3; R3=1.; ex=0.; ey=-.3 gmsh.model.occ.addDisk(0, 0, 0, R1, R1, tag=1) - gmsh.model.occ.addDisk(ex, ey, 0., R2, R3, tag=2) + gmsh.model.occ.addDisk(ex, ey, 0.0, R2, R3, tag=2) gmsh.model.occ.cut([(tdim, 1)], [(tdim, 2)], 3) gmsh.model.occ.synchronize() model = gmsh.model @@ -76,13 +78,12 @@ def mesh_moonslice_gmshapi( model.setPhysicalName(tdim, domain, "Surface") gmsh.model.mesh.setOrder(order) - gmsh.option.setNumber("Mesh.MeshSizeFromCurvature", 20) # We can constraint the min and max element sizes to stay within reasonnable # values (see `t10.py' for more details): - gmsh.option.setNumber("Mesh.MeshSizeMin", lc/2.) 
- gmsh.option.setNumber("Mesh.MeshSizeMax", 2*lc) + gmsh.option.setNumber("Mesh.MeshSizeMin", lc / 2.0) + gmsh.option.setNumber("Mesh.MeshSizeMax", 2 * lc) gmsh.model.mesh.generate(tdim) diff --git a/src/irrevolutions/meshes/extended_pacman.py b/src/irrevolutions/meshes/extended_pacman.py index 3d1fde33..02340b0a 100644 --- a/src/irrevolutions/meshes/extended_pacman.py +++ b/src/irrevolutions/meshes/extended_pacman.py @@ -1,32 +1,49 @@ -from mpi4py import MPI -import numpy as np import os import sys + +import numpy as np +from mpi4py import MPI + sys.path.append("../") -from meshes import (_addPoint as addPoint, - _addLine as addLine, +from pathlib import Path + +from meshes import ( _addCircleArc as addCircleArc, +) +from meshes import ( _addCurveLoop as addCurveLoop, +) +from meshes import ( + _addLine as addLine, +) +from meshes import ( + _addPhysicalSurface as _addPhysicalSurface, +) +from meshes import ( _addPlaneSurface as _addPlaneSurface, - _addPhysicalSurface as _addPhysicalSurface,) +) +from meshes import ( + _addPoint as addPoint, +) -from pathlib import Path def mesh_extended_pacman( - name, - geom_parameters, - tdim=2, - order=1, - msh_file='extended_pacman.msh', - comm=MPI.COMM_WORLD, + name, + geom_parameters, + tdim=2, + order=1, + msh_file="extended_pacman.msh", + comm=MPI.COMM_WORLD, ): """ Create mesh. - """ - + """ + if comm.rank == 0: - import gmsh import warnings + + import gmsh + warnings.filterwarnings("ignore") gmsh.initialize() gmsh.option.setNumber("General.Terminal", 1) @@ -41,14 +58,30 @@ def mesh_extended_pacman( model = gmsh.model model.add("extended_pacman") - p0 = addPoint(0, 0, 0, lc/refinement, tag=0) - p1 = addPoint( - radius*np.cos(omega / 2), radius*np.sin(omega / 2), 0.0, lc, tag=1) - p2 = addPoint( - radius*np.cos(omega / 2), - radius*np.sin(omega / 2), 0.0, lc, tag=2) - p3 = addPoint(radius, 0, 0.0, lc/refinement, tag=12) - - p10 = addPoint( - rho*radius*np.cos(omega / 2), rho*radius*np.sin(omega / 2), 0.0, lc, tag=10) - p20 = addPoint( - rho*radius*np.cos(omega / 2), - rho*radius*np.sin(omega / 2), 0.0, lc, tag=20) - p30 = addPoint(rho*radius, 0, 0.0, lc, tag=120) + p0 = addPoint(0, 0, 0, lc / refinement, tag=0) + p1 = addPoint( + -radius * np.cos(omega / 2), radius * np.sin(omega / 2), 0.0, lc, tag=1 + ) + p2 = addPoint( + -radius * np.cos(omega / 2), -radius * np.sin(omega / 2), 0.0, lc, tag=2 + ) + p3 = addPoint(radius, 0, 0.0, lc / refinement, tag=12) + + p10 = addPoint( + -rho * radius * np.cos(omega / 2), + rho * radius * np.sin(omega / 2), + 0.0, + lc, + tag=10, + ) + p20 = addPoint( + -rho * radius * np.cos(omega / 2), + -rho * radius * np.sin(omega / 2), + 0.0, + lc, + tag=20, + ) + p30 = addPoint(rho * radius, 0, 0.0, lc, tag=120) top = addLine(p1, p0, tag=3) bot = addLine(p0, p2, tag=4) @@ -62,23 +95,25 @@ def mesh_extended_pacman( arc2_int = addCircleArc(12, 0, 2, tag=60) arc1_ext = addCircleArc(20, 0, 120, tag=51) arc2_ext = addCircleArc(120, 0, 10, tag=61) - cloop_ext = addCurveLoop([top_ext, arc1_int, arc2_int, bot_ext, arc1_ext, arc2_ext], tag=1010) + cloop_ext = addCurveLoop( + [top_ext, arc1_int, arc2_int, bot_ext, arc1_ext, arc2_ext], tag=1010 + ) _addPlaneSurface([cloop], tag=100) _addPlaneSurface([cloop_ext], tag=101) - + model.geo.addSurfaceLoop([cloop, cloop_ext, 15]) - + model.geo.synchronize() entities = model.getEntities(dim=2) - + _addPhysicalSurface(tdim, [entities[0][1]], tag=1) model.setPhysicalName(tdim, 1, "Pacman") _addPhysicalSurface(tdim, [entities[1][1]], tag=100) model.setPhysicalName(tdim, 100, "Extended 
Domain") # model.addPhysicalGroup(tdim, [entities[0][1], entities[1][1]], tag=1000) - + model.geo.synchronize() model.mesh.generate(tdim) @@ -88,14 +123,14 @@ def mesh_extended_pacman( return gmsh.model if comm.rank == 0 else None, tdim -if __name__ == "__main__": +if __name__ == "__main__": import sys + import yaml - + # , merge_meshtags, locate_dofs_topological from mpi4py import MPI - import dolfinx.plot _geom_parameters = """ elltomesh: 1 @@ -107,7 +142,7 @@ def mesh_extended_pacman( rho: 1.3 refinement: 4 """ - + geom_parameters = yaml.load(_geom_parameters, Loader=yaml.FullLoader) mesh = mesh_extended_pacman( @@ -115,7 +150,6 @@ def mesh_extended_pacman( geom_parameters, tdim=2, order=1, - msh_file='extended_pacman.msh', + msh_file="extended_pacman.msh", comm=MPI.COMM_WORLD, ) - diff --git a/src/irrevolutions/meshes/pacman.py b/src/irrevolutions/meshes/pacman.py index cd4cc9b1..53c3f4ba 100644 --- a/src/irrevolutions/meshes/pacman.py +++ b/src/irrevolutions/meshes/pacman.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 from mpi4py import MPI -import numpy as np def mesh_pacman( @@ -18,9 +17,11 @@ def mesh_pacman( # Perform Gmsh work only on rank = 0 if comm.rank == 0: - import numpy as np - import gmsh import warnings + + import gmsh + import numpy as np + warnings.filterwarnings("ignore") gmsh.initialize() gmsh.option.setNumber("General.Terminal", 1) @@ -41,13 +42,11 @@ def mesh_pacman( model = gmsh.model # model.occ.addDisk(0, 0, 0, R, R, tag=10) - # print("Model name: " + gmsh.model.getCurrent()) # get all elementary entities in the model entities = gmsh.model.occ.getEntities() - # for e in entities: # print("Entity " + str(e) + " of type " + gmsh.model.getType(e[0], e[1])) # # get the mesh nodes for each elementary entity @@ -62,12 +61,15 @@ def mesh_pacman( # print(" - boundary entities " + str(boundary)) # partitions = gmsh.model.occ.getPartitions(e[0], e[1]) - # print(entities) - p0 = model.geo.addPoint(0, 0, 0, lc/refinement, tag=0) - p1 = model.geo.addPoint( - radius*np.cos(omega / 2), radius*np.sin(omega / 2), 0.0, lc, tag=1) - p2 = model.geo.addPoint( - radius*np.cos(omega / 2), - radius*np.sin(omega / 2), 0.0, lc, tag=2) - p3 = model.geo.addPoint(radius, 0, 0.0, lc/refinement, tag=12) + p0 = model.geo.addPoint(0, 0, 0, lc / refinement, tag=0) + p1 = model.geo.addPoint( + -radius * np.cos(omega / 2), radius * np.sin(omega / 2), 0.0, lc, tag=1 + ) + p2 = model.geo.addPoint( + -radius * np.cos(omega / 2), -radius * np.sin(omega / 2), 0.0, lc, tag=2 + ) + p3 = model.geo.addPoint(radius, 0, 0.0, lc / refinement, tag=12) top = model.geo.addLine(p1, p0, tag=3) bot = model.geo.addLine(p0, p2, tag=4) @@ -75,7 +77,6 @@ def mesh_pacman( arc2 = model.geo.addCircleArc(12, 0, 1, tag=6) cloop = model.geo.addCurveLoop([top, bot, arc1, arc2]) - s = model.geo.addPlaneSurface([cloop]) model.geo.addSurfaceLoop([s, 1000]) model.geo.synchronize() @@ -88,13 +89,12 @@ def mesh_pacman( gmsh.model.addPhysicalGroup(tdim - 1, [5], tag=20) gmsh.model.setPhysicalName(tdim - 1, 20, "dirichlet_boundary") - gmsh.option.setNumber("Mesh.MeshSizeFromCurvature", 20) # We can constrain resolution # values (see `t10.py' for more details): - gmsh.option.setNumber("Mesh.MeshSizeMin", lc/refinement) - gmsh.option.setNumber("Mesh.MeshSizeMax", 2*lc) + gmsh.option.setNumber("Mesh.MeshSizeMin", lc / refinement) + gmsh.option.setNumber("Mesh.MeshSizeMax", 2 * lc) gmsh.model.mesh.generate(tdim) # Optional: Write msh file diff --git a/src/irrevolutions/meshes/primitives.py b/src/irrevolutions/meshes/primitives.py index 
bc25ae6f..8a4519fa 100644 --- a/src/irrevolutions/meshes/primitives.py +++ b/src/irrevolutions/meshes/primitives.py @@ -3,19 +3,10 @@ from mpi4py import MPI -def mesh_ep_gmshapi(name, - Lx, - Ly, - L0, - s, - lc, - tdim, - order=1, - msh_file=None, - sep=0.1, - comm=MPI.COMM_WORLD): +def mesh_ep_gmshapi( + name, Lx, Ly, L0, s, lc, tdim, order=1, msh_file=None, sep=0.1, comm=MPI.COMM_WORLD +): if comm.rank == 0: - import gmsh # Initialise gmsh and set options @@ -30,28 +21,41 @@ def mesh_ep_gmshapi(name, p1 = model.geo.addPoint(Lx, 0.0, 0, lc, tag=1) p2 = model.geo.addPoint(Lx, Ly, 0.0, lc, tag=2) p3 = model.geo.addPoint(0, Ly, 0, lc, tag=3) - #pLa= model.geo.addPoint(0, Ly/2-s/2, 0, lc, tag=4) - pRa= model.geo.addPoint(Lx, Ly/2+s/2-sep, 0, lc, tag=6) - pRb= model.geo.addPoint(Lx, Ly/2+s/2+sep, 0, lc, tag=7) - pLa= model.geo.addPoint(0, Ly/2-s/2-sep, 0, lc, tag=8) - pLb= model.geo.addPoint(0, Ly/2-s/2+sep, 0, lc, tag=5) - plM= model.geo.addPoint(L0, Ly/2-s/2, 0, lc, tag=9) - prM= model.geo.addPoint(Lx-L0, Ly/2+s/2, 0, lc, tag=10) + # pLa= model.geo.addPoint(0, Ly/2-s/2, 0, lc, tag=4) + pRa = model.geo.addPoint(Lx, Ly / 2 + s / 2 - sep, 0, lc, tag=6) + pRb = model.geo.addPoint(Lx, Ly / 2 + s / 2 + sep, 0, lc, tag=7) + pLa = model.geo.addPoint(0, Ly / 2 - s / 2 - sep, 0, lc, tag=8) + pLb = model.geo.addPoint(0, Ly / 2 - s / 2 + sep, 0, lc, tag=5) + plM = model.geo.addPoint(L0, Ly / 2 - s / 2, 0, lc, tag=9) + prM = model.geo.addPoint(Lx - L0, Ly / 2 + s / 2, 0, lc, tag=10) # points = [p0, p1, p2, p3] bottom = model.geo.addLine(p0, p1, tag=0) - #right = model.geo.addLine(p1, p2, tag=1) + # right = model.geo.addLine(p1, p2, tag=1) rightB = model.geo.addLine(p1, pRa, tag=1) - crackBR= model.geo.addLine(pRa, prM, tag=2) - crackTR= model.geo.addLine(prM, pRb, tag=3) + crackBR = model.geo.addLine(pRa, prM, tag=2) + crackTR = model.geo.addLine(prM, pRb, tag=3) rightT = model.geo.addLine(pRb, p2, tag=4) top = model.geo.addLine(p2, p3, tag=5) - #left=model.geo.addLine(p3, p0, tag=6) + # left=model.geo.addLine(p3, p0, tag=6) leftT = model.geo.addLine(p3, pLb, tag=6) crackTL = model.geo.addLine(pLb, plM, tag=7) crackBL = model.geo.addLine(plM, pLa, tag=8) leftB = model.geo.addLine(pLa, p0, tag=9) - #cloop1 = model.geo.addCurveLoop([bottom, right, top, left]) - cloop1 = model.geo.addCurveLoop([crackTR, rightT, top, leftT, crackTL, crackBL, leftB, bottom, rightB, crackBR]) + # cloop1 = model.geo.addCurveLoop([bottom, right, top, left]) + cloop1 = model.geo.addCurveLoop( + [ + crackTR, + rightT, + top, + leftT, + crackTL, + crackBL, + leftB, + bottom, + rightB, + crackBR, + ] + ) # surface_1 = model.geo.addPlaneSurface([cloop1]) @@ -72,18 +76,18 @@ def mesh_ep_gmshapi(name, # domain = 1 # gmsh.model.addPhysicalGroup(tdim, [v[1] for v in volumes], domain) # gmsh.model.setPhysicalName(tdim, domain, 'domain') - #gmsh.model.addPhysicalGroup(tdim - 2, [9], tag=18) - #gmsh.model.setPhysicalName(tdim - 2, 18, "nodeLeftMiddle") + # gmsh.model.addPhysicalGroup(tdim - 2, [9], tag=18) + # gmsh.model.setPhysicalName(tdim - 2, 18, "nodeLeftMiddle") gmsh.model.addPhysicalGroup(tdim - 1, [0], tag=10) gmsh.model.setPhysicalName(tdim - 1, 10, "bottom") gmsh.model.addPhysicalGroup(tdim - 1, [5], tag=11) gmsh.model.setPhysicalName(tdim - 1, 11, "top") - + gmsh.model.addPhysicalGroup(tdim - 1, [6, 7, 8, 9], tag=12) - #gmsh.model.addPhysicalGroup(tdim - 1, [6], tag=12) + # gmsh.model.addPhysicalGroup(tdim - 1, [6], tag=12) gmsh.model.setPhysicalName(tdim - 1, 12, "left") gmsh.model.addPhysicalGroup(tdim - 1, [1, 2, 3, 4], 
tag=13) - #gmsh.model.addPhysicalGroup(tdim - 1, [1], tag=13) + # gmsh.model.addPhysicalGroup(tdim - 1, [1], tag=13) gmsh.model.setPhysicalName(tdim - 1, 13, "right") gmsh.model.addPhysicalGroup(tdim - 1, [7], tag=14) gmsh.model.setPhysicalName(tdim - 1, 14, "Lliptop") @@ -93,7 +97,7 @@ def mesh_ep_gmshapi(name, gmsh.model.setPhysicalName(tdim - 1, 16, "Rliptop") gmsh.model.addPhysicalGroup(tdim - 1, [3], tag=17) gmsh.model.setPhysicalName(tdim - 1, 17, "Rlipbot") - + model.mesh.generate(tdim) # Define physical groups for interfaces (! target tag > 0) @@ -120,19 +124,10 @@ def mesh_ep_gmshapi(name, return gmsh.model if comm.rank == 0 else None, tdim -def mesh_rightCrack_gmshapi(name, - Lx, - Ly, - L0, - s, - lc, - tdim, - order=1, - msh_file=None, - sep=0.1, - comm=MPI.COMM_WORLD): +def mesh_rightCrack_gmshapi( + name, Lx, Ly, L0, s, lc, tdim, order=1, msh_file=None, sep=0.1, comm=MPI.COMM_WORLD +): if comm.rank == 0: - import gmsh # Initialise gmsh and set options @@ -147,24 +142,26 @@ def mesh_rightCrack_gmshapi(name, p1 = model.geo.addPoint(Lx, 0.0, 0, lc, tag=1) p2 = model.geo.addPoint(Lx, Ly, 0.0, lc, tag=2) p3 = model.geo.addPoint(0, Ly, 0, lc, tag=3) - #pLa= model.geo.addPoint(0, Ly/2-s/2, 0, lc, tag=4) - pRa= model.geo.addPoint(Lx, Ly/2+s/2-sep, 0, lc, tag=6) - pRb= model.geo.addPoint(Lx, Ly/2+s/2+sep, 0, lc, tag=7) - pLa= model.geo.addPoint(0, Ly/2-s/2-sep, 0, lc, tag=8) - pLb= model.geo.addPoint(0, Ly/2-s/2+sep, 0, lc, tag=5) - plM= model.geo.addPoint(L0, Ly/2-s/2, 0, lc, tag=9) - prM= model.geo.addPoint(Lx-L0, Ly/2+s/2, 0, lc, tag=10) + # pLa= model.geo.addPoint(0, Ly/2-s/2, 0, lc, tag=4) + pRa = model.geo.addPoint(Lx, Ly / 2 + s / 2 - sep, 0, lc, tag=6) + pRb = model.geo.addPoint(Lx, Ly / 2 + s / 2 + sep, 0, lc, tag=7) + pLa = model.geo.addPoint(0, Ly / 2 - s / 2 - sep, 0, lc, tag=8) + pLb = model.geo.addPoint(0, Ly / 2 - s / 2 + sep, 0, lc, tag=5) + plM = model.geo.addPoint(L0, Ly / 2 - s / 2, 0, lc, tag=9) + prM = model.geo.addPoint(Lx - L0, Ly / 2 + s / 2, 0, lc, tag=10) # points = [p0, p1, p2, p3] bottom = model.geo.addLine(p0, p1, tag=0) right = model.geo.addLine(p1, p2, tag=1) top = model.geo.addLine(p2, p3, tag=5) - #left=model.geo.addLine(p3, p0, tag=6) + # left=model.geo.addLine(p3, p0, tag=6) leftT = model.geo.addLine(p3, pLb, tag=6) crackTL = model.geo.addLine(pLb, plM, tag=7) crackBL = model.geo.addLine(plM, pLa, tag=8) leftB = model.geo.addLine(pLa, p0, tag=9) - #cloop1 = model.geo.addCurveLoop([bottom, right, top, left]) - cloop1 = model.geo.addCurveLoop([right, top, leftT, crackTL, crackBL, leftB, bottom]) + # cloop1 = model.geo.addCurveLoop([bottom, right, top, left]) + cloop1 = model.geo.addCurveLoop( + [right, top, leftT, crackTL, crackBL, leftB, bottom] + ) # surface_1 = model.geo.addPlaneSurface([cloop1]) @@ -185,24 +182,24 @@ def mesh_rightCrack_gmshapi(name, # domain = 1 # gmsh.model.addPhysicalGroup(tdim, [v[1] for v in volumes], domain) # gmsh.model.setPhysicalName(tdim, domain, 'domain') - #gmsh.model.addPhysicalGroup(tdim - 2, [9], tag=18) - #gmsh.model.setPhysicalName(tdim - 2, 18, "nodeLeftMiddle") + # gmsh.model.addPhysicalGroup(tdim - 2, [9], tag=18) + # gmsh.model.setPhysicalName(tdim - 2, 18, "nodeLeftMiddle") gmsh.model.addPhysicalGroup(tdim - 1, [0], tag=10) gmsh.model.setPhysicalName(tdim - 1, 10, "bottom") gmsh.model.addPhysicalGroup(tdim - 1, [5], tag=11) gmsh.model.setPhysicalName(tdim - 1, 11, "top") - - #gmsh.model.addPhysicalGroup(tdim - 1, [6, 7, 8, 9], tag=12) + + # gmsh.model.addPhysicalGroup(tdim - 1, [6, 7, 8, 9], tag=12) 
gmsh.model.addPhysicalGroup(tdim - 1, [6], tag=12) gmsh.model.setPhysicalName(tdim - 1, 12, "left") - #gmsh.model.addPhysicalGroup(tdim - 1, [1, 2, 3, 4], tag=13) + # gmsh.model.addPhysicalGroup(tdim - 1, [1, 2, 3, 4], tag=13) gmsh.model.addPhysicalGroup(tdim - 1, [1], tag=13) gmsh.model.setPhysicalName(tdim - 1, 13, "right") gmsh.model.addPhysicalGroup(tdim - 1, [7], tag=14) gmsh.model.setPhysicalName(tdim - 1, 14, "Lliptop") gmsh.model.addPhysicalGroup(tdim - 1, [8], tag=15) gmsh.model.setPhysicalName(tdim - 1, 15, "Llipbot") - + model.mesh.generate(tdim) # Define physical groups for interfaces (! target tag > 0) @@ -228,21 +225,16 @@ def mesh_rightCrack_gmshapi(name, return gmsh.model if comm.rank == 0 else None, tdim -def mesh_bar_gmshapi(name, - Lx, - Ly, - lc, - tdim, - order=1, - msh_file=None, - comm=MPI.COMM_WORLD): + +def mesh_bar_gmshapi( + name, Lx, Ly, lc, tdim, order=1, msh_file=None, comm=MPI.COMM_WORLD +): """ Create mesh of 3d tensile test specimen according to ISO 6892-1:2019 using the Python API of Gmsh. """ # Perform Gmsh work only on rank = 0 if comm.rank == 0: - import gmsh # Initialise gmsh and set options @@ -319,13 +311,7 @@ def mesh_bar_gmshapi(name, return gmsh.model if comm.rank == 0 else None, tdim -def mesh_circle_gmshapi(name, - R, - lc, - tdim, - order=1, - msh_file=None, - comm=MPI.COMM_WORLD): +def mesh_circle_gmshapi(name, R, lc, tdim, order=1, msh_file=None, comm=MPI.COMM_WORLD): """ Create 2d circle mesh using the Python API of Gmsh. """ @@ -376,20 +362,18 @@ def mesh_circle_gmshapi(name, import sys sys.path.append("../../damage") - from xdmf import XDMFFile + from pathlib import Path + + import dolfinx.plot from mesh import gmsh_to_dolfin # , merge_meshtags, locate_dofs_topological from mpi4py import MPI - from pathlib import Path - import dolfinx.plot + from xdmf import XDMFFile - gmsh_model, tdim = mesh_bar_gmshapi("bar", - 1, - 0.1, - 0.01, - 2, - msh_file="output/bar.msh") + gmsh_model, tdim = mesh_bar_gmshapi( + "bar", 1, 0.1, 0.01, 2, msh_file="output/bar.msh" + ) mesh, mts = gmsh_to_dolfin(gmsh_model, tdim, prune_z=True) Path("output").mkdir(parents=True, exist_ok=True) with XDMFFile(MPI.COMM_WORLD, "output/bar.xdmf", "w") as ofile: @@ -401,8 +385,7 @@ def mesh_circle_gmshapi(name, xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True plotter = pyvista.Plotter(title="Bar mesh") - topology, cell_types = dolfinx.plot.create_vtk_topology( - mesh, mesh.topology.dim) + topology, cell_types = dolfinx.plot.create_vtk_topology(mesh, mesh.topology.dim) grid = pyvista.UnstructuredGrid(topology, cell_types, mesh.geometry.x) # plotter.subplot(0, 0) actor_1 = plotter.add_mesh(grid, show_edges=True) diff --git a/src/irrevolutions/meshes/tdcb_2D.py b/src/irrevolutions/meshes/tdcb_2D.py index 10537bcd..90e0969e 100644 --- a/src/irrevolutions/meshes/tdcb_2D.py +++ b/src/irrevolutions/meshes/tdcb_2D.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 from mpi4py import MPI -import numpy as np def mesh_tdcb( @@ -19,7 +18,6 @@ def mesh_tdcb( # Perform Gmsh work only on rank = 0 if comm.rank == 0: - import gmsh # Initialise gmsh and set options @@ -121,17 +119,16 @@ def mesh_tdcb( cell_tag_names = {"Domain": 1} facet_tag_names = { - "top_pin" : 2, - "bottom_pin" : 3, - "top_boundary" : 4, - "bottom_boundary" : 5 + "top_pin": 2, + "bottom_pin": 3, + "top_boundary": 4, + "bottom_boundary": 5, } # Optional: Write msh file if msh_file is not None: gmsh.write(msh_file) - tag_names = {"facets": facet_tag_names, "cells": cell_tag_names} return gmsh.model if comm.rank == 0 else None, 
tdim, tag_names diff --git a/src/irrevolutions/models/__init__.py b/src/irrevolutions/models/__init__.py index 297f68ad..97e062e7 100644 --- a/src/irrevolutions/models/__init__.py +++ b/src/irrevolutions/models/__init__.py @@ -1,7 +1,7 @@ -import ufl +import os +import ufl import yaml -import os # import pdb @@ -13,7 +13,6 @@ class ElasticityModel: - # Basic class for elasticity def __init__(self, eps_0=None, model_parameters={}): @@ -34,9 +33,11 @@ def __init__(self, eps_0=None, model_parameters={}): # geometric values # self.Ly = geometry_parameters["Ly"] # calculating Lame coefficients - self.lmbda = (self.E * self.nu / - ((1 + self.nu) * (1 - - (self.model_dimension - 1) * self.nu))) + self.lmbda = ( + self.E + * self.nu + / ((1 + self.nu) * (1 - (self.model_dimension - 1) * self.nu)) + ) self.mu = self.E / (2 * (1 + self.nu)) def eps(self, u): @@ -44,11 +45,14 @@ def eps(self, u): return ufl.sym(ufl.grad(u)) if self.model_type == "plane-strain": return ufl.sym( - ufl.as_matrix([ - [u[0].dx(0), u[0.].dx(1), 0], - [u[1].dx(0), u[1].dx(1), 0], - [0, 0, 0], - ])) + ufl.as_matrix( + [ + [u[0].dx(0), u[0].dx(1), 0], + [u[1].dx(0), u[1].dx(1), 0], + [0, 0, 0], + ] + ) + ) def elastic_energy_density_strain(self, eps): """ @@ -58,7 +62,7 @@ def elastic_energy_density_strain(self, eps): lmbda = self.lmbda mu = self.mu # Elastic energy density - return 1 / 2 * (2 * mu * ufl.inner(eps, eps) + lmbda * ufl.tr(eps)**2) + return 1 / 2 * (2 * mu * ufl.inner(eps, eps) + lmbda * ufl.tr(eps) ** 2) def elastic_energy_density(self, state): """ @@ -104,7 +108,7 @@ def __init__(self, model_parameters={}): def a(self, alpha): k_res = self.k_res - return (1 - alpha)**2 + k_res + return (1 - alpha) ** 2 + k_res def w(self, alpha): """ @@ -121,10 +125,13 @@ def elastic_energy_density_strain(self, eps, alpha): # Parameters lmbda = self.lmbda mu = self.mu - + energy_density = ( - self.a(alpha) * 1.0 / 2.0 * - (2 * mu * ufl.inner(eps, eps) + lmbda * ufl.tr(eps)**2)) + self.a(alpha) + * 1.0 + / 2.0 + * (2 * mu * ufl.inner(eps, eps) + lmbda * ufl.tr(eps) ** 2) + ) return energy_density def elastic_energy_density(self, state): @@ -150,7 +157,8 @@ def stress0(self, u): lmbda = self.lmbda mu = self.mu sigma = 2 * mu * strain + lmbda * ufl.tr(strain) * ufl.Identity( - self.model_dimension) + self.model_dimension + ) return sigma def damage_energy_density(self, state): @@ -166,8 +174,7 @@ def damage_energy_density(self, state): # Compute the damage gradient grad_alpha = ufl.grad(alpha) # Compute the damage dissipation density - D_d = w1 * self.w(alpha) + w1 * ell**2 * ufl.dot( - grad_alpha, grad_alpha) + D_d = w1 * self.w(alpha) + w1 * ell**2 * ufl.dot(grad_alpha, grad_alpha) return D_d def total_energy_density(self, state): @@ -175,14 +182,15 @@ def total_energy_density(self, state): Return the damage dissipation density from the state. """ # Get the material parameters - energy = self.elastic_energy_density( - state) + self.damage_energy_density(state) + energy = self.elastic_energy_density(state) + self.damage_energy_density(state) return energy + class BrittleMembraneOverElasticFoundation(DamageElasticityModel): """ Base class for thin film elasticity coupled with damage. """ + def __init__(self, model_parameters={}, eps_0=ufl.Identity(2)): """ Initialie material parameters.
@@ -199,7 +207,7 @@ def __init__(self, model_parameters={}, eps_0=ufl.Identity(2)): super().__init__(model_parameters) if model_parameters: self.model_parameters.update(model_parameters) - + # Initialize the damage parameters self.w1 = self.model_parameters["w1"] self.ell = self.model_parameters["ell"] @@ -208,7 +216,7 @@ def __init__(self, model_parameters={}, eps_0=ufl.Identity(2)): self.eps_0 = eps_0 def elastic_foundation_density(self, u): - K = self.ell_e**(-2.) + K = self.ell_e ** (-2.0) return 0.5 * K * ufl.inner(u, u) def elastic_energy_density(self, state): @@ -220,41 +228,51 @@ def elastic_energy_density(self, state): u = state["u"] eps = self.eps(u) - self.eps_0 return self.elastic_energy_density_strain( - eps, alpha) + self.elastic_foundation_density(u) + eps, alpha + ) + self.elastic_foundation_density(u) def stress(self, strain, alpha): - from numpy import ndarray from dolfinx.fem import assemble_scalar, form + from numpy import ndarray + # Differentiate the elastic energy w.r.t. the strain tensor eps_ = ufl.variable(strain) # Derivative of energy w.r.t. the strain tensor to obtain the stress # tensor _sigma = ufl.diff(self.elastic_energy_density_strain(eps_, alpha), eps_) - dx = ufl.Measure("dx", domain = alpha.function_space.mesh) + dx = ufl.Measure("dx", domain=alpha.function_space.mesh) sigma = ndarray(shape=(self.model_dimension, self.model_dimension)) for i in range(self.model_dimension): for j in range(self.model_dimension): # ompute the average value for the field sigma sigma[i, j] = assemble_scalar(form(_sigma[i, j] * dx)) - + return ufl.as_tensor(sigma) + from dolfinx.fem.function import Function + + class VariableThickness: - #accept the class as argument + # accept the class as argument def __init__(self, model): self.model = model - - #accept the class's __init__ method arguments - def __call__(self, thickness: Function, model_parameters={}, eps_0=ufl.Identity(2)): - #replace energy densities with newdisplay - self.model.elastic_energy_density = thickness * self.model.elastic_energy_density - self.model.elastic_foundation_density = thickness * self.model.elastic_foundation_density - self.model.damage_dissipation_density = thickness * self.model.damage_dissipation_density - - #return the instance of the class + # accept the class's __init__ method arguments + def __call__(self, thickness: Function, model_parameters={}, eps_0=ufl.Identity(2)): + # replace energy densities with newdisplay + self.model.elastic_energy_density = ( + thickness * self.model.elastic_energy_density + ) + self.model.elastic_foundation_density = ( + thickness * self.model.elastic_foundation_density + ) + self.model.damage_dissipation_density = ( + thickness * self.model.damage_dissipation_density + ) + + # return the instance of the class obj = self.model(thickness, model_parameters, eps_0) return obj diff --git a/src/irrevolutions/practice/default.py b/src/irrevolutions/practice/default.py index 73fff6e1..b3aa1daa 100644 --- a/src/irrevolutions/practice/default.py +++ b/src/irrevolutions/practice/default.py @@ -21,26 +21,18 @@ #!/usr/bin/env python3 -import pdb -import sys -import os -import yaml import json -from pathlib import Path -import numpy as np -import pandas as pd -from sympy import derive_by_array -import ufl import logging - -import petsc4py -from mpi4py import MPI -from petsc4py import PETSc +import pdb +import sys import dolfinx +import dolfinx.mesh import dolfinx.plot -from dolfinx import log -from dolfinx.common import Timer, list_timings, TimingType +import numpy as np 
+import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, @@ -51,25 +43,19 @@ locate_dofs_geometrical, set_bc, ) -from dolfinx.fem.petsc import set_bc from dolfinx.io import XDMFFile, gmshio -from dolfinx.mesh import CellType -import dolfinx.mesh +from mpi4py import MPI +from petsc4py import PETSc sys.path.append("../") # from algorithms.am import AlternateMinimisation, HybridSolver -from algorithms.so import BifurcationSolver, StabilitySolver -from meshes.primitives import mesh_bar_gmshapi from irrevolutions.utils import ColorPrint -from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 - - - +from meshes.primitives import mesh_bar_gmshapi # Configuration handling (load parameters from YAML) + def load_parameters(file_path): """ Load parameters from a YAML file. @@ -91,11 +77,11 @@ def load_parameters(file_path): parameters["stability"]["cone"]["scaling"] = 0.3 parameters["model"]["model_dimension"] = 2 - parameters["model"]["model_type"] = '1D' + parameters["model"]["model_type"] = "1D" parameters["model"]["w1"] = 1 - parameters["model"]["ell"] = .1 - parameters["model"]["k_res"] = 0. - parameters["loading"]["min"] = .8 + parameters["model"]["ell"] = 0.1 + parameters["model"]["k_res"] = 0.0 + parameters["loading"]["min"] = 0.8 parameters["loading"]["max"] = 10.5 parameters["loading"]["steps"] = 10 @@ -109,12 +95,14 @@ def load_parameters(file_path): _nameExp = parameters["geometry"]["geom_type"] ell_ = parameters["model"]["ell"] - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature + # Mesh creation function + def create_mesh(parameters): """ Create a mesh based on the specified parameters. @@ -126,7 +114,6 @@ def create_mesh(parameters): dolfinx.Mesh: Generated mesh. """ # Extract mesh parameters from parameters dictionary - from meshes.primitives import mesh_bar_gmshapi Lx = parameters["geometry"]["Lx"] Ly = parameters["geometry"]["Ly"] @@ -143,12 +130,12 @@ def create_mesh(parameters): # Get mesh and meshtags mesh, mts, fts = gmshio.model_to_mesh(gmsh_model, comm, model_rank, tdim) - - return mesh + # Function space creation function + def create_function_space(mesh): """ Create function spaces for displacement and damage fields. @@ -168,6 +155,7 @@ def create_function_space(mesh): return V_u, V_alpha + def init_state(V_u, V_alpha): """ Create the state variables u and alpha. @@ -186,8 +174,10 @@ def init_state(V_u, V_alpha): return state + # Boundary conditions setup function + def setup_boundary_conditions(V_u, V_alpha, Lx): """ Set up boundary conditions for displacement and damage fields. @@ -200,15 +190,11 @@ def setup_boundary_conditions(V_u, V_alpha, Lx): Returns: list of dolfinx.DirichletBC: List of boundary conditions. 
""" - dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.0)) - dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) + dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) + dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) - dofs_u_left = locate_dofs_geometrical( - V_u, lambda x: np.isclose(x[0], 0.0)) - dofs_u_right = locate_dofs_geometrical( - V_u, lambda x: np.isclose(x[0], Lx)) + dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) + dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) zero_u = Function(V_u) u_ = Function(V_u, name="Boundary Displacement") @@ -224,8 +210,10 @@ def setup_boundary_conditions(V_u, V_alpha, Lx): return bcs + # Model initialization function + def initialise_model(parameters): """ Initialise the material model based on simulation parameters. @@ -240,7 +228,7 @@ def initialise_model(parameters): model_parameters = parameters["model"] from models import DamageElasticityModel as Brittle - + class BrittleAT2(Brittle): """Brittle AT_2 model, without an elastic phase. For fun only.""" @@ -258,8 +246,10 @@ def w(self, alpha): return model + # Energy functional definition function + def define_energy_functional(state, model): """ Define the energy functional for the simulation. @@ -292,8 +282,10 @@ def define_energy_functional(state, model): return total_energy + # Solver initialization functions + def initialise_solver(total_energy, state, bcs, parameters): """ Initialise the solver for the simulation. @@ -309,7 +301,7 @@ def initialise_solver(total_energy, state, bcs, parameters): """ # V_u, V_alpha, u, alpha - + from algorithms.am import AlternateMinimisation # alpha = Function(V_alpha, name="Damage") @@ -319,32 +311,40 @@ def initialise_solver(total_energy, state, bcs, parameters): alpha_ub = Function(V_alpha, name="Upper bound") for f in [alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - set_bc(alpha_ub.vector, bcs['bcs_alpha']) + set_bc(alpha_ub.vector, bcs["bcs_alpha"]) alpha_ub.vector.ghostUpdate( addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) # Initialise solver solver = AlternateMinimisation( - total_energy, state, bcs, solver_parameters = parameters, bounds=(alpha_lb, alpha_ub) + total_energy, + state, + bcs, + solver_parameters=parameters, + bounds=(alpha_lb, alpha_ub), ) - return solver + # Logging setup function + def setup_logging(): """ Set up logging for the simulation. """ logging.basicConfig(level=logging.INFO) + # Results storage functions/classes + class ResultsStorage: """ Class for storing and saving simulation results. 
@@ -367,12 +367,17 @@ def store_results(self, parameters, history_data, state): alpha = state["alpha"] if self.comm.rank == 0: - with open(f"{self.prefix}/parameters.yaml", 'w') as file: + with open(f"{self.prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) - with XDMFFile(self.comm, f"{self.prefix}/simulation_results.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + self.comm, + f"{self.prefix}/simulation_results.xdmf", + "w", + encoding=XDMFFile.Encoding.HDF5, + ) as file: # for t, data in history_data.items(): - # file.write_scalar(data, t) + # file.write_scalar(data, t) file.write_mesh(u.function_space.mesh) file.write_function(u, t) @@ -382,8 +387,10 @@ def store_results(self, parameters, history_data, state): with open(f"{self.prefix}/time_data.json", "w") as file: json.dump(history_data, file) + # Visualization functions/classes + class Visualization: """ Class for visualizing simulation results. @@ -415,8 +422,10 @@ def save_table(self, data, name): json.dump(data.to_json(), a_file) a_file.close() + # Time loop function + def run_time_loop(parameters, solver, model, bcs): """ Main time loop for the simulation. @@ -436,9 +445,12 @@ def run_time_loop(parameters, solver, model, bcs): comm = MPI.COMM_WORLD dx = ufl.Measure("dx", domain=state["u"].function_space.mesh) - loads = np.linspace(parameters["loading"]["min"], - parameters["loading"]["max"], parameters["loading"]["steps"]) - + loads = np.linspace( + parameters["loading"]["min"], + parameters["loading"]["max"], + parameters["loading"]["steps"], + ) + history_data = { "load": [], "elastic_energy": [], @@ -451,19 +463,18 @@ def run_time_loop(parameters, solver, model, bcs): cells = np.arange(map.size_local + map.num_ghosts, dtype=np.int32) from dolfinx import cpp as _cpp + _x = _cpp.fem.interpolation_coords(V_u.element, mesh, cells) alpha = state["alpha"] u = state["u"] - + # Main time loop for i_t, t in enumerate(loads): - - # Update boundary conditions or external loads if necessary - datum = lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1])) - bcs['bcs_u'][1].g.interpolate(datum(_x), cells) - bcs['bcs_u'][1].g.x.scatter_forward() + datum = lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1])) + bcs["bcs_u"][1].g.interpolate(datum(_x), cells) + bcs["bcs_u"][1].g.x.scatter_forward() logging.critical(f"\n\n-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --\n") @@ -474,7 +485,7 @@ def run_time_loop(parameters, solver, model, bcs): solver.alpha_lb.vector.ghostUpdate( addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) - + # Solve for the current time step solver.solve() @@ -498,6 +509,7 @@ def run_time_loop(parameters, solver, model, bcs): return history_data + if __name__ == "__main__": # Main script execution # Load parameters from YAML file @@ -513,7 +525,6 @@ def run_time_loop(parameters, solver, model, bcs): # Set up boundary conditions bcs = setup_boundary_conditions(V_u, V_alpha, parameters["geometry"]["Lx"]) - # Initialise material model model = initialise_model(parameters) @@ -541,7 +552,7 @@ def postprocess(history_data, state): visualization = Visualization(f"output/traction_AT2_cone/{signature}") visualization.visualise_results(history_data) - + pdb.set_trace() list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) diff --git a/src/irrevolutions/practice/discrete_atk.py b/src/irrevolutions/practice/discrete_atk.py index d7547ce2..6ab99bcf 100644 --- a/src/irrevolutions/practice/discrete_atk.py +++ b/src/irrevolutions/practice/discrete_atk.py @@ -1,53 +1,37 
@@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml import json -from pathlib import Path -import sys +import logging import os +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import petsc4py +import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, - FunctionSpace, assemble_scalar, dirichletbc, form, locate_dofs_geometrical, set_bc, ) +from dolfinx.fem.petsc import assemble_vector +from dolfinx.io import XDMFFile from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl - -from dolfinx.fem.petsc import ( - set_bc, - assemble_vector - ) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType sys.path.append("../") from algorithms.so import BifurcationSolver, StabilitySolver +from irrevolutions.utils import ColorPrint, norm_H1, norm_L2 from solvers import SNESSolver -from meshes.primitives import mesh_bar_gmshapi -from irrevolutions.utils import ColorPrint -from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 - - - sys.path.append("../") @@ -64,20 +48,20 @@ """ -from solvers.function import functions_to_vec logging.getLogger().setLevel(logging.CRITICAL) comm = MPI.COMM_WORLD + class _AlternateMinimisation: - def __init__(self, - total_energy, - state, - bcs, - solver_parameters={}, - bounds=(dolfinx.fem.function.Function, - dolfinx.fem.function.Function) - ): + def __init__( + self, + total_energy, + state, + bcs, + solver_parameters={}, + bounds=(dolfinx.fem.function.Function, dolfinx.fem.function.Function), + ): self.state = state self.alpha = state["alpha"] self.alpha_old = dolfinx.fem.function.Function(self.alpha.function_space) @@ -90,8 +74,7 @@ def __init__(self, V_u = state["u"].function_space V_alpha = state["alpha"].function_space - energy_u = ufl.derivative( - self.total_energy, self.u, ufl.TestFunction(V_u)) + energy_u = ufl.derivative(self.total_energy, self.u, ufl.TestFunction(V_u)) energy_alpha = ufl.derivative( self.total_energy, self.alpha, ufl.TestFunction(V_alpha) ) @@ -117,7 +100,6 @@ def __init__(self, ) def solve(self, outdir=None): - alpha_diff = dolfinx.fem.Function(self.alpha.function_space) self.data = { @@ -154,10 +136,7 @@ def solve(self, outdir=None): Fv = [assemble_vector(form(F)) for F in self.F] Fnorm = np.sqrt( - np.array( - [comm.allreduce(Fvi.norm(), op=MPI.SUM) - for Fvi in Fv] - ).sum() + np.array([comm.allreduce(Fvi.norm(), op=MPI.SUM) for Fvi in Fv]).sum() ) error_alpha_max = alpha_diff.vector.max()[1] @@ -204,10 +183,8 @@ def solve(self, outdir=None): self.data["solver_u_it"].append(solver_u_it) self.data["total_energy"].append(total_energy_int) - if ( - self.solver_parameters.get( - "damage_elasticity").get("criterion") + self.solver_parameters.get("damage_elasticity").get("criterion") == "residual_u" ): if error_residual_F <= self.solver_parameters.get( @@ -215,8 +192,7 @@ def solve(self, outdir=None): ).get("alpha_rtol"): break if ( - self.solver_parameters.get( - "damage_elasticity").get("criterion") + self.solver_parameters.get("damage_elasticity").get("criterion") == "alpha_H1" ): if error_alpha_H1 <= self.solver_parameters.get( @@ -231,12 +207,11 @@ def 
solve(self, outdir=None): petsc4py.init(sys.argv) -def discrete_atk(arg_N=2): +def discrete_atk(arg_N=2): # Mesh on node model_rank and then distribute model_rank = 0 - with open("./parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) @@ -244,14 +219,14 @@ def discrete_atk(arg_N=2): # parameters["cone"]["atol"] = 1e-7 parameters["model"]["model_dimension"] = 1 - parameters["model"]["model_type"] = '1D' + parameters["model"]["model_type"] = "1D" parameters["model"]["mu"] = 1 parameters["model"]["w1"] = 1 parameters["model"]["k_res"] = 1e-4 parameters["model"]["k"] = 3 parameters["model"]["N"] = arg_N # parameters["loading"]["max"] = 2. - parameters["loading"]["max"] = parameters["model"]["k"] + parameters["loading"]["max"] = parameters["model"]["k"] parameters["loading"]["steps"] = 100 parameters["geometry"]["geom_type"] = "discrete-damageable" @@ -268,12 +243,12 @@ def discrete_atk(arg_N=2): geom_type = parameters["geometry"]["geom_type"] _N = parameters["model"]["N"] - # Create the mesh of the specimen with given dimensions mesh = dolfinx.mesh.create_unit_interval(MPI.COMM_WORLD, _N) import hashlib - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() outdir = os.path.join(os.path.dirname(__file__), "output") prefix = os.path.join(outdir, f"discrete-atk-N{parameters['model']['N']}") @@ -286,31 +261,31 @@ def discrete_atk(arg_N=2): Path(_crunchdir).mkdir(parents=True, exist_ok=True) if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) if comm.rank == 0: - with open(f"{_crunchdir}/{signature}.md5", 'w') as f: - f.write('') + with open(f"{_crunchdir}/{signature}.md5", "w") as f: + f.write("") if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_mesh(mesh) # Functional Setting - element_u = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), - degree=1) + element_u = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), degree=1) - element_alpha = ufl.FiniteElement("DG", mesh.ufl_cell(), - degree=0) + element_alpha = ufl.FiniteElement("DG", mesh.ufl_cell(), degree=0) V_u = dolfinx.fem.FunctionSpace(mesh, element_u) V_alpha = dolfinx.fem.FunctionSpace(mesh, element_alpha) @@ -318,7 +293,6 @@ def discrete_atk(arg_N=2): u = dolfinx.fem.Function(V_u, name="Displacement") u_ = dolfinx.fem.Function(V_u, name="BoundaryDisplacement") - alpha = dolfinx.fem.Function(V_alpha, name="Damage") # Pack state @@ -339,20 +313,16 @@ def discrete_atk(arg_N=2): u_ = Function(V_u, name="Boundary Unknown") zero_u = Function(V_u, name="Boundary Unknown") - # Measures dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) # Boundary sets + dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) + dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) - dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.)) - dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 
Lx)) - - dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.)) + dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) # Boundary data @@ -369,14 +339,13 @@ def discrete_atk(arg_N=2): u_.interpolate(lambda x: np.ones_like(x[0])) for f in [zero_u, u_, alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - bc_u_left = dirichletbc( - np.array(0, dtype=PETSc.ScalarType), dofs_u_left, V_u) + bc_u_left = dirichletbc(np.array(0, dtype=PETSc.ScalarType), dofs_u_left, V_u) - bc_u_right = dirichletbc( - u_, dofs_u_right) + bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] bcs_alpha = [] @@ -388,17 +357,14 @@ def discrete_atk(arg_N=2): # mat_par = parameters.get() - def a(alpha): - k_res = parameters["model"]['k_res'] - return (1 - alpha)**2 + k_res - + k_res = parameters["model"]["k_res"] + return (1 - alpha) ** 2 + k_res def a_atk(alpha): - k_res = parameters["model"]['k_res'] - _k = parameters["model"]['k'] - return (1 - alpha) / ((_k-1) * alpha + 1) - + k_res = parameters["model"]["k_res"] + _k = parameters["model"]["k"] + return (1 - alpha) / ((_k - 1) * alpha + 1) def w(alpha): """ @@ -409,23 +375,21 @@ def w(alpha): # Return w(alpha) function return alpha - def elastic_energy_density_atk(state): """ Returns the elastic energy density from the state. """ # Parameters - _mu = parameters["model"]['mu'] - _N = parameters["model"]['N'] + _mu = parameters["model"]["mu"] + _N = parameters["model"]["N"] alpha = state["alpha"] u = state["u"] - eps = ufl.grad(u) + eps = ufl.grad(u) - energy_density = _mu / 2. * a_atk(alpha) * ufl.inner(eps, eps) + energy_density = _mu / 2.0 * a_atk(alpha) * ufl.inner(eps, eps) return energy_density - def damage_energy_density(state): """ Return the damage dissipation density from the state. 
@@ -439,11 +403,9 @@ def damage_energy_density(state): # Compute the damage gradient grad_alpha = ufl.grad(alpha) # Compute the damage dissipation density - D_d = _w1 * w(alpha) + _w1 * _ell**2 * ufl.dot( - grad_alpha, grad_alpha) + D_d = _w1 * w(alpha) + _w1 * _ell**2 * ufl.dot(grad_alpha, grad_alpha) return D_d - def stress(state): """ Return the one-dimensional stress """ u = state["u"] alpha = state["alpha"] - return parameters["model"]['mu'] * a_atk(alpha) * u.dx() * dx + return parameters["model"]["mu"] * a_atk(alpha) * u.dx() * dx - total_energy = (elastic_energy_density_atk(state) + - damage_energy_density(state)) * dx + total_energy = ( + elastic_energy_density_atk(state) + damage_energy_density(state) + ) * dx # Energy functional # f = Constant(mesh, 0) @@ -463,22 +426,18 @@ def stress(state): external_work = f * state["u"] * dx load_par = parameters["loading"] - loads = np.linspace(load_par["min"], - load_par["max"], load_par["steps"]) + loads = np.linspace(load_par["min"], load_par["max"], load_par["steps"]) solver = _AlternateMinimisation( total_energy, state, bcs, parameters.get("solvers"), bounds=(alpha_lb, alpha_ub) ) - stability = BifurcationSolver( total_energy, state, bcs, stability_parameters=parameters.get("stability") ) - cone = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { @@ -502,8 +461,9 @@ def stress(state): for i_t, t in enumerate(loads): u_.interpolate(lambda x: t * np.ones_like(x[0])) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + u_.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) # update the lower bound alpha.vector.copy(alpha_lb.vector) @@ -536,12 +496,12 @@ def stress(state): assemble_scalar(form(elastic_energy_density_atk(state) * dx)), op=MPI.SUM, ) - _F = assemble_scalar( form(stress(state)) ) - + _F = assemble_scalar(form(stress(state))) + history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(solver.data) history_data["cone_data"].append(cone.data) history_data["eigs"].append(stability.data["eigs"]) @@ -550,11 +510,13 @@ def stress(state): history_data["F"].append(_F) history_data["alpha_t"].append(state["alpha"].vector.array.tolist()) history_data["u_t"].append(state["u"].vector.array.tolist()) - + logging.critical(f"u_t {u.vector.array}") logging.critical(f"u_t norm {state['u'].vector.norm()}") - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) @@ -569,20 +531,20 @@ def stress(state): df = pd.DataFrame(history_data) print(df) - return history_data, prefix, _nameExp + def postprocess(history_data, prefix, nameExp): """docstring for postprocess""" - - from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement + from utils.plots import plot_AMit_load, plot_energies, plot_force_displacement if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{nameExp}_energies.pdf") plot_AMit_load(history_data,
file=f"{prefix}/{nameExp}_it_load.pdf") - plot_force_displacement(history_data, file=f"{prefix}/{nameExp}_stress-load.pdf") - + plot_force_displacement( + history_data, file=f"{prefix}/{nameExp}_stress-load.pdf" + ) # Viz @@ -590,9 +552,8 @@ def postprocess(history_data, prefix, nameExp): if __name__ == "__main__": import argparse - parser = argparse.ArgumentParser(description='Process evolution.') - parser.add_argument('-N', type=int, default=2, - help='Number of elements') + parser = argparse.ArgumentParser(description="Process evolution.") + parser.add_argument("-N", type=int, default=2, help="Number of elements") args = parser.parse_args() # print() @@ -600,10 +561,10 @@ def postprocess(history_data, prefix, nameExp): # __import__('pdb').set_trace() history_data, prefix, name = discrete_atk(args.N) - logging.info(f'Output in {prefix}') + logging.info(f"Output in {prefix}") postprocess(history_data, prefix, name) - logging.info(f'Output in {prefix}') + logging.info(f"Output in {prefix}") else: - print("File executed when imported") \ No newline at end of file + print("File executed when imported") diff --git a/src/irrevolutions/practice/discrete_atk_homogeneous.py b/src/irrevolutions/practice/discrete_atk_homogeneous.py index 0848e5fe..94b519d0 100644 --- a/src/irrevolutions/practice/discrete_atk_homogeneous.py +++ b/src/irrevolutions/practice/discrete_atk_homogeneous.py @@ -1,55 +1,39 @@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml import json -from pathlib import Path -import sys +import logging import os +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import petsc4py +import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, - FunctionSpace, assemble_scalar, dirichletbc, form, locate_dofs_geometrical, set_bc, ) +from dolfinx.fem.petsc import assemble_vector +from dolfinx.io import XDMFFile from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl - -from dolfinx.fem.petsc import ( - set_bc, - assemble_vector - ) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType sys.path.append("../") from algorithms.so import BifurcationSolver, StabilitySolver +from irrevolutions.utils import ColorPrint, norm_H1, norm_L2 from solvers import SNESSolver -from meshes.primitives import mesh_bar_gmshapi -from irrevolutions.utils import ColorPrint -from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 from utils.viz import plot_matrix - - - sys.path.append("../") @@ -65,20 +49,20 @@ """ -from solvers.function import functions_to_vec logging.getLogger().setLevel(logging.CRITICAL) comm = MPI.COMM_WORLD + class _AlternateMinimisation: - def __init__(self, - total_energy, - state, - bcs, - solver_parameters={}, - bounds=(dolfinx.fem.function.Function, - dolfinx.fem.function.Function) - ): + def __init__( + self, + total_energy, + state, + bcs, + solver_parameters={}, + bounds=(dolfinx.fem.function.Function, dolfinx.fem.function.Function), + ): self.state = state self.alpha = state["alpha"] self.alpha_old = dolfinx.fem.function.Function(self.alpha.function_space) @@ -91,8 +75,7 @@ def __init__(self, V_u = 
state["u"].function_space V_alpha = state["alpha"].function_space - energy_u = ufl.derivative( - self.total_energy, self.u, ufl.TestFunction(V_u)) + energy_u = ufl.derivative(self.total_energy, self.u, ufl.TestFunction(V_u)) energy_alpha = ufl.derivative( self.total_energy, self.alpha, ufl.TestFunction(V_alpha) ) @@ -118,7 +101,6 @@ def __init__(self, ) def solve(self, outdir=None): - alpha_diff = dolfinx.fem.Function(self.alpha.function_space) self.data = { @@ -155,10 +137,7 @@ def solve(self, outdir=None): Fv = [assemble_vector(form(F)) for F in self.F] Fnorm = np.sqrt( - np.array( - [comm.allreduce(Fvi.norm(), op=MPI.SUM) - for Fvi in Fv] - ).sum() + np.array([comm.allreduce(Fvi.norm(), op=MPI.SUM) for Fvi in Fv]).sum() ) error_alpha_max = alpha_diff.vector.max()[1] @@ -205,10 +184,8 @@ def solve(self, outdir=None): self.data["solver_u_it"].append(solver_u_it) self.data["total_energy"].append(total_energy_int) - if ( - self.solver_parameters.get( - "damage_elasticity").get("criterion") + self.solver_parameters.get("damage_elasticity").get("criterion") == "residual_u" ): if error_residual_F <= self.solver_parameters.get( @@ -216,8 +193,7 @@ def solve(self, outdir=None): ).get("alpha_rtol"): break if ( - self.solver_parameters.get( - "damage_elasticity").get("criterion") + self.solver_parameters.get("damage_elasticity").get("criterion") == "alpha_H1" ): if error_alpha_H1 <= self.solver_parameters.get( @@ -232,12 +208,11 @@ def solve(self, outdir=None): petsc4py.init(sys.argv) -def discrete_atk(arg_N=2): +def discrete_atk(arg_N=2): # Mesh on node model_rank and then distribute model_rank = 0 - with open("./parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) @@ -245,14 +220,14 @@ def discrete_atk(arg_N=2): # parameters["cone"]["atol"] = 1e-7 parameters["model"]["model_dimension"] = 1 - parameters["model"]["model_type"] = '1D' + parameters["model"]["model_type"] = "1D" parameters["model"]["mu"] = 1 parameters["model"]["w1"] = 2 parameters["model"]["k_res"] = 1e-4 parameters["model"]["k"] = 4 parameters["model"]["N"] = arg_N # parameters["loading"]["max"] = 2. 
- parameters["loading"]["max"] = parameters["model"]["k"] + parameters["loading"]["max"] = parameters["model"]["k"] parameters["loading"]["steps"] = 30 parameters["geometry"]["geom_type"] = "discrete-damageable" @@ -269,15 +244,17 @@ def discrete_atk(arg_N=2): geom_type = parameters["geometry"]["geom_type"] _N = parameters["model"]["N"] - # Create the mesh of the specimen with given dimensions mesh = dolfinx.mesh.create_unit_interval(MPI.COMM_WORLD, _N) import hashlib - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() outdir = os.path.join(os.path.dirname(__file__), "output") - prefix = os.path.join(outdir, f"discrete-atk-N{parameters['model']['N']}-homogeneous") + prefix = os.path.join( + outdir, f"discrete-atk-N{parameters['model']['N']}-homogeneous" + ) if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) @@ -286,25 +263,25 @@ def discrete_atk(arg_N=2): if comm.rank == 0: Path(_crunchdir).mkdir(parents=True, exist_ok=True) - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) - with open(f"{_crunchdir}/{signature}.md5", 'w') as f: - f.write('') + with open(f"{_crunchdir}/{signature}.md5", "w") as f: + f.write("") - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_mesh(mesh) # Functional Setting - element_u = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), - degree=1) + element_u = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), degree=1) - element_alpha = ufl.FiniteElement("DG", mesh.ufl_cell(), - degree=0) + element_alpha = ufl.FiniteElement("DG", mesh.ufl_cell(), degree=0) V_u = dolfinx.fem.FunctionSpace(mesh, element_u) V_alpha = dolfinx.fem.FunctionSpace(mesh, element_alpha) @@ -312,7 +289,6 @@ def discrete_atk(arg_N=2): u = dolfinx.fem.Function(V_u, name="Displacement") u_ = dolfinx.fem.Function(V_u, name="BoundaryDisplacement") - alpha = dolfinx.fem.Function(V_alpha, name="Damage") # Pack state @@ -333,20 +309,16 @@ def discrete_atk(arg_N=2): u_ = Function(V_u, name="Boundary Unknown") zero_u = Function(V_u, name="Boundary Unknown") - # Measures dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) # Boundary sets + dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) + dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) - dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.)) - dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) - - dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.)) + dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) # Boundary data @@ -363,14 +335,13 @@ def discrete_atk(arg_N=2): u_.interpolate(lambda x: np.ones_like(x[0])) for f in [zero_u, u_, alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - bc_u_left = dirichletbc( - np.array(0, dtype=PETSc.ScalarType), dofs_u_left, V_u) + 
bc_u_left = dirichletbc(np.array(0, dtype=PETSc.ScalarType), dofs_u_left, V_u) - bc_u_right = dirichletbc( - u_, dofs_u_right) + bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] bcs_alpha = [] @@ -382,17 +353,14 @@ def discrete_atk(arg_N=2): # mat_par = parameters.get() - def a(alpha): - k_res = parameters["model"]['k_res'] - return (1 - alpha)**2 + k_res - + k_res = parameters["model"]["k_res"] + return (1 - alpha) ** 2 + k_res def a_atk(alpha): - k_res = parameters["model"]['k_res'] - _k = parameters["model"]['k'] - return (1 - alpha) / ((_k-1) * alpha + 1) - + k_res = parameters["model"]["k_res"] + _k = parameters["model"]["k"] + return (1 - alpha) / ((_k - 1) * alpha + 1) def w(alpha): """ @@ -403,23 +371,21 @@ def w(alpha): # Return w(alpha) function return alpha - def elastic_energy_density_atk(state): """ Returns the elastic energy density from the state. """ # Parameters - _mu = parameters["model"]['mu'] - _N = parameters["model"]['N'] + _mu = parameters["model"]["mu"] + _N = parameters["model"]["N"] alpha = state["alpha"] u = state["u"] - eps = ufl.grad(u) + eps = ufl.grad(u) - energy_density = _mu / 2. * a_atk(alpha) * ufl.inner(eps, eps) + energy_density = _mu / 2.0 * a_atk(alpha) * ufl.inner(eps, eps) return energy_density - def damage_energy_density(state): """ Return the damage dissipation density from the state. @@ -433,11 +399,9 @@ def damage_energy_density(state): # Compute the damage gradient grad_alpha = ufl.grad(alpha) # Compute the damage dissipation density - D_d = _w1 * w(alpha) + _w1 * _ell**2 * ufl.dot( - grad_alpha, grad_alpha) + D_d = _w1 * w(alpha) + _w1 * _ell**2 * ufl.dot(grad_alpha, grad_alpha) return D_d - def stress(state): """ Return the one-dimensional stress @@ -445,10 +409,11 @@ def stress(state): u = state["u"] alpha = state["alpha"] - return parameters["model"]['mu'] * a_atk(alpha) * u.dx() * dx + return parameters["model"]["mu"] * a_atk(alpha) * u.dx() * dx - total_energy = (elastic_energy_density_atk(state) + - damage_energy_density(state)) * dx + total_energy = ( + elastic_energy_density_atk(state) + damage_energy_density(state) + ) * dx # Energy functional # f = Constant(mesh, 0) @@ -457,22 +422,18 @@ def stress(state): external_work = f * state["u"] * dx load_par = parameters["loading"] - loads = np.linspace(load_par["min"], - load_par["max"], load_par["steps"]) + loads = np.linspace(load_par["min"], load_par["max"], load_par["steps"]) solver = _AlternateMinimisation( total_energy, state, bcs, parameters.get("solvers"), bounds=(alpha_lb, alpha_ub) ) - stability = BifurcationSolver( total_energy, state, bcs, stability_parameters=parameters.get("stability") ) - cone = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { @@ -493,11 +454,11 @@ def stress(state): def _critical_load(matpar): _mu, _k, _w1, _N = matpar["mu"], matpar["k"], matpar["w1"], matpar["N"] - return np.sqrt(8*_w1 / (_mu*_k)/4) + return np.sqrt(8 * _w1 / (_mu * _k) / 4) def _homogeneous_state(state, t, matpar): """docstring for _homogeneous_state""" - + _u = state["u"] _alpha = state["alpha"] _mu, _k, _w1, _N = matpar["mu"], matpar["k"], matpar["w1"], matpar["N"] @@ -509,26 +470,24 @@ def _homogeneous_state(state, t, matpar): if t <= _tc: # elastic - _alphah = [0. 
for i in range(0, _N)] - _uh = [i*t/_N for i in range(0, _N+1)] - else: + _alphah = [0.0 for i in range(0, _N)] + _uh = [i * t / _N for i in range(0, _N + 1)] + else: # damaging - _α = (t/_tc - 1) / (_k - 1) + _α = (t / _tc - 1) / (_k - 1) _alphah = [_α for i in range(0, _N)] - _e = t/_N - _uh = [_e * i for i in range(0, _N+1)] + _e = t / _N + _uh = [_e * i for i in range(0, _N + 1)] _alpha.vector[:] = _alphah _u.vector[:] = _uh - import scipy - for i_t, t in enumerate(loads): logging.critical(f"-- Solving for t = {t:3.2f} --") logging.basicConfig(level=logging.DEBUG) # homogeneous solution - _homogeneous_state(state, t, parameters["model"]) + _homogeneous_state(state, t, parameters["model"]) # n_eigenvalues = 10 is_stable = stability.solve(alpha_lb) @@ -556,7 +515,6 @@ def _homogeneous_state(state, t, matpar): # __import__('pdb').set_trace() _fig.savefig(f"{prefix}/mat-rA-{cone.eigen.eps.getOptionsPrefix()}-{i_t}.png") - fracture_energy = comm.allreduce( assemble_scalar(form(damage_energy_density(state) * dx)), op=MPI.SUM, @@ -565,12 +523,12 @@ def _homogeneous_state(state, t, matpar): assemble_scalar(form(elastic_energy_density_atk(state) * dx)), op=MPI.SUM, ) - _F = assemble_scalar( form(stress(state)) ) - + _F = assemble_scalar(form(stress(state))) + history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) # history_data["solver_data"].append(solver.data) history_data["cone_data"].append(cone.data) history_data["eigs"].append(stability.data["eigs"]) @@ -579,11 +537,13 @@ def _homogeneous_state(state, t, matpar): history_data["F"].append(_F) history_data["alpha_t"].append(state["alpha"].vector.array.tolist()) history_data["u_t"].append(state["u"].vector.array.tolist()) - + logging.critical(f"u_t {u.vector.array}") logging.critical(f"u_t norm {state['u'].vector.norm()}") - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) @@ -598,20 +558,20 @@ def _homogeneous_state(state, t, matpar): df = pd.DataFrame(history_data) print(df) - return history_data, prefix, _nameExp + def postprocess(history_data, prefix, nameExp): """docstring for postprocess""" - - from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement + from utils.plots import plot_energies, plot_force_displacement if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{nameExp}_energies.pdf") # plot_AMit_load(history_data, file=f"{prefix}/{nameExp}_it_load.pdf") - plot_force_displacement(history_data, file=f"{prefix}/{nameExp}_stress-load.pdf") - + plot_force_displacement( + history_data, file=f"{prefix}/{nameExp}_stress-load.pdf" + ) # Viz @@ -619,19 +579,18 @@ def postprocess(history_data, prefix, nameExp): if __name__ == "__main__": import argparse - parser = argparse.ArgumentParser(description='Process evolution.') - parser.add_argument('-N', type=int, default=2, - help='Number of elements') + parser = argparse.ArgumentParser(description="Process evolution.") + parser.add_argument("-N", type=int, default=2, help="Number of elements") args = parser.parse_args() # print() history_data, prefix, name = discrete_atk(args.N) - logging.info(f'Output in {prefix}') - __import__('pdb').set_trace() +
logging.info(f"Output in {prefix}") + __import__("pdb").set_trace() postprocess(history_data, prefix, name) - logging.info(f'Output in {prefix}') + logging.info(f"Output in {prefix}") else: - print("File executed when imported") \ No newline at end of file + print("File executed when imported") diff --git a/src/irrevolutions/practice/enpassant.py b/src/irrevolutions/practice/enpassant.py index f5dff8ec..579ff496 100644 --- a/src/irrevolutions/practice/enpassant.py +++ b/src/irrevolutions/practice/enpassant.py @@ -9,50 +9,30 @@ To change the data, change the geometry files according to presentation """ + import logging import sys -sys.path.append('../') -import os -import pyvista -from utils.viz import plot_mesh -from utils.viz import plot_mesh, plot_vector, plot_scalar -from irrevolutions.utils import viz -from meshes import primitives -import meshes -from pyvista.utilities import xvfb + +sys.path.append("../") +import dolfinx +import dolfinx.io +import dolfinx.plot import matplotlib.pyplot as plt +import meshes +import numpy as np +import pyvista +import ufl +from algorithms import am from dolfinx.fem import ( - Constant, - Function, - FunctionSpace, assemble_scalar, dirichletbc, - form, locate_dofs_geometrical, - set_bc, ) -import dolfinx.io -import numpy as np -import yaml -import json - -from pathlib import Path - -from mpi4py import MPI - -import petsc4py -from petsc4py import PETSc - -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl -import models +from meshes import primitives from models import DamageElasticityModel as Brittle -import algorithms -from algorithms import am - -from dolfinx.io import XDMFFile +from petsc4py import PETSc +from pyvista.utilities import xvfb +from utils.viz import plot_mesh, plot_scalar, plot_vector logging.basicConfig() # logging.getLogger().setLevel(logging.DEBUG) @@ -63,7 +43,6 @@ logging.basicConfig(level=logging.INFO) - # meshes # visualisation @@ -71,58 +50,48 @@ # Parameters parameters = { - 'loading': { - 'min': 0.0, - 'max': 50, - 'steps': 100 - }, - 'geometry': { - 'geom_type': 'bar', - 'Lx': 100, - 'Ly': 200, - 'L0': 15, - 's': 5, - }, - 'model': { - 'E': 1E-1, - 'nu': .4, - 'w1': 1., - 'ell': 2.8571, - 'k_res': 1.e-8 + "loading": {"min": 0.0, "max": 50, "steps": 100}, + "geometry": { + "geom_type": "bar", + "Lx": 100, + "Ly": 200, + "L0": 15, + "s": 5, }, - 'solvers': { - 'elasticity': { - 'snes': { - 'snes_type': 'newtontr', - 'snes_stol': 1e-8, - 'snes_atol': 1e-8, - 'snes_rtol': 1e-8, - 'snes_max_it': 250, - 'snes_monitor': "", - 'ksp_type': 'preonly', - 'pc_type': 'lu', - 'pc_factor_mat_solver_type': 'mumps' + "model": {"E": 1e-1, "nu": 0.4, "w1": 1.0, "ell": 2.8571, "k_res": 1.0e-8}, + "solvers": { + "elasticity": { + "snes": { + "snes_type": "newtontr", + "snes_stol": 1e-8, + "snes_atol": 1e-8, + "snes_rtol": 1e-8, + "snes_max_it": 250, + "snes_monitor": "", + "ksp_type": "preonly", + "pc_type": "lu", + "pc_factor_mat_solver_type": "mumps", } }, - 'damage': { - 'snes': { - 'snes_type': 'vinewtonrsls', - 'snes_stol': 1e-5, - 'snes_atol': 1e-5, - 'snes_rtol': 1e-8, - 'snes_max_it': 100, - 'snes_monitor': "", - 'ksp_type': 'preonly', - 'pc_type': 'lu', - 'pc_factor_mat_solver_type': 'mumps' + "damage": { + "snes": { + "snes_type": "vinewtonrsls", + "snes_stol": 1e-5, + "snes_atol": 1e-5, + "snes_rtol": 1e-8, + "snes_max_it": 100, + "snes_monitor": "", + "ksp_type": "preonly", + "pc_type": "lu", + "pc_factor_mat_solver_type": "mumps", }, }, - 'damage_elasticity': { + "damage_elasticity": { "max_it": 2000, 
"alpha_rtol": 1.0e-4, - "criterion": "alpha_H1" - } - } + "criterion": "alpha_H1", + }, + }, } # Mesh @@ -132,16 +101,17 @@ s = parameters["geometry"]["s"] geom_type = parameters["geometry"]["geom_type"] -gmsh_model, tdim = primitives.mesh_ep_gmshapi(geom_type, - Lx, - Ly, - L0, - s, - parameters["model"]["ell"]/5, - tdim=2, - sep=3E-1, - _n=5, - ) +gmsh_model, tdim = primitives.mesh_ep_gmshapi( + geom_type, + Lx, + Ly, + L0, + s, + parameters["model"]["ell"] / 5, + tdim=2, + sep=3e-1, + _n=5, +) """gmsh_model, tdim = primitives.mesh_bar_gmshapi(geom_type, Lx, @@ -149,29 +119,28 @@ parameters.get("model").get("ell")/3, tdim=2)""" -mesh, mts = meshes.gmsh_model_to_mesh(gmsh_model, - cell_data=False, - facet_data=True, - gdim=2) +mesh, mts = meshes.gmsh_model_to_mesh( + gmsh_model, cell_data=False, facet_data=True, gdim=2 +) plt.figure() ax = plot_mesh(mesh) fig = ax.get_figure() -fig.savefig(f"mesh.png") +fig.savefig("mesh.png") mesh.topology.create_entities(tdim - 1) def left_corner(x): - return np.logical_and(x[0] < Lx/4, x[1] < Ly/2) + return np.logical_and(x[0] < Lx / 4, x[1] < Ly / 2) def middle_area(x): - return np.logical_and(x[1] < Ly/2+s, x[1] > Ly/2-s) + return np.logical_and(x[1] < Ly / 2 + s, x[1] > Ly / 2 - s) -edges = dolfinx.mesh.locate_entities(mesh, tdim-1, middle_area) +edges = dolfinx.mesh.locate_entities(mesh, tdim - 1, middle_area) mesh_refined_local2 = dolfinx.mesh.refine(mesh, edges, redistribute=False) """ @@ -182,17 +151,15 @@ def middle_area(x): plt.figure() ax = plot_mesh(mesh_refined_local2) fig = ax.get_figure() -fig.savefig(f"mesh_refined_local_bulk.png") +fig.savefig("mesh_refined_local_bulk.png") # Enables opportunity to improve mesh at specific places, might lead to deformed elements mesh = mesh_refined_local2 # Functional Setting -element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), - degree=1, dim=2) +element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=2) -element_alpha = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), - degree=1) +element_alpha = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), degree=1) V_u = dolfinx.fem.FunctionSpace(mesh, element_u) V_alpha = dolfinx.fem.FunctionSpace(mesh, element_alpha) @@ -203,9 +170,9 @@ def middle_area(x): _tot_dofs = 0 for _V in [V_u, V_alpha]: - _tot_dofs += _V.dofmap.index_map.size_global*_V.dofmap.index_map_bs + _tot_dofs += _V.dofmap.index_map.size_global * _V.dofmap.index_map_bs -logging.critical(f'Total number of dofs={_tot_dofs:.1e}') +logging.critical(f"Total number of dofs={_tot_dofs:.1e}") alpha = dolfinx.fem.Function(V_alpha, name="Damage") @@ -222,17 +189,13 @@ def middle_area(x): # Boundary sets -dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.)) -dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) -dofs_alpha_bottom = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[1], 0.)) -dofs_alpha_top = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[1], Lx)) +dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) +dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) +dofs_alpha_bottom = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[1], 0.0)) +dofs_alpha_top = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[1], Lx)) -dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.)) +dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: 
np.isclose(x[0], Lx)) dofs_u_top = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[1], Ly)) dofs_u_bottom = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[1], 0)) @@ -246,16 +209,16 @@ def middle_area(x): # Boundary conditions bcs_u = [ - dirichletbc(np.array([0., 0.], dtype=PETSc.ScalarType), - dofs_u_bottom, - V_u), - dirichletbc(u_, dofs_u_top) + dirichletbc(np.array([0.0, 0.0], dtype=PETSc.ScalarType), dofs_u_bottom, V_u), + dirichletbc(u_, dofs_u_top), ] bcs_alpha = [ - dirichletbc(np.array(0., dtype=PETSc.ScalarType), - np.concatenate([dofs_alpha_bottom, dofs_alpha_top]), - V_alpha) + dirichletbc( + np.array(0.0, dtype=PETSc.ScalarType), + np.concatenate([dofs_alpha_bottom, dofs_alpha_top]), + V_alpha, + ) ] bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} @@ -264,30 +227,29 @@ def middle_area(x): total_energy = model.total_energy_density(state) * dx + def monitor(snes, its, fgnorm): - if(its%10==0): + if its % 10 == 0: print(f"Iteration {its:d}, error: {fgnorm:2.3e}") -solver = am.AlternateMinimisation(total_energy, - state, - bcs, - parameters.get("solvers"), - bounds=(alpha_lb, alpha_ub), - # monitor=monitor - ) +solver = am.AlternateMinimisation( + total_energy, + state, + bcs, + parameters.get("solvers"), + bounds=(alpha_lb, alpha_ub), + # monitor=monitor +) # Loop for evolution -loads = np.linspace(parameters.get("loading").get("min"), - parameters.get("loading").get("max"), - parameters.get("loading").get("steps")) - -data = { - 'elastic': [], - 'surface': [], - 'total': [], - 'load': [] -} +loads = np.linspace( + parameters.get("loading").get("min"), + parameters.get("loading").get("max"), + parameters.get("loading").get("steps"), +) + +data = {"elastic": [], "surface": [], "total": [], "load": []} xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True plotter = pyvista.Plotter( @@ -297,73 +259,79 @@ def monitor(snes, its, fgnorm): ) logging.getLogger().setLevel(logging.INFO) -for (i_t, t) in enumerate(loads): - # update boundary conditions - - u_.interpolate(lambda x: (np.zeros_like(x[0]), t*np.ones_like(x[1]))) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) - - # update lower bound for damage - alpha.vector.copy(alpha_lb.vector) - alpha.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) - - # solve for current load step - logging.info(f"Solving timestep {i_t}, load: {t}") - - solver.solve(outdir = 'output/') - - # postprocessing - # global - - surface_energy = assemble_scalar(dolfinx.fem.form( - model.damage_energy_density(state) * dx)) - - elastic_energy = assemble_scalar( - dolfinx.fem.form(model.elastic_energy_density(state) * dx)) - - data.get('elastic').append(elastic_energy) - data.get('surface').append(surface_energy) - data.get('total').append(surface_energy+elastic_energy) - data.get('load').append(t) - -# print(f"Solved timestep {i_t}, load: {t}") - print( - f"Elastic Energy {elastic_energy:.3g}, Surface energy: {surface_energy:.3g}") - print("\n\n") - if generateStepwiseOutput: - if(surface_energy > .1 and i_t % 25 == 0): - #if(i_t>1050 and i_t<1100): - _plt = plot_scalar(alpha, plotter) - _plt.screenshot(f"./plots/s05_fine/alpha"+str(i_t)+".png") - #if i_t>1100: - # break - if(i_t > 20 and elastic_energy < 1E-3 and elastic_energy < surface_energy): - #Brute force approach to determine, whether we have already reached a crack propagation state - break - - # savings? 
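# Illustrative sketch (not part of the patch): the reformatted loop below enforces
# irreversibility of damage by copying the current field into the lower bound
# `alpha_lb` before each alternate-minimisation solve, so alpha can only grow from
# one load step to the next. A self-contained toy with numpy, using a hypothetical
# placeholder `solve_step` in place of am.AlternateMinimisation, shows the pattern:
import numpy as np

def solve_step(alpha_lb, t):
    # stand-in for the bounded damage solve; any admissible update must respect the
    # lower bound, which np.maximum guarantees in this toy
    trial = np.full_like(alpha_lb, min(1.0, 0.01 * t))
    return np.maximum(trial, alpha_lb)

alpha = np.zeros(5)              # toy damage field with 5 dofs
for t in np.linspace(0.0, 50.0, 6):
    alpha_lb = alpha.copy()      # previous state becomes the irreversibility bound
    alpha = solve_step(alpha_lb, t)
    assert (alpha >= alpha_lb).all()  # damage is non-decreasing in time

# In the script itself the bound is passed as bounds=(alpha_lb, alpha_ub) to
# am.AlternateMinimisation and enforced by the 'vinewtonrsls' SNES configured above.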
+for i_t, t in enumerate(loads): + # update boundary conditions + + u_.interpolate(lambda x: (np.zeros_like(x[0]), t * np.ones_like(x[1]))) + u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) + + # update lower bound for damage + alpha.vector.copy(alpha_lb.vector) + alpha.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) + + # solve for current load step + logging.info(f"Solving timestep {i_t}, load: {t}") + + solver.solve(outdir="output/") + + # postprocessing + # global + + surface_energy = assemble_scalar( + dolfinx.fem.form(model.damage_energy_density(state) * dx) + ) + + elastic_energy = assemble_scalar( + dolfinx.fem.form(model.elastic_energy_density(state) * dx) + ) + + data.get("elastic").append(elastic_energy) + data.get("surface").append(surface_energy) + data.get("total").append(surface_energy + elastic_energy) + data.get("load").append(t) + + # print(f"Solved timestep {i_t}, load: {t}") + print(f"Elastic Energy {elastic_energy:.3g}, Surface energy: {surface_energy:.3g}") + print("\n\n") + if generateStepwiseOutput: + if surface_energy > 0.1 and i_t % 25 == 0: + # if(i_t>1050 and i_t<1100): + _plt = plot_scalar(alpha, plotter) + _plt.screenshot("./plots/s05_fine/alpha" + str(i_t) + ".png") + # if i_t>1100: + # break + if i_t > 20 and elastic_energy < 1e-3 and elastic_energy < surface_energy: + # Brute force approach to determine, whether we have already reached a crack propagation state + break + + # savings? plt.figure() -plt.plot(data.get('load'), data.get('surface'), label='surface') -plt.plot(data.get('load'), data.get('elastic'), label='elastic') -plt.plot(data.get('load'), [ - 1./2. * t**2*Lx for t in data.get('load')], label='anal elast', ls=':', c='k') +plt.plot(data.get("load"), data.get("surface"), label="surface") +plt.plot(data.get("load"), data.get("elastic"), label="elastic") +plt.plot( + data.get("load"), + [1.0 / 2.0 * t**2 * Lx for t in data.get("load")], + label="anal elast", + ls=":", + c="k", +) -plt.title('Traction bar energetics') +plt.title("Traction bar energetics") plt.legend() -plt.yticks([0, 1/20], [0, '$1/2.\sigma_c^2/E_0$']) +plt.yticks([0, 1 / 20], [0, "$1/2.\sigma_c^2/E_0$"]) plt.xticks([0, 1], [0, 1]) plt.savefig("energetics.png") # savings? 
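# Note on the plot above: the dotted curve labelled 'anal elast' is the analytical
# elastic energy of an undamaged bar, plotted as 1/2 * t**2 * Lx, and serves as a
# visual reference for when the computed elastic energy departs from a purely
# elastic response (its normalisation of the modulus and bar height is assumed here,
# not taken from `parameters`). The loop above breaks, as a brute-force criterion,
# once the elastic energy drops below a small tolerance and below the surface
# energy, i.e. once the specimen has effectively cracked through.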
plt.figure() -plt.plot(data.get('load')[:-40], data.get('surface')[:-40], label='surface') -plt.plot(data.get('load')[:-40], data.get('elastic')[:-40], label='elastic') +plt.plot(data.get("load")[:-40], data.get("surface")[:-40], label="surface") +plt.plot(data.get("load")[:-40], data.get("elastic")[:-40], label="elastic") -plt.title('Last steps') +plt.title("Last steps") plt.legend() -plt.yticks([0, 1/20], [0, '$1/2.\sigma_c^2/E_0$']) +plt.yticks([0, 1 / 20], [0, "$1/2.\sigma_c^2/E_0$"]) plt.xticks([0, 1], [0, 1]) plt.savefig("lastSteps.png") @@ -376,7 +344,7 @@ def monitor(snes, its, fgnorm): # _plt = plot_scalar(u_.sub(0), plotter, subplot=(0, 0)) _plt = plot_vector(u, plotter, subplot=(0, 1)) -_plt.screenshot(f"displacement_MPI.png") +_plt.screenshot("displacement_MPI.png") xvfb.start_xvfb(wait=0.05) @@ -389,4 +357,4 @@ def monitor(snes, its, fgnorm): ) _plt = plot_scalar(alpha, plotter, subplot=(0, 0)) -_plt.screenshot(f"alpha2.png") +_plt.screenshot("alpha2.png") diff --git a/src/irrevolutions/practice/multiaxial-disc.py b/src/irrevolutions/practice/multiaxial-disc.py index a80e1c21..cada246c 100644 --- a/src/irrevolutions/practice/multiaxial-disc.py +++ b/src/irrevolutions/practice/multiaxial-disc.py @@ -1,77 +1,40 @@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml -import json -from pathlib import Path -import sys +import logging import os -import matplotlib.pyplot as plt +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType -import dolfinx.mesh -from dolfinx.fem import ( - Constant, - Function, - FunctionSpace, - assemble_scalar, - dirichletbc, - form, - locate_dofs_geometrical, - set_bc, -) - -import pyvista -from pyvista.utilities import xvfb -# -from mpi4py import MPI -import petsc4py -from petsc4py import PETSc import dolfinx +import dolfinx.mesh import dolfinx.plot -from dolfinx import log -import ufl -from dolfinx.mesh import locate_entities_boundary, CellType, create_rectangle -from dolfinx.fem import locate_dofs_topological +import pandas as pd +import yaml +from dolfinx.common import list_timings -from dolfinx.fem.petsc import ( - set_bc, - ) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType +# +from mpi4py import MPI +from petsc4py import PETSc sys.path.append("../") -from models import DamageElasticityModel as Brittle -from algorithms.am import AlternateMinimisation, HybridSolver -from algorithms.so import BifurcationSolver, StabilitySolver -from meshes.primitives import mesh_bar_gmshapi -from irrevolutions.utils import ColorPrint -from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 -from meshes.pacman import mesh_pacman -from utils.viz import plot_mesh, plot_vector, plot_scalar -from utils.lib import _local_notch_asymptotic -logging.basicConfig(level=logging.DEBUG) +logging.basicConfig(level=logging.DEBUG) # ------------------------------------------------------------------ class ConvergenceError(Exception): """Error raised when a solver fails to converge""" + def _make_reasons(reasons): return dict( - [(getattr(reasons, r), r) - for r in dir(reasons) if not r.startswith("_")] + [(getattr(reasons, r), r) for r in dir(reasons) if not r.startswith("_")] ) + SNESReasons = _make_reasons(PETSc.SNES.ConvergedReason()) KSPReasons = _make_reasons(PETSc.KSP.ConvergedReason()) + def check_snes_convergence(snes): r = 
snes.getConvergedReason() try: @@ -114,16 +77,16 @@ def check_snes_convergence(snes): def multiaxial_disc(nest): - """Testing nucleation for for a multiaxial disc, + """Testing nucleation for for a multiaxial disc, thanks to: Camilla Zolesi""" - # parameters: INPUT + # parameters: INPUT model_rank = 0 with open("../test/parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - # history_data: OUTPUT + # history_data: OUTPUT history_data = { "load": [], @@ -137,10 +100,10 @@ def multiaxial_disc(nest): "uniqueness": [], "inertia": [], "stable": [], - "alphadot_norm" : [], - "rate_12_norm" : [], - "unscaled_rate_12_norm" : [], - "cone-stable": [] + "alphadot_norm": [], + "rate_12_norm": [], + "unscaled_rate_12_norm": [], + "cone-stable": [], } # generate mesh @@ -158,13 +121,12 @@ def multiaxial_disc(nest): # postprocessing - return history_data + if __name__ == "__main__": history_data = multiaxial_disc(nest=False) list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) - - df = pd.DataFrame(history_data) - print(df.drop(['solver_data', 'cone_data'], axis=1)) + df = pd.DataFrame(history_data) + print(df.drop(["solver_data", "cone_data"], axis=1)) diff --git a/src/irrevolutions/practice/pacman-cone.py b/src/irrevolutions/practice/pacman-cone.py index ad509d9b..0cb6131b 100644 --- a/src/irrevolutions/practice/pacman-cone.py +++ b/src/irrevolutions/practice/pacman-cone.py @@ -1,18 +1,21 @@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml +import hashlib import json -from pathlib import Path -import sys +import logging import os -import matplotlib.pyplot as plt +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType +import dolfinx import dolfinx.mesh +import dolfinx.plot +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import pyvista +import ufl +import yaml +from dolfinx.common import list_timings, timing from dolfinx.fem import ( Constant, Function, @@ -20,58 +23,45 @@ assemble_scalar, dirichletbc, form, - locate_dofs_geometrical, + locate_dofs_topological, set_bc, ) +from dolfinx.io import XDMFFile, gmshio +from dolfinx.mesh import locate_entities_boundary -import pyvista -from pyvista.utilities import xvfb -# +# from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl -from dolfinx.mesh import locate_entities_boundary, CellType, create_rectangle -from dolfinx.fem import locate_dofs_topological -import hashlib - -from dolfinx.fem.petsc import ( - set_bc, - ) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType, timing +from pyvista.utilities import xvfb sys.path.append("../") -from models import DamageElasticityModel as Brittle from algorithms.am import AlternateMinimisation, HybridSolver from algorithms.so import BifurcationSolver, StabilitySolver -from meshes.primitives import mesh_bar_gmshapi from irrevolutions.utils import ColorPrint -from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 from meshes.pacman import mesh_pacman -from utils.viz import plot_mesh, plot_vector, plot_scalar +from models import DamageElasticityModel as Brittle from utils.lib import _local_notch_asymptotic +from utils.viz import plot_mesh, plot_scalar, plot_vector + logging.basicConfig(level=logging.DEBUG) logging.logMultiprocessing = 
False + + class ConvergenceError(Exception): """Error raised when a solver fails to converge""" + def _make_reasons(reasons): return dict( - [(getattr(reasons, r), r) - for r in dir(reasons) if not r.startswith("_")] + [(getattr(reasons, r), r) for r in dir(reasons) if not r.startswith("_")] ) SNESReasons = _make_reasons(PETSc.SNES.ConvergedReason()) KSPReasons = _make_reasons(PETSc.KSP.ConvergedReason()) + def check_snes_convergence(snes): r = snes.getConvergedReason() try: @@ -100,9 +90,11 @@ def check_snes_convergence(snes): % (snes.getIterationNumber(), msg) ) + comm = MPI.COMM_WORLD -def pacman_cone(resolution=2, slug='pacman'): + +def pacman_cone(resolution=2, slug="pacman"): Lx = 1.0 Ly = 0.1 _nel = 30 @@ -125,9 +117,9 @@ def pacman_cone(resolution=2, slug='pacman'): _r = parameters["geometry"]["r"] _omega = parameters["geometry"]["omega"] tdim = parameters["geometry"]["geometric_dimension"] - + _nameExp = parameters["geometry"]["geom_type"] - _nameExp = 'pacman' + _nameExp = "pacman" ell_ = parameters["model"]["ell"] lc = ell_ / resolution @@ -135,14 +127,14 @@ def pacman_cone(resolution=2, slug='pacman'): parameters["geometry"]["lc"] = lc parameters["loading"]["min"] = 0.35 - parameters["loading"]["max"] = .50 + parameters["loading"]["max"] = 0.50 parameters["loading"]["steps"] = 100 # Get geometry model geom_type = parameters["geometry"]["geom_type"] model_rank = 0 - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() outdir = os.path.join("output", slug, signature) prefix = os.path.join(outdir) @@ -158,26 +150,25 @@ def pacman_cone(resolution=2, slug='pacman'): if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) with open(f"{prefix}/parameters.yaml") as f: _parameters = yaml.load(f, Loader=yaml.FullLoader) - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) - - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_mesh(mesh) - plt.figure() ax = plot_mesh(mesh) fig = ax.get_figure() fig.savefig(f"{prefix}/mesh.png") - # Function spaces element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=2) V_u = FunctionSpace(mesh, element_u) @@ -194,11 +185,9 @@ def pacman_cone(resolution=2, slug='pacman'): alpha_lb = Function(V_alpha, name="Lower bound") alpha_ub = Function(V_alpha, name="Upper bound") - # Pack state state = {"u": u, "alpha": alpha} - uD = Function(V_u, name="Asymptotic Notch Displacement") # Measures @@ -208,23 +197,31 @@ def pacman_cone(resolution=2, slug='pacman'): # Set Bcs Function ext_bd_facets = locate_entities_boundary( - mesh, dim=1, marker=lambda x: np.isclose(x[0]**2. + x[1]**2. 
- _r**2, 0., atol=1.e-4) + mesh, + dim=1, + marker=lambda x: np.isclose( + x[0] ** 2.0 + x[1] ** 2.0 - _r**2, 0.0, atol=1.0e-4 + ), ) - boundary_dofs_u = locate_dofs_topological( - V_u, mesh.topology.dim - 1, ext_bd_facets) + boundary_dofs_u = locate_dofs_topological(V_u, mesh.topology.dim - 1, ext_bd_facets) boundary_dofs_alpha = locate_dofs_topological( - V_alpha, mesh.topology.dim - 1, ext_bd_facets) + V_alpha, mesh.topology.dim - 1, ext_bd_facets + ) - uD.interpolate(lambda x: _local_notch_asymptotic( - x, ω=np.deg2rad(_omega / 2.), par=parameters["material"])) + uD.interpolate( + lambda x: _local_notch_asymptotic( + x, ω=np.deg2rad(_omega / 2.0), par=parameters["material"] + ) + ) alpha_lb.interpolate(lambda x: np.zeros_like(x[0])) alpha_ub.interpolate(lambda x: np.ones_like(x[0])) for f in [alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) bcs_u = [dirichletbc(value=uD, dofs=boundary_dofs_u)] @@ -255,14 +252,12 @@ def pacman_cone(resolution=2, slug='pacman'): total_energy = model.total_energy_density(state) * dx - external_work load_par = parameters["loading"] - loads = np.linspace(load_par["min"], - load_par["max"], load_par["steps"]) + loads = np.linspace(load_par["min"], load_par["max"], load_par["steps"]) # Solvers solver = AlternateMinimisation( - total_energy, state, bcs, parameters.get("solvers"), - bounds=(alpha_lb, alpha_ub) + total_energy, state, bcs, parameters.get("solvers"), bounds=(alpha_lb, alpha_ub) ) hybrid = HybridSolver( @@ -274,14 +269,11 @@ def pacman_cone(resolution=2, slug='pacman'): ) bifurcation = BifurcationSolver( - total_energy, state, bcs, - bifurcation_parameters=parameters.get( - "stability") + total_energy, state, bcs, bifurcation_parameters=parameters.get("stability") ) cone = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { @@ -300,54 +292,51 @@ def pacman_cone(resolution=2, slug='pacman'): "alphadot_norm": [], "rate_12_norm": [], "unscaled_rate_12_norm": [], - "cone-stable": [] + "cone-stable": [], } for i_t, t in enumerate(loads): - - uD.interpolate(lambda x: _local_notch_asymptotic( - x, - ω=np.deg2rad(_omega / 2.), - t=t, - par=parameters["material"] - )) + uD.interpolate( + lambda x: _local_notch_asymptotic( + x, ω=np.deg2rad(_omega / 2.0), t=t, par=parameters["material"] + ) + ) # update the lower bound alpha.vector.copy(alpha_lb.vector) alpha_lb.vector.ghostUpdate( addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) - + ColorPrint.print_pass(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") - ColorPrint.print_bold(f" Solving first order: AM*Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: AM*Hybrid ") + ColorPrint.print_bold("===================-=============") hybrid.solve(alpha_lb) - # compute the rate alpha.vector.copy(alphadot.vector) alphadot.vector.axpy(-1, alpha_lb.vector) alphadot.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD - ) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) rate_12_norm = hybrid.scaled_rate_norm(alpha, parameters) urate_12_norm = hybrid.unscaled_rate_norm(alpha) - ColorPrint.print_bold(f" Solving second order: Rate Pb. 
") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. ") + ColorPrint.print_bold("===================-=================") is_stable = bifurcation.solve(alpha_lb) is_elastic = bifurcation.is_elastic() inertia = bifurcation.get_inertia() - ColorPrint.print_bold(f" Solving second order: Cone Pb. ") - ColorPrint.print_bold(f"===================-=================") - + ColorPrint.print_bold(" Solving second order: Cone Pb. ") + ColorPrint.print_bold("===================-=================") + stable = cone.my_solve(alpha_lb, eig0=bifurcation._spectrum) - + logging.critical(f"State is elastic: {is_elastic}") logging.critical(f"State's inertia: {inertia}") @@ -366,7 +355,7 @@ def pacman_cone(resolution=2, slug='pacman'): history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(hybrid.data) history_data["solver_HY_data"].append(hybrid.newton_data) history_data["solver_KS_data"].append(cone.data) @@ -382,7 +371,9 @@ def pacman_cone(resolution=2, slug='pacman'): # Save solution - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) file.write_function(alphadot, t) @@ -393,15 +384,18 @@ def pacman_cone(resolution=2, slug='pacman'): a_file.close() # Viz - if not 'SINGULARITY_CONTAINER' in os.environ: - from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement + if "SINGULARITY_CONTAINER" not in os.environ: + from utils.plots import ( + plot_AMit_load, + plot_energies, + ) if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") # plot_force_displacement(history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. 
") print() print() print() @@ -410,7 +404,6 @@ def pacman_cone(resolution=2, slug='pacman'): xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True - plotter = pyvista.Plotter( title="Pacman test", window_size=[1600, 600], @@ -442,16 +435,22 @@ def pacman_cone(resolution=2, slug='pacman'): } performance["N"].append(MPI.COMM_WORLD.size) - performance["dofs"].append(sum([V.dofmap.bs * V.dofmap.index_map.size_global for V in [V_u, V_alpha]])) + performance["dofs"].append( + sum([V.dofmap.bs * V.dofmap.index_map.size_global for V in [V_u, V_alpha]]) + ) performance["1stOrder-AM"].append(timing("~First Order: AltMin solver")) performance["1stOrder-Hyb"].append(timing("~First Order: Hybrid solver")) - performance["1stOrder-AM-Damage"].append(timing("~First Order: AltMin-Damage solver")) - performance["1stOrder-AM-Elastic"].append(timing("~First Order: AltMin-Elastic solver")) + performance["1stOrder-AM-Damage"].append( + timing("~First Order: AltMin-Damage solver") + ) + performance["1stOrder-AM-Elastic"].append( + timing("~First Order: AltMin-Elastic solver") + ) performance["2ndOrder-Uniqueness"].append(timing("~Second Order: Bifurcation")) try: performance["2ndOrder-Stability"].append(timing("~Second Order: Cone Solver")) - except Exception as e: + except Exception: performance["2ndOrder-Stability"].append(np.nan) if comm.rank == 0: @@ -461,19 +460,18 @@ def pacman_cone(resolution=2, slug='pacman'): return history_data, signature, prefix, performance + if __name__ == "__main__": import argparse - parser = argparse.ArgumentParser(description='Process evolution.') - parser.add_argument('-r', type=int, default=3, - help='resolution: ell to h ratio') + parser = argparse.ArgumentParser(description="Process evolution.") + parser.add_argument("-r", type=int, default=3, help="resolution: ell to h ratio") args = parser.parse_args() - + ColorPrint.print_info(f"Resolution: {args.r}") - - history_data, signature, prefix, timings = pacman_cone(resolution = args.r) + + history_data, signature, prefix, timings = pacman_cone(resolution=args.r) ColorPrint.print_bold(f" signature {signature} ") df = pd.DataFrame(history_data) - print(df.drop(['solver_data', 'solver_HY_data', 'solver_KS_data'], axis=1)) - + print(df.drop(["solver_data", "solver_HY_data", "solver_KS_data"], axis=1)) diff --git a/src/irrevolutions/practice/pacman_hybrid.py b/src/irrevolutions/practice/pacman_hybrid.py index eda91b99..c07c0e9a 100644 --- a/src/irrevolutions/practice/pacman_hybrid.py +++ b/src/irrevolutions/practice/pacman_hybrid.py @@ -1,49 +1,40 @@ +import os import sys from pathlib import Path -import os -from pyvista.utilities import xvfb + +import dolfinx.plot +import matplotlib.pyplot as plt import pyvista +import yaml from dolfinx.fem import ( - Constant, Function, FunctionSpace, - assemble_scalar, dirichletbc, - form, - locate_dofs_geometrical, locate_dofs_topological, set_bc, ) -import matplotlib.pyplot as plt from dolfinx.io import XDMFFile, gmshio -from dolfinx.mesh import locate_entities_boundary, CellType, create_rectangle -from dolfinx.fem import locate_dofs_topological -import yaml -import dolfinx.plot - +from dolfinx.mesh import locate_entities_boundary +from pyvista.utilities import xvfb sys.path.append("../") -from irrevolutions.utils import ColorPrint, set_vector_to_constant -from models import DamageElasticityModel as Brittle -from algorithms.am import AlternateMinimisation as AM, HybridSolver +import json +import logging +from datetime import date +import dolfinx +import numpy as np +import petsc4py +import ufl 
+from algorithms.am import HybridSolver +from irrevolutions.utils import ColorPrint, set_vector_to_constant from meshes.pacman import mesh_pacman -from utils.lib import _local_notch_asymptotic - -from utils.viz import plot_mesh, plot_vector, plot_scalar +from models import DamageElasticityModel as Brittle from mpi4py import MPI -import json from petsc4py import PETSc from solvers.function import functions_to_vec -from dolfinx.fem import FunctionSpace -import ufl -import petsc4py -from solvers.snesblockproblem import SNESBlockProblem -import dolfinx -from datetime import date -import logging - -import numpy as np +from utils.lib import _local_notch_asymptotic +from utils.viz import plot_mesh, plot_scalar, plot_vector logging.basicConfig(level=logging.INFO) @@ -68,8 +59,7 @@ class ConvergenceError(Exception): def _make_reasons(reasons): return dict( - [(getattr(reasons, r), r) - for r in dir(reasons) if not r.startswith("_")] + [(getattr(reasons, r), r) for r in dir(reasons) if not r.startswith("_")] ) @@ -111,6 +101,7 @@ def check_snes_convergence(snes): if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) + def pacman_hybrid(nest): # Parameters Lx = 1.0 @@ -129,14 +120,14 @@ def pacman_hybrid(nest): _omega = parameters["geometry"]["omega"] tdim = parameters["geometry"]["geometric_dimension"] _nameExp = parameters["geometry"]["geom_type"] - _nameExp = 'pacman' + _nameExp = "pacman" ell_ = parameters["model"]["ell"] - lc = ell_ / 1. + lc = ell_ / 1.0 parameters["geometry"]["lc"] = lc - parameters["loading"]["min"] = 0. - parameters["loading"]["max"] = .5 + parameters["loading"]["min"] = 0.0 + parameters["loading"]["max"] = 0.5 # Get geometry model geom_type = parameters["geometry"]["geom_type"] @@ -148,7 +139,9 @@ def pacman_hybrid(nest): if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_mesh(mesh) if comm.rank == 0: @@ -186,23 +179,31 @@ def pacman_hybrid(nest): # Set Bcs Function ext_bd_facets = locate_entities_boundary( - mesh, dim=1, marker=lambda x: np.isclose(x[0]**2. + x[1]**2. 
- _r**2, 0., atol=1.e-4) + mesh, + dim=1, + marker=lambda x: np.isclose( + x[0] ** 2.0 + x[1] ** 2.0 - _r**2, 0.0, atol=1.0e-4 + ), ) - boundary_dofs_u = locate_dofs_topological( - V_u, mesh.topology.dim - 1, ext_bd_facets) + boundary_dofs_u = locate_dofs_topological(V_u, mesh.topology.dim - 1, ext_bd_facets) boundary_dofs_alpha = locate_dofs_topological( - V_alpha, mesh.topology.dim - 1, ext_bd_facets) + V_alpha, mesh.topology.dim - 1, ext_bd_facets + ) - uD.interpolate(lambda x: _local_notch_asymptotic( - x, ω=np.deg2rad(_omega / 2.), par=parameters["material"])) + uD.interpolate( + lambda x: _local_notch_asymptotic( + x, ω=np.deg2rad(_omega / 2.0), par=parameters["material"] + ) + ) alpha_lb.interpolate(lambda x: np.zeros_like(x[0])) alpha_ub.interpolate(lambda x: np.ones_like(x[0])) for f in [alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) bcs_u = [dirichletbc(value=uD, dofs=boundary_dofs_u)] @@ -261,14 +262,13 @@ def pacman_hybrid(nest): ) load_par = parameters["loading"] - loads = np.linspace(load_par["min"], - load_par["max"], load_par["steps"]) + loads = np.linspace(load_par["min"], load_par["max"], load_par["steps"]) # loads = [0.1, 1.0, 1.1] # loads = np.linspace(0.3, 1., 10) if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) snes = hybrid.newton.snes @@ -281,13 +281,11 @@ def pacman_hybrid(nest): data = [] for i_t, t in enumerate(loads): - - uD.interpolate(lambda x: _local_notch_asymptotic( - x, - ω=np.deg2rad(_omega / 2.), - t=t, - par=parameters["material"] - )) + uD.interpolate( + lambda x: _local_notch_asymptotic( + x, ω=np.deg2rad(_omega / 2.0), t=t, par=parameters["material"] + ) + ) # update the lower bound alpha.vector.copy(alpha_lb.vector) @@ -302,27 +300,28 @@ def pacman_hybrid(nest): alpha.vector.copy(alphadot.vector) alphadot.vector.axpy(-1, alpha_lb.vector) alphadot.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD - ) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) alpha.vector.copy(alphadot.vector) alphadot.vector.axpy(-1, alpha_lb.vector) alphadot.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD - ) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) rate_12_norm = hybrid.scaled_rate_norm(alphadot, parameters) rate_12_norm_unscaled = hybrid.unscaled_rate_norm(alphadot) - fracture_energy = comm.allreduce( - dolfinx.fem.assemble_scalar(dolfinx.fem.form( - model.damage_energy_density(state) * dx)), + dolfinx.fem.assemble_scalar( + dolfinx.fem.form(model.damage_energy_density(state) * dx) + ), op=MPI.SUM, ) elastic_energy = comm.allreduce( - dolfinx.fem.assemble_scalar(dolfinx.fem.form( - model.elastic_energy_density(state) * dx)), + dolfinx.fem.assemble_scalar( + dolfinx.fem.form(model.elastic_energy_density(state) * dx) + ), op=MPI.SUM, ) @@ -334,10 +333,10 @@ def pacman_hybrid(nest): "load": t, "fracture_energy": fracture_energy, "elastic_energy": elastic_energy, - "total_energy": elastic_energy+fracture_energy, + "total_energy": elastic_energy + fracture_energy, "solver_data": hybrid.data, "rate_12_norm": rate_12_norm, - "rate_12_norm_unscaled": rate_12_norm_unscaled + "rate_12_norm_unscaled": rate_12_norm_unscaled, # "eigs" : stability.data["eigs"], # "stable" : stability.data["stable"], # "F" : _F diff --git 
a/src/irrevolutions/practice/parametric-traction-bar-r.py b/src/irrevolutions/practice/parametric-traction-bar-r.py index 87dff771..49c09555 100644 --- a/src/irrevolutions/practice/parametric-traction-bar-r.py +++ b/src/irrevolutions/practice/parametric-traction-bar-r.py @@ -1,6 +1,7 @@ import subprocess + import numpy as np -import sys + # List of parameters # parameters_list = np.logspace(-8, -1, 8) @@ -18,4 +19,4 @@ print(f"Script executed successfully with parameter: {param}") except subprocess.CalledProcessError as e: # Handle any errors or exceptions - print(f"Error executing script with parameter {param}: {e}") \ No newline at end of file + print(f"Error executing script with parameter {param}: {e}") diff --git a/src/irrevolutions/practice/parametric-traction-bar-s.py b/src/irrevolutions/practice/parametric-traction-bar-s.py index 61519197..85a4350b 100644 --- a/src/irrevolutions/practice/parametric-traction-bar-s.py +++ b/src/irrevolutions/practice/parametric-traction-bar-s.py @@ -1,6 +1,7 @@ import subprocess + import numpy as np -import sys + # List of parameters # parameters_list = np.logspace(-8, -1, 8) @@ -24,4 +25,4 @@ print(f"Script executed successfully with parameter: {param}") except subprocess.CalledProcessError as e: # Handle any errors or exceptions - print(f"Error executing script with parameter {param}: {e}") \ No newline at end of file + print(f"Error executing script with parameter {param}: {e}") diff --git a/src/irrevolutions/practice/thinfilm-bar.py b/src/irrevolutions/practice/thinfilm-bar.py index 890a000b..fbea3503 100644 --- a/src/irrevolutions/practice/thinfilm-bar.py +++ b/src/irrevolutions/practice/thinfilm-bar.py @@ -1,18 +1,19 @@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml import json -from pathlib import Path -import sys +import logging import os -import matplotlib.pyplot as plt +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import pyvista +import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, @@ -23,63 +24,45 @@ locate_dofs_geometrical, set_bc, ) +from dolfinx.io import XDMFFile, gmshio -import pyvista -from pyvista.utilities import xvfb -# +# from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl -from dolfinx.mesh import locate_entities_boundary, CellType, create_rectangle -from dolfinx.fem import locate_dofs_topological - -from dolfinx.fem.petsc import ( - set_bc, - ) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType +from pyvista.utilities import xvfb sys.path.append("../") -from models import BrittleMembraneOverElasticFoundation as ThinFilm -from algorithms.am import AlternateMinimisation, HybridSolver +from algorithms.am import HybridSolver from algorithms.so import BifurcationSolver, StabilitySolver -from meshes.primitives import mesh_bar_gmshapi -from irrevolutions.utils import ColorPrint -from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 -# from meshes.pacman import mesh_pacman -from utils.viz import plot_mesh, plot_vector, plot_scalar, plot_profile -from utils.lib import _local_notch_asymptotic -from utils.plots import plot_energies, 
plot_AMit_load, plot_force_displacement -from irrevolutions.utils import table_timing_data -from utils.parametric import parameters_vs_elle -from solvers.function import vec_to_functions +from default import ResultsStorage, Visualization # logging.basicConfig(level=logging.DEBUG) +from irrevolutions.utils import ColorPrint, setup_logger_mpi, table_timing_data +from meshes.primitives import mesh_bar_gmshapi +from models import BrittleMembraneOverElasticFoundation as ThinFilm +from solvers.function import vec_to_functions +from utils.parametric import parameters_vs_elle +from utils.plots import plot_AMit_load, plot_energies, plot_force_displacement -from irrevolutions.utils import setup_logger_mpi - +# from meshes.pacman import mesh_pacman +from utils.viz import plot_profile, plot_scalar, plot_vector -from default import ResultsStorage, Visualization # ------------------------------------------------------------------ class ConvergenceError(Exception): """Error raised when a solver fails to converge""" + def _make_reasons(reasons): return dict( - [(getattr(reasons, r), r) - for r in dir(reasons) if not r.startswith("_")] + [(getattr(reasons, r), r) for r in dir(reasons) if not r.startswith("_")] ) + SNESReasons = _make_reasons(PETSc.SNES.ConvergedReason()) KSPReasons = _make_reasons(PETSc.KSP.ConvergedReason()) + def check_snes_convergence(snes): r = snes.getConvergedReason() try: @@ -129,24 +112,25 @@ def main(parameters, storage=None): Ly = parameters["geometry"]["Ly"] tdim = parameters["geometry"]["geometric_dimension"] - lc = parameters["model"]["ell"] / parameters["geometry"]["mesh_size_factor"] + lc = parameters["model"]["ell"] / parameters["geometry"]["mesh_size_factor"] geom_type = parameters["geometry"]["geom_type"] - _nameExp = 'thinfilm-' + parameters["geometry"]["geom_type"] + _nameExp = "thinfilm-" + parameters["geometry"]["geom_type"] import hashlib - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() outdir = os.path.join(os.path.dirname(__file__), "output") if storage is None: prefix = os.path.join(outdir, "thinfilm-bar", signature) else: prefix = storage - + if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) # generate mesh @@ -175,7 +159,7 @@ def main(parameters, storage=None): β = Function(V_alpha, name="DamagePerturbation") v = Function(V_u, name="DisplacementPerturbation") perturbation = {"v": v, "beta": β} - + z = [u, alpha] # need upper/lower bound for the damage field alpha_lb = Function(V_alpha, name="Lower bound") @@ -185,10 +169,8 @@ def main(parameters, storage=None): dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) - dofs_u_left = locate_dofs_geometrical( - V_u, lambda x: np.isclose(x[0], 0.0)) - dofs_u_right = locate_dofs_geometrical( - V_u, lambda x: np.isclose(x[0], Lx)) + dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) + dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) zero_u = Function(V_u) u_ = Function(V_u, name="Boundary Displacement") @@ -208,14 +190,12 @@ def main(parameters, storage=None): ) bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} - # loading - tau = Constant(mesh, np.array(0., dtype=PETSc.ScalarType)) - eps_0 = tau * ufl.as_tensor([[1., 0], [0, 0]]) + tau = Constant(mesh, np.array(0.0, dtype=PETSc.ScalarType)) + eps_0 = tau * 
ufl.as_tensor([[1.0, 0], [0, 0]]) load_par = parameters["loading"] - loads = np.linspace(load_par["min"], - load_par["max"], load_par["steps"]) + loads = np.linspace(load_par["min"], load_par["max"], load_par["steps"]) # energy (model) model = ThinFilm(parameters["model"], eps_0=eps_0) f = Constant(mesh, np.array([0, 0], dtype=PETSc.ScalarType)) @@ -239,8 +219,7 @@ def main(parameters, storage=None): ) stability = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { "load": [], @@ -257,41 +236,45 @@ def main(parameters, storage=None): "alphadot_norm": [], "rate_12_norm": [], "unscaled_rate_12_norm": [], - "cone-stable": [] + "cone-stable": [], } # timestepping - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_mesh(mesh) for i_t, t in enumerate(loads): tau.value = t - # update the lower bound alpha.vector.copy(alpha_lb.vector) alpha_lb.vector.ghostUpdate( addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) - - ColorPrint.print_bold(f" Solving first order: AM ") - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold(" Solving first order: AM ") + ColorPrint.print_bold("===================-=========") logger.critical(f"{i_t}/{len(loads)}: Solving for t = {t:3.2f} --") - ColorPrint.print_bold(f" Solving first order: Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: Hybrid ") + ColorPrint.print_bold("===================-=============") logger.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") - + equilibrium.solve(alpha_lb) - + fracture_energy = comm.allreduce( - dolfinx.fem.assemble_scalar(dolfinx.fem.form(model.damage_energy_density(state) * dx)), + dolfinx.fem.assemble_scalar( + dolfinx.fem.form(model.damage_energy_density(state) * dx) + ), op=MPI.SUM, ) elastic_energy = comm.allreduce( - dolfinx.fem.assemble_scalar(dolfinx.fem.form(model.elastic_energy_density(state) * dx)), + dolfinx.fem.assemble_scalar( + dolfinx.fem.form(model.elastic_energy_density(state) * dx) + ), op=MPI.SUM, ) @@ -299,32 +282,34 @@ def main(parameters, storage=None): alpha.vector.copy(alphadot.vector) alphadot.vector.axpy(-1, alpha_lb.vector) alphadot.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD - ) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) rate_12_norm = equilibrium.scaled_rate_norm(alphadot, parameters) rate_12_norm_unscaled = equilibrium.unscaled_rate_norm(alphadot) - ColorPrint.print_bold(f" Solving second order: Rate Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. ") + ColorPrint.print_bold("===================-=================") is_stable = bifurcation.solve(alpha_lb) is_elastic = bifurcation.is_elastic() inertia = bifurcation.get_inertia() - ColorPrint.print_bold(f" Solving second order: Stability Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Stability Pb. 
") + ColorPrint.print_bold("===================-=================") - stable = stability.my_solve(alpha_lb, eig0=bifurcation._spectrum, inertia = inertia) + stable = stability.my_solve( + alpha_lb, eig0=bifurcation._spectrum, inertia=inertia + ) if bifurcation._spectrum: - vec_to_functions(bifurcation._spectrum[0]['xk'], [v, β]) - + vec_to_functions(bifurcation._spectrum[0]["xk"], [v, β]) + tol = 1e-3 xs = np.linspace(0 + tol, Lx - tol, 101) points = np.zeros((3, 101)) points[0] = xs - + plotter = pyvista.Plotter( title="Perturbation profile", window_size=[800, 600], @@ -335,10 +320,7 @@ def main(parameters, storage=None): points, plotter, subplot=(0, 0), - lineproperties={ - "c": "k", - "label": f"$\\beta$" - }, + lineproperties={"c": "k", "label": "$\\beta$"}, ) ax = _plt.gca() _plt.legend() @@ -347,7 +329,6 @@ def main(parameters, storage=None): _plt.savefig(f"{prefix}/perturbation-profile-{i_t}.png") _plt.close() - plotter = pyvista.Plotter( title="Cone-Perturbation profile", window_size=[800, 600], @@ -355,14 +336,11 @@ def main(parameters, storage=None): ) _plt, data = plot_profile( - stability.perturbation['beta'], + stability.perturbation["beta"], points, plotter, subplot=(0, 0), - lineproperties={ - "c": "k", - "label": f"$\\beta$" - }, + lineproperties={"c": "k", "label": "$\\beta$"}, ) ax = _plt.gca() _plt.legend() @@ -371,9 +349,8 @@ def main(parameters, storage=None): _plt.savefig(f"{prefix}/perturbation-profile-cone-{i_t}.png") _plt.close() - # if stability.perturbation: - # pass + # pass fracture_energy = comm.allreduce( assemble_scalar(form(model.damage_energy_density(state) * dx)), @@ -391,11 +368,10 @@ def main(parameters, storage=None): ) _unique = True if inertia[0] == 0 and inertia[1] == 0 else False - history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(equilibrium.data) history_data["eigs"].append(bifurcation.data["eigs"]) history_data["F"].append(stress) @@ -408,22 +384,23 @@ def main(parameters, storage=None): history_data["uniqueness"].append(_unique) history_data["inertia"].append(inertia) - # postprocessing - with dolfinx.common.Timer(f"~Postprocessing and Vis") as timer: + # postprocessing + with dolfinx.common.Timer("~Postprocessing and Vis") as timer: if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") plot_force_displacement( - history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") - + history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf" + ) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) # postprocessing - with dolfinx.common.Timer(f"~Postprocessing and Vis") as timer: - + with dolfinx.common.Timer("~Postprocessing and Vis") as timer: if comm.Get_rank == 1: xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -437,7 +414,6 @@ def main(parameters, storage=None): _plt = plot_vector(u, plotter, subplot=(0, 1)) _plt.screenshot(f"{prefix}/traction-state.png") - return history_data, state @@ -459,7 +435,7 @@ def load_parameters(file_path): 
parameters["stability"]["cone"]["cone_max_it"] = 400000 parameters["stability"]["cone"]["cone_atol"] = 1e-6 parameters["stability"]["cone"]["cone_rtol"] = 1e-6 - parameters["stability"]["cone"]["scaling"] = 1.e-5 + parameters["stability"]["cone"]["scaling"] = 1.0e-5 parameters["geometry"]["Lx"] = 5 parameters["geometry"]["Ly"] = 5e-1 @@ -467,8 +443,8 @@ def load_parameters(file_path): # parameters["model"]["model_type"] = '1D' # parameters["model"]["w1"] = 1 parameters["model"]["nu"] = 0 - parameters["model"]["ell_e"] = .2 - parameters["model"]["ell"] = parameters["model"]["ell_e"]/3 + parameters["model"]["ell_e"] = 0.2 + parameters["model"]["ell"] = parameters["model"]["ell_e"] / 3 # parameters["model"]["ell"] = .05 # parameters["model"]["k_res"] = 0. parameters["loading"]["min"] = 0.99 @@ -485,24 +461,30 @@ def load_parameters(file_path): # _nameExp = parameters["geometry"]["geom_type"] # ell_ = parameters["model"]["ell"] - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature if __name__ == "__main__": import argparse + base_parameters, base_signature = load_parameters("../data/thinfilm/parameters.yml") # _storage = f"output/thinfilm-bar/{signature}" - parser = argparse.ArgumentParser(description='Process evolution.') - - parser.add_argument('--ell_e', type=float, default=.3, - help='internal elastic length') + parser = argparse.ArgumentParser(description="Process evolution.") + + parser.add_argument( + "--ell_e", type=float, default=0.3, help="internal elastic length" + ) args = parser.parse_args() if "--ell_e" in sys.argv: - parameters, signature = parameters_vs_elle(parameters=base_parameters, elle=np.float(args.ell_e)) - _storage = f"output/parametric/thinfilm-bar/vs_ell_e/{base_signature}/{signature}" + parameters, signature = parameters_vs_elle( + parameters=base_parameters, elle=np.float(args.ell_e) + ) + _storage = ( + f"output/parametric/thinfilm-bar/vs_ell_e/{base_signature}/{signature}" + ) else: parameters, signature = base_parameters, base_signature @@ -513,13 +495,13 @@ def load_parameters(file_path): print(pretty_parameters) ColorPrint.print_bold(f"===================-{_storage}-=================") - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, state = main(parameters, _storage) list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) - + df = pd.DataFrame(history_data) - - print(df.drop(['solver_data', 'cone_data'], axis=1)) + + print(df.drop(["solver_data", "cone_data"], axis=1)) ColorPrint.print_bold(f"===================-{signature}-=================") ColorPrint.print_bold(f"===================-{_storage}-=================") diff --git a/src/irrevolutions/practice/traction-AT1_cone.py b/src/irrevolutions/practice/traction-AT1_cone.py index c5f93051..e7076ecd 100644 --- a/src/irrevolutions/practice/traction-AT1_cone.py +++ b/src/irrevolutions/practice/traction-AT1_cone.py @@ -1,17 +1,19 @@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml import json -from pathlib import Path -import sys +import logging import os +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import petsc4py +import ufl +import 
yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, @@ -22,30 +24,17 @@ locate_dofs_geometrical, set_bc, ) +from dolfinx.io import XDMFFile, gmshio from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl - -from dolfinx.fem.petsc import ( - set_bc, - ) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType sys.path.append("../") -from models import DamageElasticityModel as Brittle from algorithms.am import AlternateMinimisation, HybridSolver from algorithms.so import BifurcationSolver, StabilitySolver -from meshes.primitives import mesh_bar_gmshapi from irrevolutions.utils import ColorPrint +from meshes.primitives import mesh_bar_gmshapi +from models import DamageElasticityModel as Brittle from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 - """Traction endommageable bar @@ -73,13 +62,13 @@ parameters["stability"]["cone"]["cone_rtol"] = 1e-5 parameters["stability"]["cone"]["scaling"] = 0.3 -parameters["model"]["ell"] = .1 +parameters["model"]["ell"] = 0.1 parameters["model"]["model_dimension"] = 2 -parameters["model"]["model_type"] = '1D' +parameters["model"]["model_type"] = "1D" parameters["model"]["w1"] = 1 -parameters["model"]["k_res"] = 0. +parameters["model"]["k_res"] = 0.0 -parameters["loading"]["min"] = .98 +parameters["loading"]["min"] = 0.98 parameters["loading"]["max"] = 1.4 parameters["loading"]["steps"] = 100 @@ -104,7 +93,7 @@ if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) -_lc = ell_ / parameters["geometry"]["ell_lc"] +_lc = ell_ / parameters["geometry"]["ell_lc"] # _lc = Lx/2 gmsh_model, tdim = mesh_bar_gmshapi(geom_type, Lx, Ly, _lc, tdim) @@ -114,17 +103,20 @@ import hashlib -signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + +signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) -with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: +with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 +) as file: file.write_mesh(mesh) # Functional Setting @@ -155,10 +147,8 @@ dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) -dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.0)) -dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) +dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) +dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) @@ -170,16 +160,13 @@ alpha_ub.interpolate(lambda x: np.ones_like(x[0])) for f in [zero_u, zero_alpha, u_, alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) -bc_u_left = dirichletbc( - np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) +bc_u_left = 
dirichletbc(np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) # import pdb; pdb.set_trace() -bc_u_right = dirichletbc( - u_, dofs_u_right) +bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] bcs_alpha = [] @@ -209,12 +196,10 @@ total_energy = model.total_energy_density(state) * dx - external_work load_par = parameters["loading"] -loads = np.linspace(load_par["min"], - load_par["max"], load_par["steps"]) +loads = np.linspace(load_par["min"], load_par["max"], load_par["steps"]) solver = AlternateMinimisation( - total_energy, state, bcs, parameters.get("solvers"), - bounds=(alpha_lb, alpha_ub) + total_energy, state, bcs, parameters.get("solvers"), bounds=(alpha_lb, alpha_ub) ) hybrid = HybridSolver( @@ -230,8 +215,7 @@ ) cone = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { @@ -245,11 +229,11 @@ "eigs": [], "uniqueness": [], "inertia": [], - "F": [], - "alphadot_norm" : [], - "rate_12_norm" : [], - "unscaled_rate_12_norm" : [], - "cone-stable": [] + "F": [], + "alphadot_norm": [], + "rate_12_norm": [], + "unscaled_rate_12_norm": [], + "cone-stable": [], } @@ -259,10 +243,9 @@ # logging.getLogger().setLevel(logging.DEBUG) for i_t, t in enumerate(loads): -# for i_t, t in enumerate([0., .99, 1.0, 1.01]): - u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + # for i_t, t in enumerate([0., .99, 1.0, 1.01]): + u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) + u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) # update the lower bound alpha.vector.copy(alpha_lb.vector) @@ -274,17 +257,16 @@ logging.critical("") logging.critical("") logging.critical("") - - ColorPrint.print_bold(f" Solving first order: AM ") - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold(" Solving first order: AM ") + ColorPrint.print_bold("===================-=========") logging.critical(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") solver.solve() - ColorPrint.print_bold(f" Solving first order: Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: Hybrid ") + ColorPrint.print_bold("===================-=============") logging.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") hybrid.solve(alpha_lb) @@ -293,8 +275,8 @@ alpha.vector.copy(alphadot.vector) alphadot.vector.axpy(-1, alpha_lb.vector) alphadot.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD - ) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) logging.info(f"alpha vector norm: {alpha.vector.norm()}") logging.info(f"alpha lb norm: {alpha_lb.vector.norm()}") @@ -306,9 +288,8 @@ logging.info(f"scaled rate state_12 norm: {rate_12_norm}") logging.info(f"unscaled scaled rate state_12 norm: {urate_12_norm}") - - ColorPrint.print_bold(f" Solving second order: Rate Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. 
") + ColorPrint.print_bold("===================-=================") # n_eigenvalues = 10 is_stable = bifurcation.solve(alpha_lb) @@ -319,12 +300,12 @@ ColorPrint.print_bold(f"State is elastic: {is_elastic}") ColorPrint.print_bold(f"State's inertia: {inertia}") # ColorPrint.print_bold(f"State is stable: {is_stable}") - - ColorPrint.print_bold(f" Solving second order: Cone Pb. ") - ColorPrint.print_bold(f"===================-=================") - + + ColorPrint.print_bold(" Solving second order: Cone Pb. ") + ColorPrint.print_bold("===================-=================") + stable = cone.my_solve(alpha_lb, eig0=bifurcation._spectrum) - + fracture_energy = comm.allreduce( assemble_scalar(form(model.damage_energy_density(state) * dx)), op=MPI.SUM, @@ -344,7 +325,7 @@ history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(solver.data) history_data["eigs"].append(bifurcation.data["eigs"]) history_data["F"].append(stress) @@ -357,7 +338,9 @@ history_data["uniqueness"].append(_unique) history_data["inertia"].append(inertia) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) @@ -366,7 +349,7 @@ json.dump(history_data, a_file) a_file.close() - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. ") print() print() print() @@ -375,13 +358,12 @@ # print(history_data) - df = pd.DataFrame(history_data) -print(df.drop(['solver_data', 'cone_data'], axis=1)) +print(df.drop(["solver_data", "cone_data"], axis=1)) # Viz -from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement +from utils.plots import plot_AMit_load, plot_force_displacement if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") @@ -389,12 +371,13 @@ plot_force_displacement(history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") +import sys -from pyvista.utilities import xvfb import pyvista -import sys -from utils.viz import plot_mesh, plot_vector, plot_scalar -# +from pyvista.utilities import xvfb +from utils.viz import plot_scalar, plot_vector + +# xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -407,5 +390,3 @@ _plt = plot_scalar(alpha, plotter, subplot=(0, 0)) _plt = plot_vector(u, plotter, subplot=(0, 1)) _plt.screenshot(f"{prefix}/traction-state.png") - - diff --git a/src/irrevolutions/practice/traction-AT1_first_order.py b/src/irrevolutions/practice/traction-AT1_first_order.py index 8ec4b0e3..02d5efe0 100644 --- a/src/irrevolutions/practice/traction-AT1_first_order.py +++ b/src/irrevolutions/practice/traction-AT1_first_order.py @@ -1,17 +1,19 @@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml import json -from pathlib import Path -import sys +import logging import os +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import petsc4py +import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem 
import ( Constant, Function, @@ -22,32 +24,17 @@ locate_dofs_geometrical, set_bc, ) +from dolfinx.io import XDMFFile, gmshio from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl - -from dolfinx.fem.petsc import ( - set_bc, - ) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType sys.path.append("../") -from models import DamageElasticityModel as Brittle from algorithms.am import AlternateMinimisation, HybridSolver from algorithms.so import BifurcationSolver, StabilitySolver -from meshes.primitives import mesh_bar_gmshapi from irrevolutions.utils import ColorPrint +from meshes.primitives import mesh_bar_gmshapi +from models import DamageElasticityModel as Brittle from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 - - - sys.path.append("../") @@ -78,13 +65,13 @@ parameters["stability"]["cone"]["cone_rtol"] = 1e-5 parameters["stability"]["cone"]["scaling"] = 0.01 -parameters["model"]["ell"] = .1 +parameters["model"]["ell"] = 0.1 parameters["model"]["model_dimension"] = 2 -parameters["model"]["model_type"] = '1D' +parameters["model"]["model_type"] = "1D" parameters["model"]["w1"] = 1 -parameters["model"]["k_res"] = 0. +parameters["model"]["k_res"] = 0.0 -parameters["loading"]["min"] = .98 +parameters["loading"]["min"] = 0.98 parameters["loading"]["max"] = 1.4 parameters["loading"]["steps"] = 100 @@ -109,7 +96,7 @@ if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) -_lc = ell_ / parameters["geometry"]["ell_lc"] +_lc = ell_ / parameters["geometry"]["ell_lc"] # _lc = Lx/2 gmsh_model, tdim = mesh_bar_gmshapi(geom_type, Lx, Ly, _lc, tdim) @@ -119,17 +106,20 @@ import hashlib -signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + +signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) -with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: +with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 +) as file: file.write_mesh(mesh) # Functional Setting @@ -160,10 +150,8 @@ dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) -dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.0)) -dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) +dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) +dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) @@ -175,16 +163,13 @@ alpha_ub.interpolate(lambda x: np.ones_like(x[0])) for f in [zero_u, zero_alpha, u_, alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) -bc_u_left = dirichletbc( - np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) +bc_u_left = dirichletbc(np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) # import pdb; 
pdb.set_trace() -bc_u_right = dirichletbc( - u_, dofs_u_right) +bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] bcs_alpha = [] @@ -214,12 +199,10 @@ total_energy = model.total_energy_density(state) * dx - external_work load_par = parameters["loading"] -loads = np.linspace(load_par["min"], - load_par["max"], load_par["steps"]) +loads = np.linspace(load_par["min"], load_par["max"], load_par["steps"]) solver = AlternateMinimisation( - total_energy, state, bcs, parameters.get("solvers"), - bounds=(alpha_lb, alpha_ub) + total_energy, state, bcs, parameters.get("solvers"), bounds=(alpha_lb, alpha_ub) ) hybrid = HybridSolver( @@ -235,8 +218,7 @@ ) cone = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { @@ -250,9 +232,9 @@ # "eigs": [], # "uniqueness": [], # "inertia": [], - "F": [], + "F": [], # "alphadot_norm" : [], - # "rate_12_norm" : [], + # "rate_12_norm" : [], # "unscaled_rate_12_norm" : [], # "cone-stable": [] } @@ -264,10 +246,9 @@ # logging.getLogger().setLevel(logging.DEBUG) for i_t, t in enumerate(loads): -# for i_t, t in enumerate([0., .99, 1.0, 1.01]): - u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + # for i_t, t in enumerate([0., .99, 1.0, 1.01]): + u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) + u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) # update the lower bound alpha.vector.copy(alpha_lb.vector) @@ -279,17 +260,16 @@ logging.critical("") logging.critical("") logging.critical("") - - ColorPrint.print_bold(f" Solving first order: AM ") - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold(" Solving first order: AM ") + ColorPrint.print_bold("===================-=========") logging.critical(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") solver.solve() - ColorPrint.print_bold(f" Solving first order: Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: Hybrid ") + ColorPrint.print_bold("===================-=============") logging.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") hybrid.solve(alpha_lb) @@ -298,8 +278,8 @@ alpha.vector.copy(alphadot.vector) alphadot.vector.axpy(-1, alpha_lb.vector) alphadot.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD - ) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) logging.info(f"alpha vector norm: {alpha.vector.norm()}") logging.info(f"alpha lb norm: {alpha_lb.vector.norm()}") @@ -311,7 +291,6 @@ logging.info(f"scaled rate state_12 norm: {rate_12_norm}") logging.info(f"unscaled scaled rate state_12 norm: {urate_12_norm}") - # ColorPrint.print_bold(f" Solving second order: Rate Pb. ") # ColorPrint.print_bold(f"===================-=================") @@ -324,12 +303,12 @@ # ColorPrint.print_bold(f"State is elastic: {is_elastic}") # ColorPrint.print_bold(f"State's inertia: {inertia}") # # ColorPrint.print_bold(f"State is stable: {is_stable}") - + # ColorPrint.print_bold(f" Solving second order: Cone Pb. 
") # ColorPrint.print_bold(f"===================-=================") - + # stable = cone.my_solve(alpha_lb, eig0=bifurcation.Kspectrum[0]) - + fracture_energy = comm.allreduce( assemble_scalar(form(model.damage_energy_density(state) * dx)), op=MPI.SUM, @@ -349,12 +328,14 @@ history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(solver.data) # history_data["eigs"].append(bifurcation.data["eigs"]) history_data["F"].append(stress) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) @@ -363,7 +344,7 @@ json.dump(history_data, a_file) a_file.close() - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. ") print() print() print() @@ -372,13 +353,12 @@ # print(history_data) - df = pd.DataFrame(history_data) -print(df.drop(['solver_data'], axis=1)) +print(df.drop(["solver_data"], axis=1)) # Viz -from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement +from utils.plots import plot_AMit_load, plot_force_displacement if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") @@ -386,12 +366,13 @@ plot_force_displacement(history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") +import sys -from pyvista.utilities import xvfb import pyvista -import sys -from utils.viz import plot_mesh, plot_vector, plot_scalar -# +from pyvista.utilities import xvfb +from utils.viz import plot_scalar, plot_vector + +# xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -404,5 +385,3 @@ _plt = plot_scalar(alpha, plotter, subplot=(0, 0)) _plt = plot_vector(u, plotter, subplot=(0, 1)) _plt.screenshot(f"{prefix}/traction-state.png") - - diff --git a/src/irrevolutions/practice/traction-AT2_cone.py b/src/irrevolutions/practice/traction-AT2_cone.py index aa396e17..53867297 100644 --- a/src/irrevolutions/practice/traction-AT2_cone.py +++ b/src/irrevolutions/practice/traction-AT2_cone.py @@ -1,50 +1,42 @@ #!/usr/bin/env python3 -import logging import json -import numpy as np -import pandas as pd -import yaml -from pathlib import Path -import sys +import logging import os +import sys +from pathlib import Path import dolfinx import dolfinx.plot -from dolfinx import log +import numpy as np +import pandas as pd +import petsc4py import ufl - +import yaml from dolfinx.fem import ( Constant, Function, FunctionSpace, - locate_dofs_geometrical, assemble_scalar, dirichletbc, form, + locate_dofs_geometrical, set_bc, ) -from dolfinx.fem.petsc import assemble_vector -from dolfinx.mesh import CellType from dolfinx.io import XDMFFile, gmshio -from dolfinx.common import Timer, list_timings, TimingType - from mpi4py import MPI -import petsc4py from petsc4py import PETSc sys.path.append("../") -from models import DamageElasticityModel as Brittle from algorithms.am import AlternateMinimisation, HybridSolver from algorithms.so_merged import BifurcationSolver, StabilitySolver -from solvers import SNESSolver -from meshes.primitives import mesh_bar_gmshapi from irrevolutions.utils import ColorPrint +from meshes.primitives import mesh_bar_gmshapi +from models import DamageElasticityModel as 
Brittle from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 -import logging sys.path.append("../") + class BrittleAT2(Brittle): """Brittle AT_2 model, without an elastic phase. For fun only.""" @@ -56,6 +48,7 @@ def w(self, alpha): # Return w(alpha) function return self.w1 * alpha**2 + petsc4py.init(sys.argv) comm = MPI.COMM_WORLD @@ -70,11 +63,11 @@ def w(self, alpha): parameters["stability"]["cone"]["scaling"] = 0.3 parameters["model"]["model_dimension"] = 2 -parameters["model"]["model_type"] = '1D' +parameters["model"]["model_type"] = "1D" parameters["model"]["w1"] = 1 -parameters["model"]["ell"] = .1 -parameters["model"]["k_res"] = 0. -parameters["loading"]["min"] = .8 +parameters["model"]["ell"] = 0.1 +parameters["model"]["k_res"] = 0.0 +parameters["loading"]["min"] = 0.8 parameters["loading"]["max"] = 1.5 parameters["loading"]["steps"] = 10 @@ -86,14 +79,15 @@ def w(self, alpha): tdim = parameters["geometry"]["geometric_dimension"] _nameExp = parameters["geometry"]["geom_type"] ell_ = parameters["model"]["ell"] -_lc = ell_ / parameters["geometry"]["ell_lc"] +_lc = ell_ / parameters["geometry"]["ell_lc"] geom_type = parameters["geometry"]["geom_type"] gmsh_model, tdim = mesh_bar_gmshapi(geom_type, Lx, Ly, _lc, tdim) mesh, mts, fts = gmshio.model_to_mesh(gmsh_model, comm, model_rank, tdim) import hashlib -signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + +signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() outdir = os.path.join(os.path.dirname(__file__), "output") prefix = os.path.join(outdir, "traction_AT2_cone", signature) @@ -101,14 +95,16 @@ def w(self, alpha): Path(prefix).mkdir(parents=True, exist_ok=True) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) -with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: +with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 +) as file: file.write_mesh(mesh) element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=tdim) @@ -152,7 +148,9 @@ def w(self, alpha): bcs_alpha = [] set_bc(alpha_ub.vector, bcs_alpha) -alpha_ub.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) +alpha_ub.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD +) bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} model = BrittleAT2(parameters["model"]) @@ -199,10 +197,10 @@ def w(self, alpha): "uniqueness": [], "inertia": [], "F": [], - "alphadot_norm" : [], - "rate_12_norm" : [], - "unscaled_rate_12_norm" : [], - "cone-stable": [] + "alphadot_norm": [], + "rate_12_norm": [], + "unscaled_rate_12_norm": [], + "cone-stable": [], } check_stability = [] @@ -210,20 +208,22 @@ def w(self, alpha): logging.getLogger().setLevel(logging.INFO) for i_t, t in enumerate(loads): - u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) + u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) alpha.vector.copy(alpha_lb.vector) - alpha_lb.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) + alpha_lb.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) 
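
> Editor's note. The reformatted script above (and several others in this patch) repeats the same bookkeeping idiom: hash the parameter dictionary into an MD5 signature and use it to name the output directory where `parameters.yaml` and `signature.md5` are stored. A minimal standalone sketch of that convention follows; the helper name `make_output_prefix` and the default `output` directory are illustrative, not part of the codebase.

```
import hashlib
import os
from pathlib import Path

import yaml


def make_output_prefix(parameters: dict, outdir: str = "output"):
    # One directory per distinct parametrisation, keyed by the MD5 of the
    # stringified parameter dictionary (same idiom as in the scripts above).
    signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest()
    prefix = os.path.join(outdir, parameters["geometry"]["geom_type"], signature)
    Path(prefix).mkdir(parents=True, exist_ok=True)

    # Store the signature and the full parameter set next to the results,
    # so a run can always be traced back to its inputs.
    with open(os.path.join(prefix, "signature.md5"), "w") as f:
        f.write(signature)
    with open(os.path.join(prefix, "parameters.yaml"), "w") as f:
        yaml.dump(parameters, f)

    return prefix, signature
```

> In the scripts themselves only the MPI rank-0 process performs these writes; the sketch omits that guard for brevity.
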
- ColorPrint.print_bold(f" Solving first order: AM ") - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold(" Solving first order: AM ") + ColorPrint.print_bold("===================-=========") logging.critical(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") solver.solve() - ColorPrint.print_bold(f" Solving first order: Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: Hybrid ") + ColorPrint.print_bold("===================-=============") logging.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") hybrid.solve(alpha_lb) @@ -231,8 +231,8 @@ def w(self, alpha): alpha.vector.copy(alphadot.vector) alphadot.vector.axpy(-1, alpha_lb.vector) alphadot.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD - ) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) logging.critical(f"alpha vector norm: {alpha.vector.norm()}") logging.critical(f"alpha lb norm: {alpha_lb.vector.norm()}") @@ -244,8 +244,8 @@ def w(self, alpha): logging.critical(f"scaled rate state_12 norm: {rate_12_norm}") logging.critical(f"unscaled scaled rate state_12 norm: {urate_12_norm}") - ColorPrint.print_bold(f" Solving second order: Rate Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. ") + ColorPrint.print_bold("===================-=================") is_stable = bifurcation.solve(alpha_lb) is_elastic = bifurcation.is_elastic() @@ -256,11 +256,11 @@ def w(self, alpha): ColorPrint.print_bold(f"State is elastic: {is_elastic}") ColorPrint.print_bold(f"State's inertia: {inertia}") - ColorPrint.print_bold(f" Solving second order: Cone Pb. ") - ColorPrint.print_bold(f"===================-=================") - + ColorPrint.print_bold(" Solving second order: Cone Pb. ") + ColorPrint.print_bold("===================-=================") + stable = cone.my_solve(alpha_lb, eig0=bifurcation._spectrum) - + fracture_energy = comm.allreduce( assemble_scalar(form(model.damage_energy_density(state) * dx)), op=MPI.SUM, @@ -280,7 +280,7 @@ def w(self, alpha): history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(solver.data) history_data["eigs"].append(bifurcation.data["eigs"]) history_data["F"].append(stress) @@ -293,7 +293,9 @@ def w(self, alpha): history_data["uniqueness"].append(_unique) history_data["inertia"].append(inertia) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) @@ -302,22 +304,23 @@ def w(self, alpha): json.dump(history_data, a_file) a_file.close() - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. 
") df = pd.DataFrame(history_data) -print(df.drop(['solver_data', 'cone_data'], axis=1)) +print(df.drop(["solver_data", "cone_data"], axis=1)) -from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement +from utils.plots import plot_AMit_load, plot_force_displacement if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") plot_force_displacement(history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") -from pyvista.utilities import xvfb -import pyvista import sys -from utils.viz import plot_mesh, plot_vector, plot_scalar + +import pyvista +from pyvista.utilities import xvfb +from utils.viz import plot_scalar, plot_vector xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -332,4 +335,4 @@ def w(self, alpha): _plt.screenshot(f"{prefix}/traction-state.png") ColorPrint.print_bold(f"===================-{signature}-=================") -ColorPrint.print_bold(f" Done! ") \ No newline at end of file +ColorPrint.print_bold(" Done! ") diff --git a/src/irrevolutions/practice/traction-ATJJ.py b/src/irrevolutions/practice/traction-ATJJ.py index d9014e3b..81a61afb 100644 --- a/src/irrevolutions/practice/traction-ATJJ.py +++ b/src/irrevolutions/practice/traction-ATJJ.py @@ -1,17 +1,19 @@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml import json -from pathlib import Path -import sys +import logging import os +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import petsc4py +import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, @@ -22,40 +24,21 @@ locate_dofs_geometrical, set_bc, ) +from dolfinx.io import XDMFFile, gmshio from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl -import hashlib - -from dolfinx.fem.petsc import ( - set_bc, -) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType, timing - sys.path.append("../") -from irrevolutions.utils import norm_H1, norm_L2 -from utils.plots import plot_energies +from algorithms.am import HybridSolver +from algorithms.so import BifurcationSolver, StabilitySolver from irrevolutions.utils import ColorPrint from meshes.primitives import mesh_bar_gmshapi -from algorithms.so import BifurcationSolver, StabilitySolver -from algorithms.am import AlternateMinimisation, HybridSolver from models import DamageElasticityModel - - logging.getLogger().setLevel(logging.ERROR) - - """Traction damageable bar 0|WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW|========> t @@ -87,7 +70,7 @@ def a(self, alpha): w = self.w(alpha) _k = self.k - return ((1 - w) + k_res) / (1 + (_k-1) * w) + return ((1 - w) + k_res) / (1 + (_k - 1) * w) def w(self, alpha): """ @@ -96,7 +79,7 @@ def w(self, alpha): """ # Return w(alpha) function - return 1-(1-alpha)**2 + return 1 - (1 - alpha) ** 2 def parameters_vs_ell(parameters=None, ell=0.1): @@ -116,7 +99,7 @@ def parameters_vs_ell(parameters=None, ell=0.1): # parameters["model"]["w1"] = 1 # parameters["model"]["k_res"] = 0. 
- parameters["loading"]["min"] = .0 + parameters["loading"]["min"] = 0.0 parameters["loading"]["max"] = parameters["model"]["k"] parameters["loading"]["steps"] = 30 @@ -127,7 +110,6 @@ def parameters_vs_ell(parameters=None, ell=0.1): def parameters_vs_SPA_scaling(file=None, s=0.01): - if file is None: with open("../test/parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) @@ -140,14 +122,14 @@ def parameters_vs_SPA_scaling(file=None, s=0.01): parameters["stability"]["cone"]["cone_atol"] = 1e-6 parameters["stability"]["cone"]["cone_rtol"] = 1e-5 parameters["model"]["ell"] = 0.1 - parameters["loading"]["min"] = .9 + parameters["loading"]["min"] = 0.9 parameters["loading"]["max"] = parameters["model"]["k"] parameters["loading"]["steps"] = 30 return parameters -def traction_with_parameters(parameters, slug=''): +def traction_with_parameters(parameters, slug=""): # Get mesh parameters Lx = parameters["geometry"]["Lx"] Ly = parameters["geometry"]["Ly"] @@ -160,8 +142,7 @@ def traction_with_parameters(parameters, slug=''): # Get geometry model geom_type = parameters["geometry"]["geom_type"] - import hashlib - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() # Create the mesh of the specimen with given dimensions print("ell:", parameters["model"]["ell"]) @@ -181,26 +162,27 @@ def traction_with_parameters(parameters, slug=''): # Get mesh and meshtags mesh, mts, fts = gmshio.model_to_mesh(gmsh_model, comm, model_rank, tdim) - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) with open(f"{prefix}/parameters.yaml") as f: _parameters = yaml.load(f, Loader=yaml.FullLoader) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_mesh(mesh) # Functional Setting - element_u = ufl.VectorElement( - "Lagrange", mesh.ufl_cell(), degree=1, dim=tdim) + element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=tdim) V_u = FunctionSpace(mesh, element_u) element_alpha = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), degree=1) @@ -225,10 +207,8 @@ def traction_with_parameters(parameters, slug=''): dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) - dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.0)) - dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) + dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) + dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) @@ -240,14 +220,13 @@ def traction_with_parameters(parameters, slug=''): alpha_ub.interpolate(lambda x: np.ones_like(x[0])) for f in [zero_u, zero_alpha, u_, alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + 
addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - bc_u_left = dirichletbc( - np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) + bc_u_left = dirichletbc(np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) - bc_u_right = dirichletbc( - u_, dofs_u_right) + bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] bcs_alpha = [] @@ -276,8 +255,11 @@ def traction_with_parameters(parameters, slug=''): external_work = ufl.dot(f, state["u"]) * dx total_energy = model.total_energy_density(state) * dx - external_work - loads = np.linspace(parameters["loading"]["min"], - parameters["loading"]["max"], parameters["loading"]["steps"]) + loads = np.linspace( + parameters["loading"]["min"], + parameters["loading"]["max"], + parameters["loading"]["steps"], + ) # solver = AlternateMinimisation( # total_energy, state, bcs, parameters.get("solvers"), @@ -293,13 +275,11 @@ def traction_with_parameters(parameters, slug=''): ) bifurcation = BifurcationSolver( - total_energy, state, bcs, stability_parameters=parameters.get( - "stability") + total_energy, state, bcs, stability_parameters=parameters.get("stability") ) cone = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { @@ -318,7 +298,7 @@ def traction_with_parameters(parameters, slug=''): "alphadot_norm": [], "rate_12_norm": [], "unscaled_rate_12_norm": [], - "cone-stable": [] + "cone-stable": [], } check_stability = [] @@ -327,16 +307,15 @@ def traction_with_parameters(parameters, slug=''): # logging.getLogger().setLevel(logging.ERROR) # logging.getLogger().setLevel(logging.INFO) # logging.getLogger().setLevel(logging.DEBUG) - for i_t, t in enumerate(loads): plotter = None # for i_t, t in enumerate([0., .99, 1.0, 1.01]): - u_.interpolate(lambda x: ( - t * np.ones_like(x[0]), np.zeros_like(x[1]))) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) + u_.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) # update the lower bound alpha.vector.copy(alpha_lb.vector) @@ -349,14 +328,14 @@ def traction_with_parameters(parameters, slug=''): logging.critical("") logging.critical("") - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold("===================-=========") logging.critical(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") # solver.solve() - ColorPrint.print_bold(f" Solving first order: AM*Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: AM*Hybrid ") + ColorPrint.print_bold("===================-=============") logging.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") hybrid.solve(alpha_lb) @@ -374,14 +353,12 @@ def traction_with_parameters(parameters, slug=''): logging.critical(f"alpha vector norm: {alpha.vector.norm()}") logging.critical(f"alpha lb norm: {alpha_lb.vector.norm()}") logging.critical(f"alphadot norm: {alphadot.vector.norm()}") - logging.critical( - f"vector norms [u, alpha]: {[zi.vector.norm() for zi in z]}") + logging.critical(f"vector norms [u, alpha]: {[zi.vector.norm() for zi in z]}") logging.critical(f"scaled rate state_12 norm: {rate_12_norm}") - logging.critical( - f"unscaled scaled rate state_12 norm: {urate_12_norm}") + logging.critical(f"unscaled scaled rate state_12 norm: {urate_12_norm}") - 
ColorPrint.print_bold(f" Solving second order: Rate Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. ") + ColorPrint.print_bold("===================-=================") # n_eigenvalues = 10 is_stable = bifurcation.solve(alpha_lb) @@ -393,8 +370,8 @@ def traction_with_parameters(parameters, slug=''): logging.critical(f"State is elastic: {is_elastic}") logging.critical(f"State's inertia: {inertia}") - ColorPrint.print_bold(f" Solving second order: Cone Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Cone Pb. ") + ColorPrint.print_bold("===================-=================") stable = cone.my_solve(alpha_lb, eig0=bifurcation._spectrum) @@ -418,7 +395,7 @@ def traction_with_parameters(parameters, slug=''): history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(hybrid.data) history_data["solver_HY_data"].append(hybrid.newton_data) history_data["solver_KS_data"].append(cone.data) @@ -432,7 +409,9 @@ def traction_with_parameters(parameters, slug=''): history_data["uniqueness"].append(_unique) history_data["inertia"].append(inertia) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) @@ -441,49 +420,56 @@ def traction_with_parameters(parameters, slug=''): json.dump(history_data, a_file) a_file.close() - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. 
") print() print() print() - if hasattr(cone, 'perturbation'): + if hasattr(cone, "perturbation"): plotter, _plt = _plot_perturbations_profile( - [bifurcation.spectrum[0]["beta"], cone.perturbation['beta']], - parameters, prefix, plotter=plotter, - label='$\\beta(x) $\lambda$={bifurcation._spectrum[0][\'lambda\']:.2f}$', + [bifurcation.spectrum[0]["beta"], cone.perturbation["beta"]], + parameters, + prefix, + plotter=plotter, + label="$\\beta(x) $\lambda$={bifurcation._spectrum[0]['lambda']:.2f}$", idx=i_t, - aux = [bifurcation.spectrum, cone.data["lambda_0"]]) + aux=[bifurcation.spectrum, cone.data["lambda_0"]], + ) _plt.savefig(f"{prefix}/test_profile-{i_t}.png") - if hasattr(bifurcation, 'spectrum') and len(bifurcation.spectrum) > 0: - plotter, _plt = _plot_bif_spectrum_profile(bifurcation.spectrum, - parameters, prefix, plotter=None, label='', idx=i_t) + if hasattr(bifurcation, "spectrum") and len(bifurcation.spectrum) > 0: + plotter, _plt = _plot_bif_spectrum_profile( + bifurcation.spectrum, + parameters, + prefix, + plotter=None, + label="", + idx=i_t, + ) _plt.savefig(f"{prefix}/test_spectrum-{i_t}.png") - # plotter, _plt = _plot_bif_spectrum_profile_fullvec(bifurcation._spectrum, + # plotter, _plt = _plot_bif_spectrum_profile_fullvec(bifurcation._spectrum, # parameters, prefix, plotter=None, label='', idx=i_t) # _plt.savefig(f"{prefix}/test_spectrum_full-{i_t}.png") - - - _timings = list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) # Viz - from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement + from utils.plots import plot_AMit_load, plot_force_displacement from utils.viz import plot_profile if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") plot_force_displacement( - history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") + history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf" + ) - from pyvista.utilities import xvfb import pyvista - import sys - from utils.viz import plot_mesh, plot_vector, plot_scalar + from pyvista.utilities import xvfb + from utils.viz import plot_scalar, plot_vector + # xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -497,8 +483,6 @@ def traction_with_parameters(parameters, slug=''): _plt = plot_vector(u, plotter, subplot=(0, 1)) _plt.screenshot(f"{prefix}/traction-state.png") - from utils.viz import plot_profile - xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -520,7 +504,7 @@ def traction_with_parameters(parameters, slug=''): subplot=(0, 0), lineproperties={ "c": "k", - "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}" + "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}", }, ) ax = _plt.gca() @@ -532,16 +516,18 @@ def traction_with_parameters(parameters, slug=''): return history_data, signature, _timings -def _plot_bif_spectrum_profile(spectrum, parameters, prefix, plotter=None, label='', idx=''): +def _plot_bif_spectrum_profile( + spectrum, parameters, prefix, plotter=None, label="", idx="" +): """docstring for _plot_bif_spectrum_profile""" - - from utils.viz import plot_profile + import matplotlib.pyplot as plt + from utils.viz import plot_profile # __import__('pdb').set_trace() - + # fields = spectrum["perturbations_beta"] # fields = spectrum["perturbations_beta"] - fields = [item.get('beta') for item in spectrum] + fields = [item.get("beta") for item in spectrum] n = len(fields) num_cols = 1 num_rows = (n + num_cols - 1) // num_cols @@ -549,7 +535,7 
@@ def _plot_bif_spectrum_profile(spectrum, parameters, prefix, plotter=None, label if plotter == None: import pyvista # from pyvista.utilities import xvfb - + plotter = pyvista.Plotter( title="Bifurcation Spectrum Profile", window_size=[1600, 600], @@ -585,16 +571,16 @@ def _plot_bif_spectrum_profile(spectrum, parameters, prefix, plotter=None, label subplot=(row, col), lineproperties={ "c": "k", - "ls": '-', + "ls": "-", # "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}" - "label": label + "label": label, }, fig=figure, - ax=_axes + ax=_axes, ) - _axes.axis('off') - _axes.axhline('0', lw=3, c='k') + _axes.axis("off") + _axes.axhline("0", lw=3, c="k") # _plt = None # axes[row].plot(xs, xs*i, label=f'mode {i}') @@ -602,12 +588,15 @@ def _plot_bif_spectrum_profile(spectrum, parameters, prefix, plotter=None, label return plotter, _plt -def _plot_bif_spectrum_profile_fullvec(fields, parameters, prefix, plotter=None, label='', idx=''): + +def _plot_bif_spectrum_profile_fullvec( + fields, parameters, prefix, plotter=None, label="", idx="" +): """docstring for _plot_bif_spectrum_profile""" - - from utils.viz import plot_profile + import matplotlib.pyplot as plt - + from utils.viz import plot_profile + # fields = data["perturbations_beta"] # fields = data["perturbations_beta"] # fields = [item.get('beta') for item in data] @@ -618,7 +607,7 @@ def _plot_bif_spectrum_profile_fullvec(fields, parameters, prefix, plotter=None, if plotter == None: import pyvista # from pyvista.utilities import xvfb - + plotter = pyvista.Plotter( title="Bifurcation Spectrum Profile", window_size=[1600, 600], @@ -637,7 +626,7 @@ def _plot_bif_spectrum_profile_fullvec(fields, parameters, prefix, plotter=None, # if n==1: __import__('pdb').set_trace() for i, field in enumerate(fields): - u = field['xk'][1] + u = field["xk"][1] # u = field['xk'][1] row = i // num_cols col = i % num_cols @@ -649,10 +638,12 @@ def _plot_bif_spectrum_profile_fullvec(fields, parameters, prefix, plotter=None, _axes = axes[row] if n > 1 else axes # if label == '': - label = f"mode {i} $\lambda_{i}$ = {field.get('lambda'):.2e}, ||={u.vector.norm()}" - + label = ( + f"mode {i} $\lambda_{i}$ = {field.get('lambda'):.2e}, ||={u.vector.norm()}" + ) + print(label) - + _plt, data = plot_profile( u, points, @@ -660,16 +651,16 @@ def _plot_bif_spectrum_profile_fullvec(fields, parameters, prefix, plotter=None, subplot=(row, col), lineproperties={ "c": "k", - "ls": '-', + "ls": "-", # "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}" - "label": label + "label": label, }, fig=figure, - ax=_axes + ax=_axes, ) - _axes.axis('off') - _axes.axhline('0', lw=3, c='k') + _axes.axis("off") + _axes.axhline("0", lw=3, c="k") # _plt = None # axes[row].plot(xs, xs*i, label=f'mode {i}') @@ -677,10 +668,12 @@ def _plot_bif_spectrum_profile_fullvec(fields, parameters, prefix, plotter=None, return plotter, _plt -def _plot_perturbations_profile(fields, parameters, prefix, plotter=None, label='', idx='', aux=None): - from utils.viz import plot_profile +def _plot_perturbations_profile( + fields, parameters, prefix, plotter=None, label="", idx="", aux=None +): import matplotlib.pyplot as plt + from utils.viz import plot_profile u = fields[0] # u = fields[0]['xk'][1] @@ -691,7 +684,7 @@ def _plot_perturbations_profile(fields, parameters, prefix, plotter=None, label= if plotter == None: import pyvista # from pyvista.utilities import xvfb - + plotter = pyvista.Plotter( title="Test Profile", window_size=[800, 600], @@ -710,11 +703,11 @@ def 
_plot_perturbations_profile(fields, parameters, prefix, plotter=None, label= subplot=(0, 0), lineproperties={ "c": "k", - "ls": '--', + "ls": "--", # "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}" - "label": f'space, $\\lambda_0=${aux[0][0].get("lambda"):.1e}' + "label": f'space, $\\lambda_0=${aux[0][0].get("lambda"):.1e}', }, - fig=figure + fig=figure, ) _plt, data = plot_profile( @@ -725,15 +718,15 @@ def _plot_perturbations_profile(fields, parameters, prefix, plotter=None, label= lineproperties={ "c": "k", # "label": f"$\\alpha$ with $\ell$ = {parameters['model']['ell']:.2f}" - "label": f'cone, $\\lambda_K=${aux[1]:.1e}' + "label": f"cone, $\\lambda_K=${aux[1]:.1e}", }, - fig=figure + fig=figure, ) ax = _plt.gca() ax.set_xticks([0, 1], ["0", "1"]) ax.set_yticks([]) - for spine in ['top', 'right', 'left']: + for spine in ["top", "right", "left"]: ax.spines[spine].set_visible(False) # ax.spines['top'].set_visible(False) @@ -744,15 +737,14 @@ def _plot_perturbations_profile(fields, parameters, prefix, plotter=None, label= _plt.legend() _plt.fill_between(data[0], data[1].reshape(len(data[1]))) - - - _plt.title(f"Profile of perturbation") + _plt.title("Profile of perturbation") # _plt.savefig(f"{prefix}/test_profile{idx}.png") - + return plotter, _plt pass + def param_ell(): # for ell in [0.1, 0.2, 0.3]: @@ -762,56 +754,55 @@ def param_ell(): parameters = parameters_vs_ell(parameters, ell) - message = f'Running test with ell={ell}' + message = f"Running test with ell={ell}" pretty_parameters = json.dumps(parameters, indent=2) ColorPrint.print_bold(f" {message} ") - ColorPrint.print_bold(f"===================-===============") + ColorPrint.print_bold("===================-===============") print(pretty_parameters) ColorPrint.print_bold(f" {message} ") - ColorPrint.print_bold(f"===================-===============") + ColorPrint.print_bold("===================-===============") history_data, signature, timings = traction_with_parameters( - parameters, slug='atk_vs_ell') + parameters, slug="atk_vs_ell" + ) df = pd.DataFrame(history_data) ColorPrint.print_bold(f" {message} ") - ColorPrint.print_bold(f"===================-===============") + ColorPrint.print_bold("===================-===============") ColorPrint.print_bold(f" signature {signature} ") - print( - df.drop(['solver_data', 'solver_KS_data', 'solver_HY_data'], axis=1)) + print(df.drop(["solver_data", "solver_KS_data", "solver_HY_data"], axis=1)) -def param_s(): +def param_s(): for s in [0.001, 0.01, 0.05]: with open("../test/atk_parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - message = f'Running SPA test with s={s}' + message = f"Running SPA test with s={s}" parameters = parameters_vs_SPA_scaling(parameters, s) pretty_parameters = json.dumps(parameters, indent=2) ColorPrint.print_bold(f" {message} ") - ColorPrint.print_bold(f"===================-===============") + ColorPrint.print_bold("===================-===============") print(pretty_parameters) ColorPrint.print_bold(f" {message} ") - ColorPrint.print_bold(f"===================-===============") + ColorPrint.print_bold("===================-===============") history_data, signature, timings = traction_with_parameters( - parameters, slug='atk_vs_s') + parameters, slug="atk_vs_s" + ) df = pd.DataFrame(history_data) ColorPrint.print_bold(f" {message} ") - ColorPrint.print_bold(f"===================-===============") + ColorPrint.print_bold("===================-===============") ColorPrint.print_bold(f" signature {signature} ") - print( - 
df.drop(['solver_data', 'solver_KS_data', 'solver_HY_data'], axis=1)) + print(df.drop(["solver_data", "solver_KS_data", "solver_HY_data"], axis=1)) if __name__ == "__main__": - from irrevolutions.utils import ColorPrint logging.getLogger().setLevel(logging.ERROR) @@ -822,15 +813,14 @@ def param_s(): with open("../test/atk_parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - message = f'Running SPA test with parameters' + message = "Running SPA test with parameters" pretty_parameters = json.dumps(parameters, indent=2) ColorPrint.print_bold(pretty_parameters) history_data, signature, timings = traction_with_parameters( - parameters, slug='atk_traction') + parameters, slug="atk_traction" + ) ColorPrint.print_bold(f" signature {signature} ") df = pd.DataFrame(history_data) - print( - df.drop(['solver_data', 'solver_KS_data', 'solver_HY_data'], axis=1)) - + print(df.drop(["solver_data", "solver_KS_data", "solver_HY_data"], axis=1)) diff --git a/src/irrevolutions/practice/traction-bar-clean.py b/src/irrevolutions/practice/traction-bar-clean.py index 634b1aab..52300ce0 100644 --- a/src/irrevolutions/practice/traction-bar-clean.py +++ b/src/irrevolutions/practice/traction-bar-clean.py @@ -1,54 +1,45 @@ #!/usr/bin/env python3 -import logging import json -import numpy as np -import pandas as pd -import yaml -from pathlib import Path -import sys +import logging import os - +import sys +from pathlib import Path import dolfinx import dolfinx.plot -from dolfinx import log +import numpy as np +import pandas as pd +import petsc4py import ufl - +import yaml from dolfinx.fem import ( Constant, Function, FunctionSpace, - locate_dofs_geometrical, assemble_scalar, dirichletbc, form, + locate_dofs_geometrical, set_bc, ) -from dolfinx.fem.petsc import assemble_vector -from dolfinx.mesh import CellType from dolfinx.io import XDMFFile, gmshio -from dolfinx.common import Timer, list_timings, TimingType - from mpi4py import MPI -import petsc4py from petsc4py import PETSc sys.path.append("../") -from utils.viz import plot_mesh, plot_vector, plot_scalar, plot_profile -import pyvista -from pyvista.utilities import xvfb -from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement import hashlib -from irrevolutions.utils import norm_H1, norm_L2 -from utils.plots import plot_energies -from irrevolutions.utils import ColorPrint -from irrevolutions.utils import _logger, simulation_info -from meshes.primitives import mesh_bar_gmshapi -from solvers import SNESSolver -from algorithms.so import BifurcationSolver, StabilitySolver + +import pyvista from algorithms.am import AlternateMinimisation, HybridSolver +from algorithms.so import BifurcationSolver, StabilitySolver +from irrevolutions.utils import ColorPrint, _logger, simulation_info +from meshes.primitives import mesh_bar_gmshapi from models import DamageElasticityModel as Brittle +from pyvista.utilities import xvfb from solvers.function import vec_to_functions +from utils.plots import plot_AMit_load, plot_energies, plot_force_displacement +from utils.viz import plot_profile, plot_scalar, plot_vector + class BrittleAT2(Brittle): """Brittle AT_2 model, without an elastic phase. 
For fun only.""" @@ -84,12 +75,17 @@ def store_results(self, parameters, history_data, state): alpha = state["alpha"] if self.comm.rank == 0: - with open(f"{self.prefix}/parameters.yaml", 'w') as file: + with open(f"{self.prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) - with XDMFFile(self.comm, f"{self.prefix}/simulation_results.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + self.comm, + f"{self.prefix}/simulation_results.xdmf", + "w", + encoding=XDMFFile.Encoding.HDF5, + ) as file: # for t, data in history_data.items(): - # file.write_scalar(data, t) + # file.write_scalar(data, t) file.write_mesh(u.function_space.mesh) file.write_function(u, t) @@ -99,6 +95,7 @@ def store_results(self, parameters, history_data, state): with open(f"{self.prefix}/time_data.json", "w") as file: json.dump(history_data, file) + # Visualization functions/classes class Visualization: """ @@ -132,8 +129,8 @@ def save_table(self, data, name): json.dump(data.to_json(), a_file) a_file.close() -def main(parameters, model='at2', storage=None): +def main(parameters, model="at2", storage=None): petsc4py.init(sys.argv) comm = MPI.COMM_WORLD @@ -144,34 +141,35 @@ def main(parameters, model='at2', storage=None): tdim = parameters["geometry"]["geometric_dimension"] _nameExp = parameters["geometry"]["geom_type"] ell_ = parameters["model"]["ell"] - lc = parameters["model"]["ell"] / parameters["geometry"]["mesh_size_factor"] + lc = parameters["model"]["ell"] / parameters["geometry"]["mesh_size_factor"] geom_type = parameters["geometry"]["geom_type"] gmsh_model, tdim = mesh_bar_gmshapi(geom_type, Lx, Ly, lc, tdim) mesh, mts, fts = gmshio.model_to_mesh(gmsh_model, comm, model_rank, tdim) - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() outdir = os.path.join(os.path.dirname(__file__), "output") if storage is None: prefix = os.path.join(outdir, "traction_AT2_cone", signature) else: prefix = storage - + if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) - parameters = {**simulation_info, **parameters} - + if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_mesh(mesh) element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=tdim) @@ -194,10 +192,8 @@ def main(parameters, model='at2', storage=None): dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) - dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.0)) - dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) + dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) + dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) @@ -207,18 +203,18 @@ def main(parameters, model='at2', storage=None): u_.interpolate(lambda x: (np.ones_like(x[0]), 0 * np.ones_like(x[1]))) 
alpha_lb.interpolate(lambda x: np.zeros_like(x[0])) alpha_ub.interpolate(lambda x: np.ones_like(x[0])) - + # Perturbation β = Function(V_alpha, name="DamagePerturbation") v = Function(V_u, name="DisplacementPerturbation") perturbation = {"v": v, "beta": β} - + for f in [zero_u, zero_alpha, u_, alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - bc_u_left = dirichletbc( - np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) + bc_u_left = dirichletbc(np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] bcs_alpha = [] @@ -232,17 +228,17 @@ def main(parameters, model='at2', storage=None): set_bc(alpha_ub.vector, bcs_alpha) alpha_ub.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} - - if model == 'at2': + if model == "at2": model = BrittleAT2(parameters["model"]) - elif model == 'at1': + elif model == "at1": model = Brittle(parameters["model"]) else: - raise ValueError('Model not implemented') - + raise ValueError("Model not implemented") + state = {"u": u, "alpha": alpha} z = [u, alpha] @@ -266,14 +262,11 @@ def main(parameters, model='at2', storage=None): ) bifurcation = BifurcationSolver( - total_energy, state, bcs, - bifurcation_parameters=parameters.get( - "stability") + total_energy, state, bcs, bifurcation_parameters=parameters.get("stability") ) stability = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { @@ -291,7 +284,7 @@ def main(parameters, model='at2', storage=None): "alphadot_norm": [], "rate_12_norm": [], "unscaled_rate_12_norm": [], - "cone-stable": [] + "cone-stable": [], } check_stability = [] @@ -299,22 +292,24 @@ def main(parameters, model='at2', storage=None): logging.getLogger().setLevel(logging.INFO) for i_t, t in enumerate(loads): - u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) + u_.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) alpha.vector.copy(alpha_lb.vector) alpha_lb.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - ColorPrint.print_bold(f" Solving first order: AM ") - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold(" Solving first order: AM ") + ColorPrint.print_bold("===================-=========") logging.critical(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") solver.solve() - ColorPrint.print_bold(f" Solving first order: Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: Hybrid ") + ColorPrint.print_bold("===================-=============") logging.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") hybrid.solve(alpha_lb) @@ -328,16 +323,15 @@ def main(parameters, model='at2', storage=None): logging.critical(f"alpha vector norm: {alpha.vector.norm()}") logging.critical(f"alpha lb norm: {alpha_lb.vector.norm()}") 
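
> Editor's note. Throughout these load loops, `alpha_lb` holds the damage field converged at the previous load step, and `alphadot` (whose norm is logged just below) is obtained as `alpha - alpha_lb` via a vector copy followed by `axpy(-1, alpha_lb.vector)`. A toy NumPy sketch of that increment, and of the irreversibility it encodes, follows; the nodal values are made up.

```
import numpy as np

# Hypothetical nodal damage values at two consecutive load steps.
alpha_lb = np.array([0.00, 0.10, 0.30])   # converged damage, previous step
alpha = np.array([0.00, 0.20, 0.45])      # converged damage, current step

# Same algebra as alpha.vector.copy(alphadot.vector); alphadot.axpy(-1, alpha_lb):
alphadot = alpha - alpha_lb

# Irreversibility: the bound-constrained solvers keep alpha >= alpha_lb,
# so the increment is non-negative everywhere.
assert (alphadot >= 0).all()
print(np.linalg.norm(alphadot))
```
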
logging.critical(f"alphadot norm: {alphadot.vector.norm()}") - logging.critical( - f"vector norms [u, alpha]: {[zi.vector.norm() for zi in z]}") + logging.critical(f"vector norms [u, alpha]: {[zi.vector.norm() for zi in z]}") rate_12_norm = hybrid.scaled_rate_norm(alpha, parameters) urate_12_norm = hybrid.unscaled_rate_norm(alpha) logging.critical(f"scaled rate state_12 norm: {rate_12_norm}") logging.critical(f"unscaled scaled rate state_12 norm: {urate_12_norm}") - ColorPrint.print_bold(f" Solving second order: Rate Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. ") + ColorPrint.print_bold("===================-=================") is_stable = bifurcation.solve(alpha_lb) is_elastic = bifurcation.is_elastic() @@ -348,10 +342,12 @@ def main(parameters, model='at2', storage=None): ColorPrint.print_bold(f"State is elastic: {is_elastic}") ColorPrint.print_bold(f"State's inertia: {inertia}") - ColorPrint.print_bold(f" Solving second order: Cone Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Cone Pb. ") + ColorPrint.print_bold("===================-=================") - stable = stability.my_solve(alpha_lb, eig0=bifurcation._spectrum, inertia = inertia) + stable = stability.my_solve( + alpha_lb, eig0=bifurcation._spectrum, inertia=inertia + ) if bifurcation._spectrum: plot_perturbations(comm, Lx, prefix, β, v, bifurcation, stability, i_t) @@ -375,7 +371,7 @@ def main(parameters, model='at2', storage=None): history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(solver.data) history_data["eigs"].append(bifurcation.data["eigs"]) history_data["F"].append(stress) @@ -388,7 +384,9 @@ def main(parameters, model='at2', storage=None): history_data["uniqueness"].append(_unique) history_data["inertia"].append(inertia) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) @@ -397,18 +395,18 @@ def main(parameters, model='at2', storage=None): json.dump(history_data, a_file) a_file.close() - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. ") df = pd.DataFrame(history_data) - with dolfinx.common.Timer(f"~Postprocessing and Vis") as timer: + with dolfinx.common.Timer("~Postprocessing and Vis") as timer: if comm.Get_size() == 1: - # if comm.rank == 0 and comm.Get_size() == 1: + # if comm.rank == 0 and comm.Get_size() == 1: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") plot_force_displacement( - history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") - + history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf" + ) xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -423,34 +421,31 @@ def main(parameters, model='at2', storage=None): _plt.screenshot(f"{prefix}/traction-state.png") ColorPrint.print_bold(f"===================-{signature}-=================") - ColorPrint.print_bold(f" Done! ") + ColorPrint.print_bold(" Done! 
") return history_data, state + def plot_perturbations(comm, Lx, prefix, β, v, bifurcation, stability, i_t): - - vec_to_functions(bifurcation._spectrum[0]['xk'], [v, β]) + vec_to_functions(bifurcation._spectrum[0]["xk"], [v, β]) if comm.Get_size() == 1: tol = 1e-3 xs = np.linspace(0 + tol, Lx - tol, 101) points = np.zeros((3, 101)) points[0] = xs - + plotter = pyvista.Plotter( - title="Perturbation profile", - window_size=[800, 600], - shape=(1, 1), - ) + title="Perturbation profile", + window_size=[800, 600], + shape=(1, 1), + ) _plt, data = plot_profile( - β, - points, - plotter, - subplot=(0, 0), - lineproperties={ - "c": "k", - "label": f"$\\beta$" - }, - ) + β, + points, + plotter, + subplot=(0, 0), + lineproperties={"c": "k", "label": "$\\beta$"}, + ) ax = _plt.gca() _plt.legend() _plt.fill_between(data[0], data[1].reshape(len(data[1]))) @@ -458,23 +453,19 @@ def plot_perturbations(comm, Lx, prefix, β, v, bifurcation, stability, i_t): _plt.savefig(f"{prefix}/perturbation-profile-{i_t}.png") _plt.close() - plotter = pyvista.Plotter( - title="Cone-Perturbation profile", - window_size=[800, 600], - shape=(1, 1), - ) + title="Cone-Perturbation profile", + window_size=[800, 600], + shape=(1, 1), + ) _plt, data = plot_profile( - stability.perturbation['beta'], - points, - plotter, - subplot=(0, 0), - lineproperties={ - "c": "k", - "label": f"$\\beta$" - }, - ) + stability.perturbation["beta"], + points, + plotter, + subplot=(0, 0), + lineproperties={"c": "k", "label": "$\\beta$"}, + ) ax = _plt.gca() _plt.legend() _plt.fill_between(data[0], data[1].reshape(len(data[1]))) @@ -482,10 +473,11 @@ def plot_perturbations(comm, Lx, prefix, β, v, bifurcation, stability, i_t): _plt.savefig(f"{prefix}/perturbation-profile-cone-{i_t}.png") _plt.close() + # Configuration handling (load parameters from YAML) -def load_parameters(file_path, model='at2'): +def load_parameters(file_path, model="at2"): """ Load parameters from a YAML file. @@ -500,20 +492,19 @@ def load_parameters(file_path, model='at2'): with open(file_path) as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - if model == 'at2': - parameters["loading"]["min"] = .0 + if model == "at2": + parameters["loading"]["min"] = 0.0 parameters["loading"]["max"] = 1.3 parameters["loading"]["steps"] = 30 - elif model == 'at1': - parameters["loading"]["min"] = .0 - parameters["loading"]["max"] = 2. + elif model == "at1": + parameters["loading"]["min"] = 0.0 + parameters["loading"]["max"] = 2.0 parameters["loading"]["steps"] = 50 parameters["geometry"]["geom_type"] = "traction-bar" parameters["geometry"]["mesh_size_factor"] = 5 - parameters["stability"]["cone"]["cone_max_it"] = 400000 parameters["stability"]["cone"]["cone_atol"] = 1e-7 parameters["stability"]["cone"]["cone_rtol"] = 1e-7 @@ -521,28 +512,32 @@ def load_parameters(file_path, model='at2'): parameters["model"]["model_dimension"] = 2 parameters["model"]["w1"] = 1 - parameters["model"]["ell"] = .1 - parameters["model"]["k_res"] = 0. 
+ parameters["model"]["ell"] = 0.1 + parameters["model"]["k_res"] = 0.0 - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature + if __name__ == "__main__": import argparse + admissible_models = {"at1", "at2"} - parser = argparse.ArgumentParser(description='Process evolution.') - parser.add_argument("--model", choices=admissible_models, default = 'at1', help="The model to use.") + parser = argparse.ArgumentParser(description="Process evolution.") + parser.add_argument( + "--model", choices=admissible_models, default="at1", help="The model to use." + ) args = parser.parse_args() parameters, signature = load_parameters("../test/parameters.yml", model=args.model) pretty_parameters = json.dumps(parameters, indent=2) _logger.info(pretty_parameters) - + _storage = f"output/traction-bar/{args.model}/{signature}-MPI{MPI.COMM_WORLD.size}" ColorPrint.print_bold(f"===================-{_storage}-=================") - - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, state = main(parameters, args.model, _storage) # Store and visualise results @@ -552,9 +547,11 @@ def load_parameters(file_path, model='at2'): # [[nan], [-0.0021062360599051365]] visualization = Visualization(_storage) - visualization.visualise_results(pd.DataFrame(history_data), drop = ["solver_data", "cone_data"]) + visualization.visualise_results( + pd.DataFrame(history_data), drop=["solver_data", "cone_data"] + ) visualization.save_table(pd.DataFrame(history_data), "history_data") - + # list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) ColorPrint.print_bold(f"===================-{signature}-=================") @@ -562,6 +559,7 @@ def load_parameters(file_path, model='at2'): # timings from irrevolutions.utils import table_timing_data + _timings = table_timing_data() visualization.save_table(_timings, "timing_data") diff --git a/src/irrevolutions/practice/traction-cone.py b/src/irrevolutions/practice/traction-cone.py index 0c581fd5..e135b55b 100644 --- a/src/irrevolutions/practice/traction-cone.py +++ b/src/irrevolutions/practice/traction-cone.py @@ -1,17 +1,19 @@ #!/usr/bin/env python3 -import pdb -import pandas as pd -import numpy as np -from sympy import derive_by_array -import yaml import json -from pathlib import Path -import sys +import logging import os +import sys +from pathlib import Path -from dolfinx.fem import locate_dofs_geometrical, dirichletbc -from dolfinx.mesh import CellType +import dolfinx import dolfinx.mesh +import dolfinx.plot +import numpy as np +import pandas as pd +import petsc4py +import ufl +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, @@ -22,35 +24,20 @@ locate_dofs_geometrical, set_bc, ) +from dolfinx.io import XDMFFile, gmshio from mpi4py import MPI -import petsc4py from petsc4py import PETSc -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl -import hashlib - -from dolfinx.fem.petsc import ( - set_bc, - ) -from dolfinx.io import XDMFFile, gmshio -import logging -from dolfinx.common import Timer, list_timings, TimingType, timing sys.path.append("../") -from models import DamageElasticityModel as Brittle -from algorithms.am import AlternateMinimisation, HybridSolver +from algorithms.am import HybridSolver from algorithms.so import BifurcationSolver, StabilitySolver -from meshes.primitives import mesh_bar_gmshapi 
from irrevolutions.utils import ColorPrint -from utils.plots import plot_energies -from irrevolutions.utils import norm_H1, norm_L2 +from meshes.primitives import mesh_bar_gmshapi +from models import DamageElasticityModel as Brittle logging.getLogger().setLevel(logging.ERROR) - sys.path.append("../") @@ -72,7 +59,7 @@ model_rank = 0 -def traction_with_parameters(parameters, slug = ''): +def traction_with_parameters(parameters, slug=""): # Get mesh parameters Lx = parameters["geometry"]["Lx"] Ly = parameters["geometry"]["Ly"] @@ -80,13 +67,12 @@ def traction_with_parameters(parameters, slug = ''): _nameExp = parameters["geometry"]["geom_type"] ell_ = parameters["model"]["ell"] - _lc = ell_ / parameters["geometry"]["ell_lc"] + _lc = ell_ / parameters["geometry"]["ell_lc"] # Get geometry model geom_type = parameters["geometry"]["geom_type"] - import hashlib - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() # Create the mesh of the specimen with given dimensions print("ell:", parameters["model"]["ell"]) @@ -94,7 +80,7 @@ def traction_with_parameters(parameters, slug = ''): outdir = os.path.join("output", slug, signature) # prefix = os.path.join(outdir, "traction_parametric_vs_ell") - + prefix = os.path.join(outdir) if comm.rank == 0: @@ -106,10 +92,10 @@ def traction_with_parameters(parameters, slug = ''): # Get mesh and meshtags mesh, mts, fts = gmshio.model_to_mesh(gmsh_model, comm, model_rank, tdim) - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) with open(f"{prefix}/parameters.yaml") as f: @@ -118,10 +104,12 @@ def traction_with_parameters(parameters, slug = ''): print("dblchedk", _parameters["model"]["ell"]) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_mesh(mesh) # Functional Setting @@ -151,10 +139,8 @@ def traction_with_parameters(parameters, slug = ''): dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) - dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.0)) - dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) + dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) + dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) @@ -166,14 +152,13 @@ def traction_with_parameters(parameters, slug = ''): alpha_ub.interpolate(lambda x: np.ones_like(x[0])) for f in [zero_u, zero_alpha, u_, alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - bc_u_left = dirichletbc( - np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) + bc_u_left = dirichletbc(np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) - 
bc_u_right = dirichletbc( - u_, dofs_u_right) + bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] bcs_alpha = [] @@ -202,11 +187,14 @@ def traction_with_parameters(parameters, slug = ''): external_work = ufl.dot(f, state["u"]) * dx total_energy = model.total_energy_density(state) * dx - external_work - loads = np.linspace(parameters["loading"]["min"], - parameters["loading"]["max"], parameters["loading"]["steps"]) + loads = np.linspace( + parameters["loading"]["min"], + parameters["loading"]["max"], + parameters["loading"]["steps"], + ) # solver = AlternateMinimisation( - # total_energy, state, bcs, parameters.get("solvers"), + # total_energy, state, bcs, parameters.get("solvers"), # bounds=(alpha_lb, alpha_ub) # ) @@ -223,8 +211,7 @@ def traction_with_parameters(parameters, slug = ''): ) cone = StabilitySolver( - total_energy, state, bcs, - cone_parameters=parameters.get("stability") + total_energy, state, bcs, cone_parameters=parameters.get("stability") ) history_data = { @@ -239,14 +226,13 @@ def traction_with_parameters(parameters, slug = ''): "eigs": [], "uniqueness": [], "inertia": [], - "F": [], - "alphadot_norm" : [], - "rate_12_norm" : [], - "unscaled_rate_12_norm" : [], - "cone-stable": [] + "F": [], + "alphadot_norm": [], + "rate_12_norm": [], + "unscaled_rate_12_norm": [], + "cone-stable": [], } - check_stability = [] # logging.basicConfig(level=logging.INFO) @@ -255,10 +241,11 @@ def traction_with_parameters(parameters, slug = ''): # logging.getLogger().setLevel(logging.DEBUG) for i_t, t in enumerate(loads): - # for i_t, t in enumerate([0., .99, 1.0, 1.01]): - u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + # for i_t, t in enumerate([0., .99, 1.0, 1.01]): + u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) + u_.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) # update the lower bound alpha.vector.copy(alpha_lb.vector) @@ -270,16 +257,15 @@ def traction_with_parameters(parameters, slug = ''): logging.critical("") logging.critical("") logging.critical("") - - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold("===================-=========") logging.critical(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") # solver.solve() - ColorPrint.print_bold(f" Solving first order: AM*Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: AM*Hybrid ") + ColorPrint.print_bold("===================-=============") logging.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") hybrid.solve(alpha_lb) @@ -288,9 +274,8 @@ def traction_with_parameters(parameters, slug = ''): alpha.vector.copy(alphadot.vector) alphadot.vector.axpy(-1, alpha_lb.vector) alphadot.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD - ) - + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) rate_12_norm = hybrid.scaled_rate_norm(alpha, parameters) urate_12_norm = hybrid.unscaled_rate_norm(alpha) @@ -302,8 +287,8 @@ def traction_with_parameters(parameters, slug = ''): logging.critical(f"scaled rate state_12 norm: {rate_12_norm}") logging.critical(f"unscaled scaled rate state_12 norm: {urate_12_norm}") - ColorPrint.print_bold(f" Solving second order: Rate Pb. 
") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. ") + ColorPrint.print_bold("===================-=================") # n_eigenvalues = 10 is_stable = bifurcation.solve(alpha_lb) @@ -314,12 +299,12 @@ def traction_with_parameters(parameters, slug = ''): logging.critical(f"State is elastic: {is_elastic}") logging.critical(f"State's inertia: {inertia}") - - ColorPrint.print_bold(f" Solving second order: Cone Pb. ") - ColorPrint.print_bold(f"===================-=================") - + + ColorPrint.print_bold(" Solving second order: Cone Pb. ") + ColorPrint.print_bold("===================-=================") + stable = cone.my_solve(alpha_lb, eig0=bifurcation._spectrum) - + fracture_energy = comm.allreduce( assemble_scalar(form(model.damage_energy_density(state) * dx)), op=MPI.SUM, @@ -340,7 +325,7 @@ def traction_with_parameters(parameters, slug = ''): history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["solver_data"].append(hybrid.data) history_data["solver_HY_data"].append(hybrid.newton_data) history_data["solver_KS_data"].append(cone.data) @@ -354,7 +339,9 @@ def traction_with_parameters(parameters, slug = ''): history_data["uniqueness"].append(_unique) history_data["inertia"].append(inertia) - with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + comm, f"{prefix}/{_nameExp}.xdmf", "a", encoding=XDMFFile.Encoding.HDF5 + ) as file: file.write_function(u, t) file.write_function(alpha, t) @@ -363,34 +350,32 @@ def traction_with_parameters(parameters, slug = ''): json.dump(history_data, a_file) a_file.close() - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. 
") print() print() print() _timings = list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) - # Viz - from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement + from utils.plots import plot_AMit_load, plot_energies, plot_force_displacement if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") - plot_force_displacement(history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") - - + plot_force_displacement( + history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf" + ) - from pyvista.utilities import xvfb import pyvista - import sys - from utils.viz import plot_mesh, plot_vector, plot_scalar - # + from pyvista.utilities import xvfb + from utils.viz import plot_scalar, plot_vector + + # xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True - plotter = pyvista.Plotter( title="Traction test", window_size=[1600, 600], @@ -402,39 +387,36 @@ def traction_with_parameters(parameters, slug = ''): return history_data, _timings + def param_ell(): - for ell in [0.05, - 0.1, 0.2, 0.3 - ]: + for ell in [0.05, 0.1, 0.2, 0.3]: with open("../test/parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - + parameters = parameters_vs_ell(parameters, ell) pretty_parameters = json.dumps(parameters, indent=2) print(pretty_parameters) print(parameters["loading"]["max"]) - history_data, timings = traction_with_parameters(parameters, slug='vs_ell') + history_data, timings = traction_with_parameters(parameters, slug="vs_ell") df = pd.DataFrame(history_data) - print(df.drop(['solver_data', 'solver_KS_data', 'solver_HY_data'], axis=1)) + print(df.drop(["solver_data", "solver_KS_data", "solver_HY_data"], axis=1)) -if __name__ == "__main__": - +if __name__ == "__main__": logging.getLogger().setLevel(logging.ERROR) - - # param_ell() + # param_ell() for s in [0.001, 0.005, 0.01, 0.02, 0.05, 0.1]: with open("../test/parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - + parameters = parameters_vs_SPA_scaling(parameters, s) pretty_parameters = json.dumps(parameters, indent=2) print(pretty_parameters) - history_data, timings = traction_with_parameters(parameters, slug='vs_s') + history_data, timings = traction_with_parameters(parameters, slug="vs_s") df = pd.DataFrame(history_data) - print(df.drop(['solver_data', 'solver_KS_data', 'solver_HY_data'], axis=1)) + print(df.drop(["solver_data", "solver_KS_data", "solver_HY_data"], axis=1)) diff --git a/src/irrevolutions/practice/traction-parametric.py b/src/irrevolutions/practice/traction-parametric.py index fd6ecc57..3bafd81a 100644 --- a/src/irrevolutions/practice/traction-parametric.py +++ b/src/irrevolutions/practice/traction-parametric.py @@ -1,53 +1,44 @@ #!/usr/bin/env python3 -import logging import json -import numpy as np -import pandas as pd -import yaml -from pathlib import Path -import sys +import logging import os - +import sys +from pathlib import Path import dolfinx import dolfinx.plot -from dolfinx import log +import numpy as np +import pandas as pd +import petsc4py import ufl - +import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, FunctionSpace, - locate_dofs_geometrical, assemble_scalar, dirichletbc, form, + locate_dofs_geometrical, set_bc, ) -from dolfinx.fem.petsc import assemble_vector -from dolfinx.mesh import CellType -from dolfinx.io import XDMFFile, gmshio -from dolfinx.common import Timer, list_timings, TimingType - +from dolfinx.io 
import gmshio from mpi4py import MPI -import petsc4py from petsc4py import PETSc - sys.path.append("../") -from utils.viz import plot_mesh, plot_vector, plot_scalar -import pyvista -from pyvista.utilities import xvfb -from utils.plots import plot_energies, plot_AMit_load, plot_force_displacement import hashlib -from irrevolutions.utils import norm_H1, norm_L2 -from utils.plots import plot_energies + +import pyvista +from algorithms.am import AlternateMinimisation, HybridSolver +from algorithms.so import BifurcationSolver, StabilitySolver from irrevolutions.utils import ColorPrint from meshes.primitives import mesh_bar_gmshapi -from solvers import SNESSolver -from algorithms.so import BifurcationSolver, StabilitySolver -from algorithms.am import AlternateMinimisation, HybridSolver from models import DamageElasticityModel as Brittle +from pyvista.utilities import xvfb +from utils.plots import plot_energies, plot_force_displacement +from utils.viz import plot_scalar, plot_vector class BrittleAT2(Brittle): @@ -62,7 +53,6 @@ def w(self, alpha): return self.w1 * alpha**2 - class ResultsStorage: """ Class for storing and saving simulation results. @@ -85,7 +75,7 @@ def store_results(self, parameters, history_data, state): alpha = state["alpha"] if self.comm.rank == 0: - with open(f"{self.prefix}/parameters.yaml", 'w') as file: + with open(f"{self.prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) # with XDMFFile(self.comm, f"{self.prefix}/simulation_results.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: @@ -100,8 +90,10 @@ def store_results(self, parameters, history_data, state): with open(f"{self.prefix}/time_data.json", "w") as file: json.dump(history_data, file) + # Visualization functions/classes + class Visualization: """ Class for visualizing simulation results. 
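The `BrittleAT2` subclass defined at the top of `traction-parametric.py` swaps only the local dissipation density: the base `DamageElasticityModel` (imported as `Brittle`) is used for the `at1` variant, with the standard AT1 choice $w(\alpha) = w_1 \alpha$ that gives an initial elastic phase, while the override uses the AT2 form $w(\alpha) = w_1 \alpha^2$, for which damage evolves at any load level. The sketch below shows the pattern in isolation; the `Brittle` class here is a stripped-down stand-in with an assumed constructor, not the library implementation.

```
class Brittle:
    """Stand-in for models.DamageElasticityModel, reduced to the single
    method relevant here (assumed interface: `w1` read from the model
    parameters, `w` returning the dissipation density)."""

    def __init__(self, model_parameters):
        self.w1 = model_parameters["w1"]

    def w(self, alpha):
        # AT1: linear in alpha, so a finite stress is needed before
        # damage starts to grow.
        return self.w1 * alpha


class BrittleAT2(Brittle):
    """Same elastic energy, AT2 dissipation."""

    def w(self, alpha):
        # AT2: quadratic in alpha, damage grows as soon as the bar is loaded.
        return self.w1 * alpha**2


if __name__ == "__main__":
    at1, at2 = Brittle({"w1": 1.0}), BrittleAT2({"w1": 1.0})
    print(at1.w(0.5), at2.w(0.5))  # 0.5 vs 0.25
```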
@@ -147,8 +139,8 @@ def save_json(self, data, name): json.dump(data, a_file) a_file.close() -def main(parameters, model='at2', storage=None): +def main(parameters, model="at2", storage=None): petsc4py.init(sys.argv) comm = MPI.COMM_WORLD @@ -165,23 +157,23 @@ def main(parameters, model='at2', storage=None): gmsh_model, tdim = mesh_bar_gmshapi(geom_type, Lx, Ly, _lc, tdim) mesh, mts, fts = gmshio.model_to_mesh(gmsh_model, comm, model_rank, tdim) - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() outdir = os.path.join(os.path.dirname(__file__), "output") if storage is None: prefix = os.path.join(outdir, "traction_AT2_cone", signature) else: prefix = storage - + if comm.rank == 0: Path(prefix).mkdir(parents=True, exist_ok=True) if comm.rank == 0: - with open(f"{prefix}/signature.md5", 'w') as f: + with open(f"{prefix}/signature.md5", "w") as f: f.write(signature) if comm.rank == 0: - with open(f"{prefix}/parameters.yaml", 'w') as file: + with open(f"{prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) # with XDMFFile(comm, f"{prefix}/{_nameExp}.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: @@ -207,10 +199,8 @@ def main(parameters, model='at2', storage=None): dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) - dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.0)) - dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) + dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) + dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) @@ -222,27 +212,28 @@ def main(parameters, model='at2', storage=None): alpha_ub.interpolate(lambda x: np.ones_like(x[0])) for f in [zero_u, zero_alpha, u_, alpha_lb, alpha_ub]: - f.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + f.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - bc_u_left = dirichletbc( - np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) + bc_u_left = dirichletbc(np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] bcs_alpha = [] set_bc(alpha_ub.vector, bcs_alpha) alpha_ub.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} - if model == 'at2': + if model == "at2": model = BrittleAT2(parameters["model"]) - elif model == 'at1': + elif model == "at1": model = Brittle(parameters["model"]) else: - raise ValueError('Model not implemented') - + raise ValueError("Model not implemented") + state = {"u": u, "alpha": alpha} f = Constant(mesh, np.array([0, 0], dtype=PETSc.ScalarType)) @@ -265,8 +256,7 @@ def main(parameters, model='at2', storage=None): ) bifurcation = BifurcationSolver( - total_energy, state, bcs, bifurcation_parameters=parameters.get( - "stability") + total_energy, state, bcs, bifurcation_parameters=parameters.get("stability") ) cone = StabilitySolver( @@ -288,31 +278,32 @@ def main(parameters, model='at2', storage=None): "alphadot_norm": [], "rate_12_norm": [], "unscaled_rate_12_norm": [], - "cone-stable": [] + "cone-stable": [], 
} check_stability = [] - for i_t, t in enumerate(loads): logging.getLogger().setLevel(logging.WARNING) - u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + u_.interpolate(lambda x: (t * np.ones_like(x[0]), np.zeros_like(x[1]))) + u_.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) alpha.vector.copy(alpha_lb.vector) alpha_lb.vector.ghostUpdate( - addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - ColorPrint.print_bold(f" Solving first order: AM ") - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold(" Solving first order: AM ") + ColorPrint.print_bold("===================-=========") logging.critical(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") # solver.solve() - ColorPrint.print_bold(f" Solving first order: Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: Hybrid ") + ColorPrint.print_bold("===================-=============") logging.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") hybrid.solve(alpha_lb) @@ -334,8 +325,8 @@ def main(parameters, model='at2', storage=None): logging.critical(f"scaled rate state_12 norm: {rate_12_norm}") logging.critical(f"unscaled scaled rate state_12 norm: {urate_12_norm}") - ColorPrint.print_bold(f" Solving second order: Rate Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. ") + ColorPrint.print_bold("===================-=================") is_stable = bifurcation.solve(alpha_lb) is_elastic = bifurcation.is_elastic() @@ -346,8 +337,8 @@ def main(parameters, model='at2', storage=None): ColorPrint.print_bold(f"State is elastic: {is_elastic}") ColorPrint.print_bold(f"State's inertia: {inertia}") - ColorPrint.print_bold(f" Solving second order: Cone Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Cone Pb. ") + ColorPrint.print_bold("===================-=================") stable = cone.my_solve(alpha_lb, eig0=bifurcation._spectrum) @@ -370,7 +361,7 @@ def main(parameters, model='at2', storage=None): history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) # history_data["solver_data"].append(solver.data) history_data["eigs"].append(bifurcation.data["eigs"]) history_data["F"].append(stress) @@ -392,19 +383,19 @@ def main(parameters, model='at2', storage=None): json.dump(history_data, a_file) a_file.close() - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. 
") df = pd.DataFrame(history_data) # print(df.drop(['solver_data', 'cone_data'], axis=1)) - print(df.drop(['cone_data'], axis=1)) + print(df.drop(["cone_data"], axis=1)) - with dolfinx.common.Timer(f"~Postprocessing and Vis") as timer: + with dolfinx.common.Timer("~Postprocessing and Vis") as timer: if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") # plot_AMit_load(history_data, file=f"{prefix}/{_nameExp}_it_load.pdf") plot_force_displacement( - history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf") - + history_data, file=f"{prefix}/{_nameExp}_stress-load.pdf" + ) xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True @@ -419,23 +410,26 @@ def main(parameters, model='at2', storage=None): _plt.screenshot(f"{prefix}/traction-state.png") ColorPrint.print_bold(f"===================-{signature}-=================") - ColorPrint.print_bold(f" Done! ") - + ColorPrint.print_bold(" Done! ") performance = { "N": [], "dofs": [], } performance["N"].append(MPI.COMM_WORLD.size) - performance["dofs"].append(sum([V.dofmap.bs * V.dofmap.index_map.size_global for V in [V_u, V_alpha]])) + performance["dofs"].append( + sum([V.dofmap.bs * V.dofmap.index_map.size_global for V in [V_u, V_alpha]]) + ) list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) return history_data, performance, state + # Configuration handling (load parameters from YAML) -def load_parameters(file_path, model='at2'): + +def load_parameters(file_path, model="at2"): """ Load parameters from a YAML file. @@ -450,17 +444,17 @@ def load_parameters(file_path, model='at2'): with open(file_path) as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - if model == 'at2': - parameters["loading"]["min"] = .9 - parameters["loading"]["max"] = .9 + if model == "at2": + parameters["loading"]["min"] = 0.9 + parameters["loading"]["max"] = 0.9 parameters["loading"]["steps"] = 1 - elif model == 'at1': + elif model == "at1": parameters["loading"]["min"] = 1.03 parameters["loading"]["max"] = 1.03 parameters["loading"]["steps"] = 1 - - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature @@ -476,10 +470,9 @@ def param_vs_ell(): def param_vs_s(base_parameters, base_signature): - # s_list = [1.e-08, 1.e-07, 1e-6, 1e-5, 2e-5, 5e-5, 1e-4, 0.001, 0.003, 0.005, 0.01, 0.02, 0.05, 0.1] s_list = np.logspace(-9, -1, 9).tolist() - + from irrevolutions.utils import table_timing_data _rootdir = f"output/parametric/traction-bar/vs_s/{base_signature}" @@ -488,18 +481,21 @@ def param_vs_s(base_parameters, base_signature): Path(_rootdir).mkdir(parents=True, exist_ok=True) if MPI.COMM_WORLD.rank == 0: - with open(f"{_rootdir}/parameters.yaml", 'w') as file: + with open(f"{_rootdir}/parameters.yaml", "w") as file: yaml.dump(base_parameters, file) for s in s_list: - parameters, signature = parameters_vs_SPA_scaling(parameters=base_parameters, s=s) + parameters, signature = parameters_vs_SPA_scaling( + parameters=base_parameters, s=s + ) _storage = f"output/parametric/traction-bar/vs_s/{base_signature}/{signature}" - - ColorPrint.print_bold(f"===PARAMETRIC vs S================-{s}-=================") + ColorPrint.print_bold( + f"===PARAMETRIC vs S================-{s}-=================" + ) ColorPrint.print_bold(f"===================-{signature}-=================") - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, 
performance, state = main(parameters, _storage) _timings = table_timing_data() @@ -517,21 +513,17 @@ def param_vs_s(base_parameters, base_signature): visualization.save_json(performance, "performance") - ColorPrint.print_bold(f"===================-{signature}-=================") - # Store and visualise results - return history_data, _timings def param_vs_dry(base_parameters, base_signature): - # s_list = [1.e-08, 1.e-07, 1e-6, 1e-5, 2e-5, 5e-5, 1e-4, 0.001, 0.003, 0.005, 0.01, 0.02, 0.05, 0.1] s_list = np.arange(0, 10).tolist() - + from irrevolutions.utils import table_timing_data _rootdir = f"output/parametric/traction-bar/vs_s/{base_signature}" @@ -540,7 +532,7 @@ def param_vs_dry(base_parameters, base_signature): Path(_rootdir).mkdir(parents=True, exist_ok=True) if MPI.COMM_WORLD.rank == 0: - with open(f"{_rootdir}/parameters.yaml", 'w') as file: + with open(f"{_rootdir}/parameters.yaml", "w") as file: yaml.dump(base_parameters, file) for s in s_list: @@ -549,11 +541,12 @@ def param_vs_dry(base_parameters, base_signature): signature = s _storage = f"output/parametric/traction-bar/dry/{signature}" - - ColorPrint.print_bold(f"===PARAMETRIC vs S================-{s}-=================") + ColorPrint.print_bold( + f"===PARAMETRIC vs S================-{s}-=================" + ) ColorPrint.print_bold(f"===================-{signature}-=================") - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, performance, state = main(parameters, _storage) _timings = table_timing_data() @@ -571,54 +564,62 @@ def param_vs_dry(base_parameters, base_signature): visualization.save_json(performance, "performance") - ColorPrint.print_bold(f"===================-{signature}-=================") - # Store and visualise results - return history_data, _timings + if __name__ == "__main__": import argparse - from utils.parametric import parameters_vs_SPA_scaling, parameters_vs_ell, parameters_vs_n_refinement + + from utils.parametric import ( + parameters_vs_ell, + parameters_vs_n_refinement, + parameters_vs_SPA_scaling, + ) + admissible_models = {"at1", "at2", "thinfilm"} - parser = argparse.ArgumentParser(description='Process evolution.') - - parser.add_argument('-s', type=str, default=1e-4, - help='scaling') - - parser.add_argument('-n', type=int, default=3, - help='resolution: ell to h ratio') - + parser = argparse.ArgumentParser(description="Process evolution.") + + parser.add_argument("-s", type=str, default=1e-4, help="scaling") + + parser.add_argument("-n", type=int, default=3, help="resolution: ell to h ratio") + parser.add_argument("--model", choices=admissible_models, help="The model to use.") args = parser.parse_args() - base_parameters, base_signature = load_parameters("../test/parameters.yml", model=args.model) + base_parameters, base_signature = load_parameters( + "../test/parameters.yml", model=args.model + ) if "-s" in sys.argv: - parameters, signature = parameters_vs_SPA_scaling(parameters=base_parameters, s=np.float(args.s)) + parameters, signature = parameters_vs_SPA_scaling( + parameters=base_parameters, s=np.float(args.s) + ) _storage = f"output/parametric/traction-bar/vs_s/{args.model}/{base_signature}/{signature}" elif "-n" in sys.argv: - parameters, signature = parameters_vs_n_refinement(parameters=base_parameters, r=np.int(args.n)) + parameters, signature = parameters_vs_n_refinement( + parameters=base_parameters, r=np.int(args.n) + ) _storage = 
f"output/parametric/traction-bar/vs_resolution/{args.model}/{base_signature}/{signature}" else: parameters, signature = base_parameters, base_signature _storage = f"output/parametric/traction-bar/vs_s/{args.model}/base/{signature}" - ColorPrint.print_bold(f" Base ") + ColorPrint.print_bold(" Base ") ColorPrint.print_bold(f"===================-model {args.model}-=================") ColorPrint.print_bold(f"===================-{base_signature}-=================") ColorPrint.print_bold(f"===================-{_storage}-=================") print(json.dumps(parameters, indent=2)) - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, performance, state = main(parameters, args.model, _storage) # Store and visualise results @@ -628,7 +629,7 @@ def param_vs_dry(base_parameters, base_signature): visualization = Visualization(_storage) visualization.visualise_results(history_data) - + list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) ColorPrint.print_bold(f"===================-{signature}-=================") @@ -636,6 +637,7 @@ def param_vs_dry(base_parameters, base_signature): # timings from irrevolutions.utils import table_timing_data + _timings = table_timing_data() visualization.save_table(pd.DataFrame(history_data), "_history_data.json") diff --git a/src/irrevolutions/practice/unstabinst.py b/src/irrevolutions/practice/unstabinst.py index 194ca238..eef2bea1 100644 --- a/src/irrevolutions/practice/unstabinst.py +++ b/src/irrevolutions/practice/unstabinst.py @@ -1,106 +1,76 @@ # library include import sys -sys.path.append('../') -from algorithms import am -import algorithms -import pyvista -from utils.viz import plot_mesh, plot_vector, plot_scalar -from models import DamageElasticityModel as Brittle -from irrevolutions.utils import viz -from meshes import primitives -import meshes -from pyvista.utilities import xvfb + +sys.path.append("../") +import logging +import sys + +import dolfinx +import dolfinx.io +import dolfinx.plot +import gmsh import matplotlib.pyplot as plt +import meshes +import numpy as np +import pyvista +import ufl +from algorithms import am from dolfinx.fem import ( - Constant, - Function, - FunctionSpace, assemble_scalar, dirichletbc, - form, locate_dofs_geometrical, set_bc, ) -import gmsh -import dolfinx.io -import numpy as np -import yaml -import json -import sys -import os -from pathlib import Path - -from mpi4py import MPI - -import petsc4py +from models import DamageElasticityModel as Brittle from petsc4py import PETSc - -import dolfinx -import dolfinx.plot -from dolfinx import log -import ufl - - -from dolfinx.io import XDMFFile - -import logging +from pyvista.utilities import xvfb +from utils.viz import plot_mesh, plot_scalar, plot_vector logging.basicConfig(level=logging.INFO) -sys.path.append('./') +sys.path.append("./") # meshes parameters = { - 'loading': { - 'min': 0., - 'max': 1., - 'steps': 10 - }, - 'geometry': { - 'geom_type': 'beleza', - }, - 'model': { - 'tdim': 2, - 'E': 1, - 'nu': .3, - 'w1': 1., - 'ell': 0.1, - 'k_res': 1.e-8 + "loading": {"min": 0.0, "max": 1.0, "steps": 10}, + "geometry": { + "geom_type": "beleza", }, - 'solvers': { - 'elasticity': { - 'snes': { - 'snes_type': 'newtontr', - 'snes_stol': 1e-8, - 'snes_atol': 1e-8, - 'snes_rtol': 1e-8, - 'snes_max_it': 250, - 'snes_monitor': "", - 'ksp_type': 'preonly', - 'pc_type': 'lu', - 'pc_factor_mat_solver_type': 'mumps' + "model": {"tdim": 2, "E": 1, "nu": 0.3, "w1": 1.0, "ell": 0.1, "k_res": 1.0e-8}, 
+ "solvers": { + "elasticity": { + "snes": { + "snes_type": "newtontr", + "snes_stol": 1e-8, + "snes_atol": 1e-8, + "snes_rtol": 1e-8, + "snes_max_it": 250, + "snes_monitor": "", + "ksp_type": "preonly", + "pc_type": "lu", + "pc_factor_mat_solver_type": "mumps", } }, - 'damage': { - 'snes': { - 'snes_type': 'vinewtonrsls', - 'snes_stol': 1e-5, - 'snes_atol': 1e-5, - 'snes_rtol': 1e-8, - 'snes_max_it': 100, - 'snes_monitor': "", - 'ksp_type': 'preonly', - 'pc_type': 'lu', - 'pc_factor_mat_solver_type': 'mumps' + "damage": { + "snes": { + "snes_type": "vinewtonrsls", + "snes_stol": 1e-5, + "snes_atol": 1e-5, + "snes_rtol": 1e-8, + "snes_max_it": 100, + "snes_monitor": "", + "ksp_type": "preonly", + "pc_type": "lu", + "pc_factor_mat_solver_type": "mumps", }, }, - 'damage_elasticity': { + "damage_elasticity": { "max_it": 2000, "alpha_rtol": 1.0e-4, - "criterion": "alpha_H1" - } - } + "criterion": "alpha_H1", + }, + }, } @@ -112,7 +82,7 @@ def mesh_V( lc, key=0, show=False, - filename='mesh.unv', + filename="mesh.unv", order=1, ): """ @@ -127,83 +97,95 @@ def mesh_V( 1 -> create model for Cast3M show = False -> doesn't open Gmsh to vizualise the mesh (default) True -> open Gmsh to vizualise the mesh - filename = name and format of the output file for key = 1 + filename = name and format of the output file for key = 1 order = order of the function of form """ gmsh.initialize() gmsh.option.setNumber("General.Terminal", 1) gmsh.option.setNumber("Mesh.Algorithm", 5) - hopen = a*np.tan((gamma/2.0)*np.pi/180) - c0 = h/40 - load_len = min(h/40, L/80) + hopen = a * np.tan((gamma / 2.0) * np.pi / 180) + c0 = h / 40 + load_len = min(h / 40, L / 80) tdim = 2 model = gmsh.model() - model.add('TPB') - model.setCurrent('TPB') - #Generating the points of the geometrie + model.add("TPB") + model.setCurrent("TPB") + # Generating the points of the geometrie p0 = model.geo.addPoint(0.0, a, 0.0, lc, tag=0) p1 = model.geo.addPoint(hopen, 0.0, 0.0, lc, tag=1) - p2 = model.geo.addPoint(L/2, 0.0, 0.0, lc, tag=2) - p3 = model.geo.addPoint(L/2, h, 0.0, lc, tag=3) + p2 = model.geo.addPoint(L / 2, 0.0, 0.0, lc, tag=2) + p3 = model.geo.addPoint(L / 2, h, 0.0, lc, tag=3) p4 = model.geo.addPoint(0.0, h, 0.0, lc, tag=4) - p5 = model.geo.addPoint(-L/2, h, 0.0, lc, tag=5) - p6 = model.geo.addPoint(-L/2, 0.0, 0.0, lc, tag=6) + p5 = model.geo.addPoint(-L / 2, h, 0.0, lc, tag=5) + p6 = model.geo.addPoint(-L / 2, 0.0, 0.0, lc, tag=6) p7 = model.geo.addPoint(-hopen, 0.0, 0.0, lc, tag=7) - #Load facet + # Load facet p21 = model.geo.addPoint(load_len, h, 0.0, lc, tag=30) p22 = model.geo.addPoint(-load_len, h, 0.0, lc, tag=31) - #Creating the lines by connecting the points + # Creating the lines by connecting the points notch_right = model.geo.addLine(p0, p1, tag=8) bot_right = model.geo.addLine(p1, p2, tag=9) right = model.geo.addLine(p2, p3, tag=10) - #top_right = model.geo.addLine(p3, p4, tag=11) + # top_right = model.geo.addLine(p3, p4, tag=11) top_right = model.geo.addLine(p3, p21, tag=11) top_left = model.geo.addLine(p22, p5, tag=12) left = model.geo.addLine(p5, p6, tag=13) bot_left = model.geo.addLine(p6, p7, tag=14) notch_left = model.geo.addLine(p7, p0, tag=15) - #Load facet + # Load facet load_right = model.geo.addLine(p21, p4, tag=32) load_left = model.geo.addLine(p4, p22, tag=33) - #Creating the surface using the lines created + # Creating the surface using the lines created perimeter = model.geo.addCurveLoop( - [notch_right, bot_right, right, top_right, load_right, load_left, top_left, left, bot_left, notch_left]) + [ + 
notch_right, + bot_right, + right, + top_right, + load_right, + load_left, + top_left, + left, + bot_left, + notch_left, + ] + ) surface = model.geo.addPlaneSurface([perimeter]) - #model.geo.addSurfaceLoop([surface,16]) + # model.geo.addSurfaceLoop([surface,16]) model.mesh.setOrder(order) -#Creating Physical Groups to extract data from the geometrie - gmsh.model.addPhysicalGroup(tdim-1, [left], tag=101) - gmsh.model.setPhysicalName(tdim-1, 101, 'Left') + # Creating Physical Groups to extract data from the geometrie + gmsh.model.addPhysicalGroup(tdim - 1, [left], tag=101) + gmsh.model.setPhysicalName(tdim - 1, 101, "Left") - gmsh.model.addPhysicalGroup(tdim-1, [right], tag=102) - gmsh.model.setPhysicalName(tdim-1, 102, 'Right') + gmsh.model.addPhysicalGroup(tdim - 1, [right], tag=102) + gmsh.model.setPhysicalName(tdim - 1, 102, "Right") - gmsh.model.addPhysicalGroup(tdim-2, [p6], tag=103) - gmsh.model.setPhysicalName(tdim-2, 103, 'Left_point') + gmsh.model.addPhysicalGroup(tdim - 2, [p6], tag=103) + gmsh.model.setPhysicalName(tdim - 2, 103, "Left_point") - gmsh.model.addPhysicalGroup(tdim-2, [p2], tag=104) - gmsh.model.setPhysicalName(tdim-2, 104, 'Right_point') + gmsh.model.addPhysicalGroup(tdim - 2, [p2], tag=104) + gmsh.model.setPhysicalName(tdim - 2, 104, "Right_point") - gmsh.model.addPhysicalGroup(tdim-2, [p4], tag=105) - gmsh.model.setPhysicalName(tdim-2, 105, 'Load_point') + gmsh.model.addPhysicalGroup(tdim - 2, [p4], tag=105) + gmsh.model.setPhysicalName(tdim - 2, 105, "Load_point") - gmsh.model.addPhysicalGroup(tdim-2, [p0], tag=106) - gmsh.model.setPhysicalName(tdim-2, 106, 'Notch_point') + gmsh.model.addPhysicalGroup(tdim - 2, [p0], tag=106) + gmsh.model.setPhysicalName(tdim - 2, 106, "Notch_point") - gmsh.model.addPhysicalGroup(tdim-1, [load_right], tag=107) - gmsh.model.setPhysicalName(tdim-1, 107, 'load_right') + gmsh.model.addPhysicalGroup(tdim - 1, [load_right], tag=107) + gmsh.model.setPhysicalName(tdim - 1, 107, "load_right") - gmsh.model.addPhysicalGroup(tdim-1, [load_left], tag=108) - gmsh.model.setPhysicalName(tdim-1, 108, 'load_left') + gmsh.model.addPhysicalGroup(tdim - 1, [load_left], tag=108) + gmsh.model.setPhysicalName(tdim - 1, 108, "load_left") gmsh.model.addPhysicalGroup(tdim, [surface], tag=110) - gmsh.model.setPhysicalName(tdim, 110, 'mesh_surface') + gmsh.model.setPhysicalName(tdim, 110, "mesh_surface") -#Cast3M can't read Physical Groups of points (dim = 0). Instead, we check the number in the mesh and input in manually in the code. -#The number of a node doesn't change if it's in a point of the geometry + # Cast3M can't read Physical Groups of points (dim = 0). Instead, we check the number in the mesh and input in manually in the code. 
+ # The number of a node doesn't change if it's in a point of the geometry model.geo.synchronize() model.mesh.generate(tdim) @@ -213,42 +195,39 @@ def mesh_V( gmsh.write(filename) return gmsh.model + geo_parameters = { "a": 0.15, "h": 0.5, "L": 1, "gamma": 90, - "lc": .1, + "lc": 0.1, } -import pdb -parameters.get('geometry').update(geo_parameters) +parameters.get("geometry").update(geo_parameters) gmsh_model = mesh_V(**geo_parameters) -mesh, facet_tags = meshes.gmsh_model_to_mesh(gmsh_model, - cell_data=False, - facet_data=True, - gdim=2) +mesh, facet_tags = meshes.gmsh_model_to_mesh( + gmsh_model, cell_data=False, facet_data=True, gdim=2 +) plt.figure() ax = plot_mesh(mesh) fig = ax.get_figure() -fig.savefig(f"output/Vnotch_mesh.png") +fig.savefig("output/Vnotch_mesh.png") # pdb.set_trace() -# Functional setting +# Functional setting -element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), - degree=1, dim=2) -element_alpha = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), - degree=1) +element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=2) +element_alpha = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), degree=1) V_u = dolfinx.fem.FunctionSpace(mesh, element_u) V_alpha = dolfinx.fem.FunctionSpace(mesh, element_alpha) u = dolfinx.fem.Function(V_u, name="Displacement") -#the displacement +# the displacement u_ = dolfinx.fem.Function(V_u, name="Boundary_Displacement") u_corner = dolfinx.fem.Function(V_u, name="Corner_Displacement") alpha = dolfinx.fem.Function(V_alpha, name="Damage") @@ -261,44 +240,47 @@ def mesh_V( alpha_ub.interpolate(lambda x: np.ones_like(x[0])) dx = ufl.Measure("dx", domain=mesh) # -> volume measure -#We include here the subdomain data generated at the gmsh file. -ds = ufl.Measure("ds", subdomain_data=facet_tags, - domain=mesh) +# We include here the subdomain data generated at the gmsh file. 
+ds = ufl.Measure("ds", subdomain_data=facet_tags, domain=mesh) -model = Brittle(parameters.get('model')) -state = {'u': u, 'alpha': alpha} +model = Brittle(parameters.get("model")) +state = {"u": u, "alpha": alpha} total_energy = model.total_energy_density(state) * dx -force.interpolate(lambda x: (np.zeros_like( - x[0]), parameters['loading']['max']*np.ones_like(x[1]))) - +force.interpolate( + lambda x: (np.zeros_like(x[0]), parameters["loading"]["max"] * np.ones_like(x[1])) +) + u_corner.interpolate(lambda x: (np.zeros_like(x[0]), np.zeros_like(x[1]))) for u in (u_corner,): - u.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + u.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) # total_energy = model.total_energy_density( # state) * dx - ufl.dot(force, u)*ds(107) - ufl.dot(force, u)*ds(108) -_h = parameters.get('geometry').get("h") -_L = parameters.get('geometry').get("L") +_h = parameters.get("geometry").get("h") +_L = parameters.get("geometry").get("L") + + def _small_set(x): _eta = 1e-2 - _lower_bound = - _eta - _upper_bound = + _eta + _lower_bound = -_eta + _upper_bound = +_eta return np.logical_and( np.isclose(x[1], _h), np.logical_and( - np.greater_equal(x[0], _lower_bound), - np.less_equal(x[0], _upper_bound) - ) + np.greater_equal(x[0], _lower_bound), np.less_equal(x[0], _upper_bound) + ), ) + def _corners(x): - return np.logical_and( - np.logical_or(np.isclose(x[0], -_L/2), np.isclose(x[0], _L/2)), - np.isclose(x[1], 0)) + return np.logical_and( + np.logical_or(np.isclose(x[0], -_L / 2), np.isclose(x[0], _L / 2)), + np.isclose(x[1], 0), + ) + # _smallset_entities = dolfinx.mesh.locate_entities_boundary(mesh, 0, _small_set) # _smallset_dofs = dolfinx.fem.locate_dofs_topological( @@ -315,9 +297,8 @@ def _corners(x): dofs_u_corners = locate_dofs_geometrical(V_u, _corners) # Bcs -bcs_alpha=[] -bcs_u = [dirichletbc(u_, dofs_u_smallset), - dirichletbc(u_corner, dofs_u_corners)] +bcs_alpha = [] +bcs_u = [dirichletbc(u_, dofs_u_smallset), dirichletbc(u_corner, dofs_u_corners)] bcs = {"bcs_u": bcs_u, "bcs_alpha": bcs_alpha} # Update the bounds @@ -328,23 +309,17 @@ def _corners(x): total_energy = model.total_energy_density(state) * dx -solver = am.AlternateMinimisation(total_energy, - state, - bcs, - parameters.get("solvers"), - bounds=(alpha_lb, alpha_ub) - ) +solver = am.AlternateMinimisation( + total_energy, state, bcs, parameters.get("solvers"), bounds=(alpha_lb, alpha_ub) +) # visualisation -loads = np.linspace(parameters.get("loading").get("min"), - parameters.get("loading").get("max"), - parameters.get("loading").get("steps")) - -data = { - 'elastic': [], - 'surface': [], - 'total': [], - 'load': [] -} +loads = np.linspace( + parameters.get("loading").get("min"), + parameters.get("loading").get("max"), + parameters.get("loading").get("steps"), +) + +data = {"elastic": [], "surface": [], "total": [], "load": []} xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True plotter = pyvista.Plotter( @@ -353,38 +328,39 @@ def _corners(x): shape=(1, 2), ) -for (i_t, t) in enumerate(loads): - # update boundary conditions +for i_t, t in enumerate(loads): + # update boundary conditions - u_.interpolate(lambda x: (np.zeros_like(x[0]), t*np.ones_like(x[1]))) - u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + u_.interpolate(lambda x: (np.zeros_like(x[0]), t * np.ones_like(x[1]))) + u_.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) - # update lower bound for 
damage - alpha.vector.copy(alpha_lb.vector) - alpha.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + # update lower bound for damage + alpha.vector.copy(alpha_lb.vector) + alpha.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) - # solve for current load step - print(f"Solving timestep {i_t}, load: {t}") - solver.solve() + # solve for current load step + print(f"Solving timestep {i_t}, load: {t}") + solver.solve() - # global postprocessing - surface_energy = assemble_scalar(dolfinx.fem.form( - model.damage_energy_density(state) * dx)) + # global postprocessing + surface_energy = assemble_scalar( + dolfinx.fem.form(model.damage_energy_density(state) * dx) + ) - elastic_energy = assemble_scalar( - dolfinx.fem.form(model.elastic_energy_density(state) * dx)) + elastic_energy = assemble_scalar( + dolfinx.fem.form(model.elastic_energy_density(state) * dx) + ) - data.get('elastic').append(elastic_energy) - data.get('surface').append(surface_energy) - data.get('total').append(surface_energy+elastic_energy) - data.get('load').append(t) + data.get("elastic").append(elastic_energy) + data.get("surface").append(surface_energy) + data.get("total").append(surface_energy + elastic_energy) + data.get("load").append(t) - print(f"Solved timestep {i_t}, load: {t}") - print( - f"Elastic Energy {elastic_energy:.3g}, Surface energy: {surface_energy:.3g}") - print("\n\n") + print(f"Solved timestep {i_t}, load: {t}") + print(f"Elastic Energy {elastic_energy:.3g}, Surface energy: {surface_energy:.3g}") + print("\n\n") plotter = pyvista.Plotter( title="Displacement", @@ -394,4 +370,4 @@ def _corners(x): _plt = plot_scalar(alpha, plotter, subplot=(0, 0)) _plt = plot_vector(u, plotter, subplot=(0, 1)) -_plt.screenshot(f"./output/vnotch_fields.png") +_plt.screenshot("./output/vnotch_fields.png") diff --git a/src/irrevolutions/solvers/__init__.py b/src/irrevolutions/solvers/__init__.py index bb3f7ca3..c9180050 100644 --- a/src/irrevolutions/solvers/__init__.py +++ b/src/irrevolutions/solvers/__init__.py @@ -1,19 +1,24 @@ -from mpi4py import MPI -import ufl -import dolfinx -from petsc4py import PETSc import sys + +import dolfinx import petsc4py +import ufl +from mpi4py import MPI +from petsc4py import PETSc petsc4py.init(sys.argv) from dolfinx.cpp.log import LogLevel, log -from dolfinx.fem import form -# from damage.utils import ColorPrint +# from damage.utils import ColorPrint from dolfinx.fem.petsc import ( - assemble_matrix, apply_lifting, create_vector, create_matrix, set_bc, assemble_vector) - + apply_lifting, + assemble_matrix, + assemble_vector, + create_matrix, + create_vector, + set_bc, +) # import pdb; # pdb.set_trace() @@ -25,6 +30,7 @@ class SNESSolver: """ Problem class for elasticity, compatible with PETSC.SNES solvers. 
""" + def __init__( self, F_form: ufl.Form, @@ -38,7 +44,6 @@ def __init__( monitor=None, prefix=None, ): - self.u = u self.bcs = bcs self.bounds = bounds @@ -115,11 +120,11 @@ def F(self, snes: PETSc.SNES, x: PETSc.Vec, b: PETSc.Vec): """ # We need to assign the vector to the function - x.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + x.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) x.copy(self.u.vector) - self.u.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) + self.u.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) # Zero the residual vector with b.localForm() as b_local: @@ -128,8 +133,7 @@ def F(self, snes: PETSc.SNES, x: PETSc.Vec, b: PETSc.Vec): # Apply boundary conditions apply_lifting(b, [self.J_form], [self.bcs], [x], -1.0) - b.ghostUpdate(addv=PETSc.InsertMode.ADD, - mode=PETSc.ScatterMode.REVERSE) + b.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.REVERSE) set_bc(b, self.bcs, x, -1.0) def J(self, snes, x: PETSc.Vec, A: PETSc.Mat, P: PETSc.Mat): @@ -156,10 +160,10 @@ def solve(self): # "with converged reason", # self.solver.getConvergedReason(), # ) - self.u.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) - return (self.solver.getIterationNumber(), - self.solver.getConvergedReason()) + self.u.vector.ghostUpdate( + addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD + ) + return (self.solver.getIterationNumber(), self.solver.getConvergedReason()) except Warning: log( diff --git a/src/irrevolutions/solvers/function.py b/src/irrevolutions/solvers/function.py index 3bf44003..453ac2ea 100644 --- a/src/irrevolutions/solvers/function.py +++ b/src/irrevolutions/solvers/function.py @@ -1,10 +1,10 @@ import typing -import numpy import dolfinx -from petsc4py import PETSc +import numpy import ufl from dolfinx.fem import Function +from petsc4py import PETSc def extract_blocks( @@ -42,7 +42,6 @@ def extract_blocks( blocks = [None for i in range(len(test_functions))] for i, tef in enumerate(test_functions): - if trial_functions is not None: for j, trf in enumerate(trial_functions): to_null = dict() diff --git a/src/irrevolutions/solvers/restriction.py b/src/irrevolutions/solvers/restriction.py index 6d05bac1..ea026077 100644 --- a/src/irrevolutions/solvers/restriction.py +++ b/src/irrevolutions/solvers/restriction.py @@ -1,7 +1,7 @@ import typing -import numpy import dolfinx +import numpy from petsc4py import PETSc @@ -39,7 +39,6 @@ def __init__( offset_vec = 0 for i, space in enumerate(function_spaces): - bs = space.dofmap.index_map_bs size_local = space.dofmap.index_map.size_local @@ -91,9 +90,9 @@ def update_functions(self, f: typing.List, rx: PETSc.Vec): for i, fi in enumerate(f): num_rdofs = self.bglobal_dofs_vec[i].shape[0] - fi.vector.array[ - self.bglobal_dofs_vec[i] - self.boffsets_vec[i] - ] = rx.array_r[rdof_offset : (rdof_offset + num_rdofs)] + fi.vector.array[self.bglobal_dofs_vec[i] - self.boffsets_vec[i]] = ( + rx.array_r[rdof_offset : (rdof_offset + num_rdofs)] + ) fi.vector.ghostUpdate( addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD diff --git a/src/irrevolutions/solvers/slepcblockproblem.py b/src/irrevolutions/solvers/slepcblockproblem.py index cd33f626..1b197626 100644 --- a/src/irrevolutions/solvers/slepcblockproblem.py +++ b/src/irrevolutions/solvers/slepcblockproblem.py @@ -1,18 +1,15 @@ +import logging import typing # from yaml.tokens import BlockSequenceStartToken - 
import dolfinx import ufl -from .function import vec_to_functions -from slepc4py import SLEPc -from irrevolutions.utils.viz import plot_matrix # plot_matrix - from petsc4py import PETSc -import logging -import pdb +from slepc4py import SLEPc + +from .function import vec_to_functions class SLEPcBlockProblem: @@ -306,7 +303,7 @@ def solve(self): # logging.debug(f"mat rA sizes {self.rA.sizes}") # logging.debug(f"mat A sizes {self.A.sizes}") - if logging.getLevelName(logging.getLogger().getEffectiveLevel()) == 'DEBUG': + if logging.getLevelName(logging.getLogger().getEffectiveLevel()) == "DEBUG": viewer = PETSc.Viewer().createASCII( f"rA-{self.eps.getOptionsPrefix()[0:-1]}.txt" ) @@ -322,7 +319,6 @@ def solve(self): # logging.critical(f"mat B sizes {self.B.sizes}") # logging.critical(f"mat B norm {self.B.norm()}") - if not self.empty_B(): # pdb.set_trace() diff --git a/src/irrevolutions/solvers/snesblockproblem.py b/src/irrevolutions/solvers/snesblockproblem.py index 489d8c16..39aaa165 100644 --- a/src/irrevolutions/solvers/snesblockproblem.py +++ b/src/irrevolutions/solvers/snesblockproblem.py @@ -1,15 +1,15 @@ +import logging import typing # from yaml.tokens import BlockSequenceStartToken - import dolfinx +import numpy as np import ufl -from .function import vec_to_functions, functions_to_vec - -from petsc4py import PETSc from mpi4py import MPI -import logging -import numpy as np +from petsc4py import PETSc + +from .function import functions_to_vec, vec_to_functions + class SNESBlockProblem: def __init__( @@ -251,9 +251,11 @@ def _monitor_block(self, snes, it, norm): self.compute_norms_block(snes) self.print_norms(it) - logging.debug(f'Residual reduced norms {self.norm_r}') - logging.debug(f'Residual reduced norm {np.sqrt(np.array([x**2 for x in self.norm_r[0]]).sum())}') - + logging.debug(f"Residual reduced norms {self.norm_r}") + logging.debug( + f"Residual reduced norm {np.sqrt(np.array([x**2 for x in self.norm_r[0]]).sum())}" + ) + # if logging.root.level <= logging.DEBUG: # self._plot_solution(it) @@ -263,7 +265,7 @@ def _plot_solution(self, it): # init plotter import pyvista from pyvista.utilities import xvfb - from utils.viz import plot_vector, plot_scalar + from utils.viz import plot_scalar, plot_vector xvfb.start_xvfb(wait=0.05) pyvista.OFF_SCREEN = True diff --git a/src/irrevolutions/utils/__init__.py b/src/irrevolutions/utils/__init__.py index f0bfc165..10a87658 100644 --- a/src/irrevolutions/utils/__init__.py +++ b/src/irrevolutions/utils/__init__.py @@ -1,6 +1,7 @@ import json import logging import os +import pickle import subprocess import sys from typing import List @@ -12,10 +13,10 @@ from dolfinx.fem import assemble_scalar, form from mpi4py import MPI from petsc4py import PETSc -import pickle comm = MPI.COMM_WORLD + class ColorPrint: """ Colored printing functions for strings that use universal ANSI escape @@ -70,9 +71,11 @@ def print_bold(message, end="\n"): sys.stdout.write("\x1b[1;37m" + message.strip() + "\x1b[0m" + end) sys.stdout.flush() + def setup_logger_mpi(root_priority: int = logging.INFO): import dolfinx from mpi4py import MPI + class MPIFormatter(logging.Formatter): def format(self, record): record.rank = MPI.COMM_WORLD.Get_rank() @@ -93,34 +96,36 @@ def format(self, record): logger.propagate = False # StreamHandler to log messages to the console console_handler = logging.StreamHandler() - file_handler = logging.FileHandler('evolution.log') + file_handler = logging.FileHandler("evolution.log") # formatter = logging.Formatter('%(asctime)s - %(name)s - 
[%(levelname)s] - %(message)s') - formatter = MPIFormatter('%(asctime)s [Rank %(rank)d, Size %(size)d] - %(name)s - [%(levelname)s] - %(message)s') + formatter = MPIFormatter( + "%(asctime)s [Rank %(rank)d, Size %(size)d] - %(name)s - [%(levelname)s] - %(message)s" + ) file_handler.setFormatter(formatter) console_handler.setFormatter(formatter) - + # file_handler.setLevel(logging.INFO) file_handler.setLevel(root_process_log_level if rank == 0 else logging.CRITICAL) console_handler.setLevel(root_process_log_level if rank == 0 else logging.CRITICAL) - # Disable propagation to root logger for both handlers console_handler.propagate = False file_handler.propagate = False - - + # logger.addHandler(console_handler) logger.addHandler(file_handler) # Log messages, and only the root process will log. logger.info("The root process spawning an evolution computation (rank 0)") logger.info( - f"DOLFINx version: {dolfinx.__version__} based on GIT commit: {dolfinx.git_commit_hash} of https://github.com/FEniCS/dolfinx/") + f"DOLFINx version: {dolfinx.__version__} based on GIT commit: {dolfinx.git_commit_hash} of https://github.com/FEniCS/dolfinx/" + ) return logger + _logger = setup_logger_mpi() @@ -149,20 +154,34 @@ def get_branch_details(): return branch_name, commit_hash except Exception as e: - print(f"Failed to retrieve branch name and commit hash from GitHub Actions environment: {e}") + print( + f"Failed to retrieve branch name and commit hash from GitHub Actions environment: {e}" + ) branch_name = None commit_hash = None # If GitHub Actions environment variables are not available, try to get branch name and commit hash locally try: # Get the current Git branch - branch = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"], stderr=subprocess.PIPE).strip().decode("utf-8") - commit_hash = subprocess.check_output(["git", "rev-parse", "HEAD"]).strip().decode("utf-8") + branch = ( + subprocess.check_output( + ["git", "rev-parse", "--abbrev-ref", "HEAD"], stderr=subprocess.PIPE + ) + .strip() + .decode("utf-8") + ) + commit_hash = ( + subprocess.check_output(["git", "rev-parse", "HEAD"]) + .strip() + .decode("utf-8") + ) return branch, commit_hash - + except Exception as e: print(f"Failed to retrieve branch name and commit hash locally: {e}") - return 'unknown', 'unknown' + return "unknown", "unknown" + + # Get the current branch branch, commit_hash = get_branch_details() @@ -189,6 +208,7 @@ def get_branch_details(): **code_info, } + def norm_L2(u): """ Returns the L2 norm of the function u @@ -196,22 +216,21 @@ def norm_L2(u): comm = u.function_space.mesh.comm dx = ufl.Measure("dx", u.function_space.mesh) norm_form = form(ufl.inner(u, u) * dx) - norm = np.sqrt(comm.allreduce( - assemble_scalar(norm_form), op=mpi4py.MPI.SUM)) + norm = np.sqrt(comm.allreduce(assemble_scalar(norm_form), op=mpi4py.MPI.SUM)) return norm + def norm_H1(u): """ Returns the H1 norm of the function u """ comm = u.function_space.mesh.comm dx = ufl.Measure("dx", u.function_space.mesh) - norm_form = form( - (ufl.inner(u, u) + ufl.inner(ufl.grad(u), ufl.grad(u))) * dx) - norm = np.sqrt(comm.allreduce( - assemble_scalar(norm_form), op=mpi4py.MPI.SUM)) + norm_form = form((ufl.inner(u, u) + ufl.inner(ufl.grad(u), ufl.grad(u))) * dx) + norm = np.sqrt(comm.allreduce(assemble_scalar(norm_form), op=mpi4py.MPI.SUM)) return norm + def seminorm_H1(u): """ Returns the H1 norm of the function u @@ -219,21 +238,23 @@ def seminorm_H1(u): comm = u.function_space.mesh.comm dx = ufl.Measure("dx", u.function_space.mesh) seminorm = 
form((ufl.inner(ufl.grad(u), ufl.grad(u))) * dx) - seminorm = np.sqrt(comm.allreduce( - assemble_scalar(seminorm), op=mpi4py.MPI.SUM)) + seminorm = np.sqrt(comm.allreduce(assemble_scalar(seminorm), op=mpi4py.MPI.SUM)) return seminorm + def set_vector_to_constant(x, value): with x.localForm() as local: local.set(value) x.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) + def table_timing_data(): import pandas as pd from dolfinx.common import timing timing_data = [] - tasks = ["~First Order: AltMin solver", + tasks = [ + "~First Order: AltMin solver", "~First Order: AltMin-Damage solver", "~First Order: AltMin-Elastic solver", "~First Order: Hybrid solver", @@ -241,25 +262,29 @@ def table_timing_data(): "~Second Order: Cone Project", "~Second Order: Stability", "~Postprocessing and Vis", - "~Computation Experiment" - ] + "~Computation Experiment", + ] for task in tasks: timing_data.append(timing(task)) - - df = pd.DataFrame(timing_data, columns=["reps", "wall tot", "usr", "sys"], index=tasks) + + df = pd.DataFrame( + timing_data, columns=["reps", "wall tot", "usr", "sys"], index=tasks + ) return df + def find_offending_columns_lengths(data): lengths = {} for key, value in data.items(): try: lengths[key] = len(value) except TypeError: - lengths[key] = 'Non-iterable' + lengths[key] = "Non-iterable" return lengths + from dolfinx.io import XDMFFile @@ -285,12 +310,17 @@ def store_results(self, parameters, history_data, state): alpha = state["alpha"] if self.comm.rank == 0: - with open(f"{self.prefix}/parameters.yaml", 'w') as file: + with open(f"{self.prefix}/parameters.yaml", "w") as file: yaml.dump(parameters, file) - with XDMFFile(self.comm, f"{self.prefix}/simulation_results.xdmf", "w", encoding=XDMFFile.Encoding.HDF5) as file: + with XDMFFile( + self.comm, + f"{self.prefix}/simulation_results.xdmf", + "w", + encoding=XDMFFile.Encoding.HDF5, + ) as file: # for t, data in history_data.items(): - # file.write_scalar(data, t) + # file.write_scalar(data, t) file.write_mesh(u.function_space.mesh) file.write_function(u, t) @@ -300,8 +330,10 @@ def store_results(self, parameters, history_data, state): with open(f"{self.prefix}/time_data.json", "w") as file: json.dump(history_data, file) + # Visualization functions/classes + class Visualization: """ Class for visualizing simulation results. 
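For context on the pattern above: the `norm_L2`, `norm_H1`, and `seminorm_H1` helpers in `src/irrevolutions/utils/__init__.py` each assemble a scalar form locally on every rank and then `allreduce` the partial sums before taking the square root, so the returned value is identical on all MPI processes. A minimal, illustrative usage sketch (only `norm_L2` and `seminorm_H1` come from the module; the mesh, function space, and damage-like field below are invented for demonstration, assuming `dolfinx` 0.7.x as pinned in the installation instructions):

```
# Illustrative only: build a toy 1d field and evaluate the parallel-safe norms
# defined in src/irrevolutions/utils/__init__.py.
import dolfinx
from mpi4py import MPI

from irrevolutions.utils import norm_L2, seminorm_H1

mesh = dolfinx.mesh.create_unit_interval(MPI.COMM_WORLD, 100)
V_alpha = dolfinx.fem.FunctionSpace(mesh, ("Lagrange", 1))
alpha = dolfinx.fem.Function(V_alpha)
alpha.interpolate(lambda x: x[0] * (1.0 - x[0]))  # smooth damage-like profile

# Each helper integrates inner(u, u)*dx (plus gradient terms for the H1
# quantities) over the locally owned cells, then sums across ranks with
# comm.allreduce, so no rank-0 guard is needed around the call.
print(norm_L2(alpha), seminorm_H1(alpha))
```

Because the MPI reduction happens inside the helpers, they can be called unguarded from every rank during postprocessing, which is why the tests touched by this patch can import and call them directly (e.g. `norm_H1` in `test/test_linsearch.py`).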
@@ -334,6 +366,7 @@ def save_table(self, data, name): json.dump(data.to_json(), a_file) a_file.close() + history_data = { "load": [], "elastic_energy": [], @@ -348,16 +381,25 @@ def save_table(self, data, name): "inertia": [], } -def _write_history_data(equilibrium, bifurcation, stability, history_data, t, inertia, stable, energies: List): - + +def _write_history_data( + equilibrium, + bifurcation, + stability, + history_data, + t, + inertia, + stable, + energies: List, +): elastic_energy = energies[0] fracture_energy = energies[1] unique = True if inertia[0] == 0 and inertia[1] == 0 else False - + history_data["load"].append(t) history_data["fracture_energy"].append(fracture_energy) history_data["elastic_energy"].append(elastic_energy) - history_data["total_energy"].append(elastic_energy+fracture_energy) + history_data["total_energy"].append(elastic_energy + fracture_energy) history_data["equilibrium_data"].append(equilibrium.data) history_data["inertia"].append(inertia) history_data["unique"].append(unique) @@ -366,7 +408,7 @@ def _write_history_data(equilibrium, bifurcation, stability, history_data, t, in history_data["cone_data"].append(stability.data) history_data["eigs_cone"].append(stability.solution["lambda_t"]) - return + return def indicator_function(v): @@ -375,11 +417,10 @@ def indicator_function(v): # Create the indicator function w = dolfinx.fem.Function(v.function_space) with w.vector.localForm() as w_loc, v.vector.localForm() as v_loc: - w_loc[:] = np.where(v_loc[:] > 0, 1., 0.) + w_loc[:] = np.where(v_loc[:] > 0, 1.0, 0.0) + + w.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) - w.vector.ghostUpdate(addv=PETSc.InsertMode.INSERT, - mode=PETSc.ScatterMode.FORWARD) - return w @@ -491,7 +532,7 @@ def save_minimal_constraints(obj, filename): def load_minimal_constraints(filename): import irrevolutions.solvers.restriction as restriction - + with open(filename, "rb") as file: minimal_constraints = pickle.load(file) @@ -504,13 +545,12 @@ def load_minimal_constraints(filename): return reconstructed_obj - - def sample_data(N, positive=True): + import random + import dolfinx from dolfinx.cpp.la.petsc import get_local_vectors, scatter_local_vectors - import random - + mesh = dolfinx.mesh.create_unit_interval(MPI.COMM_WORLD, N - 1) comm = MPI.COMM_WORLD comm.Get_rank() @@ -557,4 +597,3 @@ def sample_data(N, positive=True): v.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) return F, v - diff --git a/src/irrevolutions/utils/colab.py b/src/irrevolutions/utils/colab.py index d8181b27..2eb67cd1 100644 --- a/src/irrevolutions/utils/colab.py +++ b/src/irrevolutions/utils/colab.py @@ -1,6 +1,5 @@ # %%capture import subprocess -import sys try: import google.colab # noqa: F401 @@ -14,7 +13,13 @@ except ImportError: # !wget "https://fem-on-colab.github.io/releases/fenicsx-install.sh" -O "/tmp/fenicsx-install.sh" && bash "/tmp/fenicsx-install.sh"; try: - subprocess.run(["wget", "https://fem-on-colab.github.io/releases/fenicsx-install.sh", "-O /tmp/fenicsx-install.sh"]) + subprocess.run( + [ + "wget", + "https://fem-on-colab.github.io/releases/fenicsx-install.sh", + "-O /tmp/fenicsx-install.sh", + ] + ) except subprocess.CalledProcessError as ret: print("error code", ret.returncode, ret.output) @@ -40,4 +45,4 @@ # import pyvista # except ImportError: # !pip3 install --upgrade pyvista itkwidgets; -# import pyvista # noqa: F401 \ No newline at end of file +# import pyvista # noqa: F401 diff --git a/src/irrevolutions/utils/eigenspace.py 
b/src/irrevolutions/utils/eigenspace.py index d172c68f..bccb282a 100644 --- a/src/irrevolutions/utils/eigenspace.py +++ b/src/irrevolutions/utils/eigenspace.py @@ -15,20 +15,21 @@ def solve_minimum(parameters): a = parameters["a"] b = parameters["b"] c = parameters["c"] - A, C = sp.symbols('A C') - _condition = b * c**2 < np.pi**2 * a + A, C = sp.symbols("A C") + _condition = b * c**2 < np.pi**2 * a print(f"bc**2 = {np.around(b*c**2, 2)}, π**2 * a = {np.around(np.pi**2 * a, 2)}") - + if _condition: - print('case 1') + print("case 1") _subs = {A: 0} elif not _condition: - print('case 2') + print("case 2") _subs = {C: 0} - return np.min([b*c**2,np.pi**2*a]), _subs + return np.min([b * c**2, np.pi**2 * a]), _subs + -def solve_eigenspace_vector(parameters, idx = 0): +def solve_eigenspace_vector(parameters, idx=0): """ Solve for the eigenspace in a vector space. @@ -39,48 +40,58 @@ def solve_eigenspace_vector(parameters, idx = 0): Returns: dict: A dictionary containing 'v', 'β', and 'D'. """ - x = sp.symbols('x', real=True) - v = sp.Function('v', real=True)(x) - β = sp.Function('β', real=True)(x) - C, A = sp.symbols('C A') - + x = sp.symbols("x", real=True) + v = sp.Function("v", real=True)(x) + β = sp.Function("β", real=True)(x) + C, A = sp.symbols("C A") + a = parameters["a"] b = parameters["b"] - c = parameters["c"] - + c = parameters["c"] + if b * c**2 < sp.pi**2 * a: - print('case 1') + print("case 1") _subs = {A: 0} A = 0 elif b * c**2 > sp.pi**2 * a: - print('case 2') + print("case 2") _subs = {C: 0} C = 0 - - - β = C + A*sp.cos(sp.pi * x) + + β = C + A * sp.cos(sp.pi * x) v = c * A / sp.pi * sp.sin(sp.pi * x) - depends_on_A = np.any([sp.symbols('A') in expression.free_symbols for expression in [v, β]]) - depends_on_C = np.any([sp.symbols('C') in expression.free_symbols for expression in [v, β]]) - - _norm = sp.sqrt(np.sum([sp.integrate(eigenfunction**2, (x, 0, 1)) for eigenfunction in (v, β)])) + depends_on_A = np.any( + [sp.symbols("A") in expression.free_symbols for expression in [v, β]] + ) + depends_on_C = np.any( + [sp.symbols("C") in expression.free_symbols for expression in [v, β]] + ) + + _norm = sp.sqrt( + np.sum([sp.integrate(eigenfunction**2, (x, 0, 1)) for eigenfunction in (v, β)]) + ) print([expression.free_symbols for expression in [v, β]]) print(_norm, depends_on_A, depends_on_C) - + if depends_on_A: - print('depends_on_A') - _normalise = [{sp.symbols('A'): ay} for ay in sp.solve(_norm - 1, A)] + print("depends_on_A") + _normalise = [{sp.symbols("A"): ay} for ay in sp.solve(_norm - 1, A)] elif depends_on_C: - print('depends_on_C') - _normalise = [{sp.symbols('C'): cy} for cy in sp.solve(_norm - 1, C)] - - return {"v": v.subs(_normalise[idx]), "β": β.subs(_normalise[idx]), "D": 0}, _normalise[idx] + print("depends_on_C") + _normalise = [{sp.symbols("C"): cy} for cy in sp.solve(_norm - 1, C)] + + return { + "v": v.subs(_normalise[idx]), + "β": β.subs(_normalise[idx]), + "D": 0, + }, _normalise[idx] # return (v, β), _normalise -def solve_eigenspace_cone(parameters, idx = 0): + +def solve_eigenspace_cone(parameters, idx=0): """ Solve for the eigenspace in a convex set (cone). @@ -91,63 +102,63 @@ def solve_eigenspace_cone(parameters, idx = 0): Returns: dict: A dictionary containing 'v', 'β', and 'D'. 
""" - x = sp.symbols('x', real=True) - v = sp.Function('v', real=True)(x) - β = sp.Function('β', real=True)(x) - C, A = sp.symbols('C A') - + x = sp.symbols("x", real=True) + v = sp.Function("v", real=True)(x) + β = sp.Function("β", real=True)(x) + C, A = sp.symbols("C A") + a = parameters["a"] b = parameters["b"] - c = parameters["c"] + c = parameters["c"] D = 0 - + if b * c**2 < sp.pi**2 * a: - print('case 1') + print("case 1") β = C - + elif b * c**2 > sp.pi**2 * a: - print('case 2') + print("case 2") # D = sp.symbols('D') - D = (sp.pi**2 * a / (b * c**2))**(1/3) + D = (sp.pi**2 * a / (b * c**2)) ** (1 / 3) β = sp.Piecewise( - (C *(1 + sp.cos(sp.pi * x / D)), (0 <= x) & (x <= D)), - (0, True) - ) - - _min = (np.pi**2 * a)**(1/3) * (b * c**2)**(2/3) - + (C * (1 + sp.cos(sp.pi * x / D)), (0 <= x) & (x <= D)), (0, True) + ) + + _min = (np.pi**2 * a) ** (1 / 3) * (b * c**2) ** (2 / 3) + elif b * c**2 == sp.pi**2 * a: - print('case eq') + print("case eq") _min = b * c**2 _subs = {C: 0} C = 0 - β = C + A*sp.cos(sp.pi * x) + β = C + A * sp.cos(sp.pi * x) # abs(A) < C - - depends_on_A = sp.symbols('A') in β.free_symbols - depends_on_C = sp.symbols('C') in β.free_symbols - depends_on_D = sp.symbols('D') in β.free_symbols - + + depends_on_A = sp.symbols("A") in β.free_symbols + depends_on_C = sp.symbols("C") in β.free_symbols + depends_on_D = sp.symbols("D") in β.free_symbols + _norm = sp.sqrt(sp.integrate(β**2, (x, 0, 1))) # print([expression.free_symbols for expression in [v, β]]) print(_norm) - + if depends_on_A: - print('depends_on_A') - _normalise = [{sp.symbols('A'): ay} for ay in sp.solve(_norm - 1, A)] + print("depends_on_A") + _normalise = [{sp.symbols("A"): ay} for ay in sp.solve(_norm - 1, A)] elif depends_on_C: - print('depends_on_C') - _normalise = [{sp.symbols('C'): cy} for cy in sp.solve(_norm - 1, C) if cy > 0] + print("depends_on_C") + _normalise = [{sp.symbols("C"): cy} for cy in sp.solve(_norm - 1, C) if cy > 0] elif depends_on_D: - print('depends_on_D') - + print("depends_on_D") + return {"v": 0, "β": β.subs(_normalise[idx]), "D": D}, _normalise[idx] -def book_of_the_numbers(scale_b = 3, scale_c = 3): + +def book_of_the_numbers(scale_b=3, scale_c=3): """This function, informally called `fuck_dgsi`, invokes the book of the numbers to get three real quantities, according to the scriptures. - + @article{pham:2011-the-issues, author = {Pham, Kim and Marigo, Jean-Jacques and Maurini, Corrado}, date-added = {2015-08-24 14:23:19 +0000}, @@ -160,14 +171,16 @@ def book_of_the_numbers(scale_b = 3, scale_c = 3): volume = {59}, year = {2011}, } - + Also, fuck Elsevier and Springer Nature. 
- + """ while True: a = np.random.rand() - b = np.random.rand()*scale_b - c = float((np.random.choice([-1, 1], 1) * np.random.rand(1))[0]*scale_c) # Generate a random number with sign between 0 and 3 + b = np.random.rand() * scale_b + c = float( + (np.random.choice([-1, 1], 1) * np.random.rand(1))[0] * scale_c + ) # Generate a random number with sign between 0 and 3 # Check conditions if a > 0 and b > 0 and c != 0: diff --git a/src/irrevolutions/utils/lib.py b/src/irrevolutions/utils/lib.py index 72fa4b12..ec58f9e8 100644 --- a/src/irrevolutions/utils/lib.py +++ b/src/irrevolutions/utils/lib.py @@ -7,40 +7,43 @@ def singularity_exp(omega): lmbda : = sin(2*lmbda*(pi - omega)) + lmbda*sin(2(pi-lmbda)) = 0""" from sympy import nsolve, pi, sin, symbols - x = symbols('x') + x = symbols("x") - return nsolve( - sin(2*x*(pi - omega)) + x*sin(2*(pi-omega)), - x, .5) + return nsolve(sin(2 * x * (pi - omega)) + x * sin(2 * (pi - omega)), x, 0.5) # = parameters["material"] -def _local_notch_asymptotic(x, ω=45, t=1., par={}): +def _local_notch_asymptotic(x, ω=45, t=1.0, par={}): from sympy import cos, pi, sin, symbols + λ = singularity_exp(ω) - Θ = symbols('Θ') - _E = par['E'] - ν = par['ν'] + Θ = symbols("Θ") + _E = par["E"] + ν = par["ν"] Θv = np.arctan2(x[1], x[0]) - - coeff = ( (1+λ) * sin( (1+λ) * (pi - ω) ) ) / ( (1-λ) * sin( (1-λ) * (pi - ω) ) ) - _f = (2*np.pi)**(λ - 1) * ( cos( (1+λ) * Θ) - coeff * cos((1-λ) * Θ) ) / (1-coeff) + coeff = ((1 + λ) * sin((1 + λ) * (pi - ω))) / ((1 - λ) * sin((1 - λ) * (pi - ω))) + + _f = ( + (2 * np.pi) ** (λ - 1) + * (cos((1 + λ) * Θ) - coeff * cos((1 - λ) * Θ)) + / (1 - coeff) + ) f = sp.lambdify(Θ, _f, "numpy") fp = sp.lambdify(Θ, sp.diff(_f, Θ, 1), "numpy") fpp = sp.lambdify(Θ, sp.diff(_f, Θ, 2), "numpy") fppp = sp.lambdify(Θ, sp.diff(_f, Θ, 3), "numpy") - r = np.sqrt(x[0]**2. + x[1]**2.) - _c1 = (λ+1)*(1- ν*λ - ν**2.*(λ+1)) - _c2 = 1-ν**2. - _c3 = 2.*(1+ν)*λ**2. + _c1 + r = np.sqrt(x[0] ** 2.0 + x[1] ** 2.0) + _c1 = (λ + 1) * (1 - ν * λ - ν**2.0 * (λ + 1)) + _c2 = 1 - ν**2.0 + _c3 = 2.0 * (1 + ν) * λ**2.0 + _c1 _c4 = _c2 - _c5 = λ**2. * (1-λ**2.) 
+ _c5 = λ**2.0 * (1 - λ**2.0) - ur = t * ( r**λ / _E * (_c1*f(Θv) + _c2*fpp(Θv)) ) / _c5 - uΘ = t * ( r**λ / _E * (_c3*fp(Θv) + _c4*fppp(Θv)) ) / _c5 + ur = t * (r**λ / _E * (_c1 * f(Θv) + _c2 * fpp(Θv))) / _c5 + uΘ = t * (r**λ / _E * (_c3 * fp(Θv) + _c4 * fppp(Θv))) / _c5 _tdim = 2 values = np.zeros((_tdim, x.shape[1])) values[0] = ur * np.cos(Θv) - uΘ * np.sin(Θv) diff --git a/src/irrevolutions/utils/mesh_bar_gmshapi.py b/src/irrevolutions/utils/mesh_bar_gmshapi.py index e8d6b02a..8ad21142 100644 --- a/src/irrevolutions/utils/mesh_bar_gmshapi.py +++ b/src/irrevolutions/utils/mesh_bar_gmshapi.py @@ -12,7 +12,6 @@ def mesh_bar_gmshapi( # Perform Gmsh work only on rank = 0 if comm.rank == 0: - import gmsh # Initialise gmsh and set options @@ -73,6 +72,7 @@ def mesh_bar_gmshapi( import dolfinx.plot from dolfinx.io import XDMFFile from gmsh_mesh import gmsh_model_to_mesh + # from mesh import gmsh_to_dolfin # , merge_meshtags, locate_dofs_topological from mpi4py import MPI diff --git a/src/irrevolutions/utils/parametric.py b/src/irrevolutions/utils/parametric.py index 13feaf78..32de37d0 100644 --- a/src/irrevolutions/utils/parametric.py +++ b/src/irrevolutions/utils/parametric.py @@ -3,59 +3,59 @@ import yaml -def parameters_vs_ell(parameters = None, ell = 0.1): - if parameters is None: +def parameters_vs_ell(parameters=None, ell=0.1): + if parameters is None: with open("../test/parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - + parameters["model"]["ell"] = ell parameters["geometry"]["ell_lc"] = 3 - - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature -def parameters_vs_elle(parameters = None, elle = 0.3): + +def parameters_vs_elle(parameters=None, elle=0.3): # for the thin film model - if parameters is None: + if parameters is None: with open("../data/thinfilm/parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - + parameters["model"]["ell_e"] = elle - parameters["model"]["ell"] = elle/3 + parameters["model"]["ell"] = elle / 3 parameters["geometry"]["mesh_size_factor"] = 3 - parameters["geometry"]["lc"] = parameters["model"]["ell"] / parameters["geometry"]["mesh_size_factor"] - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + parameters["geometry"]["lc"] = ( + parameters["model"]["ell"] / parameters["geometry"]["mesh_size_factor"] + ) + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature -def parameters_vs_SPA_scaling(parameters = None, s = 0.01): - if parameters is None: +def parameters_vs_SPA_scaling(parameters=None, s=0.01): + if parameters is None: with open("../test/parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - + parameters["stability"]["cone"]["scaling"] = s parameters["stability"]["cone"]["cone_max_it"] = 400000 # parameters["stability"]["cone"]["cone_atol"] = 1e-6 # parameters["stability"]["cone"]["cone_rtol"] = 1e-5 - - signature = hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature -def parameters_vs_n_refinement(parameters = None, r = 3): - if parameters is None: +def parameters_vs_n_refinement(parameters=None, r=3): + if parameters is None: with open("../test/parameters.yml") as f: parameters = yaml.load(f, Loader=yaml.FullLoader) - + parameters["geometry"]["ell_lc"] = r - signature = 
hashlib.md5(str(parameters).encode('utf-8')).hexdigest() + signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest() return parameters, signature - diff --git a/src/irrevolutions/utils/plots.py b/src/irrevolutions/utils/plots.py index 9227b097..046d823f 100644 --- a/src/irrevolutions/utils/plots.py +++ b/src/irrevolutions/utils/plots.py @@ -3,7 +3,6 @@ def plot_energies(history_data, title="Evolution", file=None): - fig, ax1 = matplotlib.pyplot.subplots() if title is not None: @@ -48,7 +47,6 @@ def plot_energies(history_data, title="Evolution", file=None): def plot_AMit_load(history_data, title="AM max it - Load", file=None): - fig, ax1 = matplotlib.pyplot.subplots() if title is not None: @@ -81,7 +79,6 @@ def plot_AMit_load(history_data, title="AM max it - Load", file=None): def plot_force_displacement(history_data, title="Stress - load", file=None): - fig, ax1 = matplotlib.pyplot.subplots() if title is not None: @@ -114,7 +111,6 @@ def plot_force_displacement(history_data, title="Stress - load", file=None): def plot_residual_AMit( history_data, load_check, criterion, title="Residual - AM it", file=None ): - fig, ax1 = matplotlib.pyplot.subplots() if title is not None: @@ -148,7 +144,6 @@ def plot_residual_AMit( def plot_energy_AMit(history_data, load_check, title="Total energy - AM it", file=None): - fig, ax1 = matplotlib.pyplot.subplots() if title is not None: diff --git a/src/irrevolutions/utils/visuals.py b/src/irrevolutions/utils/visuals.py index ab2a6548..03e8b3ed 100644 --- a/src/irrevolutions/utils/visuals.py +++ b/src/irrevolutions/utils/visuals.py @@ -1,123 +1,180 @@ -import json - - -def matplotlibdefaults(palette='medium',useTex=False): +def matplotlibdefaults(palette="medium", useTex=False): from matplotlib import cycler, rcParams - lightgrey = '#CBCBCB' - grey = '#8C8C8C' - darkgrey = '#4D4D4D' - if palette == 'pastel': - cs = ['#fbb4ae', '#b3cde3', '#ccebc5', '#decbe4', '#fed9a6', '#ffffcc', '#e5d8bd', '#fddaec'] - elif palette == 'light': - cs = ['#8abde6', 'fbb258', '90cd97', 'f6aac9' , 'bfa554' , 'bc99c7' , 'eddd46' , 'f07e6e'] - elif palette == 'medium': - cs = ['#5da5da', 'faa43a' , '60bd68' , 'f17cb0' , 'b2912f' , 'b276b2' , 'decf3f' , 'f15854'] - elif palette == 'dark': - cs = ['#265dab', '#df5dab', '#059748' , '#e5120b' , '#9d722a' , '#7b3a96' , '#c7b42e' , '#cb201e'] + + lightgrey = "#CBCBCB" + grey = "#8C8C8C" + darkgrey = "#4D4D4D" + if palette == "pastel": + cs = [ + "#fbb4ae", + "#b3cde3", + "#ccebc5", + "#decbe4", + "#fed9a6", + "#ffffcc", + "#e5d8bd", + "#fddaec", + ] + elif palette == "light": + cs = [ + "#8abde6", + "fbb258", + "90cd97", + "f6aac9", + "bfa554", + "bc99c7", + "eddd46", + "f07e6e", + ] + elif palette == "medium": + cs = [ + "#5da5da", + "faa43a", + "60bd68", + "f17cb0", + "b2912f", + "b276b2", + "decf3f", + "f15854", + ] + elif palette == "dark": + cs = [ + "#265dab", + "#df5dab", + "#059748", + "#e5120b", + "#9d722a", + "#7b3a96", + "#c7b42e", + "#cb201e", + ] else: - print('Unknown palette: Using medium') - cs = ['#5da5da', 'faa43a' , '60bd68' , 'f17cb0' , 'b2912f' , 'b276b2' , 'decf3f' , 'f15854'] - - rcParams['axes.labelsize'] = 18 - rcParams['axes.facecolor'] = 'none' # axes background color - rcParams['axes.edgecolor'] = grey # axes edge color - rcParams['axes.labelcolor'] = darkgrey - rcParams['axes.prop_cycle'] = cycler(color=cs) - - rcParams['xtick.labelsize'] = 18 - rcParams['ytick.labelsize'] = 18 - rcParams['xtick.color'] = grey - rcParams['ytick.color'] = grey - rcParams['xtick.direction'] = 'out' - 
rcParams['ytick.direction'] = 'out' - rcParams['xtick.major.width'] = 2 - rcParams['ytick.major.width'] = 2 - rcParams['xtick.major.size'] = 8 - rcParams['ytick.major.size'] = 8 - - rcParams['legend.fontsize'] = 12 - rcParams['font.family'] = 'serif' - rcParams['text.usetex'] = useTex + print("Unknown palette: Using medium") + cs = [ + "#5da5da", + "faa43a", + "60bd68", + "f17cb0", + "b2912f", + "b276b2", + "decf3f", + "f15854", + ] + + rcParams["axes.labelsize"] = 18 + rcParams["axes.facecolor"] = "none" # axes background color + rcParams["axes.edgecolor"] = grey # axes edge color + rcParams["axes.labelcolor"] = darkgrey + rcParams["axes.prop_cycle"] = cycler(color=cs) + + rcParams["xtick.labelsize"] = 18 + rcParams["ytick.labelsize"] = 18 + rcParams["xtick.color"] = grey + rcParams["ytick.color"] = grey + rcParams["xtick.direction"] = "out" + rcParams["ytick.direction"] = "out" + rcParams["xtick.major.width"] = 2 + rcParams["ytick.major.width"] = 2 + rcParams["xtick.major.size"] = 8 + rcParams["ytick.major.size"] = 8 + + rcParams["legend.fontsize"] = 12 + rcParams["font.family"] = "serif" + rcParams["text.usetex"] = useTex if useTex: - rcParams['font.serif'] = 'Computer Modern Roman' + rcParams["font.serif"] = "Computer Modern Roman" else: - rcParams['font.serif'] = 'Times' + rcParams["font.serif"] = "Times" + + rcParams["lines.linewidth"] = 2.0 + rcParams["lines.markersize"] = 8 + rcParams["lines.markeredgewidth"] = 0 + rcParams["lines.solid_joinstyle"] = "round" - rcParams['lines.linewidth'] = 2.0 - rcParams['lines.markersize'] = 8 - rcParams['lines.markeredgewidth'] = 0 - rcParams['lines.solid_joinstyle'] = 'round' + rcParams["figure.facecolor"] = "#FFFFFF" # figure facecolor; 0.75 is scalar gray - rcParams['figure.facecolor'] = '#FFFFFF' # figure facecolor; 0.75 is scalar gray - - rcParams['axes.linewidth'] = 2.0 - rcParams['axes.titlesize'] = 12 - rcParams['text.color'] = darkgrey + rcParams["axes.linewidth"] = 2.0 + rcParams["axes.titlesize"] = 12 + rcParams["text.color"] = darkgrey - rcParams['grid.color'] = lightgrey - rcParams['grid.linestyle'] = '-' - rcParams['grid.linewidth'] = 0.25 # in points - rcParams['grid.alpha'] = .5 # transparency, between 0.0 and 1.0 + rcParams["grid.color"] = lightgrey + rcParams["grid.linestyle"] = "-" + rcParams["grid.linewidth"] = 0.25 # in points + rcParams["grid.alpha"] = 0.5 # transparency, between 0.0 and 1.0 + + rcParams["legend.frameon"] = False + rcParams["legend.labelspacing"] = 0.25 - rcParams['legend.frameon'] = False - rcParams['legend.labelspacing'] = 0.25 def setspines(): import matplotlib.pylab + for i in matplotlib.pylab.get_fignums(): for j in matplotlib.pylab.figure(i).get_axes(): - j.spines['top'].set_color('none') - j.spines['right'].set_color('none') - j.tick_params(axis='both',top='off',right='off',which='both',colors='#8C8C8C') - #j.spines['left'].set_position(('outward',10)) - #j.spines['bottom'].set_position(('outward',10)) - #j.spines['left'].set_position(('axes', -0.05)) - #j.spines['bottom'].set_position(('axes', -0.05)) + j.spines["top"].set_color("none") + j.spines["right"].set_color("none") + j.tick_params( + axis="both", top="off", right="off", which="both", colors="#8C8C8C" + ) + # j.spines['left'].set_position(('outward',10)) + # j.spines['bottom'].set_position(('outward',10)) + # j.spines['left'].set_position(('axes', -0.05)) + # j.spines['bottom'].set_position(('axes', -0.05)) return 0 def setspines4(): import matplotlib.pylab + for i in matplotlib.pylab.get_fignums(): for j in 
matplotlib.pylab.figure(i).get_axes(): # j.spines['top'].set_color('none') # j.spines['right'].set_color('none') - j.tick_params(axis='both',top='on',right='on',which='both',colors='#8C8C8C') - #j.spines['left'].set_position(('outward',10)) - #j.spines['bottom'].set_position(('outward',10)) - #j.spines['left'].set_position(('axes', -0.05)) - #j.spines['bottom'].set_position(('axes', -0.05)) + j.tick_params( + axis="both", top="on", right="on", which="both", colors="#8C8C8C" + ) + # j.spines['left'].set_position(('outward',10)) + # j.spines['bottom'].set_position(('outward',10)) + # j.spines['left'].set_position(('axes', -0.05)) + # j.spines['bottom'].set_position(('axes', -0.05)) return 0 def setspines2(): import matplotlib.pylab + for i in matplotlib.pylab.get_fignums(): for j in matplotlib.pylab.figure(i).get_axes(): - j.spines['top'].set_color('none') + j.spines["top"].set_color("none") # j.spines['right'].set_color('none') - j.tick_params(axis='both',top='off',right='on',which='both',colors='#8C8C8C') + j.tick_params( + axis="both", top="off", right="on", which="both", colors="#8C8C8C" + ) # j.spines['left'].set_position(('outward',10)) # j.spines['bottom'].set_position(('outward',10)) # j.spines['right'].set_position(('outward',10)) # j.spines['top'].set_position(('outward',10)) -# j.spines['left'].set_position(('axes', -0.05)) - #j.spines['bottom'].set_position(('axes', -0.05)) + # j.spines['left'].set_position(('axes', -0.05)) + # j.spines['bottom'].set_position(('axes', -0.05)) return 0 + def setspines0(): import matplotlib.pylab + for i in matplotlib.pylab.get_fignums(): for j in matplotlib.pylab.figure(i).get_axes(): - j.spines['top'].set_color('none') -# j.spines['bottom'].set_color('none') - j.spines['right'].set_color('none') - j.spines['left'].set_color('none') - j.tick_params(axis='both',top='off',right='off',which='both',colors='#8C8C8C') - #j.spines['left'].set_position(('outward',10)) - #j.spines['bottom'].set_position(('outward',10)) - #j.spines['left'].set_position(('axes', -0.05)) - #j.spines['bottom'].set_position(('axes', -0.05)) + j.spines["top"].set_color("none") + # j.spines['bottom'].set_color('none') + j.spines["right"].set_color("none") + j.spines["left"].set_color("none") + j.tick_params( + axis="both", top="off", right="off", which="both", colors="#8C8C8C" + ) + # j.spines['left'].set_position(('outward',10)) + # j.spines['bottom'].set_position(('outward',10)) + # j.spines['left'].set_position(('axes', -0.05)) + # j.spines['bottom'].set_position(('axes', -0.05)) return 0 diff --git a/src/irrevolutions/utils/viz.py b/src/irrevolutions/utils/viz.py index 76ba8924..64ede825 100644 --- a/src/irrevolutions/utils/viz.py +++ b/src/irrevolutions/utils/viz.py @@ -9,8 +9,6 @@ import logging -import dolfinx - logging.basicConfig(level=logging.INFO) from mpi4py import MPI @@ -22,13 +20,9 @@ xvfb.start_xvfb(wait=0.05) -import dolfinx.plot -import matplotlib -import matplotlib.collections import matplotlib.pyplot as plt import matplotlib.tri as tri from dolfinx.plot import vtk_mesh as compute_topology -from mpl_toolkits.axes_grid1 import make_axes_locatable # try: # from dolfinx.plot import create_vtk_mesh as compute_topology @@ -36,7 +30,7 @@ # from dolfinx.plot import create_vtk_topology as compute_topology -def plot_vector(u, plotter, subplot=None, scale=1.): +def plot_vector(u, plotter, subplot=None, scale=1.0): if subplot: plotter.subplot(subplot[0], subplot[1]) V = u.function_space @@ -63,10 +57,11 @@ def plot_vector(u, plotter, subplot=None, scale=1.): grid, 
show_edges=True, color="black", style="wireframe", opacity=0.3 ) plotter.view_xy() - plotter.set_background('white') + plotter.set_background("white") return plotter # figure = plotter.screenshot(f"./output/test_viz/test_viz_MPI{comm.size}-.png") + def plot_scalar(u, plotter, subplot=None, lineproperties={}): """Plots a scalar function using pyvista @@ -78,30 +73,41 @@ def plot_scalar(u, plotter, subplot=None, lineproperties={}): Returns: plotter: Updated plotter object - """ + """ if subplot: plotter.subplot(subplot[0], subplot[1]) V = u.function_space mesh = V.mesh - + ret = compute_topology(mesh, mesh.topology.dim) if len(ret) == 2: topology, cell_types = ret - else: + else: topology, cell_types, _ = ret grid = pyvista.UnstructuredGrid(topology, cell_types, mesh.geometry.x) plotter.subplot(0, 0) values = u.vector.array.real.reshape( - V.dofmap.index_map.size_local, V.dofmap.index_map_bs) + V.dofmap.index_map.size_local, V.dofmap.index_map_bs + ) grid.point_data["u"] = values grid.set_active_scalars("u") plotter.add_mesh(grid, **lineproperties) plotter.view_xy() - plotter.set_background('white') + plotter.set_background("white") return plotter -def plot_profile(u, points, plotter, subplot=None, lineproperties={}, fig=None, ax=None, subplotnumber = 1): + +def plot_profile( + u, + points, + plotter, + subplot=None, + lineproperties={}, + fig=None, + ax=None, + subplotnumber=1, +): # import matplotlib.pyplot as plt # import dolfinx.geometry # mesh = u.function_space.mesh @@ -124,13 +130,13 @@ def plot_profile(u, points, plotter, subplot=None, lineproperties={}, fig=None, # u_values = u.eval(points_on_proc, cells) points_on_proc, u_values = get_datapoints(u, points) - + if fig is None: fig = plt.figure() if subplot: # plotter.subplot(subplot[0], subplot[1]) - # if subplot: + # if subplot: plt.subplot(subplot[0], subplot[1], subplotnumber) # plt.plot(points_on_proc[:, 0], u_values, "k", ls="-", linewidth=1, label="") @@ -144,6 +150,7 @@ def plot_profile(u, points, plotter, subplot=None, lineproperties={}, fig=None, plt.legend() return plt, (points_on_proc[:, 0], u_values) + def plot_mesh(mesh, ax=None): if ax is None: ax = plt.gca() @@ -154,8 +161,10 @@ def plot_mesh(mesh, ax=None): ax.triplot(tria, color="k") return ax + def get_datapoints(u, points): import dolfinx.geometry + mesh = u.function_space.mesh cells = [] bb_tree = dolfinx.geometry.bb_tree(mesh, mesh.topology.dim) @@ -171,34 +180,32 @@ def get_datapoints(u, points): points_on_proc = np.array(points_on_proc, dtype=np.float64) u_values = u.eval(points_on_proc, cells) - + return points_on_proc, u_values + def plot_perturbations(comm, Lx, prefix, β, v, bifurcation, stability, i_t): from solvers.function import vec_to_functions - - vec_to_functions(bifurcation._spectrum[0]['xk'], [v, β]) + + vec_to_functions(bifurcation._spectrum[0]["xk"], [v, β]) if comm.Get_size() == 1: tol = 1e-3 xs = np.linspace(0 + tol, Lx - tol, 101) points = np.zeros((3, 101)) points[0] = xs - + plotter = pyvista.Plotter( - title="Perturbation profile", - window_size=[800, 600], - shape=(1, 1), - ) + title="Perturbation profile", + window_size=[800, 600], + shape=(1, 1), + ) _plt, data = plot_profile( - β, - points, - plotter, - subplot=(0, 0), - lineproperties={ - "c": "k", - "label": f"$\\beta$" - }, - ) + β, + points, + plotter, + subplot=(0, 0), + lineproperties={"c": "k", "label": "$\\beta$"}, + ) ax = _plt.gca() _plt.legend() _plt.fill_between(data[0], data[1].reshape(len(data[1]))) @@ -206,38 +213,34 @@ def plot_perturbations(comm, Lx, prefix, β, v, 
bifurcation, stability, i_t): _plt.savefig(f"{prefix}/perturbation-profile-{i_t}.png") _plt.close() - plotter = pyvista.Plotter( - title="Cone-Perturbation profile", - window_size=[800, 600], - shape=(1, 1), - ) + title="Cone-Perturbation profile", + window_size=[800, 600], + shape=(1, 1), + ) _plt, data = plot_profile( - stability.perturbation['beta'], - points, - plotter, - subplot=(0, 0), - lineproperties={ - "c": "k", - "label": f"$\\beta$" - }, - ) + stability.perturbation["beta"], + points, + plotter, + subplot=(0, 0), + lineproperties={"c": "k", "label": "$\\beta$"}, + ) ax = _plt.gca() _plt.legend() _plt.fill_between(data[0], data[1].reshape(len(data[1]))) _plt.title("Perurbation from the Cone") _plt.savefig(f"{prefix}/perturbation-profile-cone-{i_t}.png") _plt.close() - + return plotter + import scipy def plot_matrix(M): import matplotlib.pyplot as plt - import numpy as np fig, ax = plt.subplots() indptr, indices, data = M.getValuesCSR() @@ -246,7 +249,7 @@ def plot_matrix(M): for i in range(_M.shape[0]): for j in range(_M.shape[0]): - c = _M[j,i] - ax.text(i, j, f"{c:.3f}", va='center', ha='center') + c = _M[j, i] + ax.text(i, j, f"{c:.3f}", va="center", ha="center") return fig diff --git a/test/test_1d.py b/test/test_1d.py index b03dd936..9385115c 100644 --- a/test/test_1d.py +++ b/test/test_1d.py @@ -14,6 +14,7 @@ import pyvista import ufl import yaml +from dolfinx.common import list_timings from dolfinx.fem import ( Constant, Function, @@ -23,12 +24,8 @@ locate_dofs_geometrical, set_bc, ) -from dolfinx.common import list_timings -from dolfinx.fem.petsc import assemble_vector, set_bc +from dolfinx.fem.petsc import assemble_vector from dolfinx.io import XDMFFile -from mpi4py import MPI -from petsc4py import PETSc - from irrevolutions.algorithms.am import HybridSolver from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver from irrevolutions.solvers import SNESSolver @@ -44,11 +41,12 @@ from irrevolutions.utils.plots import ( plot_AMit_load, plot_energies, - plot_force_displacement, ) # from irrevolutions.utils.viz import plot_profile +from mpi4py import MPI +from petsc4py import PETSc """The fundamental problem of a 1d bar in traction. 
0|(WWWWWWWWWWWWWWWWWWWWWW)|========> t @@ -62,7 +60,6 @@ class _AlternateMinimisation1D: - def __init__( self, total_energy, @@ -109,7 +106,6 @@ def __init__( ) def solve(self, outdir=None): - alpha_diff = dolfinx.fem.Function(self.alpha.function_space) self.data = { @@ -486,9 +482,8 @@ def stress(state): stable = stability.solve(alpha_lb, eig0=z0, inertia=inertia) - with dolfinx.common.Timer(f"~Postprocessing and Vis") as timer: + with dolfinx.common.Timer("~Postprocessing and Vis") as timer: if comm.Get_size() == 1: - if bifurcation._spectrum: vec_to_functions(bifurcation._spectrum[0]["xk"], [v, β]) @@ -507,7 +502,7 @@ def stress(state): points, plotter, subplot=(1, 2), - lineproperties={"c": "k", "label": f"$\\beta$"}, + lineproperties={"c": "k", "label": "$\\beta$"}, subplotnumber=1, ) ax = _plt.gca() @@ -534,7 +529,7 @@ def stress(state): points, plotter, subplot=(1, 2), - lineproperties={"c": "k", "label": f"$\\beta$"}, + lineproperties={"c": "k", "label": "$\\beta$"}, subplotnumber=2, ax=ax, ) @@ -691,8 +686,6 @@ def load_parameters(file_path, ndofs, model="at1"): def test_1d(): - import argparse - from mpi4py import MPI # parser = argparse.ArgumentParser(description="Process evolution.") @@ -705,10 +698,12 @@ def test_1d(): pretty_parameters = json.dumps(parameters, indent=2) # print(pretty_parameters) # _storage = f"output/one-dimensional-bar/MPI-{MPI.COMM_WORLD.Get_size()}/{args.N}/{signature}" - _storage = f"output/one-dimensional-bar/MPI-{MPI.COMM_WORLD.Get_size()}/{_N}/{signature}" + _storage = ( + f"output/one-dimensional-bar/MPI-{MPI.COMM_WORLD.Get_size()}/{_N}/{signature}" + ) ColorPrint.print_bold(f"===================-{_storage}-=================") - with dolfinx.common.Timer(f"~Computation Experiment") as timer: + with dolfinx.common.Timer("~Computation Experiment") as timer: history_data, stability_data, state = run_computation(parameters, _storage) from irrevolutions.utils import ResultsStorage, Visualization @@ -726,16 +721,22 @@ def test_1d(): list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) from irrevolutions.utils import table_timing_data + _timings = table_timing_data() visualization.save_table(_timings, "timing_data") _neg_eigen_ball = [d[0] for d in pd.DataFrame(history_data).inertia.values] _stability = pd.DataFrame(history_data).stable.values _uniqueness = pd.DataFrame(history_data).unique.values - + np.testing.assert_array_equal(_neg_eigen_ball, [0, 0, 0, 1, 2]) - np.testing.assert_array_equal(_stability, np.array([True, True, True, False, False])) - np.testing.assert_array_equal(_uniqueness, np.array([True, True, True, False, False])) - + np.testing.assert_array_equal( + _stability, np.array([True, True, True, False, False]) + ) + np.testing.assert_array_equal( + _uniqueness, np.array([True, True, True, False, False]) + ) + + if __name__ == "__main__": - test_1d() \ No newline at end of file + test_1d() diff --git a/test/test_binarydataio.py b/test/test_binarydataio.py index e7dcb8dc..598f9541 100644 --- a/test/test_binarydataio.py +++ b/test/test_binarydataio.py @@ -1,7 +1,9 @@ -from .test_errorcodes import translatePETScERROR -from petsc4py import PETSc import pickle +from petsc4py import PETSc + +from .test_errorcodes import translatePETScERROR + def save_binary_data(filename, data): viewer = PETSc.Viewer().createBinary(filename, "w") @@ -123,7 +125,6 @@ def load_minimal_constraints(filename): if __name__ == "__main__": - m, n = 16, 32 # Example usage diff --git a/test/test_cone_convergence.py b/test/test_cone_convergence.py index 
53868174..1bfa338a 100644 --- a/test/test_cone_convergence.py +++ b/test/test_cone_convergence.py @@ -1,16 +1,16 @@ import logging import os +import pickle import dolfinx -from irrevolutions import utils +import irrevolutions.solvers.restriction as restriction import numpy as np import ufl from dolfinx.io import XDMFFile +from irrevolutions import utils from irrevolutions.algorithms.so import StabilitySolver -import irrevolutions.solvers.restriction as restriction from irrevolutions.utils import _logger from mpi4py import MPI -import pickle _logger.setLevel(logging.CRITICAL) diff --git a/test/test_cone_project.py b/test/test_cone_project.py index 4e5b8afb..cdd07bfe 100644 --- a/test/test_cone_project.py +++ b/test/test_cone_project.py @@ -1,16 +1,19 @@ -from irrevolutions.utils import sample_data -from test_restriction import ( - get_inactive_dofset, +import sys + +import dolfinx +import irrevolutions.solvers.restriction as restriction +import numpy as np +from irrevolutions.utils import ( + _logger, + load_binary_matrix, + load_binary_vector, + sample_data, ) -from test_extend import test_extend_vector -from irrevolutions.utils import load_binary_data, load_binary_matrix, load_binary_vector from mpi4py import MPI from petsc4py import PETSc -import numpy as np -import dolfinx -from irrevolutions.utils import _logger -import irrevolutions.solvers.restriction as restriction -import sys +from test_restriction import ( + get_inactive_dofset, +) sys.path.append("../") @@ -50,7 +53,7 @@ def _cone_project_restricted(v, _x, constraints): Returns: Vector: The projected vector. """ - with dolfinx.common.Timer(f"~Second Order: Cone Project"): + with dolfinx.common.Timer("~Second Order: Cone Project"): [ (V.dofmap.index_map, V.dofmap.index_map_bs) for V in constraints.function_spaces @@ -67,8 +70,8 @@ def _cone_project_restricted(v, _x, constraints): x_local.array[_dofs] = np.maximum(x_local.array[_dofs], 0) _logger.debug(f"Local dofs: {_dofs}") - _logger.debug(f"x_local") - _logger.debug(f"x_local truncated") + _logger.debug("x_local") + _logger.debug("x_local truncated") _x.ghostUpdate(addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD) # x_u, x_alpha = get_local_vectors(_x, maps) @@ -85,7 +88,6 @@ def _cone_project_restricted(v, _x, constraints): def test_cone_project(): - full_matrix = load_binary_matrix("data/solver/A.mat") matrix = load_binary_matrix("data/solver/Ar.mat") guess = load_binary_vector("data/solver/x0r.vec") @@ -103,7 +105,7 @@ def test_cone_project(): vr = constraints.restrict_vector(v) # x = test_cone_project_restricted(vr, constraints, x) x = _cone_project_restricted(vr, _x, constraints) - + if __name__ == "__main__": test_cone_project() diff --git a/test/test_errorcodes.py b/test/test_errorcodes.py index f72bcae1..8e13b0ac 100644 --- a/test/test_errorcodes.py +++ b/test/test_errorcodes.py @@ -58,7 +58,6 @@ translatePETScERROR = {v: k for k, v in error_codes.items()} if __name__ == "__main__": - # Example: Look up the description string for an error code error_code = 55 description = translatePETScERROR.get(error_code, "Error code not found") diff --git a/test/test_extend.py b/test/test_extend.py index fa041416..29f9f4d6 100644 --- a/test/test_extend.py +++ b/test/test_extend.py @@ -1,16 +1,14 @@ -from irrevolutions.utils import sample_data +from logging import getLevelName + +import dolfinx +import irrevolutions.solvers.restriction as restriction from dolfinx.cpp.la.petsc import get_local_vectors +from irrevolutions.utils import _logger, sample_data +from mpi4py import 
MPI from test_restriction import ( __log_incipit, get_inactive_dofset, ) -import dolfinx -from logging import getLevelName -from mpi4py import MPI - - -import irrevolutions.solvers.restriction as restriction -from irrevolutions.utils import _logger comm = MPI.COMM_WORLD rank = comm.Get_rank() @@ -41,13 +39,13 @@ def test_extend_vector(): maps = [(V.dofmap.index_map, V.dofmap.index_map_bs) for V in [V_u, V_alpha]] if getLevelName(_logger.getEffectiveLevel()) == "DEBUG": - _logger.debug(f"x") + _logger.debug("x") x.view() - _logger.debug(f"vr") + _logger.debug("vr") vr.view() - _logger.info(f"Setting up dofs for extension") + _logger.info("Setting up dofs for extension") _logger.debug( f'{__log_incipit} The "good" dofs: constraints.bglobal_dofs_vec_stacked {constraints.bglobal_dofs_vec_stacked}' ) @@ -58,11 +56,11 @@ def test_extend_vector(): _logger.debug(f"{__log_incipit} Local data of the x: {x.array}") if getLevelName(_logger.getEffectiveLevel()) == "DEBUG": - _logger.debug(f"x") + _logger.debug("x") x.view() x_u, x_alpha = get_local_vectors(x, maps) - _logger.info(f"The local vectors") + _logger.info("The local vectors") _logger.debug(f"{__log_incipit} Local data of the subvector x_u: {x_u}") _logger.debug(f"{__log_incipit} Local data of the subvector x_alpha: {x_alpha}") diff --git a/test/test_linsearch.py b/test/test_linsearch.py index 2b24e86f..3285bb1e 100644 --- a/test/test_linsearch.py +++ b/test/test_linsearch.py @@ -13,7 +13,6 @@ import numpy as np import pandas as pd import petsc4py -import pytest import pyvista import ufl import yaml @@ -34,14 +33,15 @@ from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver from irrevolutions.meshes.primitives import mesh_bar_gmshapi from irrevolutions.models import DamageElasticityModel as Brittle -from irrevolutions.utils.plots import plot_energies, plot_AMit_load, plot_force_displacement from irrevolutions.solvers.function import vec_to_functions from irrevolutions.utils import ( ColorPrint, - history_data, _write_history_data, + history_data, norm_H1, - find_offending_columns_lengths, +) +from irrevolutions.utils.plots import ( + plot_energies, ) from irrevolutions.utils.viz import ( plot_profile, @@ -61,6 +61,7 @@ model_rank = 0 test_dir = os.path.dirname(__file__) + def test_linsearch(): parameters, signature = load_parameters(os.path.join(test_dir, "./parameters.yml")) storage = f"output/linesearch/{signature}" @@ -102,8 +103,7 @@ def test_linsearch(): file.write_mesh(mesh) # Function spaces - element_u = ufl.VectorElement( - "Lagrange", mesh.ufl_cell(), degree=1, dim=tdim) + element_u = ufl.VectorElement("Lagrange", mesh.ufl_cell(), degree=1, dim=tdim) V_u = FunctionSpace(mesh, element_u) element_alpha = ufl.FiniteElement("Lagrange", mesh.ufl_cell(), degree=1) @@ -130,10 +130,8 @@ def test_linsearch(): dx = ufl.Measure("dx", domain=mesh) ds = ufl.Measure("ds", domain=mesh) - dofs_alpha_left = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], 0.0)) - dofs_alpha_right = locate_dofs_geometrical( - V_alpha, lambda x: np.isclose(x[0], Lx)) + dofs_alpha_left = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], 0.0)) + dofs_alpha_right = locate_dofs_geometrical(V_alpha, lambda x: np.isclose(x[0], Lx)) dofs_u_left = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], 0.0)) dofs_u_right = locate_dofs_geometrical(V_u, lambda x: np.isclose(x[0], Lx)) @@ -149,8 +147,7 @@ def test_linsearch(): addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) - bc_u_left = dirichletbc( - np.array([0, 0], 
dtype=PETSc.ScalarType), dofs_u_left, V_u) + bc_u_left = dirichletbc(np.array([0, 0], dtype=PETSc.ScalarType), dofs_u_left, V_u) bc_u_right = dirichletbc(u_, dofs_u_right) bcs_u = [bc_u_left, bc_u_right] @@ -195,8 +192,7 @@ def test_linsearch(): ) bifurcation = BifurcationSolver( - total_energy, state, bcs, bifurcation_parameters=parameters.get( - "stability") + total_energy, state, bcs, bifurcation_parameters=parameters.get("stability") ) stability = StabilitySolver( @@ -221,15 +217,15 @@ def test_linsearch(): addv=PETSc.InsertMode.INSERT, mode=PETSc.ScatterMode.FORWARD ) - ColorPrint.print_bold(f" Solving first order: AM ") - ColorPrint.print_bold(f"===================-=========") + ColorPrint.print_bold(" Solving first order: AM ") + ColorPrint.print_bold("===================-=========") logging.critical(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") equilibrium.solve() - ColorPrint.print_bold(f" Solving first order: Hybrid ") - ColorPrint.print_bold(f"===================-=============") + ColorPrint.print_bold(" Solving first order: Hybrid ") + ColorPrint.print_bold("===================-=============") logging.info(f"-- {i_t}/{len(loads)}: Solving for t = {t:3.2f} --") hybrid.solve(alpha_lb) @@ -244,16 +240,16 @@ def test_linsearch(): rate_12_norm = hybrid.scaled_rate_norm(alpha, parameters) urate_12_norm = hybrid.unscaled_rate_norm(alpha) - ColorPrint.print_bold(f" Solving second order: Rate Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Rate Pb. ") + ColorPrint.print_bold("===================-=================") # n_eigenvalues = 10 is_unique = bifurcation.solve(alpha_lb) is_elastic = not bifurcation._is_critical(alpha_lb) inertia = bifurcation.get_inertia() - ColorPrint.print_bold(f" Solving second order: Cone Pb. ") - ColorPrint.print_bold(f"===================-=================") + ColorPrint.print_bold(" Solving second order: Cone Pb. 
") + ColorPrint.print_bold("===================-=================") z0 = ( bifurcation._spectrum[0]["xk"] @@ -261,13 +257,9 @@ def test_linsearch(): else None ) - stable = stability.solve( - alpha_lb, - eig0=z0, - inertia=inertia) + stable = stability.solve(alpha_lb, eig0=z0, inertia=inertia) if not stable: - vec_to_functions(stability.solution["xt"], [v, β]) perturbation = {"v": v, "beta": β} @@ -285,13 +277,7 @@ def test_linsearch(): x_plot = np.linspace(interval[0], interval[1], order + 1) fig, axes = plt.subplots(1, 1) plt.scatter(x_plot, energies_1d) - plt.scatter( - h_opt, - 0, - c="k", - s=40, - marker="|", - label=f"$h^*={h_opt:.2f}$") + plt.scatter(h_opt, 0, c="k", s=40, marker="|", label=f"$h^*={h_opt:.2f}$") plt.scatter(h_opt, p(h_opt), c="k", s=40, alpha=0.5) xs = np.linspace(interval[0], interval[1], 30) axes.plot(xs, p(xs), label="Energy slice along perturbation") @@ -354,7 +340,7 @@ def test_linsearch(): β, points, plotter, - lineproperties={"c": "k", "label": f"$\\beta$"}, + lineproperties={"c": "k", "label": "$\\beta$"}, ) _plt.gca() _plt.legend() @@ -385,11 +371,9 @@ def test_linsearch(): logging.critical(f"alpha vector norm: {alpha.vector.norm()}") logging.critical(f"alpha lb norm: {alpha_lb.vector.norm()}") logging.critical(f"alphadot norm: {alphadot.vector.norm()}") - logging.critical( - f"vector norms [u, alpha]: {[zi.vector.norm() for zi in z]}") + logging.critical(f"vector norms [u, alpha]: {[zi.vector.norm() for zi in z]}") logging.critical(f"scaled rate state_12 norm: {rate_12_norm}") - logging.critical( - f"unscaled scaled rate state_12 norm: {urate_12_norm}") + logging.critical(f"unscaled scaled rate state_12 norm: {urate_12_norm}") fracture_energy = comm.allreduce( assemble_scalar(form(model.damage_energy_density(state) * dx)), @@ -430,7 +414,7 @@ def test_linsearch(): json.dump(history_data, a_file) a_file.close() - ColorPrint.print_bold(f" Written timely data. ") + ColorPrint.print_bold(" Written timely data. ") print() list_timings(MPI.COMM_WORLD, [dolfinx.common.TimingType.wall]) df = pd.DataFrame(history_data) @@ -453,10 +437,10 @@ def test_linsearch(): _plt = plot_vector(u, plotter, subplot=(0, 1)) _plt.screenshot(f"{prefix}/traction-state.png") - if comm.rank == 0: plot_energies(history_data, file=f"{prefix}/{_nameExp}_energies.pdf") + def load_parameters(file_path): """ Load parameters from a YAML file. @@ -491,6 +475,6 @@ def load_parameters(file_path): return parameters, signature + if __name__ == "__main__": test_linsearch() - diff --git a/test/test_logging_mpi.py b/test/test_logging_mpi.py index 9e53d850..96eb4043 100644 --- a/test/test_logging_mpi.py +++ b/test/test_logging_mpi.py @@ -2,8 +2,8 @@ def setup_logger_mpi(root_priority: int = logging.INFO): - from mpi4py import MPI import dolfinx + from mpi4py import MPI class MPIFormatter(logging.Formatter): def format(self, record): diff --git a/test/test_meta.py b/test/test_meta.py index 6ad33451..22b7f221 100644 --- a/test/test_meta.py +++ b/test/test_meta.py @@ -8,6 +8,7 @@ # testing is a way to perturb the flimsy roots of an unstable system. # With a proof. 
+def add(x, y): return x + y
diff --git a/test/test_rayleigh.py b/test/test_rayleigh.py
index f63cdd3c..3b763e75 100644
--- a/test/test_rayleigh.py
+++ b/test/test_rayleigh.py
@@ -2,9 +2,8 @@
 import hashlib
 import json
 import logging
-import sys
-from pathlib import Path
 import os
+from pathlib import Path
 
 import dolfinx
 import matplotlib.pyplot as plt
@@ -13,7 +12,6 @@
 import ufl
 import yaml
 from dolfinx.fem import dirichletbc, locate_dofs_geometrical
-from dolfinx.fem import form, assemble_scalar
 from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver
 from irrevolutions.solvers.function import vec_to_functions
 from irrevolutions.utils import ColorPrint, _logger, indicator_function
@@ -21,16 +19,18 @@
 from mpi4py import MPI
 from petsc4py import PETSc
 
-# sys.path.append("../")
-# sys.path.append("../playground/nb")
-# from test_extend import test_extend_vector
-# from test_cone_project import _cone_project_restricted
-
 test_dir = os.path.dirname(__file__)
 
 _logger.setLevel(logging.CRITICAL)
 
 
+def parallel_assemble_scalar(ufl_form):
+    compiled_form = dolfinx.fem.form(ufl_form)
+    comm = compiled_form.mesh.comm
+    local_scalar = dolfinx.fem.assemble_scalar(compiled_form)
+    return comm.allreduce(local_scalar, op=MPI.SUM)
+
+
 def rayleigh_ratio(z, parameters):
     (v, β) = z
     dx = ufl.Measure("dx", v.function_space.mesh)
@@ -47,17 +47,20 @@ def rayleigh_ratio(z, parameters):
     ) * dx
     denominator = ufl.inner(β, β) * dx
 
-    R = assemble_scalar(form(numerator)) / assemble_scalar(form(denominator))
+    R = parallel_assemble_scalar(numerator) / parallel_assemble_scalar(denominator)
 
     return R
 
 
-def test_rayleigh(parameters = None, storage=None):
-
+def test_rayleigh(parameters=None, storage=None):
     if parameters is None:
-        parameters, signature = load_parameters(os.path.join(test_dir, "parameters.yml"), ndofs=50)
+        parameters, signature = load_parameters(
+            os.path.join(test_dir, "parameters.yml"), ndofs=50
+        )
         pretty_parameters = json.dumps(parameters, indent=2)
-        storage = f"output/rayleigh-benchmark/MPI-{MPI.COMM_WORLD.Get_size()}/{signature}"
+        storage = (
+            f"output/rayleigh-benchmark/MPI-{MPI.COMM_WORLD.Get_size()}/{signature}"
+        )
     else:
         signature = hashlib.md5(str(parameters).encode("utf-8")).hexdigest()
 
@@ -75,7 +78,6 @@ def test_rayleigh(parameters = None, storage=None):
     N = parameters["geometry"]["N"]
 
     mesh = dolfinx.mesh.create_unit_interval(MPI.COMM_WORLD, N)
-
     ColorPrint.print_bold(f"===================-{storage}-=================")
 
     if comm.rank == 0:
@@ -197,7 +199,7 @@ def test_rayleigh(parameters = None, storage=None):
     vec_to_functions(bifurcation.spectrum[0]["xk"], [v, β])
 
     _support = indicator_function(stability.perturbation["β"])
-    D_support = dolfinx.fem.assemble_scalar(dolfinx.fem.form(_support * dx))
+    D_support = parallel_assemble_scalar(_support * dx)
 
     tol = 1e-3
     xs = np.linspace(0 + tol, 1 - tol, 101)
@@ -218,7 +220,7 @@ def test_rayleigh(parameters = None, storage=None):
         subplot=(1, 3),
         fig=fig,
         ax=axes[0],
-        lineproperties={"c": "k", "label": f"$\\beta$"},
+        lineproperties={"c": "k", "label": "$\\beta$"},
         subplotnumber=1,
     )
     axes[0] = _plt.gca()
@@ -238,7 +240,7 @@ def test_rayleigh(parameters = None, storage=None):
         subplot=(1, 3),
         fig=fig,
         ax=axes[0],
-        lineproperties={"c": "k", "label": f"$v$", "ls": "--"},
+        lineproperties={"c": "k", "label": "$v$", "ls": "--"},
         subplotnumber=1,
     )
     axes[0].set_ylabel("$v,\\beta$")
@@ -250,7 +252,7 @@ def test_rayleigh(parameters = None, storage=None):
         fig=fig,
         ax=axes[1],
         subplot=(1, 3),
-        lineproperties={"c": "k", "label": f"$\\beta$"},
+        lineproperties={"c": "k", "label": "$\\beta$"},
         subplotnumber=2,
     )
     _plt.fill_between(
@@ -264,7 +266,7 @@ def test_rayleigh(parameters = None, storage=None):
         fig=fig,
         ax=axes[1],
         subplot=(1, 3),
-        lineproperties={"c": "k", "label": f"$v$", "ls": "--"},
+        lineproperties={"c": "k", "label": "$v$", "ls": "--"},
         subplotnumber=2,
     )
 
@@ -285,7 +287,7 @@ def test_rayleigh(parameters = None, storage=None):
         fig=fig,
         ax=axes[2],
         subplot=(1, 3),
-        lineproperties={"c": "k", "label": f"$\\zeta$"},
+        lineproperties={"c": "k", "label": "$\\zeta$"},
         subplotnumber=3,
     )
     _plt.fill_between(
@@ -299,7 +301,7 @@ def test_rayleigh(parameters = None, storage=None):
         fig=fig,
         ax=axes[2],
         subplot=(1, 3),
-        lineproperties={"c": "k", "label": f"$w$", "ls": "--"},
+        lineproperties={"c": "k", "label": "$w$", "ls": "--"},
         subplotnumber=3,
     )
 
@@ -444,5 +446,5 @@ def load_parameters(file_path, ndofs, model="at1"):
 
     _storage = f"output/rayleigh-benchmark/MPI-{MPI.COMM_WORLD.Get_size()}/{signature}"
     ColorPrint.print_bold(f"===================-{_storage}-=================")
-    with dolfinx.common.Timer(f"~Computation Experiment") as timer:
+    with dolfinx.common.Timer("~Computation Experiment") as timer:
         history_data, stability_data, state = test_rayleigh(parameters, _storage)
diff --git a/test/test_rayleigh_parametric.py b/test/test_rayleigh_parametric.py
index 667acc08..28857889 100644
--- a/test/test_rayleigh_parametric.py
+++ b/test/test_rayleigh_parametric.py
@@ -1,20 +1,20 @@
-from irrevolutions.utils import _logger, ColorPrint, indicator_function
-from irrevolutions.utils import eigenspace as eig
-from irrevolutions.utils.viz import get_datapoints
-from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver
-from dolfinx.fem import form, assemble_scalar
-from pathlib import Path
-import json
 import argparse
+import json
 import logging
-from mpi4py import MPI
-from petsc4py import PETSc
-import yaml
-from dolfinx.fem import locate_dofs_geometrical, dirichletbc
+import sys
+from pathlib import Path
+
+import dolfinx
 import numpy as np
 import ufl
-import dolfinx
-import sys
+import yaml
+from dolfinx.fem import assemble_scalar, dirichletbc, form, locate_dofs_geometrical
+from irrevolutions.algorithms.so import BifurcationSolver, StabilitySolver
+from irrevolutions.utils import ColorPrint, _logger, indicator_function
+from irrevolutions.utils import eigenspace as eig
+from irrevolutions.utils.viz import get_datapoints
+from mpi4py import MPI
+from petsc4py import PETSc
 
 sys.path.append("../")
 sys.path.append("../playground/nb")
@@ -27,7 +27,6 @@
 
 
 def rayleigh_ratio_reduced(β, parameters):
-
     dx = ufl.Measure("dx", β.function_space.mesh)
     a, b, c = (
         parameters["model"]["a"],
@@ -328,7 +327,6 @@ def load_parameters(file_path, ndofs, model="rayleigh"):
 
 
 if __name__ == "__main__":
-
     parser = argparse.ArgumentParser(description="Process evolution.")
     parser.add_argument("-N", help="The number of dofs.", type=int, default=50)
     parser.add_argument(
@@ -350,5 +348,5 @@ def load_parameters(file_path, ndofs, model="rayleigh"):
 
     _storage = f"output/rayleigh-benchmark-parametric/MPI-{MPI.COMM_WORLD.Get_size()}/{signature}"
     ColorPrint.print_bold(f"===================-{_storage}-=================")
-    with dolfinx.common.Timer(f"~Random Computation Experiment") as timer:
+    with dolfinx.common.Timer("~Random Computation Experiment") as timer:
         history_data, stability_data, state = rayleigh(parameters, _storage)
diff --git a/test/test_restriction.py b/test/test_restriction.py
index 8aea0647..b45b7fc6 100644
--- a/test/test_restriction.py
+++ b/test/test_restriction.py
@@ -1,11 +1,11 @@
-from irrevolutions.utils import sample_data
-from dolfinx.cpp.la.petsc import get_local_vectors
-from mpi4py import MPI
-import numpy as np
+import sys
+
 import dolfinx
-from irrevolutions.utils import _logger
 import irrevolutions.solvers.restriction as restriction
-import sys
+import numpy as np
+from dolfinx.cpp.la.petsc import get_local_vectors
+from irrevolutions.utils import _logger, sample_data
+from mpi4py import MPI
 
 sys.path.append("../")
@@ -18,13 +18,12 @@
 
 def get_inactive_dofset(v, F):
     """docstring for get_inactive_dofset"""
-    _logger.info(f"inactive dofset")
+    _logger.info("inactive dofset")
     V_u, V_alpha = F[0].function_spaces[0], F[1].function_spaces[0]
 
     __names = ["u", "alpha"]
 
     for i, space in enumerate([V_u, V_alpha]):
-
         bs = space.dofmap.index_map_bs
 
         size_local = space.dofmap.index_map.size_local
@@ -101,14 +100,14 @@ def test_restriction():
         f"{__log_incipit} constraints.bglobal_dofs_vec_stacked {constraints.bglobal_dofs_vec_stacked}"
     )
 
-    _logger.info(f"v")
+    _logger.info("v")
     v.view()
-    _logger.info(f"vr")
+    _logger.info("vr")
     vr.view()
 
     # assert we get the right number of restricted dofs
     assert len(np.concatenate(restricted_dofs)) == vr.getSize()
-    
+
     # return v, vr, constraints
diff --git a/test/test_sample_data.py b/test/test_sample_data.py
index b5dee8a2..162d0cc8 100644
--- a/test/test_sample_data.py
+++ b/test/test_sample_data.py
@@ -1,12 +1,13 @@
-from dolfinx.cpp.la.petsc import get_local_vectors, scatter_local_vectors
-from mpi4py import MPI
-from petsc4py import PETSc
 import random
+import sys
+
+import dolfinx
 import numpy as np
 import ufl
-import dolfinx
+from dolfinx.cpp.la.petsc import get_local_vectors, scatter_local_vectors
 from irrevolutions.utils import _logger
-import sys
+from mpi4py import MPI
+from petsc4py import PETSc
 
 sys.path.append("../")
diff --git a/test/test_scatter.py b/test/test_scatter.py
index 2b320df9..2d2a7c1a 100644
--- a/test/test_scatter.py
+++ b/test/test_scatter.py
@@ -1,16 +1,16 @@
+import os
 import random
-from dolfinx import cpp as _cpp
-import numpy as np
-from dolfinx.fem import locate_dofs_geometrical
-import irrevolutions.solvers.restriction as restriction
-import ufl
-import dolfinx
-from petsc4py import PETSc
-from mpi4py import MPI
 import sys
-import os
 
+import dolfinx
+import irrevolutions.solvers.restriction as restriction
+import numpy as np
 import petsc4py
+import ufl
+from dolfinx import cpp as _cpp
+from dolfinx.fem import locate_dofs_geometrical
+from mpi4py import MPI
+from petsc4py import PETSc
 
 petsc4py.init(sys.argv)
@@ -138,7 +138,6 @@ def get_inactive_dofset():
 print(f"v restored (projected) {v.array}")
 
 for i, space in enumerate([V_u, V_alpha]):
-
     bs = space.dofmap.index_map_bs
 
     size_local = space.dofmap.index_map.size_local
@@ -151,19 +150,19 @@ def get_inactive_dofset():
 
 x0_local = _cpp.la.petsc.get_local_vectors(x, maps)
 
-print(f"this should scatter x0_local into the global vector v")
+print("this should scatter x0_local into the global vector v")
 
 _cpp.la.petsc.scatter_local_vectors(v, x0_local, maps)
 v.ghostUpdate(addv=PETSc.InsertMode.ADD, mode=PETSc.ScatterMode.FORWARD)
 
-print(f"v should now be zero")
+print("v should now be zero")
 print(f"v restored (projected) {v.array}")
 
 _sub = v.getSubVector(_is)
 _sub.pointwiseMax(_sub, a)
 
 # _cpp.la.petsc.scatter_local_vectors(v, x0_local, maps)
-print(f"v should now be harmless")
+print("v should now be harmless")
 print(f"v restored {v.array}")
@@ -187,7 +186,7 @@ def converged(x):
 
     # if not converged:
     #     its += 1
-    print("converged" if _converged else f" converging")
+    print("converged" if _converged else " converging")
 
     return _converged
diff --git a/test/test_spa.py b/test/test_spa.py
index ef812705..68bf9728 100644
--- a/test/test_spa.py
+++ b/test/test_spa.py
@@ -1,16 +1,18 @@
-import pickle
 import logging
-from mpi4py import MPI
-from dolfinx.io import XDMFFile
+import os
+import pickle
+import sys
+
+import dolfinx
+import irrevolutions.solvers.restriction as restriction
 import numpy as np
 import ufl
-import dolfinx
+from dolfinx.io import XDMFFile
 from irrevolutions.utils import _logger
+from mpi4py import MPI
 from test_cone_project import _cone_project_restricted
+
 from . import test_binarydataio as bio
-import irrevolutions.solvers.restriction as restriction
-import os
-import sys
 
 sys.path.append("../")
@@ -41,7 +43,6 @@ def load_minimal_constraints(filename, spaces):
 
 
 def test_spa():
-
     def iterate(x, xold, errors):
         """
         Perform convergence check and handle exceptions (NonConvergenceException).
@@ -80,7 +81,7 @@ def update_lambda_and_y(xk, Ar):
 
         xAx_r = xk.dot(_Axr)
 
-        _logger.debug(f"xk view in update at iteration")
+        _logger.debug("xk view in update at iteration")
 
         _lmbda_t = xAx_r / xk.dot(xk)
         _y.waxpy(-_lmbda_t, xk, _Axr)
diff --git a/test/test_variational_iterator.py b/test/test_variational_iterator.py
index eb0c7e48..c0721a64 100644
--- a/test/test_variational_iterator.py
+++ b/test/test_variational_iterator.py
@@ -1,16 +1,18 @@
-import numpy as np
-import random
 import logging
+import random
+
+import numpy as np
 
 # Set up logging configuration
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
-# Implementing a custom time-stepping iterator, CustomSimpleIterator, to iterate over a list of loads parametrising the evolution of an incremental system, providing the flexibility to pause time when needed. 
-# The iterator maintains an index to track the current load, along with a boolean flag to indicate whether time should be paused. 
-# If the flag is set, the iterator returns the current load without incrementing the index; otherwise, it increments the index and returns the next load. 
+# Implementing a custom time-stepping iterator, CustomSimpleIterator, to iterate over a list of loads parametrising the evolution of an incremental system, providing the flexibility to pause time when needed.
+# The iterator maintains an index to track the current load, along with a boolean flag to indicate whether time should be paused.
+# If the flag is set, the iterator returns the current load without incrementing the index; otherwise, it increments the index and returns the next load.
 # This design allows for a simple implementation of an energetic variational statement for time-dependent processes that require performing variations at fixed load. Such computations involve equilibrium, bifurcation, and stability checks, offering a clear and efficient mechanism for following the evolution of states for systems which are time-parametrised.
 
+
 class CustomSimpleIterator:
     def __init__(self, loads):
         self.i = 0
@@ -22,7 +24,7 @@ def __iter__(self):
 
     def __next__(self):
         logger.info(f"\n\nCalled next, can time be stopped? {self.stop_time}")
-        
+
         if self.stop_time:
             self.stop_time = False
             index = self.i
@@ -34,21 +36,24 @@ def __next__(self):
             index = self.i
         else:
            raise StopIteration
-        
-        return index
+
+        return index
 
     def pause_time(self):
         self.stop_time = True
         logger.info(f"Called pause, stop_time is {self.stop_time}")
 
+
 # Example functions
 def update_loads(t):
     return t
 
+
 class EquilibriumSolver:
     def solve(self):
         return (random.uniform(-1, 1), random.uniform(0, 1))
 
+
 class StabilitySolver:
     def solve(self, y_t, t):
         if t == 0:
@@ -56,9 +61,11 @@ def solve(self, y_t, t):
         else:
             return random.choice([True, False])
 
+
 def perturb_state(y_t):
     return y_t[0] + 0.1, y_t[1] + 0.1
 
+
 # Example usage
 loads = np.linspace(0, 10, 11)
 iterator = CustomSimpleIterator(loads)
@@ -71,20 +78,20 @@ def perturb_state(y_t):
     # next increments the self index
     except StopIteration:
         break
-    
+
     # Perform your time step with t
     update_loads(t)
-    
+
     # Log current load
    logger.info(f"Current load: {t:.2f}")
 
     y_t = equilibrium.solve()
     stable = stability.solve(y_t, t)
-    
+
     logger.info(f"Equilibrium state at load {t:.2f}")
     # logger.info(f"Equilibrium state at load {t:.2f}: {y_t}")
     logger.info(f"Stability of state at load {t:.2f}: {stable}")
-    
+
     if not stable:
         iterator.pause_time()
         y_t = perturb_state(y_t)