Merge branch 'development' into 284-more-friendly-output-name-of-add_strain-or-a-better-presentation-in-documentation-webpage

Yi Hu 2023-07-04 16:07:48 +02:00
commit 0e9e37bfb3
264 changed files with 4471 additions and 4103 deletions

.gitattributes

@@ -12,12 +12,12 @@
 *.pbz2 binary

 # ignore files from MSC.Marc in language statistics
 install/MarcMentat/** linguist-vendored
 src/Marc/include/* linguist-vendored
 install/MarcMentat/MSC_modifications.py linguist-vendored=false

 # ignore reference files for tests in language statistics
-python/tests/reference/** linguist-vendored
+python/tests/resources/** linguist-vendored

 # ignore deprecated scripts
 processing/legacy/** linguist-vendored


@@ -82,7 +82,7 @@ jobs:
       - name: DAMASK - Run
        run: |
-          ./bin/DAMASK_grid -l tensionX.yaml -g 20grains16x16x16.vti -w examples/grid
+          ./bin/DAMASK_grid -l tensionX.yaml -g 20grains16x16x16.vti -m material.yaml -w examples/grid
           ./bin/DAMASK_mesh -h
@@ -189,5 +189,5 @@ jobs:
       - name: DAMASK - Run
        run: |
-          ./bin/DAMASK_grid -l tensionX.yaml -g 20grains16x16x16.vti -w examples/grid
+          ./bin/DAMASK_grid -l tensionX.yaml -g 20grains16x16x16.vti -m material.yaml -w examples/grid
           ./bin/DAMASK_mesh -h


@@ -75,7 +75,7 @@ jobs:
        run: >
          sudo apt-get update &&
          sudo apt-get remove mysql* &&
-         sudo apt-get install python3-pandas python3-scipy python3-h5py python3-vtk7 python3-matplotlib python3-yaml -y
+         sudo apt-get install python3-pandas python3-scipy python3-h5py python3-vtk9 python3-matplotlib python3-yaml -y
      - name: Run unit tests
        run: |


@@ -11,7 +11,7 @@ endif()
 project(Prerequisites LANGUAGES)

 set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}")
 pkg_check_modules(PETSC_MIN REQUIRED PETSc>=3.12.0 QUIET) #CMake does not support version range
-pkg_check_modules(PETSC REQUIRED PETSc<3.19.0)
+pkg_check_modules(PETSC REQUIRED PETSc<3.20.0)
 pkg_get_variable(CMAKE_Fortran_COMPILER PETSc fcompiler)
 pkg_get_variable(CMAKE_C_COMPILER PETSc ccompiler)

@@ -1 +1 @@
-Subproject commit 8fbc1dd8a26bf359b72bc076dac8ea3edef3be6d
+Subproject commit 9f4ffce8b2df951191a14dc3229de1aee6e544e6


@@ -1 +1 @@
-3.0.0-alpha7-488-gf6c761980
+3.0.0-alpha7-604-g65fa62b3f


@@ -3,7 +3,7 @@ type: Hooke
 references:
   - D.J. Dever,
     Journal of Applied Physics 43(8):3293-3301, 1972,
-    https://doi.org/10.1063/1.1661710
+    https://doi.org/10.1063/1.1661710,
+    fit to Tab. II (T_min=25ºC, T_max=880ºC)

 C_11: 232.2e+9


@@ -3,8 +3,9 @@ type: Hooke
 references:
   - S.A. Kim and W.L. Johnson,
     Materials Science & Engineering A 452-453:633-639, 2007,
-    https://doi.org/10.1016/j.msea.2006.11.147
+    https://doi.org/10.1016/j.msea.2006.11.147,
+    Tab. 1 (averaged for bcc)

-C_11: 268.1e+9
-C_12: 111.2e+9
-C_44: 79.06e+9
+C_11: 267.9e+9
+C_12: 110.8e+9
+C_44: 78.9e+9


@@ -3,7 +3,7 @@ type: dislotungsten
 references:
   - D. Cereceda et al.,
     International Journal of Plasticity 78:242-265, 2016,
-    http://dx.doi.org/10.1016/j.ijplas.2015.09.002
+    https://doi.org/10.1016/j.ijplas.2015.09.002
   - R. Gröger et al.,
     Acta Materialia 56(19):5412-5425, 2008,
     https://doi.org/10.1016/j.actamat.2008.07.037


@@ -12,6 +12,7 @@ output: [rho_dip, rho_mob]
 N_sl: [12, 12]
+f_edge: [1.0, 1.0]
 b_sl: [2.49e-10, 2.49e-10]
 rho_mob_0: [2.81e+12, 2.8e+12]
 rho_dip_0: [1.0, 1.0] # not given


@@ -15,6 +15,8 @@ output: [rho_mob, rho_dip, gamma_sl, Lambda_sl, tau_pass, f_tw, Lambda_tw, f_tr]
 # Glide
 N_sl: [12]
+f_edge: [1.0]
 b_sl: [2.56e-10] # a/sqrt(2)
 Q_sl: [3.5e-19]
 p_sl: [0.325]


@@ -0,0 +1,23 @@
type: kinehardening

references:
  - J.A. Wollmershauser et al.,
    International Journal of Fatigue 36(1):181-193, 2012,
    https://doi.org/10.1016/j.ijfatigue.2011.07.008

output: [xi, chi, chi_flip, gamma_flip, gamma, sgn(gamma)]

N_sl: [12]

xi_0: [0.070e+9]     # τ_0,for
xi_inf: [0.015e+9]   # τ_1,for
h_0_xi: [0.065e+9]   # θ_0,for
h_inf_xi: [0.045e+9] # θ_1,for
chi_inf: [0.027e+9]  # τ_1,bs
h_0_chi: [55e+9]     # θ_0,bs
h_inf_chi: [1.3e+9]  # θ_1,bs

n: 20                # not mentioned in the reference
dot_gamma_0: 1e-4    # not mentioned in the reference

h_sl-sl: [1, 1, 1, 1, 1, 1, 1]


@@ -25,5 +25,6 @@ from ._colormap import Colormap # noqa
 from ._vtk import VTK # noqa
 from ._config import Config # noqa
 from ._configmaterial import ConfigMaterial # noqa
+from ._loadcasegrid import LoadcaseGrid # noqa
 from ._grid import Grid # noqa
 from ._result import Result # noqa


@@ -317,6 +317,11 @@ class Grid:
         loaded : damask.Grid
             Grid-based geometry from file.

+        Notes
+        -----
+        Material indices in Neper usually start at 1 unless
+        a buffer material with index 0 is added.
+
         Examples
         --------
         Read a periodic polycrystal generated with Neper.
@@ -325,7 +330,7 @@
         >>> N_grains = 20
         >>> cells = (32,32,32)
         >>> damask.util.run(f'neper -T -n {N_grains} -tesrsize {cells[0]}:{cells[1]}:{cells[2]} -periodicity all -format vtk')
-        >>> damask.Grid.load_Neper(f'n{N_grains}-id1.vtk')
+        >>> damask.Grid.load_Neper(f'n{N_grains}-id1.vtk').renumber()
         cells: 32 × 32 × 32
         size: 1.0 × 1.0 × 1.0
         origin: 0.0 0.0 0.0 m
@@ -336,7 +341,7 @@
         cells = np.array(v.vtk_data.GetDimensions())-1
         bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T

-        return Grid(material = v.get('MaterialId').reshape(cells,order='F').astype('int32',casting='unsafe') - 1,
+        return Grid(material = v.get('MaterialId').reshape(cells,order='F').astype('int32',casting='unsafe'),
                     size = bbox[1] - bbox[0],
                     origin = bbox[0],
                     comments = util.execution_stamp('Grid','load_Neper'),
@@ -1044,7 +1049,7 @@
         Parameters
         ----------
         idx : numpy.ndarray of int, shape (:,:,:) or (:,:,:,3)
             Grid of flat indices or coordinate indices.

         Returns
         -------
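Since load_Neper no longer shifts the Neper material IDs by -1, the updated docstring example chains renumber() to obtain consecutive indices starting at 0. A minimal usage sketch along those lines (the tessellation file name follows the docstring example and is an assumption here):

import damask

# load a Neper tessellation; Neper's material indices usually start at 1,
# renumber() maps them onto a consecutive range starting at 0
grid = damask.Grid.load_Neper('n20-id1.vtk').renumber()
print(grid)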


@@ -0,0 +1,78 @@
from typing import Optional, Union, Dict, Any, List

from numpy import ma
import yaml

from ._typehints import FileHandle
from ._config import NiceDumper
from . import util
from . import Config


class MaskedMatrixDumper(NiceDumper):
    """Format masked matrices."""

    def represent_data(self, data: Any):
        return super().represent_data(data.astype(object).filled('x') if isinstance(data, ma.core.MaskedArray) else data)  # type: ignore[attr-defined]


class LoadcaseGrid(Config):
    """Load case for grid solver."""

    def __init__(self,
                 config: Optional[Union[str,Dict[str,Any]]] = None,
                 *,
                 solver: Optional[Dict[str,str]] = None,
                 loadstep: Optional[List[Dict[str,Any]]] = None):
        """
        New grid solver load case.

        Parameters
        ----------
        config : dict or str, optional
            Grid solver load case. String needs to be valid YAML.
        solver : dict, optional
            Solver configuration.
            Defaults to an empty dict if 'config' is not given.
        loadstep : list of dict, optional
            Load step configuration.
            Defaults to an empty list if 'config' is not given.

        """
        kwargs: Dict[str,Union[Dict[str,str],List[Dict[str,Any]]]] = {}
        default: Union[List,Dict]
        for arg,value,default in [('solver',solver,{}),('loadstep',loadstep,[])]:  # type: ignore[assignment]
            if value is not None:
                kwargs[arg] = value
            elif config is None:
                kwargs[arg] = default

        super().__init__(config,**kwargs)


    def save(self,
             fname: FileHandle,
             **kwargs):
        """
        Save to YAML file.

        Parameters
        ----------
        fname : file, str, or pathlib.Path
            Filename or file to write.
        **kwargs : dict
            Keyword arguments parsed to yaml.dump.

        """
        for key,default in dict(width=256,
                                default_flow_style=None,
                                sort_keys=False).items():
            if key not in kwargs:
                kwargs[key] = default

        fhandle = util.open_text(fname,'w')
        try:
            fhandle.write(yaml.dump(self,Dumper=MaskedMatrixDumper,**kwargs))
        except TypeError:                                   # compatibility with old pyyaml
            del kwargs['sort_keys']
            fhandle.write(yaml.dump(self,Dumper=MaskedMatrixDumper,**kwargs))
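A usage sketch for the new class (not part of this commit): the load-step layout below follows the grid solver's YAML load-case conventions, but treat the exact keys and values as illustrative assumptions. Masked tensor components are the ones written out as 'x' by MaskedMatrixDumper.

import numpy as np
from numpy import ma
import damask

# uniaxial tension along x: prescribe dot_F_11 and leave the complementary
# components to the stress boundary condition; masked (invalid) entries
# end up as 'x' in the YAML file
dot_F = ma.masked_invalid([[1.0e-3, 0.0,    0.0   ],
                           [0.0,    np.nan, 0.0   ],
                           [0.0,    0.0,    np.nan]])
P     = ma.masked_invalid([[np.nan, np.nan, np.nan],
                           [np.nan, 0.0,    np.nan],
                           [np.nan, np.nan, 0.0   ]])

load_case = damask.LoadcaseGrid(solver={'mechanical': 'spectral_basic'},
                                loadstep=[{'boundary_conditions': {'mechanical': {'dot_F': dot_F, 'P': P}},
                                           'discretization': {'t': 10.0, 'N': 40}}])
load_case.save('tensionX.yaml')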


@@ -15,7 +15,7 @@ from typing import Optional, Union, Callable, Any, Sequence, Literal, Dict, List
 import h5py
 import numpy as np
-import numpy.ma as ma
+from numpy import ma
 import damask
 from . import VTK
@@ -106,10 +106,8 @@ class Result:
             self.version_major = f.attrs['DADF5_version_major']
             self.version_minor = f.attrs['DADF5_version_minor']

-            if (self.version_major != 0 or not 12 <= self.version_minor <= 14) and self.version_major != 1:
+            if (self.version_major != 0 or not 14 <= self.version_minor <= 14) and self.version_major != 1:
                 raise TypeError(f'unsupported DADF5 version "{self.version_major}.{self.version_minor}"')
-            if self.version_major == 0 and self.version_minor < 14:
-                self.export_simulation_setup = None  # type: ignore

             self.structured = 'cells' in f['geometry'].attrs.keys()
@@ -1940,7 +1938,8 @@ class Result:
     def export_DADF5(self,
                      fname,
-                     output: Union[str, List[str]] = '*'):
+                     output: Union[str, List[str]] = '*',
+                     mapping = None):
         """
         Export visible components into a new DADF5 file.
@@ -1954,20 +1953,61 @@
         output : (list of) str, optional
             Names of the datasets to export.
             Defaults to '*', in which case all visible datasets are exported.
+        mapping : numpy.ndarray of int, shape (:,:,:), optional
+            Indices for regridding.

         """
         if Path(fname).expanduser().absolute() == self.fname:
             raise PermissionError(f'cannot overwrite {self.fname}')

+        def cp(path_in,path_out,label,mapping):
+            if mapping is None:
+                path_in.copy(label,path_out)
+            else:
+                path_out.create_dataset(label,data=path_in[label][()][mapping])
+                path_out[label].attrs.update(path_in[label].attrs)
+
         with h5py.File(self.fname,'r') as f_in, h5py.File(fname,'w') as f_out:
-            for k,v in f_in.attrs.items():
-                f_out.attrs.create(k,v)
-            for g in ['setup','geometry','cell_to']:
+            f_out.attrs.update(f_in.attrs)
+            for g in ['setup','geometry'] + (['cell_to'] if mapping is None else []):
                 f_in.copy(g,f_out)

+            if mapping is not None:
+                cells = mapping.shape
+                mapping_flat = mapping.flatten(order='F')
+                f_out['geometry'].attrs['cells'] = cells
+                f_out.create_group('cell_to')                                   # ToDo: attribute missing
+                mappings = {'phase':{},'homogenization':{}}                     # type: ignore
+
+                mapping_phase = f_in['cell_to']['phase'][()][mapping_flat]
+                for p in np.unique(mapping_phase['label']):
+                    m = mapping_phase['label'] == p
+                    mappings['phase'][p] = mapping_phase[m]['entry']
+                    c = np.count_nonzero(m)
+                    mapping_phase[m] = list(zip((p,)*c,tuple(np.arange(c))))
+                f_out['cell_to'].create_dataset('phase',data=mapping_phase.reshape(np.prod(mapping_flat.shape),-1))
+
+                mapping_homog = f_in['cell_to']['homogenization'][()][mapping]
+                for h in np.unique(mapping_homog['label']):
+                    m = mapping_homog['label'] == h
+                    mappings['homogenization'][h] = mapping_homog[m]['entry']
+                    c = np.count_nonzero(m)
+                    mapping_homog[mapping_homog['label'] == h] = list(zip((h,)*c,tuple(np.arange(c))))
+                f_out['cell_to'].create_dataset('homogenization',data=mapping_homog.flatten())
+
             for inc in util.show_progress(self.visible['increments']):
                 f_in.copy(inc,f_out,shallow=True)
-                for out in _match(output,f_in['/'.join([inc,'geometry'])].keys()):
-                    f_in[inc]['geometry'].copy(out,f_out[inc]['geometry'])
+                if mapping is None:
+                    for label in ['u_p','u_n']:
+                        f_in[inc]['geometry'].copy(label,f_out[inc]['geometry'])
+                else:
+                    u_p = f_in[inc]['geometry']['u_p'][()][mapping_flat]
+                    f_out[inc]['geometry'].create_dataset('u_p',data=u_p)
+                    u_n = np.zeros((len(mapping_flat),3))                       # ToDo: needs implementation
+                    f_out[inc]['geometry'].create_dataset('u_n',data=u_n)

                 for label in self.homogenizations:
                     f_in[inc]['homogenization'].copy(label,f_out[inc]['homogenization'],shallow=True)
@@ -1979,7 +2019,7 @@
                         for field in _match(self.visible['fields'],f_in['/'.join([inc,ty,label])].keys()):
                             p = '/'.join([inc,ty,label,field])
                             for out in _match(output,f_in[p].keys()):
-                                f_in[p].copy(out,f_out[p])
+                                cp(f_in[p],f_out[p],out,None if mapping is None else mappings[ty][label.encode()])

     def export_simulation_setup(self,
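A sketch of how the new mapping argument might be used (not part of the commit): mapping holds, for every cell of the target grid, the flat index of the source cell whose data should be copied. Here an identity mapping simply reproduces the original grid; the cell count and the F-ordered flat indices are assumptions matching the flatten(order='F') used above, and the file names are hypothetical.

import numpy as np
import damask

result = damask.Result('original.hdf5')   # hypothetical file name
cells  = (16, 16, 16)                     # assumed cell count of the source grid

# identity mapping: cell (i,j,k) of the new file takes its data from the
# source cell with the same flat (F-ordered) index, i.e. a plain copy
mapping = np.arange(np.prod(cells)).reshape(cells, order='F')

result.export_DADF5('regridded.hdf5', mapping=mapping)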


@@ -603,4 +603,7 @@ class Table:
         f = util.open_text(fname,'w')

         f.write('\n'.join([f'# {c}' for c in self.comments] + [' '.join(labels)])+('\n' if labels else ''))
-        self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False,line_terminator='\n')
+        try:                                                                    # backward compatibility
+            self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False,lineterminator='\n')
+        except TypeError:
+            self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False,line_terminator='\n')
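The fallback above is needed because pandas 1.5 renamed the to_csv keyword line_terminator to lineterminator (the old name was removed in pandas 2.0), while older pandas versions raise TypeError on the new name. The same pattern in isolation, as a minimal sketch:

import pandas as pd

def write_space_separated(df: pd.DataFrame, handle) -> None:
    """Write a DataFrame space-separated, working across old and new pandas."""
    try:
        df.to_csv(handle, sep=' ', na_rep='nan', index=False, header=False,
                  lineterminator='\n')
    except TypeError:                     # pandas < 1.5 only knows line_terminator
        df.to_csv(handle, sep=' ', na_rep='nan', index=False, header=False,
                  line_terminator='\n')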


@@ -186,8 +186,6 @@ def displacement_fluct_point(size: _FloatSequence,
         Fluctuating part of the cell center displacements.

     """
-    integrator = 0.5j*_np.array(size,float)/_np.pi
-
     k_s = _ks(size,F.shape[:3],False)
     k_s_squared = _np.einsum('...l,...l',k_s,k_s)
     k_s_squared[0,0,0] = 1.0
@@ -195,8 +193,8 @@
     displacement = -_np.einsum('ijkml,ijkl,l->ijkm',
                                _np.fft.rfftn(F,axes=(0,1,2)),
                                k_s,
-                               integrator,
+                               _np.array([0.5j/_np.pi]*3),
                                ) / k_s_squared[...,_np.newaxis]

     return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])


@@ -60,9 +60,9 @@ def update(request):

 @pytest.fixture
-def ref_path_base():
-    """Directory containing reference results."""
-    return Path(__file__).parent/'reference'
+def res_path_base():
+    """Directory containing testing resources."""
+    return Path(__file__).parent/'resources'


 @pytest.fixture

Binary file changed (147 B before, 147 B after).

Binary file changed (138 B before, 138 B after).


@@ -0,0 +1 @@
n10-id1_scaled.vtk binary

Some files were not shown because too many files have changed in this diff.