Merge branch 'result-export-DADF5' into 'development'

export to new DADF5 file

See merge request damask/DAMASK!644
commit b5b861afdd
Author: Philip Eisenlohr
Date:   2022-11-08 19:01:48 +00:00

2 changed files with 91 additions and 21 deletions

python/damask/_result.py

@@ -283,16 +283,16 @@ class Result:
 
     def times_in_range(self,
                        start: float = None,
-                       end: float = None) -> Sequence[int]:
+                       end: float = None) -> Sequence[float]:
         """
-        Get all increments within a given time range.
+        Get times of all increments within a given time range.
 
         Parameters
         ----------
         start : float, optional
-            Time of start increment. Defaults to first.
+            Time of start increment. Defaults to time of first.
         end : float, optional
-            Time of end increment. Defaults to last.
+            Time of end increment. Defaults to time of last.
 
         Returns
         -------
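
A minimal usage sketch of the corrected annotation (the result file name and time values are placeholders, not part of this commit): the method returns the times of the matching increments, not increment indices.

    import damask
    r = damask.Result('my_job.hdf5')        # placeholder DADF5 result file
    r.times_in_range(start=0.0, end=5.0)    # -> sequence of floats (simulation times), per the updated return type
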
@@ -817,7 +817,7 @@ class Result:
         ----------
         T_sym : str
             Name of symmetric tensor dataset.
-        eigenvalue : {'max', 'mid', 'min'}
+        eigenvalue : {'max', 'mid', 'min'}, optional
             Eigenvalue. Defaults to 'max'.
 
         Examples
@@ -863,7 +863,7 @@ class Result:
         ----------
         T_sym : str
             Name of symmetric tensor dataset.
-        eigenvalue : {'max', 'mid', 'min'}
+        eigenvalue : {'max', 'mid', 'min'}, optional
             Eigenvalue to which the eigenvector corresponds.
             Defaults to 'max'.
@@ -897,7 +897,7 @@ class Result:
         ----------
         l : numpy.array of shape (3)
             Lab frame direction for inverse pole figure.
-        q : str
+        q : str, optional
             Name of the dataset containing the crystallographic orientation as quaternions.
             Defaults to 'O'.
@@ -1104,7 +1104,7 @@ class Result:
 
         Parameters
         ----------
-        q : str
+        q : str, optional
             Name of the dataset containing the crystallographic orientation as quaternions.
             Defaults to 'O'.
         uvw|hkl : numpy.ndarray of shape (3)
@@ -1528,7 +1528,7 @@ class Result:
 
         Parameters
         ----------
-        output : (list of) str
+        output : (list of) str, optional
             Names of the datasets included in the XDMF file.
             Defaults to '*', in which case all datasets are considered.
         target_dir : str or pathlib.Path, optional
@@ -1663,7 +1663,7 @@ class Result:
 
 
     def export_VTK(self,
-                   output: Union[str,list] = '*',
+                   output: Union[str,List[str]] = '*',
                    mode: str = 'cell',
                    constituents: IntSequence = None,
                    target_dir: Union[str, Path] = None,
@@ -1683,7 +1683,7 @@ class Result:
         output : (list of) str, optional
             Names of the datasets to export to the VTK file.
             Defaults to '*', in which case all datasets are exported.
-        mode : {'cell', 'point'}
+        mode : {'cell', 'point'}, optional
             Export in cell format or point format.
             Defaults to 'cell'.
         constituents : (list of) int, optional
@@ -1691,13 +1691,13 @@ class Result:
             Defaults to None, in which case all constituents are considered.
         target_dir : str or pathlib.Path, optional
             Directory to save VTK files. Will be created if non-existent.
-        fill_float : float
+        fill_float : float, optional
             Fill value for non-existent entries of floating point type.
             Defaults to NaN.
-        fill_int : int
+        fill_int : int, optional
             Fill value for non-existent entries of integer type.
             Defaults to 0.
-        parallel : bool
+        parallel : bool, optional
             Write VTK files in parallel in a separate background process.
             Defaults to True.
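
For orientation, a hedged sketch of export_VTK with the parameters documented above (the result file name and the dataset labels 'F' and 'P' are assumptions for illustration):

    import damask
    r = damask.Result('my_job.hdf5')               # placeholder DADF5 result file
    r.export_VTK()                                 # defaults: output='*', mode='cell', fill_float=NaN, fill_int=0, parallel=True
    r.export_VTK(output=['F','P'], mode='point',   # assumed dataset labels, exported on the point grid
                 parallel=False)                   # write in the foreground rather than a background process
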
@@ -1775,14 +1775,14 @@ class Result:
 
         Parameters
         ----------
-        output : (list of) str
+        output : (list of) str, optional
             Names of the datasets to read.
             Defaults to '*', in which case all datasets are read.
-        flatten : bool
+        flatten : bool, optional
             Remove singular levels of the folder hierarchy.
             This might be beneficial in case of single increment,
             phase/homogenization, or field. Defaults to True.
-        prune : bool
+        prune : bool, optional
             Remove branches with no data. Defaults to True.
 
         Returns
@@ -1814,6 +1814,50 @@ class Result:
         return None if (type(r) == dict and r == {}) else r
 
 
+    def export_DADF5(self,
+                     fname,
+                     output: Union[str, List[str]] = '*'):
+        """
+        Export visible components into a new DADF5 file.
+
+        A DADF5 (DAMASK HDF5) file contains DAMASK results.
+        Its group/folder structure reflects the layout in material.yaml.
+
+        Parameters
+        ----------
+        fname : str or pathlib.Path
+            Name of the DADF5 file to be created.
+        output : (list of) str, optional
+            Names of the datasets to export.
+            Defaults to '*', in which case all visible datasets are exported.
+
+        """
+        if Path(fname).expanduser().absolute() == self.fname:
+            raise PermissionError(f'cannot overwrite {self.fname}')
+
+        with h5py.File(self.fname,'r') as f_in, h5py.File(fname,'w') as f_out:
+            for k,v in f_in.attrs.items():
+                f_out.attrs.create(k,v)
+            for g in ['setup','geometry','cell_to']:
+                f_in.copy(g,f_out)
+
+            for inc in util.show_progress(self.visible['increments']):
+                f_in.copy(inc,f_out,shallow=True)
+                for out in _match(output,f_in['/'.join([inc,'geometry'])].keys()):
+                    f_in[inc]['geometry'].copy(out,f_out[inc]['geometry'])
+                for label in self.homogenizations:
+                    f_in[inc]['homogenization'].copy(label,f_out[inc]['homogenization'],shallow=True)
+                for label in self.phases:
+                    f_in[inc]['phase'].copy(label,f_out[inc]['phase'],shallow=True)
+
+                for ty in ['phase','homogenization']:
+                    for label in self.visible[ty+'s']:
+                        for field in _match(self.visible['fields'],f_in['/'.join([inc,ty,label])].keys()):
+                            p = '/'.join([inc,ty,label,field])
+                            for out in _match(output,f_in[p].keys()):
+                                f_in[p].copy(out,f_out[p])
+
+
     def place(self,
               output: Union[str, List[str]] = '*',
               flatten: bool = True,
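
A short usage sketch of the new method, mirroring the tests added in this commit (the result file name and the view selection below are placeholders): restrict the view, then write only the visible data to a fresh DADF5 file.

    import damask
    r = damask.Result('my_job.hdf5')             # placeholder DADF5 result file
    r = r.view(phases=r.phases[:1])              # keep only one phase
    r = r.view(increments=r.increments[:2])      # keep only two increments
    r.export_DADF5('subset.hdf5')                # target must differ from r.fname, otherwise PermissionError
    subset = damask.Result('subset.hdf5')        # the exported file is again a regular DADF5 result
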
@@ -1836,19 +1880,19 @@ class Result:
         output : (list of) str, optional
             Names of the datasets to read.
             Defaults to '*', in which case all datasets are placed.
-        flatten : bool
+        flatten : bool, optional
             Remove singular levels of the folder hierarchy.
             This might be beneficial in case of single increment or field.
             Defaults to True.
-        prune : bool
+        prune : bool, optional
             Remove branches with no data. Defaults to True.
         constituents : (list of) int, optional
             Constituents to consider.
             Defaults to None, in which case all constituents are considered.
-        fill_float : float
+        fill_float : float, optional
             Fill value for non-existent entries of floating point type.
             Defaults to NaN.
-        fill_int : int
+        fill_int : int, optional
             Fill value for non-existent entries of integer type.
             Defaults to 0.
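
As a hedged reminder of the two accessors whose docstrings are touched above (the dataset name 'sigma' is an assumption for illustration): get() returns data in the file's phase/homogenization hierarchy, while place() merges it onto the cells, padding missing entries with fill_float/fill_int.

    import damask
    r = damask.Result('my_job.hdf5')     # placeholder DADF5 result file
    tree  = r.get('sigma')               # nested dict following the file hierarchy (singular levels flattened)
    cells = r.place('sigma')             # same data merged per cell, padded with fill_float/fill_int
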

python/tests/test_Result.py

@@ -5,10 +5,13 @@ import shutil
 import os
 import sys
 import hashlib
+import fnmatch
+import random
 from datetime import datetime
 
 import pytest
 import vtk
+import h5py
 import numpy as np
 
 from damask import Result
@@ -542,6 +545,11 @@ class TestResult:
     @pytest.mark.parametrize('overwrite',[True,False])
     def test_export_setup(self,ref_path,tmp_path,fname,output,overwrite):
         r = Result(ref_path/fname)
+        r.export_setup(output,target_dir=tmp_path)
+        with h5py.File(ref_path/fname,'r') as f_hdf5:
+            for file in fnmatch.filter(f_hdf5['setup'].keys(),output):
+                with open(tmp_path/file) as f:
+                    assert f_hdf5[f'setup/{file}'][()][0].decode() == f.read()
         r.export_setup(output,target_dir=tmp_path,overwrite=overwrite)
 
     def test_export_setup_custom_path(self,ref_path,tmp_path):
@@ -555,3 +563,21 @@ class TestResult:
             os.chdir(cwd)
             r.export_setup('material.yaml',target_dir=t)
             assert 'material.yaml' in os.listdir(absdir); (absdir/'material.yaml').unlink()
+
+    @pytest.mark.parametrize('fname',['4grains2x4x3_compressionY.hdf5',
+                                      '6grains6x7x8_single_phase_tensionY.hdf5'])
+    def test_export_DADF5(self,ref_path,tmp_path,fname):
+        r = Result(ref_path/fname)
+        r = r.view(phases = random.sample(r.phases,1))
+        r = r.view(increments = random.sample(r.increments,np.random.randint(2,len(r.increments))))
+        r.export_DADF5(tmp_path/fname)
+        r_exp = Result(tmp_path/fname)
+        assert str(r.get()) == str(r_exp.get())
+        assert str(r.place()) == str(r_exp.place())
+
+    @pytest.mark.parametrize('fname',['4grains2x4x3_compressionY.hdf5',
+                                      '6grains6x7x8_single_phase_tensionY.hdf5'])
+    def test_export_DADF5_name_clash(self,ref_path,tmp_path,fname):
+        r = Result(ref_path/fname)
+        with pytest.raises(PermissionError):
+            r.export_DADF5(r.fname)