added custom path export option to Result.export_* functions

Daniel Otto de Mentock 2022-11-06 18:10:23 +00:00 committed by Philip Eisenlohr
parent 92f6d7e68d
commit 2c3da9c1bf
3 changed files with 668 additions and 593 deletions


@@ -276,7 +276,7 @@ class Result:
         Increment number of all increments within the given bounds.
         """
-        s,e = map(lambda x: int(x[10:] if isinstance(x,str) and x.startswith(prefix_inc) else x),
+        s,e = map(lambda x: int(x.split(prefix_inc)[-1] if isinstance(x,str) and x.startswith(prefix_inc) else x),
                   (self.incs[ 0] if start is None else start,
                    self.incs[-1] if end is None else end))
         return [i for i in self.incs if s <= i <= e]
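A quick illustration of the change above: the old code stripped a hard-coded 10 characters, which silently breaks if the increment prefix ever changes, while splitting on prefix_inc derives the length from the prefix itself. A minimal sketch; the value 'increment_' for prefix_inc is an assumption consistent with the 10-character slice:

    prefix_inc = 'increment_'   # assumed value, matching the old x[10:] slice
    for label in ('increment_0','increment_120'):
        assert label.split(prefix_inc)[-1] == label[10:]   # same digits, no magic number
        print(int(label.split(prefix_inc)[-1]))            # -> 0, 120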
@@ -1516,7 +1516,9 @@ class Result:
     def export_XDMF(self,
-                    output: Union[str, List[str]] = '*'):
+                    output: Union[str, List[str]] = '*',
+                    target_dir: Union[str, Path] = None,
+                    absolute_path: bool = False):
         """
         Write XDMF file to directly visualize data from DADF5 file.
@@ -1529,12 +1531,17 @@ class Result:
         output : (list of) str
             Names of the datasets included in the XDMF file.
             Defaults to '*', in which case all datasets are considered.
+        target_dir : str or pathlib.Path, optional
+            Directory to save XDMF file. Will be created if non-existent.
+        absolute_path : bool, optional
+            Store absolute (instead of relative) path to DADF5 file.
+            Defaults to False, i.e. the XDMF file expects the
+            DADF5 file at a stable relative path.

         """
         if self.N_constituents != 1 or len(self.phases) != 1 or not self.structured:
             raise TypeError('XDMF output requires structured grid with single phase and single constituent.')

         attribute_type_map = defaultdict(lambda:'Matrix', ( ((),'Scalar'), ((3,),'Vector'), ((3,3),'Tensor')) )

         def number_type_map(dtype):
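A minimal usage sketch of the two new keyword arguments (the file name my_sim.hdf5 is hypothetical):

    from damask import Result

    r = Result('my_sim.hdf5')
    r.export_XDMF(target_dir='viz')                     # my_sim.xdmf in ./viz, relative link to DADF5 file
    r.export_XDMF(target_dir='viz',absolute_path=True)  # XDMF file survives being moved; DADF5 file must stay put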
@@ -1544,29 +1551,34 @@ class Result:
         xdmf = ET.Element('Xdmf')
-        xdmf.attrib={'Version': '2.0',
-                     'xmlns:xi': 'http://www.w3.org/2001/XInclude'}
+        xdmf.attrib = {'Version': '2.0',
+                       'xmlns:xi': 'http://www.w3.org/2001/XInclude'}

         domain = ET.SubElement(xdmf, 'Domain')
         collection = ET.SubElement(domain, 'Grid')
-        collection.attrib={'GridType': 'Collection',
-                           'CollectionType': 'Temporal',
-                           'Name': 'Increments'}
+        collection.attrib = {'GridType': 'Collection',
+                             'CollectionType': 'Temporal',
+                             'Name': 'Increments'}

         time = ET.SubElement(collection, 'Time')
-        time.attrib={'TimeType': 'List'}
+        time.attrib = {'TimeType': 'List'}

         time_data = ET.SubElement(time, 'DataItem')
         times = [self.times[self.increments.index(i)] for i in self.visible['increments']]
-        time_data.attrib={'Format': 'XML',
-                          'NumberType': 'Float',
-                          'Dimensions': f'{len(times)}'}
+        time_data.attrib = {'Format': 'XML',
+                            'NumberType': 'Float',
+                            'Dimensions': f'{len(times)}'}
         time_data.text = ' '.join(map(str,times))

         attributes = []
         data_items = []

+        hdf5_name = self.fname.name
+        hdf5_dir = self.fname.parent
+        xdmf_dir = Path.cwd() if target_dir is None else Path(target_dir)
+        hdf5_link = (hdf5_dir if absolute_path else Path(os.path.relpath(hdf5_dir,xdmf_dir.resolve())))/hdf5_name
+
         with h5py.File(self.fname,'r') as f:
             for inc in self.visible['increments']:
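The four added lines above decide which path the XDMF file stores for its HDF5 data items: either the absolute directory of the DADF5 file, or its location relative to the resolved output directory. A standalone sketch of the same logic (the function name is illustrative, not part of the API):

    import os
    from pathlib import Path

    def hdf5_link(fname: Path, xdmf_dir: Path, absolute_path: bool) -> Path:
        hdf5_dir = fname.parent
        return (hdf5_dir if absolute_path
                else Path(os.path.relpath(hdf5_dir,xdmf_dir.resolve())))/fname.name

    print(hdf5_link(Path('/data/sim/my_sim.hdf5'),Path('/data/viz'),False))  # ../sim/my_sim.hdf5
    print(hdf5_link(Path('/data/sim/my_sim.hdf5'),Path('/data/viz'),True))   # /data/sim/my_sim.hdf5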
@@ -1601,8 +1613,7 @@ class Result:
                 data_items[-1].attrib = {'Format': 'HDF',
                                          'Precision': '8',
                                          'Dimensions': '{} {} {} 3'.format(*(self.cells[::-1]+1))}
-                data_items[-1].text = f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n'
+                data_items[-1].text = f'{hdf5_link}:/{inc}/geometry/u_n'

                 for ty in ['phase','homogenization']:
                     for label in self.visible[ty+'s']:
                         for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
@ -1624,9 +1635,10 @@ class Result:
'Precision': f'{dtype.itemsize}',
'Dimensions': '{} {} {} {}'.format(*self.cells[::-1],1 if shape == () else
np.prod(shape))}
data_items[-1].text = f'{os.path.split(self.fname)[1]}:{name}'
data_items[-1].text = f'{hdf5_link}:{name}'
with util.open_text(self.fname.with_suffix('.xdmf').name,'w') as f:
xdmf_dir.mkdir(parents=True,exist_ok=True)
with util.open_text((xdmf_dir/hdf5_name).with_suffix('.xdmf'),'w') as f:
f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())
@@ -1654,6 +1666,7 @@ class Result:
                    output: Union[str,list] = '*',
                    mode: str = 'cell',
                    constituents: IntSequence = None,
+                   target_dir: Union[str, Path] = None,
                    fill_float: float = np.nan,
                    fill_int: int = 0,
                    parallel: bool = True):
@ -1676,6 +1689,8 @@ class Result:
constituents : (list of) int, optional
Constituents to consider.
Defaults to None, in which case all constituents are considered.
target_dir : str or pathlib.Path, optional
Directory to save VTK files. Will be created if non-existent.
fill_float : float
Fill value for non-existent entries of floating point type.
Defaults to NaN.
@@ -1706,6 +1721,9 @@ class Result:
         at_cell_ph,in_data_ph,at_cell_ho,in_data_ho = self._mappings()

+        vtk_dir = Path.cwd() if target_dir is None else Path(target_dir)
+        vtk_dir.mkdir(parents=True,exist_ok=True)
+
         with h5py.File(self.fname,'r') as f:
             if self.version_minor >= 13:
                 creator = f.attrs['creator'] if h5py3 else f.attrs['creator'].decode()
@@ -1744,8 +1762,9 @@ class Result:
                 for label,dataset in outs.items():
                     v = v.set(' / '.join(['/'.join([ty,field,label]),dataset.dtype.metadata['unit']]),dataset)

-            v.save(f'{self.fname.stem}_inc{inc[10:].zfill(N_digits)}',parallel=parallel)
+            v.save(vtk_dir/f'{self.fname.stem}_inc{inc.split(prefix_inc)[-1].zfill(N_digits)}',
+                   parallel=parallel)


     def get(self,
             output: Union[str, List[str]] = '*',
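Usage sketch for the VTK counterpart (hypothetical file name): one file per visible increment is written into target_dir, e.g. my_sim_inc00.vti for a cell-mode export.

    from damask import Result

    Result('my_sim.hdf5').export_VTK(target_dir='vtk_out',parallel=False)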
@@ -1890,7 +1909,9 @@ class Result:
     def export_setup(self,
                      output: Union[str, List[str]] = '*',
-                     overwrite: bool = False):
+                     target_dir: Union[str, Path] = None,
+                     overwrite: bool = False,
+                     ):
         """
         Export configuration files.
@ -1899,21 +1920,35 @@ class Result:
output : (list of) str, optional
Names of the datasets to export to the file.
Defaults to '*', in which case all datasets are exported.
target_dir : str or pathlib.Path, optional
Directory to save configuration files. Will be created if non-existent.
overwrite : bool, optional
Overwrite existing configuration files.
Defaults to False.
"""
def export(name: str, obj: Union[h5py.Dataset,h5py.Group], output: Union[str,List[str]], overwrite: bool):
if type(obj) == h5py.Dataset and _match(output,[name]):
d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode()
if not Path(name).exists() or overwrite:
with util.open_text(name,'w') as f_out: f_out.write(obj[0].decode())
print(f'Exported {d} to "{name}".')
else:
print(f'"{name}" exists, {d} not exported.')
elif type(obj) == h5py.Group:
os.makedirs(name, exist_ok=True)
def export(name: str,
obj: Union[h5py.Dataset,h5py.Group],
output: Union[str,List[str]],
cfg_dir: Path,
overwrite: bool):
cfg = cfg_dir/name
if type(obj) == h5py.Dataset and _match(output,[name]):
d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode()
if overwrite or not cfg.exists():
with util.open_text(cfg,'w') as f_out: f_out.write(obj[0].decode())
print(f'Exported {d} to "{cfg}".')
else:
print(f'"{cfg}" exists, {d} not exported.')
elif type(obj) == h5py.Group:
cfg.mkdir(parents=True,exist_ok=True)
cfg_dir = (Path.cwd() if target_dir is None else Path(target_dir))
cfg_dir.mkdir(parents=True,exist_ok=True)
with h5py.File(self.fname,'r') as f_in:
f_in['setup'].visititems(partial(export,output=output,overwrite=overwrite))
f_in['setup'].visititems(partial(export,
output=output,
cfg_dir=cfg_dir,
overwrite=overwrite))
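Usage sketch (hypothetical file name); the second call is skipped because the files already exist, the third forces a rewrite:

    from damask import Result

    r = Result('my_sim.hdf5')
    r.export_setup(target_dir='setup')                  # writes e.g. material.yaml
    r.export_setup(target_dir='setup')                  # "material.yaml" exists, not exported
    r.export_setup(target_dir='setup',overwrite=True)   # rewritten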

File diff suppressed because it is too large.


@@ -323,12 +323,9 @@ class TestResult:
         created_first = last.place('sigma').dtype.metadata['created']
         created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')

-        if overwrite == 'on':
-            last = last.view(protected=False)
-        else:
-            last = last.view(protected=True)
+        last = last.view(protected=overwrite != 'on')

-        time.sleep(2.)
+        time.sleep(2)

         try:
             last.add_calculation('#sigma#*0.0+311.','sigma','not the Cauchy stress')
         except ValueError:
@@ -380,13 +377,12 @@ class TestResult:
     @pytest.mark.xfail(int(vtk.vtkVersion.GetVTKVersion().split('.')[0])<9, reason='missing "Direction" attribute')
     def test_vtk(self,request,tmp_path,ref_path,update,patch_execution_stamp,patch_datetime_now,output,fname,inc):
         result = Result(ref_path/fname).view(increments=inc)
-        os.chdir(tmp_path)
-        result.export_VTK(output,parallel=False)
+        result.export_VTK(output,target_dir=tmp_path,parallel=False)
         fname = fname.split('.')[0]+f'_inc{(inc if type(inc) == int else inc[0]):0>2}.vti'
         v = VTK.load(tmp_path/fname)
         v.comments = 'n/a'
         v.save(tmp_path/fname,parallel=False)
-        with open(fname) as f:
+        with open(tmp_path/fname) as f:
             cur = hashlib.md5(f.read().encode()).hexdigest()
         if update:
             with open((ref_path/'export_VTK'/request.node.name).with_suffix('.md5'),'w') as f:
@@ -416,34 +412,34 @@ class TestResult:
         with pytest.raises(ValueError):
             single_phase.export_VTK(mode='invalid')

+    def test_vtk_custom_path(self,tmp_path,single_phase):
+        export_dir = tmp_path/'export_dir'
+        single_phase.export_VTK(mode='point',target_dir=export_dir,parallel=False)
+        assert set(os.listdir(export_dir))==set([f'{single_phase.fname.stem}_inc{i:02}.vtp' for i in range(0,40+1,4)])
+
     def test_XDMF_datatypes(self,tmp_path,single_phase,update,ref_path):
-        for shape in [('scalar',()),('vector',(3,)),('tensor',(3,3)),('matrix',(12,))]:
+        for what,shape in {'scalar':(),'vector':(3,),'tensor':(3,3),'matrix':(12,)}.items():
             for dtype in ['f4','f8','i1','i2','i4','i8','u1','u2','u4','u8']:
-                single_phase.add_calculation(f"np.ones(np.shape(#F#)[0:1]+{shape[1]},'{dtype}')",f'{shape[0]}_{dtype}')
-        fname = os.path.splitext(os.path.basename(single_phase.fname))[0]+'.xdmf'
-        os.chdir(tmp_path)
-        single_phase.export_XDMF()
+                single_phase.add_calculation(f"np.ones(np.shape(#F#)[0:1]+{shape},'{dtype}')",f'{what}_{dtype}')
+        xdmf_path = tmp_path/single_phase.fname.with_suffix('.xdmf').name
+        single_phase.export_XDMF(target_dir=tmp_path)
         if update:
-            shutil.copy(tmp_path/fname,ref_path/fname)
-        assert sorted(open(tmp_path/fname).read()) == sorted(open(ref_path/fname).read()) # XML is not ordered
+            shutil.copy(xdmf_path,ref_path/xdmf_path.name)
+        assert sorted(open(xdmf_path).read()) == sorted(open(ref_path/xdmf_path.name).read())

     @pytest.mark.skipif(not (hasattr(vtk,'vtkXdmfReader') and hasattr(vtk.vtkXdmfReader(),'GetOutput')),
                         reason='https://discourse.vtk.org/t/2450')
     def test_XDMF_shape(self,tmp_path,single_phase):
-        os.chdir(tmp_path)
-        single_phase.export_XDMF()
-        fname = os.path.splitext(os.path.basename(single_phase.fname))[0]+'.xdmf'
+        single_phase.export_XDMF(target_dir=single_phase.fname.parent)
+        fname = single_phase.fname.with_suffix('.xdmf')
         reader_xdmf = vtk.vtkXdmfReader()
         reader_xdmf.SetFileName(fname)
         reader_xdmf.Update()
         dim_xdmf = reader_xdmf.GetOutput().GetDimensions()
         bounds_xdmf = reader_xdmf.GetOutput().GetBounds()

-        single_phase.view(increments=0).export_VTK(parallel=False)
-        fname = os.path.splitext(os.path.basename(single_phase.fname))[0]+'_inc00.vti'
+        single_phase.view(increments=0).export_VTK(target_dir=single_phase.fname.parent,parallel=False)
+        fname = single_phase.fname.with_name(single_phase.fname.stem+'_inc00.vti')
         reader_vti = vtk.vtkXMLImageDataReader()
         reader_vti.SetFileName(fname)
         reader_vti.Update()
@@ -455,6 +451,40 @@ class TestResult:
         with pytest.raises(TypeError):
             default.export_XDMF()

+    def test_XDMF_custom_path(self,single_phase,tmp_path):
+        os.chdir(tmp_path)
+        single_phase.export_XDMF()
+        assert single_phase.fname.with_suffix('.xdmf').name in os.listdir(tmp_path)
+        export_dir = tmp_path/'export_dir'
+        single_phase.export_XDMF(target_dir=export_dir)
+        assert single_phase.fname.with_suffix('.xdmf').name in os.listdir(export_dir)
+
+    @pytest.mark.skipif(not (hasattr(vtk,'vtkXdmfReader') and hasattr(vtk.vtkXdmfReader(),'GetOutput')),
+                        reason='https://discourse.vtk.org/t/2450')
+    def test_XDMF_relabs_path(self,single_phase,tmp_path):
+        def dims(xdmf):
+            reader_xdmf = vtk.vtkXdmfReader()
+            reader_xdmf.SetFileName(xdmf)
+            reader_xdmf.Update()
+            return reader_xdmf.GetOutput().GetDimensions()
+
+        single_phase.export_XDMF(target_dir=tmp_path)
+        xdmfname = single_phase.fname.with_suffix('.xdmf').name
+        ref_dims = dims(tmp_path/xdmfname)
+
+        for (d,info) in {'A': dict(absolute_path=True,
+                                   mv='..',
+                                  ),
+                         'B': dict(absolute_path=False,
+                                   mv='../A',
+                                  ),
+                        }.items():
+            sub = tmp_path/d; sub.mkdir(exist_ok=True)
+            single_phase.export_XDMF(target_dir=sub,absolute_path=info['absolute_path'])
+            os.replace(sub/xdmfname,sub/info['mv']/xdmfname)
+            assert ref_dims == dims(sub/info['mv']/xdmfname)
+
     @pytest.mark.parametrize('view,output,flatten,prune',
                              [({},['F','P','F','L_p','F_e','F_p'],True,True),
                               ({'increments':3},'F',True,True),
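Why the moves in test_XDMF_relabs_path are expected to succeed (an inference from the chosen paths, not stated in the diff): the absolute link written into A keeps resolving after the file moves up to tmp_path, and the relative link written into B stays valid after the move into the sibling directory A, because directories at the same depth under the same parent share the identical relative path to the DADF5 file:

    import os
    # siblings at equal depth yield the same relative path to a common target
    assert os.path.relpath('/data/sim','/tmp/work/A') == os.path.relpath('/data/sim','/tmp/work/B')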
@@ -511,7 +541,17 @@ class TestResult:
     @pytest.mark.parametrize('output',['material.yaml','*'])
     @pytest.mark.parametrize('overwrite',[True,False])
     def test_export_setup(self,ref_path,tmp_path,fname,output,overwrite):
-        os.chdir(tmp_path)
         r = Result(ref_path/fname)
-        r.export_setup(output,overwrite)
-        r.export_setup(output,overwrite)
+        r.export_setup(output,target_dir=tmp_path,overwrite=overwrite)

+    def test_export_setup_custom_path(self,ref_path,tmp_path):
+        src = ref_path/'4grains2x4x3_compressionY.hdf5'
+        subdir = 'export_dir'
+        absdir = tmp_path/subdir
+        absdir.mkdir()
+
+        r = Result(src)
+        for t,cwd in zip([absdir,subdir,None],[tmp_path,tmp_path,absdir]):
+            os.chdir(cwd)
+            r.export_setup('material.yaml',target_dir=t)
+            assert 'material.yaml' in os.listdir(absdir); (absdir/'material.yaml').unlink()