Merge branch '285-attributes-of-damask-result-should-depend-on-view' into 'development'
attributes should depend on view

Closes #285

See merge request damask/DAMASK!877
commit 303061fb96
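A minimal sketch of the behaviour this merge request introduces (the file name and the selected increments are hypothetical, chosen only for illustration): the attributes increments, times, phases, homogenizations, and fields of damask.Result become properties that report only what is visible in the current view, and the new view_all() helper restores full visibility.

    import damask

    r = damask.Result('my_job.hdf5')             # hypothetical DADF5 result file
    print(r.phases)                              # all phases: everything is visible by default
    r_inc = r.view(increments=r.increments[:2])  # restrict the view to the first two increments
    print(r_inc.increments, r_inc.times)         # both now report only the visible increments
    r_all = r_inc.view_all()                     # new helper: make everything visible again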
@@ -1 +1 @@
-Subproject commit 62df7f24f2a95fda255f7d20b130afcfeecb1b4a
+Subproject commit bb989504bea36fe2dbb881bc9232b0ea3193631a
@@ -1,2 +1,4 @@
 [run]
 source = damask
+[report]
+show_missing = true
@@ -2,7 +2,6 @@ import re
 import fnmatch
 import os
 import copy
-import datetime
 import xml.etree.ElementTree as ET # noqa
 import xml.dom.minidom
 import functools

@@ -117,30 +116,30 @@ class Result:
 self.origin = f['geometry'].attrs['origin']

 r = re.compile(rf'{prefix_inc}([0-9]+)')
-self.increments = sorted([i for i in f.keys() if r.match(i)],key=util.natural_sort)
-self.times = np.around([f[i].attrs['t/s'] for i in self.increments],12)
-if len(self.increments) == 0:
+self._increments = sorted([i for i in f.keys() if r.match(i)],key=util.natural_sort)
+self._times = {int(i.split('_')[1]):np.around(f[i].attrs['t/s'],12) for i in self._increments}
+if len(self._increments) == 0:
 raise ValueError('incomplete DADF5 file')

 self.N_materialpoints, self.N_constituents = np.shape(f['cell_to/phase'])

 self.homogenization = f['cell_to/homogenization']['label'].astype('str')
-self.homogenizations = sorted(np.unique(self.homogenization),key=util.natural_sort)
+self._homogenizations = sorted(np.unique(self.homogenization),key=util.natural_sort)
 self.phase = f['cell_to/phase']['label'].astype('str')
-self.phases = sorted(np.unique(self.phase),key=util.natural_sort)
+self._phases = sorted(np.unique(self.phase),key=util.natural_sort)

-self.fields: List[str] = []
-for c in self.phases:
-self.fields += f['/'.join([self.increments[0],'phase',c])].keys()
-for m in self.homogenizations:
-self.fields += f['/'.join([self.increments[0],'homogenization',m])].keys()
-self.fields = sorted(set(self.fields),key=util.natural_sort) # make unique
+fields: List[str] = []
+for c in self._phases:
+fields += f['/'.join([self._increments[0],'phase',c])].keys()
+for m in self._homogenizations:
+fields += f['/'.join([self._increments[0],'homogenization',m])].keys()
+self._fields = sorted(set(fields),key=util.natural_sort) # make unique

-self.visible = {'increments': self.increments,
-'phases': self.phases,
-'homogenizations': self.homogenizations,
-'fields': self.fields,
+self._visible = {'increments': self._increments,
+'phases': self._phases,
+'homogenizations': self._homogenizations,
+'fields': self._fields,
 }

 self.fname = Path(fname).expanduser().absolute()

@@ -170,7 +169,7 @@ class Result:
 header = [f'Created by {f.attrs["creator"]}',
 f' on {f.attrs["created"]}',
 f' executing "{f.attrs["call"]}"']
-visible_increments = self.visible['increments']
+visible_increments = self._visible['increments']

 first = self.view(increments=visible_increments[0:1]).list_data()

@@ -221,32 +220,33 @@ class Result:

 if what == 'increments':
 choice = [c if isinstance(c,str) and c.startswith(prefix_inc) else
-self.increments[c] if isinstance(c,int) and c<0 else
+self._increments[c] if isinstance(c,int) and c<0 else
 f'{prefix_inc}{c}' for c in choice]
 elif what == 'times':
-atol = 1e-2 * np.min(np.diff(self.times))
+times = list(self._times.values())
+atol = 1e-2 * np.min(np.diff(times))
 what = 'increments'
 if choice == ['*']:
-choice = self.increments
+choice = self._increments
 else:
 iterator = np.array(choice).astype(float)
 choice = []
 for c in iterator:
-idx = np.searchsorted(self.times,c,side='left')
-if idx<len(self.times) and np.isclose(c,self.times[idx],rtol=0,atol=atol):
-choice.append(self.increments[idx])
-elif idx>0 and np.isclose(c,self.times[idx-1],rtol=0,atol=atol):
-choice.append(self.increments[idx-1])
+idx = np.searchsorted(times,c,side='left')
+if idx<len(self._times) and np.isclose(c,times[idx],rtol=0,atol=atol):
+choice.append(self._increments[idx])
+elif idx>0 and np.isclose(c,times[idx-1],rtol=0,atol=atol):
+choice.append(self._increments[idx-1])

-valid = _match(choice,getattr(self,what))
-existing = set(self.visible[what])
+valid = _match(choice,getattr(self,'_'+what))
+existing = set(self._visible[what])

 if action == 'set':
-dup.visible[what] = sorted(set(valid), key=util.natural_sort)
+dup._visible[what] = sorted(set(valid), key=util.natural_sort)
 elif action == 'add':
-dup.visible[what] = sorted(existing.union(valid), key=util.natural_sort)
+dup._visible[what] = sorted(existing.union(valid), key=util.natural_sort)
 elif action == 'del':
-dup.visible[what] = sorted(existing.difference(valid), key=util.natural_sort)
+dup._visible[what] = sorted(existing.difference(valid), key=util.natural_sort)

 return dup

@@ -271,9 +271,9 @@ class Result:

 """
 s,e = map(lambda x: int(x.split(prefix_inc)[-1] if isinstance(x,str) and x.startswith(prefix_inc) else x),
-(self.incs[ 0] if start is None else start,
-self.incs[-1] if end is None else end))
-return [i for i in self.incs if s <= i <= e]
+(self._incs[ 0] if start is None else start,
+self._incs[-1] if end is None else end))
+return [i for i in self._incs if s <= i <= e]

 def times_in_range(self,
 start: Optional[float] = None,

@@ -295,7 +295,7 @@ class Result:

 """
 s,e = (self.times[ 0] if start is None else start,
 self.times[-1] if end is None else end)
 return [t for t in self.times if s <= t <= e]


@@ -441,6 +441,19 @@ class Result:
 return self._manage_view('del',increments,times,phases,homogenizations,fields)


+def view_all(self):
+"""
+Make all attributes visible.
+
+Returns
+-------
+modified_view : damask.Result
+View with all attributes visible.
+
+"""
+return self.view(increments='*',phases='*',homogenizations='*',fields='*')
+
+
 def rename(self,
 name_src: str,
 name_dst: str):

@@ -471,10 +484,10 @@ class Result:
 raise PermissionError('rename datasets')

 with h5py.File(self.fname,'a') as f:
-for inc in self.visible['increments']:
+for inc in self._visible['increments']:
 for ty in ['phase','homogenization']:
-for label in self.visible[ty+'s']:
-for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
+for label in self._visible[ty+'s']:
+for field in _match(self._visible['fields'],f['/'.join([inc,ty,label])].keys()):
 path_src = '/'.join([inc,ty,label,field,name_src])
 path_dst = '/'.join([inc,ty,label,field,name_dst])
 if path_src in f.keys():

@@ -510,10 +523,10 @@ class Result:
 raise PermissionError('delete datasets')

 with h5py.File(self.fname,'a') as f:
-for inc in self.visible['increments']:
+for inc in self._visible['increments']:
 for ty in ['phase','homogenization']:
-for label in self.visible[ty+'s']:
-for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
+for label in self._visible[ty+'s']:
+for field in _match(self._visible['fields'],f['/'.join([inc,ty,label])].keys()):
 path = '/'.join([inc,ty,label,field,name])
 if path in f.keys(): del f[path]

@@ -530,13 +543,13 @@ class Result:
 """
 msg = []
 with h5py.File(self.fname,'r') as f:
-for inc in self.visible['increments']:
-msg += [f'\n{inc} ({self.times[self.increments.index(inc)]} s)']
+for inc in self._visible['increments']:
+msg += [f'\n{inc} ({self._times[int(inc.split("_")[1])]} s)']
 for ty in ['phase','homogenization']:
 msg += [f' {ty}']
-for label in self.visible[ty+'s']:
+for label in self._visible[ty+'s']:
 msg += [f' {label}']
-for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
+for field in _match(self._visible['fields'],f['/'.join([inc,ty,label])].keys()):
 msg += [f' {field}']
 for d in f['/'.join([inc,ty,label,field])].keys():
 dataset = f['/'.join([inc,ty,label,field,d])]

@@ -564,8 +577,28 @@ class Result:
 return files

 @property
-def incs(self):
-return [int(i.split(prefix_inc)[-1]) for i in self.increments]
+def _incs(self):
+return [int(i.split(prefix_inc)[-1]) for i in self._increments]

+@property
+def increments(self):
+return [int(i.split(prefix_inc)[-1]) for i in self._visible['increments']]
+
+@property
+def times(self):
+return [self._times[i] for i in self.increments]
+
+@property
+def phases(self):
+return self._visible['phases']
+
+@property
+def homogenizations(self):
+return self._visible['homogenizations']
+
+@property
+def fields(self):
+return self._visible['fields']
+

 @property

@@ -1426,7 +1459,7 @@ class Result:
 'meta':d.data.dtype.metadata}}
 r = func(**dataset,**args)
 result = r['data'].reshape((-1,)+r['data'].shape[3:])
-for x in self.visible[ty[0]+'s']:
+for x in self._visible[ty[0]+'s']:
 if ty[0] == 'phase':
 result1 = result[at_cell_ph[0][x]]
 if ty[0] == 'homogenization':

@@ -1435,9 +1468,8 @@ class Result:
 path = '/'.join(['/',increment[0],ty[0],x,field[0]])
 h5_dataset = f[path].create_dataset(r['label'],data=result1)

-now = datetime.datetime.now().astimezone()
-h5_dataset.attrs['created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
-now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
+h5_dataset.attrs['created'] = util.time_stamp() if h5py3 else \
+util.time_stamp().encode()

 for l,v in r['meta'].items():
 h5_dataset.attrs[l.lower()]=v.encode() if not h5py3 and type(v) is str else v

@@ -1489,10 +1521,10 @@ class Result:

 groups = []
 with h5py.File(self.fname,'r') as f:
-for inc in self.visible['increments']:
+for inc in self._visible['increments']:
 for ty in ['phase','homogenization']:
-for label in self.visible[ty+'s']:
-for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
+for label in self._visible[ty+'s']:
+for field in _match(self._visible['fields'],f['/'.join([inc,ty,label])].keys()):
 group = '/'.join([inc,ty,label,field])
 if set(datasets.values()).issubset(f[group].keys()): groups.append(group)

@@ -1522,9 +1554,8 @@ class Result:
 compression_opts = 6 if compress else None,
 shuffle=True,fletcher32=True)

-now = datetime.datetime.now().astimezone()
-dataset.attrs['created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
-now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
+dataset.attrs['created'] = util.time_stamp() if h5py3 else \
+util.time_stamp().encode()

 for l,v in result['meta'].items():
 dataset.attrs[l.lower()]=v.encode() if not h5py3 and type(v) is str else v

@@ -1545,14 +1576,14 @@ class Result:
 in_data_ph = []
 for c in range(self.N_constituents):
 at_cell_ph.append({label: np.where(self.phase[:,c] == label)[0] \
-for label in self.visible['phases']})
+for label in self._visible['phases']})
 in_data_ph.append({label: f['/'.join(['cell_to','phase'])]['entry'][at_cell_ph[c][label]][:,c] \
-for label in self.visible['phases']})
+for label in self._visible['phases']})

 at_cell_ho = {label: np.where(self.homogenization[:] == label)[0] \
-for label in self.visible['homogenizations']}
+for label in self._visible['homogenizations']}
 in_data_ho = {label: f['/'.join(['cell_to','homogenization'])]['entry'][at_cell_ho[label]] \
-for label in self.visible['homogenizations']}
+for label in self._visible['homogenizations']}

 return at_cell_ph,in_data_ph,at_cell_ho,in_data_ho

@@ -1585,16 +1616,16 @@ class Result:
 r: Dict[str,Any] = {}

 with h5py.File(self.fname,'r') as f:
-for inc in util.show_progress(self.visible['increments']):
+for inc in util.show_progress(self._visible['increments']):
 r[inc] = {'phase':{},'homogenization':{},'geometry':{}}

 for out in _match(output,f['/'.join([inc,'geometry'])].keys()):
 r[inc]['geometry'][out] = _read(f['/'.join([inc,'geometry',out])])

 for ty in ['phase','homogenization']:
-for label in self.visible[ty+'s']:
+for label in self._visible[ty+'s']:
 r[inc][ty][label] = {}
-for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
+for field in _match(self._visible['fields'],f['/'.join([inc,ty,label])].keys()):
 r[inc][ty][label][field] = {}
 for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
 r[inc][ty][label][field][out] = _read(f['/'.join([inc,ty,label,field,out])])

@@ -1660,15 +1691,15 @@ class Result:

 with h5py.File(self.fname,'r') as f:

-for inc in util.show_progress(self.visible['increments']):
+for inc in util.show_progress(self._visible['increments']):
 r[inc] = {'phase':{},'homogenization':{},'geometry':{}}

 for out in _match(output,f['/'.join([inc,'geometry'])].keys()):
 r[inc]['geometry'][out] = ma.array(_read(f['/'.join([inc,'geometry',out])]),fill_value = fill_float)

 for ty in ['phase','homogenization']:
-for label in self.visible[ty+'s']:
-for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
+for label in self._visible[ty+'s']:
+for field in _match(self._visible['fields'],f['/'.join([inc,ty,label])].keys()):
 if field not in r[inc][ty].keys():
 r[inc][ty][field] = {}

@@ -1752,11 +1783,10 @@ class Result:
 time.attrib = {'TimeType': 'List'}

 time_data = ET.SubElement(time, 'DataItem')
-times = [self.times[self.increments.index(i)] for i in self.visible['increments']]
 time_data.attrib = {'Format': 'XML',
 'NumberType': 'Float',
-'Dimensions': f'{len(times)}'}
-time_data.text = ' '.join(map(str,times))
+'Dimensions': f'{len(self.times)}'}
+time_data.text = ' '.join(map(str,self.times))

 attributes = []
 data_items = []

@@ -1767,7 +1797,7 @@ class Result:
 hdf5_link = (hdf5_dir if absolute_path else Path(os.path.relpath(hdf5_dir,out_dir.resolve())))/hdf5_name

 with h5py.File(self.fname,'r') as f:
-for inc in self.visible['increments']:
+for inc in self._visible['increments']:

 grid = ET.SubElement(collection,'Grid')
 grid.attrib = {'GridType': 'Uniform',

@@ -1802,8 +1832,8 @@ class Result:
 'Dimensions': '{} {} {} 3'.format(*(self.cells[::-1]+1))}
 data_items[-1].text = f'{hdf5_link}:/{inc}/geometry/u_n'
 for ty in ['phase','homogenization']:
-for label in self.visible[ty+'s']:
-for field in _match(self.visible['fields'],f['/'.join([inc,ty,label])].keys()):
+for label in self._visible[ty+'s']:
+for field in _match(self._visible['fields'],f['/'.join([inc,ty,label])].keys()):
 for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):
 name = '/'.join([inc,ty,label,field,out])
 shape = f[name].shape[1:]

@@ -1879,7 +1909,7 @@ class Result:

 v.comments = [util.execution_stamp('Result','export_VTK')]

-N_digits = int(np.floor(np.log10(max(1,self.incs[-1]))))+1
+N_digits = int(np.floor(np.log10(max(1,self._incs[-1]))))+1

 constituents_ = constituents if isinstance(constituents,Iterable) else \
 (range(self.N_constituents) if constituents is None else [constituents]) # type: ignore

@@ -1893,20 +1923,20 @@ class Result:
 out_dir.mkdir(parents=True,exist_ok=True)

 with h5py.File(self.fname,'r') as f:
-if self.version_minor >= 13:
+if self.version_major == 1 or self.version_minor >= 13:
 creator = f.attrs['creator'] if h5py3 else f.attrs['creator'].decode()
 created = f.attrs['created'] if h5py3 else f.attrs['created'].decode()
 v.comments += [f'{creator} ({created})']

-for inc in util.show_progress(self.visible['increments']):
+for inc in util.show_progress(self._visible['increments']):

 u = _read(f['/'.join([inc,'geometry','u_n' if mode.lower() == 'cell' else 'u_p'])])
 v = v.set('u',u)

 for ty in ['phase','homogenization']:
-for field in self.visible['fields']:
+for field in self._visible['fields']:
 outs: Dict[str, np.ma.core.MaskedArray] = {}
-for label in self.visible[ty+'s']:
+for label in self._visible[ty+'s']:
 if field not in f['/'.join([inc,ty,label])].keys(): continue

 for out in _match(output,f['/'.join([inc,ty,label,field])].keys()):

@@ -1974,7 +2004,7 @@ class Result:
 if self.N_constituents != 1 or not self.structured:
 raise NotImplementedError('not a structured grid with one constituent')

-N_digits = int(np.floor(np.log10(max(1,self.incs[-1]))))+1
+N_digits = int(np.floor(np.log10(max(1,self._incs[-1]))))+1

 at_cell_ph,in_data_ph,_,_ = self._mappings()

@@ -1982,14 +2012,14 @@ class Result:
 out_dir.mkdir(parents=True,exist_ok=True)

 with h5py.File(self.fname,'r') as f:
-for inc in util.show_progress(self.visible['increments']):
+for inc in util.show_progress(self._visible['increments']):
 for c in range(self.N_constituents):
 crystal_structure = [999]
 phase_name = ['Unknown Phase Type']
 cell_orientation = np.zeros((np.prod(self.cells),3),np.float32)
 phase_ID = np.zeros((np.prod(self.cells)),dtype=np.int32)
 count = 1
-for label in self.visible['phases']:
+for label in self._visible['phases']:
 try:
 data = _read(f['/'.join([inc,'phase',label,'mechanical',q])])
 lattice = data.dtype.metadata['lattice']

@@ -2043,12 +2073,12 @@ class Result:
 cell_ensemble.create_dataset(name='PhaseName',data = phase_name, dtype=h5py.Datatype(tid))

 cell_ensemble.attrs['AttributeMatrixType'] = np.array([11],np.uint32)
-cell_ensemble.attrs['TupleDimensions'] = np.array([len(self.phases) + 1], np.uint64)
+cell_ensemble.attrs['TupleDimensions'] = np.array([len(self._phases) + 1], np.uint64)
 for group in ['CrystalStructures','PhaseTypes','PhaseName']:
 add_attribute(cell_ensemble[group], 'ComponentDimensions', np.array([1],np.uint64))
-add_attribute(cell_ensemble[group], 'Tuple Axis Dimensions', f'x={len(self.phases)+1}')
+add_attribute(cell_ensemble[group], 'Tuple Axis Dimensions', f'x={len(self._phases)+1}')
 add_attribute(cell_ensemble[group], 'DataArrayVersion', np.array([2],np.int32))
-add_attribute(cell_ensemble[group], 'TupleDimensions', np.array([len(self.phases) + 1],np.uint64))
+add_attribute(cell_ensemble[group], 'TupleDimensions', np.array([len(self._phases) + 1],np.uint64))
 for group in ['CrystalStructures','PhaseTypes']:
 add_attribute(cell_ensemble[group], 'ObjectType', 'DataArray<uint32_t>')
 add_attribute(cell_ensemble['PhaseName'], 'ObjectType', 'StringDataArray')

@@ -2124,7 +2154,7 @@ class Result:
 f_out['cell_to'].create_dataset('homogenization',data=mapping_homog.flatten())


-for inc in util.show_progress(self.visible['increments']):
+for inc in util.show_progress(self._visible['increments']):
 f_in.copy(inc,f_out,shallow=True)
 if mapping is None:
 for label in ['u_p','u_n']:

@@ -2136,14 +2166,14 @@ class Result:
 f_out[inc]['geometry'].create_dataset('u_n',data=u_n)


-for label in self.homogenizations:
+for label in self._homogenizations:
 f_in[inc]['homogenization'].copy(label,f_out[inc]['homogenization'],shallow=True)
-for label in self.phases:
+for label in self._phases:
 f_in[inc]['phase'].copy(label,f_out[inc]['phase'],shallow=True)

 for ty in ['phase','homogenization']:
-for label in self.visible[ty+'s']:
-for field in _match(self.visible['fields'],f_in['/'.join([inc,ty,label])].keys()):
+for label in self._visible[ty+'s']:
+for field in _match(self._visible['fields'],f_in['/'.join([inc,ty,label])].keys()):
 p = '/'.join([inc,ty,label,field])
 for out in _match(output,f_in[p].keys()):
 cp(f_in[p],f_out[p],out,None if mapping is None else mappings[ty][label.encode()])
@@ -223,13 +223,15 @@ def open_text(fname: _FileHandle,
 else:
 yield fname

+def time_stamp() -> str:
+"""Provide current time as formatted string."""
+return _datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')

 def execution_stamp(class_name: str,
 function_name: _Optional[str] = None) -> str:
 """Timestamp the execution of a (function within a) class."""
-now = _datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')
 _function_name = '' if function_name is None else f'.{function_name}'
-return f'damask.{class_name}{_function_name} v{_version} ({now})'
+return f'damask.{class_name}{_function_name} v{_version} ({time_stamp()})'


 def natural_sort(key: str) -> _List[_Union[int, str]]:
@@ -78,7 +78,8 @@ class TestResult:


 def test_view_all(self,default):
-a = default.view(increments=True).get('F')
+default = Result(default.fname)
+a = default.view_all().get('F')

 assert dict_equal(a,default.view(increments='*').get('F'))
 assert dict_equal(a,default.view(increments=default.increments_in_range(0,np.iinfo(int).max)).get('F'))

@@ -95,7 +96,7 @@ class TestResult:
 label = 'increments' if what == 'times' else what

 assert n0.get('F') is n1.get('F') is None and \
-len(n0.visible[label]) == len(n1.visible[label]) == 0
+len(n0._visible[label]) == len(n1._visible[label]) == 0

 @pytest.mark.parametrize('what',['increments','times','phases','fields']) # ToDo: discuss homogenizations
 def test_view_more(self,default,what):

@@ -116,7 +117,7 @@ class TestResult:
 label = 'increments' if what == 'times' else what

 assert n0.get('F') is n1.get('F') is None and \
-len(n0.visible[label]) == len(n1.visible[label]) == 0
+len(n0._visible[label]) == len(n1._visible[label]) == 0

 def test_view_invalid_incstimes(self,default):
 with pytest.raises(ValueError):

@@ -126,7 +127,23 @@ class TestResult:
 @pytest.mark.parametrize('sign',[+1,-1])
 def test_view_approxtimes(self,default,inc,sign):
 eps = sign*1e-3
-assert [default.increments[inc]] == default.view(times=default.times[inc]+eps).visible['increments']
+times = list(default._times.values())
+assert [default._increments[inc]] == default.view(times=times[inc]+eps)._visible['increments']
+
+def test_getters(self,default):
+file_layout = default.get('non-existing',prune=False,flatten=False)
+for i in default.increments:
+increment = file_layout[f'increment_{i}']
+fields = []
+for p in default.phases:
+phase = increment['phase'][p]
+for f in default.fields:
+fields.append(phase[f])
+for h in default.homogenizations:
+homogenization = increment['homogenization'][h]
+for f in default.fields:
+fields.append(homogenization[f])
+assert len(fields) > 0

 def test_add_invalid(self,default):
 default.add_absolute('xxxx')

@@ -470,7 +487,7 @@ class TestResult:
 assert np.array_equal(dset,cur[path])
 else:
 c = [_.decode() for _ in cur[path]]
-r = ['Unknown Phase Type'] + result.phases
+r = ['Unknown Phase Type'] + result._phases
 assert c == r
 grp = str(path).rpartition('/')[0]
 for attr in ref[grp].attrs:

@@ -654,8 +671,8 @@ class TestResult:
 'check_compile_job1.hdf5',])
 def test_export_DADF5(self,res_path,tmp_path,fname):
 r = Result(res_path/fname)
-r = r.view(phases = random.sample(r.phases,1))
-r = r.view(increments = random.sample(r.increments,np.random.randint(1,len(r.increments))))
+r = r.view(phases = random.sample(r._phases,1))
+r = r.view(increments = random.sample(r._increments,np.random.randint(1,len(r._increments))))
 r.export_DADF5(tmp_path/fname)
 r_exp = Result(tmp_path/fname)
 assert str(r.get()) == str(r_exp.get())