Merge branch 'drop-old-DADF5-support' into 'development'

Improvements to damask.Result

See merge request damask/DAMASK!373
Philip Eisenlohr 2021-04-27 00:33:41 +00:00
commit e9cfb2f968
13 changed files with 108 additions and 72 deletions

View File

@@ -34,7 +34,7 @@ def _read(dataset):
return np.array(dataset,dtype=dtype)
def _match(requested,existing):
"""Find matches among two sets of names."""
"""Find matches among two sets of labels."""
def flatten_list(list_of_lists):
return [e for e_ in list_of_lists for e in e_]
@@ -98,35 +98,26 @@ class Result:
self.version_major = f.attrs['DADF5_version_major']
self.version_minor = f.attrs['DADF5_version_minor']
if self.version_major != 0 or not 7 <= self.version_minor <= 12:
if self.version_major != 0 or not 12 <= self.version_minor <= 13:
raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
self.structured = 'grid' in f['geometry'].attrs.keys() or \
'cells' in f['geometry'].attrs.keys()
self.structured = 'cells' in f['geometry'].attrs.keys()
if self.structured:
try:
self.cells = f['geometry'].attrs['cells']
except KeyError:
self.cells = f['geometry'].attrs['grid']
self.size = f['geometry'].attrs['size']
self.origin = f['geometry'].attrs['origin']
r=re.compile('inc[0-9]+' if self.version_minor < 12 else 'increment_[0-9]+')
r=re.compile('increment_[0-9]+')
self.increments = sorted([i for i in f.keys() if r.match(i)],key=util.natural_sort)
self.times = [round(f[i].attrs['time/s' if self.version_minor < 12 else
't/s'],12) for i in self.increments]
self.times = [round(f[i].attrs['t/s'],12) for i in self.increments]
grp = 'mapping' if self.version_minor < 12 else 'cell_to'
self.N_materialpoints, self.N_constituents = np.shape(f['cell_to/phase'])
self.N_materialpoints, self.N_constituents = np.shape(f[f'{grp}/phase'])
self.homogenizations = [m.decode() for m in np.unique(f[f'{grp}/homogenization']
['Name' if self.version_minor < 12 else 'label'])]
self.homogenizations = sorted(self.homogenizations,key=util.natural_sort)
self.phases = [c.decode() for c in np.unique(f[f'{grp}/phase']
['Name' if self.version_minor < 12 else 'label'])]
self.phases = sorted(self.phases,key=util.natural_sort)
self.homogenization = f['cell_to/homogenization']['label'].astype('str')
self.homogenizations = sorted(np.unique(self.homogenization),key=util.natural_sort)
self.phase = f['cell_to/phase']['label'].astype('str')
self.phases = sorted(np.unique(self.phase),key=util.natural_sort)
self.fields = []
for c in self.phases:
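For reference, the version attributes checked above can be inspected directly with h5py before constructing a Result; a minimal sketch, assuming a hypothetical file name 'my_file.hdf5' (borrowed from the docstring examples further down):

    import h5py

    with h5py.File('my_file.hdf5','r') as f:
        major = f.attrs['DADF5_version_major']
        minor = f.attrs['DADF5_version_minor']
    print(f'DADF5 version {major}.{minor}')   # Result now accepts only 0.12 and 0.13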
@@ -196,11 +187,10 @@ class Result:
choice = list(datasets).copy() if hasattr(datasets,'__iter__') and not isinstance(datasets,str) else \
[datasets]
inc = 'inc' if self.version_minor < 12 else 'increment_' # compatibility hack
if what == 'increments':
choice = [c if isinstance(c,str) and c.startswith(inc) else
f'{inc}{c}' for c in choice]
if datasets == -1: choice = [self.increments[-1]]
choice = [c if isinstance(c,str) and c.startswith('increment_') else
self.increments[c] if isinstance(c,int) and c<0 else
f'increment_{c}' for c in choice]
elif what == 'times':
what = 'increments'
if choice == ['*']:
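With the rewritten comprehension, integer increments, including negative ones counted from the end, can be mixed with label strings. A hedged usage sketch (file name hypothetical):

    import damask

    r = damask.Result('my_file.hdf5')
    r.view('increments',-1)                   # last increment, via self.increments[-1]
    r.view('increments',0)                    # turned into 'increment_0'
    r.view('increments',[0,'increment_4'])    # strings starting with 'increment_' pass through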
@@ -279,13 +269,11 @@ class Result:
Increment labels of all increments within the given bounds.
"""
# compatibility hack
ln = 3 if self.version_minor < 12 else 10
selected = []
for i,inc in enumerate([int(i[ln:]) for i in self.increments]):
s,e = map(lambda x: int(x[ln:] if isinstance(x,str) and x.startswith('inc') else x), (start,end))
for i,inc in enumerate([int(i[10:]) for i in self.increments]):
s,e = map(lambda x: int(x[10:] if isinstance(x,str) and x.startswith('inc') else x), (start,end))
if s <= inc <= e:
selected.append(int(self.increments[i].split('_')[1]))
selected.append(self.increments[i])
return selected
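Note that the loop now collects the increment labels themselves rather than their numbers; a sketch of the changed return value (bounds hypothetical):

    r.increments_in_range(0,2)    # formerly e.g. [0, 1, 2], now ['increment_0', 'increment_1', 'increment_2']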
@@ -412,8 +400,7 @@ class Result:
Rename/move datasets (within the same group/folder).
This operation is discouraged because the history of the
data becomes untracable and scientific integrity cannot be
ensured.
data becomes untraceable and data integrity is not ensured.
Parameters
----------
@@ -454,8 +441,7 @@ class Result:
Remove/delete datasets.
This operation is discouraged because the history of the
data becomes untracable and scientific integrity cannot be
ensured.
data becomes untraceable and data integrity is not ensured.
Parameters
----------
@@ -486,9 +472,6 @@ class Result:
def list_data(self):
"""Return information on all active datasets in the file."""
# compatibility hack
de = 'Description' if self.version_minor < 12 else 'description'
un = 'Unit' if self.version_minor < 12 else 'unit'
msg = ''
with h5py.File(self.fname,'r') as f:
for inc in self.visible['increments']:
@@ -501,10 +484,10 @@ class Result:
msg = ' '.join([msg,f'{field}\n'])
for d in f['/'.join([inc,ty,label,field])].keys():
dataset = f['/'.join([inc,ty,label,field,d])]
unit = f' / {dataset.attrs[un]}' if h5py3 else \
f' / {dataset.attrs[un].decode()}'
description = dataset.attrs[de] if h5py3 else \
dataset.attrs[de].decode()
unit = f' / {dataset.attrs["unit"]}' if h5py3 else \
f' / {dataset.attrs["unit"].decode()}'
description = dataset.attrs['description'] if h5py3 else \
dataset.attrs['description'].decode()
msg = ' '.join([msg,f'{d}{unit}: {description}\n'])
return msg
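Usage is unchanged by the simplification above; for example (file name hypothetical):

    print(damask.Result('my_file.hdf5').list_data())    # one 'name / unit: description' line per dataset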
@@ -522,7 +505,7 @@ class Result:
return grid_filters.coordinates0_point(self.cells,self.size,self.origin).reshape(-1,3,order='F')
else:
with h5py.File(self.fname,'r') as f:
return f['geometry/x_c'][()]
return f['geometry/x_p'][()]
@property
def coordinates0_node(self):
@@ -611,7 +594,7 @@ class Result:
>>> r = damask.Result('my_file.hdf5')
>>> r.add_calculation('np.sum(#rho_mob#,axis=1)','rho_mob_total',
... '1/m²','total mobile dislocation density')
>>> r.add_calculation(''np.sum(#rho_dip#,axis=1)',rho_dip_total',
>>> r.add_calculation('np.sum(#rho_dip#,axis=1)','rho_dip_total',
... '1/m²','total dislocation dipole density')
>>> r.add_calculation('#rho_dip_total#+#rho_mob_total#','rho_total',
... '1/m²','total dislocation density')
@@ -968,6 +951,13 @@ class Result:
F : str, optional
Name of deformation gradient dataset. Defaults to 'F'.
Notes
-----
The definition of the second Piola-Kirchhoff stress (S = [F^-1 P]_sym)
follows the standard definition in nonlinear continuum mechanics.
As such, no intermediate configuration, for instance that reached by F_p,
is taken into account.
"""
self._add_generic_pointwise(self._add_stress_second_Piola_Kirchhoff,{'P':P,'F':F})
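A minimal numpy sketch of the definition quoted in the Notes, not the library routine itself; P and F are assumed to be stacks of 3x3 arrays, named as in the parameters above:

    import numpy as np

    def second_PK(P,F):
        S = np.matmul(np.linalg.inv(F),P)      # F^-1 P
        return 0.5*(S+np.swapaxes(S,-1,-2))    # symmetric part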
@@ -1262,7 +1252,6 @@ class Result:
Defaults to '*', in which case all datasets are considered.
"""
u = 'Unit' if self.version_minor < 12 else 'unit' # compatibility hack
if self.N_constituents != 1 or len(self.phases) != 1 or not self.structured:
raise TypeError('XDMF output requires structured grid with single phase and single constituent.')
@@ -1343,7 +1332,8 @@ class Result:
shape = f[name].shape[1:]
dtype = f[name].dtype
unit = f[name].attrs[u] if h5py3 else f[name].attrs[u].decode()
unit = f[name].attrs['unit'] if h5py3 else \
f[name].attrs['unit'].decode()
attributes.append(ET.SubElement(grid, 'Attribute'))
attributes[-1].attrib = {'Name': '/'.join([ty,field,out])+f' / {unit}',
@@ -1362,23 +1352,20 @@ class Result:
def _mappings(self):
grp = 'mapping' if self.version_minor < 12 else 'cell_to' # compatibility hack
name = 'Name' if self.version_minor < 12 else 'label' # compatibility hack
member = 'member' if self.version_minor < 12 else 'entry' # compatibility hack
"""Mappings to place data spatially."""
with h5py.File(self.fname,'r') as f:
at_cell_ph = []
in_data_ph = []
for c in range(self.N_constituents):
at_cell_ph.append({label: np.where(f['/'.join([grp,'phase'])][:,c][name] == label.encode())[0] \
at_cell_ph.append({label: np.where(self.phase[:,c] == label)[0] \
for label in self.visible['phases']})
in_data_ph.append({label: f['/'.join([grp,'phase'])][member][at_cell_ph[c][label]][:,c] \
in_data_ph.append({label: f['/'.join(['cell_to','phase'])]['entry'][at_cell_ph[c][label]][:,c] \
for label in self.visible['phases']})
at_cell_ho = {label: np.where(f['/'.join([grp,'homogenization'])][:][name] == label.encode())[0] \
at_cell_ho = {label: np.where(self.homogenization[:] == label)[0] \
for label in self.visible['homogenizations']}
in_data_ho = {label: f['/'.join([grp,'homogenization'])][member][at_cell_ho[label]] \
in_data_ho = {label: f['/'.join(['cell_to','homogenization'])]['entry'][at_cell_ho[label]] \
for label in self.visible['homogenizations']}
return at_cell_ph,in_data_ph,at_cell_ho,in_data_ho
@@ -1420,9 +1407,9 @@ class Result:
v = self.geometry0
elif mode.lower()=='point':
v = VTK.from_poly_data(self.coordinates0_point)
v.set_comments(util.execution_stamp('Result','save_VTK'))
ln = 3 if self.version_minor < 12 else 10 # compatibility hack
N_digits = int(np.floor(np.log10(max(1,int(self.increments[-1][ln:])))))+1
N_digits = int(np.floor(np.log10(max(1,int(self.increments[-1][10:])))))+1
constituents_ = constituents if isinstance(constituents,Iterable) else \
(range(self.N_constituents) if constituents is None else [constituents])
@@ -1433,6 +1420,10 @@ class Result:
at_cell_ph,in_data_ph,at_cell_ho,in_data_ho = self._mappings()
with h5py.File(self.fname,'r') as f:
if self.version_minor >= 13:
creator = f.attrs['creator'] if h5py3 else f.attrs['creator'].decode()
created = f.attrs['created'] if h5py3 else f.attrs['created'].decode()
v.add_comments(f'{creator} ({created})')
for inc in util.show_progress(self.visible['increments']):
@@ -1466,7 +1457,7 @@ class Result:
for label,dataset in outs.items():
v.add(dataset,' / '.join(['/'.join([ty,field,label]),dataset.dtype.metadata['unit']]))
v.save(f'{self.fname.stem}_inc{inc[ln:].zfill(N_digits)}',parallel=parallel)
v.save(f'{self.fname.stem}_inc{inc[10:].zfill(N_digits)}',parallel=parallel)
def get(self,output='*',flatten=True,prune=True):
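The zero padding above is sized by the largest increment number, so the written files sort naturally; a hedged sketch of the resulting names (file name hypothetical):

    r = damask.Result('my_file.hdf5')
    r.save_VTK()    # one file per visible increment, e.g. my_file_inc00, ..., padded to the width of the last increment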

View File

@@ -265,10 +265,18 @@ class VTK:
raise ValueError('No label defined for numpy.ndarray')
N_data = data.shape[0]
data_ = np.where(data.mask,data.fill_value,data) if isinstance(data,np.ma.MaskedArray) else\
data
d = np_to_vtk((data_.astype(np.single) if data_.dtype in [np.double, np.longdouble] else
data_).reshape(N_data,-1),deep=True) # avoid large files
data_ = (data if not isinstance(data,np.ma.MaskedArray) else
np.where(data.mask,data.fill_value,data)).reshape(N_data,-1)
if data_.dtype in [np.double,np.longdouble]:
d = np_to_vtk(data_.astype(np.single),deep=True) # avoid large files
elif data_.dtype.type is np.str_:
d = vtk.vtkStringArray()
for s in np.squeeze(data_):
d.InsertNextValue(s)
else:
d = np_to_vtk(data_,deep=True)
d.SetName(label)
if N_data == N_points:
@@ -305,13 +313,23 @@ class VTK:
cell_data = self.vtk_data.GetCellData()
for a in range(cell_data.GetNumberOfArrays()):
if cell_data.GetArrayName(a) == label:
try:
return vtk_to_np(cell_data.GetArray(a))
except AttributeError:
vtk_array = cell_data.GetAbstractArray(a) # string array
point_data = self.vtk_data.GetPointData()
for a in range(point_data.GetNumberOfArrays()):
if point_data.GetArrayName(a) == label:
try:
return vtk_to_np(point_data.GetArray(a))
except AttributeError:
vtk_array = point_data.GetAbstractArray(a) # string array
try:
# string array
return np.array([vtk_array.GetValue(i) for i in range(vtk_array.GetNumberOfValues())]).astype(str)
except UnboundLocalError:
raise ValueError(f'Array "{label}" not found.')
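With the two changes above, string data round-trips through add/get; a hedged sketch using the from_poly_data constructor that appears earlier in this diff:

    import numpy as np
    from damask import VTK

    tags = np.array(['low','low','high','low','high'])
    v = VTK.from_poly_data(np.random.rand(5,3))
    v.add(tags,'tag')                     # stored as vtkStringArray
    assert (v.get('tag') == tags).all()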

Binary file not shown.

View File

@@ -1 +1 @@
3b83384def67552ab7dd211efc0d54fd
0f68c932b85aac1d30e03e05a16c4605

View File

@@ -1 +1 @@
c32c86ed50dbb39a93ca2a2ebe47d9cb
b206ef9e7a096586c7d71d58fc7278bd

View File

@@ -1 +1 @@
ead4f6fcaff174fddc041d701e54ac60
11bd422f0a6c78ee1d3c939b1fccf1ee

View File

@@ -1 +1 @@
bde8b728110c2c05a6a4740f7c5f9c06
541f423cfde8e2a98582491f7af3add5

View File

@@ -1 +1 @@
e09bfa9248283fc390003ad28d15d36e
82e309984cab644fd94f433d5ec24133

View File

@@ -1 +1 @@
3f21254164f96de8ee4a28249ae72cc6
f1f85bcdba23e3e4001512c1c6c4707a

View File

@@ -333,7 +333,7 @@ class TestResult:
@pytest.mark.parametrize('output',['F','*',['P']],ids=range(3))
@pytest.mark.parametrize('fname',['12grains6x7x8_tensionY.hdf5'],ids=range(1))
@pytest.mark.parametrize('inc',[4,0],ids=range(2))
def test_vtk(self,request,tmp_path,ref_path,update,output,fname,inc):
def test_vtk(self,request,tmp_path,ref_path,update,patch_execution_stamp,patch_datetime_now,output,fname,inc):
result = Result(ref_path/fname).view('increments',inc)
os.chdir(tmp_path)
result.save_VTK(output)
@@ -354,6 +354,19 @@ class TestResult:
with open((ref_path/'save_VTK'/request.node.name).with_suffix('.md5')) as f:
assert cur == f.read()
@pytest.mark.parametrize('mode',['point','cell'])
@pytest.mark.parametrize('output',[False,True])
def test_vtk_marc(self,tmp_path,ref_path,mode,output):
os.chdir(tmp_path)
result = Result(ref_path/'check_compile_job1.hdf5')
result.save_VTK(output,mode)
def test_marc_coordinates(self,ref_path):
result = Result(ref_path/'check_compile_job1.hdf5').view('increments',-1)
c_n = result.coordinates0_node + result.get('u_n')
c_p = result.coordinates0_point + result.get('u_p')
assert len(c_n) > len(c_p)
@pytest.mark.parametrize('mode',['point','cell'])
def test_vtk_mode(self,tmp_path,single_phase,mode):
os.chdir(tmp_path)

View File

@@ -135,6 +135,18 @@ class TestVTK:
with pytest.raises(TypeError):
default.add('invalid_type','valid')
@pytest.mark.parametrize('data_type,shape',[(float,(3,)),
(float,(3,3)),
(float,(1,)),
(int,(4,)),
(str,(1,))])
@pytest.mark.parametrize('N_values',[5*6*7,6*7*8])
def test_add_get(self,default,data_type,shape,N_values):
data = np.squeeze(np.random.randint(0,100,(N_values,)+shape)).astype(data_type)
default.add(data,'data')
assert (np.squeeze(data.reshape(N_values,-1)) == default.get('data')).all()
def test_add_masked(self,default):
data = np.random.rand(5*6*7,3)
masked = ma.MaskedArray(data,mask=data<.4,fill_value=42.)

View File

@@ -57,7 +57,7 @@ subroutine results_init(restart)
logical, intent(in) :: restart
character(len=pStringLen) :: commandLine
character(len=pPathLen) :: commandLine
print'(/,a)', ' <<<+- results init -+>>>'; flush(IO_STDOUT)
@@ -67,8 +67,10 @@ subroutine results_init(restart)
if(.not. restart) then
resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','w')
call results_addAttribute('DADF5_version_major',0)
call results_addAttribute('DADF5_version_minor',12)
call results_addAttribute('DAMASK_version',DAMASKVERSION)
call results_addAttribute('DADF5_version_minor',13)
call get_command_argument(0,commandLine)
call results_addAttribute('creator',trim(commandLine)//' '//DAMASKVERSION)
call results_addAttribute('created',now())
call get_command(commandLine)
call results_addAttribute('call',trim(commandLine))
call results_closeGroup(results_addGroup('cell_to'))
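On the Python side (cf. the save_VTK hunk above), the new root-level attributes can be read back with h5py; a minimal sketch, file name hypothetical:

    import h5py

    with h5py.File('my_file.hdf5','r') as f:
        if f.attrs['DADF5_version_minor'] >= 13:
            print(f.attrs['creator'])    # executable name plus DAMASK version
            print(f.attrs['created'])    # timestamp written by now()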