Merge branch 'keyword-view' into 'development'
Use keywords instead of strings to control the view.

Closes #133

See merge request damask/DAMASK!484
Commit 2ea2930638
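In short: selection and write-protection of a damask.Result are now controlled through keyword arguments of view()/view_more()/view_less() instead of the positional what/datasets strings, and view(protected=...) replaces modification_enable()/modification_disable(). A minimal usage sketch of the change ('my_file.hdf5' is the placeholder file name used in the docstrings of the diff below):

    import damask                               # assumes a DAMASK installation that includes this merge

    r = damask.Result('my_file.hdf5')           # placeholder file name from the docstrings

    r_last = r.view('increments',-1)            # old, string-based call (still works, now deprecated)
    r_last = r.view(increments=-1)              # new, keyword-based call

    r_unprotected = r.view(protected=False)     # replaces r.modification_enable()
    r_protected   = r.view(protected=True)      # replaces r.modification_disable()
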
@@ -197,7 +197,7 @@ class Grid:
             Grid-based geometry from file.
 
         """
-        warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0', DeprecationWarning,2)
+        warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.0.0', DeprecationWarning,2)
         try:
             f = open(fname)
         except TypeError:

@@ -629,7 +629,7 @@ class Grid:
             Compress geometry with 'x of y' and 'a to b'.
 
         """
-        warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0', DeprecationWarning,2)
+        warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.0.0', DeprecationWarning,2)
         header = [f'{len(self.comments)+4} header'] + self.comments \
                + ['grid a {} b {} c {}'.format(*self.cells),
                   'size x {} y {} z {}'.format(*self.size),

@@ -4,6 +4,7 @@ import fnmatch
 import os
 import copy
 import datetime
+import warnings
 import xml.etree.ElementTree as ET
 import xml.dom.minidom
 from pathlib import Path

@@ -27,6 +28,20 @@ h5py3 = h5py.__version__[0] == '3'
 
 chunk_size = 1024**2//8        # for compression in HDF5
 
+def _view_transition(what,datasets,increments,times,phases,homogenizations,fields):
+    if (datasets is not None and what is None) or (what is not None and datasets is None):
+        raise ValueError('"what" and "datasets" need to be used as a pair')
+    if datasets is not None or what is not None:
+        warnings.warn('Arguments "what" and "datasets" will be removed in DAMASK v3.0.0-alpha7', DeprecationWarning,2)
+        return what,datasets
+    if sum(1 for _ in filter(None.__ne__, [increments,times,phases,homogenizations,fields])) > 1:
+        raise ValueError('Only one out of "increments", "times", "phases", "homogenizations", and "fields" can be used')
+    else:
+        if increments is not None: return "increments", increments
+        if times is not None: return "times", times
+        if phases is not None: return "phases", phases
+        if homogenizations is not None: return "homogenizations", homogenizations
+        if fields is not None: return "fields", fields
 
 def _read(dataset):
     """Read a dataset and its metadata into a numpy.ndarray."""
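The dispatch performed by the new _view_transition helper can be demonstrated in isolation; the sketch below re-types the function from the hunk above and adds two illustrative calls ('Aluminum' is a made-up phase name):

    import warnings

    def _view_transition(what,datasets,increments,times,phases,homogenizations,fields):
        # legacy pair must be given together ...
        if (datasets is not None and what is None) or (what is not None and datasets is None):
            raise ValueError('"what" and "datasets" need to be used as a pair')
        # ... and still works, but triggers a DeprecationWarning
        if datasets is not None or what is not None:
            warnings.warn('Arguments "what" and "datasets" will be removed in DAMASK v3.0.0-alpha7',
                          DeprecationWarning,2)
            return what,datasets
        # at most one keyword selector may be used per call
        if sum(1 for _ in filter(None.__ne__, [increments,times,phases,homogenizations,fields])) > 1:
            raise ValueError('Only one out of "increments", "times", "phases", "homogenizations", and "fields" can be used')
        if increments      is not None: return 'increments',      increments
        if times           is not None: return 'times',           times
        if phases          is not None: return 'phases',          phases
        if homogenizations is not None: return 'homogenizations', homogenizations
        if fields          is not None: return 'fields',          fields
        # all arguments None: no selection change requested (view() then only touches 'protected')

    # keyword selection is translated into the legacy (what, datasets) pair:
    assert _view_transition(None,None,[0,-1],None,None,None,None) == ('increments',[0,-1])
    # the legacy pair is passed through unchanged (with a deprecation warning):
    assert _view_transition('phases','Aluminum',None,None,None,None,None) == ('phases','Aluminum')
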
@@ -79,7 +94,7 @@ class Result:
     >>> r.add_Cauchy()
     >>> r.add_equivalent_Mises('sigma')
     >>> r.export_VTK()
-    >>> r_last = r.view('increments',-1)
+    >>> r_last = r.view(increments=-1)
     >>> sigma_vM_last = r_last.get('sigma_vM')
 
     """

@@ -141,7 +156,7 @@ class Result:
 
         self.fname = Path(fname).absolute()
 
-        self._allow_modification = False
+        self._protected = True
 
 
     def __copy__(self):

@@ -155,10 +170,10 @@ class Result:
         """Show summary of file content."""
         visible_increments = self.visible['increments']
 
-        first = self.view('increments',visible_increments[0:1]).list_data()
+        first = self.view(increments=visible_increments[0:1]).list_data()
 
         last  = '' if len(visible_increments) < 2 else \
-                self.view('increments',visible_increments[-1:]).list_data()
+                self.view(increments=visible_increments[-1:]).list_data()
 
         in_between = '' if len(visible_increments) < 3 else \
                      ''.join([f'\n{inc}\n  ...\n' for inc in visible_increments[1:-1]])
@@ -231,36 +246,6 @@ class Result:
         return dup
 
 
-    def modification_enable(self):
-        """
-        Allow modification of existing data.
-
-        Returns
-        -------
-        modified_view : damask.Result
-            View without write-protection of existing data.
-
-        """
-        print(util.warn('Warning: Modification of existing datasets allowed!'))
-        dup = self.copy()
-        dup._allow_modification = True
-        return dup
-
-    def modification_disable(self):
-        """
-        Prevent modification of existing data (default case).
-
-        Returns
-        -------
-        modified_view : damask.Result
-            View with write-protection of existing data.
-
-        """
-        dup = self.copy()
-        dup._allow_modification = False
-        return dup
-
-
     def increments_in_range(self,start,end):
         """
         Get all increments within a given range.

@@ -285,7 +270,6 @@ class Result:
                 selected.append(self.increments[i])
         return selected
 
-
     def times_in_range(self,start,end):
         """
         Get all increments within a given time range.
@@ -310,17 +294,38 @@ class Result:
         return selected
 
 
-    def view(self,what,datasets):
+    def view(self,what=None,datasets=None,*,
+             increments=None,
+             times=None,
+             phases=None,
+             homogenizations=None,
+             fields=None,
+             protected=None):
         """
         Set view.
 
+        Wildcard matching with '?' and '*' is supported.
+        True is equivalent to '*', False is equivalent to [].
+
         Parameters
         ----------
         what : {'increments', 'times', 'phases', 'homogenizations', 'fields'}
-            Attribute to change.
+            Attribute to change. DEPRECATED.
         datasets : (list of) int (for increments), (list of) float (for times), (list of) str, or bool
-            Name of datasets; supports '?' and '*' wildcards.
+            Name of datasets; supports '?' and '*' wildcards. DEPRECATED.
             True is equivalent to '*', False is equivalent to [].
+        increments: (list of) int, (list of) str, or bool, optional.
+            Number(s) of increments to select.
+        times: (list of) float, (list of) str, or bool, optional.
+            Simulation time(s) of increments to select.
+        phases: (list of) str, or bool, optional.
+            Name(s) of phases to select.
+        homogenizations: (list of) str, or bool, optional.
+            Name(s) of homogenizations to select.
+        fields: (list of) str, or bool, optional.
+            Name(s) of fields to select.
+        protected: bool, optional.
+            Protection status of existing data.
 
         Returns
         -------
@@ -333,29 +338,61 @@ class Result:
 
         >>> import damask
         >>> r = damask.Result('my_file.hdf5')
-        >>> r_first = r.view('increment',0)
+        >>> r_first = r.view(increment=0)
 
         Get a view that shows all results between simulation times of 10 to 40:
 
         >>> import damask
         >>> r = damask.Result('my_file.hdf5')
-        >>> r_t10to40 = r.view('times',r.times_in_range(10.0,40.0))
+        >>> r_t10to40 = r.view(times=r.times_in_range(10.0,40.0))
 
         """
-        return self._manage_view('set',what,datasets)
+        v = _view_transition(what,datasets,increments,times,phases,homogenizations,fields)
+        if protected is not None:
+            if v is None:
+                dup = self.copy()
+            else:
+                what_,datasets_ = v
+                dup = self._manage_view('set',what_,datasets_)
+            if not protected:
+                print(util.warn('Warning: Modification of existing datasets allowed!'))
+            dup._protected = protected
+        else:
+            what_,datasets_ = v
+            dup = self._manage_view('set',what_,datasets_)
+
+        return dup
 
 
-    def view_more(self,what,datasets):
+    def view_more(self,what=None,datasets=None,*,
+                  increments=None,
+                  times=None,
+                  phases=None,
+                  homogenizations=None,
+                  fields=None):
         """
         Add to view.
 
+        Wildcard matching with '?' and '*' is supported.
+        True is equivalent to '*', False is equivalent to [].
+
         Parameters
         ----------
         what : {'increments', 'times', 'phases', 'homogenizations', 'fields'}
-            Attribute to change.
+            Attribute to change. DEPRECATED.
         datasets : (list of) int (for increments), (list of) float (for times), (list of) str, or bool
-            Name of datasets; supports '?' and '*' wildcards.
+            Name of datasets; supports '?' and '*' wildcards. DEPRECATED.
             True is equivalent to '*', False is equivalent to [].
+        increments: (list of) int, (list of) str, or bool, optional.
+            Number(s) of increments to select.
+        times: (list of) float, (list of) str, or bool, optional.
+            Simulation time(s) of increments to select.
+        phases: (list of) str, or bool, optional.
+            Name(s) of phases to select.
+        homogenizations: (list of) str, or bool, optional.
+            Name(s) of homogenizations to select.
+        fields: (list of) str, or bool, optional.
+            Name(s) of fields to select.
 
         Returns
         -------
@@ -367,25 +404,44 @@ class Result:
         Get a view that shows only results from first and last increment:
 
         >>> import damask
-        >>> r_empty = damask.Result('my_file.hdf5').view('increments',False)
-        >>> r_first = r_empty.view_more('increments',0)
-        >>> r_first_and_last = r.first.view_more('increments',-1)
+        >>> r_empty = damask.Result('my_file.hdf5').view(increments=False)
+        >>> r_first = r_empty.view_more(increments=0)
+        >>> r_first_and_last = r.first.view_more(increments=-1)
 
         """
-        return self._manage_view('add',what,datasets)
+        what_, datasets_ = _view_transition(what,datasets,increments,times,phases,homogenizations,fields)
+        return self._manage_view('add',what_,datasets_)
 
 
-    def view_less(self,what,datasets):
+    def view_less(self,what=None,datasets=None,*,
+                  increments=None,
+                  times=None,
+                  phases=None,
+                  homogenizations=None,
+                  fields=None):
         """
         Remove from view.
 
+        Wildcard matching with '?' and '*' is supported.
+        True is equivalent to '*', False is equivalent to [].
+
         Parameters
         ----------
         what : {'increments', 'times', 'phases', 'homogenizations', 'fields'}
-            Attribute to change.
+            Attribute to change. DEPRECATED.
         datasets : (list of) int (for increments), (list of) float (for times), (list of) str, or bool
-            Name of datasets; supports '?' and '*' wildcards.
+            Name of datasets; supports '?' and '*' wildcards. DEPRECATED.
             True is equivalent to '*', False is equivalent to [].
+        increments: (list of) int, (list of) str, or bool, optional.
+            Number(s) of increments to select.
+        times: (list of) float, (list of) str, or bool, optional.
+            Simulation time(s) of increments to select.
+        phases: (list of) str, or bool, optional.
+            Name(s) of phases to select.
+        homogenizations: (list of) str, or bool, optional.
+            Name(s) of homogenizations to select.
+        fields: (list of) str, or bool, optional.
+            Name(s) of fields to select.
 
         Returns
         -------
@@ -398,10 +454,11 @@ class Result:
 
         >>> import damask
         >>> r_all = damask.Result('my_file.hdf5')
-        >>> r_deformed = r_all.view_less('increments',0)
+        >>> r_deformed = r_all.view_less(increments=0)
 
         """
-        return self._manage_view('del',what,datasets)
+        what_, datasets_ = _view_transition(what,datasets,increments,times,phases,homogenizations,fields)
+        return self._manage_view('del',what_,datasets_)
 
 
     def rename(self,name_src,name_dst):
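Taken together, view(), view_more() and view_less() now compose through the same keyword selectors; a short sketch based on the docstring examples above (note that the chained docstring example reads r.first.view_more(...), which is presumably meant to be r_first.view_more(...) as written here):

    import damask                                            # assumes DAMASK with this merge

    r = damask.Result('my_file.hdf5')                        # placeholder file name from the docstrings

    r_empty          = r.view(increments=False)              # False is equivalent to [] (nothing selected)
    r_first          = r_empty.view_more(increments=0)       # add increment 0
    r_first_and_last = r_first.view_more(increments=-1)      # add the last increment
    r_deformed       = r.view_less(increments=0)             # everything except increment 0
    r_t10to40        = r.view(times=r.times_in_range(10.0,40.0))   # select by simulation time instead
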
@@ -424,11 +481,11 @@ class Result:
 
         >>> import damask
         >>> r = damask.Result('my_file.hdf5')
-        >>> r_unprotected = r.modification_enable()
+        >>> r_unprotected = r.view(protected=False)
         >>> r_unprotected.rename('F','def_grad')
 
         """
-        if not self._allow_modification:
+        if self._protected:
             raise PermissionError('Renaming datasets not permitted')
 
         with h5py.File(self.fname,'a') as f:

@@ -463,11 +520,11 @@ class Result:
 
         >>> import damask
         >>> r = damask.Result('my_file.hdf5')
-        >>> r_unprotected = r.modification_enable()
+        >>> r_unprotected = r.view(protected=False)
         >>> r_unprotected.remove('F')
 
         """
-        if not self._allow_modification:
+        if self._protected:
             raise PermissionError('Removing datasets not permitted')
 
         with h5py.File(self.fname,'a') as f:
@@ -1358,7 +1415,7 @@ class Result:
         lock.acquire()
         with h5py.File(self.fname, 'a') as f:
             try:
-                if self._allow_modification and '/'.join([group,result['label']]) in f:
+                if not self._protected and '/'.join([group,result['label']]) in f:
                     dataset = f['/'.join([group,result['label']])]
                     dataset[...] = result['data']
                     dataset.attrs['overwritten'] = True

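The write-protection workflow of the removed modification_enable()/modification_disable() methods now goes through the same view() call; a sketch based on the rename/remove docstrings above:

    import damask                                       # assumes DAMASK with this merge

    r = damask.Result('my_file.hdf5')                   # placeholder file name from the docstrings

    # r.rename('F','def_grad')                          # would raise PermissionError: views are protected by default

    r_unprotected = r.view(protected=False)             # prints a warning that existing datasets may be modified
    r_unprotected.rename('F','def_grad')                # now permitted
    r_unprotected.remove('def_grad')                    # removing existing datasets is likewise permitted

    r_protected = r_unprotected.view(protected=True)    # restores the default write-protection
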
@@ -25,7 +25,7 @@ def default(tmp_path,ref_path):
     fname = '12grains6x7x8_tensionY.hdf5'
     shutil.copy(ref_path/fname,tmp_path)
     f = Result(tmp_path/fname)
-    return f.view('times',20.0)
+    return f.view(times=20.0)
 
 @pytest.fixture
 def single_phase(tmp_path,ref_path):

@@ -58,14 +58,14 @@ class TestResult:
 
 
     def test_view_all(self,default):
-        a = default.view('increments',True).get('F')
+        a = default.view(increments=True).get('F')
 
-        assert dict_equal(a,default.view('increments','*').get('F'))
-        assert dict_equal(a,default.view('increments',default.increments_in_range(0,np.iinfo(int).max)).get('F'))
+        assert dict_equal(a,default.view(increments='*').get('F'))
+        assert dict_equal(a,default.view(increments=default.increments_in_range(0,np.iinfo(int).max)).get('F'))
 
-        assert dict_equal(a,default.view('times',True).get('F'))
-        assert dict_equal(a,default.view('times','*').get('F'))
-        assert dict_equal(a,default.view('times',default.times_in_range(0.0,np.inf)).get('F'))
+        assert dict_equal(a,default.view(times=True).get('F'))
+        assert dict_equal(a,default.view(times='*').get('F'))
+        assert dict_equal(a,default.view(times=default.times_in_range(0.0,np.inf)).get('F'))
 
     @pytest.mark.parametrize('what',['increments','times','phases','fields'])  # ToDo: discuss homogenizations
     def test_view_none(self,default,what):

@@ -314,7 +314,7 @@ class TestResult:
 
     @pytest.mark.parametrize('overwrite',['off','on'])
     def test_add_overwrite(self,default,overwrite):
-        last = default.view('increments',-1)
+        last = default.view(increments=-1)
 
         last.add_stress_Cauchy()
 

@@ -322,9 +322,9 @@ class TestResult:
         created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')
 
         if overwrite == 'on':
-            last = last.modification_enable()
+            last = last.view(protected=False)
         else:
-            last = last.modification_disable()
+            last = last.view(protected=True)
 
         time.sleep(2.)
         try:
@@ -344,10 +344,10 @@ class TestResult:
     def test_rename(self,default,allowed):
         if allowed == 'on':
             F = default.place('F')
-            default = default.modification_enable()
+            default = default.view(protected=False)
             default.rename('F','new_name')
             assert np.all(F == default.place('new_name'))
-            default = default.modification_disable()
+            default = default.view(protected=True)
 
             with pytest.raises(PermissionError):
                 default.rename('P','another_new_name')

@@ -355,7 +355,7 @@ class TestResult:
     @pytest.mark.parametrize('allowed',['off','on'])
     def test_remove(self,default,allowed):
         if allowed == 'on':
-            unsafe = default.modification_enable()
+            unsafe = default.view(protected=False)
             unsafe.remove('F')
             assert unsafe.get('F') is None
         else:

@@ -377,7 +377,7 @@ class TestResult:
     @pytest.mark.parametrize('inc',[4,0],ids=range(2))
     @pytest.mark.xfail(int(vtk.vtkVersion.GetVTKVersion().split('.')[0])<9, reason='missing "Direction" attribute')
     def test_vtk(self,request,tmp_path,ref_path,update,patch_execution_stamp,patch_datetime_now,output,fname,inc):
-        result = Result(ref_path/fname).view('increments',inc)
+        result = Result(ref_path/fname).view(increments=inc)
         os.chdir(tmp_path)
         result.export_VTK(output,parallel=False)
         fname = fname.split('.')[0]+f'_inc{(inc if type(inc) == int else inc[0]):0>2}.vti'

@@ -400,7 +400,7 @@ class TestResult:
         result.export_VTK(output,mode)
 
     def test_marc_coordinates(self,ref_path):
-        result = Result(ref_path/'check_compile_job1.hdf5').view('increments',-1)
+        result = Result(ref_path/'check_compile_job1.hdf5').view(increments=-1)
         c_n = result.coordinates0_node + result.get('u_n')
         c_p = result.coordinates0_point + result.get('u_p')
         assert len(c_n) > len(c_p)

@@ -440,7 +440,7 @@ class TestResult:
         dim_xdmf    = reader_xdmf.GetOutput().GetDimensions()
         bounds_xdmf = reader_xdmf.GetOutput().GetBounds()
 
-        single_phase.view('increments',0).export_VTK(parallel=False)
+        single_phase.view(increments=0).export_VTK(parallel=False)
         fname = os.path.splitext(os.path.basename(single_phase.fname))[0]+'_inc00.vti'
         reader_vti = vtk.vtkXMLImageDataReader()
         reader_vti.SetFileName(fname)