out of place behavior
parent fc409fcf08
commit 62c85db745
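
This commit makes the view handling of Result "out of place": view, view_more, view_less, allow_modification, and disallow_modification now return a modified copy instead of mutating the object, so views can be chained. A minimal usage sketch of the reworked API, using only calls exercised in the diff below (the result file name is hypothetical):

    from damask import Result

    r = Result('my_job.hdf5')                     # hypothetical result file

    # out of place: every call returns a restricted copy, r itself is untouched
    first = r.view('increments', 0)               # view limited to the first increment
    F = first.view_more('phases', '*').read('F')  # read data for that view only

    # the modification flag is handled the same way
    writable = r.allow_modification()             # returns a copy with overwriting enabled
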
@@ -2,6 +2,7 @@ import multiprocessing as mp
 import re
 import fnmatch
 import os
+import copy
 import datetime
 import xml.etree.ElementTree as ET
 import xml.dom.minidom
@@ -27,11 +28,13 @@ h5py3 = h5py.__version__[0] == '3'
 
 
 def _read(dataset):
+    """Read a dataset and its metadata into a numpy.ndarray."""
     metadata = {k:(v if h5py3 else v.decode()) for k,v in dataset.attrs.items()}
     dtype = np.dtype(dataset.dtype,metadata=metadata)
     return np.array(dataset,dtype=dtype)
 
 
 def _match(requested,existing):
+    """Find matches among two sets of labels"""
     def flatten_list(list_of_lists):
         return [e for e_ in list_of_lists for e in e_]
 
@@ -47,6 +50,7 @@ def _match(requested,existing):
                   key=util.natural_sort)
 
 def _empty(dataset,N_materialpoints,fill_float,fill_int):
+    """Create empty numpy.ma.MaskedArray."""
    return ma.array(np.empty((N_materialpoints,)+dataset.shape[1:],dataset.dtype),
                    fill_value = fill_float if dataset.dtype in np.sctypes['float'] else fill_int,
                    mask = True)
@@ -123,17 +127,21 @@ class Result:
         self._allow_modification = False
 
 
+    def __copy__(self):
+        """Create deep copy."""
+        return copy.deepcopy(self)
+
+    copy = __copy__
+
+
     def __repr__(self):
         """Show summary of file content."""
         visible_increments = self.visible['increments']
 
-        self.view('increments',visible_increments[0:1])
-        first = self.list_data()
+        first = self.view('increments',visible_increments[0:1]).list_data()
 
-        self.view('increments',visible_increments[-1:])
-        last = '' if len(visible_increments) < 2 else self.list_data()
+        last = '' if len(visible_increments) < 2 else \
+               self.view('increments',visible_increments[-1:]).list_data()
 
-        self.view('increments',visible_increments)
-
         in_between = '' if len(visible_increments) < 3 else \
                      ''.join([f'\n{inc}\n ...\n' for inc in visible_increments[1:-1]])
@@ -186,24 +194,31 @@ class Result:
         valid = _match(choice,getattr(self,what))
         existing = set(self.visible[what])
 
+        dup = self.copy()
         if action == 'set':
-            self.visible[what] = sorted(set(valid), key=util.natural_sort)
+            dup.visible[what] = sorted(set(valid), key=util.natural_sort)
         elif action == 'add':
             add = existing.union(valid)
-            self.visible[what] = sorted(add, key=util.natural_sort)
+            dup.visible[what] = sorted(add, key=util.natural_sort)
         elif action == 'del':
             diff = existing.difference(valid)
-            self.visible[what] = sorted(diff, key=util.natural_sort)
+            dup.visible[what] = sorted(diff, key=util.natural_sort)
+
+        return dup
 
 
     def allow_modification(self):
         """Allow to overwrite existing data."""
         print(util.warn('Warning: Modification of existing datasets allowed!'))
-        self._allow_modification = True
+        dup = self.copy()
+        dup._allow_modification = True
+        return dup
 
     def disallow_modification(self):
         """Disallow to overwrite existing data (default case)."""
-        self._allow_modification = False
+        dup = self.copy()
+        dup._allow_modification = False
+        return dup
 
 
     def increments_in_range(self,start,end):
@@ -247,28 +262,6 @@ class Result:
         return selected
 
 
-    def iterate(self,what):
-        """
-        Iterate over visible items and view them independently.
-
-        Parameters
-        ----------
-        what : str
-            Attribute to change (must be from self.visible).
-
-        """
-        datasets = self.visible[what]
-        last_view = datasets.copy()
-        for dataset in datasets:
-            if last_view != self.visible[what]:
-                self._manage_view('set',what,datasets)
-                raise Exception
-            self._manage_view('set',what,dataset)
-            last_view = self.visible[what]
-            yield dataset
-        self._manage_view('set',what,datasets)
-
-
     def view(self,what,datasets):
         """
         Set view.
@@ -279,10 +272,10 @@ class Result:
             Attribute to change (must be from self.visible).
         datasets : list of str or bool
             Name of datasets as list; supports ? and * wildcards.
-            True is equivalent to [*], False is equivalent to [].
+            True is equivalent to *, False is equivalent to [].
 
         """
-        self._manage_view('set',what,datasets)
+        return self._manage_view('set',what,datasets)
 
 
     def view_more(self,what,datasets):
@@ -295,10 +288,10 @@ class Result:
             Attribute to change (must be from self.visible).
         datasets : list of str or bool
             Name of datasets as list; supports ? and * wildcards.
-            True is equivalent to [*], False is equivalent to [].
+            True is equivalent to *, False is equivalent to [].
 
         """
-        self._manage_view('add',what,datasets)
+        return self._manage_view('add',what,datasets)
 
 
     def view_less(self,what,datasets):
@@ -311,10 +304,10 @@ class Result:
             Attribute to change (must be from self.visible).
         datasets : list of str or bool
             Name of datasets as list; supports ? and * wildcards.
-            True is equivalent to [*], False is equivalent to [].
+            True is equivalent to *, False is equivalent to [].
 
         """
-        self._manage_view('del',what,datasets)
+        return self._manage_view('del',what,datasets)
 
 
     def rename(self,name_old,name_new):
@@ -334,11 +327,11 @@ class Result:
 
         with h5py.File(self.fname,'a') as f:
             for inc in self.visible['increments']:
-                for ty in ['phases','homogenizations']:
-                    for label in self.visible[ty]:
+                for ty in ['phase','homogenization']:
+                    for label in self.visible[ty+'s']:
                         for field in self.visible['fields']:
-                            path_old = '/'.join([inc,ty[:-1],label,field,name_old])
-                            path_new = '/'.join([inc,ty[:-1],label,field,name_new])
+                            path_old = '/'.join([inc,ty,label,field,name_old])
+                            path_new = '/'.join([inc,ty,label,field,name_new])
                             if path_old in f.keys():
                                 f[path_new] = f[path_old]
                                 f[path_new].attrs['renamed'] = f'original name: {name_old}' if h5py3 else \
@@ -351,48 +344,25 @@ class Result:
         # compatibility hack
         de = 'Description' if self.version_minor < 12 else 'description'
         un = 'Unit' if self.version_minor < 12 else 'unit'
-        message = ''
+        msg = ''
         with h5py.File(self.fname,'r') as f:
             for inc in self.visible['increments']:
-                ''.join([message,f'\n{inc} ({self.times[self.increments.index(inc)]}s)\n'])
-                for ty in ['phases','homogenizations']:
-                    ' '.join([message,f'{ty[:-1]}\n'])
-                    for label in self.visible[ty]:
-                        ' '.join([message,f'{label}\n'])
+                msg = ''.join([msg,f'\n{inc} ({self.times[self.increments.index(inc)]}s)\n'])
+                for ty in ['phase','homogenization']:
+                    msg = ' '.join([msg,f'{ty}\n'])
+                    for label in self.visible[ty+'s']:
+                        msg = ' '.join([msg,f'{label}\n'])
                         for field in self.visible['fields']:
-                            ' '.join([message,f'{field}\n'])
-                            for d in f['/'.join([inc,ty[:-1],label,field])].keys():
-                                dataset = f['/'.join([inc,ty[:-1],label,field,d])]
+                            msg = ' '.join([msg,f'{field}\n'])
+                            for d in f['/'.join([inc,ty,label,field])].keys():
+                                dataset = f['/'.join([inc,ty,label,field,d])]
                                 unit = f' / {dataset.attrs[un]}' if h5py3 else \
                                        f' / {dataset.attrs[un].decode()}'
                                 description = dataset.attrs[de] if h5py3 else \
                                               dataset.attrs[de].decode()
-                                ' '.join([message,f'{d}{unit}: {description}\n'])
+                                msg = ' '.join([msg,f'{d}{unit}: {description}\n'])
 
-        return message
+        return msg
 
 
-    def get_dataset_location(self,label):
-        """Return the location of all active datasets with given label."""
-        path = []
-        with h5py.File(self.fname,'r') as f:
-            for i in self.visible['increments']:
-                k = '/'.join([i,'geometry',label])
-                try:
-                    f[k]
-                    path.append(k)
-                except KeyError:
-                    pass
-                for o,p in zip(['phases','homogenizations'],['fields','fields']):
-                    for oo in self.visible[o]:
-                        for pp in self.visible[p]:
-                            k = '/'.join([i,o[:-1],oo,pp,label])
-                            try:
-                                f[k]
-                                path.append(k)
-                            except KeyError:
-                                pass
-        return path
-
-
     def enable_user_function(self,func):
@@ -400,60 +370,6 @@ class Result:
         print(f'Function {func.__name__} enabled in add_calculation.')
 
 
-    def read_dataset(self,path,c=0,plain=False):
-        """
-        Dataset for all points/cells.
-
-        If more than one path is given, the dataset is composed of the individual contributions.
-
-        Parameters
-        ----------
-        path : list of strings
-            The name of the datasets to consider.
-        c : int, optional
-            The constituent to consider. Defaults to 0.
-        plain: boolean, optional
-            Convert into plain numpy datatype.
-            Only relevant for compound datatype, e.g. the orientation.
-            Defaults to False.
-
-        """
-        # compatibility hack
-        name = 'Name' if self.version_minor < 12 else 'label'
-        member = 'Position' if self.version_minor < 12 else 'entry'
-        grp = 'mapping' if self.version_minor < 12 else 'cell_to'
-        with h5py.File(self.fname,'r') as f:
-            shape = (self.N_materialpoints,) + np.shape(f[path[0]])[1:]
-            if len(shape) == 1: shape = shape +(1,)
-            dataset = np.full(shape,np.nan,dtype=np.dtype(f[path[0]]))
-            for pa in path:
-                label = pa.split('/')[2]
-
-                if pa.split('/')[1] == 'geometry':
-                    dataset = np.array(f[pa])
-                    continue
-
-                p = np.where(f[f'{grp}/phase'][:,c][name] == str.encode(label))[0]
-                if len(p)>0:
-                    u = (f[f'{grp}/phase'][member][p,c])
-                    a = np.array(f[pa])
-                    if len(a.shape) == 1:
-                        a=a.reshape([a.shape[0],1])
-                    dataset[p,:] = a[u,:]
-
-                p = np.where(f[f'{grp}/homogenization'][name] == str.encode(label))[0]
-                if len(p)>0:
-                    u = (f[f'{grp}/homogenization'][member][p.tolist()])
-                    a = np.array(f[pa])
-                    if len(a.shape) == 1:
-                        a=a.reshape([a.shape[0],1])
-                    dataset[p,:] = a[u,:]
-
-        if plain and dataset.dtype.names is not None:
-            return dataset.view(('float64',len(dataset.dtype.names)))
-        else:
-            return dataset
-
     @property
     def coordinates0_point(self):
         """Return initial coordinates of the cell centers."""
@@ -1044,10 +960,10 @@ class Result:
         groups = []
         with h5py.File(self.fname,'r') as f:
             for inc in self.visible['increments']:
-                for ty in ['phases','homogenizations']:
-                    for label in self.visible[ty]:
+                for ty in ['phase','homogenization']:
+                    for label in self.visible[ty+'s']:
                         for field in self.visible['fields']:
-                            group = '/'.join([inc,ty[:-1],label,field])
+                            group = '/'.join([inc,ty,label,field])
                             if set(datasets.values()).issubset(f[group].keys()): groups.append(group)
 
         if len(groups) == 0:
@@ -1176,11 +1092,11 @@ class Result:
                                       'Dimensions': '{} {} {} 3'.format(*(self.cells+1))}
                 data_items[-1].text=f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n'
 
-                for ty in ['phases','homogenizations']:
-                    for label in self.visible[ty]:
+                for ty in ['phase','homogenization']:
+                    for label in self.visible[ty+'s']:
                         for field in self.visible['fields']:
-                            for out in _match(output,f['/'.join((inc,ty[:-1],label,field))].keys()):
-                                name = '/'.join([inc,ty[:-1],label,field,out])
+                            for out in _match(output,f['/'.join((inc,ty,label,field))].keys()):
+                                name = '/'.join([inc,ty,label,field,out])
                                 shape = f[name].shape[1:]
                                 dtype = f[name].dtype
 
@@ -24,8 +24,7 @@ def default(tmp_path,ref_path):
     fname = '12grains6x7x8_tensionY.hdf5'
     shutil.copy(ref_path/fname,tmp_path)
     f = Result(tmp_path/fname)
-    f.view('times',20.0)
-    return f
+    return f.view('times',20.0)
 
 @pytest.fixture
 def single_phase(tmp_path,ref_path):
@@ -58,49 +57,37 @@ class TestResult:
 
 
     def test_view_all(self,default):
-        default.view('increments',True)
-        a = default.read('F')
+        a = default.view('increments',True).read('F')
 
-        default.view('increments','*')
-        assert dict_equal(a,default.read('F'))
-        default.view('increments',default.increments_in_range(0,np.iinfo(int).max))
-        assert dict_equal(a,default.read('F'))
+        assert dict_equal(a,default.view('increments','*').read('F'))
+        assert dict_equal(a,default.view('increments',default.increments_in_range(0,np.iinfo(int).max)).read('F'))
 
-        default.view('times',True)
-        assert dict_equal(a,default.read('F'))
-        default.view('times','*')
-        assert dict_equal(a,default.read('F'))
-        default.view('times',default.times_in_range(0.0,np.inf))
-        assert dict_equal(a,default.read('F'))
+        assert dict_equal(a,default.view('times',True).read('F'))
+        assert dict_equal(a,default.view('times','*').read('F'))
+        assert dict_equal(a,default.view('times',default.times_in_range(0.0,np.inf)).read('F'))
 
     @pytest.mark.parametrize('what',['increments','times','phases']) # ToDo: discuss homogenizations
     def test_view_none(self,default,what):
-        default.view(what,False)
-        a = default.read('F')
-        default.view(what,[])
-        b = default.read('F')
+        a = default.view(what,False).read('F')
+        b = default.view(what,[]).read('F')
 
         assert a == b == {}
 
     @pytest.mark.parametrize('what',['increments','times','phases']) # ToDo: discuss homogenizations
     def test_view_more(self,default,what):
-        default.view(what,False)
-        default.view_more(what,'*')
-        a = default.read('F')
+        empty = default.view(what,False)
 
-        default.view(what,True)
-        b = default.read('F')
+        a = empty.view_more(what,'*').read('F')
+        b = empty.view_more(what,True).read('F')
 
         assert dict_equal(a,b)
 
     @pytest.mark.parametrize('what',['increments','times','phases']) # ToDo: discuss homogenizations
     def test_view_less(self,default,what):
-        default.view(what,True)
-        default.view_less(what,'*')
-        a = default.read('F')
+        full = default.view(what,True)
 
-        default.view(what,False)
-        b = default.read('F')
+        a = full.view_less(what,'*').read('F')
+        b = full.view_less(what,True).read('F')
 
         assert a == b == {}
 
@@ -279,41 +266,41 @@ class TestResult:
 
     @pytest.mark.parametrize('overwrite',['off','on'])
     def test_add_overwrite(self,default,overwrite):
-        default.view('times',default.times_in_range(0,np.inf)[-1])
+        last = default.view('times',default.times_in_range(0,np.inf)[-1])
 
-        default.add_stress_Cauchy()
-        with h5py.File(default.fname,'r') as f:
+        last.add_stress_Cauchy()
+        with h5py.File(last.fname,'r') as f:
             created_first = default.place('sigma').dtype.metadata['created']
 
         created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')
 
        if overwrite == 'on':
-            default.allow_modification()
+            last = last.allow_modification()
        else:
-            default.disallow_modification()
+            last = last.disallow_modification()
 
        time.sleep(2.)
        try:
-            default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
+            last.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
        except ValueError:
            pass
-        with h5py.File(default.fname,'r') as f:
-            created_second = default.place('sigma').dtype.metadata['created']
+        with h5py.File(last.fname,'r') as f:
+            created_second = last.place('sigma').dtype.metadata['created']
            created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
 
        if overwrite == 'on':
-            assert created_first < created_second and np.allclose(default.place('sigma'),311.)
+            assert created_first < created_second and np.allclose(last.place('sigma'),311.)
        else:
-            assert created_first == created_second and not np.allclose(default.place('sigma'),311.)
+            assert created_first == created_second and not np.allclose(last.place('sigma'),311.)
 
     @pytest.mark.parametrize('allowed',['off','on'])
     def test_rename(self,default,allowed):
        if allowed == 'on':
            F = default.place('F')
-            default.allow_modification()
+            default = default.allow_modification()
            default.rename('F','new_name')
            assert np.all(F == default.place('new_name'))
-            default.disallow_modification()
+            default = default.disallow_modification()
 
        with pytest.raises(PermissionError):
            default.rename('P','another_new_name')
@@ -333,8 +320,7 @@ class TestResult:
     @pytest.mark.parametrize('fname',['12grains6x7x8_tensionY.hdf5'],ids=range(1))
     @pytest.mark.parametrize('inc',[4,0],ids=range(2))
     def test_vtk(self,request,tmp_path,ref_path,update,output,fname,inc):
-        result = Result(ref_path/fname)
-        result.view('increments',inc)
+        result = Result(ref_path/fname).view('increments',inc)
         os.chdir(tmp_path)
         result.save_VTK(output)
         fname = fname.split('.')[0]+f'_inc{(inc if type(inc) == int else inc[0]):0>2}.vtr'
@@ -387,7 +373,7 @@ class TestResult:
     def test_read(self,update,request,ref_path,view,output,compress,strip):
         result = Result(ref_path/'4grains2x4x3_compressionY.hdf5')
         for key,value in view.items():
-            result.view(key,value)
+            result = result.view(key,value)
 
         fname = request.node.name
         cur = result.read(output,compress,strip)
@@ -412,7 +398,7 @@ class TestResult:
     def test_place(self,update,request,ref_path,view,output,compress,strip,constituents):
         result = Result(ref_path/'4grains2x4x3_compressionY.hdf5')
         for key,value in view.items():
-            result.view(key,value)
+            result = result.view(key,value)
 
         fname = request.node.name
         cur = result.place(output,compress,strip,constituents)