encode/decode not needed anymore for h5py 3
attribute values with variable-length strings are now interpreted as str, not bytes
commit a416f5ed5b
parent b8c97c5b7e
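The behavior change behind this commit, as a minimal standalone sketch (not part of the diff; the file name 'demo.hdf5' and the 'Unit' attribute are placeholders): a variable-length string attribute reads back as bytes under h5py 2.x but as str under h5py 3.x.

import h5py

# Hypothetical file and attribute, for illustration only.
with h5py.File('demo.hdf5','w') as f:
    dset = f.create_dataset('x',data=[1,2,3])
    dset.attrs['Unit'] = 'Pa'                        # stored as a variable-length string

with h5py.File('demo.hdf5','r') as f:
    unit = f['x'].attrs['Unit']
    # h5py 2.x returns b'Pa' (bytes) -> .decode() was needed
    # h5py 3.x returns 'Pa'  (str)   -> .decode() raises AttributeError
    print(type(unit), unit if isinstance(unit,str) else unit.decode())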
@@ -21,6 +21,7 @@ from . import grid_filters
 from . import mechanics
 from . import util
 
+h5py3 = h5py.__version__[0] == '3'
 
 class Result:
     """
@@ -280,7 +281,8 @@ class Result:
                 for path_old in self.get_dataset_location(name_old):
                     path_new = os.path.join(os.path.dirname(path_old),name_new)
                     f[path_new] = f[path_old]
-                    f[path_new].attrs['Renamed'] = 'Original name: {}'.encode()
+                    f[path_new].attrs['Renamed'] = f'Original name: {name_old}' if h5py3 else \
+                                                   f'Original name: {name_old}'.encode()
                     del f[path_old]
         else:
             raise PermissionError('Rename operation not permitted')
@@ -422,8 +424,13 @@ class Result:
            for d in f[group].keys():
                try:
                    dataset = f['/'.join([group,d])]
-                   unit = f" / {dataset.attrs['Unit'].decode()}" if 'Unit' in dataset.attrs else ''
-                   description = dataset.attrs['Description'].decode()
+                   if 'Unit' in dataset.attrs:
+                       unit = f" / {dataset.attrs['Unit']}" if h5py3 else \
+                              f" / {dataset.attrs['Unit'].decode()}"
+                   else:
+                       unit = ''
+                   description = dataset.attrs['Description'] if h5py3 else \
+                                 dataset.attrs['Description'].decode()
                    message += f' {d}{unit}: {description}\n'
                except KeyError:
                    pass
@@ -1035,7 +1042,7 @@ class Result:
                loc = f[group+'/'+label]
                datasets_in[arg]={'data' :loc[()],
                                  'label':label,
-                                 'meta': {k:v.decode() for k,v in loc.attrs.items()}}
+                                 'meta': {k:(v if h5py3 else v.decode()) for k,v in loc.attrs.items()}}
            lock.release()
            r = func(**datasets_in,**args)
            return [group,r]
@@ -1080,17 +1087,21 @@ class Result:
                if self._allow_modification and result[0]+'/'+result[1]['label'] in f:
                    dataset = f[result[0]+'/'+result[1]['label']]
                    dataset[...] = result[1]['data']
-                   dataset.attrs['Overwritten'] = 'Yes'.encode()
+                   dataset.attrs['Overwritten'] = 'Yes' if h5py3 else \
+                                                  'Yes'.encode()
                else:
                    dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
 
                now = datetime.datetime.now().astimezone()
-               dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
+               dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
+                                          now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
 
                for l,v in result[1]['meta'].items():
-                   dataset.attrs[l]=v.encode()
-               creator = f"damask.Result.{dataset.attrs['Creator'].decode()} v{damask.version}"
-               dataset.attrs['Creator'] = creator.encode()
+                   dataset.attrs[l]=v if h5py3 else v.encode()
+               creator = dataset.attrs['Creator'] if h5py3 else \
+                         dataset.attrs['Creator'].decode()
+               dataset.attrs['Creator'] = f"damask.Result.{creator} v{damask.version}" if h5py3 else \
+                                          f"damask.Result.{creator} v{damask.version}".encode()
 
            except (OSError,RuntimeError) as err:
                print(f'Could not add dataset: {err}.')
@@ -296,7 +296,11 @@ class TestResult:
        default.add_Cauchy()
        loc = default.get_dataset_location('sigma')
        with h5py.File(default.fname,'r') as f:
-           created_first = f[loc[0]].attrs['Created'].decode()
+           # h5py3 compatibility
+           try:
+               created_first = f[loc[0]].attrs['Created'].decode()
+           except AttributeError:
+               created_first = f[loc[0]].attrs['Created']
        created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')
 
        if overwrite == 'on':
@@ -307,7 +311,11 @@ class TestResult:
        time.sleep(2.)
        default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
        with h5py.File(default.fname,'r') as f:
-           created_second = f[loc[0]].attrs['Created'].decode()
+           # h5py3 compatibility
+           try:
+               created_second = f[loc[0]].attrs['Created'].decode()
+           except AttributeError:
+               created_second = f[loc[0]].attrs['Created']
        created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
        if overwrite == 'on':
            assert created_first < created_second and np.allclose(default.read_dataset(loc),311.)
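For read paths that must work with both h5py generations, the tests above fall back via try/except rather than checking the version flag; the same idea as a small standalone helper (helper name and file are illustrative only, not from the commit):

import h5py

def as_str(attr_value):
    # h5py 2.x string attributes arrive as bytes, h5py 3.x as str.
    try:
        return attr_value.decode()
    except AttributeError:
        return attr_value

with h5py.File('demo.hdf5','r') as f:                # placeholder file from the sketch above
    print(as_str(f['x'].attrs['Unit']))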