polishing

parent dd4612dd9b
commit e431d89957
@@ -26,6 +26,8 @@ from . import util
 
 h5py3 = h5py.__version__[0] == '3'
 
+chunk_size = 1024**2//8 # for compression in HDF5
+
 
 def _read(dataset):
     """Read a dataset and its metadata into a numpy.ndarray."""
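A note on the new module-level constant: 1024**2//8 is 131072 array elements, i.e. about 1 MiB of float64 data per HDF5 chunk. A minimal sketch of how it becomes a chunk shape, using the chunking formula from the last hunk below (the array shape here is made up):

    import numpy as np

    chunk_size = 1024**2//8                                # 131072 elements ~ 1 MiB of float64
    shape  = (500000,3,3)                                  # hypothetical per-point tensor data
    chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]   # split along the first axis
    print(chunks)                                          # (14563, 3, 3)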
@@ -1124,8 +1126,8 @@ class Result:
                 'label': f"{t}({F['label']})",
                 'meta':  {
                          'unit':        F['meta']['unit'],
-                         'description': '{} stretch tensor of {} ({})'.format('left' if t.upper() == 'V' else 'right',
-                                                                              F['label'],F['meta']['description']),
+                         'description': f"{'left' if t.upper() == 'V' else 'right'} stretch tensor "\
+                                       +f"of {F['label']} ({F['meta']['description']})",
                          'creator':     'add_stretch_tensor'
                          }
                 }
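The rewritten description builds the same string as the old .format call, now as two f-strings joined by + across a line continuation. A quick check with made-up inputs (the values of t and F are illustrative only):

    t = 'V'
    F = {'label': 'F', 'meta': {'description': 'deformation gradient'}}

    description = f"{'left' if t.upper() == 'V' else 'right'} stretch tensor "\
                 +f"of {F['label']} ({F['meta']['description']})"
    print(description)   # left stretch tensor of F (deformation gradient)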
@@ -1145,7 +1147,7 @@ class Result:
         self._add_generic_pointwise(self._add_stretch_tensor,{'F':F},{'t':t})
 
 
-    def _job(self,group,func,datasets,args,lock):
+    def _job_pointwise(self,group,func,datasets,args,lock):
         """Execute job for _add_generic_pointwise."""
         try:
             datasets_in = {}
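Only the signature is visible in this hunk, but the consumer loop in the last hunk now unpacks for group,result in ..., so the success path of _job_pointwise presumably returns a (group, dict) pair. A hypothetical sketch of that contract (the body is an assumption, not shown in this diff):

    def _job_pointwise(self,group,func,datasets,args,lock):
        """Execute job for _add_generic_pointwise."""
        try:
            datasets_in = {}
            # ... read the input datasets for this group under the lock ...
            r = func(**datasets_in,**args)       # {'data':..., 'label':..., 'meta':...}
            return (group, r)
        except Exception as err:
            print(f'Error during calculation: {err}.')
            return None                          # caveat: a bare None cannot be unpacked
                                                 # by the new for group,result loop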
@@ -1163,7 +1165,6 @@ class Result:
             print(f'Error during calculation: {err}.')
             return None
 
-
 
     def _add_generic_pointwise(self,func,datasets,args={}):
         """
         General function to add pointwise data.
@@ -1180,7 +1181,6 @@ class Result:
           Arguments parsed to func.
 
         """
-        chunk_size = 1024**2//8
         pool = mp.Pool(int(os.environ.get('OMP_NUM_THREADS',4)))
         lock = mp.Manager().Lock()
 
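With the local definition deleted, chunk_size inside this method now resolves to the module-level constant from the first hunk; the pool width still comes from the environment, defaulting to 4 workers when OMP_NUM_THREADS is unset. A tiny sketch of the name lookup (the stand-in function is illustrative):

    import os
    import multiprocessing as mp

    chunk_size = 1024**2//8                                       # module scope, shared by all writers

    def _add_generic_pointwise():                                 # stand-in for the method
        pool = mp.Pool(int(os.environ.get('OMP_NUM_THREADS',4)))
        return chunk_size                                         # no local: falls back to the global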
@@ -1197,34 +1197,34 @@ class Result:
             print('No matching dataset found, no data was added.')
             return
 
-        default_arg = partial(self._job,func=func,datasets=datasets,args=args,lock=lock)
+        default_arg = partial(self._job_pointwise,func=func,datasets=datasets,args=args,lock=lock)
 
-        for result in util.show_progress(pool.imap_unordered(default_arg,groups),len(groups)):
+        for group,result in util.show_progress(pool.imap_unordered(default_arg,groups),len(groups)):
             if not result:
                 continue
             lock.acquire()
             with h5py.File(self.fname, 'a') as f:
                 try:
-                    if self._allow_modification and result[0]+'/'+result[1]['label'] in f:
-                        dataset = f[result[0]+'/'+result[1]['label']]
-                        dataset[...] = result[1]['data']
+                    if self._allow_modification and '/'.join([group,result['label']]) in f:
+                        dataset = f['/'.join([group,result['label']])]
+                        dataset[...] = result['data']
                         dataset.attrs['overwritten'] = True
                     else:
-                        if result[1]['data'].size >= chunk_size*2:
-                            shape  = result[1]['data'].shape
+                        if result['data'].size >= chunk_size*2:
+                            shape  = result['data'].shape
                             chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
-                            dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
+                            dataset = f[group].create_dataset(result['label'],data=result['data'],
                                                                   maxshape=shape, chunks=chunks,
                                                                   compression='gzip', compression_opts=6,
                                                                   shuffle=True,fletcher32=True)
                         else:
-                            dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
+                            dataset = f[group].create_dataset(result['label'],data=result['data'])
 
                     now = datetime.datetime.now().astimezone()
                     dataset.attrs['created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
                                                now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
 
-                    for l,v in result[1]['meta'].items():
+                    for l,v in result['meta'].items():
                         dataset.attrs[l.lower()]=v if h5py3 else v.encode()
                     creator = dataset.attrs['creator'] if h5py3 else \
                               dataset.attrs['creator'].decode()
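Taken together, the writer branch is easiest to read outside the class. A self-contained sketch of the same pattern, assuming result has the {'data','label','meta'} shape used above (file and group names are made up; 'w' mode just keeps the example repeatable):

    import datetime
    import numpy as np
    import h5py

    h5py3 = h5py.__version__[0] == '3'
    chunk_size = 1024**2//8                              # elements per ~1 MiB float64 chunk

    def write(f,group,result):
        """Sketch of the dataset-creation branch of _add_generic_pointwise."""
        if result['data'].size >= chunk_size*2:          # large arrays: chunked + compressed
            shape  = result['data'].shape
            chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
            dataset = f[group].create_dataset(result['label'],data=result['data'],
                                              maxshape=shape, chunks=chunks,
                                              compression='gzip', compression_opts=6,
                                              shuffle=True,fletcher32=True)
        else:                                            # small arrays: contiguous, uncompressed
            dataset = f[group].create_dataset(result['label'],data=result['data'])

        now = datetime.datetime.now().astimezone()
        created = now.strftime('%Y-%m-%d %H:%M:%S%z')
        dataset.attrs['created'] = created if h5py3 else created.encode()
        for l,v in result['meta'].items():               # h5py 3 stores str, h5py 2 wants bytes
            dataset.attrs[l.lower()] = v if h5py3 else v.encode()

    with h5py.File('example.hdf5','w') as f:
        f.require_group('constituent')                   # made-up group name
        write(f,'constituent',{'data':  np.random.rand(300000,3,3),
                               'label': 'V(F)',
                               'meta':  {'unit': '1',
                                         'description': 'left stretch tensor of F (deformation gradient)',
                                         'creator': 'add_stretch_tensor'}})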