polishing
parent e9bf02a52c
commit f604314207

@@ -949,21 +949,20 @@ class DADF5():
 
 
   def _job(self,group,func,datasets,args,lock):
-    def _read(group,datasets,lock):
-      datasets_in = {}
-      lock.acquire()
-      with h5py.File(self.fname,'r') as f:
-        for k,v in datasets.items():
-          loc = f[group+'/'+v]
-          datasets_in[k]={'data':loc[()],
-                          'label':v,
-                          'meta':{k2:v2.decode() for k2,v2 in loc.attrs.items()}}
-      lock.release()
-      return datasets_in
-
+    """
+    Execute job for _add_generic_pointwise
+    """
     try:
-      d = _read(group,datasets,lock)
-      r = func(**d,**args)
+      datasets_in = {}
+      lock.acquire()
+      with h5py.File(self.fname,'r') as f:
+        for arg,label in datasets.items():
+          loc = f[group+'/'+label]
+          datasets_in[arg]={'data' :loc[()],
+                            'label':label,
+                            'meta': {k:v.decode() for k,v in loc.attrs.items()}}
+      lock.release()
+      r = func(**datasets_in,**args)
       return [group,r]
     except Exception as err:
       print('Error during calculation: {}.'.format(err))
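
The reworked _job reads every requested dataset under the lock and passes each one to the callback as a keyword argument holding a {'data','label','meta'} dict; _add_generic_pointwise then writes whatever dict the callback returns back to the file. A minimal sketch of a compatible callback follows -- the name add_norm, the argument name x, and the dataset label 'F' are invented for illustration and are not part of this commit:

    import numpy as np

    def add_norm(x):
        # x is the dict _job builds per group: {'data':...,'label':...,'meta':...}.
        # The return value must use the same keys, since _add_generic_pointwise
        # stores result[1]['data'], result[1]['label'], and result[1]['meta'].
        return {'data' : np.linalg.norm(x['data'],axis=(1,2)),   # norm of each 3x3 tensor (assumes (N,3,3) data)
                'label': 'norm({})'.format(x['label']),
                'meta' : x['meta']}

    # 'datasets' maps callback argument name -> dataset label in the HDF5 file:
    # self._add_generic_pointwise(add_norm,{'x':'F'})
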
@@ -971,30 +970,41 @@ class DADF5():
 
 
   def _add_generic_pointwise(self,func,datasets,args={}):
-    env = Environment()
-    N_threads = int(env.options['DAMASK_NUM_THREADS'])
-    pool = multiprocessing.Pool(N_threads)
-    m = multiprocessing.Manager()
-    lock = m.Lock()
+    """
+    General function to add pointwise data.
+
+    Parameters
+    ----------
+    func : function
+      Callback function that calculates a new dataset from one or more datasets per HDF5 group.
+    datasets : dictionary
+      Details of the datasets to be used: label (in HDF5 file) and arg (argument to which the data is parsed in func).
+    args : dictionary, optional
+      Arguments parsed to func.
+
+    """
+    N_threads = int(Environment().options['DAMASK_NUM_THREADS'])
+    pool = multiprocessing.Pool(N_threads)
+    lock = multiprocessing.Manager().Lock()
 
     groups = self.groups_with_datasets(datasets.values())
     default_arg = partial(self._job,func=func,datasets=datasets,args=args,lock=lock)
-    util.progressBar(iteration=0,total=len(groups)-1)
+
+    util.progressBar(iteration=0,total=len(groups))
     for i,result in enumerate(pool.imap_unordered(default_arg,groups)):
-      util.progressBar(iteration=i,total=len(groups)-1)
+      util.progressBar(iteration=i+1,total=len(groups))
       if not result: continue
       lock.acquire()
       with h5py.File(self.fname, 'a') as f:
         try:
           dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
           for l,v in result[1]['meta'].items():
             dataset.attrs[l]=v.encode()
         except OSError as err:
           print('Could not add dataset: {}.'.format(err))
       lock.release()
+
     pool.close()
     pool.join()
 
 
-
   def to_vtk(self,labels,mode='cell'):
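
Two multiprocessing details carry the rewritten setup in the second hunk: a lock obtained from Manager().Lock() is a proxy object and therefore picklable, so it can be handed to pool workers (a bare multiprocessing.Lock() raises a RuntimeError when sent through a Pool), and imap_unordered yields each result as soon as any worker finishes, which is what drives the incremental progress bar. A self-contained sketch of the pattern, with a fixed worker count and a dummy job standing in for the DAMASK-specific parts:

    import multiprocessing
    from functools import partial

    def job(group,lock):
        # Stand-in for DADF5._job: read inputs under the lock, compute, return.
        with lock:                                     # serialize access to the shared file
            data = group                               # pretend to read inputs here
        return [group,{'label':'demo','data':data**2}]

    if __name__ == '__main__':
        pool = multiprocessing.Pool(4)                 # fixed worker count (assumption)
        lock = multiprocessing.Manager().Lock()        # picklable lock proxy
        groups = list(range(10))
        task = partial(job,lock=lock)
        for i,result in enumerate(pool.imap_unordered(task,groups)):
            print('{}/{}: group {} done'.format(i+1,len(groups),result[0]))
        pool.close()
        pool.join()

The progress-bar change fits the same loop: with total=len(groups) and iteration=i+1, the bar advances to (i+1)/len(groups) as each job completes, instead of lagging one job behind as the old iteration=i against total=len(groups)-1 did.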