more systematic reporting
parent 130cf7fe2e
commit bddb514072

@@ -1,4 +1,4 @@
-import multiprocessing
+import multiprocessing as mp
 import re
 import inspect
 import glob

@@ -413,17 +413,18 @@ class Result:
         for i in self.iterate('increments'):
           message += f'\n{i} ({self.times[self.increments.index(i)]}s)\n'
           for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+            message += f'  {o[:-1]}\n'
             for oo in self.iterate(o):
               message += f'    {oo}\n'
               for pp in self.iterate(p):
                 message += f'      {pp}\n'
                 group = '/'.join([i,o[:-1],oo,pp])           # o[:-1]: plural/singular issue
                 for d in f[group].keys():
                   try:
                     dataset = f['/'.join([group,d])]
                     unit = f" / {dataset.attrs['Unit'].decode()}" if 'Unit' in dataset.attrs else ''
                     description = dataset.attrs['Description'].decode()
                     message += f'        {d}{unit}: {description}\n'
                   except KeyError:
                     pass
       return message

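The hunk above adds one header line per constituent/materialpoint so that the dataset listing is indented level by level: increment, constituent/materialpoint, physics group, dataset with its Unit and Description attributes. As a point of comparison only, here is a minimal standalone sketch of the same reporting idea using a generic h5py tree walk instead of the Result iterators; the function name list_datasets and the file name my_results.hdf5 are illustrative, not DAMASK API, and the .decode() calls assume the attributes are stored as byte strings as in the code above:

import h5py

def list_datasets(fname):
    """Build an indented report of every dataset and its Unit/Description attributes."""
    message = ''
    with h5py.File(fname, 'r') as f:
        def visit(name, obj):
            nonlocal message
            if isinstance(obj, h5py.Dataset):
                # assumes byte-string attributes, hence .decode(); plain str attributes would not need it
                unit = f" / {obj.attrs['Unit'].decode()}" if 'Unit' in obj.attrs else ''
                description = obj.attrs['Description'].decode() if 'Description' in obj.attrs else ''
                indent = '  ' * name.count('/')                # indent by depth in the file
                message += f"{indent}{name.rsplit('/', 1)[-1]}{unit}: {description}\n"
        f.visititems(visit)
    return message

if __name__ == '__main__':
    print(list_datasets('my_results.hdf5'))                    # hypothetical file name
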
@@ -1066,8 +1067,8 @@ class Result:
 
       """
       num_threads = Environment().options['DAMASK_NUM_THREADS']
-      pool = multiprocessing.Pool(int(num_threads) if num_threads is not None else None)
-      lock = multiprocessing.Manager().Lock()
+      pool = mp.Pool(int(num_threads) if num_threads is not None else None)
+      lock = mp.Manager().Lock()
 
       groups = self.groups_with_datasets(datasets.values())
       default_arg = partial(self._job,func=func,datasets=datasets,args=args,lock=lock)
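The second hunk only routes the pool and lock through the new mp alias. For context, a minimal sketch of the pattern it relies on: a worker pool sized from DAMASK_NUM_THREADS plus a Manager lock handed to every job via functools.partial. The _job body, func=len and the group strings below are made-up placeholders, not the DAMASK implementation:

import multiprocessing as mp
import os
from functools import partial

def _job(group, func, lock):
    """Process one group; the lock serializes access to the shared output file."""
    result = func(group)                                       # func stands in for the pointwise computation
    with lock:                                                 # only one worker writes at a time
        print(f'{group}: {result}')                            # stand-in for writing the new dataset
    return group

if __name__ == '__main__':
    num_threads = os.environ.get('DAMASK_NUM_THREADS')         # stand-in for Environment().options[...]
    pool = mp.Pool(int(num_threads) if num_threads else None)  # None -> one worker per CPU core
    lock = mp.Manager().Lock()                                  # Manager locks can be shared with pool workers

    groups = ['increment_0/constituent/1_Al/generic',           # illustrative group paths, not real output
              'increment_1/constituent/1_Al/generic']
    default_arg = partial(_job, func=len, lock=lock)             # bind everything except the group

    pool.map(default_arg, groups)
    pool.close()
    pool.join()

A Manager lock (rather than a bare mp.Lock) is used because its proxy can be pickled into the partial and shipped to the pool workers.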