more systematic reporting

Martin Diehl 2020-06-26 11:45:06 +02:00
parent 130cf7fe2e
commit bddb514072
1 changed file with 7 additions and 6 deletions

@@ -1,4 +1,4 @@
-import multiprocessing
+import multiprocessing as mp
 import re
 import inspect
 import glob
@@ -413,6 +413,7 @@ class Result:
         for i in self.iterate('increments'):
             message += f'\n{i} ({self.times[self.increments.index(i)]}s)\n'
             for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+                message += f'  {o[:-1]}\n'
                 for oo in self.iterate(o):
                     message += f'    {oo}\n'
                     for pp in self.iterate(p):
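The added line prints a singular heading ('constituent' / 'materialpoint') above the entries that were already listed, so the report reads hierarchically. A minimal sketch of the resulting structure, with hypothetical member names not taken from DAMASK:

message = ''
for o, members in [('constituents', ['1_Aluminum']),
                   ('materialpoints', ['1_SX'])]:
    message += f'  {o[:-1]}\n'     # new heading: group name without the trailing 's'
    for oo in members:
        message += f'    {oo}\n'
print(message)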
@@ -1066,8 +1067,8 @@ class Result:
         """
         num_threads = Environment().options['DAMASK_NUM_THREADS']
-        pool = multiprocessing.Pool(int(num_threads) if num_threads is not None else None)
-        lock = multiprocessing.Manager().Lock()
+        pool = mp.Pool(int(num_threads) if num_threads is not None else None)
+        lock = mp.Manager().Lock()
         groups = self.groups_with_datasets(datasets.values())
         default_arg = partial(self._job,func=func,datasets=datasets,args=args,lock=lock)
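The remaining changes only switch the existing pool/lock setup to the shorter mp alias. For context, a minimal sketch of the underlying pattern (hypothetical worker and arguments, not DAMASK's actual _job): functools.partial binds the shared arguments and the Manager lock, leaving only the group open, so the pool receives one picklable callable:

import multiprocessing as mp
from functools import partial

def _square(x):
    return x * x

def _job(group, func, lock):
    # hypothetical worker: compute something per group, then serialize the shared side effect
    result = func(group)
    with lock:
        print(f'{group}: {result}')

if __name__ == '__main__':
    num_threads = None                                # e.g. read from an environment option as in the diff
    pool = mp.Pool(int(num_threads) if num_threads is not None else None)
    lock = mp.Manager().Lock()
    worker = partial(_job, func=_square, lock=lock)   # only the group argument is left open
    pool.map(worker, range(8))
    pool.close()
    pool.join()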