solved problem with postprocessing
- do not wrap h5py calls in 'try': h5py may run its own 'try' internally, so the outer except can catch the wrong error. Check explicitly for an empty argument instead. Also some polishing.
parent b2b625af3e
commit 002383afc2
@@ -1,9 +1,10 @@
 #!/usr/bin/env python3
-# -*- coding: UTF-8 no BOM -*-
 
 import os
-import numpy as np
 import argparse
 
+import numpy as np
+
 import damask
 
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
@@ -1,12 +1,14 @@
 #!/usr/bin/env python3
-# -*- coding: UTF-8 no BOM -*-
 
-import os,vtk
-import numpy as np
+import os
 import argparse
-import damask
+
+import numpy as np
+import vtk
 from vtk.util import numpy_support
 
+import damask
+
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])
@@ -100,18 +100,14 @@ class DADF5():
     Get groups that are currently considered for evaluation.
     """
     groups = []
-    for i,x in enumerate(self.active['increments']):
-      group_inc = 'inc{:05}'.format(self.active['increments'][i]['inc']) #ToDo: Merge path only once at the end '/'.join(listE)
+    for i in self.active['increments']:
+      group_inc = 'inc{:05}'.format(i['inc']) #ToDo: Merge path only once at the end '/'.join(listE)
       for c in self.active['constituents']:
-        group_constituent = group_inc+'/constituent/'+c
         for t in self.active['c_output_types']:
-          group_output_types = group_constituent+'/'+t
-          groups.append(group_output_types)
+          groups.append('/'.join([group_inc,'constituent',c,t]))
       for m in self.active['materialpoints']:
-        group_materialpoint = group_inc+'/materialpoint/'+m
         for t in self.active['m_output_types']:
-          group_output_types = group_materialpoint+'/'+t
-          groups.append(group_output_types)
+          groups.append('/'.join([group_inc,'materialpoint',m,t]))
     return groups
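The refactor above drops the intermediate group_constituent / group_output_types strings and assembles each group path in a single '/'.join call. A minimal standalone sketch of that pattern (the increment entry, constituent, and output type are made-up stand-ins, not values from the commit):

    inc  = {'inc': 10}                                    # stand-in for one entry of active['increments']
    c, t = 'constituent_1', 'generic'                     # hypothetical constituent and output type
    group_inc = 'inc{:05}'.format(inc['inc'])             # -> 'inc00010'
    group     = '/'.join([group_inc,'constituent',c,t])   # -> 'inc00010/constituent/constituent_1/generic'
    print(group)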
@@ -150,20 +146,20 @@ class DADF5():
         group_inc = 'inc{:05}'.format(i['inc'])
 
         for c in self.active['constituents']:
-          group_constituent = group_inc+'/constituent/'+c
           for t in self.active['c_output_types']:
             try:
-              f[group_constituent+'/'+t+'/'+label]
-              path.append(group_constituent+'/'+t+'/'+label)
+              p = '/'.join([group_inc,'constituent',c,t,label])
+              f[p]
+              path.append(p)
             except KeyError as e:
               print('unable to locate constituents dataset: '+ str(e))
 
         for m in self.active['materialpoints']:
-          group_materialpoint = group_inc+'/materialpoint/'+m
           for t in self.active['m_output_types']:
             try:
-              f[group_materialpoint+'/'+t+'/'+label]
-              path.append(group_materialpoint+'/'+t+'/'+label)
+              p = '/'.join([group_inc,'materialpoint',m,t,label])
+              f[p]
+              path.append(p)
             except KeyError as e:
               print('unable to locate materialpoints dataset: '+ str(e))
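In this hunk the path is built once and reused for both the existence probe f[p] and path.append(p); the probe itself still relies on try/except KeyError. As a side note, not part of this commit: h5py files and groups support a membership test, so the same probe could be written without exceptions. A sketch with a hypothetical file name and dataset path:

    import h5py

    with h5py.File('results.hdf5','r') as f:              # hypothetical result file
      p = '/'.join(['inc00010','constituent','constituent_1','generic','xi'])
      if p in f:                                          # h5py resolves full paths in membership tests
        print('dataset found:',p)
      else:
        print('dataset missing:',p)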
@@ -182,24 +178,22 @@ class DADF5():
       dataset = np.full(shape,np.nan)
       for pa in path:
         label = pa.split('/')[2]
-        try:
-          p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0]
+
+        p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0]
+        if len(p)>0:
           u = (f['mapping/cellResults/constituent'][p,c]['Position'])
           a = np.array(f[pa])
           if len(a.shape) == 1:
             a=a.reshape([a.shape[0],1])
           dataset[p,:] = a[u,:]
-        except KeyError as e:
-          print('unable to read constituent: '+ str(e))
 
-        try:
-          p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0]
+        p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0]
+        if len(p)>0:
           u = (f['mapping/cellResults/materialpoint'][p.tolist()]['Position'])
           a = np.array(f[pa])
           if len(a.shape) == 1:
             a=a.reshape([a.shape[0],1])
           dataset[p,:] = a[u,:]
 
       return dataset
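This hunk is the heart of the commit message: np.where never raises when a label is absent, it just returns an empty index array, so the old except KeyError could not catch that case, and any exception that did fire may have originated from an unrelated failure inside h5py. Testing len(p) > 0 makes the empty case explicit. A self-contained sketch of the pattern, where the names array is a stand-in for the 'Name' column of the mapping dataset:

    import numpy as np

    names = np.array([b'grain',b'phase'])                 # stand-in for f['mapping/...']['Name']
    p = np.where(names == str.encode('void'))[0]          # no match -> empty array, not an exception

    if len(p) > 0:                                        # explicit check replaces try/except KeyError
      print('rows to read:',p.tolist())
    else:
      print('label not mapped; entries stay NaN')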
@@ -424,7 +418,7 @@ class DADF5():
         N_not_calculated = len(todo)
         while N_not_calculated > 0:
           result = results.get()
-          with h5py.File(self.filename,self.mode) as f:  # write to file
+          with h5py.File(self.filename,'a') as f:        # write to file
             dataset_out = f[result['group']].create_dataset(result['label'],data=result['data'])
             for k in result['meta'].keys():
               dataset_out.attrs[k] = result['meta'][k]
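The hard-coded 'a' opens the file in read/write mode, creating it if it does not exist, instead of relying on whatever self.mode happens to hold. A minimal sketch of this write path, with hypothetical file name, group, dataset label, and metadata:

    import h5py
    import numpy as np

    with h5py.File('results.hdf5','a') as f:              # 'a': read/write, create file if missing
      group = f.require_group('inc00010/constituent/constituent_1/generic')
      dataset_out = group.create_dataset('norm_F',data=np.ones(3))  # raises if 'norm_F' already exists
      dataset_out.attrs['Unit'] = '-'                     # metadata attached as HDF5 attributes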