adding data to geometry
This commit is contained in:
parent 8eb1a35dfb
commit 7177813710
@@ -5,6 +5,7 @@ import os,vtk
import numpy as np
import argparse
import damask
from vtk.util import numpy_support

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])
@@ -36,7 +37,7 @@ for filename in options.filenames:
                vtk.vtkDoubleArray(),
               ]

-  rGrid.SetDimensions(*data.grid)
+  rGrid.SetDimensions(*(data.grid+1))
  for dim in [0,1,2]:
    for c in np.linspace(0,data.size[dim],1+data.grid[dim]):
      coordArray[dim].InsertNextValue(c)
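The dimension change above reflects that cell data on a rectilinear grid needs grid+1 point coordinates per direction for grid cells. A minimal sketch of the same construction outside the script, with made-up grid and size values:

import numpy as np
import vtk

grid = np.array([2,3,4])                                      # number of cells per direction (made-up values)
size = np.array([1.0,1.0,1.0])                                # physical extent per direction (made-up values)

rGrid      = vtk.vtkRectilinearGrid()
coordArray = [vtk.vtkDoubleArray(),
              vtk.vtkDoubleArray(),
              vtk.vtkDoubleArray(),
             ]

rGrid.SetDimensions(*[int(n+1) for n in grid])                # cell data needs grid+1 points per direction
for dim in [0,1,2]:
  for c in np.linspace(0,size[dim],1+grid[dim]):              # grid+1 coordinates spanning size
    coordArray[dim].InsertNextValue(c)

rGrid.SetXCoordinates(coordArray[0])
rGrid.SetYCoordinates(coordArray[1])
rGrid.SetZCoordinates(coordArray[2])

print(rGrid.GetNumberOfCells())                               # 2*3*4 = 24 cells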
@@ -47,8 +48,11 @@ for filename in options.filenames:


  for i,inc in enumerate(data.increments):
    if not inc['active']: continue

    data.active['increments'] = [inc]
    x = data.get_dataset_location('xi_sl')[0]
    VTKarray = numpy_support.numpy_to_vtk(num_array=data.read_dataset(x,0),deep=True,array_type=vtk.VTK_DOUBLE)
    VTKarray.SetName('xi_sl')
    rGrid.GetCellData().AddArray(VTKarray)
    if data.structured:
      writer = vtk.vtkXMLRectilinearGridWriter()
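The new lines convert a per-cell numpy array into a vtkDataArray and attach it to the grid's cell data before writing. A self-contained sketch of that pattern, with a made-up data array, a hypothetical output file name, and the grid rebuilt inline:

import numpy as np
import vtk
from vtk.util import numpy_support

# build a small rectilinear grid (2x3x4 cells on a unit cube) to attach data to
grid  = np.array([2,3,4])
rGrid = vtk.vtkRectilinearGrid()
rGrid.SetDimensions(*[int(n+1) for n in grid])
for setter,n in zip([rGrid.SetXCoordinates,rGrid.SetYCoordinates,rGrid.SetZCoordinates],grid):
  setter(numpy_support.numpy_to_vtk(np.linspace(0.0,1.0,n+1),deep=True,array_type=vtk.VTK_DOUBLE))

# attach a (Ncells x 3) array as named cell data, mirroring what the script does with 'xi_sl'
cellData = np.random.rand(rGrid.GetNumberOfCells(),3)         # made-up values
VTKarray = numpy_support.numpy_to_vtk(num_array=cellData,deep=True,array_type=vtk.VTK_DOUBLE)
VTKarray.SetName('xi_sl')
rGrid.GetCellData().AddArray(VTKarray)

# write an XML rectilinear grid file (.vtr); the file name is hypothetical
writer = vtk.vtkXMLRectilinearGridWriter()
writer.SetFileName('example.vtr')
writer.SetInputData(rGrid)
writer.Write()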
@@ -1,6 +1,8 @@
# -*- coding: UTF-8 no BOM -*-
import h5py
import re
import numpy as np
import os

# ------------------------------------------------------------------
class DADF5():
@@ -20,7 +22,7 @@ class DADF5():
    with h5py.File(filename,'r') as f:

      if f.attrs['DADF5-major'] != 0 or f.attrs['DADF5-minor'] != 1:
-       print('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version']))
+       raise TypeError('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version']))

      self.structured = 'grid' in f['mapping'].attrs.keys()
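The version gate and the structured-grid test both rely on HDF5 attributes. A small sketch of how such attributes can be written and checked back with h5py, using made-up values and a hypothetical file name:

import h5py

# write a tiny file with made-up version attributes, then check it the same way DADF5 does
with h5py.File('example.hdf5','w') as f:
  f.attrs['DADF5-major'] = 0
  f.attrs['DADF5-minor'] = 1
  f.create_group('mapping').attrs['grid'] = [2,3,4]

with h5py.File('example.hdf5','r') as f:
  if f.attrs['DADF5-major'] != 0 or f.attrs['DADF5-minor'] != 1:
    raise TypeError('Unsupported DADF5 version')
  structured = 'grid' in f['mapping'].attrs.keys()
  print(structured)                                            # True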
@@ -29,10 +31,49 @@ class DADF5():
      self.size = f['mapping'].attrs['size']

      r=re.compile('inc[0-9]+')
-     self.increments = [{'group': u,
-                         'time': f[u].attrs['time/s'],
-                         'active': True
+     self.increments = [{'inc': int(u[3:]),
+                         'time': round(f[u].attrs['time/s'],12),
                        } for u in f.keys() if r.match(u)]

      self.constituents   = np.unique(f['mapping/cellResults/constituent']['Name']).tolist()        # ToDo: I am not too happy with the name
      self.constituents   = [c.decode() for c in self.constituents]
      self.materialpoints = np.unique(f['mapping/cellResults/materialpoint']['Name']).tolist()      # ToDo: I am not too happy with the name
      self.materialpoints = [m.decode() for m in self.materialpoints]
      self.Nconstituents  = np.shape(f['mapping/cellResults/constituent'])[1]
      self.Nmaterialpoints= np.shape(f['mapping/cellResults/constituent'])[0]

      self.active = {'increments':     self.increments,
                     'constituents':   self.constituents,
                     'materialpoints': self.materialpoints}

    self.filename = filename
    self.mode     = mode


  def get_dataset_location(self,label):
    path = []
    with h5py.File(self.filename,'r') as f:
      for i in self.active['increments']:
        group_inc = 'inc{:05}'.format(i['inc'])
        for c in self.active['constituents']:
          group_constituent = group_inc+'/constituent/'+c
          for t in f[group_constituent].keys():
            try:
              f[group_constituent+'/'+t+'/'+label]                       # touch the dataset to check that it exists
              path.append(group_constituent+'/'+t+'/'+label)
            except KeyError:
              pass
    return path


  def read_dataset(self,path,c):
    with h5py.File(self.filename,'r') as f:
      shape   = (self.Nmaterialpoints,) + np.shape(f[path])[1:]
      dataset = np.full(shape,np.nan)                                     # entries of other constituents stay NaN
      label   = path.split('/')[2]
      p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))
      for s in p: dataset[s,:] = f[path][s,:]

    return dataset
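Putting the class together, typical use is to open a results file, restrict the active increments, resolve dataset locations, and read them per constituent. A minimal sketch of that pattern; the file name is hypothetical and the one-argument constructor call is an assumption, since the __init__ signature is not shown in this diff:

import damask

data = damask.DADF5('geom_load.hdf5')              # hypothetical results file
print(data.increments)                             # e.g. [{'inc': 0, 'time': 0.0}, ...]

data.active['increments'] = data.increments[:1]    # restrict the active selection to the first increment
paths = data.get_dataset_location('xi_sl')         # all matching datasets within the active selection
if paths:
  xi = data.read_dataset(paths[0],0)               # (Nmaterialpoints x ...) array for constituent 0
  print(xi.shape)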