handling of increments follows handling of other 'visible'-items
commit 4cedcee0b4
parent f6ac8c995f
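The change replaces the increment dictionaries (with 'inc' and 'time' keys) by the plain HDF5 group names such as inc00025 and routes increment selection through the same iter_visible mechanism already used for constituents, materialpoints, and their physics groups. A minimal sketch of the resulting usage pattern, assuming the class is exposed as damask.DADF5 and using a hypothetical result file name:

import damask                                     # assumes the DAMASK Python package is installed

results = damask.DADF5('my_job.hdf5')             # hypothetical file name

# before this commit: increments were dicts and had to be activated manually
#   for inc in results.increments:
#       results.visible['increments'] = [inc]
#       step = inc['inc']
#
# after this commit: increments are group-name strings, handled like any
# other 'visible' item
for inc in results.iter_visible('increments'):    # yields e.g. 'inc00000', 'inc00025', ...
    step = int(inc[3:])                           # strip the 'inc' prefix to recover the step number
    print(inc, step)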
@@ -47,22 +47,20 @@ for filename in options.filenames:

coords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)

-for i,inc in enumerate(results.increments):
+for i,inc in enumerate(results.iter_visible('increments')):
print('Output step {}/{}'.format(i+1,len(results.increments)))

header = '1 header\n'

-data = np.array([inc['inc'] for j in range(np.product(results.grid))]).reshape([np.product(results.grid),1])
+data = np.array([int(inc[3:]) for j in range(np.product(results.grid))]).reshape([np.product(results.grid),1])
header+= 'inc'

coords = coords.reshape([np.product(results.grid),3])
data = np.concatenate((data,coords),1)
header+=' 1_pos 2_pos 3_pos'

-results.visible['increments'] = [inc]
-
for label in options.con:
-for o in results.iter_visible('con_physics'):
+for p in results.iter_visible('con_physics'):
for c in results.iter_visible('constituents'):
x = results.get_dataset_location(label)
if len(x) == 0:
@@ -77,7 +75,7 @@ for filename in options.filenames:
header+=' '+label

for label in options.mat:
-for o in results.iter_visible('mat_physics'):
+for p in results.iter_visible('mat_physics'):
for m in results.iter_visible('materialpoints'):
x = results.get_dataset_location(label)
if len(x) == 0:
@@ -96,5 +94,5 @@ for filename in options.filenames:
os.mkdir(dirname)
except FileExistsError:
pass
-file_out = '{}_inc{:04d}.txt'.format(filename.split('.')[0],inc['inc'])
+file_out = '{}_{}.txt'.format(filename.split('.')[0],inc)
np.savetxt(os.path.join(dirname,file_out),data,header=header,comments='')
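Because inc is now already a zero-padded group name, the output file name is built with a plain placeholder instead of inc{:04d}; as a side effect the padding follows the five-digit group names. A quick worked example with a hypothetical input name:

inc = 'inc00025'                                   # as yielded by iter_visible('increments')
filename = 'my_job.hdf5'                           # hypothetical input name
file_out = '{}_{}.txt'.format(filename.split('.')[0], inc)
print(file_out)                                    # my_job_inc00025.txt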
@@ -57,17 +57,16 @@ for filename in options.filenames:
rGrid.SetZCoordinates(coordArray[2])


-for i,inc in enumerate(results.increments):
+for i,inc in enumerate(results.iter_visible('increments')):
print('Output step {}/{}'.format(i+1,len(results.increments)))
vtk_data = []
-results.visible['increments'] = [inc]

results.set_visible('materialpoints',False)
results.set_visible('constituents', True)
for label in options.con:

-for o in results.iter_visible('con_physics'):
-if o != 'generic':
+for p in results.iter_visible('con_physics'):
+if p != 'generic':
for c in results.iter_visible('constituents'):
x = results.get_dataset_location(label)
if len(x) == 0:
@@ -90,8 +89,8 @@ for filename in options.filenames:
results.set_visible('constituents', False)
results.set_visible('materialpoints',True)
for label in options.mat:
-for o in results.iter_visible('mat_physics'):
-if o != 'generic':
+for p in results.iter_visible('mat_physics'):
+if p != 'generic':
for m in results.iter_visible('materialpoints'):
x = results.get_dataset_location(label)
if len(x) == 0:
@@ -120,7 +119,7 @@ for filename in options.filenames:
os.mkdir(dirname)
except FileExistsError:
pass
-file_out = '{}_inc{:04d}.{}'.format(filename.split('.')[0],inc['inc'],writer.GetDefaultFileExtension())
+file_out = '{}_{}.{}'.format(filename.split('.')[0],inc,writer.GetDefaultFileExtension())

writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
@@ -38,11 +38,7 @@ class DADF5():
self.size = f['geometry'].attrs['size']

r=re.compile('inc[0-9]+')
-self.time_information = [{'inc': int(u[3:]),
-'time': round(f[u].attrs['time/s'],12),
-} for u in f.keys() if r.match(u)]
-
-self.increments = self.time_information.copy() # unify later
+self.increments = [u for u in f.keys() if r.match(u)]

self.Nmaterialpoints, self.Nconstituents = np.shape(f['mapping/cellResults/constituent'])
self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])]
@@ -51,20 +47,20 @@ class DADF5():

self.con_physics = []
for c in self.constituents:
-self.con_physics += f['inc{:05}/constituent/{}'.format(self.increments[0]['inc'],c)].keys()
+self.con_physics += f['inc00000/constituent/{}'.format(c)].keys()
self.con_physics = list(set(self.con_physics)) # make unique

self.mat_physics = []
for m in self.materialpoints:
-self.mat_physics += f['inc{:05}/materialpoint/{}'.format(self.increments[0]['inc'],m)].keys()
+self.mat_physics += f['inc00000/materialpoint/{}'.format(m)].keys()
self.mat_physics = list(set(self.mat_physics)) # make unique

self.visible= {'increments': self.increments, # ToDo:simplify, activity only positions that translate into (no complex types)
'constituents': self.constituents,
'materialpoints': self.materialpoints,
'constituent': range(self.Nconstituents), # ToDo: stupid naming
'con_physics': self.con_physics,
'mat_physics': self.mat_physics}

self.filename = filename

@@ -100,24 +96,6 @@ class DADF5():
last_datasets = self.visible[what]
yield dataset
self.__manage_visible(datasets,what,'set')


-# ToDo: store increments, select icrements (trivial), position, and time
-def increment_set_by_time(self,start,end):
-for t in self.time_information:
-if start<= t['time']< end:
-print(t)
-
-
-def increment_set_by_position(self,start,end):
-for t in self.time_information[start:end]:
-print(t)
-
-
-def increment_set(self,start,end):
-for t in self.time_information:
-if start<= t['inc']< end:
-print(t)
-
-
def set_visible(self,what,datasets):
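The removed increment_set_* stubs only printed entries of the now-dropped time_information list. With increments stored as plain group names, a positional selection can be expressed directly on that list; a hypothetical helper, not part of this commit:

def select_increments_by_position(results, start, end):
    # narrow the visible increments to a slice of the stored group names (hypothetical)
    results.visible['increments'] = results.increments[start:end]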
@@ -163,19 +141,18 @@ class DADF5():
groups = []

with h5py.File(self.filename,'r') as f:
-for i in self.visible['increments']:
-group_inc = 'inc{:05}'.format(i['inc']) #ToDo: Merge path only once at the end '/'.join(listE)
+for i in self.iter_visible('increments'): #ToDo: Merge path only once at the end '/'.join(listE)
for c in self.iter_visible('constituents'):
-for t in self.iter_visible('con_physics'):
-group = '/'.join([group_inc,'constituent',c,t])
+for p in self.iter_visible('con_physics'):
+group = '/'.join([i,'constituent',c,p])
if sets is True:
groups.append(group)
else:
match = [e for e_ in [glob.fnmatch.filter(f[group].keys(),s) for s in sets] for e in e_]
if len(set(match)) == len(sets) : groups.append(group)
for m in self.iter_visible('materialpoints'):
-for t in self.iter_visible('mat_physics'):
-group = '/'.join([group_inc,'materialpoint',m,t])
+for p in self.iter_visible('mat_physics'):
+group = '/'.join([i,'materialpoint',m,p])
if sets is True:
groups.append(group)
else:
@@ -187,23 +164,23 @@ class DADF5():
def list_data(self): # print_datasets and have [] and ['*'], loop over all increment, soll auf anderen basieren (get groups with sternchen)
"""Shows information on all active datasets in the file."""
with h5py.File(self.filename,'r') as f:
-group_inc = 'inc{:05}'.format(self.visible['increments'][0]['inc']) #ToDo: Merge path only once at the end '/'.join(listE)
+i = 'inc{:05}'.format(0) #ToDo: Merge path only once at the end '/'.join(listE)
for c in self.iter_visible('constituents'):
print('\n'+c)
-group_constituent = group_inc+'/constituent/'+c
+group_constituent = i+'/constituent/'+c
-for t in self.iter_visible('con_physics'):
+for p in self.iter_visible('con_physics'):
print('  {}'.format(t))
-group_output_types = group_constituent+'/'+t
+group_output_types = group_constituent+'/'+p
try:
for x in f[group_output_types].keys():
print('    {} ({})'.format(x,f[group_output_types+'/'+x].attrs['Description'].decode()))
except KeyError:
pass
for m in self.iter_visible('materialpoints'):
-group_materialpoint = group_inc+'/materialpoint/'+m
+group_materialpoint = i+'/materialpoint/'+m
-for t in self.iter_visible('mat_physics'):
+for p in self.iter_visible('mat_physics'):
print('  {}'.format(t))
-group_output_types = group_materialpoint+'/'+t
+group_output_types = group_materialpoint+'/'+p
try:
for x in f[group_output_types].keys():
print('    {} ({})'.format(x,f[group_output_types+'/'+x].attrs['Description'].decode()))
@@ -215,24 +192,22 @@ class DADF5():
"""Returns the location of all active datasets with given label.""" #ToDo: Merge path only once at the end '/'.join(listE)
path = []
with h5py.File(self.filename,'r') as f:
-for i in self.visible['increments']:
-group_inc = 'inc{:05}'.format(i['inc'])
-
+for i in self.iter_visible('increments'):
for c in self.iter_visible('constituents'):
for t in self.iter_visible('con_physics'):
try:
-p = '/'.join([group_inc,'constituent',c,t,label])
-f[p]
-path.append(p)
+k = '/'.join([i,'constituent',c,t,label])
+f[k]
+path.append(k)
except KeyError as e:
print('unable to locate constituents dataset: '+ str(e))

for m in self.iter_visible('materialpoints'):
for t in self.iter_visible('mat_physics'):
try:
-p = '/'.join([group_inc,'materialpoint',m,t,label])
-f[p]
-path.append(p)
+k = '/'.join([i,'materialpoint',m,t,label])
+f[k]
+path.append(k)
except KeyError as e:
print('unable to locate materialpoints dataset: '+ str(e))
