avoid code duplication
parent ac2f366a72
commit 28bb92021d
@@ -45,23 +45,22 @@ class DADF5():
       self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])]
       self.constituents   = [c.decode() for c in np.unique(f['mapping/cellResults/constituent'] ['Name'])]
-
 
       self.con_physics = []
       for c in self.constituents:
-        self.con_physics += f['inc00000/constituent/{}'.format(c)].keys()
+        self.con_physics += f['/'.join([self.increments[0],'constituent',c])].keys()
       self.con_physics = list(set(self.con_physics))                       # make unique
 
       self.mat_physics = []
       for m in self.materialpoints:
-        self.mat_physics += f['inc00000/materialpoint/{}'.format(m)].keys()
+        self.mat_physics += f['/'.join([self.increments[0],'materialpoint',m])].keys()
       self.mat_physics = list(set(self.mat_physics))                       # make unique
 
-    self.visible= {'increments':     self.increments,                      # ToDo:simplify, activity only positions that translate into (no complex types)
+    self.visible= {'increments':     self.increments,
                    'constituents':   self.constituents,
                    'materialpoints': self.materialpoints,
                    'constituent':    range(self.Nconstituents),            # ToDo: stupid naming
                    'con_physics':    self.con_physics,
                    'mat_physics':    self.mat_physics}
 
     self.filename = filename
 
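The lines changed above drop the hard-coded 'inc00000' prefix in favour of a path joined from self.increments[0]. A minimal standalone sketch of why both forms build the same HDF5 key, assuming the first increment label is 'inc00000' (the constituent name below is hypothetical, not taken from a real result file):

    # Minimal sketch: old format call vs. new '/'.join form.
    increment   = 'inc00000'      # assumed first entry of self.increments
    constituent = '1_Aluminum'    # hypothetical constituent name

    old_key = 'inc00000/constituent/{}'.format(constituent)
    new_key = '/'.join([increment,'constituent',constituent])
    assert old_key == new_key     # both yield 'inc00000/constituent/1_Aluminum'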
@@ -163,75 +162,53 @@ class DADF5():
     groups = []
 
     with h5py.File(self.filename,'r') as f:
-      for i in self.iter_visible('increments'): #ToDo: Merge path only once at the end '/'.join(listE)
-        for c in self.iter_visible('constituents'):
-          for p in self.iter_visible('con_physics'):
-            group = '/'.join([i,'constituent',c,p])
-            if sets is True:
-              groups.append(group)
-            else:
-              match = [e for e_ in [glob.fnmatch.filter(f[group].keys(),s) for s in sets] for e in e_]
-              if len(set(match)) == len(sets) : groups.append(group)
-        for m in self.iter_visible('materialpoints'):
-          for p in self.iter_visible('mat_physics'):
-            group = '/'.join([i,'materialpoint',m,p])
-            if sets is True:
-              groups.append(group)
-            else:
-              match = [e for e_ in [glob.fnmatch.filter(f[group].keys(),s) for s in sets] for e in e_]
-              if len(set(match)) == len(sets) : groups.append(group)
+      for i in self.iter_visible('increments'):
+        for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+          for oo in self.iter_visible(o):
+            for pp in self.iter_visible(p):
+              group = '/'.join([i,o[:-1],oo,pp])                            # o[:-1]: plural/singular issue
+              if sets is True:
+                groups.append(group)
+              else:
+                match = [e for e_ in [glob.fnmatch.filter(f[group].keys(),s) for s in sets] for e in e_]
+                if len(set(match)) == len(sets) : groups.append(group)
     return groups
 
 
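The merged loop above drives constituents and materialpoints through one code path instead of two near-identical blocks. The following standalone sketch, with made-up visibility data rather than the real HDF5 layout, illustrates how zip pairs each object category with its physics key and how o[:-1] turns the plural category name into the singular group name used in the file:

    # Sketch with hypothetical names; only the loop structure mirrors the commit.
    visible = {'constituents':   ['1_Aluminum'],
               'materialpoints': ['single_crystal'],
               'con_physics':    ['generic'],
               'mat_physics':    ['generic']}

    groups = []
    for i in ['inc00000']:
        for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
            for oo in visible[o]:
                for pp in visible[p]:
                    groups.append('/'.join([i,o[:-1],oo,pp]))   # 'constituents' -> 'constituent'

    print(groups)   # ['inc00000/constituent/1_Aluminum/generic',
                    #  'inc00000/materialpoint/single_crystal/generic']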
   def list_data(self):
-    """Shows information on all active datasets in the file."""
+    """Gives information on all active datasets in the file."""
+    message = ''
     with h5py.File(self.filename,'r') as f:
-      i = 'inc{:05}'.format(0)
-      for c in self.iter_visible('constituents'):
-        print('{}'.format(c))
-        for p in self.iter_visible('con_physics'):
-          print('  {}'.format(p))
-          try:
-            k = '/'.join([i,'constituent',c,p])
-            for d in f[k].keys():
-              print('    {} ({})'.format(d,f[k+'/'+d].attrs['Description'].decode()))
-          except KeyError:
-            pass
-      for m in self.iter_visible('materialpoints'):
-        print('{}'.format(m))
-        for p in self.iter_visible('mat_physics'):
-          print('  {}'.format(p))
-          try:
-            k = '/'.join([i,'materialpoint',m,p])
-            for d in f[k].keys():
-              print('    {} ({})'.format(d,f[k+'/'+d].attrs['Description'].decode()))
-          except KeyError:
-            pass
+      for i in self.iter_visible('increments'):
+        message+='\n{}\n'.format(i)
+        for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+          for oo in self.iter_visible(o):
+            message+='  {}\n'.format(oo)
+            for pp in self.iter_visible(p):
+              message+='    {}\n'.format(pp)
+              group = '/'.join([i,o[:-1],oo,pp])                            # o[:-1]: plural/singular issue
+              for d in f[group].keys():
+                try:
+                  message+='      {} ({})\n'.format(d,f['/'.join([group,d])].attrs['Description'].decode())
+                except KeyError:
+                  pass
+    return message
 
 
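With this rewrite, list_data no longer prints while iterating but assembles one string over all visible increments and returns it, leaving output to the caller. A hedged usage sketch follows; the import path and the result-file name are assumptions, not part of this commit:

    import damask

    results = damask.DADF5('my_job.hdf5')   # hypothetical DADF5 result file
    print(results.list_data())              # one indented block per visible increment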
   def get_dataset_location(self,label):
     """Returns the location of all active datasets with given label."""
     path = []
     with h5py.File(self.filename,'r') as f:
       for i in self.iter_visible('increments'):
-        for c in self.iter_visible('constituents'):
-          for p in self.iter_visible('con_physics'):
-            try:
-              k = '/'.join([i,'constituent',c,p,label])
-              f[k]
-              path.append(k)
-            except KeyError as e:
-              print('unable to locate constituents dataset: '+ str(e))
-
-        for m in self.iter_visible('materialpoints'):
-          for p in self.iter_visible('mat_physics'):
-            try:
-              k = '/'.join([i,'materialpoint',m,p,label])
-              f[k]
-              path.append(k)
-            except KeyError as e:
-              print('unable to locate materialpoints dataset: '+ str(e))
+        for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+          for oo in self.iter_visible(o):
+            for pp in self.iter_visible(p):
+              try:
+                k = '/'.join([i,o[:-1],oo,pp,label])
+                f[k]
+                path.append(k)
+              except KeyError as e:
+                print('unable to locate {} dataset: {}'.format(o,str(e)))
 
     return path
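After the change, get_dataset_location walks constituents and materialpoints with the same loop and returns every matching HDF5 path for the requested label. A short usage sketch, again with an assumed file name and dataset label:

    import damask

    results = damask.DADF5('my_job.hdf5')                # hypothetical result file
    for location in results.get_dataset_location('F'):   # 'F' assumed to exist as a dataset label
        print(location)                                  # e.g. inc00000/constituent/1_Aluminum/generic/F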