Merge branch 'MiscImprovements' into development
commit 2012297188
@@ -10,8 +10,8 @@
 *.pdf binary

 # ignore files from MSC.Marc in language statistics
-installation/mods_MarcMentat/* linguist-vendored
-src/MarcInclude/* linguist-vendored
+installation/mods_MarcMentat/20*/* linguist-vendored
+src/marc/include/* linguist-vendored

 # ignore reference files for tests in language statistics
 python/tests/reference/* linguist-vendored
@@ -4,146 +4,146 @@ from . import Lattice
 from . import Rotation


 class Orientation:
     """
     Crystallographic orientation.

     A crystallographic orientation contains a rotation and a lattice.
     """

     __slots__ = ['rotation','lattice']

     def __repr__(self):
         """Report lattice type and orientation."""
         return self.lattice.__repr__()+'\n'+self.rotation.__repr__()

     def __init__(self, rotation, lattice):
         """
         New orientation from rotation and lattice.

         Parameters
         ----------
         rotation : Rotation
             Rotation specifying the lattice orientation.
         lattice : Lattice
             Lattice type of the crystal.

         """
         if isinstance(lattice, Lattice):
             self.lattice = lattice
         else:
             self.lattice = Lattice(lattice)                                            # assume string

         if isinstance(rotation, Rotation):
             self.rotation = rotation
         else:
             self.rotation = Rotation.fromQuaternion(rotation)                          # assume quaternion

     def disorientation(self,
                        other,
                        SST = True,
                        symmetries = False):
         """
         Disorientation between myself and given other orientation.

         Rotation axis falls into SST if SST == True.
         (Currently requires same symmetry for both orientations.
         Look into A. Heinz and P. Neumann 1991 for cases with differing sym.)
         """
         if self.lattice.symmetry != other.lattice.symmetry:
             raise NotImplementedError('disorientation between different symmetry classes not supported yet.')

         mySymEqs    = self.equivalentOrientations() if SST else self.equivalentOrientations([0])   # take all or only first sym operation
         otherSymEqs = other.equivalentOrientations()

         for i,sA in enumerate(mySymEqs):
             aInv = sA.rotation.inversed()
             for j,sB in enumerate(otherSymEqs):
                 b = sB.rotation
                 r = b*aInv
                 for k in range(2):
                     r.inverse()
                     breaker = self.lattice.symmetry.inFZ(r.asRodrigues(vector=True)) \
                               and (not SST or other.lattice.symmetry.inDisorientationSST(r.asRodrigues(vector=True)))
                     if breaker: break
                 if breaker: break
             if breaker: break

         return (Orientation(r,self.lattice), i,j, k == 1) if symmetries else r         # disorientation ...
                                                                                        # ... own sym, other sym,
                                                                                        # self-->other: True, self<--other: False
     def inFZ(self):
         return self.lattice.symmetry.inFZ(self.rotation.asRodrigues(vector=True))


     def equivalentOrientations(self,members=[]):
         """List of orientations which are symmetrically equivalent."""
         try:
             iter(members)                                                              # asking for (even empty) list of members?
         except TypeError:
             return self.__class__(self.lattice.symmetry.symmetryOperations(members)*self.rotation,self.lattice)   # no, return rotation object
         else:
             return [self.__class__(q*self.rotation,self.lattice) \
                     for q in self.lattice.symmetry.symmetryOperations(members)]        # yes, return list of rotations

     def relatedOrientations(self,model):
         """List of orientations related by the given orientation relationship."""
         r = self.lattice.relationOperations(model)
         return [self.__class__(o*self.rotation,r['lattice']) for o in r['rotations']]


     def reduced(self):
         """Transform orientation to fall into fundamental zone according to symmetry."""
         for me in self.equivalentOrientations():
             if self.lattice.symmetry.inFZ(me.rotation.asRodrigues(vector=True)): break

         return self.__class__(me.rotation,self.lattice)


     def inversePole(self,
                     axis,
                     proper = False,
                     SST = True):
         """Axis rotated according to orientation (using crystal symmetry to ensure location falls into SST)."""
         if SST:                                                                        # pole requested to be within SST
             for i,o in enumerate(self.equivalentOrientations()):                       # test all symmetric equivalent quaternions
                 pole = o.rotation*axis                                                 # align crystal direction to axis
                 if self.lattice.symmetry.inSST(pole,proper): break                     # found SST version
         else:
             pole = self.rotation*axis                                                  # align crystal direction to axis

         return (pole,i if SST else 0)


     def IPFcolor(self,axis):
         """TSL color of inverse pole figure for given axis."""
         color = np.zeros(3,'d')

         for o in self.equivalentOrientations():
             pole = o.rotation*axis                                                     # align crystal direction to axis
             inSST,color = self.lattice.symmetry.inSST(pole,color=True)
             if inSST: break

         return color


     @staticmethod
     def fromAverage(orientations,
                     weights = []):
         """Create orientation from average of list of orientations."""
         if not all(isinstance(item, Orientation) for item in orientations):
             raise TypeError("Only instances of Orientation can be averaged.")

         closest = []
         ref = orientations[0]
         for o in orientations:
             closest.append(o.equivalentOrientations(
                            ref.disorientation(o,
                                               SST = False,                             # select (o[ther]'s) sym orientation
                                               symmetries = True)[2]).rotation)         # with lowest misorientation

         return Orientation(Rotation.fromAverage(closest,weights),ref.lattice)


     def average(self,other):
         """Calculate the average rotation."""
         return Orientation.fromAverage([self,other])
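
The Orientation class above pairs a Rotation with a Lattice. A minimal usage sketch, assuming the package-level import location shown below and that 'fcc' is an accepted lattice name (both are assumptions, not taken from this diff):

import numpy as np
from damask import Orientation, Rotation        # assumed import location

# two hypothetical orientations of the same (assumed) 'fcc' lattice
a = Orientation(Rotation.fromQuaternion(np.array([1.0,0.0,0.0,0.0])),'fcc')                          # identity
b = Orientation(Rotation.fromQuaternion(np.array([np.cos(np.pi/8),0.0,0.0,np.sin(np.pi/8)])),'fcc')  # 45° about z

d     = a.disorientation(b)                     # smallest symmetrically equivalent rotation from a to b
color = a.IPFcolor(np.array([0.0,0.0,1.0]))     # TSL inverse-pole-figure color for the z axis
avg   = Orientation.fromAverage([a,b])          # average of the two orientations
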
@@ -37,48 +37,48 @@ class Result:
         """
         with h5py.File(fname,'r') as f:

             try:
                 self.version_major = f.attrs['DADF5_version_major']
                 self.version_minor = f.attrs['DADF5_version_minor']
             except KeyError:
                 self.version_major = f.attrs['DADF5-major']
                 self.version_minor = f.attrs['DADF5-minor']

             if self.version_major != 0 or not 2 <= self.version_minor <= 6:
                 raise TypeError('Unsupported DADF5 version {}.{} '.format(self.version_major,
                                                                           self.version_minor))

             self.structured = 'grid' in f['geometry'].attrs.keys()

             if self.structured:
                 self.grid = f['geometry'].attrs['grid']
                 self.size = f['geometry'].attrs['size']
                 self.origin = f['geometry'].attrs['origin'] if self.version_major == 0 and self.version_minor >= 5 else \
                               np.zeros(3)

             r=re.compile('inc[0-9]+')
             increments_unsorted = {int(i[3:]):i for i in f.keys() if r.match(i)}
             self.increments = [increments_unsorted[i] for i in sorted(increments_unsorted)]
             self.times = [round(f[i].attrs['time/s'],12) for i in self.increments]

             self.Nmaterialpoints, self.Nconstituents = np.shape(f['mapping/cellResults/constituent'])
             self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])]
             self.constituents   = [c.decode() for c in np.unique(f['mapping/cellResults/constituent']['Name'])]

             self.con_physics = []
             for c in self.constituents:
                 self.con_physics += f['/'.join([self.increments[0],'constituent',c])].keys()
             self.con_physics = list(set(self.con_physics))                             # make unique

             self.mat_physics = []
             for m in self.materialpoints:
                 self.mat_physics += f['/'.join([self.increments[0],'materialpoint',m])].keys()
             self.mat_physics = list(set(self.mat_physics))                             # make unique

-        self.selection= {'increments': self.increments,
+        self.selection = {'increments': self.increments,
                          'constituents': self.constituents,'materialpoints': self.materialpoints,
                          'con_physics': self.con_physics, 'mat_physics': self.mat_physics
                         }

         self.fname = fname
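
The increment bookkeeping above matches HDF5 group names of the form incNN and sorts them numerically rather than lexically. The same parsing in isolation, with a made-up key list instead of an open DADF5 file:

import re

keys = ['inc0','inc10','inc2','geometry','mapping','inc1']     # hypothetical group names

r = re.compile('inc[0-9]+')
increments_unsorted = {int(i[3:]): i for i in keys if r.match(i)}
increments = [increments_unsorted[i] for i in sorted(increments_unsorted)]

print(increments)   # ['inc0', 'inc1', 'inc2', 'inc10'] -- numeric, not lexical, order
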
@@ -129,7 +129,7 @@ class Result:
             iterator = map(float,choice)
             choice = []
             for c in iterator:
-                idx=np.searchsorted(self.times,c)
+                idx = np.searchsorted(self.times,c)
                 if np.isclose(c,self.times[idx]):
                     choice.append(self.increments[idx])
                 elif np.isclose(c,self.times[idx+1]):
@@ -141,12 +141,12 @@ class Result:
         if action == 'set':
             self.selection[what] = valid
         elif action == 'add':
-            add=existing.union(valid)
-            add_sorted=sorted(add, key=lambda x: int("".join([i for i in x if i.isdigit()])))
+            add = existing.union(valid)
+            add_sorted = sorted(add, key=lambda x: int("".join([i for i in x if i.isdigit()])))
             self.selection[what] = add_sorted
         elif action == 'del':
-            diff=existing.difference(valid)
-            diff_sorted=sorted(diff, key=lambda x: int("".join([i for i in x if i.isdigit()])))
+            diff = existing.difference(valid)
+            diff_sorted = sorted(diff, key=lambda x: int("".join([i for i in x if i.isdigit()])))
             self.selection[what] = diff_sorted

@@ -287,8 +287,8 @@ class Result:
                 inData[key] = f['mapping/cellResults/materialpoint'][inGeom[key].tolist()]['Position']
             shape = np.shape(f[path])
             data = np.full((self.Nmaterialpoints,) + (shape[1:] if len(shape)>1 else (1,)),
                            np.nan,
                            dtype=np.dtype(f[path]))
             data[inGeom[key]] = (f[path] if len(shape)>1 else np.expand_dims(f[path],1))[inData[key]]
             path = (os.path.join(*([prop,name]+([cat] if cat else [])+([item] if item else []))) if split else path)+tag
             if split:
@@ -343,14 +343,14 @@ class Result:
         with h5py.File(self.fname,'r') as f:
             for i in self.iterate('increments'):
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                     for oo in self.iterate(o):
                         for pp in self.iterate(p):
                             group = '/'.join([i,o[:-1],oo,pp])                         # o[:-1]: plural/singular issue
                             if sets is True:
                                 groups.append(group)
                             else:
                                 match = [e for e_ in [glob.fnmatch.filter(f[group].keys(),s) for s in sets] for e in e_]
-                                if len(set(match)) == len(sets) : groups.append(group)
+                                if len(set(match)) == len(sets): groups.append(group)
         return groups

@@ -359,20 +359,20 @@ class Result:
         message = ''
         with h5py.File(self.fname,'r') as f:
             for i in self.iterate('increments'):
-                message+='\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
+                message += '\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                     for oo in self.iterate(o):
-                        message+=' {}\n'.format(oo)
+                        message += ' {}\n'.format(oo)
                         for pp in self.iterate(p):
-                            message+=' {}\n'.format(pp)
+                            message += ' {}\n'.format(pp)
                             group = '/'.join([i,o[:-1],oo,pp])                         # o[:-1]: plural/singular issue
                             for d in f[group].keys():
                                 try:
                                     dataset = f['/'.join([group,d])]
-                                    message+=' {} / ({}): {}\n'.\
+                                    message += ' {} / ({}): {}\n'.\
                                               format(d,dataset.attrs['Unit'].decode(),dataset.attrs['Description'].decode())
                                 except KeyError:
                                     pass
         return message

@@ -385,7 +385,7 @@ class Result:
                 try:
                     f[k]
                     path.append(k)
-                except KeyError as e:
+                except KeyError:
                     pass
             for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                 for oo in self.iterate(o):
@@ -394,7 +394,7 @@ class Result:
                             try:
                                 f[k]
                                 path.append(k)
-                            except KeyError as e:
+                            except KeyError:
                                 pass
         return path

@@ -419,31 +419,31 @@ class Result:
         If more than one path is given, the dataset is composed of the individual contributions.
         """
         with h5py.File(self.fname,'r') as f:
             shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:]
             if len(shape) == 1: shape = shape +(1,)
             dataset = np.full(shape,np.nan,dtype=np.dtype(f[path[0]]))
             for pa in path:
                 label = pa.split('/')[2]

-                if (pa.split('/')[1] == 'geometry'):
+                if pa.split('/')[1] == 'geometry':
                     dataset = np.array(f[pa])
                     continue

                 p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0]
                 if len(p)>0:
                     u = (f['mapping/cellResults/constituent']['Position'][p,c])
                     a = np.array(f[pa])
                     if len(a.shape) == 1:
                         a=a.reshape([a.shape[0],1])
                     dataset[p,:] = a[u,:]

                 p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0]
                 if len(p)>0:
                     u = (f['mapping/cellResults/materialpoint']['Position'][p.tolist()])
                     a = np.array(f[pa])
                     if len(a.shape) == 1:
                         a=a.reshape([a.shape[0],1])
                     dataset[p,:] = a[u,:]

         if plain and dataset.dtype.names is not None:
             return dataset.view(('float64',len(dataset.dtype.names)))
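
read_dataset above gathers rows from a per-label array and scatters them into an array sized for all material points, using the 'Name'/'Position' mapping. A small NumPy sketch of that gather/scatter step with made-up arrays and a hypothetical phase label:

import numpy as np

names    = np.array([b'Aluminum', b'Steel', b'Aluminum', b'Steel'])   # one entry per material point
position = np.array([0, 0, 1, 1])                                     # row index into the per-phase data
a        = np.array([[1.0, 2.0],                                      # data stored per phase, here for 'Aluminum'
                     [3.0, 4.0]])

dataset = np.full((len(names), 2), np.nan)
p = np.where(names == str.encode('Aluminum'))[0]    # material points that belong to this phase
u = position[p]                                     # their rows inside the phase data
dataset[p, :] = a[u, :]                             # scatter into the global array

print(dataset)   # rows 0 and 2 filled, rows 1 and 3 stay NaN
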
@@ -518,7 +518,7 @@ class Result:

         """
         if not vectorized:
             raise NotImplementedError

         dataset_mapping = {d:d for d in set(re.findall(r'#(.*?)#',formula))}           # datasets used in the formula
         args = {'formula':formula,'label':label,'unit':unit,'description':description}
@@ -661,9 +661,9 @@ class Result:

         lattice = q['meta']['Lattice']

-        for i,q in enumerate(q['data']):
-            o = Orientation(np.array([q['w'],q['x'],q['y'],q['z']]),lattice).reduced()
+        for i,qu in enumerate(q['data']):
+            o = Orientation(np.array([qu['w'],qu['x'],qu['y'],qu['z']]),lattice).reduced()
             colors[i] = np.uint8(o.IPFcolor(d_unit)*255)

         return {
                 'data': colors,
@@ -814,8 +814,8 @@ class Result:
         m = util.scale_to_coprime(pole)
         coords = np.empty((len(q['data']),2))

-        for i,q in enumerate(q['data']):
-            o = Rotation(np.array([q['w'],q['x'],q['y'],q['z']]))
+        for i,qu in enumerate(q['data']):
+            o = Rotation(np.array([qu['w'],qu['x'],qu['y'],qu['z']]))
             rotatedPole = o*unit_pole                                                  # rotate pole according to crystal orientation
             (x,y) = rotatedPole[0:2]/(1.+abs(unit_pole[2]))                            # stereographic projection
             coords[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if polar else [x,y]
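
The pole-figure loop above applies the crystal rotation to the requested pole, projects it stereographically, and optionally converts to polar coordinates. The two projection formulas in isolation, for an arbitrary unit pole:

import numpy as np

pole = np.array([1.0, 1.0, 1.0])
pole = pole/np.linalg.norm(pole)                  # unit pole, here along [111]

x, y  = pole[0:2]/(1.+abs(pole[2]))               # stereographic projection onto the equatorial plane
polar = [np.sqrt(x*x+y*y), np.arctan2(y,x)]       # radius and azimuth of the same point

print((x, y), polar)
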
@@ -1036,67 +1036,67 @@ class Result:
         """
         if mode.lower()=='cell':

             if self.structured:
                 v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
             else:
                 with h5py.File(self.fname,'r') as f:
                     v = VTK.from_unstructuredGrid(f['/geometry/x_n'][()],
                                                   f['/geometry/T_c'][()]-1,
                                                   f['/geometry/T_c'].attrs['VTK_TYPE'].decode())
         elif mode.lower()=='point':
             v = VTK.from_polyData(self.cell_coordinates())

         N_digits = int(np.floor(np.log10(int(self.increments[-1][3:]))))+1

-        for i,inc in enumerate(util.show_progress(self.iterate('increments'),len(self.selection['increments']))):
+        for inc in util.show_progress(self.iterate('increments'),len(self.selection['increments'])):

             materialpoints_backup = self.selection['materialpoints'].copy()
             self.pick('materialpoints',False)
             for label in (labels if isinstance(labels,list) else [labels]):
                 for p in self.iterate('con_physics'):
                     if p != 'generic':
                         for c in self.iterate('constituents'):
                             x = self.get_dataset_location(label)
                             if len(x) == 0:
                                 continue
                             array = self.read_dataset(x,0)
                             v.add(array,'1_'+x[0].split('/',1)[1])                     #ToDo: hard coded 1!
                     else:
                         x = self.get_dataset_location(label)
                         if len(x) == 0:
                             continue
                         array = self.read_dataset(x,0)
                         ph_name = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))')    # identify phase name
                         dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1])        # removing phase name
                         v.add(array,dset_name)
             self.pick('materialpoints',materialpoints_backup)

             constituents_backup = self.selection['constituents'].copy()
             self.pick('constituents',False)
             for label in (labels if isinstance(labels,list) else [labels]):
                 for p in self.iterate('mat_physics'):
                     if p != 'generic':
                         for m in self.iterate('materialpoints'):
                             x = self.get_dataset_location(label)
                             if len(x) == 0:
                                 continue
                             array = self.read_dataset(x,0)
                             v.add(array,'1_'+x[0].split('/',1)[1])                     #ToDo: why 1_?
                     else:
                         x = self.get_dataset_location(label)
                         if len(x) == 0:
                             continue
                         array = self.read_dataset(x,0)
                         v.add(array,'1_'+x[0].split('/',1)[1])
             self.pick('constituents',constituents_backup)

             u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
             v.add(u,'u')

             file_out = '{}_inc{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
                                          inc[3:].zfill(N_digits))

             v.write(file_out)

 ###################################################################################################
 # BEGIN DEPRECATED

@@ -292,7 +292,7 @@ class Rotation:
         if degrees: ax[ 3] = np.radians(ax[3])
         if normalise: ax[0:3] /= np.linalg.norm(ax[0:3])
         if ax[3] < 0.0 or ax[3] > np.pi:
-            raise ValueError('Axis angle rotation angle outside of [0..π].\n'.format(ax[3]))
+            raise ValueError('Axis angle rotation angle outside of [0..π].\n{}'.format(ax[3]))
         if not np.isclose(np.linalg.norm(ax[0:3]), 1.0):
             raise ValueError('Axis angle rotation axis is not of unit length.\n{} {} {}'.format(*ax[0:3]))

@@ -338,7 +338,7 @@ class Rotation:
         if not np.isclose(np.linalg.norm(ro[0:3]), 1.0):
             raise ValueError('Rodrigues rotation axis is not of unit length.\n{} {} {}'.format(*ro[0:3]))
         if ro[3] < 0.0:
-            raise ValueError('Rodrigues rotation angle not positive.\n'.format(ro[3]))
+            raise ValueError('Rodrigues rotation angle not positive.\n{}'.format(ro[3]))

         return Rotation(Rotation.ro2qu(ro))

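
Both ValueError messages fixed in the two hunks above shared the same defect: without a '{}' placeholder, str.format silently discards its argument, so the offending value never showed up in the error text. A two-line illustration:

angle = 4.0
print('Axis angle rotation angle outside of [0..π].\n'.format(angle))     # placeholder missing: value is dropped
print('Axis angle rotation angle outside of [0..π].\n{}'.format(angle))   # fixed: value 4.0 is reported
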
@@ -365,8 +365,7 @@ class Rotation:


     @staticmethod
-    def fromAverage(rotations,
-                    weights = []):
+    def fromAverage(rotations,weights = None):
         """
         Average rotation.

@@ -384,10 +383,10 @@ class Rotation:

         """
         if not all(isinstance(item, Rotation) for item in rotations):
             raise TypeError("Only instances of Rotation can be averaged.")

         N = len(rotations)
-        if weights == [] or not weights:
+        if not weights:
             weights = np.ones(N,dtype='i')

         for i,(r,n) in enumerate(zip(rotations,weights)):
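
The new signature replaces the mutable default weights = [] with None; since None and an empty list are both falsy, the single test `not weights` covers the old and the new default. A sketch of the same pattern on plain numbers (not the actual quaternion averaging):

import numpy as np

def from_average(values, weights = None):
    """Weighted average with an optional weights argument (sketch of the pattern only)."""
    if not weights:                               # covers weights=None as well as weights=[]
        weights = np.ones(len(values), dtype='i')
    return np.average(values, weights=weights)

print(from_average([1.0, 2.0, 3.0]))              # 2.0, unit weights
print(from_average([1.0, 2.0, 3.0], [1, 1, 2]))   # 2.25
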
@@ -713,8 +712,8 @@ class Rotation:

     @staticmethod
     def ro2om(ro):
         """Rodgrigues-Frank vector to rotation matrix."""
         return Rotation.ax2om(Rotation.ro2ax(ro))

     @staticmethod
     def ro2eu(ro):

@@ -327,9 +327,9 @@ class Table:
         seen = set()
         labels = []
         for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
-            if(self.shapes[l] == (1,)):
+            if self.shapes[l] == (1,):
                 labels.append('{}'.format(l))
-            elif(len(self.shapes[l]) == 1):
+            elif len(self.shapes[l]) == 1:
                 labels += ['{}_{}'.format(i+1,l) \
                            for i in range(self.shapes[l][0])]
             else:

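
The loop above expands each column label according to its shape: a (1,) column keeps its name, a 1-D column of length n becomes n numbered labels. A self-contained sketch of the two branches visible in this hunk, with made-up shapes:

shapes = {'phi': (1,), 'v': (3,)}                 # hypothetical column shapes

labels = []
for l in shapes:
    if shapes[l] == (1,):
        labels.append('{}'.format(l))             # scalar column keeps its name
    elif len(shapes[l]) == 1:
        labels += ['{}_{}'.format(i+1,l)
                   for i in range(shapes[l][0])]  # vector column becomes 1_v, 2_v, 3_v

print(labels)   # ['phi', '1_v', '2_v', '3_v']
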
@@ -103,7 +103,7 @@ class VTK:
             Spatial position of the points.

         """
-        vtk_points= vtk.vtkPoints()
+        vtk_points = vtk.vtkPoints()
         vtk_points.SetData(np_to_vtk(points))

         geom = vtk.vtkPolyData()
@@ -168,11 +168,11 @@ class VTK:
             Filename for writing.

         """
-        if (isinstance(self.geom,vtk.vtkRectilinearGrid)):
+        if isinstance(self.geom,vtk.vtkRectilinearGrid):
             writer = vtk.vtkXMLRectilinearGridWriter()
-        elif(isinstance(self.geom,vtk.vtkUnstructuredGrid)):
+        elif isinstance(self.geom,vtk.vtkUnstructuredGrid):
             writer = vtk.vtkXMLUnstructuredGridWriter()
-        elif(isinstance(self.geom,vtk.vtkPolyData)):
+        elif isinstance(self.geom,vtk.vtkPolyData):
             writer = vtk.vtkXMLPolyDataWriter()

         default_ext = writer.GetDefaultFileExtension()
@@ -234,17 +234,17 @@ class VTK:

         ren = vtk.vtkRenderer()

-        renWin = vtk.vtkRenderWindow()
-        renWin.AddRenderer(ren)
+        window = vtk.vtkRenderWindow()
+        window.AddRenderer(ren)

         ren.AddActor(actor)
         ren.SetBackground(0.2,0.2,0.2)

-        renWin.SetSize(Environment().screen_width,Environment().screen_height)
+        window.SetSize(Environment().screen_width,Environment().screen_height)

         iren = vtk.vtkRenderWindowInteractor()
-        iren.SetRenderWindow(renWin)
+        iren.SetRenderWindow(window)

         iren.Initialize()
-        renWin.Render()
+        window.Render()
         iren.Start()
@@ -41,10 +41,10 @@ def curl(size,field):
     e[0, 2, 1] = e[2, 1, 0] = e[1, 0, 2] = -1.0

     field_fourier = np.fft.rfftn(field,axes=(0,1,2))
-    curl = (np.einsum('slm,ijkl,ijkm ->ijks', e,k_s,field_fourier)*2.0j*np.pi if n == 3 else    # vector, 3 -> 3
+    curl_ = (np.einsum('slm,ijkl,ijkm ->ijks', e,k_s,field_fourier)*2.0j*np.pi if n == 3 else   # vector, 3 -> 3
             np.einsum('slm,ijkl,ijknm->ijksn',e,k_s,field_fourier)*2.0j*np.pi)                  # tensor, 3x3 -> 3x3

-    return np.fft.irfftn(curl,axes=(0,1,2),s=field.shape[:3])
+    return np.fft.irfftn(curl_,axes=(0,1,2),s=field.shape[:3])


 def divergence(size,field):
@@ -61,10 +61,10 @@ def divergence(size,field):
     k_s = _ks(size,field.shape[:3],True)

     field_fourier = np.fft.rfftn(field,axes=(0,1,2))
-    divergence = (np.einsum('ijkl,ijkl ->ijk', k_s,field_fourier)*2.0j*np.pi if n == 3 else     # vector, 3 -> 1
+    div_ = (np.einsum('ijkl,ijkl ->ijk', k_s,field_fourier)*2.0j*np.pi if n == 3 else           # vector, 3 -> 1
            np.einsum('ijkm,ijklm->ijkl',k_s,field_fourier)*2.0j*np.pi)                          # tensor, 3x3 -> 3

-    return np.fft.irfftn(divergence,axes=(0,1,2),s=field.shape[:3])
+    return np.fft.irfftn(div_,axes=(0,1,2),s=field.shape[:3])


 def gradient(size,field):
@@ -81,10 +81,10 @@ def gradient(size,field):
     k_s = _ks(size,field.shape[:3],True)

     field_fourier = np.fft.rfftn(field,axes=(0,1,2))
-    gradient = (np.einsum('ijkl,ijkm->ijkm', field_fourier,k_s)*2.0j*np.pi if n == 1 else       # scalar, 1 -> 3
+    grad_ = (np.einsum('ijkl,ijkm->ijkm', field_fourier,k_s)*2.0j*np.pi if n == 1 else          # scalar, 1 -> 3
            np.einsum('ijkl,ijkm->ijklm',field_fourier,k_s)*2.0j*np.pi)                          # vector, 3 -> 3x3

-    return np.fft.irfftn(gradient,axes=(0,1,2),s=field.shape[:3])
+    return np.fft.irfftn(grad_,axes=(0,1,2),s=field.shape[:3])


 def cell_coord0(grid,size,origin=np.zeros(3)):
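
curl, divergence, and gradient above all follow the same spectral pattern: forward FFT, multiplication by 2πi times the frequency vector, inverse FFT. A self-contained 1-D check of that pattern (using numpy's rfftfreq in place of the module-internal _ks helper) against the analytic derivative of a sine:

import numpy as np

N, size = 64, 2.0*np.pi                            # grid points and (assumed periodic) domain length
x = np.arange(N)/N*size
field = np.sin(x)                                  # scalar field whose gradient is cos(x)

k = np.fft.rfftfreq(N, d=size/N)                   # frequencies in cycles per unit length
field_fourier = np.fft.rfft(field)
grad_ = np.fft.irfft(field_fourier*2.0j*np.pi*k, n=N)

print(np.allclose(grad_, np.cos(x)))               # True
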