DAMASK_EICMD/python/damask/dadf5.py

from queue import Queue
import re
import glob
import os
import vtk
from vtk.util import numpy_support
import h5py
import numpy as np
from . import util
from . import version
from . import mechanics
from . import Rotation
from . import Orientation
from . import Environment
class DADF5():
"""
Read and write to DADF5 files.
DADF5 files contain DAMASK results.
"""
def __init__(self,fname):
"""
Opens an existing DADF5 file.
Parameters
----------
fname : str
name of the DADF5 file to be opened.
"""
with h5py.File(fname,'r') as f:
try:
self.version_major = f.attrs['DADF5_version_major']
self.version_minor = f.attrs['DADF5_version_minor']
except KeyError:
self.version_major = f.attrs['DADF5-major']
self.version_minor = f.attrs['DADF5-minor']
if self.version_major != 0 or not 2 <= self.version_minor <= 6:
raise TypeError('Unsupported DADF5 version {}.{}'.format(self.version_major,self.version_minor)) # use the attributes read above (old files store them as 'DADF5-major'/'DADF5-minor')
self.structured = 'grid' in f['geometry'].attrs.keys()
if self.structured:
self.grid = f['geometry'].attrs['grid']
self.size = f['geometry'].attrs['size']
if self.version_major == 0 and self.version_minor >= 5:
self.origin = f['geometry'].attrs['origin']
r=re.compile('inc[0-9]+')
increments_unsorted = {int(i[3:]):i for i in f.keys() if r.match(i)}
self.increments = [increments_unsorted[i] for i in sorted(increments_unsorted)]
self.times = [round(f[i].attrs['time/s'],12) for i in self.increments]
self.Nmaterialpoints, self.Nconstituents = np.shape(f['mapping/cellResults/constituent'])
self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])]
self.constituents = [c.decode() for c in np.unique(f['mapping/cellResults/constituent'] ['Name'])]
self.con_physics = []
for c in self.constituents:
self.con_physics += f['/'.join([self.increments[0],'constituent',c])].keys()
self.con_physics = list(set(self.con_physics)) # make unique
self.mat_physics = []
for m in self.materialpoints:
self.mat_physics += f['/'.join([self.increments[0],'materialpoint',m])].keys()
self.mat_physics = list(set(self.mat_physics)) # make unique
self.selection= {'increments': self.increments,
'constituents': self.constituents,
'materialpoints': self.materialpoints,
'con_physics': self.con_physics,
'mat_physics': self.mat_physics}
self.fname = fname
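# Usage sketch (illustrative only, not executed): open a result file written by a
# DAMASK grid solver run and inspect its increments. The file name 'geom_load.hdf5'
# is an assumption.
#
#   import damask
#   results = damask.DADF5('geom_load.hdf5')
#   print(results.increments)   # e.g. ['inc0', 'inc10', ...]
#   print(results.times)        # corresponding simulation times in s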
def __manage_visible(self,datasets,what,action):
"""
Manages the visibility of the groups.
Parameters
----------
datasets : list of str or Boolean
name of datasets as list, supports ? and * wildcards.
True is equivalent to [*], False is equivalent to []
what : str
attribute to change (must be in self.selection)
action : str
select from 'set', 'add', and 'del'
"""
# allow True/False and string arguments
if datasets is True:
datasets = ['*']
elif datasets is False:
datasets = []
choice = [datasets] if isinstance(datasets,str) else datasets
valid = [e for e_ in [glob.fnmatch.filter(getattr(self,what),s) for s in choice] for e in e_]
existing = set(self.selection[what])
if action == 'set':
self.selection[what] = valid
elif action == 'add':
self.selection[what] = list(existing.union(valid))
elif action == 'del':
self.selection[what] = list(existing.difference(valid)) # difference() returns the new set, difference_update() would return None
def __time_to_inc(self,start,end):
selected = []
for i,time in enumerate(self.times):
if start <= time <= end:
selected.append(self.increments[i])
return selected
def set_by_time(self,start,end):
"""
Set active increments based on start and end time.
Parameters
----------
start : float
start time (included)
end : float
end time (included)
"""
self.__manage_visible(self.__time_to_inc(start,end),'increments','set')
def add_by_time(self,start,end):
"""
Add to active increments based on start and end time.
Parameters
----------
start : float
start time (included)
end : float
end time (included)
"""
self.__manage_visible(self.__time_to_inc(start,end),'increments','add')
def del_by_time(self,start,end):
"""
Delete from active increments based on start and end time.
Parameters
----------
start : float
start time (included)
end : float
end time (included)
"""
self.__manage_visible(self.__time_to_inc(start,end),'increments','del')
def set_by_increment(self,start,end):
"""
Set active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
if self.version_minor >= 4:
self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','set')
else:
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','set')
def add_by_increment(self,start,end):
"""
Add to active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
if self.version_minor >= 4:
self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','add')
else:
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','add')
def del_by_increment(self,start,end):
"""
Delete from active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
if self.version_minor >= 4:
self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','del')
else:
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','del')
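# Usage sketch (illustrative only, not executed), given a DADF5 instance 'results'
# whose file contains increments inc0 ... inc100 written every 1 s (an assumption):
#
#   results.set_by_time(0.0,10.0)       # activate increments with 0 s <= t <= 10 s
#   results.add_by_increment(90,100)    # additionally activate inc90 ... inc100
#   results.del_by_increment(0,0)       # drop the initial increment again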
def iter_visible(self,what):
"""
Iterate over visible items by setting each one visible.
Parameters
----------
what : str
attribute to change (must be in self.selection)
"""
datasets = self.selection[what]
last_datasets = datasets.copy()
for dataset in datasets:
if last_datasets != self.selection[what]:
self.__manage_visible(datasets,what,'set')
raise Exception
self.__manage_visible(dataset,what,'set')
last_datasets = self.selection[what]
yield dataset
self.__manage_visible(datasets,what,'set')
def set_visible(self,what,datasets):
"""
Set active groups.
Parameters
----------
datasets : list of str or Boolean
name of datasets as list, supports ? and * wildcards.
True is equivalent to [*], False is equivalent to []
what : str
attribute to change (must be in self.selection)
"""
self.__manage_visible(datasets,what,'set')
def add_visible(self,what,datasets):
"""
Add to active groups.
Parameters
----------
datasets : list of str or Boolean
name of datasets as list, supports ? and * wildcards.
True is equivalent to [*], False is equivalent to []
what : str
attribute to change (must be in self.selection)
"""
self.__manage_visible(datasets,what,'add')
def del_visible(self,what,datasets):
"""
Delete from active groups.
Parameters
----------
datasets : list of str or Boolean
name of datasets as list, supports ? and * wildcards.
True is equivalent to [*], False is equivalent to []
what : str
attribute to change (must be in self.selection)
"""
self.__manage_visible(datasets,what,'del')
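# Usage sketch (illustrative only, not executed), given a DADF5 instance 'results'.
# The constituent name pattern '*Aluminum*' is an assumption that depends on the
# material configuration of the simulation:
#
#   results.set_visible('constituents',['*Aluminum*'])   # restrict to one phase
#   results.add_visible('con_physics','generic')         # ensure generic results stay active
#   results.del_visible('materialpoints',True)           # hide all homogenization results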
def groups_with_datasets(self,datasets):
"""
Get groups that contain all requested datasets.
Only groups within inc?????/constituent/*_*/* or inc?????/materialpoint/*_*/*
are considered as they contain the data.
Single strings will be treated as list with one entry.

Wild card matching is allowed, but the number of arguments needs to fit.
Parameters
----------
datasets : iterable or str or boolean
Examples
--------
datasets = False matches no group
datasets = True matches all groups
datasets = ['F','P'] matches a group with ['F','P','sigma']
datasets = ['*','P'] matches a group with ['F','P']
datasets = ['*'] does not match a group with ['F','P','sigma']
datasets = ['*','*'] does not match a group with ['F','P','sigma']
datasets = ['*','*','*'] matches a group with ['F','P','sigma']
"""
if datasets is False: return []
sets = [datasets] if isinstance(datasets,str) else datasets
groups = []
with h5py.File(self.fname,'r') as f:
for i in self.iter_visible('increments'):
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o):
for pp in self.iter_visible(p):
group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue
if sets is True:
groups.append(group)
else:
match = [e for e_ in [glob.fnmatch.filter(f[group].keys(),s) for s in sets] for e in e_]
if len(set(match)) == len(sets) : groups.append(group)
return groups
def list_data(self):
"""Return information on all active datasets in the file."""
message = ''
with h5py.File(self.fname,'r') as f:
for i in self.iter_visible('increments'):
message+='\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o):
message+=' {}\n'.format(oo)
for pp in self.iter_visible(p):
message+=' {}\n'.format(pp)
group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue
for d in f[group].keys():
try:
dataset = f['/'.join([group,d])]
message+=' {} / ({}): {}\n'.\
format(d,dataset.attrs['Unit'].decode(),dataset.attrs['Description'].decode())
except KeyError:
pass
return message
def get_dataset_location(self,label):
"""Return the location of all active datasets with given label."""
path = []
with h5py.File(self.fname,'r') as f:
for i in self.iter_visible('increments'):
k = '/'.join([i,'geometry',label])
try:
f[k]
path.append(k)
except KeyError as e:
pass
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o):
for pp in self.iter_visible(p):
k = '/'.join([i,o[:-1],oo,pp,label])
try:
f[k]
path.append(k)
except KeyError as e:
pass
return path
def get_constituent_ID(self,c=0):
"""Pointwise constituent ID."""
with h5py.File(self.fname,'r') as f:
names = f['/mapping/cellResults/constituent']['Name'][:,c].astype('str')
return np.array([int(n.split('_')[0]) for n in names.tolist()],dtype=np.int32)
def get_crystal_structure(self): # ToDo: extension to multi constituents/phase
"""Info about the crystal structure."""
with h5py.File(self.fname,'r') as f:
return f[self.get_dataset_location('orientation')[0]].attrs['Lattice'].astype('str') # np.bytes_ to string
def read_dataset(self,path,c=0,plain=False):
"""
Dataset for all points/cells.
If more than one path is given, the dataset is composed of the individual contributions.
"""
with h5py.File(self.fname,'r') as f:
shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:]
if len(shape) == 1: shape = shape +(1,)
dataset = np.full(shape,np.nan,dtype=np.dtype(f[path[0]]))
for pa in path:
label = pa.split('/')[2]
if (pa.split('/')[1] == 'geometry'):
dataset = np.array(f[pa])
continue
p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0]
if len(p)>0:
u = (f['mapping/cellResults/constituent']['Position'][p,c])
a = np.array(f[pa])
if len(a.shape) == 1:
a=a.reshape([a.shape[0],1])
dataset[p,:] = a[u,:]
p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0]
if len(p)>0:
u = (f['mapping/cellResults/materialpoint']['Position'][p.tolist()])
a = np.array(f[pa])
if len(a.shape) == 1:
a=a.reshape([a.shape[0],1])
dataset[p,:] = a[u,:]
if plain and dataset.dtype.names is not None:
return dataset.view(('float64',len(dataset.dtype.names)))
else:
return dataset
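# Usage sketch (illustrative only, not executed): read the deformation gradient 'F'
# for all material points of a single visible increment, given a DADF5 instance 'results':
#
#   results.set_by_increment(10,10)
#   location = results.get_dataset_location('F')   # list of HDF5 paths, one per active group
#   F = results.read_dataset(location,0)           # array of shape (Nmaterialpoints,3,3)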
def cell_coordinates(self):
"""Return initial coordinates of the cell centers."""
if self.structured:
delta = self.size/self.grid*0.5
z, y, x = np.meshgrid(np.linspace(delta[2],self.size[2]-delta[2],self.grid[2]),
np.linspace(delta[1],self.size[1]-delta[1],self.grid[1]),
np.linspace(delta[0],self.size[0]-delta[0],self.grid[0]),
)
return np.concatenate((x[:,:,:,None],y[:,:,:,None],z[:,:,:,None]),axis = 3).reshape([np.product(self.grid),3])
else:
with h5py.File(self.fname,'r') as f:
return f['geometry/x_c'][()]
def add_absolute(self,x):
"""
Add absolute value.
Parameters
----------
x : str
Label of the dataset containing a scalar, vector, or tensor.
"""
def _add_absolute(x):
return {
'data': np.abs(x['data']),
'label': '|{}|'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_abs v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_absolute,{'x':x})
def add_calculation(self,formula,label,unit='n/a',description=None,vectorized=True):
"""
Add result of a general formula.
Parameters
----------
formula : str
Formula, refer to datasets by #Label#.
label : str
Label of the dataset containing the result of the calculation.
unit : str, optional
Physical unit of the result.
description : str, optional
Human readable description of the result.
vectorized : bool, optional
Indicate whether the formula is written in vectorized form. Default is True.
"""
if vectorized is False:
raise NotImplementedError
def _add_calculation(**kwargs):
formula = kwargs['formula']
for d in re.findall(r'#(.*?)#',formula):
formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
return {
'data': eval(formula),
'label': kwargs['label'],
'meta': {
'Unit': kwargs['unit'],
'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
'Creator': 'dadf5.py:add_calculation v{}'.format(version)
}
}
dataset_mapping = {d:d for d in set(re.findall(r'#(.*?)#',formula))} # datasets used in the formula
args = {'formula':formula,'label':label,'unit':unit,'description':description}
self.__add_generic_pointwise(_add_calculation,dataset_mapping,args)
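# Usage sketch (illustrative only, not executed): derive a new dataset from an existing
# one via the #Label# formula syntax; label and description below are assumptions.
#
#   results.add_calculation('np.abs(#F#-np.eye(3))','|F-I|','1',
#                           'deviation of F from the identity')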
def add_Cauchy(self,F='F',P='P'):
"""
Add Cauchy stress calculated from 1. Piola-Kirchhoff stress and deformation gradient.
Parameters
----------
P : str, optional
Label of the dataset containing the 1. Piola-Kirchhoff stress. Default value is P.
F : str, optional
Label of the dataset containing the deformation gradient. Default value is F.
"""
def _add_Cauchy(F,P):
return {
'data': mechanics.Cauchy(F['data'],P['data']),
'label': 'sigma',
'meta': {
'Unit': P['meta']['Unit'],
'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],
P['meta']['Description'])+\
'and {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'dadf5.py:add_Cauchy v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_Cauchy,{'F':F,'P':P})
def add_determinant(self,x):
"""
Add the determinant of a tensor.
Parameters
----------
x : str
Label of the dataset containing a tensor.
"""
def _add_determinant(x):
return {
'data': np.linalg.det(x['data']),
'label': 'det({})'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Determinant of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_determinant v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_determinant,{'x':x})
def add_deviator(self,x):
"""
Add the deviatoric part of a tensor.
Parameters
----------
x : str
Label of the dataset containing a tensor.
"""
def _add_deviator(x):
if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])):
raise ValueError
return {
'data': mechanics.deviatoric_part(x['data']),
'label': 's_{}'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Deviator of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_deviator v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_deviator,{'x':x})
def add_eigenvalues(self,x):
"""
Add eigenvalues of symmetric tensor.
Parameters
----------
x : str
Label of the dataset containing a symmetric tensor.
"""
def _add_eigenvalue(x):
return {
'data': mechanics.eigenvalues(x['data']),
'label': 'lambda({})'.format(x['label']),
'meta' : {
'Unit': x['meta']['Unit'],
'Description': 'Eigenvalues of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_eigenvalues v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_eigenvalue,{'x':x})
def add_eigenvectors(self,x):
"""
Add eigenvectors of symmetric tensor.
Parameters
----------
x : str
Label of the dataset containing a symmetric tensor.
"""
def _add_eigenvector(x):
return {
'data': mechanics.eigenvectors(x['data']),
'label': 'v({})'.format(x['label']),
'meta' : {
'Unit': '1',
'Description': 'Eigenvectors of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_eigenvectors v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_eigenvector,{'x':x})
def add_IPFcolor(self,q,p):
"""
Add RGB color tuple of inverse pole figure (IPF) color.
Parameters
----------
q : str
Label of the dataset containing the orientation data as quaternions.
p : list of int
Pole direction as Miller indices.
"""
def _add_IPFcolor(q,p):
pole = np.array(p)
unit_pole = pole/np.linalg.norm(pole)
m = util.scale_to_coprime(pole)
colors = np.empty((len(q['data']),3),np.uint8)
lattice = q['meta']['Lattice']
for i,q in enumerate(q['data']):
o = Orientation(np.array([q['w'],q['x'],q['y'],q['z']]),lattice).reduced()
colors[i] = np.uint8(o.IPFcolor(unit_pole)*255)
return {
'data': colors,
'label': 'IPFcolor_{{{} {} {}>'.format(*m),
'meta' : {
'Unit': 'RGB (8bit)',
'Lattice': lattice,
'Description': 'Inverse Pole Figure (IPF) colors for direction/plane [{} {} {})'.format(*m),
'Creator': 'dadf5.py:addIPFcolor v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_IPFcolor,{'q':q},{'p':p})
def add_maximum_shear(self,x):
"""
Add maximum shear components of symmetric tensor.
Parameters
----------
x : str
Label of the dataset containing a symmetric tensor.
"""
def _add_maximum_shear(x):
return {
'data': mechanics.maximum_shear(x['data']),
'label': 'max_shear({})'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Maximum shear component of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_maximum_shear v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_maximum_shear,{'x':x})
def add_Mises(self,x):
"""
Add the equivalent Mises stress or strain of a symmetric tensor.
Parameters
----------
x : str
Label of the dataset containing a symmetric stress or strain tensor.
"""
def _add_Mises(x):
t = 'strain' if x['meta']['Unit'] == '1' else \
'stress'
return {
'data': mechanics.Mises_strain(x['data']) if t=='strain' else mechanics.Mises_stress(x['data']),
'label': '{}_vM'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Mises equivalent {} of {} ({})'.format(t,x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_Mises v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_Mises,{'x':x})
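# Usage sketch (illustrative only, not executed): a typical post-processing chain that
# computes the Cauchy stress from P and F and then its Mises equivalent, given a DADF5
# instance 'results' with datasets 'P' and 'F':
#
#   results.add_Cauchy()          # creates dataset 'sigma'
#   results.add_Mises('sigma')    # creates dataset 'sigma_vM'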
def add_norm(self,x,ord=None):
"""
Add the norm of vector or tensor.
Parameters
----------
x : str
Label of the dataset containing a vector or tensor.
ord : {non-zero int, inf, -inf, fro, nuc}, optional
Order of the norm. inf means numpy's inf object. For details refer to numpy.linalg.norm.
"""
def _add_norm(x,ord):
o = ord
if len(x['data'].shape) == 2:
axis = 1
t = 'vector'
if o is None: o = 2
elif len(x['data'].shape) == 3:
axis = (1,2)
t = 'tensor'
if o is None: o = 'fro'
else:
raise ValueError
return {
'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
'label': '|{}|_{}'.format(x['label'],o),
'meta': {
'Unit': x['meta']['Unit'],
'Description': '{}-Norm of {} {} ({})'.format(ord,t,x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_norm v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_norm,{'x':x},{'ord':ord})
def add_PK2(self,F='F',P='P'):
"""
Add 2. Piola-Kirchhoff stress calculated from 1. Piola-Kirchhoff stress and deformation gradient.
Parameters
----------
P : str, optional
Label of the dataset containing the 1. Piola-Kirchhoff stress. Default value is P.
F : str, optional
Label of the dataset containing the deformation gradient. Default value is F.
"""
def _add_PK2(F,P):
return {
'data': mechanics.PK2(F['data'],P['data']),
'label': 'S',
'meta': {
'Unit': P['meta']['Unit'],
'Description': '2. Piola-Kirchhoff stress calculated from {} ({}) '.format(P['label'],
P['meta']['Description'])+\
'and {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'dadf5.py:add_PK2 v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_PK2,{'F':F,'P':P})
def add_pole(self,q,p,polar=False):
"""
Add coordinates of stereographic projection of given pole in crystal frame.
Parameters
----------
q : str
Label of the dataset containing the crystallographic orientation as a quaternion.
p : numpy.array of shape (3)
Pole in crystal frame.
polar : bool, optional
Give pole in polar coordinates. Default is false.
"""
def _add_pole(q,p,polar):
pole = np.array(p)
unit_pole = pole/np.linalg.norm(pole)
m = util.scale_to_coprime(pole)
coords = np.empty((len(q['data']),2))
for i,q in enumerate(q['data']):
o = Rotation(np.array([q['w'],q['x'],q['y'],q['z']]))
rotatedPole = o*unit_pole # rotate pole according to crystal orientation
(x,y) = rotatedPole[0:2]/(1.+abs(unit_pole[2])) # stereographic projection
coords[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if polar else [x,y]
return {
'data': coords,
'label': 'p^{}_[{} {} {})'.format(u'rφ' if polar else 'xy',*m),
'meta' : {
'Unit': '1',
'Description': '{} coordinates of stereographic projection of pole (direction/plane) in crystal frame'\
.format('Polar' if polar else 'Cartesian'),
'Creator' : 'dadf5.py:add_pole v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_pole,{'q':q},{'p':p,'polar':polar})
def add_rotational_part(self,F):
"""
Add rotational part of a deformation gradient.
Parameters
----------
F : str
Label of the dataset containing a deformation gradient. Default value is F.
"""
def _add_rotational_part(F):
return {
'data': mechanics.rotational_part(F['data']),
'label': 'R({})'.format(F['label']),
'meta': {
'Unit': F['meta']['Unit'],
'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'dadf5.py:add_rotational_part v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_rotational_part,{'F':F})
def add_spherical(self,x):
"""
Add the spherical (hydrostatic) part of a tensor.
Parameters
----------
x : str
Label of the dataset containing a tensor.
"""
def _add_spherical(x):
if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])):
raise ValueError
return {
'data': mechanics.spherical_part(x['data']),
'label': 'p_{}'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Spherical component of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_spherical v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_spherical,{'x':x})
def add_strain_tensor(self,F='F',t='U',m=0):
"""
Add strain tensor calculated from a deformation gradient.
For details refer to damask.mechanics.strain_tensor
Parameters
----------
F : str, optional
Label of the dataset containing the deformation gradient. Default value is F.
t : {V, U}, optional
Type of the polar decomposition, U for right stretch tensor and V for left stretch tensor.
Default value is U.
m : float, optional
Order of the strain calculation. Default value is 0.0.
"""
def _add_strain_tensor(F,t,m):
return {
'data': mechanics.strain_tensor(F['data'],t,m),
'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
'meta': {
'Unit': F['meta']['Unit'],
'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'dadf5.py:add_strain_tensor v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_strain_tensor,{'F':F},{'t':t,'m':m})
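# Usage sketch (illustrative only, not executed): logarithmic strain based on the left
# stretch tensor (t='V', m=0), stored as 'epsilon_V^0(F)', followed by its Mises equivalent:
#
#   results.add_strain_tensor('F','V',0)
#   results.add_Mises('epsilon_V^0(F)')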
def add_stretch_tensor(self,F='F',t='U'):
"""
Add stretch tensor calculated from a deformation gradient.
Parameters
----------
F : str, optional
Label of the dataset containing the deformation gradient. Default value is F.
t : {V, U}, optional
Type of the polar decomposition, U for right stretch tensor and V for left stretch tensor.
Default value is U.
"""
def _add_stretch_tensor(F,t):
return {
'data': mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
'label': '{}({})'.format(t,F['label']),
'meta': {
'Unit': F['meta']['Unit'],
'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
F['label'],F['meta']['Description']),
'Creator': 'dadf5.py:add_stretch_tensor v{}'.format(version)
}
}
self.__add_generic_pointwise(_add_stretch_tensor,{'F':F},{'t':t})
def __add_generic_pointwise(self,func,dataset_mapping,args={}):
"""
General function to add pointwise data.
Parameters
----------
func : function
Function that calculates a new dataset from one or more datasets per HDF5 group.
dataset_mapping : dictionary
Mapping of argument names of func (keys) to labels of the datasets in the HDF5 file (values) that are passed to it.
args : dictionary, optional
Any extra arguments passed to func.
"""
def job(args):
"""Call function with input data + extra arguments, returns results + group."""
args['results'].put({**args['func'](**args['in']),'group':args['group']})
env = Environment()
N_threads = int(env.options['DAMASK_NUM_THREADS'])
N_threads //=N_threads # disable for the moment
results = Queue(N_threads)
pool = util.ThreadPool(N_threads)
N_added = N_threads + 1
todo = []
# ToDo: It would be more memory efficient to read only from file when required, i.e. do it in pool.add_task
for group in self.groups_with_datasets(dataset_mapping.values()):
with h5py.File(self.fname,'r') as f:
datasets_in = {}
for arg,label in dataset_mapping.items():
loc = f[group+'/'+label]
data = loc[()]
meta = {k:loc.attrs[k].decode() for k in loc.attrs.keys()}
datasets_in[arg] = {'data': data, 'meta': meta, 'label': label}
todo.append({'in':{**datasets_in,**args},'func':func,'group':group,'results':results})
pool.map(job, todo[:N_added]) # initialize
N_not_calculated = len(todo)
while N_not_calculated > 0:
result = results.get()
with h5py.File(self.fname,'a') as f: # write to file
dataset_out = f[result['group']].create_dataset(result['label'],data=result['data'])
for k in result['meta'].keys():
dataset_out.attrs[k] = result['meta'][k].encode()
N_not_calculated-=1
if N_added < len(todo): # add more jobs
pool.add_task(job,todo[N_added])
N_added +=1
pool.wait_completion()
def to_vtk(self,labels,mode='Cell'):
"""
Export to vtk cell/point data.
Parameters
----------
labels : str or list of str
Labels of the datasets to be exported.
mode : str, either 'Cell' or 'Point'
Export in cell format or point format.
Default value is 'Cell'.
"""
if mode=='Cell':
if self.structured:
coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
for dim in [0,1,2]:
for c in np.linspace(0,self.size[dim],1+self.grid[dim]):
coordArray[dim].InsertNextValue(c)
vtk_geom = vtk.vtkRectilinearGrid()
vtk_geom.SetDimensions(*(self.grid+1))
vtk_geom.SetXCoordinates(coordArray[0])
vtk_geom.SetYCoordinates(coordArray[1])
vtk_geom.SetZCoordinates(coordArray[2])
else:
nodes = vtk.vtkPoints()
with h5py.File(self.fname,'r') as f:
nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True))
vtk_geom = vtk.vtkUnstructuredGrid()
vtk_geom.SetPoints(nodes)
vtk_geom.Allocate(f['/geometry/T_c'].shape[0])
if self.version_major == 0 and self.version_minor <= 5:
vtk_type = vtk.VTK_HEXAHEDRON
n_nodes = 8
else:
if f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TRIANGLE':
vtk_type = vtk.VTK_TRIANGLE
n_nodes = 3
elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'QUAD':
vtk_type = vtk.VTK_QUAD
n_nodes = 4
elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TETRA': # not tested
vtk_type = vtk.VTK_TETRA
n_nodes = 4
elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'HEXAHEDRON':
vtk_type = vtk.VTK_HEXAHEDRON
n_nodes = 8
for i in f['/geometry/T_c']:
vtk_geom.InsertNextCell(vtk_type,n_nodes,i-1)
elif mode == 'Point':
Points = vtk.vtkPoints()
Vertices = vtk.vtkCellArray()
for c in self.cell_coordinates():
pointID = Points.InsertNextPoint(c)
Vertices.InsertNextCell(1)
Vertices.InsertCellPoint(pointID)
vtk_geom = vtk.vtkPolyData()
vtk_geom.SetPoints(Points)
vtk_geom.SetVerts(Vertices)
vtk_geom.Modified()
N_digits = int(np.floor(np.log10(int(self.increments[-1][3:]))))+1
for i,inc in enumerate(self.iter_visible('increments')):
vtk_data = []
materialpoints_backup = self.selection['materialpoints'].copy()
self.set_visible('materialpoints',False)
for label in (labels if isinstance(labels,list) else [labels]):
for p in self.iter_visible('con_physics'):
if p != 'generic':
for c in self.iter_visible('constituents'):
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: hard coded 1!
vtk_geom.GetCellData().AddArray(vtk_data[-1])
else:
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
deep=True,array_type= vtk.VTK_DOUBLE))
ph_name = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))') # identify phase name
dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1]) # removing phase name
vtk_data[-1].SetName(dset_name)
vtk_geom.GetCellData().AddArray(vtk_data[-1])
self.set_visible('materialpoints',materialpoints_backup)
constituents_backup = self.selection['constituents'].copy()
self.set_visible('constituents',False)
for label in (labels if isinstance(labels,list) else [labels]):
for p in self.iter_visible('mat_physics'):
if p != 'generic':
for m in self.iter_visible('materialpoints'):
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: why 1_?
vtk_geom.GetCellData().AddArray(vtk_data[-1])
else:
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
vtk_geom.GetCellData().AddArray(vtk_data[-1])
self.set_visible('constituents',constituents_backup)
if mode=='Cell':
writer = vtk.vtkXMLRectilinearGridWriter() if self.structured else \
vtk.vtkXMLUnstructuredGridWriter()
x = self.get_dataset_location('u_n')
vtk_data.append(numpy_support.numpy_to_vtk(num_array=self.read_dataset(x,0),
deep=True,array_type=vtk.VTK_DOUBLE))
vtk_data[-1].SetName('u')
vtk_geom.GetPointData().AddArray(vtk_data[-1])
elif mode == 'Point':
writer = vtk.vtkXMLPolyDataWriter()
file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
inc[3:].zfill(N_digits),
writer.GetDefaultFileExtension())
writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
writer.SetFileName(file_out)
writer.SetInputData(vtk_geom)
writer.Write()
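# Usage sketch (illustrative only, not executed): write one VTK file per visible increment,
# containing the displacement field plus the selected datasets, given a DADF5 instance
# 'results' in which 'sigma' has already been added (an assumption):
#
#   results.to_vtk(['F','sigma'],mode='Cell')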