diff --git a/PRIVATE b/PRIVATE
index ec615d249..6db5f4666 160000
--- a/PRIVATE
+++ b/PRIVATE
@@ -1 +1 @@
-Subproject commit ec615d249d39e5d01446b01ab9a5b7e7601340ad
+Subproject commit 6db5f4666fc651b4de3b44ceaed3f2b848170ac9
diff --git a/processing/post/addCauchy.py b/processing/post/addCauchy.py
index afc5a57be..8bbc7fb0e 100755
--- a/processing/post/addCauchy.py
+++ b/processing/post/addCauchy.py
@@ -42,8 +42,8 @@ for name in filenames:
     table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     table.add('Cauchy',
-              damask.mechanics.Cauchy(table.get(options.defgrad).reshape(-1,3,3),
-                                      table.get(options.stress ).reshape(-1,3,3)).reshape(-1,9),
+              damask.mechanics.Cauchy(table.get(options.stress ).reshape(-1,3,3),
+                                      table.get(options.defgrad).reshape(-1,3,3)).reshape(-1,9),
               scriptID+' '+' '.join(sys.argv[1:]))
 
     table.to_ASCII(sys.stdout if name is None else name)
diff --git a/processing/post/addPK2.py b/processing/post/addPK2.py
index 185160d79..2894a5a90 100755
--- a/processing/post/addPK2.py
+++ b/processing/post/addPK2.py
@@ -43,8 +43,8 @@ for name in filenames:
     table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
     table.add('S',
-              damask.mechanics.PK2(table.get(options.defgrad).reshape(-1,3,3),
-                                   table.get(options.stress ).reshape(-1,3,3)).reshape(-1,9),
+              damask.mechanics.PK2(table.get(options.stress ).reshape(-1,3,3),
+                                   table.get(options.defgrad).reshape(-1,3,3)).reshape(-1,9),
               scriptID+' '+' '.join(sys.argv[1:]))
 
     table.to_ASCII(sys.stdout if name is None else name)
diff --git a/processing/post/addSpectralDecomposition.py b/processing/post/addSpectralDecomposition.py
index c711c6a45..5909ca147 100755
--- a/processing/post/addSpectralDecomposition.py
+++ b/processing/post/addSpectralDecomposition.py
@@ -2,6 +2,7 @@
 
 import os
 import sys
+from io import StringIO
 from optparse import OptionParser
 
 import numpy as np
@@ -33,69 +34,27 @@ parser.add_option('--no-check',
 parser.set_defaults(rh = True,
                    )
 
+
 (options,filenames) = parser.parse_args()
-
-if options.tensor is None:
-  parser.error('no data column specified.')
-
-# --- loop over input files -------------------------------------------------------------------------
-
 if filenames == []: filenames = [None]
 
 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
-  except: continue
-  damask.util.report(scriptName,name)
+    damask.util.report(scriptName,name)
 
-# ------------------------------------------ read header ------------------------------------------
+    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
 
-  table.head_read()
+    for tensor in options.tensor:
+        t = table.get(tensor).reshape(-1,3,3)
+        (u,v) = np.linalg.eigh(damask.mechanics.symmetric(t))
+        if options.rh: v[np.linalg.det(v) < 0.0,:,2] *= -1.0
 
-# ------------------------------------------ assemble header 1 ------------------------------------
+        for i,o in enumerate(['Min','Mid','Max']):
+            table.add('eigval{}({})'.format(o,tensor),u[:,i],
+                      scriptID+' '+' '.join(sys.argv[1:]))
 
-  items = {
-           'tensor': {'dim': 9, 'shape': [3,3], 'labels':options.tensor, 'column': []},
-          }
-  errors  = []
-  remarks = []
-
-  for type, data in items.items():
-    for what in data['labels']:
-      dim = table.label_dimension(what)
-      if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type))
-      else:
-        items[type]['column'].append(table.label_index(what))
-        for order in ['Min','Mid','Max']:
-          table.labels_append(['eigval{}({})'.format(order,what)])                                  # extend ASCII header with new labels
-        for order in ['Min','Mid','Max']:
-          table.labels_append(['{}_eigvec{}({})'.format(i+1,order,what) for i in range(3)])         # extend ASCII header with new labels
-
-  if remarks != []: damask.util.croak(remarks)
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-
-# ------------------------------------------ assemble header 2 ------------------------------------
-
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.head_write()
-
-# ------------------------------------------ process data -----------------------------------------
-
-  outputAlive = True
-  while outputAlive and table.data_read():                                                          # read next data line of ASCII table
-    for type, data in items.items():
-      for column in data['column']:
-        (u,v) = np.linalg.eigh(np.array(list(map(float,table.data[column:column+data['dim']]))).reshape(data['shape']))
-        if options.rh and np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0 : v[:,2] *= -1.0           # ensure right-handed eigenvector basis
-        table.data_append(list(u))                                                                  # vector of max,mid,min eigval
-        table.data_append(list(v.transpose().reshape(data['dim'])))                                 # 3x3=9 combo vector of max,mid,min eigvec coordinates
-    outputAlive = table.data_write()                                                                # output processed line in accordance with column labeling
-
-# ------------------------------------------ output finalization -----------------------------------
-
-  table.close()                                                                                     # close input ASCII table (works for stdin)
+        for i,o in enumerate(['Min','Mid','Max']):
+            table.add('eigvec{}({})'.format(o,tensor),v[:,:,i],
+                      scriptID+' '+' '.join(sys.argv[1:]))
+
+    table.to_ASCII(sys.stdout if name is None else name)
diff --git a/python/damask/__init__.py b/python/damask/__init__.py
index 30fb37ced..2961ba6e3 100644
--- a/python/damask/__init__.py
+++ b/python/damask/__init__.py
@@ -15,6 +15,7 @@ from .config import Material # noqa
 from .colormaps import Colormap, Color # noqa
 from .orientation import Symmetry, Lattice, Rotation, Orientation # noqa
 from .dadf5 import DADF5 # noqa
+from .dadf5 import DADF5 as Result # noqa
 from .geom import Geom # noqa
 from .solver import Solver # noqa
diff --git a/python/damask/dadf5.py b/python/damask/dadf5.py
index 819b5603e..ccbd3be03 100644
--- a/python/damask/dadf5.py
+++ b/python/damask/dadf5.py
@@ -1,4 +1,4 @@
-from queue import Queue 
+from queue import Queue
 import re
 import glob
 import os
@@ -11,998 +11,1165 @@ import numpy as np
 from . import util
 from . import version
 from . import mechanics
+from . import Rotation
+from . import Orientation
+from . import Environment
+from . import grid_filters
 
-# ------------------------------------------------------------------
 class DADF5():
-  """
-  Read and write to DADF5 files.
-
-  DADF5 files contain DAMASK results.
-  """
-
-# ------------------------------------------------------------------
-  def __init__(self,fname):
-    """
-    Opens an existing DADF5 file.
-
-    Parameters
-    ----------
-    fname : str
-      name of the DADF5 file to be openend.
-
+    """
+    Read and write to DADF5 files.
+
+    DADF5 files contain DAMASK results.
""" - with h5py.File(fname,'r') as f: - - try: - self.version_major = f.attrs['DADF5_version_major'] - self.version_minor = f.attrs['DADF5_version_minor'] - except KeyError: - self.version_major = f.attrs['DADF5-major'] - self.version_minor = f.attrs['DADF5-minor'] - if self.version_major != 0 or not 2 <= self.version_minor <= 6: - raise TypeError('Unsupported DADF5 version {}.{} '.format(f.attrs['DADF5_version_major'], - f.attrs['DADF5_version_minor'])) - - self.structured = 'grid' in f['geometry'].attrs.keys() - - if self.structured: - self.grid = f['geometry'].attrs['grid'] - self.size = f['geometry'].attrs['size'] - if self.version_major == 0 and self.version_minor >= 5: - self.origin = f['geometry'].attrs['origin'] + def __init__(self,fname): + """ + Opens an existing DADF5 file. - - r=re.compile('inc[0-9]+') - increments_unsorted = {int(i[3:]):i for i in f.keys() if r.match(i)} - self.increments = [increments_unsorted[i] for i in sorted(increments_unsorted)] - self.times = [round(f[i].attrs['time/s'],12) for i in self.increments] + Parameters + ---------- + fname : str + name of the DADF5 file to be openend. - self.Nmaterialpoints, self.Nconstituents = np.shape(f['mapping/cellResults/constituent']) - self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])] - self.constituents = [c.decode() for c in np.unique(f['mapping/cellResults/constituent'] ['Name'])] + """ + with h5py.File(fname,'r') as f: - self.con_physics = [] - for c in self.constituents: - self.con_physics += f['/'.join([self.increments[0],'constituent',c])].keys() - self.con_physics = list(set(self.con_physics)) # make unique + try: + self.version_major = f.attrs['DADF5_version_major'] + self.version_minor = f.attrs['DADF5_version_minor'] + except KeyError: + self.version_major = f.attrs['DADF5-major'] + self.version_minor = f.attrs['DADF5-minor'] - self.mat_physics = [] - for m in self.materialpoints: - self.mat_physics += f['/'.join([self.increments[0],'materialpoint',m])].keys() - self.mat_physics = list(set(self.mat_physics)) # make unique + if self.version_major != 0 or not 2 <= self.version_minor <= 6: + raise TypeError('Unsupported DADF5 version {}.{} '.format(f.attrs['DADF5_version_major'], + f.attrs['DADF5_version_minor'])) - self.visible= {'increments': self.increments, - 'constituents': self.constituents, - 'materialpoints': self.materialpoints, - 'constituent': range(self.Nconstituents), # ToDo: stupid naming - 'con_physics': self.con_physics, - 'mat_physics': self.mat_physics} - - self.fname = fname + self.structured = 'grid' in f['geometry'].attrs.keys() + + if self.structured: + self.grid = f['geometry'].attrs['grid'] + self.size = f['geometry'].attrs['size'] + if self.version_major == 0 and self.version_minor >= 5: + self.origin = f['geometry'].attrs['origin'] - def __manage_visible(self,datasets,what,action): - """ - Manages the visibility of the groups. - - Parameters - ---------- - datasets : list of str or Boolean - name of datasets as list, supports ? and * wildcards. 
- True is equivalent to [*], False is equivalent to [] - what : str - attribute to change (must be in self.visible) - action : str - select from 'set', 'add', and 'del' + r=re.compile('inc[0-9]+') + increments_unsorted = {int(i[3:]):i for i in f.keys() if r.match(i)} + self.increments = [increments_unsorted[i] for i in sorted(increments_unsorted)] + self.times = [round(f[i].attrs['time/s'],12) for i in self.increments] - """ - # allow True/False and string arguments - if datasets is True: - datasets = ['*'] - elif datasets is False: - datasets = [] - choice = [datasets] if isinstance(datasets,str) else datasets - - valid = [e for e_ in [glob.fnmatch.filter(getattr(self,what),s) for s in choice] for e in e_] - existing = set(self.visible[what]) + self.Nmaterialpoints, self.Nconstituents = np.shape(f['mapping/cellResults/constituent']) + self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])] + self.constituents = [c.decode() for c in np.unique(f['mapping/cellResults/constituent'] ['Name'])] - if action == 'set': - self.visible[what] = valid - elif action == 'add': - self.visible[what] = list(existing.union(valid)) - elif action == 'del': - self.visible[what] = list(existing.difference_update(valid)) - - - def __time_to_inc(self,start,end): - selected = [] - for i,time in enumerate(self.times): - if start <= time <= end: - selected.append(self.increments[i]) - return selected + self.con_physics = [] + for c in self.constituents: + self.con_physics += f['/'.join([self.increments[0],'constituent',c])].keys() + self.con_physics = list(set(self.con_physics)) # make unique + + self.mat_physics = [] + for m in self.materialpoints: + self.mat_physics += f['/'.join([self.increments[0],'materialpoint',m])].keys() + self.mat_physics = list(set(self.mat_physics)) # make unique + + self.selection= {'increments': self.increments, + 'constituents': self.constituents, + 'materialpoints': self.materialpoints, + 'con_physics': self.con_physics, + 'mat_physics': self.mat_physics} + + self.fname = fname - def set_by_time(self,start,end): - """ - Set active increments based on start and end time. + def __manage_visible(self,datasets,what,action): + """ + Manages the visibility of the groups. - Parameters - ---------- - start : float - start time (included) - end : float - end time (included) + Parameters + ---------- + datasets : list of str or Boolean + name of datasets as list, supports ? and * wildcards. 
+ True is equivalent to [*], False is equivalent to [] + what : str + attribute to change (must be in self.selection) + action : str + select from 'set', 'add', and 'del' - """ - self.__manage_visible(self.__time_to_inc(start,end),'increments','set') + """ + # allow True/False and string arguments + if datasets is True: + datasets = ['*'] + elif datasets is False: + datasets = [] + choice = [datasets] if isinstance(datasets,str) else datasets + + valid = [e for e_ in [glob.fnmatch.filter(getattr(self,what),s) for s in choice] for e in e_] + existing = set(self.selection[what]) + + if action == 'set': + self.selection[what] = valid + elif action == 'add': + add=existing.union(valid) + add_sorted=sorted(add, key=lambda x: int("".join([i for i in x if i.isdigit()]))) + self.selection[what] = add_sorted + elif action == 'del': + diff=existing.difference(valid) + diff_sorted=sorted(diff, key=lambda x: int("".join([i for i in x if i.isdigit()]))) + self.selection[what] = diff_sorted + + def __time_to_inc(self,start,end): + selected = [] + for i,time in enumerate(self.times): + if start <= time <= end: + selected.append(self.increments[i]) + return selected - def add_by_time(self,start,end): - """ - Add to active increments based on start and end time. + def set_by_time(self,start,end): + """ + Set active increments based on start and end time. - Parameters - ---------- - start : float - start time (included) - end : float - end time (included) + Parameters + ---------- + start : float + start time (included) + end : float + end time (included) - """ - self.__manage_visible(self.__time_to_inc(start,end),'increments','add') + """ + self.__manage_visible(self.__time_to_inc(start,end),'increments','set') - def del_by_time(self,start,end): - """ - Delete from active increments based on start and end time. + def add_by_time(self,start,end): + """ + Add to active increments based on start and end time. - Parameters - ---------- - start : float - start time (included) - end : float - end time (included) + Parameters + ---------- + start : float + start time (included) + end : float + end time (included) - """ - self.__manage_visible(self.__time_to_inc(start,end),'increments','del') - - - def set_by_increment(self,start,end): - """ - Set active time increments based on start and end increment. - - Parameters - ---------- - start : int - start increment (included) - end : int - end increment (included) - - """ - if self.version_minor >= 4: - self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','set') - else: - self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','set') + """ + self.__manage_visible(self.__time_to_inc(start,end),'increments','add') - def add_by_increment(self,start,end): - """ - Add to active time increments based on start and end increment. + def del_by_time(self,start,end): + """ + Delete from active increments based on start and end time. 
-    Parameters
-    ----------
-    start : float
-      start time (included)
-    end : float
-      end time (included)
+        Parameters
+        ----------
+        start : float
+          start time (included)
+        end : float
+          end time (included)
-    """
-    self.__manage_visible(self.__time_to_inc(start,end),'increments','del')
+        """
+        self.__manage_visible(self.__time_to_inc(start,end),'increments','add')
-
-
-  def set_by_increment(self,start,end):
-    """
-    Set active time increments based on start and end increment.
-
-    Parameters
-    ----------
-    start : int
-      start increment (included)
-    end : int
-      end increment (included)
-
-    """
-    if self.version_minor >= 4:
-      self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','set')
-    else:
-      self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','set')
+
+
+    def del_by_time(self,start,end):
+        """
+        Delete from active increments based on start and end time.
+
+        Parameters
+        ----------
+        start : float
+          start time (included)
+        end : float
+          end time (included)
+
+        """
+        self.__manage_visible(self.__time_to_inc(start,end),'increments','del')
-
-
-  def add_by_increment(self,start,end):
-    """
-    Add to active time increments based on start and end increment.
-
-    Parameters
-    ----------
-    start : int
-      start increment (included)
-    end : int
-      end increment (included)
-
-    """
-    if self.version_minor >= 4:
-      self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','add')
-    else:
-      self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','add')
+
+
+    def set_by_increment(self,start,end):
+        """
+        Set active time increments based on start and end increment.
+
+        Parameters
+        ----------
+        start : int
+          start increment (included)
+        end : int
+          end increment (included)
+
+        """
+        if self.version_minor >= 4:
+            self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','set')
+        else:
+            self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','set')
-
-
-  def del_by_increment(self,start,end):
-    """
-    Delet from active time increments based on start and end increment.
-
-    Parameters
-    ----------
-    start : int
-      start increment (included)
-    end : int
-      end increment (included)
-
-    """
-    if self.version_minor >= 4:
-      self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','del')
-    else:
-      self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','del')
+
+
+    def add_by_increment(self,start,end):
+        """
+        Add to active time increments based on start and end increment.
+
+        Parameters
+        ----------
+        start : int
+          start increment (included)
+        end : int
+          end increment (included)
+
+        """
+        if self.version_minor >= 4:
+            self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','add')
+        else:
+            self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','add')
-
-
-  def iter_visible(self,what):
-    """
-    Iterate over visible items by setting each one visible.
-
-    Parameters
-    ----------
-    what : str
-      attribute to change (must be in self.visible)
-
-    """
-    datasets = self.visible[what]
-    last_datasets = datasets.copy()
-    for dataset in datasets:
-      if last_datasets != self.visible[what]:
-        self.__manage_visible(datasets,what,'set')
-        raise Exception
-      self.__manage_visible(dataset,what,'set')
-      last_datasets = self.visible[what]
-      yield dataset
-    self.__manage_visible(datasets,what,'set')
+
+
+    def del_by_increment(self,start,end):
+        """
+        Delete from active time increments based on start and end increment.
+
+        Parameters
+        ----------
+        start : int
+          start increment (included)
+        end : int
+          end increment (included)
+
+        """
+        if self.version_minor >= 4:
+            self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','del')
+        else:
+            self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','del')
+
+
+    def iter_visible(self,what):
+        """
+        Iterate over visible items by setting each one visible.
+
+        Parameters
+        ----------
+        what : str
+          attribute to change (must be in self.selection)
+
+        """
+        datasets = self.selection[what]
+        last_datasets = datasets.copy()
+        for dataset in datasets:
+            if last_datasets != self.selection[what]:
+                self.__manage_visible(datasets,what,'set')
+                raise Exception
+            self.__manage_visible(dataset,what,'set')
+            last_datasets = self.selection[what]
+            yield dataset
+        self.__manage_visible(datasets,what,'set')
-
-
-  def set_visible(self,what,datasets):
-    """
-    Set active groups.
-
-    Parameters
-    ----------
-    datasets : list of str or Boolean
-      name of datasets as list, supports ? and * wildcards.
-      True is equivalent to [*], False is equivalent to []
-    what : str
-      attribute to change (must be in self.visible)
-
-    """
-    self.__manage_visible(datasets,what,'set')
+
+
+    def set_visible(self,what,datasets):
+        """
+        Set active groups.
+
+        Parameters
+        ----------
+        datasets : list of str or Boolean
+          name of datasets as list, supports ? and * wildcards.
+          True is equivalent to [*], False is equivalent to []
+        what : str
+          attribute to change (must be in self.selection)
+
+        """
+        self.__manage_visible(datasets,what,'set')
-
-
-  def add_visible(self,what,datasets):
-    """
-    Add to active groups.
-
-    Parameters
-    ----------
-    datasets : list of str or Boolean
-      name of datasets as list, supports ? and * wildcards.
-      True is equivalent to [*], False is equivalent to []
-    what : str
-      attribute to change (must be in self.visible)
-
-    """
-    self.__manage_visible(datasets,what,'add')
+
+
+    def add_visible(self,what,datasets):
+        """
+        Add to active groups.
+
+        Parameters
+        ----------
+        datasets : list of str or Boolean
+          name of datasets as list, supports ? and * wildcards.
+          True is equivalent to [*], False is equivalent to []
+        what : str
+          attribute to change (must be in self.selection)
+
+        """
+        self.__manage_visible(datasets,what,'add')
-
-
-  def del_visible(self,what,datasets):
-    """
-    Delete from active groupse.
-
-    Parameters
-    ----------
-    datasets : list of str or Boolean
-      name of datasets as list, supports ? and * wildcards.
-      True is equivalent to [*], False is equivalent to []
-    what : str
-      attribute to change (must be in self.visible)
-
-    """
-    self.__manage_visible(datasets,what,'del')
-
-
-  def groups_with_datasets(self,datasets):
-    """
-    Get groups that contain all requested datasets.
-
-    Only groups within inc?????/constituent/*_*/* inc?????/materialpoint/*_*/*
-    are considered as they contain the data.
-    Single strings will be treated as list with one entry.
-
-    Wild card matching is allowed, but the number of arguments need to fit.
-
-    Parameters
-    ----------
-      datasets : iterable or str or boolean
-
-    Examples
-    --------
-      datasets = False matches no group
-      datasets = True matches all groups
-      datasets = ['F','P'] matches a group with ['F','P','sigma']
-      datasets = ['*','P'] matches a group with ['F','P']
-      datasets = ['*'] does not match a group with ['F','P','sigma']
-      datasets = ['*','*'] does not match a group with ['F','P','sigma']
-      datasets = ['*','*','*'] matches a group with ['F','P','sigma']
-
-    """
-    if datasets is False: return []
-    sets = [datasets] if isinstance(datasets,str) else datasets
-
-    groups = []
-
-    with h5py.File(self.fname,'r') as f:
-      for i in self.iter_visible('increments'):
-        for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
-          for oo in self.iter_visible(o):
-            for pp in self.iter_visible(p):
-              group = '/'.join([i,o[:-1],oo,pp])                                                    # o[:-1]: plural/singular issue
-              if sets is True:
-                groups.append(group)
-              else:
-                match = [e for e_ in [glob.fnmatch.filter(f[group].keys(),s) for s in sets] for e in e_]
-                if len(set(match)) == len(sets) : groups.append(group)
-    return groups
+
+
+    def del_visible(self,what,datasets):
+        """
+        Delete from active groups.
+
+        Parameters
+        ----------
+        datasets : list of str or Boolean
+          name of datasets as list, supports ? and * wildcards.
+          True is equivalent to [*], False is equivalent to []
+        what : str
+          attribute to change (must be in self.selection)
+
+        """
+        self.__manage_visible(datasets,what,'del')
-
-
-  def list_data(self):
-    """Return information on all active datasets in the file."""
-    message = ''
-    with h5py.File(self.fname,'r') as f:
-      for i in self.iter_visible('increments'):
-        message+='\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
-        for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
-          for oo in self.iter_visible(o):
-            message+='  {}\n'.format(oo)
-            for pp in self.iter_visible(p):
-              message+='    {}\n'.format(pp)
-              group = '/'.join([i,o[:-1],oo,pp])                                                    # o[:-1]: plural/singular issue
-              for d in f[group].keys():
-                try:
-                  dataset = f['/'.join([group,d])]
-                  message+='      {} / ({}): {}\n'.format(d,dataset.attrs['Unit'].decode(),dataset.attrs['Description'].decode())
-                except KeyError:
-                  pass
-    return message
+
+
+    def groups_with_datasets(self,datasets):
+        """
+        Get groups that contain all requested datasets.
+
+        Only groups within inc?????/constituent/*_*/* inc?????/materialpoint/*_*/*
+        are considered as they contain the data.
+        Single strings will be treated as list with one entry.
+
+        Wildcard matching is allowed, but the number of arguments needs to fit.
+
+        Parameters
+        ----------
+          datasets : iterable or str or boolean
-
-
-  def get_dataset_location(self,label):
-    """Return the location of all active datasets with given label."""
-    path = []
-    with h5py.File(self.fname,'r') as f:
-      for i in self.iter_visible('increments'):
-        k = '/'.join([i,'geometry',label])
-        try:
-          f[k]
-          path.append(k)
-        except KeyError as e:
-          pass
-        for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
-          for oo in self.iter_visible(o):
-            for pp in self.iter_visible(p):
-              k = '/'.join([i,o[:-1],oo,pp,label])
-              try:
-                f[k]
-                path.append(k)
-              except KeyError as e:
-                pass
-    return path
-
-
-  def get_constituent_ID(self,c=0):
-    """Pointwise constituent ID."""
-    with h5py.File(self.fname,'r') as f:
-      names = f['/mapping/cellResults/constituent']['Name'][:,c].astype('str')
-    return np.array([int(n.split('_')[0]) for n in names.tolist()],dtype=np.int32)
-
-
-  def get_crystal_structure(self):                                                                  # ToDo: extension to multi constituents/phase
-    """Info about the crystal structure."""
-    with h5py.File(self.fname,'r') as f:
-      return f[self.get_dataset_location('orientation')[0]].attrs['Lattice'].astype('str')          # np.bytes_ to string
+
+        Examples
+        --------
+          datasets = False matches no group
+          datasets = True matches all groups
+          datasets = ['F','P'] matches a group with ['F','P','sigma']
+          datasets = ['*','P'] matches a group with ['F','P']
+          datasets = ['*'] does not match a group with ['F','P','sigma']
+          datasets = ['*','*'] does not match a group with ['F','P','sigma']
+          datasets = ['*','*','*'] matches a group with ['F','P','sigma']
+
+        """
+        if datasets is False: return []
+        sets = [datasets] if isinstance(datasets,str) else datasets
+
+        groups = []
-
-
-  def read_dataset(self,path,c=0,plain=False):
-    """
-    Dataset for all points/cells.
-
-    If more than one path is given, the dataset is composed of the individual contributions.
- """ - with h5py.File(self.fname,'r') as f: - shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:] - if len(shape) == 1: shape = shape +(1,) - dataset = np.full(shape,np.nan,dtype=np.dtype(f[path[0]])) - for pa in path: - label = pa.split('/')[2] - - if (pa.split('/')[1] == 'geometry'): - dataset = np.array(f[pa]) - continue - - p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0] - if len(p)>0: - u = (f['mapping/cellResults/constituent']['Position'][p,c]) - a = np.array(f[pa]) - if len(a.shape) == 1: - a=a.reshape([a.shape[0],1]) - dataset[p,:] = a[u,:] + Examples + -------- + datasets = False matches no group + datasets = True matches all groups + datasets = ['F','P'] matches a group with ['F','P','sigma'] + datasets = ['*','P'] matches a group with ['F','P'] + datasets = ['*'] does not match a group with ['F','P','sigma'] + datasets = ['*','*'] does not match a group with ['F','P','sigma'] + datasets = ['*','*','*'] matches a group with ['F','P','sigma'] - p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0] - if len(p)>0: - u = (f['mapping/cellResults/materialpoint']['Position'][p.tolist()]) - a = np.array(f[pa]) - if len(a.shape) == 1: - a=a.reshape([a.shape[0],1]) - dataset[p,:] = a[u,:] - - if plain and dataset.dtype.names is not None: - return dataset.view(('float64',len(dataset.dtype.names))) - else: - return dataset + """ + if datasets is False: return [] + sets = [datasets] if isinstance(datasets,str) else datasets + groups = [] - def cell_coordinates(self): - """Return initial coordinates of the cell centers.""" - if self.structured: - delta = self.size/self.grid*0.5 - z, y, x = np.meshgrid(np.linspace(delta[2],self.size[2]-delta[2],self.grid[2]), - np.linspace(delta[1],self.size[1]-delta[1],self.grid[1]), - np.linspace(delta[0],self.size[0]-delta[0],self.grid[0]), - ) - return np.concatenate((x[:,:,:,None],y[:,:,:,None],z[:,:,:,None]),axis = 3).reshape([np.product(self.grid),3]) - else: - with h5py.File(self.fname,'r') as f: - return f['geometry/x_c'][()] - - - def add_absolute(self,x): - """ - Add absolute value. - - Parameters - ---------- - x : str - Label of the dataset containing a scalar, vector, or tensor. - - """ - def __add_absolute(x): - - return { - 'data': np.abs(x['data']), - 'label': '|{}|'.format(x['label']), - 'meta': { - 'Unit': x['meta']['Unit'], - 'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']), - 'Creator': 'dadf5.py:add_abs v{}'.format(version) - } - } - - requested = [{'label':x,'arg':'x'}] - - self.__add_generic_pointwise(__add_absolute,requested) - - - def add_calculation(self,formula,label,unit='n/a',description=None,vectorized=True): - """ - Add result of a general formula. - - Parameters - ---------- - formula : str - Formula, refer to datasets by ‘#Label#‘. - label : str - Label of the dataset containing the result of the calculation. - unit : str, optional - Physical unit of the result. - description : str, optional - Human readable description of the result. - vectorized : bool, optional - Indicate whether the formula is written in vectorized form. Default is ‘True’. 
- - """ - if vectorized is False: - raise NotImplementedError - - def __add_calculation(**kwargs): - - formula = kwargs['formula'] - for d in re.findall(r'#(.*?)#',formula): - formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d)) - - return { - 'data': eval(formula), - 'label': kwargs['label'], - 'meta': { - 'Unit': kwargs['unit'], - 'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']), - 'Creator': 'dadf5.py:add_calculation v{}'.format(version) - } - } - - requested = [{'label':d,'arg':d} for d in set(re.findall(r'#(.*?)#',formula))] # datasets used in the formula - pass_through = {'formula':formula,'label':label,'unit':unit,'description':description} - - self.__add_generic_pointwise(__add_calculation,requested,pass_through) - - - def add_Cauchy(self,P='P',F='F'): - """ - Add Cauchy stress calculated from 1. Piola-Kirchhoff stress and deformation gradient. - - Parameters - ---------- - P : str, optional - Label of the dataset containing the 1. Piola-Kirchhoff stress. Default value is ‘P’. - F : str, optional - Label of the dataset containing the deformation gradient. Default value is ‘F’. - - """ - def __add_Cauchy(F,P): - - return { - 'data': mechanics.Cauchy(F['data'],P['data']), - 'label': 'sigma', - 'meta': { - 'Unit': P['meta']['Unit'], - 'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],P['meta']['Description'])+\ - 'and deformation gradient {} ({})'.format(F['label'],F['meta']['Description']), - 'Creator': 'dadf5.py:add_Cauchy v{}'.format(version) - } - } - - requested = [{'label':F,'arg':'F'}, - {'label':P,'arg':'P'} ] - - self.__add_generic_pointwise(__add_Cauchy,requested) - - - def add_determinant(self,x): - """ - Add the determinant of a tensor. - - Parameters - ---------- - x : str - Label of the dataset containing a tensor. - - """ - def __add_determinant(x): - - return { - 'data': np.linalg.det(x['data']), - 'label': 'det({})'.format(x['label']), - 'meta': { - 'Unit': x['meta']['Unit'], - 'Description': 'Determinant of tensor {} ({})'.format(x['label'],x['meta']['Description']), - 'Creator': 'dadf5.py:add_determinant v{}'.format(version) - } - } - - requested = [{'label':x,'arg':'x'}] - - self.__add_generic_pointwise(__add_determinant,requested) - - - def add_deviator(self,x): - """ - Add the deviatoric part of a tensor. - - Parameters - ---------- - x : str - Label of the dataset containing a tensor. - - """ - def __add_deviator(x): - - if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])): - raise ValueError - - return { - 'data': mechanics.deviatoric_part(x['data']), - 'label': 's_{}'.format(x['label']), - 'meta': { - 'Unit': x['meta']['Unit'], - 'Description': 'Deviator of tensor {} ({})'.format(x['label'],x['meta']['Description']), - 'Creator': 'dadf5.py:add_deviator v{}'.format(version) - } - } - - requested = [{'label':x,'arg':'x'}] - - self.__add_generic_pointwise(__add_deviator,requested) - - - def add_maximum_shear(self,x): - """ - Add maximum shear components of symmetric tensor. - - Parameters - ---------- - x : str - Label of the dataset containing a symmetric tensor. 
- - """ - def __add_maximum_shear(x): - - return { - 'data': mechanics.maximum_shear(x['data']), - 'label': 'max_shear({})'.format(x['label']), - 'meta': { - 'Unit': x['meta']['Unit'], - 'Description': 'Maximum shear component of of {} ({})'.format(x['label'],x['meta']['Description']), - 'Creator': 'dadf5.py:add_maximum_shear v{}'.format(version) - } - } - - requested = [{'label':x,'arg':'x'}] - - self.__add_generic_pointwise(__add_maximum_shear,requested) - - - def add_Mises(self,x): - """ - Add the equivalent Mises stress or strain of a symmetric tensor. - - Parameters - ---------- - x : str - Label of the dataset containing a symmetric stress or strain tensor. - - """ - def __add_Mises(x): - - t = 'strain' if x['meta']['Unit'] == '1' else \ - 'stress' - return { - 'data': mechanics.Mises_strain(x['data']) if t=='strain' else mechanics.Mises_stress(x['data']), - 'label': '{}_vM'.format(x['label']), - 'meta': { - 'Unit': x['meta']['Unit'], - 'Description': 'Mises equivalent {} of {} ({})'.format(t,x['label'],x['meta']['Description']), - 'Creator': 'dadf5.py:add_Mises v{}'.format(version) - } - } - - requested = [{'label':x,'arg':'x'}] - - self.__add_generic_pointwise(__add_Mises,requested) - - - def add_norm(self,x,ord=None): - """ - Add the norm of vector or tensor. - - Parameters - ---------- - x : str - Label of the dataset containing a vector or tensor. - ord : {non-zero int, inf, -inf, ‘fro’, ‘nuc’}, optional - Order of the norm. inf means numpy’s inf object. For details refer to numpy.linalg.norm. - - """ - def __add_norm(x,ord): - - o = ord - if len(x['data'].shape) == 2: - axis = 1 - t = 'vector' - if o is None: o = 2 - elif len(x['data'].shape) == 3: - axis = (1,2) - t = 'tensor' - if o is None: o = 'fro' - else: - raise ValueError - - return { - 'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True), - 'label': '|{}|_{}'.format(x['label'],o), - 'meta': { - 'Unit': x['meta']['Unit'], - 'Description': '{}-Norm of {} {} ({})'.format(ord,t,x['label'],x['meta']['Description']), - 'Creator': 'dadf5.py:add_norm v{}'.format(version) - } - } - - requested = [{'label':x,'arg':'x'}] - - self.__add_generic_pointwise(__add_norm,requested,{'ord':ord}) - - - def add_principal_components(self,x): - """ - Add principal components of symmetric tensor. - - The principal components are sorted in descending order, each repeated according to its multiplicity. - - Parameters - ---------- - x : str - Label of the dataset containing a symmetric tensor. - - """ - def __add_principal_components(x): - - return { - 'data': mechanics.principal_components(x['data']), - 'label': 'lambda_{}'.format(x['label']), - 'meta': { - 'Unit': x['meta']['Unit'], - 'Description': 'Pricipal components of {} ({})'.format(x['label'],x['meta']['Description']), - 'Creator': 'dadf5.py:add_principal_components v{}'.format(version) - } - } - - requested = [{'label':x,'arg':'x'}] - - self.__add_generic_pointwise(__add_principal_components,requested) - - - def add_spherical(self,x): - """ - Add the spherical (hydrostatic) part of a tensor. - - Parameters - ---------- - x : str - Label of the dataset containing a tensor. 
- - """ - def __add_spherical(x): - - if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])): - raise ValueError - - return { - 'data': mechanics.spherical_part(x['data']), - 'label': 'p_{}'.format(x['label']), - 'meta': { - 'Unit': x['meta']['Unit'], - 'Description': 'Spherical component of tensor {} ({})'.format(x['label'],x['meta']['Description']), - 'Creator': 'dadf5.py:add_spherical v{}'.format(version) - } - } - - requested = [{'label':x,'arg':'x'}] - - self.__add_generic_pointwise(__add_spherical,requested) - - - def add_strain_tensor(self,F='F',t='U',m=0): - """ - Add strain tensor calculated from a deformation gradient. - - For details refer to damask.mechanics.strain_tensor - - Parameters - ---------- - F : str, optional - Label of the dataset containing the deformation gradient. Default value is ‘F’. - t : {‘V’, ‘U’}, optional - Type of the polar decomposition, ‘V’ for right stretch tensor and ‘U’ for left stretch tensor. - Defaults value is ‘U’. - m : float, optional - Order of the strain calculation. Default value is ‘0.0’. - - """ - def __add_strain_tensor(F,t,m): - - return { - 'data': mechanics.strain_tensor(F['data'],t,m), - 'label': 'epsilon_{}^{}({})'.format(t,m,F['label']), - 'meta': { - 'Unit': F['meta']['Unit'], - 'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']), - 'Creator': 'dadf5.py:add_strain_tensor v{}'.format(version) - } - } - - requested = [{'label':F,'arg':'F'}] - - self.__add_generic_pointwise(__add_strain_tensor,requested,{'t':t,'m':m}) - - - def __add_generic_pointwise(self,func,datasets_requested,extra_args={}): - """ - General function to add pointwise data. - - Parameters - ---------- - func : function - Function that calculates a new dataset from one or more datasets per HDF5 group. - datasets_requested : list of dictionaries - Details of the datasets to be used: label (in HDF5 file) and arg (argument to which the data is parsed in func). - extra_args : dictionary, optional - Any extra arguments parsed to func. - - """ - def job(args): - """Call function with input data + extra arguments, returns results + group.""" - args['results'].put({**args['func'](**args['in']),'group':args['group']}) - - - N_threads = 1 # ToDo: should be a parameter - - results = Queue(N_threads) - pool = util.ThreadPool(N_threads) - N_added = N_threads + 1 - - todo = [] - # ToDo: It would be more memory efficient to read only from file when required, i.e. do to it in pool.add_task - for group in self.groups_with_datasets([d['label'] for d in datasets_requested]): - with h5py.File(self.fname,'r') as f: - datasets_in = {} - for d in datasets_requested: - loc = f[group+'/'+d['label']] - data = loc[()] - meta = {k:loc.attrs[k].decode() for k in loc.attrs.keys()} - datasets_in[d['arg']] = {'data': data, 'meta' : meta, 'label' : d['label']} - - todo.append({'in':{**datasets_in,**extra_args},'func':func,'group':group,'results':results}) - - pool.map(job, todo[:N_added]) # initialize - - N_not_calculated = len(todo) - while N_not_calculated > 0: - result = results.get() - with h5py.File(self.fname,'a') as f: # write to file - dataset_out = f[result['group']].create_dataset(result['label'],data=result['data']) - for k in result['meta'].keys(): - dataset_out.attrs[k] = result['meta'][k].encode() - N_not_calculated-=1 - - if N_added < len(todo): # add more jobs - pool.add_task(job,todo[N_added]) - N_added +=1 - - pool.wait_completion() - - - def to_vtk(self,labels,mode='Cell'): - """ - Export to vtk cell/point data. 
-
-    Parameters
-    ----------
-    labels : str or list of
-      Labels of the datasets to be exported.
-    mode : str, either 'Cell' or 'Point'
-      Export in cell format or point format.
-      Default value is 'Cell'.
-
-    """
-    if mode=='Cell':
-
-      if self.structured:
-
-        coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
-        for dim in [0,1,2]:
-          for c in np.linspace(0,self.size[dim],1+self.grid[dim]):
-            coordArray[dim].InsertNextValue(c)
-
-        vtk_geom = vtk.vtkRectilinearGrid()
-        vtk_geom.SetDimensions(*(self.grid+1))
-        vtk_geom.SetXCoordinates(coordArray[0])
-        vtk_geom.SetYCoordinates(coordArray[1])
-        vtk_geom.SetZCoordinates(coordArray[2])
-
-      else:
-
-        nodes = vtk.vtkPoints()
         with h5py.File(self.fname,'r') as f:
-          nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True))
-
-          vtk_geom = vtk.vtkUnstructuredGrid()
-          vtk_geom.SetPoints(nodes)
-          vtk_geom.Allocate(f['/geometry/T_c'].shape[0])
+            for i in self.iter_visible('increments'):
+                for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+                    for oo in self.iter_visible(o):
+                        for pp in self.iter_visible(p):
+                            group = '/'.join([i,o[:-1],oo,pp])                                      # o[:-1]: plural/singular issue
+                            if sets is True:
+                                groups.append(group)
+                            else:
+                                match = [e for e_ in [glob.fnmatch.filter(f[group].keys(),s) for s in sets] for e in e_]
+                                if len(set(match)) == len(sets) : groups.append(group)
+        return groups
-          if self.version_major == 0 and self.version_minor <= 5:
-            vtk_type = vtk.VTK_HEXAHEDRON
-            n_nodes = 8
-          else:
-            if   f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TRIANGLE':
-              vtk_type = vtk.VTK_TRIANGLE
-              n_nodes = 3
-            elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'QUAD':
-              vtk_type = vtk.VTK_QUAD
-              n_nodes = 4
-            elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TETRA':                                  # not tested
-              vtk_type = vtk.VTK_TETRA
-              n_nodes = 4
-            elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'HEXAHEDRON':
-              vtk_type = vtk.VTK_HEXAHEDRON
-              n_nodes = 8
-
-          for i in f['/geometry/T_c']:
-            vtk_geom.InsertNextCell(vtk_type,n_nodes,i-1)
+
+
+    def list_data(self):
+        """Return information on all active datasets in the file."""
+        message = ''
+        with h5py.File(self.fname,'r') as f:
+            for i in self.iter_visible('increments'):
+                message+='\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
+                for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+                    for oo in self.iter_visible(o):
+                        message+='  {}\n'.format(oo)
+                        for pp in self.iter_visible(p):
+                            message+='    {}\n'.format(pp)
+                            group = '/'.join([i,o[:-1],oo,pp])                                      # o[:-1]: plural/singular issue
+                            for d in f[group].keys():
+                                try:
+                                    dataset = f['/'.join([group,d])]
+                                    message+='      {} / ({}): {}\n'.\
+                                             format(d,dataset.attrs['Unit'].decode(),dataset.attrs['Description'].decode())
+                                except KeyError:
+                                    pass
+        return message
-
-    elif mode == 'Point':
-      Points   = vtk.vtkPoints()
-      Vertices = vtk.vtkCellArray()
-      for c in self.cell_coordinates():
-        pointID = Points.InsertNextPoint(c)
-        Vertices.InsertNextCell(1)
-        Vertices.InsertCellPoint(pointID)
-
-      vtk_geom = vtk.vtkPolyData()
-      vtk_geom.SetPoints(Points)
-      vtk_geom.SetVerts(Vertices)
-      vtk_geom.Modified()
-
-    N_digits = int(np.floor(np.log10(int(self.increments[-1][3:]))))+1
-
-    for i,inc in enumerate(self.iter_visible('increments')):
-      vtk_data = []
-
-      materialpoints_backup = self.visible['materialpoints'].copy()
-      self.set_visible('materialpoints',False)
-      for label in (labels if isinstance(labels,list) else [labels]):
-        for p in self.iter_visible('con_physics'):
-          if p != 'generic':
-            for c in self.iter_visible('constituents'):
-              x = self.get_dataset_location(label)
-              if len(x) == 0:
-                continue
-              array = self.read_dataset(x,0)
-              shape = [array.shape[0],np.product(array.shape[1:])]
-              vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
-                                                         deep=True,array_type= vtk.VTK_DOUBLE))
-              vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])                                       #ToDo: hard coded 1!
-              vtk_geom.GetCellData().AddArray(vtk_data[-1])
-          else:
-            x = self.get_dataset_location(label)
-            if len(x) == 0:
-              continue
-            array = self.read_dataset(x,0)
-            shape = [array.shape[0],np.product(array.shape[1:])]
-            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
-                                                       deep=True,array_type= vtk.VTK_DOUBLE))
-            ph_name   = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))')                       # identify phase name
-            dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1])                             # removing phase name
-            vtk_data[-1].SetName(dset_name)
-            vtk_geom.GetCellData().AddArray(vtk_data[-1])
-
-      self.set_visible('materialpoints',materialpoints_backup)
+
+
+    def get_dataset_location(self,label):
+        """Return the location of all active datasets with given label."""
+        path = []
+        with h5py.File(self.fname,'r') as f:
+            for i in self.iter_visible('increments'):
+                k = '/'.join([i,'geometry',label])
+                try:
+                    f[k]
+                    path.append(k)
+                except KeyError as e:
+                    pass
+                for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+                    for oo in self.iter_visible(o):
+                        for pp in self.iter_visible(p):
+                            k = '/'.join([i,o[:-1],oo,pp,label])
+                            try:
+                                f[k]
+                                path.append(k)
+                            except KeyError as e:
+                                pass
+        return path
+
+
+    def get_constituent_ID(self,c=0):
+        """Pointwise constituent ID."""
+        with h5py.File(self.fname,'r') as f:
+            names = f['/mapping/cellResults/constituent']['Name'][:,c].astype('str')
+        return np.array([int(n.split('_')[0]) for n in names.tolist()],dtype=np.int32)
+
+
+    def get_crystal_structure(self):                                                                # ToDo: extension to multi constituents/phase
+        """Info about the crystal structure."""
+        with h5py.File(self.fname,'r') as f:
+            return f[self.get_dataset_location('orientation')[0]].attrs['Lattice'].astype('str')    # np.bytes_ to string
+
+
+    def read_dataset(self,path,c=0,plain=False):
+        """
+        Dataset for all points/cells.
+
+        If more than one path is given, the dataset is composed of the individual contributions.
+
+        """
+        with h5py.File(self.fname,'r') as f:
+            shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:]
+            if len(shape) == 1: shape = shape +(1,)
+            dataset = np.full(shape,np.nan,dtype=np.dtype(f[path[0]]))
+            for pa in path:
+                label = pa.split('/')[2]
+
+                if (pa.split('/')[1] == 'geometry'):
+                    dataset = np.array(f[pa])
+                    continue
-
-      constituents_backup = self.visible['constituents'].copy()
-      self.set_visible('constituents',False)
-      for label in (labels if isinstance(labels,list) else [labels]):
-        for p in self.iter_visible('mat_physics'):
-          if p != 'generic':
-            for m in self.iter_visible('materialpoints'):
-              x = self.get_dataset_location(label)
-              if len(x) == 0:
-                continue
-              array = self.read_dataset(x,0)
-              shape = [array.shape[0],np.product(array.shape[1:])]
-              vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
-                                                         deep=True,array_type= vtk.VTK_DOUBLE))
-              vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])                                       #ToDo: why 1_?
-              vtk_geom.GetCellData().AddArray(vtk_data[-1])
+                p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0]
+                if len(p)>0:
+                    u = (f['mapping/cellResults/constituent']['Position'][p,c])
+                    a = np.array(f[pa])
+                    if len(a.shape) == 1:
+                        a=a.reshape([a.shape[0],1])
+                    dataset[p,:] = a[u,:]
+
+                p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0]
+                if len(p)>0:
+                    u = (f['mapping/cellResults/materialpoint']['Position'][p.tolist()])
+                    a = np.array(f[pa])
+                    if len(a.shape) == 1:
+                        a=a.reshape([a.shape[0],1])
+                    dataset[p,:] = a[u,:]
-          else:
-            x = self.get_dataset_location(label)
-            if len(x) == 0:
-              continue
-            array = self.read_dataset(x,0)
-            shape = [array.shape[0],np.product(array.shape[1:])]
-            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
-                                                       deep=True,array_type= vtk.VTK_DOUBLE))
-            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
-            vtk_geom.GetCellData().AddArray(vtk_data[-1])
-      self.set_visible('constituents',constituents_backup)
-
-      if mode=='Cell':
-        writer = vtk.vtkXMLRectilinearGridWriter() if self.structured else \
-                 vtk.vtkXMLUnstructuredGridWriter()
-        x = self.get_dataset_location('u_n')
-        vtk_data.append(numpy_support.numpy_to_vtk(num_array=self.read_dataset(x,0),
-                                                   deep=True,array_type=vtk.VTK_DOUBLE))
-        vtk_data[-1].SetName('u')
-        vtk_geom.GetPointData().AddArray(vtk_data[-1])
-      elif mode == 'Point':
-        writer = vtk.vtkXMLPolyDataWriter()
-
-      file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
-                                      inc[3:].zfill(N_digits),
-                                      writer.GetDefaultFileExtension())
-
-      writer.SetCompressorTypeToZLib()
-      writer.SetDataModeToBinary()
-      writer.SetFileName(file_out)
-      writer.SetInputData(vtk_geom)
-
-      writer.Write()
+
+        if plain and dataset.dtype.names is not None:
+            return dataset.view(('float64',len(dataset.dtype.names)))
+        else:
+            return dataset
+
+
+    def cell_coordinates(self):
+        """Return initial coordinates of the cell centers."""
+        if self.structured:
+            return grid_filters.cell_coord0(self.grid,self.size,self.origin)
+        else:
+            with h5py.File(self.fname,'r') as f:
+                return f['geometry/x_c'][()]
+
+
+    def add_absolute(self,x):
+        """
+        Add absolute value.
+
+        Parameters
+        ----------
+        x : str
+          Label of scalar, vector, or tensor dataset to take absolute value of.
+
+        """
+        def _add_absolute(x):
+
+            return {
+                    'data':  np.abs(x['data']),
+                    'label': '|{}|'.format(x['label']),
+                    'meta':  {
+                              'Unit':        x['meta']['Unit'],
+                              'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
+                              'Creator':     'dadf5.py:add_abs v{}'.format(version)
+                             }
+                   }
+
+        self.__add_generic_pointwise(_add_absolute,{'x':x})
+
+
+    def add_calculation(self,label,formula,unit='n/a',description=None,vectorized=True):
+        """
+        Add result of a general formula.
+
+        Parameters
+        ----------
+        label : str
+          Label of resulting dataset.
+        formula : str
+          Formula to calculate resulting dataset. Existing datasets are referenced by ‘#TheirLabel#‘.
+        unit : str, optional
+          Physical unit of the result.
+        description : str, optional
+          Human-readable description of the result.
+        vectorized : bool, optional
+          Indicate whether the formula can be used in vectorized form. Defaults to ‘True’.
+
+        """
+        if not vectorized:
+            raise NotImplementedError
+
+        def _add_calculation(**kwargs):
+
+            formula = kwargs['formula']
+            for d in re.findall(r'#(.*?)#',formula):
+                formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
+
+            return {
+                    'data':  eval(formula),
+                    'label': kwargs['label'],
+                    'meta':  {
+                              'Unit':        kwargs['unit'],
+                              'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
+                              'Creator':     'dadf5.py:add_calculation v{}'.format(version)
+                             }
+                   }
+
+        dataset_mapping = {d:d for d in set(re.findall(r'#(.*?)#',formula))}                        # datasets used in the formula
+        args            = {'formula':formula,'label':label,'unit':unit,'description':description}
+
+        self.__add_generic_pointwise(_add_calculation,dataset_mapping,args)
+
+
+    def add_Cauchy(self,P='P',F='F'):
+        """
+        Add Cauchy stress calculated from first Piola-Kirchhoff stress and deformation gradient.
+
+        Parameters
+        ----------
+        P : str, optional
+          Label of the dataset containing the first Piola-Kirchhoff stress. Defaults to ‘P’.
+        F : str, optional
+          Label of the dataset containing the deformation gradient. Defaults to ‘F’.
+
+        """
+        def _add_Cauchy(P,F):
+
+            return {
+                    'data':  mechanics.Cauchy(P['data'],F['data']),
+                    'label': 'sigma',
+                    'meta':  {
+                              'Unit':        P['meta']['Unit'],
+                              'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],
+                                                                                             P['meta']['Description'])+\
+                                             'and {} ({})'.format(F['label'],F['meta']['Description']),
+                              'Creator':     'dadf5.py:add_Cauchy v{}'.format(version)
+                             }
+                   }
+
+        self.__add_generic_pointwise(_add_Cauchy,{'P':P,'F':F})
+
+
+    def add_determinant(self,T):
+        """
+        Add the determinant of a tensor.
+
+        Parameters
+        ----------
+        T : str
+          Label of tensor dataset.
+
+        """
+        def _add_determinant(T):
+
+            return {
+                    'data':  np.linalg.det(T['data']),
+                    'label': 'det({})'.format(T['label']),
+                    'meta':  {
+                              'Unit':        T['meta']['Unit'],
+                              'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                              'Creator':     'dadf5.py:add_determinant v{}'.format(version)
+                             }
+                   }
+
+        self.__add_generic_pointwise(_add_determinant,{'T':T})
+
+
+    def add_deviator(self,T):
+        """
+        Add the deviatoric part of a tensor.
+
+        Parameters
+        ----------
+        T : str
+          Label of tensor dataset.
+
+        """
+        def _add_deviator(T):
+
+            if not np.all(np.array(T['data'].shape[1:]) == np.array([3,3])):
+                raise ValueError
+
+            return {
+                    'data':  mechanics.deviatoric_part(T['data']),
+                    'label': 's_{}'.format(T['label']),
+                    'meta':  {
+                              'Unit':        T['meta']['Unit'],
+                              'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                              'Creator':     'dadf5.py:add_deviator v{}'.format(version)
+                             }
+                   }
+
+        self.__add_generic_pointwise(_add_deviator,{'T':T})
+
+
+    def add_eigenvalues(self,S):
+        """
+        Add eigenvalues of symmetric tensor.
+
+        Parameters
+        ----------
+        S : str
+          Label of symmetric tensor dataset.
+
+        """
+        def _add_eigenvalue(S):
+
+            return {
+                    'data': mechanics.eigenvalues(S['data']),
+                    'label': 'lambda({})'.format(S['label']),
+                    'meta' : {
+                              'Unit':        S['meta']['Unit'],
+                              'Description': 'Eigenvalues of {} ({})'.format(S['label'],S['meta']['Description']),
+                              'Creator':     'dadf5.py:add_eigenvalues v{}'.format(version)
+                             }
+                   }
+
+        self.__add_generic_pointwise(_add_eigenvalue,{'S':S})
+
+
+    def add_eigenvectors(self,S):
+        """
+        Add eigenvectors of symmetric tensor.
+
+        Parameters
+        ----------
+        S : str
+          Label of symmetric tensor dataset.
+
+        """
+        def _add_eigenvector(S):
+
+            return {
+                    'data': mechanics.eigenvectors(S['data']),
+                    'label': 'v({})'.format(S['label']),
+                    'meta' : {
+                              'Unit':        '1',
+                              'Description': 'Eigenvectors of {} ({})'.format(S['label'],S['meta']['Description']),
+                              'Creator':     'dadf5.py:add_eigenvectors v{}'.format(version)
+                             }
+                   }
+
+        self.__add_generic_pointwise(_add_eigenvector,{'S':S})
+
+
+    def add_IPFcolor(self,q,l):
+        """
+        Add RGB color tuple of inverse pole figure (IPF) color.
+
+        Parameters
+        ----------
+        q : str
+          Label of the dataset containing the crystallographic orientation as quaternions.
+        l : numpy.array of shape (3)
+          Lab frame direction for inverse pole figure.
+
+        """
+        def _add_IPFcolor(q,l):
+
+            d      = np.array(l)
+            d_unit = d/np.linalg.norm(d)
+            m      = util.scale_to_coprime(d)
+            colors = np.empty((len(q['data']),3),np.uint8)
+
+            lattice = q['meta']['Lattice']
+
+            for i,q in enumerate(q['data']):
+                o = Orientation(np.array([q['w'],q['x'],q['y'],q['z']]),lattice).reduced()
+                colors[i] = np.uint8(o.IPFcolor(d_unit)*255)
+
+            return {
+                    'data': colors,
+                    'label': 'IPFcolor_[{} {} {}]'.format(*m),
+                    'meta' : {
+                              'Unit':        'RGB (8bit)',
+                              'Lattice':     lattice,
+                              'Description': 'Inverse Pole Figure (IPF) colors for direction/plane [{} {} {})'.format(*m),
+                              'Creator':     'dadf5.py:add_IPFcolor v{}'.format(version)
+                             }
+                   }
+
+        self.__add_generic_pointwise(_add_IPFcolor,{'q':q},{'l':l})
+
+
+    def add_maximum_shear(self,S):
+        """
+        Add maximum shear components of symmetric tensor.
+
+        Parameters
+        ----------
+        S : str
+          Label of symmetric tensor dataset.
+ + """ + def _add_maximum_shear(S): + + return { + 'data': mechanics.maximum_shear(S['data']), + 'label': 'max_shear({})'.format(S['label']), + 'meta': { + 'Unit': S['meta']['Unit'], + 'Description': 'Maximum shear component of {} ({})'.format(S['label'],S['meta']['Description']), + 'Creator': 'dadf5.py:add_maximum_shear v{}'.format(version) + } + } + + self.__add_generic_pointwise(_add_maximum_shear,{'S':S}) + + + def add_Mises(self,S): + """ + Add the equivalent Mises stress or strain of a symmetric tensor. + + Parameters + ---------- + S : str + Label of symmetric tensorial stress or strain dataset. + + """ + def _add_Mises(S): + + t = 'strain' if S['meta']['Unit'] == '1' else \ + 'stress' + return { + 'data': mechanics.Mises_strain(S['data']) if t=='strain' else mechanics.Mises_stress(S['data']), + 'label': '{}_vM'.format(S['label']), + 'meta': { + 'Unit': S['meta']['Unit'], + 'Description': 'Mises equivalent {} of {} ({})'.format(t,S['label'],S['meta']['Description']), + 'Creator': 'dadf5.py:add_Mises v{}'.format(version) + } + } + + self.__add_generic_pointwise(_add_Mises,{'S':S}) + + + def add_norm(self,x,ord=None): + """ + Add the norm of vector or tensor. + + Parameters + ---------- + x : str + Label of vector or tensor dataset. + ord : {non-zero int, inf, -inf, ‘fro’, ‘nuc’}, optional + Order of the norm. inf means NumPy’s inf object. For details refer to numpy.linalg.norm. + + """ + def _add_norm(x,ord): + + o = ord + if len(x['data'].shape) == 2: + axis = 1 + t = 'vector' + if o is None: o = 2 + elif len(x['data'].shape) == 3: + axis = (1,2) + t = 'tensor' + if o is None: o = 'fro' else: - x = self.get_dataset_location(label) - if len(x) == 0: - continue - array = self.read_dataset(x,0) - shape = [array.shape[0],np.product(array.shape[1:])] - vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape), - deep=True,array_type= vtk.VTK_DOUBLE)) - vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) - vtk_geom.GetCellData().AddArray(vtk_data[-1]) - self.set_visible('constituents',constituents_backup) - - if mode=='Cell': - writer = vtk.vtkXMLRectilinearGridWriter() if self.structured else \ - vtk.vtkXMLUnstructuredGridWriter() - x = self.get_dataset_location('u_n') - vtk_data.append(numpy_support.numpy_to_vtk(num_array=self.read_dataset(x,0), - deep=True,array_type=vtk.VTK_DOUBLE)) - vtk_data[-1].SetName('u') - vtk_geom.GetPointData().AddArray(vtk_data[-1]) - elif mode == 'Point': - writer = vtk.vtkXMLPolyDataWriter() + raise ValueError - - file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.basename(self.fname))[0], - inc[3:].zfill(N_digits), - writer.GetDefaultFileExtension()) - - writer.SetCompressorTypeToZLib() - writer.SetDataModeToBinary() - writer.SetFileName(file_out) - writer.SetInputData(vtk_geom) + return { + 'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True), + 'label': '|{}|_{}'.format(x['label'],o), + 'meta': { + 'Unit': x['meta']['Unit'], + 'Description': '{}-norm of {} {} ({})'.format(ord,t,x['label'],x['meta']['Description']), + 'Creator': 'dadf5.py:add_norm v{}'.format(version) + } + } - writer.Write() + self.__add_generic_pointwise(_add_norm,{'x':x},{'ord':ord}) + + + def add_PK2(self,P='P',F='F'): + """ + Add 2. Piola-Kirchhoff calculated from first Piola-Kirchhoff stress and deformation gradient. + + Parameters + ---------- + P : str, optional + Label first Piola-Kirchhoff stress dataset. Defaults to ‘P’. + F : str, optional + Label of deformation gradient dataset. Defaults to ‘F’. 
+ + """ + def _add_PK2(P,F): + + return { + 'data': mechanics.PK2(P['data'],F['data']), + 'label': 'S', + 'meta': { + 'Unit': P['meta']['Unit'], + 'Description': '2. Kirchhoff stress calculated from {} ({}) '.format(P['label'], + P['meta']['Description'])+\ + 'and {} ({})'.format(F['label'],F['meta']['Description']), + 'Creator': 'dadf5.py:add_PK2 v{}'.format(version) + } + } + + self.__add_generic_pointwise(_add_PK2,{'P':P,'F':F}) + + + def add_pole(self,q,p,polar=False): + """ + Add coordinates of stereographic projection of given pole in crystal frame. + + Parameters + ---------- + q : str + Label of the dataset containing the crystallographic orientation as quaternions. + p : numpy.array of shape (3) + Crystallographic direction or plane. + polar : bool, optional + Give pole in polar coordinates. Defaults to False. + + """ + def _add_pole(q,p,polar): + + pole = np.array(p) + unit_pole = pole/np.linalg.norm(pole) + m = util.scale_to_coprime(pole) + coords = np.empty((len(q['data']),2)) + + for i,q in enumerate(q['data']): + o = Rotation(np.array([q['w'],q['x'],q['y'],q['z']])) + rotatedPole = o*unit_pole # rotate pole according to crystal orientation + (x,y) = rotatedPole[0:2]/(1.+abs(unit_pole[2])) # stereographic projection + coords[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if polar else [x,y] + + return { + 'data': coords, + 'label': 'p^{}_[{} {} {})'.format(u'rφ' if polar else 'xy',*m), + 'meta' : { + 'Unit': '1', + 'Description': '{} coordinates of stereographic projection of pole (direction/plane) in crystal frame'\ + .format('Polar' if polar else 'Cartesian'), + 'Creator' : 'dadf5.py:add_pole v{}'.format(version) + } + } + + self.__add_generic_pointwise(_add_pole,{'q':q},{'p':p,'polar':polar}) + + + def add_rotational_part(self,F): + """ + Add rotational part of a deformation gradient. + + Parameters + ---------- + F : str, optional + Label of deformation gradient dataset. + + """ + def _add_rotational_part(F): + + return { + 'data': mechanics.rotational_part(F['data']), + 'label': 'R({})'.format(F['label']), + 'meta': { + 'Unit': F['meta']['Unit'], + 'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']), + 'Creator': 'dadf5.py:add_rotational_part v{}'.format(version) + } + } + + self.__add_generic_pointwise(_add_rotational_part,{'F':F}) + + + def add_spherical(self,T): + """ + Add the spherical (hydrostatic) part of a tensor. + + Parameters + ---------- + T : str + Label of tensor dataset. + + """ + def _add_spherical(T): + + if not np.all(np.array(T['data'].shape[1:]) == np.array([3,3])): + raise ValueError + + return { + 'data': mechanics.spherical_part(T['data']), + 'label': 'p_{}'.format(T['label']), + 'meta': { + 'Unit': T['meta']['Unit'], + 'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']), + 'Creator': 'dadf5.py:add_spherical v{}'.format(version) + } + } + + self.__add_generic_pointwise(_add_spherical,{'T':T}) + + + def add_strain_tensor(self,F='F',t='V',m=0.0): + """ + Add strain tensor of a deformation gradient. + + For details refer to damask.mechanics.strain_tensor + + Parameters + ---------- + F : str, optional + Label of deformation gradient dataset. Defaults to ‘F’. + t : {‘V’, ‘U’}, optional + Type of the polar decomposition, ‘V’ for left stretch tensor and ‘U’ for right stretch tensor. + Defaults to ‘V’. + m : float, optional + Order of the strain calculation. Defaults to ‘0.0’. 
+
+    """
+    def _add_strain_tensor(F,t,m):
+
+      return {
+        'data':  mechanics.strain_tensor(F['data'],t,m),
+        'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
+        'meta':  {
+          'Unit':        F['meta']['Unit'],
+          'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
+          'Creator':     'dadf5.py:add_strain_tensor v{}'.format(version)
+          }
+        }
+
+    self.__add_generic_pointwise(_add_strain_tensor,{'F':F},{'t':t,'m':m})
+
+
+  def add_stretch_tensor(self,F='F',t='V'):
+    """
+    Add stretch tensor of a deformation gradient.
+
+    Parameters
+    ----------
+    F : str, optional
+      Label of deformation gradient dataset. Defaults to ‘F’.
+    t : {‘V’, ‘U’}, optional
+      Type of the polar decomposition, ‘V’ for left stretch tensor and ‘U’ for right stretch tensor.
+      Defaults to ‘V’.
+
+    """
+    def _add_stretch_tensor(F,t):
+
+      return {
+        'data':  mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
+        'label': '{}({})'.format(t,F['label']),
+        'meta':  {
+          'Unit':        F['meta']['Unit'],
+          'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
+                                                               F['label'],F['meta']['Description']),
+          'Creator':     'dadf5.py:add_stretch_tensor v{}'.format(version)
+          }
+        }
+
+    self.__add_generic_pointwise(_add_stretch_tensor,{'F':F},{'t':t})
+
+
+  def __add_generic_pointwise(self,func,dataset_mapping,args={}):
+    """
+    General function to add pointwise data.
+
+    Parameters
+    ----------
+    func : function
+      Function that calculates a new dataset from one or more datasets per HDF5 group.
+    dataset_mapping : dictionary
+      Mapping HDF5 data label to callback function argument.
+    args : dictionary, optional
+      Any extra arguments passed to func.
+
+    """
+    def job(args):
+      """Call function with input data + extra arguments, returns results + group."""
+      args['results'].put({**args['func'](**args['in']),'group':args['group']})
+
+    env = Environment()
+    N_threads = int(env.options['DAMASK_NUM_THREADS'])
+    N_threads //= N_threads                                                                         # disable for the moment
+
+    results = Queue(N_threads)
+    pool    = util.ThreadPool(N_threads)
+    N_added = N_threads + 1
+
+    todo = []
+    # ToDo: It would be more memory efficient to read only from file when required, i.e. do it in pool.add_task
+    for group in self.groups_with_datasets(dataset_mapping.values()):
+      with h5py.File(self.fname,'r') as f:
+        datasets_in = {}
+        for arg,label in dataset_mapping.items():
+          loc  = f[group+'/'+label]
+          data = loc[()]
+          meta = {k:loc.attrs[k].decode() for k in loc.attrs.keys()}
+          datasets_in[arg] = {'data': data, 'meta': meta, 'label': label}
+
+      todo.append({'in':{**datasets_in,**args},'func':func,'group':group,'results':results})
+
+    pool.map(job, todo[:N_added])                                                                   # initialize
+
+    N_not_calculated = len(todo)
+    while N_not_calculated > 0:
+      result = results.get()
+      with h5py.File(self.fname,'a') as f:                                                          # write to file
+        dataset_out = f[result['group']].create_dataset(result['label'],data=result['data'])
+        for k in result['meta'].keys():
+          dataset_out.attrs[k] = result['meta'][k].encode()
+      N_not_calculated -= 1
+
+      if N_added < len(todo):                                                                       # add more jobs
+        pool.add_task(job,todo[N_added])
+        N_added += 1
+
+    pool.wait_completion()
+
+
+  def to_vtk(self,labels,mode='cell'):
+    """
+    Export to vtk cell/point data.
+
+    Parameters
+    ----------
+    labels : str or list of str
+      Labels of the datasets to be exported.
+    mode : str, either 'cell' or 'point'
+      Export in cell format or point format.
+      Defaults to 'cell'.
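# A hedged usage sketch of the add_* family wired through __add_generic_pointwise
# above (the file name is illustrative; the labels 'P', 'F' and the derived
# 'sigma' follow the tests later in this diff):
#
#   import damask
#
#   r = damask.Result('geom_load.hdf5')   # DADF5 under its new alias
#   r.add_Cauchy('P','F')                 # writes dataset 'sigma' per group
#   r.add_Mises('sigma')                  # consumes it, writes 'sigma_vM'
#
# Each call reads its inputs group by group, hands the _add_* callback to the
# thread pool, and streams the results back into the same HDF5 file.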
+ + """ + if mode.lower()=='cell': + + if self.structured: + + coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()] + for dim in [0,1,2]: + for c in np.linspace(0,self.size[dim],1+self.grid[dim]): + coordArray[dim].InsertNextValue(c) + + vtk_geom = vtk.vtkRectilinearGrid() + vtk_geom.SetDimensions(*(self.grid+1)) + vtk_geom.SetXCoordinates(coordArray[0]) + vtk_geom.SetYCoordinates(coordArray[1]) + vtk_geom.SetZCoordinates(coordArray[2]) + + else: + + nodes = vtk.vtkPoints() + with h5py.File(self.fname,'r') as f: + nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True)) + + vtk_geom = vtk.vtkUnstructuredGrid() + vtk_geom.SetPoints(nodes) + vtk_geom.Allocate(f['/geometry/T_c'].shape[0]) + + if self.version_major == 0 and self.version_minor <= 5: + vtk_type = vtk.VTK_HEXAHEDRON + n_nodes = 8 + else: + if f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TRIANGLE': + vtk_type = vtk.VTK_TRIANGLE + n_nodes = 3 + elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'QUAD': + vtk_type = vtk.VTK_QUAD + n_nodes = 4 + elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TETRA': # not tested + vtk_type = vtk.VTK_TETRA + n_nodes = 4 + elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'HEXAHEDRON': + vtk_type = vtk.VTK_HEXAHEDRON + n_nodes = 8 + + for i in f['/geometry/T_c']: + vtk_geom.InsertNextCell(vtk_type,n_nodes,i-1) + + elif mode.lower()=='point': + Points = vtk.vtkPoints() + Vertices = vtk.vtkCellArray() + for c in self.cell_coordinates(): + pointID = Points.InsertNextPoint(c) + Vertices.InsertNextCell(1) + Vertices.InsertCellPoint(pointID) + + vtk_geom = vtk.vtkPolyData() + vtk_geom.SetPoints(Points) + vtk_geom.SetVerts(Vertices) + vtk_geom.Modified() + + N_digits = int(np.floor(np.log10(int(self.increments[-1][3:]))))+1 + + for i,inc in enumerate(self.iter_visible('increments')): + vtk_data = [] + + materialpoints_backup = self.selection['materialpoints'].copy() + self.set_visible('materialpoints',False) + for label in (labels if isinstance(labels,list) else [labels]): + for p in self.iter_visible('con_physics'): + if p != 'generic': + for c in self.iter_visible('constituents'): + x = self.get_dataset_location(label) + if len(x) == 0: + continue + array = self.read_dataset(x,0) + shape = [array.shape[0],np.product(array.shape[1:])] + vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape), + deep=True,array_type= vtk.VTK_DOUBLE)) + vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: hard coded 1! 
+ vtk_geom.GetCellData().AddArray(vtk_data[-1]) + + else: + x = self.get_dataset_location(label) + if len(x) == 0: + continue + array = self.read_dataset(x,0) + shape = [array.shape[0],np.product(array.shape[1:])] + vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape), + deep=True,array_type= vtk.VTK_DOUBLE)) + ph_name = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))') # identify phase name + dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1]) # removing phase name + vtk_data[-1].SetName(dset_name) + vtk_geom.GetCellData().AddArray(vtk_data[-1]) + + self.set_visible('materialpoints',materialpoints_backup) + + constituents_backup = self.selection['constituents'].copy() + self.set_visible('constituents',False) + for label in (labels if isinstance(labels,list) else [labels]): + for p in self.iter_visible('mat_physics'): + if p != 'generic': + for m in self.iter_visible('materialpoints'): + x = self.get_dataset_location(label) + if len(x) == 0: + continue + array = self.read_dataset(x,0) + shape = [array.shape[0],np.product(array.shape[1:])] + vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape), + deep=True,array_type= vtk.VTK_DOUBLE)) + vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: why 1_? + vtk_geom.GetCellData().AddArray(vtk_data[-1]) + else: + x = self.get_dataset_location(label) + if len(x) == 0: + continue + array = self.read_dataset(x,0) + shape = [array.shape[0],np.product(array.shape[1:])] + vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape), + deep=True,array_type= vtk.VTK_DOUBLE)) + vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) + vtk_geom.GetCellData().AddArray(vtk_data[-1]) + self.set_visible('constituents',constituents_backup) + + if mode.lower()=='cell': + writer = vtk.vtkXMLRectilinearGridWriter() if self.structured else \ + vtk.vtkXMLUnstructuredGridWriter() + x = self.get_dataset_location('u_n') + vtk_data.append(numpy_support.numpy_to_vtk(num_array=self.read_dataset(x,0), + deep=True,array_type=vtk.VTK_DOUBLE)) + vtk_data[-1].SetName('u') + vtk_geom.GetPointData().AddArray(vtk_data[-1]) + elif mode.lower()=='point': + writer = vtk.vtkXMLPolyDataWriter() + + + file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.basename(self.fname))[0], + inc[3:].zfill(N_digits), + writer.GetDefaultFileExtension()) + + writer.SetCompressorTypeToZLib() + writer.SetDataModeToBinary() + writer.SetFileName(file_out) + writer.SetInputData(vtk_geom) + + writer.Write() diff --git a/python/damask/environment.py b/python/damask/environment.py index 2fde2c329..4c9ab762c 100644 --- a/python/damask/environment.py +++ b/python/damask/environment.py @@ -8,7 +8,7 @@ class Environment(): def __init__(self): """Read and provide values of DAMASK configuration.""" self.options = {} - self.get_options() + self.__get_options() def relPath(self,relative = '.'): return os.path.join(self.rootDir(),relative) @@ -16,7 +16,7 @@ class Environment(): def rootDir(self): return os.path.normpath(os.path.join(os.path.realpath(__file__),'../../../')) - def get_options(self): + def __get_options(self): for item in ['DAMASK_NUM_THREADS', 'MSC_ROOT', 'MARC_VERSION', diff --git a/python/damask/mechanics.py b/python/damask/mechanics.py index 307f1d83d..9ffd76536 100644 --- a/python/damask/mechanics.py +++ b/python/damask/mechanics.py @@ -1,11 +1,11 @@ import numpy as np -def Cauchy(F,P): +def Cauchy(P,F): """ - Return Cauchy stress calculated from 1. Piola-Kirchhoff stress and deformation gradient. 
-
+    Return Cauchy stress calculated from first Piola-Kirchhoff stress and deformation gradient.
+
+    Resulting tensor is symmetrized as the Cauchy stress needs to be symmetric.
-
+
     Parameters
     ----------
     F : numpy.array of shape (:,3,3) or (3,3)
@@ -21,67 +21,10 @@ def Cauchy(F,P):
     return symmetric(sigma)
 
 
-def PK2(F,P):
-    """
-    Return 2. Piola-Kirchhoff stress calculated from 1. Piola-Kirchhoff stress and deformation gradient.
-
-    Parameters
-    ----------
-    F : numpy.array of shape (:,3,3) or (3,3)
-        Deformation gradient.
-    P : numpy.array of shape (:,3,3) or (3,3)
-        1. Piola-Kirchhoff stress.
-
-    """
-    if np.shape(F) == np.shape(P) == (3,3):
-        S = np.dot(np.linalg.inv(F),P)
-    else:
-        S = np.einsum('ijk,ikl->ijl',np.linalg.inv(F),P)
-    return S
-
-
-def strain_tensor(F,t,m):
-    """
-    Return strain tensor calculated from deformation gradient.
-
-    For details refer to https://en.wikipedia.org/wiki/Finite_strain_theory and
-    https://de.wikipedia.org/wiki/Verzerrungstensor
-
-    Parameters
-    ----------
-    F : numpy.array of shape (:,3,3) or (3,3)
-        Deformation gradient.
-    t : {‘V’, ‘U’}
-        Type of the polar decomposition, ‘V’ for left stretch tensor and ‘U’ for right stretch tensor.
-    m : float
-        Order of the strain.
-
-    """
-    F_ = F.reshape((1,3,3)) if F.shape == (3,3) else F
-    if t == 'V':
-        B   = np.matmul(F_,transpose(F_))
-        w,n = np.linalg.eigh(B)
-    elif t == 'U':
-        C   = np.matmul(transpose(F_),F_)
-        w,n = np.linalg.eigh(C)
-
-    if   m > 0.0:
-        eps = 1.0/(2.0*abs(m)) * (+ np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
-                                  - np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
-    elif m < 0.0:
-        eps = 1.0/(2.0*abs(m)) * (- np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
-                                  + np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
-    else:
-        eps = np.matmul(n,np.einsum('ij,ikj->ijk',0.5*np.log(w),n))
-
-    return eps.reshape((3,3)) if np.shape(F) == (3,3) else \
-           eps
-
-
 def deviatoric_part(x):
     """
     Return deviatoric part of a tensor.
-
+
     Parameters
     ----------
     x : numpy.array of shape (:,3,3) or (3,3)
@@ -89,13 +32,151 @@ def deviatoric_part(x):
 
     """
     return x - np.eye(3)*spherical_part(x) if np.shape(x) == (3,3) else \
-           x - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[x.shape[0],3,3]),spherical_part(x))
+           x - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[x.shape[0],3,3]),spherical_part(x))
+
+
+def eigenvalues(x):
+    """
+    Return the eigenvalues, i.e. principal components, of a symmetric tensor.
+
+    The eigenvalues are sorted in ascending order, each repeated according to
+    its multiplicity.
+
+    Parameters
+    ----------
+    x : numpy.array of shape (:,3,3) or (3,3)
+        Symmetric tensor of which the eigenvalues are computed.
+
+    """
+    return np.linalg.eigvalsh(symmetric(x))
+
+
+def eigenvectors(x,RHS=False):
+    """
+    Return eigenvectors of a symmetric tensor.
+
+    The eigenvectors are sorted in ascending order of their associated eigenvalues.
+
+    Parameters
+    ----------
+    x : numpy.array of shape (:,3,3) or (3,3)
+        Symmetric tensor of which the eigenvectors are computed.
+    RHS : bool, optional
+        Enforce right-handed coordinate system. Defaults to False.
+
+    """
+    (u,v) = np.linalg.eigh(symmetric(x))
+
+    if RHS:
+        if np.shape(x) == (3,3):
+            if np.linalg.det(v) < 0.0: v[:,2] *= -1.0
+        else:
+            v[np.linalg.det(v) < 0.0,:,2] *= -1.0
+    return v
+
+
+def left_stretch(x):
+    """
+    Return the left stretch of a tensor.
+
+    Parameters
+    ----------
+    x : numpy.array of shape (:,3,3) or (3,3)
+        Tensor of which the left stretch is computed.
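# Minimal standalone check (plain NumPy) of the RHS convention introduced in
# eigenvectors() above: flipping the last eigenvector wherever det(v) < 0 turns
# every orthonormal eigenbasis into a proper rotation.
#
#   import numpy as np
#
#   A = np.random.random((5,3,3))
#   A = (A + A.transpose(0,2,1))*0.5            # symmetrize, as eigenvectors() does
#   _,v = np.linalg.eigh(A)
#   v[np.linalg.det(v) < 0.0,:,2] *= -1.0       # the RHS branch above
#   assert np.allclose(np.linalg.det(v), 1.0)   # now right-handed in every slice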
+
+    """
+    return __polar_decomposition(x,'V')[0]
+
+
+def maximum_shear(x):
+    """
+    Return the maximum shear component of a symmetric tensor.
+
+    Parameters
+    ----------
+    x : numpy.array of shape (:,3,3) or (3,3)
+        Symmetric tensor of which the maximum shear is computed.
+
+    """
+    w = eigenvalues(x)
+    return (w[2] - w[0])*0.5 if np.shape(x) == (3,3) else \
+           (w[:,2] - w[:,0])*0.5
+
+
+def Mises_strain(epsilon):
+    """
+    Return the Mises equivalent of a strain tensor.
+
+    Parameters
+    ----------
+    epsilon : numpy.array of shape (:,3,3) or (3,3)
+        Symmetric strain tensor of which the von Mises equivalent is computed.
+
+    """
+    return __Mises(epsilon,2.0/3.0)
+
+
+def Mises_stress(sigma):
+    """
+    Return the Mises equivalent of a stress tensor.
+
+    Parameters
+    ----------
+    sigma : numpy.array of shape (:,3,3) or (3,3)
+        Symmetric stress tensor of which the von Mises equivalent is computed.
+
+    """
+    return __Mises(sigma,3.0/2.0)
+
+
+def PK2(P,F):
+    """
+    Calculate second Piola-Kirchhoff stress from first Piola-Kirchhoff stress and deformation gradient.
+
+    Parameters
+    ----------
+    P : numpy.array of shape (:,3,3) or (3,3)
+        1. Piola-Kirchhoff stress.
+    F : numpy.array of shape (:,3,3) or (3,3)
+        Deformation gradient.
+
+    """
+    if np.shape(F) == np.shape(P) == (3,3):
+        S = np.dot(np.linalg.inv(F),P)
+    else:
+        S = np.einsum('ijk,ikl->ijl',np.linalg.inv(F),P)
+    return symmetric(S)
+
+
+def right_stretch(x):
+    """
+    Return the right stretch of a tensor.
+
+    Parameters
+    ----------
+    x : numpy.array of shape (:,3,3) or (3,3)
+        Tensor of which the right stretch is computed.
+
+    """
+    return __polar_decomposition(x,'U')[0]
+
+
+def rotational_part(x):
+    """
+    Return the rotational part of a tensor.
+
+    Parameters
+    ----------
+    x : numpy.array of shape (:,3,3) or (3,3)
+        Tensor of which the rotational part is computed.
+
+    """
+    return __polar_decomposition(x,'R')[0]
 
 
 def spherical_part(x,tensor=False):
     """
     Return spherical (hydrostatic) part of a tensor.
-
+
     Parameters
     ----------
     x : numpy.array of shape (:,3,3) or (3,3)
@@ -113,42 +194,50 @@ def spherical_part(x,tensor=False):
         return sph
     else:
         return np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),(x.shape[0],3,3)),sph)
-
-
-def Mises_stress(sigma):
+
+
+def strain_tensor(F,t,m):
     """
-    Return the Mises equivalent of a stress tensor.
-
+    Return strain tensor calculated from deformation gradient.
+
+    For details refer to https://en.wikipedia.org/wiki/Finite_strain_theory and
+    https://de.wikipedia.org/wiki/Verzerrungstensor
+
     Parameters
     ----------
-    sigma : numpy.array of shape (:,3,3) or (3,3)
-        Symmetric stress tensor of which the von Mises equivalent is computed.
+    F : numpy.array of shape (:,3,3) or (3,3)
+        Deformation gradient.
+    t : {‘V’, ‘U’}
+        Type of the polar decomposition, ‘V’ for left stretch tensor and ‘U’ for right stretch tensor.
+    m : float
+        Order of the strain.
 
     """
-    s = deviatoric_part(sigma)
-    return np.sqrt(3.0/2.0*(np.sum(s**2.0))) if np.shape(sigma) == (3,3) else \
-           np.sqrt(3.0/2.0*np.einsum('ijk->i',s**2.0))
-
-
-def Mises_strain(epsilon):
-    """
-    Return the Mises equivalent of a strain tensor.
-
-    Parameters
-    ----------
-    epsilon : numpy.array of shape (:,3,3) or (3,3)
-        Symmetric strain tensor of which the von Mises equivalent is computed.
+ F_ = F.reshape((1,3,3)) if F.shape == (3,3) else F + if t == 'V': + B = np.matmul(F_,transpose(F_)) + w,n = np.linalg.eigh(B) + elif t == 'U': + C = np.matmul(transpose(F_),F_) + w,n = np.linalg.eigh(C) - """ - s = deviatoric_part(epsilon) - return np.sqrt(2.0/3.0*(np.sum(s**2.0))) if np.shape(epsilon) == (3,3) else \ - np.sqrt(2.0/3.0*np.einsum('ijk->i',s**2.0)) + if m > 0.0: + eps = 1.0/(2.0*abs(m)) * (+ np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n)) + - np.broadcast_to(np.eye(3),[F_.shape[0],3,3])) + elif m < 0.0: + eps = 1.0/(2.0*abs(m)) * (- np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n)) + + np.broadcast_to(np.eye(3),[F_.shape[0],3,3])) + else: + eps = np.matmul(n,np.einsum('ij,ikj->ijk',0.5*np.log(w),n)) + + return eps.reshape((3,3)) if np.shape(F) == (3,3) else \ + eps def symmetric(x): """ Return the symmetrized tensor. - + Parameters ---------- x : numpy.array of shape (:,3,3) or (3,3) @@ -158,43 +247,10 @@ def symmetric(x): return (x+transpose(x))*0.5 -def maximum_shear(x): - """ - Return the maximum shear component of a symmetric tensor. - - Parameters - ---------- - x : numpy.array of shape (:,3,3) or (3,3) - Symmetric tensor of which the maximum shear is computed. - - """ - w = np.linalg.eigvalsh(symmetric(x)) # eigenvalues in ascending order - return (w[2] - w[0])*0.5 if np.shape(x) == (3,3) else \ - (w[:,2] - w[:,0])*0.5 - - -def principal_components(x): - """ - Return the principal components of a symmetric tensor. - - The principal components (eigenvalues) are sorted in descending order, each repeated according to - its multiplicity. - - Parameters - ---------- - x : numpy.array of shape (:,3,3) or (3,3) - Symmetric tensor of which the principal compontents are computed. - - """ - w = np.linalg.eigvalsh(symmetric(x)) # eigenvalues in ascending order - return w[::-1] if np.shape(x) == (3,3) else \ - w[:,::-1] - - def transpose(x): """ Return the transpose of a tensor. - + Parameters ---------- x : numpy.array of shape (:,3,3) or (3,3) @@ -205,62 +261,23 @@ def transpose(x): np.transpose(x,(0,2,1)) -def rotational_part(x): - """ - Return the rotational part of a tensor. - - Parameters - ---------- - x : numpy.array of shape (:,3,3) or (3,3) - Tensor of which the rotational part is computed. - - """ - return __polar_decomposition(x,'R')[0] - - -def left_stretch(x): - """ - Return the left stretch of a tensor. - - Parameters - ---------- - x : numpy.array of shape (:,3,3) or (3,3) - Tensor of which the left stretch is computed. - - """ - return __polar_decomposition(x,'V')[0] - - -def right_stretch(x): - """ - Return the right stretch of a tensor. - - Parameters - ---------- - x : numpy.array of shape (:,3,3) or (3,3) - Tensor of which the right stretch is computed. - - """ - return __polar_decomposition(x,'U')[0] - - def __polar_decomposition(x,requested): """ Singular value decomposition. - + Parameters ---------- x : numpy.array of shape (:,3,3) or (3,3) Tensor of which the singular values are computed. requested : iterable of str - Requested outputs: ‘R’ for the rotation tensor, + Requested outputs: ‘R’ for the rotation tensor, ‘V’ for left stretch tensor and ‘U’ for right stretch tensor. 
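# The three branches of strain_tensor() above implement the Seth-Hill family
# E_m = (U^(2m) - I)/(2m), with m = 0 handled as its logarithmic limit.
# A quick numerical sanity check (illustrative values, not part of the suite):
#
#   import numpy as np
#   from damask import mechanics
#
#   F = np.broadcast_to(np.eye(3),(10,3,3)) * (1.0 + np.random.random((10,3,3))*0.1)
#   assert np.allclose(mechanics.strain_tensor(F,'U',1.e-6),   # m -> 0 ...
#                      mechanics.strain_tensor(F,'U',0.0),     # ... recovers log strain
#                      atol=1.e-5)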
""" u, s, vh = np.linalg.svd(x) R = np.dot(u,vh) if np.shape(x) == (3,3) else \ np.einsum('ijk,ikl->ijl',u,vh) - + output = [] if 'R' in requested: output.append(R) @@ -268,5 +285,22 @@ def __polar_decomposition(x,requested): output.append(np.dot(x,R.T) if np.shape(x) == (3,3) else np.einsum('ijk,ilk->ijl',x,R)) if 'U' in requested: output.append(np.dot(R.T,x) if np.shape(x) == (3,3) else np.einsum('ikj,ikl->ijl',R,x)) - + return tuple(output) + + +def __Mises(x,s): + """ + Base equation for Mises equivalent of a stres or strain tensor. + + Parameters + ---------- + x : numpy.array of shape (:,3,3) or (3,3) + Symmetric tensor of which the von Mises equivalent is computed. + s : float + Scaling factor (2/3 for strain, 3/2 for stress). + + """ + d = deviatoric_part(x) + return np.sqrt(s*(np.sum(d**2.0))) if np.shape(x) == (3,3) else \ + np.sqrt(s*np.einsum('ijk->i',d**2.0)) diff --git a/python/damask/util.py b/python/damask/util.py index 0065daba5..1cb7b8602 100644 --- a/python/damask/util.py +++ b/python/damask/util.py @@ -3,14 +3,18 @@ import time import os import subprocess import shlex +from fractions import Fraction +from functools import reduce from optparse import Option from queue import Queue from threading import Thread +import numpy as np + class bcolors: """ ASCII Colors (Blender code). - + https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python """ @@ -36,7 +40,7 @@ class bcolors: self.BOLD = '' self.UNDERLINE = '' self.CROSSOUT = '' - + # ----------------------------- def srepr(arg,glue = '\n'): @@ -159,11 +163,29 @@ def progressBar(iteration, total, prefix='', bar_length=50): if iteration == total: sys.stderr.write('\n') sys.stderr.flush() - + + +def scale_to_coprime(v): + """Scale vector to co-prime (relatively prime) integers.""" + MAX_DENOMINATOR = 1000 + + def get_square_denominator(x): + """Denominator of the square of a number.""" + return Fraction(x ** 2).limit_denominator(MAX_DENOMINATOR).denominator + + def lcm(a, b): + """Least common multiple.""" + return a * b // np.gcd(a, b) + + denominators = [int(get_square_denominator(i)) for i in v] + s = reduce(lcm, denominators) ** 0.5 + m = (np.array(v)*s).astype(np.int) + return m//reduce(np.gcd,m) + class return_message(): """Object with formatted return message.""" - + def __init__(self,message): """ Sets return message. 
@@ -175,7 +197,7 @@ class return_message(): """ self.message = message - + def __repr__(self): """Return message suitable for interactive shells.""" return srepr(self.message) diff --git a/python/tests/test_DADF5.py b/python/tests/test_DADF5.py index 669ce8446..4cf3c4e2b 100644 --- a/python/tests/test_DADF5.py +++ b/python/tests/test_DADF5.py @@ -40,7 +40,7 @@ class TestDADF5: assert np.allclose(in_memory,in_file) def test_add_calculation(self,default): - default.add_calculation('2.0*np.abs(#F#)-1.0','x','-','test') + default.add_calculation('x','2.0*np.abs(#F#)-1.0','-','my notes') loc = {'F': default.get_dataset_location('F'), 'x': default.get_dataset_location('x')} in_memory = 2.0*np.abs(default.read_dataset(loc['F'],0))-1.0 @@ -52,8 +52,8 @@ class TestDADF5: loc = {'F': default.get_dataset_location('F'), 'P': default.get_dataset_location('P'), 'sigma':default.get_dataset_location('sigma')} - in_memory = mechanics.Cauchy(default.read_dataset(loc['F'],0), - default.read_dataset(loc['P'],0)) + in_memory = mechanics.Cauchy(default.read_dataset(loc['P'],0), + default.read_dataset(loc['F'],0)) in_file = default.read_dataset(loc['sigma'],0) assert np.allclose(in_memory,in_file) @@ -73,6 +73,54 @@ class TestDADF5: in_file = default.read_dataset(loc['s_P'],0) assert np.allclose(in_memory,in_file) + def test_add_eigenvalues(self,default): + default.add_Cauchy('P','F') + default.add_eigenvalues('sigma') + loc = {'sigma' :default.get_dataset_location('sigma'), + 'lambda(sigma)':default.get_dataset_location('lambda(sigma)')} + in_memory = mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)) + in_file = default.read_dataset(loc['lambda(sigma)'],0) + assert np.allclose(in_memory,in_file) + + def test_add_eigenvectors(self,default): + default.add_Cauchy('P','F') + default.add_eigenvectors('sigma') + loc = {'sigma' :default.get_dataset_location('sigma'), + 'v(sigma)':default.get_dataset_location('v(sigma)')} + in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0)) + in_file = default.read_dataset(loc['v(sigma)'],0) + assert np.allclose(in_memory,in_file) + + def test_add_maximum_shear(self,default): + default.add_Cauchy('P','F') + default.add_maximum_shear('sigma') + loc = {'sigma' :default.get_dataset_location('sigma'), + 'max_shear(sigma)':default.get_dataset_location('max_shear(sigma)')} + in_memory = mechanics.maximum_shear(default.read_dataset(loc['sigma'],0)).reshape(-1,1) + in_file = default.read_dataset(loc['max_shear(sigma)'],0) + assert np.allclose(in_memory,in_file) + + def test_add_Mises_strain(self,default): + t = ['V','U'][np.random.randint(0,2)] + m = np.random.random()*2.0 - 1.0 + default.add_strain_tensor('F',t,m) + label = 'epsilon_{}^{}(F)'.format(t,m) + default.add_Mises(label) + loc = {label :default.get_dataset_location(label), + label+'_vM':default.get_dataset_location(label+'_vM')} + in_memory = mechanics.Mises_strain(default.read_dataset(loc[label],0)).reshape(-1,1) + in_file = default.read_dataset(loc[label+'_vM'],0) + assert np.allclose(in_memory,in_file) + + def test_add_Mises_stress(self,default): + default.add_Cauchy('P','F') + default.add_Mises('sigma') + loc = {'sigma' :default.get_dataset_location('sigma'), + 'sigma_vM':default.get_dataset_location('sigma_vM')} + in_memory = mechanics.Mises_stress(default.read_dataset(loc['sigma'],0)).reshape(-1,1) + in_file = default.read_dataset(loc['sigma_vM'],0) + assert np.allclose(in_memory,in_file) + def test_add_norm(self,default): default.add_norm('F',1) loc = {'F': default.get_dataset_location('F'), @@ 
-81,6 +129,24 @@ class TestDADF5: in_file = default.read_dataset(loc['|F|_1'],0) assert np.allclose(in_memory,in_file) + def test_add_PK2(self,default): + default.add_PK2('P','F') + loc = {'F':default.get_dataset_location('F'), + 'P':default.get_dataset_location('P'), + 'S':default.get_dataset_location('S')} + in_memory = mechanics.PK2(default.read_dataset(loc['P'],0), + default.read_dataset(loc['F'],0)) + in_file = default.read_dataset(loc['S'],0) + assert np.allclose(in_memory,in_file) + + def test_add_rotational_part(self,default): + default.add_rotational_part('F') + loc = {'F': default.get_dataset_location('F'), + 'R(F)': default.get_dataset_location('R(F)')} + in_memory = mechanics.rotational_part(default.read_dataset(loc['F'],0)) + in_file = default.read_dataset(loc['R(F)'],0) + assert np.allclose(in_memory,in_file) + def test_add_spherical(self,default): default.add_spherical('P') loc = {'P': default.get_dataset_location('P'), @@ -88,3 +154,30 @@ class TestDADF5: in_memory = mechanics.spherical_part(default.read_dataset(loc['P'],0)).reshape(-1,1) in_file = default.read_dataset(loc['p_P'],0) assert np.allclose(in_memory,in_file) + + def test_add_strain(self,default): + t = ['V','U'][np.random.randint(0,2)] + m = np.random.random()*2.0 - 1.0 + default.add_strain_tensor('F',t,m) + label = 'epsilon_{}^{}(F)'.format(t,m) + loc = {'F': default.get_dataset_location('F'), + label: default.get_dataset_location(label)} + in_memory = mechanics.strain_tensor(default.read_dataset(loc['F'],0),t,m) + in_file = default.read_dataset(loc[label],0) + assert np.allclose(in_memory,in_file) + + def test_add_stretch_right(self,default): + default.add_stretch_tensor('F','U') + loc = {'F': default.get_dataset_location('F'), + 'U(F)': default.get_dataset_location('U(F)')} + in_memory = mechanics.right_stretch(default.read_dataset(loc['F'],0)) + in_file = default.read_dataset(loc['U(F)'],0) + assert np.allclose(in_memory,in_file) + + def test_add_stretch_left(self,default): + default.add_stretch_tensor('F','V') + loc = {'F': default.get_dataset_location('F'), + 'V(F)': default.get_dataset_location('V(F)')} + in_memory = mechanics.left_stretch(default.read_dataset(loc['F'],0)) + in_file = default.read_dataset(loc['V(F)'],0) + assert np.allclose(in_memory,in_file) diff --git a/python/tests/test_mechanics.py b/python/tests/test_mechanics.py index 9e1d9bc0c..4248254ab 100644 --- a/python/tests/test_mechanics.py +++ b/python/tests/test_mechanics.py @@ -2,187 +2,224 @@ import numpy as np from damask import mechanics class TestMechanics: - + n = 1000 c = np.random.randint(n) - + def test_vectorize_Cauchy(self): - P = np.random.random((self.n,3,3)) - F = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.Cauchy(F,P)[self.c], - mechanics.Cauchy(F[self.c],P[self.c])) - - - def test_vectorize_strain_tensor(self): - F = np.random.random((self.n,3,3)) - t = ['V','U'][np.random.randint(0,2)] - m = np.random.random()*10. 
-5.0 - assert np.allclose(mechanics.strain_tensor(F,t,m)[self.c], - mechanics.strain_tensor(F[self.c],t,m)) - + P = np.random.random((self.n,3,3)) + F = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.Cauchy(P,F)[self.c], + mechanics.Cauchy(P[self.c],F[self.c])) def test_vectorize_deviatoric_part(self): - x = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.deviatoric_part(x)[self.c], - mechanics.deviatoric_part(x[self.c])) + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.deviatoric_part(x)[self.c], + mechanics.deviatoric_part(x[self.c])) + def test_vectorize_eigenvalues(self): + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.eigenvalues(x)[self.c], + mechanics.eigenvalues(x[self.c])) - def test_vectorize_spherical_part(self): - x = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.spherical_part(x,True)[self.c], - mechanics.spherical_part(x[self.c],True)) - - - def test_vectorize_Mises_stress(self): - sigma = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.Mises_stress(sigma)[self.c], - mechanics.Mises_stress(sigma[self.c])) - - - def test_vectorize_Mises_strain(self): - epsilon = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.Mises_strain(epsilon)[self.c], - mechanics.Mises_strain(epsilon[self.c])) - - - def test_vectorize_symmetric(self): - x = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.symmetric(x)[self.c], - mechanics.symmetric(x[self.c])) - - - def test_vectorize_maximum_shear(self): - x = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.maximum_shear(x)[self.c], - mechanics.maximum_shear(x[self.c])) - - - def test_vectorize_principal_components(self): - x = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.principal_components(x)[self.c], - mechanics.principal_components(x[self.c])) - - - def test_vectorize_transpose(self): - x = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.transpose(x)[self.c], - mechanics.transpose(x[self.c])) - - - def test_vectorize_rotational_part(self): - x = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.rotational_part(x)[self.c], - mechanics.rotational_part(x[self.c])) - + def test_vectorize_eigenvectors(self): + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.eigenvectors(x)[self.c], + mechanics.eigenvectors(x[self.c])) def test_vectorize_left_stretch(self): - x = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.left_stretch(x)[self.c], - mechanics.left_stretch(x[self.c])) + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.left_stretch(x)[self.c], + mechanics.left_stretch(x[self.c])) + def test_vectorize_maximum_shear(self): + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.maximum_shear(x)[self.c], + mechanics.maximum_shear(x[self.c])) + + def test_vectorize_Mises_strain(self): + epsilon = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.Mises_strain(epsilon)[self.c], + mechanics.Mises_strain(epsilon[self.c])) + + def test_vectorize_Mises_stress(self): + sigma = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.Mises_stress(sigma)[self.c], + mechanics.Mises_stress(sigma[self.c])) + + def test_vectorize_PK2(self): + F = np.random.random((self.n,3,3)) + P = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.PK2(P,F)[self.c], + mechanics.PK2(P[self.c],F[self.c])) def test_vectorize_right_stretch(self): - x = np.random.random((self.n,3,3)) - assert 
np.allclose(mechanics.right_stretch(x)[self.c], - mechanics.right_stretch(x[self.c])) + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.right_stretch(x)[self.c], + mechanics.right_stretch(x[self.c])) + + def test_vectorize_rotational_part(self): + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.rotational_part(x)[self.c], + mechanics.rotational_part(x[self.c])) + + def test_vectorize_spherical_part(self): + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.spherical_part(x,True)[self.c], + mechanics.spherical_part(x[self.c],True)) + + def test_vectorize_strain_tensor(self): + F = np.random.random((self.n,3,3)) + t = ['V','U'][np.random.randint(0,2)] + m = np.random.random()*10. -5.0 + assert np.allclose(mechanics.strain_tensor(F,t,m)[self.c], + mechanics.strain_tensor(F[self.c],t,m)) + + def test_vectorize_symmetric(self): + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.symmetric(x)[self.c], + mechanics.symmetric(x[self.c])) + + def test_vectorize_transpose(self): + x = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.transpose(x)[self.c], + mechanics.transpose(x[self.c])) def test_Cauchy(self): - """Ensure Cauchy stress is symmetrized 1. Piola-Kirchhoff stress for no deformation.""" - P = np.random.random((self.n,3,3)) - assert np.allclose(mechanics.Cauchy(np.broadcast_to(np.eye(3),(self.n,3,3)),P), - mechanics.symmetric(P)) + """Ensure Cauchy stress is symmetrized 1. Piola-Kirchhoff stress for no deformation.""" + P = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.Cauchy(P,np.broadcast_to(np.eye(3),(self.n,3,3))), + mechanics.symmetric(P)) + def test_polar_decomposition(self): - """F = RU = VR.""" - F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.random((self.n,3,3)) - R = mechanics.rotational_part(F) - V = mechanics.left_stretch(F) - U = mechanics.right_stretch(F) - assert np.allclose(np.matmul(R,U), - np.matmul(V,R)) - + """F = RU = VR.""" + F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.random((self.n,3,3)) + R = mechanics.rotational_part(F) + V = mechanics.left_stretch(F) + U = mechanics.right_stretch(F) + assert np.allclose(np.matmul(R,U), + np.matmul(V,R)) + + + def test_PK2(self): + """Ensure 2. Piola-Kirchhoff stress is symmetrized 1. 
Piola-Kirchhoff stress for no deformation.""" + P = np.random.random((self.n,3,3)) + assert np.allclose(mechanics.PK2(P,np.broadcast_to(np.eye(3),(self.n,3,3))), + mechanics.symmetric(P)) + def test_strain_tensor_no_rotation(self): - """Ensure that left and right stretch give same results for no rotation.""" - F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.random((self.n,3,3)) - m = np.random.random()*20.0-10.0 - assert np.allclose(mechanics.strain_tensor(F,'U',m), - mechanics.strain_tensor(F,'V',m)) - + """Ensure that left and right stretch give same results for no rotation.""" + F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.random((self.n,3,3)) + m = np.random.random()*20.0-10.0 + assert np.allclose(mechanics.strain_tensor(F,'U',m), + mechanics.strain_tensor(F,'V',m)) + def test_strain_tensor_rotation_equivalence(self): - """Ensure that left and right strain differ only by a rotation.""" - F = np.broadcast_to(np.eye(3),[self.n,3,3]) + (np.random.random((self.n,3,3))*0.5 - 0.25) - m = np.random.random()*5.0-2.5 - assert np.allclose(np.linalg.det(mechanics.strain_tensor(F,'U',m)), - np.linalg.det(mechanics.strain_tensor(F,'V',m))) + """Ensure that left and right strain differ only by a rotation.""" + F = np.broadcast_to(np.eye(3),[self.n,3,3]) + (np.random.random((self.n,3,3))*0.5 - 0.25) + m = np.random.random()*5.0-2.5 + assert np.allclose(np.linalg.det(mechanics.strain_tensor(F,'U',m)), + np.linalg.det(mechanics.strain_tensor(F,'V',m))) def test_strain_tensor_rotation(self): - """Ensure that pure rotation results in no strain.""" - F = mechanics.rotational_part(np.random.random((self.n,3,3))) - t = ['V','U'][np.random.randint(0,2)] - m = np.random.random()*2.0 - 1.0 - assert np.allclose(mechanics.strain_tensor(F,t,m), - 0.0) - - def test_rotation_determinant(self): - """ - Ensure that the determinant of the rotational part is +- 1. + """Ensure that pure rotation results in no strain.""" + F = mechanics.rotational_part(np.random.random((self.n,3,3))) + t = ['V','U'][np.random.randint(0,2)] + m = np.random.random()*2.0 - 1.0 + assert np.allclose(mechanics.strain_tensor(F,t,m), + 0.0) - Should be +1, but random F might contain a reflection. - """ - x = np.random.random((self.n,3,3)) - assert np.allclose(np.abs(np.linalg.det(mechanics.rotational_part(x))), - 1.0) + def test_rotation_determinant(self): + """ + Ensure that the determinant of the rotational part is +- 1. + + Should be +1, but random F might contain a reflection. 
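#   To make the caveat above concrete: for a pure reflection the SVD-based
#   rotational part reproduces the reflection, so its determinant is -1
#   (a minimal demonstration, not part of the test suite):
#
#     import numpy as np
#     from damask import mechanics
#
#     M = np.eye(3)
#     M[0,0] = -1.0                               # reflection, det = -1
#     R = mechanics.rotational_part(M)
#     assert np.isclose(np.linalg.det(R), -1.0)   # R inherits the reflection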
+        """
+        x = np.random.random((self.n,3,3))
+        assert np.allclose(np.abs(np.linalg.det(mechanics.rotational_part(x))),
+                           1.0)
 
     def test_spherical_deviatoric_part(self):
-        """Ensure that full tensor is sum of spherical and deviatoric part."""
-        x = np.random.random((self.n,3,3))
-        sph = mechanics.spherical_part(x,True)
-        assert np.allclose(sph + mechanics.deviatoric_part(x),
-                           x)
+        """Ensure that full tensor is sum of spherical and deviatoric part."""
+        x = np.random.random((self.n,3,3))
+        sph = mechanics.spherical_part(x,True)
+        assert np.allclose(sph + mechanics.deviatoric_part(x),
+                           x)
 
     def test_deviatoric_Mises(self):
-        """Ensure that Mises equivalent stress depends only on deviatoric part."""
-        x = np.random.random((self.n,3,3))
-        full = mechanics.Mises_stress(x)
-        dev  = mechanics.Mises_stress(mechanics.deviatoric_part(x))
-        assert np.allclose(full,
-                           dev)
+        """Ensure that Mises equivalent stress depends only on deviatoric part."""
+        x = np.random.random((self.n,3,3))
+        full = mechanics.Mises_stress(x)
+        dev  = mechanics.Mises_stress(mechanics.deviatoric_part(x))
+        assert np.allclose(full,
+                           dev)
 
     def test_spherical_mapping(self):
-        """Ensure that mapping to tensor is correct."""
-        x = np.random.random((self.n,3,3))
-        tensor = mechanics.spherical_part(x,True)
-        scalar = mechanics.spherical_part(x)
-        assert np.allclose(np.linalg.det(tensor),
-                           scalar**3.0)
+        """Ensure that mapping to tensor is correct."""
+        x = np.random.random((self.n,3,3))
+        tensor = mechanics.spherical_part(x,True)
+        scalar = mechanics.spherical_part(x)
+        assert np.allclose(np.linalg.det(tensor),
+                           scalar**3.0)
 
     def test_spherical_Mises(self):
-        """Ensure that Mises equivalent strrain of spherical strain is 0."""
-        x = np.random.random((self.n,3,3))
-        sph = mechanics.spherical_part(x,True)
-        assert np.allclose(mechanics.Mises_strain(sph),
-                           0.0)
+        """Ensure that Mises equivalent strain of spherical strain is 0."""
+        x = np.random.random((self.n,3,3))
+        sph = mechanics.spherical_part(x,True)
+        assert np.allclose(mechanics.Mises_strain(sph),
+                           0.0)
 
     def test_symmetric(self):
-        """Ensure that a symmetric tensor is half of the sum of a tensor and its transpose."""
-        x = np.random.random((self.n,3,3))
-        assert np.allclose(mechanics.symmetric(x)*2.0,
-                           mechanics.transpose(x)+x)
+        """Ensure that a symmetric tensor is half of the sum of a tensor and its transpose."""
+        x = np.random.random((self.n,3,3))
+        assert np.allclose(mechanics.symmetric(x)*2.0,
+                           mechanics.transpose(x)+x)
 
     def test_transpose(self):
-        """Ensure that a symmetric tensor equals its transpose."""
-        x = mechanics.symmetric(np.random.random((self.n,3,3)))
-        assert np.allclose(mechanics.transpose(x),
-                           x)
+        """Ensure that a symmetric tensor equals its transpose."""
+        x = mechanics.symmetric(np.random.random((self.n,3,3)))
+        assert np.allclose(mechanics.transpose(x),
+                           x)
 
     def test_Mises(self):
-        """Ensure that equivalent stress is 3/2 of equivalent strain."""
-        x = np.random.random((self.n,3,3))
-        assert np.allclose(mechanics.Mises_stress(x)/mechanics.Mises_strain(x),
-                           1.5)
+        """Ensure that equivalent stress is 3/2 of equivalent strain."""
+        x = np.random.random((self.n,3,3))
+        assert np.allclose(mechanics.Mises_stress(x)/mechanics.Mises_strain(x),
+                           1.5)
+
+
+    def test_eigenvalues(self):
+        """Ensure that the characteristic polynomial can be solved."""
+        A = mechanics.symmetric(np.random.random((self.n,3,3)))
+        lambd = mechanics.eigenvalues(A)
+        s = np.random.randint(self.n)
+        for i in range(3):
+            assert np.allclose(np.linalg.det(A[s]-lambd[s,i]*np.eye(3)),.0)
+
+
+    def test_eigenvalues_and_vectors(self):
+        """Ensure that eigenvalues and eigenvectors are the solution to the characteristic polynomial."""
+        A = mechanics.symmetric(np.random.random((self.n,3,3)))
+        lambd = mechanics.eigenvalues(A)
+        x = mechanics.eigenvectors(A)
+        s = np.random.randint(self.n)
+        for i in range(3):
+            assert np.allclose(np.dot(A[s]-lambd[s,i]*np.eye(3),x[s,:,i]),.0)
+
+    def test_eigenvectors_RHS(self):
+        """Ensure that enforcing a right-handed coordinate system only changes the sign of the determinant."""
+        A = mechanics.symmetric(np.random.random((self.n,3,3)))
+        LRHS = np.linalg.det(mechanics.eigenvectors(A,RHS=False))
+        RHS  = np.linalg.det(mechanics.eigenvectors(A,RHS=True))
+        assert np.allclose(np.abs(LRHS),RHS)
+
+    def test_spherical_no_shear(self):
+        """Ensure that spherical stress has max shear of 0.0."""
+        A = mechanics.spherical_part(mechanics.symmetric(np.random.random((self.n,3,3))),True)
+        assert np.allclose(mechanics.maximum_shear(A),0.0)
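Taken together, a short end-to-end sketch of the eigendecomposition additions these tests exercise (dataset labels follow the tests above; the results file name is illustrative):

  import numpy as np
  import damask
  from damask import mechanics

  # in-file: derive Cauchy stress, then its eigenvalues/eigenvectors per group
  r = damask.Result('geom_load.hdf5')
  r.add_Cauchy('P','F')           # -> 'sigma'
  r.add_eigenvalues('sigma')      # -> 'lambda(sigma)'
  r.add_eigenvectors('sigma')     # -> 'v(sigma)'

  # in-memory: the same quantities straight from the mechanics module
  sigma = mechanics.symmetric(np.random.random((10,3,3)))
  lambd = mechanics.eigenvalues(sigma)             # ascending order
  v     = mechanics.eigenvectors(sigma,RHS=True)   # proper rotations only
  assert np.allclose(np.linalg.det(v),1.0)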