diff --git a/processing/post/addCalculation.py b/processing/post/addCalculation.py index db0428753..b1eed3c6d 100755 --- a/processing/post/addCalculation.py +++ b/processing/post/addCalculation.py @@ -4,7 +4,7 @@ import os import sys from optparse import OptionParser import re -import collections +from collections.abc import Iterable import math # noqa import scipy # noqa @@ -18,7 +18,7 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) def listify(x): - return x if isinstance(x, collections.Iterable) else [x] + return x if isinstance(x, Iterable) else [x] # -------------------------------------------------------------------- @@ -65,9 +65,10 @@ for i in range(len(options.formulas)): if filenames == []: filenames = [None] for name in filenames: - try: table = damask.ASCIItable(name = name, - buffered = False) - except: continue + try: + table = damask.ASCIItable(name = name, buffered = False) + except IOError: + continue damask.util.report(scriptName,name) # ------------------------------------------ read header ------------------------------------------- diff --git a/processing/post/addCumulative.py b/processing/post/addCumulative.py index b81a9d14f..c94737b94 100755 --- a/processing/post/addCumulative.py +++ b/processing/post/addCumulative.py @@ -41,9 +41,9 @@ if filenames == []: filenames = [None] for name in filenames: try: - table = damask.ASCIItable(name = name, - buffered = False) - except IOError: continue + table = damask.ASCIItable(name = name, buffered = False) + except IOError: + continue damask.util.report(scriptName,name) # ------------------------------------------ read header ------------------------------------------ diff --git a/processing/post/addCurl.py b/processing/post/addCurl.py index 484af9677..25639dc7c 100755 --- a/processing/post/addCurl.py +++ b/processing/post/addCurl.py @@ -2,6 +2,7 @@ import os import sys +from io import StringIO from optparse import OptionParser import numpy as np @@ -12,48 +13,6 @@ import damask scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) -def merge_dicts(*dict_args): - """Given any number of dicts, shallow copy and merge into a new dict, with precedence going to key value pairs in latter dicts.""" - result = {} - for dictionary in dict_args: - result.update(dictionary) - return result - -def curlFFT(geomdim,field): - """Calculate curl of a vector or tensor field by transforming into Fourier space.""" - shapeFFT = np.array(np.shape(field))[0:3] - grid = np.array(np.shape(field)[2::-1]) - N = grid.prod() # field size - n = np.array(np.shape(field)[3:]).prod() # data size - - field_fourier = np.fft.rfftn(field,axes=(0,1,2),s=shapeFFT) - curl_fourier = np.empty(field_fourier.shape,'c16') - - # differentiation in Fourier space - TWOPIIMG = 2.0j*np.pi - einsums = { - 3:'slm,ijkl,ijkm->ijks', # vector, 3 -> 3 - 9:'slm,ijkl,ijknm->ijksn', # tensor, 3x3 -> 3x3 - } - k_sk = np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2]))/geomdim[0] - if grid[2]%2 == 0: k_sk[grid[2]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) - - k_sj = np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1]))/geomdim[1] - if grid[1]%2 == 0: k_sj[grid[1]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) - - k_si = np.arange(grid[0]//2+1)/geomdim[2] - - kk, kj, ki = np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij') - k_s = 
np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3).astype('c16') - - e = np.zeros((3, 3, 3)) - e[0, 1, 2] = e[1, 2, 0] = e[2, 0, 1] = 1.0 # Levi-Civita symbols - e[0, 2, 1] = e[2, 1, 0] = e[1, 0, 2] = -1.0 - - curl_fourier = np.einsum(einsums[n],e,k_s,field_fourier)*TWOPIIMG - - return np.fft.irfftn(curl_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n]) - # -------------------------------------------------------------------- # MAIN @@ -61,8 +20,7 @@ def curlFFT(geomdim,field): parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ Add column(s) containing curl of requested column(s). -Operates on periodic ordered three-dimensional data sets -of vector and tensor fields. +Operates on periodic ordered three-dimensional data sets of vector and tensor fields. """, version = scriptID) parser.add_option('-p','--pos','--periodiccellcenter', @@ -70,93 +28,30 @@ parser.add_option('-p','--pos','--periodiccellcenter', type = 'string', metavar = 'string', help = 'label of coordinates [%default]') parser.add_option('-l','--label', - dest = 'data', + dest = 'labels', action = 'extend', metavar = '', help = 'label(s) of field values') parser.set_defaults(pos = 'pos', ) - (options,filenames) = parser.parse_args() - -if options.data is None: parser.error('no data column specified.') - -# --- define possible data types ------------------------------------------------------------------- - -datatypes = { - 3: {'name': 'vector', - 'shape': [3], - }, - 9: {'name': 'tensor', - 'shape': [3,3], - }, - } - -# --- loop over input files ------------------------------------------------------------------------ - if filenames == []: filenames = [None] +if options.labels is None: parser.error('no data column specified.') + for name in filenames: - try: table = damask.ASCIItable(name = name,buffered = False) - except: continue - damask.util.report(scriptName,name) + damask.util.report(scriptName,name) -# --- interpret header ---------------------------------------------------------------------------- + table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos)) - table.head_read() - - remarks = [] - errors = [] - active = [] - - coordDim = table.label_dimension(options.pos) - if coordDim != 3: - errors.append('coordinates "{}" must be three-dimensional.'.format(options.pos)) - else: coordCol = table.label_index(options.pos) - - for me in options.data: - dim = table.label_dimension(me) - if dim in datatypes: - active.append(merge_dicts({'label':me},datatypes[dim])) - remarks.append('differentiating {} "{}"...'.format(datatypes[dim]['name'],me)) - else: - remarks.append('skipping "{}" of dimension {}...'.format(me,dim) if dim != -1 else \ - '"{}" not found...'.format(me) ) - - if remarks != []: damask.util.croak(remarks) - if errors != []: - damask.util.croak(errors) - table.close(dismiss = True) - continue - -# ------------------------------------------ assemble header -------------------------------------- - - table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:])) - for data in active: - table.labels_append(['{}_curlFFT({})'.format(i+1,data['label']) - for i in range(np.prod(np.array(data['shape'])))]) # extend ASCII header with new labels - table.head_write() - -# --------------- figure out size and grid --------------------------------------------------------- - - table.data_readArray() - grid,size = 
damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)]) - -# ------------------------------------------ process value field ----------------------------------- - - stack = [table.data] - for data in active: - # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation - stack.append(curlFFT(size[::-1], - table.data[:,table.label_indexrange(data['label'])]. - reshape(grid[::-1].tolist()+data['shape']))) - -# ------------------------------------------ output result ----------------------------------------- - - if len(stack) > 1: table.data = np.hstack(tuple(stack)) - table.data_writeArray('%.12g') - -# ------------------------------------------ output finalization ----------------------------------- - - table.close() # close input ASCII table (works for stdin) + for label in options.labels: + field = table.get(label) + shape = (3,) if np.prod(field.shape)//np.prod(grid) == 3 else (3,3) # vector or tensor + field = field.reshape(np.append(grid[::-1],shape)) + table.add('curlFFT({})'.format(label), + damask.grid_filters.curl(size[::-1],field).reshape((-1,np.prod(shape))), + scriptID+' '+' '.join(sys.argv[1:])) + + table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addDisplacement.py b/processing/post/addDisplacement.py index 99d07fd18..59630a6c6 100755 --- a/processing/post/addDisplacement.py +++ b/processing/post/addDisplacement.py @@ -2,10 +2,10 @@ import os import sys +from io import StringIO from optparse import OptionParser import numpy as np -import scipy.ndimage import damask @@ -14,79 +14,6 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) -#-------------------------------------------------------------------------------------------------- -def cell2node(cellData,grid): - - nodeData = 0.0 - datalen = np.array(cellData.shape[3:]).prod() - - for i in range(datalen): - node = scipy.ndimage.convolve(cellData.reshape(tuple(grid[::-1])+(datalen,))[...,i], - np.ones((2,2,2))/8., # 2x2x2 neighborhood of cells - mode = 'wrap', - origin = -1, # offset to have cell origin as center - ) # now averaged at cell origins - node = np.append(node,node[np.newaxis,0,:,:,...],axis=0) # wrap along z - node = np.append(node,node[:,0,np.newaxis,:,...],axis=1) # wrap along y - node = np.append(node,node[:,:,0,np.newaxis,...],axis=2) # wrap along x - - nodeData = node[...,np.newaxis] if i==0 else np.concatenate((nodeData,node[...,np.newaxis]),axis=-1) - - return nodeData - -#-------------------------------------------------------------------------------------------------- -def displacementAvgFFT(F,grid,size,nodal=False,transformed=False): - """Calculate average cell center (or nodal) displacement for deformation gradient field specified in each grid cell""" - if nodal: - x, y, z = np.meshgrid(np.linspace(0,size[2],1+grid[2]), - np.linspace(0,size[1],1+grid[1]), - np.linspace(0,size[0],1+grid[0]), - indexing = 'ij') - else: - delta = size/grid*0.5 - x, y, z = np.meshgrid(np.linspace(delta[2],size[2]-delta[2],grid[2]), - np.linspace(delta[1],size[1]-delta[1],grid[1]), - np.linspace(delta[0],size[0]-delta[0],grid[0]), - indexing = 'ij') - - origCoords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3) - - F_fourier = F if transformed else np.fft.rfftn(F,axes=(0,1,2)) # transform or use provided data - Favg = np.real(F_fourier[0,0,0,:,:])/grid.prod() # take zero freq for average - avgDisplacement = 
np.einsum('ml,ijkl->ijkm',Favg-np.eye(3),origCoords) # dX = Favg.X - - return avgDisplacement - -#-------------------------------------------------------------------------------------------------- -def displacementFluctFFT(F,grid,size,nodal=False,transformed=False): - """Calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell""" - integrator = 0.5j * size / np.pi - - kk, kj, ki = np.meshgrid(np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2])), - np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1])), - np.arange(grid[0]//2+1), - indexing = 'ij') - k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3) - k_sSquared = np.einsum('...l,...l',k_s,k_s) - k_sSquared[0,0,0] = 1.0 # ignore global average frequency - -#-------------------------------------------------------------------------------------------------- -# integration in Fourier space - - displacement_fourier = -np.einsum('ijkml,ijkl,l->ijkm', - F if transformed else np.fft.rfftn(F,axes=(0,1,2)), - k_s, - integrator, - ) / k_sSquared[...,np.newaxis] - -#-------------------------------------------------------------------------------------------------- -# backtransformation to real space - - displacement = np.fft.irfftn(displacement_fourier,grid[::-1],axes=(0,1,2)) - - return cell2node(displacement,grid) if nodal else displacement - - # -------------------------------------------------------------------- # MAIN # -------------------------------------------------------------------- @@ -100,7 +27,7 @@ Outputs at cell centers or cell nodes (into separate file). parser.add_option('-f', '--defgrad', - dest = 'defgrad', + dest = 'f', metavar = 'string', help = 'label of deformation gradient [%default]') parser.add_option('-p', @@ -113,108 +40,34 @@ parser.add_option('--nodal', action = 'store_true', help = 'output nodal (instead of cell-centered) displacements') -parser.set_defaults(defgrad = 'f', - pos = 'pos', +parser.set_defaults(f = 'f', + pos = 'pos', ) (options,filenames) = parser.parse_args() -# --- loop over input files ------------------------------------------------------------------------- - -if filenames == []: filenames = [None] - for name in filenames: - outname = (os.path.splitext(name)[0] + - '_nodal' + - os.path.splitext(name)[1]) if (options.nodal and name) else None - try: table = damask.ASCIItable(name = name, - outname = outname, - buffered = False) - except: continue - damask.util.report(scriptName,'{}{}'.format(name if name else '', - ' --> {}'.format(outname) if outname else '')) + damask.util.report(scriptName,name) -# ------------------------------------------ read header ------------------------------------------ - - table.head_read() - -# ------------------------------------------ sanity checks ---------------------------------------- - - errors = [] - remarks = [] - - if table.label_dimension(options.defgrad) != 9: - errors.append('deformation gradient "{}" is not a 3x3 tensor.'.format(options.defgrad)) - - coordDim = table.label_dimension(options.pos) - if not 3 >= coordDim >= 1: - errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos)) - elif coordDim < 3: - remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim, - 's' if coordDim < 2 else '', - options.pos)) - - if remarks != []: damask.util.croak(remarks) - if errors != []: - damask.util.croak(errors) - table.close(dismiss=True) - continue - -# 
--------------- figure out size and grid --------------------------------------------------------- - - table.data_readArray([options.defgrad,options.pos]) - table.data_rewind() - - if len(table.data.shape) < 2: table.data.shape += (1,) # expand to 2D shape - if table.data[:,9:].shape[1] < 3: - table.data = np.hstack((table.data, - np.zeros((table.data.shape[0], - 3-table.data[:,9:].shape[1]),dtype='f'))) # fill coords up to 3D with zeros - - grid,size = damask.util.coordGridAndSize(table.data[:,9:12]) - N = grid.prod() - - if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid)) - if errors != []: - damask.util.croak(errors) - table.close(dismiss = True) - continue - -# ------------------------------------------ process data ------------------------------------------ - - F_fourier = np.fft.rfftn(table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),axes=(0,1,2)) # perform transform only once... - - fluctDisplacement = displacementFluctFFT(F_fourier,grid,size,options.nodal,transformed=True) - avgDisplacement = displacementAvgFFT (F_fourier,grid,size,options.nodal,transformed=True) - -# ------------------------------------------ assemble header --------------------------------------- - - if options.nodal: - table.info_clear() - table.labels_clear() - - table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:])) - table.labels_append((['{}_pos' .format(i+1) for i in range(3)] if options.nodal else []) + - ['{}_avg({}).{}' .format(i+1,options.defgrad,options.pos) for i in range(3)] + - ['{}_fluct({}).{}'.format(i+1,options.defgrad,options.pos) for i in range(3)] ) - table.head_write() - -# ------------------------------------------ output data ------------------------------------------- - - Zrange = np.linspace(0,size[2],1+grid[2]) if options.nodal else range(grid[2]) - Yrange = np.linspace(0,size[1],1+grid[1]) if options.nodal else range(grid[1]) - Xrange = np.linspace(0,size[0],1+grid[0]) if options.nodal else range(grid[0]) - - for i,z in enumerate(Zrange): - for j,y in enumerate(Yrange): - for k,x in enumerate(Xrange): - if options.nodal: table.data_clear() - else: table.data_read() - table.data_append([x,y,z] if options.nodal else []) - table.data_append(list( avgDisplacement[i,j,k,:])) - table.data_append(list(fluctDisplacement[i,j,k,:])) - table.data_write() - -# ------------------------------------------ output finalization ----------------------------------- - - table.close() # close ASCII tables + table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos)) + + F = table.get(options.f).reshape(np.append(grid[::-1],(3,3))) + if options.nodal: + table = damask.Table(damask.grid_filters.node_coord0(grid[::-1],size[::-1]).reshape((-1,3)), + {'pos':(3,)}) + table.add('avg({}).{}'.format(options.f,options.pos), + damask.grid_filters.node_displacement_avg(size[::-1],F).reshape((-1,3)), + scriptID+' '+' '.join(sys.argv[1:])) + table.add('fluct({}).{}'.format(options.f,options.pos), + damask.grid_filters.node_displacement_fluct(size[::-1],F).reshape((-1,3)), + scriptID+' '+' '.join(sys.argv[1:])) + table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt') + else: + table.add('avg({}).{}'.format(options.f,options.pos), + damask.grid_filters.cell_displacement_avg(size[::-1],F).reshape((-1,3)), + scriptID+' '+' '.join(sys.argv[1:])) + table.add('fluct({}).{}'.format(options.f,options.pos), + 
damask.grid_filters.cell_displacement_fluct(size[::-1],F).reshape((-1,3)), + scriptID+' '+' '.join(sys.argv[1:])) + table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addDivergence.py b/processing/post/addDivergence.py index 31a18f8e1..585ebb5a5 100755 --- a/processing/post/addDivergence.py +++ b/processing/post/addDivergence.py @@ -2,6 +2,7 @@ import os import sys +from io import StringIO from optparse import OptionParser import numpy as np @@ -12,53 +13,14 @@ import damask scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) -def merge_dicts(*dict_args): - """Given any number of dicts, shallow copy and merge into a new dict, with precedence going to key value pairs in latter dicts.""" - result = {} - for dictionary in dict_args: - result.update(dictionary) - return result - -def divFFT(geomdim,field): - """Calculate divergence of a vector or tensor field by transforming into Fourier space.""" - shapeFFT = np.array(np.shape(field))[0:3] - grid = np.array(np.shape(field)[2::-1]) - N = grid.prod() # field size - n = np.array(np.shape(field)[3:]).prod() # data size - - field_fourier = np.fft.rfftn(field,axes=(0,1,2),s=shapeFFT) - div_fourier = np.empty(field_fourier.shape[0:len(np.shape(field))-1],'c16') - - # differentiation in Fourier space - TWOPIIMG = 2.0j*np.pi - einsums = { - 3:'ijkl,ijkl->ijk', # vector, 3 -> 1 - 9:'ijkm,ijklm->ijkl', # tensor, 3x3 -> 3 - } - k_sk = np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2]))/geomdim[0] - if grid[2]%2 == 0: k_sk[grid[2]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) - - k_sj = np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1]))/geomdim[1] - if grid[1]%2 == 0: k_sj[grid[1]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) - - k_si = np.arange(grid[0]//2+1)/geomdim[2] - - kk, kj, ki = np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij') - k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3).astype('c16') - - div_fourier = np.einsum(einsums[n],k_s,field_fourier)*TWOPIIMG - - return np.fft.irfftn(div_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n//3]) - # -------------------------------------------------------------------- # MAIN # -------------------------------------------------------------------- -parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """ -Add column(s) containing curl of requested column(s). -Operates on periodic ordered three-dimensional data sets -of vector and tensor fields. +parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ +Add column(s) containing divergence of requested column(s). +Operates on periodic ordered three-dimensional data sets of vector and tensor fields. 
""", version = scriptID) parser.add_option('-p','--pos','--periodiccellcenter', @@ -66,95 +28,30 @@ parser.add_option('-p','--pos','--periodiccellcenter', type = 'string', metavar = 'string', help = 'label of coordinates [%default]') parser.add_option('-l','--label', - dest = 'data', + dest = 'labels', action = 'extend', metavar = '', help = 'label(s) of field values') parser.set_defaults(pos = 'pos', ) - (options,filenames) = parser.parse_args() - -if options.data is None: parser.error('no data column specified.') - -# --- define possible data types ------------------------------------------------------------------- - -datatypes = { - 3: {'name': 'vector', - 'shape': [3], - }, - 9: {'name': 'tensor', - 'shape': [3,3], - }, - } - -# --- loop over input files ------------------------------------------------------------------------ - if filenames == []: filenames = [None] +if options.labels is None: parser.error('no data column specified.') + for name in filenames: - try: table = damask.ASCIItable(name = name,buffered = False) - except: continue - damask.util.report(scriptName,name) + damask.util.report(scriptName,name) -# --- interpret header ---------------------------------------------------------------------------- + table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos)) - table.head_read() - - remarks = [] - errors = [] - active = [] - - coordDim = table.label_dimension(options.pos) - if coordDim != 3: - errors.append('coordinates "{}" must be three-dimensional.'.format(options.pos)) - else: coordCol = table.label_index(options.pos) - - for me in options.data: - dim = table.label_dimension(me) - if dim in datatypes: - active.append(merge_dicts({'label':me},datatypes[dim])) - remarks.append('differentiating {} "{}"...'.format(datatypes[dim]['name'],me)) - else: - remarks.append('skipping "{}" of dimension {}...'.format(me,dim) if dim != -1 else \ - '"{}" not found...'.format(me) ) - - if remarks != []: damask.util.croak(remarks) - if errors != []: - damask.util.croak(errors) - table.close(dismiss = True) - continue - -# ------------------------------------------ assemble header -------------------------------------- - - table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:])) - for data in active: - table.labels_append(['divFFT({})'.format(data['label']) if data['shape'] == [3] \ - else '{}_divFFT({})'.format(i+1,data['label']) - for i in range(np.prod(np.array(data['shape']))//3)]) # extend ASCII header with new labels - table.head_write() - -# --------------- figure out size and grid --------------------------------------------------------- - - table.data_readArray() - - grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)]) - -# ------------------------------------------ process value field ----------------------------------- - - stack = [table.data] - for data in active: - # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation - stack.append(divFFT(size[::-1], - table.data[:,table.label_indexrange(data['label'])]. 
- reshape(grid[::-1].tolist()+data['shape']))) - -# ------------------------------------------ output result ----------------------------------------- - - if len(stack) > 1: table.data = np.hstack(tuple(stack)) - table.data_writeArray('%.12g') - -# ------------------------------------------ output finalization ----------------------------------- - - table.close() # close input ASCII table (works for stdin) + for label in options.labels: + field = table.get(label) + shape = (3,) if np.prod(field.shape)//np.prod(grid) == 3 else (3,3) # vector or tensor + field = field.reshape(np.append(grid[::-1],shape)) + table.add('divFFT({})'.format(label), + damask.grid_filters.divergence(size[::-1],field).reshape((-1,np.prod(shape)//3)), + scriptID+' '+' '.join(sys.argv[1:])) + + table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addGradient.py b/processing/post/addGradient.py index bfadb578e..54b80ed26 100755 --- a/processing/post/addGradient.py +++ b/processing/post/addGradient.py @@ -2,6 +2,7 @@ import os import sys +from io import StringIO from optparse import OptionParser import numpy as np @@ -12,44 +13,6 @@ import damask scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) -def merge_dicts(*dict_args): - """Given any number of dicts, shallow copy and merge into a new dict, with precedence going to key value pairs in latter dicts.""" - result = {} - for dictionary in dict_args: - result.update(dictionary) - return result - -def gradFFT(geomdim,field): - """Calculate gradient of a vector or scalar field by transforming into Fourier space.""" - shapeFFT = np.array(np.shape(field))[0:3] - grid = np.array(np.shape(field)[2::-1]) - N = grid.prod() # field size - n = np.array(np.shape(field)[3:]).prod() # data size - - field_fourier = np.fft.rfftn(field,axes=(0,1,2),s=shapeFFT) - grad_fourier = np.empty(field_fourier.shape+(3,),'c16') - - # differentiation in Fourier space - TWOPIIMG = 2.0j*np.pi - einsums = { - 1:'ijkl,ijkm->ijkm', # scalar, 1 -> 3 - 3:'ijkl,ijkm->ijklm', # vector, 3 -> 3x3 - } - - k_sk = np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2]))/geomdim[0] - if grid[2]%2 == 0: k_sk[grid[2]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) - - k_sj = np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1]))/geomdim[1] - if grid[1]%2 == 0: k_sj[grid[1]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) - - k_si = np.arange(grid[0]//2+1)/geomdim[2] - - kk, kj, ki = np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij') - k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3).astype('c16') - grad_fourier = np.einsum(einsums[n],field_fourier,k_s)*TWOPIIMG - - return np.fft.irfftn(grad_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,3*n]) - # -------------------------------------------------------------------- # MAIN @@ -57,9 +20,7 @@ def gradFFT(geomdim,field): parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ Add column(s) containing gradient of requested column(s). -Operates on periodic ordered three-dimensional data sets -of vector and scalar fields. - +Operates on periodic ordered three-dimensional data sets of scalar and vector fields. 
""", version = scriptID) parser.add_option('-p','--pos','--periodiccellcenter', @@ -67,7 +28,7 @@ parser.add_option('-p','--pos','--periodiccellcenter', type = 'string', metavar = 'string', help = 'label of coordinates [%default]') parser.add_option('-l','--label', - dest = 'data', + dest = 'labels', action = 'extend', metavar = '', help = 'label(s) of field values') @@ -75,85 +36,22 @@ parser.set_defaults(pos = 'pos', ) (options,filenames) = parser.parse_args() - -if options.data is None: parser.error('no data column specified.') - -# --- define possible data types ------------------------------------------------------------------- - -datatypes = { - 1: {'name': 'scalar', - 'shape': [1], - }, - 3: {'name': 'vector', - 'shape': [3], - }, - } - -# --- loop over input files ------------------------------------------------------------------------ - if filenames == []: filenames = [None] +if options.labels is None: parser.error('no data column specified.') + for name in filenames: - try: table = damask.ASCIItable(name = name,buffered = False) - except: continue - damask.util.report(scriptName,name) + damask.util.report(scriptName,name) -# --- interpret header ---------------------------------------------------------------------------- + table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) + grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos)) - table.head_read() - - remarks = [] - errors = [] - active = [] - - coordDim = table.label_dimension(options.pos) - if coordDim != 3: - errors.append('coordinates "{}" must be three-dimensional.'.format(options.pos)) - else: coordCol = table.label_index(options.pos) - - for me in options.data: - dim = table.label_dimension(me) - if dim in datatypes: - active.append(merge_dicts({'label':me},datatypes[dim])) - remarks.append('differentiating {} "{}"...'.format(datatypes[dim]['name'],me)) - else: - remarks.append('skipping "{}" of dimension {}...'.format(me,dim) if dim != -1 else \ - '"{}" not found...'.format(me) ) - - if remarks != []: damask.util.croak(remarks) - if errors != []: - damask.util.croak(errors) - table.close(dismiss = True) - continue - -# ------------------------------------------ assemble header -------------------------------------- - - table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:])) - for data in active: - table.labels_append(['{}_gradFFT({})'.format(i+1,data['label']) - for i in range(coordDim*np.prod(np.array(data['shape'])))]) # extend ASCII header with new labels - table.head_write() - -# --------------- figure out size and grid --------------------------------------------------------- - - table.data_readArray() - - grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)]) - -# ------------------------------------------ process value field ----------------------------------- - - stack = [table.data] - for data in active: - # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation - stack.append(gradFFT(size[::-1], - table.data[:,table.label_indexrange(data['label'])]. 
- reshape(grid[::-1].tolist()+data['shape']))) - -# ------------------------------------------ output result ----------------------------------------- - - if len(stack) > 1: table.data = np.hstack(tuple(stack)) - table.data_writeArray('%.12g') - -# ------------------------------------------ output finalization ----------------------------------- - - table.close() # close input ASCII table (works for stdin) + for label in options.labels: + field = table.get(label) + shape = (1,) if np.prod(field.shape)//np.prod(grid) == 1 else (3,) # scalar or vector + field = field.reshape(np.append(grid[::-1],shape)) + table.add('gradFFT({})'.format(label), + damask.grid_filters.gradient(size[::-1],field).reshape((-1,np.prod(shape)*3)), + scriptID+' '+' '.join(sys.argv[1:])) + + table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addOrientations.py b/processing/post/addOrientations.py index 31ce6aeb3..2c46ee5ee 100755 --- a/processing/post/addOrientations.py +++ b/processing/post/addOrientations.py @@ -125,9 +125,10 @@ R = damask.Rotation.fromAxisAngle(np.array(options.labrotation),options.degrees, if filenames == []: filenames = [None] for name in filenames: - try: table = damask.ASCIItable(name = name, - buffered = False) - except Exception: continue + try: + table = damask.ASCIItable(name = name, buffered = False) + except IOError: + continue damask.util.report(scriptName,name) # ------------------------------------------ read header ------------------------------------------ diff --git a/processing/pre/geom_fromTable.py b/processing/pre/geom_fromTable.py index f513c4834..40fd17437 100755 --- a/processing/pre/geom_fromTable.py +++ b/processing/pre/geom_fromTable.py @@ -78,36 +78,15 @@ for name in filenames: table = damask.ASCIItable(name = name,readonly=True) table.head_read() # read ASCII header info -# ------------------------------------------ sanity checks --------------------------------------- - - coordDim = table.label_dimension(options.pos) - - errors = [] - if not 3 >= coordDim >= 2: - errors.append('coordinates "{}" need to have two or three dimensions.'.format(options.pos)) - if not np.all(table.label_dimension(label) == dim): - errors.append('input "{}" needs to have dimension {}.'.format(label,dim)) - if options.phase and table.label_dimension(options.phase) != 1: - errors.append('phase column "{}" is not scalar.'.format(options.phase)) - - if errors != []: - damask.util.croak(errors) - continue table.data_readArray([options.pos] \ + (label if isinstance(label, list) else [label]) \ + ([options.phase] if options.phase else [])) - if coordDim == 2: - table.data = np.insert(table.data,2,np.zeros(len(table.data)),axis=1) # add zero z coordinate for two-dimensional input if options.phase is None: table.data = np.column_stack((table.data,np.ones(len(table.data)))) # add single phase if no phase column given - grid,size = damask.util.coordGridAndSize(table.data[:,0:3]) - coords = [np.unique(table.data[:,i]) for i in range(3)] - mincorner = np.array(list(map(min,coords))) - origin = mincorner - 0.5*size/grid # shift from cell center to corner - + grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.data[:,0:3]) indices = np.lexsort((table.data[:,0],table.data[:,1],table.data[:,2])) # indices of position when sorting x fast, z slow microstructure = np.empty(grid,dtype = int) # initialize empty microstructure @@ -142,7 +121,6 @@ for name in filenames: config_header += [''] for i,data in enumerate(unique): config_header += ['[Grain{}]'.format(i+1), - 
'crystallite 1', '(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(int(data[4]),i+1), ] diff --git a/processing/pre/geom_toTable.py b/processing/pre/geom_toTable.py index ed33d9c85..3d955ad53 100755 --- a/processing/pre/geom_toTable.py +++ b/processing/pre/geom_toTable.py @@ -2,10 +2,8 @@ import os import sys -from optparse import OptionParser from io import StringIO - -import numpy as np +from optparse import OptionParser import damask @@ -24,38 +22,25 @@ Translate geom description into ASCIItable containing position and microstructur """, version = scriptID) (options, filenames) = parser.parse_args() - - if filenames == []: filenames = [None] for name in filenames: - damask.util.report(scriptName,name) + damask.util.report(scriptName,name) - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + damask.util.croak(geom) - damask.util.croak(geom) + coord0 = damask.grid_filters.cell_coord0(geom.grid,geom.size,geom.origin).reshape((-1,3),order='F') -# --- generate grid -------------------------------------------------------------------------------- + comments = geom.comments \ + + [scriptID + ' ' + ' '.join(sys.argv[1:]), + "grid\ta {}\tb {}\tc {}".format(*geom.grid), + "size\tx {}\ty {}\tz {}".format(*geom.size), + "origin\tx {}\ty {}\tz {}".format(*geom.origin), + "homogenization\t{}".format(geom.homogenization)] - grid = geom.get_grid() - size = geom.get_size() - origin = geom.get_origin() + table = damask.Table(coord0,{'pos':(3,)},comments) + table.add('microstructure',geom.microstructure.reshape((-1,1))) - x = (0.5 + np.arange(grid[0],dtype=float))/grid[0]*size[0]+origin[0] - y = (0.5 + np.arange(grid[1],dtype=float))/grid[1]*size[1]+origin[1] - z = (0.5 + np.arange(grid[2],dtype=float))/grid[2]*size[2]+origin[2] - - xx = np.tile( x, grid[1]* grid[2]) - yy = np.tile(np.repeat(y,grid[0] ),grid[2]) - zz = np.repeat(z,grid[0]*grid[1]) - -# --- create ASCII table -------------------------------------------------------------------------- - - table = damask.ASCIItable(outname = os.path.splitext(name)[0]+'.txt' if name else name) - table.info_append(geom.get_comments() + [scriptID + '\t' + ' '.join(sys.argv[1:])]) - table.labels_append(['{}_{}'.format(1+i,'pos') for i in range(3)]+['microstructure']) - table.head_write() - table.output_flush() - table.data = np.squeeze(np.dstack((xx,yy,zz,geom.microstructure.flatten('F'))),axis=0) - table.data_writeArray() - table.close() + table.to_ASCII(sys.stdout if name is None else \ + os.path.splitext(name)[0]+'.txt') diff --git a/processing/pre/mentat_spectralBox.py b/processing/pre/mentat_spectralBox.py index 89f4a7a43..a61bef57a 100755 --- a/processing/pre/mentat_spectralBox.py +++ b/processing/pre/mentat_spectralBox.py @@ -1,9 +1,10 @@ #!/usr/bin/env python3 -# -*- coding: UTF-8 no BOM -*- -import os,sys -import numpy as np +import os +import sys +from io import StringIO from optparse import OptionParser + import damask scriptName = os.path.splitext(os.path.basename(__file__))[0] @@ -191,78 +192,45 @@ parser.add_option('-p', '--port', dest = 'port', type = 'int', metavar = 'int', help = 'Mentat connection port [%default]') -parser.add_option('--homogenization', - dest = 'homogenization', - type = 'int', metavar = 'int', - help = 'homogenization index to be used [auto]') -parser.set_defaults(port = None, - homogenization = None, -) +parser.set_defaults(port = None, + ) (options, filenames) = parser.parse_args() 
-if options.port: - try: - import py_mentat - except: - parser.error('no valid Mentat release found.') +if options.port is not None: + try: + import py_mentat + except ImportError: + parser.error('no valid Mentat release found.') # --- loop over input files ------------------------------------------------------------------------ if filenames == []: filenames = [None] for name in filenames: - try: - table = damask.ASCIItable(name = name, - outname = os.path.splitext(name)[0]+'.proc' if name else name, - buffered = False, labeled = False) - except: continue - damask.util.report(scriptName,name) - -# --- interpret header ---------------------------------------------------------------------------- - - table.head_read() - info,extra_header = table.head_getGeom() - if options.homogenization: info['homogenization'] = options.homogenization + damask.util.report(scriptName,name) - damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))), - 'size x y z: %s'%(' x '.join(map(str,info['size']))), - 'origin x y z: %s'%(' : '.join(map(str,info['origin']))), - 'homogenization: %i'%info['homogenization'], - 'microstructures: %i'%info['microstructures'], - ]) + geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + microstructure = geom.get_microstructure().flatten(order='F') - errors = [] - if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') - if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') - if errors != []: - damask.util.croak(errors) - table.close(dismiss = True) - continue - -# --- read data ------------------------------------------------------------------------------------ - - microstructure = table.microstructure_read(info['grid']).reshape(info['grid'].prod(),order='F') # read microstructure - - cmds = [\ - init(), - mesh(info['grid'],info['size']), - material(), - geometry(), - initial_conditions(info['homogenization'],microstructure), - '*identify_sets', - '*show_model', - '*redraw', - '*draw_automatic', - ] - - outputLocals = {} - if options.port: - py_mentat.py_connect('',options.port) - output(cmds,outputLocals,'Mentat') - py_mentat.py_disconnect() - else: - output(cmds,outputLocals,table.__IO__['out']) # bad hack into internals of table class... 
- - table.close() + cmds = [\ + init(), + mesh(geom.grid,geom.size), + material(), + geometry(), + initial_conditions(geom.homogenization,microstructure), + '*identify_sets', + '*show_model', + '*redraw', + '*draw_automatic', + ] + + outputLocals = {} + if options.port: + py_mentat.py_connect('',options.port) + output(cmds,outputLocals,'Mentat') + py_mentat.py_disconnect() + else: + with sys.stdout if name is None else open(os.path.splitext(name)[0]+'.proc','w') as f: + output(cmds,outputLocals,f) diff --git a/processing/pre/seeds_fromGeom.py b/processing/pre/seeds_fromGeom.py index 889ef6146..2118f049d 100755 --- a/processing/pre/seeds_fromGeom.py +++ b/processing/pre/seeds_fromGeom.py @@ -1,9 +1,12 @@ #!/usr/bin/env python3 -# -*- coding: UTF-8 no BOM -*- -import os,sys -import numpy as np +import os +import sys +from io import StringIO from optparse import OptionParser + +import numpy as np + import damask scriptName = os.path.splitext(os.path.basename(__file__))[0] @@ -29,88 +32,39 @@ parser.add_option('-b', action = 'extend', metavar = '', dest = 'blacklist', help = 'blacklist of grain IDs') -parser.add_option('-p', - '--pos', '--seedposition', - dest = 'pos', - type = 'string', metavar = 'string', - help = 'label of coordinates [%default]') parser.set_defaults(whitelist = [], blacklist = [], - pos = 'pos', ) (options,filenames) = parser.parse_args() - -options.whitelist = list(map(int,options.whitelist)) -options.blacklist = list(map(int,options.blacklist)) - -# --- loop over output files ------------------------------------------------------------------------- - if filenames == []: filenames = [None] +options.whitelist = [int(i) for i in options.whitelist] +options.blacklist = [int(i) for i in options.blacklist] + for name in filenames: - try: table = damask.ASCIItable(name = name, - outname = os.path.splitext(name)[0]+'.seeds' if name else name, - buffered = False, - labeled = False) - except: continue - damask.util.report(scriptName,name) + damask.util.report(scriptName,name) + + geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) + microstructure = geom.get_microstructure().reshape((-1,1),order='F') -# --- interpret header ---------------------------------------------------------------------------- + mask = np.logical_and(np.in1d(microstructure,options.whitelist,invert=False) if options.whitelist else \ + np.full(geom.grid.prod(),True,dtype=bool), + np.in1d(microstructure,options.blacklist,invert=True) if options.blacklist else \ + np.full(geom.grid.prod(),True,dtype=bool)) + + seeds = np.concatenate((damask.grid_filters.cell_coord0(geom.grid,geom.size).reshape((-1,3)), + microstructure), + axis=1)[mask] + + comments = geom.comments \ + + [scriptID + ' ' + ' '.join(sys.argv[1:]), + "grid\ta {}\tb {}\tc {}".format(*geom.grid), + "size\tx {}\ty {}\tz {}".format(*geom.size), + "origin\tx {}\ty {}\tz {}".format(*geom.origin), + "homogenization\t{}".format(geom.homogenization)] - table.head_read() - info,extra_header = table.head_getGeom() - damask.util.report_geom(info) - - errors = [] - if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') - if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') - if errors != []: - damask.util.croak(errors) - table.close(dismiss = True) - continue - -# --- read data ------------------------------------------------------------------------------------ - - microstructure = table.microstructure_read(info['grid']) # read (linear) microstructure - -# --- generate grid 
-------------------------------------------------------------------------------- - - x = (0.5 + np.arange(info['grid'][0],dtype=float))/info['grid'][0]*info['size'][0]+info['origin'][0] - y = (0.5 + np.arange(info['grid'][1],dtype=float))/info['grid'][1]*info['size'][1]+info['origin'][1] - z = (0.5 + np.arange(info['grid'][2],dtype=float))/info['grid'][2]*info['size'][2]+info['origin'][2] - - xx = np.tile( x, info['grid'][1]* info['grid'][2]) - yy = np.tile(np.repeat(y,info['grid'][0] ),info['grid'][2]) - zz = np.repeat(z,info['grid'][0]*info['grid'][1]) - - mask = np.logical_and(np.in1d(microstructure,options.whitelist,invert=False) if options.whitelist != [] - else np.full_like(microstructure,True,dtype=bool), - np.in1d(microstructure,options.blacklist,invert=True ) if options.blacklist != [] - else np.full_like(microstructure,True,dtype=bool)) - -# ------------------------------------------ assemble header --------------------------------------- - - table.info_clear() - table.info_append(extra_header+[ - scriptID + ' ' + ' '.join(sys.argv[1:]), - "grid\ta {}\tb {}\tc {}".format(*info['grid']), - "size\tx {}\ty {}\tz {}".format(*info['size']), - "origin\tx {}\ty {}\tz {}".format(*info['origin']), - "homogenization\t{}".format(info['homogenization']), - "microstructures\t{}".format(info['microstructures']), - ]) - table.labels_clear() - table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.pos) for i in range(3)]+['microstructure']) - table.head_write() - table.output_flush() - -# --- write seeds information ------------------------------------------------------------ - - table.data = np.squeeze(np.dstack((xx,yy,zz,microstructure)))[mask] - table.data_writeArray() - -# ------------------------------------------ finalize output --------------------------------------- - - table.close() + table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments) + table.to_ASCII(sys.stdout if name is None else \ + os.path.splitext(name)[0]+'.seeds') diff --git a/processing/pre/seeds_fromPokes.py b/processing/pre/seeds_fromPokes.py index 08e600ffe..1436841d0 100755 --- a/processing/pre/seeds_fromPokes.py +++ b/processing/pre/seeds_fromPokes.py @@ -1,11 +1,14 @@ #!/usr/bin/env python3 -# -*- coding: UTF-8 no BOM -*- -import os,math,sys -import numpy as np -import damask +import os +import sys +from io import StringIO from optparse import OptionParser +import numpy as np + +import damask + scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptID = ' '.join([scriptName,damask.version]) @@ -35,117 +38,58 @@ parser.add_option('-y', action = 'store_true', dest = 'y', help = 'poke 45 deg along y') -parser.add_option('-p','--position', - dest = 'position', - type = 'string', metavar = 'string', - help = 'column label for coordinates [%default]') parser.set_defaults(x = False, y = False, box = [0.0,1.0,0.0,1.0,0.0,1.0], N = 16, - position = 'pos', ) (options,filenames) = parser.parse_args() +if filenames == []: filenames = [None] options.box = np.array(options.box).reshape(3,2) -# --- loop over output files ------------------------------------------------------------------------- - -if filenames == []: filenames = [None] - for name in filenames: - try: - table = damask.ASCIItable(name = name, - outname = os.path.splitext(name)[-2]+'_poked_{}.seeds'.format(options.N) if name else name, - buffered = False, labeled = False) - except: continue - damask.util.report(scriptName,name) - -# --- interpret header 
---------------------------------------------------------------------------- - - table.head_read() - info,extra_header = table.head_getGeom() - - damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))), - 'size x y z: %s'%(' x '.join(map(str,info['size']))), - 'origin x y z: %s'%(' : '.join(map(str,info['origin']))), - 'homogenization: %i'%info['homogenization'], - 'microstructures: %i'%info['microstructures'], - ]) - - errors = [] - if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') - if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') - if errors != []: - damask.util.croak(errors) - table.close(dismiss = True) - continue - -# --- read data ------------------------------------------------------------------------------------ - - microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure - -# --- do work ------------------------------------------------------------------------------------ - - newInfo = { - 'microstructures': 0, - } - offset = (np.amin(options.box, axis=1)*info['grid']/info['size']).astype(int) - box = np.amax(options.box, axis=1) - np.amin(options.box, axis=1) - - Nx = int(options.N/math.sqrt(options.N*info['size'][1]*box[1]/info['size'][0]/box[0])) - Ny = int(options.N/math.sqrt(options.N*info['size'][0]*box[0]/info['size'][1]/box[1])) - Nz = int(box[2]*info['grid'][2]) - - damask.util.croak('poking {} x {} x {} in box {} {} {}...'.format(Nx,Ny,Nz,*box)) - - seeds = np.zeros((Nx*Ny*Nz,4),'d') - grid = np.zeros(3,'i') - - n = 0 - for i in range(Nx): - for j in range(Ny): - grid[0] = round((i+0.5)*box[0]*info['grid'][0]/Nx-0.5)+offset[0] - grid[1] = round((j+0.5)*box[1]*info['grid'][1]/Ny-0.5)+offset[1] - for k in range(Nz): - grid[2] = k + offset[2] - grid %= info['grid'] - seeds[n,0:3] = (0.5+grid)/info['grid'] # normalize coordinates to box - seeds[n, 3] = microstructure[grid[0],grid[1],grid[2]] - if options.x: grid[0] += 1 - if options.y: grid[1] += 1 - n += 1 - - newInfo['microstructures'] = len(np.unique(seeds[:,3])) - -# --- report --------------------------------------------------------------------------------------- - if (newInfo['microstructures'] != info['microstructures']): - damask.util.croak('--> microstructures: %i'%newInfo['microstructures']) - - -# ------------------------------------------ assemble header --------------------------------------- - table.info_clear() - table.info_append(extra_header+[ - scriptID + ' ' + ' '.join(sys.argv[1:]), - "poking\ta {}\tb {}\tc {}".format(Nx,Ny,Nz), - "grid\ta {}\tb {}\tc {}".format(*info['grid']), - "size\tx {}\ty {}\tz {}".format(*info['size']), - "origin\tx {}\ty {}\tz {}".format(*info['origin']), - "homogenization\t{}".format(info['homogenization']), - "microstructures\t{}".format(newInfo['microstructures']), - ]) - table.labels_clear() - table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in range(3)]+['microstructure']) - table.head_write() - table.output_flush() - -# --- write seeds information ------------------------------------------------------------ - - table.data = seeds - table.data_writeArray() + damask.util.report(scriptName,name) + geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) -# --- output finalization -------------------------------------------------------------------------- - - table.close() # close ASCII table + offset =(np.amin(options.box, axis=1)*geom.grid/geom.size).astype(int) + box = np.amax(options.box, axis=1) \ + 
- np.amin(options.box, axis=1) + + Nx = int(options.N/np.sqrt(options.N*geom.size[1]*box[1]/geom.size[0]/box[0])) + Ny = int(options.N/np.sqrt(options.N*geom.size[0]*box[0]/geom.size[1]/box[1])) + Nz = int(box[2]*geom.grid[2]) + + damask.util.croak('poking {} x {} x {} in box {} {} {}...'.format(Nx,Ny,Nz,*box)) + + seeds = np.zeros((Nx*Ny*Nz,4),'d') + g = np.zeros(3,'i') + + n = 0 + for i in range(Nx): + for j in range(Ny): + g[0] = round((i+0.5)*box[0]*geom.grid[0]/Nx-0.5)+offset[0] + g[1] = round((j+0.5)*box[1]*geom.grid[1]/Ny-0.5)+offset[1] + for k in range(Nz): + g[2] = k + offset[2] + g %= geom.grid + seeds[n,0:3] = (g+0.5)/geom.grid # normalize coordinates to box + seeds[n, 3] = geom.microstructure[g[0],g[1],g[2]] + if options.x: g[0] += 1 + if options.y: g[1] += 1 + n += 1 + + + comments = geom.comments \ + + [scriptID + ' ' + ' '.join(sys.argv[1:]), + "poking\ta {}\tb {}\tc {}".format(Nx,Ny,Nz), + "grid\ta {}\tb {}\tc {}".format(*geom.grid), + "size\tx {}\ty {}\tz {}".format(*geom.size), + "origin\tx {}\ty {}\tz {}".format(*geom.origin), + "homogenization\t{}".format(geom.homogenization)] + + table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments) + table.to_ASCII(sys.stdout if name is None else \ + os.path.splitext(name)[0]+'_poked_{}.seeds'.format(options.N)) diff --git a/python/damask/__init__.py b/python/damask/__init__.py index 2cd26cf1c..77666561c 100644 --- a/python/damask/__init__.py +++ b/python/damask/__init__.py @@ -23,4 +23,5 @@ from .util import extendableOption # noqa # functions in modules from . import mechanics # noqa +from . import grid_filters # noqa diff --git a/python/damask/geom.py b/python/damask/geom.py index 32ea2ed89..63ce20115 100644 --- a/python/damask/geom.py +++ b/python/damask/geom.py @@ -205,6 +205,9 @@ class Geom(): else: self.homogenization = homogenization + @property + def grid(self): + return self.get_grid() def get_microstructure(self): """Return the microstructure representation.""" diff --git a/python/damask/grid_filters.py b/python/damask/grid_filters.py new file mode 100644 index 000000000..a1e1ff06d --- /dev/null +++ b/python/damask/grid_filters.py @@ -0,0 +1,299 @@ +from scipy import spatial +import numpy as np + +def __ks(size,grid,first_order=False): + """ + Get wave numbers operator. + + Parameters + ---------- + size : numpy.ndarray + physical size of the periodic field. + + """ + k_sk = np.where(np.arange(grid[0])>grid[0]//2,np.arange(grid[0])-grid[0],np.arange(grid[0]))/size[0] + if grid[0]%2 == 0 and first_order: k_sk[grid[0]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) + + k_sj = np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1]))/size[1] + if grid[1]%2 == 0 and first_order: k_sj[grid[1]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) + + k_si = np.arange(grid[2]//2+1)/size[2] + + kk, kj, ki = np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij') + return np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3) + + +def curl(size,field): + """ + Calculate curl of a vector or tensor field in Fourier space. + + Parameters + ---------- + size : numpy.ndarray + physical size of the periodic field. 
+ + """ + n = np.prod(field.shape[3:]) + k_s = __ks(size,field.shape[:3],True) + + e = np.zeros((3, 3, 3)) + e[0, 1, 2] = e[1, 2, 0] = e[2, 0, 1] = +1.0 # Levi-Civita symbol + e[0, 2, 1] = e[2, 1, 0] = e[1, 0, 2] = -1.0 + + field_fourier = np.fft.rfftn(field,axes=(0,1,2)) + curl = (np.einsum('slm,ijkl,ijkm ->ijks', e,k_s,field_fourier)*2.0j*np.pi if n == 3 else # vector, 3 -> 3 + np.einsum('slm,ijkl,ijknm->ijksn',e,k_s,field_fourier)*2.0j*np.pi) # tensor, 3x3 -> 3x3 + + return np.fft.irfftn(curl,axes=(0,1,2),s=field.shape[:3]) + + +def divergence(size,field): + """ + Calculate divergence of a vector or tensor field in Fourier space. + + Parameters + ---------- + size : numpy.ndarray + physical size of the periodic field. + + """ + n = np.prod(field.shape[3:]) + k_s = __ks(size,field.shape[:3],True) + + field_fourier = np.fft.rfftn(field,axes=(0,1,2)) + divergence = (np.einsum('ijkl,ijkl ->ijk', k_s,field_fourier)*2.0j*np.pi if n == 3 else # vector, 3 -> 1 + np.einsum('ijkm,ijklm->ijkl',k_s,field_fourier)*2.0j*np.pi) # tensor, 3x3 -> 3 + + return np.fft.irfftn(divergence,axes=(0,1,2),s=field.shape[:3]) + + +def gradient(size,field): + """ + Calculate gradient of a vector or scalar field in Fourier space. + + Parameters + ---------- + size : numpy.ndarray + physical size of the periodic field. + + """ + n = np.prod(field.shape[3:]) + k_s = __ks(size,field.shape[:3],True) + + field_fourier = np.fft.rfftn(field,axes=(0,1,2)) + gradient = (np.einsum('ijkl,ijkm->ijkm', field_fourier,k_s)*2.0j*np.pi if n == 1 else # scalar, 1 -> 3 + np.einsum('ijkl,ijkm->ijklm',field_fourier,k_s)*2.0j*np.pi) # vector, 3 -> 3x3 + + return np.fft.irfftn(gradient,axes=(0,1,2),s=field.shape[:3]) + + +def cell_coord0(grid,size,origin=np.zeros(3)): + """ + Cell center positions (undeformed). + + Parameters + ---------- + grid : numpy.ndarray + number of grid points. + size : numpy.ndarray + physical size of the periodic field. + + """ + start = origin + size/grid*.5 + end = origin - size/grid*.5 + size + x, y, z = np.meshgrid(np.linspace(start[2],end[2],grid[2]), + np.linspace(start[1],end[1],grid[1]), + np.linspace(start[0],end[0],grid[0]), + indexing = 'ij') + + return np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3) + +def cell_displacement_fluct(size,F): + """ + Cell center displacement field from fluctuation part of the deformation gradient field. + + Parameters + ---------- + size : numpy.ndarray + physical size of the periodic field. + F : numpy.ndarray + deformation gradient field. + + """ + integrator = 0.5j*size/np.pi + + k_s = __ks(size,F.shape[:3],False) + k_s_squared = np.einsum('...l,...l',k_s,k_s) + k_s_squared[0,0,0] = 1.0 + + displacement = -np.einsum('ijkml,ijkl,l->ijkm', + np.fft.rfftn(F,axes=(0,1,2)), + k_s, + integrator, + ) / k_s_squared[...,np.newaxis] + + return np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3]) + +def cell_displacement_avg(size,F): + """ + Cell center displacement field from average part of the deformation gradient field. + + Parameters + ---------- + size : numpy.ndarray + physical size of the periodic field. + F : numpy.ndarray + deformation gradient field. + + """ + F_avg = np.average(F,axis=(0,1,2)) + return np.einsum('ml,ijkl->ijkm',F_avg-np.eye(3),cell_coord0(F.shape[:3],size)) + +def cell_coord0_2_DNA(coord0,ordered=True): + """ + Return grid 'DNA', i.e. grid, size, and origin from array of cell positions. + + Parameters + ---------- + coord0 : numpy.ndarray + array of undeformed cell coordinates. 
+ ordered : bool, optional + expect coord0 data to be ordered (x fast, z slow). + + """ + coords = [np.unique(coord0[:,i]) for i in range(3)] + mincorner = np.array(list(map(min,coords))) + maxcorner = np.array(list(map(max,coords))) + grid = np.array(list(map(len,coords)),'i') + size = grid/np.maximum(grid-1,1) * (maxcorner-mincorner) + delta = size/grid + origin = mincorner - delta*.5 + + if grid.prod() != len(coord0): + raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid)) + + start = origin + delta*.5 + end = origin - delta*.5 + size + + if not np.allclose(coords[0],np.linspace(start[0],end[0],grid[0])) and \ + np.allclose(coords[1],np.linspace(start[1],end[1],grid[1])) and \ + np.allclose(coords[2],np.linspace(start[2],end[2],grid[2])): + raise ValueError('Regular grid spacing violated.') + + if ordered and not np.allclose(coord0.reshape(tuple(grid[::-1])+(3,)),cell_coord0(grid,size,origin)): + raise ValueError('Input data is not a regular grid.') + + return (grid,size,origin) + + +def node_coord0(grid,size,origin=np.zeros(3)): + """ + Nodal positions (undeformed). + + Parameters + ---------- + grid : numpy.ndarray + number of grid points. + size : numpy.ndarray + physical size of the periodic field. + + """ + x, y, z = np.meshgrid(np.linspace(origin[2],size[2]+origin[2],1+grid[2]), + np.linspace(origin[1],size[1]+origin[1],1+grid[1]), + np.linspace(origin[0],size[0]+origin[0],1+grid[0]), + indexing = 'ij') + + return np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3) + +def node_displacement_fluct(size,F): + """ + Nodal displacement field from fluctuation part of the deformation gradient field. + + Parameters + ---------- + size : numpy.ndarray + physical size of the periodic field. + F : numpy.ndarray + deformation gradient field. + + """ + return cell_2_node(cell_displacement_fluct(size,F)) + +def node_displacement_avg(size,F): + """ + Nodal displacement field from average part of the deformation gradient field. + + Parameters + ---------- + size : numpy.ndarray + physical size of the periodic field. + F : numpy.ndarray + deformation gradient field. + + """ + F_avg = np.average(F,axis=(0,1,2)) + return np.einsum('ml,ijkl->ijkm',F_avg-np.eye(3),node_coord0(F.shape[:3],size)) + + +def cell_2_node(cell_data): + """Interpolate cell data to nodal data.""" + n = ( cell_data + np.roll(cell_data,1,(0,1,2)) + + np.roll(cell_data,1,(0,)) + np.roll(cell_data,1,(1,)) + np.roll(cell_data,1,(2,)) + + np.roll(cell_data,1,(0,1)) + np.roll(cell_data,1,(1,2)) + np.roll(cell_data,1,(2,0)))*0.125 + + return np.pad(n,((0,1),(0,1),(0,1))+((0,0),)*len(cell_data.shape[3:]),mode='wrap') + +def node_2_cell(node_data): + """Interpolate nodal data to cell data.""" + c = ( node_data + np.roll(node_data,1,(0,1,2)) + + np.roll(node_data,1,(0,)) + np.roll(node_data,1,(1,)) + np.roll(node_data,1,(2,)) + + np.roll(node_data,1,(0,1)) + np.roll(node_data,1,(1,2)) + np.roll(node_data,1,(2,0)))*0.125 + + return c[:-1,:-1,:-1] + +def node_coord0_2_DNA(coord0,ordered=False): + """ + Return grid 'DNA', i.e. grid, size, and origin from array of nodal positions. + + Parameters + ---------- + coord0 : numpy.ndarray + array of undeformed nodal coordinates + ordered : bool, optional + expect coord0 data to be ordered (x fast, z slow). 
+
+    """
+    coords = [np.unique(coord0[:,i]) for i in range(3)]
+    mincorner = np.array(list(map(min,coords)))
+    maxcorner = np.array(list(map(max,coords)))
+    grid = np.array(list(map(len,coords)),'i') - 1
+    size = maxcorner-mincorner
+    origin = mincorner
+
+    if (grid+1).prod() != len(coord0):
+        raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid))
+
+    if not (np.allclose(coords[0],np.linspace(mincorner[0],maxcorner[0],grid[0]+1)) and
+            np.allclose(coords[1],np.linspace(mincorner[1],maxcorner[1],grid[1]+1)) and
+            np.allclose(coords[2],np.linspace(mincorner[2],maxcorner[2],grid[2]+1))):
+        raise ValueError('Regular grid spacing violated.')
+
+    if ordered and not np.allclose(coord0.reshape(tuple((grid+1)[::-1])+(3,)),node_coord0(grid,size,origin)):
+        raise ValueError('Input data is not a regular grid.')
+
+    return (grid,size,origin)
+
+
+def regrid(size,F,new_grid):
+    """tbd."""
+    c = cell_coord0(F.shape[:3][::-1],size) \
+      + cell_displacement_avg(size,F) \
+      + cell_displacement_fluct(size,F)
+
+    outer = np.dot(np.average(F,axis=(0,1,2)),size)
+    for d in range(3):
+        c[np.where(c[:,:,:,d]<0)] += outer[d]
+        c[np.where(c[:,:,:,d]>outer[d])] -= outer[d]
+
+    tree = spatial.cKDTree(c.reshape((-1,3)),boxsize=outer)
+    return tree.query(cell_coord0(new_grid,outer))[1]
diff --git a/python/damask/table.py b/python/damask/table.py
index 56af8b622..5aa74106c 100644
--- a/python/damask/table.py
+++ b/python/damask/table.py
@@ -97,7 +97,6 @@ class Table():
 
     @property
     def labels(self):
-        """Return the labels of all columns."""
        return list(self.shapes.keys())
 
 
diff --git a/python/tests/test_grid_filters.py b/python/tests/test_grid_filters.py
new file mode 100644
index 000000000..b23fad549
--- /dev/null
+++ b/python/tests/test_grid_filters.py
@@ -0,0 +1,54 @@
+import pytest
+import numpy as np
+
+from damask import grid_filters
+
+class TestGridFilters:
+
+    def test_cell_coord0(self):
+        size = np.random.random(3)
+        grid = np.random.randint(8,32,(3))
+        coord = grid_filters.cell_coord0(grid,size)
+        assert np.allclose(coord[0,0,0],size/grid*.5) and coord.shape == tuple(grid[::-1]) + (3,)
+
+    def test_node_coord0(self):
+        size = np.random.random(3)
+        grid = np.random.randint(8,32,(3))
+        coord = grid_filters.node_coord0(grid,size)
+        assert np.allclose(coord[-1,-1,-1],size) and coord.shape == tuple(grid[::-1]+1) + (3,)
+
+    def test_coord0(self):
+        size = np.random.random(3)
+        grid = np.random.randint(8,32,(3))
+        c = grid_filters.cell_coord0(grid+1,size+size/grid)
+        n = grid_filters.node_coord0(grid,size) + size/grid*.5
+        assert np.allclose(c,n)
+
+    @pytest.mark.parametrize('mode',[('cell'),('node')])
+    def test_grid_DNA(self,mode):
+        """Ensure that xx_coord0_2_DNA is the inverse of xx_coord0."""
+        grid = np.random.randint(8,32,(3))
+        size = np.random.random(3)
+        origin = np.random.random(3)
+
+        coord0 = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode))                      # noqa
+        _grid,_size,_origin = eval('grid_filters.{}_coord0_2_DNA(coord0.reshape((-1,3)))'.format(mode))
+        assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin)
+
+
+    @pytest.mark.parametrize('mode',[('cell'),('node')])
+    def test_displacement_avg_vanishes(self,mode):
+        """Ensure that random fluctuations in F do not result in average displacement."""
+        size = np.random.random(3)                                                                  # noqa
+        grid = np.random.randint(8,32,(3))
+        F = np.random.random(tuple(grid)+(3,3))
+        F += np.eye(3) - np.average(F,axis=(0,1,2))
+        assert np.allclose(eval('grid_filters.{}_displacement_avg(size,F)'.format(mode)),0.0)
+ + @pytest.mark.parametrize('mode',[('cell'),('node')]) + def test_displacement_fluct_vanishes(self,mode): + """Ensure that constant F does not result in fluctuating displacement.""" + size = np.random.random(3) # noqa + grid = np.random.randint(8,32,(3)) + F = np.broadcast_to(np.random.random((3,3)), tuple(grid)+(3,3)) # noqa + assert np.allclose(eval('grid_filters.{}_displacement_fluct(size,F)'.format(mode)),0.0)
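
Usage sketch (an illustration only, not part of the patch above): the snippet below exercises the grid_filters functions introduced in this diff in the same spirit as the new tests. The grid and box size are arbitrary example values; the first check relies on spectral differentiation multiplying the k=0 Fourier mode of a periodic field by zero, the second on cell_coord0_2_DNA being the inverse of cell_coord0.

    import numpy as np
    from damask import grid_filters

    grid = np.array([16,16,16])                  # arbitrary example grid
    size = np.array([1.0,2.0,3.0])               # arbitrary example box size

    f = np.random.random(tuple(grid)+(1,))       # periodic scalar field, one value per cell

    # the k=0 mode is multiplied by zero, so the volume average of the
    # spectral gradient of a periodic scalar field vanishes (up to roundoff)
    grad = grid_filters.gradient(size,f)
    assert np.allclose(np.average(grad,axis=(0,1,2)),0.0)

    # grid 'DNA' (grid, size, origin) is recovered from the flattened cell-center positions
    coord0 = grid_filters.cell_coord0(grid,size)
    g,s,o = grid_filters.cell_coord0_2_DNA(coord0.reshape((-1,3)))
    assert np.allclose(g,grid) and np.allclose(s,size) and np.allclose(o,0.0)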