#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys

from optparse import OptionParser

import numpy as np

import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]  # name of this script, used in reporting and table headers
scriptID   = ' '.join([scriptName,damask.version])            # identifier appended to the ASCII table header
def volTetrahedron(coords):
  """
  Return the volume of the tetrahedron spanned by four vertices.

  Parameters
  ----------
  coords : sequence of four array-likes of length 3
      The position vectors of the 4 vertices in 3 dimensions; they are
      concatenated into a NumPy array of shape (4,3).

  Returns
  -------
  float
      Tetrahedron volume (NaN for numerically degenerate input whose
      Cayley-Menger determinant comes out negative).

  This method implements Tartaglia's formula using the
  Cayley-Menger determinant:
              |0   1    1    1    1  |
              |1   0   s1^2 s2^2 s3^2|
    288 V^2 = |1  s1^2  0   s4^2 s5^2|
              |1  s2^2 s4^2  0   s6^2|
              |1  s3^2 s5^2 s6^2  0  |
  where s1, s2, ..., s6 are the tetrahedron side lengths.

  from http://codereview.stackexchange.com/questions/77593/calculating-the-volume-of-a-tetrahedron
  """
  # The indexes of rows in the vertices array corresponding to all
  # possible pairs of vertices, i.e. the six tetrahedron edges
  vertex_pair_indexes = np.array(((0, 1), (0, 2), (0, 3),
                                  (1, 2), (1, 3), (2, 3)))

  # Get all the squares of all side lengths from the differences between
  # the 6 different pairs of vertex positions
  vertices = np.concatenate((coords[0],coords[1],coords[2],coords[3])).reshape([4,3])
  vertex1, vertex2 = vertex_pair_indexes[:,0], vertex_pair_indexes[:,1]
  sides_squared = np.sum((vertices[vertex1] - vertices[vertex2])**2,axis=-1)

  # Set up the Cayley-Menger determinant
  M = np.zeros((5,5))
  # Fill in the upper triangle of the matrix
  M[0,1:] = 1
  # The squared-side length elements can be indexed using the vertex
  # pair indices (compare with the determinant illustrated above)
  M[tuple(zip(*(vertex_pair_indexes + 1)))] = sides_squared

  # The matrix is symmetric, so we can fill in the lower triangle by
  # adding the transpose
  M = M + M.T

  return np.sqrt(np.linalg.det(M) / 288)
def volumeMismatch(size,F,nodes):
  """
  Calculate the mismatch between the volume of the reconstructed
  (compatible) cube and the determinant of the deformation gradient
  at each Fourier point.

  Parameters
  ----------
  size : array-like, shape (3,)
      physical size of the periodic domain.
  F : ndarray, shape (3,3,gx,gy,gz)
      deformation gradient field.
  nodes : ndarray, shape (3,gx+1,gy+1,gz+1)
      nodal positions of the reconstructed mesh.

  Returns
  -------
  ndarray of shape grid
      per-cell hexahedron volume divided by det(F) and by the
      undeformed cell volume.

  NOTE(review): relies on the module-level 'grid' array set in the
  main part of this script -- confirm before reusing elsewhere.
  """
  coords     = np.empty([8,3])
  vMismatch  = np.empty(grid)
  volInitial = size.prod()/grid.prod()                   # undeformed volume of one cell

  #--------------------------------------------------------------------------------------------------
  # calculate actual volume and volume resulting from deformation gradient
  for k in xrange(grid[2]):
    for j in xrange(grid[1]):
      for i in xrange(grid[0]):
        # the eight nodes of cell (i,j,k) in a fixed winding order
        coords[0,0:3] = nodes[0:3,i,  j,  k  ]
        coords[1,0:3] = nodes[0:3,i+1,j,  k  ]
        coords[2,0:3] = nodes[0:3,i+1,j+1,k  ]
        coords[3,0:3] = nodes[0:3,i,  j+1,k  ]
        coords[4,0:3] = nodes[0:3,i,  j,  k+1]
        coords[5,0:3] = nodes[0:3,i+1,j,  k+1]
        coords[6,0:3] = nodes[0:3,i+1,j+1,k+1]
        coords[7,0:3] = nodes[0:3,i,  j+1,k+1]
        # hexahedron volume as the sum of six tetrahedra
        vMismatch[i,j,k] = \
          abs(volTetrahedron([coords[6,0:3],coords[0,0:3],coords[7,0:3],coords[3,0:3]])) \
        + abs(volTetrahedron([coords[6,0:3],coords[0,0:3],coords[7,0:3],coords[4,0:3]])) \
        + abs(volTetrahedron([coords[6,0:3],coords[0,0:3],coords[2,0:3],coords[3,0:3]])) \
        + abs(volTetrahedron([coords[6,0:3],coords[0,0:3],coords[2,0:3],coords[1,0:3]])) \
        + abs(volTetrahedron([coords[6,0:3],coords[4,0:3],coords[1,0:3],coords[5,0:3]])) \
        + abs(volTetrahedron([coords[6,0:3],coords[4,0:3],coords[1,0:3],coords[0,0:3]]))
        vMismatch[i,j,k] = vMismatch[i,j,k]/np.linalg.det(F[0:3,0:3,i,j,k])

  return vMismatch/volInitial
def shapeMismatch(size,F,nodes,centres):
  """
  Calculate the mismatch between the vectors from the central point to
  the corners of the reconstructed (compatible) volume element and the
  vectors obtained by deforming the initial volume element with the
  current deformation gradient.

  Parameters
  ----------
  size : array-like, shape (3,)
      physical size of the periodic domain.
  F : ndarray, shape (3,3,gx,gy,gz)
      deformation gradient field.
  nodes : ndarray, shape (3,gx+1,gy+1,gz+1)
      nodal positions of the reconstructed mesh.
  centres : ndarray, shape (3,gx,gy,gz)
      cell-centre positions of the reconstructed mesh.

  Returns
  -------
  ndarray of shape grid
      per-cell sum of the norms of the eight corner deviations.

  NOTE(review): relies on the module-level 'grid' array set in the
  main part of this script -- confirm before reusing elsewhere.
  """
  coordsInitial = np.empty([8,3])
  sMismatch     = np.empty(grid)

  #--------------------------------------------------------------------------------------------------
  # initial positions: corners of the undeformed cell relative to its centre
  coordsInitial[0,0:3] = [-size[0]/grid[0],-size[1]/grid[1],-size[2]/grid[2]]
  coordsInitial[1,0:3] = [+size[0]/grid[0],-size[1]/grid[1],-size[2]/grid[2]]
  coordsInitial[2,0:3] = [+size[0]/grid[0],+size[1]/grid[1],-size[2]/grid[2]]
  coordsInitial[3,0:3] = [-size[0]/grid[0],+size[1]/grid[1],-size[2]/grid[2]]
  coordsInitial[4,0:3] = [-size[0]/grid[0],-size[1]/grid[1],+size[2]/grid[2]]
  coordsInitial[5,0:3] = [+size[0]/grid[0],-size[1]/grid[1],+size[2]/grid[2]]
  coordsInitial[6,0:3] = [+size[0]/grid[0],+size[1]/grid[1],+size[2]/grid[2]]
  coordsInitial[7,0:3] = [-size[0]/grid[0],+size[1]/grid[1],+size[2]/grid[2]]
  coordsInitial = coordsInitial/2.0                      # half-diagonals: centre-to-corner vectors

  #--------------------------------------------------------------------------------------------------
  # compare deformed original and deformed positions to actual positions
  for k in xrange(grid[2]):
    for j in xrange(grid[1]):
      for i in xrange(grid[0]):
        sMismatch[i,j,k] = \
          np.linalg.norm(nodes[0:3,i,  j,  k  ] - centres[0:3,i,j,k] - np.dot(F[:,:,i,j,k], coordsInitial[0,0:3]))\
        + np.linalg.norm(nodes[0:3,i+1,j,  k  ] - centres[0:3,i,j,k] - np.dot(F[:,:,i,j,k], coordsInitial[1,0:3]))\
        + np.linalg.norm(nodes[0:3,i+1,j+1,k  ] - centres[0:3,i,j,k] - np.dot(F[:,:,i,j,k], coordsInitial[2,0:3]))\
        + np.linalg.norm(nodes[0:3,i,  j+1,k  ] - centres[0:3,i,j,k] - np.dot(F[:,:,i,j,k], coordsInitial[3,0:3]))\
        + np.linalg.norm(nodes[0:3,i,  j,  k+1] - centres[0:3,i,j,k] - np.dot(F[:,:,i,j,k], coordsInitial[4,0:3]))\
        + np.linalg.norm(nodes[0:3,i+1,j,  k+1] - centres[0:3,i,j,k] - np.dot(F[:,:,i,j,k], coordsInitial[5,0:3]))\
        + np.linalg.norm(nodes[0:3,i+1,j+1,k+1] - centres[0:3,i,j,k] - np.dot(F[:,:,i,j,k], coordsInitial[6,0:3]))\
        + np.linalg.norm(nodes[0:3,i,  j+1,k+1] - centres[0:3,i,j,k] - np.dot(F[:,:,i,j,k], coordsInitial[7,0:3]))

  return sMismatch
# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options file[s]', description = """
Add column(s) containing the shape and volume mismatch resulting from given deformation gradient.
Operates on periodic three-dimensional x,y,z-ordered data sets.

""", version = scriptID)

parser.add_option('-c','--coordinates',
                  dest = 'coords',
                  type = 'string', metavar = 'string',
                  help = 'column heading of coordinates [%default]')
parser.add_option('-f','--defgrad',
                  dest = 'defgrad',
                  type = 'string', metavar = 'string ',
                  help = 'column heading of deformation gradient [%default]')
parser.add_option('--no-shape','-s',
                  dest = 'shape',
                  action = 'store_false',
                  help = 'omit shape mismatch')
parser.add_option('--no-volume','-v',
                  dest = 'volume',
                  action = 'store_false',
                  help = 'omit volume mismatch')
parser.set_defaults(coords = 'ipinitialcoord',
                    defgrad = 'f',
                    shape = True,
                    volume = True,
                   )

(options,filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]                                                 # None triggers stdin/stdout mode

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              buffered = False)
  except: continue                                                                     # skip unreadable files, keep processing the rest
  damask.util.report(scriptName,name)

# ------------------------------------------ read header ------------------------------------------

  table.head_read()

# ------------------------------------------ sanity checks ----------------------------------------

  errors  = []
  remarks = []

  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
  else: colCoord = table.label_index(options.coords)

  if table.label_dimension(options.defgrad) != 9: errors.append('deformation gradient {} is not a tensor.'.format(options.defgrad))
  else: colF = table.label_index(options.defgrad)

  if remarks != []: damask.util.croak(remarks)
  if errors  != []:
    damask.util.croak(errors)
    table.close(dismiss = True)
    continue

# ------------------------------------------ assemble header --------------------------------------

  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
  # append each mismatch label exactly once (previously these labels were
  # appended twice, announcing header columns that were never filled)
  if options.shape:  table.labels_append('shapeMismatch({})'.format(options.defgrad))
  if options.volume: table.labels_append('volMismatch({})'.format(options.defgrad))

# --------------- figure out size and grid ---------------------------------------------------------

  table.data_readArray()

  coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
  mincorner = np.array(map(min,coords))
  maxcorner = np.array(map(max,coords))
  grid = np.array(map(len,coords),'i')
  size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)             # size from edge to edge = dim * n/(n-1)
  size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                  # spacing for grid==1 set to smallest among other spacings

  N = grid.prod()

  table.head_write()

# ------------------------------------------ read deformation gradient field -----------------------

  table.data_rewind()
  F = np.zeros(N*9,'d').reshape([3,3]+list(grid))
  idx = 0
  while table.data_read():
    (x,y,z) = damask.util.gridLocation(idx,grid)                                       # figure out (x,y,z) position from line count
    idx += 1
    F[0:3,0:3,x,y,z] = np.array(map(float,table.data[colF:colF+9]),'d').reshape(3,3)   # colF located during sanity checks

  Favg    = damask.core.math.tensorAvg(F)
  centres = damask.core.mesh.deformedCoordsFFT(size,F,Favg,[1.0,1.0,1.0])
  nodes   = damask.core.mesh.nodesAroundCentres(size,Favg,centres)

  # store results under names that do NOT shadow the mismatch functions;
  # previously the functions were overwritten by their results, so any
  # second input file crashed when trying to call a numpy array
  if options.shape:  mismatch_shape  = shapeMismatch( size,F,nodes,centres)
  if options.volume: mismatch_volume = volumeMismatch(size,F,nodes)

# ------------------------------------------ process data ------------------------------------------

  table.data_rewind()
  idx = 0
  outputAlive = True
  while outputAlive and table.data_read():                                             # read next data line of ASCII table
    (x,y,z) = damask.util.gridLocation(idx,grid)                                       # figure out (x,y,z) position from line count
    idx += 1
    if options.shape:  table.data_append( mismatch_shape[x,y,z])
    if options.volume: table.data_append(mismatch_volume[x,y,z])
    outputAlive = table.data_write()

# ------------------------------------------ output finalization -----------------------------------

  table.close()                                                                        # close ASCII tables