Merge branch 'modernize-testing' into 'development'

Modernize testing

See merge request damask/DAMASK!289
Sharan Roongta 2020-11-25 10:58:31 +01:00
commit a70c69718b
11 changed files with 70 additions and 484 deletions


@@ -2,14 +2,10 @@
stages:
  - prepareAll
  - python
-  - preprocessing
-  - postprocessing
-  - compilePETSc
-  - prepareGrid
+  - deprecated
+  - compile
  - grid
-  - compileMarc
  - marc
-  - example
  - performance
  - createPackage
  - createDocumentation
@@ -111,71 +107,70 @@ Pytest_python:
###################################################################################################
Pre_SeedGeneration:
-  stage: preprocessing
+  stage: deprecated
  script: PreProcessing_SeedGeneration/test.py
  except:
    - master
    - release

Pre_GeomGeneration:
-  stage: preprocessing
+  stage: deprecated
  script: PreProcessing_GeomGeneration/test.py
  except:
    - master
    - release

Pre_GeomModification:
-  stage: preprocessing
+  stage: deprecated
  script: PreProcessing_GeomModification/test.py
  except:
    - master
    - release

Pre_General:
-  stage: preprocessing
+  stage: deprecated
  script: PreProcessing/test.py
  except:
    - master
    - release

-###################################################################################################
Post_General:
-  stage: postprocessing
+  stage: deprecated
  script: PostProcessing/test.py
  except:
    - master
    - release

Post_GeometryReconstruction:
-  stage: postprocessing
+  stage: deprecated
  script: spectral_geometryReconstruction/test.py
  except:
    - master
    - release

Post_addCurl:
-  stage: postprocessing
+  stage: deprecated
  script: addCurl/test.py
  except:
    - master
    - release

Post_addDivergence:
-  stage: postprocessing
+  stage: deprecated
  script: addDivergence/test.py
  except:
    - master
    - release

Post_addGradient:
-  stage: postprocessing
+  stage: deprecated
  script: addGradient/test.py
  except:
    - master
    - release

Post_OrientationAverageMisorientation:
-  stage: postprocessing
+  stage: deprecated
  script:
    - OrientationAverageMisorientation/test.py
  except:
@@ -183,8 +178,8 @@ Post_OrientationAverageMisorientation:
    - release

###################################################################################################
-grid_mech_compile_Intel:
-  stage: compilePETSc
+compile_grid_Intel:
+  stage: compile
  script:
    - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
    - cd pytest
@@ -193,8 +188,8 @@ grid_mech_compile_Intel:
    - master
    - release

-Compile_FEM_Intel:
-  stage: compilePETSc
+compile_mesh_Intel:
+  stage: compile
  script:
    - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
    - cd pytest
@@ -203,8 +198,8 @@ Compile_FEM_Intel:
    - master
    - release

-grid_mech_compile_GNU:
-  stage: compilePETSc
+compile_grid_GNU:
+  stage: compile
  script:
    - module load $GNUCompiler $MPICH_GNU $PETSc_MPICH_GNU
    - cd pytest
@@ -213,8 +208,8 @@ grid_mech_compile_GNU:
    - master
    - release

-Compile_FEM_GNU:
-  stage: compilePETSc
+compile_mesh_GNU:
+  stage: compile
  script:
    - module load $GNUCompiler $MPICH_GNU $PETSc_MPICH_GNU
    - cd pytest
@@ -223,13 +218,36 @@ Compile_FEM_GNU:
    - master
    - release

-###################################################################################################
-Compile_Intel_Prepare:
-  stage: prepareGrid
+compile_MARC:
+  stage: compile
+  script:
+    - module load $IntelMarc $HDF5Marc $MSC
+    - cd pytest
+    - pytest -k 'compile and Marc' --basetemp=${TESTROOT}/compile_Marc
+  except:
+    - master
+    - release
+
+setup_grid:
+  stage: compile
  script:
    - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
-    - cd $DAMASKROOT
-    - make clean grid processing
+    - BUILD_DIR=$(mktemp -d)
+    - cd ${BUILD_DIR}
+    - cmake -DDAMASK_SOLVER=GRID -DCMAKE_INSTALL_PREFIX=${DAMASKROOT} ${DAMASKROOT}
+    - make -j2 all install
+  except:
+    - master
+    - release
+
+setup_mesh:
+  stage: compile
+  script:
+    - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
+    - BUILD_DIR=$(mktemp -d)
+    - cd ${BUILD_DIR}
+    - cmake -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${DAMASKROOT} ${DAMASKROOT}
+    - make -j2 all install
  except:
    - master
    - release
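The two new setup jobs replace the old in-tree `make clean grid processing` call with an out-of-source CMake build that installs into `$DAMASKROOT`. A rough local equivalent of the `setup_grid` job, sketched in Python for consistency with the rest of this page (not part of the CI definition; assumes `cmake`/`make` are on the PATH and the `DAMASKROOT` environment variable points at the checkout):

# Sketch only: mirrors BUILD_DIR=$(mktemp -d); cmake ...; make -j2 all install
import os
import subprocess
import tempfile

DAMASKROOT = os.environ['DAMASKROOT']          # assumed to point at the DAMASK checkout
build_dir  = tempfile.mkdtemp()                # out-of-source build directory

subprocess.run(['cmake',
                '-DDAMASK_SOLVER=GRID',
                f'-DCMAKE_INSTALL_PREFIX={DAMASKROOT}',
                DAMASKROOT],
               cwd=build_dir, check=True)
subprocess.run(['make', '-j2', 'all', 'install'], cwd=build_dir, check=True)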
@@ -252,13 +270,6 @@ Thermal:
    - master
    - release

-grid_parsingArguments:
-  stage: grid
-  script: grid_parsingArguments/test.py
-  except:
-    - master
-    - release
-
Nonlocal_Damage_DetectChanges:
  stage: grid
  script: Nonlocal_Damage_DetectChanges/test.py
@@ -280,16 +291,6 @@ Phenopowerlaw_singleSlip:
    - master
    - release

-###################################################################################################
-Marc_compileIfort:
-  stage: compileMarc
-  script:
-    - module load $IntelMarc $HDF5Marc $MSC
-    - cd pytest
-    - pytest -k 'compile and Marc' --basetemp=${TESTROOT}/compile_Marc
-  except:
-    - master
-    - release
-
###################################################################################################
Hex_elastic:
@@ -337,14 +338,6 @@ Marc_elementLib:
    - master
    - release

-###################################################################################################
-grid_all_example:
-  stage: example
-  script: grid_all_example/test.py
-  except:
-    - master
-    - release
-
###################################################################################################
SpectralRuntime:
  stage: performance

@@ -1 +1 @@
-Subproject commit 8bd09b5511d1e0e0ea288b47d16ce4924d75adcd
+Subproject commit a6be226f2ab08cfa44adabf37168f4d952d6174f


@@ -13,85 +13,6 @@ import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

-def volTetrahedron(coords):
-    """
-    Return the volume of the tetrahedron with given vertices or sides.
-
-    If vertices are given they must be in a NumPy array with shape (4,3): the
-    position vectors of the 4 vertices in 3 dimensions; if the six sides are
-    given, they must be an array of length 6. If both are given, the sides
-    will be used in the calculation.
-
-    This method implements
-    Tartaglia's formula using the Cayley-Menger determinant:
-                |0   1    1    1    1  |
-                |1   0   s1^2 s2^2 s3^2|
-    288 V^2 =   |1  s1^2  0   s4^2 s5^2|
-                |1  s2^2 s4^2  0   s6^2|
-                |1  s3^2 s5^2 s6^2  0  |
-    where s1, s2, ..., s6 are the tetrahedron side lengths.
-
-    from http://codereview.stackexchange.com/questions/77593/calculating-the-volume-of-a-tetrahedron
-    """
-    # The indexes of rows in the vertices array corresponding to all
-    # possible pairs of vertices
-    vertex_pair_indexes = np.array(((0, 1), (0, 2), (0, 3),
-                                    (1, 2), (1, 3), (2, 3)))
-
-    # Get all the squares of all side lengths from the differences between
-    # the 6 different pairs of vertex positions
-    vertices = np.concatenate((coords[0],coords[1],coords[2],coords[3])).reshape(4,3)
-    vertex1, vertex2 = vertex_pair_indexes[:,0], vertex_pair_indexes[:,1]
-    sides_squared = np.sum((vertices[vertex1] - vertices[vertex2])**2,axis=-1)
-
-    # Set up the Cayley-Menger determinant
-    M = np.zeros((5,5))
-    # Fill in the upper triangle of the matrix
-    M[0,1:] = 1
-    # The squared-side length elements can be indexed using the vertex
-    # pair indices (compare with the determinant illustrated above)
-    M[tuple(zip(*(vertex_pair_indexes + 1)))] = sides_squared
-
-    # The matrix is symmetric, so we can fill in the lower triangle by
-    # adding the transpose
-    M = M + M.T
-
-    return np.sqrt(np.linalg.det(M) / 288)
-
-
-def volumeMismatch(size,F,nodes):
-    """
-    Calculates the volume mismatch.
-
-    volume mismatch is defined as the difference between volume of reconstructed
-    (compatible) cube and determinant of deformation gradient at Fourier point.
-    """
-    coords = np.empty([8,3])
-    vMismatch = np.empty(F.shape[:3])
-
-    #--------------------------------------------------------------------------------------------------
-    # calculate actual volume and volume resulting from deformation gradient
-    for k in range(grid[0]):
-        for j in range(grid[1]):
-            for i in range(grid[2]):
-                coords[0,0:3] = nodes[k,  j,  i  ,0:3]
-                coords[1,0:3] = nodes[k  ,j,  i+1,0:3]
-                coords[2,0:3] = nodes[k  ,j+1,i+1,0:3]
-                coords[3,0:3] = nodes[k,  j+1,i  ,0:3]
-                coords[4,0:3] = nodes[k+1,j,  i  ,0:3]
-                coords[5,0:3] = nodes[k+1,j,  i+1,0:3]
-                coords[6,0:3] = nodes[k+1,j+1,i+1,0:3]
-                coords[7,0:3] = nodes[k+1,j+1,i  ,0:3]
-                vMismatch[k,j,i] = \
-                    (  abs(volTetrahedron([coords[6,0:3],coords[0,0:3],coords[7,0:3],coords[3,0:3]])) \
-                     + abs(volTetrahedron([coords[6,0:3],coords[0,0:3],coords[7,0:3],coords[4,0:3]])) \
-                     + abs(volTetrahedron([coords[6,0:3],coords[0,0:3],coords[2,0:3],coords[3,0:3]])) \
-                     + abs(volTetrahedron([coords[6,0:3],coords[0,0:3],coords[2,0:3],coords[1,0:3]])) \
-                     + abs(volTetrahedron([coords[6,0:3],coords[4,0:3],coords[1,0:3],coords[5,0:3]])) \
-                     + abs(volTetrahedron([coords[6,0:3],coords[4,0:3],coords[1,0:3],coords[0,0:3]]))) \
-                    /np.linalg.det(F[k,j,i,0:3,0:3])
-
-    return vMismatch/(size.prod()/grid.prod())
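For reference, the Cayley-Menger construction used by the removed volTetrahedron() can be sanity-checked on a unit right tetrahedron (vertices at the origin and the three unit axes), whose volume is 1/6. A minimal standalone check, not repository code:

# Cayley-Menger relation 288 V^2 = det(M), built exactly as in the removed function.
import numpy as np

coords = np.array([[0.,0.,0.],[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]])     # unit right tetrahedron
pairs  = np.array(((0,1),(0,2),(0,3),(1,2),(1,3),(2,3)))             # all vertex pairs
s2     = np.sum((coords[pairs[:,0]] - coords[pairs[:,1]])**2,axis=-1)  # squared side lengths

M = np.zeros((5,5))
M[0,1:] = 1                                    # bordering row of ones
M[tuple(zip(*(pairs + 1)))] = s2               # upper triangle from the squared sides
M = M + M.T                                    # symmetrize

V = np.sqrt(np.linalg.det(M) / 288)
print(V)                                       # ~0.1667 = 1/6, as expected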
def shapeMismatch(size,F,nodes,centres):
    """
@@ -101,35 +22,16 @@ def shapeMismatch(size,F,nodes,centres):
    the corners of reconstructed (combatible) volume element and the vectors calculated by deforming
    the initial volume element with the current deformation gradient.
    """
-    sMismatch = np.empty(F.shape[:3])
-
-    #--------------------------------------------------------------------------------------------------
-    # initial positions
    delta = size/grid*.5
-    coordsInitial = np.vstack((delta * np.array((-1,-1,-1)),
-                               delta * np.array((+1,-1,-1)),
-                               delta * np.array((+1,+1,-1)),
-                               delta * np.array((-1,+1,-1)),
-                               delta * np.array((-1,-1,+1)),
-                               delta * np.array((+1,-1,+1)),
-                               delta * np.array((+1,+1,+1)),
-                               delta * np.array((-1,+1,+1))))
-
-    #--------------------------------------------------------------------------------------------------
-    # compare deformed original and deformed positions to actual positions
-    for k in range(grid[0]):
-        for j in range(grid[1]):
-            for i in range(grid[2]):
-                sMismatch[k,j,i] = \
-                    + np.linalg.norm(nodes[k,  j,  i  ,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[0,0:3]))\
-                    + np.linalg.norm(nodes[k+1,j,  i  ,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[1,0:3]))\
-                    + np.linalg.norm(nodes[k+1,j+1,i  ,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[2,0:3]))\
-                    + np.linalg.norm(nodes[k,  j+1,i  ,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[3,0:3]))\
-                    + np.linalg.norm(nodes[k,  j,  i+1,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[4,0:3]))\
-                    + np.linalg.norm(nodes[k+1,j,  i+1,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[5,0:3]))\
-                    + np.linalg.norm(nodes[k+1,j+1,i+1,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[6,0:3]))\
-                    + np.linalg.norm(nodes[k  ,j+1,i+1,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[7,0:3]))
-
-    return sMismatch
+
+    return + np.linalg.norm(nodes[:-1,:-1,:-1] -centres - np.dot(F,delta * np.array((-1,-1,-1))),axis=-1)\
+           + np.linalg.norm(nodes[+1:,:-1,:-1] -centres - np.dot(F,delta * np.array((+1,-1,-1))),axis=-1)\
+           + np.linalg.norm(nodes[+1:,+1:,:-1] -centres - np.dot(F,delta * np.array((+1,+1,-1))),axis=-1)\
+           + np.linalg.norm(nodes[:-1,+1:,:-1] -centres - np.dot(F,delta * np.array((-1,+1,-1))),axis=-1)\
+           + np.linalg.norm(nodes[:-1,:-1,+1:] -centres - np.dot(F,delta * np.array((-1,-1,+1))),axis=-1)\
+           + np.linalg.norm(nodes[+1:,:-1,+1:] -centres - np.dot(F,delta * np.array((+1,-1,+1))),axis=-1)\
+           + np.linalg.norm(nodes[+1:,+1:,+1:] -centres - np.dot(F,delta * np.array((+1,+1,+1))),axis=-1)\
+           + np.linalg.norm(nodes[:-1,+1:,+1:] -centres - np.dot(F,delta * np.array((-1,+1,+1))),axis=-1)


# --------------------------------------------------------------------
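The rewritten shapeMismatch() drops the k/j/i loops in favour of NumPy slicing: shifting the slice window over the (nx+1, ny+1, nz+1, 3) array of nodal positions picks out one corner of every cell at once. A minimal illustration of the idiom (hypothetical array sizes, not repository code):

# Gathering cell corners by slicing instead of looping over cells.
import numpy as np

nodes = np.random.rand(4, 3, 5, 3)       # (nx+1, ny+1, nz+1, 3) nodal coordinates

corner_000 = nodes[:-1, :-1, :-1]        # "lower" corner of every cell, shape (nx, ny, nz, 3)
corner_111 = nodes[+1:, +1:, +1:]        # opposite corner of every cell, same shape

# Spot-check against explicit indexing for one cell:
i, j, k = 1, 0, 2
assert np.allclose(corner_000[i, j, k], nodes[i,   j,   k  ])
assert np.allclose(corner_111[i, j, k], nodes[i+1, j+1, k+1])

# Each np.linalg.norm(...) term in the new return statement therefore operates
# on all cells simultaneously, which is why the loops can go.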
@@ -155,10 +57,6 @@ parser.add_option('--no-shape','-s',
                  dest = 'shape',
                  action = 'store_false',
                  help = 'omit shape mismatch')
-parser.add_option('--no-volume','-v',
-                  dest = 'volume',
-                  action = 'store_false',
-                  help = 'omit volume mismatch')
parser.set_defaults(pos = 'pos',
                    defgrad = 'f',
                    shape = True,
@@ -185,10 +83,4 @@ for name in filenames:
                          shapeMismatch.reshape(-1,1,order='F'),
                          scriptID+' '+' '.join(sys.argv[1:]))

-    if options.volume:
-        volumeMismatch = volumeMismatch(size,F,nodes)
-        table = table.add('volMismatch(({}))'.format(options.defgrad),
-                          volumeMismatch.reshape(-1,1,order='F'),
-                          scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.save((sys.stdout if name is None else name), legacy=True)
+    table.save((sys.stdout if name is None else name))


@@ -55,4 +55,4 @@ for name in filenames:
                      curl.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape),order='F'),
                      scriptID+' '+' '.join(sys.argv[1:]))

-    table.save((sys.stdout if name is None else name), legacy=True)
+    table.save((sys.stdout if name is None else name))


@@ -1,74 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

import numpy as np

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


def derivative(coordinates,what):

    result = np.empty_like(what)

    # use differentiation by interpolation
    # as described in http://www2.math.umd.edu/~dlevy/classes/amsc466/lecture-notes/differentiation-chap.pdf

    result[1:-1,:] = + what[1:-1,:] * (2.*coordinates[1:-1]-coordinates[:-2]-coordinates[2:]) / \
                       ((coordinates[1:-1]-coordinates[:-2])*(coordinates[1:-1]-coordinates[2:])) \
                     + what[2:,:]   * (coordinates[1:-1]-coordinates[:-2]) / \
                       ((coordinates[2:]-coordinates[1:-1])*(coordinates[2:]-coordinates[:-2])) \
                     + what[:-2,:]  * (coordinates[1:-1]-coordinates[2:]) / \
                       ((coordinates[:-2]-coordinates[1:-1])*(coordinates[:-2]-coordinates[2:]))

    result[0,:]  = (what[0,:] - what[1,:]) / \
                   (coordinates[0] - coordinates[1])
    result[-1,:] = (what[-1,:] - what[-2,:]) / \
                   (coordinates[-1] - coordinates[-2])

    return result


# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add column(s) containing numerical derivative of requested column(s) with respect to given coordinates.

""", version = scriptID)

parser.add_option('-c','--coordinates',
                  dest = 'coordinates',
                  type = 'string', metavar='string',
                  help = 'heading of coordinate column')
parser.add_option('-l','--label',
                  dest = 'labels',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'heading of column(s) to differentiate')

(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]

if options.coordinates is None:
    parser.error('no coordinate column specified.')
if options.labels is None:
    parser.error('no data column specified.')

for name in filenames:
    damask.util.report(scriptName,name)

    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
    for label in options.labels:
        table = table.add('d({})/d({})'.format(label,options.coordinates),
                          derivative(table.get(options.coordinates),table.get(label)),
                          scriptID+' '+' '.join(sys.argv[1:]))

    table.save((sys.stdout if name is None else name), legacy=True)
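The removed derivative() uses the three-point "differentiation by interpolation" stencil, which is exact for quadratics even on a non-uniform grid. A standalone check of that stencil (not repository code; coordinates and values are made up):

# d/dx of x^2 evaluated at the interior points of a non-uniform grid should be 2*x.
import numpy as np

x    = np.array([0.0, 0.3, 1.0, 1.8, 2.1]).reshape(-1,1)   # non-uniform coordinate column
what = x**2                                                # sampled values, one column
result = np.empty_like(what)

result[1:-1,:] = + what[1:-1,:] * (2.*x[1:-1]-x[:-2]-x[2:]) / \
                   ((x[1:-1]-x[:-2])*(x[1:-1]-x[2:])) \
                 + what[2:,:]   * (x[1:-1]-x[:-2]) / \
                   ((x[2:]-x[1:-1])*(x[2:]-x[:-2])) \
                 + what[:-2,:]  * (x[1:-1]-x[2:]) / \
                   ((x[:-2]-x[1:-1])*(x[:-2]-x[2:]))

print(result[1:-1,0])        # -> [0.6 2.  3.6], i.e. 2*x at the interior points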


@@ -60,7 +60,7 @@ for name in filenames:
                  .add('fluct({}).{}'.format(options.f,options.pos),
                       damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'),
                       scriptID+' '+' '.join(sys.argv[1:]))\
-                 .save((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'), legacy=True)
+                 .save((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'))
    else:
        table.add('avg({}).{}'.format(options.f,options.pos),
                  damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'),
@@ -68,4 +68,4 @@ for name in filenames:
             .add('fluct({}).{}'.format(options.f,options.pos),
                  damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'),
                  scriptID+' '+' '.join(sys.argv[1:]))\
-             .save((sys.stdout if name is None else name), legacy=True)
+             .save((sys.stdout if name is None else name))


@@ -55,4 +55,4 @@ for name in filenames:
                      div.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)//3,order='F'),
                      scriptID+' '+' '.join(sys.argv[1:]))

-    table.save((sys.stdout if name is None else name), legacy=True)
+    table.save((sys.stdout if name is None else name))


@@ -184,4 +184,4 @@ for name in filenames:
                          distance[i,:],
                          scriptID+' '+' '.join(sys.argv[1:]))

-    table.save((sys.stdout if name is None else name), legacy=True)
+    table.save((sys.stdout if name is None else name))


@@ -55,4 +55,4 @@ for name in filenames:
                      grad.reshape(tuple(grid)+(-1,)).reshape(-1,np.prod(shape)*3,order='F'),
                      scriptID+' '+' '.join(sys.argv[1:]))

-    table.save((sys.stdout if name is None else name), legacy=True)
+    table.save((sys.stdout if name is None else name))


@@ -1,150 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

import numpy as np

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add quaternion and/or Bunge Euler angle representation of crystal lattice orientation.
Orientation is given by quaternion, Euler angles, rotation matrix, or crystal frame coordinates
(i.e. component vectors of rotation matrix).
Additional (globally fixed) rotations of the lab frame and/or crystal frame can be applied.

""", version = scriptID)

representations = ['quaternion', 'rodrigues', 'eulers', 'matrix', 'axisangle']

parser.add_option('-o',
                  '--output',
                  dest = 'output',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'output orientation formats {{{}}}'.format(', '.join(representations)))
parser.add_option('-d',
                  '--degrees',
                  dest = 'degrees',
                  action = 'store_true',
                  help = 'all angles in degrees')
parser.add_option('-R',
                  '--labrotation',
                  dest='labrotation',
                  type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
                  help = 'axis and angle of additional lab frame rotation [%default]')
parser.add_option('-r',
                  '--crystalrotation',
                  dest='crystalrotation',
                  type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
                  help = 'axis and angle of additional crystal frame rotation [%default]')
parser.add_option('--eulers',
                  dest = 'eulers',
                  metavar = 'string',
                  help = 'Euler angles label')
parser.add_option('--rodrigues',
                  dest = 'rodrigues',
                  metavar = 'string',
                  help = 'Rodrigues vector label')
parser.add_option('--matrix',
                  dest = 'matrix',
                  metavar = 'string',
                  help = 'orientation matrix label')
parser.add_option('--quaternion',
                  dest = 'quaternion',
                  metavar = 'string',
                  help = 'quaternion label')
parser.add_option('-x',
                  dest = 'x',
                  metavar = 'string',
                  help = 'label of lab x vector (expressed in crystal coords)')
parser.add_option('-y',
                  dest = 'y',
                  metavar = 'string',
                  help = 'label of lab y vector (expressed in crystal coords)')
parser.add_option('-z',
                  dest = 'z',
                  metavar = 'string',
                  help = 'label of lab z vector (expressed in crystal coords)')
parser.add_option('--lattice',
                  dest = 'lattice',
                  metavar = 'string',
                  help = 'lattice structure to reduce rotation into fundamental zone')

parser.set_defaults(output = [],
                    labrotation     = (1.,1.,1.,0.),                                                # no rotation about (1,1,1)
                    crystalrotation = (1.,1.,1.,0.),                                                # no rotation about (1,1,1)
                    lattice = None,
                   )

(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]

if options.output == [] or (not set(options.output).issubset(set(representations))):
    parser.error('output must be chosen from {}.'.format(', '.join(representations)))

input = [options.eulers     is not None,
         options.rodrigues  is not None,
         options.x          is not None and \
         options.y          is not None and \
         options.z          is not None,
         options.matrix     is not None,
         options.quaternion is not None,
        ]

if np.sum(input) != 1: parser.error('needs exactly one input format.')

r = damask.Rotation.from_axis_angle(np.array(options.crystalrotation),options.degrees,normalize=True)
R = damask.Rotation.from_axis_angle(np.array(options.labrotation),options.degrees,normalize=True)

for name in filenames:
    damask.util.report(scriptName,name)

    table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)

    if options.eulers is not None:
        label = options.eulers
        print(np.max(table.get(options.eulers),axis=0))
        o = damask.Rotation.from_Euler_angles(table.get(options.eulers), options.degrees)
    elif options.rodrigues is not None:
        label = options.rodrigues
        o = damask.Rotation.from_Rodrigues_vector(table.get(options.rodrigues))
    elif options.matrix is not None:
        label = options.matrix
        o = damask.Rotation.from_matrix(table.get(options.matrix).reshape(-1,3,3))
    elif options.x is not None:
        label = '<{},{},{}>'.format(options.x,options.y,options.z)
        M = np.block([table.get(options.x),table.get(options.y),table.get(options.z)]).reshape(-1,3,3)
        o = damask.Rotation.from_matrix(M/np.linalg.norm(M,axis=0))
    elif options.quaternion is not None:
        label = options.quaternion
        o = damask.Rotation.from_quaternion(table.get(options.quaternion))

    o = r.broadcast_to(o.shape) @ o @ R.broadcast_to(o.shape)

    #if options.lattice is not None:
    #    o = damask.Orientation(rotation = o,lattice = options.lattice).reduced().rotation

    if 'rodrigues' in options.output:
        table = table.add('ro({})'.format(label),o.as_Rodrigues_vector(),          scriptID+' '+' '.join(sys.argv[1:]))
    if 'eulers' in options.output:
        table = table.add('eu({})'.format(label),o.as_Euler_angles(options.degrees),scriptID+' '+' '.join(sys.argv[1:]))
    if 'quaternion' in options.output:
        table = table.add('qu({})'.format(label),o.as_quaternion(),                scriptID+' '+' '.join(sys.argv[1:]))
    if 'matrix' in options.output:
        table = table.add('om({})'.format(label),o.as_matrix(),                    scriptID+' '+' '.join(sys.argv[1:]))
    if 'axisangle' in options.output:
        table = table.add('om({})'.format(label),o.as_axis_angle(options.degrees), scriptID+' '+' '.join(sys.argv[1:]))

    table.save((sys.stdout if name is None else name), legacy=True)
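The functionality of the removed script lives on in the damask.Rotation class it already used. A minimal sketch of the same per-row representation conversion, restricted to calls that appear in the deleted code (the example Euler angles are made up; not repository code):

# Euler angles in -> quaternion / matrix / Euler angles out, as the script's -o options did.
import numpy as np
import damask

eulers = np.array([[ 0., 45.,  0.],
                   [10., 20., 30.]])                     # Bunge Euler angles in degrees

o = damask.Rotation.from_Euler_angles(eulers, True)      # '--eulers' input branch (True = degrees)

print(o.as_quaternion())                                 # what the 'qu(...)' column held
print(o.as_matrix())                                     # what the 'om(...)' column held
print(o.as_Euler_angles(True))                           # what the 'eu(...)' column held, in degrees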


@@ -14,84 +14,9 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

slipSystems = {
+    'fcc': damask.lattice.kinematics['cF']['slip'][:12],
+    'bcc': damask.lattice.kinematics['cI']['slip'],
+    'hex': damask.lattice.kinematics['hP']['slip'],
'fcc':
np.array([
[+0,+1,-1 , +1,+1,+1],
[-1,+0,+1 , +1,+1,+1],
[+1,-1,+0 , +1,+1,+1],
[+0,-1,-1 , -1,-1,+1],
[+1,+0,+1 , -1,-1,+1],
[-1,+1,+0 , -1,-1,+1],
[+0,-1,+1 , +1,-1,-1],
[-1,+0,-1 , +1,-1,-1],
[+1,+1,+0 , +1,-1,-1],
[+0,+1,+1 , -1,+1,-1],
[+1,+0,-1 , -1,+1,-1],
[-1,-1,+0 , -1,+1,-1],
],'d'),
'bcc':
np.array([
[+1,-1,+1 , +0,+1,+1],
[-1,-1,+1 , +0,+1,+1],
[+1,+1,+1 , +0,-1,+1],
[-1,+1,+1 , +0,-1,+1],
[-1,+1,+1 , +1,+0,+1],
[-1,-1,+1 , +1,+0,+1],
[+1,+1,+1 , -1,+0,+1],
[+1,-1,+1 , -1,+0,+1],
[-1,+1,+1 , +1,+1,+0],
[-1,+1,-1 , +1,+1,+0],
[+1,+1,+1 , -1,+1,+0],
[+1,+1,-1 , -1,+1,+0],
[-1,+1,+1 , +2,+1,+1],
[+1,+1,+1 , -2,+1,+1],
[+1,+1,-1 , +2,-1,+1],
[+1,-1,+1 , +2,+1,-1],
[+1,-1,+1 , +1,+2,+1],
[+1,+1,-1 , -1,+2,+1],
[+1,+1,+1 , +1,-2,+1],
[-1,+1,+1 , +1,+2,-1],
[+1,+1,-1 , +1,+1,+2],
[+1,-1,+1 , -1,+1,+2],
[-1,+1,+1 , +1,-1,+2],
[+1,+1,+1 , +1,+1,-2],
],'d'),
'hex':
np.array([
[+2,-1,-1,+0 , +0,+0,+0,+1],
[-1,+2,-1,+0 , +0,+0,+0,+1],
[-1,-1,+2,+0 , +0,+0,+0,+1],
[+2,-1,-1,+0 , +0,+1,-1,+0],
[-1,+2,-1,+0 , -1,+0,+1,+0],
[-1,-1,+2,+0 , +1,-1,+0,+0],
[-1,+1,+0,+0 , +1,+1,-2,+0],
[+0,-1,+1,+0 , -2,+1,+1,+0],
[+1,+0,-1,+0 , +1,-2,+1,+0],
[-1,+2,-1,+0 , +1,+0,-1,+1],
[-2,+1,+1,+0 , +0,+1,-1,+1],
[-1,-1,+2,+0 , -1,+1,+0,+1],
[+1,-2,+1,+0 , -1,+0,+1,+1],
[+2,-1,-1,+0 , +0,-1,+1,+1],
[+1,+1,-2,+0 , +1,-1,+0,+1],
[-2,+1,+1,+3 , +1,+0,-1,+1],
[-1,-1,+2,+3 , +1,+0,-1,+1],
[-1,-1,+2,+3 , +0,+1,-1,+1],
[+1,-2,+1,+3 , +0,+1,-1,+1],
[+1,-2,+1,+3 , -1,+1,+0,+1],
[+2,-1,-1,+3 , -1,+1,+0,+1],
[+2,-1,-1,+3 , -1,+0,+1,+1],
[+1,+1,-2,+3 , -1,+0,+1,+1],
[+1,+1,-2,+3 , +0,-1,+1,+1],
[-1,+2,-1,+3 , +0,-1,+1,+1],
[-1,+2,-1,+3 , +1,-1,+0,+1],
[-2,+1,+1,+3 , +1,-1,+0,+1],
[-1,-1,+2,+3 , +1,+1,-2,+2],
[+1,-2,+1,+3 , -1,+2,-1,+2],
[+2,-1,-1,+3 , -2,+1,+1,+2],
[+1,+1,-2,+3 , -1,-1,+2,+2],
[-1,+2,-1,+3 , +1,-2,+1,+2],
[-2,+1,+1,+3 , +2,-1,-1,+2],
],'d'),
}

# --------------------------------------------------------------------
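The hard-coded slip-system tables are replaced by the definitions shipped with the damask package ('cF', 'cI', 'hP' are the new lattice identifiers). A quick way to inspect what the new entries contain (not repository code):

import damask

fcc_slip = damask.lattice.kinematics['cF']['slip'][:12]   # first 12 rows: the octahedral {111}<110> systems
print(type(fcc_slip), len(fcc_slip))                       # same data the removed 'fcc' array provided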
@@ -189,4 +114,4 @@ for name in filenames:
    for i,label in enumerate(labels):
        table = table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:]))

-    table.save((sys.stdout if name is None else name), legacy=True)
+    table.save((sys.stdout if name is None else name))