Merge branch 'misc-improvements' into 'development'

Misc improvements

See merge request damask/DAMASK!173
Vitesh 2020-05-30 20:12:19 +02:00
commit 4b6b9478a0
25 changed files with 911 additions and 441 deletions

View File

@ -110,7 +110,7 @@ for executable in icc icpc ifort ;do
done
firstLevel "MPI Wrappers"
for executable in mpicc mpiCC mpiicc mpic++ mpicpc mpicxx mpifort mpif90 mpif77; do
for executable in mpicc mpiCC mpiicc mpic++ mpiicpc mpicxx mpifort mpiifort mpif90 mpif77; do
getDetails $executable '-show'
done

View File

@ -1,6 +1,9 @@
###################################################################################################
# GNU Compiler
###################################################################################################
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "GCC Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-fopenmp")

View File

@ -1,6 +1,10 @@
###################################################################################################
# Intel Compiler
###################################################################################################
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 18.0)
message (FATAL_ERROR "Intel Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-qopenmp -parallel")
endif ()

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -15,91 +16,82 @@ scriptID = ' '.join([scriptName,damask.version])
slipSystems = {
'fcc':
np.array([
# Slip direction Plane normal
[ 0, 1,-1, 1, 1, 1, ],
[-1, 0, 1, 1, 1, 1, ],
[ 1,-1, 0, 1, 1, 1, ],
[ 0,-1,-1, -1,-1, 1, ],
[ 1, 0, 1, -1,-1, 1, ],
[-1, 1, 0, -1,-1, 1, ],
[ 0,-1, 1, 1,-1,-1, ],
[-1, 0,-1, 1,-1,-1, ],
[ 1, 1, 0, 1,-1,-1, ],
[ 0, 1, 1, -1, 1,-1, ],
[ 1, 0,-1, -1, 1,-1, ],
[-1,-1, 0, -1, 1,-1, ],
],'f'),
[+0,+1,-1 , +1,+1,+1],
[-1,+0,+1 , +1,+1,+1],
[+1,-1,+0 , +1,+1,+1],
[+0,-1,-1 , -1,-1,+1],
[+1,+0,+1 , -1,-1,+1],
[-1,+1,+0 , -1,-1,+1],
[+0,-1,+1 , +1,-1,-1],
[-1,+0,-1 , +1,-1,-1],
[+1,+1,+0 , +1,-1,-1],
[+0,+1,+1 , -1,+1,-1],
[+1,+0,-1 , -1,+1,-1],
[-1,-1,+0 , -1,+1,-1],
],'d'),
'bcc':
np.array([
# Slip system <111>{110}
[ 1,-1, 1, 0, 1, 1, ],
[-1,-1, 1, 0, 1, 1, ],
[ 1, 1, 1, 0,-1, 1, ],
[-1, 1, 1, 0,-1, 1, ],
[-1, 1, 1, 1, 0, 1, ],
[-1,-1, 1, 1, 0, 1, ],
[ 1, 1, 1, -1, 0, 1, ],
[ 1,-1, 1, -1, 0, 1, ],
[-1, 1, 1, 1, 1, 0, ],
[-1, 1,-1, 1, 1, 0, ],
[ 1, 1, 1, -1, 1, 0, ],
[ 1, 1,-1, -1, 1, 0, ],
# Slip system <111>{112}
[-1, 1, 1, 2, 1, 1, ],
[ 1, 1, 1, -2, 1, 1, ],
[ 1, 1,-1, 2,-1, 1, ],
[ 1,-1, 1, 2, 1,-1, ],
[ 1,-1, 1, 1, 2, 1, ],
[ 1, 1,-1, -1, 2, 1, ],
[ 1, 1, 1, 1,-2, 1, ],
[-1, 1, 1, 1, 2,-1, ],
[ 1, 1,-1, 1, 1, 2, ],
[ 1,-1, 1, -1, 1, 2, ],
[-1, 1, 1, 1,-1, 2, ],
[ 1, 1, 1, 1, 1,-2, ],
],'f'),
[+1,-1,+1 , +0,+1,+1],
[-1,-1,+1 , +0,+1,+1],
[+1,+1,+1 , +0,-1,+1],
[-1,+1,+1 , +0,-1,+1],
[-1,+1,+1 , +1,+0,+1],
[-1,-1,+1 , +1,+0,+1],
[+1,+1,+1 , -1,+0,+1],
[+1,-1,+1 , -1,+0,+1],
[-1,+1,+1 , +1,+1,+0],
[-1,+1,-1 , +1,+1,+0],
[+1,+1,+1 , -1,+1,+0],
[+1,+1,-1 , -1,+1,+0],
[-1,+1,+1 , +2,+1,+1],
[+1,+1,+1 , -2,+1,+1],
[+1,+1,-1 , +2,-1,+1],
[+1,-1,+1 , +2,+1,-1],
[+1,-1,+1 , +1,+2,+1],
[+1,+1,-1 , -1,+2,+1],
[+1,+1,+1 , +1,-2,+1],
[-1,+1,+1 , +1,+2,-1],
[+1,+1,-1 , +1,+1,+2],
[+1,-1,+1 , -1,+1,+2],
[-1,+1,+1 , +1,-1,+2],
[+1,+1,+1 , +1,+1,-2],
],'d'),
'hex':
np.array([
# Basal systems <11.0>{00.1} (independent of c/a-ratio, Bravais notation (4 coordinate base))
[ 2, -1, -1, 0, 0, 0, 0, 1, ],
[-1, 2, -1, 0, 0, 0, 0, 1, ],
[-1, -1, 2, 0, 0, 0, 0, 1, ],
# 1st type prismatic systems <11.0>{10.0} (independent of c/a-ratio)
[ 2, -1, -1, 0, 0, 1, -1, 0, ],
[-1, 2, -1, 0, -1, 0, 1, 0, ],
[-1, -1, 2, 0, 1, -1, 0, 0, ],
# 2nd type prismatic systems <10.0>{11.0} -- a slip; plane normals independent of c/a-ratio
[ 0, 1, -1, 0, 2, -1, -1, 0, ],
[-1, 0, 1, 0, -1, 2, -1, 0, ],
[ 1, -1, 0, 0, -1, -1, 2, 0, ],
# 1st type 1st order pyramidal systems <11.0>{-11.1} -- plane normals depend on the c/a-ratio
[ 2, -1, -1, 0, 0, 1, -1, 1, ],
[-1, 2, -1, 0, -1, 0, 1, 1, ],
[-1, -1, 2, 0, 1, -1, 0, 1, ],
[ 1, 1, -2, 0, -1, 1, 0, 1, ],
[-2, 1, 1, 0, 0, -1, 1, 1, ],
[ 1, -2, 1, 0, 1, 0, -1, 1, ],
# pyramidal system: c+a slip <11.3>{-10.1} -- plane normals depend on the c/a-ratio
[ 2, -1, -1, 3, -1, 1, 0, 1, ],
[ 1, -2, 1, 3, -1, 1, 0, 1, ],
[-1, -1, 2, 3, 1, 0, -1, 1, ],
[-2, 1, 1, 3, 1, 0, -1, 1, ],
[-1, 2, -1, 3, 0, -1, 1, 1, ],
[ 1, 1, -2, 3, 0, -1, 1, 1, ],
[-2, 1, 1, 3, 1, -1, 0, 1, ],
[-1, 2, -1, 3, 1, -1, 0, 1, ],
[ 1, 1, -2, 3, -1, 0, 1, 1, ],
[ 2, -1, -1, 3, -1, 0, 1, 1, ],
[ 1, -2, 1, 3, 0, 1, -1, 1, ],
[-1, -1, 2, 3, 0, 1, -1, 1, ],
# pyramidal system: c+a slip <11.3>{-1-1.2} -- as for hexagonal ice (Castelnau et al. 1996, similar to twin system found below)
[ 2, -1, -1, 3, -2, 1, 1, 2, ], # sorted according to similar twin system
[-1, 2, -1, 3, 1, -2, 1, 2, ], # <11.3>{-1-1.2} shear = 2((c/a)^2-2)/(3 c/a)
[-1, -1, 2, 3, 1, 1, -2, 2, ],
[-2, 1, 1, 3, 2, -1, -1, 2, ],
[ 1, -2, 1, 3, -1, 2, -1, 2, ],
[ 1, 1, -2, 3, -1, -1, 2, 2, ],
],'f'),
[+2,-1,-1,+0 , +0,+0,+0,+1],
[-1,+2,-1,+0 , +0,+0,+0,+1],
[-1,-1,+2,+0 , +0,+0,+0,+1],
[+2,-1,-1,+0 , +0,+1,-1,+0],
[-1,+2,-1,+0 , -1,+0,+1,+0],
[-1,-1,+2,+0 , +1,-1,+0,+0],
[-1,+1,+0,+0 , +1,+1,-2,+0],
[+0,-1,+1,+0 , -2,+1,+1,+0],
[+1,+0,-1,+0 , +1,-2,+1,+0],
[-1,+2,-1,+0 , +1,+0,-1,+1],
[-2,+1,+1,+0 , +0,+1,-1,+1],
[-1,-1,+2,+0 , -1,+1,+0,+1],
[+1,-2,+1,+0 , -1,+0,+1,+1],
[+2,-1,-1,+0 , +0,-1,+1,+1],
[+1,+1,-2,+0 , +1,-1,+0,+1],
[-2,+1,+1,+3 , +1,+0,-1,+1],
[-1,-1,+2,+3 , +1,+0,-1,+1],
[-1,-1,+2,+3 , +0,+1,-1,+1],
[+1,-2,+1,+3 , +0,+1,-1,+1],
[+1,-2,+1,+3 , -1,+1,+0,+1],
[+2,-1,-1,+3 , -1,+1,+0,+1],
[+2,-1,-1,+3 , -1,+0,+1,+1],
[+1,+1,-2,+3 , -1,+0,+1,+1],
[+1,+1,-2,+3 , +0,-1,+1,+1],
[-1,+2,-1,+3 , +0,-1,+1,+1],
[-1,+2,-1,+3 , +1,-1,+0,+1],
[-2,+1,+1,+3 , +1,-1,+0,+1],
[-1,-1,+2,+3 , +1,+1,-2,+2],
[+1,-2,+1,+3 , -1,+2,-1,+2],
[+2,-1,-1,+3 , -2,+1,+1,+2],
[+1,+1,-2,+3 , -1,-1,+2,+2],
[-1,+2,-1,+3 , +1,-2,+1,+2],
[-2,+1,+1,+3 , +2,-1,-1,+2],
],'d'),
}
# --------------------------------------------------------------------
@ -111,11 +103,11 @@ Add columns listing Schmid factors (and optional trace vector of selected system
""", version = scriptID)
latticeChoices = ('fcc','bcc','hex')
lattice_choices = list(slipSystems.keys())
parser.add_option('-l',
'--lattice',
dest = 'lattice', type = 'choice', choices = latticeChoices, metavar='string',
help = 'type of lattice structure [%default] {}'.format(latticeChoices))
dest = 'lattice', type = 'choice', choices = lattice_choices, metavar='string',
help = 'type of lattice structure [%default] {}'.format(lattice_choices))
parser.add_option('--covera',
dest = 'CoverA', type = 'float', metavar = 'float',
help = 'C over A ratio for hexagonal systems [%default]')
@ -138,88 +130,63 @@ parser.add_option('-o',
parser.set_defaults(force = (0.0,0.0,1.0),
quaternion='orientation',
normal = None,
lattice = latticeChoices[0],
lattice = lattice_choices[0],
CoverA = np.sqrt(8./3.),
)
(options, filenames) = parser.parse_args()
force = np.array(options.force)
force /= np.linalg.norm(force)
if options.normal is not None:
normal = np.array(options.normal)
normal /= np.linalg.norm(normal)
if abs(np.dot(force,normal)) > 1e-3:
parser.error('stress plane normal not orthogonal to force direction')
else:
normal = force
slip_direction = np.zeros((len(slipSystems[options.lattice]),3),'f')
slip_normal = np.zeros_like(slip_direction)
if options.lattice in latticeChoices[:2]:
slip_direction = slipSystems[options.lattice][:,:3]
slip_normal = slipSystems[options.lattice][:,3:]
elif options.lattice == latticeChoices[2]:
# convert 4 Miller index notation of hex to orthogonal 3 Miller index notation
for i in range(len(slip_direction)):
slip_direction[i] = np.array([slipSystems['hex'][i,0]*1.5,
(slipSystems['hex'][i,0] + 2.*slipSystems['hex'][i,1])*0.5*np.sqrt(3),
slipSystems['hex'][i,3]*options.CoverA,
])
slip_normal[i] = np.array([slipSystems['hex'][i,4],
(slipSystems['hex'][i,4] + 2.*slipSystems['hex'][i,5])/np.sqrt(3),
slipSystems['hex'][i,7]/options.CoverA,
])
slip_direction /= np.tile(np.linalg.norm(slip_direction,axis=1),(3,1)).T
slip_normal /= np.tile(np.linalg.norm(slip_normal ,axis=1),(3,1)).T
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
force = np.array(options.force)/np.linalg.norm(options.force)
if options.normal is not None:
normal = np.array(options.normal)/np.linalg.norm(options.normal)
if abs(np.dot(force,normal)) > 1e-3:
parser.error('stress plane normal not orthogonal to force direction')
else:
normal = force
if options.lattice in ['bcc','fcc']:
slip_direction = slipSystems[options.lattice][:,:3]
slip_normal = slipSystems[options.lattice][:,3:]
elif options.lattice == 'hex':
slip_direction = np.zeros((len(slipSystems['hex']),3),'d')
slip_normal = np.zeros_like(slip_direction)
# convert 4 Miller index notation of hex to orthogonal 3 Miller index notation
for i in range(len(slip_direction)):
slip_direction[i] = np.array([slipSystems['hex'][i,0]*1.5,
(slipSystems['hex'][i,0] + 2.*slipSystems['hex'][i,1])*0.5*np.sqrt(3),
slipSystems['hex'][i,3]*options.CoverA,
])
slip_normal[i] = np.array([slipSystems['hex'][i,4],
(slipSystems['hex'][i,4] + 2.*slipSystems['hex'][i,5])/np.sqrt(3),
slipSystems['hex'][i,7]/options.CoverA,
])
slip_direction /= np.linalg.norm(slip_direction,axis=1,keepdims=True)
slip_normal /= np.linalg.norm(slip_normal, axis=1,keepdims=True)
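The block above converts the four-index Miller-Bravais slip systems to Cartesian vectors using the c/a ratio. A minimal standalone sketch of that mapping (the helper name and example values are illustrative, not part of the script):

import numpy as np

def hex_to_cartesian(uvtw, hkil, c_over_a):
    # slip direction [u v t w] and plane normal (h k i l) to Cartesian unit vectors
    u, v, _, w = uvtw
    h, k, _, l = hkil
    direction = np.array([1.5*u, (u + 2.*v)*0.5*np.sqrt(3.), w*c_over_a])
    normal    = np.array([h, (h + 2.*k)/np.sqrt(3.), l/c_over_a])
    return direction/np.linalg.norm(direction), normal/np.linalg.norm(normal)

# basal system [2 -1 -1 0](0 0 0 1) with the ideal c/a ratio
d, n = hex_to_cartesian([2,-1,-1,0], [0,0,0,1], np.sqrt(8./3.))
print(d, n)   # [1. 0. 0.] [0. 0. 1.]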
labels = ['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
'({normal[0]:.1g}_{normal[1]:.1g}_{normal[2]:.1g})'\
.format(normal = theNormal, direction = theDirection,
) for theNormal,theDirection in zip(slip_normal,slip_direction)]
for name in filenames:
try:
table = damask.ASCIItable(name = name)
except IOError:
continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read()
o = damask.Rotation.from_quaternion(table.get(options.quaternion))
# ------------------------------------------ sanity checks ----------------------------------------
if not table.label_dimension(options.quaternion) == 4:
damask.util.croak('input {} does not have dimension 4.'.format(options.quaternion))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
force = np.broadcast_to(force, o.shape+(3,))
normal = np.broadcast_to(normal,o.shape+(3,))
slip_direction = np.broadcast_to(slip_direction,o.shape+slip_direction.shape)
slip_normal = np.broadcast_to(slip_normal, o.shape+slip_normal.shape)
S = np.abs(np.einsum('ijk,ik->ij',slip_direction,(o@force))*
np.einsum('ijk,ik->ij',slip_normal, (o@normal)))
column = table.label_index(options.quaternion)
for i,label in enumerate(labels):
table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
'({normal[0]:.1g}_{normal[1]:.1g}_{normal[2]:.1g})'\
.format(normal = theNormal,
direction = theDirection,
) for theNormal,theDirection in zip(slip_normal,slip_direction)])
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
o = damask.Rotation(np.array(list(map(float,table.data[column:column+4]))))
table.data_append( np.abs( np.sum(slip_direction * (o * force) ,axis=1) \
* np.sum(slip_normal * (o * normal),axis=1)))
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)
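For reference, the Schmid factor computed by the einsum above is m = |d . (R f)| * |n . (R p)| per orientation R, slip direction d, plane normal n, loading direction f and stress-plane normal p. A numpy-only sketch of the same reduction (names are illustrative; whether R maps sample to crystal or the other way around follows the damask.Rotation convention, which is assumed here rather than shown):

import numpy as np

def schmid_factors(R, slip_direction, slip_normal, force, normal):
    # R: (N,3,3) rotation matrices, slip_direction/slip_normal: (S,3) unit vectors,
    # force/normal: (3,) unit loading direction and stress-plane normal
    f = np.einsum('nij,j->ni', R, force)      # loading direction seen by each crystal
    p = np.einsum('nij,j->ni', R, normal)     # stress-plane normal seen by each crystal
    return np.abs(np.einsum('sj,nj->ns', slip_direction, f)
                * np.einsum('sj,nj->ns', slip_normal,    p))   # (N,S) Schmid factors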

View File

@ -40,35 +40,22 @@ parser.add_option('-f','--fill',
parser.set_defaults(offset = (0,0,0))
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
origin = geom.get_origin()
size = geom.get_size()
old = new = geom.get_grid()
offset = np.asarray(options.offset)
if options.grid is not None:
new = np.maximum(1,
np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \
else int(n) for o,n in zip(old,options.grid)],dtype=int))
grid = np.maximum(1,
np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \
else int(n) for o,n in zip(geom.grid,options.grid)],dtype=int))
else:
grid = np.array(options.grid,dtype=int)
canvas = np.full(new,options.fill if options.fill is not None
else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype)
l = np.clip( offset, 0,np.minimum(old +offset,new)) # noqa
r = np.clip( offset+old,0,np.minimum(old*2+offset,new))
L = np.clip(-offset, 0,np.minimum(new -offset,old))
R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))
canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = geom.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True))
damask.util.croak(geom.canvas(grid,np.asarray(options.offset),options.fill))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)

View File

@ -22,14 +22,12 @@ Renumber sorted microstructure indices to 1,...,N.
""", version=scriptID)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom.renumber())
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)

View File

@ -5,7 +5,6 @@ import sys
from io import StringIO
from optparse import OptionParser
from scipy import ndimage
import numpy as np
import damask
@ -52,47 +51,27 @@ parser.add_option('-f', '--fill',
parser.set_defaults(degrees = False)
(options, filenames) = parser.parse_args()
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) < 3:
parser.error('more than one rotation specified.')
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) > 3:
parser.error('no rotation specified.')
if options.quaternion is not None:
rot = damask.Rotation.from_quaternion(np.array(options.quaternion)) # we might need P=+1 here, too...
if options.rotation is not None:
rot = damask.Rotation.from_axis_angle(np.array(options.rotation),degrees=options.degrees,normalise=True,P=+1)
if options.matrix is not None:
rot = damask.Rotation.from_matrix(np.array(options.matrix))
if options.eulers is not None:
rot = damask.Rotation.from_Eulers(np.array(options.eulers),degrees=options.degrees)
eulers = rot.as_Eulers(degrees=True)
if filenames == []: filenames = [None]
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) < 3:
parser.error('more than one rotation specified.')
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) > 3:
parser.error('no rotation specified.')
if options.quaternion is not None:
rot = damask.Rotation.from_quaternion(np.array(options.quaternion)) # we might need P=+1 here, too...
if options.rotation is not None:
rot = damask.Rotation.from_axis_angle(np.array(options.rotation),degrees=options.degrees,normalise=True,P=+1)
if options.matrix is not None:
rot = damask.Rotation.from_matrix(np.array(options.matrix))
if options.eulers is not None:
rot = damask.Rotation.from_Eulers(np.array(options.eulers),degrees=options.degrees)
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
size = geom.get_size()
grid = geom.get_grid()
origin = geom.get_origin()
microstructure = geom.get_microstructure()
fill = np.nanmax(microstructure)+1 if options.fill is None else options.fill
dtype = float if np.isnan(fill) or int(fill) != fill or microstructure.dtype==np.float else int
# These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
# this seems to be ok, see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around z
microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around x
microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around z
damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom.rotate(rot,options.fill))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)

View File

@ -40,22 +40,17 @@ parser.set_defaults(origin = (0.0,0.0,0.0),
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
sub = list(map(int,options.substitute))
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
substituted = geom.get_microstructure()
for old,new in zip(sub[0::2],sub[1::2]): substituted[geom.microstructure==old] = new # substitute microstructure indices
substituted += options.microstructure # constant shift
damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
geom.substitute(sub[0::2],sub[1::2])
geom.set_origin(geom.origin+options.origin)
geom.set_microstructure(geom.microstructure+options.microstructure)
damask.util.croak(geom)
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)

View File

@ -242,7 +242,7 @@ class Geom:
def get_grid(self):
"""Return the grid discretization."""
return np.array(self.microstructure.shape)
return np.asarray(self.microstructure.shape)
def get_homogenization(self):
@ -533,8 +533,8 @@ class Geom:
Parameters
----------
grid : iterable of int
new grid dimension
grid : numpy.ndarray of shape (3)
number of grid points in x,y,z direction.
"""
#self.add_comments('geom.py:scale v{}'.format(version)
@ -581,3 +581,90 @@ class Geom:
#self.add_comments('geom.py:renumber v{}'.format(version)
return self.update(renumbered)
def rotate(self,R,fill=None):
"""
Rotate microstructure (pad if required).
Parameters
----------
R : damask.Rotation
rotation to apply to the microstructure.
fill : int or float, optional
microstructure index to fill the corners. Defaults to microstructure.max() + 1.
"""
if fill is None: fill = np.nanmax(self.microstructure) + 1
dtype = float if np.isnan(fill) or int(fill) != fill or self.microstructure.dtype==np.float else int
Eulers = R.as_Eulers(degrees=True)
microstructure_in = self.get_microstructure()
# These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
# see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
for angle,axes in zip(Eulers[::-1], [(0,1),(1,2),(0,1)]):
microstructure_out = ndimage.rotate(microstructure_in,angle,axes,order=0,
prefilter=False,output=dtype,cval=fill)
if np.prod(microstructure_in.shape) == np.prod(microstructure_out.shape):
# avoid scipy interpolation errors for rotations close to multiples of 90°
microstructure_in = np.rot90(microstructure_in,k=np.rint(angle/90.).astype(int),axes=axes)
else:
microstructure_in = microstructure_out
origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid
#self.add_comments('geom.py:rotate v{}'.format(version)
return self.update(microstructure_in,origin=origin,rescale=True)
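The loop above applies the three Bunge Euler angles as successive axis-pair rotations with scipy.ndimage.rotate and switches to np.rot90 whenever the rotated array keeps its size, i.e. for angles at (or numerically close to) multiples of 90°. A small sketch of why that shortcut is available (array contents are arbitrary):

import numpy as np
from scipy import ndimage

a = np.random.randint(1, 5, (4, 6, 8))

exact   = np.rot90(a, k=1, axes=(0, 1))                     # lossless 90° rotation
rotated = ndimage.rotate(a, 90., axes=(0, 1), order=0,      # nearest-neighbour resampling
                         prefilter=False, output=int, cval=-1)

# the shapes agree, which is the condition Geom.rotate checks before preferring np.rot90
assert exact.shape == rotated.shape == (6, 4, 8)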
def canvas(self,grid=None,offset=None,fill=None):
"""
Crop or enlarge/pad microstructure.
Parameters
----------
grid : numpy.ndarray of shape (3)
number of grid points in x,y,z direction.
offset : numpy.ndarray of shape (3)
offset (measured in grid points) from old to new microstructure[0,0,0].
fill : int or float, optional
microstructure index to fill the corners. Defaults to microstructure.max() + 1.
"""
if fill is None: fill = np.nanmax(self.microstructure) + 1
if offset is None: offset = 0
dtype = float if int(fill) != fill or self.microstructure.dtype==np.float else int
canvas = np.full(self.grid if grid is None else grid,
fill if fill is not None else np.nanmax(self.microstructure)+1,dtype)
l = np.clip( offset, 0,np.minimum(self.grid +offset,grid)) # noqa
r = np.clip( offset+self.grid,0,np.minimum(self.grid*2+offset,grid))
L = np.clip(-offset, 0,np.minimum(grid -offset,self.grid))
R = np.clip(-offset+grid, 0,np.minimum(grid*2 -offset,self.grid))
canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = self.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
#self.add_comments('geom.py:canvas v{}'.format(version)
return self.update(canvas,origin=self.origin+offset*self.size/self.grid,rescale=True)
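The four clip expressions bound the overlap between the old and the new grid: l/r index into the new canvas, L/R into the existing microstructure, so the assignment copies exactly the part that fits. A one-dimensional walk-through of the same arithmetic (numbers are arbitrary):

import numpy as np

old, new, offset = np.array([8]), np.array([5]), np.array([2])

l = np.clip( offset,       0, np.minimum(old   + offset, new))   # overlap start in the canvas
r = np.clip( offset + old, 0, np.minimum(old*2 + offset, new))   # overlap end   in the canvas
L = np.clip(-offset,       0, np.minimum(new   - offset, old))   # overlap start in the old data
R = np.clip(-offset + new, 0, np.minimum(new*2 - offset, old))   # overlap end   in the old data

canvas = np.full(new, -1)                         # -1 stands in for the fill index
canvas[l[0]:r[0]] = np.arange(old[0])[L[0]:R[0]]  # paste the overlapping slab
print(canvas)                                     # [-1 -1  0  1  2]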
def substitute(self,from_microstructure,to_microstructure):
"""
Substitute microstructure indices.
Parameters
----------
from_microstructure : iterable of ints
microstructure indices to be substituted.
to_microstructure : iterable of ints
new microstructure indices.
"""
substituted = self.get_microstructure()
for from_ms,to_ms in zip(from_microstructure,to_microstructure):
substituted[self.microstructure==from_ms] = to_ms
#self.add_comments('geom.py:substitute v{}'.format(version)
return self.update(substituted)
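With rotate, canvas and substitute available on Geom, the processing scripts above shrink to short method calls. A hedged usage sketch of the substitute/translate workflow (file names and index values are placeholders):

import numpy as np
import damask

geom = damask.Geom.from_file('example.geom')        # placeholder input
geom.substitute([1, 2], [2, 1])                     # swap microstructure indices 1 and 2
geom.set_microstructure(geom.microstructure + 10)   # constant shift, as geom_translate.py does
geom.set_origin(geom.origin + np.array([0., 0., 1e-6]))
geom.to_file('example_shifted.geom', pack=False)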

View File

@ -1,7 +1,9 @@
import multiprocessing
import re
import inspect
import glob
import os
import datetime
import xml.etree.ElementTree as ET
import xml.dom.minidom
from functools import partial
@ -88,6 +90,8 @@ class Result:
self.fname = os.path.abspath(fname)
self._allow_overwrite = False
def __repr__(self):
"""Show selected data."""
@ -123,7 +127,7 @@ class Result:
"""
# allow True/False and string arguments
if datasets is True:
datasets = ['*']
elif datasets is False:
datasets = []
@ -142,6 +146,7 @@ class Result:
choice = []
for c in iterator:
idx = np.searchsorted(self.times,c)
if idx >= len(self.times): continue
if np.isclose(c,self.times[idx]):
choice.append(self.increments[idx])
elif np.isclose(c,self.times[idx+1]):
@ -162,6 +167,16 @@ class Result:
self.selection[what] = diff_sorted
def enable_overwrite(self):
print(util.bcolors().WARNING,util.bcolors().BOLD,
'Warning: Enabled overwrite of existing datasets!',
util.bcolors().ENDC)
self._allow_overwrite = True
def disable_overwrite(self):
self._allow_overwrite = False
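These two switches control whether _add_generic_pointwise (further down) may replace a dataset that already exists in the HDF5 file; when it does, the dataset gets an 'Overwritten' attribute and a refreshed 'Created' timestamp. A usage sketch (the result file name is a placeholder):

import damask

r = damask.Result('geom_load.hdf5')   # placeholder file name
r.add_Cauchy('P', 'F')                # first call creates the dataset 'sigma'
r.enable_overwrite()                  # warns, then permits in-place replacement
r.add_Cauchy('P', 'F')                # recomputes sigma and overwrites it
r.disable_overwrite()                 # restore the protected default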
def incs_in_range(self,start,end):
selected = []
for i,inc in enumerate([int(i[3:]) for i in self.increments]):
@ -489,7 +504,7 @@ class Result:
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'result.py:add_abs v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_absolute(self,x):
@ -517,7 +532,7 @@ class Result:
'meta': {
'Unit': kwargs['unit'],
'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
'Creator': 'result.py:add_calculation v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_calculation(self,label,formula,unit='n/a',description=None,vectorized=True):
@ -553,10 +568,9 @@ class Result:
'label': 'sigma',
'meta': {
'Unit': P['meta']['Unit'],
'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],
P['meta']['Description'])+\
'and {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_Cauchy v{}'.format(version)
'Description': 'Cauchy stress calculated from {} ({}) and {} ({})'\
.format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
'Creator': inspect.stack()[0][3][1:]
}
}
def add_Cauchy(self,P='P',F='F'):
@ -582,7 +596,7 @@ class Result:
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_determinant v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_determinant(self,T):
@ -600,16 +614,13 @@ class Result:
@staticmethod
def _add_deviator(T):
if not T['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.deviatoric_part(T['data']),
'label': 's_{}'.format(T['label']),
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_deviator v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_deviator(self,T):
@ -626,17 +637,24 @@ class Result:
@staticmethod
def _add_eigenvalue(T_sym):
def _add_eigenvalue(T_sym,eigenvalue):
if eigenvalue == 'max':
label,p = 'Maximum',2
elif eigenvalue == 'mid':
label,p = 'Intermediate',1
elif eigenvalue == 'min':
label,p = 'Minimum',0
return {
'data': mechanics.eigenvalues(T_sym['data']),
'label': 'lambda({})'.format(T_sym['label']),
'data': mechanics.eigenvalues(T_sym['data'])[:,p],
'label': 'lambda_{}({})'.format(eigenvalue,T_sym['label']),
'meta' : {
'Unit': T_sym['meta']['Unit'],
'Description': 'Eigenvalues of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_eigenvalues v{}'.format(version)
'Description': '{} eigenvalue of {} ({})'.format(label,T_sym['label'],T_sym['meta']['Description']),
'Creator': inspect.stack()[0][3][1:]
}
}
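The index p relies on the eigenvalues coming back in ascending order; assuming mechanics.eigenvalues wraps numpy.linalg.eigvalsh (not shown here), column 0 is the minimum, 1 the intermediate and 2 the maximum eigenvalue. A quick check of that ordering with plain numpy:

import numpy as np

A = np.diag([3., -1., 2.])               # symmetric tensor with known eigenvalues
w = np.linalg.eigvalsh(A)                # ascending: [-1.  2.  3.]
lam_min, lam_mid, lam_max = w            # hence p = 0 / 1 / 2 for 'min' / 'mid' / 'max'
assert np.allclose(w, [-1., 2., 3.])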
def add_eigenvalues(self,T_sym):
def add_eigenvalue(self,T_sym,eigenvalue='max'):
"""
Add eigenvalues of symmetric tensor.
@ -644,33 +662,46 @@ class Result:
----------
T_sym : str
Label of symmetric tensor dataset.
eigenvalue : str, optional
Eigenvalue. Select from max, mid, min. Defaults to max.
"""
self._add_generic_pointwise(self._add_eigenvalue,{'T_sym':T_sym})
self._add_generic_pointwise(self._add_eigenvalue,{'T_sym':T_sym},{'eigenvalue':eigenvalue})
@staticmethod
def _add_eigenvector(T_sym):
def _add_eigenvector(T_sym,eigenvalue):
if eigenvalue == 'max':
label,p = 'maximum',2
elif eigenvalue == 'mid':
label,p = 'intermediate',1
elif eigenvalue == 'min':
label,p = 'minimum',0
return {
'data': mechanics.eigenvectors(T_sym['data']),
'label': 'v({})'.format(T_sym['label']),
'data': mechanics.eigenvectors(T_sym['data'])[:,p],
'label': 'v_{}({})'.format(eigenvalue,T_sym['label']),
'meta' : {
'Unit': '1',
'Description': 'Eigenvectors of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_eigenvectors v{}'.format(version)
'Description': 'Eigenvector corresponding to {} eigenvalue of {} ({})'\
.format(label,T_sym['label'],T_sym['meta']['Description']),
'Creator': inspect.stack()[0][3][1:]
}
}
def add_eigenvectors(self,T_sym):
def add_eigenvector(self,T_sym,eigenvalue='max'):
"""
Add eigenvectors of symmetric tensor.
Add eigenvector of symmetric tensor.
Parameters
----------
T_sym : str
Label of symmetric tensor dataset.
eigenvalue : str, optional
Eigenvalue to which the eigenvector corresponds. Select from
max, mid, min. Defaults to max.
"""
self._add_generic_pointwise(self._add_eigenvector,{'T_sym':T_sym})
self._add_generic_pointwise(self._add_eigenvector,{'T_sym':T_sym},{'eigenvalue':eigenvalue})
@staticmethod
@ -693,7 +724,7 @@ class Result:
'Unit': 'RGB (8bit)',
'Lattice': lattice,
'Description': 'Inverse Pole Figure (IPF) colors along sample direction [{} {} {}]'.format(*m),
'Creator': 'result.py:add_IPFcolor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_IPFcolor(self,q,l):
@ -719,7 +750,7 @@ class Result:
'meta': {
'Unit': T_sym['meta']['Unit'],
'Description': 'Maximum shear component of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_maximum_shear v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_maximum_shear(self,T_sym):
@ -746,7 +777,7 @@ class Result:
'meta': {
'Unit': T_sym['meta']['Unit'],
'Description': 'Mises equivalent {} of {} ({})'.format(t,T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_Mises v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_Mises(self,T_sym):
@ -782,7 +813,7 @@ class Result:
'meta': {
'Unit': x['meta']['Unit'],
'Description': '{}-norm of {} {} ({})'.format(o,t,x['label'],x['meta']['Description']),
'Creator': 'result.py:add_norm v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_norm(self,x,ord=None):
@ -807,10 +838,9 @@ class Result:
'label': 'S',
'meta': {
'Unit': P['meta']['Unit'],
'Description': '2. Kirchhoff stress calculated from {} ({}) '.format(P['label'],
P['meta']['Description'])+\
'and {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_PK2 v{}'.format(version)
'Description': '2. Piola-Kirchhoff stress calculated from {} ({}) and {} ({})'\
.format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
'Creator': inspect.stack()[0][3][1:]
}
}
def add_PK2(self,P='P',F='F'):
@ -833,22 +863,20 @@ class Result:
pole = np.array(p)
unit_pole = pole/np.linalg.norm(pole)
m = util.scale_to_coprime(pole)
coords = np.empty((len(q['data']),2))
for i,qu in enumerate(q['data']):
o = Rotation(np.array([qu['w'],qu['x'],qu['y'],qu['z']]))
rotatedPole = o*unit_pole # rotate pole according to crystal orientation
(x,y) = rotatedPole[0:2]/(1.+abs(unit_pole[2])) # stereographic projection
coords[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if polar else [x,y]
rot = Rotation(q['data'].view(np.double).reshape(-1,4))
rotatedPole = rot @ np.broadcast_to(unit_pole,rot.shape+(3,)) # rotate pole according to crystal orientation
xy = rotatedPole[:,0:2]/(1.+abs(unit_pole[2])) # stereographic projection
coords = xy if not polar else \
np.block([np.sqrt(xy[:,0:1]*xy[:,0:1]+xy[:,1:2]*xy[:,1:2]),np.arctan2(xy[:,1:2],xy[:,0:1])])
return {
'data': coords,
'label': 'p^{}_[{} {} {})'.format(u'' if polar else 'xy',*m),
'meta' : {
'Unit': '1',
'Description': '{} coordinates of stereographic projection of pole (direction/plane) in crystal frame'\
.format('Polar' if polar else 'Cartesian'),
'Creator' : 'result.py:add_pole v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
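The projection used here is the stereographic map (x, y) = (p_x, p_y)/(1 + |p_z|), with the optional polar form (r, phi) = (sqrt(x^2 + y^2), atan2(y, x)). A textbook sketch of that step (it normalizes by each vector's own z component, whereas the code above reuses the unrotated pole's z; the helper name is illustrative):

import numpy as np

def stereographic(poles, polar=False):
    # poles: (N,3) unit vectors; returns (N,2) Cartesian or polar projection coordinates
    xy = poles[:, :2]/(1. + np.abs(poles[:, 2:3]))
    if not polar:
        return xy
    return np.column_stack([np.linalg.norm(xy, axis=1),        # radius
                            np.arctan2(xy[:, 1], xy[:, 0])])   # azimuth

p = np.array([[0., 0., 1.], [1., 0., 0.]])
print(stereographic(p))            # [[0. 0.] [1. 0.]]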
def add_pole(self,q,p,polar=False):
@ -870,15 +898,13 @@ class Result:
@staticmethod
def _add_rotational_part(F):
if not F['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.rotational_part(F['data']),
'label': 'R({})'.format(F['label']),
'meta': {
'Unit': F['meta']['Unit'],
'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_rotational_part v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_rotational_part(self,F):
@ -896,16 +922,13 @@ class Result:
@staticmethod
def _add_spherical(T):
if not T['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.spherical_part(T['data']),
'label': 'p_{}'.format(T['label']),
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_spherical v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_spherical(self,T):
@ -923,16 +946,13 @@ class Result:
@staticmethod
def _add_strain_tensor(F,t,m):
if not F['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.strain_tensor(F['data'],t,m),
'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
'meta': {
'Unit': F['meta']['Unit'],
'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_strain_tensor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_strain_tensor(self,F='F',t='V',m=0.0):
@ -957,9 +977,6 @@ class Result:
@staticmethod
def _add_stretch_tensor(F,t):
if not F['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
'label': '{}({})'.format(t,F['label']),
@ -967,7 +984,7 @@ class Result:
'Unit': F['meta']['Unit'],
'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
F['label'],F['meta']['Description']),
'Creator': 'result.py:add_stretch_tensor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_stretch_tensor(self,F='F',t='V'):
@ -1030,11 +1047,23 @@ class Result:
continue
lock.acquire()
with h5py.File(self.fname, 'a') as f:
try: # ToDo: Replace if exists?
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
try:
if self._allow_overwrite and result[0]+'/'+result[1]['label'] in f:
dataset = f[result[0]+'/'+result[1]['label']]
dataset[...] = result[1]['data']
dataset.attrs['Overwritten'] = 'Yes'.encode()
else:
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
now = datetime.datetime.now().astimezone()
dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
for l,v in result[1]['meta'].items():
dataset.attrs[l]=v.encode()
except OSError as err:
creator = 'damask.Result.{} v{}'.format(dataset.attrs['Creator'].decode(),version)
dataset.attrs['Creator'] = creator.encode()
except (OSError,RuntimeError) as err:
print('Could not add dataset: {}.'.format(err))
lock.release()
@ -1042,7 +1071,7 @@ class Result:
pool.join()
def write_XMDF(self):
def write_XDMF(self):
"""
Write XDMF file to directly visualize data in DADF5 file.
@ -1050,7 +1079,7 @@ class Result:
Selection is not taken into account.
"""
if len(self.constituents) != 1 or not self.structured:
raise NotImplementedError
raise NotImplementedError('XDMF only available for grid results with 1 constituent.')
xdmf=ET.Element('Xdmf')
xdmf.attrib={'Version': '2.0',
@ -1218,14 +1247,6 @@ class Result:
###################################################################################################
# BEGIN DEPRECATED
def _time_to_inc(self,start,end):
selected = []
for i,time in enumerate(self.times):
if start <= time <= end:
selected.append(self.increments[i])
return selected
def set_by_time(self,start,end):
"""
Set active increments based on start and end time.
@ -1238,4 +1259,4 @@ class Result:
end time (included)
"""
self._manage_selection('set','increments',self._time_to_inc(start,end))
self._manage_selection('set','times',self.times_in_range(start,end))

View File

@ -12,7 +12,7 @@ _R1 = (3.*np.pi/4.)**(1./3.)
class Rotation:
u"""
Orientation stored with functionality for conversion to different representations.
The following conventions apply:
- coordinate frames are right-handed.
@ -161,10 +161,10 @@ class Rotation:
if self.shape == ():
q = np.broadcast_to(self.quaternion,shape+(4,))
else:
q = np.block([np.broadcast_to(self.quaternion[...,0:1],shape+(1,)),
np.broadcast_to(self.quaternion[...,1:2],shape+(1,)),
np.broadcast_to(self.quaternion[...,2:3],shape+(1,)),
np.broadcast_to(self.quaternion[...,3:4],shape+(1,))])
q = np.block([np.broadcast_to(self.quaternion[...,0:1],shape).reshape(shape+(1,)),
np.broadcast_to(self.quaternion[...,1:2],shape).reshape(shape+(1,)),
np.broadcast_to(self.quaternion[...,2:3],shape).reshape(shape+(1,)),
np.broadcast_to(self.quaternion[...,3:4],shape).reshape(shape+(1,))])
return self.__class__(q)
@ -538,7 +538,7 @@ class Rotation:
)
# reduce Euler angles to definition range
eu[np.abs(eu)<1.e-6] = 0.0
eu = np.where(eu<0, (eu+2.0*np.pi)%np.array([2.0*np.pi,np.pi,2.0*np.pi]),eu)
eu = np.where(eu<0, (eu+2.0*np.pi)%np.array([2.0*np.pi,np.pi,2.0*np.pi]),eu) # needed?
return eu
@staticmethod

View File

@ -0,0 +1,85 @@
4 header
grid a 8 b 10 c 8
size x 8e-06 y 1e-05 z 8e-06
origin x 0.0 y -2.5e-06 z -2e-06
homogenization 1
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 30 14 18 42 42 42
42 42 29 13 17 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 31 15 19 42 42 42
42 6 10 2 2 42 42 42
42 1 2 2 2 2 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 32 16 20 42 42 42
42 7 11 2 2 42 42 42
42 7 11 2 2 42 42 42
42 2 2 2 2 2 42 42
42 42 2 2 31 35 42 42
42 42 22 26 10 14 1 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 4 2 2 2 2 42 42
42 42 2 2 32 36 42 42
42 42 24 28 12 16 1 42
42 42 42 7 7 1 1 42
42 42 42 7 7 1 1 42
42 42 42 1 1 1 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 25 29 13 17 1 42
42 42 42 8 8 1 1 42
42 42 42 1 1 1 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 1 1 1 42 42
42 42 42 1 1 1 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42

View File

@ -0,0 +1,104 @@
4 header
grid a 11 b 11 c 9
size x 1.1e-05 y 1.1000000000000001e-05 z 9e-06
origin x -1.5e-06 y -3.0000000000000005e-06 z -2.4999999999999998e-06
homogenization 1
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 1 2 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 1 42 42 42 42 42 42
42 42 42 42 1 5 42 42 42 42 42
42 42 42 1 7 4 42 42 42 42 42
42 42 42 42 42 27 42 42 42 42 42
42 42 42 42 42 42 2 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 1 1 42 42 42 42 42 42
42 42 42 1 1 9 29 42 42 42 42
42 42 1 1 11 8 28 2 42 42 42
42 42 42 1 10 31 2 42 42 42 42
42 42 42 42 30 2 2 2 42 42 42
42 42 42 42 42 42 2 1 42 42 42
42 42 42 42 42 42 42 1 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 1 42 42 42 42 42 42 42
42 42 42 1 1 42 42 42 42 42 42
42 42 1 16 36 12 32 42 42 42 42
42 42 42 15 35 2 2 2 42 42 42
42 42 42 42 2 2 2 11 3 42 42
42 42 42 42 42 42 10 6 42 42 42
42 42 42 42 42 42 42 6 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 1 42 42 42 42 42 42 42
42 42 42 1 17 42 42 42 42 42 42
42 42 42 16 36 37 2 42 42 42 42
42 42 42 42 39 2 2 12 42 42 42
42 42 42 38 2 2 2 11 8 42 42
42 42 42 42 2 2 14 30 42 42 42
42 42 42 42 42 42 13 30 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 40 42 42 42 42 42 42
42 42 42 42 42 2 42 42 42 42 42
42 42 42 42 42 2 2 15 42 42 42
42 42 42 42 42 2 18 42 42 42 42
42 42 42 42 42 42 17 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 2 20 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42

View File

@ -1,68 +1,4 @@
68 header
geom_fromVoronoiTessellation 2.0.3-1073-g6f3cb071
<texture>
[Grain1]
(gauss) phi1 358.98 Phi 65.62 phi2 24.48
[Grain2]
(gauss) phi1 121.05 Phi 176.11 phi2 295.73
[Grain3]
(gauss) phi1 43.79 Phi 113.76 phi2 345.90
[Grain4]
(gauss) phi1 265.15 Phi 62.52 phi2 299.71
[Grain5]
(gauss) phi1 221.23 Phi 26.54 phi2 207.05
[Grain6]
(gauss) phi1 249.81 Phi 61.47 phi2 152.14
[Grain7]
(gauss) phi1 332.45 Phi 99.16 phi2 345.34
[Grain8]
(gauss) phi1 312.27 Phi 118.27 phi2 181.59
[Grain9]
(gauss) phi1 303.10 Phi 48.21 phi2 358.03
[Grain10]
(gauss) phi1 338.26 Phi 48.11 phi2 176.78
[Grain11]
(gauss) phi1 115.17 Phi 56.54 phi2 223.84
[Grain12]
(gauss) phi1 281.04 Phi 97.48 phi2 27.94
<microstructure>
[Grain1]
crystallite 1
(constituent) phase 1 texture 1 fraction 1.0
[Grain2]
crystallite 1
(constituent) phase 1 texture 2 fraction 1.0
[Grain3]
crystallite 1
(constituent) phase 1 texture 3 fraction 1.0
[Grain4]
crystallite 1
(constituent) phase 1 texture 4 fraction 1.0
[Grain5]
crystallite 1
(constituent) phase 1 texture 5 fraction 1.0
[Grain6]
crystallite 1
(constituent) phase 1 texture 6 fraction 1.0
[Grain7]
crystallite 1
(constituent) phase 1 texture 7 fraction 1.0
[Grain8]
crystallite 1
(constituent) phase 1 texture 8 fraction 1.0
[Grain9]
crystallite 1
(constituent) phase 1 texture 9 fraction 1.0
[Grain10]
crystallite 1
(constituent) phase 1 texture 10 fraction 1.0
[Grain11]
crystallite 1
(constituent) phase 1 texture 11 fraction 1.0
[Grain12]
crystallite 1
(constituent) phase 1 texture 12 fraction 1.0
<!skip>
4 header
grid a 6 b 7 c 8
size x 0.75 y 0.875 z 1.0
origin x 0.0 y 0.0 z 0.0

View File

@ -0,0 +1,61 @@
4 header
grid a 6 b 7 c 8
size x 0.75 y 0.875 z 1.0
origin x 0.0 y 0.0 z 0.0
homogenization 1
3 3 3 4 3 3
3 1 1 1 3 3
3 5 1 1 1 3
1 5 5 1 1 1
1 5 5 1 1 1
6 3 3 4 1 6
6 3 3 4 4 6
6 3 3 1 3 3
3 1 1 1 3 3
3 1 1 1 1 1
1 1 1 1 1 1
6 6 3 1 1 1
6 3 3 3 6 6
6 3 3 3 6 6
6 3 3 1 1 6
3 1 1 1 1 3
6 1 1 1 2 2
1 6 2 2 2 2
6 6 2 2 2 6
6 3 3 3 6 6
6 3 3 3 6 6
5 6 6 6 1 6
6 6 6 6 2 2
6 6 6 2 2 2
2 6 2 2 2 2
6 5 2 2 2 2
6 5 5 2 2 6
5 5 5 3 6 6
5 5 6 6 6 5
6 6 6 6 6 6
6 6 6 6 2 2
4 4 6 2 2 2
4 4 2 2 2 2
5 5 5 2 2 2
5 5 5 5 2 5
5 5 5 4 4 5
6 6 6 6 4 4
4 4 5 5 2 4
4 4 5 2 2 4
4 4 2 2 2 2
5 5 5 2 2 2
5 5 5 4 4 5
5 5 4 4 4 3
4 5 5 5 4 3
4 4 5 5 5 4
4 4 5 5 2 4
4 4 2 2 2 2
5 5 2 2 2 2
5 5 4 4 4 4
3 4 4 4 4 3
3 5 5 4 3 3
4 5 5 5 3 3
4 5 5 5 1 1
4 4 5 2 1 1
6 4 4 4 4 1
3 4 4 4 4 3

View File

@ -124,6 +124,3 @@ a_slip 2.25
h0_slipslip 75e6
interaction_slipslip 1 1 1.4 1.4 1.4 1.4
atol_resistance 1
<crystallite>
[dummy]

View File

@ -5,12 +5,13 @@ import pytest
import numpy as np
from damask import Geom
from damask import Rotation
def geom_equal(a,b):
return np.all(a.get_microstructure() == b.get_microstructure()) and \
np.all(a.get_size() == b.get_size()) and \
np.all(a.get_grid() == b.get_grid())
np.all(a.get_grid() == b.get_grid()) and \
np.allclose(a.get_size(), b.get_size())
@pytest.fixture
def default():
@ -36,6 +37,7 @@ class TestGeom:
default.get_size(),
default.get_origin()
)
print(modified)
assert geom_equal(modified,default)
@ -57,6 +59,22 @@ class TestGeom:
new = Geom.from_file(tmpdir.join('default.geom'))
assert geom_equal(new,default)
def test_invalid_combination(self,default):
with pytest.raises(ValueError):
default.update(default.microstructure[1:,1:,1:],size=np.ones(3), rescale=True)
def test_invalid_size(self,default):
with pytest.raises(ValueError):
default.update(default.microstructure[1:,1:,1:],size=np.ones(2))
def test_invalid_microstructure(self,default):
with pytest.raises(ValueError):
default.update(default.microstructure[1])
def test_invalid_homogenization(self,default):
with pytest.raises(TypeError):
default.set_homogenization(homogenization=0)
@pytest.mark.parametrize('directions,reflect',[
(['x'], False),
(['x','y','z'],True),
@ -98,6 +116,52 @@ class TestGeom:
if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference))
def test_renumber(self,default):
modified = copy.deepcopy(default)
microstructure = modified.get_microstructure()
for m in np.unique(microstructure):
microstructure[microstructure==m] = microstructure.max() + np.random.randint(1,30)
modified.update(microstructure)
assert not geom_equal(modified,default)
modified.renumber()
assert geom_equal(modified,default)
def test_substitute(self,default):
modified = copy.deepcopy(default)
microstructure = modified.get_microstructure()
offset = np.random.randint(1,500)
microstructure+=offset
modified.update(microstructure)
assert not geom_equal(modified,default)
modified.substitute(np.arange(default.microstructure.max())+1+offset,
np.arange(default.microstructure.max())+1)
assert geom_equal(modified,default)
@pytest.mark.parametrize('axis_angle',[np.array([1,0,0,86.7]), np.array([0,1,0,90.4]), np.array([0,0,1,90]),
np.array([1,0,0,175]),np.array([0,-1,0,178]),np.array([0,0,1,180])])
def test_rotate360(self,default,axis_angle):
modified = copy.deepcopy(default)
for i in range(np.rint(360/axis_angle[3]).astype(int)):
modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True))
assert geom_equal(modified,default)
@pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0],
[0.0,32.0,240.0]])
def test_rotate(self,default,update,reference_dir,Eulers):
modified = copy.deepcopy(default)
modified.rotate(Rotation.from_Eulers(Eulers,degrees=True))
tag = 'Eulers={}-{}-{}'.format(*Eulers)
reference = os.path.join(reference_dir,'rotate_{}.geom'.format(tag))
if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference))
def test_canvas(self,default):
grid_add = np.random.randint(0,30,(3))
modified = copy.deepcopy(default)
modified.canvas(modified.grid + grid_add)
e = default.grid
assert np.all(modified.microstructure[:e[0],:e[1],:e[2]] == default.microstructure)
@pytest.mark.parametrize('periodic',[True,False])
def test_tessellation_approaches(self,periodic):
grid = np.random.randint(10,20,3)

View File

@ -0,0 +1,41 @@
import random
import pytest
import numpy as np
from damask import Symmetry
class TestSymmetry:
@pytest.mark.parametrize('invalid_symmetry',['fcc','bcc','hello'])
def test_invalid_symmetry(self,invalid_symmetry):
with pytest.raises(KeyError):
s = Symmetry(invalid_symmetry) # noqa
def test_equal(self):
symmetry = random.choice(Symmetry.lattices)
print(symmetry)
assert Symmetry(symmetry) == Symmetry(symmetry)
def test_not_equal(self):
symmetries = random.sample(Symmetry.lattices,k=2)
assert Symmetry(symmetries[0]) != Symmetry(symmetries[1])
@pytest.mark.parametrize('lattice',Symmetry.lattices)
def test_inFZ(self,lattice):
assert Symmetry(lattice).inFZ(np.zeros(3))
@pytest.mark.parametrize('lattice',Symmetry.lattices)
def test_inDisorientationSST(self,lattice):
assert Symmetry(lattice).inDisorientationSST(np.zeros(3))
@pytest.mark.parametrize('lattice',Symmetry.lattices)
@pytest.mark.parametrize('proper',[True,False])
def test_inSST(self,lattice,proper):
assert Symmetry(lattice).inSST(np.zeros(3),proper)
@pytest.mark.parametrize('function',['inFZ','inDisorientationSST'])
def test_invalid_argument(self,function):
s = Symmetry() # noqa
with pytest.raises(ValueError):
eval('s.{}(np.ones(4))'.format(function))

View File

@ -1,9 +1,13 @@
import time
import shutil
import os
from datetime import datetime
import pytest
import numpy as np
import h5py
import damask
from damask import Result
from damask import mechanics
@ -13,9 +17,16 @@ def default(tmp_path,reference_dir):
fname = '12grains6x7x8_tensionY.hdf5'
shutil.copy(os.path.join(reference_dir,fname),tmp_path)
f = Result(os.path.join(tmp_path,fname))
f.set_by_time(20.0,20.0)
f.pick('times',20.0)
return f
@pytest.fixture
def single_phase(tmp_path,reference_dir):
"""Single phase Result file in temp location for modification."""
fname = '6grains6x7x8_single_phase_tensionY.hdf5'
shutil.copy(os.path.join(reference_dir,fname),tmp_path)
return Result(os.path.join(tmp_path,fname))
@pytest.fixture
def reference_dir(reference_dir_base):
"""Directory containing reference results."""
@ -28,12 +39,56 @@ class TestResult:
print(default)
def test_time_increments(self,default):
shape = default.read_dataset(default.get_dataset_location('F'),0).shape
default.set_by_time(0.0,20.0)
for i in default.iterate('increments'):
assert shape == default.read_dataset(default.get_dataset_location('F'),0).shape
def test_pick_all(self,default):
default.pick('increments',True)
a = default.get_dataset_location('F')
default.pick('increments','*')
b = default.get_dataset_location('F')
default.pick('increments',default.incs_in_range(0,np.iinfo(int).max))
c = default.get_dataset_location('F')
default.pick('times',True)
d = default.get_dataset_location('F')
default.pick('times','*')
e = default.get_dataset_location('F')
default.pick('times',default.times_in_range(0.0,np.inf))
f = default.get_dataset_location('F')
assert a == b == c == d == e ==f
@pytest.mark.parametrize('what',['increments','times','constituents']) # ToDo: discuss materialpoints
def test_pick_none(self,default,what):
default.pick(what,False)
a = default.get_dataset_location('F')
default.pick(what,[])
b = default.get_dataset_location('F')
assert a == b == []
@pytest.mark.parametrize('what',['increments','times','constituents']) # ToDo: discuss materialpoints
def test_pick_more(self,default,what):
default.pick(what,False)
default.pick_more(what,'*')
a = default.get_dataset_location('F')
default.pick(what,True)
b = default.get_dataset_location('F')
assert a == b
@pytest.mark.parametrize('what',['increments','times','constituents']) # ToDo: discuss materialpoints
def test_pick_less(self,default,what):
default.pick(what,True)
default.pick_less(what,'*')
a = default.get_dataset_location('F')
default.pick(what,False)
b = default.get_dataset_location('F')
assert a == b == []
def test_pick_invalid(self,default):
with pytest.raises(AttributeError):
default.pick('invalid',True)
def test_add_absolute(self,default):
default.add_absolute('Fe')
@ -77,24 +132,40 @@ class TestResult:
in_file = default.read_dataset(loc['s_P'],0)
assert np.allclose(in_memory,in_file)
def test_add_eigenvalues(self,default):
@pytest.mark.parametrize('eigenvalue,function',[('max',np.amax),('min',np.amin)])
def test_add_eigenvalue(self,default,eigenvalue,function):
default.add_Cauchy('P','F')
default.add_eigenvalues('sigma')
loc = {'sigma' :default.get_dataset_location('sigma'),
'lambda(sigma)':default.get_dataset_location('lambda(sigma)')}
in_memory = mechanics.eigenvalues(default.read_dataset(loc['sigma'],0))
in_file = default.read_dataset(loc['lambda(sigma)'],0)
default.add_eigenvalue('sigma',eigenvalue)
loc = {'sigma' :default.get_dataset_location('sigma'),
'lambda':default.get_dataset_location('lambda_{}(sigma)'.format(eigenvalue))}
in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True)
in_file = default.read_dataset(loc['lambda'],0)
assert np.allclose(in_memory,in_file)
def test_add_eigenvectors(self,default):
@pytest.mark.parametrize('eigenvalue,idx',[('max',2),('mid',1),('min',0)])
def test_add_eigenvector(self,default,eigenvalue,idx):
default.add_Cauchy('P','F')
default.add_eigenvectors('sigma')
default.add_eigenvector('sigma',eigenvalue)
loc = {'sigma' :default.get_dataset_location('sigma'),
'v(sigma)':default.get_dataset_location('v(sigma)')}
in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))
'v(sigma)':default.get_dataset_location('v_{}(sigma)'.format(eigenvalue))}
in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx]
in_file = default.read_dataset(loc['v(sigma)'],0)
assert np.allclose(in_memory,in_file)
@pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]])
def test_add_IPFcolor(self,default,d):
default.add_IPFcolor('orientation',d)
loc = {'orientation': default.get_dataset_location('orientation'),
'color': default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))}
qu = default.read_dataset(loc['orientation']).view(np.double).reshape(-1,4)
crystal_structure = default.get_crystal_structure()
in_memory = np.empty((qu.shape[0],3),np.uint8)
for i,q in enumerate(qu):
o = damask.Orientation(q,crystal_structure).reduced()
in_memory[i] = np.uint8(o.IPFcolor(np.array(d))*255)
in_file = default.read_dataset(loc['color'])
assert np.allclose(in_memory,in_file)
def test_add_maximum_shear(self,default):
default.add_Cauchy('P','F')
default.add_maximum_shear('sigma')
@ -143,6 +214,20 @@ class TestResult:
in_file = default.read_dataset(loc['S'],0)
assert np.allclose(in_memory,in_file)
@pytest.mark.parametrize('polar',[True,False])
def test_add_pole(self,default,polar):
pole = np.array([1.,0.,0.])
default.add_pole('orientation',pole,polar)
loc = {'orientation': default.get_dataset_location('orientation'),
'pole': default.get_dataset_location('p^{}_[1 0 0)'.format(u'' if polar else 'xy'))}
rot = damask.Rotation(default.read_dataset(loc['orientation']).view(np.double))
rotated_pole = rot * np.broadcast_to(pole,rot.shape+(3,))
xy = rotated_pole[:,0:2]/(1.+abs(pole[2]))
in_memory = xy if not polar else \
np.block([np.sqrt(xy[:,0:1]*xy[:,0:1]+xy[:,1:2]*xy[:,1:2]),np.arctan2(xy[:,1:2],xy[:,0:1])])
in_file = default.read_dataset(loc['pole'])
assert np.allclose(in_memory,in_file)
def test_add_rotational_part(self,default):
default.add_rotational_part('F')
loc = {'F': default.get_dataset_location('F'),
@ -185,3 +270,42 @@ class TestResult:
in_memory = mechanics.left_stretch(default.read_dataset(loc['F'],0))
in_file = default.read_dataset(loc['V(F)'],0)
assert np.allclose(in_memory,in_file)
def test_add_invalid(self,default):
with pytest.raises(TypeError):
default.add_calculation('#invalid#*2')
@pytest.mark.parametrize('overwrite',['off','on'])
def test_add_overwrite(self,default,overwrite):
default.pick('times',default.times_in_range(0,np.inf)[-1])
default.add_Cauchy()
loc = default.get_dataset_location('sigma')
print(loc)
with h5py.File(default.fname,'r') as f:
created_first = f[loc[0]].attrs['Created'].decode()
created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')
if overwrite == 'on':
default.enable_overwrite()
else:
default.disable_overwrite()
time.sleep(2.)
default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
with h5py.File(default.fname,'r') as f:
created_second = f[loc[0]].attrs['Created'].decode()
created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
if overwrite == 'on':
assert created_first < created_second and np.allclose(default.read_dataset(loc),311.)
else:
assert created_first == created_second and not np.allclose(default.read_dataset(loc),311.)
@pytest.mark.parametrize('output',['F',[],['F','P']])
def test_vtk(self,tmp_path,default,output):
os.chdir(tmp_path)
default.to_vtk(output)
def test_XDMF(self,tmp_path,single_phase):
os.chdir(tmp_path)
single_phase.write_XDMF()

View File

@ -866,12 +866,23 @@ class TestRotation:
with pytest.raises(ValueError):
function(invalid_shape)
@pytest.mark.parametrize('shape',[None,(3,),(4,2)])
def test_broadcast(self,shape):
rot = Rotation.from_random(shape)
new_shape = tuple(np.random.randint(8,32,(3))) if shape is None else \
rot.shape + (np.random.randint(8,32),)
rot_broadcast = rot.broadcast_to(tuple(new_shape))
for i in range(rot_broadcast.shape[-1]):
assert np.allclose(rot_broadcast.quaternion[...,i,:], rot.quaternion)
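
A minimal sketch of the broadcasting behaviour this test checks, with fixed shapes for readability (an illustration, not part of the test suite):

import numpy as np
from damask import Rotation

rot  = Rotation.from_random((3,))      # three random rotations
many = rot.broadcast_to((3,5))         # repeat each rotation along a new trailing axis
for i in range(5):
    assert np.allclose(many.quaternion[:,i,:],rot.quaternion)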
@pytest.mark.parametrize('function,invalid',[(Rotation.from_quaternion, np.array([-1,0,0,0])),
(Rotation.from_quaternion, np.array([1,1,1,0])),
(Rotation.from_Eulers, np.array([1,4,0])),
(Rotation.from_axis_angle, np.array([1,0,0,4])),
(Rotation.from_axis_angle, np.array([1,1,0,1])),
(Rotation.from_matrix, np.random.rand(3,3)),
(Rotation.from_matrix, np.array([[1,1,0],[1,2,0],[0,0,1]])),
(Rotation.from_Rodrigues, np.array([1,0,0,-1])),
(Rotation.from_Rodrigues, np.array([1,1,0,1])),
(Rotation.from_homochoric, np.array([2,2,2])),

View File

@ -63,25 +63,25 @@ class TestMechanics:
assert np.allclose(np.matmul(R,U),
np.matmul(V,R))
@pytest.mark.parametrize('m',[0.0,np.random.random()*10.,np.random.random()*-10.])
def test_strain_tensor_no_rotation(self,m):
"""Ensure that left and right stretch give same results for no rotation."""
F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.rand(self.n,3,3)
assert np.allclose(mechanics.strain_tensor(F,'U',m),
mechanics.strain_tensor(F,'V',m))
@pytest.mark.parametrize('m',[0.0,np.random.random()*2.5,np.random.random()*-2.5])
def test_strain_tensor_rotation_equivalence(self,m):
"""Ensure that left and right strain differ only by a rotation."""
F = np.broadcast_to(np.eye(3),[self.n,3,3]) + (np.random.rand(self.n,3,3)*0.5 - 0.25)
assert np.allclose(np.linalg.det(mechanics.strain_tensor(F,'U',m)),
np.linalg.det(mechanics.strain_tensor(F,'V',m)))
@pytest.mark.parametrize('m',[0.0,np.random.random(),np.random.random()*-1.])
@pytest.mark.parametrize('t',['V','U'])
def test_strain_tensor_rotation(self,m,t):
"""Ensure that pure rotation results in no strain."""
F = mechanics.rotational_part(np.random.rand(self.n,3,3))
assert np.allclose(mechanics.strain_tensor(F,t,m),
0.0)
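
For reference, m selects a member of the Seth-Hill strain family, E_m = (U^(2m) - I)/(2m) for m != 0 and ln U for m = 0; the sketch below restates that definition from F via the right Cauchy-Green tensor (an illustration of the convention the tests rely on, not the mechanics module itself):

import numpy as np

def seth_hill_strain(F,m):
    # Seth-Hill strain of order m based on the right stretch, from the deformation gradient F
    C   = np.einsum('...ki,...kj',F,F)            # right Cauchy-Green tensor C = F^T F
    w,n = np.linalg.eigh(C)                       # w are the squared principal stretches
    d   = 0.5*np.log(w) if m == 0.0 else (w**m - 1.0)/(2.0*m)
    return np.einsum('...ij,...j,...kj',n,d,n)    # reassemble the symmetric strain tensor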

View File

@ -9,8 +9,6 @@
!> by DAMASK. Interpreting the command line arguments to get load case, geometry file,
!> and working directory.
!--------------------------------------------------------------------------------------------------
#define GCC_MIN 6
#define INTEL_MIN 1700
#define PETSC_MAJOR 3
#define PETSC_MINOR_MIN 10
#define PETSC_MINOR_MAX 13
@ -50,29 +48,6 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine DAMASK_interface_init
#include <petsc/finclude/petscsys.h>
#if defined(__GFORTRAN__) && __GNUC__<GCC_MIN
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
=============== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ======================
================== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ===================
===================== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ================
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
#endif
#if defined(__INTEL_COMPILER) && __INTEL_COMPILER<INTEL_MIN
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
================= THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION =======================
==================== THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION ====================
======================= THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION =================
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
#endif
#if PETSC_VERSION_MAJOR!=3 || PETSC_VERSION_MINOR<PETSC_MINOR_MIN || PETSC_VERSION_MINOR>PETSC_MINOR_MAX
===================================================================================================

View File

@ -65,7 +65,7 @@ subroutine results_init(restart)
character(len=pStringLen) :: commandLine
write(6,'(/,a)') ' <<<+- results init -+>>>'; flush(6)
write(6,'(/,a)') ' Diehl et al., Integrating Materials and Manufacturing Innovation 6(1):83–91, 2017'
write(6,'(a)') ' https://doi.org/10.1007/s40192-017-0084-5'
@ -311,6 +311,8 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIuni
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeScalarDataset_real
@ -340,6 +342,8 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIuni
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeVectorDataset_real
@ -391,6 +395,8 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeTensorDataset_real
@ -421,6 +427,8 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeVectorDataset_int
@ -451,6 +459,8 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeTensorDataset_int
@ -481,6 +491,8 @@ subroutine results_writeScalarDataset_rotation(group,dataset,label,description,l
call HDF5_addAttribute(groupHandle,'Lattice',lattice_structure,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeScalarDataset_rotation
@ -756,6 +768,21 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
end subroutine results_mapping_materialpoint
!--------------------------------------------------------------------------------------------------
!> @brief current date and time (including time zone information)
!--------------------------------------------------------------------------------------------------
character(len=24) function now()
character(len=5) :: zone
integer, dimension(8) :: values
call date_and_time(values=values,zone=zone)
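! produces e.g. "2020-05-30 20:12:19+0200", i.e. the '%Y-%m-%d %H:%M:%S%z' pattern parsed in the Python tests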
write(now,'(i4.4,5(a,i2.2),a)') &
values(1),'-',values(2),'-',values(3),' ',values(5),':',values(6),':',values(7),zone
end function now
!!--------------------------------------------------------------------------------------------------
!!> @brief adds the backward mapping from spatial position and constituent ID to results
!!--------------------------------------------------------------------------------------------------

View File

@ -105,6 +105,10 @@ subroutine rotations_init
call quaternions_init
write(6,'(/,a)') ' <<<+- rotations init -+>>>'; flush(6)
write(6,'(/,a)') ' Rowenhorst et al., Modelling and Simulation in Materials Science and Engineering 23:083501, 2015'
write(6,'(a)') ' https://doi.org/10.1088/0965-0393/23/8/083501'
call selfTest
end subroutine rotations_init