Merge branch 'development' into negativeRange

commit a04968d43f
@@ -1174,7 +1174,7 @@ end subroutine plastic_disloUCLA_dotState
 function plastic_disloUCLA_postResults(Tstar_v,Temperature,ipc,ip,el)
  use prec, only: &
    tol_math_check, &
-   dEq0
+   dEq
  use math, only: &
    pi
  use material, only: &

@@ -1402,7 +1402,7 @@ function plastic_disloUCLA_postResults(Tstar_v,Temperature,ipc,ip,el)
        c = c + ns
      elseif(plastic_disloUCLA_outputID(o,instance) == stress_exponent_ID) then
        do j = 1_pInt, ns
-         if (dEq0(gdot_slip_pos(j)+gdot_slip_neg(j))) then
+         if (dEq(gdot_slip_pos(j)+gdot_slip_neg(j),0.0_pReal)) then
           plastic_disloUCLA_postResults(c+j) = 0.0_pReal
          else
           plastic_disloUCLA_postResults(c+j) = (tau_slip_pos(j)+tau_slip_neg(j))/&
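The one functional change in this file swaps the compare-to-zero shorthand dEq0(x) for the two-argument form dEq(x,0.0_pReal); both come from DAMASK's prec module and test floating-point equality within a tolerance rather than with ==. A minimal Python sketch of the idea (the tolerance policy here is illustrative, not DAMASK's exact one):

    def d_eq(a, b, tol=None):
        # equality within an absolute tolerance scaled to the operands' magnitude
        eps = tol if tol is not None else 1.0e-12 * max(1.0, abs(a), abs(b))
        return abs(a - b) <= eps

    def d_eq0(a, tol=None):
        # compare-to-zero convenience wrapper, mirroring dEq0(x) == dEq(x, 0.0)
        return d_eq(a, 0.0, tol)

    assert d_eq0(1.0e-15) and not d_eq0(1.0e-3)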
@@ -84,7 +84,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def _quote(self,
              what):
-    """quote empty or white space-containing output"""
+    """Quote empty or white space-containing output"""
     import re

     return '{quote}{content}{quote}'.format(

@@ -107,7 +107,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def output_write(self,
                    what):
-    """aggregate a single row (string) or list of (possibly containing further lists of) rows into output"""
+    """Aggregate a single row (string) or list of (possibly containing further lists of) rows into output"""
     if not isinstance(what, (str, unicode)):
       try:
         for item in what: self.output_write(item)

@@ -147,7 +147,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def head_read(self):
     """
-    get column labels
+    Get column labels

     by either reading the first row or,
     if keyword "head[*]" is present, the last line of the header

@@ -200,7 +200,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def head_write(self,
                  header = True):
-    """write current header information (info + labels)"""
+    """Write current header information (info + labels)"""
     head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
     head.append(self.info)
     if self.__IO__['labeled']: head.append('\t'.join(map(self._quote,self.tags)))

@@ -209,7 +209,7 @@ class ASCIItable():

 # ------------------------------------------------------------------
   def head_getGeom(self):
-    """interpret geom header"""
+    """Interpret geom header"""
     identifiers = {
       'grid': ['a','b','c'],
       'size': ['x','y','z'],

@@ -249,7 +249,7 @@ class ASCIItable():

 # ------------------------------------------------------------------
   def head_putGeom(self,info):
-    """translate geometry description to header"""
+    """Translate geometry description to header"""
     self.info_append([
       "grid\ta {}\tb {}\tc {}".format(*info['grid']),
       "size\tx {}\ty {}\tz {}".format(*info['size']),

@@ -262,7 +262,7 @@ class ASCIItable():
   def labels_append(self,
                     what,
                     reset = False):
-    """add item or list to existing set of labels (and switch on labeling)"""
+    """Add item or list to existing set of labels (and switch on labeling)"""
     if not isinstance(what, (str, unicode)):
       try:
         for item in what: self.labels_append(item)

@@ -276,7 +276,7 @@ class ASCIItable():

 # ------------------------------------------------------------------
   def labels_clear(self):
-    """delete existing labels and switch to no labeling"""
+    """Delete existing labels and switch to no labeling"""
     self.tags = []
     self.__IO__['labeled'] = False

@@ -285,7 +285,7 @@ class ASCIItable():
              tags = None,
              raw = False):
     """
-    tell abstract labels.
+    Tell abstract labels.

     "x" for "1_x","2_x",... unless raw output is requested.
     operates on object tags or given list.

@@ -322,7 +322,7 @@ class ASCIItable():
   def label_index(self,
                   labels):
     """
-    tell index of column label(s).
+    Tell index of column label(s).

     return numpy array if asked for list of labels.
     transparently deals with label positions implicitly given as numbers or their headings given as strings.

@@ -363,7 +363,7 @@ class ASCIItable():
   def label_dimension(self,
                       labels):
     """
-    tell dimension (length) of column label(s).
+    Tell dimension (length) of column label(s).

     return numpy array if asked for list of labels.
     transparently deals with label positions implicitly given as numbers or their headings given as strings.

@@ -417,7 +417,7 @@ class ASCIItable():
   def label_indexrange(self,
                        labels):
     """
-    tell index range for given label(s).
+    Tell index range for given label(s).

     return numpy array if asked for list of labels.
     transparently deals with label positions implicitly given as numbers or their headings given as strings.

@@ -434,7 +434,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def info_append(self,
                   what):
-    """add item or list to existing set of infos"""
+    """Add item or list to existing set of infos"""
     if not isinstance(what, (str, unicode)):
       try:
         for item in what: self.info_append(item)

@@ -445,7 +445,7 @@ class ASCIItable():

 # ------------------------------------------------------------------
   def info_clear(self):
-    """delete any info block"""
+    """Delete any info block"""
     self.info = []

 # ------------------------------------------------------------------

@@ -458,7 +458,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def data_skipLines(self,
                      count):
-    """wind forward by count number of lines"""
+    """Wind forward by count number of lines"""
     for i in range(count):
       alive = self.data_read()

@@ -468,7 +468,7 @@ class ASCIItable():
   def data_read(self,
                 advance = True,
                 respectLabels = True):
-    """read next line (possibly buffered) and parse it into data array"""
+    """Read next line (possibly buffered) and parse it into data array"""
     import shlex

     self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \

@@ -490,7 +490,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def data_readArray(self,
                      labels = []):
-    """read whole data of all (given) labels as numpy array"""
+    """Read whole data of all (given) labels as numpy array"""
     from collections import Iterable

     try:

@@ -527,7 +527,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def data_write(self,
                  delimiter = '\t'):
-    """write current data array and report alive output back"""
+    """Write current data array and report alive output back"""
     if len(self.data) == 0: return True

     if isinstance(self.data[0],list):

@@ -539,7 +539,7 @@ class ASCIItable():
   def data_writeArray(self,
                       fmt = None,
                       delimiter = '\t'):
-    """write whole numpy array data"""
+    """Write whole numpy array data"""
     for row in self.data:
       try:
         output = [fmt % value for value in row] if fmt else list(map(repr,row))

@@ -562,7 +562,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
   def data_set(self,
                what, where):
-    """update data entry in column "where". grows data array if needed."""
+    """Update data entry in column "where". grows data array if needed."""
     idx = -1
     try:
       idx = self.label_index(where)

@@ -589,7 +589,7 @@ class ASCIItable():
                            grid,
                            type = 'i',
                            strict = False):
-    """read microstructure data (from .geom format)"""
+    """Read microstructure data (from .geom format)"""
     def datatype(item):
       return int(item) if type.lower() == 'i' else float(item)
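Every hunk in this file makes the same mechanical edit: the first word of each docstring summary line is capitalized. That matches the PEP 257 docstring conventions (summary written as a capitalized phrase), which docstring linters such as pydocstyle enforce, e.g. as check D403. The pattern, for reference:

    def head_read(self):
        """get column labels"""      # flagged by docstring linters (pydocstyle D403)

    def head_read(self):
        """Get column labels"""      # PEP 257 style: capitalized summary line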
@@ -5,11 +5,12 @@ import math,numpy as np
 ### --- COLOR CLASS --------------------------------------------------

 class Color():
-  """Conversion of colors between different color-spaces.
+  """
+  Conversion of colors between different color-spaces.

-  Colors should be given in the form
-  Color('model',[vector]).To convert and copy color from one space to other, use the methods
-  convertTo('model') and expressAs('model')spectively
+  Colors should be given in the form Color('model',[vector]).
+  To convert or copy color from one space to other, use the methods
+  convertTo('model') or expressAs('model'), respectively.
   """

   __slots__ = [
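The reflowed docstring is also the class's usage contract: construct as Color('model',[vector]), convert in place with convertTo('model'), or get a converted copy with expressAs('model'). A hedged sketch (vector values illustrative):

    red = Color('RGB',[1.0,0.0,0.0])    # construct in RGB space
    red.convertTo('HSL')                # in-place conversion to HSL
    msh = red.expressAs('MSH')          # converted copy; red itself stays HSL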
@@ -85,9 +86,9 @@ class Color():

   def _HSL2RGB(self):
     """
-    convert H(ue) S(aturation) L(uminance) to R(red) G(reen) B(lue)
+    Convert H(ue) S(aturation) L(uminance) to R(red) G(reen) B(lue)

-    with S,L,H,R,G,B running from 0 to 1
+    with all values in the range of 0 to 1
     from http://en.wikipedia.org/wiki/HSL_and_HSV
     """
     if self.model != 'HSL': return

@@ -111,9 +112,9 @@ class Color():

   def _RGB2HSL(self):
     """
-    convert R(ed) G(reen) B(lue) to H(ue) S(aturation) L(uminance)
+    Convert R(ed) G(reen) B(lue) to H(ue) S(aturation) L(uminance)

-    with S,L,H,R,G,B running from 0 to 1
+    with all values in the range of 0 to 1
     from http://130.113.54.154/~monger/hsl-rgb.html
     """
     if self.model != 'RGB': return

@@ -151,7 +152,7 @@ class Color():

   def _RGB2XYZ(self):
     """
-    convert R(ed) G(reen) B(lue) to CIE XYZ
+    Convert R(ed) G(reen) B(lue) to CIE XYZ

     with all values in the range of 0 to 1
     from http://www.cs.rit.edu/~ncs/color/t_convert.html

@@ -180,12 +181,13 @@ class Color():

   def _XYZ2RGB(self):
     """
-    convert CIE XYZ to R(ed) G(reen) B(lue)
+    Convert CIE XYZ to R(ed) G(reen) B(lue)

     with all values in the range of 0 to 1
     from http://www.cs.rit.edu/~ncs/color/t_convert.html
     """
-    if self.model != 'XYZ': return
+    if self.model != 'XYZ':
+      return

     convert = np.array([[ 3.240479,-1.537150,-0.498535],
                         [-0.969256, 1.875992, 0.041556],

@@ -211,7 +213,7 @@ class Color():

   def _CIELAB2XYZ(self):
     """
-    convert CIE Lab to CIE XYZ
+    Convert CIE Lab to CIE XYZ

     with XYZ in the range of 0 to 1
     from http://www.easyrgb.com/index.php?X=MATH&H=07#text7

@@ -235,10 +237,11 @@ class Color():

   def _XYZ2CIELAB(self):
     """
-    convert CIE XYZ to CIE Lab
+    Convert CIE XYZ to CIE Lab

     with XYZ in the range of 0 to 1
-    from http://en.wikipedia.org/wiki/Lab_color_space, http://www.cs.rit.edu/~ncs/color/t_convert.html
+    from http://en.wikipedia.org/wiki/Lab_color_space,
+    http://www.cs.rit.edu/~ncs/color/t_convert.html
     """
     if self.model != 'XYZ': return

@@ -258,7 +261,7 @@ class Color():

   def _CIELAB2MSH(self):
     """
-    convert CIE Lab to Msh colorspace
+    Convert CIE Lab to Msh colorspace

     from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
     """

@@ -278,9 +281,9 @@ class Color():

   def _MSH2CIELAB(self):
     """
-    convert Msh colorspace to CIE Lab
+    Convert Msh colorspace to CIE Lab

-    s,h in radians
+    with s,h in radians
     from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
     """
     if self.model != 'MSH': return

@@ -296,7 +299,7 @@ class Color():


 class Colormap():
-  """perceptually uniform diverging or sequential colormaps."""
+  """Perceptually uniform diverging or sequential colormaps."""

   __slots__ = [
     'left',

@@ -371,7 +374,7 @@ class Colormap():

 # ------------------------------------------------------------------
   def __repr__(self):
-    """left and right value of colormap"""
+    """Left and right value of colormap"""
     return 'Left: %s Right: %s'%(self.left,self.right)

@@ -415,11 +418,7 @@ class Colormap():
       return Color('MSH',Msh)

     def interpolate_linear(lo, hi, frac):
-      """
-      linearly interpolate color at given fraction between lower and
-
-      higher color in model of lower color
-      """
+      """Linear interpolation between lo and hi color at given fraction; output in model of lo color."""
       interpolation = (1.0 - frac) * np.array(lo.color[:]) \
                     + frac * np.array(hi.expressAs(lo.model).color[:])

@@ -443,10 +442,10 @@ class Colormap():
     """
     [RGB] colormap for use in paraview or gmsh, or as raw string, or array.

-    arguments: name, format, steps, crop.
-    format is one of (paraview, gmsh, raw, list).
-    crop selects a (sub)range in [-1.0,1.0].
-    generates sequential map if one limiting color is either white or black,
+    Arguments: name, format, steps, crop.
+    Format is one of (paraview, gmsh, raw, list).
+    Crop selects a (sub)range in [-1.0,1.0].
+    Generates sequential map if one limiting color is either white or black,
     diverging map otherwise.
     """
     format = format.lower()                                       # consistent comparison basis
@@ -456,9 +455,8 @@ class Colormap():
       colormap = ['[\n {{\n "ColorSpace" : "RGB", "Name" : "{}",\n "RGBPoints" : ['.format(name)] \
                 + [' {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],)
                    for i,color in enumerate(colors[:-1])]\
-                + [' {:4d},{:8.6f},{:8.6f},{:8.6f} '.format(i+1,colors[-1][0],colors[-1][1],colors[-1][2],)]\
+                + [' {:4d},{:8.6f},{:8.6f},{:8.6f} '.format(len(colors),colors[-1][0],colors[-1][1],colors[-1][2],)]\
                 + ['  ]\n }\n]']

     elif format == 'gmsh':
       colormap = ['View.ColorTable = {'] \
                 + [',\n'.join(['{%s}'%(','.join([str(x*255.0) for x in color])) for color in colors])] \
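The i+1 -> len(colors) change is a Python 3 fix, not cosmetics: i is the loop variable of the preceding list comprehension, and comprehension variables no longer leak into the enclosing scope in Python 3, so the old expression raises NameError there. The new form derives the final point's index from the data instead of a leaked variable (whether len(colors) or len(colors)-1 is the right final index depends on the enumeration; the scoping problem is the driver). Illustration:

    colors = ['red', 'green', 'blue']
    labels = ['{} {}'.format(i, c) for i, c in enumerate(colors[:-1])]
    # print(i)          # Python 2 prints 1; Python 3 raises NameError (i is comprehension-local)
    print(len(colors))  # 3 -- well-defined under both versions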
@@ -481,4 +479,3 @@
       raise NameError('unknown color export format')

     return '\n'.join(colormap) + '\n' if type(colormap[0]) is str else colormap
-
@@ -104,7 +104,7 @@ class Material():
   __slots__ = ['data']

   def __init__(self,verbose=True):
-    """generates ordered list of parts"""
+    """Generates ordered list of parts"""
     self.parts = [
       'homogenization',
       'microstructure',

@@ -122,7 +122,7 @@ class Material():
     self.verbose = verbose

   def __repr__(self):
-    """returns current configuration to be used as material.config"""
+    """Returns current configuration to be used as material.config"""
     me = []
     for part in self.parts:
       if self.verbose: print('doing '+part)
@@ -24,7 +24,7 @@ except(NameError):


 def lables_to_path(label, dsXMLPath=None):
-    """read the xml definition file and return the path."""
+    """Read the XML definition file and return the path."""
     if dsXMLPath is None:
         # use the default storage layout in DS_HDF5.xml
         if "h5table.pyc" in __file__:
@@ -48,31 +48,32 @@ def lables_to_path(label, dsXMLPath=None):


 class H5Table(object):
-    """light weight interface class for h5py
+    """
+    Lightweight interface class for h5py

     DESCRIPTION
     -----------
     Interface/wrapper class for manipulating data in HDF5 with DAMASK
     specialized data structure.
-    -->try to maintain a minimal API design.
+    --> try to maintain a minimal API design.
     PARAMETERS
     ----------
     h5f_path: str
-        Absolute path the HDF5 file
+        Absolute path of the HDF5 file
     METHOD
     ------
-    del_entry()  -- Force delete attributes/group/datasets (Dangerous)
+    del_entry()  -- Force delete attributes/group/datasets (dangerous)
     get_attr()   -- Return attributes if possible
     add_attr()   -- Add NEW attributes to dataset/group (no force overwrite)
     get_data()   -- Retrieve data in numpy.ndarray
     add_data()   -- Add dataset to H5 file
-    get_cmdlog() -- Return the command used to generate the data if possible.
+    get_cmdlog() -- Return the command used to generate the data if possible
     NOTE
     ----
     1. As an interface class, it uses the lazy evaluation design
-       that read the data only when its absolutely necessary.
-    2. The command line used to generate new feature is stored with
+       that reads the data only when it is absolutely necessary.
+    2. The command line used to generate each new feature is stored with
        each dataset as dataset attribute.

     """
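The docstring doubles as the API summary. A hedged usage sketch built only from the methods it lists (constructor arguments, file path, and feature names are assumptions for illustration):

    t = H5Table('/tmp/results.h5')             # wrap an existing DAMASK HDF5 file
    eps = t.get_data('strain')                 # lazy read -> numpy.ndarray
    t.add_data('strain_smooth', eps,           # add a new dataset ...
               cmd_log='smoothData -k 3')      # ... with its provenance attached
    print(t.get_cmdlog('strain_smooth'))       # command recorded as dataset attribute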
@@ -85,7 +86,7 @@ class H5Table(object):
             h5f['/'].attrs['description'] = msg

     def del_entry(self, feature_name):
-        """delete entry in HDF5 table"""
+        """Delete entry in HDF5 table"""
         dataType, h5f_path = lables_to_path(feature_name,
                                             dsXMLPath=self.dsXMLFile)
         with h5py.File(self.h5f_path, 'a') as h5f:

@@ -106,7 +107,7 @@ class H5Table(object):
             h5f.flush()

     def get_data(self, feature_name=None):
-        """extract dataset from HDF5 table and return it in a numpy array"""
+        """Extract dataset from HDF5 table and return it in a numpy array"""
         dataType, h5f_path = lables_to_path(feature_name,
                                             dsXMLPath=self.dsXMLFile)
         with h5py.File(self.h5f_path, 'a') as h5f:

@@ -116,7 +117,7 @@ class H5Table(object):
             return rst_data

     def add_data(self, feature_name, dataset, cmd_log=None):
-        """adding new feature into existing HDF5 file"""
+        """Adding new feature into existing HDF5 file"""
         dataType, h5f_path = lables_to_path(feature_name,
                                             dsXMLPath=self.dsXMLFile)
         with h5py.File(self.h5f_path, 'a') as h5f:
@@ -126,8 +127,7 @@ class H5Table(object):
             # record its state as fresh in the cmd log.
             try:
                 del h5f[h5f_path]
-                print "***deleting old {} from {}".format(feature_name,
-                                                          self.h5f_path)
+                print("***deleting old {} from {}".format(feature_name,self.h5f_path))
             except:
                 # if no cmd log, None will used
                 cmd_log = str(cmd_log) + " [FRESH]"
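This hunk converts a Python 2 print statement (split over a continuation line) into the print() function, which Python 3 requires; the statement form is a SyntaxError there. The portable pattern:

    from __future__ import print_function      # lets Python 2 accept the function form

    feature_name, h5f_path = 'strain', '/tmp/results.h5'
    print("***deleting old {} from {}".format(feature_name, h5f_path))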
@@ -138,7 +138,7 @@ class H5Table(object):
             h5f.flush()

     def get_cmdlog(self, feature_name):
-        """get cmd history used to generate the feature"""
+        """Get cmd history used to generate the feature"""
         dataType, h5f_path = lables_to_path(feature_name,
                                             dsXMLPath=self.dsXMLFile)
         with h5py.File(self.h5f_path, 'a') as h5f:
@@ -28,21 +28,21 @@ class Rodrigues:
 # ******************************************************************************************
 class Quaternion:
   """
-  Orientation represented as unit quaternion
+  Orientation represented as unit quaternion.

-  All methods and naming conventions based on http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions
+  All methods and naming conventions based on http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions.

-  w is the real part, (x, y, z) are the imaginary parts
+  w is the real part, (x, y, z) are the imaginary parts.
   Representation of rotation is in ACTIVE form!
-  (derived directly or through angleAxis, Euler angles, or active matrix)
-  vector "a" (defined in coordinate system "A") is actively rotated to new coordinates "b"
+  (Derived directly or through angleAxis, Euler angles, or active matrix)
+  Vector "a" (defined in coordinate system "A") is actively rotated to new coordinates "b".
   b = Q * a
   b = np.dot(Q.asMatrix(),a)
   """

   def __init__(self,
                quatArray = [1.0,0.0,0.0,0.0]):
-    """initializes to identity if not given"""
+    """Initializes to identity if not given"""
     self.w, \
     self.x, \
     self.y, \
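The punctuation fixes restate the convention that matters: quaternions act as ACTIVE rotations, b = Q * a. A hedged numeric check, assuming __mul__ accepts a plain 3-vector as the docstring's b = Q * a suggests (values encode a 90 degree rotation about z):

    import math
    q = Quaternion([math.cos(math.pi/4), 0.0, 0.0, math.sin(math.pi/4)])   # 90 deg about z
    b = q * [1.0, 0.0, 0.0]        # actively rotate the x unit vector
    # expect b close to [0.0, 1.0, 0.0]: x maps onto y under an active +90 deg z-rotation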
@@ -50,23 +50,23 @@ class Quaternion:
     self.homomorph()

   def __iter__(self):
-    """components"""
+    """Components"""
     return iter([self.w,self.x,self.y,self.z])

   def __copy__(self):
-    """create copy"""
+    """Create copy"""
     Q = Quaternion([self.w,self.x,self.y,self.z])
     return Q

   copy = __copy__

   def __repr__(self):
-    """readbable string"""
+    """Readbable string"""
     return 'Quaternion(real=%+.6f, imag=<%+.6f, %+.6f, %+.6f>)' % \
            (self.w, self.x, self.y, self.z)

   def __pow__(self, exponent):
-    """power"""
+    """Power"""
     omega = math.acos(self.w)
     vRescale = math.sin(exponent*omega)/math.sin(omega)
     Q = Quaternion()

@@ -77,7 +77,7 @@ class Quaternion:
     return Q

   def __ipow__(self, exponent):
-    """in place power"""
+    """In-place power"""
     omega = math.acos(self.w)
     vRescale = math.sin(exponent*omega)/math.sin(omega)
     self.w = np.cos(exponent*omega)

@@ -87,7 +87,7 @@ class Quaternion:
     return self

   def __mul__(self, other):
-    """multiplication"""
+    """Multiplication"""
     try:                                        # quaternion
       Aw = self.w
       Ax = self.x

@@ -135,7 +135,7 @@ class Quaternion:
       return self.copy()

   def __imul__(self, other):
-    """in place multiplication"""
+    """In-place multiplication"""
     try:                                        # Quaternion
       Ax = self.x
       Ay = self.y

@@ -153,7 +153,7 @@ class Quaternion:
     return self

   def __div__(self, other):
-    """division"""
+    """Division"""
     if isinstance(other, (int,float)):
       w = self.w / other
       x = self.x / other

@@ -164,7 +164,7 @@ class Quaternion:
     return NotImplemented

   def __idiv__(self, other):
-    """in place division"""
+    """In-place division"""
     if isinstance(other, (int,float)):
       self.w /= other
       self.x /= other

@@ -173,7 +173,7 @@ class Quaternion:
     return self

   def __add__(self, other):
-    """addition"""
+    """Addition"""
     if isinstance(other, Quaternion):
       w = self.w + other.w
       x = self.x + other.x

@@ -184,7 +184,7 @@ class Quaternion:
     return NotImplemented

   def __iadd__(self, other):
-    """in place division"""
+    """In-place addition"""
     if isinstance(other, Quaternion):
       self.w += other.w
       self.x += other.x

@@ -193,7 +193,7 @@ class Quaternion:
     return self

   def __sub__(self, other):
-    """subtraction"""
+    """Subtraction"""
     if isinstance(other, Quaternion):
       Q = self.copy()
       Q.w -= other.w

@@ -205,7 +205,7 @@ class Quaternion:
     return self.copy()

   def __isub__(self, other):
-    """in place subtraction"""
+    """In-place subtraction"""
     if isinstance(other, Quaternion):
       self.w -= other.w
       self.x -= other.x

@@ -214,7 +214,7 @@ class Quaternion:
     return self

   def __neg__(self):
-    """additive inverse"""
+    """Additive inverse"""
     self.w = -self.w
     self.x = -self.x
     self.y = -self.y

@@ -222,7 +222,7 @@ class Quaternion:
     return self

   def __abs__(self):
-    """norm"""
+    """Norm"""
     return math.sqrt(self.w ** 2 + \
                      self.x ** 2 + \
                      self.y ** 2 + \

@@ -231,7 +231,7 @@ class Quaternion:
   magnitude = __abs__

   def __eq__(self,other):
-    """equal at e-8 precision"""
+    """Equal at e-8 precision"""
     return (abs(self.w-other.w) < 1e-8 and \
             abs(self.x-other.x) < 1e-8 and \
             abs(self.y-other.y) < 1e-8 and \
@@ -243,12 +243,12 @@ class Quaternion:
             abs(-self.z-other.z) < 1e-8)

   def __ne__(self,other):
-    """not equal at e-8 precision"""
+    """Not equal at e-8 precision"""
     return not self.__eq__(self,other)

   def __cmp__(self,other):
-    """linear ordering"""
-    return cmp(self.Rodrigues(),other.Rodrigues())
+    """Linear ordering"""
+    return (self.Rodrigues()>other.Rodrigues()) - (self.Rodrigues()<other.Rodrigues())

   def magnitude_squared(self):
     return self.w ** 2 + \
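The __cmp__ body is a Python 3 fix: the built-in cmp() was removed, and (a > b) - (a < b) reproduces its -1/0/+1 contract from two boolean comparisons. (Python 3 also no longer calls __cmp__ itself, so full ordering support would eventually need rich comparison methods; the hunk only preserves the return convention.) For reference:

    def three_way(a, b):
        # -1, 0, or +1, like Python 2's cmp(a, b)
        return (a > b) - (a < b)

    assert three_way(1, 2) == -1
    assert three_way(2, 2) == 0
    assert three_way(3, 2) == +1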
@@ -339,12 +339,12 @@ class Quaternion:
                     degrees = False,
                     standardRange = False):
     """
-    Orientation as Bunge-Euler angles
+    Orientation as Bunge-Euler angles.

-    conversion of ACTIVE rotation to Euler angles taken from:
+    Conversion of ACTIVE rotation to Euler angles taken from:
     Melcher, A.; Unser, A.; Reichhardt, M.; Nestler, B.; Poetschke, M.; Selzer, M.
     Conversion of EBSD data by a quaternion based algorithm to be used for grain structure simulations
-    Technische Mechanik 30 (2010) pp 401--413
+    Technische Mechanik 30 (2010) pp 401--413.
     """
     angles = [0.0,0.0,0.0]
@@ -508,10 +508,10 @@ class Quaternion:
   @classmethod
   def new_interpolate(cls, q1, q2, t):
     """
-    interpolation
+    Interpolation

-    see http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/20070017872_2007014421.pdf
-    for (another?) way to interpolate quaternions
+    See http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/20070017872_2007014421.pdf
+    for (another?) way to interpolate quaternions.
     """
     assert isinstance(q1, Quaternion) and isinstance(q2, Quaternion)
     Q = cls()
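For orientation: the textbook spherical linear interpolation (slerp) between unit quaternions, shown here as a self-contained sketch on plain 4-component arrays — not necessarily the exact scheme new_interpolate implements:

    import numpy as np

    def slerp(q1, q2, t):
        # spherical linear interpolation of unit quaternions given as 4-arrays
        cosOmega = np.dot(q1, q2)
        if cosOmega < 0.0:                     # pick the shorter great-circle arc
            q2, cosOmega = -q2, -cosOmega
        omega = np.arccos(np.clip(cosOmega, -1.0, 1.0))
        if np.isclose(omega, 0.0):             # nearly parallel: plain lerp suffices
            return (1.0 - t)*q1 + t*q2
        return (np.sin((1.0 - t)*omega)*q1 + np.sin(t*omega)*q2) / np.sin(omega)

    half = slerp(np.array([1.,0.,0.,0.]), np.array([0.,0.,0.,1.]), 0.5)   # halfway to 180 deg about z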
@@ -555,7 +555,7 @@ class Symmetry:
   lattices = [None,'orthorhombic','tetragonal','hexagonal','cubic',]

   def __init__(self, symmetry = None):
-    """lattice with given symmetry, defaults to None"""
+    """Lattice with given symmetry, defaults to None"""
     if isinstance(symmetry, str) and symmetry.lower() in Symmetry.lattices:
       self.lattice = symmetry.lower()
     else:

@@ -563,28 +563,30 @@ class Symmetry:


   def __copy__(self):
-    """copy"""
+    """Copy"""
     return self.__class__(self.lattice)

   copy = __copy__


   def __repr__(self):
-    """readbable string"""
+    """Readbable string"""
     return '%s' % (self.lattice)


   def __eq__(self, other):
-    """equal"""
+    """Equal"""
     return self.lattice == other.lattice

   def __neq__(self, other):
-    """not equal"""
+    """Not equal"""
     return not self.__eq__(other)

   def __cmp__(self,other):
-    """linear ordering"""
-    return cmp(Symmetry.lattices.index(self.lattice),Symmetry.lattices.index(other.lattice))
+    """Linear ordering"""
+    myOrder    = Symmetry.lattices.index(self.lattice)
+    otherOrder = Symmetry.lattices.index(other.lattice)
+    return (myOrder > otherOrder) - (myOrder < otherOrder)

   def symmetryQuats(self,who = []):
     """List of symmetry operations as quaternions."""

@@ -742,39 +744,39 @@

     if self.lattice == 'cubic':
       basis = {'improper':np.array([ [-1. , 0. , 1. ],
                [ np.sqrt(2.) , -np.sqrt(2.) , 0. ],
                [ 0. , np.sqrt(3.) , 0. ] ]),
                'proper':np.array([ [ 0. , -1. , 1. ],
                [-np.sqrt(2.) , np.sqrt(2.) , 0. ],
-               [ np.sqrt(3.) , 0. , 0. ] ]),
+               [ np.sqrt(3. ) , 0. , 0. ] ]),
               }
     elif self.lattice == 'hexagonal':
       basis = {'improper':np.array([ [ 0. , 0. , 1. ],
-               [ 1. , -np.sqrt(3.), 0. ],
+               [ 1. , -np.sqrt(3.) , 0. ],
                [ 0. , 2. , 0. ] ]),
                'proper':np.array([ [ 0. , 0. , 1. ],
                [-1. , np.sqrt(3.) , 0. ],
                [ np.sqrt(3) , -1. , 0. ] ]),
               }
     elif self.lattice == 'tetragonal':
       basis = {'improper':np.array([ [ 0. , 0. , 1. ],
                [ 1. , -1. , 0. ],
-               [ 0. , np.sqrt(2.), 0. ] ]),
+               [ 0. , np.sqrt(2.) , 0. ] ]),
                'proper':np.array([ [ 0. , 0. , 1. ],
                [-1. , 1. , 0. ],
                [ np.sqrt(2.) , 0. , 0. ] ]),
               }
     elif self.lattice == 'orthorhombic':
       basis = {'improper':np.array([ [ 0., 0., 1.],
                [ 1., 0., 0.],
                [ 0., 1., 0.] ]),
                'proper':np.array([ [ 0., 0., 1.],
                [-1., 0., 0.],
                [ 0., 1., 0.] ]),
               }
     else:
-      basis = {'improper':np.zeros((3,3),dtype=float),
-               'proper':np.zeros((3,3),dtype=float),
+      basis = {'improper': np.zeros((3,3),dtype=float),
+               'proper':   np.zeros((3,3),dtype=float),
               }

     if np.all(basis == 0.0):

@@ -845,14 +847,14 @@ class Orientation:
     self.symmetry = Symmetry(symmetry)

   def __copy__(self):
-    """copy"""
+    """Copy"""
     return self.__class__(quaternion=self.quaternion,symmetry=self.symmetry.lattice)

   copy = __copy__


   def __repr__(self):
-    """value as all implemented representations"""
+    """Value as all implemented representations"""
     return 'Symmetry: %s\n' % (self.symmetry) + \
            'Quaternion: %s\n' % (self.quaternion) + \
            'Matrix:\n%s\n' % ( '\n'.join(['\t'.join(map(str,self.asMatrix()[i,:])) for i in range(3)]) ) + \

@@ -937,7 +939,7 @@
                     axis,
                     proper = False,
                     SST = True):
-    """axis rotated according to orientation (using crystal symmetry to ensure location falls into SST)"""
+    """Axis rotated according to orientation (using crystal symmetry to ensure location falls into SST)"""
     if SST:                                     # pole requested to be within SST
       for i,q in enumerate(self.symmetry.equivalentQuaternions(self.quaternion)):   # test all symmetric equivalent quaternions
         pole = q.conjugated()*axis              # align crystal direction to axis

@@ -963,7 +965,7 @@
                 orientations,
                 multiplicity = []):
     """
-    average orientation
+    Average orientation

     ref: F. Landis Markley, Yang Cheng, John Lucas Crassidis, and Yaakov Oshman.
          Averaging Quaternions,

@@ -996,7 +998,7 @@
                     direction,
                     targetSymmetry = None):
     """
-    orientation relationship
+    Orientation relationship

     positive number: fcc --> bcc
     negative number: bcc --> fcc
@@ -13,7 +13,6 @@ import numpy as np
 import py_post                                  # MSC closed source module to access marc result files

 class MARC_POST():
-  import re
   def __init__(self):
     self.projdir='./'

@@ -383,7 +382,6 @@ class VTK_WRITER():
     to plot semi-transparent iso-surfaces.
     """

-  import re
   def __init__(self):
     self.p=MARC_POST()                          # self.p
@@ -1,6 +1,5 @@
 # -*- coding: UTF-8 no BOM -*-

-
 import os,sys,shutil
 import logging,logging.config
 import damask

@@ -191,7 +190,7 @@ class Test():
   def copy(self, mapA, mapB,
            A = [], B = []):
     """
-    copy list of files from (mapped) source to target.
+    Copy list of files from (mapped) source to target.

     mapA/B is one of self.fileInX.
     """
@@ -382,7 +381,7 @@ class Test():
             line0 += 1

     for i in range(dataLength):
-      if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))]
+      if not perLine: norm[i] = [np.max(norm[i]) for j in range(line0-len(skipLines))]
       data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]])
       if any(norm[i]) == 0.0 or absTol[i]:
         norm[i] = [1.0 for j in range(line0-len(skipLines))]
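Another Python 3 port: xrange() no longer exists, and Python 3's range() is already a lazy sequence, so the swap preserves behavior (under Python 2 the plain range() merely builds a list up front). A common shim for code that must run on both:

    try:
        range = xrange         # Python 2: rebind to the lazy variant
    except NameError:
        pass                   # Python 3: range is already lazy

    assert sum(j for j in range(5)) == 10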
@@ -425,7 +424,7 @@ class Test():
                     stdTol = 1.0e-6,
                     preFilter = 1.0e-9):
     """
-    calculate statistics of tables
+    Calculate statistics of tables

     threshold can be used to ignore small values (a negative number disables this feature)
     """

@@ -478,7 +477,7 @@ class Test():
                        rtol = 1e-5,
                        atol = 1e-8,
                        debug = False):
-    """compare multiple tables with np.allclose"""
+    """Compare multiple tables with np.allclose"""
     if not (isinstance(files, Iterable) and not isinstance(files, str)):   # check whether list of files is requested
       files = [str(files)]
@ -1,6 +1,5 @@
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
# damask utility functions
|
|
||||||
import sys,time,random,threading,os,subprocess,shlex
|
import sys,time,random,threading,os,subprocess,shlex
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import Option
|
from optparse import Option
|
||||||
|
@ -36,7 +35,7 @@ class bcolors:
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
def srepr(arg,glue = '\n'):
|
def srepr(arg,glue = '\n'):
|
||||||
"""joins arguments as individual lines"""
|
"""Joins arguments as individual lines"""
|
||||||
if (not hasattr(arg, "strip") and
|
if (not hasattr(arg, "strip") and
|
||||||
hasattr(arg, "__getitem__") or
|
hasattr(arg, "__getitem__") or
|
||||||
hasattr(arg, "__iter__")):
|
hasattr(arg, "__iter__")):
|
||||||
|
@ -45,21 +44,21 @@ def srepr(arg,glue = '\n'):
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
def croak(what, newline = True):
|
def croak(what, newline = True):
|
||||||
"""writes formated to stderr"""
|
"""Writes formated to stderr"""
|
||||||
sys.stderr.write(srepr(what,glue = '\n') + ('\n' if newline else ''))
|
sys.stderr.write(srepr(what,glue = '\n') + ('\n' if newline else ''))
|
||||||
sys.stderr.flush()
|
sys.stderr.flush()
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
def report(who = None,
|
def report(who = None,
|
||||||
what = None):
|
what = None):
|
||||||
"""reports script and file name"""
|
"""Reports script and file name"""
|
||||||
croak( (emph(who)+': ' if who else '') + (what if what else '') )
|
croak( (emph(who)+': ' if who else '') + (what if what else '') )
|
||||||
|
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
def report_geom(info,
|
def report_geom(info,
|
||||||
what = ['grid','size','origin','homogenization','microstructures']):
|
what = ['grid','size','origin','homogenization','microstructures']):
|
||||||
"""reports (selected) geometry information"""
|
"""Reports (selected) geometry information"""
|
||||||
output = {
|
output = {
|
||||||
'grid' : 'grid a b c: {}'.format(' x '.join(map(str,info['grid' ]))),
|
'grid' : 'grid a b c: {}'.format(' x '.join(map(str,info['grid' ]))),
|
||||||
'size' : 'size x y z: {}'.format(' x '.join(map(str,info['size' ]))),
|
'size' : 'size x y z: {}'.format(' x '.join(map(str,info['size' ]))),
|
||||||
|
@ -71,24 +70,24 @@ def report_geom(info,
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
def emph(what):
|
def emph(what):
|
||||||
"""boldens string"""
|
"""Boldens string"""
|
||||||
return bcolors.BOLD+srepr(what)+bcolors.ENDC
|
return bcolors.BOLD+srepr(what)+bcolors.ENDC
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
def deemph(what):
|
def deemph(what):
|
||||||
"""dims string"""
|
"""Dims string"""
|
||||||
return bcolors.DIM+srepr(what)+bcolors.ENDC
|
return bcolors.DIM+srepr(what)+bcolors.ENDC
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
def delete(what):
|
def delete(what):
|
||||||
"""dims string"""
|
"""Dims string"""
|
||||||
return bcolors.DIM+srepr(what)+bcolors.ENDC
|
return bcolors.DIM+srepr(what)+bcolors.ENDC
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
def execute(cmd,
|
def execute(cmd,
|
||||||
streamIn = None,
|
streamIn = None,
|
||||||
wd = './'):
|
wd = './'):
|
||||||
"""executes a command in given directory and returns stdout and stderr for optional stdin"""
|
"""Executes a command in given directory and returns stdout and stderr for optional stdin"""
|
||||||
initialPath = os.getcwd()
|
initialPath = os.getcwd()
|
||||||
os.chdir(wd)
|
os.chdir(wd)
|
||||||
process = subprocess.Popen(shlex.split(cmd),
|
process = subprocess.Popen(shlex.split(cmd),
|
||||||
|
@ -127,7 +126,7 @@ def gridIndex(location,res):
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
class extendableOption(Option):
|
class extendableOption(Option):
|
||||||
"""
|
"""
|
||||||
used for definition of new option parser action 'extend', which enables to take multiple option arguments
|
Used for definition of new option parser action 'extend', which enables to take multiple option arguments
|
||||||
|
|
||||||
taken from online tutorial http://docs.python.org/library/optparse.html
|
taken from online tutorial http://docs.python.org/library/optparse.html
|
||||||
"""
|
"""
|
||||||
|
@ -146,7 +145,7 @@ class extendableOption(Option):
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
class backgroundMessage(threading.Thread):
|
class backgroundMessage(threading.Thread):
|
||||||
"""reporting with animation to indicate progress"""
|
"""Reporting with animation to indicate progress"""
|
||||||
|
|
||||||
choices = {'bounce': ['_', 'o', 'O', '°', '‾', '‾', '°', 'O', 'o', '_'],
|
choices = {'bounce': ['_', 'o', 'O', '°', '‾', '‾', '°', 'O', 'o', '_'],
|
||||||
'spin': ['◜', '◝', '◞', '◟'],
|
'spin': ['◜', '◝', '◞', '◟'],
|
||||||
|
@ -168,7 +167,7 @@ class backgroundMessage(threading.Thread):
|
||||||
}
|
}
|
||||||
|
|
||||||
def __init__(self,symbol = None,wait = 0.1):
|
def __init__(self,symbol = None,wait = 0.1):
|
||||||
"""sets animation symbol"""
|
"""Sets animation symbol"""
|
||||||
super(backgroundMessage, self).__init__()
|
super(backgroundMessage, self).__init__()
|
||||||
self._stop = threading.Event()
|
self._stop = threading.Event()
|
||||||
self.message = ''
|
self.message = ''
|
||||||
|
@ -179,7 +178,7 @@ class backgroundMessage(threading.Thread):
|
||||||
self.waittime = wait
|
self.waittime = wait
|
||||||
|
|
||||||
def __quit__(self):
|
def __quit__(self):
|
||||||
"""cleans output"""
|
"""Cleans output"""
|
||||||
length = len(self.symbols[self.counter] + self.gap + self.message)
|
length = len(self.symbols[self.counter] + self.gap + self.message)
|
||||||
sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length)
|
sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length)
|
||||||
sys.stderr.write('')
|
sys.stderr.write('')
|
||||||
|
@ -282,7 +281,7 @@ def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,
|
||||||
return grad
|
return grad
|
||||||
|
|
||||||
def _int2extFunc(bounds):
|
def _int2extFunc(bounds):
|
||||||
"""transform internal parameters into external parameters."""
|
"""Transform internal parameters into external parameters."""
|
||||||
local = [_int2extLocal(b) for b in bounds]
|
local = [_int2extLocal(b) for b in bounds]
|
||||||
def _transform_i2e(p_int):
|
def _transform_i2e(p_int):
|
||||||
p_ext = np.empty_like(p_int)
|
p_ext = np.empty_like(p_int)
|
||||||
|
@ -291,7 +290,7 @@ def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,
|
||||||
return _transform_i2e
|
return _transform_i2e
|
||||||
|
|
||||||
def _ext2intFunc(bounds):
|
def _ext2intFunc(bounds):
|
||||||
"""transform external parameters into internal parameters."""
|
"""Transform external parameters into internal parameters."""
|
||||||
local = [_ext2intLocal(b) for b in bounds]
|
local = [_ext2intLocal(b) for b in bounds]
|
||||||
def _transform_e2i(p_ext):
|
def _transform_e2i(p_ext):
|
||||||
p_int = np.empty_like(p_ext)
|
p_int = np.empty_like(p_ext)
|
||||||
@@ -300,7 +299,7 @@ def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,
 return _transform_e2i

 def _int2extLocal(bound):
-"""transform a single internal parameter to an external parameter."""
+"""Transform a single internal parameter to an external parameter."""
 lower, upper = bound
 if lower is None and upper is None: # no constraints
 return lambda x: x
@@ -312,7 +311,7 @@ def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,
 return lambda x: lower + ((upper - lower)/2.0)*(np.sin(x) + 1.0)

 def _ext2intLocal(bound):
-"""transform a single external parameter to an internal parameter."""
+"""Transform a single external parameter to an internal parameter."""
 lower, upper = bound
 if lower is None and upper is None: # no constraints
 return lambda x: x
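The `_int2extLocal`/`_ext2intLocal` pair in `leastsqBound` implements the classic trick of solving a box-constrained least-squares problem with an unconstrained optimizer: a bounded external parameter p in [lower, upper] is mapped to an unconstrained internal one x via lower + (upper-lower)/2 * (sin(x)+1), and back via arcsin. A minimal self-contained sketch of that idea (function and variable names hypothetical, not taken from the commit):

    import numpy as np
    from scipy.optimize import leastsq

    # Map an unconstrained internal x onto [lower, upper] via the sine transform;
    # the inverse uses arcsin, exactly as in _int2extLocal/_ext2intLocal above.
    def to_external(x, lower, upper):
        return lower + 0.5*(upper - lower)*(np.sin(x) + 1.0)

    def to_internal(p, lower, upper):
        return np.arcsin(2.0*(p - lower)/(upper - lower) - 1.0)

    def model(p, xdata): return p[0]*xdata          # toy 1-parameter model
    xdata = np.linspace(0.0, 1.0, 20)
    data  = 3.0*xdata

    def residuals(x_int):                           # leastsq only ever sees unconstrained x
        p = to_external(x_int, 0.0, 10.0)           # parameter forced into [0, 10]
        return data - model(p, xdata)

    x0 = to_internal(np.array([1.0]), 0.0, 10.0)
    x_opt, _ = leastsq(residuals, x0)
    print(to_external(x_opt, 0.0, 10.0))            # ~3.0, guaranteed inside the bounds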
@@ -469,4 +468,4 @@ def curve_fit_bound(f, xdata, ydata, p0=None, sigma=None, bounds=None, **kw):
 else:
 pcov = np.inf

 return (popt, pcov, infodict, errmsg, ier) if return_full else (popt, pcov)

@@ -11,15 +11,10 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])

 def runFit(exponent, eqStress, dimension, criterion):
-global s, threads, myFit, myLoad
-global fitResults, fitErrors, fitResidual, stressAll, strainAll
-global N_simulations, Guess, dDim
+global threads, myFit, myLoad
+global fitResidual
+global Guess, dDim

-fitResults = []
-fitErrors = []
-fitResidual = []
-Guess = []
-threads=[]
 dDim = dimension - 3
 nParas = len(fitCriteria[criterion]['bound'][dDim])
 nExpo = fitCriteria[criterion]['nExpo']

@@ -28,7 +23,7 @@ def runFit(exponent, eqStress, dimension, criterion):
 nParas = nParas-nExpo
 fitCriteria[criterion]['bound'][dDim] = fitCriteria[criterion]['bound'][dDim][:nParas]

-for i in xrange(nParas):
+for i in range(nParas):
 temp = fitCriteria[criterion]['bound'][dDim][i]
 if fitCriteria[criterion]['bound'][dDim][i] == (None,None):
 Guess.append(1.0)
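The recurring `xrange` to `range` substitution in this commit is the standard Python 2 to 3 migration: `xrange` no longer exists in Python 3, where `range` is itself a lazy sequence, so the change is behavior-preserving for iteration. A hedged compatibility sketch (not part of the commit) for code that must still run under both interpreters:

    import sys

    # On Python 2, make 'range' lazy like xrange; on Python 3 this is a no-op.
    if sys.version_info[0] < 3:
        range = xrange  # noqa: F821 -- xrange only exists on Python 2

    total = 0
    for i in range(10):   # iterates lazily under both interpreters
        total += i
    print(total)          # 45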
@@ -37,13 +32,9 @@ def runFit(exponent, eqStress, dimension, criterion):
 if g == 0: g = temp[1]*0.5
 Guess.append(g)

-N_simulations=0
-s=threading.Semaphore(1)
 myLoad = Loadcase(options.load[0],options.load[1],options.load[2],
 nSet = 10, dimension = dimension, vegter = options.criterion=='vegter')

-stressAll= [np.zeros(0,'d').reshape(0,0) for i in xrange(int(options.yieldValue[2]))]
-strainAll= [np.zeros(0,'d').reshape(0,0) for i in xrange(int(options.yieldValue[2]))]

 myFit = Criterion(exponent,eqStress, dimension, criterion)
 for t in range(options.threads):

@@ -57,12 +48,12 @@ def runFit(exponent, eqStress, dimension, criterion):

 def principalStresses(sigmas):
 """
-computes principal stresses (i.e. eigenvalues) for a set of Cauchy stresses.
+Computes principal stresses (i.e. eigenvalues) for a set of Cauchy stresses.

 sorted in descending order.
 """
 lambdas=np.zeros(0,'d')
-for i in xrange(np.shape(sigmas)[1]):
+for i in range(np.shape(sigmas)[1]):
 eigenvalues = np.linalg.eigvalsh(sym6toT33(sigmas[:,i]))
 lambdas = np.append(lambdas,np.sort(eigenvalues)[::-1]) #append eigenvalues in descending order
 lambdas = np.transpose(lambdas.reshape(np.shape(sigmas)[1],3))
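principalStresses obtains the principal values as eigenvalues of the symmetric 3x3 Cauchy stress tensor; np.linalg.eigvalsh is the right tool because it exploits symmetry, returns real eigenvalues, and sorts them in ascending order (hence the reversal above). A minimal sketch of the same computation -- the 6-component Voigt ordering below is an assumption, the script's sym6toT33 may use a different convention:

    import numpy as np

    def voigt6_to_tensor(s):
        """Assemble a symmetric 3x3 tensor from 6 components
        (ordering 11,22,33,12,23,13 assumed here)."""
        return np.array([[s[0], s[3], s[5]],
                         [s[3], s[1], s[4]],
                         [s[5], s[4], s[2]]])

    sigma = np.array([100., 50., 10., 30., 0., 0.])      # hypothetical stress state
    lam = np.linalg.eigvalsh(voigt6_to_tensor(sigma))    # ascending, real
    principal = lam[::-1]                                # descending, as in the script
    print(principal)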
@@ -82,7 +73,7 @@ def principalStress(p):
 t1 + t2*np.cos(phi+np.pi*2.0/3.0),
 t1 + t2*np.cos(phi+np.pi*4.0/3.0)])

-def principalStrs_Der(p, (s1, s2, s3, s4, s5, s6), dim, Karafillis=False):
+def principalStrs_Der(p, s, dim, Karafillis=False):
 """Derivative of principal stress with respect to stress"""
 third = 1.0/3.0
 third2 = 2.0*third
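The signature change from `def principalStrs_Der(p, (s1, s2, s3, s4, s5, s6), ...)` to `def principalStrs_Der(p, s, ...)` removes tuple parameter unpacking, which PEP 3113 dropped from Python 3; the components are then accessed by index in the body. A sketch of the general migration pattern (toy function, not from the commit):

    # Python 2 only -- tuple unpacking in the signature is a SyntaxError on Python 3:
    #   def norm2((x, y)):
    #       return (x**2 + y**2)**0.5

    # Python 3 style: take the sequence whole and unpack (or index) in the body.
    def norm2(v):
        x, y = v              # explicit unpacking replaces the old signature
        return (x**2 + y**2)**0.5

    print(norm2((3.0, 4.0)))  # 5.0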
@@ -111,31 +102,31 @@ def principalStrs_Der(p, s, dim, Karafillis=False):
 dSdI = np.array([dSidIj(phi),dSidIj(phi+np.pi*2.0/3.0),dSidIj(phi+np.pi*4.0/3.0)]) # i=1,2,3; j=1,2,3

 # calculate the derivation of principal stress with regards to the anisotropic coefficients
-one = np.ones_like(s1); zero = np.zeros_like(s1); num = len(s1)
+one = np.ones_like(s); zero = np.zeros_like(s); num = len(s)
 dIdp = np.array([[one, one, one, zero, zero, zero],
 [p[1]+p[2], p[2]+p[0], p[0]+p[1], -2.0*p[3], -2.0*p[4], -2.0*p[5]],
 [p[1]*p[2]-p[4]**2, p[2]*p[0]-p[5]**2, p[0]*p[1]-p[3]**2,
 -2.0*p[3]*p[2]+2.0*p[4]*p[5], -2.0*p[4]*p[0]+2.0*p[5]*p[3], -2.0*p[5]*p[1]+2.0*p[3]*p[4]] ])
 if Karafillis:
-dpdc = np.array([[zero,s1-s3,s1-s2], [s2-s3,zero,s2-s1], [s3-s2,s3-s1,zero]])/3.0
+dpdc = np.array([[zero,s[0]-s[2],s[0]-s[1]], [s[1]-s[2],zero,s[1]-s[0]], [s[2]-s[1],s[2]-s[0],zero]])/3.0
-dSdp = np.array([np.dot(dSdI[:,:,i],dIdp[:,:,i]).T for i in xrange(num)]).T
+dSdp = np.array([np.dot(dSdI[:,:,i],dIdp[:,:,i]).T for i in range(num)]).T
 if dim == 2:
-temp = np.vstack([dSdp[:,3]*s4]).T.reshape(num,1,3).T
+temp = np.vstack([dSdp[:,3]*s[3]]).T.reshape(num,1,3).T
 else:
-temp = np.vstack([dSdp[:,3]*s4,dSdp[:,4]*s5,dSdp[:,5]*s6]).T.reshape(num,3,3).T
+temp = np.vstack([dSdp[:,3]*s[3],dSdp[:,4]*s[4],dSdp[:,5]*s[5]]).T.reshape(num,3,3).T

-return np.concatenate((np.array([np.dot(dSdp[:,0:3,i], dpdc[:,:,i]).T for i in xrange(num)]).T,
+return np.concatenate((np.array([np.dot(dSdp[:,0:3,i], dpdc[:,:,i]).T for i in range(num)]).T,
 temp), axis=1)
 else:
 if dim == 2:
-dIdc=np.array([[-dIdp[i,0]*s2, -dIdp[i,1]*s1, -dIdp[i,1]*s3,
-FdIdp[i,2]*s2, -dIdp[i,2]*s1, -dIdp[i,0]*s3,
-dIdp[i,3]*s4 ] for i in xrange(3)])
+dIdc=np.array([[-dIdp[i,0]*s[1], -dIdp[i,1]*s[0], -dIdp[i,1]*s[2],
+-dIdp[i,2]*s[1], -dIdp[i,2]*s[0], -dIdp[i,0]*s[2],
+dIdp[i,3]*s[3] ] for i in range(3)])
 else:
-dIdc=np.array([[-dIdp[i,0]*s2, -dIdp[i,1]*s1, -dIdp[i,1]*s3,
-_dIdp[i,2]*s2, -dIdp[i,2]*s1, -dIdp[i,0]*s3,
-dIdp[i,3]*s4, dIdp[i,4]*s5, dIdp[i,5]*s6 ] for i in xrange(3)])
+dIdc=np.array([[-dIdp[i,0]*s[1], -dIdp[i,1]*s[0], -dIdp[i,1]*s[2],
+-dIdp[i,2]*s[1], -dIdp[i,2]*s[0], -dIdp[i,0]*s[2],
+dIdp[i,3]*s[3], dIdp[i,4]*s[4], dIdp[i,5]*s[5] ] for i in range(3)])
-return np.array([np.dot(dSdI[:,:,i],dIdc[:,:,i]).T for i in xrange(num)]).T
+return np.array([np.dot(dSdI[:,:,i],dIdc[:,:,i]).T for i in range(num)]).T

 def invariant(sigmas):
 I = np.zeros(3)
@@ -194,7 +185,7 @@ class Vegter(object):
 refNormals = np.empty([13,2])
 refPts[12] = refPtsQtr[0]
 refNormals[12] = refNormalsQtr[0]
-for i in xrange(3):
+for i in range(3):
 refPts[i] = refPtsQtr[i]
 refPts[i+3] = refPtsQtr[3-i][::-1]
 refPts[i+6] =-refPtsQtr[i]
@@ -207,7 +198,7 @@ class Vegter(object):

 def _getHingePoints(self):
 """
-calculate the hinge point B according to the reference points A,C and the normals n,m
+Calculate the hinge point B according to the reference points A,C and the normals n,m

 refPoints = np.array([[p1_x, p1_y], [p2_x, p2_y]]);
 refNormals = np.array([[n1_x, n1_y], [n2_x, n2_y]])
@@ -220,7 +211,7 @@ class Vegter(object):
 B1 = (m2*(n1*A1 + n2*A2) - n2*(m1*C1 + m2*C2))/(n1*m2-m1*n2)
 B2 = (n1*(m1*C1 + m2*C2) - m1*(n1*A1 + n2*A2))/(n1*m2-m1*n2)
 return np.array([B1,B2])
-return np.array([hingPoint(self.refPts[i:i+2],self.refNormals[i:i+2]) for i in xrange(len(self.refPts)-1)])
+return np.array([hingPoint(self.refPts[i:i+2],self.refNormals[i:i+2]) for i in range(len(self.refPts)-1)])

 def getBezier(self):
 def bezier(R,H):
@@ -228,7 +219,7 @@ class Vegter(object):
 for mu in np.linspace(0.0,1.0,self.nspace):
 b.append(np.array(R[0]*np.ones_like(mu) + 2.0*mu*(H - R[0]) + mu**2*(R[0]+R[1] - 2.0*H)))
 return b
-return np.array([bezier(self.refPts[i:i+2],self.hingePts[i]) for i in xrange(len(self.refPts)-1)])
+return np.array([bezier(self.refPts[i:i+2],self.hingePts[i]) for i in range(len(self.refPts)-1)])

 def VetgerCriterion(stress,lankford, rhoBi0, theta=0.0):
 """0-pure shear; 1-uniaxial; 2-plane strain; 3-equi-biaxial"""
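The expression inside bezier is the expanded form of the standard quadratic Bezier curve B(mu) = (1-mu)^2*R0 + 2*mu*(1-mu)*H + mu^2*R1 = R0 + 2*mu*(H-R0) + mu^2*(R0+R1-2H), with the hinge point H acting as the control point between two reference points of the yield locus. A standalone sketch of that interpolation (point values hypothetical):

    import numpy as np

    def quad_bezier(r0, r1, h, n=5):
        """Quadratic Bezier from r0 to r1 with control (hinge) point h."""
        mu = np.linspace(0.0, 1.0, n)[:, None]
        return r0 + 2.0*mu*(h - r0) + mu**2*(r0 + r1 - 2.0*h)  # same expansion as the script

    r0 = np.array([1.0, 0.0])      # hypothetical reference points on the yield locus
    r1 = np.array([0.0, 1.0])
    h  = np.array([1.0, 1.0])      # hinge point from the normal intersection
    print(quad_bezier(r0, r1, h))  # starts at r0, ends at r1, bulges toward h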
@@ -238,7 +229,7 @@ def VetgerCriterion(stress,lankford, rhoBi0, theta=0.0):
 lmatrix = np.empty([nset,nset])
 theta = np.linspace(0.0,np.pi/2,nset)
 for i,th in enumerate(theta):
-lmatrix[i] = np.array([np.cos(2*j*th) for j in xrange(nset)])
+lmatrix[i] = np.array([np.cos(2*j*th) for j in range(nset)])
 return np.linalg.solve(lmatrix, r)

 nps = len(stress)
@@ -250,10 +241,10 @@ def VetgerCriterion(stress,lankford, rhoBi0, theta=0.0):
 strsSet = stress.reshape(nset,4,2)
 refPts = np.empty([4,2])

-fouriercoeffs = np.array([np.cos(2.0*i*theta) for i in xrange(nset)])
+fouriercoeffs = np.array([np.cos(2.0*i*theta) for i in range(nset)])
-for i in xrange(2):
+for i in range(2):
 refPts[3,i] = sum(strsSet[:,3,i])/nset
-for j in xrange(3):
+for j in range(3):
 refPts[j,i] = np.dot(getFourierParas(strsSet[:,j,i]), fouriercoeffs)

@@ -752,9 +743,9 @@ def Yld2000(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
 (4.0*D[0]-4.0*D[2]-4.0*D[1]+ D[3])*s11 + (8.0*D[1]-2.0*D[3]-2.0*D[0]+2.0*D[2])*s22,
 9.0*D[4]*s12 ])/9.0

-def priStrs((sx,sy,sxy)):
-temp = np.sqrt( (sx-sy)**2 + 4.0*sxy**2 )
-return 0.5*(sx+sy + temp), 0.5*(sx+sy - temp)
+def priStrs(s):
+temp = np.sqrt( (s[0]-s[1])**2 + 4.0*s[2]**2 )
+return 0.5*(s[0]+s[1] + temp), 0.5*(s[0]+s[1] - temp)
 m2 = m/2.0; m21 = m2 - 1.0
 (X1,X2), (Y1,Y2) = priStrs(X), priStrs(Y) # Principal values of X, Y
 phi1s, phi21s, phi22s = (X1-X2)**2, (2.0*Y2+Y1)**2, (2.0*Y1+Y2)**2
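priStrs is the closed form for the in-plane principal values of a plane-stress state: sigma_1,2 = (sx+sy)/2 +/- sqrt(((sx-sy)/2)^2 + sxy^2), i.e. Mohr's circle center plus/minus radius; the code simply carries the factor 4 under the root instead of halving first. A quick check of the same formula (stress values hypothetical):

    import numpy as np

    def principal_2d(sx, sy, sxy):
        """In-plane principal stresses via Mohr's circle (center +/- radius)."""
        center = 0.5*(sx + sy)
        radius = np.sqrt((0.5*(sx - sy))**2 + sxy**2)
        return center + radius, center - radius

    s1, s2 = principal_2d(100.0, 20.0, 30.0)   # hypothetical plane-stress state
    print(s1, s2)                              # 110.0, 10.0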
@@ -768,10 +759,11 @@ def Yld2000(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
 drdl, drdm = r/m/left, r*math_ln(0.5*left)*(-1.0/m/m) #/(-m*m)
 dldm = ( phi1*math_ln(phi1s) + phi21*math_ln(phi21s) + phi22*math_ln(phi22s) )*0.5
 zero = np.zeros_like(s11); num = len(s11)
-def dPrincipalds((X1,X2,X12)): # derivative of principla with respect to stress
-temp = 1.0/np.sqrt( (X1-X2)**2 + 4.0*X12**2 )
-dP1dsi = 0.5*np.array([ 1.0+temp*(X1-X2), 1.0-temp*(X1-X2), temp*4.0*X12])
-dP2dsi = 0.5*np.array([ 1.0-temp*(X1-X2), 1.0+temp*(X1-X2), -temp*4.0*X12])
+def dPrincipalds(X):
+"""Derivative of principla with respect to stress"""
+temp = 1.0/np.sqrt( (X[0]-X[1])**2 + 4.0*X[2]**2 )
+dP1dsi = 0.5*np.array([ 1.0+temp*(X[0]-X[1]), 1.0-temp*(X[0]-X[1]), temp*4.0*X[2]])
+dP2dsi = 0.5*np.array([ 1.0-temp*(X[0]-X[1]), 1.0+temp*(X[0]-X[1]), -temp*4.0*X[2]])
 return np.array([dP1dsi, dP2dsi])

 dXdXi, dYdYi = dPrincipalds(X), dPrincipalds(Y)

@@ -782,14 +774,14 @@ def Yld2000(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
 [ 4.0*s11-2.0*s22, -4.0*s11+8.0*s22, -4.0*s11+2.0*s22, s11-2.0*s22, zero ], #dY22dD
 [ zero, zero, zero, zero, 9.0*s12 ] ])/9.0 #dY12dD

-dXdC=np.array([np.dot(dXdXi[:,:,i], dXidC[:,:,i]).T for i in xrange(num)]).T
-dYdD=np.array([np.dot(dYdYi[:,:,i], dYidD[:,:,i]).T for i in xrange(num)]).T
+dXdC=np.array([np.dot(dXdXi[:,:,i], dXidC[:,:,i]).T for i in range(num)]).T
+dYdD=np.array([np.dot(dYdYi[:,:,i], dYidD[:,:,i]).T for i in range(num)]).T

 dldX = m*np.array([ phi1s**m21*(X1-X2), phi1s**m21*(X2-X1)])
 dldY = m*np.array([phi21s**m21*(2.0*Y2+Y1) + 2.0*phi22s**m21*(2.0*Y1+Y2), \
 phi22s**m21*(2.0*Y1+Y2) + 2.0*phi21s**m21*(2.0*Y2+Y1) ])
-jC = drdl*np.array([np.dot(dldX[:,i], dXdC[:,:,i]) for i in xrange(num)]).T
-jD = drdl*np.array([np.dot(dldY[:,i], dYdD[:,:,i]) for i in xrange(num)]).T
+jC = drdl*np.array([np.dot(dldX[:,i], dXdC[:,:,i]) for i in range(num)]).T
+jD = drdl*np.array([np.dot(dldY[:,i], dYdD[:,:,i]) for i in range(num)]).T

 jm = drdl*dldm + drdm
 if mFix[0]: return np.vstack((jC,jD)).T

@@ -817,7 +809,7 @@ def Yld200418p(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
 p,q = ys(sdev, C), ys(sdev, D)
 pLambdas, qLambdas = principalStress(p), principalStress(q) # no sort

-m2 = m/2.0; x3 = xrange(3); num = len(sv)
+m2 = m/2.0; x3 = range(3); num = len(sv)
 PiQj = np.array([(pLambdas[i,:]-qLambdas[j,:]) for i in x3 for j in x3])
 QiPj = np.array([(qLambdas[i,:]-pLambdas[j,:]) for i in x3 for j in x3]).reshape(3,3,num)
 PiQjs = PiQj**2

@@ -831,8 +823,8 @@ def Yld200418p(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
 dldm = np.sum(PiQjs**m2*math_ln(PiQjs),axis=0)*0.5
 dPdc, dQdd = principalStrs_Der(p, sdev, dim), principalStrs_Der(q, sdev, dim)
 PiQjs3d = ( PiQjs**(m2-1.0) ).reshape(3,3,num)
-dldP = -m*np.array([np.diag(np.dot(PiQjs3d[:,:,i], QiPj [:,:,i])) for i in xrange(num)]).T
-dldQ = m*np.array([np.diag(np.dot(QiPj [:,:,i], PiQjs3d[:,:,i])) for i in xrange(num)]).T
+dldP = -m*np.array([np.diag(np.dot(PiQjs3d[:,:,i], QiPj [:,:,i])) for i in range(num)]).T
+dldQ = m*np.array([np.diag(np.dot(QiPj [:,:,i], PiQjs3d[:,:,i])) for i in range(num)]).T

 jm = drdl*dldm + drdm
 jc = drdl*np.sum([dldP[i]*dPdc[i] for i in x3],axis=0)

@@ -851,9 +843,9 @@ def KarafillisBoyce(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
 0<c<1, m are optional
 criteria are invalid input
 """
-ks = lambda (s1,s2,s3,s4,s5,s6),(c1,c2,c3,c4,c5,c6): np.array( [
-((c2+c3)*s1-c3*s2-c2*s3)/3.0, ((c3+c1)*s2-c3*s1-c1*s3)/3.0,
-((c1+c2)*s3-c2*s1-c1*s2)/3.0, c4*s4, c5*s5, c6*s6 ])
+ks = lambda s,c: np.array( [
+((c[1]+c[2])*s[0]-c[2]*s[1]-c[1]*s[2])/3.0, ((c[2]+c[0])*s[1]-c[2]*s[0]-c[0]*s[2])/3.0,
+((c[0]+c[1])*s[2]-c[1]*s[0]-c[0]*s[1])/3.0, c[3]*s[3], c[4]*s[4], c[5]*s[5] ])
 if dim == 2: C1,c = np.append(paras[0:4],[0.0,0.0]), paras[4]
 else: C1,c = paras[0:6], paras[6]
 if mFix[0]: m = mFix[1]

@@ -887,7 +879,7 @@ def KarafillisBoyce(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
 dldP = (1.0-c)*dphi1dP + c*dphi2dP*rm2

 jm = drdl * dldm + drdm #drda*(-1.0/m/m)
-jc1 = drdl * np.sum([dldP[i]*dPdc[i] for i in xrange(3)],axis=0)
+jc1 = drdl * np.sum([dldP[i]*dPdc[i] for i in range(3)],axis=0)
 jc = drdl * (-phi1 + rm2*phi2)

 if mFix[0]: return np.vstack((jc1,jc)).T

@@ -1029,7 +1021,7 @@ thresholdParameter = ['totalshear','equivalentStrain']

 #---------------------------------------------------------------------------------------------------
 class Loadcase():
-"""generating load cases for the spectral solver"""
+"""Generating load cases for the spectral solver"""

 def __init__(self,finalStrain,incs,time,nSet=1,dimension=3,vegter=False):
 self.finalStrain = finalStrain

@@ -1098,10 +1090,10 @@ class Loadcase():
 ' time %s'%self.time

 def _vegterLoadcase(self):
-"""generate the stress points for Vegter criteria (incomplete/untested)"""
+"""Generate the stress points for Vegter criteria (incomplete/untested)"""
 theta = np.linspace(0.0,np.pi/2.0,self.nSet)
 f = [0.0, 0.0, '*']*3; loadcase = []
-for i in xrange(self.nSet*4): loadcase.append(f)
+for i in range(self.nSet*4): loadcase.append(f)

 # more to do for F
 F = np.array([ [[1.1, 0.1], [0.1, 1.1]], # uniaxial tension

@@ -1111,12 +1103,12 @@ class Loadcase():
 ])
 for i,t in enumerate(theta):
 R = np.array([np.cos(t), np.sin(t), -np.sin(t), np.cos(t)]).reshape(2,2)
-for j in xrange(4):
+for j in range(4):
 loadcase[i*4+j][0],loadcase[i*4+j][1],loadcase[i*4+j][3],loadcase[i*4+j][4] = np.dot(R.T,np.dot(F[j],R)).reshape(4)
 return loadcase

 def _getLoadcase2dRandom(self):
-"""generate random stress points for 2D tests"""
+"""Generate random stress points for 2D tests"""
 self.NgeneratedLoadCases+=1
 defgrad=['0', '0', '*']*3
 stress =['*', '*', '0']*3

@@ -1221,17 +1213,17 @@ class myThread (threading.Thread):
 threading.Thread.__init__(self)
 self.threadID = threadID
 def run(self):
-s.acquire()
+semaphore.acquire()
 conv=converged()
-s.release()
+semaphore.release()
 while not conv:
 doSim(self.name)
-s.acquire()
+semaphore.acquire()
 conv=converged()
-s.release()
+semaphore.release()

 def doSim(thread):
-s.acquire()
+semaphore.acquire()
 global myLoad
 loadNo=loadcaseNo()
 if not os.path.isfile('%s.load'%loadNo):
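Renaming the module-level lock from s to semaphore removes a cryptic one-letter global and makes the critical sections self-describing. The pattern throughout doSim is a binary semaphore guarding shared bookkeeping and console output while the slow simulation work runs unguarded. A minimal sketch of the same pattern (worker body hypothetical, not the script itself):

    import threading

    semaphore = threading.Semaphore(1)   # binary semaphore = mutual exclusion

    def worker(name, jobs):
        while True:
            with semaphore:              # acquire()/release() via the context manager
                if not jobs:
                    return
                job = jobs.pop()
            print('%s processing job %s' % (name, job))  # unguarded: runs in parallel

    jobs = list(range(6))
    threads = [threading.Thread(target=worker, args=('t%i' % t, jobs)) for t in range(3)]
    for t in threads: t.start()
    for t in threads: t.join()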
@@ -1239,22 +1231,22 @@ def doSim(thread):
 f=open('%s.load'%loadNo,'w')
 f.write(myLoad.getLoadcase(loadNo))
 f.close()
-s.release()
-else: s.release()
+semaphore.release()
+else: semaphore.release()

 # if spectralOut does not exist, run simulation
-s.acquire()
+semaphore.acquire()
 if not os.path.isfile('%s_%i.spectralOut'%(options.geometry,loadNo)):
 damask.util.croak('Starting simulation %i (%s)'%(loadNo,thread))
-s.release()
+semaphore.release()
 damask.util.execute('DAMASK_spectral -g %s -l %i'%(options.geometry,loadNo))
-else: s.release()
+else: semaphore.release()

 # if ASCII tables do not exist, run postprocessing
-s.acquire()
+semaphore.acquire()
 if not os.path.isfile('./postProc/%s_%i.txt'%(options.geometry,loadNo)):
 damask.util.croak('Starting post processing for simulation %i (%s)'%(loadNo,thread))
-s.release()
+semaphore.release()
 try:
 damask.util.execute('postResults --cr f,p --co totalshear %s_%i.spectralOut'%(options.geometry,loadNo))
 except:

@@ -1262,10 +1254,10 @@ def doSim(thread):
 damask.util.execute('addCauchy ./postProc/%s_%i.txt'%(options.geometry,loadNo))
 damask.util.execute('addStrainTensors -0 -v ./postProc/%s_%i.txt'%(options.geometry,loadNo))
 damask.util.execute('addMises -s Cauchy -e ln(V) ./postProc/%s_%i.txt'%(options.geometry,loadNo))
-else: s.release()
+else: semaphore.release()

 # reading values from ASCII table (including linear interpolation between points)
-s.acquire()
+semaphore.acquire()
 damask.util.croak('Reading values from simulation %i (%s)'%(loadNo,thread))
 refFile = './postProc/%s_%i.txt'%(options.geometry,loadNo)
 table = damask.ASCIItable(refFile,readonly=True)

@@ -1277,9 +1269,9 @@ def doSim(thread):

 for l in [thresholdKey,'1_Cauchy']:
 if l not in table.labels(raw = True): damask.util.croak('%s not found'%l)
-s.release()
+semaphore.release()

-table.data_readArray(['%i_Cauchy'%(i+1) for i in xrange(9)]+[thresholdKey]+['%i_ln(V)'%(i+1) for i in xrange(9)])
+table.data_readArray(['%i_Cauchy'%(i+1) for i in range(9)]+[thresholdKey]+['%i_ln(V)'%(i+1) for i in range(9)])

 validity = np.zeros((int(options.yieldValue[2])), dtype=bool) # found data for desired threshold
 yieldStress = np.empty((int(options.yieldValue[2]),6),'d')

@@ -1302,13 +1294,13 @@ def doSim(thread):
 else:
 line+=1
 if not validity[i]:
-s.acquire()
+semaphore.acquire()
 damask.util.croak('The data of result %i at the threshold %f is invalid,'%(loadNo,threshold)\
 +'the fitting at this point is skipped')
-s.release()
+semaphore.release()

 # do the actual fitting procedure and write results to file
-s.acquire()
+semaphore.acquire()
 global stressAll, strainAll
 f=open(options.geometry+'_'+options.criterion+'_'+str(time.time())+'.txt','w')
 f.write(' '.join([options.fitting]+myFit.report_labels())+'\n')

@@ -1321,10 +1313,10 @@ def doSim(thread):
 ' '.join(map(str,myFit.fit(stressAll[i].reshape(len(stressAll[i])//6,6).transpose())))+'\n')
 except Exception:
 damask.util.croak('Could not fit results of simulation (%s)'%thread)
-s.release()
+semaphore.release()
 return
 damask.util.croak('\n')
-s.release()
+semaphore.release()

 def loadcaseNo():
 global N_simulations

@@ -1419,6 +1411,20 @@ if options.dimension not in fitCriteria[options.criterion]['dimen']:

 if options.criterion not in ['vonmises','tresca','drucker','hill1948'] and options.eqStress is None:
 parser.error('please specify an equivalent stress (e.g. fitting to von Mises)')

+# global variables
+fitResults = []
+fitErrors = []
+fitResidual = []
+stressAll= [np.zeros(0,'d').reshape(0,0) for i in range(int(options.yieldValue[2]))]
+strainAll= [np.zeros(0,'d').reshape(0,0) for i in range(int(options.yieldValue[2]))]
+N_simulations=0
+Guess = []
+threads=[]
+semaphore=threading.Semaphore(1)
+dDim = None
+myLoad = None
+myFit = None

 run = runFit(options.exponent, options.eqStress, options.dimension, options.criterion)

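The net effect of this file's changes is that shared mutable state (result lists, counters, the semaphore) is now created once at module level before any thread starts, instead of inside runFit; functions then only declare global for names they rebind. A condensed sketch of that initialization order (names shortened, not the script itself):

    import threading

    # create all shared state before any worker thread exists
    results   = []                        # appended to under the lock
    n_done    = 0                         # rebound, so writers must declare it global
    semaphore = threading.Semaphore(1)

    def record(value):
        global n_done                     # needed: n_done is reassigned here
        with semaphore:
            results.append(value)         # no global needed: only mutated, not rebound
            n_done += 1

    record(42)
    print(results, n_done)                # [42] 1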

@@ -69,7 +69,7 @@ for name in filenames:
 if columnMissing: continue

 # ------------------------------------------ assemble header ---------------------------------------
-table.labels_append(['%i_coord'%(i+1) for i in xrange(3)]) # extend ASCII header with new labels
+table.labels_append(['%i_coord'%(i+1) for i in range(3)]) # extend ASCII header with new labels
 table.head_write()

 # ------------------------------------------ process data ------------------------------------------

@@ -94,4 +94,4 @@ for name in filenames:
 table.input_close() # close input ASCII table (works for stdin)
 table.output_close() # close output ASCII table (works for stdout)
 if file['name'] != 'STDIN':
 os.rename(file['name']+'_tmp',file['name']) # overwrite old one with tmp new

@@ -53,7 +53,7 @@ if options.labels is None or options.formulas is None:
 if len(options.labels) != len(options.formulas):
 parser.error('number of labels ({}) and formulas ({}) do not match.'.format(len(options.labels),len(options.formulas)))

-for i in xrange(len(options.formulas)):
+for i in range(len(options.formulas)):
 options.formulas[i] = options.formulas[i].replace(';',',')

 # ------------------------------------- loop over input files --------------------------------------

@@ -154,7 +154,7 @@ for name in filenames:
 # ----------------------------------- line 1: assemble header --------------------------------------

 for newby in newbies:
-table.labels_append(['{}_{}'.format(i+1,newby) for i in xrange(resultDim[newby])]
+table.labels_append(['{}_{}'.format(i+1,newby) for i in range(resultDim[newby])]
 if resultDim[newby] > 1 else newby)

 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))

@@ -68,7 +68,7 @@ for name in filenames:
 # ------------------------------------------ assemble header --------------------------------------

 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-table.labels_append(['{}_Cauchy'.format(i+1) for i in xrange(9)]) # extend ASCII header with new labels
+table.labels_append(['{}_Cauchy'.format(i+1) for i in range(9)]) # extend ASCII header with new labels
 table.head_write()

 # ------------------------------------------ process data ------------------------------------------

@@ -17,7 +17,7 @@ def cell2node(cellData,grid):
 nodeData = 0.0
 datalen = np.array(cellData.shape[3:]).prod()

-for i in xrange(datalen):
+for i in range(datalen):
 node = scipy.ndimage.convolve(cellData.reshape(tuple(grid[::-1])+(datalen,))[...,i],
 np.ones((2,2,2))/8., # 2x2x2 neighborhood of cells
 mode = 'wrap',
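cell2node averages each periodic cell-centered field onto the nodes by convolving with a 2x2x2 box kernel in 'wrap' mode, i.e. every node receives the mean of its eight surrounding cells and the periodic boundary is handled by the convolution itself. A reduced 1-D analogue of the idea (grid and data hypothetical):

    import numpy as np
    import scipy.ndimage

    cell = np.array([0., 1., 2., 3.])                 # periodic cell-centered values
    node = scipy.ndimage.convolve(cell,
                                  np.ones(2)/2.,      # mean of the 2 neighboring cells
                                  mode='wrap')        # periodic boundary
    print(node)                                       # each entry averages two cells, wrapping around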
@@ -33,7 +33,7 @@ def cell2node(cellData,grid):

 #--------------------------------------------------------------------------------------------------
 def deformationAvgFFT(F,grid,size,nodal=False,transformed=False):
-"""calculate average cell center (or nodal) deformation for deformation gradient field specified in each grid cell"""
+"""Calculate average cell center (or nodal) deformation for deformation gradient field specified in each grid cell"""
 if nodal:
 x, y, z = np.meshgrid(np.linspace(0,size[2],1+grid[2]),
 np.linspace(0,size[1],1+grid[1]),

@@ -55,7 +55,7 @@ def deformationAvgFFT(F,grid,size,nodal=False,transformed=False):

 #--------------------------------------------------------------------------------------------------
 def displacementFluctFFT(F,grid,size,nodal=False,transformed=False):
-"""calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
+"""Calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
 integrator = 0.5j * size / math.pi

 kk, kj, ki = np.meshgrid(np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2])),

@@ -131,7 +131,7 @@ def volTetrahedron(coords):

 def volumeMismatch(size,F,nodes):
 """
-calculates the volume mismatch
+Calculates the volume mismatch

 volume mismatch is defined as the difference between volume of reconstructed
 (compatible) cube and determinant of defgrad at the FP

@@ -142,9 +142,9 @@ def volumeMismatch(size,F,nodes):

 #--------------------------------------------------------------------------------------------------
 # calculate actual volume and volume resulting from deformation gradient
-for k in xrange(grid[2]):
-for j in xrange(grid[1]):
-for i in xrange(grid[0]):
+for k in range(grid[2]):
+for j in range(grid[1]):
+for i in range(grid[0]):
 coords[0,0:3] = nodes[k, j, i ,0:3]
 coords[1,0:3] = nodes[k ,j, i+1,0:3]
 coords[2,0:3] = nodes[k ,j+1,i+1,0:3]

@@ -190,9 +190,9 @@ def shapeMismatch(size,F,nodes,centres):

 #--------------------------------------------------------------------------------------------------
 # compare deformed original and deformed positions to actual positions
-for k in xrange(grid[2]):
-for j in xrange(grid[1]):
-for i in xrange(grid[0]):
+for k in range(grid[2]):
+for j in range(grid[1]):
+for i in range(grid[0]):
 sMismatch[k,j,i] = \
 + np.linalg.norm(nodes[k, j, i ,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[0,0:3]))\
 + np.linalg.norm(nodes[k, j, i+1,0:3] - centres[k,j,i,0:3] - np.dot(F[k,j,i,:,:], coordsInitial[1,0:3]))\

@@ -288,7 +288,7 @@ for name in filenames:
 np.zeros((table.data.shape[0],
 3-table.data[:,9:].shape[1]),dtype='f'))) # fill coords up to 3D with zeros

-coords = [np.unique(table.data[:,9+i]) for i in xrange(3)]
+coords = [np.unique(table.data[:,9+i]) for i in range(3)]
 mincorner = np.array(map(min,coords))
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')

@@ -324,9 +324,9 @@ for name in filenames:
 volumeMismatch = volumeMismatch(size,table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),nodes)

 # ------------------------------------------ output data -------------------------------------------
-for i in xrange(grid[2]):
-for j in xrange(grid[1]):
-for k in xrange(grid[0]):
+for i in range(grid[2]):
+for j in range(grid[1]):
+for k in range(grid[0]):
 table.data_read()
 if options.shape: table.data_append(shapeMismatch[i,j,k])
 if options.volume: table.data_append(volumeMismatch[i,j,k])

@@ -59,7 +59,7 @@ for name in filenames:
 dims.append(dim)
 columns.append(table.label_index(what))
 table.labels_append('cum({})'.format(what) if dim == 1 else
-['{}_cum({})'.format(i+1,what) for i in xrange(dim)] ) # extend ASCII header with new labels
+['{}_cum({})'.format(i+1,what) for i in range(dim)] ) # extend ASCII header with new labels

 if remarks != []: damask.util.croak(remarks)
 if errors != []:

@@ -24,24 +24,24 @@ def curlFFT(geomdim,field):
 # differentiation in Fourier space
 k_s = np.zeros([3],'i')
 TWOPIIMG = 2.0j*math.pi
-for i in xrange(grid[2]):
+for i in range(grid[2]):
 k_s[0] = i
 if grid[2]%2 == 0 and i == grid[2]//2: k_s[0] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
 elif i > grid[2]//2: k_s[0] -= grid[2]

-for j in xrange(grid[1]):
+for j in range(grid[1]):
 k_s[1] = j
 if grid[1]%2 == 0 and j == grid[1]//2: k_s[1] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
 elif j > grid[1]//2: k_s[1] -= grid[1]

-for k in xrange(grid[0]//2+1):
+for k in range(grid[0]//2+1):
 k_s[2] = k
 if grid[0]%2 == 0 and k == grid[0]//2: k_s[2] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)

 xi = (k_s/geomdim)[2::-1].astype('c16') # reversing the field input order

 if dataType == 'tensor':
-for l in xrange(3):
+for l in range(3):
 curl_fourier[i,j,k,0,l] = ( field_fourier[i,j,k,l,2]*xi[1]\
 -field_fourier[i,j,k,l,1]*xi[2]) *TWOPIIMG
 curl_fourier[i,j,k,1,l] = (-field_fourier[i,j,k,l,2]*xi[0]\
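The k_s loops build the signed integer wavenumber for each FFT bin (frequencies above N/2 map to negative values) and zero the Nyquist bin on even grids, whose derivative contribution has no well-defined sign; np.fft.fftfreq encodes the same bookkeeping. A minimal 1-D sketch of spectral differentiation under that convention (signal hypothetical):

    import numpy as np

    N, L = 8, 2.0*np.pi
    x = np.arange(N)*L/N
    f = np.sin(x)

    k = np.fft.fftfreq(N, d=L/N)*L          # signed integer wavenumbers: 0,1,2,3,-4,-3,-2,-1
    k[N//2] = 0.0                           # even grid: zero the Nyquist bin, as in the script
    dfdx = np.fft.ifft(2.0j*np.pi/L*k*np.fft.fft(f)).real
    print(np.allclose(dfdx, np.cos(x)))     # True: spectral derivative of sin is cos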
@@ -136,14 +136,14 @@ for name in filenames:
 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
 for type, data in items.iteritems():
 for label in data['active']:
-table.labels_append(['{}_curlFFT({})'.format(i+1,label) for i in xrange(data['dim'])]) # extend ASCII header with new labels
+table.labels_append(['{}_curlFFT({})'.format(i+1,label) for i in range(data['dim'])]) # extend ASCII header with new labels
 table.head_write()

 # --------------- figure out size and grid ---------------------------------------------------------

 table.data_readArray()

-coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
+coords = [np.unique(table.data[:,colCoord+i]) for i in range(3)]
 mincorner = np.array(map(min,coords))
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')

@@ -85,7 +85,7 @@ for name in filenames:
 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
 for type, data in items.iteritems():
 for label in data['active']:
-table.labels_append(['{}_dev({})'.format(i+1,label) for i in xrange(data['dim'])] + \
+table.labels_append(['{}_dev({})'.format(i+1,label) for i in range(data['dim'])] + \
 (['sph({})'.format(label)] if options.spherical else [])) # extend ASCII header with new labels
 table.head_write()

@@ -101,4 +101,4 @@ for name in filenames:

 # ------------------------------------------ output finalization -----------------------------------

 table.close() # close input ASCII table (works for stdin)

@@ -17,7 +17,7 @@ def cell2node(cellData,grid):
 nodeData = 0.0
 datalen = np.array(cellData.shape[3:]).prod()

-for i in xrange(datalen):
+for i in range(datalen):
 node = scipy.ndimage.convolve(cellData.reshape(tuple(grid[::-1])+(datalen,))[...,i],
 np.ones((2,2,2))/8., # 2x2x2 neighborhood of cells
 mode = 'wrap',

@@ -33,7 +33,7 @@ def cell2node(cellData,grid):

 #--------------------------------------------------------------------------------------------------
 def displacementAvgFFT(F,grid,size,nodal=False,transformed=False):
-"""calculate average cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
+"""Calculate average cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
 if nodal:
 x, y, z = np.meshgrid(np.linspace(0,size[2],1+grid[2]),
 np.linspace(0,size[1],1+grid[1]),

@@ -55,7 +55,7 @@ def displacementAvgFFT(F,grid,size,nodal=False,transformed=False):

 #--------------------------------------------------------------------------------------------------
 def displacementFluctFFT(F,grid,size,nodal=False,transformed=False):
-"""calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
+"""Calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
 integrator = 0.5j * size / math.pi

 kk, kj, ki = np.meshgrid(np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2])),

@@ -167,7 +167,7 @@ for name in filenames:
 np.zeros((table.data.shape[0],
 3-table.data[:,9:].shape[1]),dtype='f'))) # fill coords up to 3D with zeros

-coords = [np.unique(table.data[:,9+i]) for i in xrange(3)]
+coords = [np.unique(table.data[:,9+i]) for i in range(3)]
 mincorner = np.array(map(min,coords))
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')

@@ -196,16 +196,16 @@ for name in filenames:
 table.labels_clear()

 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-table.labels_append((['{}_pos' .format(i+1) for i in xrange(3)] if options.nodal else []) +
-['{}_avg({}).{}' .format(i+1,options.defgrad,options.pos) for i in xrange(3)] +
-['{}_fluct({}).{}'.format(i+1,options.defgrad,options.pos) for i in xrange(3)] )
+table.labels_append((['{}_pos' .format(i+1) for i in range(3)] if options.nodal else []) +
+['{}_avg({}).{}' .format(i+1,options.defgrad,options.pos) for i in range(3)] +
+['{}_fluct({}).{}'.format(i+1,options.defgrad,options.pos) for i in range(3)] )
 table.head_write()

 # ------------------------------------------ output data -------------------------------------------

-Zrange = np.linspace(0,size[2],1+grid[2]) if options.nodal else xrange(grid[2])
-Yrange = np.linspace(0,size[1],1+grid[1]) if options.nodal else xrange(grid[1])
-Xrange = np.linspace(0,size[0],1+grid[0]) if options.nodal else xrange(grid[0])
+Zrange = np.linspace(0,size[2],1+grid[2]) if options.nodal else range(grid[2])
+Yrange = np.linspace(0,size[1],1+grid[1]) if options.nodal else range(grid[1])
+Xrange = np.linspace(0,size[0],1+grid[0]) if options.nodal else range(grid[0])

 for i,z in enumerate(Zrange):
 for j,y in enumerate(Yrange):

@@ -21,23 +21,23 @@ def divFFT(geomdim,field):
 # differentiation in Fourier space
 k_s = np.zeros([3],'i')
 TWOPIIMG = 2.0j*math.pi
-for i in xrange(grid[2]):
+for i in range(grid[2]):
 k_s[0] = i
 if grid[2]%2 == 0 and i == grid[2]//2: k_s[0] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
 elif i > grid[2]//2: k_s[0] -= grid[2]

-for j in xrange(grid[1]):
+for j in range(grid[1]):
 k_s[1] = j
 if grid[1]%2 == 0 and j == grid[1]//2: k_s[1] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
 elif j > grid[1]//2: k_s[1] -= grid[1]

-for k in xrange(grid[0]//2+1):
+for k in range(grid[0]//2+1):
 k_s[2] = k
 if grid[0]%2 == 0 and k == grid[0]//2: k_s[2] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)

 xi = (k_s/geomdim)[2::-1].astype('c16') # reversing the field input order
 if n == 9: # tensor, 3x3 -> 3
-for l in xrange(3):
+for l in range(3):
 div_fourier[i,j,k,l] = sum(field_fourier[i,j,k,l,0:3]*xi) *TWOPIIMG
 elif n == 3: # vector, 3 -> 1
 div_fourier[i,j,k] = sum(field_fourier[i,j,k,0:3]*xi) *TWOPIIMG

@@ -123,14 +123,14 @@ for name in filenames:
 for type, data in items.iteritems():
 for label in data['active']:
 table.labels_append(['divFFT({})'.format(label) if type == 'vector' else
-'{}_divFFT({})'.format(i+1,label) for i in xrange(data['dim']//3)]) # extend ASCII header with new labels
+'{}_divFFT({})'.format(i+1,label) for i in range(data['dim']//3)]) # extend ASCII header with new labels
 table.head_write()

 # --------------- figure out size and grid ---------------------------------------------------------

 table.data_readArray()

-coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
+coords = [np.unique(table.data[:,colCoord+i]) for i in range(3)]
 mincorner = np.array(map(min,coords))
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')

@@ -20,13 +20,13 @@ def periodic_3Dpad(array, rimdim=(1,1,1)):
 rimdim[2]:rimdim[2]+size[2]] = array

 p = np.zeros(3,'i')
-for side in xrange(3):
-for p[(side+2)%3] in xrange(padded.shape[(side+2)%3]):
-for p[(side+1)%3] in xrange(padded.shape[(side+1)%3]):
-for p[side%3] in xrange(rimdim[side%3]):
+for side in range(3):
+for p[(side+2)%3] in range(padded.shape[(side+2)%3]):
+for p[(side+1)%3] in range(padded.shape[(side+1)%3]):
+for p[side%3] in range(rimdim[side%3]):
 spot = (p-rimdim)%size
 padded[p[0],p[1],p[2]] = array[spot[0],spot[1],spot[2]]
-for p[side%3] in xrange(rimdim[side%3]+size[side%3],size[side%3]+2*rimdim[side%3]):
+for p[side%3] in range(rimdim[side%3]+size[side%3],size[side%3]+2*rimdim[side%3]):
 spot = (p-rimdim)%size
 padded[p[0],p[1],p[2]] = array[spot[0],spot[1],spot[2]]
 return padded
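periodic_3Dpad surrounds a 3-D array with a rim of periodically wrapped copies so that a later neighborhood scan never falls off the edge. Modern NumPy can do the same in one call with np.pad in 'wrap' mode; a sketch of that shortcut (equivalence with the hand-rolled loops assumed, rim width hypothetical):

    import numpy as np

    a = np.arange(27).reshape(3, 3, 3)
    rim = (1, 1, 1)
    padded = np.pad(a, [(r, r) for r in rim], mode='wrap')  # periodic rim on every face
    print(padded.shape)                                     # (5, 5, 5)
    print(padded[0, 1, 1] == a[-1, 0, 0])                   # rim cell wraps to the far side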
@@ -178,7 +178,7 @@ for name in filenames:

 table.data_readArray()

-coords = [np.unique(table.data[:,coordCol+i]) for i in xrange(coordDim)]
+coords = [np.unique(table.data[:,coordCol+i]) for i in range(coordDim)]
 mincorner = np.array(map(min,coords))
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords)+[1]*(3-len(coords)),'i')

@@ -215,7 +215,7 @@ for name in filenames:
 # ...reflects number of unique neighbors
 uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])

-for i in xrange(1,len(neighborhood)): # check remaining points in neighborhood
+for i in range(1,len(neighborhood)): # check remaining points in neighborhood
 uniques += np.where(np.logical_and(
 diffToNeighbor[1:-1,1:-1,1:-1,i] != 0, # not myself?
 diffToNeighbor[1:-1,1:-1,1:-1,i] != diffToNeighbor[1:-1,1:-1,1:-1,i-1],

@@ -229,7 +229,7 @@ for name in filenames:
 distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3

 distance = distance.reshape([len(feature_list),grid.prod(),1],order='F')
-for i in xrange(len(feature_list)):
+for i in range(len(feature_list)):
 stack.append(distance[i,:])

 # ------------------------------------------ output result -----------------------------------------
|
@ -239,4 +239,4 @@ for name in filenames:
|
||||||
|
|
||||||
# ------------------------------------------ output finalization -----------------------------------
|
# ------------------------------------------ output finalization -----------------------------------
|
||||||
|
|
||||||
table.close() # close input ASCII table (works for stdin)
|
table.close() # close input ASCII table (works for stdin)
|
||||||
|
|
|
@@ -24,17 +24,17 @@ def gradFFT(geomdim,field):
 # differentiation in Fourier space
 k_s = np.zeros([3],'i')
 TWOPIIMG = 2.0j*math.pi
-for i in xrange(grid[2]):
+for i in range(grid[2]):
 k_s[0] = i
 if grid[2]%2 == 0 and i == grid[2]//2: k_s[0] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
 elif i > grid[2]//2: k_s[0] -= grid[2]

-for j in xrange(grid[1]):
+for j in range(grid[1]):
 k_s[1] = j
 if grid[1]%2 == 0 and j == grid[1]//2: k_s[1] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
 elif j > grid[1]//2: k_s[1] -= grid[1]

-for k in xrange(grid[0]//2+1):
+for k in range(grid[0]//2+1):
 k_s[2] = k
 if grid[0]%2 == 0 and k == grid[0]//2: k_s[2] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
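The frequency bookkeeping above (signed indices, Nyquist bin zeroed on even grids) matches what np.fft.fftfreq produces; a minimal sketch of the same layout, assuming a unit box:

    import numpy as np

    N = 6                                  # even grid
    k = np.fft.fftfreq(N, d=1.0/N)         # signed indices: [0, 1, 2, -3, -2, -1]
    k[np.abs(k) == N//2] = 0               # zero the Nyquist bin, as the loops do
    print(k)                               # [ 0.  1.  2.  0. -2. -1.]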
@@ -126,14 +126,14 @@ for name in filenames:
 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
 for type, data in items.iteritems():
 for label in data['active']:
-table.labels_append(['{}_gradFFT({})'.format(i+1,label) for i in xrange(3 * data['dim'])]) # extend ASCII header with new labels
+table.labels_append(['{}_gradFFT({})'.format(i+1,label) for i in range(3 * data['dim'])]) # extend ASCII header with new labels
 table.head_write()

 # --------------- figure out size and grid ---------------------------------------------------------

 table.data_readArray()

-coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
+coords = [np.unique(table.data[:,colCoord+i]) for i in range(3)]
 mincorner = np.array(map(min,coords))
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')
@@ -108,7 +108,7 @@ for name in filenames:
 # ------------------------------------------ assemble header ---------------------------------------

 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-table.labels_append(['{}_IPF_{:g}{:g}{:g}_{sym}'.format(i+1,*options.pole,sym = options.symmetry.lower()) for i in xrange(3)])
+table.labels_append(['{}_IPF_{:g}{:g}{:g}_{sym}'.format(i+1,*options.pole,sym = options.symmetry.lower()) for i in range(3)])
 table.head_write()

 # ------------------------------------------ process data ------------------------------------------

@@ -131,9 +131,9 @@ for name in filenames:

 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
 for output in options.output:
-if output == 'quaternion': table.labels_append(['{}_{}_{}({})'.format(i+1,'quat',options.symmetry,label) for i in xrange(4)])
+if output == 'quaternion': table.labels_append(['{}_{}_{}({})'.format(i+1,'quat',options.symmetry,label) for i in range(4)])
-elif output == 'rodrigues': table.labels_append(['{}_{}_{}({})'.format(i+1,'rodr',options.symmetry,label) for i in xrange(3)])
+elif output == 'rodrigues': table.labels_append(['{}_{}_{}({})'.format(i+1,'rodr',options.symmetry,label) for i in range(3)])
-elif output == 'eulers': table.labels_append(['{}_{}_{}({})'.format(i+1,'eulr',options.symmetry,label) for i in xrange(3)])
+elif output == 'eulers': table.labels_append(['{}_{}_{}({})'.format(i+1,'eulr',options.symmetry,label) for i in range(3)])
 table.head_write()

 # ------------------------------------------ process data ------------------------------------------
@@ -69,7 +69,7 @@ for name in filenames:
 # ------------------------------------------ assemble header --------------------------------------

 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-table.labels_append(['%i_S'%(i+1) for i in xrange(9)]) # extend ASCII header with new labels
+table.labels_append(['{}_S'.format(i+1) for i in range(9)]) # extend ASCII header with new labels
 table.head_write()

 # ------------------------------------------ process data ------------------------------------------
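This hunk also trades %-interpolation for str.format; both spellings produce identical labels, as a quick sketch shows:

    i = 0
    assert '%i_S' % (i+1) == '{}_S'.format(i+1) == '1_S'
    labels = ['{}_S'.format(i+1) for i in range(9)]   # '1_S' ... '9_S'
    print(labels)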
@@ -113,7 +113,7 @@ for name in filenames:
 # ------------------------------------------ assemble header ---------------------------------------

 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-table.labels_append(['{}_pole_{}{}{}'.format(i+1,*options.pole) for i in xrange(2)])
+table.labels_append(['{}_pole_{}{}{}'.format(i+1,*options.pole) for i in range(2)])
 table.head_write()

 # ------------------------------------------ process data ------------------------------------------

@@ -199,7 +199,7 @@ if options.lattice in latticeChoices[:2]:
 c_normal = slipSystems[options.lattice][:,3:]
 elif options.lattice == latticeChoices[2]:
 # convert 4 Miller index notation of hex to orthogonal 3 Miller index notation
-for i in xrange(len(c_direction)):
+for i in range(len(c_direction)):
 c_direction[i] = np.array([slipSystems['hex'][i,0]*1.5,
 (slipSystems['hex'][i,0] + 2.*slipSystems['hex'][i,1])*0.5*np.sqrt(3),
 slipSystems['hex'][i,3]*options.CoverA,
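A worked instance of the hex 4-index to orthogonal 3-index conversion used in this hunk (the direction indices and c/a ratio below are made-up illustration values):

    import numpy as np

    u, v, w = 2.0, -1.0, 0.0          # hypothetical [2 -1 -1 0] direction, t = -(u+v)
    c_over_a = 1.587                  # hypothetical c/a ratio
    direction = np.array([u*1.5,
                          (u + 2.*v)*0.5*np.sqrt(3),
                          w*c_over_a])
    print(direction)                  # [3. 0. 0.] -- along the orthogonal x axis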
@@ -57,8 +57,8 @@ for name in filenames:
 if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type))
 else:
 items[type]['column'].append(table.label_index(what))
-table.labels_append(['{}_eigval({})'.format(i+1,what) for i in xrange(3)]) # extend ASCII header with new labels
+table.labels_append(['{}_eigval({})'.format(i+1,what) for i in range(3)]) # extend ASCII header with new labels
-table.labels_append(['{}_eigvec({})'.format(i+1,what) for i in xrange(9)]) # extend ASCII header with new labels
+table.labels_append(['{}_eigvec({})'.format(i+1,what) for i in range(9)]) # extend ASCII header with new labels

 if remarks != []: damask.util.croak(remarks)
 if errors != []:

@@ -112,7 +112,7 @@ for name in filenames:
 table.labels_append(['{}_{}({}){}'.format(i+1, # extend ASCII header with new labels
 theStrain,
 theStretch,
-what if what != 'f' else '') for i in xrange(9)])
+what if what != 'f' else '') for i in range(9)])

 if remarks != []: damask.util.croak(remarks)
 if errors != []:
@@ -79,7 +79,7 @@ incNum = int(asciiTable.data[asciiTable.label_index('inc'), 0])
 fullTable = np.copy(asciiTable.data) # deep copy all data, just to be safe
 labels = asciiTable.labels()
 labels_idx = [asciiTable.label_index(label) for label in labels]
-featuresDim = [labels_idx[i+1] - labels_idx[i] for i in xrange(len(labels)-1)]
+featuresDim = [labels_idx[i+1] - labels_idx[i] for i in range(len(labels)-1)]
 featuresDim.append(fullTable.shape[1] - labels_idx[-1])

 # ----- figure out size and grid ----- #

@@ -113,7 +113,7 @@ h5f.add_data("Vz", Vz, cmd_log=cmd_log)

 # add the rest of data from table
 labelsProcessed = ['inc']
-for fi in xrange(len(labels)):
+for fi in range(len(labels)):
 featureName = labels[fi]
 # remove trouble maker "("" and ")" from label/feature name
 if "(" in featureName:

@@ -136,7 +136,7 @@ for fi in xrange(len(labels)):
 # mshGridDim[2],
 # dataset.shape[1]))
 # write out data
-print "adding {}...".format(featureName)
+print("adding {}...".format(featureName))
 h5f.add_data(featureName, dataset, cmd_log=cmd_log)
 # write down the processed label
 labelsProcessed.append(featureName)
@@ -94,7 +94,7 @@ for name in filenames:
 table.data_readArray()

 if (any(options.grid) == 0 or any(options.size) == 0.0):
-coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
+coords = [np.unique(table.data[:,colCoord+i]) for i in range(3)]
 mincorner = np.array(map(min,coords))
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')

@@ -127,12 +127,12 @@ for name in filenames:
 weights=None if options.weight is None else table.data[:,2])

 if options.normCol:
-for x in xrange(options.bins[0]):
+for x in range(options.bins[0]):
 sum = np.sum(grid[x,:])
 if sum > 0.0:
 grid[x,:] /= sum
 if options.normRow:
-for y in xrange(options.bins[1]):
+for y in range(options.bins[1]):
 sum = np.sum(grid[:,y])
 if sum > 0.0:
 grid[:,y] /= sum

@@ -150,8 +150,8 @@ for name in filenames:

 delta[2] = minmax[2,1]-minmax[2,0]

-for x in xrange(options.bins[0]):
+for x in range(options.bins[0]):
-for y in xrange(options.bins[1]):
+for y in range(options.bins[1]):
 result[x,y,:] = [minmax[0,0]+delta[0]/options.bins[0]*(x+0.5),
 minmax[1,0]+delta[1]/options.bins[1]*(y+0.5),
 min(1.0,max(0.0,(grid[x,y]-minmax[2,0])/delta[2]))]
@@ -79,7 +79,7 @@ for name in filenames:
 table.data_readArray(options.pos)
 table.data_rewind()

-coords = [np.unique(table.data[:,i]) for i in xrange(3)]
+coords = [np.unique(table.data[:,i]) for i in range(3)]
 mincorner = np.array(map(min,coords))
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')

@@ -99,9 +99,9 @@ for name in filenames:
 data = np.zeros(outSize.tolist()+[len(table.labels(raw = True))])
 p = np.zeros(3,'i')

-for p[2] in xrange(grid[2]):
+for p[2] in range(grid[2]):
-for p[1] in xrange(grid[1]):
+for p[1] in range(grid[1]):
-for p[0] in xrange(grid[0]):
+for p[0] in range(grid[0]):
 d = p*packing
 table.data_read()
 data[d[0]:d[0]+packing[0],

@@ -110,9 +110,9 @@ for name in filenames:
 : ] = np.tile(np.array(table.data_asFloat(),'d'),packing.tolist()+[1]) # tile to match blowUp voxel size
 elementSize = size/grid/packing
 elem = 1
-for c in xrange(outSize[2]):
+for c in range(outSize[2]):
-for b in xrange(outSize[1]):
+for b in range(outSize[1]):
-for a in xrange(outSize[0]):
+for a in range(outSize[0]):
 data[a,b,c,colCoord:colCoord+3] = [a+0.5,b+0.5,c+0.5]*elementSize
 if colElem != -1: data[a,b,c,colElem] = elem
 table.data = data[a,b,c,:].tolist()
@@ -2,7 +2,7 @@
 # -*- coding: UTF-8 no BOM -*-

 import os,sys
 import math # noqa
 import numpy as np
 from optparse import OptionParser, OptionGroup
 import damask

@@ -110,18 +110,18 @@ for name in filenames:
 table.data_readArray()
 rows,cols = table.data.shape

 table.data = table.data[np.lexsort([table.data[:,grpColumn]])] # sort data by grpColumn

 values,index = np.unique(table.data[:,grpColumn], return_index = True) # unique grpColumn values and their positions
 index = np.append(index,rows) # add termination position
 grpTable = np.empty((len(values), cols)) # initialize output

-for i in xrange(len(values)): # iterate over groups (unique values in grpColumn)
+for i in range(len(values)): # iterate over groups (unique values in grpColumn)
 if options.periodic :
 grpTable[i] = periodicAverage(table.data[index[i]:index[i+1]],options.boundary) # apply periodicAverage mapping function
 else :
 grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply mapping function
 if not options.all: grpTable[i,grpColumn] = table.data[index[i],grpColumn] # restore grouping column value

 table.data = grpTable
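The lexsort/unique/append pattern in this hunk computes group boundaries once and then maps over each slice; a self-contained sketch with toy data:

    import numpy as np

    data = np.array([[1., 10.], [0., 20.], [1., 30.], [0., 40.]])
    data = data[np.lexsort([data[:, 0]])]                   # sort by grouping column
    values, index = np.unique(data[:, 0], return_index=True)
    index = np.append(index, data.shape[0])                 # add termination position
    means = [data[index[i]:index[i+1]].mean(axis=0) for i in range(len(values))]
    print(means)   # group 0.0 -> [ 0. 30.], group 1.0 -> [ 1. 20.]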
@@ -56,7 +56,7 @@ parser.set_defaults(condition=None)
 (options, filenames) = parser.parse_args()

 # ----- parse formulas ----- #
-for i in xrange(len(options.formulas)):
+for i in range(len(options.formulas)):
 options.formulas[i] = options.formulas[i].replace(';', ',')

 # ----- loop over input files ----- #

@@ -64,7 +64,7 @@ for name in filenames:
 try:
 h5f = damask.H5Table(name, new_file=False)
 except:
-print "!!!Cannot process {}".format(name)
+print("!!!Cannot process {}".format(name))
 continue
 damask.util.report(scriptName, name)
@@ -11,7 +11,7 @@ scriptID = ' '.join([scriptName, damask.version])


 def getCauchy(f, p):
-"""return Cauchy stress for given f and p"""
+"""Return Cauchy stress for given f and p"""
 # [Cauchy] = (1/det(F)) * [P].[F_transpose]
 f = f.reshape((3, 3))
 p = p.reshape((3, 3))

@@ -119,7 +119,7 @@ for name in filenames:

 # calculate the IPF color
 rgbArrays = np.zeros((orieData.shape[0], 3))
-for ci in xrange(rgbArrays.shape[0]):
+for ci in range(rgbArrays.shape[0]):
 if inputtype == 'eulers':
 o = damask.Orientation(Eulers=np.array(orieData[ci, :])*toRadians,
 symmetry=options.symmetry).reduced()
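The comment in getCauchy spells out sigma = (1/det(F)) * P . F^T; a runnable sketch of just that formula (the stress values below are made up for illustration):

    import numpy as np

    def cauchy(f, p):
        """Cauchy stress from deformation gradient f and 1st Piola-Kirchhoff p."""
        f = f.reshape((3, 3))
        p = p.reshape((3, 3))
        return np.dot(p, f.T) / np.linalg.det(f)

    P = np.diag([1.0e6, 0.0, 0.0])       # hypothetical uniaxial stress state
    print(cauchy(np.eye(3), P))          # with F = I, Cauchy equals P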
@@ -14,7 +14,7 @@ scriptID = ' '.join([scriptName, damask.version])

 # ----- Helper functions ----- #
 def calcMises(what, tensor):
-"""calculate von Mises equivalent"""
+"""Calculate von Mises equivalent"""
 dev = tensor - np.trace(tensor)/3.0*np.eye(3)
 symdev = 0.5*(dev+dev.T)
 return math.sqrt(np.sum(symdev*symdev.T) *

@@ -61,7 +61,7 @@ for name in filenames:

 # calculate von Mises equivalent row by row
 vmStress = np.zeros(tnsr.shape[0])
-for ri in xrange(tnsr.shape[0]):
+for ri in range(tnsr.shape[0]):
 stressTnsr = tnsr[ri, :].reshape(3, 3)
 vmStress[ri] = calcMises('stress', stressTnsr)

@@ -77,7 +77,7 @@ for name in filenames:
 if options.strain is not None:
 tnsr = h5f.get_data(options.strain)
 vmStrain = np.zeros(tnsr.shape[0])
-for ri in xrange(tnsr.shape[0]):
+for ri in range(tnsr.shape[0]):
 strainTnsr = tnsr[ri, :].reshape(3, 3)
 vmStrain[ri] = calcMises('strain', strainTnsr)
 label = "Mises{}".format(options.strain)
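calcMises is truncated in this hunk right at the prefactor; assuming the conventional 3/2 for stress (2/3 would be the strain case), the stress branch reduces to this sketch:

    import math
    import numpy as np

    def mises_stress(tensor):
        dev = tensor - np.trace(tensor)/3.0*np.eye(3)   # deviatoric part
        symdev = 0.5*(dev + dev.T)                      # symmetrize
        return math.sqrt(np.sum(symdev*symdev.T) * 3.0/2.0)

    print(mises_stress(np.diag([100.0, 0.0, 0.0])))     # uniaxial case: 100.0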
@@ -25,13 +25,13 @@ def operator(stretch, strain, eigenvalues):


 def calcEPS(defgrads, stretchType, strainType):
-"""calculate specific type of strain tensor"""
+"""Calculate specific type of strain tensor"""
 eps = np.zeros(defgrads.shape) # initialize container

 # TODO:
 # this loop can use some performance boost
 # (multi-threading?)
-for ri in xrange(defgrads.shape[0]):
+for ri in range(defgrads.shape[0]):
 f = defgrads[ri, :].reshape(3, 3)
 U, S, Vh = np.linalg.svd(f)
 R = np.dot(U, Vh) # rotation of polar decomposition
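The SVD-based polar split used in calcEPS can be checked in isolation: with F = U S Vh, the rotation is R = U.Vh and the right stretch follows as Vh^T S Vh. A sketch with a made-up deformation gradient:

    import numpy as np

    f = np.array([[1.1, 0.2, 0.0],
                  [0.0, 0.9, 0.0],
                  [0.0, 0.0, 1.0]])                    # hypothetical F
    U, S, Vh = np.linalg.svd(f)
    R = np.dot(U, Vh)                                  # rotation of polar decomposition
    stretch = np.dot(np.dot(Vh.T, np.diag(S)), Vh)     # right stretch tensor
    assert np.allclose(np.dot(R, stretch), f)          # F = R . U(stretch)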
@@ -29,7 +29,7 @@ scriptID = ' '.join([scriptName,damask.version])

 # ----- HELPER FUNCTIONS -----#
 def addTopLvlCmt(xmlstr, topLevelCmt):
-"""add top level comment to string from ET"""
+"""Add top level comment to string from ET"""
 # a quick hack to add the top level comment to XML file
 # --> somehow Elementtree does not provide this functionality
 # --> by default

@@ -42,7 +42,7 @@ def addTopLvlCmt(xmlstr, topLevelCmt):
 # MAIN
 # --------------------------------------------------------------------

-msg = 'generate Xdmf wrapper for HDF5 file.'
+msg = 'Generate Xdmf wrapper for HDF5 file.'
 parser = OptionParser(option_class=damask.extendableOption,
 usage='%prog options [file[s]]',
 description = msg,

@@ -108,7 +108,7 @@ labelsProcessed = ['Vx', 'Vy', 'Vz']
 # walk through each attributes
 for label in labels:
 if label in labelsProcessed: continue
-print "adding {}...".format(label)
+print("adding {}...".format(label))
 attr = ET.SubElement(grid, 'Attribute',
 Name=label,
 Type="None",
@@ -21,24 +21,24 @@ scriptID = ' '.join([scriptName, damask.version])

 # ----- HELPER FUNCTION ----- #
 def getMeshFromXYZ(xyzArray, mode):
-"""calc Vx,Vy,Vz vectors for vtk rectangular mesh"""
+"""Calc Vx,Vy,Vz vectors for vtk rectangular mesh"""
 # NOTE:
 # --> np.unique will automatically sort the list
 # --> although not exactly n(1), but since mesh dimension should
 # small anyway, so this is still light weight.
 dim = xyzArray.shape[1] # 2D:2, 3D:3
-coords = [np.unique(xyzArray[:, i]) for i in xrange(dim)]
+coords = [np.unique(xyzArray[:, i]) for i in range(dim)]

 if mode == 'cell':
 # since x, y, z might now have the same number of elements,
 # we have to deal with them individually
-for ri in xrange(dim):
+for ri in range(dim):
 vctr_pt = coords[ri]
 vctr_cell = np.empty(len(vctr_pt)+1)
 # calculate first and last end point
 vctr_cell[0] = vctr_pt[0] - 0.5*abs(vctr_pt[1] - vctr_pt[0])
 vctr_cell[-1] = vctr_pt[-1] + 0.5*abs(vctr_pt[-2] - vctr_pt[-1])
-for cj in xrange(1, len(vctr_cell)-1):
+for cj in range(1, len(vctr_cell)-1):
 vctr_cell[cj] = 0.5*(vctr_pt[cj-1] + vctr_pt[cj])
 # update the coords
 coords[ri] = vctr_cell
@@ -168,8 +168,8 @@ for name in filenames:
 im = Image.new('RGBA',imagesize)
 draw = ImageDraw.Draw(im)

-for y in xrange(options.dimension[1]):
+for y in range(options.dimension[1]):
-for x in xrange(options.dimension[0]):
+for x in range(options.dimension[0]):
 draw.polygon([nodes[0,x ,y ,options.z],
 nodes[1,x ,y ,options.z],
 nodes[0,x+1,y ,options.z],

@@ -27,9 +27,9 @@ def outStdout(cmd,locals):
 exec(cmd[3:])
 elif cmd[0:3] == '(?)':
 cmd = eval(cmd[3:])
-print cmd
+print(cmd)
 else:
-print cmd
+print(cmd)
 return
@@ -121,17 +121,17 @@ if options.inverse:
 theMap = theMap.invert()

 if options.palettef:
-print theMap.export(format='raw',steps=options.colorcount)
+print(theMap.export(format='raw',steps=options.colorcount))
 elif options.palette:
 for theColor in theMap.export(format='list',steps=options.colorcount):
-print '\t'.join(map(lambda x: str(int(255*x)),theColor))
+print('\t'.join(map(lambda x: str(int(255*x)),theColor)))
 else: # connect to Mentat and change colorMap
 sys.path.append(damask.solver.Marc().libraryPath())
 try:
 import py_mentat
-print 'waiting to connect...'
+print('waiting to connect...')
 py_mentat.py_connect('',options.port)
-print 'connected...'
+print('connected...')
 mentat = True
 except:
 sys.stderr.write('warning: no valid Mentat release found\n')
@@ -150,7 +150,7 @@ class MPIEspectral_result: # mimic py_post result object
 self.expectedFileSize = self.dataOffset+self.N_increments*(self.tagLen+self.N_elements*self.N_element_scalars*8)
 if options.legacy: self.expectedFileSize+=self.expectedFileSize//self.fourByteLimit*8 # add extra 8 bytes for additional headers at 4 GB limits
 if self.expectedFileSize != self.filesize:
-print '\n**\n* Unexpected file size. Incomplete simulation or file corrupted!\n**'
+print('\n**\n* Unexpected file size. Incomplete simulation or file corrupted!\n**')

 def __str__(self):
 """Summary of results file"""

@@ -288,8 +288,8 @@ class MPIEspectral_result: # mimic py_post result object
 self.file.seek(incStart+where)
 value = struct.unpack('d',self.file.read(8))[0]
 except:
-print 'seeking',incStart+where
+print('seeking {}'.format(incStart+where))
-print 'e',e,'idx',idx
+print('e {} idx {}'.format(e,idx))
 sys.exit(1)

 else:

@@ -304,7 +304,7 @@ class MPIEspectral_result: # mimic py_post result object
 try:
 if where%self.fourByteLimit + 8 >= self.fourByteLimit: # danger of reading into fortran record footer at 4 byte limit
 data=''
-for i in xrange(8):
+for i in range(8):
 self.file.seek(incStart+where+(where//self.fourByteLimit)*8+4)
 data += self.file.read(1)
 where += 1

@@ -313,8 +313,8 @@ class MPIEspectral_result: # mimic py_post result object
 self.file.seek(incStart+where+(where//self.fourByteLimit)*8+4)
 value = struct.unpack('d',self.file.read(8))[0]
 except:
-print 'seeking',incStart+where+(where//self.fourByteLimit)*8+4
+print('seeking {}'.format(incStart+where+(where//self.fourByteLimit)*8+4))
-print 'e',e,'idx',idx
+print('e {} idx {}'.format(e,idx))
 sys.exit(1)

 return [elemental_scalar(node,value) for node in self.element(e).items]
@@ -327,7 +327,7 @@ class MPIEspectral_result: # mimic py_post result object

 # -----------------------------
 def ipCoords(elemType, nodalCoordinates):
-"""returns IP coordinates for a given element"""
+"""Returns IP coordinates for a given element"""
 nodeWeightsPerNode = {
 7: [ [27.0, 9.0, 3.0, 9.0, 9.0, 3.0, 1.0, 3.0],
 [ 9.0, 27.0, 9.0, 3.0, 3.0, 9.0, 3.0, 1.0],

@@ -376,7 +376,7 @@ def ipCoords(elemType, nodalCoordinates):

 # -----------------------------
 def ipIDs(elemType):
-"""returns IP numbers for given element type"""
+"""Returns IP numbers for given element type"""
 ipPerNode = {
 7: [ 1, 2, 4, 3, 5, 6, 8, 7 ],
 57: [ 1, 2, 4, 3, 5, 6, 8, 7 ],

@@ -392,7 +392,7 @@ def ipIDs(elemType):

 # -----------------------------
 def substituteLocation(string, mesh, coords):
-"""do variable interpolation in group and filter strings"""
+"""Do variable interpolation in group and filter strings"""
 substitute = string
 substitute = substitute.replace('elem', str(mesh[0]))
 substitute = substitute.replace('node', str(mesh[1]))

@@ -407,7 +407,7 @@ def substituteLocation(string, mesh, coords):

 # -----------------------------
 def heading(glue,parts):
-"""joins pieces from parts by glue. second to last entry in pieces tells multiplicity"""
+"""Joins pieces from parts by glue. second to last entry in pieces tells multiplicity"""
 header = []
 for pieces in parts:
 if pieces[-2] == 0:

@@ -420,7 +420,7 @@ def heading(glue,parts):
 # -----------------------------
 def mapIncremental(label, mapping, N, base, new):
 """
-applies the function defined by "mapping"
+Applies the function defined by "mapping"

 (can be either 'min','max','avg', 'sum', or user specified)
 to a list of data
@@ -450,7 +450,7 @@ def mapIncremental(label, mapping, N, base, new):

 # -----------------------------
 def OpenPostfile(name,type,nodal = False):
-"""open postfile with extrapolation mode 'translate'"""
+"""Open postfile with extrapolation mode 'translate'"""
 p = {\
 'spectral': MPIEspectral_result,\
 'marc': post_open,\

@@ -463,7 +463,7 @@ def OpenPostfile(name,type,nodal = False):

 # -----------------------------
 def ParseOutputFormat(filename,what,me):
-"""parse .output* files in order to get a list of outputs"""
+"""Parse .output* files in order to get a list of outputs"""
 content = []
 format = {'outputs':{},'specials':{'brothers':[]}}
 for prefix in ['']+map(str,range(1,17)):

@@ -508,7 +508,7 @@ def ParseOutputFormat(filename,what,me):
 # -----------------------------
 def ParsePostfile(p,filename, outputFormat):
 """
-parse postfile in order to get position and labels of outputs
+Parse postfile in order to get position and labels of outputs

 needs "outputFormat" for mapping of output names to postfile output indices
 """

@@ -592,7 +592,7 @@ def ParsePostfile(p,filename, outputFormat):
 try:
 stat['LabelOfElementalScalar'][startIndex + offset] = label
 except IndexError:
-print 'trying to assign %s at position %i+%i'%(label,startIndex,offset)
+print('trying to assign {} at position {}+{}'.format(label,startIndex,offset))
 sys.exit(1)
 offset += 1

@@ -821,8 +821,8 @@ bg.set_message('parsing .output files...')
 for what in me:
 outputFormat[what] = ParseOutputFormat(filename, what, me[what])
 if '_id' not in outputFormat[what]['specials']:
-print "\nsection '%s' not found in <%s>"%(me[what], what)
+print("\nsection '{}' not found in <{}>".format(me[what], what))
-print '\n'.join(map(lambda x:' [%s]'%x, outputFormat[what]['specials']['brothers']))
+print('\n'.join(map(lambda x:' [%s]'%x, outputFormat[what]['specials']['brothers'])))

 bg.set_message('opening result file...')
 p = OpenPostfile(filename+extension,options.filetype,options.nodal)
@@ -851,17 +851,17 @@ for opt in ['nodalScalar','elemScalar','elemTensor','homogenizationResult','crys

 if options.info:
 if options.filetype == 'marc':
-print '\n\nMentat release %s'%damask.solver.Marc().version('../../')
+print('\n\nMentat release {}'.format(damask.solver.Marc().version('../../')))
 if options.filetype == 'spectral':
-print '\n\n',p
+print('\n\n{}'.format(p))

 SummarizePostfile(stat)

-print '\nUser Defined Outputs'
+print('\nUser Defined Outputs')
 for what in me:
-print '\n ',what,':'
+print('\n {}:'.format(what))
 for output in outputFormat[what]['outputs']:
-print ' ',output
+print(' {}'.format(output))

 sys.exit(0)
@@ -869,7 +869,7 @@ if options.info:
 # --- build connectivity maps

 elementsOfNode = {}
-for e in xrange(stat['NumberOfElements']):
+for e in range(stat['NumberOfElements']):
 if e%1000 == 0:
 bg.set_message('connect elem %i...'%e)
 for n in map(p.node_sequence,p.element(e).items):

@@ -892,7 +892,7 @@ groupCount = 0
 memberCount = 0

 if options.nodalScalar:
-for n in xrange(stat['NumberOfNodes']):
+for n in range(stat['NumberOfNodes']):
 if n%1000 == 0:
 bg.set_message('scan node %i...'%n)
 myNodeID = p.node_id(n)

@@ -927,7 +927,7 @@ if options.nodalScalar:
 memberCount += 1

 else:
-for e in xrange(stat['NumberOfElements']):
+for e in range(stat['NumberOfElements']):
 if e%1000 == 0:
 bg.set_message('scan elem %i...'%e)
 myElemID = p.element_id(e)
@@ -947,27 +947,27 @@ else:
 # --- filter valid locations
 # generates an expression that is only true for the locations specified by options.filter
 filter = substituteLocation(options.filter, [myElemID,myNodeID,myIpID,myGrainID], myIpCoordinates[n])
 if filter != '' and not eval(filter): # for all filter expressions that are not true:...
 continue # ... ignore this data point and continue with next

 # --- group data locations
 # generates a unique key for a group of separated data based on the separation criterium for the location
 grp = substituteLocation('#'.join(options.sep), [myElemID,myNodeID,myIpID,myGrainID], myIpCoordinates[n])

 if grp not in index: # create a new group if not yet present
 index[grp] = groupCount
 groups.append([[0,0,0,0,0.0,0.0,0.0]]) # initialize with avg location
 groupCount += 1

 groups[index[grp]][0][:4] = mapIncremental('','unique',
 len(groups[index[grp]])-1,
 groups[index[grp]][0][:4],
 [myElemID,myNodeID,myIpID,myGrainID]) # keep only if unique average location
 groups[index[grp]][0][4:] = mapIncremental('','avg',
 len(groups[index[grp]])-1,
 groups[index[grp]][0][4:],
 myIpCoordinates[n]) # incrementally update average location
 groups[index[grp]].append([myElemID,myNodeID,myIpID,myGrainID,n]) # append a new list defining each group member
 memberCount += 1
@@ -996,14 +996,14 @@ if 'none' not in map(str.lower, options.sort):

 sortKeys = eval('lambda x:(%s)'%(','.join(theKeys)))
 bg.set_message('sorting groups...')
 groups.sort(key = sortKeys) # in-place sorting to save mem


 # --------------------------- create output dir --------------------------------

 dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
 if not os.path.isdir(dirname):
-os.mkdir(dirname,0755)
+os.mkdir(dirname,0o755)

 fileOpen = False
 assembleHeader = True
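The 0755 -> 0o755 change in this hunk is a Python 3 requirement: bare leading-zero octal literals were removed from the language, while the 0o prefix is accepted from Python 2.6 onward. A one-line check:

    assert 0o755 == 493      # same permission bits as the legacy literal 0755
    print(oct(0o755))        # '0o755' on Python 3 ('0755' on Python 2)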
@@ -1049,7 +1049,7 @@ for incCount,position in enumerate(locations): # walk through locations

 if options.separateFiles:
 if fileOpen:
-file.close()
+file.close() # noqa
 fileOpen = False
 outFilename = eval('"'+eval("'%%s_inc%%0%ii%%s.txt'%(math.log10(max(increments+[1]))+1)")\
 +'"%(dirname + os.sep + options.prefix + os.path.split(filename)[1],increments[incCount],options.suffix)')

@@ -1070,15 +1070,15 @@ for incCount,position in enumerate(locations): # walk through locations
 member = 0
 for group in groups:

 N = 0 # group member counter
 for (e,n,i,g,n_local) in group[1:]: # loop over group members
 member += 1
 if member%1000 == 0:
 time_delta = ((len(locations)*memberCount)/float(member+incCount*memberCount)-1.0)*(time.time()-time_start)
 bg.set_message('(%02i:%02i:%02i) processing point %i of %i from increment %i (position %i)...'
 %(time_delta//3600,time_delta%3600//60,time_delta%60,member,memberCount,increments[incCount],position))

 newby = [] # current member's data

 if options.nodalScalar:
 for label in options.nodalScalar:
@@ -1126,7 +1126,7 @@ for incCount,position in enumerate(locations): # walk through locations
 ['Crystallite']*len(options.crystalliteResult) +
 ['Constitutive']*len(options.constitutiveResult)
 ):
 outputIndex = list(zip(*outputFormat[resultType]['outputs'])[0]).index(label) # find the position of this output in the outputFormat
 length = int(outputFormat[resultType]['outputs'][outputIndex][1])
 thisHead = heading('_',[[component,''.join( label.split() )] for component in range(int(length>1),length+int(length>1))])
 if assembleHeader: header += thisHead

@@ -1138,13 +1138,13 @@ for incCount,position in enumerate(locations): # walk through locations
 'content':[ p.element_scalar(p.element_sequence(e),stat['IndexOfLabel'][head])[n_local].value
 for head in thisHead ]})
 except KeyError:
-print '\nDAMASK outputs seem missing from "post" section of the *.dat file!'
+print('\nDAMASK outputs seem missing from "post" section of the *.dat file!')
 sys.exit()

 assembleHeader = False

 if N == 0:
-mappedResult = [float(x) for x in xrange(len(header))] # initialize with debug data (should get deleted by *N at N=0)
+mappedResult = [float(x) for x in range(len(header))] # init with debug data (should get deleted by *N at N=0)

 pos = 0
 for chunk in newby:
@@ -67,7 +67,7 @@ for name in filenames:
 if index == -1: remarks.append('label "{}" not present...'.format(options.label[i]))
 else:
 m = pattern[dimensions[i]>1].match(table.tags[index]) # isolate label name
-for j in xrange(dimensions[i]):
+for j in range(dimensions[i]):
 table.tags[index+j] = table.tags[index+j].replace(m.group(2),options.substitute[i]) # replace name with substitute

 if remarks != []: damask.util.croak(remarks)

@@ -12,7 +12,7 @@ scriptID = ' '.join([scriptName,damask.version])

 # -----------------------------
 def getHeader(filename,sizeFastIndex,sizeSlowIndex,stepsize):
-"""returns header for ang file step size in micrometer"""
+"""Returns header for ang file step size in micrometer"""
 return '\n'.join([ \
 '# TEM_PIXperUM 1.000000', \
 '# x-star 1.000000', \
@@ -48,7 +48,7 @@ def getHeader(filename,sizeFastIndex,sizeSlowIndex,stepsize):

 # -----------------------------
 def positiveRadians(angle):
-"""returns positive angle in radians from angle in degrees"""
+"""Returns positive angle in radians from angle in degrees"""
 angle = math.radians(float(angle))
 while angle < 0.0:
 angle += 2.0 * math.pi

@@ -59,9 +59,9 @@ def positiveRadians(angle):
 # -----------------------------
 def getDataLine(angles,x,y,validData=True):
 """
-returns string of one line in ang file
+Returns string of one line in ang file.

-convention in ang file: y coordinate comes first and is fastest index
+Convention in ang file: y coordinate comes first and is fastest index
 positions in micrometer
 """
 info = {True: (9999.9, 1.0, 0,99999,0.0),
@@ -295,13 +295,13 @@ for filename in filenames:

 if options.verbose: sys.stdout.write("\nGENERATING POINTS FOR POINT GRID")
 points = vtk.vtkPoints()
-for k in xrange(Npoints[2]):
+for k in range(Npoints[2]):
-for j in xrange(Npoints[0]):
+for j in range(Npoints[0]):
-for i in xrange(Npoints[1]): # y is fastest index
+for i in range(Npoints[1]): # y is fastest index
 rotatedpoint = np.array([rotatedbox[0][0] + (float(j) + 0.5) * options.resolution,
 rotatedbox[1][0] + (float(i) + 0.5) * options.resolution,
 rotatedbox[2][0] + (float(k) + 0.5) * options.distance ]) # point in rotated system
 point = np.dot(R.T,rotatedpoint) # point in mesh system
 points.InsertNextPoint(list(point))
 if options.verbose:
 sys.stdout.write("\rGENERATING POINTS FOR POINT GRID %d%%" %(100*(Npoints[1]*(k*Npoints[0]+j)+i+1)/totalNpoints))

@@ -315,7 +315,7 @@ for filename in filenames:

 if options.verbose: sys.stdout.write("\nGENERATING VERTICES FOR POINT GRID")
 vertices = vtk.vtkCellArray()
-for i in xrange(totalNpoints):
+for i in range(totalNpoints):
 vertex = vtk.vtkVertex()
 vertex.GetPointIds().SetId(0,i) # each vertex consists of exactly one (index 0) point with ID "i"
 vertices.InsertNextCell(vertex)

@@ -378,7 +378,7 @@ for filename in filenames:
 with open(angfilename,'w') as angfile:
 if options.verbose: sys.stdout.write(" %s\n"%angfilename)
 angfile.write(getHeader(filename,Npoints[1],Npoints[0],options.resolution*options.scale))
-for i in xrange(sliceN*NpointsPerSlice,(sliceN+1)*NpointsPerSlice): # Iterate over points on slice
+for i in range(sliceN*NpointsPerSlice,(sliceN+1)*NpointsPerSlice): # Iterate over points on slice

 # Get euler angles of closest IDs
@@ -76,12 +76,12 @@ for name in filenames:

                       np.zeros((table.data.shape[0],
                                 3-table.data.shape[1]),dtype='f')))  # fill coords up to 3D with zeros

-  coords = [np.unique(table.data[:,i]) for i in xrange(3)]
+  coords = [np.unique(table.data[:,i]) for i in range(3)]

   if options.mode == 'cell':
     coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \
-              [coords[i][j-1] + coords[i][j] for j in xrange(1,len(coords[i]))] + \
-              [3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in xrange(3)]
+              [coords[i][j-1] + coords[i][j] for j in range(1,len(coords[i]))] + \
+              [3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in range(3)]

   grid = np.array(map(len,coords),'i')
   N    = grid.prod() if options.mode == 'point' else (grid-1).prod()
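The recurring xrange-to-range edits above follow the standard Python 2/3 forward-compatibility pattern: Python 3 dropped xrange, and its range is already lazy. A minimal sketch of the idiom (not taken from these scripts):

import sys
if sys.version_info[0] == 2:
    range = xrange        # Python 2 only: rebind to the lazy variant; no-op on Python 3
for k in range(3):        # identical behaviour on both major versions
    print(k)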
@@ -3,7 +3,7 @@

 import sys,os,re
 from optparse import OptionParser
 import damask

 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])

@@ -14,13 +14,13 @@ def ParseOutputFormat(filename,what,me):

   outputmetafile = filename+'.output'+what
   try:
-    file = open(outputmetafile)
+    myFile = open(outputmetafile)
   except:
     print('Could not open file %s'%outputmetafile)
     raise
   else:
-    content = file.readlines()
-    file.close()
+    content = myFile.readlines()
+    myFile.close()

   tag = ''
   tagID = 0
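Renaming file to myFile avoids shadowing the Python 2 builtin of the same name. A context manager (a sketch, not the script's code) would additionally make the close exception-safe:

try:
    with open(outputmetafile) as myFile:   # closes automatically, even on error
        content = myFile.readlines()
except IOError:
    print('Could not open file %s'%outputmetafile)
    raise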
@@ -51,7 +51,7 @@ def ParseOutputFormat(filename,what,me):

 parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] Abaqus.Inputfile(s)', description = """
 Transfer the output variables requested in the material.config to
-properly labelled user defined variables within the Abaqus input file (*.inp).
+properly labelled user-defined variables within the Abaqus input file (*.inp).

 Requires the files
 <modelname_jobname>.output<Homogenization/Crystallite/Constitutive>

@@ -60,7 +60,7 @@ that are written during the first run of the model.

 Specify which user block format you want to apply by stating the homogenization, crystallite, and phase identifiers.
 Or have an existing set of user variables copied over from another *.inp file.

-""", version= scriptID)
+""", version = scriptID)

 parser.add_option('-m', dest='number', type='int', metavar = 'int',
                   help='maximum requested User Defined Variable [%default]')

@@ -84,7 +84,7 @@ parser.set_defaults(number = 0,

 (options, files) = parser.parse_args()

 if not files:
-  parser.error('no file(s) specified...')
+  parser.error('no file(s) specified.')

 me = { 'Homogenization': options.homog,
        'Crystallite':    options.cryst,
@@ -92,18 +92,18 @@ me = { 'Homogenization': options.homog,

      }

-for file in files:
-  print '\033[1m'+scriptName+'\033[0m: '+file+'\n'
-  if options.useFile:
+for myFile in files:
+  damask.util.report(scriptName,myFile)
+  if options.useFile is not None:
     formatFile = os.path.splitext(options.useFile)[0]
   else:
-    formatFile = os.path.splitext(file)[0]
-  file = os.path.splitext(file)[0]+'.inp'
-  if not os.path.lexists(file):
-    print file,'not found'
+    formatFile = os.path.splitext(myFile)[0]
+  myFile = os.path.splitext(myFile)[0]+'.inp'
+  if not os.path.lexists(myFile):
+    print('{} not found'.format(myFile))
     continue

-  print('Scanning format files of: %s'%formatFile)
+  print('Scanning format files of: {}'.format(formatFile))

   if options.number < 1:
     outputFormat = {}
@@ -111,8 +111,8 @@ for file in files:

   for what in me:
     outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
     if '_id' not in outputFormat[what]['specials']:
-      print "'%s' not found in <%s>"%(me[what],what)
-      print '\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers']))
+      print("'{}' not found in <{}>".format(me[what],what))
+      print('\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers'])))
       sys.exit(1)

   UserVars = ['HomogenizationCount']
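The print statements above become print() calls with str.format, the second half of the Python 3 migration pattern. A sketch of the forward-compatible form (names are illustrative):

from __future__ import print_function   # on Python 2, makes print() behave as in Python 3

what, target = 'Homogenization', '1'
print("'{}' not found in <{}>".format(target, what))   # str.format instead of % interpolation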
@@ -140,13 +140,13 @@ for file in files:

         UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]

 # Now change *.inp file(s)
-  print('Adding labels to: %s'%file)
-  inFile = open(file)
+  print('Adding labels to: {}'.format(myFile))
+  inFile = open(myFile)
   input = inFile.readlines()
   inFile.close()
-  output = open(file,'w')
+  output = open(myFile,'w')
   thisSection = ''
-  if options.damaskOption:
+  if options.damaskOption is not None:
     output.write('$damask {0}\n'.format(options.damaskOption))
   for line in input:
     #Abaqus keyword line begins with: *keyword, argument1, ...
@@ -154,11 +154,11 @@ for file in files:

     if m:
       lastSection = thisSection
       thisSection = m.group(1)
-      if (lastSection.upper() == '*DEPVAR' and thisSection.upper() == '*USER'):  #Abaqus keyword can be upper or lower case
+      if (lastSection.upper() == '*DEPVAR' and thisSection.upper() == '*USER'):  # Abaqus keyword can be upper or lower case
         if options.number > 0:
-          output.write('%i\n'%options.number)          #Abaqus needs total number of SDVs in the line after *Depvar keyword
+          output.write('{}\n'.format(options.number))  # Abaqus needs total number of SDVs in the line after *Depvar keyword
         else:
-          output.write('%i\n'%len(UserVars))
+          output.write('{}\n'.format(len(UserVars)))

           for i in range(len(UserVars)):
             output.write('%i,"%i%s","%i%s"\n'%(i+1,0,UserVars[i],0,UserVars[i]))  #index,output variable key,output variable description
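Replacing the truthiness test on options.damaskOption with `is not None` changes the semantics: an empty string passed on the command line is still written out, and only a missing option (default None, now that the old empty-string default is gone) is skipped. A sketch of the difference:

damaskOption = ''                 # user passed --option with an empty value
if damaskOption:                  # old test: falsy, option silently dropped
    pass
if damaskOption is not None:      # new test: only an absent option is skipped
    print('$damask {0}'.format(damaskOption))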
@@ -93,12 +93,12 @@ for name in filenames:

   microstructure_cropped = np.zeros(newInfo['grid'],datatype)
   microstructure_cropped.fill(options.fill if options.real or options.fill > 0 else microstructure.max()+1)
-  xindex = list(set(xrange(options.offset[0],options.offset[0]+newInfo['grid'][0])) & \
-                set(xrange(info['grid'][0])))
-  yindex = list(set(xrange(options.offset[1],options.offset[1]+newInfo['grid'][1])) & \
-                set(xrange(info['grid'][1])))
-  zindex = list(set(xrange(options.offset[2],options.offset[2]+newInfo['grid'][2])) & \
-                set(xrange(info['grid'][2])))
+  xindex = list(set(range(options.offset[0],options.offset[0]+newInfo['grid'][0])) & \
+                set(range(info['grid'][0])))
+  yindex = list(set(range(options.offset[1],options.offset[1]+newInfo['grid'][1])) & \
+                set(range(info['grid'][1])))
+  zindex = list(set(range(options.offset[2],options.offset[2]+newInfo['grid'][2])) & \
+                set(range(info['grid'][2])))
   translate_x = [i - options.offset[0] for i in xindex]
   translate_y = [i - options.offset[1] for i in yindex]
   translate_z = [i - options.offset[2] for i in zindex]
|
||||||
Y = options.periods*2.0*math.pi*(np.arange(options.grid[1])+0.5)/options.grid[1]
|
Y = options.periods*2.0*math.pi*(np.arange(options.grid[1])+0.5)/options.grid[1]
|
||||||
Z = options.periods*2.0*math.pi*(np.arange(options.grid[2])+0.5)/options.grid[2]
|
Z = options.periods*2.0*math.pi*(np.arange(options.grid[2])+0.5)/options.grid[2]
|
||||||
|
|
||||||
for z in xrange(options.grid[2]):
|
for z in range(options.grid[2]):
|
||||||
for y in xrange(options.grid[1]):
|
for y in range(options.grid[1]):
|
||||||
table.data_clear()
|
table.data_clear()
|
||||||
for x in xrange(options.grid[0]):
|
for x in range(options.grid[0]):
|
||||||
table.data_append(options.microstructure[options.threshold < surface[options.type](X[x],Y[y],Z[z])])
|
table.data_append(options.microstructure[options.threshold < surface[options.type](X[x],Y[y],Z[z])])
|
||||||
table.data_write()
|
table.data_write()
|
||||||
|
|
||||||
|
|
|
@@ -105,8 +105,8 @@ microstructure = np.where(radius < float(options.canal),1,0) + np.where(radius >

 alphaOfGrain = np.zeros(info['grid'][0]*info['grid'][1],'d')
 betaOfGrain  = np.zeros(info['grid'][0]*info['grid'][1],'d')
-for y in xrange(info['grid'][1]):
-  for x in xrange(info['grid'][0]):
+for y in range(info['grid'][1]):
+  for x in range(info['grid'][0]):
     if microstructure[y,x] == 0:
       microstructure[y,x] = info['microstructures']
       alphaOfGrain[info['microstructures']] = alpha[y,x]
@@ -129,7 +129,7 @@ header.append('(constituent)\tphase 1\ttexture 1\tfraction 1.0')

 header.append('[interstitial]')
 header.append('crystallite %i'%options.crystallite)
 header.append('(constituent)\tphase 2\ttexture 2\tfraction 1.0')
-for i in xrange(3,info['microstructures']):
+for i in range(3,info['microstructures']):
   header.append('[Grain%s]'%(str(i).zfill(formatwidth)))
   header.append('crystallite %i'%options.crystallite)
   header.append('(constituent)\tphase 3\ttexture %s\tfraction 1.0'%(str(i).rjust(formatwidth)))

@@ -137,7 +137,7 @@ for i in xrange(3,info['microstructures']):

 header.append('<texture>')
 header.append('[canal]')
 header.append('[interstitial]')
-for i in xrange(3,info['microstructures']):
+for i in range(3,info['microstructures']):
   header.append('[Grain%s]'%(str(i).zfill(formatwidth)))
   header.append('(gauss)\tphi1 %g\tPhi %g\tphi2 0\tscatter 0.0\tfraction 1.0'\
                 %(alphaOfGrain[i],betaOfGrain[i]))
@@ -163,7 +163,7 @@ for name in filenames:

 # --------------- figure out size and grid ---------------------------------------------------------

-  coords = [np.unique(table.data[:,i]) for i in xrange(3)]
+  coords = [np.unique(table.data[:,i]) for i in range(3)]
   mincorner = np.array(map(min,coords))
   maxcorner = np.array(map(max,coords))
   grid      = np.array(map(len,coords),'i')

@@ -217,9 +217,9 @@ for name in filenames:

   tick = time.clock()
   if options.verbose: bg.set_message('assigning grain IDs...')

-  for z in xrange(grid[2]):
-    for y in xrange(grid[1]):
-      for x in xrange(grid[0]):
+  for z in range(grid[2]):
+    for y in range(grid[1]):
+      for x in range(grid[0]):
         if (myPos+1)%(N/500.) < 1:
           time_delta = (time.clock()-tick) * (N - myPos) / myPos
           if options.verbose: bg.set_message('(%02i:%02i:%02i) processing point %i of %i (grain count %i)...'
@@ -13,7 +13,7 @@ scriptID = ' '.join([scriptName,damask.version])

 def meshgrid2(*arrs):
-  """code inspired by http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d"""
+  """Code inspired by http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d"""
   arrs = tuple(reversed(arrs))
   arrs = tuple(arrs)
   lens = np.array(map(len, arrs))

@@ -240,7 +240,7 @@ for name in filenames:

   if np.any(info['size'] <= 0.0) \
     and np.all(info['grid'] < 1): errors.append('invalid size x y z.')
   else:
-    for i in xrange(3):
+    for i in range(3):
       if info['size'][i] <= 0.0:                                    # any invalid size?
         info['size'][i] = float(info['grid'][i])/max(info['grid'])  # normalize to grid
         remarks.append('rescaling size {} to {}...'.format({0:'x',1:'y',2:'z'}[i],info['size'][i]))
@@ -95,7 +95,7 @@ for name in filenames:

   struc = ndimage.generate_binary_structure(3,1)  # 3D von Neumann neighborhood

-  for smoothIter in xrange(options.N):
+  for smoothIter in range(options.N):
     periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]/2:-grid[0]/2,
                                                               grid[1]/2:-grid[1]/2,
                                                               grid[2]/2:-grid[2]/2]  # periodically extend the microstructure

@@ -76,7 +76,7 @@ for name in filenames:

     items = table.data
     if len(items) > 2:
       if   items[1].lower() == 'of': items = [int(items[2])]*int(items[0])
-      elif items[1].lower() == 'to': items = xrange(int(items[0]),1+int(items[2]))
+      elif items[1].lower() == 'to': items = range(int(items[0]),1+int(items[2]))
       else:                          items = map(int,items)
     else:                            items = map(int,items)
@@ -92,10 +92,10 @@ for name in filenames:

   newInfo['size'] = np.where(newInfo['size'] <= 0.0, info['size'],newInfo['size'])

   multiplicity = []
-  for j in xrange(3):
+  for j in range(3):
     multiplicity.append([])
     last = 0
-    for i in xrange(info['grid'][j]):
+    for i in range(info['grid'][j]):
       this = int((i+1)*float(newInfo['grid'][j])/info['grid'][j])
       multiplicity[j].append(this-last)
       last = this

@@ -63,7 +63,7 @@ for name in filenames:

   table.info_clear()
   table.info_append(extra_header + [scriptID + '\t' + ' '.join(sys.argv[1:])])
   table.labels_clear()
-  table.labels_append(['{}_{}'.format(1+i,'pos') for i in xrange(3)]+['microstructure'])
+  table.labels_append(['{}_{}'.format(1+i,'pos') for i in range(3)]+['microstructure'])
   table.head_write()
   table.output_flush()
@@ -46,7 +46,7 @@ parser.set_defaults(origin = (0.0,0.0,0.0),

 datatype = 'f' if options.real else 'i'

 sub = {}
-for i in xrange(len(options.substitute)/2):  # split substitution list into "from" -> "to"
+for i in range(len(options.substitute)/2):   # split substitution list into "from" -> "to"
   sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1])

 # --- loop over input files ----------------------------------------------------------------------

@@ -82,7 +82,7 @@ for name in filenames:

 # --- read data ----------------------------------------------------------------------------------

   microstructure = table.microstructure_read(info['grid'],datatype)  # read microstructure

 # --- do work ------------------------------------------------------------------------------------
@@ -19,7 +19,7 @@ def integerFactorization(i):

   return j

 def binAsBins(bin,intervals):
-  """explode compound bin into 3D bins list"""
+  """Explode compound bin into 3D bins list"""
   bins = [0]*3
   bins[0] = (bin//(intervals[1] * intervals[2])) % intervals[0]
   bins[1] = (bin//intervals[2]) % intervals[1]

@@ -27,17 +27,17 @@ def binAsBins(bin,intervals):

   return bins

 def binsAsBin(bins,intervals):
-  """implode 3D bins into compound bin"""
+  """Implode 3D bins into compound bin"""
   return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]

 def EulersAsBins(Eulers,intervals,deltas,center):
-  """return list of Eulers translated into 3D bins list"""
+  """Return list of Eulers translated into 3D bins list"""
   return [int((euler+(0.5-center)*delta)//delta)%interval \
           for euler,delta,interval in zip(Eulers,deltas,intervals) \
          ]

 def binAsEulers(bin,intervals,deltas,center):
-  """compound bin number translated into list of Eulers"""
+  """Compound bin number translated into list of Eulers"""
   Eulers = [0.0]*3
   Eulers[2] = (bin%intervals[2] + center)*deltas[2]
   Eulers[1] = (bin//intervals[2]%intervals[1] + center)*deltas[1]
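binAsBins and binsAsBin are inverse row-major index conversions between a compound bin number and per-axis bin indices. A quick round-trip check, restating the formulas above (the fastest-axis line is the natural completion of the truncated hunk):

intervals = [4, 5, 6]                                        # bins per Euler-angle axis
bin = 57
bins = [(bin//(intervals[1]*intervals[2])) % intervals[0],   # slowest axis
        (bin//intervals[2]) % intervals[1],
        bin % intervals[2]]                                  # fastest axis
assert (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2] == bin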
@@ -45,7 +45,7 @@ def binAsEulers(bin,intervals,deltas,center):

   return Eulers

 def directInvRepetitions(probability,scale):
-  """calculate number of samples drawn by direct inversion"""
+  """Calculate number of samples drawn by direct inversion"""
   nDirectInv = 0
   for bin in range(len(probability)):              # loop over bins
     nDirectInv += int(round(probability[bin]*scale))  # calc repetition

@@ -270,7 +270,7 @@ for name in filenames:

   ODF['limit']  = np.radians(limits[1,:])                # right hand limits in radians
   ODF['center'] = 0.0 if all(limits[0,:]<1e-8) else 0.5  # vertex or cell centered

-  ODF['interval'] = np.array(map(len,[np.unique(table.data[:,i]) for i in xrange(3)]),'i')  # steps are number of distinct values
+  ODF['interval'] = np.array(map(len,[np.unique(table.data[:,i]) for i in range(3)]),'i')   # steps are number of distinct values
   ODF['nBins'] = ODF['interval'].prod()
   ODF['delta'] = np.radians(np.array(limits[1,0:3]-limits[0,0:3])/(ODF['interval']-1))      # step size
@@ -349,7 +349,7 @@ for name in filenames:

                      '#-------------------#',
                     ]

-  for i,ID in enumerate(xrange(nSamples)):
+  for i,ID in enumerate(range(nSamples)):
     materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)),
                        'crystallite %i'%options.crystallite,
                        '(constituent) phase %i texture %s fraction 1.0'%(options.phase,str(ID+1).rjust(formatwidth)),

@@ -361,7 +361,7 @@ for name in filenames:

                      '#-------------------#',
                     ]

-  for ID in xrange(nSamples):
+  for ID in range(nSamples):
     eulers = Orientations[ID]

     materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)),
@@ -1,13 +1,6 @@

 #!/usr/bin/env python2.7
 # -*- coding: UTF-8 no BOM -*-

-"""
-Writes meaningful labels to the marc input file (*.dat)
-
-output is based on the files
-<modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
-that are written during the first run of the model.
-"""
 import sys,os,re
 from optparse import OptionParser
 import damask

@@ -21,13 +14,13 @@ def ParseOutputFormat(filename,what,me):

   outputmetafile = filename+'.output'+what
   try:
-    file = open(outputmetafile)
+    myFile = open(outputmetafile)
   except:
     print('Could not open file %s'%outputmetafile)
     raise
   else:
-    content = file.readlines()
-    file.close()
+    content = myFile.readlines()
+    myFile.close()

   tag = ''
   tagID = 0
@@ -55,9 +48,9 @@ def ParseOutputFormat(filename,what,me):

       format['outputs'].append([output,length])
   return format

-parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] Marc.Inputfile(s)', description="""
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] Marc.Inputfile(s)', description = """
 Transfer the output variables requested in the material.config to
-properly labelled user defined variables within the Marc input file (*.dat).
+properly labelled user-defined variables within the Marc input file (*.dat).

 Requires the files
 <modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
|
||||||
|
|
||||||
""", version = scriptID)
|
""", version = scriptID)
|
||||||
|
|
||||||
|
parser.add_option('-m', dest='number', type='int', metavar = 'int',
|
||||||
parser.add_option('-n','--number', dest='number', type='int', \
|
help='maximum requested User Defined Variable [%default]')
|
||||||
metavar='int',
|
parser.add_option('--homogenization', dest='homog', metavar = 'string',
|
||||||
help='maximum requested User Defined Variable [%default]')
|
help='homogenization name or index [%default]')
|
||||||
parser.add_option('--homogenization', dest='homog', \
|
parser.add_option('--crystallite', dest='cryst', metavar = 'string',
|
||||||
metavar='string',
|
help='crystallite identifier name or index [%default]')
|
||||||
help='homogenization identifier (as string or integer [%default])')
|
parser.add_option('--phase', dest='phase', metavar = 'string',
|
||||||
parser.add_option('--crystallite', dest='cryst', \
|
help='phase identifier name or index [%default]')
|
||||||
metavar='string',
|
parser.add_option('--use', dest='useFile', metavar = 'string',
|
||||||
help='crystallite identifier (as string or integer [%default])')
|
help='optionally parse output descriptors from '+
|
||||||
parser.add_option('--phase', dest='phase', \
|
'outputXXX files of given name')
|
||||||
metavar='string',
|
parser.add_option('--option', dest='damaskOption', metavar = 'string',
|
||||||
help='phase identifier (as string or integer [%default])')
|
help='Add DAMASK option to input file, e.g. "periodic x z"')
|
||||||
parser.add_option('--use', dest='useFile', \
|
|
||||||
metavar='string',
|
parser.set_defaults(number = 0,
|
||||||
help='Optionally parse output descriptors from '+
|
homog = '1',
|
||||||
'different <model_job>.outputZZZ file. Saves the effort '+
|
cryst = '1',
|
||||||
'to start a calculation for each job)')
|
phase = '1')
|
||||||
parser.add_option('--option', dest='damaskOption', \
|
|
||||||
metavar='string',
|
|
||||||
help='Add damask option to input file '+
|
|
||||||
'for example: "periodic x z"')
|
|
||||||
parser.set_defaults(number = 0)
|
|
||||||
parser.set_defaults(homog = '1')
|
|
||||||
parser.set_defaults(cryst = '1')
|
|
||||||
parser.set_defaults(phase = '1')
|
|
||||||
parser.set_defaults(useFile = '')
|
|
||||||
parser.set_defaults(damaskOption = '')
|
|
||||||
|
|
||||||
(options, files) = parser.parse_args()
|
(options, files) = parser.parse_args()
|
||||||
|
|
||||||
if not files:
|
if not files:
|
||||||
parser.print_help()
|
parser.error('no file(s) specified.')
|
||||||
parser.error('no file(s) specified...')
|
|
||||||
|
|
||||||
me = { 'Homogenization': options.homog,
|
me = { 'Homogenization': options.homog,
|
||||||
'Crystallite': options.cryst,
|
'Crystallite': options.cryst,
|
||||||
|
@@ -109,18 +91,18 @@ me = { 'Homogenization': options.homog,

      }

-for file in files:
-  print '\033[1m'+scriptName+'\033[0m: '+file+'\n'
-  if options.useFile != '':
+for myFile in files:
+  damask.util.report(scriptName,myFile)
+  if options.useFile is not None:
     formatFile = os.path.splitext(options.useFile)[0]
   else:
-    formatFile = os.path.splitext(file)[0]
-  file = os.path.splitext(file)[0]+'.dat'
-  if not os.path.lexists(file):
-    print file,'not found'
+    formatFile = os.path.splitext(myFile)[0]
+  myFile = os.path.splitext(myFile)[0]+'.dat'
+  if not os.path.lexists(myFile):
+    print('{} not found'.format(myFile))
     continue

-  print('Scanning format files of: %s'%formatFile)
+  print('Scanning format files of: {}'.format(formatFile))

   if options.number < 1:
     outputFormat = {}
@@ -128,8 +110,8 @@ for file in files:

   for what in me:
     outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
     if '_id' not in outputFormat[what]['specials']:
-      print "'%s' not found in <%s>"%(me[what],what)
-      print '\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers']))
+      print("'{}' not found in <{}>".format(me[what],what))
+      print('\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers'])))
       sys.exit(1)

   UserVars = ['HomogenizationCount']
@@ -157,13 +139,13 @@ for file in files:

         UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]

 # Now change *.dat file(s)
-  print('Adding labels to: %s'%file)
-  inFile = open(file)
+  print('Adding labels to: {}'.format(myFile))
+  inFile = open(myFile)
   input = inFile.readlines()
   inFile.close()
-  output = open(file,'w')
+  output = open(myFile,'w')
   thisSection = ''
-  if options.damaskOption:
+  if options.damaskOption is not None:
     output.write('$damask {0}\n'.format(options.damaskOption))
   for line in input:
     m = re.match('(\w+)\s',line)
@@ -58,14 +58,14 @@ def servoLink():

         }
   Nnodes = py_mentat.py_get_int("nnodes()")
   NodeCoords = np.zeros((Nnodes,3),dtype='d')
-  for node in xrange(Nnodes):
+  for node in range(Nnodes):
     NodeCoords[node,0] = py_mentat.py_get_float("node_x(%i)"%(node+1))
     NodeCoords[node,1] = py_mentat.py_get_float("node_y(%i)"%(node+1))
     NodeCoords[node,2] = py_mentat.py_get_float("node_z(%i)"%(node+1))
   box['min'] = NodeCoords.min(axis=0)        # find the bounding box
   box['max'] = NodeCoords.max(axis=0)
   box['delta'] = box['max']-box['min']
-  for coord in xrange(3):                    # calc the dimension of the bounding box
+  for coord in range(3):                     # calc the dimension of the bounding box
     if box['delta'][coord] != 0.0:
       for extremum in ['min','max']:
         rounded = round(box[extremum][coord]*1e+15/box['delta'][coord]) * \
@@ -76,12 +76,12 @@ def servoLink():

 #-------------------------------------------------------------------------------------------------
 # loop over all nodes
-  for node in xrange(Nnodes):
+  for node in range(Nnodes):
     key = {}
     maxFlag = [False, False, False]
     Nmax = 0
     Nmin = 0
-    for coord in xrange(3):                  # for each direction
+    for coord in range(3):                   # for each direction
       if box['delta'][coord] != 0.0:
         rounded = round(NodeCoords[node,coord]*1e+15/box['delta'][coord]) * \
                   1e-15*box['delta'][coord]  # rounding to 1e-15 of dimension
|
||||||
if key['z'] not in baseNode[key['x']][key['y']].keys():
|
if key['z'] not in baseNode[key['x']][key['y']].keys():
|
||||||
baseNode[key['x']][key['y']][key['z']] = 0
|
baseNode[key['x']][key['y']][key['z']] = 0
|
||||||
|
|
||||||
baseNode[key['x']][key['y']][key['z']] = node+1 # remember the base node id
|
baseNode[key['x']][key['y']][key['z']] = node+1 # remember the base node id
|
||||||
|
|
||||||
if Nmax > 0 and Nmax >= Nmin: # node is on at least as many front than back faces
|
if Nmax > 0 and Nmax >= Nmin: # node is on at least as many front than back faces
|
||||||
if any([maxFlag[i] and active[i] for i in xrange(3)]):
|
if any([maxFlag[i] and active[i] for i in range(3)]):
|
||||||
linkNodes.append({'id': node+1,'coord': NodeCoords[node], 'faceMember': [maxFlag[i] and active[i] for i in xrange(3)]})
|
linkNodes.append({'id': node+1,'coord': NodeCoords[node], 'faceMember': [maxFlag[i] and active[i] for i in range(3)]})
|
||||||
|
|
||||||
baseCorner = baseNode["%.8e"%box['min'][0]]["%.8e"%box['min'][1]]["%.8e"%box['min'][2]] # detect ultimate base node
|
baseCorner = baseNode["%.8e"%box['min'][0]]["%.8e"%box['min'][1]]["%.8e"%box['min'][2]] # detect ultimate base node
|
||||||
|
|
||||||
|
|
||||||
for node in linkNodes: # loop over all linked nodes
|
for node in linkNodes: # loop over all linked nodes
|
||||||
linkCoord = [node['coord']] # start list of control node coords with my coords
|
linkCoord = [node['coord']] # start list of control node coords with my coords
|
||||||
for dir in xrange(3): # check for each direction
|
for dir in range(3): # check for each direction
|
||||||
if node['faceMember'][dir]: # me on this front face
|
if node['faceMember'][dir]: # me on this front face
|
||||||
linkCoord[0][dir] = box['min'][dir] # project me onto rear face along dir
|
linkCoord[0][dir] = box['min'][dir] # project me onto rear face along dir
|
||||||
linkCoord.append(np.array(box['min'])) # append base corner
|
linkCoord.append(np.array(box['min'])) # append base corner
|
||||||
|
|
|
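The repeated rounding expression quantizes each coordinate to 1e-15 of the bounding-box extent, so nodes carrying tiny floating-point noise still hash to identical string keys. A standalone sketch of the idea:

delta = 2.5                                            # bounding-box extent along one axis
def faceKey(coord):
    rounded = round(coord*1e+15/delta) * 1e-15*delta   # snap to 1e-15 of the extent
    return "%.8e" % rounded                            # string key, as used for baseNode[...]
assert faceKey(1.0) == faceKey(1.0 + 1e-16)            # FE round-off maps to the same face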
@@ -13,7 +13,7 @@ scriptID = ' '.join([scriptName,damask.version])

 try:                                         # check for Python Image Lib
   from PIL import Image,ImageDraw
   ImageCapability = True
-except:
+except ImportError:
   ImageCapability = False

 sys.path.append(damask.solver.Marc().libraryPath())
@@ -21,7 +21,7 @@ sys.path.append(damask.solver.Marc().libraryPath())

 try:                                         # check for MSC.Mentat Python interface
   import py_mentat
   MentatCapability = True
-except:
+except ImportError:
   MentatCapability = False
@@ -42,9 +42,9 @@ def outStdout(cmd,locals):

     exec(cmd[3:])
   elif cmd[0:3] == '(?)':
     cmd = eval(cmd[3:])
-    print cmd
+    print(cmd)
   else:
-    print cmd
+    print(cmd)
   return
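Narrowing the bare except: to except ImportError: keeps the capability flags honest: only a missing module disables the feature, while unrelated errors still surface. The pattern, as a sketch:

try:                          # optional dependency probe
    from PIL import Image     # only ImportError means "PIL not installed"
    ImageCapability = True
except ImportError:
    ImageCapability = False   # a stray NameError or KeyboardInterrupt is no longer swallowed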
@@ -84,7 +84,7 @@ def rcbOrientationParser(content,idcolumn):

   return grains

 def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
-  """parser for TSL-OIM reconstructed boundary files"""
+  """Parser for TSL-OIM reconstructed boundary files"""
 # find bounding box
   boxX = [1.*sys.maxint,-1.*sys.maxint]
   boxY = [1.*sys.maxint,-1.*sys.maxint]
|
||||||
for neighbors in grainNeighbors:
|
for neighbors in grainNeighbors:
|
||||||
rcData['neighbors'].append(neighbors)
|
rcData['neighbors'].append(neighbors)
|
||||||
|
|
||||||
for legs in grains['legs']: # loop over grains
|
for legs in grains['legs']: # loop over grains
|
||||||
rcData['grain'].append(legs) # store list of boundary segments
|
rcData['grain'].append(legs) # store list of boundary segments
|
||||||
myNeighbors = {}
|
myNeighbors = {}
|
||||||
for leg in legs: # test each boundary segment
|
for leg in legs: # test each boundary segment
|
||||||
if leg < len(grainNeighbors): # a valid segment index?
|
if leg < len(grainNeighbors): # a valid segment index?
|
||||||
for side in range(2): # look at both sides of the segment
|
for side in range(2): # look at both sides of the segment
|
||||||
if grainNeighbors[leg][side] in myNeighbors: # count occurrence of grain IDs
|
if grainNeighbors[leg][side] in myNeighbors: # count occurrence of grain IDs
|
||||||
myNeighbors[grainNeighbors[leg][side]] += 1
|
myNeighbors[grainNeighbors[leg][side]] += 1
|
||||||
else:
|
else:
|
||||||
myNeighbors[grainNeighbors[leg][side]] = 1
|
myNeighbors[grainNeighbors[leg][side]] = 1
|
||||||
if myNeighbors: # do I have any neighbors (i.e., non-bounding box segment)
|
if myNeighbors: # do I have any neighbors (i.e., non-bounding box segment)
|
||||||
candidateGrains = sorted(myNeighbors.iteritems(), key=lambda (k,v): (v,k), reverse=True) # sort grain counting
|
candidateGrains = sorted(myNeighbors.iteritems(), key=lambda p: (p[1],p[0]), reverse=True) # sort grain counting
|
||||||
# most frequent one not yet seen?
|
# most frequent one not yet seen?
|
||||||
rcData['grainMapping'].append(candidateGrains[0 if candidateGrains[0][0] not in rcData['grainMapping'] else 1][0]) # must be me then
|
rcData['grainMapping'].append(candidateGrains[0 if candidateGrains[0][0] not in rcData['grainMapping'] else 1][0]) # must be me then
|
||||||
# special case of bi-crystal situation...
|
# special case of bi-crystal situation...
|
||||||
|
|
||||||
damask.util.croak(' found {} grains'.format(len(rcData['grain'])))
|
damask.util.croak(' found {} grains'.format(len(rcData['grain'])))
|
||||||
|
|
||||||
|
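Tuple-unpacking lambdas such as lambda (k,v): (v,k) were removed in Python 3, hence the explicit indexing above; dict.iteritems() would be the next candidate, since it is gone in Python 3 as well. A sketch of the same count-then-rank step using Counter (an alternative formulation, not the script's code):

from collections import Counter

myNeighbors = Counter({7: 3, 2: 3, 5: 1})            # grain ID -> occurrence count
ranked = sorted(myNeighbors.items(),                 # items() replaces Py2 iteritems()
                key=lambda p: (p[1], p[0]),          # sort by count, then by ID
                reverse=True)
print(ranked)                                        # [(7, 3), (2, 3), (5, 1)]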
@@ -647,7 +647,6 @@ def job(grainNumber,grainMapping,twoD):

             "*job_param univ_gas_const 8.314472",
             "*job_param planck_radiation_2 1.4387752e-2",
             "*job_param speed_light_vacuum 299792458",
-#           "*job_usersub_file /san/%s/FEM/DAMASK/code/mpie_cpfem_marc2010.f90 | subroutine definition"%(pwd.getpwuid(os.geteuid())[0].rpartition("\\")[2]),
             "*job_option user_source:compile_save",
           ]
@@ -729,7 +728,7 @@ def image(name,imgsize,marginX,marginY,rcData):

 # -------------------------
 def inside(x,y,points):
-  """tests whether point(x,y) is within polygon described by points"""
+  """Tests whether point(x,y) is within polygon described by points"""
   inside = False
   npoints=len(points)
   (x1,y1) = points[npoints-1]                # start with last point of points
@@ -750,8 +749,8 @@ def inside(x,y,points):

   return inside

 # -------------------------
-def fftbuild(rcData,height,xframe,yframe,resolution,extrusion):
-  """build array of grain numbers"""
+def fftbuild(rcData,height,xframe,yframe,grid,extrusion):
+  """Build array of grain numbers"""
   maxX = -1.*sys.maxint
   maxY = -1.*sys.maxint
   for line in rcData['point']:               # find data range
@@ -760,14 +759,14 @@ def fftbuild(rcData,height,xframe,yframe,resolution,extrusion):

     maxY = max(maxY, y)
   xsize = maxX+2*xframe                      # add frame size
   ysize = maxY+2*yframe
-  xres = int(round(resolution/2.0)*2)        # use only even resolution
-  yres = int(round(xres/xsize*ysize/2.0)*2)  # calculate other resolutions
+  xres = int(grid)
+  yres = int(xres/xsize*ysize)
   zres = extrusion
   zsize = extrusion*min([xsize/xres,ysize/yres])

   fftdata = {'fftpoints':[], \
-             'resolution':(xres,yres,zres), \
-             'dimension':(xsize,ysize,zsize)}
+             'grid':(xres,yres,zres), \
+             'size':(xsize,ysize,zsize)}

   frameindex=len(rcData['grain'])+1          # calculate frame index as largest grain index plus one
   dx = xsize/(xres)                          # calculate step sizes
@@ -841,8 +840,8 @@ parser.add_option('-x', '--xmargin', type='float', metavar = 'float',

                   dest='xmargin',help='margin in x in units of patch size [%default]')
 parser.add_option('-y', '--ymargin', type='float', metavar = 'float',
                   dest='ymargin', help='margin in y in units of patch size [%default]')
-parser.add_option('-r', '--resolution', type='int', metavar = 'int',
-                  dest='resolution',help='number of Fourier points/Finite Elements across patch size + x_margin [%default]')
+parser.add_option('-g', '--grid', type='int', metavar = 'int',
+                  dest='grid',help='number of Fourier points/Finite Elements across patch size + x_margin [%default]')
 parser.add_option('-z', '--extrusion', type='int', metavar = 'int',
                   dest='extrusion', help='number of repetitions in z-direction [%default]')
 parser.add_option('-i', '--imagesize', type='int', metavar = 'int',
@@ -861,7 +860,7 @@ parser.set_defaults(output = [],

                     port = 40007,
                     xmargin = 0.0,
                     ymargin = 0.0,
-                    resolution = 64,
+                    grid = 64,
                     extrusion = 2,
                     imgsize = 512,
                     M = (0.0,1.0,1.0,0.0),   # M_11, M_12, M_21, M_22. x,y in RCB is y,x of Eulers!!
@@ -903,14 +902,14 @@ rcData = rcbParser(boundarySegments,options.M,options.size,options.tolerance,opt

 Minv = np.linalg.inv(np.array(options.M).reshape(2,2))

 if 'rcb' in options.output:
-  print """# Header:
-#
-# Column 1-3: right hand average orientation (phi1, PHI, phi2 in radians)
-# Column 4-6: left hand average orientation (phi1, PHI, phi2 in radians)
-# Column 7: length (in microns)
-# Column 8: trace angle (in degrees)
-# Column 9-12: x,y coordinates of endpoints (in microns)
-# Column 13-14: IDs of right hand and left hand grains"""
+  print('# Header:\n'+
+        '# \n'+
+        '# Column 1-3: right hand average orientation (phi1, PHI, phi2 in radians)\n'+
+        '# Column 4-6: left hand average orientation (phi1, PHI, phi2 in radians)\n'+
+        '# Column 7: length (in microns)\n'+
+        '# Column 8: trace angle (in degrees)\n'+
+        '# Column 9-12: x,y coordinates of endpoints (in microns)\n'+
+        '# Column 13-14: IDs of right hand and left hand grains')
   for i,(left,right) in enumerate(rcData['neighbors']):
     if rcData['segment'][i]:
       first = np.dot(Minv,np.array([rcData['bounds'][0][0]+rcData['point'][rcData['segment'][i][0]][0]/rcData['scale'],
@@ -919,12 +918,12 @@ if 'rcb' in options.output:

       second = np.dot(Minv,np.array([rcData['bounds'][0][0]+rcData['point'][rcData['segment'][i][1]][0]/rcData['scale'],
                                      rcData['bounds'][0][1]+rcData['point'][rcData['segment'][i][1]][1]/rcData['scale'],
                                     ]))
-      print ' '.join(map(str,orientationData[left-1]+orientationData[right-1])),
-      print np.linalg.norm(first-second),
-      print '0',
-      print ' '.join(map(str,first)),
-      print ' '.join(map(str,second)),
-      print ' '.join(map(str,[left,right]))
+      print(' '.join(map(str,orientationData[left-1]+orientationData[right-1]))+
+            str(np.linalg.norm(first-second))+
+            '0'+
+            ' '.join(map(str,first))+
+            ' '.join(map(str,second))+
+            ' '.join(map(str,[left,right])))

 # ----- write image -----
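Worth noting: the old trailing-comma print statements inserted a space between the printed items, while plain + concatenation as shown in the new code does not, so a faithful port needs explicit separators. A sketch of a separator-safe form:

pieces = ['0.1 0.2 0.3', '12.5', '0', '1.0 2.0', '3 7']
print(' '.join(pieces))   # one print call; the field-separating spaces are explicit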
@@ -934,28 +933,67 @@ if 'image' in options.output and options.imgsize > 0:

 else:
   damask.util.croak('...no image drawing possible (PIL missing)...')

+# ----- generate material.config -----
+
+if any(output in options.output for output in ['spectral','mentat']):
+  config = []
+  config.append('<microstructure>')
+
+  for i,grain in enumerate(rcData['grainMapping']):
+    config+=['[grain{}]'.format(grain),
+             'crystallite\t1',
+             '(constituent)\tphase 1\ttexture {}\tfraction 1.0'.format(i+1)]
+  if (options.xmargin > 0.0):
+    config+=['[x-margin]',
+             'crystallite\t1',
+             '(constituent)\tphase 2\ttexture {}\tfraction 1.0\n'.format(len(rcData['grainMapping'])+1)]
+  if (options.ymargin > 0.0):
+    config+=['[y-margin]',
+             'crystallite\t1',
+             '(constituent)\tphase 2\ttexture {}\tfraction 1.0\n'.format(len(rcData['grainMapping'])+1)]
+  if (options.xmargin > 0.0 and options.ymargin > 0.0):
+    config+=['[xy-margin]',
+             'crystallite\t1',
+             '(constituent)\tphase 2\ttexture {}\tfraction 1.0\n'.format(len(rcData['grainMapping'])+1)]
+
+  if (options.xmargin > 0.0 or options.ymargin > 0.0):
+    config.append('[margin]')
+
+  config.append('<texture>')
+  for grain in rcData['grainMapping']:
+    config+=['[grain{}]'.format(grain),
+             '(gauss)\tphi1\t%f\tphi\t%f\tphi2\t%f\tscatter\t%f\tfraction\t1.0'\
+             %(math.degrees(orientationData[grain-1][0]),math.degrees(orientationData[grain-1][1]),\
+               math.degrees(orientationData[grain-1][2]),options.scatter)]
+  if (options.xmargin > 0.0 or options.ymargin > 0.0):
+    config+=['[margin]',
+             '(random)\t\tscatter\t0.0\tfraction\t1.0']

 # ----- write spectral geom -----

 if 'spectral' in options.output:
-  fftdata = fftbuild(rcData, options.size, options.xmargin, options.ymargin, options.resolution, options.extrusion)
+  fftdata = fftbuild(rcData, options.size, options.xmargin, options.ymargin, options.grid, options.extrusion)

-  geomFile = open(myName+'_'+str(int(fftdata['resolution'][0]))+'.geom','w')  # open geom file for writing
-  geomFile.write('3\theader\n')                                               # write header info
-  geomFile.write('grid a %i b %i c %i\n'%(fftdata['resolution']))             # grid resolution
-  geomFile.write('size x %f y %f z %f\n'%(fftdata['dimension']))              # size
-  geomFile.write('homogenization 1\n')                                        # homogenization
-  for z in xrange(fftdata['resolution'][2]):                                  # z repetions
-    for y in xrange(fftdata['resolution'][1]):                                # each x-row separately
-      geomFile.write('\t'.join(map(str,fftdata['fftpoints'][ y   *fftdata['resolution'][0]:
-                                                            (y+1)*fftdata['resolution'][0]]))+'\n')  # grain indexes, x-row per line
-  geomFile.close()                                                            # close geom file
-
-  damask.util.croak('assigned {} out of {} (2D) Fourier points...'
-                    .format(len(fftdata['fftpoints']),
-                            int(fftdata['resolution'][0])*int(fftdata['resolution'][1])))
+  table = damask.ASCIItable(outname = myName+'_'+str(int(fftdata['grid'][0]))+'.geom',
+                            labeled = False,
+                            buffered = False)
+  table.labels_clear()
+  table.info_clear()
+  table.info_append([
+    scriptID + ' ' + ' '.join(sys.argv[1:]),
+    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=fftdata['grid']),
+    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=fftdata['size']),
+    "homogenization\t1",
+    ])
+  table.info_append(config)
+  table.head_write()
+
+  table.data = np.array(fftdata['fftpoints']*options.extrusion).\
+               reshape(fftdata['grid'][1]*fftdata['grid'][2],fftdata['grid'][0])
+  formatwidth = 1+int(math.log10(np.max(table.data)))
+  table.data_writeArray('%%%ii'%(formatwidth),delimiter=' ')
+  table.close()

-# ----- write Mentat procedure -----

 if 'mentat' in options.output:
   if MentatCapability:
|
||||||
|
|
||||||
cmds = [\
|
cmds = [\
|
||||||
init(),
|
init(),
|
||||||
sample(options.size,rcData['dimension'][1]/rcData['dimension'][0],12,options.xmargin,options.ymargin),
|
sample(options.size,rcData['dimension'][1]/rcData['size'][0],12,options.xmargin,options.ymargin),
|
||||||
patch(options.size,options.resolution,options.mesh,rcData),
|
patch(options.size,options.grid,options.mesh,rcData),
|
||||||
gage(options.mesh,rcData),
|
gage(options.mesh,rcData),
|
||||||
]
|
]
|
||||||
|
|
||||||
if not options.twoD:
|
if not options.twoD:
|
||||||
cmds += [expand3D(options.size*(1.0+2.0*options.xmargin)/options.resolution*options.extrusion,options.extrusion),]
|
cmds += [expand3D(options.size*(1.0+2.0*options.xmargin)/options.grid*options.extrusion,options.extrusion),]
|
||||||
|
|
||||||
cmds += [\
|
cmds += [\
|
||||||
cleanUp(options.size),
|
cleanUp(options.size),
|
||||||
materials(),
|
materials(),
|
||||||
initial_conditions(len(rcData['grain']),rcData['grainMapping']),
|
initial_conditions(len(rcData['grain']),rcData['grainMapping']),
|
||||||
boundary_conditions(options.strainrate,options.size*(1.0+2.0*options.xmargin)/options.resolution*options.extrusion,\
|
boundary_conditions(options.strainrate,options.size*(1.0+2.0*options.xmargin)/options.grid*options.extrusion,\
|
||||||
options.size,rcData['dimension'][1]/rcData['dimension'][0],options.xmargin,options.ymargin),
|
options.size,rcData['dimension'][1]/rcData['dimension'][0],options.xmargin,options.ymargin),
|
||||||
loadcase(options.strain/options.strainrate,options.increments,0.01),
|
loadcase(options.strain/options.strainrate,options.increments,0.01),
|
||||||
job(len(rcData['grain']),rcData['grainMapping'],options.twoD),
|
job(len(rcData['grain']),rcData['grainMapping'],options.twoD),
|
||||||
|
@@ -996,51 +1034,5 @@ if 'mentat' in options.output:
 else:
   damask.util.croak('...no interaction with Mentat possible...')
 
-# ----- write config data to file -----
-
-if 'mentat' in options.output or 'spectral' in options.output:
-  output = ''
-  output += '\n\n<homogenization>\n' + \
-            '\n[SX]\n' + \
-            'type\tisostrain\n' + \
-            'Ngrains\t1\n' + \
-            '\n\n<microstructure>\n'
-
-  for i,grain in enumerate(rcData['grainMapping']):
-    output += '\n[grain %i]\n'%grain + \
-              'crystallite\t1\n' + \
-              '(constituent)\tphase 1\ttexture %i\tfraction 1.0\n'%(i+1)
-  if (options.xmargin > 0.0):
-    output += '\n[x-margin]\n' + \
-              'crystallite\t1\n' + \
-              '(constituent)\tphase 2\ttexture %i\tfraction 1.0\n'%(len(rcData['grainMapping'])+1)
-  if (options.ymargin > 0.0):
-    output += '\n[y-margin]\n' + \
-              'crystallite\t1\n' + \
-              '(constituent)\tphase 2\ttexture %i\tfraction 1.0\n'%(len(rcData['grainMapping'])+1)
-  if (options.xmargin > 0.0 and options.ymargin > 0.0):
-    output += '\n[margin edge]\n' + \
-              'crystallite\t1\n' + \
-              '(constituent)\tphase 2\ttexture %i\tfraction 1.0\n'%(len(rcData['grainMapping'])+1)
-
-  output += '\n\n<crystallite>\n' + \
-            '\n[fillMeIn]\n' + \
-            '\n\n<phase>\n' + \
-            '\n[patch]\n'
-  if (options.xmargin > 0.0 or options.ymargin > 0.0):
-    output += '\n[margin]\n'
-
-  output += '\n\n<texture>\n\n'
-  for grain in rcData['grainMapping']:
-    output += '\n[grain %i]\n'%grain + \
-              '(gauss)\tphi1\t%f\tphi\t%f\tphi2\t%f\tscatter\t%f\tfraction\t1.0\n'\
-              %(math.degrees(orientationData[grain-1][0]),math.degrees(orientationData[grain-1][1]),\
-                math.degrees(orientationData[grain-1][2]),options.scatter)
-  if (options.xmargin > 0.0 or options.ymargin > 0.0):
-    output += '\n[margin]\n' + \
-              '(random)\t\tscatter\t0.0\tfraction\t1.0\n'
-
-  configFile = open(myName+'.config','w')
-  configFile.write(output)
-  configFile.close()
+with open(myName+'.config','w') as configFile:
+  configFile.write('\n'.join(config))
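The replacement writes the collected `config` lines through a context manager instead of building one big string and pairing open/write/close by hand. For reference, the `with` form guarantees the handle is closed even if the write fails; a minimal sketch with a hypothetical file name and content:

    # minimal sketch: 'example.config' and the config list are made up
    config = ['<homogenization>', '[SX]', 'type\tisostrain']
    with open('example.config', 'w') as configFile:
        configFile.write('\n'.join(config))
    # the file is closed here, whether or not write() succeeded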
@@ -4,7 +4,6 @@
 import threading,time,os,sys,random
 import numpy as np
 from optparse import OptionParser
-from operator import mul
 from cStringIO import StringIO
 import damask
 
@@ -65,7 +64,7 @@ class myThread (threading.Thread):
 NmoveGrains = random.randrange(1,maxSeeds)
 selectedMs = []
 direction = []
-for i in xrange(NmoveGrains):
+for i in range(NmoveGrains):
   selectedMs.append(random.randrange(1,nMicrostructures))
 
   direction.append(np.array(((random.random()-0.5)*delta[0],
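The `xrange` → `range` substitution in this hunk (and repeated in the hunks below) is Python 3 preparation: `xrange` does not exist in Python 3, where `range` itself returns a lazy sequence. A quick illustration with arbitrary values:

    # runs unchanged on Python 2 and 3; range(3) is a list on 2, lazy on 3
    for i in range(3):
        print(i)               # 0, 1, 2

    indices = list(range(3))   # materialize explicitly if a real list is needed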
@@ -110,7 +109,7 @@ class myThread (threading.Thread):
 currentData=np.bincount(perturbedGeomTable.data.astype(int).ravel())[1:]/points
 currentError=[]
 currentHist=[]
-for i in xrange(nMicrostructures): # calculate the deviation in all bins per histogram
+for i in range(nMicrostructures): # calculate the deviation in all bins per histogram
   currentHist.append(np.histogram(currentData,bins=target[i]['bins'])[0])
   currentError.append(np.sqrt(np.square(np.array(target[i]['histogram']-currentHist[i])).sum()))
 
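For context, the loop above re-bins the current volume fractions with each target's bin edges and measures the mismatch as the root of the summed squared per-bin differences, i.e. an L2 distance between histograms. A self-contained sketch with invented data:

    import numpy as np

    # invented volume fractions and a single target description
    currentData = np.array([0.2, 0.3, 0.5])
    target = [{'bins':      np.array([0.0, 0.25, 0.5, 1.0]),
               'histogram': np.array([1, 1, 1])}]

    currentHist  = np.histogram(currentData, bins=target[0]['bins'])[0]
    currentError = np.sqrt(np.square(target[0]['histogram'] - currentHist).sum())  # 0.0 here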
@@ -122,12 +121,12 @@ class myThread (threading.Thread):
 bestMatch = match
 #--- count bin classes with no mismatch ----------------------------------------------------------------------
 myMatch=0
-for i in xrange(nMicrostructures):
+for i in range(nMicrostructures):
   if currentError[i] > 0.0: break
   myMatch = i+1
 
 if myNmicrostructures == nMicrostructures:
-  for i in xrange(min(nMicrostructures,myMatch+options.bins)):
+  for i in range(min(nMicrostructures,myMatch+options.bins)):
     if currentError[i] > target[i]['error']: # worse fitting, next try
       randReset = True
       break
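The two loops above form a staged acceptance test: `myMatch` counts how many leading bin classes already fit perfectly, and a trial is rejected as soon as any class up to `myMatch+options.bins` fits worse than the best seed so far. The counting idiom, sketched with invented error values:

    currentError = [0.0, 0.0, 0.4, 0.1]   # invented per-class deviations

    myMatch = 0
    for i in range(len(currentError)):    # count leading classes with perfect fit
        if currentError[i] > 0.0: break
        myMatch = i + 1
    # myMatch == 2: the first two classes match exactly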
@@ -146,7 +145,7 @@ class myThread (threading.Thread):
 for line in perturbedSeedsVFile:
   currentSeedsFile.write(line)
   bestSeedsVFile.write(line)
-for j in xrange(nMicrostructures): # save new errors for all bins
+for j in range(nMicrostructures): # save new errors for all bins
   target[j]['error'] = currentError[j]
 if myMatch > match: # one or more new bins have no deviation
   damask.util.croak( 'Stage {:d} cleared'.format(myMatch))
@@ -160,7 +159,7 @@ class myThread (threading.Thread):
 bestSeedsVFile = StringIO()
 for line in perturbedSeedsVFile:
   bestSeedsVFile.write(line)
-for j in xrange(nMicrostructures):
+for j in range(nMicrostructures):
   target[j]['error'] = currentError[j]
 randReset = True
 else: #--- not all grains are tessellated
@@ -219,8 +218,7 @@ if options.randomSeed is None:
 damask.util.croak(options.randomSeed)
 delta = (options.scale/options.grid[0],options.scale/options.grid[1],options.scale/options.grid[2])
 baseFile=os.path.splitext(os.path.basename(options.seedFile))[0]
-points = float(reduce(mul,options.grid))
-
+points = np.array(options.grid).prod().astype('float')
 
 # ----------- calculate target distribution and bin edges
 targetGeomFile = os.path.splitext(os.path.basename(options.target))[0]+'.geom'
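Dropping `from operator import mul` earlier and rewriting `points` here go together: `reduce` is a builtin only in Python 2 (Python 3 moved it to `functools`), so the grid point count switches to NumPy. Both forms give the same number; a sketch with an arbitrary grid:

    import numpy as np
    from functools import reduce   # where reduce lives in Python 3
    from operator import mul

    grid = (16, 16, 4)             # arbitrary example grid

    points_old = float(reduce(mul, grid))               # 1024.0
    points_new = np.array(grid).prod().astype('float')  # 1024.0
    assert points_old == points_new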
@@ -231,7 +229,7 @@ nMicrostructures = info['microstructures']
 targetVolFrac = np.bincount(targetGeomTable.microstructure_read(info['grid']))[1:nMicrostructures+1]/\
                 float(info['grid'].prod())
 target=[]
-for i in xrange(1,nMicrostructures+1):
+for i in range(1,nMicrostructures+1):
   targetHist,targetBins = np.histogram(targetVolFrac,bins=i) #bin boundaries
   target.append({'histogram':targetHist,'bins':targetBins})
 
@@ -260,7 +258,7 @@ info,devNull = initialGeomTable.head_getGeom()
 if info['microstructures'] != nMicrostructures: damask.util.croak('error. Microstructure count mismatch')
 
 initialData = np.bincount(initialGeomTable.microstructure_read(info['grid']))/points
-for i in xrange(nMicrostructures):
+for i in range(nMicrostructures):
   initialHist = np.histogram(initialData,bins=target[i]['bins'])[0]
   target[i]['error']=np.sqrt(np.square(np.array(target[i]['histogram']-initialHist)).sum())
 
@@ -269,7 +267,7 @@ if target[0]['error'] > 0.0:
 target[0]['error'] *=((target[0]['bins'][0]-np.min(initialData))**2.0+
                       (target[0]['bins'][1]-np.max(initialData))**2.0)**0.5
 match=0
-for i in xrange(nMicrostructures):
+for i in range(nMicrostructures):
   if target[i]['error'] > 0.0: break
   match = i+1
 
@@ -105,12 +105,12 @@ for name in filenames:
 grid = np.zeros(3,'i')
 
 n = 0
-for i in xrange(Nx):
-  for j in xrange(Ny):
+for i in range(Nx):
+  for j in range(Ny):
     grid[0] = round((i+0.5)*box[0]*info['grid'][0]/Nx-0.5)+offset[0]
     grid[1] = round((j+0.5)*box[1]*info['grid'][1]/Ny-0.5)+offset[1]
     damask.util.croak('x,y coord on surface: {},{}...'.format(*grid[:2]))
-    for k in xrange(Nz):
+    for k in range(Nz):
       grid[2] = k + offset[2]
       grid %= info['grid']
       seeds[n,0:3] = (0.5+grid)/info['grid'] # normalize coordinates to box
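The unchanged last line of this hunk maps integer cell indices to cell-center coordinates in the unit box: index k on an N-cell axis becomes (k+0.5)/N. A one-line check with an assumed grid:

    import numpy as np

    gridSize = np.array([4, 4, 2])     # assumed grid dimensions
    cell     = np.array([0, 3, 1])     # integer cell indices
    center   = (0.5 + cell)/gridSize   # [0.125, 0.875, 0.75], cell centers in [0,1)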
@@ -14,12 +14,12 @@ scriptID = ' '.join([scriptName,damask.version])
 # ------------------------------------------ aux functions ---------------------------------
 
 def kdtree_search(cloud, queryPoints):
-  """find distances to nearest neighbor among cloud (N,d) for each of the queryPoints (n,d)"""
+  """Find distances to nearest neighbor among cloud (N,d) for each of the queryPoints (n,d)"""
   n = queryPoints.shape[0]
   distances = np.zeros(n,dtype=float)
   tree = spatial.cKDTree(cloud)
 
-  for i in xrange(n):
+  for i in range(n):
     distances[i], index = tree.query(queryPoints[i])
 
   return distances
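As an aside, the per-point loop in `kdtree_search` is not strictly necessary: `scipy.spatial.cKDTree.query` also accepts an (n,d) array and returns the distances for all query points in one call. A vectorized equivalent, sketched with random data:

    import numpy as np
    from scipy import spatial

    cloud       = np.random.random((100, 3))    # (N,d) reference points
    queryPoints = np.random.random((10, 3))     # (n,d) query points

    tree = spatial.cKDTree(cloud)
    distances, indices = tree.query(queryPoints)   # one call replaces the Python loop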
@@ -227,8 +227,8 @@ for name in filenames:
 "randomSeed\t{}".format(options.randomSeed),
 ])
 table.labels_clear()
-table.labels_append( ['{dim}_{label}'.format(dim = 1+k,label = 'pos') for k in xrange(3)] +
-                     ['{dim}_{label}'.format(dim = 1+k,label = 'euler') for k in xrange(3)] +
+table.labels_append( ['{dim}_{label}'.format(dim = 1+k,label = 'pos') for k in range(3)] +
+                     ['{dim}_{label}'.format(dim = 1+k,label = 'euler') for k in range(3)] +
                      ['microstructure'] +
                      (['weight'] if options.weights else []))
 table.head_write()