further attempts to make it conform with best Python practice

This commit is contained in:
Martin Diehl 2016-03-01 20:44:43 +01:00
parent 6eb170bc07
commit e55d0ae3ca
13 changed files with 51 additions and 76 deletions

View File

@@ -56,8 +56,7 @@ def unravel(item):
# ++++++++++++++++++++++++++++++++++++++++++++++++++++
def vtk_writeASCII_mesh(mesh,data,res,sep):
# ++++++++++++++++++++++++++++++++++++++++++++++++++++
""" function writes data array defined on a hexahedral mesh (geometry) """
"""function writes data array defined on a hexahedral mesh (geometry)"""
info = {\
'tensor': {'name':'tensor','len':9},\
'vector': {'name':'vector','len':3},\
@@ -111,10 +110,9 @@ def vtk_writeASCII_mesh(mesh,data,res,sep):
return cmds
# +++++++++++++++++++++++++++++++++++++++++++++++++++
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def vtk_writeASCII_points(coordinates,data,res,sep):
# +++++++++++++++++++++++++++++++++++++++++++++++++++
""" function writes data array defined on a point field """
"""function writes data array defined on a point field"""
N = res[0]*res[1]*res[2]
cmds = [\
@@ -216,7 +214,7 @@ for filename in args:
content = file.readlines()
file.close()
m = re.search('(\d+)\s*head', content[0].lower())
-if m == None:
+if m is None:
continue
print filename,'\n'
sys.stdout.flush()
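The change from "== None" to "is None" seen here (and repeated throughout this commit) follows PEP 8: None is a singleton, so an identity test is the reliable check, whereas "==" dispatches to a possibly overridden __eq__. A minimal sketch (not part of the commit) of why the distinction matters:

class AlwaysEqual(object):          # hypothetical class with a permissive __eq__
    def __eq__(self, other):
        return True

m = AlwaysEqual()
print(m == None)                    # True  -- misleading answer from __eq__
print(m is None)                    # False -- correct identity test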

View File

@@ -19,13 +19,13 @@ Transform X,Y,Z,F APS BeamLine 34 coordinates to x,y,z APS strain coordinates.
""", version = scriptID)
-parser.add_option('-f','--frame', dest='frame', nargs=4, type='string', metavar='<string string string string>',
+parser.add_option('-f','--frame', dest='frame', nargs=4, type='string', metavar='string string string string',
help='APS X,Y,Z coords, and depth F')
parser.set_defaults(frame = None)
(options,filenames) = parser.parse_args()
-if options.frame == None:
+if options.frame is None:
parser.error('no data column specified...')
@@ -33,7 +33,7 @@ datainfo = {'len':3,
'label':[]
}
-if options.frame != None: datainfo['label'] += options.frame
+datainfo['label'] += options.frame
# --- loop over input files -------------------------------------------------------------------------
if filenames == []:
@@ -75,8 +75,8 @@ for name in filenames:
# ------------------------------------------ process data ------------------------------------------
theta=-0.75*np.pi
RotMat2TSL=np.array([[1., 0., 0.],
-[0., np.cos(theta), np.sin(theta)],
-[0., -np.sin(theta), np.cos(theta)]]) # Orientation Matrix to account for -135 degree rotation for TSL Convention[Adapted from Chen Zhang's code]
+[0., np.cos(theta), np.sin(theta)], # Orientation to account for -135 deg
+[0., -np.sin(theta), np.cos(theta)]]) # rotation for TSL convention
vec = np.zeros(4)
outputAlive = True
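The matrix whose comments are reflowed above is a plain rotation by theta = -135 degrees about the first (X) axis, mapping the APS beamline frame to the TSL convention. A standalone sketch of the same construction, inferred from the visible lines rather than taken verbatim from the file:

import numpy as np

theta = -0.75*np.pi                                   # -135 degrees
RotMat2TSL = np.array([[1.,  0.,             0.           ],
                       [0.,  np.cos(theta),  np.sin(theta)],
                       [0., -np.sin(theta),  np.cos(theta)]])   # rotation about the x axis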

View File

@@ -39,7 +39,7 @@ parser.add_option('-f','--formula',
(options,filenames) = parser.parse_args()
-if options.labels == None or options.formulas == None:
+if options.labels is None or options.formulas is None:
parser.error('no formulas and/or labels specified.')
if len(options.labels) != len(options.formulas):
parser.error('number of labels ({}) and formulas ({}) do not match.'.format(len(options.labels),len(options.formulas)))

View File

@@ -3,33 +3,14 @@
import os,string,h5py
import numpy as np
-from optparse import OptionParser, Option
-# -----------------------------
-class extendableOption(Option):
-# -----------------------------
-# used for definition of new option parser action 'extend', which enables to take multiple option arguments
-# taken from online tutorial http://docs.python.org/library/optparse.html
-ACTIONS = Option.ACTIONS + ("extend",)
-STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
-TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
-ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
-def take_action(self, action, dest, opt, value, values, parser):
-if action == "extend":
-lvalue = value.split(",")
-values.ensure_value(dest, []).extend(lvalue)
-else:
-Option.take_action(self, action, dest, opt, value, values, parser)
+from optparse import OptionParser
+import damask
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
-parser = OptionParser(option_class=extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Add column(s) containing Cauchy stress based on given column(s) of
deformation gradient and first Piola--Kirchhoff stress.
@@ -49,7 +30,7 @@ parser.set_defaults(output = 'crystallite')
(options,filenames) = parser.parse_args()
-if options.defgrad == None or options.stress == None or options.output == None:
+if options.defgrad is None or options.stress is None or options.output is None:
parser.error('missing data column...')
@@ -78,6 +59,3 @@ for myFile in files:
cauchy[p,...] = 1.0/np.linalg.det(defgrad[p,...])*np.dot(stress[p,...],defgrad[p,...].T) # [Cauchy] = (1/det(F)) * [P].[F_transpose]
cauchyFile = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance].create_dataset('cauchy', data=cauchy)
cauchyFile.attrs['units'] = 'Pa'
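The comment in the loop above spells out the conversion used here: the Cauchy stress is sigma = det(F)^-1 * P . F^T, with P the first Piola-Kirchhoff stress and F the deformation gradient. A minimal per-point sketch of that formula (the function name is illustrative, not from the script):

import numpy as np

def cauchy(P, F):
    """Cauchy stress from first Piola-Kirchhoff stress P and deformation gradient F (3x3 arrays)."""
    return np.dot(P, F.T) / np.linalg.det(F)          # sigma = 1/det(F) * P . F^T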

View File

@@ -81,7 +81,6 @@ for name in filenames:
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
if options.shape: table.labels_append('shapeMismatch({})'.format(options.defgrad))
if options.volume: table.labels_append('volMismatch({})'.format(options.defgrad))
-#table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
@@ -92,7 +91,7 @@ for name in filenames:
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
-size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
+size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
N = grid.prod()
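The spacing fallback whose comment is reworded above handles degenerate (effectively 2D) grids: a direction with only one point would otherwise get zero size, so it is assigned the smallest spacing found among the resolved directions. A toy illustration with made-up numbers:

import numpy as np

grid = np.array([16, 16, 1], 'i')                     # e.g. a single layer of cells along z
size = np.array([1.0, 1.0, 0.0])                      # edge-to-edge size collapses to zero along z
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
print(size)                                           # [1. 1. 0.0625]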

View File

@@ -92,7 +92,7 @@ parser.set_defaults(coords = 'ipinitialcoord',
(options,filenames) = parser.parse_args()
-if options.vector == None and options.tensor == None:
+if options.vector is None and options.tensor is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
@@ -161,9 +161,9 @@ for name in filenames:
stack = [table.data]
for type, data in items.iteritems():
for i,label in enumerate(data['active']):
-stack.append(curlFFT(size[::-1], # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
-table.data[:,data['column'][i]:data['column'][i]+data['dim']].\
-reshape([grid[2],grid[1],grid[0]]+data['shape'])))
+stack.append(curlFFT(size[::-1], # we need to reverse order here, because x
+table.data[:,data['column'][i]:data['column'][i]+data['dim']]. # is fastest,ie rightmost, but leftmost in
+reshape([grid[2],grid[1],grid[0]]+data['shape']))) # our x,y,z notation
# ------------------------------------------ output result -----------------------------------------
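The reshape in the hunk above relies on the table's storage order: x is the fastest-varying (rightmost) index in the file, so reading the flat column block into an array requires the shape (grid_z, grid_y, grid_x), and the size vector is reversed to match. A small sketch with made-up dimensions:

import numpy as np

grid = np.array([4, 3, 2], 'i')                       # points along x, y, z
flat = np.arange(grid.prod()*3).reshape(-1, 3)        # table block: one 3-vector per point, x fastest
field = flat.reshape([grid[2], grid[1], grid[0], 3])  # -> array indexed as (z, y, x, component)
print(field.shape)                                    # (2, 3, 4, 3)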

View File

@@ -11,7 +11,7 @@ scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
def deformedCoordsFFT(F,undeformed=False):
#--------------------------------------------------------------------------------------------------
wgt = 1.0/grid.prod()
integrator = np.array([0.+1.j,0.+1.j,0.+1.j],'c16') * size/ 2.0 / math.pi
step = size/grid
@@ -127,7 +127,7 @@ for name in filenames:
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
-size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
+size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
N = grid.prod()

View File

@@ -32,7 +32,7 @@ parser.add_option('-t','--tensor',
(options,filenames) = parser.parse_args()
-if options.tensor == None:
+if options.tensor is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------

View File

@@ -10,7 +10,7 @@ scriptID = ' '.join([scriptName,damask.version])
oneThird = 1.0/3.0
-def deviator(m,spherical = False): # Carefull, do not change the value of m (its intent(inout)!)
+def deviator(m,spherical = False): # Careful, do not change the value of m, its intent(inout)!
sph = oneThird*(m[0]+m[4]+m[8])
dev = [
m[0]-sph, m[1], m[2],
@@ -39,7 +39,7 @@ parser.add_option('-s','--spherical',
(options,filenames) = parser.parse_args()
-if options.tensor == None:
+if options.tensor is None:
parser.error('no data column specified...')
# --- loop over input files -------------------------------------------------------------------------
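The deviator function touched above splits a 3x3 tensor, stored as a flat list of nine components, into its spherical part sph = tr(m)/3 and the traceless remainder. A self-contained sketch of that decomposition based on the visible lines; the remaining entries and the return handling are assumed, not copied from the file:

oneThird = 1.0/3.0

def deviator(m, spherical=False):                     # sketch; return convention is an assumption
    sph = oneThird*(m[0]+m[4]+m[8])                   # spherical (hydrostatic) part = tr(m)/3
    dev = [m[0]-sph, m[1],     m[2],
           m[3],     m[4]-sph, m[5],
           m[6],     m[7],     m[8]-sph]              # subtract sph from the diagonal entries
    return (dev, sph) if spherical else dev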

View File

@@ -77,7 +77,7 @@ parser.set_defaults(coords = 'ipinitialcoord',
(options,filenames) = parser.parse_args()
-if options.vector == None and options.tensor == None:
+if options.vector is None and options.tensor is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
@@ -140,16 +140,16 @@ for name in filenames:
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
-size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
+size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
for type, data in items.iteritems():
for i,label in enumerate(data['active']):
-stack.append(divFFT(size[::-1], # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
-table.data[:,data['column'][i]:data['column'][i]+data['dim']].\
-reshape([grid[2],grid[1],grid[0]]+data['shape'])))
+stack.append(divFFT(size[::-1], # we need to reverse order here, because x
+table.data[:,data['column'][i]:data['column'][i]+data['dim']]. # is fastest,ie rightmost, but leftmost in
+reshape([grid[2],grid[1],grid[0]]+data['shape']))) # our x,y,z notation
# ------------------------------------------ output result -----------------------------------------

View File

@@ -48,7 +48,7 @@ parser.set_defaults(hkl = (1,1,1),
(options,filenames) = parser.parse_args()
-if options.stiffness == None:
+if options.stiffness is None:
parser.error('no data column specified...')
# --- loop over input files -------------------------------------------------------------------------

View File

@@ -105,7 +105,7 @@ parser.set_defaults(scale = 1.0)
(options,filenames) = parser.parse_args()
-if options.type == None:
+if options.type is None:
parser.error('no feature type selected.')
if not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['names'],features))))):
parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
@@ -175,7 +175,7 @@ for name in filenames:
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d') # size from bounding box, corrected for cell-centeredness
-size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
+size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
# ------------------------------------------ process value field -----------------------------------
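The feature-type check earlier in this script flattens each feature's alias list with itertools.chain and requires the requested types to be a subset of the known names. A stripped-down sketch of that pattern, with invented feature names for illustration:

import itertools

features = [{'names': ['boundary', 'biplane']},       # invented aliases for illustration
            {'names': ['tripleline']},
            {'names': ['quadruplepoint']}]

requested = ['boundary', 'tripleline']
known = set(itertools.chain(*map(lambda x: x['names'], features)))
print(set(requested).issubset(known))                 # True -> accepted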

View File

@@ -68,15 +68,15 @@ parser.set_defaults(symmetry = 'cubic',
(options, filenames) = parser.parse_args()
-if options.radius == None:
+if options.radius is None:
parser.error('no radius specified.')
-input = [options.eulers != None,
-options.a != None and \
-options.b != None and \
-options.c != None,
-options.matrix != None,
-options.quaternion != None,
+input = [options.eulers is not None,
+options.a is not None and \
+options.b is not None and \
+options.c is not None,
+options.matrix is not None,
+options.quaternion is not None,
]
if np.sum(input) != 1: parser.error('needs exactly one input format.')
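The final check counts how many of the mutually exclusive orientation inputs were supplied: summing a list of booleans with np.sum counts the True entries, so exactly one representation must be active. A tiny illustration (values made up):

import numpy as np

input = [True, False, False, False]                   # e.g. only Euler angles given
print(np.sum(input))                                  # 1 -> accepted
print(np.sum([True, False, True, False]))             # 2 -> triggers the parser.error above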