#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

# This script is used for the post-processing of the results achieved by the spectral method.
# As it reads in the data coming from "materialpoint_results", it can be adapted to the data
# computed using the FEM solvers. It is capable of handling elements with one IP in a regular order.
import os,sys,threading,re,numpy,time,string,fnmatch
|
|
|
|
import damask
|
2011-02-01 16:18:44 +05:30
|
|
|
from optparse import OptionParser, OptionGroup, Option, SUPPRESS_HELP
|
|
|
|
|
|
|
|
# -----------------------------
class extendedOption(Option):
# -----------------------------
  """optparse Option subclass providing an additional 'extend' action.

  'extend' accepts a comma-separated argument, splits it, and accumulates
  the pieces into the destination list across repeated occurrences of the
  option. Adapted from http://docs.python.org/library/optparse.html
  """

  ACTIONS = Option.ACTIONS + ("extend",)
  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

  def take_action(self, action, dest, opt, value, values, parser):
    # anything other than our custom action is handled by the base class
    if action != "extend":
      Option.take_action(self, action, dest, opt, value, values, parser)
      return
    # split the comma-separated argument and append to the (possibly
    # not-yet-existing) destination list
    values.ensure_value(dest, []).extend(value.split(","))
|
|
|
|
|
|
|
|
|
|
|
|
# -----------------------------
class backgroundMessage(threading.Thread):
# -----------------------------
  """Background thread that keeps a rotating spinner plus a status message
  updated on stderr while the main thread does the actual work."""

  def __init__(self):
    threading.Thread.__init__(self)
    self.message = ''          # message currently displayed on stderr
    self.new_message = ''      # message to display at the next redraw
    self.counter = 0           # index into self.symbols (spinner phase)
    self.symbols = ['- ', '\ ', '| ', '/ ']   # spinner glyphs, cycled in order
    self.waittime = 0.5        # seconds between spinner updates

  def __quit__(self):
    # wipe the spinner and the last message from the terminal:
    # backspace over it, overwrite with blanks, backspace again
    length = len(self.message) + len(self.symbols[self.counter])
    sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length)
    sys.stderr.write('')

  def run(self):
    # Spin until the main thread (threading.enumerate()[0]) has stopped.
    # NOTE(review): this relies on the CPython-private Python-2 attribute
    # _Thread__stopped of the main thread object; it breaks on Python 3.
    while not threading.enumerate()[0]._Thread__stopped:
      time.sleep(self.waittime)
      self.update_message()
    self.__quit__()

  def set_message(self, new_message):
    # swap in a new status text and redraw immediately
    self.new_message = new_message
    self.print_message()

  def print_message(self):
    length = len(self.message) + len(self.symbols[self.counter])
    sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length) # delete former message
    sys.stderr.write(self.symbols[self.counter] + self.new_message) # print new message
    self.message = self.new_message

  def update_message(self):
    # advance the spinner one phase and redraw
    self.counter = (self.counter + 1)%len(self.symbols)
    self.print_message()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def outStdout(cmd,locals):
  """Emit a single command to standard output.

  A command prefixed '(!)' is executed as a Python statement, one prefixed
  '(?)' is evaluated and its result printed, anything else is printed
  verbatim. NOTE: uses exec/eval on the command string -- trusted input only.
  """
  prefix = cmd[0:3]
  if prefix == '(!)':
    exec(cmd[3:])
  elif prefix == '(?)':
    print(eval(cmd[3:]))
  else:
    print(cmd)
  return
|
|
|
|
|
|
|
|
def outFile(cmd,locals):
  """Emit a single command to the file handle stored in locals['filepointer'].

  A command prefixed '(!)' is executed as a Python statement (nothing is
  written), one prefixed '(?)' is evaluated first and its result written,
  anything else is written verbatim, each followed by a newline.
  NOTE: uses exec/eval on the command string -- trusted input only.
  """
  if cmd.startswith('(!)'):
    exec(cmd[3:])
    return
  if cmd.startswith('(?)'):
    cmd = eval(cmd[3:])
  locals['filepointer'].write(cmd + '\n')
  return
|
|
|
|
|
|
|
|
|
|
|
|
def output(cmds,locals,dest):
  """Recursively walk a (possibly nested) list of commands and hand each
  leaf to the writer selected by dest ('File' -> outFile,
  'Stdout' -> outStdout); any other dest raises KeyError."""
  writers = {'File':   outFile,
             'Stdout': outStdout}
  for cmd in cmds:
    if isinstance(cmd, list):
      output(cmd, locals, dest)   # descend into nested command list
    else:
      writers[dest](str(cmd), locals)
  return
|
|
|
|
|
|
|
|
|
def transliterateToFloat(x):
  """Convert x to float; return 0.0 for anything that cannot be converted.

  Used when parsing whitespace-separated table cells, so non-numeric
  entries silently map to 0.0. The bare 'except:' of the original also
  swallowed SystemExit/KeyboardInterrupt; only conversion errors are
  caught now.
  """
  try:
    return float(x)
  except (TypeError, ValueError):
    return 0.0
|
|
|
|
|
def unravel(item):
  """Flatten an arbitrarily nested container into a space-joined string.

  Non-container leaves are rendered with str(). Strings are treated as
  atomic leaves: the original recursed into them via __contains__, and
  since iterating a one-character string yields itself, any string input
  caused infinite recursion.
  """
  if isinstance(item, str): return item                               # strings are leaves, not containers
  if hasattr(item,'__contains__'): return ' '.join(map(unravel,item))
  else: return str(item)
|
|
|
|
|
|
|
|
|
2011-02-01 16:18:44 +05:30
|
|
|
# ++++++++++++++++++++++++++++++++++++++++++++++++++++
def vtk_writeASCII_mesh(mesh,data,res,sep):
# ++++++++++++++++++++++++++++++++++++++++++++++++++++
  """Assemble the ASCII legacy-VTK command list for a hexahedral mesh.

  mesh : nodal coordinates, indexed [component, x, y, z]
  data : {datatype: {'_order_': [labels...], label: array, ...}}
  res  : cell counts per direction
  sep  : separator joining the per-cell data values
  Returns a nested list of strings suitable for output().
  """
  # map datatype -> VTK attribute keyword and number of components per cell
  info = {
    'tensor':    {'name':'tensor','len':9},
    'vector':    {'name':'vector','len':3},
    'scalar':    {'name':'scalar','len':1},
    'double':    {'name':'scalar','len':2},
    'triple':    {'name':'scalar','len':3},
    'quadruple': {'name':'scalar','len':4},
  }
  nNodes = (res[0]+1)*(res[1]+1)*(res[2]+1)   # node count of the dual grid
  nCells = res[0]*res[1]*res[2]               # hexahedral cell count

  cmds = [
    '# vtk DataFile Version 3.1',
    'powered by $Id$'.replace('\n','\\n'),
    'ASCII',
    'DATASET UNSTRUCTURED_GRID',
    'POINTS %i double'%nNodes,
    [[['\t'.join(map(str,mesh[:,a,b,c])) for a in range(res[0]+1)]
                                         for b in range(res[1]+1)]
                                         for c in range(res[2]+1)],
    'CELLS %i %i'%(nCells,nCells*9),
  ]

  # connectivity: one VTK_HEXAHEDRON (8 nodes) per cell
  row   = res[0]+1              # nodes per x row
  layer = (res[1]+1)*row        # nodes per x-y plane
  for z in range(res[2]):
    for y in range(res[1]):
      for x in range(res[0]):
        corner = z*layer + y*row + x      # lowest-index node of this cell
        cmds.append('8 '+'\t'.join(map(str,[
          corner,
          corner+1,
          corner+row+1,
          corner+row,
          corner+layer,
          corner+layer+1,
          corner+layer+row+1,
          corner+layer+row,
        ])))

  cmds += [
    'CELL_TYPES %i'%nCells,
    ['12']*nCells,              # 12 == VTK_HEXAHEDRON
    'CELL_DATA %i'%nCells,
  ]

  # one attribute section per requested label, in recorded order
  for datatype in data:
    plural  = '' if datatype.lower().endswith('s') else 'S'
    vtkName = info[datatype]['name']
    for label in data[datatype]['_order_']:
      cmds += [
        '%s %s double %i'%(vtkName.upper()+plural,label,info[datatype]['len']),
        'LOOKUP_TABLE default' if vtkName[:3] == 'sca' else '',
        [[[sep.join(map(unravel,data[datatype][label][:,b,c]))] for b in range(res[1])]
                                                                for c in range(res[2])],
      ]

  return cmds
|
|
|
|
|
|
|
|
# ++++++++++++++++++++++++++++++++++++++++++++++++++++
def gmsh_writeASCII_mesh(mesh,data,res,sep):
# ++++++++++++++++++++++++++++++++++++++++++++++++++++
  """ function writes data array defined on a hexahedral mesh (geometry) """
  # NOTE(review): this writer looks like an unfinished port of
  # vtk_writeASCII_mesh to the gmsh MSH 2.1 format and contains several
  # blocking defects (flagged below); it cannot run as written.

  # map datatype -> attribute name and number of components per cell
  info = {\
           'tensor': {'name':'tensor','len':9},\
           'vector': {'name':'vector','len':3},\
           'scalar': {'name':'scalar','len':1},\
           'double': {'name':'scalar','len':2},\
           'triple': {'name':'scalar','len':3},\
           'quadruple': {'name':'scalar','len':4},\
         }
  N1 = (res[0]+1)*(res[1]+1)*(res[2]+1)     # number of nodes
  N = res[0]*res[1]*res[2]                  # number of hexahedral cells

  cmds = [\
          '$MeshFormat',
          '2.1 0 8',
          '$EndMeshFormat',
          '$Nodes',
          '%i float'%N1,
          # NOTE(review): map(str,l,mesh[:,i,j,k]) passes TWO sequence args to
          # map with a one-argument function and raises TypeError; presumably
          # the intent was node id followed by coordinates, e.g.
          # '\t'.join(map(str,[l]+list(mesh[:,i,j,k]))) -- confirm before fixing.
          [[['\t'.join(map(str,l,mesh[:,i,j,k])) for l in range(1,N1+1) for i in range(res[0]+1)] for j in range(res[1]+1)] for k in range(res[2]+1)],
          '$EndNodes',
          '$Elements',
          '%i'%N,
          ]

  # cells
  n_elem = 0
  for z in range (res[2]):
    for y in range (res[1]):
      for x in range (res[0]):
        base = z*(res[1]+1)*(res[0]+1)+y*(res[0]+1)+x   # lowest-index node of this cell
        n_elem +=1
        # element line: id, type 5 (8-node hexahedron), then the 8 node ids
        cmds.append('\t'.join(map(str,[ \
                    n_elem,
                    '5',
                    base,
                    base+1,
                    base+res[0]+2,
                    base+res[0]+1,
                    base+(res[1]+1)*(res[0]+1),
                    base+(res[1]+1)*(res[0]+1)+1,
                    base+(res[1]+1)*(res[0]+1)+res[0]+2,
                    base+(res[1]+1)*(res[0]+1)+res[0]+1,
                    ])))

  # NOTE(review): 'item' is undefined at this point (it only exists inside the
  # data loop below), so building this header raises NameError; the block
  # presumably belongs inside the per-item loop. Also note the missing
  # '$EndElements' before this section.
  cmds += [\
           'ElementData',
           '1',
           '%s'%item, # name of the view
           '0.0', # thats the time value
           '3',
           '0', # time step
           '1',
           '%i'%N
           ]

  for type in data:
    plural = {True:'',False:'S'}[type.lower().endswith('s')]
    for item in data[type]['_order_']:
      cmds += [\
               # NOTE(review): these two header lines are VTK syntax, not gmsh;
               # apparently leftover from the VTK writer this was copied from.
               '%s %s float %i'%(info[type]['name'].upper()+plural,item,info[type]['len']),
               'LOOKUP_TABLE default',
               [[[sep.join(map(str,data[type][item][:,j,k]))] for j in range(res[1])] for k in range(res[2])],
               ]

  return cmds
|
|
|
|
|
|
|
|
# +++++++++++++++++++++++++++++++++++++++++++++++++++
def vtk_writeASCII_points(coordinates,data,res,sep):
# +++++++++++++++++++++++++++++++++++++++++++++++++++
  """Assemble the ASCII legacy-VTK command list for a point field.

  coordinates : point positions, indexed [x, y, z, component]
  data        : {datatype: {'_order_': [labels...], label: array, ...}}
  res         : point counts per direction
  sep         : separator joining the per-point data values
  Returns a nested list of strings suitable for output().

  Fix: iterate data[type]['_order_'] (as vtk_writeASCII_mesh does) instead of
  every key of data[type] -- the old loop also visited the '_order_'
  bookkeeping entry itself, whose value is a plain list, and crashed on the
  [:,j,k] indexing below.
  """
  N = res[0]*res[1]*res[2]    # total number of points

  cmds = [\
          '# vtk DataFile Version 3.1',
          'powered by $Id$',
          'ASCII',
          'DATASET UNSTRUCTURED_GRID',
          'POINTS %i float'%N,
          [[['\t'.join(map(str,coordinates[i,j,k])) for i in range(res[0])] for j in range(res[1])] for k in range(res[2])],
          'CELLS %i %i'%(N,N*2),
          ['1\t%i'%i for i in range(N)],    # one single-point cell per point
          'CELL_TYPES %i'%N,
          ['1']*N,                          # 1 == VTK_VERTEX
          'POINT_DATA %i'%N,
         ]

  # one attribute section per requested label, in recorded order
  for type in data:
    plural = {True:'',False:'S'}[type.lower().endswith('s')]
    for item in data[type]['_order_']:
      cmds += [\
               '%s %s float'%(type.upper()+plural,item),
               {True:'LOOKUP_TABLE default',False:''}[type.lower()[:3]=='sca'],
               [[[sep.join(map(unravel,data[type][item][:,j,k]))] for j in range(res[1])] for k in range(res[2])],
              ]

  return cmds
|
|
|
|
|
|
|
|
# +++++++++++++++++++++++++++++++++++++++++++++++++++++
def vtk_writeASCII_box(diag,defgrad):
# +++++++++++++++++++++++++++++++++++++++++++++++++++++
  """Assemble the ASCII legacy-VTK command list for the deformed corner box.

  diag    : box diagonal, i.e. the undeformed edge lengths per direction
  defgrad : average deformation gradient (3x3) applied to the 8 corners
  Returns a nested list of strings suitable for output().

  Fix: the corner mapping used the undefined name 'defgrad_av' (NameError);
  it now uses the 'defgrad' parameter actually passed by the caller.
  """
  # the 8 corners of the undeformed box
  points = numpy.array([\
                        [0.0,0.0,0.0,],\
                        [diag[0],0.0,0.0,],\
                        [diag[0],diag[1],0.0,],\
                        [0.0,diag[1],0.0,],\
                        [0.0,0.0,diag[2],],\
                        [diag[0],0.0,diag[2],],\
                        [diag[0],diag[1],diag[2],],\
                        [0.0,diag[1],diag[2],],\
                       ])

  cmds = [\
          '# vtk DataFile Version 3.1',
          'powered by $Id$',
          'ASCII',
          'DATASET UNSTRUCTURED_GRID',
          'POINTS 8 float',
          # map each corner through the average deformation gradient
          ['\t'.join(map(str,numpy.dot(defgrad,points[p]))) for p in range(8)],
          'CELLS 8 16',
          ['1\t%i'%i for i in range(8)],    # one single-point cell per corner
          'CELL_TYPES 8',
          ['1']*8,                          # 1 == VTK_VERTEX
         ]

  return cmds
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# ----------------------- MAIN -------------------------------

# Build the command-line interface.
parser = OptionParser(option_class=extendedOption, usage='%prog [options] datafile[s]', description = """
Produce VTK file from data field. Coordinates are taken from (consecutive) x, y, and z columns.
""" + string.replace('$Id$','\n','\\n')
)

parser.add_option('-s', '--scalar', action='extend', dest='scalar', type='string', \
                  help='list of scalars to visualize')
parser.add_option(      '--double', action='extend', dest='double', type='string', \
                  help='list of scalars to visualize')
parser.add_option(      '--triple', action='extend', dest='triple', type='string', \
                  help='list of scalars to visualize')
parser.add_option(      '--quadruple', action='extend', dest='quadruple', type='string', \
                  help='list of scalars to visualize')
parser.add_option('-v', '--vector', action='extend', dest='vector', type='string', \
                  help='list of vectors to visualize')
parser.add_option('-d', '--deformation', dest='defgrad', type='string', \
                  help='heading of deformation gradient columns [%default]')
parser.add_option('--reference', dest='undeformed', action='store_true',\
                  help='map results to reference (undeformed) configuration')
parser.add_option('-c','--cell', dest='cell', action='store_true',\
                  help='data is cell-centered')
parser.add_option('-p','--vertex', dest='cell', action='store_false',\
                  help='data is vertex-centered')
parser.add_option('--mesh', dest='output_mesh', action='store_true', \
                  help='produce VTK mesh file [%default]')
parser.add_option('--nomesh', dest='output_mesh', action='store_false', \
                  help='omit VTK mesh file')
parser.add_option('--points', dest='output_points', action='store_true', \
                  help='produce VTK points file [%default]')
parser.add_option('--nopoints', dest='output_points', action='store_false', \
                  help='omit VTK points file')
parser.add_option('--box', dest='output_box', action='store_true', \
                  help='produce VTK box file [%default]')
parser.add_option('--nobox', dest='output_box', action='store_false', \
                  help='omit VTK box file')
parser.add_option('--separator', dest='separator', type='string', \
                  help='data separator [t(ab), n(ewline), s(pace)]')
parser.add_option('--scaling', dest='scaling', action='extend', type='string', \
                  help='scaling of fluctuation')
parser.add_option('-u', '--unitlength', dest='unitlength', type='float', \
                  help='set unit length for 2D model [%default]')
parser.add_option('--filenodalcoords', dest='filenodalcoords', type='string', \
                  help='ASCII table containing nodal coords')
# NOTE(review): dest is 'nodalcoords' but the default below and all later code
# read options.labelnodalcoords -- a user-supplied --labelnodalcoords is ignored.
parser.add_option('--labelnodalcoords', dest='nodalcoords', type='string', nargs=3, \
                  help='labels of nodal coords in ASCII table')
parser.add_option('-l', '--linear', dest='linearreconstruction', action='store_true',\
                  help='use linear reconstruction of geometry [%default]')

parser.set_defaults(defgrad = 'f')
parser.set_defaults(separator = 't')
parser.set_defaults(scalar = [])
parser.set_defaults(double = [])
parser.set_defaults(triple = [])
parser.set_defaults(quadruple = [])
parser.set_defaults(vector = [])
# NOTE(review): a 'tensor' default is set although no --tensor option is
# registered above; the per-file loop nevertheless reads options.tensor.
parser.set_defaults(tensor = [])
parser.set_defaults(output_mesh = True)
parser.set_defaults(output_points = False)
parser.set_defaults(output_box = False)
parser.set_defaults(scaling = [])
parser.set_defaults(undeformed = False)
parser.set_defaults(unitlength = 0.0)
parser.set_defaults(cell = True)
parser.set_defaults(filenodalcoords = '')
parser.set_defaults(labelnodalcoords = ['coord.x','coord.y','coord.z'])
parser.set_defaults(linearreconstruction = False)

# map the --separator shorthand letters to the actual separator character
sep = {'n': '\n', 't': '\t', 's': ' '}

(options, args) = parser.parse_args()

# pad --scaling with 1.0 up to three components (one per spatial direction)
options.scaling += [1.0 for i in xrange(max(0,3-len(options.scaling)))]
options.scaling = map(float, options.scaling)

# NOTE(review): options.scaling is a plain list here, so 'options.scaling != 1.0'
# is a single boolean (always True), not an elementwise comparison -- these
# warnings fire whenever the respective option is set, regardless of the values.
if numpy.any(options.scaling != 1.0) and options.linearreconstruction: print 'cannot scale for linear reconstruction'
if numpy.any(options.scaling != 1.0) and options.filenodalcoords != '': print 'cannot scale when reading coordinate from file'
options.separator = options.separator.lower()
|
2011-02-01 16:18:44 +05:30
|
|
|
# Process each data file: locate the requested columns, reconstruct the
# (deformed) geometry, gather the data fields and write the VTK output files.
for filename in args:
  if not os.path.exists(filename):
    continue
  file = open(filename)
  content = file.readlines()
  file.close()
  # the first line announces the number of header lines, e.g. "5 head"
  m = re.search('(\d+)\shead',content[0],re.I)
  if m == None:
    continue
  print filename,'\n'
  sys.stdout.flush()

  headrow = int(m.group(1))
  headings = content[headrow].split()    # column labels of the data table
  column = {}                            # datatype -> {label: first column index}
  matches = {}                           # datatype -> {requested pattern: [matching labels]}
  maxcol = 0                             # rightmost column any request needs

  # locate the (consecutive) coordinate columns
  # NOTE(review): 'locol' is never initialized; if the coordinate label is
  # absent the test below raises NameError instead of printing the message.
  # Initializing locol = -1 before this loop looks intended.
  for col,head in enumerate(headings):
    if head == {True:'ip.x',False:'node.x'}[options.cell]:
      locol = col
      maxcol = max(maxcol,col+3)
      break

  if locol < 0:
    print 'missing coordinates..!'
    continue

  # locate tensor columns (deformation gradient plus any requested tensors)
  column['tensor'] = {}
  matches['tensor'] = {}
  for label in [options.defgrad] + options.tensor:
    column['tensor'][label] = -1
    for col,head in enumerate(headings):
      if head == label or head == '1_'+label:    # label itself or its first component
        column['tensor'][label] = col
        maxcol = max(maxcol,col+9)
        matches['tensor'][label] = [label]
        break

  if not options.undeformed and column['tensor'][options.defgrad] < 0:
    print 'missing deformation gradient "%s"..!'%options.defgrad
    continue

  # locate vector columns
  column['vector'] = {}
  # NOTE(review): this should almost certainly be matches['vector'] = {} --
  # as written it clobbers the tensor matches built above, and the
  # matches['vector'][label] assignment below raises KeyError.
  matches['tensor'] = {}
  for label in options.vector:
    column['vector'][label] = -1
    for col,head in enumerate(headings):
      if head == label or head == '1_'+label:
        column['vector'][label] = col
        maxcol = max(maxcol,col+3)
        matches['vector'][label] = [label]
        break

  # locate scalar-like columns; requests may be fnmatch shell patterns
  for length,what in enumerate(['scalar','double','triple','quadruple']):
    column[what] = {}
    labels = eval("options.%s"%what)    # NOTE(review): getattr(options, what) would avoid eval
    matches[what] = {}
    for col,head in enumerate(headings):
      for needle in labels:
        if fnmatch.fnmatch(head,needle):
          column[what][head] = col
          maxcol = max(maxcol,col+1+length)
          if needle not in matches[what]:
            matches[what][needle] = [head]
          else:
            matches[what][needle] += [head]

  # read the numeric data, sorted by coordinates
  values = numpy.array(sorted([map(transliterateToFloat,line.split()[:maxcol]) for line in content[headrow+1:]],
                              key=lambda x:(x[locol+0],x[locol+1],x[locol+2])),'d') # sort with z as fastest and x as slowest index

  N = len(values)

  # collect the distinct coordinate values per direction to deduce the grid
  grid = [{},{},{}]
  for j in xrange(3):
    for i in xrange(N):
      grid[j][str(values[i,locol+j])] = True

  res = numpy.array([len(grid[0]),\
                     len(grid[1]),\
                     len(grid[2]),],'i')

  # physical dimension per direction, extrapolated from the coordinate range
  dim = numpy.ones(3)

  for i,r in enumerate(res):
    if r > 1:
      dim[i] = (max(map(float,grid[i].keys()))-min(map(float,grid[i].keys())))*r/(r-1.0)
  if res[2]==1: # for 2D case set undefined dimension to given unitlength or alternatively give it the length of the smallest element
    if options.unitlength == 0.0:
      dim[2] = min(dim/res)
    else:
      dim[2] = options.unitlength

  # average deformation gradient (identity when mapping to reference config)
  if options.undeformed:
    Favg = numpy.eye(3)
  else:
    Favg = damask.core.math.tensorAvg(
                     numpy.reshape(numpy.transpose(values[:,column['tensor'][options.defgrad]:
                                                           column['tensor'][options.defgrad]+9]),
                                                   (3,3,res[0],res[1],res[2])))
  if not options.filenodalcoords:
    # reconstruct nodal positions from the per-cell deformation gradients
    F = numpy.reshape(numpy.transpose(values[:,column['tensor'][options.defgrad]:
                                              column['tensor'][options.defgrad]+9]),
                      (3,3,res[0],res[1],res[2]))
    if options.linearreconstruction:
      centroids = damask.core.mesh.deformedCoordsLinear(dim,F,Favg)
    else:
      centroids = damask.core.mesh.deformedCoordsFFT(dim,F,Favg,options.scaling)
    mesh = damask.core.mesh.nodesAroundCentres(dim,Favg,centroids)

  else:
    # read nodal positions from the given ASCII table instead
    mesh = numpy.zeros(((res[0]+1)*(res[1]+1)*(res[2]+1),3),'d')
    filenodalcoords = open(options.filenodalcoords)
    tablenodalcoords = damask.ASCIItable(filenodalcoords)
    tablenodalcoords.head_read()
    columns = [tablenodalcoords.labels.index(options.labelnodalcoords[0]),
               tablenodalcoords.labels.index(options.labelnodalcoords[1]),
               tablenodalcoords.labels.index(options.labelnodalcoords[2])]
    i = 0
    while tablenodalcoords.data_read():                        # read next data line of ASCII table
      # NOTE(review): 'column' is a dict, so column[:] raises TypeError;
      # presumably meant to pick the three entries of 'columns' built above,
      # e.g. mesh[i,:] = [float(tablenodalcoords.data[c]) for c in columns]
      mesh[i,:]=float(tablenodalcoords.data[column[:]])
      i += 1

  mesh=mesh.reshape(res[0]+1,res[1]+1,res[2]+1,3)

  # containers for the data fields, plus per-datatype array shape and width
  fields = {\
             'tensor': {},\
             'vector': {},\
             'scalar': {},\
             'double': {},\
             'triple': {},\
             'quadruple': {},\
           }
  reshape = {\
             'tensor': [3,3],\
             'vector': [3],\
             'scalar': [],\
             'double': [2],\
             'triple': [3],\
             'quadruple': [4],\
            }
  length = {\
             'tensor': 9,\
             'vector': 3,\
             'scalar': 1,\
             'double': 2,\
             'triple': 3,\
             'quadruple': 4,\
           }

  # slice the requested columns out of 'values' into per-label grid arrays,
  # remembering the request order in '_order_'
  for datatype in fields.keys():
    print '\n%s:'%datatype,
    fields[datatype]['_order_'] = []
    for what in eval('options.'+datatype):
      for label in matches[datatype][what]:
        col = column[datatype][label]
        if col != -1:
          print label,
          fields[datatype][label] = numpy.reshape(values[:,col:col+length[datatype]],[res[0],res[1],res[2]]+reshape[datatype])
          fields[datatype]['_order_'] += [label]
  print '\n'

  # assemble and write the requested output files next to the input file
  out = {}
  if options.output_mesh:   out['mesh']   = vtk_writeASCII_mesh(mesh,fields,res,sep[options.separator])
  if options.output_points: out['points'] = vtk_writeASCII_points(centroids,fields,res,sep[options.separator])
  if options.output_box:    out['box']    = vtk_writeASCII_box(dim,Favg)

  for what in out.keys():
    print what
    (head,tail) = os.path.split(filename)
    vtk = open(os.path.join(head,what+'_'+os.path.splitext(tail)[0]+'.vtk'), 'w')
    output(out[what],{'filepointer':vtk},'File')
    vtk.close()
  print
|
2011-08-19 13:03:22 +05:30
|
|
|
|