commit e0f8699605
remove plastic_j2
@@ -6,3 +6,4 @@
 # Denote all files that are truly binary and should not be modified.
 *.png binary
 *.jpg binary
+*.cae binary
@@ -1,19 +0,0 @@
-:: sets up an environment for DAMASK on Windows
-:: usage: call DAMASK_env.bat
-@echo off
-set LOCATION=%~dp0
-set DAMASK_ROOT=%LOCATION%\DAMASK
-set DAMASK_NUM_THREADS=2
-chcp 1252
-Title Düsseldorf Advanced Materials Simulation Kit - DAMASK, MPIE Düsseldorf
-echo.
-echo Düsseldorf Advanced Materials Simulation Kit - DAMASK
-echo Max-Planck-Institut für Eisenforschung, Düsseldorf
-echo http://damask.mpie.de
-echo.
-echo Preparing environment ...
-echo DAMASK_ROOT=%DAMASK_ROOT%
-echo DAMASK_NUM_THREADS=%DAMASK_NUM_THREADS%
-set DAMASK_BIN=%DAMASK_ROOT%\bin
-set PATH=%PATH%;%DAMASK_BIN%
-set PYTHONPATH=%PYTHONPATH%;%DAMASK_ROOT%\lib
@@ -3,7 +3,7 @@
 # Kuo, J. C., Mikrostrukturmechanik von Bikristallen mit Kippkorngrenzen. Shaker-Verlag 2004. http://edoc.mpg.de/204079

 elasticity hooke
-plasticity j2
+plasticity isotropic

 (output) flowstress
 (output) strainrate
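
Note: "j2" and "isotropic" name the same constitutive model here; the one-keyword swap in the configuration only renames it. "J2" refers to the second invariant of the deviatoric stress that drives isotropic (von Mises) flow. A minimal sketch of that quantity, assuming a numpy 3x3 stress tensor (illustration, not code from this commit):

import numpy as np

sigma = np.diag([100.0, 50.0, 0.0])                # example stress state in MPa
s = sigma - np.trace(sigma)/3.0*np.eye(3)          # deviatoric part
J2 = 0.5*np.einsum('ij,ij', s, s)                  # second deviatoric invariant
sigma_vM = np.sqrt(3.0*J2)                         # von Mises equivalent stress
print(sigma_vM)                                    # 86.60... MPa
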
@@ -21,13 +21,13 @@ if not os.path.isdir(binDir):

 #define ToDo list
 processing_subDirs = ['pre','post','misc',]
-processing_extensions = ['.py',]
+processing_extensions = ['.py','.sh',]

 for subDir in processing_subDirs:
   theDir = os.path.abspath(os.path.join(baseDir,subDir))

   for theFile in os.listdir(theDir):
-    if os.path.splitext(theFile)[1] in processing_extensions:       # omit anything not fitting our script extensions (skip .py.bak, .py~, and the like)
+    if os.path.splitext(theFile)[1] in processing_extensions:       # only consider files with proper extensions

       src      = os.path.abspath(os.path.join(theDir,theFile))
       sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0]))
@@ -67,7 +67,7 @@ class ASCIItable():
     return 0.0

# ------------------------------------------------------------------
-  def _noCRLF(self,
+  def _removeCRLF(self,
               string):
    try:
      return string.replace('\n','').replace('\r','')
@@ -251,9 +251,9 @@ class ASCIItable():
      try:
        for item in what: self.labels_append(item)
      except:
-        self.labels += [self._noCRLF(str(what))]
+        self.labels += [self._removeCRLF(str(what))]
    else:
-      self.labels += [self._noCRLF(what)]
+      self.labels += [self._removeCRLF(what)]

    self.__IO__['labeled'] = True                                    # switch on processing (in particular writing) of labels
    if reset: self.__IO__['labels'] = list(self.labels)              # subsequent data_read uses current labels as data size
@@ -369,7 +369,8 @@ class ASCIItable():
    start = self.label_index(labels)
    dim   = self.label_dimension(labels)

-    return map(lambda a,b: xrange(a,a+b), zip(start,dim)) if isinstance(labels, Iterable) and not isinstance(labels, str) \
+    return np.hstack(map(lambda c: xrange(c[0],c[0]+c[1]), zip(start,dim))) \
+           if isinstance(labels, Iterable) and not isinstance(labels, str) \
      else xrange(start,start+dim)

# ------------------------------------------------------------------
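
Note: the old one-liner fed a two-argument lambda to map() over zip(start,dim) pairs, which fails because each pair arrives as a single tuple; the replacement indexes the tuple and flattens the per-label ranges into one column-index array. A Python 3 sketch of the new behavior (range standing in for xrange):

import numpy as np

start, dim = [2, 7], [3, 1]                        # two labels: 3 columns at index 2, 1 column at index 7
columns = np.hstack([range(c[0], c[0]+c[1]) for c in zip(start, dim)])
print(columns)                                     # [2 3 4 7] -- one flat index array
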
@@ -380,9 +381,9 @@ class ASCIItable():
      try:
        for item in what: self.info_append(item)
      except:
-        self.info += [self._noCRLF(str(what))]
+        self.info += [self._removeCRLF(str(what))]
    else:
-      self.info += [self._noCRLF(what)]
+      self.info += [self._removeCRLF(what)]

# ------------------------------------------------------------------
  def info_clear(self):
@@ -333,6 +333,15 @@ class Colormap():
                 'blueredrainbow':{'left':  Color('HSL',[2.0/3.0,1,0.5]),
                                   'right': Color('HSL',[0,1,0.5]),
                                   'interpolate': 'linear'   },
+                 'orientation':  {'left':  Color('RGB',[0.933334,0.878432,0.878431]),
+                                  'right': Color('RGB',[0.250980,0.007843,0.000000]),
+                                  'interpolate': 'perceptualuniform'},
+                 'strain':       {'left':  Color('RGB',[0.941177,0.941177,0.870588]),
+                                  'right': Color('RGB',[0.266667,0.266667,0.000000]),
+                                  'interpolate': 'perceptualuniform'},
+                 'stress':       {'left':  Color('RGB',[0.878432,0.874511,0.949019]),
+                                  'right': Color('RGB',[0.000002,0.000000,0.286275]),
+                                  'interpolate': 'perceptualuniform'},
                 }
@@ -344,7 +353,7 @@ class Colormap():
               predefined = None
              ):

-    if str(predefined).lower() in self.__predefined__:
+    if predefined is not None:
      left  = self.__predefined__[predefined.lower()]['left']
      right = self.__predefined__[predefined.lower()]['right']
      interpolate = self.__predefined__[predefined.lower()]['interpolate']
@@ -442,11 +451,12 @@ class Colormap():
    format = format.lower()                                          # consistent comparison basis
    frac   = 0.5*(np.array(crop) + 1.0)                              # rescale crop range to fractions
    colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in xrange(steps)]

    if   format == 'paraview':
-      colormap = ['<ColorMap name="'+str(name)+'" space="Diverging">'] \
-               + ['<Point x="%i"'%i + ' o="1" r="%g" g="%g" b="%g"/>'%(color[0],color[1],color[2],) for i,color in colors] \
-               + ['</ColorMap>']
+      colormap = ['[\n {{\n  "ColorSpace" : "RGB", "Name" : "{}",\n  "RGBPoints" : ['.format(name)] \
+               + ['  {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],)
+                  for i,color in enumerate(colors[:-1])]\
+               + ['  {:4d},{:8.6f},{:8.6f},{:8.6f} '.format(i+1,colors[-1][0],colors[-1][1],colors[-1][2],)]\
+               + ['  ]\n }\n]']

    elif format == 'gmsh':
      colormap = ['View.ColorTable = {'] \
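
Note: the 'paraview' branch switches from the legacy <ColorMap> XML to the JSON colormap format that newer ParaView releases import (the matching .xml -> .json extension change appears in a later hunk). The old branch also iterated "for i,color in colors" without enumerate, a latent bug the rewrite removes. For a hypothetical two-step map named "stress", the emitted file looks roughly like:

[
 {
  "ColorSpace" : "RGB", "Name" : "stress",
  "RGBPoints" : [
     0,0.878432,0.874511,0.949019,
     1,0.000002,0.000000,0.286275 
  ]
 }
]
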
@@ -53,6 +53,20 @@ def report(who,what):
  """reports script and file name"""
  croak( (emph(who) if who else '') + (': '+what if what else '') )

+
+# -----------------------------
+def report_geom(info,
+                what = ['grid','size','origin','homogenization','microstructures']):
+  """reports (selected) geometry information"""
+  output = {
+            'grid'            : 'grid     a b c:  {}'.format(' x '.join(map(str,info['grid'  ]))),
+            'size'            : 'size     x y z:  {}'.format(' x '.join(map(str,info['size'  ]))),
+            'origin'          : 'origin   x y z:  {}'.format(' : '.join(map(str,info['origin']))),
+            'homogenization'  : 'homogenization:  {}'.format(info['homogenization']),
+            'microstructures' : 'microstructures: {}'.format(info['microstructures']),
+           }
+  for item in what: croak(output[item.lower()])
+
# -----------------------------
def emph(what):
  """emphasizes string on screen"""
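
Note: a minimal usage sketch for the new report_geom helper, assuming an info dictionary of the kind the geom scripts assemble (values invented for illustration):

info = {'grid':   [16,16,16],
        'size':   [1.0,1.0,1.0],
        'origin': [0.0,0.0,0.0],
        'homogenization': 1,
        'microstructures': 20}
damask.util.report_geom(info, what = ['grid','size'])
# grid     a b c:  16 x 16 x 16
# size     x y z:  1.0 x 1.0 x 1.0
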
@@ -1,61 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8 no BOM -*-
-
-import os,string,h5py
-import numpy as np
-from optparse import OptionParser
-import damask
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Add column(s) containing Cauchy stress based on given column(s) of
-deformation gradient and first Piola--Kirchhoff stress.
-
-""" + string.replace('$Id$','\n','\\n')
-)
-
-
-parser.add_option('-f','--defgrad',     dest='defgrad', \
-                  help='heading of columns containing deformation gradient [%default]')
-parser.add_option('-p','--stress',      dest='stress', \
-                  help='heading of columns containing first Piola--Kirchhoff stress [%default]')
-parser.add_option('-o','--output',      dest='output', \
-                  help='group containing requested data [%default]')
-parser.set_defaults(defgrad = 'f')
-parser.set_defaults(stress  = 'p')
-parser.set_defaults(output  = 'crystallite')
-
-(options,filenames) = parser.parse_args()
-
-if options.defgrad is None or options.stress is None or options.output is None:
-  parser.error('missing data column...')
-
-
-# ------------------------------------------ setup file handles ---------------------------------------
-
-files = []
-for name in filenames:
-  if os.path.exists(name):
-    files.append({'name':name, 'file':h5py.File(name,"a")})
-
-# ------------------------------------------ loop over input files ------------------------------------
-
-for myFile in files:
-  print(myFile['name'])
-
-# ------------------------------------------ loop over increments -------------------------------------
-  for inc in myFile['file']['increments'].keys():
-    print("Current Increment: "+inc)
-    for instance in myFile['file']['increments/'+inc+'/'+options.output].keys():
-      dsets = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance].keys()
-      if (options.defgrad in dsets and options.stress in dsets):
-        defgrad = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance+'/'+options.defgrad]
-        stress  = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance+'/'+options.stress]
-        cauchy=np.zeros(np.shape(stress),'f')
-        for p in range(stress.shape[0]):
-          cauchy[p,...] = 1.0/np.linalg.det(defgrad[p,...])*np.dot(stress[p,...],defgrad[p,...].T) # [Cauchy] = (1/det(F)) * [P].[F_transpose]
-        cauchyFile = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance].create_dataset('cauchy', data=cauchy)
-        cauchyFile.attrs['units'] = 'Pa'
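
Note: the deleted script's kernel is the push-forward of the first Piola--Kirchhoff stress to the Cauchy stress, sigma = det(F)^-1 P F^T, as its inline comment states. The per-point loop can also be written vectorized; a sketch for an (N,3,3) batch with invented inputs, not code from the repository:

import numpy as np

F = np.broadcast_to(np.eye(3), (5,3,3)).copy()     # hypothetical deformation gradients
P = np.random.rand(5,3,3)                          # hypothetical 1st Piola--Kirchhoff stresses

# sigma_p = det(F_p)^-1 * P_p . F_p^T for every point p at once
sigma = np.einsum('p,pij,pkj->pik', 1.0/np.linalg.det(F), P, F)
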
@@ -10,6 +10,7 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

def curlFFT(geomdim,field):
+ shapeFFT = np.array(np.shape(field))[0:3]
 grid = np.array(np.shape(field)[2::-1])
 N = grid.prod()                                                     # field size
 n = np.array(np.shape(field)[3:]).prod()                            # data size
@@ -17,8 +18,8 @@ def curlFFT(geomdim,field):
 if   n == 3: dataType = 'vector'
 elif n == 9: dataType = 'tensor'

- field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
- curl_fourier  = np.zeros(field_fourier.shape,'c16')
+ field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
+ curl_fourier  = np.empty(field_fourier.shape,'c16')

# differentiation in Fourier space
 k_s = np.zeros([3],'i')
@@ -55,32 +56,32 @@ def curlFFT(geomdim,field):
        curl_fourier[i,j,k,2] = ( field_fourier[i,j,k,1]*xi[0]\
                                 -field_fourier[i,j,k,0]*xi[1]) *TWOPIIMG

- return np.fft.fftpack.irfftn(curl_fourier,axes=(0,1,2)).reshape([N,n])
+ return np.fft.fftpack.irfftn(curl_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n])


# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing curl of requested column(s).
Operates on periodic ordered three-dimensional data sets.
-Deals with both vector- and tensor-valued fields.
+Deals with both vector- and tensor fields.

""", version = scriptID)

-parser.add_option('-c','--coordinates',
+parser.add_option('-p','--pos','--periodiccellcenter',
                  dest = 'coords',
                  type = 'string', metavar = 'string',
-                  help = 'column label of coordinates [%default]')
+                  help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
                  dest = 'vector',
                  action = 'extend', metavar = '<string LIST>',
-                  help = 'column label(s) of vector field values')
+                  help = 'label(s) of vector field values')
parser.add_option('-t','--tensor',
                  dest = 'tensor',
                  action = 'extend', metavar = '<string LIST>',
-                  help = 'column label(s) of tensor field values')
+                  help = 'label(s) of tensor field values')

parser.set_defaults(coords = 'pos',
                   )
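
Note: the new s=shapeFFT argument fixes a real-FFT round-trip pitfall. The forward real transform halves the last axis, so the inverse cannot tell whether that axis originally had odd or even length and guesses even; passing the true shape makes grids with an odd dimension come back intact. A sketch with the modern numpy.fft names (the script itself uses the legacy np.fft.fftpack alias):

import numpy as np

field = np.random.rand(4, 4, 5)                    # odd last dimension

spec = np.fft.rfftn(field)                         # last axis shrinks to 5//2+1 = 3
bad  = np.fft.irfftn(spec)                         # guesses even length -> shape (4, 4, 4)
good = np.fft.irfftn(spec, s=field.shape)          # original shape restored

print(bad.shape, good.shape)                       # (4, 4, 4) (4, 4, 5)
print(np.allclose(good, field))                    # True
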
@@ -90,7 +91,7 @@ parser.set_defaults(coords = 'pos',
if options.vector is None and options.tensor is None:
  parser.error('no data column specified.')

-# --- loop over input files -------------------------------------------------------------------------
+# --- loop over input files ------------------------------------------------------------------------

if filenames == []: filenames = [None]
@@ -147,7 +148,7 @@ for name in filenames:
  maxcorner = np.array(map(max,coords))
  grid   = np.array(map(len,coords),'i')
  size   = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)           # size from edge to edge = dim * n/(n-1)
-  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
+  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                # spacing for grid==1 equal to smallest among other ones

# ------------------------------------------ process value field -----------------------------------
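
Note: the newly commented size line handles degenerate (effectively 2D) grids: extents are first inferred from the unique coordinates, and any axis with a single layer inherits the smallest spacing of the remaining axes. A worked sketch with invented cell-center coordinates:

import numpy as np

coords = [np.array([0.125,0.375,0.625,0.875]),     # 4 cell centers, spacing 0.25
          np.array([0.125,0.375,0.625,0.875]),
          np.array([0.5])]                         # single layer along z
mincorner = np.array(list(map(min,coords)))
maxcorner = np.array(list(map(max,coords)))
grid = np.array(list(map(len,coords)),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)  # dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))       # z axis gets spacing 0.25
print(size)                                        # [1.   1.   0.25]
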
@@ -94,18 +94,20 @@ Outputs at cell centers or cell nodes (into separate file).

""", version = scriptID)

-parser.add_option('-f', '--defgrad',
+parser.add_option('-f',
+                  '--defgrad',
                  dest = 'defgrad',
                  metavar = 'string',
                  help = 'column label of deformation gradient [%default]')
-parser.add_option('-c', '--coordinates',
+parser.add_option('-p',
+                  '--pos', '--position',
                  dest = 'coords',
                  metavar = 'string',
-                  help = 'column label of coordinates [%default]')
+                  help = 'label of coordinates [%default]')
parser.add_option('--nodal',
                  dest = 'nodal',
                  action = 'store_true',
-                  help = 'output nodal (not cell-centered) displacements')
+                  help = 'output nodal (instad of cell-centered) displacements')

parser.set_defaults(defgrad = 'f',
                    coords  = 'pos',
@@ -164,14 +166,6 @@ for name in filenames:
                                 np.zeros((table.data.shape[0],
                                           3-table.data[:,9:].shape[1]),dtype='f')))  # fill coords up to 3D with zeros

-  if remarks != []: damask.util.croak(remarks)
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-
-# --------------- figure out size and grid ---------------------------------------------------------
-
  coords = [np.unique(table.data[:,9+i]) for i in xrange(3)]
  mincorner = np.array(map(min,coords))
  maxcorner = np.array(map(max,coords))
@@ -10,12 +10,13 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

def divFFT(geomdim,field):
+ shapeFFT = np.array(np.shape(field))[0:3]
 grid = np.array(np.shape(field)[2::-1])
 N = grid.prod()                                                     # field size
 n = np.array(np.shape(field)[3:]).prod()                            # data size

- field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
- div_fourier   = np.zeros(field_fourier.shape[0:len(np.shape(field))-1],'c16')        # size depents on whether tensor or vector
+ field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
+ div_fourier   = np.empty(field_fourier.shape[0:len(np.shape(field))-1],'c16')        # size depents on whether tensor or vector

# differentiation in Fourier space
 k_s = np.zeros([3],'i')
@@ -41,32 +42,32 @@ def divFFT(geomdim,field):
        elif n == 3:                                                 # vector, 3 -> 1
          div_fourier[i,j,k] = sum(field_fourier[i,j,k,0:3]*xi) *TWOPIIMG

- return np.fft.fftpack.irfftn(div_fourier,axes=(0,1,2)).reshape([N,n/3])
+ return np.fft.fftpack.irfftn(div_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n/3])


# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing divergence of requested column(s).
Operates on periodic ordered three-dimensional data sets.
Deals with both vector- and tensor-valued fields.

""", version = scriptID)

-parser.add_option('-c','--coordinates',
+parser.add_option('-p','--pos','--periodiccellcenter',
                  dest = 'coords',
                  type = 'string', metavar = 'string',
-                  help = 'column label of coordinates [%default]')
+                  help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
                  dest = 'vector',
                  action = 'extend', metavar = '<string LIST>',
-                  help = 'column label(s) of vector field values')
+                  help = 'label(s) of vector field values')
parser.add_option('-t','--tensor',
                  dest = 'tensor',
                  action = 'extend', metavar = '<string LIST>',
-                  help = 'column label(s) of tensor field values')
+                  help = 'label(s) of tensor field values')

parser.set_defaults(coords = 'pos',
                   )
@@ -88,16 +88,27 @@ Add column(s) containing Euclidean distance to grain structural features: bounda

""", version = scriptID)

-parser.add_option('-c','--coordinates', dest='coords', metavar='string',
-                  help='column label of coordinates [%default]')
-parser.add_option('-i','--identifier',  dest='id', metavar = 'string',
-                  help='column label of grain identifier [%default]')
-parser.add_option('-t','--type',        dest = 'type', action = 'extend', metavar = '<string LIST>',
-                  help = 'feature type {%s} '%(', '.join(map(lambda x:'/'.join(x['names']),features))) )
-parser.add_option('-n','--neighborhood',dest='neighborhood', choices = neighborhoods.keys(), metavar = 'string',
-                  help = 'type of neighborhood [neumann] {%s}'%(', '.join(neighborhoods.keys())))
-parser.add_option('-s', '--scale',      dest = 'scale', type = 'float', metavar = 'float',
+parser.add_option('-p',
+                  '--pos', '--position',
+                  dest = 'coords', metavar = 'string',
+                  help = 'label of coordinates [%default]')
+parser.add_option('-i',
+                  '--id', '--identifier',
+                  dest = 'id', metavar = 'string',
+                  help='label of grain identifier [%default]')
+parser.add_option('-t',
+                  '--type',
+                  dest = 'type', action = 'extend', metavar = '<string LIST>',
+                  help = 'feature type {{{}}} '.format(', '.join(map(lambda x:'/'.join(x['names']),features))) )
+parser.add_option('-n',
+                  '--neighborhood',
+                  dest = 'neighborhood', choices = neighborhoods.keys(), metavar = 'string',
+                  help = 'neighborhood type [neumann] {{{}}}'.format(', '.join(neighborhoods.keys())))
+parser.add_option('-s',
+                  '--scale',
+                  dest = 'scale', type = 'float', metavar = 'float',
                  help = 'voxel size [%default]')

parser.set_defaults(coords = 'pos',
                    id = 'texture',
                    neighborhood = 'neumann',
@@ -111,7 +122,7 @@ if options.type is None:
if not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['names'],features))))):
  parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
if 'biplane' in options.type and 'boundary' in options.type:
-  parser.error("only one from aliases 'biplane' and 'boundary' possible.")
+  parser.error('only one from aliases "biplane" and "boundary" possible.')

feature_list = []
for i,feature in enumerate(features):
@@ -172,9 +183,7 @@ for name in filenames:

  N = grid.prod()

-  if N != len(table.data): errors.append('data count {} does not match grid '.format(N) +
-                                         'x'.join(map(str,grid)) +
-                                         '.')
+  if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid))))
  if errors != []:
    damask.util.croak(errors)
    table.close(dismiss = True)
@@ -9,17 +9,17 @@ import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
def gradFFT(geomdim,field):
-
+ shapeFFT = np.array(np.shape(field))[0:3]
 grid = np.array(np.shape(field)[2::-1])
 N = grid.prod()                                                     # field size
 n = np.array(np.shape(field)[3:]).prod()                            # data size

 if   n == 3: dataType = 'vector'
 elif n == 1: dataType = 'scalar'

- field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
- grad_fourier  = np.zeros(field_fourier.shape+(3,),'c16')
+ field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
+ grad_fourier  = np.empty(field_fourier.shape+(3,),'c16')

# differentiation in Fourier space
 k_s = np.zeros([3],'i')
@@ -46,32 +46,32 @@ def gradFFT(geomdim,field):
        grad_fourier[i,j,k,1,:] = field_fourier[i,j,k,1]*xi *TWOPIIMG                  # tensor field from vector data
        grad_fourier[i,j,k,2,:] = field_fourier[i,j,k,2]*xi *TWOPIIMG

- return np.fft.fftpack.irfftn(grad_fourier,axes=(0,1,2)).reshape([N,3*n])
+ return np.fft.fftpack.irfftn(grad_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,3*n])


# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing gradient of requested column(s).
Operates on periodic ordered three-dimensional data sets.
Deals with both vector- and scalar fields.

""", version = scriptID)

-parser.add_option('-c','--coordinates',
+parser.add_option('-p','--pos','--periodiccellcenter',
                  dest = 'coords',
                  type = 'string', metavar = 'string',
-                  help = 'column label of coordinates [%default]')
+                  help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
                  dest = 'vector',
                  action = 'extend', metavar = '<string LIST>',
-                  help = 'column label(s) of vector field values')
+                  help = 'label(s) of vector field values')
parser.add_option('-s','--scalar',
                  dest = 'scalar',
                  action = 'extend', metavar = '<string LIST>',
-                  help = 'column label(s) of scalar field values')
+                  help = 'label(s) of scalar field values')

parser.set_defaults(coords = 'pos',
                   )
@@ -138,7 +138,7 @@ for name in filenames:
  maxcorner = np.array(map(max,coords))
  grid   = np.array(map(len,coords),'i')
  size   = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)           # size from edge to edge = dim * n/(n-1)
-  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
+  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                # spacing for grid==1 equal to smallest among other ones

# ------------------------------------------ process value field -----------------------------------
|
@ -15,54 +15,61 @@ Add grain index based on similiarity of crystal lattice orientation.
|
|||
|
||||
""", version = scriptID)
|
||||
|
||||
parser.add_option('-r', '--radius',
|
||||
parser.add_option('-r',
|
||||
'--radius',
|
||||
dest = 'radius',
|
||||
type = 'float', metavar = 'float',
|
||||
help = 'search radius')
|
||||
parser.add_option('-d', '--disorientation',
|
||||
parser.add_option('-d',
|
||||
'--disorientation',
|
||||
dest = 'disorientation',
|
||||
type = 'float', metavar = 'float',
|
||||
help = 'disorientation threshold in degrees [%default]')
|
||||
parser.add_option('-s', '--symmetry',
|
||||
parser.add_option('-s',
|
||||
'--symmetry',
|
||||
dest = 'symmetry',
|
||||
type = 'string', metavar = 'string',
|
||||
help = 'crystal symmetry [%default]')
|
||||
parser.add_option('-e', '--eulers',
|
||||
parser.add_option('-e',
|
||||
'--eulers',
|
||||
dest = 'eulers',
|
||||
type = 'string', metavar = 'string',
|
||||
help = 'Euler angles')
|
||||
help = 'label of Euler angles')
|
||||
parser.add_option('--degrees',
|
||||
dest = 'degrees',
|
||||
action = 'store_true',
|
||||
help = 'Euler angles are given in degrees [%default]')
|
||||
parser.add_option('-m', '--matrix',
|
||||
parser.add_option('-m',
|
||||
'--matrix',
|
||||
dest = 'matrix',
|
||||
type = 'string', metavar = 'string',
|
||||
help = 'orientation matrix')
|
||||
help = 'label of orientation matrix')
|
||||
parser.add_option('-a',
|
||||
dest = 'a',
|
||||
type = 'string', metavar = 'string',
|
||||
help = 'crystal frame a vector')
|
||||
help = 'label of crystal frame a vector')
|
||||
parser.add_option('-b',
|
||||
dest = 'b',
|
||||
type = 'string', metavar = 'string',
|
||||
help = 'crystal frame b vector')
|
||||
help = 'label of crystal frame b vector')
|
||||
parser.add_option('-c',
|
||||
dest = 'c',
|
||||
type = 'string', metavar = 'string',
|
||||
help = 'crystal frame c vector')
|
||||
parser.add_option('-q', '--quaternion',
|
||||
help = 'label of crystal frame c vector')
|
||||
parser.add_option('-q',
|
||||
'--quaternion',
|
||||
dest = 'quaternion',
|
||||
type = 'string', metavar = 'string',
|
||||
help = 'quaternion')
|
||||
parser.add_option('-p', '--position',
|
||||
dest = 'coords',
|
||||
help = 'label of quaternion')
|
||||
parser.add_option('-p',
|
||||
'--pos', '--position',
|
||||
dest = 'pos',
|
||||
type = 'string', metavar = 'string',
|
||||
help = 'spatial position of voxel [%default]')
|
||||
help = 'label of coordinates [%default]')
|
||||
|
||||
parser.set_defaults(disorientation = 5,
|
||||
symmetry = 'cubic',
|
||||
coords = 'pos',
|
||||
pos = 'pos',
|
||||
degrees = False,
|
||||
)
|
||||
|
||||
|
@@ -108,10 +115,10 @@ for name in filenames:
  errors  = []
  remarks = []

-  if not 3 >= table.label_dimension(options.coords) >= 1:
-    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
+  if not 3 >= table.label_dimension(options.pos) >= 1:
+    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
  if not np.all(table.label_dimension(label) == dim):
-    errors.append('input {} does not have dimension {}.'.format(label,dim))
+    errors.append('input "{}" does not have dimension {}.'.format(label,dim))
  else: column = table.label_index(label)

  if remarks != []: damask.util.croak(remarks)
@@ -140,7 +147,7 @@ for name in filenames:

  bg.set_message('reading positions...')

-  table.data_readArray(options.coords)                               # read position vectors
+  table.data_readArray(options.pos)                                  # read position vectors
  grainID = -np.ones(len(table.data),dtype=int)

  start = tick = time.clock()
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 no BOM -*-
+
+import os
+from optparse import OptionParser
+import damask
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID   = ' '.join([scriptName,damask.version])
+
+
+# --------------------------------------------------------------------
+#                                MAIN
+# --------------------------------------------------------------------
+
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options file[s]', description = """
+Add info lines to ASCIItable header.
+
+""", version = scriptID)
+
+parser.add_option('-i',
+                  '--info',
+                  dest = 'info', action = 'extend', metavar = '<string LIST>',
+                  help = 'items to add')
+
+parser.set_defaults(info = [],
+                   )
+
+(options,filenames) = parser.parse_args()
+
+# --- loop over input files ------------------------------------------------------------------------
+
+if filenames == []: filenames = [None]
+
+for name in filenames:
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False)
+  except: continue
+  damask.util.report(scriptName,name)
+
+# ------------------------------------------ assemble header ---------------------------------------
+
+  table.head_read()
+  table.info_append(options.info)
+  table.head_write()
+
+# ------------------------------------------ pass through data -------------------------------------
+
+  outputAlive = True
+
+  while outputAlive and table.data_read():                           # read next data line of ASCII table
+    outputAlive = table.data_write()                                 # output processed line
+
+# ------------------------------------------ output finalization -----------------------------------
+
+  table.close()                                                      # close ASCII tables
@@ -76,9 +76,10 @@ for name in filenames:

# --------------- figure out size and grid ---------------------------------------------------------

-  table.data_readArray()
+  table.data_readArray(options.coords)
+  table.data_rewind()

-  coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
+  coords = [np.unique(table.data[:,i]) for i in xrange(3)]
  mincorner = np.array(map(min,coords))
  maxcorner = np.array(map(max,coords))
  grid   = np.array(map(len,coords),'i')
@@ -95,7 +96,6 @@ for name in filenames:

# ------------------------------------------ process data -------------------------------------------

-  table.data_rewind()
  data = np.zeros(outSize.tolist()+[len(table.labels)])
  p = np.zeros(3,'i')
@@ -1,144 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8 no BOM -*-
-
-import os,sys,shutil
-import numpy as np
-import damask
-from optparse import OptionParser
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# -----------------------------
-# MAIN FUNCTION STARTS HERE
-# -----------------------------
-
-# --- input parsing
-
-parser = OptionParser(usage='%prog [options] resultfile', description = """
-Create vtk files for the (deformed) geometry that belongs to a .t16 (MSC.Marc) results file.
-
-""", version = scriptID)
-
-parser.add_option('-d','--dir', dest='dir', \
-                  help='name of subdirectory to hold output [%default]')
-parser.add_option('-r','--range', dest='range', type='int', nargs=3, \
-                  help='range of positions (or increments) to output (start, end, step) [all]')
-parser.add_option('--increments', action='store_true', dest='getIncrements', \
-                  help='switch to increment range [%default]')
-parser.add_option('-t','--type', dest='type', type='choice', choices=['ipbased','nodebased'], \
-                  help='processed geometry type [ipbased and nodebased]')
-
-parser.set_defaults(dir = 'vtk')
-parser.set_defaults(getIncrements= False)
-
-(options, files) = parser.parse_args()
-
-# --- basic sanity checks
-
-if files == []:
-  parser.print_help()
-  parser.error('no file specified...')
-
-filename = os.path.splitext(files[0])[0]
-if not os.path.exists(filename+'.t16'):
-  parser.print_help()
-  parser.error('invalid file "%s" specified...'%filename+'.t16')
-
-if not options.type :
-  options.type = ['nodebased', 'ipbased']
-else:
-  options.type = [options.type]
-
-
-# --- more sanity checks
-
-sys.path.append(damask.solver.Marc().libraryPath('../../'))
-try:
-  import py_post
-except:
-  print('error: no valid Mentat release found')
-  sys.exit(-1)
-
-
-# --------------------------- open results file and initialize mesh ----------
-
-p = py_post.post_open(filename+'.t16')
-p.moveto(0)
-Nnodes = p.nodes()
-Nincrements = p.increments() - 1                                     # t16 contains one "virtual" increment (at 0)
-if damask.core.mesh.mesh_init_postprocessing(filename+'.mesh') > 0:
-  print('error: init not successful')
-  sys.exit(-1)
-Ncellnodes = damask.core.mesh.mesh_get_Ncellnodes()
-unitlength = damask.core.mesh.mesh_get_unitlength()
-
-
-# --------------------------- create output dir --------------------------------
-
-dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
-if not os.path.isdir(dirname):
-  os.mkdir(dirname,0755)
-
-
-# --------------------------- get positions --------------------------------
-
-incAtPosition = {}
-positionOfInc = {}
-
-for position in range(Nincrements):
-  p.moveto(position+1)
-  incAtPosition[position] = p.increment                              # remember "real" increment at this position
-  positionOfInc[p.increment] = position                              # remember position of "real" increment
-
-if not options.range:
-  options.getIncrements = False
-  locations = range(Nincrements)                                     # process all positions
-else:
-  options.range = list(options.range)                                # convert to list
-  if options.getIncrements:
-    locations = [positionOfInc[x] for x in range(options.range[0],options.range[1]+1,options.range[2])
-                 if x in positionOfInc]
-  else:
-    locations = range( max(0,options.range[0]),
-                       min(Nincrements,options.range[1]+1),
-                       options.range[2] )
-
-increments = [incAtPosition[x] for x in locations]                   # build list of increments to process
-
-
-# --------------------------- loop over positions --------------------------------
-
-for incCount,position in enumerate(locations):                       # walk through locations
-
-  p.moveto(position+1)                                               # wind to correct position
-
-# --- get displacements
-
-  node_displacement = [[0,0,0] for i in range(Nnodes)]
-  for n in range(Nnodes):
-    if p.node_displacements():
-      node_displacement[n] = map(lambda x:x*unitlength,list(p.node_displacement(n)))
-  c = damask.core.mesh.mesh_build_cellnodes(np.array(node_displacement).T,Ncellnodes)
-  cellnode_displacement = [[c[i][n] for i in range(3)] for n in range(Ncellnodes)]
-
-
-# --- append displacements to corresponding files
-
-  for geomtype in options.type:
-    outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")\
-                       +'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
-    print outFilename
-    shutil.copyfile('%s_%s.vtk'%(filename,geomtype),outFilename)
-
-    with open(outFilename,'a') as myfile:
-      myfile.write("POINT_DATA %i\n"%{'nodebased':Nnodes,'ipbased':Ncellnodes}[geomtype])
-      myfile.write("VECTORS displacement double\n")
-      coordinates = {'nodebased':node_displacement,'ipbased':cellnode_displacement}[geomtype]
-      for n in range({'nodebased':Nnodes,'ipbased':Ncellnodes}[geomtype]):
-        myfile.write("%.8e %.8e %.8e\n"%(coordinates[n][0],coordinates[n][1],coordinates[n][2]))
-
-
-
-# --------------------------- DONE --------------------------------
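
Note: the deleted loop builds zero-padded output names with a nested eval whose pad width tracks the largest increment; an equivalent, clearer formulation (a sketch with stand-in values, not code from the commit):

import math, os

dirname, filename, geomtype = 'vtk', 'job1', 'nodebased'   # stand-ins for the script's variables
increments, incCount = [0, 10, 200], 2

width = int(math.log10(max(increments + [1])) + 1)         # digits needed for the largest increment
outFilename = '{}_{}_inc{:0{}d}.vtk'.format(
    os.path.join(dirname, os.path.split(filename)[1]),
    geomtype, increments[incCount], width)
print(outFilename)                                         # vtk/job1_nodebased_inc200.vtk
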
@@ -1,421 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8 no BOM -*-
-
-import os,sys,string,re,time
-from optparse import OptionParser, OptionGroup
-import damask
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# -----------------------------
-def ParseOutputFormat(filename,homogID,crystID,phaseID):
-  """parse .output* files in order to get a list of outputs"""
-  myID = {'Homogenization': homogID,
-          'Crystallite':    crystID,
-          'Constitutive':   phaseID,
-         }
-  format = {}
-
-  for what in ['Homogenization','Crystallite','Constitutive']:
-    content = []
-    format[what] = {'outputs':{},'specials':{'brothers':[]}}
-    for prefix in ['']+map(str,range(1,17)):
-      if os.path.exists(prefix+filename+'.output'+what):
-        try:
-          file = open(prefix+filename+'.output'+what)
-          content = file.readlines()
-          file.close()
-          break
-        except:
-          pass
-
-    if content == []: continue                                       # nothing found...
-
-    tag = ''
-    tagID = 0
-    for line in content:
-      if re.match("\s*$",line) or re.match("#",line):                # skip blank lines and comments
-        continue
-      m = re.match("\[(.+)\]",line)                                  # look for block indicator
-      if m:                                                          # next section
-        tag = m.group(1)
-        tagID += 1
-        format[what]['specials']['brothers'].append(tag)
-        if tag == myID[what] or (myID[what].isdigit() and tagID == int(myID[what])):
-          format[what]['specials']['_id'] = tagID
-          format[what]['outputs'] = []
-          tag = myID[what]
-      else:                                                          # data from section
-        if tag == myID[what]:
-          (output,length) = line.split()
-          output.lower()
-          if length.isdigit():
-            length = int(length)
-          if re.match("\((.+)\)",output):                            # special data, e.g. (Ngrains)
-            format[what]['specials'][output] = length
-          elif length > 0:
-            format[what]['outputs'].append([output,length])
-
-    if '_id' not in format[what]['specials']:
-      print "\nsection '%s' not found in <%s>"%(myID[what], what)
-      print '\n'.join(map(lambda x:'  [%s]'%x, format[what]['specials']['brothers']))
-
-  return format
-
-
-# -----------------------------
-def ParsePostfile(p,filename, outputFormat, legacyFormat):
-  """
-  parse postfile in order to get position and labels of outputs
-
-  needs "outputFormat" for mapping of output names to postfile output indices
-  """
-  startVar = {True: 'GrainCount',
-              False:'HomogenizationCount'}
-
-  # --- build statistics
-
-  stat = { \
-  'IndexOfLabel': {}, \
-  'Title': p.title(), \
-  'Extrapolation': p.extrapolate, \
-  'NumberOfIncrements': p.increments() - 1, \
-  'NumberOfNodes': p.nodes(), \
-  'NumberOfNodalScalars': p.node_scalars(), \
-  'LabelOfNodalScalar': [None]*p.node_scalars() , \
-  'NumberOfElements': p.elements(), \
-  'NumberOfElementalScalars': p.element_scalars(), \
-  'LabelOfElementalScalar': [None]*p.element_scalars() , \
-  'NumberOfElementalTensors': p.element_tensors(), \
-  'LabelOfElementalTensor': [None]*p.element_tensors(), \
-  }
-
-  # --- find labels
-
-  for labelIndex in range(stat['NumberOfNodalScalars']):
-    label =  p.node_scalar_label(labelIndex)
-    stat['IndexOfLabel'][label] = labelIndex
-    stat['LabelOfNodalScalar'][labelIndex] = label
-
-  for labelIndex in range(stat['NumberOfElementalScalars']):
-    label =  p.element_scalar_label(labelIndex)
-    stat['IndexOfLabel'][label] = labelIndex
-    stat['LabelOfElementalScalar'][labelIndex] = label
-
-  for labelIndex in range(stat['NumberOfElementalTensors']):
-    label =  p.element_tensor_label(labelIndex)
-    stat['IndexOfLabel'][label] = labelIndex
-    stat['LabelOfElementalTensor'][labelIndex] = label
-
-  if 'User Defined Variable 1' in stat['IndexOfLabel']:              # output format without dedicated names?
-    stat['IndexOfLabel'][startVar[legacyFormat]] = stat['IndexOfLabel']['User Defined Variable 1']  # adjust first named entry
-
-  if startVar[legacyFormat] in stat['IndexOfLabel']:                 # does the result file contain relevant user defined output at all?
-    startIndex = stat['IndexOfLabel'][startVar[legacyFormat]]
-    stat['LabelOfElementalScalar'][startIndex] = startVar[legacyFormat]
-
-# We now have to find a mapping for each output label as defined in the .output* files to the output position in the post file
-# Since we know where the user defined outputs start ("startIndex"), we can simply assign increasing indices to the labels
-# given in the .output* file
-
-    offset = 1
-    if legacyFormat:
-      stat['LabelOfElementalScalar'][startIndex + offset] = startVar[not legacyFormat]  # add HomogenizationCount as second
-      offset += 1
-
-    for (name,N) in outputFormat['Homogenization']['outputs']:
-      for i in range(N):
-        label = {False:   '%s'%(    name),
-                  True:'%i_%s'%(i+1,name)}[N > 1]
-        stat['IndexOfLabel'][label] = startIndex + offset
-        stat['LabelOfElementalScalar'][startIndex + offset] = label
-        offset += 1
-
-    if not legacyFormat:
-      stat['IndexOfLabel'][startVar[not legacyFormat]] = startIndex + offset
-      stat['LabelOfElementalScalar'][startIndex + offset] = startVar[not legacyFormat]  # add GrainCount
-      offset += 1
-
-    if '(ngrains)' in outputFormat['Homogenization']['specials']:
-      for grain in range(outputFormat['Homogenization']['specials']['(ngrains)']):
-
-        stat['IndexOfLabel']['%i_CrystalliteCount'%(grain+1)] = startIndex + offset    # report crystallite count
-        stat['LabelOfElementalScalar'][startIndex + offset] = '%i_CrystalliteCount'%(grain+1)  # add GrainCount
-        offset += 1
-
-        for (name,N) in outputFormat['Crystallite']['outputs']:      # add crystallite outputs
-          for i in range(N):
-            label = {False:   '%i_%s'%(grain+1,    name),
-                      True:'%i_%i_%s'%(grain+1,i+1,name)}[N > 1]
-            stat['IndexOfLabel'][label] = startIndex + offset
-            stat['LabelOfElementalScalar'][startIndex + offset] = label
-            offset += 1
-
-        stat['IndexOfLabel']['%i_ConstitutiveCount'%(grain+1)] = startIndex + offset   # report constitutive count
-        stat['LabelOfElementalScalar'][startIndex + offset] = '%i_ConstitutiveCount'%(grain+1)  # add GrainCount
-        offset += 1
-
-        for (name,N) in outputFormat['Constitutive']['outputs']:     # add constitutive outputs
-          for i in range(N):
-            label = {False:   '%i_%s'%(grain+1,    name),
-                      True:'%i_%i_%s'%(grain+1,i+1,name)}[N > 1]
-            stat['IndexOfLabel'][label] = startIndex + offset
-            try:
-              stat['LabelOfElementalScalar'][startIndex + offset] = label
-            except IndexError:
-              print 'trying to assign %s at position %i+%i'%(label,startIndex,offset)
-              sys.exit(1)
-            offset += 1
-
-  return stat
-
-
-# -----------------------------
-def GetIncrementLocations(p,Nincrements,options):
-  """get mapping between positions in postfile and increment number"""
-  incAtPosition = {}
-  positionOfInc = {}
-
-  for position in range(Nincrements):
-    p.moveto(position+1)
-    incAtPosition[position] = p.increment                            # remember "real" increment at this position
-    positionOfInc[p.increment] = position                            # remember position of "real" increment
-
-  if not options.range:
-    options.getIncrements = False
-    locations = range(Nincrements)                                   # process all positions
-  else:
-    options.range = list(options.range)                              # convert to list
-    if options.getIncrements:
-      locations = [positionOfInc[x] for x in range(options.range[0],options.range[1]+1,options.range[2])
-                   if x in positionOfInc]
-    else:
-      locations = range( max(0,options.range[0]),
-                         min(Nincrements,options.range[1]+1),
-                         options.range[2] )
-
-  increments = [incAtPosition[x] for x in locations]                 # build list of increments to process
-
-  return [increments,locations]
-
-
-# -----------------------------
-def SummarizePostfile(stat,where=sys.stdout):
-
-  where.write('\n\n')
-  where.write('title:\t%s'%stat['Title'] + '\n\n')
-  where.write('extraplation:\t%s'%stat['Extrapolation'] + '\n\n')
-  where.write('increments:\t%i'%(stat['NumberOfIncrements']) + '\n\n')
-  where.write('nodes:\t%i'%stat['NumberOfNodes'] + '\n\n')
-  where.write('elements:\t%i'%stat['NumberOfElements'] + '\n\n')
-  where.write('nodal scalars:\t%i'%stat['NumberOfNodalScalars'] + '\n\n  '\
-              +'\n  '.join(stat['LabelOfNodalScalar']) + '\n\n')
-  where.write('elemental scalars:\t%i'%stat['NumberOfElementalScalars'] + '\n\n  '\
-              + '\n  '.join(stat['LabelOfElementalScalar']) + '\n\n')
-  where.write('elemental tensors:\t%i'%stat['NumberOfElementalTensors'] + '\n\n  '\
-              + '\n  '.join(stat['LabelOfElementalTensor']) + '\n\n')
-
-  return True
-
-
-# -----------------------------
-def SummarizeOutputfile(format,where=sys.stdout):
-
-  where.write('\nUser Defined Outputs')
-  for what in format.keys():
-    where.write('\n\n  %s:'%what)
-    for output in format[what]['outputs']:
-      where.write('\n    %s'%output)
-
-  return True
-
-
-# -----------------------------
-def writeHeader(myfile,stat,geomtype):
-
-  myfile.write('2\theader\n')
-  myfile.write(string.replace('$Id$','\n','\\n')+
-               '\t' + ' '.join(sys.argv[1:]) + '\n')
-  if geomtype == 'nodebased':
-    myfile.write('node')
-    for i in range(stat['NumberOfNodalScalars']):
-      myfile.write('\t%s'%''.join(stat['LabelOfNodalScalar'][i].split()))
-
-  elif geomtype == 'ipbased':
-    myfile.write('elem\tip')
-    for i in range(stat['NumberOfElementalScalars']):
-      myfile.write('\t%s'%''.join(stat['LabelOfElementalScalar'][i].split()))
-
-  myfile.write('\n')
-
-  return True
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Extract data from a .t16 (MSC.Marc) results file.
-
-""", version = scriptID)
-
-parser.add_option('-i','--info', action='store_true', dest='info', \
-                  help='list contents of resultfile [%default]')
-parser.add_option('-l','--legacy', action='store_true', dest='legacy', \
-                  help='legacy user result block (starts with GrainCount) [%default]')
-parser.add_option('-d','--dir', dest='dir', \
-                  help='name of subdirectory to hold output [%default]')
-parser.add_option('-r','--range', dest='range', type='int', nargs=3, \
-                  help='range of positions (or increments) to output (start, end, step) [all]')
-parser.add_option('--increments', action='store_true', dest='getIncrements', \
-                  help='switch to increment range [%default]')
-parser.add_option('-t','--type', dest='type', type='choice', choices=['ipbased','nodebased'], \
-                  help='processed geometry type [ipbased and nodebased]')
-
-group_material = OptionGroup(parser,'Material identifier')
-
-group_material.add_option('--homogenization', dest='homog', \
-                          help='homogenization identifier (as string or integer [%default])', metavar='<ID>')
-group_material.add_option('--crystallite', dest='cryst', \
-                          help='crystallite identifier (as string or integer [%default])', metavar='<ID>')
-group_material.add_option('--phase', dest='phase', \
-                          help='phase identifier (as string or integer [%default])', metavar='<ID>')
-
-parser.add_option_group(group_material)
-
-parser.set_defaults(info = False)
-parser.set_defaults(legacy = False)
-parser.set_defaults(dir = 'vtk')
-parser.set_defaults(getIncrements= False)
-parser.set_defaults(homog = '1')
-parser.set_defaults(cryst = '1')
-parser.set_defaults(phase = '1')
-
-(options, files) = parser.parse_args()
-
-
-# --- sanity checks
-
-if files == []:
-  parser.print_help()
-  parser.error('no file specified...')
-
-filename = os.path.splitext(files[0])[0]
-if not os.path.exists(filename+'.t16'):
-  parser.print_help()
-  parser.error('invalid file "%s" specified...'%filename+'.t16')
-
-sys.path.append(damask.solver.Marc().libraryPath('../../'))
-try:
-  import py_post
-except:
-  print('error: no valid Mentat release found')
-  sys.exit(-1)
-
-if not options.type :
-  options.type = ['nodebased', 'ipbased']
-else:
-  options.type = [options.type]
-
-
-# --- initialize mesh data
-
-if damask.core.mesh.mesh_init_postprocessing(filename+'.mesh'):
-  print('error: init not successful')
-  sys.exit(-1)
-
-
-# --- check if ip data available for all elements; if not, then .t19 file is required
-
-p = py_post.post_open(filename+'.t16')
-asciiFile = False
-p.moveto(1)
-for e in range(p.elements()):
-  if not damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),1):
-    if os.path.exists(filename+'.t19'):
-      p.close()
-      p = py_post.post_open(filename+'.t19')
-      asciiFile = True
-      break
-
-
-# --- parse *.output and *.t16 file
-
-outputFormat = ParseOutputFormat(filename,options.homog,options.cryst,options.phase)
-p.moveto(1)
-p.extrapolation('translate')
-stat = ParsePostfile(p,filename,outputFormat,options.legacy)
-
-
-# --- output info
-
-if options.info:
-  print '\n\nMentat release %s'%damask.solver.Marc().version('../../')
-  SummarizePostfile(stat)
-  SummarizeOutputfile(outputFormat)
-  sys.exit(0)
-
-
-# --- create output dir
-
-dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
-if not os.path.isdir(dirname):
-  os.mkdir(dirname,0755)
-
-
-# --- get positions
-
-[increments,locations] = GetIncrementLocations(p,stat['NumberOfIncrements'],options)
-
-
-# --- loop over positions
-
-time_start = time.time()
-for incCount,position in enumerate(locations):                       # walk through locations
-  p.moveto(position+1)                                               # wind to correct position
-  time_delta = (float(len(locations)) / float(incCount+1) - 1.0) * (time.time() - time_start)
-  sys.stdout.write("\r(%02i:%02i:%02i) processing increment %i of %i..."\
-                   %(time_delta//3600,time_delta%3600//60,time_delta%60,incCount+1,len(locations)))
-  sys.stdout.flush()
-
-# --- write header
-
-  outFilename = {}
-  for geomtype in options.type:
-    outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")\
-                                 +'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
-    with open(outFilename[geomtype],'w') as myfile:
-      writeHeader(myfile,stat,geomtype)
-
-# --- write node based data
-
-      if geomtype == 'nodebased':
-        for n in range(stat['NumberOfNodes']):
-          myfile.write(str(n))
-          for l in range(stat['NumberOfNodalScalars']):
-            myfile.write('\t'+str(p.node_scalar(n,l)))
-          myfile.write('\n')
-
-# --- write ip based data
-
-      elif geomtype == 'ipbased':
-        for e in range(stat['NumberOfElements']):
-          if asciiFile:
-            print 'ascii postfile not yet supported'
-            sys.exit(-1)
-          else:
-            ipData = [[]]
-            for l in range(stat['NumberOfElementalScalars']):
-              data = p.element_scalar(e,l)
-              for i in range(len(data)):                             # at least as many nodes as ips
-                node = damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),i+1)  # fortran indexing starts at 1
-                if not node: break                                   # no more ips
-                while i >= len(ipData): ipData.append([])
-                ipData[i].extend([data[node-1].value])               # python indexing starts at 0
-            for i in range(len(ipData)):
-              myfile.write('\t'.join(map(str,[e,i]+ipData[i]))+'\n')
-
-p.close()
-sys.stdout.write("\n")
@@ -14,7 +14,7 @@ scriptID   = ' '.join([scriptName,damask.version])
#Borland, D., & Taylor, R. M. (2007). Rainbow Color Map (Still) Considered Harmful. Computer Graphics and Applications, IEEE, 27(2), 14--17.
#Moreland, K. (2009). Diverging Color Maps for Scientific Visualization. In Proc. 5th Int. Symp. Visual Computing (pp. 92--103).
outtypes    = ['paraview','gmsh','raw','GOM']
-extensions  = ['.xml','.msh','.txt','.legend']
+extensions  = ['.json','.msh','.txt','.legend']
colormodels = ['RGB','HSL','XYZ','CIELAB','MSH']

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
@@ -34,11 +34,9 @@ parser.add_option('-r','--right', dest='right', type='float', nargs=3, metavar='
parser.add_option('-c','--colormodel', dest='colormodel', metavar='string',
                  help='colormodel: '+', '.join(colormodels)+' [%default]')
parser.add_option('-p','--predefined', dest='predefined', metavar='string',
-                  help='predefined colormap [%default]')
+                  help='predefined colormap')
parser.add_option('-f','--format', dest='format', metavar='string',
                  help='output format: '+', '.join(outtypes)+' [%default]')
-parser.add_option('-b','--basename', dest='basename', metavar='string',
-                  help='basename of output file [%default]')
parser.set_defaults(colormodel = 'RGB')
parser.set_defaults(predefined = None)
parser.set_defaults(basename = None)
@@ -48,7 +46,7 @@ parser.set_defaults(trim  = (-1.0,1.0))
parser.set_defaults(left  = (1.0,1.0,1.0))
parser.set_defaults(right = (0.0,0.0,0.0))

-(options,filenames) = parser.parse_args()
+(options,filename) = parser.parse_args()

if options.format not in outtypes:
  parser.error('invalid format: "%s" (can be %s).'%(options.format,', '.join(outtypes)))
@@ -62,10 +60,10 @@ if options.trim[0] < -1.0 or \
  parser.error('invalid trim range (-1 +1).')


-name   = options.format if options.basename is None\
-                        else options.basename
-output = sys.stdout     if options.basename is None\
-                        else open(os.path.basename(options.basename)+extensions[outtypes.index(options.format)],'w')
+name   = options.format if filename[0] is None\
+                        else filename[0]
+output = sys.stdout     if filename[0] is None\
+                        else open(os.path.basename(filename[0])+extensions[outtypes.index(options.format)],'w')

colorLeft  = damask.Color(options.colormodel.upper(), list(options.left))
colorRight = damask.Color(options.colormodel.upper(), list(options.right))
@@ -1003,8 +1003,7 @@ fileOpen = False
assembleHeader = True
header = []
standard = ['inc'] + \
-           {True: ['time'],
-            False:[]}[options.time] + \
+           (['time'] if options.time else []) + \
           ['elem','node','ip','grain','1_pos','2_pos','3_pos']

# --------------------------- loop over positions --------------------------------
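
Note: the replacement swaps a bool-keyed dictionary lookup for a conditional expression; both yield the same list, but the conditional reads directly and avoids building the unused branch. A one-line check:

options_time = True
assert ({True: ['time'], False: []}[options_time]) == (['time'] if options_time else [])
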
@@ -1,82 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,glob,re
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

# -----------------------------
def findTag(filename,tag):

with open(filename,'r') as myfile:
mypattern = re.compile(str(tag))
for line in myfile:
if mypattern.search(line): return True
return False


# -----------------------------
# MAIN FUNCTION STARTS HERE
# -----------------------------

# --- input parsing

parser = OptionParser(usage='%prog [options] directory', description = """
Add data from an ASCII table to a VTK geometry file.

""", version = scriptID)

parser.add_option('-s','--sub', action='store_true', dest='subdir', \
help='include files in subdirectories [%default]')
parser.set_defaults(subdir = False)

(options, dirname) = parser.parse_args()

# --- sanity checks

if dirname == []:
parser.print_help()
parser.error('no directory specified...')
else:
dirname = os.path.abspath(dirname[0])                                       # only use first argument

if not os.path.isdir(dirname):
parser.print_help()
parser.error('invalid directory "%s" specified...'%dirname)

# --- loop over "nodebased" and "ipbased" data files and
#     copy data to corresponding geometry files

dataSetTag = {'nodebased':'POINT_DATA', 'ipbased':'CELL_DATA'}
for geomtype in ['nodebased','ipbased']:
for vtkfilename in glob.iglob(dirname+os.sep+'*'+geomtype+'*.vtk'):

if not os.path.dirname(vtkfilename) == dirname and not options.subdir: continue  # include files in subdir?
datafilename = os.path.splitext(vtkfilename)[0] + '.txt'
if not os.path.exists(datafilename): continue                               # no corresponding datafile found

# --- read data from datafile

with open(datafilename,'r') as datafile:                                    # open datafile in read mode
table = damask.ASCIItable(fileIn=datafile)                                  # use ASCIItable class to read data file
table.head_read()                                                           # read ASCII header info
myData = []
while table.data_read():                                                    # read line in datafile
myData.append(table.data)
myData = zip(*myData)                                                       # reorder data: first index now label, not node

# --- append data to vtkfile

with open(vtkfilename,'a') as vtkfile:                                      # open vtkfile in append mode
print vtkfilename
if not findTag(vtkfilename,dataSetTag[geomtype]):                           # check if data set is already present...
vtkfile.write(dataSetTag[geomtype] + ' %i'%len(myData[0]))                  # ... if not, write keyword
for idx,label in enumerate(table.labels):                                   # write data
vtkfile.write('\nSCALARS '+label+' double 1\nLOOKUP_TABLE default\n')       # all scalar data
vtkfile.write('\n'.join(map(str,myData[idx])))
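
The deleted script grows legacy-format .vtk files by appending the CELL_DATA/POINT_DATA keyword once, followed by one SCALARS block per column; a minimal sketch of that append pattern in isolation (file name and values are made up):

    values = [0.1, 0.2, 0.3]                          # one value per cell, made up
    with open('example_ipbased.vtk','a') as vtkfile:  # hypothetical existing geometry file
        vtkfile.write('CELL_DATA %i'%len(values))     # announce cell-based data once
        vtkfile.write('\nSCALARS myField double 1\nLOOKUP_TABLE default\n')
        vtkfile.write('\n'.join(map(str,values)))
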
@@ -1,135 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,vtk
import damask
from collections import defaultdict
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Add scalar and RGB tuples from ASCIItable to existing VTK voxel cloud (.vtu/.vtk).

""", version = scriptID)

parser.add_option('-v', '--vtk', dest='vtk', \
help = 'VTK file name')
parser.add_option('-s', '--scalar', dest='scalar', action='extend', \
help = 'scalar values')
parser.add_option('-c', '--color', dest='color', action='extend', \
help = 'RGB color tuples')

parser.set_defaults(scalar = [],
color = [],
render = False,
)

(options, filenames) = parser.parse_args()

if options.vtk is None or not os.path.exists(options.vtk):
parser.error('VTK file does not exist')

if os.path.splitext(options.vtk)[1] == '.vtu':
reader = vtk.vtkXMLUnstructuredGridReader()
reader.SetFileName(options.vtk)
reader.Update()
uGrid = reader.GetOutput()
elif os.path.splitext(options.vtk)[1] == '.vtk':
reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(options.vtk)
reader.Update()
uGrid = reader.GetUnstructuredGridOutput()
else:
parser.error('unsupported VTK file type extension')

Npoints = uGrid.GetNumberOfPoints()
Ncells = uGrid.GetNumberOfCells()

sys.stderr.write('{}: {} points and {} cells...\n'.format(damask.util.emph(options.vtk),Npoints,Ncells))

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
except: continue
damask.util.croak(damask.util.emph(scriptName)+(': '+name if name else ''))

# --- interpret header ----------------------------------------------------------------------------

table.head_read()

remarks = []
errors = []
VTKarray = {}
active = defaultdict(list)

for datatype,dimension,label in [['scalar',1,options.scalar],
['color',3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
me = label[i]
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dim > dimension: remarks.append('"{}" not of dimension{}...'.format(me,dimension))
else:
damask.util.croak('adding {} {}'.format(datatype,me))
active[datatype].append(me)

if datatype in ['scalar']:
VTKarray[me] = vtk.vtkDoubleArray()
elif datatype == 'color':
VTKarray[me] = vtk.vtkUnsignedCharArray()

VTKarray[me].SetNumberOfComponents(dimension)
VTKarray[me].SetName(label[i])

if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue

# ------------------------------------------ process data ---------------------------------------

while table.data_read():                                                    # read next data line of ASCII table

for datatype,labels in active.items():                                      # loop over scalar,color
for me in labels:                                                           # loop over all requested items
theData = [table.data[i] for i in table.label_indexrange(me)]               # read strings
if datatype == 'color':
VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
elif datatype == 'scalar':
VTKarray[me].InsertNextValue(float(theData[0]))

# ------------------------------------------ add data ---------------------------------------

for datatype,labels in active.items():                                      # loop over scalar,color
if datatype == 'color':
uGrid.GetCellData().SetScalars(VTKarray[active['color'][0]])
for label in labels:                                                        # loop over all requested items
uGrid.GetCellData().AddArray(VTKarray[me])

uGrid.Modified()
if vtk.VTK_MAJOR_VERSION <= 5:
uGrid.Update()

# ------------------------------------------ output result ---------------------------------------

writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.splitext(options.vtk)[0]+'_added.vtu')
if vtk.VTK_MAJOR_VERSION <= 5:
writer.SetInput(uGrid)
else:
writer.SetInputData(uGrid)
writer.Write()
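
The core pattern the deleted script relies on, attaching a named array to a grid's cell data so ParaView shows it as a field, in isolation (grid construction omitted, values made up):

    import vtk

    scalars = vtk.vtkDoubleArray()                    # one double per cell
    scalars.SetNumberOfComponents(1)
    scalars.SetName('phase')                          # label under which the field appears
    for v in [1.0, 2.0, 2.0]:                         # made-up values, one per cell
        scalars.InsertNextValue(v)
    # given some vtkUnstructuredGrid 'uGrid' with three cells:
    # uGrid.GetCellData().AddArray(scalars)
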
@@ -18,12 +18,13 @@ Produce a VTK point cloud dataset based on coordinates given in an ASCIItable.

""", version = scriptID)

parser.add_option('-c', '--coordinates',
parser.add_option('-p',
'--pos', '--position',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'coordinate label [%default]')
help = 'label of coordinates [%default]')

parser.set_defaults(pos = 'pos'
parser.set_defaults(pos = 'pos',
)

(options, filenames) = parser.parse_args()

@@ -83,19 +84,21 @@ for name in filenames:

if name:
writer = vtk.vtkXMLPolyDataWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]\
+'.'+writer.GetDefaultFileExtension()))
writer.SetDataModeToBinary()
writer.SetFileName(os.path.join(os.path.split(name)[0],
os.path.splitext(os.path.split(name)[1])[0] +
'.' + writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)
writer.WriteToOutputStringOn()

if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(Polydata)
else: writer.SetInputData(Polydata)

writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

if name is None: sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()])  # limiting of outputString is fix for vtk <7.0
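
The tail-end change recurring in these writer hunks is worth spelling out: on VTK releases before 7.0, GetOutputString() can return a buffer longer than the valid payload, so the string is sliced to GetOutputStringLength(). A minimal sketch of the pattern, driven by an empty dataset just to make it runnable:

    import sys, vtk

    poly = vtk.vtkPolyData()                          # empty dataset, only to drive the writer
    writer = vtk.vtkDataSetWriter()
    writer.WriteToOutputStringOn()                    # keep the result in memory instead of a file
    if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(poly)
    else:                          writer.SetInputData(poly)
    writer.Write()
    # slice to the reported length: on vtk <7.0 the raw buffer may carry trailing junk
    sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()])
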
@@ -6,7 +6,6 @@ import numpy as np
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

@@ -19,16 +18,25 @@ Create regular voxel grid from points in an ASCIItable (or geom file).

""", version = scriptID)

parser.add_option('-m', '--mode',
parser.add_option('-m',
'--mode',
dest = 'mode',
type = 'choice', choices = ['cell','point'],
help = 'cell-centered or point-centered coordinates')
parser.add_option('-c', '--coordinates',
dest = 'coords',
parser.add_option('-p',
'--pos', '--position',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'coordinate label [%default]')
parser.set_defaults(coords = 'pos',
mode = 'cell'
help = 'label of coordinates [%default]')
parser.add_option('-g',
'--geom',
dest = 'geom',
action = 'store_true',
help = 'geom input format')

parser.set_defaults(mode = 'cell',
pos = 'pos',
geom = False,
)

(options, filenames) = parser.parse_args()

@@ -38,7 +46,7 @@ parser.set_defaults(coords = 'pos',
if filenames == []: filenames = [None]

for name in filenames:
isGeom = name.endswith('.geom')
isGeom = options.geom or (name is not None and name.endswith('.geom'))
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = not isGeom,

@@ -53,9 +61,9 @@ for name in filenames:

remarks = []
errors = []
coordDim = 3 if isGeom else table.label_dimension(options.coords)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
elif coordDim < 3: remarks.append('appending {} dimensions to coordinates "{}"...'.format(3-coordDim,options.coords))
coordDim = 3 if isGeom else table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimensions to coordinates "{}"...'.format(3-coordDim,options.pos))

if remarks != []: damask.util.croak(remarks)
if errors != []:

@@ -73,7 +81,7 @@ for name in filenames:
endpoint = True,
) for i in xrange(3)]
else:
table.data_readArray(options.coords)
table.data_readArray(options.pos)
if len(table.data.shape) < 2: table.data.shape += (1,)                      # expand to 2D shape
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,

@@ -119,20 +127,22 @@ for name in filenames:

if name:
writer = vtk.vtkXMLRectilinearGridWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0] +
('' if isGeom else '_{}({})'.format(options.coords, options.mode)) +
writer.SetDataModeToBinary()
writer.SetFileName(os.path.join(os.path.split(name)[0],
os.path.splitext(os.path.split(name)[1])[0] +
('' if isGeom else '_{}({})'.format(options.pos, options.mode)) +
'.' + writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)
writer.WriteToOutputStringOn()

if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(rGrid)
else: writer.SetInputData(rGrid)

writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

if name is None: sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()])  # limiting of outputString is fix for vtk <7.0

table.close()
@@ -1,122 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,shutil
import damask
from optparse import OptionParser
import vtk

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """

""", version = scriptID)

parser.add_option('-v','--vector', nargs=3, dest='vector', \
help='suffices indicating vector components [%default]')
parser.add_option('-s','--separator', dest='separator', \
help='separator between label and suffix [%default]')

parser.set_defaults(vector = ['x','y','z'])
parser.set_defaults(separator = '.')

(options, filenames) = parser.parse_args()

# --- sanity checks

if filenames == []:
parser.print_help()
parser.error('no file specified...')

for filename in filenames:
if not os.path.isfile(filename):
parser.print_help()
parser.error('invalid file "%s" specified...'%filename)

# --- ITERATE OVER FILES AND PROCESS THEM

for filename in filenames:

sys.stdout.write('read file "%s" ...'%filename)
sys.stdout.flush()
suffix = os.path.splitext(filename)[1]
if suffix == '.vtk':
reader = vtk.vtkUnstructuredGridReader()
reader.ReadAllScalarsOn()
reader.ReadAllVectorsOn()
reader.ReadAllTensorsOn()
elif suffix == '.vtu':
reader = vtk.vtkXMLUnstructuredGridReader()
else:
parser.error('filetype "%s" not supported'%suffix)
reader.SetFileName(filename)
reader.Update()
uGrid = reader.GetOutput()
sys.stdout.write(' done\n')
sys.stdout.flush()

# Read the scalar data

scalarData = {}
scalarsToBeRemoved = []
Nscalars = uGrid.GetCellData().GetNumberOfArrays()
for i in range(Nscalars):
sys.stdout.write("\rread scalar data %d%%" %(100*i/Nscalars))
sys.stdout.flush()
scalarName = uGrid.GetCellData().GetArrayName(i)
if scalarName.split(options.separator)[-1] in options.vector:
label,suffix = scalarName.split(options.separator)
if label not in scalarData:
scalarData[label] = [[],[],[]]
uGrid.GetCellData().SetActiveScalars(scalarName)
scalarData[label][options.vector.index(suffix)] = uGrid.GetCellData().GetScalars(scalarName)
scalarsToBeRemoved.append(scalarName)
for scalarName in scalarsToBeRemoved:
uGrid.GetCellData().RemoveArray(scalarName)
sys.stdout.write('\rread scalar data done\n')
sys.stdout.flush()

# Convert the scalar data to vector data

NscalarData = len(scalarData)
for n,label in enumerate(scalarData):
sys.stdout.write("\rconvert to vector data %d%%" %(100*n/NscalarData))
sys.stdout.flush()
Nvalues = scalarData[label][0].GetNumberOfTuples()
vectorData = vtk.vtkDoubleArray()
vectorData.SetName(label)
vectorData.SetNumberOfComponents(3)                                         # set this before NumberOfTuples !!!
vectorData.SetNumberOfTuples(Nvalues)
for i in range(Nvalues):
for j in range(3):
vectorData.SetComponent(i,j,scalarData[label][j].GetValue(i))
uGrid.GetCellData().AddArray(vectorData)
sys.stdout.write('\rconvert to vector data done\n')

# Write to new vtk file

outfilename = os.path.splitext(filename)[0]+'.vtu'
sys.stdout.write('write to file "%s" ...'%outfilename)
sys.stdout.flush()
writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetFileName(outfilename+'_tmp')
writer.SetDataModeToAscii()
writer.SetInput(uGrid)
writer.Write()
sys.stdout.write(' done\n')
sys.stdout.flush()
shutil.move(outfilename+'_tmp',outfilename)

# --------------------------- DONE --------------------------------
@@ -1,118 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,vtk
import numpy as np
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Create hexahedral voxels around points in an ASCIItable.

""", version = scriptID)

parser.add_option('-d', '--deformed',
dest = 'deformed',
type = 'string', metavar = 'string',
help = 'deformed coordinate label [%default]')
parser.add_option('-c','--coordinates',
dest = 'coords',
type = 'string', metavar='string',
help = 'undeformed coordinates label [%default]')
parser.set_defaults(deformed = 'ipdeformedcoord',
coords = 'ipinitialcoord',
)

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
except: continue
damask.util.report(scriptName,name)

# --------------- interprete header -----------------------------------------------------------------
table.head_read()
errors=[]
if table.label_dimension(options.deformed) != 3:
errors.append('columns "{}" have dimension {}'.format(options.deformed,table.label_dimension(options.deformed)))
if table.label_dimension(options.coords) != 3:
errors.append('coordinates {} are not a vector.'.format(options.coords))

table.data_readArray([options.coords,options.deformed])

# --------------- figure out size and grid ---------------------------------------------------------
coords = [{},{},{}]
for i in xrange(len(table.data)):
for j in xrange(3):
coords[j][str(table.data[i,table.label_index(options.coords)+j])] = True
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d')                                                                      # size from bounding box, corrected for cell-centeredness

size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))         # spacing for grid==1 set to smallest among other spacings

# ------------------------------------------ process data ---------------------------------------
hexPoints = np.array([[-1,-1,-1],
[ 1,-1,-1],
[ 1, 1,-1],
[-1, 1,-1],
[-1,-1, 1],
[ 1,-1, 1],
[ 1, 1, 1],
[-1, 1, 1],
])

halfDelta = 0.5*size/grid

Points = vtk.vtkPoints()
Hex = vtk.vtkHexahedron()
uGrid = vtk.vtkUnstructuredGrid()

for p in table.data:
for i,h in enumerate(hexPoints):
pointID = Points.InsertNextPoint(p[table.label_index(options.deformed):table.label_index(options.deformed)+3]+h*halfDelta)
Hex.GetPointIds().SetId(i,pointID)

uGrid.InsertNextCell(Hex.GetCellType(), Hex.GetPointIds())

uGrid.SetPoints(Points)

# ------------------------------------------ output result ---------------------------------------

if name:
writer = vtk.vtkXMLUnstructuredGridWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]\
+'.'+writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)

if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(uGrid)
else: writer.SetInputData(uGrid)
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

table.close()                                                               # close input ASCII table
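
The deleted script recovers grid and physical size from cell-centered coordinates: counting distinct values per axis gives the grid, and the bounding box of the centers is stretched by grid/(grid-1) to recover the true edge length, with flat axes falling back to the smallest spacing elsewhere. A numpy sketch with made-up centers of a 2 x 2 x 1 arrangement of unit cells:

    import numpy as np

    coords = np.array([[0.5,0.5,0.5],[1.5,0.5,0.5],
                       [0.5,1.5,0.5],[1.5,1.5,0.5]])                         # made-up cell centers
    grid = np.array([len(np.unique(coords[:,i])) for i in range(3)])         # -> [2 2 1]
    span = coords.max(axis=0) - coords.min(axis=0)                           # bounding box of the centers
    size = grid/np.maximum(np.ones(3),grid-1.0)*span                         # -> [2. 2. 0.]
    size = np.where(grid > 1, size, (size[grid > 1]/grid[grid > 1]).min())   # flat axis gets smallest spacing -> [2. 2. 1.]
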
@@ -15,25 +15,29 @@ scriptID = ' '.join([scriptName,damask.version])

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Changes the (three-dimensional) canvas of a spectral geometry description.
Grid can be given as absolute or relative values, e.g. 16 16 16 or 2x 0.5x 32.

""", version = scriptID)

parser.add_option('-g', '--grid',
parser.add_option('-g',
'--grid',
dest = 'grid',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'a,b,c grid of hexahedral box [auto]')
parser.add_option('-o', '--offset',
help = 'a,b,c grid of hexahedral box. [auto]')
parser.add_option('-o',
'--offset',
dest = 'offset',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c offset from old to new origin of grid [%default]')
parser.add_option('-f', '--fill',
parser.add_option('-f',
'--fill',
dest = 'fill',
type = 'float', metavar = 'float',
help = '(background) canvas grain index. "0" selects maximum microstructure index + 1 [%default]')
parser.add_option('--float',
dest = 'real',
action = 'store_true',
help = 'input data is float [%default]')
help = 'use float input')

parser.set_defaults(grid = ['0','0','0'],
offset = (0,0,0),

@@ -61,13 +65,7 @@ for name in filenames:

table.head_read()
info,extra_header = table.head_getGeom()

damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
damask.util.report_geom(info)

errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')

@@ -105,7 +103,7 @@ for name in filenames:
translate_y = [i - options.offset[1] for i in yindex]
translate_z = [i - options.offset[2] for i in zindex]
if 0 in map(len,[xindex,yindex,zindex,translate_x,translate_y,translate_z]):
damask.util.croak('Invaldid grid-offset comination')
damask.util.croak('invaldid grid-offset combination.')
table.close(dismiss = True)
continue
microstructure_cropped[min(translate_x):(max(translate_x)+1),\

@@ -125,13 +123,13 @@ for name in filenames:
errors = []

if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: %s'%(' x '.join(map(str,newInfo['grid']))))
remarks.append('--> grid a b c: {}'.format(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: %s'%(' x '.join(map(str,newInfo['size']))))
remarks.append('--> size x y z: {}'.format(' x '.join(map(str,newInfo['size']))))
if (any(newInfo['origin'] != info['origin'])):
remarks.append('--> origin x y z: %s'%(' : '.join(map(str,newInfo['origin']))))
remarks.append('--> origin x y z: {}'.format(' : '.join(map(str,newInfo['origin']))))
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: %i'%newInfo['microstructures'])
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))

if np.any(newInfo['grid'] < 1): errors.append('invalid new grid a b c.')
if np.any(newInfo['size'] <= 0.0): errors.append('invalid new size x y z.')

@@ -147,11 +145,11 @@ for name in filenames:
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=newInfo['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
"grid\ta {}\tb {}\tc {}".format(*newInfo['grid']),
"size\tx {}\ty {}\tz {}".format(*newInfo['size']),
"origin\tx {}\ty {}\tz {}".format(*newInfo['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(newInfo['microstructures']),
extra_header
])
table.labels_clear()
@@ -1,108 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,vtk
import numpy as np
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """
Produce VTK rectilinear mesh of structure data from geom description

""", version = scriptID)

parser.add_option('-m','--nodata',
dest = 'data',
action = 'store_false',
help = 'generate mesh without microstructure index data')

parser.set_defaults(data = True,
)

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False, readonly = True)
except: continue
damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

table.head_read()
info,extra_header = table.head_getGeom()

damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])

errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')

#--- read microstructure information --------------------------------------------------------------

if options.data:
microstructure,ok = table.microstructure_read(info['grid'],strict = True)   # read microstructure

if ok:
structure = vtk.vtkIntArray()
structure.SetName('Microstructures')
for idx in microstructure: structure.InsertNextValue(idx)

else: errors.append('mismatch between data and grid dimension.')

if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# --- generate VTK rectilinear grid ---------------------------------------------------------------

grid = vtk.vtkRectilinearGrid()
grid.SetDimensions([x+1 for x in info['grid']])
for i in xrange(3):
temp = vtk.vtkDoubleArray()
temp.SetNumberOfTuples(info['grid'][i]+1)
for j in xrange(info['grid'][i]+1):
temp.InsertTuple1(j,j*info['size'][i]/info['grid'][i]+info['origin'][i])
if   i == 0: grid.SetXCoordinates(temp)
elif i == 1: grid.SetYCoordinates(temp)
elif i == 2: grid.SetZCoordinates(temp)

if options.data: grid.GetCellData().AddArray(structure)

# --- write data -----------------------------------------------------------------------------------
if name:
writer = vtk.vtkXMLRectilinearGridWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]+'.'+writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)

if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
else: writer.SetInputData(grid)
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

table.close()
@@ -0,0 +1,15 @@
#!/bin/bash

for geom in "$@"
do
vtk_rectilinearGrid \
--geom $geom

geom_toTable \
< $geom \
| \
vtk_addRectilinearGridData \
--scalar microstructure \
--inplace \
--vtk ${geom%.*}.vtr
done
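
This added shell script chains three DAMASK processing tools: vtk_rectilinearGrid writes the grid skeleton for each geom file, and geom_toTable piped into vtk_addRectilinearGridData attaches the microstructure indices as cell data in place, so every input is left with a ParaView-readable ${geom%.*}.vtr companion. Assuming the script is installed under the geom_check name it replaces, a call like geom_check 20grains.geom (file name illustrative) would produce 20grains.vtr next to the input.
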
@@ -1,206 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,math
import numpy as np
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """

Generate geometry description and material configuration from EBSD data in given square-gridded 'ang' file.
Two phases can be discriminated based on threshold value in a given data column.

""", version = scriptID)

parser.add_option('--column', dest='column', type='int', metavar = 'int',
help='data column to discriminate between both phases [%default]')
parser.add_option('-t','--threshold', dest='threshold', type='float', metavar = 'float',
help='threshold value for phase discrimination [%default]')
parser.add_option('--homogenization', dest='homogenization', type='int', metavar = 'int',
help='homogenization index for <microstructure> configuration [%default]')
parser.add_option('--phase', dest='phase', type='int', nargs = 2, metavar = 'int int',
help='phase indices for <microstructure> configuration %default')
parser.add_option('--crystallite', dest='crystallite', type='int', metavar = 'int',
help='crystallite index for <microstructure> configuration [%default]')
parser.add_option('-c','--compress', dest='compress', action='store_true',
help='lump identical microstructure and texture information [%default]')
parser.add_option('-a', '--axes', dest='axes', nargs = 3, metavar = 'string string string',
help='Euler angle coordinate system for <texture> configuration x,y,z = %default')
parser.add_option('-p', '--precision', dest='precision', choices=['0','1','2','3'], metavar = 'int',
help = 'euler angles decimal places for output format and compressing (0,1,2,3) [2]')

parser.set_defaults(column = 11,
threshold = 0.5,
homogenization = 1,
phase = [1,2],
crystallite = 1,
compress = False,
axes = ['y','x','-z'],
precision = '2',
)
(options,filenames) = parser.parse_args()

for i in options.axes:
if i.lower() not in ['x','+x','-x','y','+y','-y','z','+z','-z']:
parser.error('invalid axes %s %s %s' %(options.axes[0],options.axes[1],options.axes[2]))

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[-2]+'.geom' if name else name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)

info = {
'grid': np.ones(3,'i'),
'size': np.zeros(3,'d'),
'origin': np.zeros(3,'d'),
'microstructures': 0,
'homogenization': options.homogenization
}

coords = [{},{},{1:True}]
pos = {'min':[ float("inf"), float("inf")],
'max':[-float("inf"),-float("inf")]}

phase = []
eulerangles = []

# --------------- read data -----------------------------------------------------------------------
errors = []
while table.data_read():
words = table.data
if words[0] == '#':                                                         # process initial comments/header block
if len(words) > 2:
if words[2].lower() == 'hexgrid':
errors.append('The file has HexGrid format. Please first convert to SquareGrid...')
break
else:
currPos = words[3:5]
for i in xrange(2):
coords[i][currPos[i]] = True
currPos = map(float,currPos)
for i in xrange(2):
pos['min'][i] = min(pos['min'][i],currPos[i])
pos['max'][i] = max(pos['max'][i],currPos[i])
eulerangles.append(map(math.degrees,map(float,words[:3])))
phase.append(options.phase[int(float(words[options.column-1]) > options.threshold)])

if errors != []:
damask.util.croak(errors)
continue

# --------------- determine size and grid ---------------------------------------------------------
info['grid'] = np.array(map(len,coords),'i')
info['size'][0:2] = info['grid'][0:2]/(info['grid'][0:2]-1.0)* \
np.array([pos['max'][0]-pos['min'][0],
pos['max'][1]-pos['min'][1]],'d')
info['size'][2]=info['size'][0]/info['grid'][0]
eulerangles = np.array(eulerangles,dtype='f').reshape(info['grid'].prod(),3)
phase = np.array(phase,dtype='i').reshape(info['grid'].prod())

limits = [360,180,360]
if any([np.any(eulerangles[:,i]>=limits[i]) for i in [0,1,2]]):
errors.append('Error: euler angles out of bound. Ang file might contain unidexed poins.')
for i,angle in enumerate(['phi1','PHI','phi2']):
for n in np.nditer(np.where(eulerangles[:,i]>=limits[i]),['zerosize_ok']):
errors.append('%s in line %i (%4.2f %4.2f %4.2f)\n'
%(angle,n,eulerangles[n,0],eulerangles[n,1],eulerangles[n,2]))
if errors != []:
damask.util.croak(errors)
continue

eulerangles=np.around(eulerangles,int(options.precision))                   # round to desired precision
# ensure, that rounded euler angles are not out of bounds (modulo by limits)
for i,angle in enumerate(['phi1','PHI','phi2']):
eulerangles[:,i]%=limits[i]

# scale angles by desired precision and convert to int. create unique integer key from three euler angles by
# concatenating the string representation with leading zeros and store as integer and search unique euler angle keys.
# Texture IDs are the indices of the first occurrence, the inverse is used to construct the microstructure
# create a microstructure (texture/phase pair) for each point using unique texture IDs.
# Use longInt (64bit, i8) because the keys might be long
if options.compress:
formatString='{0:0>'+str(int(options.precision)+3)+'}'
euleranglesRadInt = (eulerangles*10**int(options.precision)).astype('int')
eulerKeys = np.array([int(''.join(map(formatString.format,euleranglesRadInt[i,:]))) \
for i in xrange(info['grid'].prod())])
devNull, texture, eulerKeys_idx = np.unique(eulerKeys, return_index = True, return_inverse=True)
msFull = np.array([[eulerKeys_idx[i],phase[i]] for i in xrange(info['grid'].prod())],'i8')
devNull,msUnique,matPoints = np.unique(msFull.view('c16'),True,True)
matPoints+=1
microstructure = np.array([msFull[i] for i in msUnique])                    # pick only unique microstructures
else:
texture = np.arange(info['grid'].prod())
microstructure = np.hstack( zip(texture,phase) ).reshape(info['grid'].prod(),2)  # create texture/phase pairs
formatOut = 1+int(math.log10(len(texture)))
textureOut =['<texture>']

eulerFormatOut='%%%i.%if'%(int(options.precision)+4,int(options.precision))
outStringAngles='(gauss) phi1 '+eulerFormatOut+' Phi '+eulerFormatOut+' phi2 '+eulerFormatOut+' scatter 0.0 fraction 1.0'
for i in xrange(len(texture)):
textureOut += ['[Texture%s]'%str(i+1).zfill(formatOut),
'axes %s %s %s'%(options.axes[0],options.axes[1],options.axes[2]),
outStringAngles%tuple(eulerangles[texture[i],...])
]
formatOut = 1+int(math.log10(len(microstructure)))
microstructureOut =['<microstructure>']
for i in xrange(len(microstructure)):
microstructureOut += ['[Grain%s]'%str(i+1).zfill(formatOut),
'crystallite\t%i'%options.crystallite,
'(constituent)\tphase %i\ttexture %i\tfraction 1.0'%(microstructure[i,1],microstructure[i,0]+1)
]

info['microstructures'] = len(microstructure)

#--- report ---------------------------------------------------------------------------------------
damask.util.croak('grid     a b c:  %s\n'%(' x '.join(map(str,info['grid']))) +
'size     x y z:  %s\n'%(' x '.join(map(str,info['size']))) +
'origin   x y z:  %s\n'%(' : '.join(map(str,info['origin']))) +
'homogenization:  %i\n'%info['homogenization'] +
'microstructures: %i\n\n'%info['microstructures'])

if np.any(info['grid'] < 1):
damask.util.croak('invalid grid a b c.\n')
continue
if np.any(info['size'] <= 0.0):
damask.util.croak('invalid size x y z.\n')
continue

#--- write data/header --------------------------------------------------------------------------------
table.info_clear()
table.info_append([' '.join([scriptID] + sys.argv[1:]),
"grid\ta %i\tb %i\tc %i"%(info['grid'][0],info['grid'][1],info['grid'][2],),
"size\tx %f\ty %f\tz %f"%(info['size'][0],info['size'][1],info['size'][2],),
"origin\tx %f\ty %f\tz %f"%(info['origin'][0],info['origin'][1],info['origin'][2],),
"microstructures\t%i"%info['microstructures'],
"homogenization\t%i"%info['homogenization'],
] +
[line for line in microstructureOut + textureOut]
)
table.head_write()
if options.compress:
matPoints = matPoints.reshape((info['grid'][1],info['grid'][0]))
table.data = matPoints
table.data_writeArray('%%%ii'%(1+int(math.log10(np.amax(matPoints)))),delimiter=' ')
else:
table.output_write("1 to %i\n"%(info['microstructures']))
table.output_flush()

# --- output finalization --------------------------------------------------------------------------

table.close()
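
The compression branch's comments describe the trick in prose: round the Euler angles, print each triplet as fixed-width zero-padded integers, concatenate into one big integer key, and let np.unique find the distinct orientations. A self-contained numpy sketch of that key construction with made-up angles:

    import numpy as np

    eulers = np.array([[ 10.12, 20.00, 30.50],
                       [350.00,  5.25, 12.00],
                       [ 10.12, 20.00, 30.50]])                              # made up; rows 0 and 2 coincide
    precision = 2                                                            # decimal places kept, as in the script
    scaled = np.rint(np.around(eulers,precision)*10**precision).astype('int64')
    keys = np.array([int(''.join('{0:0>5}'.format(v) for v in row))          # width 5 = precision+3, zero-padded
                     for row in scaled])
    _, texture, inverse = np.unique(keys, return_index=True, return_inverse=True)
    # texture -> rows holding the distinct orientations; inverse -> texture ID per point
    assert inverse[0] == inverse[2]                                          # duplicates collapse onto one ID
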
@@ -1,297 +0,0 @@
#!/usr/bin/env python

##
# This script will read in all the seeds and partition the space
# using scipy.spatial.Delaunay triangulation.
# The unknown location will be then interpolated through Barycentric
# interpolation method, which relies on the triangulation.
# A rim will be automatically added to the patch, which will help
# improve the compatibility with the spectral solver as well as
# maintain meaningful microstructure(reduce artifacts).

import os
import numpy as np
import argparse
from scipy.spatial import Delaunay
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

OFFSET = 0.1       #resize the seeded volume to give space for rim/pan
PHANTOM_ID = -1    #grain ID for phantom seeds

def d_print(info, data, separator=False):
"""quickly print debug information"""
if(separator): print "*"*80
print info
print data

def meshgrid2(*arrs):
"""code inspired by http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d"""
arrs = tuple(reversed(arrs))
arrs = tuple(arrs)
lens = np.array(map(len, arrs))
dim = len(arrs)
ans = []
for i, arr in enumerate(arrs):
slc = np.ones(dim,'i')
slc[i] = lens[i]
arr2 = np.asarray(arr).reshape(slc)
for j, sz in enumerate(lens):
if j != i:
arr2 = arr2.repeat(sz, axis=j)

ans.insert(0,arr2)
return tuple(ans)

#prepare command line interface
parser = argparse.ArgumentParser(prog="geoFromBarycentic",
description='''Generate geom file through \
Barycentric interpolating seeds file.''',
epilog="requires numpy, and scipy.")
parser.add_argument("seeds",
help="seeds file in DAMASK format:\
http://damask.mpie.de/Documentation/AsciiTableFormat",
default="test.seeds")
parser.add_argument("-v", "--version",
action="version",
version="%(prog)s 0.1")
parser.add_argument("-g", "--grid",
nargs=3,
help="grid size(mesh resolution, recommend using 2^x)",
default=[32,32,32],
type=int)
parser.add_argument("-s", "--size",
help="physical size of the target volume.",
nargs=3,
default=[1.0,1.0,1.0],
type=float)
parser.add_argument("-o", "--origin",
help="lower left corner of the patch.",
nargs=3,
default=[0.0,0.0,0.0],
type=float)
parser.add_argument('-m', '--homogenization',
help='homogenization index to be used',
default=1,
type=int)
parser.add_argument('-c', '--crystallite',
help='crystallite index to be used',
default=1,
type=int)
parser.add_argument('-p', '--phase',
help='phase index to be used',
default=1,
type=int)
parser.add_argument('-F', '--Favg',
help='reshape the periodicity, not useful for RIM method',
nargs=9,
default=[1.0,0.0,0.0,
0.0,1.0,0.0,
0.0,0.0,1.0],
type=float)
parser.add_argument("-G", "--geomFile",
help='the name of the output geom file',
default='seeds.geom',
type=str)
parser.add_argument("-C", "--configFile",
help='output dummy material.config file',
action='store_true',
default=False)
parser.add_argument("-d", "--debug",
help="start debugging script",
action='store_true',
default=False)
parser.add_argument("-S", "--seedsFile",
help="write out resized seeds file",
action='store_true',
default=False)
parser.add_argument("-r", '--addRim',
help="add rim and provide control of face lifting point",
action='store_true',
default=False)
args = parser.parse_args()  # get all the arguments right after

#quick help to user
print "*"*80
parser.print_help()
print """Sample usage:
./geoFromBarycentic.py 20grains.seeds -g 128 128 128 -S -r; geom_check seeds.geom; seeds_check new_seed.seeds.
"""
print "*"*80
if (args.debug):
d_print("args are:", parser.parse_args(),separator=True)

#/\/\/\/\/#
# m a i n #
#\/\/\/\/\#
print "only work for 3D case now, 2D support coming soon..."
print "reading seeds file: {}".format(args.seeds)

with open(args.seeds, 'r') as f:
rawtext = f.readlines()
n_header = int(rawtext.pop(0).split()[0])
#record all the seeds position
if (args.addRim):
grid_shift = np.array(args.size) * np.array([OFFSET,OFFSET,OFFSET*2])
s_coords = np.array([[np.array(float(item))*(1 - OFFSET*2)
for item in line.split()[:3]] + grid_shift
for line in rawtext[n_header:]])
else:
#no need for shifting with periodicity
s_coords = np.array([[np.array(float(item))
for item in line.split()[:3]]
for line in rawtext[n_header:]])

#record ID of the seeds: int/EulerAngles
if 'microstructure' in rawtext[n_header-1]:
s_id = [int(line.split()[-1]) for line in rawtext[n_header:]]
else:
print "WARNING:"
print "THIS SCRIPT DOES NOT UNDERSTAND HOW TO GROUP CRYSTALLITES."
print "ALL CRYSTAL ORIENTATIONS ARE CONSIDERED TO BE UNIQUE."
print "FOR MORE ACCURATE CONTROL OF SEEDS GROUPING, USE MICROSTRUCTURE ID."
s_id = range(len(s_coords))
#s_eulers here is just a quick book keeping
s_eulers = np.array([[float(item) for item in line.split()[3:]]
for line in rawtext[n_header:]])

if(args.debug):
print d_print("resize point cloud to make space for rim/pan:",
s_coords)

if(args.addRim):
#add binding box to create rim/pan for the volume where the ID of the seeds is
#unknown
print "Shrining the seeds to {}x in each direction".format(1 - OFFSET*2)
x,y,z = args.size[0],args.size[1],args.size[2]
print "Use circumscribed sphere to place phantom seeds."
r = np.sqrt(x**2+y**2+z**2)/2.0
BINDBOX = [[0,0,0],[x,0,0],[0,y,0],[x,y,0],
[0,0,z],[x,0,z],[0,y,z],[x,y,z],
[x/2.0+r,y/2, z/2], [x/2.0-r, y/2, z/2],
[x/2, y/2.0+r, z/2], [x/2, y/2.0-r, z/2],
[x/2, y/2, z/2.0-r]]  #8 corners + 5 face centers (no top)
print "Adding phantom seeds for RIM generation:"
for point in BINDBOX:
print point
s_coords = np.vstack([s_coords,point])
s_id.append(PHANTOM_ID)
else:
#The idea here is that we read in each seed point, than duplicate in 3D (make a few copies),
#move on to the next seed point, repeat the same procedure. As for the ID list, we can just use the
#same one. The trick here is use the floor division to find the correct id since we pretty much duplicate
#the same point several times.
Favg = np.array(args.Favg).reshape((3,3))
x,y,z = args.size[0],args.size[1],args.size[2]
tmp = []
for seed in s_coords:
tmp += [np.dot(Favg, np.array(seed) + np.array([dx,dy,dz]))
for dz in [-z, 0, z]
for dy in [-y, 0, y]
for dx in [-x, 0, x]]
s_coords = tmp

if (args.seedsFile):
with open("new_seed.seeds", "w") as f:
outstr = "4\theader\n"
outstr += "grid\ta {}\tb {}\tc {}\n".format(args.grid[0],
args.grid[1],
args.grid[2])
outstr += "microstructures {}\n".format(len(set(s_id)))
outstr += "x\ty\tz\tmicrostructure"
if (args.addRim):
for i in range(len(s_id)):
outstr += "{}\t{}\t{}\t{}\n".format(s_coords[i][0],
s_coords[i][1],
s_coords[i][2],
s_id[i])
else:
for i in range(len(s_coords)):
outstr += "{}\t{}\t{}\t{}\n".format(s_coords[i][0],
s_coords[i][1],
s_coords[i][2],
s_id[i//3**3])
f.write(outstr)

#triangulate space with given point-clouds
tri = Delaunay(s_coords)

if(args.debug):
d_print("simplices:", tri.simplices, separator=True)
d_print("vertices:", s_coords[tri.simplices])

#populate grid points (only 3D for now)
'''
#populating grid using meshgrid2
x = (np.arange(args.grid[0])+0.5)*args.size[0]/args.grid[0]
y = (np.arange(args.grid[1])+0.5)*args.size[1]/args.grid[1]
z = (np.arange(args.grid[2])+0.5)*args.size[2]/args.grid[2]
mesh_pts = np.transpose(np.vstack(map(np.ravel, meshgrid2(x, y, z))))
print mesh_pts
'''
#this is actually faster than using meshgrid2
mesh_pts = [[(i+0.5)*args.size[0]/args.grid[0],
(j+0.5)*args.size[1]/args.grid[1],
(k+0.5)*args.size[2]/args.grid[2]]
for k in range(args.grid[2])
for j in range(args.grid[1])
for i in range(args.grid[0])]

mesh_ids = [PHANTOM_ID*2]*len(mesh_pts)  #initialize grid

#search ID for each grid point
s_id = np.array(s_id)                    #allow multi-indexing
mesh_idx = tri.find_simplex(mesh_pts)

for i, pt in enumerate(mesh_pts):
if mesh_idx[i] < 0:
continue  #didn't find any envelop tetrahedron --> something wrong!
#calculate Barycentric coordinates
bary_c = tri.transform[mesh_idx[i],:3,:3].dot(pt-tri.transform[mesh_idx[i],3,:])
bary_c = np.append(bary_c, 1 - bary_c.sum())

if (args.addRim):
tmp_ids = s_id[tri.simplices[mesh_idx[i]]]  #rim method
else:
tmp_ids = np.array(s_id[tri.simplices[mesh_idx[i]]//(3**3)])  #kill periodicity through floor division
#print tmp_ids
#print tri.simplices[mesh_idx[i]]//(3**3)

max_weight = -1960
for this_id in tmp_ids:
msk = [item==this_id for item in tmp_ids]  #find vertex with the same id
tmp_weight = sum([bary_c[j] for j in range(len(bary_c)) if msk[j]])
if tmp_weight > max_weight:
max_weight = tmp_weight
mesh_ids[i] = this_id
if (args.debug):
d_print("bary_c:",bary_c,separator=True)
d_print("vertex ID:", tmp_ids)
d_print("final ID:", mesh_ids[i])

mesh_ids = np.reshape(mesh_ids, (-1, args.grid[0]))

#write to file
with open(args.geomFile, "w") as f:
outstr = "5\theader\n"
outstr += "grid\ta {}\tb {}\tc {}\n".format(args.grid[0],
args.grid[1],
args.grid[2])
outstr += "size\tx {}\ty {}\tz {}\n".format(args.size[0],
args.size[1],
args.size[2])
outstr += "origin\tx {}\ty {}\tz {}\n".format(args.origin[0],
args.origin[1],
args.origin[2])
outstr += "homogenization\t{}\nmicrostructure\t{}\n".format(args.homogenization,
len(set(s_id)))
for row in mesh_ids:
row = [str(item) for item in list(row)]
outstr += "\t".join(row) + "\n"
f.write(outstr)
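
The heart of the deleted interpolation script: scipy's Delaunay object both locates the simplex containing a query point and, via its transform attribute, yields the barycentric coordinates that the script uses to weight the vertex IDs. A minimal sketch with made-up seed points:

    import numpy as np
    from scipy.spatial import Delaunay

    seeds = np.array([[0.,0.,0.],[1.,0.,0.],[0.,1.,0.],
                      [0.,0.,1.],[1.,1.,1.]])                                # made-up seed positions
    tri = Delaunay(seeds)

    pt = np.array([[0.2,0.2,0.2]])                                           # query point
    s = int(tri.find_simplex(pt)[0])                                         # enclosing tetrahedron (-1 if outside hull)
    b = tri.transform[s,:3,:3].dot(pt[0]-tri.transform[s,3,:])               # first three barycentric coordinates
    bary = np.append(b, 1.0-b.sum())                                         # weights of the four simplex vertices
    assert s >= 0 and abs(bary.sum()-1.0) < 1e-12
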
@ -1,110 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
from optparse import OptionParser
|
||||
from PIL import Image
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """

Generate geometry description from (multilayer) images.
Microstructure index is based on gray scale value (1..256).

""", version = scriptID)

parser.add_option('--homogenization',
                  dest = 'homogenization',
                  type = 'int', metavar = 'int',
                  help = 'homogenization index [%default]')

parser.set_defaults(homogenization = 1,
                   )

(options,filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              outname = os.path.splitext(name)[0]+'.geom' if name else name,
                              buffered = False, labeled = False)
  except: continue
  table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))

# --- read image ------------------------------------------------------------------------------------

  img = Image.open(name).convert(mode = 'L')                    # open and convert to grayscale 8bit

  slice = 0
  while True:
    try:
      img.seek(slice)                                           # advance to slice
      layer = np.expand_dims(1+np.array(img,dtype = 'uint16'),axis = 0)  # read image layer
      microstructure = layer if slice == 0 else np.vstack((microstructure,layer))  # noqa
      slice += 1                                                # advance to next slice
    except EOFError:
      break

# http://docs.scipy.org/doc/scipy/reference/ndimage.html
# http://scipy-lectures.github.io/advanced/image_processing/

  info = {
          'grid':   np.array(microstructure.shape,'i')[::-1],
          'size':   np.array(microstructure.shape,'d')[::-1],
          'origin': np.zeros(3,'d'),
          'microstructures': len(np.unique(microstructure)),
          'homogenization': options.homogenization,
         }

# --- report ---------------------------------------------------------------------------------------

  table.croak(['grid     a b c:  %s'%(' x '.join(map(str,info['grid']))),
               'size     x y z:  %s'%(' x '.join(map(str,info['size']))),
               'origin   x y z:  %s'%(' : '.join(map(str,info['origin']))),
               'homogenization:  %i'%info['homogenization'],
               'microstructures: %i'%info['microstructures'],
              ])

  errors = []
  if np.any(info['grid'] < 1):    errors.append('invalid grid a b c.')
  if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
  if errors != []:
    table.croak(errors)
    table.close(dismiss = True)
    continue

# --- write header ---------------------------------------------------------------------------------

  table.info_clear()
  table.info_append([
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
    ])
  table.labels_clear()
  table.head_write()
  table.output_flush()

# --- write microstructure information ------------------------------------------------------------

  formatwidth = int(math.floor(math.log10(microstructure.max())+1))
  table.data = microstructure.reshape((info['grid'][1]*info['grid'][2],info['grid'][0]),order='C')
  table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')

# --- output finalization --------------------------------------------------------------------------

  table.close()                                                 # close ASCII table
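
A minimal, self-contained sketch of the slice loop above may help: it reads every frame of a multilayer image and maps each 8-bit gray value v to microstructure index v+1, which is where the 1..256 range in the script description comes from. The file name 'stack.tif' is only illustrative.

import numpy as np
from PIL import Image

img = Image.open('stack.tif')                          # hypothetical multilayer image
layers = []
try:
    while True:
        frame = img.convert('L')                       # 8-bit grayscale view of current frame
        layers.append(1 + np.array(frame, dtype='uint16'))  # gray value v -> index v+1
        img.seek(img.tell() + 1)                       # advance to next frame
except EOFError:                                       # raised once past the last frame
    pass

microstructure = np.stack(layers)                      # shape: (slices, rows, cols)
print(microstructure.shape, microstructure.min(), microstructure.max())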

@@ -112,7 +112,8 @@ Generate geometry description and material configuration by standard Voronoi tessellation

group = OptionGroup(parser, "Tessellation","")

group.add_option('-l', '--laguerre',
group.add_option('-l',
                 '--laguerre',
                 dest = 'laguerre',
                 action = 'store_true',
                 help = 'use Laguerre (weighted Voronoi) tessellation')

@@ -123,21 +124,24 @@ group.add_option('--cpus',
group.add_option('--nonperiodic',
                 dest = 'nonperiodic',
                 action = 'store_true',
                 help = 'use nonperiodic tessellation')
                 help = 'nonperiodic tessellation')

parser.add_option_group(group)

group = OptionGroup(parser, "Geometry","")

group.add_option('-g', '--grid',
group.add_option('-g',
                 '--grid',
                 dest = 'grid',
                 type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
                 help = 'a,b,c grid of hexahedral box [auto]')
group.add_option('-s', '--size',
group.add_option('-s',
                 '--size',
                 dest = 'size',
                 type = 'float', nargs = 3, metavar=' '.join(['float']*3),
                 help = 'x,y,z size of hexahedral box [auto]')
group.add_option('-o', '--origin',
group.add_option('-o',
                 '--origin',
                 dest = 'origin',
                 type = 'float', nargs = 3, metavar=' '.join(['float']*3),
                 help = 'origin of grid')

@@ -146,22 +150,26 @@ parser.add_option_group(group)

group = OptionGroup(parser, "Seeds","")

group.add_option('-p', '--position',
                 dest = 'position',
group.add_option('-p',
                 '--pos', '--seedposition',
                 dest = 'pos',
                 type = 'string', metavar = 'string',
                 help = 'column label for seed positions [%default]')
group.add_option('-w', '--weight',
                 help = 'label of coordinates [%default]')
group.add_option('-w',
                 '--weight',
                 dest = 'weight',
                 type = 'string', metavar = 'string',
                 help = 'column label for seed weights [%default]')
group.add_option('-m', '--microstructure',
                 help = 'label of weights [%default]')
group.add_option('-m',
                 '--microstructure',
                 dest = 'microstructure',
                 type = 'string', metavar = 'string',
                 help = 'column label for seed microstructures [%default]')
group.add_option('-e', '--eulers',
                 help = 'label of microstructures [%default]')
group.add_option('-e',
                 '--eulers',
                 dest = 'eulers',
                 type = 'string', metavar = 'string',
                 help = 'column label for seed Euler angles [%default]')
                 help = 'label of Euler angles [%default]')
group.add_option('--axes',
                 dest = 'axes',
                 type = 'string', nargs = 3, metavar = ' '.join(['string']*3),

@@ -186,10 +194,10 @@ group.add_option('--phase',

parser.add_option_group(group)

parser.set_defaults(position = 'pos',
parser.set_defaults(pos = 'pos',
                    weight = 'weight',
                    microstructure = 'microstructure',
                    eulers = 'eulerangles',
                    eulers = 'euler',
                    homogenization = 1,
                    crystallite = 1,
                    phase = 1,

@@ -205,7 +213,7 @@ if filenames == []: filenames = [None]

for name in filenames:
  try: table = damask.ASCIItable(name = name,
                                 outname = os.path.splitext(name)[-2]+'.geom' if name else name,
                                 outname = os.path.splitext(name)[0]+'.geom' if name else name,
                                 buffered = False)
  except: continue
  damask.util.report(scriptName,name)

@@ -238,11 +246,11 @@ for name in filenames:
      info['size'][i] = float(info['grid'][i])/max(info['grid'])  # normalize to grid
      remarks.append('rescaling size {} to {}...'.format({0:'x',1:'y',2:'z'}[i],info['size'][i]))

  if table.label_dimension(options.position) != 3:
    errors.append('position columns "{}" have dimension {}.'.format(options.position,
                                                                    table.label_dimension(options.position)))
  if table.label_dimension(options.pos) != 3:
    errors.append('seed positions "{}" have dimension {}.'.format(options.pos,
                                                                  table.label_dimension(options.pos)))
  else:
    labels += [options.position]
    labels += [options.pos]

  if not hasEulers: remarks.append('missing seed orientations...')
  else:             labels += [options.eulers]

@@ -260,14 +268,13 @@ for name in filenames:
# ------------------------------------------ read seeds ---------------------------------------

  table.data_readArray(labels)
  coords = table.data[:,table.label_index(options.position):table.label_index(options.position)+3]\
           * info['size']
  eulers = table.data[:,table.label_index(options.eulers ):table.label_index(options.eulers )+3]\
           if hasEulers else np.zeros(3*len(coords))
  grains = table.data[:,table.label_index(options.microstructure)].astype('i')\
           if hasGrains else 1+np.arange(len(coords))
  weights = table.data[:,table.label_index(options.weight)]\
            if hasWeights else np.zeros(len(coords))
  coords = table.data[:,table.label_indexrange(options.pos)] * info['size']
  eulers = table.data[:,table.label_indexrange(options.eulers)] if hasEulers \
           else np.zeros(3*len(coords))
  grains = table.data[:,table.label_indexrange(options.microstructure)].astype('i') if hasGrains \
           else 1+np.arange(len(coords))
  weights = table.data[:,table.label_indexrange(options.weight)] if hasWeights \
            else np.zeros(len(coords))
  grainIDs  = np.unique(grains).astype('i')
  NgrainIDs = len(grainIDs)
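
The change from explicit label_index(...):label_index(...)+3 slices to label_indexrange lets a single expression pull a label's whole column block regardless of its dimension. A rough illustration of the idea in plain numpy, with toy data and assumed start/dimension bookkeeping (not the ASCIItable internals):

import numpy as np

data  = np.array([[0.1, 0.2, 0.3, 1.0],        # columns: 1_pos 2_pos 3_pos weight
                  [0.4, 0.5, 0.6, 2.0]])
start = {'pos': 0, 'weight': 3}                # first column of each label (assumed)
dim   = {'pos': 3, 'weight': 1}                # number of columns per label (assumed)

def indexrange(label):
    # indices covered by a label, start..start+dim, as label_indexrange provides
    return range(start[label], start[label] + dim[label])

coords  = data[:, indexrange('pos')]           # (2,3) block in one expression
weights = data[:, indexrange('weight')]        # (2,1) block, no manual '+3' arithmetic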

@@ -289,12 +296,9 @@ for name in filenames:

  if info['homogenization'] == 0: info['homogenization'] = options.homogenization

  damask.util.croak(['grid     a b c:  %s'%(' x '.join(map(str,info['grid']))),
                     'size     x y z:  %s'%(' x '.join(map(str,info['size']))),
                     'origin   x y z:  %s'%(' : '.join(map(str,info['origin']))),
                     'homogenization:  %i'%info['homogenization'],
                     'microstructures: %i%s'%(info['microstructures'],
                                              (' out of %i'%NgrainIDs if NgrainIDs != info['microstructures'] else '')),
  damask.util.report_geom(info,['grid','size','origin','homogenization',])
  damask.util.croak(['microstructures: {}{}'.format(info['microstructures'],
                                                    (' out of {}'.format(NgrainIDs) if NgrainIDs != info['microstructures'] else '')),
                    ])

  config_header = []

@@ -302,28 +306,28 @@ for name in filenames:

    config_header += ['<microstructure>']
    for i,ID in enumerate(grainIDs):
      config_header += ['[Grain%s]'%(str(ID).zfill(formatwidth)),
                        'crystallite %i'%options.crystallite,
                        '(constituent)\tphase %i\ttexture %s\tfraction 1.0'%(options.phase,str(ID).rjust(formatwidth)),
      config_header += ['[Grain{}]'.format(str(ID).zfill(formatwidth)),
                        'crystallite {}'.format(options.crystallite),
                        '(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(options.phase,str(ID).rjust(formatwidth)),
                       ]
    if hasEulers:
      config_header += ['<texture>']
      for ID in grainIDs:
        eulerID = np.nonzero(grains == ID)[0][0]               # find first occurrence of this grain id
        config_header += ['[Grain%s]'%(str(ID).zfill(formatwidth)),
                          '(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0'%tuple(eulers[eulerID])
        config_header += ['[Grain{}]'.format(str(ID).zfill(formatwidth)),
                          '(gauss)\tphi1 {:g}\tPhi {:g}\tphi2 {:g}\tscatter 0.0\tfraction 1.0'.format(*eulers[eulerID])
                         ]
      if options.axes is not None: config_header.append('axes\t%s %s %s'%tuple(options.axes))
      if options.axes is not None: config_header.append('axes\t{} {} {}'.format(*options.axes))

  table.labels_clear()
  table.info_clear()
  table.info_append([
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
    "grid\ta {}\tb {}\tc {}".format(*info['grid']),
    "size\tx {}\ty {}\tz {}".format(*info['size']),
    "origin\tx {}\ty {}\tz {}".format(*info['origin']),
    "homogenization\t{}".format(info['homogenization']),
    "microstructures\t{}".format(info['microstructures']),
    config_header,
    ])
  table.head_write()
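
Many hunks in this commit shorten header templates from named format fields to positional ones; with a sequence argument, format(*seq) unpacks the elements positionally, so the two spellings produce identical lines. A quick check:

grid = [16, 16, 4]

old = "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=grid)
new = "grid\ta {}\tb {}\tc {}".format(*grid)   # *grid unpacks into positional arguments

assert old == new                              # both yield 'grid\ta 16\tb 16\tc 4'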

@@ -19,16 +19,18 @@ Translate geom description into ASCIItable containing 1/2/3_pos and microstructure

""", version = scriptID)

parser.add_option('-p','--position',
                  dest = 'position',
parser.add_option('-p',
                  '--pos', '--position',
                  dest = 'pos',
                  type = 'string', metavar = 'string',
                  help = 'column label for position [%default]')
parser.add_option('-m','--microstructure',
                  help = 'label of coordinates [%default]')
parser.add_option('-m',
                  '--microstructure',
                  dest = 'microstructure',
                  type = 'string', metavar = 'string',
                  help = 'column label for microstructure index [%default]')
                  help = 'label of microstructure index [%default]')

parser.set_defaults(position = 'pos',
parser.set_defaults(pos = 'pos',
                    microstructure = 'microstructure',
                   )

@@ -39,10 +41,11 @@ parser.set_defaults(position = 'pos',

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
  try: table = damask.ASCIItable(name = name,
                              outname = os.path.splitext(name)[0]+'.txt' if name else name,
                              buffered = False, labeled = False)
                                 buffered = False,
                                 labeled = False,
                                )
  except: continue
  damask.util.report(scriptName,name)

@@ -75,7 +78,7 @@ for name in filenames:
  table.info_clear()
  table.info_append(extra_header + [scriptID + '\t' + ' '.join(sys.argv[1:])])
  table.labels_clear()
  table.labels_append(['{}_{}'.format(1+i,options.position) for i in xrange(3)]+[options.microstructure])
  table.labels_append(['{}_{}'.format(1+i,options.pos) for i in xrange(3)]+[options.microstructure])
  table.head_write()
  table.output_flush()

@@ -1,125 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,vtk
import numpy as np
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [seedsfile[s]]', description = """
Produce VTK point mesh from seeds file

""", version = scriptID)

parser.add_option('-s', '--size',
                  dest = 'size',
                  type = 'float', nargs = 3, metavar = 'float float float',
                  help = 'x,y,z size of hexahedral box [1.0 along largest grid point number]')
parser.add_option('-p','--position',
                  dest = 'position',
                  type = 'string', metavar = 'string',
                  help = 'column label for coordinates [%default]')

parser.set_defaults(size = [0.0,0.0,0.0],
                    position = 'pos',
                   )

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              buffered = False, readonly = True)
  except: continue
  damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

  table.head_read()
  info,extra_header = table.head_getGeom()

  damask.util.croak(['grid     a b c:  %s'%(' x '.join(map(str,info['grid']))),
                     'size     x y z:  %s'%(' x '.join(map(str,info['size']))),
                     'origin   x y z:  %s'%(' : '.join(map(str,info['origin']))),
                     'homogenization:  %i'%info['homogenization'],
                     'microstructures: %i'%info['microstructures'],
                    ])

  remarks = []
  errors = []

  if np.any(info['grid'] < 1):     remarks.append('invalid grid a b c.')
  if np.any(info['size'] <= 0.0) \
     and np.all(info['grid'] < 1): errors.append('invalid size x y z.')
  else:
    for i in xrange(3):
      if info['size'][i] <= 0.0:                                # any invalid size?
        info['size'][i] = float(info['grid'][i])/max(info['grid'])  # normalize to grid
        remarks.append('rescaling size {} to {}...'.format({0:'x',1:'y',2:'z'}[i],info['size'][i]))
  if table.label_dimension(options.position) != 3:
    errors.append('columns "{}" have dimension {}'.format(options.position,table.label_dimension(options.position)))
  if remarks != []: damask.util.croak(remarks)
  if errors != []:
    damask.util.croak(errors)
    table.close(dismiss=True)
    continue

  labels = ['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in xrange(3)]
  hasGrains = table.label_index('microstructure') != -1
  labels += ['microstructure'] if hasGrains else []

  table.data_readArray(labels)                                  # read ASCIItable columns

  coords = table.data[:,:3]*info['size']                        # assign coordinates (rescaled to box size)
  grain = table.data[:,3].astype('i') if hasGrains else 1+np.arange(len(coords),dtype='i')  # assign grains

# --- generate grid --------------------------------------------------------------------------------

  grid = vtk.vtkUnstructuredGrid()
  pts  = vtk.vtkPoints()

# --- process microstructure information -----------------------------------------------------------

  IDs = vtk.vtkIntArray()
  IDs.SetNumberOfComponents(1)
  IDs.SetName("GrainID")

  for i,item in enumerate(coords):
    IDs.InsertNextValue(grain[i])
    pid = pts.InsertNextPoint(item[0:3])
    pointIds = vtk.vtkIdList()
    pointIds.InsertId(0, pid)
    grid.InsertNextCell(1, pointIds)

  grid.SetPoints(pts)
  grid.GetCellData().AddArray(IDs)

#--- write data -----------------------------------------------------------------------------------
  if name:
    writer = vtk.vtkXMLRectilinearGridWriter()
    (directory,filename) = os.path.split(name)
    writer.SetDataModeToBinary()
    writer.SetCompressorTypeToZLib()
    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]+'.'+writer.GetDefaultFileExtension()))
  else:
    writer = vtk.vtkDataSetWriter()
    writer.WriteToOutputStringOn()
    writer.SetHeader('# powered by '+scriptID)

  if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
  else:                          writer.SetInputData(grid)
  writer.Write()
  if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

  table.close()
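
Note that the deleted script above pairs a vtkUnstructuredGrid with vtkXMLRectilinearGridWriter, whose expected input types do not match. For reference, a minimal sketch of writing one vertex cell per seed point with the matching XML writer; the two sample points and the output name are illustrative, and this is not the script that replaces it:

import vtk

grid = vtk.vtkUnstructuredGrid()
pts  = vtk.vtkPoints()
for x, y, z in [(0.0, 0.0, 0.0), (0.5, 0.5, 0.5)]:               # two sample seed points
    pid = pts.InsertNextPoint(x, y, z)
    ids = vtk.vtkIdList()
    ids.InsertId(0, pid)
    grid.InsertNextCell(vtk.VTK_VERTEX, ids)                     # one vertex cell per point
grid.SetPoints(pts)

writer = vtk.vtkXMLUnstructuredGridWriter()                      # matches vtkUnstructuredGrid
writer.SetFileName('seeds.' + writer.GetDefaultFileExtension())  # -> seeds.vtu
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
else:                          writer.SetInputData(grid)
writer.Write()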

@@ -0,0 +1,12 @@
#!/bin/bash

for seeds in "$@"
do
  vtk_pointcloud $seeds

  vtk_addPointCloudData $seeds \
    --scalar microstructure,weight \
    --inplace \
    --vtk ${seeds%.*}.vtp \

done

@@ -14,26 +14,30 @@ scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Create seed file taking microstructure indices from given geom file but excluding black-listed grains.
Create seed file taking microstructure indices from given geom file.
Indices can be black-listed or white-listed.

""", version = scriptID)

parser.add_option('-w','--white',
parser.add_option('-w',
                  '--white',
                  action = 'extend', metavar = '<int LIST>',
                  dest = 'whitelist',
                  help = 'whitelist of grain IDs')
parser.add_option('-b','--black',
parser.add_option('-b',
                  '--black',
                  action = 'extend', metavar = '<int LIST>',
                  dest = 'blacklist',
                  help = 'blacklist of grain IDs')
parser.add_option('-p','--position',
parser.add_option('-p',
                  '--pos', '--seedposition',
                  dest = 'position',
                  type = 'string', metavar = 'string',
                  help = 'column label for coordinates [%default]')
                  help = 'label of coordinates [%default]')

parser.set_defaults(whitelist = [],
                    blacklist = [],
                    position = 'pos',
                    pos = 'pos',
                   )

(options,filenames) = parser.parse_args()

@@ -46,25 +50,18 @@ options.blacklist = map(int,options.blacklist)

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
  try: table = damask.ASCIItable(name = name,
                              outname = os.path.splitext(name)[0]+'.seeds' if name else name,
                              buffered = False, labeled = False)
  except:
    continue
                                 buffered = False,
                                 labeled = False)
  except: continue
  damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

  table.head_read()
  info,extra_header = table.head_getGeom()

  damask.util.croak(['grid     a b c:  %s'%(' x '.join(map(str,info['grid']))),
                     'size     x y z:  %s'%(' x '.join(map(str,info['size']))),
                     'origin   x y z:  %s'%(' : '.join(map(str,info['origin']))),
                     'homogenization:  %i'%info['homogenization'],
                     'microstructures: %i'%info['microstructures'],
                    ])
  damask.util.report_geom(info)

  errors = []
  if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')

@@ -98,14 +95,14 @@ for name in filenames:
  table.info_clear()
  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
    "grid\ta {}\tb {}\tc {}".format(*info['grid']),
    "size\tx {}\ty {}\tz {}".format(*info['size']),
    "origin\tx {}\ty {}\tz {}".format(*info['origin']),
    "homogenization\t{}".format(info['homogenization']),
    "microstructures\t{}".format(info['microstructures']),
    ])
  table.labels_clear()
  table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in range(3)]+['microstructure'])
  table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.pos) for i in range(3)]+['microstructure'])
  table.head_write()
  table.output_flush()

@@ -34,31 +34,36 @@ Reports positions with random crystal orientations in seeds file format to STDOUT

""", version = scriptID)

parser.add_option('-N', dest='N',
parser.add_option('-N',
                  dest = 'N',
                  type = 'int', metavar = 'int',
                  help = 'number of seed points to distribute [%default]')
parser.add_option('-g','--grid',
                  help = 'number of seed points [%default]')
parser.add_option('-g',
                  '--grid',
                  dest = 'grid',
                  type = 'int', nargs = 3, metavar = 'int int int',
                  help='min a,b,c grid of hexahedral box %default')
parser.add_option('-m', '--microstructure',
parser.add_option('-m',
                  '--microstructure',
                  dest = 'microstructure',
                  type = 'int', metavar = 'int',
                  help = 'first microstructure index [%default]')
parser.add_option('-r', '--rnd',
parser.add_option('-r',
                  '--rnd',
                  dest = 'randomSeed', type = 'int', metavar = 'int',
                  help = 'seed of random number generator [%default]')
parser.add_option('--format',
                  dest = 'format', type = 'string', metavar = 'string',
                  help = 'number format of output [auto]')
                  help = 'output number format [auto]')

group = OptionGroup(parser, "Laguerre Tessellation",
                    "Parameters determining shape of weight distribution of seed points"
                   )
group.add_option('-w', '--weights',
group.add_option( '-w',
                  '--weights',
                  action = 'store_true',
                  dest = 'weights',
                  help = 'assign random weigts to seed points for Laguerre tessellation [%default]')
                  help = 'assign random weights to seed points for Laguerre tessellation [%default]')
group.add_option( '--max',
                  dest = 'max',
                  type = 'float', metavar = 'float',

@@ -76,18 +81,20 @@ parser.add_option_group(group)

group = OptionGroup(parser, "Selective Seeding",
                    "More uniform distribution of seed points using Mitchell's Best Candidate Algorithm"
                   )
group.add_option('-s','--selective',
group.add_option( '-s',
                  '--selective',
                  action = 'store_true',
                  dest = 'selective',
                  help = 'selective picking of seed points from random seed points [%default]')
group.add_option('-f','--force',
group.add_option( '-f',
                  '--force',
                  action = 'store_true',
                  dest = 'force',
                  help = 'try selective picking despite large seed point number [%default]')
group.add_option( '--distance',
                  dest = 'distance',
                  type = 'float', metavar = 'float',
                  help = 'minimum distance to the next neighbor [%default]')
                  help = 'minimum distance to next neighbor [%default]')
group.add_option( '--numCandidates',
                  dest = 'numCandidates',
                  type = 'int', metavar = 'int',

@@ -124,11 +131,9 @@ random.seed(options.randomSeed)

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(outname = name,
  try: table = damask.ASCIItable(outname = name,
                              buffered = False)
  except:
    continue
  except: continue
  damask.util.report(scriptName,name)

# --- sanity checks -------------------------------------------------------------------------

@@ -136,7 +141,7 @@ for name in filenames:
  remarks = []
  errors  = []
  if gridSize == 0:
    errors.append('zero grid dimension for %s.'%(', '.join([['a','b','c'][x] for x in np.where(options.grid == 0)[0]])))
    errors.append('zero grid dimension for {}.'.format(', '.join([['a','b','c'][x] for x in np.where(options.grid == 0)[0]])))
  if options.N > gridSize/10.: errors.append('seed count exceeds 0.1 of grid points.')
  if options.selective and 4./3.*math.pi*(options.distance/2.)**3*options.N > 0.5:
    (remarks if options.force else errors).append('maximum recommended seed point count for given distance is {}.{}'.

@@ -186,10 +191,8 @@ for name in filenames:
  seeds = seeds.T                                               # prepare shape for stacking

  if options.weights:
    if options.max > 0.0:
      weights = [np.random.uniform(low = 0, high = options.max, size = options.N)]
    else:
      weights = [np.random.normal(loc = options.mean, scale = options.sigma, size = options.N)]
    weights = [np.random.uniform(low = 0, high = options.max, size = options.N)] if options.max > 0.0 \
              else [np.random.normal(loc = options.mean, scale = options.sigma, size = options.N)]
  else:
    weights = []
  seeds = np.transpose(np.vstack(tuple([seeds,

@@ -204,13 +207,13 @@ for name in filenames:
  table.info_clear()
  table.info_append([
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=options.grid),
    "grid\ta {}\tb {}\tc {}".format(*options.grid),
    "microstructures\t{}".format(options.N),
    "randomSeed\t{}".format(options.randomSeed),
    ])
  table.labels_clear()
  table.labels_append( ['{dim}_{label}'.format(dim = 1+k,label = 'pos') for k in xrange(3)] +
                       ['{dim}_{label}'.format(dim = 1+k,label = 'eulerangles') for k in xrange(3)] +
                       ['{dim}_{label}'.format(dim = 1+k,label = 'euler') for k in xrange(3)] +
                       ['microstructure'] +
                      (['weight'] if options.weights else []))
  table.head_write()

@@ -23,29 +23,38 @@ Examples:

""", version = scriptID)

parser.add_option('-p', '--positions',
parser.add_option('-p',
                  '--pos', '--seedposition',
                  dest = 'pos',
                  type = 'string', metavar = 'string',
                  help = 'coordinate label [%default]')
                  help = 'label of coordinates [%default]')
parser.add_option('--boundingbox',
                  dest = 'box',
                  type = 'float', nargs = 6, metavar = ' '.join(['float']*6),
                  help = 'min (x,y,z) and max (x,y,z) coordinates of bounding box [tight]')
parser.add_option('-i', '--index',
                  dest = 'index',
parser.add_option('-m',
                  '--microstructure',
                  dest = 'microstructure',
                  type = 'string', metavar = 'string',
                  help = 'microstructure index label [%default]')
parser.add_option('-w','--white',
                  help = 'label of microstructures [%default]')
parser.add_option('--weight',
                  dest = 'weight',
                  type = 'string', metavar = 'string',
                  help = 'label of weights [%default]')
parser.add_option('-w',
                  '--white',
                  dest = 'whitelist',
                  action = 'extend', metavar = '<int LIST>',
                  help = 'whitelist of microstructure indices')
parser.add_option('-b','--black',
parser.add_option('-b',
                  '--black',
                  dest = 'blacklist',
                  action = 'extend', metavar = '<int LIST>',
                  help = 'blacklist of microstructure indices')

parser.set_defaults(pos = 'pos',
                    index ='microstructure',
                    microstructure = 'microstructure',
                    weight = None,
                   )

(options,filenames) = parser.parse_args()

@@ -58,8 +67,7 @@ if options.blacklist is not None: options.blacklist = map(int,options.blacklist)

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
  try: table = damask.ASCIItable(name = name,
                              outname = os.path.splitext(name)[0]+'.seeds' if name else name,
                              buffered = False)
  except: continue

@@ -69,14 +77,17 @@ for name in filenames:

# ------------------------------------------ sanity checks ---------------------------------------

  missing_labels = table.data_readArray([options.pos,options.index])
  missing_labels = table.data_readArray([options.pos,options.microstructure] +
                                        ([options.weight] if options.weight else []))

  errors = []
  if len(missing_labels) > 0:
    errors.append('column{} {} not found'.format('s' if len(missing_labels) > 1 else '',
                                                 ', '.join(missing_labels)))
  for label, dim in {options.pos: 3,
                     options.index: 1}.iteritems():
  input = {options.pos: 3,
           options.microstructure: 1,}
  if options.weight: input.update({options.weight: 1})
  for label, dim in input.iteritems():
    if table.label_dimension(label) != dim:
      errors.append('column {} has wrong dimension'.format(label))

@@ -113,11 +124,12 @@ for name in filenames:

  table.info = [
    scriptID,
    'size %s'%(' '.join(list(itertools.chain.from_iterable(zip(['x','y','z'],
    'size {}'.format(' '.join(list(itertools.chain.from_iterable(zip(['x','y','z'],
                                                               map(str,boundingBox[1,:]-boundingBox[0,:])))))),
    ]
  table.labels_clear()
  table.labels_append(['1_pos','2_pos','3_pos','microstructure'])               # implicitly switching label processing/writing on
  table.labels_append(['1_pos','2_pos','3_pos','microstructure'] +
                      ['weight'] if options.weight else [])                     # implicitly switching label processing/writing on
  table.head_write()
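
One caveat in the hunk above: Python's conditional expression binds more loosely than +, so ['1_pos','2_pos','3_pos','microstructure'] + ['weight'] if options.weight else [] evaluates to an empty list whenever no weight label is given, dropping the position and microstructure labels as well. A sketch of the difference (names are stand-ins):

base   = ['1_pos', '2_pos', '3_pos', 'microstructure']
weight = None                                   # stand-in for options.weight

labels = base + ['weight'] if weight else []    # parsed as (base + ['weight']) if weight else []
print(labels)                                   # [] -- all labels lost

labels = base + (['weight'] if weight else [])  # parentheses express the likely intent
print(labels)                                   # ['1_pos', '2_pos', '3_pos', 'microstructure']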

# ------------------------------------------ output result ---------------------------------------

@@ -0,0 +1,55 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Adds header to OIM grain file to make it accessible as ASCII table

""", version = scriptID)

parser.add_option('-l', '--labels',
                  dest = 'labels',
                  help = 'labels of requested columns')

parser.set_defaults(labels = ['1_euler','2_euler','3_euler',
                              '1_pos','2_pos', 'IQ', 'CI', 'Fit', 'GrainID',],
                   )

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              buffered = False,
                              labeled = False)
  except: continue
  damask.util.report(scriptName,name)
  table.head_read()
  data = []
  while table.data_read():
    data.append(table.data[0:len(options.labels)])

  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
  table.labels_append(options.labels)
  table.head_write()
  for i in data:
    table.data = i
    table.data_write()

# --- output finalization --------------------------------------------------------------------------

  table.close()                                                 # close ASCII table
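
One more caveat: -l/--labels above declares neither a type nor an action, so optparse stores a single raw string while the default is a list. If damask.extendableOption follows optparse's documented custom-action pattern, an action = 'extend' would collect comma-separated labels into a list; the ExtendableOption class below is that generic stdlib pattern, shown as an assumption rather than DAMASK's actual implementation:

from optparse import Option, OptionParser

class ExtendableOption(Option):
    # optparse Option subclass with an 'extend' action for comma-separated lists
    ACTIONS = Option.ACTIONS + ('extend',)
    STORE_ACTIONS = Option.STORE_ACTIONS + ('extend',)
    TYPED_ACTIONS = Option.TYPED_ACTIONS + ('extend',)
    ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ('extend',)

    def take_action(self, action, dest, opt, value, values, parser):
        if action == 'extend':
            values.ensure_value(dest, []).extend(value.split(','))
        else:
            Option.take_action(self, action, dest, opt, value, values, parser)

parser = OptionParser(option_class=ExtendableOption)
parser.add_option('-l', '--labels', dest='labels', action='extend', type='string')
opts, _ = parser.parse_args(['-l', '1_euler,2_euler,3_euler'])
print(opts.labels)                               # ['1_euler', '2_euler', '3_euler']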

src/IO.f90

@@ -6,10 +6,6 @@
!> @brief input/output functions, partly depending on chosen solver
!--------------------------------------------------------------------------------------------------
module IO
#ifdef HDF
 use hdf5, only: &
   HID_T
#endif
 use prec, only: &
   pInt, &
   pReal

@@ -18,22 +14,8 @@ module IO
 private
 character(len=5), parameter, public :: &
   IO_EOF = '#EOF#'                                                                                  !< end of file string
#ifdef HDF
 integer(HID_T), public, protected :: tempCoordinates, tempResults
 integer(HID_T), private :: resultsFile, tempFile
 integer(pInt), private :: currentInc
#endif

 public :: &
#ifdef HDF
   HDF5_mappingConstitutive, &
   HDF5_mappingHomogenization, &
   HDF5_mappingCells, &
   HDF5_addGroup ,&
   HDF5_forwardResults, &
   HDF5_addScalarDataset, &
   IO_formatIntToString ,&
#endif
   IO_init, &
   IO_read, &
   IO_checkAndRewind, &

@@ -117,9 +99,6 @@ subroutine IO_init
#include "compilation_info.f90"
 endif mainProcess

#ifdef HDF
 call HDF5_createJobFile
#endif

end subroutine IO_init

@@ -1944,526 +1923,4 @@ recursive function abaqus_assembleInputFile(unit1,unit2) result(createSuccess)
end function abaqus_assembleInputFile
#endif

#ifdef HDF
!--------------------------------------------------------------------------------------------------
!> @brief creates and initializes HDF5 output files
!--------------------------------------------------------------------------------------------------
subroutine HDF5_createJobFile
 use hdf5
 use DAMASK_interface, only: &
   getSolverWorkingDirectoryName, &
   getSolverJobName

 implicit none
 integer :: hdferr
 integer(SIZE_T) :: typeSize
 character(len=1024) :: path
 integer(HID_T) :: prp_id
 integer(SIZE_T), parameter :: increment = 104857600                                                ! increase temp file in memory in 100MB steps

!--------------------------------------------------------------------------------------------------
! initialize HDF5 library and check if integer and float type size match
 call h5open_f(hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5open_f')
 call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5tget_size_f (int)')
 if (int(pInt,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pInt does not match H5T_NATIVE_INTEGER')
 call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5tget_size_f (double)')
 if (int(pReal,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pReal does not match H5T_NATIVE_DOUBLE')

!--------------------------------------------------------------------------------------------------
! open file
 path = trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.'//'DAMASKout'
 call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr)
 if (hdferr < 0) call IO_error(100_pInt,ext_msg=path)
 call HDF5_addStringAttribute(resultsFile,'createdBy','$Id$')

!--------------------------------------------------------------------------------------------------
! open temp file
 path = trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.'//'DAMASKoutTemp'
 call h5pcreate_f(H5P_FILE_ACCESS_F, prp_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5pcreate_f')
 call h5pset_fapl_core_f(prp_id, increment, .false., hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5pset_fapl_core_f')
 call h5fcreate_f(path,H5F_ACC_TRUNC_F,tempFile,hdferr)
 if (hdferr < 0) call IO_error(100_pInt,ext_msg=path)

!--------------------------------------------------------------------------------------------------
! create mapping groups in out file
 call HDF5_closeGroup(HDF5_addGroup("mapping"))
 call HDF5_closeGroup(HDF5_addGroup("results"))
 call HDF5_closeGroup(HDF5_addGroup("coordinates"))

!--------------------------------------------------------------------------------------------------
! create results group in temp file
 tempResults = HDF5_addGroup("results",tempFile)
 tempCoordinates = HDF5_addGroup("coordinates",tempFile)

end subroutine HDF5_createJobFile
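
For orientation, the temp file above lives purely in memory: h5pset_fapl_core_f selects HDF5's core (memory) driver with a 100 MB growth increment and no backing store. A rough h5py equivalent of the file/group setup, with illustrative paths and no claim to match DAMASK's layout exactly:

import h5py

results = h5py.File('job.DAMASKout', 'w')                    # on-disk results file
for name in ('mapping', 'results', 'coordinates'):
    results.create_group(name)

temp = h5py.File('job.DAMASKoutTemp', 'w', driver='core',    # in-memory scratch file,
                 backing_store=False,                        # never flushed to disk,
                 block_size=100 * 1024**2)                   # grown in 100 MB steps
tempResults     = temp.create_group('results')
tempCoordinates = temp.create_group('coordinates')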

!--------------------------------------------------------------------------------------------------
!> @brief closes the HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine HDF5_closeJobFile()
 use hdf5

 implicit none
 integer :: hdferr
 call h5fclose_f(resultsFile,hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeJobFile: h5fclose_f')

end subroutine HDF5_closeJobFile

!--------------------------------------------------------------------------------------------------
!> @brief adds a new group to the results file, or if loc is present at the given location
!--------------------------------------------------------------------------------------------------
integer(HID_T) function HDF5_addGroup(path,loc)
 use hdf5

 implicit none
 character(len=*), intent(in) :: path
 integer(HID_T), intent(in),optional :: loc
 integer :: hdferr

 if (present(loc)) then
   call h5gcreate_f(loc, trim(path), HDF5_addGroup, hdferr)
 else
   call h5gcreate_f(resultsFile, trim(path), HDF5_addGroup, hdferr)
 endif
 if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(path)//' )')

end function HDF5_addGroup

!--------------------------------------------------------------------------------------------------
!> @brief opens an existing group in the results file
!--------------------------------------------------------------------------------------------------
integer(HID_T) function HDF5_openGroup(path)
 use hdf5

 implicit none
 character(len=*), intent(in) :: path
 integer :: hdferr

 call h5gopen_f(resultsFile, trim(path), HDF5_openGroup, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(path)//' )')

end function HDF5_openGroup

!--------------------------------------------------------------------------------------------------
!> @brief closes a group
!--------------------------------------------------------------------------------------------------
subroutine HDF5_closeGroup(ID)
 use hdf5

 implicit none
 integer(HID_T), intent(in) :: ID
 integer :: hdferr

 call h5gclose_f(ID, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_closeGroup: h5gclose_f')

end subroutine HDF5_closeGroup

!--------------------------------------------------------------------------------------------------
!> @brief adds a string attribute to the given entity
!--------------------------------------------------------------------------------------------------
subroutine HDF5_addStringAttribute(entity,attrLabel,attrValue)
 use hdf5

 implicit none
 integer(HID_T), intent(in) :: entity
 character(len=*), intent(in) :: attrLabel, attrValue
 integer :: hdferr
 integer(HID_T) :: attr_id, space_id, type_id

 call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5screate_f')
 call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tcopy_f')
 call h5tset_size_f(type_id, int(len(trim(attrValue)),HSIZE_T), hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tset_size_f')
 call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5acreate_f')
 call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5awrite_f')
 call h5aclose_f(attr_id,hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5aclose_f')
 call h5sclose_f(space_id,hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5sclose_f')

end subroutine HDF5_addStringAttribute

!--------------------------------------------------------------------------------------------------
!> @brief adds the unique mapping from spatial position and constituent ID to results
!--------------------------------------------------------------------------------------------------
subroutine HDF5_mappingConstitutive(mapping)
 use hdf5

 implicit none
 integer(pInt), intent(in), dimension(:,:,:) :: mapping

 integer :: hdferr, NmatPoints,Nconstituents
 integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id,instance_id,position_id

 Nconstituents=size(mapping,1)
 NmatPoints=size(mapping,2)
 mapping_ID = HDF5_openGroup("mapping")

!--------------------------------------------------------------------------------------------------
! create dataspace
 call h5screate_simple_f(2, int([Nconstituents,NmatPoints],HSIZE_T), space_id, hdferr, &
                            int([Nconstituents,NmatPoints],HSIZE_T))
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive')

!--------------------------------------------------------------------------------------------------
! compound type
 call h5tcreate_f(H5T_COMPOUND_F, 6_SIZE_T, dtype_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tcreate_f dtype_id')

 call h5tinsert_f(dtype_id, "Constitutive Instance", 0_SIZE_T, H5T_STD_U16LE, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tinsert_f 0')
 call h5tinsert_f(dtype_id, "Position in Instance Results", 2_SIZE_T, H5T_STD_U32LE, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tinsert_f 2')

!--------------------------------------------------------------------------------------------------
! create Dataset
 call h5dcreate_f(mapping_id, "Constitutive", dtype_id, space_id, dset_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive')

!--------------------------------------------------------------------------------------------------
! Create memory types (one compound datatype for each member)
 call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), instance_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tcreate_f instance_id')
 call h5tinsert_f(instance_id, "Constitutive Instance", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tinsert_f instance_id')

 call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tcreate_f position_id')
 call h5tinsert_f(position_id, "Position in Instance Results", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tinsert_f position_id')

!--------------------------------------------------------------------------------------------------
! write data by fields in the datatype. Fields order is not important.
 call h5dwrite_f(dset_id, position_id, mapping(1:Nconstituents,1:NmatPoints,1), &
                 int([Nconstituents, NmatPoints],HSIZE_T), hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5dwrite_f position_id')

 call h5dwrite_f(dset_id, instance_id, mapping(1:Nconstituents,1:NmatPoints,2), &
                 int([Nconstituents, NmatPoints],HSIZE_T), hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5dwrite_f instance_id')

!--------------------------------------------------------------------------------------------------
!close types, dataspaces
 call h5tclose_f(dtype_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tclose_f dtype_id')
 call h5tclose_f(position_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tclose_f position_id')
 call h5tclose_f(instance_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tclose_f instance_id')
 call h5dclose_f(dset_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5dclose_f')
 call h5sclose_f(space_id, hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5sclose_f')
 call HDF5_closeGroup(mapping_ID)

end subroutine HDF5_mappingConstitutive
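
The compound type assembled field by field above corresponds to a numpy structured dtype when working from Python; filling one field at a time mirrors the two h5dwrite_f calls. A hedged sketch with toy sizes (field names are taken from the subroutine, everything else is illustrative):

import numpy as np
import h5py

dtype = np.dtype([('Constitutive Instance',        np.uint16),
                  ('Position in Instance Results', np.uint32)])

Nconstituents, NmatPoints = 2, 4                             # toy dimensions
mapping = np.zeros((Nconstituents, NmatPoints), dtype=dtype)
mapping['Constitutive Instance']        = 1                  # fill per field, like the
mapping['Position in Instance Results'] = np.arange(NmatPoints)  # two h5dwrite_f calls

with h5py.File('mapping.h5', 'w') as f:
    f.require_group('mapping').create_dataset('Constitutive', data=mapping)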
|
||||
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
!> @brief adds the unique mapping from spatial position and constituent ID to results
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
subroutine HDF5_mappingCrystallite(mapping)
|
||||
use hdf5
|
||||
|
||||
implicit none
|
||||
integer(pInt), intent(in), dimension(:,:,:) :: mapping
|
||||
|
||||
integer :: hdferr, NmatPoints,Nconstituents
|
||||
integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id,instance_id,position_id
|
||||
|
||||
Nconstituents=size(mapping,1)
|
||||
NmatPoints=size(mapping,2)
|
||||
mapping_ID = HDF5_openGroup("mapping")
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! create dataspace
|
||||
call h5screate_simple_f(2, int([Nconstituents,NmatPoints],HSIZE_T), space_id, hdferr, &
|
||||
int([Nconstituents,NmatPoints],HSIZE_T))
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite')
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! compound type
|
||||
call h5tcreate_f(H5T_COMPOUND_F, 6_SIZE_T, dtype_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f dtype_id')
|
||||
|
||||
call h5tinsert_f(dtype_id, "Crystallite Instance", 0_SIZE_T, H5T_STD_U16LE, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f 0')
|
||||
call h5tinsert_f(dtype_id, "Position in Instance Results", 2_SIZE_T, H5T_STD_U32LE, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f 2')
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! create Dataset
|
||||
call h5dcreate_f(mapping_id, "Crystallite", dtype_id, space_id, dset_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite')
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! Create memory types (one compound datatype for each member)
|
||||
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), instance_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f instance_id')
|
||||
call h5tinsert_f(instance_id, "Crystallite Instance", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f instance_id')
|
||||
|
||||
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f position_id')
|
||||
call h5tinsert_f(position_id, "Position in Instance Results", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f position_id')
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! write data by fields in the datatype. Fields order is not important.
|
||||
call h5dwrite_f(dset_id, position_id, mapping(1:Nconstituents,1:NmatPoints,1), &
|
||||
int([Nconstituents, NmatPoints],HSIZE_T), hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dwrite_f position_id')
|
||||
|
||||
call h5dwrite_f(dset_id, instance_id, mapping(1:Nconstituents,1:NmatPoints,2), &
|
||||
int([Nconstituents, NmatPoints],HSIZE_T), hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dwrite_f instance_id')
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
!close types, dataspaces
|
||||
call h5tclose_f(dtype_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f dtype_id')
|
||||
call h5tclose_f(position_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f position_id')
|
||||
call h5tclose_f(instance_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f instance_id')
|
||||
call h5dclose_f(dset_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dclose_f')
|
||||
call h5sclose_f(space_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5sclose_f')
|
||||
call HDF5_closeGroup(mapping_ID)
|
||||
|
||||
end subroutine HDF5_mappingCrystallite
|
||||
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
!> @brief adds the unique mapping from spatial position to results
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
subroutine HDF5_mappingHomogenization(mapping)
|
||||
use hdf5
|
||||
|
||||
implicit none
|
||||
integer(pInt), intent(in), dimension(:,:) :: mapping
|
||||
|
||||
integer :: hdferr, NmatPoints
|
||||
integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id,instance_id,position_id,elem_id,ip_id
|
||||
|
||||
NmatPoints=size(mapping,1)
|
||||
mapping_ID = HDF5_openGroup("mapping")
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! create dataspace
|
||||
call h5screate_simple_f(1, int([NmatPoints],HSIZE_T), space_id, hdferr, &
|
||||
int([NmatPoints],HSIZE_T))
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization')
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! compound type
|
||||
call h5tcreate_f(H5T_COMPOUND_F, 11_SIZE_T, dtype_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f dtype_id')
|
||||
|
||||
call h5tinsert_f(dtype_id, "Homogenization Instance", 0_SIZE_T, H5T_STD_U16LE, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f 0')
|
||||
call h5tinsert_f(dtype_id, "Position in Instance Results", 2_SIZE_T, H5T_STD_U32LE, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f 2')
|
||||
call h5tinsert_f(dtype_id, "Element Number", 6_SIZE_T, H5T_STD_U32LE, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f 6')
|
||||
call h5tinsert_f(dtype_id, "Material Point Number", 10_SIZE_T, H5T_STD_U8LE, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f 10')
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! create Dataset
|
||||
call h5dcreate_f(mapping_id, "Homogenization", dtype_id, space_id, dset_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization')
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! Create memory types (one compound datatype for each member)
|
||||
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), instance_id, hdferr)
|
||||
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f instance_id')
|
||||
call h5tinsert_f(instance_id, "Homogenization Instance", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f instance_id')

call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f position_id')
call h5tinsert_f(position_id, "Position in Instance Results", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f position_id')

call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), elem_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f elem_id')
call h5tinsert_f(elem_id, "Element Number", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f elem_id')

call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), ip_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f ip_id')
call h5tinsert_f(ip_id, "Material Point Number", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f ip_id')

!--------------------------------------------------------------------------------------------------
! write data by fields in the datatype. Field order is not important.
call h5dwrite_f(dset_id, position_id, mapping(1:NmatPoints,1), &
int([NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dwrite_f position_id')

call h5dwrite_f(dset_id, instance_id, mapping(1:NmatPoints,2), &
int([NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dwrite_f instance_id')

call h5dwrite_f(dset_id, elem_id, mapping(1:NmatPoints,3), &
int([NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dwrite_f elem_id')

call h5dwrite_f(dset_id, ip_id, mapping(1:NmatPoints,4), &
int([NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dwrite_f ip_id')

!--------------------------------------------------------------------------------------------------
!close types, dataspaces
call h5tclose_f(dtype_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f dtype_id')
call h5tclose_f(position_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f position_id')
call h5tclose_f(instance_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f instance_id')
call h5tclose_f(ip_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f ip_id')
call h5tclose_f(elem_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f elem_id')
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dclose_f')
call h5sclose_f(space_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5sclose_f')
call HDF5_closeGroup(mapping_ID)

end subroutine HDF5_mappingHomogenization

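! added note (not part of the commit): each row n of the dataset written above
! holds the compound record mapping(n,1:4), i.e.
! [position in instance results, homogenization instance, element number, material point number]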
!--------------------------------------------------------------------------------------------------
!> @brief adds the unique cell to node mapping
!--------------------------------------------------------------------------------------------------
subroutine HDF5_mappingCells(mapping)
use hdf5

implicit none
integer(pInt), intent(in), dimension(:) :: mapping

integer :: hdferr, Nnodes
integer(HID_T) :: mapping_id, dset_id, space_id

Nnodes=size(mapping)
mapping_ID = HDF5_openGroup("mapping")

!--------------------------------------------------------------------------------------------------
! create dataspace
call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, &
int([Nnodes],HSIZE_T))
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5screate_simple_f')

!--------------------------------------------------------------------------------------------------
! create Dataset
call h5dcreate_f(mapping_id, "Cell",H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5dcreate_f')

!--------------------------------------------------------------------------------------------------
! write the cell connectivity dataset
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, mapping, int([Nnodes],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5dwrite_f')

!--------------------------------------------------------------------------------------------------
!close types, dataspaces
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5dclose_f')
call h5sclose_f(space_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5sclose_f')
call HDF5_closeGroup(mapping_ID)

end subroutine HDF5_mappingCells

!--------------------------------------------------------------------------------------------------
!> @brief creates a new scalar dataset in the given group location
!--------------------------------------------------------------------------------------------------
subroutine HDF5_addScalarDataset(group,nnodes,label,SIunit)
use hdf5

implicit none
integer(HID_T), intent(in) :: group
integer(pInt), intent(in) :: nnodes
character(len=*), intent(in) :: SIunit,label

integer :: hdferr
integer(HID_T) :: dset_id, space_id

!--------------------------------------------------------------------------------------------------
! create dataspace
call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, &
int([Nnodes],HSIZE_T))
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5screate_simple_f')

!--------------------------------------------------------------------------------------------------
! create Dataset
call h5dcreate_f(group, trim(label),H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dcreate_f')
call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit))

!--------------------------------------------------------------------------------------------------
!close types, dataspaces
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dclose_f')
call h5sclose_f(space_id, hdferr)

end subroutine HDF5_addScalarDataset

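! hypothetical usage (added sketch; group_id and mesh_Nnodes are assumed names):
! call HDF5_addScalarDataset(group_id, mesh_Nnodes, 'temperature', 'K')
! would create a double-precision dataset 'temperature' with a string attribute unit='K'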
!--------------------------------------------------------------------------------------------------
!> @brief returns nicely formatted string of integer value
!--------------------------------------------------------------------------------------------------
function IO_formatIntToString(myInt)

implicit none
integer(pInt), intent(in) :: myInt
character(len=1_pInt + int(log10(real(myInt)),pInt)) :: IO_formatIntToString
write(IO_formatIntToString,'('//IO_intOut(myInt)//')') myInt

end function

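! usage sketch (added comment, hypothetical value): IO_formatIntToString(42_pInt)
! yields the string '42'; note that the declared length uses log10(real(myInt)),
! so the function is only meaningful for myInt >= 1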
!--------------------------------------------------------------------------------------------------
!> @brief copies the current temp results to the actual results file
!--------------------------------------------------------------------------------------------------
subroutine HDF5_forwardResults
use hdf5

implicit none
integer :: hdferr
integer(HID_T) :: new_loc_id

new_loc_id = HDF5_openGroup("results")
currentInc = currentInc + 1_pInt
call h5ocopy_f(tempFile, 'results', new_loc_id,dst_name=IO_formatIntToString(currentInc), hdferr=hdferr)
if (hdferr < 0_pInt) call IO_error(1_pInt,ext_msg='HDF5_forwardResults: h5ocopy_f')
call HDF5_closeGroup(new_loc_id)

end subroutine HDF5_forwardResults

#endif
end module IO

@ -320,7 +320,7 @@ KINEMATICS_FILES = \
kinematics_vacancy_strain.o kinematics_hydrogen_strain.o

PLASTIC_FILES = \
plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o plastic_j2.o \
plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o \
plastic_phenopowerlaw.o plastic_titanmod.o plastic_nonlocal.o plastic_none.o \
plastic_phenoplus.o

@ -591,9 +591,6 @@ plastic_phenoplus.o: plastic_phenoplus.f90 \
plastic_isotropic.o: plastic_isotropic.f90 \
lattice.o

plastic_j2.o: plastic_j2.f90 \
lattice.o

plastic_none.o: plastic_none.f90 \
lattice.o
ifeq "$(F90)" "gfortran"
@ -28,7 +28,6 @@
#include "kinematics_hydrogen_strain.f90"
#include "plastic_none.f90"
#include "plastic_isotropic.f90"
#include "plastic_j2.f90"
#include "plastic_phenopowerlaw.f90"
#include "plastic_phenoplus.f90"
#include "plastic_titanmod.f90"
@ -69,7 +69,6 @@ subroutine constitutive_init()
ELASTICITY_hooke_ID, &
PLASTICITY_none_ID, &
PLASTICITY_isotropic_ID, &
PLASTICITY_j2_ID, &
PLASTICITY_phenopowerlaw_ID, &
PLASTICITY_phenoplus_ID, &
PLASTICITY_dislotwin_ID, &
@ -93,7 +92,6 @@ subroutine constitutive_init()
ELASTICITY_HOOKE_label, &
PLASTICITY_NONE_label, &
PLASTICITY_ISOTROPIC_label, &
PLASTICITY_J2_label, &
PLASTICITY_PHENOPOWERLAW_label, &
PLASTICITY_PHENOPLUS_label, &
PLASTICITY_DISLOTWIN_label, &
@ -114,7 +112,6 @@ subroutine constitutive_init()

use plastic_none
use plastic_isotropic
use plastic_j2
use plastic_phenopowerlaw
use plastic_phenoplus
use plastic_dislotwin
@ -160,7 +157,6 @@ subroutine constitutive_init()
! parse plasticities from config file
if (any(phase_plasticity == PLASTICITY_NONE_ID)) call plastic_none_init
if (any(phase_plasticity == PLASTICITY_ISOTROPIC_ID)) call plastic_isotropic_init(FILEUNIT)
if (any(phase_plasticity == PLASTICITY_J2_ID)) call plastic_j2_init(FILEUNIT)
if (any(phase_plasticity == PLASTICITY_PHENOPOWERLAW_ID)) call plastic_phenopowerlaw_init(FILEUNIT)
if (any(phase_plasticity == PLASTICITY_PHENOPLUS_ID)) call plastic_phenoplus_init(FILEUNIT)
if (any(phase_plasticity == PLASTICITY_DISLOTWIN_ID)) call plastic_dislotwin_init(FILEUNIT)
@ -217,11 +213,6 @@ subroutine constitutive_init()
thisNoutput => plastic_isotropic_Noutput
thisOutput => plastic_isotropic_output
thisSize => plastic_isotropic_sizePostResult
case (PLASTICITY_J2_ID) plasticityType
outputName = PLASTICITY_J2_label
thisNoutput => plastic_j2_Noutput
thisOutput => plastic_j2_output
thisSize => plastic_j2_sizePostResult
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
outputName = PLASTICITY_PHENOPOWERLAW_label
thisNoutput => plastic_phenopowerlaw_Noutput
@ -408,8 +399,6 @@ function constitutive_homogenizedC(ipc,ip,el)
plastic_titanmod_homogenizedC
use plastic_dislotwin, only: &
plastic_dislotwin_homogenizedC
use plastic_disloucla, only: &
plastic_disloucla_homogenizedC
use lattice, only: &
lattice_C66

@ -423,8 +412,6 @@ function constitutive_homogenizedC(ipc,ip,el)
plasticityType: select case (phase_plasticity(material_phase(ipc,ip,el)))
case (PLASTICITY_DISLOTWIN_ID) plasticityType
constitutive_homogenizedC = plastic_dislotwin_homogenizedC(ipc,ip,el)
case (PLASTICITY_DISLOUCLA_ID) plasticityType
constitutive_homogenizedC = plastic_disloucla_homogenizedC(ipc,ip,el)
case (PLASTICITY_TITANMOD_ID) plasticityType
constitutive_homogenizedC = plastic_titanmod_homogenizedC (ipc,ip,el)
case default plasticityType
@ -521,7 +508,6 @@ subroutine constitutive_LpAndItsTangent(Lp, dLp_dTstar3333, dLp_dFi3333, Tstar_v
thermalMapping, &
PLASTICITY_NONE_ID, &
PLASTICITY_ISOTROPIC_ID, &
PLASTICITY_J2_ID, &
PLASTICITY_PHENOPOWERLAW_ID, &
PLASTICITY_PHENOPLUS_ID, &
PLASTICITY_DISLOTWIN_ID, &
@ -530,8 +516,6 @@ subroutine constitutive_LpAndItsTangent(Lp, dLp_dTstar3333, dLp_dFi3333, Tstar_v
PLASTICITY_NONLOCAL_ID
use plastic_isotropic, only: &
plastic_isotropic_LpAndItsTangent
use plastic_j2, only: &
plastic_j2_LpAndItsTangent
use plastic_phenopowerlaw, only: &
plastic_phenopowerlaw_LpAndItsTangent
use plastic_phenoplus, only: &
@ -582,8 +566,6 @@ subroutine constitutive_LpAndItsTangent(Lp, dLp_dTstar3333, dLp_dFi3333, Tstar_v
dLp_dMstar = 0.0_pReal
case (PLASTICITY_ISOTROPIC_ID) plasticityType
call plastic_isotropic_LpAndItsTangent(Lp,dLp_dMstar,Mstar_v,ipc,ip,el)
case (PLASTICITY_J2_ID) plasticityType
call plastic_j2_LpAndItsTangent(Lp,dLp_dMstar,Mstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
call plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMstar,Mstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPLUS_ID) plasticityType
@ -911,7 +893,6 @@ subroutine constitutive_collectDotState(Tstar_v, FeArray, FpArray, subdt, subfra
homogenization_maxNgrains, &
PLASTICITY_none_ID, &
PLASTICITY_isotropic_ID, &
PLASTICITY_j2_ID, &
PLASTICITY_phenopowerlaw_ID, &
PLASTICITY_phenoplus_ID, &
PLASTICITY_dislotwin_ID, &
@ -924,8 +905,6 @@ subroutine constitutive_collectDotState(Tstar_v, FeArray, FpArray, subdt, subfra
SOURCE_thermal_externalheat_ID
use plastic_isotropic, only: &
plastic_isotropic_dotState
use plastic_j2, only: &
plastic_j2_dotState
use plastic_phenopowerlaw, only: &
plastic_phenopowerlaw_dotState
use plastic_phenoplus, only: &
@ -979,8 +958,6 @@ subroutine constitutive_collectDotState(Tstar_v, FeArray, FpArray, subdt, subfra
plasticityType: select case (phase_plasticity(material_phase(ipc,ip,el)))
case (PLASTICITY_ISOTROPIC_ID) plasticityType
call plastic_isotropic_dotState (Tstar_v,ipc,ip,el)
case (PLASTICITY_J2_ID) plasticityType
call plastic_j2_dotState (Tstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
call plastic_phenopowerlaw_dotState(Tstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPLUS_ID) plasticityType
@ -1125,7 +1102,6 @@ function constitutive_postResults(Tstar_v, FeArray, ipc, ip, el)
homogenization_maxNgrains, &
PLASTICITY_NONE_ID, &
PLASTICITY_ISOTROPIC_ID, &
PLASTICITY_J2_ID, &
PLASTICITY_PHENOPOWERLAW_ID, &
PLASTICITY_PHENOPLUS_ID, &
PLASTICITY_DISLOTWIN_ID, &
@ -1138,8 +1114,6 @@ function constitutive_postResults(Tstar_v, FeArray, ipc, ip, el)
SOURCE_damage_anisoDuctile_ID
use plastic_isotropic, only: &
plastic_isotropic_postResults
use plastic_j2, only: &
plastic_j2_postResults
use plastic_phenopowerlaw, only: &
plastic_phenopowerlaw_postResults
use plastic_phenoplus, only: &
@ -1193,8 +1167,6 @@ function constitutive_postResults(Tstar_v, FeArray, ipc, ip, el)
constitutive_postResults(startPos:endPos) = plastic_titanmod_postResults(ipc,ip,el)
case (PLASTICITY_ISOTROPIC_ID) plasticityType
constitutive_postResults(startPos:endPos) = plastic_isotropic_postResults(Tstar_v,ipc,ip,el)
case (PLASTICITY_J2_ID) plasticityType
constitutive_postResults(startPos:endPos) = plastic_j2_postResults(Tstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_phenopowerlaw_postResults(Tstar_v,ipc,ip,el)
@ -258,6 +258,7 @@ subroutine crystallite_init
allocate(crystallite_orientation(4,cMax,iMax,eMax), source=0.0_pReal)
allocate(crystallite_orientation0(4,cMax,iMax,eMax), source=0.0_pReal)
allocate(crystallite_rotation(4,cMax,iMax,eMax), source=0.0_pReal)
if (any(plasticState%nonLocal)) &
allocate(crystallite_disorientation(4,nMax,cMax,iMax,eMax),source=0.0_pReal)
allocate(crystallite_localPlasticity(cMax,iMax,eMax), source=.true.)
allocate(crystallite_requested(cMax,iMax,eMax), source=.false.)
@ -4005,6 +4006,7 @@ subroutine crystallite_orientations

! --- CALCULATE ORIENTATION AND LATTICE ROTATION ---

nonlocalPresent: if (any(plasticState%nonLocal)) then
!$OMP PARALLEL DO PRIVATE(orientation)
do e = FEsolving_execElem(1),FEsolving_execElem(2)
do i = FEsolving_execIP(1,e),FEsolving_execIP(2,e)
@ -4060,6 +4062,7 @@ subroutine crystallite_orientations
endif
enddo; enddo
!$OMP END PARALLEL DO
endif nonlocalPresent

end subroutine crystallite_orientations

@ -71,12 +71,6 @@ contains
!> @brief module initialization
!--------------------------------------------------------------------------------------------------
subroutine homogenization_init
#ifdef HDF
use hdf5, only: &
HID_T
use IO, only : &
HDF5_mappingHomogenization
#endif
use, intrinsic :: iso_fortran_env ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
use math, only: &
math_I3
@ -131,12 +125,6 @@ subroutine homogenization_init
character(len=64), dimension(:,:), pointer :: thisOutput
character(len=32) :: outputName !< name of output, intermediate fix until HDF5 output is ready
logical :: knownHomogenization, knownThermal, knownDamage, knownVacancyflux, knownPorosity, knownHydrogenflux
#ifdef HDF
integer(pInt), dimension(:,:), allocatable :: mapping
integer(pInt), dimension(:), allocatable :: InstancePosition
allocate(mapping(mesh_ncpelems,4),source=0_pInt)
allocate(InstancePosition(material_Nhomogenization),source=0_pInt)
#endif


!--------------------------------------------------------------------------------------------------
@ -396,17 +384,6 @@ subroutine homogenization_init

!--------------------------------------------------------------------------------------------------
! allocate and initialize global state and postResults variables
#ifdef HDF
elementLooping: do e = 1,mesh_NcpElems
myInstance = homogenization_typeInstance(mesh_element(3,e))
IpLooping: do i = 1,FE_Nips(FE_geomtype(mesh_element(2,e)))
InstancePosition(myInstance) = InstancePosition(myInstance)+1_pInt
mapping(e,1:4) = [instancePosition(myinstance),myinstance,e,i]
enddo IpLooping
enddo elementLooping
call HDF5_mappingHomogenization(mapping)
#endif

homogenization_maxSizePostResults = 0_pInt
thermal_maxSizePostResults = 0_pInt
damage_maxSizePostResults = 0_pInt
@ -94,11 +94,11 @@ module lattice
LATTICE_fcc_NcleavageSystem = int([3, 4, 0],pInt) !< total # of cleavage systems per family for fcc

integer(pInt), parameter, private :: &
LATTICE_fcc_Nslip = 12_pInt, & ! sum(lattice_fcc_NslipSystem), & !< total # of slip systems for fcc
LATTICE_fcc_Ntwin = 12_pInt, & ! sum(lattice_fcc_NtwinSystem) !< total # of twin systems for fcc
LATTICE_fcc_Nslip = sum(lattice_fcc_NslipSystem), & !< total # of slip systems for fcc
LATTICE_fcc_Ntwin = sum(lattice_fcc_NtwinSystem), & !< total # of twin systems for fcc
LATTICE_fcc_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for fcc
LATTICE_fcc_Ntrans = 12_pInt, & !< total # of transformations for fcc
LATTICE_fcc_Ncleavage = 7_pInt !< total # of cleavage systems for fcc
LATTICE_fcc_Ntrans = sum(lattice_fcc_NtransSystem), & !< total # of transformation systems for fcc
LATTICE_fcc_Ncleavage = sum(lattice_fcc_NcleavageSystem) !< total # of cleavage systems for fcc

real(pReal), dimension(3+3,LATTICE_fcc_Nslip), parameter, private :: &
LATTICE_fcc_systemSlip = reshape(real([&
@ -377,12 +377,11 @@ module lattice
LATTICE_bcc_NcleavageSystem = int([3,6,0],pInt) !< total # of cleavage systems per family for bcc

integer(pInt), parameter, private :: &
LATTICE_bcc_Nslip = 24_pInt, & ! sum(lattice_bcc_NslipSystem), & !< total # of slip systems for bcc
LATTICE_bcc_Ntwin = 12_pInt, & ! sum(lattice_bcc_NtwinSystem) !< total # of twin systems for bcc
LATTICE_bcc_NnonSchmid = 6_pInt, & !< # of non-Schmid contributions for bcc. 6 known non schmid contributions for BCC (A. Koester, A. Ma, A. Hartmaier 2012)
LATTICE_bcc_Ntrans = 0_pInt, & !< total # of transformations for bcc
LATTICE_bcc_Ncleavage = 9_pInt !< total # of cleavage systems for bcc

LATTICE_bcc_Nslip = sum(lattice_bcc_NslipSystem), & !< total # of slip systems for bcc
LATTICE_bcc_Ntwin = sum(lattice_bcc_NtwinSystem), & !< total # of twin systems for bcc
LATTICE_bcc_NnonSchmid = 6_pInt, & !< total # of non-Schmid contributions for bcc (A. Koester, A. Ma, A. Hartmaier 2012)
LATTICE_bcc_Ntrans = sum(lattice_bcc_NtransSystem), & !< total # of transformation systems for bcc
LATTICE_bcc_Ncleavage = sum(lattice_bcc_NcleavageSystem) !< total # of cleavage systems for bcc

real(pReal), dimension(3+3,LATTICE_bcc_Nslip), parameter, private :: &
LATTICE_bcc_systemSlip = reshape(real([&
@ -575,11 +574,11 @@ module lattice
LATTICE_hex_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for hex

integer(pInt), parameter, private :: &
LATTICE_hex_Nslip = 33_pInt, & ! sum(lattice_hex_NslipSystem), !< total # of slip systems for hex
LATTICE_hex_Ntwin = 24_pInt, & ! sum(lattice_hex_NtwinSystem) !< total # of twin systems for hex
LATTICE_hex_NnonSchmid = 0_pInt, & !< # of non-Schmid contributions for hex
LATTICE_hex_Ntrans = 0_pInt, & !< total # of transformations for hex
LATTICE_hex_Ncleavage = 3_pInt !< total # of transformations for hex
LATTICE_hex_Nslip = sum(lattice_hex_NslipSystem), & !< total # of slip systems for hex
LATTICE_hex_Ntwin = sum(lattice_hex_NtwinSystem), & !< total # of twin systems for hex
LATTICE_hex_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for hex
LATTICE_hex_Ntrans = sum(lattice_hex_NtransSystem), & !< total # of transformation systems for hex
LATTICE_hex_Ncleavage = sum(lattice_hex_NcleavageSystem) !< total # of cleavage systems for hex

real(pReal), dimension(4+4,LATTICE_hex_Nslip), parameter, private :: &
LATTICE_hex_systemSlip = reshape(real([&
@ -842,7 +841,6 @@ module lattice
],pReal),[ 4_pInt + 4_pInt,LATTICE_hex_Ncleavage])



!--------------------------------------------------------------------------------------------------
! bct
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
@ -857,13 +855,12 @@ module lattice
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_bct_NcleavageSystem = int([0,0,0],pInt) !< total # of cleavage systems per family for bct


integer(pInt), parameter, private :: &
LATTICE_bct_Nslip = 52_pInt, & ! sum(lattice_bct_NslipSystem), !< total # of slip systems for bct
LATTICE_bct_Ntwin = 0_pInt, & ! sum(lattice_bcc_NtwinSystem) !< total # of twin systems for bct
LATTICE_bct_NnonSchmid = 0_pInt, & !< # of non-Schmid contributions for bct
LATTICE_bct_Ntrans = 0_pInt, & !< total # of transformations for bct
LATTICE_bct_Ncleavage = 0_pInt !< total # of transformations for bct
LATTICE_bct_Nslip = sum(lattice_bct_NslipSystem), & !< total # of slip systems for bct
LATTICE_bct_Ntwin = sum(lattice_bct_NtwinSystem), & !< total # of twin systems for bct
LATTICE_bct_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for bct
LATTICE_bct_Ntrans = sum(lattice_bct_NtransSystem), & !< total # of transformation systems for bct
LATTICE_bct_Ncleavage = sum(lattice_bct_NcleavageSystem) !< total # of cleavage systems for bct

real(pReal), dimension(3+3,LATTICE_bct_Nslip), parameter, private :: &
LATTICE_bct_systemSlip = reshape(real([&
@ -1007,10 +1004,10 @@ module lattice
!--------------------------------------------------------------------------------------------------
! isotropic
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_iso_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for isotropic
LATTICE_iso_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for iso

integer(pInt), parameter, private :: &
LATTICE_iso_Ncleavage = 3_pInt !< total # of cleavage systems for bcc
LATTICE_iso_Ncleavage = sum(LATTICE_iso_NcleavageSystem) !< total # of cleavage systems for iso

real(pReal), dimension(3+3,LATTICE_iso_Ncleavage), parameter, private :: &
LATTICE_iso_systemCleavage = reshape(real([&
@ -1023,10 +1020,10 @@ module lattice
!--------------------------------------------------------------------------------------------------
! orthorhombic
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_ortho_NcleavageSystem = int([1,1,1],pInt) !< total # of cleavage systems per family for orthotropic
LATTICE_ortho_NcleavageSystem = int([1,1,1],pInt) !< total # of cleavage systems per family for ortho

integer(pInt), parameter, private :: &
LATTICE_ortho_Ncleavage = 3_pInt !< total # of cleavage systems for bcc
LATTICE_ortho_Ncleavage = sum(LATTICE_ortho_NcleavageSystem) !< total # of cleavage systems for ortho

real(pReal), dimension(3+3,LATTICE_ortho_Ncleavage), parameter, private :: &
LATTICE_ortho_systemCleavage = reshape(real([&
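! added sketch (hypothetical names, not part of the commit): the totals above
! are now compile-time sums over the per-family counts, so they stay consistent
! when a family count changes:
! integer(pInt), parameter :: demo_NslipSystem(3) = int([12,6,0],pInt)
! integer(pInt), parameter :: demo_Nslip = sum(demo_NslipSystem) ! = 18_pInt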
@ -24,7 +24,6 @@ module material
ELASTICITY_hooke_label = 'hooke', &
PLASTICITY_none_label = 'none', &
PLASTICITY_isotropic_label = 'isotropic', &
PLASTICITY_j2_label = 'j2', &
PLASTICITY_phenopowerlaw_label = 'phenopowerlaw', &
PLASTICITY_phenoplus_label = 'phenoplus', &
PLASTICITY_dislotwin_label = 'dislotwin', &
@ -74,7 +73,6 @@ module material
enumerator :: PLASTICITY_undefined_ID, &
PLASTICITY_none_ID, &
PLASTICITY_isotropic_ID, &
PLASTICITY_j2_ID, &
PLASTICITY_phenopowerlaw_ID, &
PLASTICITY_phenoplus_ID, &
PLASTICITY_dislotwin_ID, &
@ -313,7 +311,6 @@ module material
ELASTICITY_hooke_ID ,&
PLASTICITY_none_ID, &
PLASTICITY_isotropic_ID, &
PLASTICITY_J2_ID, &
PLASTICITY_phenopowerlaw_ID, &
PLASTICITY_phenoplus_ID, &
PLASTICITY_dislotwin_ID, &
@ -351,9 +348,6 @@ module material
HYDROGENFLUX_cahnhilliard_ID, &
HOMOGENIZATION_none_ID, &
HOMOGENIZATION_isostrain_ID, &
#ifdef HDF
material_NconstituentsPhase, &
#endif
HOMOGENIZATION_RGC_ID

private :: &
@ -982,8 +976,6 @@ subroutine material_parsePhase(fileUnit,myPart)
phase_plasticity(section) = PLASTICITY_NONE_ID
case (PLASTICITY_ISOTROPIC_label)
phase_plasticity(section) = PLASTICITY_ISOTROPIC_ID
case (PLASTICITY_J2_label)
phase_plasticity(section) = PLASTICITY_J2_ID
case (PLASTICITY_PHENOPOWERLAW_label)
phase_plasticity(section) = PLASTICITY_PHENOPOWERLAW_ID
case (PLASTICITY_PHENOPLUS_label)
@ -1603,14 +1595,4 @@ subroutine material_populateGrains

end subroutine material_populateGrains

#ifdef HDF
integer(pInt) pure function material_NconstituentsPhase(matID)

implicit none
integer(pInt), intent(in) :: matID

material_NconstituentsPhase = count(microstructure_phase == matID)
end function
#endif

end module material
@ -1645,14 +1645,14 @@ pure function math_qToAxisAngle(Q)
real(pReal) :: halfAngle, sinHalfAngle
real(pReal), dimension(4) :: math_qToAxisAngle

halfAngle = acos(max(-1.0_pReal, min(1.0_pReal, Q(1)))) ! limit to [-1,1] --> 0 to 180 deg
halfAngle = acos(math_limit(Q(1),-1.0_pReal,1.0_pReal))
sinHalfAngle = sin(halfAngle)

if (sinHalfAngle <= 1.0e-4_pReal) then ! very small rotation angle?
smallRotation: if (sinHalfAngle <= 1.0e-4_pReal) then
math_qToAxisAngle = 0.0_pReal
else
else smallRotation
math_qToAxisAngle= [ Q(2:4)/sinHalfAngle, halfAngle*2.0_pReal]
endif
endif smallRotation

end function math_qToAxisAngle

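! added note: math_limit is assumed to clamp its argument to [left,right],
! i.e. math_limit(a,left,right) = min(max(a,left),right), matching the
! inlined max/min expression on the removed line above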
@ -4525,17 +4525,9 @@ subroutine mesh_write_cellGeom
VTK_geo, &
VTK_con, &
VTK_end
#ifdef HDF
use IO, only: &
HDF5_mappingCells
#endif
implicit none
integer(I4P), dimension(1:mesh_Ncells) :: celltype
integer(I4P), dimension(mesh_Ncells*(1_pInt+FE_maxNcellnodesPerCell)) :: cellconnection
#ifdef HDF
integer(pInt), dimension(mesh_Ncells*FE_maxNcellnodesPerCell) :: cellconnectionHDF5
integer(pInt) :: j2=0_pInt
#endif
integer(I4P):: error
integer(I4P):: g, c, e, CellID, i, j

@ -4550,16 +4542,8 @@ subroutine mesh_write_cellGeom
cellconnection(j+1_pInt:j+FE_NcellnodesPerCell(c)+1_pInt) &
= [FE_NcellnodesPerCell(c),mesh_cell(1:FE_NcellnodesPerCell(c),i,e)-1_pInt] ! number of cellnodes per cell & list of global cellnode IDs belonging to this cell (cellnode counting starts at 0)
j = j + FE_NcellnodesPerCell(c) + 1_pInt
#ifdef HDF
cellconnectionHDF5(j2+1_pInt:j2+FE_NcellnodesPerCell(c)) &
= mesh_cell(1:FE_NcellnodesPerCell(c),i,e)-1_pInt
j2=j2 + FE_ncellnodesPerCell(c)
#endif
enddo
enddo
#ifdef HDF
call HDF5_mappingCells(cellconnectionHDF5(1:j2))
#endif

error=VTK_ini(output_format = 'ASCII', &
title=trim(getSolverJobName())//' cell mesh', &
File diff suppressed because it is too large
@ -7,7 +7,6 @@
!! untextured polycrystal
!--------------------------------------------------------------------------------------------------
module plastic_isotropic

use prec, only: &
pReal,&
pInt, &
@ -140,9 +139,10 @@ subroutine plastic_isotropic_init(fileUnit)
sizeDeltaState
character(len=65536) :: &
tag = '', &
outputtag = '', &
line = '', &
extmsg = ''
character(len=64) :: &
outputtag = ''
integer(pInt) :: NipcMyPhase

mainProcess: if (worldrank == 0) then
@ -382,8 +382,7 @@ subroutine plastic_isotropic_LpAndItsTangent(Lp,dLp_dTstar99,Tstar_v,ipc,ip,el)
math_mul33xx33, &
math_transpose33
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
phasememberAt, &
material_phase, &
phase_plasticityInstance

@ -413,7 +412,7 @@ subroutine plastic_isotropic_LpAndItsTangent(Lp,dLp_dTstar99,Tstar_v,ipc,ip,el)
k, l, m, n

of = phasememberAt(ipc,ip,el) ! phasememberAt should be tackled by material and be renamed to material_phasemember
instance = phase_plasticityInstance(phaseAt(ipc,ip,el)) ! "phaseAt" equivalent to "material_phase" !!
instance = phase_plasticityInstance(material_phase(ipc,ip,el))

Tstar_dev_33 = math_deviatoric33(math_Mandel6to33(Tstar_v)) ! deviatoric part of 2nd Piola-Kirchhoff stress
squarenorm_Tstar_dev = math_mul33xx33(Tstar_dev_33,Tstar_dev_33)
@ -463,8 +462,7 @@ subroutine plastic_isotropic_LiAndItsTangent(Li,dLi_dTstar_3333,Tstar_v,ipc,ip,e
math_spherical33, &
math_mul33xx33
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
phasememberAt, &
material_phase, &
phase_plasticityInstance

@ -491,17 +489,13 @@ real(pReal) :: &
k, l, m, n

of = phasememberAt(ipc,ip,el) ! phasememberAt should be tackled by material and be renamed to material_phasemember
instance = phase_plasticityInstance(phaseAt(ipc,ip,el)) ! "phaseAt" equivalent to "material_phase" !!
instance = phase_plasticityInstance(material_phase(ipc,ip,el))

Tstar_sph_33 = math_spherical33(math_Mandel6to33(Tstar_v)) ! spherical part of 2nd Piola-Kirchhoff stress
squarenorm_Tstar_sph = math_mul33xx33(Tstar_sph_33,Tstar_sph_33)
norm_Tstar_sph = sqrt(squarenorm_Tstar_sph)

if (param(instance)%dilatation) then
if (norm_Tstar_sph <= 0.0_pReal) then ! Tstar == 0 --> both Li and dLi_dTstar are zero
Li = 0.0_pReal
dLi_dTstar_3333 = 0.0_pReal
else
if (param(instance)%dilatation .and. norm_Tstar_sph > 0.0_pReal) then ! Tstar == 0 or J2 plasticity --> both Li and dLi_dTstar are zero
gamma_dot = param(instance)%gdot0 &
* (sqrt(1.5_pReal) * norm_Tstar_sph / param(instance)%fTaylor / state(instance)%flowstress(of) ) &
**param(instance)%n
@ -518,7 +512,6 @@ real(pReal) :: &

dLi_dTstar_3333 = gamma_dot / param(instance)%fTaylor * &
dLi_dTstar_3333 / norm_Tstar_sph
endif
else
Li = 0.0_pReal
dLi_dTstar_3333 = 0.0_pReal
@ -534,8 +527,7 @@ subroutine plastic_isotropic_dotState(Tstar_v,ipc,ip,el)
use math, only: &
math_mul6x6
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
phasememberAt, &
material_phase, &
phase_plasticityInstance

@ -558,7 +550,7 @@ subroutine plastic_isotropic_dotState(Tstar_v,ipc,ip,el)
of !< shortcut notation for offset position in state array

of = phasememberAt(ipc,ip,el) ! phasememberAt should be tackled by material and be renamed to material_phasemember
instance = phase_plasticityInstance(phaseAt(ipc,ip,el)) ! "phaseAt" equivalent to "material_phase" !!
instance = phase_plasticityInstance(material_phase(ipc,ip,el))

!--------------------------------------------------------------------------------------------------
! norm of (deviatoric) 2nd Piola-Kirchhoff stress
@ -614,8 +606,7 @@ function plastic_isotropic_postResults(Tstar_v,ipc,ip,el)
math_mul6x6
use material, only: &
material_phase, &
plasticState, &
phaseAt, phasememberAt, &
phasememberAt, &
phase_plasticityInstance

implicit none
@ -639,7 +630,7 @@ function plastic_isotropic_postResults(Tstar_v,ipc,ip,el)
o

of = phasememberAt(ipc,ip,el) ! phasememberAt should be tackled by material and be renamed to material_phasemember
instance = phase_plasticityInstance(phaseAt(ipc,ip,el)) ! "phaseAt" equivalent to "material_phase" !!
instance = phase_plasticityInstance(material_phase(ipc,ip,el))

!--------------------------------------------------------------------------------------------------
! norm of (deviatoric) 2nd Piola-Kirchhoff stress
@ -1,564 +0,0 @@
!--------------------------------------------------------------------------------------------------
!> @author Franz Roters, Max-Planck-Institut für Eisenforschung GmbH
!> @author Philip Eisenlohr, Max-Planck-Institut für Eisenforschung GmbH
!> @brief material subroutine for isotropic (J2) plasticity
!> @details Isotropic (J2) Plasticity which resembles the phenopowerlaw plasticity without
!! resolving the stress on the slip systems. Will give the response of phenopowerlaw for an
!! untextured polycrystal
!--------------------------------------------------------------------------------------------------
module plastic_j2
#ifdef HDF
use hdf5, only: &
HID_T
#endif

use prec, only: &
pReal,&
pInt

implicit none
private
integer(pInt), dimension(:), allocatable, public, protected :: &
plastic_j2_sizePostResults !< cumulative size of post results

integer(pInt), dimension(:,:), allocatable, target, public :: &
plastic_j2_sizePostResult !< size of each post result output

character(len=64), dimension(:,:), allocatable, target, public :: &
plastic_j2_output !< name of each post result output

integer(pInt), dimension(:), allocatable, target, public :: &
plastic_j2_Noutput !< number of outputs per instance
real(pReal), dimension(:), allocatable, private :: &
plastic_j2_fTaylor, & !< Taylor factor
plastic_j2_tau0, & !< initial plastic stress
plastic_j2_gdot0, & !< reference velocity
plastic_j2_n, & !< Visco-plastic parameter
!--------------------------------------------------------------------------------------------------
! h0 as function of h0 = A + B log (gammadot)
plastic_j2_h0, &
plastic_j2_h0_slopeLnRate, &
plastic_j2_tausat, & !< final plastic stress
plastic_j2_a, &
plastic_j2_aTolResistance, &
plastic_j2_aTolShear, &
!--------------------------------------------------------------------------------------------------
! tausat += (asinh((gammadot / SinhFitA)**(1 / SinhFitD)))**(1 / SinhFitC) / (SinhFitB * (gammadot / gammadot0)**(1/n))
plastic_j2_tausat_SinhFitA, & !< fitting parameter for normalized strain rate vs. stress function
plastic_j2_tausat_SinhFitB, & !< fitting parameter for normalized strain rate vs. stress function
plastic_j2_tausat_SinhFitC, & !< fitting parameter for normalized strain rate vs. stress function
plastic_j2_tausat_SinhFitD !< fitting parameter for normalized strain rate vs. stress function

enum, bind(c)
enumerator :: undefined_ID, &
flowstress_ID, &
strainrate_ID
end enum
integer(kind(undefined_ID)), dimension(:,:), allocatable, private :: &
plastic_j2_outputID !< ID of each post result output


#ifdef HDF
type plastic_j2_tOutput
real(pReal), dimension(:), allocatable, private :: &
flowstress, &
strainrate
logical :: flowstressActive = .false., strainrateActive = .false. ! if we can write the output block wise, this is not needed anymore because we can do an if(allocated(xxx))
end type plastic_j2_tOutput
type(plastic_j2_tOutput), allocatable, dimension(:) :: plastic_j2_Output2
integer(HID_T), allocatable, dimension(:) :: outID
#endif


public :: &
plastic_j2_init, &
plastic_j2_LpAndItsTangent, &
plastic_j2_dotState, &
plastic_j2_postResults

contains


!--------------------------------------------------------------------------------------------------
!> @brief module initialization
!> @details reads in material parameters, allocates arrays, and does sanity checks
!--------------------------------------------------------------------------------------------------
subroutine plastic_j2_init(fileUnit)
use, intrinsic :: iso_fortran_env ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
#ifdef HDF
use hdf5
#endif
use debug, only: &
debug_level, &
debug_constitutive, &
debug_levelBasic
use numerics, only: &
analyticJaco, &
worldrank, &
numerics_integrator
use math, only: &
math_Mandel3333to66, &
math_Voigt66to3333
use IO, only: &
IO_read, &
IO_lc, &
IO_getTag, &
IO_isBlank, &
IO_stringPos, &
IO_stringValue, &
IO_floatValue, &
IO_error, &
IO_timeStamp, &
#ifdef HDF
tempResults, &
HDF5_addGroup, &
HDF5_addScalarDataset,&
#endif
IO_EOF
use material, only: &
phase_plasticity, &
phase_plasticityInstance, &
phase_Noutput, &
PLASTICITY_J2_label, &
PLASTICITY_J2_ID, &
material_phase, &
plasticState, &
MATERIAL_partPhase

use lattice

implicit none
integer(pInt), intent(in) :: fileUnit


integer(pInt), allocatable, dimension(:) :: chunkPos
integer(pInt) :: &
o, &
phase, &
maxNinstance, &
instance, &
mySize, &
sizeDotState, &
sizeState, &
sizeDeltaState
character(len=65536) :: &
tag = '', &
line = ''
integer(pInt) :: NofMyPhase

#ifdef HDF
character(len=5) :: &
str1
integer(HID_T) :: ID,ID2,ID4
#endif

mainProcess: if (worldrank == 0) then
write(6,'(/,a)') ' <<<+- constitutive_'//PLASTICITY_J2_label//' init -+>>>'
write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
endif mainProcess

maxNinstance = int(count(phase_plasticity == PLASTICITY_J2_ID),pInt)
if (maxNinstance == 0_pInt) return

if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0_pInt) &
write(6,'(a16,1x,i5,/)') '# instances:',maxNinstance

#ifdef HDF
allocate(plastic_j2_Output2(maxNinstance))
allocate(outID(maxNinstance))
#endif

allocate(plastic_j2_sizePostResults(maxNinstance), source=0_pInt)
allocate(plastic_j2_sizePostResult(maxval(phase_Noutput), maxNinstance),source=0_pInt)
allocate(plastic_j2_output(maxval(phase_Noutput), maxNinstance))
plastic_j2_output = ''
allocate(plastic_j2_outputID(maxval(phase_Noutput),maxNinstance), source=undefined_ID)
allocate(plastic_j2_Noutput(maxNinstance), source=0_pInt)
allocate(plastic_j2_fTaylor(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tau0(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_gdot0(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_n(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_h0(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_h0_slopeLnRate(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_a(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_aTolResistance(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_aTolShear (maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat_SinhFitA(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat_SinhFitB(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat_SinhFitC(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat_SinhFitD(maxNinstance), source=0.0_pReal)

rewind(fileUnit)
phase = 0_pInt
do while (trim(line) /= IO_EOF .and. IO_lc(IO_getTag(line,'<','>')) /= material_partPhase) ! wind forward to <phase>
line = IO_read(fileUnit)
enddo

parsingFile: do while (trim(line) /= IO_EOF) ! read through sections of phase part
line = IO_read(fileUnit)
if (IO_isBlank(line)) cycle ! skip empty lines
if (IO_getTag(line,'<','>') /= '') then ! stop at next part
line = IO_read(fileUnit, .true.) ! reset IO_read
exit
endif
if (IO_getTag(line,'[',']') /= '') then ! next section
phase = phase + 1_pInt ! advance section counter
if (phase_plasticity(phase) == PLASTICITY_J2_ID) then
instance = phase_plasticityInstance(phase)
endif
cycle ! skip to next line
endif
if (phase > 0_pInt ) then; if (phase_plasticity(phase) == PLASTICITY_J2_ID) then ! one of my phases. Do not short-circuit here (.and. between if-statements), it's not safe in Fortran
instance = phase_plasticityInstance(phase) ! which instance of my plasticity is present phase
chunkPos = IO_stringPos(line)
tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt)) ! extract key

select case(tag)
case ('(output)')
select case(IO_lc(IO_stringValue(line,chunkPos,2_pInt)))
case ('flowstress')
plastic_j2_Noutput(instance) = plastic_j2_Noutput(instance) + 1_pInt
plastic_j2_outputID(plastic_j2_Noutput(instance),instance) = flowstress_ID
plastic_j2_output(plastic_j2_Noutput(instance),instance) = &
IO_lc(IO_stringValue(line,chunkPos,2_pInt))
case ('strainrate')
plastic_j2_Noutput(instance) = plastic_j2_Noutput(instance) + 1_pInt
plastic_j2_outputID(plastic_j2_Noutput(instance),instance) = strainrate_ID
plastic_j2_output(plastic_j2_Noutput(instance),instance) = &
IO_lc(IO_stringValue(line,chunkPos,2_pInt))
case default

end select
case ('tau0')
plastic_j2_tau0(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_tau0(instance) < 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('gdot0')
plastic_j2_gdot0(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_gdot0(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('n')
plastic_j2_n(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_n(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('h0')
plastic_j2_h0(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('h0_slope','slopelnrate')
plastic_j2_h0_slopeLnRate(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('tausat')
plastic_j2_tausat(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_tausat(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('tausat_sinhfita')
plastic_j2_tausat_SinhFitA(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('tausat_sinhfitb')
plastic_j2_tausat_SinhFitB(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('tausat_sinhfitc')
plastic_j2_tausat_SinhFitC(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('tausat_sinhfitd')
plastic_j2_tausat_SinhFitD(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('a', 'w0')
plastic_j2_a(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_a(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('taylorfactor')
plastic_j2_fTaylor(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_fTaylor(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('atol_resistance')
plastic_j2_aTolResistance(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_aTolResistance(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('atol_shear')
plastic_j2_aTolShear(instance) = IO_floatValue(line,chunkPos,2_pInt)

case default

end select
endif; endif
enddo parsingFile

initializeInstances: do phase = 1_pInt, size(phase_plasticity)
myPhase: if (phase_plasticity(phase) == PLASTICITY_j2_ID) then
NofMyPhase=count(material_phase==phase)
instance = phase_plasticityInstance(phase)
!--------------------------------------------------------------------------------------------------
! sanity checks
if (plastic_j2_aTolShear(instance) <= 0.0_pReal) &
plastic_j2_aTolShear(instance) = 1.0e-6_pReal ! default absolute tolerance 1e-6

!--------------------------------------------------------------------------------------------------
! Determine size of postResults array
outputsLoop: do o = 1_pInt,plastic_j2_Noutput(instance)
select case(plastic_j2_outputID(o,instance))
case(flowstress_ID,strainrate_ID)
mySize = 1_pInt
case default
end select

outputFound: if (mySize > 0_pInt) then
plastic_j2_sizePostResult(o,instance) = mySize
plastic_j2_sizePostResults(instance) = &
plastic_j2_sizePostResults(instance) + mySize
endif outputFound
enddo outputsLoop

!--------------------------------------------------------------------------------------------------
! allocate state arrays
sizeState = 2_pInt
sizeDotState = sizeState
sizeDeltaState = 0_pInt
plasticState(phase)%sizeState = sizeState
plasticState(phase)%sizeDotState = sizeDotState
plasticState(phase)%sizeDeltaState = sizeDeltaState
plasticState(phase)%sizePostResults = plastic_j2_sizePostResults(instance)
plasticState(phase)%nSlip = 1
plasticState(phase)%nTwin = 0
plasticState(phase)%nTrans= 0
allocate(plasticState(phase)%aTolState ( sizeState))
plasticState(phase)%aTolState(1) = plastic_j2_aTolResistance(instance)
plasticState(phase)%aTolState(2) = plastic_j2_aTolShear(instance)
allocate(plasticState(phase)%state0 ( sizeState,NofMyPhase))
plasticState(phase)%state0(1,1:NofMyPhase) = plastic_j2_tau0(instance)
plasticState(phase)%state0(2,1:NofMyPhase) = 0.0_pReal
allocate(plasticState(phase)%partionedState0 ( sizeState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%subState0 ( sizeState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%state ( sizeState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%dotState (sizeDotState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%deltaState (sizeDeltaState,NofMyPhase),source=0.0_pReal)
if (.not. analyticJaco) then
allocate(plasticState(phase)%state_backup ( sizeState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%dotState_backup (sizeDotState,NofMyPhase),source=0.0_pReal)
endif
if (any(numerics_integrator == 1_pInt)) then
allocate(plasticState(phase)%previousDotState (sizeDotState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%previousDotState2(sizeDotState,NofMyPhase),source=0.0_pReal)
endif
if (any(numerics_integrator == 4_pInt)) &
allocate(plasticState(phase)%RK4dotState (sizeDotState,NofMyPhase),source=0.0_pReal)
if (any(numerics_integrator == 5_pInt)) &
allocate(plasticState(phase)%RKCK45dotState (6,sizeDotState,NofMyPhase),source=0.0_pReal)
plasticState(phase)%slipRate => plasticState(phase)%dotState(2:2,1:NofMyPhase)
plasticState(phase)%accumulatedSlip => plasticState(phase)%state (2:2,1:NofMyPhase)
endif myPhase
enddo initializeInstances

end subroutine plastic_j2_init


!--------------------------------------------------------------------------------------------------
!> @brief calculates plastic velocity gradient and its tangent
!--------------------------------------------------------------------------------------------------
subroutine plastic_j2_LpAndItsTangent(Lp,dLp_dTstar99,Tstar_v,ipc,ip,el)
use math, only: &
math_mul6x6, &
math_Mandel6to33, &
math_Plain3333to99, &
math_deviatoric33, &
math_mul33xx33
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
material_phase, &
phase_plasticityInstance

implicit none
real(pReal), dimension(3,3), intent(out) :: &
Lp !< plastic velocity gradient
real(pReal), dimension(9,9), intent(out) :: &
dLp_dTstar99 !< derivative of Lp with respect to 2nd Piola Kirchhoff stress

real(pReal), dimension(6), intent(in) :: &
Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation
integer(pInt), intent(in) :: &
ipc, & !< component-ID of integration point
ip, & !< integration point
el !< element

real(pReal), dimension(3,3) :: &
Tstar_dev_33 !< deviatoric part of the 2nd Piola Kirchhoff stress tensor as 2nd order tensor
real(pReal), dimension(3,3,3,3) :: &
dLp_dTstar_3333 !< derivative of Lp with respect to Tstar as 4th order tensor
real(pReal) :: &
gamma_dot, & !< strainrate
norm_Tstar_dev, & !< euclidean norm of Tstar_dev
squarenorm_Tstar_dev !< square of the euclidean norm of Tstar_dev
integer(pInt) :: &
instance, &
k, l, m, n

instance = phase_plasticityInstance(material_phase(ipc,ip,el))
Tstar_dev_33 = math_deviatoric33(math_Mandel6to33(Tstar_v)) ! deviatoric part of 2nd Piola-Kirchhoff stress
squarenorm_Tstar_dev = math_mul33xx33(Tstar_dev_33,Tstar_dev_33)
norm_Tstar_dev = sqrt(squarenorm_Tstar_dev)

if (norm_Tstar_dev <= 0.0_pReal) then ! Tstar == 0 --> both Lp and dLp_dTstar are zero
Lp = 0.0_pReal
dLp_dTstar99 = 0.0_pReal
else
gamma_dot = plastic_j2_gdot0(instance) &
* (sqrt(1.5_pReal) * norm_Tstar_dev / (plastic_j2_fTaylor(instance) * &
plasticState(phaseAt(ipc,ip,el))%state(1,phasememberAt(ipc,ip,el)))) &
**plastic_j2_n(instance)

Lp = Tstar_dev_33/norm_Tstar_dev * gamma_dot/plastic_j2_fTaylor(instance)

!--------------------------------------------------------------------------------------------------
! Calculation of the tangent of Lp
forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) &
dLp_dTstar_3333(k,l,m,n) = (plastic_j2_n(instance)-1.0_pReal) * &
Tstar_dev_33(k,l)*Tstar_dev_33(m,n) / squarenorm_Tstar_dev
forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt) &
dLp_dTstar_3333(k,l,k,l) = dLp_dTstar_3333(k,l,k,l) + 1.0_pReal
forall (k=1_pInt:3_pInt,m=1_pInt:3_pInt) &
dLp_dTstar_3333(k,k,m,m) = dLp_dTstar_3333(k,k,m,m) - 1.0_pReal/3.0_pReal
dLp_dTstar99 = math_Plain3333to99(gamma_dot / plastic_j2_fTaylor(instance) * &
dLp_dTstar_3333 / norm_Tstar_dev)
end if
end subroutine plastic_j2_LpAndItsTangent


!--------------------------------------------------------------------------------------------------
!> @brief calculates the rate of change of microstructure
!--------------------------------------------------------------------------------------------------
subroutine plastic_j2_dotState(Tstar_v,ipc,ip,el)
use math, only: &
math_mul6x6
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
material_phase, &
phase_plasticityInstance

implicit none
real(pReal), dimension(6), intent(in):: &
Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation
integer(pInt), intent(in) :: &
ipc, & !< component-ID of integration point
ip, & !< integration point
el !< element
real(pReal), dimension(6) :: &
Tstar_dev_v !< deviatoric part of the 2nd Piola Kirchhoff stress tensor in Mandel notation
real(pReal) :: &
gamma_dot, & !< strainrate
hardening, & !< hardening coefficient
saturation, & !< saturation resistance
norm_Tstar_dev !< euclidean norm of Tstar_dev
integer(pInt) :: &
instance, & !< instance of my instance (unique number of my constitutive model)
of, & !< shortcut notation for offset position in state array
ph !< shortcut notation for phase ID (unique number of all phases, regardless of constitutive model)

of = phasememberAt(ipc,ip,el)
ph = phaseAt(ipc,ip,el)
instance = phase_plasticityInstance(material_phase(ipc,ip,el))

!--------------------------------------------------------------------------------------------------
! norm of deviatoric part of 2nd Piola-Kirchhoff stress
Tstar_dev_v(1:3) = Tstar_v(1:3) - sum(Tstar_v(1:3))/3.0_pReal
Tstar_dev_v(4:6) = Tstar_v(4:6)
norm_Tstar_dev = sqrt(math_mul6x6(Tstar_dev_v,Tstar_dev_v))

!--------------------------------------------------------------------------------------------------
! strain rate
gamma_dot = plastic_j2_gdot0(instance) * ( sqrt(1.5_pReal) * norm_Tstar_dev &
/ &!-----------------------------------------------------------------------------------
(plastic_j2_fTaylor(instance)*plasticState(ph)%state(1,of)) )**plastic_j2_n(instance)

!--------------------------------------------------------------------------------------------------
! hardening coefficient
if (abs(gamma_dot) > 1e-12_pReal) then
if (abs(plastic_j2_tausat_SinhFitA(instance)) <= tiny(0.0_pReal)) then
saturation = plastic_j2_tausat(instance)
else
saturation = ( plastic_j2_tausat(instance) &
+ ( log( ( gamma_dot / plastic_j2_tausat_SinhFitA(instance)&
)**(1.0_pReal / plastic_j2_tausat_SinhFitD(instance))&
+ sqrt( ( gamma_dot / plastic_j2_tausat_SinhFitA(instance) &
)**(2.0_pReal / plastic_j2_tausat_SinhFitD(instance)) &
+ 1.0_pReal ) &
) & ! asinh(K) = ln(K + sqrt(K^2 +1))
)**(1.0_pReal / plastic_j2_tausat_SinhFitC(instance)) &
/ ( plastic_j2_tausat_SinhFitB(instance) &
* (gamma_dot / plastic_j2_gdot0(instance))**(1.0_pReal / plastic_j2_n(instance)) &
) &
)
endif
hardening = ( plastic_j2_h0(instance) + plastic_j2_h0_slopeLnRate(instance) * log(gamma_dot) ) &
* abs( 1.0_pReal - plasticState(ph)%state(1,of)/saturation )**plastic_j2_a(instance) &
* sign(1.0_pReal, 1.0_pReal - plasticState(ph)%state(1,of)/saturation)
else
hardening = 0.0_pReal
endif

plasticState(ph)%dotState(1,of) = hardening * gamma_dot
plasticState(ph)%dotState(2,of) = gamma_dot

end subroutine plastic_j2_dotState

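! added note (sketch): the sinh-fit block above evaluates the closed form
!   saturation = tausat + asinh((gamma_dot/A)**(1/D))**(1/C) / (B*(gamma_dot/gdot0)**(1/n))
! spelled out with log/sqrt because asinh only became an intrinsic in Fortran 2008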
!--------------------------------------------------------------------------------------------------
!> @brief return array of constitutive results
!--------------------------------------------------------------------------------------------------
function plastic_j2_postResults(Tstar_v,ipc,ip,el)
use math, only: &
math_mul6x6
use material, only: &
material_phase, &
plasticState, &
phaseAt, phasememberAt, &
phase_plasticityInstance

implicit none
real(pReal), dimension(6), intent(in) :: &
Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation
integer(pInt), intent(in) :: &
ipc, & !< component-ID of integration point
ip, & !< integration point
el !< element
real(pReal), dimension(plastic_j2_sizePostResults(phase_plasticityInstance(material_phase(ipc,ip,el)))) :: &
plastic_j2_postResults

real(pReal), dimension(6) :: &
Tstar_dev_v ! deviatoric part of the 2nd Piola Kirchhoff stress tensor in Mandel notation
real(pReal) :: &
norm_Tstar_dev ! euclidean norm of Tstar_dev
integer(pInt) :: &
instance, & !< instance of my instance (unique number of my constitutive model)
of, & !< shortcut notation for offset position in state array
ph, & !< shortcut notation for phase ID (unique number of all phases, regardless of constitutive model)
c, &
o

of = phasememberAt(ipc,ip,el)
ph = phaseAt(ipc,ip,el)
instance = phase_plasticityInstance(material_phase(ipc,ip,el))

!--------------------------------------------------------------------------------------------------
! calculate deviatoric part of 2nd Piola-Kirchhoff stress and its norm
Tstar_dev_v(1:3) = Tstar_v(1:3) - sum(Tstar_v(1:3))/3.0_pReal
Tstar_dev_v(4:6) = Tstar_v(4:6)
norm_Tstar_dev = sqrt(math_mul6x6(Tstar_dev_v,Tstar_dev_v))

c = 0_pInt
plastic_j2_postResults = 0.0_pReal

outputsLoop: do o = 1_pInt,plastic_j2_Noutput(instance)
select case(plastic_j2_outputID(o,instance))
case (flowstress_ID)
plastic_j2_postResults(c+1_pInt) = plasticState(ph)%state(1,of)
c = c + 1_pInt
case (strainrate_ID)
plastic_j2_postResults(c+1_pInt) = &
plastic_j2_gdot0(instance) * ( sqrt(1.5_pReal) * norm_Tstar_dev &
/ &!----------------------------------------------------------------------------------
(plastic_j2_fTaylor(instance) * plasticState(ph)%state(1,of)) ) ** plastic_j2_n(instance)
c = c + 1_pInt
end select
enddo outputsLoop

end function plastic_j2_postResults


end module plastic_j2