Merge remote-tracking branch 'remotes/origin/DADF5-improvements' into development

Franz Roters 2019-11-02 20:32:34 +01:00
commit 6f3cb071ec
30 changed files with 599 additions and 1325 deletions

View File

@ -506,7 +506,6 @@ Processing:
stage: createDocumentation stage: createDocumentation
script: script:
- cd $DAMASKROOT/processing/pre - cd $DAMASKROOT/processing/pre
- rm abq_addUserOutput.py marc_addUserOutput.py
- $DAMASKROOT/PRIVATE/documenting/scriptHelpToWiki.py --debug *.py - $DAMASKROOT/PRIVATE/documenting/scriptHelpToWiki.py --debug *.py
- cd $DAMASKROOT/processing/post - cd $DAMASKROOT/processing/post
- rm vtk2ang.py DAD*.py - rm vtk2ang.py DAD*.py

View File

@ -1,6 +1,6 @@
######################################################################################## ########################################################################################
# Compiler options for building DAMASK # Compiler options for building DAMASK
cmake_minimum_required (VERSION 3.6.0 FATAL_ERROR) cmake_minimum_required (VERSION 3.10.0 FATAL_ERROR)
#--------------------------------------------------------------------------------------- #---------------------------------------------------------------------------------------
# Find PETSc from system environment # Find PETSc from system environment

@ -1 +1 @@
Subproject commit b04c068a080a6424c3f655a11de3cfe2d73f9fdc Subproject commit a3a88933cbb92b81d481305ce93374917baf3980

View File

@ -1,66 +0,0 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
import os,string,scipy
import numpy as np
import damask
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Apply filter(s) to Gwyddion data.
""" + string.replace(scriptID,'\n','\\n')
)
for option in ['opening',
'closing',
'erosion',
'dilation',
'average',
'median',
]:
parser.add_option('-%s'%option[0], '--%s'%option, dest=option, type='int',
help = 'stencil size for %s filter'%option)
parser.set_default(option, 0)
(options, filenames) = parser.parse_args()
# ------------------------------------------ read Gwyddion data ---------------------------------------
for file in filenames:
filters = ''
header = []
with open(file,'r') as f:
for line in f:
pieces = line.split()
if pieces[0] != '#': break
if pieces[1] == 'Width:': width = float(pieces[2])
if pieces[1] == 'Height:': height = float(pieces[2])
header.append(line.lstrip('#').strip())
elevation = np.loadtxt(file)#*1e6
if options.opening > 0:
elevation = scipy.ndimage.morphology.grey_opening(elevation,options.opening)
filters += '_opening%i'%options.opening
if options.closing > 0:
elevation = scipy.ndimage.morphology.grey_closing(elevation,options.closing)
filters += '_closing%i'%options.closing
if options.erosion > 0:
elevation = scipy.ndimage.morphology.grey_erosion(elevation,options.erosion)
filters += '_erosion%i'%options.erosion
if options.dilation > 0:
elevation = scipy.ndimage.morphology.grey_dilation(elevation,options.dilation)
filters += '_dilation%i'%options.dilation
if options.average > 0:
elevation = scipy.ndimage.filters.uniform_filter(elevation,options.average)
filters += '_avg%i'%options.average
if options.median > 0:
elevation = scipy.ndimage.filters.median_filter(elevation,options.median)
filters += '_median%i'%options.median
np.savetxt(os.path.splitext(file)[0]+filters+os.path.splitext(file)[1],elevation,header='\n'.join(header))

View File

@ -1,98 +0,0 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
import os,string,vtk
import numpy as np
import damask
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
scalingFactor = { \
'm': {
'm': 1e0,
'mm': 1e-3,
'µm': 1e-6,
},
'mm': {
'm': 1e+3,
'mm': 1e0,
'µm': 1e-3,
},
'µm': {
'm': 1e+6,
'mm': 1e+3,
'µm': 1e0,
},
}
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Produce VTK rectilinear grid from Gwyddion dataset exported as text.
""" + string.replace(scriptID,'\n','\\n')
)
parser.add_option('-s', '--scaling', dest='scaling', type='float',
help = 'scaling factor for elevation data [auto]')
parser.set_defaults(scaling = 0.0)
(options, filenames) = parser.parse_args()
# ------------------------------------------ read Gwyddion data ---------------------------------------
for file in filenames:
with open(file,'r') as f:
for line in f:
pieces = line.split()
if pieces[0] != '#': break
if len(pieces) < 2: continue
if pieces[1] == 'Width:':
width = float(pieces[2])
lateralunit = pieces[3]
if pieces[1] == 'Height:':
height = float(pieces[2])
lateralunit = pieces[3]
if pieces[1] == 'Value' and pieces[2] == 'units:':
elevationunit = pieces[3]
if options.scaling == 0.0:
options.scaling = scalingFactor[lateralunit][elevationunit]
elevation = np.loadtxt(file)*options.scaling
grid = vtk.vtkRectilinearGrid()
grid.SetDimensions(elevation.shape[1],elevation.shape[0],1)
xCoords = vtk.vtkDoubleArray()
for x in np.arange(0.0,width,width/elevation.shape[1],'d'):
xCoords.InsertNextValue(x)
yCoords = vtk.vtkDoubleArray()
for y in np.arange(0.0,height,height/elevation.shape[0],'d'):
yCoords.InsertNextValue(y)
zCoords = vtk.vtkDoubleArray()
zCoords.InsertNextValue(0.0)
grid.SetXCoordinates(xCoords)
grid.SetYCoordinates(yCoords)
grid.SetZCoordinates(zCoords)
vector = vtk.vtkFloatArray()
vector.SetName("elevation");
vector.SetNumberOfComponents(3);
vector.SetNumberOfTuples(np.prod(elevation.shape));
for i,z in enumerate(np.ravel(elevation)):
vector.SetTuple3(i,0,0,z)
grid.GetPointData().AddArray(vector)
writer = vtk.vtkXMLRectilinearGridWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.splitext(file)[0]+'.vtr')
if vtk.VTK_MAJOR_VERSION <= 5:
writer.SetInput(grid)
else:
writer.SetInputData(grid)
writer.Write()

View File

@ -42,7 +42,7 @@ for filename in options.filenames:
results = damask.DADF5(filename) results = damask.DADF5(filename)
if results.structured: # for grid solvers use rectilinear grid if results.structured: # for grid solvers use rectilinear grid
grid = vtk.vtkRectilineagrid() grid = vtk.vtkRectilinearGrid()
coordArray = [vtk.vtkDoubleArray(), coordArray = [vtk.vtkDoubleArray(),
vtk.vtkDoubleArray(), vtk.vtkDoubleArray(),
vtk.vtkDoubleArray(), vtk.vtkDoubleArray(),
@ -120,7 +120,7 @@ for filename in options.filenames:
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
grid.GetCellData().AddArray(vtk_data[-1]) grid.GetCellData().AddArray(vtk_data[-1])
writer = vtk.vtkXMLRectilineagridWriter() if results.structured else \ writer = vtk.vtkXMLRectilinearGridWriter() if results.structured else \
vtk.vtkXMLUnstructuredGridWriter() vtk.vtkXMLUnstructuredGridWriter()
results.set_visible('constituents', False) results.set_visible('constituents', False)

View File

@ -1,150 +0,0 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
import os
import sys
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# -----------------------------
def outMentat(cmd,locals):
if cmd[0:3] == '(!)':
exec(cmd[3:])
elif cmd[0:3] == '(?)':
cmd = eval(cmd[3:])
py_mentat.py_send(cmd)
else:
py_mentat.py_send(cmd)
return
# -----------------------------
def outStdout(cmd,locals):
if cmd[0:3] == '(!)':
exec(cmd[3:])
elif cmd[0:3] == '(?)':
cmd = eval(cmd[3:])
print(cmd)
else:
print(cmd)
return
# -----------------------------
def output(cmds,locals,dest):
for cmd in cmds:
if isinstance(cmd,list):
output(cmd,locals,dest)
else:
{\
'Mentat': outMentat,\
'Stdout': outStdout,\
}[dest](cmd,locals)
return
# -----------------------------
def colorMap(colors,baseIdx=32):
cmds = [ "*color %i %f %f %f"%(idx+baseIdx,color[0],color[1],color[2])
for idx,color in enumerate(colors) ]
return cmds
# -----------------------------
# MAIN FUNCTION STARTS HERE
# -----------------------------
parser = OptionParser(option_class=damask.extendableOption,
usage="%prog [options] predefinedScheme | (lower_h,s,l upper_h,s,l)", description = """
Changes the color map in MSC.Mentat.
Interpolates colors between "lower_hsl" and "upper_hsl".
""", version = scriptID)
parser.add_option("-i","--inverse", action = "store_true",
dest = "inverse",
help = "invert legend")
parser.add_option( "--palette", action = "store_true",
dest = "palette",
help = "output plain rgb palette integer values (0-255)")
parser.add_option( "--palettef", action = "store_true",
dest = "palettef",
help = "output plain rgb palette float values (0.0-1.0)")
parser.add_option("-p", "--port", type = "int",
dest = "port",
metavar ='int',
help = "Mentat connection port [%default]")
parser.add_option("-b", "--baseindex", type = "int",
metavar ='int',
dest = "baseIdx",
help = "base index of colormap [%default]")
parser.add_option("-n", "--colorcount", type = "int",
metavar ='int',
dest = "colorcount",
help = "number of colors [%default]")
parser.add_option("-v", "--verbose", action="store_true",
dest = "verbose",
help = "write Mentat command stream also to STDOUT")
parser.set_defaults(port = 40007)
parser.set_defaults(baseIdx = 32)
parser.set_defaults(colorcount = 32)
parser.set_defaults(inverse = False)
parser.set_defaults(palette = False)
parser.set_defaults(palettef = False)
parser.set_defaults(verbose = False)
msg = []
(options, colors) = parser.parse_args()
if len(colors) == 0:
parser.error('missing color information')
elif len(colors) == 1:
theMap = damask.Colormap(predefined = colors[0])
elif len(colors) == 2:
theMap = damask.Colormap(damask.Color('HSL',map(float, colors[0].split(','))),
damask.Color('HSL',map(float, colors[1].split(','))) )
else:
theMap = damask.Colormap()
if options.inverse:
theMap = theMap.invert()
if options.palettef:
print(theMap.export(format='raw',steps=options.colorcount))
elif options.palette:
for theColor in theMap.export(format='list',steps=options.colorcount):
print('\t'.join(map(lambda x: str(int(255*x)),theColor)))
else: # connect to Mentat and change colorMap
sys.path.append(damask.solver.Marc().libraryPath())
try:
import py_mentat
print('waiting to connect...')
py_mentat.py_connect('',options.port)
print('connected...')
mentat = True
except:
sys.stderr.write('warning: no valid Mentat release found\n')
mentat = False
outputLocals = {}
cmds = colorMap(theMap.export(format='list',steps=options.colorcount),options.baseIdx)
if mentat:
output(['*show_table']+cmds+['*show_model *redraw'],outputLocals,'Mentat')
py_mentat.py_disconnect()
if options.verbose:
output(cmds,outputLocals,'Stdout')

View File

@ -1,167 +0,0 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
import sys,os,re
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# -----------------------------
def ParseOutputFormat(filename,what,me):
format = {'outputs':{},'specials':{'brothers':[]}}
outputmetafile = filename+'.output'+what
try:
myFile = open(outputmetafile)
except:
print('Could not open file %s'%outputmetafile)
raise
else:
content = myFile.readlines()
myFile.close()
tag = ''
tagID = 0
for line in content:
if re.match("\s*$",line) or re.match("#",line): # skip blank lines and comments
continue
m = re.match("\[(.+)\]",line) # look for block indicator
if m: # next section
tag = m.group(1)
tagID += 1
format['specials']['brothers'].append(tag)
if tag == me or (me.isdigit() and tagID == int(me)):
format['specials']['_id'] = tagID
format['outputs'] = []
tag = me
else: # data from section
if tag == me:
(output,length) = line.split()
output.lower()
if length.isdigit():
length = int(length)
if re.match("\((.+)\)",output): # special data, (e.g. (Ngrains)
format['specials'][output] = length
elif length > 0:
format['outputs'].append([output,length])
return format
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] Abaqus.Inputfile(s)', description = """
Transfer the output variables requested in the material.config to
properly labelled user-defined variables within the Abaqus input file (*.inp).
Requires the files
<modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
that are written during the first run of the model.
Specify which user block format you want to apply by stating the homogenization, crystallite, and phase identifiers.
Or have an existing set of user variables copied over from another *.inp file.
""", version = scriptID)
parser.add_option('-m', dest='number', type='int', metavar = 'int',
help='maximum requested User Defined Variable [%default]')
parser.add_option('--homogenization', dest='homog', metavar = 'string',
help='homogenization name or index [%default]')
parser.add_option('--crystallite', dest='cryst', metavar = 'string',
help='crystallite identifier name or index [%default]')
parser.add_option('--phase', dest='phase', metavar = 'string',
help='phase identifier name or index [%default]')
parser.add_option('--use', dest='useFile', metavar = 'string',
help='optionally parse output descriptors from '+
'outputXXX files of given name')
parser.add_option('--option', dest='damaskOption', metavar = 'string',
help='Add DAMASK option to input file, e.g. "periodic x z"')
parser.set_defaults(number = 0,
homog = '1',
cryst = '1',
phase = '1')
(options, files) = parser.parse_args()
if not files:
parser.error('no file(s) specified.')
me = { 'Homogenization': options.homog,
'Crystallite': options.cryst,
'Constitutive': options.phase,
}
for myFile in files:
damask.util.report(scriptName,myFile)
if options.useFile is not None:
formatFile = os.path.splitext(options.useFile)[0]
else:
formatFile = os.path.splitext(myFile)[0]
myFile = os.path.splitext(myFile)[0]+'.inp'
if not os.path.lexists(myFile):
print('{} not found'.format(myFile))
continue
print('Scanning format files of: {}'.format(formatFile))
if options.number < 1:
outputFormat = {}
for what in me:
outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
if '_id' not in outputFormat[what]['specials']:
print("'{}' not found in <{}>".format(me[what],what))
print('\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers'])))
sys.exit(1)
UserVars = ['HomogenizationCount']
for var in outputFormat['Homogenization']['outputs']:
if var[1] > 1:
UserVars += ['%i_%s'%(i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%s'%(var[0]) for i in range(var[1])]
UserVars += ['GrainCount']
for grain in range(outputFormat['Homogenization']['specials']['(ngrains)']):
UserVars += ['%i_CrystalliteCount'%(grain+1)]
for var in outputFormat['Crystallite']['outputs']:
if var[1] > 1:
UserVars += ['%i_%i_%s'%(grain+1,i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]
UserVars += ['%i_ConstitutiveCount'%(grain+1)]
for var in outputFormat['Constitutive']['outputs']:
if var[1] > 1:
UserVars += ['%i_%i_%s'%(grain+1,i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]
# Now change *.inp file(s)
print('Adding labels to: {}'.format(myFile))
inFile = open(myFile)
input = inFile.readlines()
inFile.close()
output = open(myFile,'w')
thisSection = ''
if options.damaskOption is not None:
output.write('$damask {0}\n'.format(options.damaskOption))
for line in input:
#Abaqus keyword line begins with: *keyword, argument1, ...
m = re.match('([*]\w+)\s',line)
if m:
lastSection = thisSection
thisSection = m.group(1)
if (lastSection.upper() == '*DEPVAR' and thisSection.upper() == '*USER'): # Abaqus keyword can be upper or lower case
if options.number > 0:
output.write('{}\n'.format(options.number)) # Abaqus needs total number of SDVs in the line after *Depvar keyword
else:
output.write('{}\n'.format(len(UserVars)))
for i in range(len(UserVars)):
output.write('%i,"%i%s","%i%s"\n'%(i+1,0,UserVars[i],0,UserVars[i])) #index,output variable key,output variable description
if (thisSection.upper() != '*DEPVAR' or not re.match('\s*\d',line)):
output.write(line)
output.close()

View File

@ -1,164 +0,0 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import sys,os,re
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# -----------------------------
def ParseOutputFormat(filename,what,me):
format = {'outputs':{},'specials':{'brothers':[]}}
outputmetafile = filename+'.output'+what
try:
myFile = open(outputmetafile)
except:
print('Could not open file %s'%outputmetafile)
raise
else:
content = myFile.readlines()
myFile.close()
tag = ''
tagID = 0
for line in content:
if re.match("\s*$",line) or re.match("#",line): # skip blank lines and comments
continue
m = re.match("\[(.+)\]",line) # look for block indicator
if m: # next section
tag = m.group(1)
tagID += 1
format['specials']['brothers'].append(tag)
if tag == me or (me.isdigit() and tagID == int(me)):
format['specials']['_id'] = tagID
format['outputs'] = []
tag = me
else: # data from section
if tag == me:
(output,length) = line.split()
output.lower()
if length.isdigit():
length = int(length)
if re.match("\((.+)\)",output): # special data, (e.g. (Ngrains)
format['specials'][output] = length
elif length > 0:
format['outputs'].append([output,length])
return format
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] Marc.Inputfile(s)', description = """
Transfer the output variables requested in the material.config to
properly labelled user-defined variables within the Marc input file (*.dat).
Requires the files
<modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
that are written during the first run of the model.
Specify which user block format you want to apply by stating the homogenization, crystallite, and phase identifiers.
Or have an existing set of user variables copied over from another *.dat file.
""", version = scriptID)
parser.add_option('-m', dest='number', type='int', metavar = 'int',
help='maximum requested User Defined Variable [%default]')
parser.add_option('--homogenization', dest='homog', metavar = 'string',
help='homogenization name or index [%default]')
parser.add_option('--crystallite', dest='cryst', metavar = 'string',
help='crystallite identifier name or index [%default]')
parser.add_option('--phase', dest='phase', metavar = 'string',
help='phase identifier name or index [%default]')
parser.add_option('--use', dest='useFile', metavar = 'string',
help='optionally parse output descriptors from '+
'outputXXX files of given name')
parser.add_option('--option', dest='damaskOption', metavar = 'string',
help='Add DAMASK option to input file, e.g. "periodic x z"')
parser.set_defaults(number = 0,
homog = '1',
cryst = '1',
phase = '1')
(options, files) = parser.parse_args()
if not files:
parser.error('no file(s) specified.')
me = { 'Homogenization': options.homog,
'Crystallite': options.cryst,
'Constitutive': options.phase,
}
for myFile in files:
damask.util.report(scriptName,myFile)
if options.useFile is not None:
formatFile = os.path.splitext(options.useFile)[0]
else:
formatFile = os.path.splitext(myFile)[0]
myFile = os.path.splitext(myFile)[0]+'.dat'
if not os.path.lexists(myFile):
print('{} not found'.format(myFile))
continue
print('Scanning format files of: {}'.format(formatFile))
if options.number < 1:
outputFormat = {}
for what in me:
outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
if '_id' not in outputFormat[what]['specials']:
print("'{}' not found in <{}>".format(me[what],what))
print('\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers'])))
sys.exit(1)
UserVars = ['HomogenizationCount']
for var in outputFormat['Homogenization']['outputs']:
if var[1] > 1:
UserVars += ['%i_%s'%(i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%s'%(var[0]) for i in range(var[1])]
UserVars += ['GrainCount']
for grain in range(outputFormat['Homogenization']['specials']['(ngrains)']):
UserVars += ['%i_CrystalliteCount'%(grain+1)]
for var in outputFormat['Crystallite']['outputs']:
if var[1] > 1:
UserVars += ['%i_%i_%s'%(grain+1,i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]
UserVars += ['%i_ConstitutiveCount'%(grain+1)]
for var in outputFormat['Constitutive']['outputs']:
if var[1] > 1:
UserVars += ['%i_%i_%s'%(grain+1,i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]
# Now change *.dat file(s)
print('Adding labels to: {}'.format(myFile))
inFile = open(myFile)
input = inFile.readlines()
inFile.close()
output = open(myFile,'w')
thisSection = ''
if options.damaskOption is not None:
output.write('$damask {0}\n'.format(options.damaskOption))
for line in input:
m = re.match('(\w+)\s',line)
if m:
lastSection = thisSection
thisSection = m.group(1)
if (lastSection == 'post' and thisSection == 'parameters'):
if options.number > 0:
for i in range(options.number):
output.write('%10i%10i\n'%(-i-1,0))
else:
for i in range(len(UserVars)):
output.write('%10i%10i%s\n'%(-i-1,0,UserVars[i]))
if (thisSection != 'post' or not re.match('\s*\-',line)):
output.write(line)
output.close()

View File

@ -6,6 +6,7 @@ with open(os.path.join(os.path.dirname(__file__),'VERSION')) as f:
name = 'damask' name = 'damask'
# classes
from .environment import Environment # noqa from .environment import Environment # noqa
from .asciitable import ASCIItable # noqa from .asciitable import ASCIItable # noqa
@ -14,8 +15,11 @@ from .colormaps import Colormap, Color # noqa
from .orientation import Symmetry, Lattice, Rotation, Orientation # noqa from .orientation import Symmetry, Lattice, Rotation, Orientation # noqa
from .dadf5 import DADF5 # noqa from .dadf5 import DADF5 # noqa
#from .block import Block # only one class
from .geom import Geom # noqa from .geom import Geom # noqa
from .solver import Solver # noqa from .solver import Solver # noqa
from .test import Test # noqa from .test import Test # noqa
from .util import extendableOption # noqa from .util import extendableOption # noqa
# functions in modules
from . import mechanics # noqa

View File

@ -7,6 +7,7 @@ import numpy as np
from . import util from . import util
from . import version from . import version
from . import mechanics
# ------------------------------------------------------------------ # ------------------------------------------------------------------
class DADF5(): class DADF5():
@ -90,7 +91,7 @@ class DADF5():
valid = [e for e_ in [glob.fnmatch.filter(getattr(self,what),s) for s in choice] for e in e_] valid = [e for e_ in [glob.fnmatch.filter(getattr(self,what),s) for s in choice] for e in e_]
existing = set(self.visible[what]) existing = set(self.visible[what])
if action == 'set': if action == 'set':
self.visible[what] = valid self.visible[what] = valid
elif action == 'add': elif action == 'add':
@ -102,21 +103,21 @@ class DADF5():
def __time_to_inc(self,start,end): def __time_to_inc(self,start,end):
selected = [] selected = []
for i,time in enumerate(self.times): for i,time in enumerate(self.times):
if start <= time < end: if start <= time <= end:
selected.append(self.increments[i]) selected.append(self.increments[i])
return selected return selected
def set_by_time(self,start,end): def set_by_time(self,start,end):
""" """
Sets active time increments based on start and end time. Set active increments based on start and end time.
Parameters Parameters
---------- ----------
start : float start : float
start time (included) start time (included)
end : float end : float
end time (exclcuded) end time (included)
""" """
self.__manage_visible(self.__time_to_inc(start,end),'increments','set') self.__manage_visible(self.__time_to_inc(start,end),'increments','set')
@ -124,14 +125,14 @@ class DADF5():
def add_by_time(self,start,end): def add_by_time(self,start,end):
""" """
Adds to active time increments based on start and end time. Add to active increments based on start and end time.
Parameters Parameters
---------- ----------
start : float start : float
start time (included) start time (included)
end : float end : float
end time (exclcuded) end time (included)
""" """
self.__manage_visible(self.__time_to_inc(start,end),'increments','add') self.__manage_visible(self.__time_to_inc(start,end),'increments','add')
@ -139,22 +140,67 @@ class DADF5():
def del_by_time(self,start,end): def del_by_time(self,start,end):
""" """
Delets from active time increments based on start and end time. Delete from active increments based on start and end time.
Parameters Parameters
---------- ----------
start : float start : float
start time (included) start time (included)
end : float end : float
end time (exclcuded) end time (included)
""" """
self.__manage_visible(self.__time_to_inc(start,end),'increments','del') self.__manage_visible(self.__time_to_inc(start,end),'increments','del')
def set_by_increment(self,start,end):
"""
Set active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','set')
def add_by_increment(self,start,end):
"""
Add to active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','add')
def del_by_increment(self,start,end):
"""
Delete from active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','del')
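For orientation, a minimal usage sketch of the new increment selection methods (not part of the diff itself); the result file name is hypothetical and the damask package is assumed to be importable:

import damask

results = damask.DADF5('geom_load.hdf5')   # hypothetical DADF5 result file
results.set_by_increment(0,10)             # make increments 0..10 (both included) visible
results.del_by_increment(5,5)              # drop increment 5 again
results.set_by_time(0.0,1.0)               # alternatively select by time; the end time is now included, too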
def iter_visible(self,what): def iter_visible(self,what):
""" """
Iterates over visible items by setting each one visible. Iterate over visible items by setting each one visible.
Parameters Parameters
---------- ----------
@ -176,7 +222,7 @@ class DADF5():
def set_visible(self,what,datasets): def set_visible(self,what,datasets):
""" """
Sets active groups. Set active groups.
Parameters Parameters
---------- ----------
@ -192,7 +238,7 @@ class DADF5():
def add_visible(self,what,datasets): def add_visible(self,what,datasets):
""" """
Adds to active groups. Add to active groups.
Parameters Parameters
---------- ----------
@ -208,7 +254,7 @@ class DADF5():
def del_visible(self,what,datasets): def del_visible(self,what,datasets):
""" """
Removes from active groupse. Delete from active groups.
Parameters Parameters
---------- ----------
@ -267,11 +313,11 @@ class DADF5():
def list_data(self): def list_data(self):
"""Gives information on all active datasets in the file.""" """Return information on all active datasets in the file."""
message = '' message = ''
with h5py.File(self.filename,'r') as f: with h5py.File(self.filename,'r') as f:
for i in self.iter_visible('increments'): for s,i in enumerate(self.iter_visible('increments')):
message+='\n{}\n'.format(i) message+='\n{} ({}s)\n'.format(i,self.times[s])
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']): for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o): for oo in self.iter_visible(o):
message+=' {}\n'.format(oo) message+=' {}\n'.format(oo)
@ -280,14 +326,15 @@ class DADF5():
group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue
for d in f[group].keys(): for d in f[group].keys():
try: try:
message+=' {} ({})\n'.format(d,f['/'.join([group,d])].attrs['Description'].decode()) dataset = f['/'.join([group,d])]
message+=' {} / ({}): {}\n'.format(d,dataset.attrs['Unit'].decode(),dataset.attrs['Description'].decode())
except KeyError: except KeyError:
pass pass
return message return message
def get_dataset_location(self,label): def get_dataset_location(self,label):
"""Returns the location of all active datasets with given label.""" """Return the location of all active datasets with given label."""
path = [] path = []
with h5py.File(self.filename,'r') as f: with h5py.File(self.filename,'r') as f:
for i in self.iter_visible('increments'): for i in self.iter_visible('increments'):
@ -326,7 +373,7 @@ class DADF5():
""" """
Dataset for all points/cells. Dataset for all points/cells.
If more than one path is given, the dataset is composed of the individual contributions If more than one path is given, the dataset is composed of the individual contributions.
""" """
with h5py.File(self.filename,'r') as f: with h5py.File(self.filename,'r') as f:
shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:] shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:]
@ -341,7 +388,7 @@ class DADF5():
p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0] p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0]
if len(p)>0: if len(p)>0:
u = (f['mapping/cellResults/constituent'][p,c]['Position']) u = (f['mapping/cellResults/constituent']['Position'][p,c])
a = np.array(f[pa]) a = np.array(f[pa])
if len(a.shape) == 1: if len(a.shape) == 1:
a=a.reshape([a.shape[0],1]) a=a.reshape([a.shape[0],1])
@ -349,7 +396,7 @@ class DADF5():
p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0] p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0]
if len(p)>0: if len(p)>0:
u = (f['mapping/cellResults/materialpoint'][p.tolist()]['Position']) u = (f['mapping/cellResults/materialpoint']['Position'][p.tolist()])
a = np.array(f[pa]) a = np.array(f[pa])
if len(a.shape) == 1: if len(a.shape) == 1:
a=a.reshape([a.shape[0],1]) a=a.reshape([a.shape[0],1])
@ -359,7 +406,7 @@ class DADF5():
def cell_coordinates(self): def cell_coordinates(self):
"""Initial coordinates of the cell centers.""" """Return initial coordinates of the cell centers."""
if self.structured: if self.structured:
delta = self.size/self.grid*0.5 delta = self.size/self.grid*0.5
z, y, x = np.meshgrid(np.linspace(delta[2],self.size[2]-delta[2],self.grid[2]), z, y, x = np.meshgrid(np.linspace(delta[2],self.size[2]-delta[2],self.grid[2]),
@ -374,70 +421,77 @@ class DADF5():
def add_Cauchy(self,P='P',F='F'): def add_Cauchy(self,P='P',F='F'):
""" """
Adds Cauchy stress calculated from 1st Piola-Kirchhoff stress and deformation gradient. Add Cauchy stress calculated from 1. Piola-Kirchhoff stress and deformation gradient.
Resulting tensor is symmetrized as the Cauchy stress should be symmetric. Parameters
----------
P : str, optional
Label of the dataset containing the 1. Piola-Kirchhoff stress. Default value is P.
F : str, optional
Label of the dataset containing the deformation gradient. Default value is F.
""" """
def Cauchy(F,P): def __add_Cauchy(F,P):
sigma = np.einsum('i,ijk,ilk->ijl',1.0/np.linalg.det(F['data']),P['data'],F['data'])
sigma = (sigma + np.transpose(sigma,(0,2,1)))*0.5 # enforce symmetry return {
return { 'data': mechanics.Cauchy(F['data'],P['data']),
'data' : sigma, 'label': 'sigma',
'label' : 'sigma', 'meta': {
'meta' : { 'Unit': P['meta']['Unit'],
'Unit' : P['meta']['Unit'], 'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],P['meta']['Description'])+\
'Description' : 'Cauchy stress calculated from {} ({}) '.format(P['label'],P['meta']['Description'])+\ 'and deformation gradient {} ({})'.format(F['label'],F['meta']['Description']),
'and deformation gradient {} ({})'.format(F['label'],F['meta']['Description']), 'Creator': 'dadf5.py:add_Cauchy v{}'.format(version)
'Creator' : 'dadf5.py:add_Cauchy v{}'.format(version)
} }
} }
requested = [{'label':F,'arg':'F'}, requested = [{'label':F,'arg':'F'},
{'label':P,'arg':'P'} ] {'label':P,'arg':'P'} ]
self.__add_generic_pointwise(Cauchy,requested) self.__add_generic_pointwise(__add_Cauchy,requested)
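A short usage sketch of the refactored method, which now delegates the tensor algebra to damask.mechanics.Cauchy; the file name is hypothetical:

import damask

results = damask.DADF5('geom_load.hdf5')   # hypothetical DADF5 result file
results.add_Cauchy()                       # reads datasets 'P' and 'F', stores the result as 'sigma'
for path in results.get_dataset_location('sigma'):
    print(path)                            # one location per visible increment/constituent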
def add_Mises(self,x): def add_Mises(self,x):
"""Adds the equivalent Mises stress or strain of a tensor.""" """
def Mises(x): Add the equivalent Mises stress or strain of a symmetric tensor.
if x['meta']['Unit'] == b'Pa': #ToDo: Should we use this? Then add_Cauchy and add_strain_tensors also should perform sanity checks Parameters
factor = 3.0/2.0 ----------
t = 'stress' x : str
elif x['meta']['Unit'] == b'1': Label of the dataset containing a symmetric stress or strain tensor.
factor = 2.0/3.0
t = 'strain'
else:
print(x['meta']['Unit'])
raise ValueError
d = x['data']
dev = d - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[d.shape[0],3,3]),np.trace(d,axis1=1,axis2=2)/3.0)
#dev_sym = (dev + np.einsum('ikj',dev))*0.5 # ToDo: this is not needed (only if the input is not symmetric, but then the whole concept breaks down)
return { """
'data' : np.sqrt(np.einsum('ijk->i',dev**2)*factor), def __add_Mises(x):
'label' : '{}_vM'.format(x['label']),
'meta' : { t = 'strain' if x['meta']['Unit'] == '1' else \
'Unit' : x['meta']['Unit'], 'stress'
'Description' : 'Mises equivalent {} of {} ({})'.format(t,x['label'],x['meta']['Description']), return {
'Creator' : 'dadf5.py:add_Mises_stress v{}'.format(version) 'data': mechanics.Mises_strain(x['data']) if t=='strain' else mechanics.Mises_stress(x['data']),
'label': '{}_vM'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Mises equivalent {} of {} ({})'.format(t,x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_Mises v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(Mises,requested) self.__add_generic_pointwise(__add_Mises,requested)
def add_norm(self,x,ord=None): def add_norm(self,x,ord=None):
""" """
Adds norm of vector or tensor. Add the norm of vector or tensor.
See numpy.linalg.norm manual for details. Parameters
----------
x : str
Label of the dataset containing a vector or tensor.
ord : {non-zero int, inf, -inf, fro, nuc}, optional
Order of the norm. inf means numpy's inf object. For details refer to numpy.linalg.norm.
""" """
def norm(x,ord): def __add_norm(x,ord):
o = ord o = ord
if len(x['data'].shape) == 2: if len(x['data'].shape) == 2:
@ -451,181 +505,266 @@ class DADF5():
else: else:
raise ValueError raise ValueError
return { return {
'data' : np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True), 'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
'label' : '|{}|_{}'.format(x['label'],o), 'label': '|{}|_{}'.format(x['label'],o),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': x['meta']['Unit'],
'Description' : '{}-Norm of {} {} ({})'.format(ord,t,x['label'],x['meta']['Description']), 'Description': '{}-Norm of {} {} ({})'.format(ord,t,x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_norm v{}'.format(version) 'Creator': 'dadf5.py:add_norm v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(norm,requested,{'ord':ord}) self.__add_generic_pointwise(__add_norm,requested,{'ord':ord})
def add_absolute(self,x): def add_absolute(self,x):
"""Adds absolute value.""" """
def absolute(x): Add absolute value.
return { Parameters
'data' : np.abs(x['data']), ----------
'label' : '|{}|'.format(x['label']), x : str
'meta' : { Label of the dataset containing a scalar, vector, or tensor.
'Unit' : x['meta']['Unit'],
'Description' : 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']), """
'Creator' : 'dadf5.py:add_abs v{}'.format(version) def __add_absolute(x):
return {
'data': np.abs(x['data']),
'label': '|{}|'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_abs v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(absolute,requested) self.__add_generic_pointwise(__add_absolute,requested)
def add_determinant(self,x): def add_determinant(self,x):
"""Adds the determinant component of a tensor.""" """
def determinant(x): Add the determinant of a tensor.
Parameters
----------
x : str
Label of the dataset containing a tensor.
"""
def __add_determinant(x):
return { return {
'data' : np.linalg.det(x['data']), 'data': np.linalg.det(x['data']),
'label' : 'det({})'.format(x['label']), 'label': 'det({})'.format(x['label']),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': x['meta']['Unit'],
'Description' : 'Determinant of tensor {} ({})'.format(x['label'],x['meta']['Description']), 'Description': 'Determinant of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_determinant v{}'.format(version) 'Creator': 'dadf5.py:add_determinant v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(determinant,requested) self.__add_generic_pointwise(__add_determinant,requested)
def add_spherical(self,x): def add_spherical(self,x):
"""Adds the spherical component of a tensor.""" """
def spherical(x): Add the spherical (hydrostatic) part of a tensor.
Parameters
----------
x : str
Label of the dataset containing a tensor.
"""
def __add_spherical(x):
if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])): if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])):
raise ValueError raise ValueError
return { return {
'data' : np.trace(x['data'],axis1=1,axis2=2)/3.0, 'data': mechanics.spherical_part(x['data']),
'label' : 'sph({})'.format(x['label']), 'label': 'p_{}'.format(x['label']),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': x['meta']['Unit'],
'Description' : 'Spherical component of tensor {} ({})'.format(x['label'],x['meta']['Description']), 'Description': 'Spherical component of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_spherical v{}'.format(version) 'Creator': 'dadf5.py:add_spherical v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(spherical,requested) self.__add_generic_pointwise(__add_spherical,requested)
def add_deviator(self,x): def add_deviator(self,x):
"""Adds the deviator of a tensor.""" """
def deviator(x): Add the deviatoric part of a tensor.
d = x['data']
Parameters
----------
x : str
Label of the dataset containing a tensor.
"""
def __add_deviator(x):
if not np.all(np.array(d.shape[1:]) == np.array([3,3])): if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])):
raise ValueError raise ValueError
return { return {
'data' : d - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[d.shape[0],3,3]),np.trace(d,axis1=1,axis2=2)/3.0), 'data': mechanics.deviator(x['data']),
'label' : 'dev({})'.format(x['label']), 'label': 's_{}'.format(x['label']),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': x['meta']['Unit'],
'Description' : 'Deviator of tensor {} ({})'.format(x['label'],x['meta']['Description']), 'Description': 'Deviator of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_deviator v{}'.format(version) 'Creator': 'dadf5.py:add_deviator v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(deviator,requested) self.__add_generic_pointwise(__add_deviator,requested)
def add_calculation(self,formula,label,unit='n/a',description=None,vectorized=True): def add_calculation(self,formula,label,unit='n/a',description=None,vectorized=True):
""" """
General formula. Add result of a general formula.
Works currently only for vectorized expressions
Parameters
----------
formula : str
Formula, refer to datasets by #Label#.
label : str
Label of the dataset containing the result of the calculation.
unit : str, optional
Physical unit of the result.
description : str, optional
Human readable description of the result.
vectorized : bool, optional
Indicate whether the formula is written in vectorized form. Default is True.
""" """
if vectorized is not True: if vectorized is not True:
raise NotImplementedError raise NotImplementedError
def calculation(**kwargs): def __add_calculation(**kwargs):
formula = kwargs['formula'] formula = kwargs['formula']
for d in re.findall(r'#(.*?)#',formula): for d in re.findall(r'#(.*?)#',formula):
formula = re.sub('#{}#'.format(d),"kwargs['{}']['data']".format(d),formula) formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
return { return {
'data' : eval(formula), 'data': eval(formula),
'label' : kwargs['label'], 'label': kwargs['label'],
'meta' : { 'meta': {
'Unit' : kwargs['unit'], 'Unit': kwargs['unit'],
'Description' : '{} (formula: {})'.format(kwargs['description'],kwargs['formula']), 'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
'Creator' : 'dadf5.py:add_calculation v{}'.format(version) 'Creator': 'dadf5.py:add_calculation v{}'.format(version)
} }
} }
requested = [{'label':d,'arg':d} for d in re.findall(r'#(.*?)#',formula)] # datasets used in the formula requested = [{'label':d,'arg':d} for d in set(re.findall(r'#(.*?)#',formula))] # datasets used in the formula
pass_through = {'formula':formula,'label':label,'unit':unit,'description':description} pass_through = {'formula':formula,'label':label,'unit':unit,'description':description}
self.__add_generic_pointwise(calculation,requested,pass_through) self.__add_generic_pointwise(__add_calculation,requested,pass_through)
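A sketch of the #label# substitution mechanism, assuming the file already contains a dataset 'sigma' (e.g. from add_Cauchy above); names and values are illustrative only:

import damask

results = damask.DADF5('geom_load.hdf5')                    # hypothetical DADF5 result file
results.add_calculation(formula='#sigma#*0.5',              # every #name# is replaced by that dataset before eval()
                        label='half_sigma',
                        unit='Pa',
                        description='half of the Cauchy stress')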
def add_strain_tensor(self,t,ord,defgrad='F'): def add_strain_tensor(self,F='F',t='U',m=0):
""" """
Adds the a strain tensor. Add strain tensor calculated from a deformation gradient.
Albrecht Bertram: Elasticity and Plasticity of Large Deformations An Introduction (3rd Edition, 2012), p. 102. For details refer to damask.mechanics.strain_tensor
"""
def strain_tensor(defgrad,t,ord): Parameters
----------
operator = { F : str, optional
'V#ln': lambda V: np.log(V), Label of the dataset containing the deformation gradient. Default value is F.
'U#ln': lambda U: np.log(U), t : {V, U}, optional
'V#Biot': lambda V: np.broadcast_to(np.ones(3),[V.shape[0],3]) - 1.0/V, Type of the polar decomposition, V for left stretch tensor and U for right stretch tensor.
'U#Biot': lambda U: U - np.broadcast_to(np.ones(3),[U.shape[0],3]), Default value is U.
'V#Green':lambda V: np.broadcast_to(np.ones(3),[V.shape[0],3]) - 1.0/V**2, m : float, optional
'U#Green':lambda U: U**2 - np.broadcast_to(np.ones(3),[U.shape[0],3]), Order of the strain calculation. Default value is 0.0.
}
if t.lower() in ['l','left']:
stretch = 'V'
elif t.lower() in ['r','right']:
stretch = 'U'
else:
raise KeyError
(U,S,Vh) = np.linalg.svd(defgrad['data']) # singular value decomposition """
R_inv = np.transpose(np.matmul(U,Vh),(0,2,1)) # transposed rotation of polar decomposition def __add_strain_tensor(F,t,m):
s = np.matmul(R_inv,defgrad['data']) if stretch == 'U' else \
np.matmul(defgrad['data'],R_inv) # compute either left or right stretch
(D,V) = np.linalg.eigh((s+np.transpose(s,(0,2,1)))*.5) # eigen decomposition (of symmetric(ed) matrix)
d = operator[stretch+'#'+{0:'ln',1:'Biot',2:'Green'}[ord]](D) return {
a = np.matmul(V,np.einsum('ij,ikj->ijk',d,V)) 'data': mechanics.strain_tensor(F['data'],t,m),
'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
return { 'meta': {
'data' : a, 'Unit': F['meta']['Unit'],
'label' : 'epsilon_{}^{}({})'.format(stretch,ord,defgrad['label']), 'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
'meta' : { 'Creator': 'dadf5.py:add_strain_tensor v{}'.format(version)
'Unit' : defgrad['meta']['Unit'],
'Description' : 'Strain tensor of {} ({})'.format(defgrad['label'],defgrad['meta']['Description']),
'Creator' : 'dadf5.py:add_strain_tensor v{}'.format(version)
} }
} }
requested = [{'label':defgrad,'arg':'defgrad'}] requested = [{'label':F,'arg':'F'}]
self.__add_generic_pointwise(strain_tensor,requested,{'t':t,'ord':ord}) self.__add_generic_pointwise(__add_strain_tensor,requested,{'t':t,'m':m})
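The signature change from (t,ord,defgrad) to (F,t,m) can be sketched as follows (file name hypothetical); the result is stored under a label of the form 'epsilon_<t>^<m>(<F>)':

import damask

results = damask.DADF5('geom_load.hdf5')   # hypothetical DADF5 result file
results.add_strain_tensor()                # defaults: F='F', t='U', m=0, i.e. logarithmic strain ln(U)
results.add_strain_tensor('F','V',m=1.0)   # order-1 Seth-Hill strain of the left stretch tensor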
def add_principal_components(self,x):
"""
Add principal components of symmetric tensor.
The principal components are sorted in descending order, each repeated according to its multiplicity.
Parameters
----------
x : str
Label of the dataset containing a symmetric tensor.
"""
def __add_principal_components(x):
return {
'data': mechanics.principal_components(x['data']),
'label': 'lambda_{}'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Principal components of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_principal_components v{}'.format(version)
}
}
requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(__add_principal_components,requested)
def add_maximum_shear(self,x):
"""
Add maximum shear components of symmetric tensor.
Parameters
----------
x : str
Label of the dataset containing a symmetric tensor.
"""
def __add_maximum_shear(x):
return {
'data': mechanics.maximum_shear(x['data']),
'label': 'max_shear({})'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Maximum shear component of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_maximum_shear v{}'.format(version)
}
}
requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(__add_maximum_shear,requested)
def __add_generic_pointwise(self,func,datasets_requested,extra_args={}): def __add_generic_pointwise(self,func,datasets_requested,extra_args={}):
@ -634,12 +773,12 @@ class DADF5():
Parameters Parameters
---------- ----------
func : function func : function
Function that calculates a new dataset from one or more datasets per HDF5 group. Function that calculates a new dataset from one or more datasets per HDF5 group.
datasets_requested : list of dictionaries datasets_requested : list of dictionaries
Details of the datasets to be used: label (in HDF5 file) and arg (argument to which the data is parsed in func). Details of the datasets to be used: label (in HDF5 file) and arg (argument to which the data is parsed in func).
extra_args : dictionary, optional extra_args : dictionary, optional
Any extra arguments parsed to func. Any extra arguments parsed to func.
""" """
def job(args): def job(args):
@ -661,7 +800,7 @@ class DADF5():
for d in datasets_requested: for d in datasets_requested:
loc = f[group+'/'+d['label']] loc = f[group+'/'+d['label']]
data = loc[()] data = loc[()]
meta = {k:loc.attrs[k] for k in loc.attrs.keys()} meta = {k:loc.attrs[k].decode() for k in loc.attrs.keys()}
datasets_in[d['arg']] = {'data': data, 'meta' : meta, 'label' : d['label']} datasets_in[d['arg']] = {'data': data, 'meta' : meta, 'label' : d['label']}
todo.append({'in':{**datasets_in,**extra_args},'func':func,'group':group,'results':results}) todo.append({'in':{**datasets_in,**extra_args},'func':func,'group':group,'results':results})
@ -674,7 +813,7 @@ class DADF5():
with h5py.File(self.filename,'a') as f: # write to file with h5py.File(self.filename,'a') as f: # write to file
dataset_out = f[result['group']].create_dataset(result['label'],data=result['data']) dataset_out = f[result['group']].create_dataset(result['label'],data=result['data'])
for k in result['meta'].keys(): for k in result['meta'].keys():
dataset_out.attrs[k] = result['meta'][k] dataset_out.attrs[k] = result['meta'][k].encode()
N_not_calculated-=1 N_not_calculated-=1
if N_added < len(todo): # add more jobs if N_added < len(todo): # add more jobs

python/damask/mechanics.py (new file, 247 lines)
View File

@ -0,0 +1,247 @@
import numpy as np
def Cauchy(F,P):
"""
Return Cauchy stress calculated from 1. Piola-Kirchhoff stress and deformation gradient.
Resulting tensor is symmetrized as the Cauchy stress needs to be symmetric.
Parameters
----------
F : numpy.array of shape (:,3,3) or (3,3)
Deformation gradient.
P : numpy.array of shape (:,3,3) or (3,3)
1. Piola-Kirchhoff stress.
"""
if np.shape(F) == np.shape(P) == (3,3):
sigma = 1.0/np.linalg.det(F) * np.dot(P,F.T)
else:
sigma = np.einsum('i,ijk,ilk->ijl',1.0/np.linalg.det(F),P,F)
return symmetric(sigma)
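As a quick plausibility check (a sketch, not part of the module), the push-forward sigma = det(F)^-1 P F^T reduces to the symmetrized P for an identity deformation gradient:

import numpy as np
from damask import mechanics

P = np.random.rand(3,3)
sigma = mechanics.Cauchy(np.eye(3),P)
assert np.allclose(sigma,(P+P.T)*0.5)      # det(F)=1 and F^T=I, only the symmetrization remains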
def strain_tensor(F,t,m):
"""
Return strain tensor calculated from deformation gradient.
For details refer to https://en.wikipedia.org/wiki/Finite_strain_theory and
https://de.wikipedia.org/wiki/Verzerrungstensor
Parameters
----------
F : numpy.array of shape (:,3,3) or (3,3)
Deformation gradient.
t : {V, U}
Type of the polar decomposition, V for left stretch tensor and U for right stretch tensor.
m : float
Order of the strain.
"""
F_ = F.reshape((1,3,3)) if F.shape == (3,3) else F
if t == 'U':
C = np.matmul(transpose(F_),F_)            # right Cauchy-Green deformation tensor, eigensystem of U**2
w,n = np.linalg.eigh(C)
elif t == 'V':
B = np.matmul(F_,transpose(F_))            # left Cauchy-Green deformation tensor, eigensystem of V**2
w,n = np.linalg.eigh(B)
if m > 0.0:
eps = 1.0/(2.0*abs(m)) * (+ np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
- np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
elif m < 0.0:
eps = 1.0/(2.0*abs(m)) * (- np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
+ np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
else:
eps = np.matmul(n,np.einsum('ij,ikj->ijk',0.5*np.log(w),n))
return eps.reshape((3,3)) if np.shape(F) == (3,3) else \
eps
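strain_tensor hence implements the Seth-Hill family E_m = (U^(2m) - I)/(2m) (and the analogue built from V), with the logarithmic strain as the m = 0 limit. A minimal check, assuming the corrected identity-matrix broadcast above: an undeformed state gives zero strain for any order.

import numpy as np
from damask import mechanics

F = np.broadcast_to(np.eye(3),(5,3,3))                     # five undeformed material points
for m in (-1.0, 0.0, 1.0):
    assert np.allclose(mechanics.strain_tensor(F,'U',m),0.0)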
def deviatoric_part(x):
"""
Return deviatoric part of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the deviatoric part is computed.
"""
return x - np.eye(3)*spherical_part(x) if np.shape(x) == (3,3) else \
x - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[x.shape[0],3,3]),spherical_part(x))
def spherical_part(x):
"""
Return spherical (hydrostatic) part of a tensor.
A single scalar is returned, i.e. the hydrostatic part is not mapped onto the second-order identity tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the hydrostatic part is computed.
"""
return np.trace(x)/3.0 if np.shape(x) == (3,3) else \
np.trace(x,axis1=1,axis2=2)/3.0
def Mises_stress(sigma):
"""
Return the Mises equivalent of a stress tensor.
Parameters
----------
sigma : numpy.array of shape (:,3,3) or (3,3)
Symmetric stress tensor of which the von Mises equivalent is computed.
"""
s = deviatoric_part(sigma)
return np.sqrt(3.0/2.0*(np.sum(s**2.0))) if np.shape(sigma) == (3,3) else \
np.sqrt(3.0/2.0*np.einsum('ijk->i',s**2.0))
def Mises_strain(epsilon):
"""
Return the Mises equivalent of a strain tensor.
Parameters
----------
epsilon : numpy.array of shape (:,3,3) or (3,3)
Symmetric strain tensor of which the von Mises equivalent is computed.
"""
s = deviatoric_part(epsilon)
return np.sqrt(2.0/3.0*(np.sum(s**2.0))) if np.shape(epsilon) == (3,3) else \
np.sqrt(2.0/3.0*np.einsum('ijk->i',s**2.0))
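A small consistency check (illustrative only, values hypothetical): for a uniaxial stress state the von Mises equivalent equals the axial stress.

import numpy as np
from damask import mechanics

sigma = np.diag([100.0e6,0.0,0.0])                         # uniaxial stress of 100 MPa
assert np.isclose(mechanics.Mises_stress(sigma),100.0e6)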
def symmetric(x):
"""
Return the symmetrized tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the symmetrized values are computed.
"""
return (x+transpose(x))*0.5
def maximum_shear(x):
"""
Return the maximum shear component of a symmetric tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Symmetric tensor of which the maximum shear is computed.
"""
w = np.linalg.eigvalsh(symmetric(x)) # eigenvalues in ascending order
return (w[2] - w[0])*0.5 if np.shape(x) == (3,3) else \
(w[:,2] - w[:,0])*0.5
def principal_components(x):
"""
Return the principal components of a symmetric tensor.
The principal components (eigenvalues) are sorted in descending order, each repeated according to
its multiplicity.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Symmetric tensor of which the principal components are computed.
"""
w = np.linalg.eigvalsh(symmetric(x)) # eigenvalues in ascending order
return w[::-1] if np.shape(x) == (3,3) else \
w[:,::-1]
def transpose(x):
"""
Return the transpose of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the transpose is computed.
"""
return x.T if np.shape(x) == (3,3) else \
np.transpose(x,(0,2,1))
def rotational_part(x):
"""
Return the rotational part of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the rotational part is computed.
"""
return __polar_decomposition(x,'R')
def left_stretch(x):
"""
Return the left stretch of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the left stretch is computed.
"""
return __polar_decomposition(x,'V')
def right_stretch(x):
"""
Return the right stretch of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the right stretch is computed.
"""
return __polar_decomposition(x,'U')
def __polar_decomposition(x,requested):
"""
Polar decomposition of a tensor, computed via singular value decomposition.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the singular values are computed.
requested : list of str
Requested outputs: R for the rotation tensor,
V for left stretch tensor and U for right stretch tensor.
"""
u, s, vh = np.linalg.svd(x)
R = np.dot(u,vh) if np.shape(x) == (3,3) else \
np.einsum('ijk,ikl->ijl',u,vh)
output = []
if 'R' in requested:
output.append(R)
if 'V' in requested:
output.append(np.dot(x,R.T) if np.shape(x) == (3,3) else np.einsum('ijk,ilk->ijl',x,R))
if 'U' in requested:
output.append(np.dot(R.T,x) if np.shape(x) == (3,3) else np.einsum('ikj,ikl->ijl',R,x))
return output[0] if len(output) == 1 else tuple(output)   # a single requested part is returned as a plain array
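To illustrate the three public wrappers (a sketch, not part of the module; the deformation gradient is made up), the polar decomposition satisfies F = R U = V R:

import numpy as np
from damask import mechanics

F = np.eye(3) + 0.1*np.random.rand(3,3)                    # mildly distorted deformation gradient
R = mechanics.rotational_part(F)
U = mechanics.right_stretch(F)
V = mechanics.left_stretch(F)
assert np.allclose(F,R@U) and np.allclose(F,V@R)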

View File

@ -362,37 +362,3 @@ subroutine uedinc(inc,incsub)
call CPFEM_results(inc,cptim) call CPFEM_results(inc,cptim)
end subroutine uedinc end subroutine uedinc
!--------------------------------------------------------------------------------------------------
!> @brief sets user defined output variables for Marc
!> @details select a variable contour plotting (user subroutine).
!--------------------------------------------------------------------------------------------------
subroutine plotv(v,s,sp,etot,eplas,ecreep,t,m,nn,layer,ndi,nshear,jpltcd)
use prec
use mesh
use IO
use homogenization
implicit none
integer, intent(in) :: &
m, & !< element number
nn, & !< integration point number
layer, & !< layer number
ndi, & !< number of direct stress components
nshear, & !< number of shear stress components
jpltcd !< user variable index
real(pReal), dimension(*), intent(in) :: &
s, & !< stress array
sp, & !< stresses in preferred direction
etot, & !< total strain (generalized)
eplas, & !< total plastic strain
ecreep, & !< total creep strain
t !< current temperature
real(pReal), intent(out) :: &
v !< variable
if (jpltcd > materialpoint_sizeResults) call IO_error(700,jpltcd) ! complain about out of bounds error
v = materialpoint_results(jpltcd,nn,mesh_FEasCP('elem', m))
end subroutine plotv

View File

@ -712,11 +712,6 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
case (602) case (602)
msg = 'invalid selection for debug' msg = 'invalid selection for debug'
!-------------------------------------------------------------------------------------------------
! DAMASK_marc errors
case (700)
msg = 'invalid materialpoint result requested'
!------------------------------------------------------------------------------------------------- !-------------------------------------------------------------------------------------------------
! errors related to the grid solver ! errors related to the grid solver
case (809) case (809)

View File

@ -120,24 +120,24 @@ subroutine constitutive_init
thisSize => null() thisSize => null()
case (PLASTICITY_ISOTROPIC_ID) plasticityType case (PLASTICITY_ISOTROPIC_ID) plasticityType
outputName = PLASTICITY_ISOTROPIC_label outputName = PLASTICITY_ISOTROPIC_label
thisOutput => plastic_isotropic_output thisOutput => null()
thisSize => plastic_isotropic_sizePostResult thisSize => null()
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
outputName = PLASTICITY_PHENOPOWERLAW_label outputName = PLASTICITY_PHENOPOWERLAW_label
thisOutput => plastic_phenopowerlaw_output thisOutput => null()
thisSize => plastic_phenopowerlaw_sizePostResult thisSize => null()
case (PLASTICITY_KINEHARDENING_ID) plasticityType case (PLASTICITY_KINEHARDENING_ID) plasticityType
outputName = PLASTICITY_KINEHARDENING_label outputName = PLASTICITY_KINEHARDENING_label
thisOutput => plastic_kinehardening_output thisOutput => null()
thisSize => plastic_kinehardening_sizePostResult thisSize => null()
case (PLASTICITY_DISLOTWIN_ID) plasticityType case (PLASTICITY_DISLOTWIN_ID) plasticityType
outputName = PLASTICITY_DISLOTWIN_label outputName = PLASTICITY_DISLOTWIN_label
thisOutput => plastic_dislotwin_output thisOutput => null()
thisSize => plastic_dislotwin_sizePostResult thisSize => null()
case (PLASTICITY_DISLOUCLA_ID) plasticityType case (PLASTICITY_DISLOUCLA_ID) plasticityType
outputName = PLASTICITY_DISLOUCLA_label outputName = PLASTICITY_DISLOUCLA_label
thisOutput => plastic_disloucla_output thisOutput => null()
thisSize => plastic_disloucla_sizePostResult thisSize => null()
case (PLASTICITY_NONLOCAL_ID) plasticityType case (PLASTICITY_NONLOCAL_ID) plasticityType
outputName = PLASTICITY_NONLOCAL_label outputName = PLASTICITY_NONLOCAL_label
thisOutput => plastic_nonlocal_output thisOutput => plastic_nonlocal_output
@@ -148,7 +148,7 @@ subroutine constitutive_init
write(FILEUNIT,'(/,a,/)') '['//trim(config_name_phase(ph))//']' write(FILEUNIT,'(/,a,/)') '['//trim(config_name_phase(ph))//']'
if (knownPlasticity) then if (knownPlasticity) then
write(FILEUNIT,'(a)') '(plasticity)'//char(9)//trim(outputName) write(FILEUNIT,'(a)') '(plasticity)'//char(9)//trim(outputName)
if (phase_plasticity(ph) /= PLASTICITY_NONE_ID) then if (associated(thisOutput)) then
OutputPlasticityLoop: do o = 1,size(thisOutput(:,ins)) OutputPlasticityLoop: do o = 1,size(thisOutput(:,ins))
if(len_trim(thisOutput(o,ins)) > 0) & if(len_trim(thisOutput(o,ins)) > 0) &
write(FILEUNIT,'(a,i4)') trim(thisOutput(o,ins))//char(9),thisSize(o,ins) write(FILEUNIT,'(a,i4)') trim(thisOutput(o,ins))//char(9),thisSize(o,ins)
@@ -736,25 +736,6 @@ function constitutive_postResults(S, Fi, ipc, ip, el)
instance = phase_plasticityInstance(material_phaseAt(ipc,el)) instance = phase_plasticityInstance(material_phaseAt(ipc,el))
plasticityType: select case (phase_plasticity(material_phaseAt(ipc,el))) plasticityType: select case (phase_plasticity(material_phaseAt(ipc,el)))
case (PLASTICITY_ISOTROPIC_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_isotropic_postResults(Mp,instance,of)
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_phenopowerlaw_postResults(Mp,instance,of)
case (PLASTICITY_KINEHARDENING_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_kinehardening_postResults(Mp,instance,of)
case (PLASTICITY_DISLOTWIN_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_dislotwin_postResults(Mp,temperature(ho)%p(tme),instance,of)
case (PLASTICITY_DISLOUCLA_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_disloucla_postResults(Mp,temperature(ho)%p(tme),instance,of)
case (PLASTICITY_NONLOCAL_ID) plasticityType case (PLASTICITY_NONLOCAL_ID) plasticityType
constitutive_postResults(startPos:endPos) = & constitutive_postResults(startPos:endPos) = &
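The cases removed above all follow one pattern: each plasticity model fills its own startPos:endPos slice of a flat results vector. A schematic Python sketch of that packing (model names and result functions are hypothetical stand-ins, not the module's API):

# hypothetical per-model result functions standing in for the plastic_*_postResults calls
post_results = {
    'isotropic':     lambda state: [state['xi']],
    'phenopowerlaw': lambda state: list(state['xi_slip']),
}

def constitutive_post_results(model, state, buffer, start):
    # fill one model's slice of the flat results buffer, return the next free position
    chunk = post_results[model](state)
    end = start + len(chunk)
    buffer[start:end] = chunk            # startPos:endPos analogue
    return end

buf = [0.0] * 4
pos = constitutive_post_results('isotropic', {'xi': 31.0e6}, buf, 0)
pos = constitutive_post_results('phenopowerlaw', {'xi_slip': [1.0, 2.0, 3.0]}, buf, pos)
assert buf == [31.0e6, 1.0, 2.0, 3.0]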

View File

@@ -331,14 +331,10 @@ subroutine crystallite_init
do r = 1,size(config_crystallite) do r = 1,size(config_crystallite)
do o = 1,crystallite_Noutput(r) do o = 1,crystallite_Noutput(r)
select case(crystallite_outputID(o,r)) select case(crystallite_outputID(o,r))
case(phase_ID,texture_ID) case(orientation_ID)
mySize = 1
case(orientation_ID,grainrotation_ID)
mySize = 4 mySize = 4
case(defgrad_ID,fe_ID,fp_ID,fi_ID,lp_ID,li_ID,p_ID,s_ID) case(defgrad_ID,fp_ID,p_ID)
mySize = 9 mySize = 9
case(elasmatrix_ID)
mySize = 36
case(neighboringip_ID,neighboringelement_ID) case(neighboringip_ID,neighboringelement_ID)
mySize = nIPneighbors mySize = nIPneighbors
case default case default
@@ -883,7 +879,6 @@ function crystallite_postResults(ipc, ip, el)
crystID, & crystID, &
mySize, & mySize, &
n n
type(rotation) :: rot
crystID = microstructure_crystallite(discretization_microstructureAt(el)) crystID = microstructure_crystallite(discretization_microstructureAt(el))
@@ -894,22 +889,10 @@ function crystallite_postResults(ipc, ip, el)
do o = 1,crystallite_Noutput(crystID) do o = 1,crystallite_Noutput(crystID)
mySize = 0 mySize = 0
select case(crystallite_outputID(o,crystID)) select case(crystallite_outputID(o,crystID))
case (phase_ID)
mySize = 1
crystallite_postResults(c+1) = real(material_phaseAt(ipc,el),pReal) ! phaseID of grain
case (texture_ID)
mySize = 1
crystallite_postResults(c+1) = real(material_texture(ipc,ip,el),pReal) ! textureID of grain
case (orientation_ID) case (orientation_ID)
mySize = 4 mySize = 4
crystallite_postResults(c+1:c+mySize) = crystallite_orientation(ipc,ip,el)%asQuaternion() crystallite_postResults(c+1:c+mySize) = crystallite_orientation(ipc,ip,el)%asQuaternion()
case (grainrotation_ID)
rot = material_orientation0(ipc,ip,el)%misorientation(crystallite_orientation(ipc,ip,el))
mySize = 4
crystallite_postResults(c+1:c+mySize) = rot%asAxisAngle()
crystallite_postResults(c+4) = inDeg * crystallite_postResults(c+4) ! angle in degree
! remark: tensor output is of the form 11,12,13, 21,22,23, 31,32,33 ! remark: tensor output is of the form 11,12,13, 21,22,23, 31,32,33
! thus row index i is slow, while column index j is fast. reminder: "row is slow" ! thus row index i is slow, while column index j is fast. reminder: "row is slow"
@@ -917,37 +900,14 @@ function crystallite_postResults(ipc, ip, el)
mySize = 9 mySize = 9
crystallite_postResults(c+1:c+mySize) = & crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_partionedF(1:3,1:3,ipc,ip,el)),[mySize]) reshape(transpose(crystallite_partionedF(1:3,1:3,ipc,ip,el)),[mySize])
case (fe_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Fe(1:3,1:3,ipc,ip,el)),[mySize])
case (fp_ID) case (fp_ID)
mySize = 9 mySize = 9
crystallite_postResults(c+1:c+mySize) = & crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Fp(1:3,1:3,ipc,ip,el)),[mySize]) reshape(transpose(crystallite_Fp(1:3,1:3,ipc,ip,el)),[mySize])
case (fi_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Fi(1:3,1:3,ipc,ip,el)),[mySize])
case (lp_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Lp(1:3,1:3,ipc,ip,el)),[mySize])
case (li_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Li(1:3,1:3,ipc,ip,el)),[mySize])
case (p_ID) case (p_ID)
mySize = 9 mySize = 9
crystallite_postResults(c+1:c+mySize) = & crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_P(1:3,1:3,ipc,ip,el)),[mySize]) reshape(transpose(crystallite_P(1:3,1:3,ipc,ip,el)),[mySize])
case (s_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(crystallite_S(1:3,1:3,ipc,ip,el),[mySize])
case (elasmatrix_ID)
mySize = 36
crystallite_postResults(c+1:c+mySize) = reshape(constitutive_homogenizedC(ipc,ip,el),[mySize])
case(neighboringelement_ID) case(neighboringelement_ID)
mySize = nIPneighbors mySize = nIPneighbors
crystallite_postResults(c+1:c+mySize) = 0.0_pReal crystallite_postResults(c+1:c+mySize) = 0.0_pReal
@@ -1055,16 +1015,16 @@ subroutine crystallite_results
real(pReal), allocatable, dimension(:,:,:) :: select_tensors real(pReal), allocatable, dimension(:,:,:) :: select_tensors
integer :: e,i,c,j integer :: e,i,c,j
allocate(select_tensors(3,3,count(material_phaseAt==instance)*homogenization_maxNgrains)) allocate(select_tensors(3,3,count(material_phaseAt==instance)*homogenization_maxNgrains*discretization_nIP))
j=0 j=0
do e = 1, size(material_phaseAt,2) do e = 1, size(material_phaseAt,2)
do i = 1, homogenization_maxNgrains !ToDo: this needs to be changed for varying Ngrains do i = 1, discretization_nIP
do c = 1, size(material_phaseAt,1) do c = 1, size(material_phaseAt,1) !ToDo: this needs to be changed for varying Ngrains
if (material_phaseAt(c,e) == instance) then if (material_phaseAt(c,e) == instance) then
j = j + 1 j = j + 1
select_tensors(1:3,1:3,j) = dataset(1:3,1:3,c,i,e) select_tensors(1:3,1:3,j) = dataset(1:3,1:3,c,i,e)
endif endif
enddo enddo
enddo enddo
enddo enddo
@@ -1082,12 +1042,12 @@ subroutine crystallite_results
type(rotation), allocatable, dimension(:) :: select_rotations type(rotation), allocatable, dimension(:) :: select_rotations
integer :: e,i,c,j integer :: e,i,c,j
allocate(select_rotations(count(material_phaseAt==instance)*homogenization_maxNgrains)) allocate(select_rotations(count(material_phaseAt==instance)*homogenization_maxNgrains*discretization_nIP))
j=0 j=0
do e = 1, size(material_phaseAt,2) do e = 1, size(material_phaseAt,2)
do i = 1, homogenization_maxNgrains !ToDo: this needs to be changed for varying Ngrains do i = 1, discretization_nIP
do c = 1, size(material_phaseAt,1) do c = 1, size(material_phaseAt,1) !ToDo: this needs to be changed for varying Ngrains
if (material_phaseAt(c,e) == instance) then if (material_phaseAt(c,e) == instance) then
j = j + 1 j = j + 1
select_rotations(j) = dataset(c,i,e) select_rotations(j) = dataset(c,i,e)
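The two hunks above size the selection buffers by the number of integration points and loop over IPs rather than over homogenization_maxNgrains. A small Python sketch of the corrected gather (array names and shapes are illustrative, not the module's data layout):

import numpy as np

n_constituents, n_ip, n_elems = 2, 3, 4
material_phaseAt = np.random.randint(1, 3, (n_constituents, n_elems))  # phase ID per (constituent, element)
dataset = np.random.rand(3, 3, n_constituents, n_ip, n_elems)          # a per-point tensor field

def select_tensors(dataset, material_phaseAt, instance, n_ip):
    # collect every 3x3 tensor belonging to one phase, across constituents, IPs and elements
    n_selected = np.count_nonzero(material_phaseAt == instance) * n_ip
    out = np.empty((3, 3, n_selected))
    j = 0
    for e in range(material_phaseAt.shape[1]):
        for i in range(n_ip):
            for c in range(material_phaseAt.shape[0]):
                if material_phaseAt[c, e] == instance:
                    out[:, :, j] = dataset[:, :, c, i, e]
                    j += 1
    return out

selected = select_tensors(dataset, material_phaseAt, instance=1, n_ip=n_ip)
assert selected.shape[2] == np.count_nonzero(material_phaseAt == 1) * n_ip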

View File

@@ -30,7 +30,6 @@ subroutine damage_none_init
myhomog: if (damage_type(homog) == DAMAGE_NONE_ID) then myhomog: if (damage_type(homog) == DAMAGE_NONE_ID) then
NofMyHomog = count(material_homogenizationAt == homog) NofMyHomog = count(material_homogenizationAt == homog)
damageState(homog)%sizeState = 0 damageState(homog)%sizeState = 0
damageState(homog)%sizePostResults = 0
allocate(damageState(homog)%state0 (0,NofMyHomog)) allocate(damageState(homog)%state0 (0,NofMyHomog))
allocate(damageState(homog)%subState0(0,NofMyHomog)) allocate(damageState(homog)%subState0(0,NofMyHomog))
allocate(damageState(homog)%state (0,NofMyHomog)) allocate(damageState(homog)%state (0,NofMyHomog))

View File

@@ -160,7 +160,6 @@ module subroutine mech_RGC_init
+ size(['avg constitutive work ','average penalty energy']) + size(['avg constitutive work ','average penalty energy'])
homogState(h)%sizeState = sizeState homogState(h)%sizeState = sizeState
homogState(h)%sizePostResults = 0
allocate(homogState(h)%state0 (sizeState,NofMyHomog), source=0.0_pReal) allocate(homogState(h)%state0 (sizeState,NofMyHomog), source=0.0_pReal)
allocate(homogState(h)%subState0(sizeState,NofMyHomog), source=0.0_pReal) allocate(homogState(h)%subState0(sizeState,NofMyHomog), source=0.0_pReal)
allocate(homogState(h)%state (sizeState,NofMyHomog), source=0.0_pReal) allocate(homogState(h)%state (sizeState,NofMyHomog), source=0.0_pReal)

View File

@@ -63,7 +63,6 @@ module subroutine mech_isostrain_init
NofMyHomog = count(material_homogenizationAt == h) NofMyHomog = count(material_homogenizationAt == h)
homogState(h)%sizeState = 0 homogState(h)%sizeState = 0
homogState(h)%sizePostResults = 0
allocate(homogState(h)%state0 (0,NofMyHomog)) allocate(homogState(h)%state0 (0,NofMyHomog))
allocate(homogState(h)%subState0(0,NofMyHomog)) allocate(homogState(h)%subState0(0,NofMyHomog))
allocate(homogState(h)%state (0,NofMyHomog)) allocate(homogState(h)%state (0,NofMyHomog))

View File

@@ -29,7 +29,6 @@ module subroutine mech_none_init
NofMyHomog = count(material_homogenizationAt == h) NofMyHomog = count(material_homogenizationAt == h)
homogState(h)%sizeState = 0 homogState(h)%sizeState = 0
homogState(h)%sizePostResults = 0
allocate(homogState(h)%state0 (0,NofMyHomog)) allocate(homogState(h)%state0 (0,NofMyHomog))
allocate(homogState(h)%subState0(0,NofMyHomog)) allocate(homogState(h)%subState0(0,NofMyHomog))
allocate(homogState(h)%state (0,NofMyHomog)) allocate(homogState(h)%state (0,NofMyHomog))

View File

@@ -135,7 +135,7 @@ module material
material_homogenizationMemberAt !< position of the element within its homogenization instance material_homogenizationMemberAt !< position of the element within its homogenization instance
integer, dimension(:,:), allocatable, public, protected :: & ! (constituent,elem) integer, dimension(:,:), allocatable, public, protected :: & ! (constituent,elem)
material_phaseAt !< phase ID of each element material_phaseAt !< phase ID of each element
integer, dimension(:,:,:), allocatable, public, protected :: & ! (constituent,ip,elem) integer, dimension(:,:,:), allocatable, public, protected :: & ! (constituent,elem)
material_phaseMemberAt !< position of the element within its phase instance material_phaseMemberAt !< position of the element within its phase instance
! END NEW MAPPINGS ! END NEW MAPPINGS

View File

@@ -1,7 +1,7 @@
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @author Franz Roters, Max-Planck-Institut für Eisenforschung GmbH !> @author Franz Roters, Max-Planck-Institut für Eisenforschung GmbH
!> @author Philip Eisenlohr, Max-Planck-Institut für Eisenforschung GmbH !> @author Philip Eisenlohr, Max-Planck-Institut für Eisenforschung GmbH
!> @author Christoph Koords, Max-Planck-Institut für Eisenforschung GmbH !> @author Christoph Kords, Max-Planck-Institut für Eisenforschung GmbH
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!> @brief Sets up the mesh for the solver MSC.Marc !> @brief Sets up the mesh for the solver MSC.Marc
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------

View File

@@ -18,11 +18,6 @@ module plastic_disloUCLA
implicit none implicit none
private private
integer, dimension(:,:), allocatable, target, public :: &
plastic_disloUCLA_sizePostResult !< size of each post result output
character(len=64), dimension(:,:), allocatable, target, public :: &
plastic_disloUCLA_output !< name of each post result output
real(pReal), parameter, private :: & real(pReal), parameter, private :: &
kB = 1.38e-23_pReal !< Boltzmann constant in J/Kelvin kB = 1.38e-23_pReal !< Boltzmann constant in J/Kelvin
@@ -106,7 +101,6 @@ module plastic_disloUCLA
plastic_disloUCLA_dependentState, & plastic_disloUCLA_dependentState, &
plastic_disloUCLA_LpAndItsTangent, & plastic_disloUCLA_LpAndItsTangent, &
plastic_disloUCLA_dotState, & plastic_disloUCLA_dotState, &
plastic_disloUCLA_postResults, &
plastic_disloUCLA_results plastic_disloUCLA_results
private :: & private :: &
kinetics kinetics
@@ -148,10 +142,6 @@ subroutine plastic_disloUCLA_init()
if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) & if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) &
write(6,'(a16,1x,i5,/)') '# instances:',Ninstance write(6,'(a16,1x,i5,/)') '# instances:',Ninstance
allocate(plastic_disloUCLA_sizePostResult(maxval(phase_Noutput),Ninstance),source=0)
allocate(plastic_disloUCLA_output(maxval(phase_Noutput),Ninstance))
plastic_disloUCLA_output = ''
allocate(param(Ninstance)) allocate(param(Ninstance))
allocate(state(Ninstance)) allocate(state(Ninstance))
allocate(dotState(Ninstance)) allocate(dotState(Ninstance))
@@ -289,8 +279,6 @@ subroutine plastic_disloUCLA_init()
end select end select
if (outputID /= undefined_ID) then if (outputID /= undefined_ID) then
plastic_disloUCLA_output(i,phase_plasticityInstance(p)) = outputs(i)
plastic_disloUCLA_sizePostResult(i,phase_plasticityInstance(p)) = prm%sum_N_sl
prm%outputID = [prm%outputID, outputID] prm%outputID = [prm%outputID, outputID]
endif endif
@@ -304,7 +292,6 @@ subroutine plastic_disloUCLA_init()
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, & call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, &
prm%sum_N_sl,0,0) prm%sum_N_sl,0,0)
plasticState(p)%sizePostResults = sum(plastic_disloUCLA_sizePostResult(:,phase_plasticityInstance(p)))
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState ! locally defined state aliases and initialization of state0 and aTolState
@@ -472,59 +459,6 @@ subroutine plastic_disloUCLA_dependentState(instance,of)
end subroutine plastic_disloUCLA_dependentState end subroutine plastic_disloUCLA_dependentState
!--------------------------------------------------------------------------------------------------
!> @brief return array of constitutive results
!--------------------------------------------------------------------------------------------------
function plastic_disloUCLA_postResults(Mp,T,instance,of) result(postResults)
real(pReal), dimension(3,3), intent(in) :: &
Mp !< Mandel stress
real(pReal), intent(in) :: &
T !< temperature
integer, intent(in) :: &
instance, &
of
real(pReal), dimension(sum(plastic_disloUCLA_sizePostResult(:,instance))) :: &
postResults
integer :: &
o,c
real(pReal), dimension(param(instance)%sum_N_sl) :: &
gdot_pos,gdot_neg
c = 0
associate(prm => param(instance), stt => state(instance), dst => dependentState(instance))
outputsLoop: do o = 1,size(prm%outputID)
select case(prm%outputID(o))
case (rho_mob_ID)
postResults(c+1:c+prm%sum_N_sl) = stt%rho_mob(1:prm%sum_N_sl,of)
case (rho_dip_ID)
postResults(c+1:c+prm%sum_N_sl) = stt%rho_dip(1:prm%sum_N_sl,of)
case (dot_gamma_sl_ID)
call kinetics(Mp,T,instance,of,gdot_pos,gdot_neg)
postResults(c+1:c+prm%sum_N_sl) = gdot_pos + gdot_neg
case (gamma_sl_ID)
postResults(c+1:c+prm%sum_N_sl) = stt%gamma_sl(1:prm%sum_N_sl, of)
case (Lambda_sl_ID)
postResults(c+1:c+prm%sum_N_sl) = dst%Lambda_sl(1:prm%sum_N_sl, of)
case (thresholdstress_ID)
postResults(c+1:c+prm%sum_N_sl) = dst%threshold_stress(1:prm%sum_N_sl,of)
end select
c = c + prm%sum_N_sl
enddo outputsLoop
end associate
end function plastic_disloUCLA_postResults
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief writes results to HDF5 output file !> @brief writes results to HDF5 output file
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------

View File

@@ -20,11 +20,6 @@ module plastic_dislotwin
implicit none implicit none
private private
integer, dimension(:,:), allocatable, target, public :: &
plastic_dislotwin_sizePostResult !< size of each post result output
character(len=64), dimension(:,:), allocatable, target, public :: &
plastic_dislotwin_output !< name of each post result output
real(pReal), parameter :: & real(pReal), parameter :: &
kB = 1.38e-23_pReal !< Boltzmann constant in J/Kelvin kB = 1.38e-23_pReal !< Boltzmann constant in J/Kelvin
@@ -167,7 +162,6 @@ module plastic_dislotwin
plastic_dislotwin_dependentState, & plastic_dislotwin_dependentState, &
plastic_dislotwin_LpAndItsTangent, & plastic_dislotwin_LpAndItsTangent, &
plastic_dislotwin_dotState, & plastic_dislotwin_dotState, &
plastic_dislotwin_postResults, &
plastic_dislotwin_results plastic_dislotwin_results
contains contains
@@ -213,10 +207,6 @@ subroutine plastic_dislotwin_init
if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) & if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) &
write(6,'(a16,1x,i5,/)') '# instances:',Ninstance write(6,'(a16,1x,i5,/)') '# instances:',Ninstance
allocate(plastic_dislotwin_sizePostResult(maxval(phase_Noutput),Ninstance),source=0)
allocate(plastic_dislotwin_output(maxval(phase_Noutput),Ninstance))
plastic_dislotwin_output = ''
allocate(param(Ninstance)) allocate(param(Ninstance))
allocate(state(Ninstance)) allocate(state(Ninstance))
@@ -507,8 +497,6 @@ subroutine plastic_dislotwin_init
end select end select
if (outputID /= undefined_ID) then if (outputID /= undefined_ID) then
plastic_dislotwin_output(i,phase_plasticityInstance(p)) = outputs(i)
plastic_dislotwin_sizePostResult(i,phase_plasticityInstance(p)) = outputSize
prm%outputID = [prm%outputID, outputID] prm%outputID = [prm%outputID, outputID]
endif endif
@@ -524,8 +512,6 @@ subroutine plastic_dislotwin_init
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, & call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, &
prm%sum_N_sl,prm%sum_N_tw,prm%sum_N_tr) prm%sum_N_sl,prm%sum_N_tw,prm%sum_N_tr)
plasticState(p)%sizePostResults = sum(plastic_dislotwin_sizePostResult(:,phase_plasticityInstance(p)))
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState ! locally defined state aliases and initialization of state0 and aTolState
@@ -936,82 +922,6 @@ subroutine plastic_dislotwin_dependentState(T,instance,of)
end subroutine plastic_dislotwin_dependentState end subroutine plastic_dislotwin_dependentState
!--------------------------------------------------------------------------------------------------
!> @brief return array of constitutive results
!--------------------------------------------------------------------------------------------------
function plastic_dislotwin_postResults(Mp,T,instance,of) result(postResults)
real(pReal), dimension(3,3),intent(in) :: &
Mp !< 2nd Piola Kirchhoff stress tensor in Mandel notation
real(pReal), intent(in) :: &
T !< temperature at integration point
integer, intent(in) :: &
instance, &
of
real(pReal), dimension(sum(plastic_dislotwin_sizePostResult(:,instance))) :: &
postResults
integer :: &
o,c,j
associate(prm => param(instance), stt => state(instance), dst => dependentState(instance))
c = 0
do o = 1,size(prm%outputID)
select case(prm%outputID(o))
case (rho_mob_ID)
postResults(c+1:c+prm%sum_N_sl) = stt%rho_mob(1:prm%sum_N_sl,of)
c = c + prm%sum_N_sl
case (rho_dip_ID)
postResults(c+1:c+prm%sum_N_sl) = stt%rho_dip(1:prm%sum_N_sl,of)
c = c + prm%sum_N_sl
case (dot_gamma_sl_ID)
call kinetics_slip(Mp,T,instance,of,postResults(c+1:c+prm%sum_N_sl))
c = c + prm%sum_N_sl
case (gamma_sl_ID)
postResults(c+1:c+prm%sum_N_sl) = stt%gamma_sl(1:prm%sum_N_sl,of)
c = c + prm%sum_N_sl
case (Lambda_sl_ID)
postResults(c+1:c+prm%sum_N_sl) = dst%Lambda_sl(1:prm%sum_N_sl,of)
c = c + prm%sum_N_sl
case (resolved_stress_slip_ID)
do j = 1, prm%sum_N_sl
postResults(c+j) = math_mul33xx33(Mp,prm%P_sl(1:3,1:3,j))
enddo
c = c + prm%sum_N_sl
case (threshold_stress_slip_ID)
postResults(c+1:c+prm%sum_N_sl) = dst%tau_pass(1:prm%sum_N_sl,of)
c = c + prm%sum_N_sl
case (f_tw_ID)
postResults(c+1:c+prm%sum_N_tw) = stt%f_tw(1:prm%sum_N_tw,of)
c = c + prm%sum_N_tw
case (Lambda_tw_ID)
postResults(c+1:c+prm%sum_N_tw) = dst%Lambda_tw(1:prm%sum_N_tw,of)
c = c + prm%sum_N_tw
case (resolved_stress_twin_ID)
do j = 1, prm%sum_N_tw
postResults(c+j) = math_mul33xx33(Mp,prm%P_tw(1:3,1:3,j))
enddo
c = c + prm%sum_N_tw
case (tau_hat_tw_ID)
postResults(c+1:c+prm%sum_N_tw) = dst%tau_hat_tw(1:prm%sum_N_tw,of)
c = c + prm%sum_N_tw
case (f_tr_ID)
postResults(c+1:c+prm%sum_N_tr) = stt%f_tr(1:prm%sum_N_tr,of)
c = c + prm%sum_N_tr
end select
enddo
end associate
end function plastic_dislotwin_postResults
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief writes results to HDF5 output file !> @brief writes results to HDF5 output file
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------

View File

@@ -20,11 +20,6 @@ module plastic_isotropic
implicit none implicit none
private private
integer, dimension(:,:), allocatable, target, public :: &
plastic_isotropic_sizePostResult !< size of each post result output
character(len=64), dimension(:,:), allocatable, target, public :: &
plastic_isotropic_output !< name of each post result output
enum, bind(c) enum, bind(c)
enumerator :: & enumerator :: &
undefined_ID, & undefined_ID, &
@@ -74,7 +69,6 @@ module plastic_isotropic
plastic_isotropic_LpAndItsTangent, & plastic_isotropic_LpAndItsTangent, &
plastic_isotropic_LiAndItsTangent, & plastic_isotropic_LiAndItsTangent, &
plastic_isotropic_dotState, & plastic_isotropic_dotState, &
plastic_isotropic_postResults, &
plastic_isotropic_results plastic_isotropic_results
contains contains
@@ -110,10 +104,6 @@ subroutine plastic_isotropic_init
if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) & if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) &
write(6,'(a16,1x,i5,/)') '# instances:',Ninstance write(6,'(a16,1x,i5,/)') '# instances:',Ninstance
allocate(plastic_isotropic_sizePostResult(maxval(phase_Noutput),Ninstance),source=0)
allocate(plastic_isotropic_output(maxval(phase_Noutput),Ninstance))
plastic_isotropic_output = ''
allocate(param(Ninstance)) allocate(param(Ninstance))
allocate(state(Ninstance)) allocate(state(Ninstance))
allocate(dotState(Ninstance)) allocate(dotState(Ninstance))
@@ -151,7 +141,7 @@ subroutine plastic_isotropic_init
! sanity checks ! sanity checks
extmsg = '' extmsg = ''
if (prm%aTol_gamma <= 0.0_pReal) extmsg = trim(extmsg)//' aTol_gamma' if (prm%aTol_gamma <= 0.0_pReal) extmsg = trim(extmsg)//' aTol_gamma'
if (prm%xi_0 < 0.0_pReal) extmsg = trim(extmsg)//' xi_0' if (prm%xi_0 < 0.0_pReal) extmsg = trim(extmsg)//' xi_0'
if (prm%dot_gamma_0 <= 0.0_pReal) extmsg = trim(extmsg)//' dot_gamma_0' if (prm%dot_gamma_0 <= 0.0_pReal) extmsg = trim(extmsg)//' dot_gamma_0'
if (prm%n <= 0.0_pReal) extmsg = trim(extmsg)//' n' if (prm%n <= 0.0_pReal) extmsg = trim(extmsg)//' n'
if (prm%a <= 0.0_pReal) extmsg = trim(extmsg)//' a' if (prm%a <= 0.0_pReal) extmsg = trim(extmsg)//' a'
@@ -180,8 +170,6 @@ subroutine plastic_isotropic_init
end select end select
if (outputID /= undefined_ID) then if (outputID /= undefined_ID) then
plastic_isotropic_output(i,phase_plasticityInstance(p)) = outputs(i)
plastic_isotropic_sizePostResult(i,phase_plasticityInstance(p)) = 1
prm%outputID = [prm%outputID, outputID] prm%outputID = [prm%outputID, outputID]
endif endif
@@ -195,7 +183,6 @@ subroutine plastic_isotropic_init
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, & call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, &
1,0,0) 1,0,0)
plasticState(p)%sizePostResults = sum(plastic_isotropic_sizePostResult(:,phase_plasticityInstance(p)))
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState ! locally defined state aliases and initialization of state0 and aTolState
@@ -382,54 +369,6 @@ subroutine plastic_isotropic_dotState(Mp,instance,of)
end subroutine plastic_isotropic_dotState end subroutine plastic_isotropic_dotState
!--------------------------------------------------------------------------------------------------
!> @brief return array of constitutive results
!--------------------------------------------------------------------------------------------------
function plastic_isotropic_postResults(Mp,instance,of) result(postResults)
real(pReal), dimension(3,3), intent(in) :: &
Mp !< Mandel stress
integer, intent(in) :: &
instance, &
of
real(pReal), dimension(sum(plastic_isotropic_sizePostResult(:,instance))) :: &
postResults
real(pReal) :: &
norm_Mp !< norm of the Mandel stress
integer :: &
o,c
associate(prm => param(instance), stt => state(instance))
if (prm%dilatation) then
norm_Mp = sqrt(math_mul33xx33(Mp,Mp))
else
norm_Mp = sqrt(math_mul33xx33(math_deviatoric33(Mp),math_deviatoric33(Mp)))
endif
c = 0
outputsLoop: do o = 1,size(prm%outputID)
select case(prm%outputID(o))
case (xi_ID)
postResults(c+1) = stt%xi(of)
c = c + 1
case (dot_gamma_ID)
postResults(c+1) = prm%dot_gamma_0 &
* (sqrt(1.5_pReal) * norm_Mp /(prm%M * stt%xi(of)))**prm%n
c = c + 1
end select
enddo outputsLoop
end associate
end function plastic_isotropic_postResults
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief writes results to HDF5 output file !> @brief writes results to HDF5 output file
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
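The removed dot_gamma branch above evaluates the isotropic power-law flow rule, dot_gamma = dot_gamma_0 * (sqrt(1.5) * ||Mp|| / (M * xi))**n, with ||Mp|| taken from the deviatoric stress unless the dilatational variant is active. A short numpy sketch of that formula (parameter values are made up for illustration):

import numpy as np

def dot_gamma_isotropic(Mp, xi, dot_gamma_0=1e-3, M=3.0, n=20.0, dilatation=False):
    # power-law shear rate from a Mandel stress Mp and current flow resistance xi
    if not dilatation:                                # default: use only the deviatoric part
        Mp = Mp - np.trace(Mp) / 3.0 * np.eye(3)
    norm_Mp = np.sqrt(np.sum(Mp * Mp))                # double contraction Mp : Mp
    return dot_gamma_0 * (np.sqrt(1.5) * norm_Mp / (M * xi)) ** n

Mp = np.diag([100e6, -50e6, -50e6])                   # Pa, illustrative stress state
print(dot_gamma_isotropic(Mp, xi=80e6))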

View File

@@ -19,11 +19,6 @@ module plastic_kinehardening
implicit none implicit none
private private
integer, dimension(:,:), allocatable, target, public :: &
plastic_kinehardening_sizePostResult !< size of each post result output
character(len=64), dimension(:,:), allocatable, target, public :: &
plastic_kinehardening_output !< name of each post result output
enum, bind(c) enum, bind(c)
enumerator :: & enumerator :: &
undefined_ID, & undefined_ID, &
@@ -90,7 +85,6 @@ module plastic_kinehardening
plastic_kinehardening_LpAndItsTangent, & plastic_kinehardening_LpAndItsTangent, &
plastic_kinehardening_dotState, & plastic_kinehardening_dotState, &
plastic_kinehardening_deltaState, & plastic_kinehardening_deltaState, &
plastic_kinehardening_postResults, &
plastic_kinehardening_results plastic_kinehardening_results
contains contains
@@ -127,10 +121,6 @@ subroutine plastic_kinehardening_init
if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) & if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) &
write(6,'(a16,1x,i5,/)') '# instances:',Ninstance write(6,'(a16,1x,i5,/)') '# instances:',Ninstance
allocate(plastic_kinehardening_sizePostResult(maxval(phase_Noutput),Ninstance),source=0)
allocate(plastic_kinehardening_output(maxval(phase_Noutput),Ninstance))
plastic_kinehardening_output = ''
allocate(param(Ninstance)) allocate(param(Ninstance))
allocate(state(Ninstance)) allocate(state(Ninstance))
allocate(dotState(Ninstance)) allocate(dotState(Ninstance))
@@ -247,8 +237,6 @@ subroutine plastic_kinehardening_init
end select end select
if (outputID /= undefined_ID) then if (outputID /= undefined_ID) then
plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i)
plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = prm%totalNslip
prm%outputID = [prm%outputID , outputID] prm%outputID = [prm%outputID , outputID]
endif endif
@@ -263,7 +251,6 @@ subroutine plastic_kinehardening_init
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, &
prm%totalNslip,0,0) prm%totalNslip,0,0)
plasticState(p)%sizePostResults = sum(plastic_kinehardening_sizePostResult(:,phase_plasticityInstance(p)))
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState ! locally defined state aliases and initialization of state0 and aTolState
@@ -446,63 +433,6 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of)
end subroutine plastic_kinehardening_deltaState end subroutine plastic_kinehardening_deltaState
!--------------------------------------------------------------------------------------------------
!> @brief return array of constitutive results
!--------------------------------------------------------------------------------------------------
function plastic_kinehardening_postResults(Mp,instance,of) result(postResults)
real(pReal), dimension(3,3), intent(in) :: &
Mp !< Mandel stress
integer, intent(in) :: &
instance, &
of
real(pReal), dimension(sum(plastic_kinehardening_sizePostResult(:,instance))) :: &
postResults
integer :: &
o,c,i
real(pReal), dimension(param(instance)%totalNslip) :: &
gdot_pos,gdot_neg
c = 0
associate(prm => param(instance), stt => state(instance))
outputsLoop: do o = 1,size(prm%outputID)
select case(prm%outputID(o))
case (crss_ID)
postResults(c+1:c+prm%totalNslip) = stt%crss(:,of)
case(crss_back_ID)
postResults(c+1:c+prm%totalNslip) = stt%crss_back(:,of)
case (sense_ID)
postResults(c+1:c+prm%totalNslip) = stt%sense(:,of)
case (chi0_ID)
postResults(c+1:c+prm%totalNslip) = stt%chi0(:,of)
case (gamma0_ID)
postResults(c+1:c+prm%totalNslip) = stt%gamma0(:,of)
case (accshear_ID)
postResults(c+1:c+prm%totalNslip) = stt%accshear(:,of)
case (shearrate_ID)
call kinetics(Mp,instance,of,gdot_pos,gdot_neg)
postResults(c+1:c+prm%totalNslip) = gdot_pos+gdot_neg
case (resolvedstress_ID)
do i = 1, prm%totalNslip
postResults(c+i) = math_mul33xx33(Mp,prm%Schmid(1:3,1:3,i))
enddo
end select
c = c + prm%totalNslip
enddo outputsLoop
end associate
end function plastic_kinehardening_postResults
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief writes results to HDF5 output file !> @brief writes results to HDF5 output file
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------

View File

@@ -40,8 +40,6 @@ subroutine plastic_none_init
NipcMyPhase = count(material_phaseAt == p) * discretization_nIP NipcMyPhase = count(material_phaseAt == p) * discretization_nIP
call material_allocatePlasticState(p,NipcMyPhase,0,0,0, & call material_allocatePlasticState(p,NipcMyPhase,0,0,0, &
0,0,0) 0,0,0)
plasticState(p)%sizePostResults = 0
enddo enddo
end subroutine plastic_none_init end subroutine plastic_none_init

View File

@@ -17,11 +17,6 @@ module plastic_phenopowerlaw
implicit none implicit none
private private
integer, dimension(:,:), allocatable, target, public :: &
plastic_phenopowerlaw_sizePostResult !< size of each post result output
character(len=64), dimension(:,:), allocatable, target, public :: &
plastic_phenopowerlaw_output !< name of each post result output
enum, bind(c) enum, bind(c)
enumerator :: & enumerator :: &
@@ -100,7 +95,6 @@ module plastic_phenopowerlaw
plastic_phenopowerlaw_init, & plastic_phenopowerlaw_init, &
plastic_phenopowerlaw_LpAndItsTangent, & plastic_phenopowerlaw_LpAndItsTangent, &
plastic_phenopowerlaw_dotState, & plastic_phenopowerlaw_dotState, &
plastic_phenopowerlaw_postResults, &
plastic_phenopowerlaw_results plastic_phenopowerlaw_results
contains contains
@@ -137,10 +131,6 @@ subroutine plastic_phenopowerlaw_init
if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) & if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) &
write(6,'(a16,1x,i5,/)') '# instances:',Ninstance write(6,'(a16,1x,i5,/)') '# instances:',Ninstance
allocate(plastic_phenopowerlaw_sizePostResult(maxval(phase_Noutput),Ninstance),source=0)
allocate(plastic_phenopowerlaw_output(maxval(phase_Noutput),Ninstance))
plastic_phenopowerlaw_output = ''
allocate(param(Ninstance)) allocate(param(Ninstance))
allocate(state(Ninstance)) allocate(state(Ninstance))
allocate(dotState(Ninstance)) allocate(dotState(Ninstance))
@@ -306,8 +296,6 @@ subroutine plastic_phenopowerlaw_init
end select end select
if (outputID /= undefined_ID) then if (outputID /= undefined_ID) then
plastic_phenopowerlaw_output(i,phase_plasticityInstance(p)) = outputs(i)
plastic_phenopowerlaw_sizePostResult(i,phase_plasticityInstance(p)) = outputSize
prm%outputID = [prm%outputID, outputID] prm%outputID = [prm%outputID, outputID]
endif endif
@@ -322,7 +310,6 @@ subroutine plastic_phenopowerlaw_init
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, & call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, &
prm%totalNslip,prm%totalNtwin,0) prm%totalNslip,prm%totalNtwin,0)
plasticState(p)%sizePostResults = sum(plastic_phenopowerlaw_sizePostResult(:,phase_plasticityInstance(p)))
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState ! locally defined state aliases and initialization of state0 and aTolState
@@ -473,71 +460,6 @@ subroutine plastic_phenopowerlaw_dotState(Mp,instance,of)
end subroutine plastic_phenopowerlaw_dotState end subroutine plastic_phenopowerlaw_dotState
!--------------------------------------------------------------------------------------------------
!> @brief return array of constitutive results
!--------------------------------------------------------------------------------------------------
function plastic_phenopowerlaw_postResults(Mp,instance,of) result(postResults)
real(pReal), dimension(3,3), intent(in) :: &
Mp !< Mandel stress
integer, intent(in) :: &
instance, &
of
real(pReal), dimension(sum(plastic_phenopowerlaw_sizePostResult(:,instance))) :: &
postResults
integer :: &
o,c,i
real(pReal), dimension(param(instance)%totalNslip) :: &
gdot_slip_pos,gdot_slip_neg
c = 0
associate(prm => param(instance), stt => state(instance))
outputsLoop: do o = 1,size(prm%outputID)
select case(prm%outputID(o))
case (resistance_slip_ID)
postResults(c+1:c+prm%totalNslip) = stt%xi_slip(1:prm%totalNslip,of)
c = c + prm%totalNslip
case (accumulatedshear_slip_ID)
postResults(c+1:c+prm%totalNslip) = stt%gamma_slip(1:prm%totalNslip,of)
c = c + prm%totalNslip
case (shearrate_slip_ID)
call kinetics_slip(Mp,instance,of,gdot_slip_pos,gdot_slip_neg)
postResults(c+1:c+prm%totalNslip) = gdot_slip_pos+gdot_slip_neg
c = c + prm%totalNslip
case (resolvedstress_slip_ID)
do i = 1, prm%totalNslip
postResults(c+i) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,i))
enddo
c = c + prm%totalNslip
case (resistance_twin_ID)
postResults(c+1:c+prm%totalNtwin) = stt%xi_twin(1:prm%totalNtwin,of)
c = c + prm%totalNtwin
case (accumulatedshear_twin_ID)
postResults(c+1:c+prm%totalNtwin) = stt%gamma_twin(1:prm%totalNtwin,of)
c = c + prm%totalNtwin
case (shearrate_twin_ID)
call kinetics_twin(Mp,instance,of,postResults(c+1:c+prm%totalNtwin))
c = c + prm%totalNtwin
case (resolvedstress_twin_ID)
do i = 1, prm%totalNtwin
postResults(c+i) = math_mul33xx33(Mp,prm%Schmid_twin(1:3,1:3,i))
enddo
c = c + prm%totalNtwin
end select
enddo outputsLoop
end associate
end function plastic_phenopowerlaw_postResults
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief writes results to HDF5 output file !> @brief writes results to HDF5 output file
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------

View File

@@ -304,18 +304,18 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
real(pReal), intent(in), dimension(:,:,:) :: dataset real(pReal), intent(in), dimension(:,:,:) :: dataset
integer :: i integer :: i
logical :: T logical :: transposed_
integer(HID_T) :: groupHandle integer(HID_T) :: groupHandle
real(pReal), dimension(:,:,:), allocatable :: dataset_transposed real(pReal), dimension(:,:,:), allocatable :: dataset_transposed
if(present(transposed)) then if(present(transposed)) then
T = transposed transposed_ = transposed
else else
T = .true. transposed_ = .true.
endif endif
if(T) then if(transposed_) then
if(size(dataset,1) /= size(dataset,2)) call IO_error(0,ext_msg='transpose non-symmetric tensor') if(size(dataset,1) /= size(dataset,2)) call IO_error(0,ext_msg='transpose non-symmetric tensor')
allocate(dataset_transposed,mold=dataset) allocate(dataset_transposed,mold=dataset)
do i=1,size(dataset_transposed,3) do i=1,size(dataset_transposed,3)
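The renaming above only clarifies the local flag; the behaviour it guards, default to transposing every 3x3 slice and refuse non-square input, can be sketched in Python as (function and argument names are illustrative, not the HDF5 routine's interface):

import numpy as np

def write_tensor_dataset(dataset, transposed=True):
    # optionally transpose each 3x3 slice of a (3, 3, n) stack before writing
    if transposed:
        if dataset.shape[0] != dataset.shape[1]:
            raise ValueError('transpose non-symmetric tensor')
        dataset = np.transpose(dataset, (1, 0, 2))   # swap the two tensor indices, keep the stack index
    return dataset                                   # stand-in for the actual HDF5 write

stack = np.random.rand(3, 3, 4)
assert np.allclose(write_tensor_dataset(stack)[..., 0], stack[..., 0].T)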

View File

@@ -31,7 +31,6 @@ subroutine thermal_isothermal_init
if (thermal_type(homog) /= THERMAL_isothermal_ID) cycle if (thermal_type(homog) /= THERMAL_isothermal_ID) cycle
NofMyHomog = count(material_homogenizationAt == homog) NofMyHomog = count(material_homogenizationAt == homog)
thermalState(homog)%sizeState = 0 thermalState(homog)%sizeState = 0
thermalState(homog)%sizePostResults = 0
allocate(thermalState(homog)%state0 (0,NofMyHomog), source=0.0_pReal) allocate(thermalState(homog)%state0 (0,NofMyHomog), source=0.0_pReal)
allocate(thermalState(homog)%subState0(0,NofMyHomog), source=0.0_pReal) allocate(thermalState(homog)%subState0(0,NofMyHomog), source=0.0_pReal)
allocate(thermalState(homog)%state (0,NofMyHomog), source=0.0_pReal) allocate(thermalState(homog)%state (0,NofMyHomog), source=0.0_pReal)