diff --git a/PRIVATE b/PRIVATE
index 232a094c7..c595994cd 160000
--- a/PRIVATE
+++ b/PRIVATE
@@ -1 +1 @@
-Subproject commit 232a094c715bcbbd1c6652c4dc4a4a50d402b82f
+Subproject commit c595994cd8880acadf50b5dedb79156d04d35b91
diff --git a/VERSION b/VERSION
index 6e24519bd..6f8955f5c 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-v2.0.3-2364-g62f7363a
+v2.0.3-2402-gb88f5ec0
diff --git a/installation/symlink_Processing.py b/installation/symlink_Processing.py
index 2f4c99d34..5972a7f6a 100755
--- a/installation/symlink_Processing.py
+++ b/installation/symlink_Processing.py
@@ -16,10 +16,6 @@ if not os.path.isdir(binDir):
 #define ToDo list
 processing_subDirs = ['pre',
                       'post',
-                      'misc',
-                     ]
-processing_extensions = ['.py',
-                         '.sh',
                      ]
 
 sys.stdout.write('\nsymbolic linking...\n')
@@ -31,7 +27,7 @@ for subDir in processing_subDirs:
 
   for theFile in os.listdir(theDir):
     theName,theExt = os.path.splitext(theFile)
-    if theExt in processing_extensions: # only consider files with proper extensions
+    if theExt in ['.py']:
 
       src = os.path.abspath(os.path.join(theDir,theFile))
       sym_link = os.path.abspath(os.path.join(binDir,theName))
diff --git a/processing/misc/ang_toTable.py b/processing/misc/ang_toTable.py
deleted file mode 100755
index 5579f2466..000000000
--- a/processing/misc/ang_toTable.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import damask
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID = ' '.join([scriptName,damask.version])
-
-
-#--------------------------------------------------------------------------------------------------
-# MAIN
-#--------------------------------------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog [angfile[s]]', description = """
-Convert TSL/EDAX *.ang file to ASCIItable
-
-""", version = scriptID)
-
-(options, filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-for name in filenames:
-  damask.util.report(scriptName,name)
-
-  table = damask.Table.from_ang(StringIO(''.join(sys.stdin.read())) if name is None else name)
-  table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.txt')
diff --git a/processing/post/addCauchy.py b/processing/post/addCauchy.py
deleted file mode 100755
index 8bbc7fb0e..000000000
--- a/processing/post/addCauchy.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-# MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add column containing Cauchy stress based on deformation gradient and first Piola--Kirchhoff stress.
- -""", version = scriptID) - -parser.add_option('-f','--defgrad', - dest = 'defgrad', - type = 'string', metavar = 'string', - help = 'heading of columns containing deformation gradient [%default]') -parser.add_option('-p','--stress', - dest = 'stress', - type = 'string', metavar = 'string', - help = 'heading of columns containing first Piola--Kirchhoff stress [%default]') - -parser.set_defaults(defgrad = 'f', - stress = 'p', - ) - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - table.add('Cauchy', - damask.mechanics.Cauchy(table.get(options.stress ).reshape(-1,3,3), - table.get(options.defgrad).reshape(-1,3,3)).reshape(-1,9), - scriptID+' '+' '.join(sys.argv[1:])) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addDeterminant.py b/processing/post/addDeterminant.py deleted file mode 100755 index f2368559d..000000000 --- a/processing/post/addDeterminant.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import numpy as np - -import damask - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Add column(s) containing determinant of requested tensor column(s). - -""", version = scriptID) - -parser.add_option('-t','--tensor', - dest = 'tensor', - action = 'extend', metavar = '', - help = 'heading of columns containing tensor field values') - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if options.tensor is None: - parser.error('no data column specified.') - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - for tensor in options.tensor: - table.add('det({})'.format(tensor), - np.linalg.det(table.get(tensor).reshape(-1,3,3)), - scriptID+' '+' '.join(sys.argv[1:])) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addDeviator.py b/processing/post/addDeviator.py deleted file mode 100755 index 9a532caec..000000000 --- a/processing/post/addDeviator.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(2)]', description = """ -Add column(s) containing deviator of requested tensor column(s). 
- -""", version = scriptID) - -parser.add_option('-t','--tensor', - dest = 'tensor', - action = 'extend', metavar='', - help = 'heading of columns containing tensor field values') -parser.add_option('-s','--spherical', - dest = 'spherical', - action = 'store_true', - help = 'report spherical part of tensor (hydrostatic component, pressure)') - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if options.tensor is None: - parser.error('no data column specified...') - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - for tensor in options.tensor: - table.add('dev({})'.format(tensor), - damask.mechanics.deviatoric_part(table.get(tensor).reshape(-1,3,3)).reshape(-1,9), - scriptID+' '+' '.join(sys.argv[1:])) - if options.spherical: - table.add('sph({})'.format(tensor), - damask.mechanics.spherical_part(table.get(tensor).reshape(-1,3,3)), - scriptID+' '+' '.join(sys.argv[1:])) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addInfo.py b/processing/post/addInfo.py deleted file mode 100755 index 5e32510db..000000000 --- a/processing/post/addInfo.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Add info lines to ASCIItable header. - -""", version = scriptID) - -parser.add_option('-i', - '--info', - dest = 'info', action = 'extend', metavar = '', - help = 'items to add') - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if options.info is None: - parser.error('no info specified.') - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - table.comments += options.info - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addMises.py b/processing/post/addMises.py deleted file mode 100755 index 0c2a6db50..000000000 --- a/processing/post/addMises.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Add vonMises equivalent values for symmetric part of requested strains and/or stresses. 
- -""", version = scriptID) - -parser.add_option('-e','--strain', - dest = 'strain', - action = 'extend', metavar = '', - help = 'heading(s) of columns containing strain tensors') -parser.add_option('-s','--stress', - dest = 'stress', - action = 'extend', metavar = '', - help = 'heading(s) of columns containing stress tensors') - -parser.set_defaults(strain = [], - stress = [], - ) -(options,filenames) = parser.parse_args() - -if options.stress is [] and options.strain is []: - parser.error('no data column specified...') - -if filenames == []: filenames = [None] - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - for strain in options.strain: - table.add('Mises({})'.format(strain), - damask.mechanics.Mises_strain(damask.mechanics.symmetric(table.get(strain).reshape(-1,3,3))), - scriptID+' '+' '.join(sys.argv[1:])) - for stress in options.stress: - table.add('Mises({})'.format(stress), - damask.mechanics.Mises_stress(damask.mechanics.symmetric(table.get(stress).reshape(-1,3,3))), - scriptID+' '+' '.join(sys.argv[1:])) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/addOrientations.py b/processing/post/addOrientations.py index 137b8121e..e1752b0a4 100755 --- a/processing/post/addOrientations.py +++ b/processing/post/addOrientations.py @@ -172,7 +172,7 @@ for name in filenames: elif inputtype == 'matrix': d = representations['matrix'][1] - o = damask.Rotation.fromMatrix(list(map(float,table.data[column:column+d]))) + o = damask.Rotation.fromMatrix(np.array(list(map(float,table.data[column:column+d]))).reshape(3,3)) elif inputtype == 'frame': M = np.array(list(map(float,table.data[column[0]:column[0]+3] + \ diff --git a/processing/post/addSchmidfactors.py b/processing/post/addSchmidfactors.py index 94c062766..b3c4c7500 100755 --- a/processing/post/addSchmidfactors.py +++ b/processing/post/addSchmidfactors.py @@ -214,7 +214,7 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table - o = damask.Rotation(list(map(float,table.data[column:column+4]))) + o = damask.Rotation(np.array(list(map(float,table.data[column:column+4])))) table.data_append( np.abs( np.sum(slip_direction * (o * force) ,axis=1) \ * np.sum(slip_normal * (o * normal),axis=1))) diff --git a/processing/post/addTable.py b/processing/post/addTable.py deleted file mode 100755 index 944214e69..000000000 --- a/processing/post/addTable.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Append data of ASCIItable(s) column-wise. 
- -""", version = scriptID) - -parser.add_option('-a', '--add','--table', - dest = 'table', - action = 'extend', metavar = '', - help = 'tables to add') - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if options.table is None: - parser.error('no table specified.') - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - - for addTable in options.table: - table2 = damask.Table.from_ASCII(addTable) - table2.data = table2.data[:table.data.shape[0]] - table.join(table2) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/growTable.py b/processing/post/growTable.py deleted file mode 100755 index 1dbfa8423..000000000 --- a/processing/post/growTable.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Append data of ASCIItable(s) row-wise. - -""", version = scriptID) - -parser.add_option('-a', '--add','--table', - dest = 'table', - action = 'extend', metavar = '', - help = 'tables to add') - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if options.table is None: - parser.error('no table specified.') - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - - for growTable in options.table: - table2 = damask.Table.from_ASCII(growTable) - table.append(table2) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/permuteData.py b/processing/post/permuteData.py index 72759f2ce..1f4b80532 100755 --- a/processing/post/permuteData.py +++ b/processing/post/permuteData.py @@ -2,6 +2,7 @@ import os import sys +from io import StringIO from optparse import OptionParser import numpy as np @@ -41,73 +42,20 @@ parser.set_defaults(label = [], ) (options,filenames) = parser.parse_args() - -if len(options.label) == 0: - parser.error('no labels specified.') - -# --- loop over input files ------------------------------------------------------------------------- - if filenames == []: filenames = [None] for name in filenames: - try: - table = damask.ASCIItable(name = name) - except IOError: - continue - damask.util.report(scriptName,name) + damask.util.report(scriptName,name) -# ------------------------------------------ read header ------------------------------------------ + table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - table.head_read() + randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed # random seed per file + rng = np.random.default_rng(randomSeed) -# ------------------------------------------ process labels --------------------------------------- + for label in options.label: + data = table.get(label) + uniques,inverse = np.unique(data,return_inverse=True,axis=0) if options.unique else (data,np.arange(len(data))) + rng.shuffle(uniques) + table.set(label,uniques[inverse], 
scriptID+' '+' '.join(sys.argv[1:])) - errors = [] - remarks = [] - columns = [] - dims = [] - - indices = table.label_index (options.label) - dimensions = table.label_dimension(options.label) - for i,index in enumerate(indices): - if index == -1: remarks.append('label "{}" not present...'.format(options.label[i])) - else: - columns.append(index) - dims.append(dimensions[i]) - - if remarks != []: damask.util.croak(remarks) - if errors != []: - damask.util.croak(errors) - table.close(dismiss = True) - continue - -# ------------------------------------------ assemble header --------------------------------------- - - randomSeed = int(os.urandom(4).hex(), 16) if options.randomSeed is None else options.randomSeed # random seed per file - np.random.seed(randomSeed) - - table.info_append([scriptID + '\t' + ' '.join(sys.argv[1:]), - 'random seed {}'.format(randomSeed), - ]) - table.head_write() - -# ------------------------------------------ process data ------------------------------------------ - - table.data_readArray() # read all data at once - for col,dim in zip(columns,dims): - if options.unique: - s = set(map(tuple,table.data[:,col:col+dim])) # generate set of (unique) values - uniques = np.array(list(map(np.array,s))) # translate set to np.array - shuffler = dict(zip(s,np.random.permutation(len(s)))) # random permutation - table.data[:,col:col+dim] = uniques[np.array(list(map(lambda x: shuffler[tuple(x)], - table.data[:,col:col+dim])))] # fill table with mapped uniques - else: - np.random.shuffle(table.data[:,col:col+dim]) # independently shuffle every row - -# ------------------------------------------ output result ----------------------------------------- - - table.data_writeArray() - -# ------------------------------------------ output finalization ----------------------------------- - - table.close() # close ASCII tables + table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/reLabel.py b/processing/post/reLabel.py deleted file mode 100755 index da608e994..000000000 --- a/processing/post/reLabel.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Rename scalar, vectorial, and/or tensorial data header labels. 
- -""", version = scriptID) - -parser.add_option('-l','--label', - dest = 'label', - action = 'extend', metavar='', - help = 'column(s) to rename') -parser.add_option('-s','--substitute', - dest = 'substitute', - action = 'extend', metavar='', - help = 'new column label(s)') - -parser.set_defaults(label = [], - substitute = [], - ) - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if len(options.label) != len(options.substitute): - parser.error('number of column labels and substitutes do not match.') - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - for label,substitute in zip(options.label,options.substitute): - table.rename(label,substitute,scriptID+' '+' '.join(sys.argv[1:])) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/scaleData.py b/processing/post/scaleData.py deleted file mode 100755 index c6ce0a64e..000000000 --- a/processing/post/scaleData.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Uniformly scale column values by given factor. - -""", version = scriptID) - -parser.add_option('-l','--label', - dest = 'labels', - action = 'extend', metavar = '', - help ='column(s) to scale') -parser.add_option('-f','--factor', - dest = 'factor', - action = 'extend', metavar='', - help = 'factor(s) per column') - -parser.set_defaults(label = [], - factor = []) - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if len(options.labels) != len(options.factor): - parser.error('number of column labels and factors do not match.') - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - for label,factor in zip(options.labels,options.factor): - table.set(label,table.get(label)*float(factor),scriptID+' '+' '.join(sys.argv[1:])) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/shiftData.py b/processing/post/shiftData.py deleted file mode 100755 index 341a78c7e..000000000 --- a/processing/post/shiftData.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Uniformly shift column values by given offset. 
- -""", version = scriptID) - -parser.add_option('-l','--label', - dest = 'labels', - action = 'extend', metavar = '', - help ='column(s) to shift') -parser.add_option('-o','--offset', - dest = 'offset', - action = 'extend', metavar='', - help = 'offset(s) per column') - -parser.set_defaults(label = [], - offset = []) - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if len(options.labels) != len(options.offset): - parser.error('number of column labels and offsets do not match.') - -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - for label,offset in zip(options.labels,options.offset): - table.set(label,table.get(label)+float(offset),scriptID+' '+' '.join(sys.argv[1:])) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/post/sortTable.py b/processing/post/sortTable.py deleted file mode 100755 index 3a3738d18..000000000 --- a/processing/post/sortTable.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """ -Sort rows by given (or all) column label(s). - -Examples: -With coordinates in columns "x", "y", and "z"; sorting with x slowest and z fastest varying index: --label x,y,z. -""", version = scriptID) - - -parser.add_option('-l','--label', - dest = 'labels', - action = 'extend', metavar = '', - help = 'list of column labels (a,b,c,...)') -parser.add_option('-r','--reverse', - dest = 'reverse', - action = 'store_true', - help = 'sort in reverse') - -parser.set_defaults(reverse = False, - ) - -(options,filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -if options.labels is None: - parser.error('no labels specified.') -for name in filenames: - damask.util.report(scriptName,name) - - table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name) - table.sort_by(options.labels,not options.reverse) - - table.to_ASCII(sys.stdout if name is None else name) diff --git a/processing/pre/geom_clean.py b/processing/pre/geom_clean.py deleted file mode 100755 index 8883c1b2a..000000000 --- a/processing/pre/geom_clean.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -#-------------------------------------------------------------------------------------------------- -# MAIN -#-------------------------------------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ -Smooth microstructure by selecting most frequent index within given stencil at each location. 
- -""", version=scriptID) - -parser.add_option('-s','--stencil', - dest = 'stencil', - type = 'int', metavar = 'int', - help = 'size of smoothing stencil [%default]') - -parser.set_defaults(stencil = 3) - -(options, filenames) = parser.parse_args() - - -if filenames == []: filenames = [None] - -for name in filenames: - damask.util.report(scriptName,name) - - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) - damask.util.croak(geom.clean(options.stencil)) - geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) - geom.to_file(sys.stdout if name is None else name,pack=False) diff --git a/processing/pre/geom_fromScratch.py b/processing/pre/geom_fromScratch.py deleted file mode 100755 index 89fd27be5..000000000 --- a/processing/pre/geom_fromScratch.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from optparse import OptionParser - -import numpy as np - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -# -------------------------------------------------------------------- -# MAIN -# -------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ -Generate homogeneous geometry. - -""", version = scriptID) - -parser.add_option('-g','--grid', - dest = 'grid', - type = 'int', nargs = 3, metavar = ' '.join(['int']*3), - help = 'a,b,c grid of hexahedral box %default') -parser.add_option('-s', '--size', - dest = 'size', - type = 'float', nargs = 3, metavar = ' '.join(['float']*3), - help = 'x,y,z of geometry size') -parser.add_option('-o','--origin', - dest = 'origin', - type = 'float', nargs = 3, metavar = ' '.join(['float']*3), - help = 'x,y,z of geometry origin %default') -parser.add_option('--homogenization', - dest = 'homogenization', - type = 'int', metavar = 'int', - help = 'homogenization index [%default]') -parser.add_option('-f','--fill', - dest = 'fill', - type = 'float', metavar = 'int', - help = 'microstructure index [%default]') - -parser.set_defaults(grid = (16,16,16), - origin = (0.,0.,0.), - homogenization = 1, - fill = 1, - ) - -(options, filename) = parser.parse_args() - - -name = None if filename == [] else filename[0] -damask.util.report(scriptName,name) - -dtype = float if np.isnan(options.fill) or int(options.fill) != options.fill else int -geom = damask.Geom(microstructure=np.full(options.grid,options.fill,dtype=dtype), - size=options.size, - origin=options.origin, - homogenization=options.homogenization, - comments=scriptID + ' ' + ' '.join(sys.argv[1:])) -damask.util.croak(geom) - -geom.to_file(sys.stdout if name is None else name,pack=False) diff --git a/processing/pre/geom_mirror.py b/processing/pre/geom_mirror.py deleted file mode 100755 index cca0a4e10..000000000 --- a/processing/pre/geom_mirror.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -#-------------------------------------------------------------------------------------------------- -# MAIN -#-------------------------------------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description 
= """ -Mirror along given directions. - -""", version=scriptID) - -validDirections = ['x','y','z'] - -parser.add_option('-d','--direction', - dest = 'directions', - action = 'extend', metavar = '', - help = "directions in which to mirror {{{}}}".format(','.join(validDirections))) -parser.add_option( '--reflect', - dest = 'reflect', - action = 'store_true', - help = 'reflect (include) outermost layers') - -parser.set_defaults(reflect = False) - -(options, filenames) = parser.parse_args() - -if filenames == []: filenames = [None] - -for name in filenames: - damask.util.report(scriptName,name) - - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) - damask.util.croak(geom.mirror(options.directions,options.reflect)) - geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) - geom.to_file(sys.stdout if name is None else name,pack=False) diff --git a/processing/pre/geom_pack.py b/processing/pre/geom_pack.py deleted file mode 100755 index e927c006f..000000000 --- a/processing/pre/geom_pack.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -#-------------------------------------------------------------------------------------------------- -# MAIN -#-------------------------------------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """ -Pack ranges to "a to b" and/or multiples to "n of x". - -""", version = scriptID) - -(options, filenames) = parser.parse_args() - - -if filenames == []: filenames = [None] - -for name in filenames: - damask.util.report(scriptName,name) - - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) - - damask.util.croak(geom) - geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) - - geom.to_file(sys.stdout if name is None else name,pack=True) diff --git a/processing/pre/geom_rescale.py b/processing/pre/geom_rescale.py deleted file mode 100755 index b1a15593c..000000000 --- a/processing/pre/geom_rescale.py +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import numpy as np - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -#-------------------------------------------------------------------------------------------------- -# MAIN -#-------------------------------------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ -Scales independently in x, y, and z direction in terms of grid and/or size. -Either absolute values or relative factors (like "0.25x") can be used. 
- -""", version = scriptID) - -parser.add_option('-g', '--grid', - dest = 'grid', - type = 'string', nargs = 3, metavar = 'string string string', - help = 'a,b,c grid of hexahedral box') -parser.add_option('-s', '--size', - dest = 'size', - type = 'string', nargs = 3, metavar = 'string string string', - help = 'x,y,z size of hexahedral box') - -(options, filenames) = parser.parse_args() - - -if filenames == []: filenames = [None] - -for name in filenames: - damask.util.report(scriptName,name) - - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) - - grid = geom.get_grid() - size = geom.get_size() - - new_grid = grid if options.grid is None else \ - np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \ - else int(n) for o,n in zip(grid,options.grid)],dtype=int) - - new_size = size if options.size is None else \ - np.array([o*float(n.lower().replace('x','')) if n.lower().endswith('x') \ - else float(n) for o,n in zip(size,options.size)],dtype=float) - - geom.scale(new_grid) - damask.util.croak(geom.update(microstructure = None,size = new_size)) - geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) - geom.to_file(sys.stdout if name is None else name,pack=False) diff --git a/processing/pre/geom_toTable.py b/processing/pre/geom_toTable.py deleted file mode 100755 index f1b5b9555..000000000 --- a/processing/pre/geom_toTable.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from io import StringIO -from optparse import OptionParser - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -#-------------------------------------------------------------------------------------------------- -# MAIN -#-------------------------------------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """ -Translate geom description into ASCIItable containing position and microstructure. 
- -""", version = scriptID) - -(options, filenames) = parser.parse_args() -if filenames == []: filenames = [None] - -for name in filenames: - damask.util.report(scriptName,name) - - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) - damask.util.croak(geom) - - coord0 = damask.grid_filters.cell_coord0(geom.grid,geom.size,geom.origin).reshape(-1,3) - - comments = geom.comments \ - + [scriptID + ' ' + ' '.join(sys.argv[1:]), - 'grid\ta {}\tb {}\tc {}'.format(*geom.grid), - 'size\tx {}\ty {}\tz {}'.format(*geom.size), - 'origin\tx {}\ty {}\tz {}'.format(*geom.origin), - 'homogenization\t{}'.format(geom.homogenization)] - - table = damask.Table(coord0,{'pos':(3,)},comments) - table.add('microstructure',geom.microstructure.reshape((-1,1),order='F')) - - table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.txt') diff --git a/processing/pre/geom_unpack.py b/processing/pre/geom_unpack.py deleted file mode 100755 index 58bd5de87..000000000 --- a/processing/pre/geom_unpack.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -from optparse import OptionParser -from io import StringIO - -import damask - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName,damask.version]) - - -#-------------------------------------------------------------------------------------------------- -# MAIN -#-------------------------------------------------------------------------------------------------- - -parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """ -Unpack ranges "a to b" and/or "n of x" multiples (exclusively in one line). - -""", version = scriptID) - -(options, filenames) = parser.parse_args() - - -if filenames == []: filenames = [None] - -for name in filenames: - damask.util.report(scriptName,name) - - geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) - - damask.util.croak(geom) - geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) - - geom.to_file(sys.stdout if name is None else name,pack=False) diff --git a/python/.coveragerc b/python/.coveragerc index c712d2595..5daa25bb2 100644 --- a/python/.coveragerc +++ b/python/.coveragerc @@ -1,2 +1,5 @@ [run] omit = tests/* + damask/_asciitable.py + damask/_test.py + damask/config/* diff --git a/python/damask/_orientation.py b/python/damask/_orientation.py index 76475e057..e6c732007 100644 --- a/python/damask/_orientation.py +++ b/python/damask/_orientation.py @@ -38,6 +38,9 @@ class Orientation: else: self.rotation = Rotation.fromQuaternion(rotation) # assume quaternion + if self.rotation.quaternion.shape != (4,): + raise NotImplementedError('Support for multiple rotations missing') + def disorientation(self, other, SST = True, diff --git a/python/damask/_rotation.py b/python/damask/_rotation.py index a2ea47007..716977a79 100644 --- a/python/damask/_rotation.py +++ b/python/damask/_rotation.py @@ -1,6 +1,7 @@ import numpy as np from ._Lambert import ball_to_cube, cube_to_ball +from . 
import mechanics _P = -1 @@ -61,6 +62,8 @@ class Rotation: def __repr__(self): """Orientation displayed as unit quaternion, rotation matrix, and Bunge-Euler angles.""" + if self.quaternion.shape != (4,): + raise NotImplementedError('Support for multiple rotations missing') return '\n'.join([ 'Quaternion: (real={:.3f}, imag=<{:+.3f}, {:+.3f}, {:+.3f}>)'.format(*(self.quaternion)), 'Matrix:\n{}'.format(self.asMatrix()), @@ -83,6 +86,8 @@ class Rotation: considere rotation of (3,3,3,3)-matrix """ + if self.quaternion.shape != (4,): + raise NotImplementedError('Support for multiple rotations missing') if isinstance(other, Rotation): # rotate a rotation self_q = self.quaternion[0] self_p = self.quaternion[1:] @@ -107,7 +112,7 @@ class Rotation: elif other.shape == (3,3,): # rotate a single (3x3)-matrix return np.dot(self.asMatrix(),np.dot(other,self.asMatrix().T)) elif other.shape == (3,3,3,3,): - raise NotImplementedError + raise NotImplementedError('Support for rotation of 4th order tensors missing') else: return NotImplemented else: @@ -116,7 +121,7 @@ class Rotation: def inverse(self): """In-place inverse rotation/backward rotation.""" - self.quaternion[1:] *= -1 + self.quaternion[...,1:] *= -1 return self def inversed(self): @@ -125,12 +130,12 @@ class Rotation: def standardize(self): - """In-place quaternion representation with positive q.""" - if self.quaternion[0] < 0.0: self.quaternion*=-1 + """In-place quaternion representation with positive real part.""" + self.quaternion[self.quaternion[...,0] < 0.0] *= -1 return self def standardized(self): - """Quaternion representation with positive q.""" + """Quaternion representation with positive real part.""" return self.copy().standardize() @@ -157,15 +162,17 @@ class Rotation: Rotation from which the average is rotated. """ + if self.quaternion.shape != (4,) or other.quaternion.shape != (4,): + raise NotImplementedError('Support for multiple rotations missing') return Rotation.fromAverage([self,other]) ################################################################################################ # convert to different orientation representations (numpy arrays) - def asQuaternion(self): + def as_quaternion(self): """ - Unit quaternion [q, p_1, p_2, p_3] unless quaternion == True: damask.quaternion object. + Unit quaternion [q, p_1, p_2, p_3]. Parameters ---------- @@ -175,8 +182,8 @@ class Rotation: """ return self.quaternion - def asEulers(self, - degrees = False): + def as_Eulers(self, + degrees = False): """ Bunge-Euler angles: (φ_1, ϕ, φ_2). @@ -190,9 +197,9 @@ class Rotation: if degrees: eu = np.degrees(eu) return eu - def asAxisAngle(self, - degrees = False, - pair = False): + def as_axis_angle(self, + degrees = False, + pair = False): """ Axis angle representation [n_1, n_2, n_3, ω] unless pair == True: ([n_1, n_2, n_3], ω). @@ -205,15 +212,15 @@ class Rotation: """ ax = Rotation.qu2ax(self.quaternion) - if degrees: ax[3] = np.degrees(ax[3]) - return (ax[:3],ax[3]) if pair else ax + if degrees: ax[...,3] = np.degrees(ax[...,3]) + return (ax[...,:3],ax[...,3]) if pair else ax - def asMatrix(self): + def as_matrix(self): """Rotation matrix.""" return Rotation.qu2om(self.quaternion) - def asRodrigues(self, - vector = False): + def as_Rodrigues(self, + vector = False): """ Rodrigues-Frank vector representation [n_1, n_2, n_3, tan(ω/2)] unless vector == True: [n_1, n_2, n_3] * tan(ω/2). 
@@ -224,9 +231,9 @@ class Rotation: """ ro = Rotation.qu2ro(self.quaternion) - return ro[:3]*ro[3] if vector else ro + return ro[...,:3]*ro[...,3] if vector else ro - def asHomochoric(self): + def as_homochoric(self): """Homochoric vector: (h_1, h_2, h_3).""" return Rotation.qu2ho(self.quaternion) @@ -234,7 +241,7 @@ class Rotation: """Cubochoric vector: (c_1, c_2, c_3).""" return Rotation.qu2cu(self.quaternion) - def asM(self): + def M(self): # ToDo not sure about the name: as_M or M? we do not have a from_M """ Intermediate representation supporting quaternion averaging. @@ -244,114 +251,133 @@ class Rotation: https://doi.org/10.2514/1.28949 """ - return np.outer(self.quaternion,self.quaternion) + return np.einsum('...i,...j',self.quaternion,self.quaternion) + # for compatibility (old names do not follow convention) + asM = M + asQuaternion = as_quaternion + asEulers = as_Eulers + asAxisAngle = as_axis_angle + asMatrix = as_matrix + asRodrigues = as_Rodrigues + asHomochoric = as_homochoric ################################################################################################ - # static constructors. The input data needs to follow the convention, options allow to - # relax these convections + # Static constructors. The input data needs to follow the conventions, options allow to + # relax the conventions. @staticmethod - def fromQuaternion(quaternion, - acceptHomomorph = False, - P = -1): + def from_quaternion(quaternion, + acceptHomomorph = False, + P = -1): - qu = quaternion if isinstance(quaternion,np.ndarray) and quaternion.dtype == np.dtype(float) \ - else np.array(quaternion,dtype=float) - if P > 0: qu[1:4] *= -1 # convert from P=1 to P=-1 - if qu[0] < 0.0: - if acceptHomomorph: - qu *= -1. - else: - raise ValueError('Quaternion has negative first component: {}.'.format(qu[0])) - if not np.isclose(np.linalg.norm(qu), 1.0): - raise ValueError('Quaternion is not of unit length: {} {} {} {}.'.format(*qu)) + qu = np.array(quaternion,dtype=float) + if qu.shape[:-2:-1] != (4,): + raise ValueError('Invalid shape.') + + if P > 0: qu[...,1:4] *= -1 # convert from P=1 to P=-1 + if acceptHomomorph: + qu[qu[...,0] < 0.0] *= -1 + else: + if np.any(qu[...,0] < 0.0): + raise ValueError('Quaternion with negative first (real) component.') + if not np.all(np.isclose(np.linalg.norm(qu,axis=-1), 1.0)): + raise ValueError('Quaternion is not of unit length.') return Rotation(qu) @staticmethod - def fromEulers(eulers, - degrees = False): + def from_Eulers(eulers, + degrees = False): + + eu = np.array(eulers,dtype=float) + if eu.shape[:-2:-1] != (3,): + raise ValueError('Invalid shape.') - eu = eulers if isinstance(eulers, np.ndarray) and eulers.dtype == np.dtype(float) \ - else np.array(eulers,dtype=float) eu = np.radians(eu) if degrees else eu - if np.any(eu < 0.0) or np.any(eu > 2.0*np.pi) or eu[1] > np.pi: - raise ValueError('Euler angles outside of [0..2π],[0..π],[0..2π]: {} {} {}.'.format(*eu)) + if np.any(eu < 0.0) or np.any(eu > 2.0*np.pi) or np.any(eu[...,1] > np.pi): # ToDo: No separate check for PHI + raise ValueError('Euler angles outside of [0..2π],[0..π],[0..2π].') return Rotation(Rotation.eu2qu(eu)) @staticmethod - def fromAxisAngle(angleAxis, - degrees = False, - normalise = False, - P = -1): + def from_axis_angle(axis_angle, + degrees = False, + normalise = False, + P = -1): - ax = angleAxis if isinstance(angleAxis, np.ndarray) and angleAxis.dtype == np.dtype(float) \ - else np.array(angleAxis,dtype=float) - if P > 0: ax[0:3] *= -1 # convert from P=1 to P=-1 - if degrees: ax[ 3] 
= np.radians(ax[3]) - if normalise: ax[0:3] /= np.linalg.norm(ax[0:3]) - if ax[3] < 0.0 or ax[3] > np.pi: - raise ValueError('Axis angle rotation angle outside of [0..π]: {}.'.format(ax[3])) - if not np.isclose(np.linalg.norm(ax[0:3]), 1.0): - raise ValueError('Axis angle rotation axis is not of unit length: {} {} {}.'.format(*ax[0:3])) + ax = np.array(axis_angle,dtype=float) + if ax.shape[:-2:-1] != (4,): + raise ValueError('Invalid shape.') + + if P > 0: ax[...,0:3] *= -1 # convert from P=1 to P=-1 + if degrees: ax[..., 3] = np.radians(ax[...,3]) + if normalise: ax[...,0:3] /= np.linalg.norm(ax[...,0:3],axis=-1) + if np.any(ax[...,3] < 0.0) or np.any(ax[...,3] > np.pi): + raise ValueError('Axis angle rotation angle outside of [0..π].') + if not np.all(np.isclose(np.linalg.norm(ax[...,0:3],axis=-1), 1.0)): + raise ValueError('Axis angle rotation axis is not of unit length.') return Rotation(Rotation.ax2qu(ax)) @staticmethod - def fromBasis(basis, - orthonormal = True, - reciprocal = False, - ): + def from_basis(basis, + orthonormal = True, + reciprocal = False): + + om = np.array(basis,dtype=float) + if om.shape[:-3:-1] != (3,3): + raise ValueError('Invalid shape.') - om = basis if isinstance(basis, np.ndarray) else np.array(basis).reshape(3,3) if reciprocal: - om = np.linalg.inv(om.T/np.pi) # transform reciprocal basis set + om = np.linalg.inv(mechanics.transpose(om)/np.pi) # transform reciprocal basis set orthonormal = False # contains stretch if not orthonormal: (U,S,Vh) = np.linalg.svd(om) # singular value decomposition - om = np.dot(U,Vh) - if not np.isclose(np.linalg.det(om),1.0): - raise ValueError('matrix is not a proper rotation: {}.'.format(om)) - if not np.isclose(np.dot(om[0],om[1]), 0.0) \ - or not np.isclose(np.dot(om[1],om[2]), 0.0) \ - or not np.isclose(np.dot(om[2],om[0]), 0.0): - raise ValueError('matrix is not orthogonal: {}.'.format(om)) + om = np.einsum('...ij,...jl->...il',U,Vh) + if not np.all(np.isclose(np.linalg.det(om),1.0)): + raise ValueError('Orientation matrix has determinant ≠ 1.') + if not np.all(np.isclose(np.einsum('...i,...i',om[...,0],om[...,1]), 0.0)) \ + or not np.all(np.isclose(np.einsum('...i,...i',om[...,1],om[...,2]), 0.0)) \ + or not np.all(np.isclose(np.einsum('...i,...i',om[...,2],om[...,0]), 0.0)): + raise ValueError('Orientation matrix is not orthogonal.') return Rotation(Rotation.om2qu(om)) @staticmethod - def fromMatrix(om, - ): + def from_matrix(om): - return Rotation.fromBasis(om) + return Rotation.from_basis(om) @staticmethod - def fromRodrigues(rodrigues, - normalise = False, - P = -1): + def from_Rodrigues(rodrigues, + normalise = False, + P = -1): - ro = rodrigues if isinstance(rodrigues, np.ndarray) and rodrigues.dtype == np.dtype(float) \ - else np.array(rodrigues,dtype=float) - if P > 0: ro[0:3] *= -1 # convert from P=1 to P=-1 - if normalise: ro[0:3] /= np.linalg.norm(ro[0:3]) - if not np.isclose(np.linalg.norm(ro[0:3]), 1.0): - raise ValueError('Rodrigues rotation axis is not of unit length: {} {} {}.'.format(*ro[0:3])) - if ro[3] < 0.0: - raise ValueError('Rodrigues rotation angle not positive: {}.'.format(ro[3])) + ro = np.array(rodrigues,dtype=float) + if ro.shape[:-2:-1] != (4,): + raise ValueError('Invalid shape.') + + if P > 0: ro[...,0:3] *= -1 # convert from P=1 to P=-1 + if normalise: ro[...,0:3] /= np.linalg.norm(ro[...,0:3],axis=-1) + if np.any(ro[...,3] < 0.0): + raise ValueError('Rodrigues vector rotation angle not positive.') + if not np.all(np.isclose(np.linalg.norm(ro[...,0:3],axis=-1), 1.0)): + raise 
ValueError('Rodrigues vector rotation axis is not of unit length.') return Rotation(Rotation.ro2qu(ro)) @staticmethod - def fromHomochoric(homochoric, - P = -1): + def from_homochoric(homochoric, + P = -1): + + ho = np.array(homochoric,dtype=float) + if ho.shape[:-2:-1] != (3,): + raise ValueError('Invalid shape.') - ho = homochoric if isinstance(homochoric, np.ndarray) and homochoric.dtype == np.dtype(float) \ - else np.array(homochoric,dtype=float) if P > 0: ho *= -1 # convert from P=1 to P=-1 - if np.linalg.norm(ho) > (3.*np.pi/4.)**(1./3.)+1e-9: - raise ValueError('Coordinate outside of the sphere: {} {} {}.'.format(ho)) + if np.any(np.linalg.norm(ho,axis=-1) > (3.*np.pi/4.)**(1./3.)+1e-9): + raise ValueError('Homochoric coordinate outside of the sphere.') return Rotation(Rotation.ho2qu(ho)) @@ -359,11 +385,12 @@ class Rotation: def fromCubochoric(cubochoric, P = -1): - cu = cubochoric if isinstance(cubochoric, np.ndarray) and cubochoric.dtype == np.dtype(float) \ - else np.array(cubochoric,dtype=float) + cu = np.array(cubochoric,dtype=float) + if cu.shape[:-2:-1] != (3,): + raise ValueError('Invalid shape.') if np.abs(np.max(cu))>np.pi**(2./3.) * 0.5+1e-9: - raise ValueError('Coordinate outside of the cube: {} {} {}.'.format(*cu)) + raise ValueError('Cubochoric coordinate outside of the cube: {} {} {}.'.format(*cu)) ho = Rotation.cu2ho(cu) if P > 0: ho *= -1 # convert from P=1 to P=-1 @@ -403,17 +430,34 @@ class Rotation: return Rotation.fromQuaternion(np.real(vec.T[eig.argmax()]),acceptHomomorph = True) - @staticmethod - def fromRandom(): - r = np.random.random(3) - A = np.sqrt(r[2]) - B = np.sqrt(1.0-r[2]) - return Rotation(np.array([np.cos(2.0*np.pi*r[0])*A, - np.sin(2.0*np.pi*r[1])*B, - np.cos(2.0*np.pi*r[1])*B, - np.sin(2.0*np.pi*r[0])*A])).standardize() + def from_random(shape=None): + if shape is None: + r = np.random.random(3) + elif hasattr(shape, '__iter__'): + r = np.random.random(tuple(shape)+(3,)) + else: + r = np.random.random((shape,3)) + A = np.sqrt(r[...,2]) + B = np.sqrt(1.0-r[...,2]) + q = np.stack([np.cos(2.0*np.pi*r[...,0])*A, + np.sin(2.0*np.pi*r[...,1])*B, + np.cos(2.0*np.pi*r[...,1])*B, + np.sin(2.0*np.pi*r[...,0])*A],axis=-1) + + return Rotation(q.reshape(r.shape[:-1]+(4,)) if shape is not None else q).standardize() + + + # for compatibility (old names do not follow convention) + fromQuaternion = from_quaternion + fromEulers = from_Eulers + fromAxisAngle = from_axis_angle + fromBasis = from_basis + fromMatrix = from_matrix + fromRodrigues = from_Rodrigues + fromHomochoric = from_homochoric + fromRandom = from_random #################################################################################################### # Code below available according to the following conditions on https://github.com/MarDiehl/3Drotations @@ -808,12 +852,11 @@ class Rotation: c = np.cos(ax[3]*0.5) s = np.sin(ax[3]*0.5) qu = np.array([ c, ax[0]*s, ax[1]*s, ax[2]*s ]) - return qu else: c = np.cos(ax[...,3:4]*.5) s = np.sin(ax[...,3:4]*.5) qu = np.where(np.abs(ax[...,3:4])<1.e-6,[1.0, 0.0, 0.0, 0.0],np.block([c, ax[...,:3]*s])) - return qu + return qu @staticmethod def ax2om(ax): @@ -859,7 +902,7 @@ class Rotation: # 180 degree case ro += [np.inf] if np.isclose(ax[3],np.pi,atol=1.0e-15,rtol=0.0) else \ [np.tan(ax[3]*0.5)] - return np.array(ro) + ro = np.array(ro) else: ro = np.block([ax[...,:3], np.where(np.isclose(ax[...,3:4],np.pi,atol=1.e-15,rtol=.0), @@ -867,7 +910,7 @@ class Rotation: np.tan(ax[...,3:4]*0.5)) ]) ro[np.abs(ax[...,3])<1.e-6] = [.0,.0,_P,.0] - return ro + return ro 
@staticmethod def ax2ho(ax): @@ -875,11 +918,10 @@ class Rotation: if len(ax.shape) == 1: f = (0.75 * ( ax[3] - np.sin(ax[3]) ))**(1.0/3.0) ho = ax[0:3] * f - return ho else: f = (0.75 * ( ax[...,3:4] - np.sin(ax[...,3:4]) ))**(1.0/3.0) ho = ax[...,:3] * f - return ho + return ho @staticmethod def ax2cu(ax): @@ -936,7 +978,6 @@ class Rotation: f = np.where(np.isfinite(ro[...,3:4]),2.0*np.arctan(ro[...,3:4]) -np.sin(2.0*np.arctan(ro[...,3:4])),np.pi) ho = np.where(np.broadcast_to(np.sum(ro[...,0:3]**2.0,axis=-1,keepdims=True) < 1.e-6,ro[...,0:3].shape), np.zeros(3), ro[...,0:3]* (0.75*f)**(1.0/3.0)) - return ho @staticmethod @@ -1010,7 +1051,7 @@ class Rotation: if len(ho.shape) == 1: return ball_to_cube(ho) else: - raise NotImplementedError + raise NotImplementedError('Support for multiple rotations missing') #---------- Cubochoric ---------- @@ -1045,4 +1086,4 @@ class Rotation: if len(cu.shape) == 1: return cube_to_ball(cu) else: - raise NotImplementedError + raise NotImplementedError('Support for multiple rotations missing') diff --git a/python/damask/mechanics.py b/python/damask/mechanics.py index e19f140fb..c81399d94 100644 --- a/python/damask/mechanics.py +++ b/python/damask/mechanics.py @@ -135,16 +135,16 @@ def PK2(P,F): Parameters ---------- - P : numpy.ndarray of shape (:,3,3) or (3,3) + P : numpy.ndarray of shape (...,3,3) or (3,3) First Piola-Kirchhoff stress. - F : numpy.ndarray of shape (:,3,3) or (3,3) + F : numpy.ndarray of shape (...,3,3) or (3,3) Deformation gradient. """ if _np.shape(F) == _np.shape(P) == (3,3): S = _np.dot(_np.linalg.inv(F),P) else: - S = _np.einsum('ijk,ikl->ijl',_np.linalg.inv(F),P) + S = _np.einsum('...jk,...kl->...jl',_np.linalg.inv(F),P) return symmetric(S) @@ -241,7 +241,7 @@ def symmetric(T): Parameters ---------- - T : numpy.ndarray of shape (:,3,3) or (3,3) + T : numpy.ndarray of shape (...,3,3) or (3,3) Tensor of which the symmetrized values are computed. """ @@ -254,12 +254,12 @@ def transpose(T): Parameters ---------- - T : numpy.ndarray of shape (:,3,3) or (3,3) + T : numpy.ndarray of shape (...,3,3) or (3,3) Tensor of which the transpose is computed. 
""" return T.T if _np.shape(T) == (3,3) else \ - _np.transpose(T,(0,2,1)) + _np.swapaxes(T,axis2=-2,axis1=-1) def _polar_decomposition(T,requested): diff --git a/python/tests/test_Rotation.py b/python/tests/test_Rotation.py index 2ac819f4c..302e895ee 100644 --- a/python/tests/test_Rotation.py +++ b/python/tests/test_Rotation.py @@ -157,6 +157,30 @@ class TestRotation: print(m,o,rot.asQuaternion()) assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9 + @pytest.mark.parametrize('function',[Rotation.from_quaternion, + Rotation.from_Eulers, + Rotation.from_axis_angle, + Rotation.from_matrix, + Rotation.from_Rodrigues, + Rotation.from_homochoric]) + def test_invalid_shape(self,function): + invalid_shape = np.random.random(np.random.randint(8,32,(3))) + with pytest.raises(ValueError): + function(invalid_shape) + + @pytest.mark.parametrize('function,invalid',[(Rotation.from_quaternion, np.array([-1,0,0,0])), + (Rotation.from_quaternion, np.array([1,1,1,0])), + (Rotation.from_Eulers, np.array([1,4,0])), + (Rotation.from_axis_angle, np.array([1,0,0,4])), + (Rotation.from_axis_angle, np.array([1,1,0,1])), + (Rotation.from_matrix, np.random.rand(3,3)), + (Rotation.from_Rodrigues, np.array([1,0,0,-1])), + (Rotation.from_Rodrigues, np.array([1,1,0,1])), + (Rotation.from_homochoric, np.array([2,2,2])) ]) + def test_invalid(self,function,invalid): + with pytest.raises(ValueError): + function(invalid) + @pytest.mark.parametrize('conversion',[Rotation.qu2om, Rotation.qu2eu, Rotation.qu2ax, diff --git a/python/tests/test_mechanics.py b/python/tests/test_mechanics.py index ac2ac419e..f7128401f 100644 --- a/python/tests/test_mechanics.py +++ b/python/tests/test_mechanics.py @@ -12,7 +12,6 @@ class TestMechanics: @pytest.mark.parametrize('function',[mechanics.deviatoric_part, mechanics.eigenvalues, mechanics.eigenvectors, - mechanics.deviatoric_part, mechanics.left_stretch, mechanics.maximum_shear, mechanics.Mises_strain, @@ -42,12 +41,13 @@ class TestMechanics: assert np.allclose(mechanics.strain_tensor(F,t,m)[self.c], mechanics.strain_tensor(F[self.c],t,m)) - - def test_Cauchy(self): - """Ensure Cauchy stress is symmetrized 1. Piola-Kirchhoff stress for no deformation.""" + @pytest.mark.parametrize('function',[mechanics.Cauchy, + mechanics.PK2, + ]) + def test_stress_measures(self,function): + """Ensure that all stress measures are equivalent for no deformation.""" P = np.random.rand(self.n,3,3) - assert np.allclose(mechanics.Cauchy(P,np.broadcast_to(np.eye(3),(self.n,3,3))), - mechanics.symmetric(P)) + assert np.allclose(function(P,np.broadcast_to(np.eye(3),(self.n,3,3))),mechanics.symmetric(P)) def test_deviatoric_part(self): I_n = np.broadcast_to(np.eye(3),(self.n,3,3)) @@ -63,12 +63,6 @@ class TestMechanics: assert np.allclose(np.matmul(R,U), np.matmul(V,R)) - def test_PK2(self): - """Ensure 2. Piola-Kirchhoff stress is symmetrized 1. Piola-Kirchhoff stress for no deformation.""" - P = np.random.rand(self.n,3,3) - assert np.allclose(mechanics.PK2(P,np.broadcast_to(np.eye(3),(self.n,3,3))), - mechanics.symmetric(P)) - def test_strain_tensor_no_rotation(self): """Ensure that left and right stretch give same results for no rotation.""" F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.rand(self.n,3,3) diff --git a/src/CPFEM.f90 b/src/CPFEM.f90 index c590a86b5..5394fc90c 100644 --- a/src/CPFEM.f90 +++ b/src/CPFEM.f90 @@ -73,28 +73,24 @@ subroutine CPFEM_initAll(el,ip) integer(pInt), intent(in) :: el, & !< FE el number ip !< FE integration point number - !$OMP CRITICAL(init) - if (.not. 
CPFEM_init_done) then - call DAMASK_interface_init - call prec_init - call IO_init - call numerics_init - call debug_init - call config_init - call math_init - call rotations_init - call HDF5_utilities_init - call results_init - call discretization_marc_init(ip, el) - call lattice_init - call material_init - call constitutive_init - call crystallite_init - call homogenization_init - call CPFEM_init - CPFEM_init_done = .true. - endif - !$OMP END CRITICAL(init) + CPFEM_init_done = .true. + call DAMASK_interface_init + call prec_init + call IO_init + call numerics_init + call debug_init + call config_init + call math_init + call rotations_init + call HDF5_utilities_init + call results_init + call discretization_marc_init(ip, el) + call lattice_init + call material_init + call constitutive_init + call crystallite_init + call homogenization_init + call CPFEM_init end subroutine CPFEM_initAll diff --git a/src/DAMASK_marc.f90 b/src/DAMASK_marc.f90 index 9fd41459b..fa0711b02 100644 --- a/src/DAMASK_marc.f90 +++ b/src/DAMASK_marc.f90 @@ -261,11 +261,10 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, & endif !$ defaultNumThreadsInt = omp_get_num_threads() ! remember number of threads set by Marc + !$ call omp_set_num_threads(DAMASK_NumThreadsInt) ! set number of threads for parallel execution set by DAMASK_NUM_THREADS if (.not. CPFEM_init_done) call CPFEM_initAll(m(1),nn) - !$ call omp_set_num_threads(DAMASK_NumThreadsInt) ! set number of threads for parallel execution set by DAMASK_NUM_THREADS - computationMode = 0 ! save initialization value, since it does not result in any calculation if (lovl == 4 ) then ! jacobian requested by marc if (timinc < theDelta .and. theInc == inc .and. lastLovl /= lovl) & ! first after cutback diff --git a/src/crystallite.f90 b/src/crystallite.f90 index d8cd5ab98..929fec862 100644 --- a/src/crystallite.f90 +++ b/src/crystallite.f90 @@ -303,6 +303,7 @@ function crystallite_stress(dummyArgumentToPreventInternalCompilerErrorWithGCC) s logical, dimension(homogenization_maxNgrains,discretization_nIP,discretization_nElem) :: todo !ToDo: need to set some values to false for different Ngrains + todo = .false. #ifdef DEBUG if (iand(debug_level(debug_crystallite),debug_levelSelective) /= 0 & .and. 
FEsolving_execElem(1) <= debug_e & @@ -1240,10 +1241,10 @@ subroutine integrateStateAdaptiveEuler(todo) c = material_phaseMemberAt(g,i,e) broken = constitutive_collectDotState(crystallite_S(1:3,1:3,g,i,e), & - crystallite_partionedF0, & - crystallite_Fi(1:3,1:3,g,i,e), & - crystallite_partionedFp0, & - crystallite_subdt(g,i,e), g,i,e,p,c) + crystallite_partionedF0, & + crystallite_Fi(1:3,1:3,g,i,e), & + crystallite_partionedFp0, & + crystallite_subdt(g,i,e), g,i,e,p,c) if(broken) cycle sizeDotState = plasticState(p)%sizeDotState @@ -1269,10 +1270,10 @@ subroutine integrateStateAdaptiveEuler(todo) if(broken) cycle broken = constitutive_collectDotState(crystallite_S(1:3,1:3,g,i,e), & - crystallite_partionedF0, & - crystallite_Fi(1:3,1:3,g,i,e), & - crystallite_partionedFp0, & - crystallite_subdt(g,i,e), g,i,e,p,c) + crystallite_partionedF0, & + crystallite_Fi(1:3,1:3,g,i,e), & + crystallite_partionedFp0, & + crystallite_subdt(g,i,e), g,i,e,p,c) if(broken) cycle @@ -1301,9 +1302,9 @@ subroutine integrateStateAdaptiveEuler(todo) end subroutine integrateStateAdaptiveEuler -!-------------------------------------------------------------------------------------------------- -!> @brief integrate stress, state with 4th order explicit Runge Kutta method -!-------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- +!> @brief Integrate state (including stress integration) with the classic Runge Kutta method +!--------------------------------------------------------------------------------------------------- subroutine integrateStateRK4(todo) logical, dimension(:,:,:), intent(in) :: todo @@ -1312,157 +1313,59 @@ subroutine integrateStateRK4(todo) A = reshape([& 0.5_pReal, 0.0_pReal, 0.0_pReal, & 0.0_pReal, 0.5_pReal, 0.0_pReal, & - 0.0_pReal, 0.0_pReal, 1.0_pReal], & - [3,3]) + 0.0_pReal, 0.0_pReal, 1.0_pReal],& + shape(A)) real(pReal), dimension(3), parameter :: & - CC = [0.5_pReal, 0.5_pReal, 1.0_pReal] ! factor giving the fraction of the original timestep used for Runge Kutta Integration + C = [0.5_pReal, 0.5_pReal, 1.0_pReal] real(pReal), dimension(4), parameter :: & - B = [1.0_pReal/6.0_pReal, 1.0_pReal/3.0_pReal, 1.0_pReal/3.0_pReal, 1.0_pReal/6.0_pReal] ! weight of slope used for Runge Kutta integration (final weight divided by 6) + B = [1.0_pReal/6.0_pReal, 1.0_pReal/3.0_pReal, 1.0_pReal/3.0_pReal, 1.0_pReal/6.0_pReal] - integer :: & - e, & ! element index in element loop - i, & ! integration point index in ip loop - g, & ! grain index in grain loop - stage, & ! stage index in integration stage loop - n, & - p, & - c, & - s, & - sizeDotState - logical :: & - nonlocalBroken, broken - - real(pReal), dimension(constitutive_source_maxSizeDotState,4,maxval(phase_Nsources)) :: source_RK4dotState - real(pReal), dimension(constitutive_plasticity_maxSizeDotState,4) :: plastic_RK4dotState - nonlocalBroken = .false. - !$OMP PARALLEL DO PRIVATE(sizeDotState,p,c,source_RK4dotState,plastic_RK4dotState,broken) - do e = FEsolving_execElem(1),FEsolving_execElem(2) - do i = FEsolving_execIP(1),FEsolving_execIP(2) - do g = 1,homogenization_Ngrains(material_homogenizationAt(e)) - broken = .false. - p = material_phaseAt(g,e) - if(todo(g,i,e) .and. .not. (nonlocalBroken .and. 
plasticState(p)%nonlocal)) then - - c = material_phaseMemberAt(g,i,e) - - - broken = constitutive_collectDotState(crystallite_S(1:3,1:3,g,i,e), & - crystallite_partionedF0, & - crystallite_Fi(1:3,1:3,g,i,e), & - crystallite_partionedFp0, & - crystallite_subdt(g,i,e), g,i,e,p,c) - if(broken) cycle - - do stage = 1,3 - sizeDotState = plasticState(p)%sizeDotState - plastic_RK4dotState(1:sizeDotState,stage) = plasticState(p)%dotState(:,c) - plasticState(p)%dotState(:,c) = A(1,stage) * plastic_RK4dotState(1:sizeDotState,1) - do s = 1, phase_Nsources(p) - sizeDotState = sourceState(p)%p(s)%sizeDotState - source_RK4dotState(1:sizeDotState,stage,s) = sourceState(p)%p(s)%dotState(:,c) - sourceState(p)%p(s)%dotState(:,c) = A(1,stage) * source_RK4dotState(1:sizeDotState,1,s) - enddo - - do n = 2, stage - sizeDotState = plasticState(p)%sizeDotState - plasticState(p)%dotState(:,c) = plasticState(p)%dotState(:,c) & - + A(n,stage) * plastic_RK4dotState(1:sizeDotState,n) - do s = 1, phase_Nsources(p) - sizeDotState = sourceState(p)%p(s)%sizeDotState - sourceState(p)%p(s)%dotState(:,c) = sourceState(p)%p(s)%dotState(:,c) & - + A(n,stage) * source_RK4dotState(1:sizeDotState,n,s) - enddo - enddo - - sizeDotState = plasticState(p)%sizeDotState - plasticState(p)%state(1:sizeDotState,c) = plasticState(p)%subState0(1:sizeDotState,c) & - + plasticState(p)%dotState (1:sizeDotState,c) & - * crystallite_subdt(g,i,e) - do s = 1, phase_Nsources(p) - sizeDotState = sourceState(p)%p(s)%sizeDotState - sourceState(p)%p(s)%state(1:sizeDotState,c) = sourceState(p)%p(s)%subState0(1:sizeDotState,c) & - + sourceState(p)%p(s)%dotState (1:sizeDotState,c) & - * crystallite_subdt(g,i,e) - enddo - - broken = integrateStress(g,i,e,CC(stage)) - if(broken) exit - - broken = constitutive_collectDotState(crystallite_S(1:3,1:3,g,i,e), & - crystallite_partionedF0, & - crystallite_Fi(1:3,1:3,g,i,e), & - crystallite_partionedFp0, & - crystallite_subdt(g,i,e)*CC(stage), g,i,e,p,c) - if(broken) exit - - enddo - if(broken) cycle - - sizeDotState = plasticState(p)%sizeDotState - - plastic_RK4dotState(1:sizeDotState,4) = plasticState (p)%dotState(:,c) - - plasticState(p)%dotState(:,c) = matmul(plastic_RK4dotState(1:sizeDotState,1:4),B) - plasticState(p)%state(1:sizeDotState,c) = plasticState(p)%subState0(1:sizeDotState,c) & - + plasticState(p)%dotState (1:sizeDotState,c) & - * crystallite_subdt(g,i,e) - - do s = 1, phase_Nsources(p) - sizeDotState = sourceState(p)%p(s)%sizeDotState - - source_RK4dotState(1:sizeDotState,4,s) = sourceState(p)%p(s)%dotState(:,c) - - sourceState(p)%p(s)%dotState(:,c) = matmul(source_RK4dotState(1:sizeDotState,1:4,s),B) - sourceState(p)%p(s)%state(1:sizeDotState,c) = sourceState(p)%p(s)%subState0(1:sizeDotState,c) & - + sourceState(p)%p(s)%dotState (1:sizeDotState,c) & - * crystallite_subdt(g,i,e) - enddo - - broken = constitutive_deltaState(crystallite_S(1:3,1:3,g,i,e), & - crystallite_Fe(1:3,1:3,g,i,e), & - crystallite_Fi(1:3,1:3,g,i,e),g,i,e,p,c) - if(broken) cycle - - broken = integrateStress(g,i,e) - crystallite_converged(g,i,e) = .not. broken - - endif - if(broken .and. plasticState(p)%nonlocal) nonlocalBroken = .true. 
- enddo; enddo; enddo - !$OMP END PARALLEL DO - - if (nonlocalBroken) call nonlocalConvergenceCheck + call integrateStateRK(todo,A,B,C) end subroutine integrateStateRK4 -!-------------------------------------------------------------------------------------------------- -!> @brief integrate stress, state with 5th order Runge-Kutta Cash-Karp method with -!> adaptive step size (use 5th order solution to advance = "local extrapolation") -!-------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- +!> @brief Integrate state (including stress integration) with the Cash-Karp method +!--------------------------------------------------------------------------------------------------- subroutine integrateStateRKCK45(todo) logical, dimension(:,:,:), intent(in) :: todo real(pReal), dimension(5,5), parameter :: & A = reshape([& - .2_pReal, .075_pReal, .3_pReal, -11.0_pReal/54.0_pReal, 1631.0_pReal/55296.0_pReal, & - .0_pReal, .225_pReal, -.9_pReal, 2.5_pReal, 175.0_pReal/512.0_pReal, & - .0_pReal, .0_pReal, 1.2_pReal, -70.0_pReal/27.0_pReal, 575.0_pReal/13824.0_pReal, & - .0_pReal, .0_pReal, .0_pReal, 35.0_pReal/27.0_pReal, 44275.0_pReal/110592.0_pReal, & - .0_pReal, .0_pReal, .0_pReal, .0_pReal, 253.0_pReal/4096.0_pReal], & - [5,5], order=[2,1]) !< coefficients in Butcher tableau (used for preliminary integration in stages 2 to 6) - + 1._pReal/5._pReal, .0_pReal, .0_pReal, .0_pReal, .0_pReal, & + 3._pReal/40._pReal, 9._pReal/40._pReal, .0_pReal, .0_pReal, .0_pReal, & + 3._pReal/10._pReal, -9._pReal/10._pReal, 6._pReal/5._pReal, .0_pReal, .0_pReal, & + -11._pReal/54._pReal, 5._pReal/2._pReal, -70.0_pReal/27.0_pReal, 35.0_pReal/27.0_pReal, .0_pReal, & + 1631._pReal/55296._pReal,175._pReal/512._pReal,575._pReal/13824._pReal,44275._pReal/110592._pReal,253._pReal/4096._pReal],& + shape(A)) + real(pReal), dimension(5), parameter :: & + C = [0.2_pReal, 0.3_pReal, 0.6_pReal, 1.0_pReal, 0.875_pReal] real(pReal), dimension(6), parameter :: & B = & [37.0_pReal/378.0_pReal, .0_pReal, 250.0_pReal/621.0_pReal, & - 125.0_pReal/594.0_pReal, .0_pReal, 512.0_pReal/1771.0_pReal], & !< coefficients in Butcher tableau (used for final integration and error estimate) + 125.0_pReal/594.0_pReal, .0_pReal, 512.0_pReal/1771.0_pReal], & DB = B - & [2825.0_pReal/27648.0_pReal, .0_pReal, 18575.0_pReal/48384.0_pReal,& - 13525.0_pReal/55296.0_pReal, 277.0_pReal/14336.0_pReal, 0.25_pReal] !< coefficients in Butcher tableau (used for final integration and error estimate) + 13525.0_pReal/55296.0_pReal, 277.0_pReal/14336.0_pReal, 1._pReal/4._pReal] - real(pReal), dimension(5), parameter :: & - CC = [0.2_pReal, 0.3_pReal, 0.6_pReal, 1.0_pReal, 0.875_pReal] !< coefficients in Butcher tableau (fractions of original time step in stages 2 to 6) + call integrateStateRK(todo,A,B,C,DB) + +end subroutine integrateStateRKCK45 + + +!-------------------------------------------------------------------------------------------------- +!> @brief Integrate state (including stress integration) with an explicit Runge-Kutta method or an +!! 
embedded explicit Runge-Kutta method +!-------------------------------------------------------------------------------------------------- +subroutine integrateStateRK(todo,A,B,CC,DB) + + logical, dimension(:,:,:), intent(in) :: todo + + real(pReal), dimension(:,:), intent(in) :: A + real(pReal), dimension(:), intent(in) :: B, CC + real(pReal), dimension(:), intent(in), optional :: DB integer :: & e, & ! element index in element loop @@ -1476,8 +1379,8 @@ subroutine integrateStateRKCK45(todo) sizeDotState logical :: & nonlocalBroken, broken - real(pReal), dimension(constitutive_source_maxSizeDotState,6,maxval(phase_Nsources)) :: source_RKdotState - real(pReal), dimension(constitutive_plasticity_maxSizeDotState,6) :: plastic_RKdotState + real(pReal), dimension(constitutive_source_maxSizeDotState,size(B),maxval(phase_Nsources)) :: source_RKdotState + real(pReal), dimension(constitutive_plasticity_maxSizeDotState,size(B)) :: plastic_RKdotState nonlocalBroken = .false. !$OMP PARALLEL DO PRIVATE(sizeDotState,p,c,plastic_RKdotState,source_RKdotState,broken) @@ -1491,13 +1394,13 @@ subroutine integrateStateRKCK45(todo) c = material_phaseMemberAt(g,i,e) broken = constitutive_collectDotState(crystallite_S(1:3,1:3,g,i,e), & - crystallite_partionedF0, & - crystallite_Fi(1:3,1:3,g,i,e), & - crystallite_partionedFp0, & - crystallite_subdt(g,i,e), g,i,e,p,c) + crystallite_partionedF0, & + crystallite_Fi(1:3,1:3,g,i,e), & + crystallite_partionedFp0, & + crystallite_subdt(g,i,e), g,i,e,p,c) if(broken) cycle - do stage = 1,5 + do stage = 1,size(A,1) sizeDotState = plasticState(p)%sizeDotState plastic_RKdotState(1:sizeDotState,stage) = plasticState(p)%dotState(:,c) plasticState(p)%dotState(:,c) = A(1,stage) * plastic_RKdotState(1:sizeDotState,1) @@ -1533,10 +1436,10 @@ subroutine integrateStateRKCK45(todo) if(broken) exit broken = constitutive_collectDotState(crystallite_S(1:3,1:3,g,i,e), & - crystallite_partionedF0, & - crystallite_Fi(1:3,1:3,g,i,e), & - crystallite_partionedFp0, & - crystallite_subdt(g,i,e)*CC(stage), g,i,e,p,c) + crystallite_partionedF0, & + crystallite_Fi(1:3,1:3,g,i,e), & + crystallite_partionedFp0, & + crystallite_subdt(g,i,e)*CC(stage), g,i,e,p,c) if(broken) exit enddo @@ -1544,29 +1447,30 @@ subroutine integrateStateRKCK45(todo) sizeDotState = plasticState(p)%sizeDotState - plastic_RKdotState(1:sizeDotState,6) = plasticState (p)%dotState(:,c) - plasticState(p)%dotState(:,c) = matmul(plastic_RKdotState(1:sizeDotState,1:6),B) + plastic_RKdotState(1:sizeDotState,size(B)) = plasticState (p)%dotState(:,c) + plasticState(p)%dotState(:,c) = matmul(plastic_RKdotState(1:sizeDotState,1:size(B)),B) plasticState(p)%state(1:sizeDotState,c) = plasticState(p)%subState0(1:sizeDotState,c) & + plasticState(p)%dotState (1:sizeDotState,c) & * crystallite_subdt(g,i,e) - broken = .not. converged( matmul(plastic_RKdotState(1:sizeDotState,1:6),DB) & - * crystallite_subdt(g,i,e), & - plasticState(p)%state(1:sizeDotState,c), & - plasticState(p)%atol(1:sizeDotState)) + if(present(DB)) & + broken = .not. 
converged( matmul(plastic_RKdotState(1:sizeDotState,1:size(DB)),DB) & + * crystallite_subdt(g,i,e), & + plasticState(p)%state(1:sizeDotState,c), & + plasticState(p)%atol(1:sizeDotState)) do s = 1, phase_Nsources(p) sizeDotState = sourceState(p)%p(s)%sizeDotState - source_RKdotState(1:sizeDotState,6,s) = sourceState(p)%p(s)%dotState(:,c) - sourceState(p)%p(s)%dotState(:,c) = matmul(source_RKdotState(1:sizeDotState,1:6,s),B) + source_RKdotState(1:sizeDotState,size(B),s) = sourceState(p)%p(s)%dotState(:,c) + sourceState(p)%p(s)%dotState(:,c) = matmul(source_RKdotState(1:sizeDotState,1:size(B),s),B) sourceState(p)%p(s)%state(1:sizeDotState,c) = sourceState(p)%p(s)%subState0(1:sizeDotState,c) & + sourceState(p)%p(s)%dotState (1:sizeDotState,c) & * crystallite_subdt(g,i,e) - broken = broken .and. .not. & - converged(matmul(source_RKdotState(1:sizeDotState,1:6,s),DB) & - * crystallite_subdt(g,i,e), & - sourceState(p)%p(s)%state(1:sizeDotState,c), & - sourceState(p)%p(s)%atol(1:sizeDotState)) + if(present(DB)) & + broken = broken .or. .not. converged(matmul(source_RKdotState(1:sizeDotState,1:size(DB),s),DB) & + * crystallite_subdt(g,i,e), & + sourceState(p)%p(s)%state(1:sizeDotState,c), & + sourceState(p)%p(s)%atol(1:sizeDotState)) enddo if(broken) cycle @@ -1585,7 +1489,7 @@ subroutine integrateStateRKCK45(todo) if(nonlocalBroken) call nonlocalConvergenceCheck -end subroutine integrateStateRKCK45 +end subroutine integrateStateRK !--------------------------------------------------------------------------------------------------
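Note on the python/damask/mechanics.py hunks: replacing the fixed 'ijk,ikl->ijl' einsum subscripts and the (0,2,1) transpose with ellipsis-based equivalents lets tensor fields with any number of leading dimensions broadcast through, not just a single flat batch axis. A minimal numpy sketch of that pattern (illustrative only; pk2_like and its docstring are stand-ins, not damask.mechanics.PK2):

    import numpy as np

    def pk2_like(P, F):
        """Symmetrized inv(F).P for inputs of shape (3,3) or (...,3,3)."""
        if P.shape == F.shape == (3, 3):                 # single tensor pair
            S = np.dot(np.linalg.inv(F), P)
        else:                                            # arbitrary leading dimensions broadcast
            S = np.einsum('...jk,...kl->...jl', np.linalg.inv(F), P)
        return 0.5 * (S + np.swapaxes(S, -2, -1))        # symmetrize over the last two axes

    P = np.random.rand(4, 2, 3, 3)                       # two leading dimensions, not just one
    F = np.broadcast_to(np.eye(3), P.shape)
    assert np.allclose(pk2_like(P, F), 0.5 * (P + np.swapaxes(P, -2, -1)))

The swapaxes(..., -2, -1) call mirrors the transpose() change in the same diff: it always transposes the last two axes, whatever the leading shape.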
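Note on the crystallite.f90 refactor: integrateStateRK4 and integrateStateRKCK45 now only define their Butcher tableaus (A, B, C and, for the embedded Cash-Karp scheme, DB = B minus the lower-order weights) and delegate to the generic integrateStateRK; DB being optional is what distinguishes a fixed-step from an embedded scheme. Leaving the DAMASK state/stress bookkeeping aside, the driving idea can be sketched for a plain ODE y' = f(t,y). Everything below is an illustrative assumption rather than DAMASK code, and A is written row by row here, i.e. transposed relative to the column-per-stage A(:,stage) layout of the Fortran:

    import numpy as np

    def rk_step(f, t, y, h, A, B, C, DB=None):
        """One explicit Runge-Kutta step defined by a Butcher tableau.

        A[i][j] weights slope j for stage i+2, C[i] is that stage's time fraction,
        B weights all slopes for the final update; DB (difference of the embedded
        weight vectors), if given, yields a local error estimate.
        """
        k = [f(t, y)]                                    # slope at the start of the step
        for i, c in enumerate(C):                        # stages 2 .. len(C)+1
            y_stage = y + h * sum(A[i][j] * k[j] for j in range(i + 1))
            k.append(f(t + c * h, y_stage))
        y_new = y + h * sum(b * ki for b, ki in zip(B, k))
        err = h * sum(d * ki for d, ki in zip(DB, k)) if DB is not None else None
        return y_new, err

    # classic RK4 (no embedded error estimate, hence no DB)
    A4 = [[0.5], [0.0, 0.5], [0.0, 0.0, 1.0]]
    B4 = [1/6, 1/3, 1/3, 1/6]
    C4 = [0.5, 0.5, 1.0]

    # Cash-Karp, matching the fractions in the hunk above
    A_ck = [[1/5],
            [3/40, 9/40],
            [3/10, -9/10, 6/5],
            [-11/54, 5/2, -70/27, 35/27],
            [1631/55296, 175/512, 575/13824, 44275/110592, 253/4096]]
    C_ck = [1/5, 3/10, 3/5, 1.0, 7/8]
    B_ck = [37/378, 0.0, 250/621, 125/594, 0.0, 512/1771]
    DB_ck = [b - bl for b, bl in zip(B_ck, [2825/27648, 0.0, 18575/48384,
                                            13525/55296, 277/14336, 1/4])]

    f = lambda t, y: -y                                  # test ODE with solution exp(-t)
    y4, _    = rk_step(f, 0.0, np.array([1.0]), 0.1, A4,   B4,   C4)
    yck, err = rk_step(f, 0.0, np.array([1.0]), 0.1, A_ck, B_ck, C_ck, DB_ck)
    assert abs(y4[0] - np.exp(-0.1)) < 1.0e-6 and abs(err[0]) < 1.0e-6

In DAMASK itself the error estimate is not compared against a fixed number as in this sketch: the hunk above feeds matmul(RKdotState, DB)*subdt into converged() with the per-component atol of the plastic and source states, and the corrected .or. .not. accumulation marks the point broken as soon as any of those checks fails.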