Merge branch 'development' into less-shell-scripts

Martin Diehl 2020-06-26 21:59:50 +02:00
commit 642931c203
39 changed files with 900 additions and 929 deletions

View File

@@ -1 +1 @@
-v2.0.3-2654-g5c544a6e
+v2.0.3-2706-g8ce5da55

View File

@@ -27,7 +27,7 @@ SolidSolutionStrength 0.0 # Strength due to elements in solid solution
 ### Dislocation glide parameters ###
 #per family
-Nslip        12 0
+Nslip        12
 slipburgers  2.72e-10   # Burgers vector of slip system [m]
 rhoedge0     1.0e12     # Initial edge dislocation density [m/m**3]
 rhoedgedip0  1.0        # Initial edged dipole dislocation density [m/m**3]

View File

@ -1,56 +1,30 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# Makes postprocessing routines accessible from everywhere. # Makes postprocessing routines accessible from everywhere.
import os
import sys import sys
from pathlib import Path
import damask import damask
damaskEnv = damask.Environment() env = damask.Environment()
baseDir = damaskEnv.relPath('processing/') bin_dir = env.root_dir/Path('bin')
binDir = damaskEnv.relPath('bin/')
if not os.path.isdir(binDir): if not bin_dir.exists():
os.mkdir(binDir) bin_dir.mkdir()
#define ToDo list
processing_subDirs = ['pre',
'post',
]
sys.stdout.write('\nsymbolic linking...\n') sys.stdout.write('\nsymbolic linking...\n')
for sub_dir in ['pre','post']:
the_dir = env.root_dir/Path('processing')/Path(sub_dir)
for subDir in processing_subDirs: for the_file in the_dir.glob('*.py'):
theDir = os.path.abspath(os.path.join(baseDir,subDir)) src = the_dir/the_file
dst = bin_dir/Path(the_file.with_suffix('').name)
sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n') if dst.is_file(): dst.unlink() # dst.unlink(True) for Python >3.8
dst.symlink_to(src)
for theFile in os.listdir(theDir):
theName,theExt = os.path.splitext(theFile)
if theExt in ['.py']:
src = os.path.abspath(os.path.join(theDir,theFile))
sym_link = os.path.abspath(os.path.join(binDir,theName))
if os.path.lexists(sym_link):
os.remove(sym_link)
output = theName+damask.util.deemph(theExt)
else:
output = damask.util.emph(theName)+damask.util.deemph(theExt)
sys.stdout.write(damask.util.deemph('... ')+output+'\n')
os.symlink(src,sym_link)
sys.stdout.write('\npruning broken links...\n') sys.stdout.write('\npruning broken links...\n')
for filename in bin_dir.glob('*'):
brokenLinks = 0 if not filename.is_file():
filename.unlink
for filename in os.listdir(binDir):
path = os.path.join(binDir,filename)
if os.path.islink(path) and not os.path.exists(path):
sys.stdout.write(' '+damask.util.delete(path)+'\n')
os.remove(path)
brokenLinks += 1
sys.stdout.write(('none.' if brokenLinks == 0 else '')+'\n')
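
Note: the replace-then-link pattern above is easy to get backwards in pathlib; a minimal standalone sketch with hypothetical paths (not part of the commit):

    from pathlib import Path

    src = Path('/tmp/processing/pre/geom_fromVoronoi.py')   # hypothetical script location
    dst = Path('/tmp/bin')/src.with_suffix('').name         # link name without the .py extension
    dst.parent.mkdir(parents=True, exist_ok=True)

    if dst.is_file():
        dst.unlink()        # Python >= 3.8 allows dst.unlink(missing_ok=True) instead of the guard
    dst.symlink_to(src)     # dst is the link, src the target; note the argument order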

View File

@@ -6,7 +6,7 @@ import numpy as np
 from optparse import OptionParser

 import damask

-sys.path.append(damask.solver.Marc().libraryPath())
+sys.path.append(str(damask.solver.Marc().library_path))

 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])
@@ -198,13 +198,14 @@ def add_servoLinks(mfd_data,active=[True,True,True]): # directions on which to
   if mfd_data[i]['uid'] == 1705: del mfd_data[i]
   mfd_data.insert(i, links)

+
 #--------------------------------------------------------------------------------------------------
 #                                MAIN
 #--------------------------------------------------------------------------------------------------

 parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
 Set up servo linking to achieve periodic boundary conditions for a regular hexahedral mesh.
 Use *py_connection to operate on model presently opened in MSC.Mentat.

 """, version = scriptID)

 parser.add_option('-p', '--port',
@@ -229,10 +230,7 @@ if remote and filenames != []:
 if filenames == []: filenames = [None]

 if remote:
-  try:    import py_mentat
-  except:
-    damask.util.croak('no valid Mentat release found.')
-    sys.exit(-1)
+  import py_mentat

   damask.util.report(scriptName, 'waiting to connect...')
   filenames = [os.path.join(tempfile._get_default_tempdir(), next(tempfile._get_candidate_names()) + '.mfd')]
@@ -240,14 +238,14 @@ if remote:
     py_mentat.py_connect('',options.port)
     py_mentat.py_send('*set_save_formatted on')
     py_mentat.py_send('*save_as_model "{}" yes'.format(filenames[0]))
-    py_mentat.py_get_int("nnodes()")                                  # hopefully blocks until file is written
-  except:
-    damask.util.croak('failed. try setting Tools/Python/"Run as Separate Process" & "Initiate".')
-    sys.exit()
+    py_mentat.py_get_int("nnodes()")
+  except py_mentat.InputError as err:
+    damask.util.croak('{}. Try Tools/Python/"Run as Separate Process" & "Initiate".'.format(err))
+    sys.exit(-1)
   damask.util.croak( 'connected...')

 for name in filenames:
-  while remote and not os.path.exists(name): time.sleep(0.5)          # wait for Mentat to write MFD file
+  while remote and not os.path.exists(name): time.sleep(0.5)
   with open( name,'r') if name is not None else sys.stdin as fileIn:
     damask.util.report(scriptName, name)
     mfd = parseMFD(fileIn)
@@ -257,5 +255,4 @@ for name in filenames:
     fileOut.write(asMFD(mfd))

 if remote:
-  try:    py_mentat.py_send('*open_model "{}"'.format(filenames[0]))
-  except: damask.util.croak('lost connection on sending open command for "{}".'.format(filenames[0]))
+  py_mentat.py_send('*open_model "{}"'.format(filenames[0]))

View File

@@ -9,7 +9,7 @@ import damask
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])

-sys.path.append(damask.solver.Marc().libraryPath())
+sys.path.append(str(damask.solver.Marc().library_path))

 #-------------------------------------------------------------------------------------------------
 def outMentat(cmd,locals):

View File

@@ -1,9 +1,9 @@
 """Tools for pre and post processing of DAMASK simulations."""
-import os as _os
+from pathlib import Path as _Path
 import re as _re

 name = 'damask'
-with open(_os.path.join(_os.path.dirname(__file__),'VERSION')) as _f:
+with open(_Path(__file__).parent/_Path('VERSION')) as _f:
     version = _re.sub(r'^v','',_f.readline().strip())

 # make classes directly accessible as damask.Class

View File

@@ -157,7 +157,7 @@ class ASCIItable():
   def head_write(self,
                  header = True):
     """Write current header information (info + labels)."""
-    head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
+    head = [f"{len(self.info)+self.__IO__['labeled']}\theader"] if header else []
     head.append(self.info)
     if self.__IO__['labeled']:
       head.append('\t'.join(map(self._quote,self.tags)))
@@ -209,7 +209,7 @@ class ASCIItable():
             labelList.append(tags[id])
           else:
             label = tags[id][2:]                                             # get label
-            while id < len(tags) and tags[id] == '{}_{}'.format(dim,label):  # check successors
+            while id < len(tags) and tags[id] == f'{dim}_{label}':           # check successors
               id += 1                                                        # next label...
               dim += 1                                                       # ...should be one higher dimension
             labelList.append(label)                                          # reached end --> store
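
Note on these f-string conversions: before Python 3.12, the expression inside an f-string may not reuse the quote character that delimits the string, which is why `head_write` switches to double quotes so that `self.__IO__['labeled']` can keep its single quotes. A minimal illustration:

    io = {'labeled': True}
    # head = [f'{io['labeled']}\theader']   # SyntaxError before Python 3.12
    head = [f"{io['labeled']}\theader"]     # outer double quotes avoid the clash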

View File

@@ -1,4 +1,5 @@
 import numpy as np
+from . import util

 class Color:
   """Color representation in and conversion between different color-spaces."""
@@ -502,7 +503,7 @@ class Colormap:
     [RGB] colormap for use in paraview or gmsh, or as raw string, or array.

     Arguments: name, format, steps, crop.
-    Format is one of (paraview, gmsh, raw, list).
+    Format is one of (paraview, gmsh, gom, raw, list).
     Crop selects a (sub)range in [-1.0,1.0].
     Generates sequential map if one limiting color is either white or black,
     diverging map otherwise.
@@ -511,23 +512,22 @@ class Colormap:
     frac = 0.5*(np.array(crop) + 1.0)                                        # rescale crop range to fractions
     colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).express_as(model).color for i in range(steps)]

     if   format == 'paraview':
-      colormap = ['[\n {{\n "ColorSpace": "RGB", "Name": "{}", "DefaultMap": true,\n "RGBPoints" : ['.format(name)] \
-               + [' {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],) \
-                  for i,color in enumerate(colors[:-1])] \
-               + [' {:4d},{:8.6f},{:8.6f},{:8.6f} '.format(len(colors),colors[-1][0],colors[-1][1],colors[-1][2],)] \
+      colormap = [f'[\n {{\n "ColorSpace": "RGB", "Name": "{name}", "DefaultMap": true,\n "RGBPoints" : ['] \
+               + [f' {i:4d},{color[0]:8.6f},{color[1]:8.6f},{color[2]:8.6f}{"," if i+1<len(colors) else ""}' \
+                  for i,color in enumerate(colors)] \
                + ['  ]\n }\n]']

     elif format == 'gmsh':
       colormap = ['View.ColorTable = {'] \
-               + [',\n'.join(['{%s}'%(','.join([str(x*255.0) for x in color])) for color in colors])] \
+               + [',\n'.join([','.join([str(x*255.0) for x in color]) for color in colors])] \
               + ['}']

     elif format == 'gom':
-      colormap = ['1 1 ' + str(name)
-               + ' 9 ' + str(name)
+      colormap = [ f'1 1 {name}'
+               + f' 9 {name}'
                + ' 0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 '
-               + '30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 ' + str(len(colors))
-               + ' '.join([' 0 %s 255 1'%(' '.join([str(int(x*255.0)) for x in color])) for color in reversed(colors)])]
+               + f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(colors)}'
+               + ' '.join([f' 0 {util.srepr((255*np.array(c)).astype(int)," ")} 255 1' for c in reversed(colors)])]

     elif format == 'raw':
       colormap = ['\t'.join(map(str,color)) for color in colors]
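
Note: the paraview branch replaces the old special-casing of the last color (which carried no trailing comma) with a conditional inside the f-string, so every element is produced by one expression. The pattern in isolation:

    colors = [(0.0, 0.0, 0.0), (0.5, 0.5, 0.5), (1.0, 1.0, 1.0)]     # sample data
    lines = [f' {i:4d},{c[0]:8.6f},{c[1]:8.6f},{c[2]:8.6f}{"," if i+1 < len(colors) else ""}'
             for i, c in enumerate(colors)]
    # every line ends with ',' except the last one, as JSON requires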

View File

@@ -1,11 +1,11 @@
 import os
+from pathlib import Path

 import tkinter

 class Environment:

     def __init__(self):
         """Read and provide values of DAMASK configuration."""
-        self.options = self._get_options()
         try:
             tk = tkinter.Tk()
             self.screen_width  = tk.winfo_screenwidth()
@@ -15,17 +15,8 @@ class Environment:
             self.screen_width  = 1024
             self.screen_height =  768

-    def relPath(self,relative = '.'):
-        """Return absolute path from path relative to DAMASK root."""
-        return os.path.join(self.rootDir(),relative)
-
-    def rootDir(self):
-        """Return DAMASK root path."""
-        return os.path.normpath(os.path.join(os.path.realpath(__file__),'../../../'))
-
-    def _get_options(self):
+    @property
+    def options(self):
         options = {}
         for item in ['DAMASK_NUM_THREADS',
                      'MSC_ROOT',
@@ -34,3 +25,13 @@ class Environment:
             options[item] = os.environ[item] if item in os.environ else None

         return options
+
+    @property
+    def root_dir(self):
+        """Return DAMASK root path."""
+        return Path(__file__).parents[2]
+
+    # for compatibility
+    def rootDir(self):
+        return str(self.root_dir)
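
Note: with `options` and `root_dir` exposed as properties, callers read them as attributes; the old `rootDir()` survives only as a string-returning shim. A hedged usage sketch, assuming the interface shown in this diff:

    import damask

    env = damask.Environment()
    print(env.root_dir)                          # pathlib.Path to the DAMASK installation root
    print(env.options['DAMASK_NUM_THREADS'])     # None if the variable is not set
    print(env.rootDir())                         # deprecated spelling, returns str(root_dir)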

View File

@@ -43,12 +43,12 @@ class Geom:
     def __repr__(self):
         """Basic information on geometry definition."""
         return util.srepr([
-               'grid a b c: {}'.format(' x '.join(map(str,self.get_grid ()))),
-               'size x y z: {}'.format(' x '.join(map(str,self.get_size ()))),
-               'origin x y z: {}'.format(' '.join(map(str,self.get_origin()))),
-               'homogenization: {}'.format(self.get_homogenization()),
-               '# microstructures: {}'.format(len(np.unique(self.microstructure))),
-               'max microstructure: {}'.format(np.nanmax(self.microstructure)),
+               f'grid a b c: {util.srepr(self.get_grid ()," x ")}',
+               f'size x y z: {util.srepr(self.get_size ()," x ")}',
+               f'origin x y z: {util.srepr(self.get_origin()," ")}',
+               f'homogenization: {self.get_homogenization()}',
+               f'# microstructures: {self.N_microstructure}',
+               f'max microstructure: {np.nanmax(self.microstructure)}',
               ])
@@ -71,7 +71,7 @@ class Geom:
         grid_old    = self.get_grid()
         size_old    = self.get_size()
         origin_old  = self.get_origin()
-        unique_old  = len(np.unique(self.microstructure))
+        unique_old  = self.N_microstructure
         max_old     = np.nanmax(self.microstructure)

         if size is not None and rescale:
@@ -85,32 +85,32 @@ class Geom:
         elif rescale:
             self.set_size(self.get_grid()/grid_old*self.size)

-        message = ['grid a b c: {}'.format(' x '.join(map(str,grid_old)))]
+        message = [f'grid a b c: {util.srepr(grid_old," x ")}']
         if np.any(grid_old != self.get_grid()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('grid a b c: {}'.format(' x '.join(map(str,self.get_grid())))))
+            message.append(util.emph(f'grid a b c: {util.srepr(self.get_grid()," x ")}'))

-        message.append('size x y z: {}'.format(' x '.join(map(str,size_old))))
+        message.append(f'size x y z: {util.srepr(size_old," x ")}')
         if np.any(size_old != self.get_size()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('size x y z: {}'.format(' x '.join(map(str,self.get_size())))))
+            message.append(util.emph(f'size x y z: {util.srepr(self.get_size()," x ")}'))

-        message.append('origin x y z: {}'.format(' '.join(map(str,origin_old))))
+        message.append(f'origin x y z: {util.srepr(origin_old," ")}')
         if np.any(origin_old != self.get_origin()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('origin x y z: {}'.format(' '.join(map(str,self.get_origin())))))
+            message.append(util.emph(f'origin x y z: {util.srepr(self.get_origin()," ")}'))

-        message.append('homogenization: {}'.format(self.get_homogenization()))
+        message.append(f'homogenization: {self.get_homogenization()}')

-        message.append('# microstructures: {}'.format(unique_old))
-        if unique_old != len(np.unique(self.microstructure)):
+        message.append(f'# microstructures: {unique_old}')
+        if unique_old != self.N_microstructure:
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('# microstructures: {}'.format(len(np.unique(self.microstructure)))))
+            message.append(util.emph(f'# microstructures: {self.N_microstructure}'))

-        message.append('max microstructure: {}'.format(max_old))
+        message.append(f'max microstructure: {max_old}')
         if max_old != np.nanmax(self.microstructure):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('max microstructure: {}'.format(np.nanmax(self.microstructure))))
+            message.append(util.emph(f'max microstructure: {np.nanmax(self.microstructure)}'))

         return util.return_message(message)
@@ -154,9 +154,9 @@ class Geom:
         """
         if microstructure is not None:
             if len(microstructure.shape) != 3:
-                raise ValueError('Invalid microstructure shape {}'.format(microstructure.shape))
+                raise ValueError(f'Invalid microstructure shape {microstructure.shape}')
             elif microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']:
-                raise TypeError('Invalid data type {} for microstructure'.format(microstructure.dtype))
+                raise TypeError(f'Invalid microstructure data type {microstructure.dtype}')
             else:
                 self.microstructure = np.copy(microstructure)
@@ -176,7 +176,7 @@ class Geom:
             self.size = grid/np.max(grid)
         else:
             if len(size) != 3 or any(np.array(size) <= 0):
-                raise ValueError('Invalid size {}'.format(size))
+                raise ValueError(f'Invalid size {size}')
             else:
                 self.size = np.array(size)
@@ -193,7 +193,7 @@ class Geom:
         """
         if origin is not None:
             if len(origin) != 3:
-                raise ValueError('Invalid origin {}'.format(origin))
+                raise ValueError(f'Invalid origin {origin}')
             else:
                 self.origin = np.array(origin)
@@ -210,7 +210,7 @@ class Geom:
         """
         if homogenization is not None:
             if not isinstance(homogenization,int) or homogenization < 1:
-                raise TypeError('Invalid homogenization {}'.format(homogenization))
+                raise TypeError(f'Invalid homogenization {homogenization}')
             else:
                 self.homogenization = homogenization
@@ -222,7 +222,7 @@ class Geom:
     @property
     def N_microstructure(self):
-        return len(np.unique(self.microstructure))
+        return np.unique(self.microstructure).size

     def get_microstructure(self):
@@ -257,11 +257,11 @@ class Geom:
     def get_header(self):
         """Return the full header (grid, size, origin, homogenization, comments)."""
-        header = ['{} header'.format(len(self.comments)+4)] + self.comments
+        header = [f'{len(self.comments)+4} header'] + self.comments
         header.append('grid a {} b {} c {}'.format(*self.get_grid()))
         header.append('size x {} y {} z {}'.format(*self.get_size()))
         header.append('origin x {} y {} z {}'.format(*self.get_origin()))
-        header.append('homogenization {}'.format(self.get_homogenization()))
+        header.append(f'homogenization {self.get_homogenization()}')
         return header
@@ -320,7 +320,7 @@ class Geom:
             i += len(items)

         if i != grid.prod():
-            raise TypeError('Invalid file: expected {} entries, found {}'.format(grid.prod(),i))
+            raise TypeError(f'Invalid file: expected {grid.prod()} entries, found {i}')

         if not np.any(np.mod(microstructure,1) != 0.0):                      # no float present
             microstructure = microstructure.astype('int')
@@ -331,6 +331,7 @@ class Geom:
     @staticmethod
     def _find_closest_seed(seeds, weights, point):
         return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
+
     @staticmethod
     def from_Laguerre_tessellation(grid,size,seeds,weights,periodic=True):
         """
@@ -373,7 +374,7 @@ class Geom:
         else:
             microstructure = microstructure.reshape(grid)

-        #comments = 'geom.py:from_Laguerre_tessellation v{}'.format(version)
+        #ToDo: comments = 'geom.py:from_Laguerre_tessellation v{}'.format(version)
         return Geom(microstructure+1,size,homogenization=1)
@@ -398,7 +399,7 @@ class Geom:
         KDTree = spatial.cKDTree(seeds,boxsize=size) if periodic else spatial.cKDTree(seeds)
         devNull,microstructure = KDTree.query(coords)

-        #comments = 'geom.py:from_Voronoi_tessellation v{}'.format(version)
+        #ToDo: comments = 'geom.py:from_Voronoi_tessellation v{}'.format(version)
         return Geom(microstructure.reshape(grid)+1,size,homogenization=1)
@@ -418,7 +419,7 @@ class Geom:
         grid = self.get_grid()

         if pack is None:
-            plain = grid.prod()/np.unique(self.microstructure).size < 250
+            plain = grid.prod()/self.N_microstructure < 250
         else:
             plain = not pack
@@ -448,11 +449,11 @@ class Geom:
                     if compressType is None:
                         f.write('\n'.join(self.get_header())+'\n')
                     elif compressType == '.':
-                        f.write('{}\n'.format(former))
+                        f.write(f'{former}\n')
                     elif compressType == 'to':
-                        f.write('{} to {}\n'.format(start,former))
+                        f.write(f'{start} to {former}\n')
                     elif compressType == 'of':
-                        f.write('{} of {}\n'.format(reps,former))
+                        f.write(f'{reps} of {former}\n')

                     compressType = '.'
                     start = current
@@ -461,11 +462,11 @@ class Geom:
                 former = current

             if compressType == '.':
-                f.write('{}\n'.format(former))
+                f.write(f'{former}\n')
             elif compressType == 'to':
-                f.write('{} to {}\n'.format(start,former))
+                f.write(f'{start} to {former}\n')
             elif compressType == 'of':
-                f.write('{} of {}\n'.format(reps,former))
+                f.write(f'{reps} of {former}\n')

     def to_vtk(self,fname=None):
@@ -511,7 +512,7 @@ class Geom:
         if not all(isinstance(d, str) for d in directions):
             raise TypeError('Directions are not of type str.')
         elif not set(directions).issubset(valid):
-            raise ValueError('Invalid direction specified {}'.format(set(directions).difference(valid)))
+            raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')

         limits = [None,None] if reflect else [-2,0]
         ms = self.get_microstructure()
@@ -523,7 +524,7 @@ class Geom:
         if 'x' in directions:
             ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)

-        #self.add_comments('geom.py:mirror v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:mirror v{}'.format(version)
         return self.update(ms,rescale=True)
@@ -537,7 +538,7 @@ class Geom:
             number of grid points in x,y,z direction.

         """
-        #self.add_comments('geom.py:scale v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:scale v{}'.format(version)
         return self.update(
                            ndimage.interpolation.zoom(
                                                       self.microstructure,
@@ -564,7 +565,7 @@ class Geom:
             unique, inverse = np.unique(arr, return_inverse=True)
             return unique[np.argmax(np.bincount(inverse))]

-        #self.add_comments('geom.py:clean v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:clean v{}'.format(version)
         return self.update(ndimage.filters.generic_filter(
                                                           self.microstructure,
                                                           mostFrequent,
@@ -579,7 +580,7 @@ class Geom:
         for i, oldID in enumerate(np.unique(self.microstructure)):
             renumbered = np.where(self.microstructure == oldID, i+1, renumbered)

-        #self.add_comments('geom.py:renumber v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:renumber v{}'.format(version)
         return self.update(renumbered)
@@ -614,7 +615,7 @@ class Geom:
         origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid

-        #self.add_comments('geom.py:rotate v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:rotate v{}'.format(version)
         return self.update(microstructure_in,origin=origin,rescale=True)
@@ -646,7 +647,7 @@ class Geom:
         canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = self.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]

-        #self.add_comments('geom.py:canvas v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:canvas v{}'.format(version)
         return self.update(canvas,origin=self.origin+offset*self.size/self.grid,rescale=True)
@@ -666,5 +667,5 @@ class Geom:
         for from_ms,to_ms in zip(from_microstructure,to_microstructure):
             substituted[self.microstructure==from_ms] = to_ms

-        #self.add_comments('geom.py:substitute v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:substitute v{}'.format(version)
         return self.update(substituted)
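
Note: `util.srepr` with a custom glue replaces the `' x '.join(map(str,...))` idiom throughout `Geom`. A standalone sketch of the equivalence, using a simplified stand-in for `damask.util.srepr` (assuming `srepr(obj, glue)` joins the string forms of the items, as the usage here implies):

    import numpy as np

    def srepr(arg, glue='\n'):             # simplified stand-in for damask.util.srepr
        if hasattr(arg, '__iter__') and not isinstance(arg, str):
            return glue.join(str(x) for x in arg)
        return str(arg)

    grid = np.array([16, 16, 4])
    assert srepr(grid, ' x ') == ' x '.join(map(str, grid))   # '16 x 16 x 4'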

View File

@@ -26,7 +26,7 @@ class Symmetry:

         """
         if symmetry is not None and symmetry.lower() not in Symmetry.lattices:
-            raise KeyError('Symmetry/crystal system "{}" is unknown'.format(symmetry))
+            raise KeyError(f'Symmetry/crystal system "{symmetry}" is unknown')

         self.lattice = symmetry.lower() if isinstance(symmetry,str) else symmetry
@@ -40,7 +40,7 @@ class Symmetry:
     def __repr__(self):
         """Readable string."""
-        return '{}'.format(self.lattice)
+        return f'{self.lattice}'

     def __eq__(self, other):
@@ -348,7 +348,7 @@ class Lattice:
     def __repr__(self):
         """Report basic lattice information."""
-        return 'Bravais lattice {} ({} symmetry)'.format(self.lattice,self.symmetry)
+        return f'Bravais lattice {self.lattice} ({self.symmetry} symmetry)'

     # Kurdjomov--Sachs orientation relationship for fcc <-> bcc transformation
@@ -613,10 +613,10 @@ class Lattice:
         try:
             relationship = models[model]
         except KeyError :
-            raise KeyError('Orientation relationship "{}" is unknown'.format(model))
+            raise KeyError(f'Orientation relationship "{model}" is unknown')

         if self.lattice not in relationship['mapping']:
-            raise ValueError('Relationship "{}" not supported for lattice "{}"'.format(model,self.lattice))
+            raise ValueError(f'Relationship "{model}" not supported for lattice "{self.lattice}"')

         r = {'lattice':Lattice((set(relationship['mapping'])-{self.lattice}).pop()),   # target lattice
              'rotations':[] }

View File

@@ -6,6 +6,7 @@ import os
 import datetime
 import xml.etree.ElementTree as ET
 import xml.dom.minidom
+from pathlib import Path
 from functools import partial

 import h5py
@@ -49,8 +50,7 @@ class Result:
             self.version_minor = f.attrs['DADF5-minor']

             if self.version_major != 0 or not 2 <= self.version_minor <= 6:
-                raise TypeError('Unsupported DADF5 version {}.{} '.format(self.version_major,
-                                                                          self.version_minor))
+                raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')

             self.structured = 'grid' in f['geometry'].attrs.keys()
@@ -88,7 +88,7 @@ class Result:
                        'con_physics':  self.con_physics, 'mat_physics': self.mat_physics
                       }

-        self.fname = os.path.abspath(fname)
+        self.fname = Path(fname).absolute()

         self._allow_modification = False
@@ -106,7 +106,7 @@ class Result:
         self.pick('increments',all_selected_increments)

         in_between = '' if len(all_selected_increments) < 3 else \
-                     ''.join(['\n{}\n  ...\n'.format(inc) for inc in all_selected_increments[1:-2]])
+                     ''.join([f'\n{inc}\n  ...\n' for inc in all_selected_increments[1:-2]])

         return util.srepr(first + in_between + last)
@@ -136,7 +136,7 @@ class Result:
         if what == 'increments':
             choice = [c if isinstance(c,str) and c.startswith('inc') else
-                      'inc{}'.format(c) for c in choice]
+                      f'inc{c}' for c in choice]
         elif what == 'times':
             what = 'increments'
             if choice == ['*']:
@@ -411,21 +411,19 @@ class Result:
         message = ''
         with h5py.File(self.fname,'r') as f:
             for i in self.iterate('increments'):
-                message += '\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
+                message += f'\n{i} ({self.times[self.increments.index(i)]}s)\n'
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                     for oo in self.iterate(o):
-                        message += '  {}\n'.format(oo)
+                        message += f'  {oo}\n'
                         for pp in self.iterate(p):
-                            message += '    {}\n'.format(pp)
+                            message += f'    {pp}\n'
                             group = '/'.join([i,o[:-1],oo,pp])               # o[:-1]: plural/singular issue
                             for d in f[group].keys():
                                 try:
                                     dataset = f['/'.join([group,d])]
+                                    unit = f" / {dataset.attrs['Unit'].decode()}" if 'Unit' in dataset.attrs else ''
                                     description = dataset.attrs['Description'].decode()
-                                    if 'Unit' in dataset.attrs:
-                                        message += '      {} / ({}): {}\n'.format(d,dataset.attrs['Unit'].decode(),description)
-                                    else:
-                                        message += '      {}: {}\n'.format(d,description)
+                                    message += f'      {d}{unit}: {description}\n'
                                 except KeyError:
                                     pass
         return message
@@ -527,10 +525,10 @@ class Result:
     def _add_absolute(x):
         return {
                 'data':  np.abs(x['data']),
-                'label': '|{}|'.format(x['label']),
+                'label': f'|{x["label"]}|',
                 'meta':  {
                           'Unit':        x['meta']['Unit'],
-                          'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
+                          'Description': f"Absolute value of {x['label']} ({x['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -551,14 +549,14 @@ class Result:
     def _add_calculation(**kwargs):
         formula = kwargs['formula']
         for d in re.findall(r'#(.*?)#',formula):
-            formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
+            formula = formula.replace(f'#{d}#',f"kwargs['{d}']['data']")

         return {
                 'data':  eval(formula),
                 'label': kwargs['label'],
                 'meta':  {
                           'Unit':        kwargs['unit'],
-                          'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
+                          'Description': f"{kwargs['description']} (formula: {kwargs['formula']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -595,8 +593,9 @@ class Result:
                 'label': 'sigma',
                 'meta':  {
                           'Unit':        P['meta']['Unit'],
-                          'Description': 'Cauchy stress calculated from {} ({}) and {} ({})'\
-                                         .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                          'Description': "Cauchy stress calculated "
+                                         f"from {P['label']} ({P['meta']['Description']})"
+                                         f" and {F['label']} ({F['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -619,10 +618,10 @@ class Result:
     def _add_determinant(T):
         return {
                 'data':  np.linalg.det(T['data']),
-                'label': 'det({})'.format(T['label']),
+                'label': f"det({T['label']})",
                 'meta':  {
                           'Unit':        T['meta']['Unit'],
-                          'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                          'Description': f"Determinant of tensor {T['label']} ({T['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -643,10 +642,10 @@ class Result:
     def _add_deviator(T):
         return {
                 'data':  mechanics.deviatoric_part(T['data']),
-                'label': 's_{}'.format(T['label']),
+                'label': f"s_{T['label']}",
                 'meta':  {
                           'Unit':        T['meta']['Unit'],
-                          'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                          'Description': f"Deviator of tensor {T['label']} ({T['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -674,10 +673,10 @@ class Result:
         return {
                 'data': mechanics.eigenvalues(T_sym['data'])[:,p],
-                'label': 'lambda_{}({})'.format(eigenvalue,T_sym['label']),
+                'label': f"lambda_{eigenvalue}({T_sym['label']})",
                 'meta' : {
                           'Unit':        T_sym['meta']['Unit'],
-                          'Description': '{} eigenvalue of {} ({})'.format(label,T_sym['label'],T_sym['meta']['Description']),
+                          'Description': f"{label} eigenvalue of {T_sym['label']} ({T_sym['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -707,11 +706,11 @@ class Result:
         print('p',eigenvalue)
         return {
                 'data': mechanics.eigenvectors(T_sym['data'])[:,p],
-                'label': 'v_{}({})'.format(eigenvalue,T_sym['label']),
+                'label': f"v_{eigenvalue}({T_sym['label']})",
                 'meta' : {
                           'Unit':        '1',
-                          'Description': 'Eigenvector corresponding to {} eigenvalue of {} ({})'\
-                                         .format(label,T_sym['label'],T_sym['meta']['Description']),
+                          'Description': f"Eigenvector corresponding to {label} eigenvalue"
+                                         f" of {T_sym['label']} ({T_sym['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -773,10 +772,10 @@ class Result:
     def _add_maximum_shear(T_sym):
         return {
                 'data':  mechanics.maximum_shear(T_sym['data']),
-                'label': 'max_shear({})'.format(T_sym['label']),
+                'label': f"max_shear({T_sym['label']})",
                 'meta':  {
                           'Unit':        T_sym['meta']['Unit'],
-                          'Description': 'Maximum shear component of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
+                          'Description': f"Maximum shear component of {T_sym['label']} ({T_sym['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -799,11 +798,11 @@ class Result:
               'stress'

         return {
-                'data':  mechanics.Mises_strain(T_sym['data']) if t=='strain' else mechanics.Mises_stress(T_sym['data']),
-                'label': '{}_vM'.format(T_sym['label']),
+                'data':  (mechanics.Mises_strain if t=='strain' else mechanics.Mises_stress)(T_sym['data']),
+                'label': f"{T_sym['label']}_vM",
                 'meta':  {
                           'Unit':        T_sym['meta']['Unit'],
-                          'Description': 'Mises equivalent {} of {} ({})'.format(t,T_sym['label'],T_sym['meta']['Description']),
+                          'Description': f"Mises equivalent {t} of {T_sym['label']} ({T_sym['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -836,10 +835,10 @@ class Result:
         return {
                 'data':  np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
-                'label': '|{}|_{}'.format(x['label'],o),
+                'label': f"|{x['label']}|_{o}",
                 'meta':  {
                           'Unit':        x['meta']['Unit'],
-                          'Description': '{}-norm of {} {} ({})'.format(o,t,x['label'],x['meta']['Description']),
+                          'Description': f"{o}-norm of {t} {x['label']} ({x['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -865,19 +864,20 @@ class Result:
                 'label': 'S',
                 'meta':  {
                           'Unit':        P['meta']['Unit'],
-                          'Description': '2. Piola-Kirchhoff stress calculated from {} ({}) and {} ({})'\
-                                         .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                          'Description': "2. Piola-Kirchhoff stress calculated "
+                                         f"from {P['label']} ({P['meta']['Description']})"
+                                         f" and {F['label']} ({F['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }

     def add_PK2(self,P='P',F='F'):
         """
-        Add second Piola-Kirchhoff calculated from first Piola-Kirchhoff stress and deformation gradient.
+        Add second Piola-Kirchhoff stress calculated from first Piola-Kirchhoff stress and deformation gradient.

         Parameters
         ----------
         P : str, optional
-            Label first Piola-Kirchhoff stress dataset. Defaults to P.
+            Label of first Piola-Kirchhoff stress dataset. Defaults to P.
         F : str, optional
             Label of deformation gradient dataset. Defaults to F.
@@ -927,10 +927,10 @@ class Result:
     def _add_rotational_part(F):
         return {
                 'data':  mechanics.rotational_part(F['data']),
-                'label': 'R({})'.format(F['label']),
+                'label': f"R({F['label']})",
                 'meta':  {
                           'Unit':        F['meta']['Unit'],
-                          'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
+                          'Description': f"Rotational part of {F['label']} ({F['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -951,10 +951,10 @@ class Result:
     def _add_spherical(T):
         return {
                 'data':  mechanics.spherical_part(T['data']),
-                'label': 'p_{}'.format(T['label']),
+                'label': f"p_{T['label']}",
                 'meta':  {
                           'Unit':        T['meta']['Unit'],
-                          'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                          'Description': f"Spherical component of tensor {T['label']} ({T['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -975,10 +975,10 @@ class Result:
     def _add_strain_tensor(F,t,m):
         return {
                 'data':  mechanics.strain_tensor(F['data'],t,m),
-                'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
+                'label': f"epsilon_{t}^{m}({F['label']})",
                 'meta':  {
                           'Unit':        F['meta']['Unit'],
-                          'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
+                          'Description': f"Strain tensor of {F['label']} ({F['meta']['Description']})",
                           'Creator':     inspect.stack()[0][3][1:]
                           }
                  }
@@ -1005,11 +1005,11 @@ class Result:
     @staticmethod
     def _add_stretch_tensor(F,t):
         return {
-                'data':  mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
-                'label': '{}({})'.format(t,F['label']),
+                'data':  (mechanics.left_stretch if t.upper() == 'V' else mechanics.right_stretch)(F['data']),
+                'label': f"{t}({F['label']})",
                 'meta':  {
                           'Unit':        F['meta']['Unit'],
-                          'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
+                          'Description': '{} stretch tensor of {} ({})'.format('Left' if t.upper() == 'V' else 'Right',
                                                                                F['label'],F['meta']['Description']),
                           'Creator':     inspect.stack()[0][3][1:]
                           }
@@ -1045,7 +1045,7 @@ class Result:
             r = func(**datasets_in,**args)
             return [group,r]
         except Exception as err:
-            print('Error during calculation: {}.'.format(err))
+            print(f'Error during calculation: {err}.')
             return None
@@ -1056,14 +1056,17 @@ class Result:
         Parameters
         ----------
         func : function
-            Callback function that calculates a new dataset from one or more datasets per HDF5 group.
+            Callback function that calculates a new dataset from one or
+            more datasets per HDF5 group.
         datasets : dictionary
-            Details of the datasets to be used: label (in HDF5 file) and arg (argument to which the data is parsed in func).
+            Details of the datasets to be used: label (in HDF5 file) and
+            arg (argument to which the data is parsed in func).
         args : dictionary, optional
             Arguments parsed to func.

         """
-        pool = multiprocessing.Pool(int(Environment().options['DAMASK_NUM_THREADS']))
+        num_threads = Environment().options['DAMASK_NUM_THREADS']
+        pool = multiprocessing.Pool(int(num_threads) if num_threads is not None else None)
         lock = multiprocessing.Manager().Lock()

         groups = self.groups_with_datasets(datasets.values())
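
Note: the new guard matters because `Environment().options` returns `None` for unset variables, and `int(None)` raises a `TypeError`, whereas `multiprocessing.Pool(None)` falls back to `os.cpu_count()`. The pattern in isolation:

    import multiprocessing

    num_threads = None                      # DAMASK_NUM_THREADS not set in the environment
    pool = multiprocessing.Pool(int(num_threads) if num_threads is not None else None)
    # processes=None makes Pool use os.cpu_count() workers instead of crashing on int(None)
    pool.close()
    pool.join()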
@@ -1087,11 +1090,11 @@ class Result:
                         for l,v in result[1]['meta'].items():
                             dataset.attrs[l]=v.encode()

-                        creator = 'damask.Result.{} v{}'.format(dataset.attrs['Creator'].decode(),version)
+                        creator = f"damask.Result.{dataset.attrs['Creator'].decode()} v{version}"
                         dataset.attrs['Creator'] = creator.encode()

                 except (OSError,RuntimeError) as err:
-                    print('Could not add dataset: {}.'.format(err))
+                    print(f'Could not add dataset: {err}.')
                 lock.release()

         pool.close()
@@ -1124,7 +1127,7 @@ class Result:
         time_data = ET.SubElement(time, 'DataItem')
         time_data.attrib={'Format':     'XML',
                           'NumberType': 'Float',
-                          'Dimensions': '{}'.format(len(self.times))}
+                          'Dimensions': f'{len(self.times)}'}
         time_data.text = ' '.join(map(str,self.times))

         attributes = []
@@ -1165,7 +1168,7 @@ class Result:
             data_items[-1].attrib={'Format':     'HDF',
                                    'Precision':  '8',
                                    'Dimensions': '{} {} {} 3'.format(*(self.grid+1))}
-            data_items[-1].text='{}:/{}/geometry/u_n'.format(os.path.split(self.fname)[1],inc)
+            data_items[-1].text=f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n'

             for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                 for oo in getattr(self,o):
@@ -1180,17 +1183,17 @@ class Result:
                         if (shape not in [(1,), (3,), (3,3)]) or dtype != np.float64: continue

                         attributes.append(ET.SubElement(grid, 'Attribute'))
-                        attributes[-1].attrib={'Name':          '{}'.format(name.split('/',2)[2]),
+                        attributes[-1].attrib={'Name':          name.split('/',2)[2],
                                                'Center':        'Cell',
                                                'AttributeType': 'Tensor'}
                         data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
                         data_items[-1].attrib={'Format':     'HDF',
                                                'NumberType': 'Float',
-                                               'Precision':  '{}'.format(prec),
+                                               'Precision':  f'{prec}',
                                                'Dimensions': '{} {} {} {}'.format(*self.grid,np.prod(shape))}
-                        data_items[-1].text='{}:{}'.format(os.path.split(self.fname)[1],name)
+                        data_items[-1].text=f'{os.path.split(self.fname)[1]}:{name}'

-        with open(os.path.splitext(self.fname)[0]+'.xdmf','w') as f:
+        with open(self.fname.with_suffix('.xdmf').name,'w') as f:
             f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())
@@ -1266,7 +1269,4 @@ class Result:
             u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
             v.add(u,'u')

-            file_out = '{}_inc{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
-                                         inc[3:].zfill(N_digits))
-            v.write(file_out)
+            v.write(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}')

View File

@@ -49,7 +49,8 @@ class Rotation:
         Parameters
         ----------
         quaternion : numpy.ndarray, optional
-            Unit quaternion that follows the conventions. Use .from_quaternion to perform a sanity check.
+            Unit quaternion in positive real hemisphere.
+            Use .from_quaternion to perform a sanity check.

         """
         self.quaternion = quaternion.copy()
@@ -73,7 +74,7 @@ class Rotation:
             raise NotImplementedError('Support for multiple rotations missing')
         return '\n'.join([
                'Quaternion: (real={:.3f}, imag=<{:+.3f}, {:+.3f}, {:+.3f}>)'.format(*(self.quaternion)),
-               'Matrix:\n{}'.format(self.as_matrix()),
+               'Matrix:\n{}'.format(np.round(self.as_matrix(),8)),
               'Bunge Eulers / deg: ({:3.2f}, {:3.2f}, {:3.2f})'.format(*self.as_Eulers(degrees=True)),
                ])
@@ -87,10 +88,6 @@ class Rotation:
         other : numpy.ndarray or Rotation
             Vector, second or fourth order tensor, or rotation object that is rotated.

-        Todo
-        ----
-        Check rotation of 4th order tensor
-
         """
         if isinstance(other, Rotation):
             q_m = self.quaternion[...,0:1]
@@ -99,7 +96,7 @@ class Rotation:
             p_o = other.quaternion[...,1:]
             q = (q_m*q_o - np.einsum('...i,...i',p_m,p_o).reshape(self.shape+(1,)))
             p = q_m*p_o + q_o*p_m + _P * np.cross(p_m,p_o)
-            return self.__class__(np.block([q,p])).standardize()
+            return self.__class__(np.block([q,p]))._standardize()

         elif isinstance(other,np.ndarray):
             if self.shape + (3,) == other.shape:
@@ -122,26 +119,26 @@ class Rotation:
             else:
                 raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
         else:
-            raise TypeError('Cannot rotate {}'.format(type(other)))
+            raise TypeError(f'Cannot rotate {type(other)}')

-    def inverse(self):
-        """In-place inverse rotation/backward rotation."""
-        self.quaternion[...,1:] *= -1
-        return self
-
-    def inversed(self):
-        """Inverse rotation/backward rotation."""
-        return self.copy().inverse()
-
-    def standardize(self):
-        """In-place quaternion representation with positive real part."""
+    def _standardize(self):
+        """Standardize (ensure positive real hemisphere)."""
         self.quaternion[self.quaternion[...,0] < 0.0] *= -1
         return self

-    def standardized(self):
-        """Quaternion representation with positive real part."""
-        return self.copy().standardize()
+    def inverse(self):
+        """In-place inverse rotation (backward rotation)."""
+        self.quaternion[...,1:] *= -1
+        return self
+
+    def __invert__(self):
+        """Inverse rotation (backward rotation)."""
+        return self.copy().inverse()
+
+    def inversed(self):
+        """Inverse rotation (backward rotation)."""
+        return ~ self

     def misorientation(self,other):
@@ -154,7 +151,7 @@ class Rotation:
             Rotation to which the misorientation is computed.

         """
-        return other*self.inversed()
+        return other@~self

     def broadcast_to(self,shape):
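
Note: with `__invert__` overloaded and composition via `@`, the misorientation becomes a one-liner. A hedged usage sketch, assuming the operators behave as this diff indicates:

    import damask

    a = damask.Rotation.from_Eulers([0.1, 0.2, 0.3])
    b = damask.Rotation.from_Eulers([0.4, 0.5, 0.6])

    mis = a.misorientation(b)   # equivalent to b@~a: rotate back by a, then forward by b
    print(~a)                   # __invert__ returns an inverted copy; a itself is unchanged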
@ -169,7 +166,7 @@ class Rotation:
return self.__class__(q) return self.__class__(q)
def average(self,other): def average(self,other): #ToDo: discuss calling for vectors
""" """
Calculate the average rotation. Calculate the average rotation.
@ -189,25 +186,31 @@ class Rotation:
def as_quaternion(self): def as_quaternion(self):
""" """
Unit quaternion [q, p_1, p_2, p_3]. Represent as unit quaternion.
Parameters Returns
---------- -------
quaternion : bool, optional q : numpy.ndarray of shape (...,4)
return quaternion as DAMASK object. Unit quaternion in positive real hemisphere: (q_0, q_1, q_2, q_3), |q|=1, q_0 0.
""" """
return self.quaternion return self.quaternion.copy()
def as_Eulers(self, def as_Eulers(self,
degrees = False): degrees = False):
""" """
Bunge-Euler angles: (φ_1, ϕ, φ_2). Represent as Bunge-Euler angles.
Parameters Parameters
---------- ----------
degrees : bool, optional degrees : bool, optional
return angles in degrees. Return angles in degrees.
Returns
-------
phi : numpy.ndarray of shape (...,3)
Bunge-Euler angles: (φ_1, ϕ, φ_2), φ_1 [0,2π], ϕ [0,π], φ_2 [0,2π]
unless degrees == True: φ_1 [0,360], ϕ [0,180], φ_2 [0,360]
""" """
eu = Rotation._qu2eu(self.quaternion) eu = Rotation._qu2eu(self.quaternion)
@ -218,14 +221,21 @@ class Rotation:
degrees = False, degrees = False,
pair = False): pair = False):
""" """
Axis angle representation [n_1, n_2, n_3, ω] unless pair == True: ([n_1, n_2, n_3], ω). Represent as axis angle pair.
Parameters Parameters
---------- ----------
degrees : bool, optional degrees : bool, optional
return rotation angle in degrees. Return rotation angle in degrees. Defaults to False.
pair : bool, optional pair : bool, optional
return tuple of axis and angle. Return tuple of axis and angle. Defaults to False.
Returns
-------
axis_angle : numpy.ndarray of shape (...,4) unless pair == True:
tuple containing numpy.ndarray of shapes (...,3) and (...)
Axis angle pair: (n_1, n_2, n_3, ω), |n| = 1 and ω [0,π]
unless degrees = True: ω [0,180].
""" """
ax = Rotation._qu2ax(self.quaternion) ax = Rotation._qu2ax(self.quaternion)
@ -233,29 +243,60 @@ class Rotation:
return (ax[...,:3],ax[...,3]) if pair else ax return (ax[...,:3],ax[...,3]) if pair else ax
def as_matrix(self): def as_matrix(self):
"""Rotation matrix.""" """
Represent as rotation matrix.
Returns
-------
R : numpy.ndarray of shape (...,3,3)
Rotation matrix R, det(R) = 1, R.TR=I.
"""
return Rotation._qu2om(self.quaternion) return Rotation._qu2om(self.quaternion)
def as_Rodrigues(self, def as_Rodrigues(self,
vector = False): vector = False):
""" """
Rodrigues-Frank vector representation [n_1, n_2, n_3, tan(ω/2)] unless vector == True: [n_1, n_2, n_3] * tan(ω/2). Represent as Rodrigues-Frank vector with separated axis and angle argument.
Parameters Parameters
---------- ----------
vector : bool, optional vector : bool, optional
return as actual Rodrigues--Frank vector, i.e. rotation axis scaled by tan(ω/2). Return as actual Rodrigues-Frank vector, i.e. axis
and angle argument are not separated.
Returns
-------
rho : numpy.ndarray of shape (...,4) unless vector == True:
numpy.ndarray of shape (...,3)
Rodrigues-Frank vector: [n_1, n_2, n_3, tan(ω/2)], |n| = 1 and ω [0,π].
""" """
ro = Rotation._qu2ro(self.quaternion) ro = Rotation._qu2ro(self.quaternion)
return ro[...,:3]*ro[...,3] if vector else ro return ro[...,:3]*ro[...,3] if vector else ro
def as_homochoric(self): def as_homochoric(self):
"""Homochoric vector: (h_1, h_2, h_3).""" """
Represent as homochoric vector.
Returns
-------
h : numpy.ndarray of shape (...,3)
Homochoric vector: (h_1, h_2, h_3), |h| < 1/2*π^(2/3).
"""
return Rotation._qu2ho(self.quaternion) return Rotation._qu2ho(self.quaternion)
def as_cubochoric(self): def as_cubochoric(self):
"""Cubochoric vector: (c_1, c_2, c_3).""" """
Represent as cubochoric vector.
Returns
-------
c : numpy.ndarray of shape (...,3)
Cubochoric vector: (c_1, c_2, c_3), max(c_i) < 1/2*π^(2/3).
"""
return Rotation._qu2cu(self.quaternion) return Rotation._qu2cu(self.quaternion)
def M(self): # ToDo not sure about the name: as_M or M? we do not have a from_M def M(self): # ToDo not sure about the name: as_M or M? we do not have a from_M
@ -275,18 +316,34 @@ class Rotation:
# Static constructors. The input data needs to follow the conventions, options allow to # Static constructors. The input data needs to follow the conventions, options allow to
# relax the conventions. # relax the conventions.
@staticmethod @staticmethod
def from_quaternion(quaternion, def from_quaternion(q,
accept_homomorph = False, accept_homomorph = False,
P = -1, P = -1,
acceptHomomorph = None): acceptHomomorph = None): # old name (for compatibility)
"""
Initialize from quaternion.
Parameters
----------
q : numpy.ndarray of shape (...,4)
Unit quaternion in positive real hemisphere: (q_0, q_1, q_2, q_3),
|q|=1, q_0 0.
accept_homomorph : boolean, optional
Allow homomorphic variants, i.e. q_0 < 0 (negative real hemisphere).
Defaults to False.
P : integer {-1,1}, optional
Convention used. Defaults to -1.
"""
if acceptHomomorph is not None: if acceptHomomorph is not None:
accept_homomorph = acceptHomomorph accept_homomorph = acceptHomomorph # for compatibility
qu = np.array(quaternion,dtype=float) qu = np.array(q,dtype=float)
if qu.shape[:-2:-1] != (4,): if qu.shape[:-2:-1] != (4,):
raise ValueError('Invalid shape.') raise ValueError('Invalid shape.')
if abs(P) != 1:
raise ValueError('P ∉ {-1,1}')
if P > 0: qu[...,1:4] *= -1 # convert from P=1 to P=-1 if P == 1: qu[...,1:4] *= -1
if accept_homomorph: if accept_homomorph:
qu[qu[...,0] < 0.0] *= -1 qu[qu[...,0] < 0.0] *= -1
else: else:
@ -298,10 +355,21 @@ class Rotation:
return Rotation(qu) return Rotation(qu)
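A sketch of the accept_homomorph option: the homomorphic (antipodal) quaternion describes the same rotation and is folded back into the positive real hemisphere:

    import numpy as np
    import damask

    q = np.array([-1.,0.,0.,0.])                         # homomorph of the identity
    R = damask.Rotation.from_quaternion(q,accept_homomorph=True)
    assert np.allclose(R.as_quaternion(),[1.,0.,0.,0.])  # q_0 ≥ 0 restored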
@staticmethod @staticmethod
def from_Eulers(eulers, def from_Eulers(phi,
degrees = False): degrees = False):
"""
Initialize from Bunge-Euler angles.
eu = np.array(eulers,dtype=float) Parameters
----------
phi : numpy.ndarray of shape (...,3)
Bunge-Euler angles: (φ_1, ϕ, φ_2), φ_1 ∈ [0,2π], ϕ ∈ [0,π], φ_2 ∈ [0,2π]
unless degrees == True: φ_1 ∈ [0,360], ϕ ∈ [0,180], φ_2 ∈ [0,360].
degrees : boolean, optional
Bunge-Euler angles are given in degrees. Defaults to False.
"""
eu = np.array(phi,dtype=float)
if eu.shape[:-2:-1] != (3,): if eu.shape[:-2:-1] != (3,):
raise ValueError('Invalid shape.') raise ValueError('Invalid shape.')
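A sketch using the cancellation property exercised by the tests further down in this diff (the @ operator for applying rotations also appears there):

    import numpy as np
    import damask

    phi = 2*np.pi*np.random.random()
    R_1 = damask.Rotation.from_Eulers(np.array([phi,0.,0.]))
    R_2 = damask.Rotation.from_Eulers(np.array([0.,0.,2*np.pi-phi]))
    v   = np.random.rand(3)
    assert np.allclose(v,R_2@(R_1@v))   # rotations about the same axis cancel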
@ -316,12 +384,29 @@ class Rotation:
degrees = False, degrees = False,
normalise = False, normalise = False,
P = -1): P = -1):
"""
Initialize from Axis angle pair.
Parameters
----------
axis_angle : numpy.ndarray of shape (...,4)
Axis angle pair: [n_1, n_2, n_3, ω], |n| = 1 and ω ∈ [0,π]
unless degrees = True: ω ∈ [0,180].
degrees : boolean, optional
Angle ω is given in degrees. Defaults to False.
normalise : boolean, optional
Allow |n| ≠ 1. Defaults to False.
P : integer {-1,1}, optional
Convention used. Defaults to -1.
"""
ax = np.array(axis_angle,dtype=float) ax = np.array(axis_angle,dtype=float)
if ax.shape[:-2:-1] != (4,): if ax.shape[:-2:-1] != (4,):
raise ValueError('Invalid shape.') raise ValueError('Invalid shape.')
if abs(P) != 1:
raise ValueError('P ∉ {-1,1}')
if P > 0: ax[...,0:3] *= -1 # convert from P=1 to P=-1 if P == 1: ax[...,0:3] *= -1
if degrees: ax[..., 3] = np.radians(ax[...,3]) if degrees: ax[..., 3] = np.radians(ax[...,3])
if normalise: ax[...,0:3] /= np.linalg.norm(ax[...,0:3],axis=-1) if normalise: ax[...,0:3] /= np.linalg.norm(ax[...,0:3],axis=-1)
if np.any(ax[...,3] < 0.0) or np.any(ax[...,3] > np.pi): if np.any(ax[...,3] < 0.0) or np.any(ax[...,3] > np.pi):
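A sketch of the normalise option with a non-unit axis, given in degrees:

    import numpy as np
    import damask

    ax = np.array([0.,0.,2.,90.])    # axis of length 2, 90° rotation
    R  = damask.Rotation.from_axis_angle(ax,degrees=True,normalise=True)
    assert np.allclose(R.as_axis_angle(degrees=True),[0.,0.,1.,90.])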
@ -335,7 +420,19 @@ class Rotation:
def from_basis(basis, def from_basis(basis,
orthonormal = True, orthonormal = True,
reciprocal = False): reciprocal = False):
"""
Initialize from lattice basis vectors.
Parameters
----------
basis : numpy.ndarray of shape (...,3,3)
Three lattice basis vectors in three dimensions.
orthonormal : boolean, optional
Basis is strictly orthonormal, i.e. is free of stretch components. Defaults to True.
reciprocal : boolean, optional
Basis vectors are given in reciprocal (instead of real) space. Defaults to False.
"""
om = np.array(basis,dtype=float) om = np.array(basis,dtype=float)
if om.shape[:-3:-1] != (3,3): if om.shape[:-3:-1] != (3,3):
raise ValueError('Invalid shape.') raise ValueError('Invalid shape.')
@ -356,20 +453,43 @@ class Rotation:
return Rotation(Rotation._om2qu(om)) return Rotation(Rotation._om2qu(om))
@staticmethod @staticmethod
def from_matrix(om): def from_matrix(R):
"""
Initialize from rotation matrix.
return Rotation.from_basis(om) Parameters
----------
R : numpy.ndarray of shape (...,3,3)
Rotation matrix: det(R) = 1, R.T∙R = I.
"""
return Rotation.from_basis(R)
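A round-trip sketch through the matrix representation (from_matrix simply delegates to from_basis):

    import numpy as np
    import damask

    M = damask.Rotation.from_random().as_matrix()
    assert np.isclose(np.linalg.det(M),1.0)
    assert np.allclose(damask.Rotation.from_matrix(M).as_matrix(),M)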
@staticmethod @staticmethod
def from_Rodrigues(rodrigues, def from_Rodrigues(rho,
normalise = False, normalise = False,
P = -1): P = -1):
"""
Initialize from Rodrigues-Frank vector.
ro = np.array(rodrigues,dtype=float) Parameters
----------
rho : numpy.ndarray of shape (...,4)
Rodrigues-Frank vector (angle separated from axis).
(n_1, n_2, n_3, tan(ω/2)), |n| = 1 and ω ∈ [0,π].
normalise : boolean, optional
Allow |n| ≠ 1. Defaults to False.
P : integer {-1,1}, optional
Convention used. Defaults to -1.
"""
ro = np.array(rho,dtype=float)
if ro.shape[:-2:-1] != (4,): if ro.shape[:-2:-1] != (4,):
raise ValueError('Invalid shape.') raise ValueError('Invalid shape.')
if abs(P) != 1:
raise ValueError('P ∉ {-1,1}')
if P > 0: ro[...,0:3] *= -1 # convert from P=1 to P=-1 if P == 1: ro[...,0:3] *= -1
if normalise: ro[...,0:3] /= np.linalg.norm(ro[...,0:3],axis=-1) if normalise: ro[...,0:3] /= np.linalg.norm(ro[...,0:3],axis=-1)
if np.any(ro[...,3] < 0.0): if np.any(ro[...,3] < 0.0):
raise ValueError('Rodrigues vector rotation angle not positive.') raise ValueError('Rodrigues vector rotation angle not positive.')
@ -379,14 +499,26 @@ class Rotation:
return Rotation(Rotation._ro2qu(ro)) return Rotation(Rotation._ro2qu(ro))
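A sketch constructing a 60° rotation about the x-axis from its Rodrigues-Frank vector:

    import numpy as np
    import damask

    rho = np.array([1.,0.,0.,np.tan(np.pi/6)])   # tan(ω/2) for ω = 60°
    R   = damask.Rotation.from_Rodrigues(rho)
    assert np.allclose(R.as_axis_angle(degrees=True),[1.,0.,0.,60.])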
@staticmethod @staticmethod
def from_homochoric(homochoric, def from_homochoric(h,
P = -1): P = -1):
"""
Initialize from homochoric vector.
ho = np.array(homochoric,dtype=float) Parameters
----------
h : numpy.ndarray of shape (...,3)
Homochoric vector: (h_1, h_2, h_3), |h| < (3/4*π)^(1/3).
P : integer {-1,1}, optional
Convention used. Defaults to -1.
"""
ho = np.array(h,dtype=float)
if ho.shape[:-2:-1] != (3,): if ho.shape[:-2:-1] != (3,):
raise ValueError('Invalid shape.') raise ValueError('Invalid shape.')
if abs(P) != 1:
raise ValueError('P ∉ {-1,1}')
if P > 0: ho *= -1 # convert from P=1 to P=-1 if P == 1: ho *= -1
if np.any(np.linalg.norm(ho,axis=-1) > _R1+1e-9): if np.any(np.linalg.norm(ho,axis=-1) > _R1+1e-9):
raise ValueError('Homochoric coordinate outside of the sphere.') raise ValueError('Homochoric coordinate outside of the sphere.')
@ -394,18 +526,30 @@ class Rotation:
return Rotation(Rotation._ho2qu(ho)) return Rotation(Rotation._ho2qu(ho))
@staticmethod @staticmethod
def from_cubochoric(cubochoric, def from_cubochoric(c,
P = -1): P = -1):
"""
Initialize from cubochoric vector.
cu = np.array(cubochoric,dtype=float) Parameters
----------
c : numpy.ndarray of shape (...,3)
Cubochoric vector: (c_1, c_2, c_3), max(c_i) < 1/2*π^(2/3).
P : integer {-1,1}, optional
Convention used. Defaults to -1.
"""
cu = np.array(c,dtype=float)
if cu.shape[:-2:-1] != (3,): if cu.shape[:-2:-1] != (3,):
raise ValueError('Invalid shape.') raise ValueError('Invalid shape.')
if abs(P) != 1:
raise ValueError('P ∉ {-1,1}')
if np.abs(np.max(cu)) > np.pi**(2./3.) * 0.5+1e-9: if np.abs(np.max(cu)) > np.pi**(2./3.) * 0.5+1e-9:
raise ValueError('Cubochoric coordinate outside of the cube: {} {} {}.'.format(*cu)) raise ValueError('Cubochoric coordinate outside of the cube.')
ho = Rotation._cu2ho(cu) ho = Rotation._cu2ho(cu)
if P > 0: ho *= -1 # convert from P=1 to P=-1 if P == 1: ho *= -1
return Rotation(Rotation._ho2qu(ho)) return Rotation(Rotation._ho2qu(ho))
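A round-trip sketch through both volume-preserving representations (a loose tolerance, as in the test suite, since the mappings are evaluated numerically):

    import numpy as np
    import damask

    R = damask.Rotation.from_random()
    for go,back in [(R.as_homochoric,damask.Rotation.from_homochoric),
                    (R.as_cubochoric,damask.Rotation.from_cubochoric)]:
        assert np.allclose(R.as_quaternion(),back(go()).as_quaternion(),atol=1e-4)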
@ -458,7 +602,7 @@ class Rotation:
np.cos(2.0*np.pi*r[...,1])*B, np.cos(2.0*np.pi*r[...,1])*B,
np.sin(2.0*np.pi*r[...,0])*A],axis=-1) np.sin(2.0*np.pi*r[...,0])*A],axis=-1)
return Rotation(q.reshape(r.shape[:-1]+(4,)) if shape is not None else q).standardize() return Rotation(q.reshape(r.shape[:-1]+(4,)) if shape is not None else q)._standardize()
# for compatibility (old names do not follow convention) # for compatibility (old names do not follow convention)
@ -471,7 +615,7 @@ class Rotation:
#################################################################################################### ####################################################################################################
# Code below available according to the following conditions on https://github.com/MarDiehl/3Drotations # Code below available according to the following conditions on https://github.com/MarDiehl/3Drotations
#################################################################################################### ####################################################################################################
# Copyright (c) 2017-2019, Martin Diehl/Max-Planck-Institut für Eisenforschung GmbH # Copyright (c) 2017-2020, Martin Diehl/Max-Planck-Institut für Eisenforschung GmbH
# Copyright (c) 2013-2014, Marc De Graef/Carnegie Mellon University # Copyright (c) 2013-2014, Marc De Graef/Carnegie Mellon University
# All rights reserved. # All rights reserved.
# #
@ -530,7 +674,7 @@ class Rotation:
np.zeros(qu.shape[:-1]+(2,))]), np.zeros(qu.shape[:-1]+(2,))]),
np.where(np.abs(q03_s) < 1.0e-8, np.where(np.abs(q03_s) < 1.0e-8,
np.block([np.arctan2( 2.0*qu[...,1:2]*qu[...,2:3],qu[...,1:2]**2-qu[...,2:3]**2), np.block([np.arctan2( 2.0*qu[...,1:2]*qu[...,2:3],qu[...,1:2]**2-qu[...,2:3]**2),
np.broadcast_to(np.pi,qu.shape[:-1]+(1,)), np.broadcast_to(np.pi,qu[...,0:1].shape),
np.zeros(qu.shape[:-1]+(1,))]), np.zeros(qu.shape[:-1]+(1,))]),
np.block([np.arctan2((-_P*q02+q13)*chi, (-_P*q01-q23)*chi), np.block([np.arctan2((-_P*q02+q13)*chi, (-_P*q01-q23)*chi),
np.arctan2( 2.0*chi, q03_s-q12_s ), np.arctan2( 2.0*chi, q03_s-q12_s ),
@ -553,7 +697,7 @@ class Rotation:
s = np.sign(qu[...,0:1])/np.sqrt(qu[...,1:2]**2+qu[...,2:3]**2+qu[...,3:4]**2) s = np.sign(qu[...,0:1])/np.sqrt(qu[...,1:2]**2+qu[...,2:3]**2+qu[...,3:4]**2)
omega = 2.0 * np.arccos(np.clip(qu[...,0:1],-1.0,1.0)) omega = 2.0 * np.arccos(np.clip(qu[...,0:1],-1.0,1.0))
ax = np.where(np.broadcast_to(qu[...,0:1] < 1.0e-8,qu.shape), ax = np.where(np.broadcast_to(qu[...,0:1] < 1.0e-8,qu.shape),
np.block([qu[...,1:4],np.broadcast_to(np.pi,qu.shape[:-1]+(1,))]), np.block([qu[...,1:4],np.broadcast_to(np.pi,qu[...,0:1].shape)]),
np.block([qu[...,1:4]*s,omega])) np.block([qu[...,1:4]*s,omega]))
ax[np.isclose(qu[...,0],1.,rtol=0.0)] = [0.0, 0.0, 1.0, 0.0] ax[np.isclose(qu[...,0],1.,rtol=0.0)] = [0.0, 0.0, 1.0, 0.0]
return ax return ax
@ -564,7 +708,7 @@ class Rotation:
with np.errstate(invalid='ignore',divide='ignore'): with np.errstate(invalid='ignore',divide='ignore'):
s = np.linalg.norm(qu[...,1:4],axis=-1,keepdims=True) s = np.linalg.norm(qu[...,1:4],axis=-1,keepdims=True)
ro = np.where(np.broadcast_to(np.abs(qu[...,0:1]) < 1.0e-12,qu.shape), ro = np.where(np.broadcast_to(np.abs(qu[...,0:1]) < 1.0e-12,qu.shape),
np.block([qu[...,1:2], qu[...,2:3], qu[...,3:4], np.broadcast_to(np.inf,qu.shape[:-1]+(1,))]), np.block([qu[...,1:2], qu[...,2:3], qu[...,3:4], np.broadcast_to(np.inf,qu[...,0:1].shape)]),
np.block([qu[...,1:2]/s,qu[...,2:3]/s,qu[...,3:4]/s, np.block([qu[...,1:2]/s,qu[...,2:3]/s,qu[...,3:4]/s,
np.tan(np.arccos(np.clip(qu[...,0:1],-1.0,1.0))) np.tan(np.arccos(np.clip(qu[...,0:1],-1.0,1.0)))
]) ])

View File

@ -4,6 +4,7 @@ import pandas as pd
import numpy as np import numpy as np
from . import version from . import version
from . import util
class Table: class Table:
"""Store spreadsheet-like data.""" """Store spreadsheet-like data."""
@ -24,7 +25,7 @@ class Table:
""" """
self.comments = [] if comments is None else [c for c in comments] self.comments = [] if comments is None else [c for c in comments]
self.data = pd.DataFrame(data=data) self.data = pd.DataFrame(data=data)
self.shapes = shapes self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() }
self._label_condensed() self._label_condensed()
@ -33,7 +34,7 @@ class Table:
labels = [] labels = []
for label,shape in self.shapes.items(): for label,shape in self.shapes.items():
size = int(np.prod(shape)) size = int(np.prod(shape))
labels += ['{}{}'.format('' if size == 1 else '{}_'.format(i+1),label) for i in range(size)] labels += [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
self.data.columns = labels self.data.columns = labels
@ -47,8 +48,7 @@ class Table:
def _add_comment(self,label,shape,info): def _add_comment(self,label,shape,info):
if info is not None: if info is not None:
c = '{}{}: {}'.format(label,' '+str(shape) if np.prod(shape,dtype=int) > 1 else '',info) self.comments.append(f'{label}{" "+str(shape) if np.prod(shape,dtype=int) > 1 else ""}: {info}')
self.comments.append(c)
@staticmethod @staticmethod
@ -126,8 +126,6 @@ class Table:
Filename or file for reading. Filename or file for reading.
""" """
shapes = {'eu':(3,), 'pos':(2,),
'IQ':(1,), 'CI':(1,), 'ID':(1,), 'intensity':(1,), 'fit':(1,)}
try: try:
f = open(fname) f = open(fname)
except TypeError: except TypeError:
@ -136,7 +134,7 @@ class Table:
content = f.readlines() content = f.readlines()
comments = ['table.py:from_ang v {}'.format(version)] comments = [f'table.py:from_ang v{version}']
for line in content: for line in content:
if line.startswith('#'): if line.startswith('#'):
comments.append(line.strip()) comments.append(line.strip())
@ -144,8 +142,11 @@ class Table:
break break
data = np.loadtxt(content) data = np.loadtxt(content)
for c in range(data.shape[1]-10):
shapes['n/a_{}'.format(c+1)] = (1,) shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
remainder = data.shape[1]-sum(shapes.values())
if remainder > 0: # 3.8 can do: if (remainder := data.shape[1]-sum(shapes.values())) > 0
shapes['unknown'] = remainder
return Table(data,shapes,comments) return Table(data,shapes,comments)
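With the normalization added to __init__ above, scalar column shapes can now be given as plain ints, as from_ang does here; a sketch (data and labels are hypothetical):

    import numpy as np
    import damask

    t = damask.Table(np.random.rand(5,4),{'v':3,'x':1})  # ints promoted to tuples
    assert t.shapes['v'] == (3,) and t.shapes['x'] == (1,)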
@ -234,7 +235,6 @@ class Table:
""" """
self.data.drop(columns=label,inplace=True) self.data.drop(columns=label,inplace=True)
del self.shapes[label] del self.shapes[label]
@ -251,8 +251,7 @@ class Table:
""" """
self.data.rename(columns={label_old:label_new},inplace=True) self.data.rename(columns={label_old:label_new},inplace=True)
c = '{} => {}{}'.format(label_old,label_new,'' if info is None else ': {}'.format(info)) self.comments.append(f'{label_old} => {label_new}'+('' if info is None else f': {info}'))
self.comments.append(c)
self.shapes = {(label if label != label_old else label_new):self.shapes[label] for label in self.shapes} self.shapes = {(label if label != label_old else label_new):self.shapes[label] for label in self.shapes}
@ -271,7 +270,7 @@ class Table:
self._label_flat() self._label_flat()
self.data.sort_values(labels,axis=0,inplace=True,ascending=ascending) self.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
self._label_condensed() self._label_condensed()
self.comments.append('sorted by [{}]'.format(', '.join(labels))) self.comments.append(f'sorted by [{", ".join(labels)}]')
def append(self,other): def append(self,other):
@ -328,18 +327,18 @@ class Table:
labels = [] labels = []
for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]: for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
if self.shapes[l] == (1,): if self.shapes[l] == (1,):
labels.append('{}'.format(l)) labels.append(f'{l}')
elif len(self.shapes[l]) == 1: elif len(self.shapes[l]) == 1:
labels += ['{}_{}'.format(i+1,l) \ labels += [f'{i+1}_{l}' \
for i in range(self.shapes[l][0])] for i in range(self.shapes[l][0])]
else: else:
labels += ['{}:{}_{}'.format('x'.join([str(d) for d in self.shapes[l]]),i+1,l) \ labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
for i in range(np.prod(self.shapes[l]))] for i in range(np.prod(self.shapes[l]))]
if new_style: if new_style:
header = ['# {}'.format(comment) for comment in self.comments] header = [f'# {comment}' for comment in self.comments]
else: else:
header = ['{} header'.format(len(self.comments)+1)] \ header = [f'{len(self.comments)+1} header'] \
+ self.comments \ + self.comments \
try: try:

View File

@ -53,7 +53,7 @@ class Test:
self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__)) self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
self.parser = OptionParser(option_class=damask.extendableOption, self.parser = OptionParser(option_class=damask.extendableOption,
description = '{} (Test class version: {})'.format(self.description,damask.version), description = f'{self.description} (Test class version: {damask.version})',
usage = './test.py [options]') usage = './test.py [options]')
self.parser.add_option("-k", "--keep", self.parser.add_option("-k", "--keep",
action = "store_true", action = "store_true",
@ -93,7 +93,7 @@ class Test:
for variant,object in enumerate(self.variants): for variant,object in enumerate(self.variants):
name = self.variantName(variant) name = self.variantName(variant)
if self.options.show: if self.options.show:
logging.critical('{}: {}'.format(variant+1,name)) logging.critical(f'{variant+1}: {name}')
elif self.options.select is not None \ elif self.options.select is not None \
and not (name in self.options.select or str(variant+1) in self.options.select): and not (name in self.options.select or str(variant+1) in self.options.select):
pass pass
@ -106,12 +106,12 @@ class Test:
self.postprocess(variant) self.postprocess(variant)
if self.options.update: if self.options.update:
if self.update(variant) != 0: logging.critical('update for "{}" failed.'.format(name)) if self.update(variant) != 0: logging.critical(f'update for "{name}" failed.')
elif not (self.options.accept or self.compare(variant)): # no update, do comparison elif not (self.options.accept or self.compare(variant)): # no update, do comparison
return variant+1 # return culprit return variant+1 # return culprit
except Exception as e: except Exception as e:
logging.critical('exception during variant execution: "{}"'.format(str(e))) logging.critical(f'exception during variant execution: "{e}"')
return variant+1 # return culprit return variant+1 # return culprit
return 0 return 0
@ -124,13 +124,13 @@ class Test:
try: try:
shutil.rmtree(self.dirCurrent()) shutil.rmtree(self.dirCurrent())
except FileNotFoundError: except FileNotFoundError:
logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent())) logging.warning(f'removal of directory "{self.dirCurrent()}" not possible...')
try: try:
os.mkdir(self.dirCurrent()) os.mkdir(self.dirCurrent())
return True return True
except FileExistsError: except FileExistsError:
logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent())) logging.critical(f'creation of directory "{self.dirCurrent()}" failed.')
return False return False
def prepareAll(self): def prepareAll(self):
@ -211,7 +211,7 @@ class Test:
try: try:
shutil.copy2(source,target) shutil.copy2(source,target)
except FileNotFoundError: except FileNotFoundError:
logging.critical('error copying {} to {}'.format(source,target)) logging.critical(f'error copying {source} to {target}')
raise FileNotFoundError raise FileNotFoundError
@ -222,7 +222,7 @@ class Test:
try: try:
shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i])) shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
except FileNotFoundError: except FileNotFoundError:
logging.critical('Reference2Current: Unable to copy file "{}"'.format(f)) logging.critical(f'Reference2Current: Unable to copy file "{f}"')
raise FileNotFoundError raise FileNotFoundError
@ -235,7 +235,7 @@ class Test:
shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i])) shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i]))
except FileNotFoundError: except FileNotFoundError:
logging.error(os.path.join(source,f)) logging.error(os.path.join(source,f))
logging.critical('Base2Current: Unable to copy file "{}"'.format(f)) logging.critical(f'Base2Current: Unable to copy file "{f}"')
raise FileNotFoundError raise FileNotFoundError
@ -246,7 +246,7 @@ class Test:
try: try:
shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i])) shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i]))
except FileNotFoundError: except FileNotFoundError:
logging.critical('Current2Reference: Unable to copy file "{}"'.format(f)) logging.critical(f'Current2Reference: Unable to copy file "{f}"')
raise FileNotFoundError raise FileNotFoundError
@ -257,7 +257,7 @@ class Test:
try: try:
shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i])) shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i]))
except FileNotFoundError: except FileNotFoundError:
logging.critical('Proof2Current: Unable to copy file "{}"'.format(f)) logging.critical(f'Proof2Current: Unable to copy file "{f}"')
raise FileNotFoundError raise FileNotFoundError
@ -267,7 +267,7 @@ class Test:
try: try:
shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i])) shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
except FileNotFoundError: except FileNotFoundError:
logging.critical('Current2Current: Unable to copy file "{}"'.format(f)) logging.critical(f'Current2Current: Unable to copy file "{f}"')
raise FileNotFoundError raise FileNotFoundError
@ -302,9 +302,7 @@ class Test:
max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.)) max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
refArrayNonZero = refArrayNonZero[curArray.nonzero()] refArrayNonZero = refArrayNonZero[curArray.nonzero()]
curArray = curArray[curArray.nonzero()] curArray = curArray[curArray.nonzero()]
print(' ********\n * maximum relative error {} between {} and {}\n ********'.format(max_err, print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********')
refArrayNonZero[max_loc],
curArray[max_loc]))
return max_err return max_err
else: else:
raise Exception('mismatch in array size to compare') raise Exception('mismatch in array size to compare')
@ -350,7 +348,7 @@ class Test:
for i in range(dataLength): for i in range(dataLength):
if headings0[i]['shape'] != headings1[i]['shape']: if headings0[i]['shape'] != headings1[i]['shape']:
raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label'])) raise Exception(f"shape mismatch between {headings0[i]['label']} and {headings1[i]['label']}")
shape[i] = headings0[i]['shape'] shape[i] = headings0[i]['shape']
for j in range(np.shape(shape[i])[0]): for j in range(np.shape(shape[i])[0]):
length[i] *= shape[i][j] length[i] *= shape[i][j]
@ -358,9 +356,7 @@ class Test:
for j in range(np.shape(normShape[i])[0]): for j in range(np.shape(normShape[i])[0]):
normLength[i] *= normShape[i][j] normLength[i] *= normShape[i][j]
else: else:
raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0), raise Exception(f'trying to compare {len(headings0)} with {len(headings1)} normed by {len(normHeadings)} data sets')
len(headings1),
len(normHeadings)))
table0 = damask.ASCIItable(name=file0,readonly=True) table0 = damask.ASCIItable(name=file0,readonly=True)
table0.head_read() table0.head_read()
@ -372,11 +368,11 @@ class Test:
key1 = ('1_' if length[i]>1 else '') + headings1[i]['label'] key1 = ('1_' if length[i]>1 else '') + headings1[i]['label']
normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label'] normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label']
if key0 not in table0.labels(raw = True): if key0 not in table0.labels(raw = True):
raise Exception('column "{}" not found in first table...\n'.format(key0)) raise Exception(f'column "{key0}" not found in first table...')
elif key1 not in table1.labels(raw = True): elif key1 not in table1.labels(raw = True):
raise Exception('column "{}" not found in second table...\n'.format(key1)) raise Exception(f'column "{key1}" not found in second table...')
elif normKey not in table0.labels(raw = True): elif normKey not in table0.labels(raw = True):
raise Exception('column "{}" not found in first table...\n'.format(normKey)) raise Exception(f'column "{normKey}" not found in first table...')
else: else:
column[0][i] = table0.label_index(key0) column[0][i] = table0.label_index(key0)
column[1][i] = table1.label_index(key1) column[1][i] = table1.label_index(key1)
@ -404,9 +400,9 @@ class Test:
norm[i] = [1.0 for j in range(line0-len(skipLines))] norm[i] = [1.0 for j in range(line0-len(skipLines))]
absTol[i] = True absTol[i] = True
if perLine: if perLine:
logging.warning('At least one norm of "{}" in first table is 0.0, using absolute tolerance'.format(headings0[i]['label'])) logging.warning(f"At least one norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
else: else:
logging.warning('Maximum norm of "{}" in first table is 0.0, using absolute tolerance'.format(headings0[i]['label'])) logging.warning(f"Maximum norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
line1 = 0 line1 = 0
while table1.data_read(): # read next data line of ASCII table while table1.data_read(): # read next data line of ASCII table
@ -418,18 +414,14 @@ class Test:
norm[i][line1-len(skipLines)]) norm[i][line1-len(skipLines)])
line1 +=1 line1 +=1
if (line0 != line1): raise Exception('found {} lines in first table but {} in second table'.format(line0,line1)) if (line0 != line1): raise Exception(f'found {line0} lines in first table but {line1} in second table')
logging.info(' ********') logging.info(' ********')
for i in range(dataLength): for i in range(dataLength):
if absTol[i]: if absTol[i]:
logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i], logging.info(f" * maximum absolute error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
headings0[i]['label'],
headings1[i]['label']))
else: else:
logging.info(' * maximum relative error {} between {} and {}'.format(maxError[i], logging.info(f" * maximum relative error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
headings0[i]['label'],
headings1[i]['label']))
logging.info(' ********') logging.info(' ********')
return maxError return maxError
@ -480,8 +472,8 @@ class Test:
normedDelta = np.where(normBy>preFilter,delta/normBy,0.0) normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
mean = np.amax(np.abs(np.mean(normedDelta,0))) mean = np.amax(np.abs(np.mean(normedDelta,0)))
std = np.amax(np.std(normedDelta,0)) std = np.amax(np.std(normedDelta,0))
logging.info('mean: {:f}'.format(mean)) logging.info(f'mean: {mean:f}')
logging.info('std: {:f}'.format(std)) logging.info(f'std: {std:f}')
return (mean<meanTol) & (std < stdTol) return (mean<meanTol) & (std < stdTol)
@ -521,7 +513,7 @@ class Test:
for i,(table,labels) in enumerate(zip(tables,columns)): for i,(table,labels) in enumerate(zip(tables,columns)):
if np.any(dimensions != [np.prod(table.shapes[c]) for c in labels]): # check data object consistency if np.any(dimensions != [np.prod(table.shapes[c]) for c in labels]): # check data object consistency
logging.critical('Table {} differs in data layout.'.format(files[i])) logging.critical(f'Table {files[i]} differs in data layout.')
return False return False
data.append(np.hstack(list(table.get(label) for label in labels)).astype(np.float)) # store data.append(np.hstack(list(table.get(label) for label in labels)).astype(np.float)) # store
@ -537,19 +529,19 @@ class Test:
for i in range(len(data)): for i in range(len(data)):
data[i] /= maximum # normalize each table data[i] /= maximum # normalize each table
logging.info('shape of data {}: {}'.format(i,data[i].shape)) logging.info(f'shape of data {i}: {data[i].shape}')
if debug: if debug:
violators = np.absolute(data[0]-data[1]) > atol + rtol*np.absolute(data[1]) violators = np.absolute(data[0]-data[1]) > atol + rtol*np.absolute(data[1])
logging.info('shape of violators: {}'.format(violators.shape)) logging.info(f'shape of violators: {violators.shape}')
for j,culprits in enumerate(violators): for j,culprits in enumerate(violators):
goodguys = np.logical_not(culprits) goodguys = np.logical_not(culprits)
if culprits.any(): if culprits.any():
logging.info('{} has {}'.format(j,np.sum(culprits))) logging.info(f'{j} has {np.sum(culprits)}')
logging.info('deviation: {}'.format(np.absolute(data[0][j]-data[1][j])[culprits])) logging.info(f'deviation: {np.absolute(data[0][j]-data[1][j])[culprits]}')
logging.info('data : {}'.format(np.absolute(data[1][j])[culprits])) logging.info(f'data : {np.absolute(data[1][j])[culprits]}')
logging.info('deviation: {}'.format(np.absolute(data[0][j]-data[1][j])[goodguys])) logging.info(f'deviation: {np.absolute(data[0][j]-data[1][j])[goodguys]}')
logging.info('data : {}'.format(np.absolute(data[1][j])[goodguys])) logging.info(f'data : {np.absolute(data[1][j])[goodguys]}')
allclose = True # start optimistic allclose = True # start optimistic
for i in range(1,len(data)): for i in range(1,len(data)):
@ -588,12 +580,12 @@ class Test:
if culprit == 0: if culprit == 0:
count = len(self.variants) if self.options.select is None else len(self.options.select) count = len(self.variants) if self.options.select is None else len(self.options.select)
msg = 'Test passed.' if count == 1 else 'All {} tests passed.'.format(count) msg = 'Test passed.' if count == 1 else f'All {count} tests passed.'
elif culprit == -1: elif culprit == -1:
msg = 'Warning: could not start test...' msg = 'Warning: could not start test...'
ret = 0 ret = 0
else: else:
msg = 'Test "{}" failed.'.format(self.variantName(culprit-1)) msg = f'Test "{self.variantName(culprit-1)}" failed.'
logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n') logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n')
return ret return ret

View File

@ -1,4 +1,4 @@
import os from pathlib import Path
import pandas as pd import pandas as pd
import numpy as np import numpy as np
@ -85,7 +85,7 @@ class VTK:
geom = vtk.vtkUnstructuredGrid() geom = vtk.vtkUnstructuredGrid()
geom.SetPoints(vtk_nodes) geom.SetPoints(vtk_nodes)
geom.SetCells(eval('vtk.VTK_{}'.format(cell_type.split('_',1)[-1].upper())),cells) geom.SetCells(eval(f'vtk.VTK_{cell_type.split("_",1)[-1].upper()}'),cells)
return VTK(geom) return VTK(geom)
@ -126,8 +126,8 @@ class VTK:
vtkUnstructuredGrid, and vtkPolyData. vtkUnstructuredGrid, and vtkPolyData.
""" """
ext = os.path.splitext(fname)[1] ext = Path(fname).suffix
if ext == '.vtk': if ext == '.vtk' or dataset_type:
reader = vtk.vtkGenericDataObjectReader() reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(fname) reader.SetFileName(fname)
reader.Update() reader.Update()
@ -140,7 +140,7 @@ class VTK:
elif dataset_type.lower().endswith('polydata'): elif dataset_type.lower().endswith('polydata'):
geom = reader.GetPolyDataOutput() geom = reader.GetPolyDataOutput()
else: else:
raise TypeError('Unknown dataset type for vtk file {}'.format(dataset_type)) raise TypeError(f'Unknown dataset type {dataset_type} for vtk file')
else: else:
if ext == '.vtr': if ext == '.vtr':
reader = vtk.vtkXMLRectilinearGridReader() reader = vtk.vtkXMLRectilinearGridReader()
@ -149,7 +149,7 @@ class VTK:
elif ext == '.vtp': elif ext == '.vtp':
reader = vtk.vtkXMLPolyDataReader() reader = vtk.vtkXMLPolyDataReader()
else: else:
raise TypeError('Unknown file extension {}'.format(ext)) raise TypeError(f'Unknown file extension {ext}')
reader.SetFileName(fname) reader.SetFileName(fname)
reader.Update() reader.Update()
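A sketch of the extended reader dispatch (file names are hypothetical; dataset_type is only required for legacy .vtk files, where the suffix does not identify the geometry):

    import damask

    v_xml = damask.VTK.from_file('geom.vtr')                        # type inferred from suffix
    v_old = damask.VTK.from_file('geom.vtk','VTK_rectilinearGrid')  # legacy reader needs a hint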
@ -176,10 +176,10 @@ class VTK:
writer = vtk.vtkXMLPolyDataWriter() writer = vtk.vtkXMLPolyDataWriter()
default_ext = writer.GetDefaultFileExtension() default_ext = writer.GetDefaultFileExtension()
name, ext = os.path.splitext(fname) ext = Path(fname).suffix
if ext and ext != '.'+default_ext: if ext and ext != '.'+default_ext:
raise ValueError('Given extension {} is not .{}'.format(ext,default_ext)) raise ValueError(f'Given extension {ext} does not match default .{default_ext}')
writer.SetFileName('{}.{}'.format(name,default_ext)) writer.SetFileName(str(Path(fname).with_suffix('.'+default_ext)))
writer.SetCompressorTypeToZLib() writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary() writer.SetDataModeToBinary()
writer.SetInputData(self.geom) writer.SetInputData(self.geom)
@ -214,7 +214,7 @@ class VTK:
def __repr__(self): def __repr__(self):
"""ASCII representation of the VTK data.""" """ASCII representation of the VTK data."""
writer = vtk.vtkDataSetWriter() writer = vtk.vtkDataSetWriter()
writer.SetHeader('# DAMASK.VTK v{}'.format(version)) writer.SetHeader(f'# DAMASK.VTK v{version}')
writer.WriteToOutputStringOn() writer.WriteToOutputStringOn()
writer.SetInputData(self.geom) writer.SetInputData(self.geom)
writer.Write() writer.Write()

View File

@ -1,6 +1,7 @@
import re import re
import os import os
from damask import util
class Section(): class Section():
def __init__(self,data = {'__order__':[]},part = ''): def __init__(self,data = {'__order__':[]},part = ''):
@ -94,7 +95,7 @@ class Material():
'texture', 'texture',
'microstructure', 'microstructure',
] ]
self.data = {\ self.data = {
'homogenization': {'__order__': []}, 'homogenization': {'__order__': []},
'microstructure': {'__order__': []}, 'microstructure': {'__order__': []},
'crystallite': {'__order__': []}, 'crystallite': {'__order__': []},
@ -107,19 +108,19 @@ class Material():
"""Returns current data structure in material.config format.""" """Returns current data structure in material.config format."""
me = [] me = []
for part in self.parts: for part in self.parts:
if self.verbose: print('processing <{}>'.format(part)) if self.verbose: print(f'processing <{part}>')
me += ['', me += ['',
'#'*100, '#'*100,
'<{}>'.format(part), f'<{part}>',
'#'*100, '#'*100,
] ]
for section in self.data[part]['__order__']: for section in self.data[part]['__order__']:
me += ['[{}] {}'.format(section,'#'+'-'*max(0,96-len(section)))] me += [f'[{section}] {"#"+"-"*max(0,96-len(section))}']
for key in self.data[part][section]['__order__']: for key in self.data[part][section]['__order__']:
if key.startswith('(') and key.endswith(')'): # multiple (key) if key.startswith('(') and key.endswith(')'): # multiple (key)
me += ['{}\t{}'.format(key,' '.join(values)) for values in self.data[part][section][key]] me += [f'{key}\t{" ".join(values)}' for values in self.data[part][section][key]]
else: # plain key else: # plain key
me += ['{}\t{}'.format(key,' '.join(map(str,self.data[part][section][key])))] me += [f'{key}\t{util.srepr(self.data[part][section][key]," ")}']
return '\n'.join(me) + '\n' return '\n'.join(me) + '\n'
def parse(self, part=None, sections=[], content=None): def parse(self, part=None, sections=[], content=None):
@ -185,9 +186,9 @@ class Material():
outname = filename outname = filename
while os.path.exists(outname) and not overwrite: while os.path.exists(outname) and not overwrite:
i += 1 i += 1
outname = '{}_{}'.format(filename,i) outname = f'{filename}_{i}'
if self.verbose: print('Writing material data to {}'.format(outname)) if self.verbose: print(f'Writing material data to {outname}')
with open(outname,'w') as f: with open(outname,'w') as f:
f.write(str(self)) f.write(str(self))
return outname return outname
@ -196,7 +197,7 @@ class Material():
"""Add Update.""" """Add Update."""
part = part.lower() part = part.lower()
section = section.lower() section = section.lower()
if part not in self.parts: raise Exception('invalid part {}'.format(part)) if part not in self.parts: raise Exception(f'invalid part {part}')
if not isinstance(initialData, dict): if not isinstance(initialData, dict):
initialData = initialData.data() initialData = initialData.data()

View File

@ -233,7 +233,7 @@ def cell_coord0_gridSizeOrigin(coord0,ordered=True):
origin[_np.where(grid==1)] = 0.0 origin[_np.where(grid==1)] = 0.0
if grid.prod() != len(coord0): if grid.prod() != len(coord0):
raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid)) raise ValueError(f'Data count {len(coord0)} does not match grid {grid}.')
start = origin + delta*.5 start = origin + delta*.5
end = origin - delta*.5 + size end = origin - delta*.5 + size
@ -384,7 +384,7 @@ def node_coord0_gridSizeOrigin(coord0,ordered=True):
origin = mincorner origin = mincorner
if (grid+1).prod() != len(coord0): if (grid+1).prod() != len(coord0):
raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid)) raise ValueError(f'Data count {len(coord0)} does not match grid {grid}.')
atol = _np.max(size)*5e-2 atol = _np.max(size)*5e-2
if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \ if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \

View File

@ -1,7 +1,7 @@
import os
import subprocess import subprocess
import shlex import shlex
import string import string
from pathlib import Path
from .._environment import Environment from .._environment import Environment
@ -19,28 +19,24 @@ class Marc:
""" """
self.solver = 'Marc' self.solver = 'Marc'
try: self.version = version
self.version = int(version)
except TypeError:
self.version = -1
@property
#-------------------------- def library_path(self):
def libraryPath(self):
path_MSC = Environment().options['MSC_ROOT'] path_MSC = Environment().options['MSC_ROOT']
path_lib = '{}/mentat{}/shlib/linux64'.format(path_MSC,self.version) path_lib = Path(f'{path_MSC}/mentat{self.version}/shlib/linux64')
return path_lib if os.path.exists(path_lib) else '' return path_lib if path_lib.is_dir() else None
#-------------------------- @property
def toolsPath(self): def tools_path(self):
path_MSC = Environment().options['MSC_ROOT'] path_MSC = Environment().options['MSC_ROOT']
path_tools = '{}/marc{}/tools'.format(path_MSC,self.version) path_tools = Path(f'{path_MSC}/marc{self.version}/tools')
return path_tools if os.path.exists(path_tools) else '' return path_tools if path_tools.is_dir() else None
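A sketch of the new path properties (requires a DAMASK configuration providing MSC_ROOT; the value is now a pathlib.Path, or None when the directory is missing):

    import damask

    marc = damask.solver.Marc()
    print(marc.library_path)   # e.g. .../mentat<version>/shlib/linux64, or None
    print(marc.tools_path)     # e.g. .../marc<version>/tools, or None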
#-------------------------- #--------------------------
@ -53,23 +49,23 @@ class Marc:
): ):
damaskEnv = Environment() env = Environment()
user = os.path.join(damaskEnv.relPath('src'),'DAMASK_marc{}.{}'.format(self.version,'f90' if compile else 'marc')) usersub = env.root_dir/Path(f'src/DAMASK_marc{self.version}').with_suffix('.f90' if compile else '.marc')
if not os.path.isfile(user): if not usersub.is_file():
raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),user)) raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),usersub))
# Define options [see Marc Installation and Operation Guide, pp 23] # Define options [see Marc Installation and Operation Guide, pp 23]
script = 'run_damask_{}mp'.format(optimization) script = f'run_damask_{optimization}mp'
cmd = os.path.join(self.toolsPath(),script) + \ cmd = str(self.tools_path/Path(script)) + \
' -jid ' + model + '_' + job + \ ' -jid ' + model + '_' + job + \
' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no' ' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no'
if compile: cmd += ' -u ' + user + ' -save y' if compile: cmd += ' -u ' + str(usersub) + ' -save y'
else: cmd += ' -prog ' + os.path.splitext(user)[0] else: cmd += ' -prog ' + str(usersub.with_suffix(''))
print('job submission {} compilation: {}'.format('with' if compile else 'without',user)) print('job submission {} compilation: {}'.format(('with' if compile else 'without'),usersub))
if logfile: log = open(logfile, 'w') if logfile: log = open(logfile, 'w')
print(cmd) print(cmd)
process = subprocess.Popen(shlex.split(cmd),stdout = log,stderr = subprocess.STDOUT) process = subprocess.Popen(shlex.split(cmd),stdout = log,stderr = subprocess.STDOUT)

View File

@ -27,7 +27,7 @@ __all__=[
#################################################################################################### ####################################################################################################
def srepr(arg,glue = '\n'): def srepr(arg,glue = '\n'):
r""" r"""
Join arguments as individual lines. Join arguments with glue string.
Parameters Parameters
---------- ----------
@ -93,7 +93,7 @@ def strikeout(what):
def execute(cmd, def execute(cmd,
streamIn = None, stream_in = None,
wd = './', wd = './',
env = None): env = None):
""" """
@ -103,7 +103,7 @@ def execute(cmd,
---------- ----------
cmd : str cmd : str
Command to be executed. Command to be executed.
streanIn :, optional stream_in : file object, optional
Input (via pipe) for executed process. Input (via pipe) for executed process.
wd : str, optional wd : str, optional
Working directory of process. Defaults to ./ . Working directory of process. Defaults to ./ .
@ -119,14 +119,14 @@ def execute(cmd,
stderr = subprocess.PIPE, stderr = subprocess.PIPE,
stdin = subprocess.PIPE, stdin = subprocess.PIPE,
env = myEnv) env = myEnv)
out,error = [i for i in (process.communicate() if streamIn is None stdout, stderr = [i for i in (process.communicate() if stream_in is None
else process.communicate(streamIn.read().encode('utf-8')))] else process.communicate(stream_in.read().encode('utf-8')))]
os.chdir(initialPath) os.chdir(initialPath)
out = out.decode('utf-8').replace('\x08','') stdout = stdout.decode('utf-8').replace('\x08','')
error = error.decode('utf-8').replace('\x08','') stderr = stderr.decode('utf-8').replace('\x08','')
if process.returncode != 0: if process.returncode != 0:
raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode)) raise RuntimeError(f'{cmd} failed with returncode {process.returncode}')
return out,error return stdout, stderr
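A usage sketch of the renamed keyword and return values:

    from damask import util

    stdout,stderr = util.execute('echo DAMASK')   # decoded stdout/stderr
    print(stdout.strip())                         # 'DAMASK'; RuntimeError on non-zero exit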
def show_progress(iterable,N_iter=None,prefix='',bar_length=50): def show_progress(iterable,N_iter=None,prefix='',bar_length=50):
@ -156,7 +156,7 @@ def show_progress(iterable,N_iter=None,prefix='',bar_length=50):
def scale_to_coprime(v): def scale_to_coprime(v):
"""Scale vector to co-prime (relatively prime) integers.""" """Scale vector to co-prime (relatively prime) integers."""
MAX_DENOMINATOR = 1000 MAX_DENOMINATOR = 1000000
def get_square_denominator(x): def get_square_denominator(x):
"""Denominator of the square of a number.""" """Denominator of the square of a number."""
@ -166,10 +166,13 @@ def scale_to_coprime(v):
"""Least common multiple.""" """Least common multiple."""
return a * b // np.gcd(a, b) return a * b // np.gcd(a, b)
denominators = [int(get_square_denominator(i)) for i in v] m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int)
s = reduce(lcm, denominators) ** 0.5 m = m//reduce(np.gcd,m)
m = (np.array(v)*s).astype(np.int)
return m//reduce(np.gcd,m) if not np.allclose(v[v.nonzero()]/m[v.nonzero()],v[v.nonzero()][0]/m[m.nonzero()][0]):
raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?')
return m
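A worked example with the enlarged MAX_DENOMINATOR and the new consistency check (the typical use case is converting direction cosines to Miller indices):

    import numpy as np
    from damask import util

    assert np.array_equal(util.scale_to_coprime(np.array([0.5,0.5,1.0])),[1,1,2])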
#################################################################################################### ####################################################################################################
@ -223,21 +226,20 @@ class _ProgressBar:
self.start_time = datetime.datetime.now() self.start_time = datetime.datetime.now()
self.last_fraction = 0.0 self.last_fraction = 0.0
sys.stderr.write('{} {} 0% ETA n/a'.format(self.prefix, '░'*self.bar_length)) sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
sys.stderr.flush() sys.stderr.flush()
def update(self,iteration): def update(self,iteration):
fraction = (iteration+1) / self.total fraction = (iteration+1) / self.total
filled_length = int(self.bar_length * fraction)
if int(self.bar_length * fraction) > int(self.bar_length * self.last_fraction): if filled_length > int(self.bar_length * self.last_fraction):
bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
delta_time = datetime.datetime.now() - self.start_time delta_time = datetime.datetime.now() - self.start_time
remaining_time = (self.total - (iteration+1)) * delta_time / (iteration+1) remaining_time = (self.total - (iteration+1)) * delta_time / (iteration+1)
remaining_time -= datetime.timedelta(microseconds=remaining_time.microseconds) # remove μs remaining_time -= datetime.timedelta(microseconds=remaining_time.microseconds) # remove μs
sys.stderr.write(f'\r{self.prefix} {bar} {fraction:>4.0%} ETA {remaining_time}')
filled_length = int(self.bar_length * fraction)
bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
sys.stderr.write('\r{} {} {:>4.0%} ETA {}'.format(self.prefix, bar, fraction, remaining_time))
sys.stderr.flush() sys.stderr.flush()
self.last_fraction = fraction self.last_fraction = fraction
@ -249,7 +251,7 @@ class _ProgressBar:
class bcolors: class bcolors:
""" """
ASCII Colors. ASCII colors.
https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py
https://stackoverflow.com/questions/287871 https://stackoverflow.com/questions/287871

View File

@ -6,6 +6,7 @@ import numpy as np
from damask import Geom from damask import Geom
from damask import Rotation from damask import Rotation
from damask import util
def geom_equal(a,b): def geom_equal(a,b):
@ -85,8 +86,8 @@ class TestGeom:
def test_mirror(self,default,update,reference_dir,directions,reflect): def test_mirror(self,default,update,reference_dir,directions,reflect):
modified = copy.deepcopy(default) modified = copy.deepcopy(default)
modified.mirror(directions,reflect) modified.mirror(directions,reflect)
tag = 'directions={}_reflect={}'.format('-'.join(directions),reflect) tag = f'directions={"-".join(directions)}_reflect={reflect}'
reference = os.path.join(reference_dir,'mirror_{}.geom'.format(tag)) reference = os.path.join(reference_dir,f'mirror_{tag}.geom')
if update: modified.to_file(reference) if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference)) assert geom_equal(modified,Geom.from_file(reference))
@ -94,8 +95,8 @@ class TestGeom:
def test_clean(self,default,update,reference_dir,stencil): def test_clean(self,default,update,reference_dir,stencil):
modified = copy.deepcopy(default) modified = copy.deepcopy(default)
modified.clean(stencil) modified.clean(stencil)
tag = 'stencil={}'.format(stencil) tag = f'stencil={stencil}'
reference = os.path.join(reference_dir,'clean_{}.geom'.format(tag)) reference = os.path.join(reference_dir,f'clean_{tag}.geom')
if update: modified.to_file(reference) if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference)) assert geom_equal(modified,Geom.from_file(reference))
@ -111,8 +112,8 @@ class TestGeom:
def test_scale(self,default,update,reference_dir,grid): def test_scale(self,default,update,reference_dir,grid):
modified = copy.deepcopy(default) modified = copy.deepcopy(default)
modified.scale(grid) modified.scale(grid)
tag = 'grid={}'.format('-'.join([str(x) for x in grid])) tag = f'grid={util.srepr(grid,"-")}'
reference = os.path.join(reference_dir,'scale_{}.geom'.format(tag)) reference = os.path.join(reference_dir,f'scale_{tag}.geom')
if update: modified.to_file(reference) if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference)) assert geom_equal(modified,Geom.from_file(reference))
@ -150,8 +151,8 @@ class TestGeom:
def test_rotate(self,default,update,reference_dir,Eulers): def test_rotate(self,default,update,reference_dir,Eulers):
modified = copy.deepcopy(default) modified = copy.deepcopy(default)
modified.rotate(Rotation.from_Eulers(Eulers,degrees=True)) modified.rotate(Rotation.from_Eulers(Eulers,degrees=True))
tag = 'Eulers={}-{}-{}'.format(*Eulers) tag = f'Eulers={util.srepr(Eulers,"-")}'
reference = os.path.join(reference_dir,'rotate_{}.geom'.format(tag)) reference = os.path.join(reference_dir,f'rotate_{tag}.geom')
if update: modified.to_file(reference) if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference)) assert geom_equal(modified,Geom.from_file(reference))

View File

@ -38,4 +38,4 @@ class TestSymmetry:
def test_invalid_argument(self,function): def test_invalid_argument(self,function):
s = Symmetry() # noqa s = Symmetry() # noqa
with pytest.raises(ValueError): with pytest.raises(ValueError):
eval('s.{}(np.ones(4))'.format(function)) eval(f's.{function}(np.ones(4))')

View File

@ -49,7 +49,7 @@ class TestOrientation:
@pytest.mark.parametrize('model',['Bain','KS','GT','GT_prime','NW','Pitsch']) @pytest.mark.parametrize('model',['Bain','KS','GT','GT_prime','NW','Pitsch'])
@pytest.mark.parametrize('lattice',['fcc','bcc']) @pytest.mark.parametrize('lattice',['fcc','bcc'])
def test_relationship_reference(self,update,reference_dir,model,lattice): def test_relationship_reference(self,update,reference_dir,model,lattice):
reference = os.path.join(reference_dir,'{}_{}.txt'.format(lattice,model)) reference = os.path.join(reference_dir,f'{lattice}_{model}.txt')
ori = Orientation(Rotation(),lattice) ori = Orientation(Rotation(),lattice)
eu = np.array([o.rotation.as_Eulers(degrees=True) for o in ori.relatedOrientations(model)]) eu = np.array([o.rotation.as_Eulers(degrees=True) for o in ori.relatedOrientations(model)])
if update: if update:

View File

@ -137,7 +137,7 @@ class TestResult:
default.add_Cauchy('P','F') default.add_Cauchy('P','F')
default.add_eigenvalue('sigma',eigenvalue) default.add_eigenvalue('sigma',eigenvalue)
loc = {'sigma' :default.get_dataset_location('sigma'), loc = {'sigma' :default.get_dataset_location('sigma'),
'lambda':default.get_dataset_location('lambda_{}(sigma)'.format(eigenvalue))} 'lambda':default.get_dataset_location(f'lambda_{eigenvalue}(sigma)')}
in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True) in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True)
in_file = default.read_dataset(loc['lambda'],0) in_file = default.read_dataset(loc['lambda'],0)
assert np.allclose(in_memory,in_file) assert np.allclose(in_memory,in_file)
@ -147,7 +147,7 @@ class TestResult:
default.add_Cauchy('P','F') default.add_Cauchy('P','F')
default.add_eigenvector('sigma',eigenvalue) default.add_eigenvector('sigma',eigenvalue)
loc = {'sigma' :default.get_dataset_location('sigma'), loc = {'sigma' :default.get_dataset_location('sigma'),
'v(sigma)':default.get_dataset_location('v_{}(sigma)'.format(eigenvalue))} 'v(sigma)':default.get_dataset_location(f'v_{eigenvalue}(sigma)')}
in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx] in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx]
in_file = default.read_dataset(loc['v(sigma)'],0) in_file = default.read_dataset(loc['v(sigma)'],0)
assert np.allclose(in_memory,in_file) assert np.allclose(in_memory,in_file)
@ -179,7 +179,7 @@ class TestResult:
t = ['V','U'][np.random.randint(0,2)] t = ['V','U'][np.random.randint(0,2)]
m = np.random.random()*2.0 - 1.0 m = np.random.random()*2.0 - 1.0
default.add_strain_tensor('F',t,m) default.add_strain_tensor('F',t,m)
label = 'epsilon_{}^{}(F)'.format(t,m) label = f'epsilon_{t}^{m}(F)'
default.add_Mises(label) default.add_Mises(label)
loc = {label :default.get_dataset_location(label), loc = {label :default.get_dataset_location(label),
label+'_vM':default.get_dataset_location(label+'_vM')} label+'_vM':default.get_dataset_location(label+'_vM')}
@ -248,7 +248,7 @@ class TestResult:
t = ['V','U'][np.random.randint(0,2)] t = ['V','U'][np.random.randint(0,2)]
m = np.random.random()*2.0 - 1.0 m = np.random.random()*2.0 - 1.0
default.add_strain_tensor('F',t,m) default.add_strain_tensor('F',t,m)
label = 'epsilon_{}^{}(F)'.format(t,m) label = f'epsilon_{t}^{m}(F)'
loc = {'F': default.get_dataset_location('F'), loc = {'F': default.get_dataset_location('F'),
label: default.get_dataset_location(label)} label: default.get_dataset_location(label)}
in_memory = mechanics.strain_tensor(default.read_dataset(loc['F'],0),t,m) in_memory = mechanics.strain_tensor(default.read_dataset(loc['F'],0),t,m)
@ -302,8 +302,8 @@ class TestResult:
@pytest.mark.parametrize('allowed',['off','on']) @pytest.mark.parametrize('allowed',['off','on'])
def test_rename(self,default,allowed): def test_rename(self,default,allowed):
F = default.read_dataset(default.get_dataset_location('F'))
if allowed == 'on': if allowed == 'on':
F = default.read_dataset(default.get_dataset_location('F'))
default.allow_modification() default.allow_modification()
default.rename('F','new_name') default.rename('F','new_name')
assert np.all(F == default.read_dataset(default.get_dataset_location('new_name'))) assert np.all(F == default.read_dataset(default.get_dataset_location('new_name')))

View File

@ -14,7 +14,7 @@ scatter=1.e-2
@pytest.fixture @pytest.fixture
def default(): def default():
"""A set of n random rotations.""" """A set of n rotations (corner cases and random)."""
specials = np.array([ specials = np.array([
[1.0, 0.0, 0.0, 0.0], [1.0, 0.0, 0.0, 0.0],
#---------------------- #----------------------
@ -534,7 +534,7 @@ def mul(me, other):
other_p = other.quaternion[1:] other_p = other.quaternion[1:]
R = me.__class__(np.append(me_q*other_q - np.dot(me_p,other_p), R = me.__class__(np.append(me_q*other_q - np.dot(me_p,other_p),
me_q*other_p + other_q*me_p + _P * np.cross(me_p,other_p))) me_q*other_p + other_q*me_p + _P * np.cross(me_p,other_p)))
return R.standardize() return R._standardize()
elif isinstance(other, np.ndarray): elif isinstance(other, np.ndarray):
if other.shape == (3,): if other.shape == (3,):
A = me.quaternion[0]**2.0 - np.dot(me.quaternion[1:],me.quaternion[1:]) A = me.quaternion[0]**2.0 - np.dot(me.quaternion[1:],me.quaternion[1:])
@ -554,9 +554,9 @@ def mul(me, other):
axes = ((0, 2, 4, 6), (0, 1, 2, 3)) axes = ((0, 2, 4, 6), (0, 1, 2, 3))
return np.tensordot(RRRR, other, axes) return np.tensordot(RRRR, other, axes)
else: else:
raise ValueError('Can only rotate vectors, 2nd order ternsors, and 4th order tensors') raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
else: else:
raise TypeError('Cannot rotate {}'.format(type(other))) raise TypeError(f'Cannot rotate {type(other)}')
class TestRotation: class TestRotation:
@ -854,6 +854,10 @@ class TestRotation:
rot = Rotation.from_basis(om,False,reciprocal=reciprocal) rot = Rotation.from_basis(om,False,reciprocal=reciprocal)
assert np.isclose(np.linalg.det(rot.as_matrix()),1.0) assert np.isclose(np.linalg.det(rot.as_matrix()),1.0)
@pytest.mark.parametrize('shape',[None,1,(4,4)])
def test_random(self,shape):
Rotation.from_random(shape)
@pytest.mark.parametrize('function',[Rotation.from_quaternion, @pytest.mark.parametrize('function',[Rotation.from_quaternion,
Rotation.from_Eulers, Rotation.from_Eulers,
Rotation.from_axis_angle, Rotation.from_axis_angle,
@ -866,6 +870,16 @@ class TestRotation:
with pytest.raises(ValueError): with pytest.raises(ValueError):
function(invalid_shape) function(invalid_shape)
@pytest.mark.parametrize('fr,to',[(Rotation.from_quaternion,'as_quaternion'),
(Rotation.from_axis_angle,'as_axis_angle'),
(Rotation.from_Rodrigues, 'as_Rodrigues'),
(Rotation.from_homochoric,'as_homochoric'),
(Rotation.from_cubochoric,'as_cubochoric')])
def test_invalid_P(self,fr,to):
R = Rotation.from_random(np.random.randint(8,32,(3))) # noqa
with pytest.raises(ValueError):
fr(eval(f'R.{to}()'),P=-30)
@pytest.mark.parametrize('shape',[None,(3,),(4,2)]) @pytest.mark.parametrize('shape',[None,(3,),(4,2)])
def test_broadcast(self,shape): def test_broadcast(self,shape):
rot = Rotation.from_random(shape) rot = Rotation.from_random(shape)
@ -932,14 +946,18 @@ class TestRotation:
phi_2 = 2*np.pi - phi_1 phi_2 = 2*np.pi - phi_1
R_1 = Rotation.from_Eulers(np.array([phi_1,0.,0.])) R_1 = Rotation.from_Eulers(np.array([phi_1,0.,0.]))
R_2 = Rotation.from_Eulers(np.array([0.,0.,phi_2])) R_2 = Rotation.from_Eulers(np.array([0.,0.,phi_2]))
assert np.allclose(data,R_2*(R_1*data)) assert np.allclose(data,R_2@(R_1@data))
def test_rotate_inverse(self):
R = Rotation.from_random()
assert np.allclose(np.eye(3),(~R@R).as_matrix())
@pytest.mark.parametrize('data',[np.random.rand(3), @pytest.mark.parametrize('data',[np.random.rand(3),
np.random.rand(3,3), np.random.rand(3,3),
np.random.rand(3,3,3,3)]) np.random.rand(3,3,3,3)])
def test_rotate_inverse(self,data): def test_rotate_inverse_array(self,data):
R = Rotation.from_random() R = Rotation.from_random()
assert np.allclose(data,R.inversed()*(R*data)) assert np.allclose(data,~R@(R@data))
@pytest.mark.parametrize('data',[np.random.rand(4), @pytest.mark.parametrize('data',[np.random.rand(4),
np.random.rand(3,2), np.random.rand(3,2),
@ -956,3 +974,7 @@ class TestRotation:
R = Rotation.from_random() R = Rotation.from_random()
with pytest.raises(TypeError): with pytest.raises(TypeError):
R*data R*data
def test_misorientation(self):
R = Rotation.from_random()
assert np.allclose(R.misorientation(R).as_matrix(),np.eye(3))

View File

@ -17,18 +17,24 @@ class TestVTK:
size = np.random.random(3) + 1.0 size = np.random.random(3) + 1.0
origin = np.random.random(3) origin = np.random.random(3)
v = VTK.from_rectilinearGrid(grid,size,origin) v = VTK.from_rectilinearGrid(grid,size,origin)
s = v.__repr__() string = v.__repr__()
v.write(os.path.join(tmp_path,'rectilinearGrid')) v.write(os.path.join(tmp_path,'rectilinearGrid'))
v = VTK.from_file(os.path.join(tmp_path,'rectilinearGrid.vtr')) vtr = VTK.from_file(os.path.join(tmp_path,'rectilinearGrid.vtr'))
assert(s == v.__repr__()) with open(os.path.join(tmp_path,'rectilinearGrid.vtk'),'w') as f:
f.write(string)
vtk = VTK.from_file(os.path.join(tmp_path,'rectilinearGrid.vtk'),'VTK_rectilinearGrid')
assert(string == vtr.__repr__() == vtk.__repr__())
def test_polyData(self,tmp_path): def test_polyData(self,tmp_path):
points = np.random.rand(3,100) points = np.random.rand(3,100)
v = VTK.from_polyData(points) v = VTK.from_polyData(points)
s = v.__repr__() string = v.__repr__()
v.write(os.path.join(tmp_path,'polyData')) v.write(os.path.join(tmp_path,'polyData'))
v = VTK.from_file(os.path.join(tmp_path,'polyData.vtp')) vtp = VTK.from_file(os.path.join(tmp_path,'polyData.vtp'))
assert(s == v.__repr__()) with open(os.path.join(tmp_path,'polyData.vtk'),'w') as f:
f.write(string)
vtk = VTK.from_file(os.path.join(tmp_path,'polyData.vtk'),'polyData')
assert(string == vtp.__repr__() == vtk.__repr__())
@pytest.mark.parametrize('cell_type,n',[ @pytest.mark.parametrize('cell_type,n',[
('VTK_hexahedron',8), ('VTK_hexahedron',8),
@ -41,7 +47,17 @@ class TestVTK:
nodes = np.random.rand(n,3) nodes = np.random.rand(n,3)
connectivity = np.random.choice(np.arange(n),n,False).reshape(-1,n) connectivity = np.random.choice(np.arange(n),n,False).reshape(-1,n)
v = VTK.from_unstructuredGrid(nodes,connectivity,cell_type) v = VTK.from_unstructuredGrid(nodes,connectivity,cell_type)
s = v.__repr__() string = v.__repr__()
v.write(os.path.join(tmp_path,'unstructuredGrid')) v.write(os.path.join(tmp_path,'unstructuredGrid'))
v = VTK.from_file(os.path.join(tmp_path,'unstructuredGrid.vtu')) vtu = VTK.from_file(os.path.join(tmp_path,'unstructuredGrid.vtu'))
assert(s == v.__repr__()) with open(os.path.join(tmp_path,'unstructuredGrid.vtk'),'w') as f:
f.write(string)
vtk = VTK.from_file(os.path.join(tmp_path,'unstructuredGrid.vtk'),'unstructuredgrid')
assert(string == vtu.__repr__() == vtk.__repr__())
@pytest.mark.parametrize('name,dataset_type',[('this_file_does_not_exist.vtk',None),
('this_file_does_not_exist.vtk','vtk'),
('this_file_does_not_exist.vtx', None)])
def test_invalid_dataset_type(self,dataset_type,name):
with pytest.raises(TypeError):
VTK.from_file(name,dataset_type)
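Each VTK test above follows the same round-trip pattern: write the object in its native format, re-read it, additionally dump its string representation as a legacy .vtk file read back with an explicit dataset type, and compare the __repr__ strings. A generic sketch of that pattern with a stand-in payload class (not the damask.VTK API):

import os, tempfile

# Stand-in serializable object; damask.VTK plays this role in the tests above.
class Payload:
    def __init__(self, text):
        self.text = text
    def __repr__(self):
        return self.text
    def write(self, path):
        with open(path, 'w') as f:
            f.write(self.text)
    @staticmethod
    def from_file(path):
        with open(path) as f:
            return Payload(f.read())

with tempfile.TemporaryDirectory() as tmp:
    original = Payload('some dataset')
    original.write(os.path.join(tmp, 'data.txt'))
    reloaded = Payload.from_file(os.path.join(tmp, 'data.txt'))
    assert original.__repr__() == reloaded.__repr__()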

View File

@ -30,8 +30,8 @@ class TestGridFilters:
grid = np.random.randint(8,32,(3)) grid = np.random.randint(8,32,(3))
size = np.random.random(3) size = np.random.random(3)
origin = np.random.random(3) origin = np.random.random(3)
coord0 = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode)) # noqa coord0 = eval(f'grid_filters.{mode}_coord0(grid,size,origin)') # noqa
_grid,_size,_origin = eval('grid_filters.{}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))'.format(mode)) _grid,_size,_origin = eval(f'grid_filters.{mode}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))')
assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin) assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin)
def test_displacement_fluct_equivalence(self): def test_displacement_fluct_equivalence(self):
@ -67,8 +67,8 @@ class TestGridFilters:
origin= np.random.random(3) origin= np.random.random(3)
size = np.random.random(3) # noqa size = np.random.random(3) # noqa
grid = np.random.randint(8,32,(3)) grid = np.random.randint(8,32,(3))
shifted = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode)) shifted = eval(f'grid_filters.{mode}_coord0(grid,size,origin)')
unshifted = eval('grid_filters.{}_coord0(grid,size)'.format(mode)) unshifted = eval(f'grid_filters.{mode}_coord0(grid,size)')
if mode == 'cell': if mode == 'cell':
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid) +(3,))) assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid) +(3,)))
elif mode == 'node': elif mode == 'node':
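The hunks above only swap str.format for f-strings inside eval; both spellings dispatch on the mode name. A runnable sketch with a stand-in namespace (the real grid_filters functions are not reimplemented here), including the eval-free getattr alternative:

import numpy as np
from types import SimpleNamespace

# Stand-in for the grid_filters module, just to make the dispatch runnable.
grid_filters = SimpleNamespace(
    cell_coord0=lambda grid, size: np.zeros((*grid, 3)),
    node_coord0=lambda grid, size: np.zeros((*(grid + 1), 3)),
)

grid, size = np.array([4, 4, 4]), np.ones(3)
for mode in ('cell', 'node'):
    via_eval    = eval(f'grid_filters.{mode}_coord0(grid,size)')
    via_getattr = getattr(grid_filters, f'{mode}_coord0')(grid, size)
    assert np.allclose(via_eval, via_getattr)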

33
python/tests/test_util.py Normal file
View File

@ -0,0 +1,33 @@
import pytest
import numpy as np
from damask import util
class TestUtil:
def test_execute_direct(self):
out,err = util.execute('echo test')
assert out=='test\n' and err==''
def test_execute_env(self):
out,err = util.execute('sh -c "echo $test_for_execute"',env={'test_for_execute':'test'})
assert out=='test\n' and err==''
def test_croak(self):
util.croak('Burp!')
@pytest.mark.parametrize('input,output',
[
([2,0],[1,0]),
([0.5,0.5],[1,1]),
([1./2.,1./3.],[3,2]),
([2./3.,1./2.,1./3.],[4,3,2]),
])
def test_scale2coprime(self,input,output):
assert np.allclose(util.scale_to_coprime(np.array(input)),
np.array(output).astype(int))
def test_lackofprecision(self):
with pytest.raises(ValueError):
util.scale_to_coprime(np.array([1/3333,1,1]))
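The behavior pinned down by these parameters: scale a vector of ratios to the smallest integer vector with the same proportions, and raise when floating-point precision cannot support the reconstruction. A minimal reimplementation sketch using fractions (not the damask.util code; math.lcm requires Python >= 3.9):

from fractions import Fraction
from functools import reduce
from math import gcd, lcm

def scale_to_coprime(v, max_denominator=1000):
    # Rationalize each entry, bring all to a common denominator, reduce by the gcd.
    fracs  = [Fraction(x).limit_denominator(max_denominator) for x in v]
    common = reduce(lcm, (f.denominator for f in fracs))
    ints   = [int(f * common) for f in fracs]
    g      = reduce(gcd, ints)
    return [i // g for i in ints]

assert scale_to_coprime([2, 0])          == [1, 0]
assert scale_to_coprime([0.5, 0.5])      == [1, 1]
assert scale_to_coprime([2/3, 1/2, 1/3]) == [4, 3, 2]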

View File

@ -35,29 +35,15 @@ module CPFEM
CPFEM_dcsdE !< Cauchy stress tangent CPFEM_dcsdE !< Cauchy stress tangent
real(pReal), dimension (:,:,:,:), allocatable, private :: & real(pReal), dimension (:,:,:,:), allocatable, private :: &
CPFEM_dcsdE_knownGood !< known good tangent CPFEM_dcsdE_knownGood !< known good tangent
integer(pInt), public :: &
cycleCounter = 0_pInt, & !< needs description
theInc = -1_pInt, & !< needs description
lastLovl = 0_pInt !< lovl in previous call to marc hypela2
real(pReal), public :: &
theTime = 0.0_pReal, & !< needs description
theDelta = 0.0_pReal
logical, public :: &
outdatedFFN1 = .false., & !< needs description
lastIncConverged = .false., & !< needs description
outdatedByNewInc = .false. !< needs description
logical, public, protected :: & integer(pInt), public :: &
CPFEM_init_done = .false. !< remember whether init has been done already cycleCounter = 0_pInt !< needs description
logical, private :: &
CPFEM_calc_done = .false. !< remember whether first ip has already calced the results
integer(pInt), parameter, public :: & integer(pInt), parameter, public :: &
CPFEM_COLLECT = 2_pInt**0_pInt, & CPFEM_CALCRESULTS = 2_pInt**0_pInt, &
CPFEM_CALCRESULTS = 2_pInt**1_pInt, & CPFEM_AGERESULTS = 2_pInt**1_pInt, &
CPFEM_AGERESULTS = 2_pInt**2_pInt, & CPFEM_BACKUPJACOBIAN = 2_pInt**2_pInt, &
CPFEM_BACKUPJACOBIAN = 2_pInt**3_pInt, & CPFEM_RESTOREJACOBIAN = 2_pInt**3_pInt
CPFEM_RESTOREJACOBIAN = 2_pInt**4_pInt
public :: & public :: &
CPFEM_general, & CPFEM_general, &
@ -68,14 +54,10 @@ contains
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief call (thread safe) all module initializations !> @brief call all module initializations
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine CPFEM_initAll(el,ip) subroutine CPFEM_initAll
integer(pInt), intent(in) :: el, & !< FE el number
ip !< FE integration point number
CPFEM_init_done = .true.
call DAMASK_interface_init call DAMASK_interface_init
call prec_init call prec_init
call IO_init call IO_init
@ -88,7 +70,7 @@ subroutine CPFEM_initAll(el,ip)
call YAML_init call YAML_init
call HDF5_utilities_init call HDF5_utilities_init
call results_init(.false.) call results_init(.false.)
call discretization_marc_init(ip, el) call discretization_marc_init
call lattice_init call lattice_init
call material_init(.false.) call material_init(.false.)
call constitutive_init call constitutive_init
@ -124,7 +106,7 @@ end subroutine CPFEM_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief perform initialization at first call, update variables and call the actual material model !> @brief perform initialization at first call, update variables and call the actual material model
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyStress, jacobian) subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyStress, jacobian)
integer(pInt), intent(in) :: elFE, & !< FE element number integer(pInt), intent(in) :: elFE, & !< FE element number
ip !< integration point number ip !< integration point number
@ -133,24 +115,21 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
ffn1 !< deformation gradient for t=t1 ffn1 !< deformation gradient for t=t1
integer(pInt), intent(in) :: mode !< computation mode 1: regular computation plus aging of results integer(pInt), intent(in) :: mode !< computation mode 1: regular computation plus aging of results
real(pReal), intent(in) :: temperature_inp !< temperature real(pReal), intent(in) :: temperature_inp !< temperature
logical, intent(in) :: parallelExecution !< flag indicating parallel computation of requested IPs
real(pReal), dimension(6), intent(out) :: cauchyStress !< stress as 6 vector real(pReal), dimension(6), intent(out) :: cauchyStress !< stress as 6 vector
real(pReal), dimension(6,6), intent(out) :: jacobian !< jacobian as 66 tensor (Consistent tangent dcs/dE) real(pReal), dimension(6,6), intent(out) :: jacobian !< jacobian as 66 tensor (Consistent tangent dcs/dE)
real(pReal) J_inverse, & ! inverse of Jacobian real(pReal) J_inverse, & ! inverse of Jacobian
rnd rnd
real(pReal), dimension (3,3) :: Kirchhoff, & ! Piola-Kirchhoff stress real(pReal), dimension (3,3) :: Kirchhoff ! Piola-Kirchhoff stress
cauchyStress33 ! stress vector
real(pReal), dimension (3,3,3,3) :: H_sym, & real(pReal), dimension (3,3,3,3) :: H_sym, &
H, & H
jacobian3333 ! jacobian in Matrix notation
integer(pInt) elCP, & ! crystal plasticity element number integer(pInt) elCP, & ! crystal plasticity element number
i, j, k, l, m, n, ph, homog, mySource i, j, k, l, m, n, ph, homog, mySource
logical updateJaco ! flag indicating if Jacobian has to be updated logical updateJaco ! flag indicating if Jacobian has to be updated
real(pReal), parameter :: ODD_STRESS = 1e15_pReal, & !< return value for stress in case of ping pong dummy cycle real(pReal), parameter :: ODD_STRESS = 1e15_pReal, & !< return value for stress if terminallyIll
ODD_JACOBIAN = 1e50_pReal !< return value for jacobian in case of ping pong dummy cycle ODD_JACOBIAN = 1e50_pReal !< return value for jacobian if terminallyIll
elCP = mesh_FEM2DAMASK_elem(elFE) elCP = mesh_FEM2DAMASK_elem(elFE)
@ -159,9 +138,6 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
write(6,'(/,a)') '#############################################' write(6,'(/,a)') '#############################################'
write(6,'(a1,a22,1x,i8,a13)') '#','element', elCP, '#' write(6,'(a1,a22,1x,i8,a13)') '#','element', elCP, '#'
write(6,'(a1,a22,1x,i8,a13)') '#','ip', ip, '#' write(6,'(a1,a22,1x,i8,a13)') '#','ip', ip, '#'
write(6,'(a1,a22,1x,f15.7,a6)') '#','theTime', theTime, '#'
write(6,'(a1,a22,1x,f15.7,a6)') '#','theDelta', theDelta, '#'
write(6,'(a1,a22,1x,i8,a13)') '#','theInc', theInc, '#'
write(6,'(a1,a22,1x,i8,a13)') '#','cycleCounter', cycleCounter, '#' write(6,'(a1,a22,1x,i8,a13)') '#','cycleCounter', cycleCounter, '#'
write(6,'(a1,a22,1x,i8,a13)') '#','computationMode',mode, '#' write(6,'(a1,a22,1x,i8,a13)') '#','computationMode',mode, '#'
if (terminallyIll) & if (terminallyIll) &
@ -174,13 +150,8 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) & if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) &
CPFEM_dcsde = CPFEM_dcsde_knownGood CPFEM_dcsde = CPFEM_dcsde_knownGood
!*** age results
if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) call CPFEM_forward if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) call CPFEM_forward
!*** collection of FEM input with returning of randomize odd stress and jacobian
!* If no parallel execution is required, there is no need to collect FEM input
if (.not. parallelExecution) then
chosenThermal1: select case (thermal_type(material_homogenizationAt(elCP))) chosenThermal1: select case (thermal_type(material_homogenizationAt(elCP)))
case (THERMAL_conduction_ID) chosenThermal1 case (THERMAL_conduction_ID) chosenThermal1
temperature(material_homogenizationAt(elCP))%p(thermalMapping(material_homogenizationAt(elCP))%p(ip,elCP)) = & temperature(material_homogenizationAt(elCP))%p(thermalMapping(material_homogenizationAt(elCP))%p(ip,elCP)) = &
@ -189,64 +160,22 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
materialpoint_F0(1:3,1:3,ip,elCP) = ffn materialpoint_F0(1:3,1:3,ip,elCP) = ffn
materialpoint_F(1:3,1:3,ip,elCP) = ffn1 materialpoint_F(1:3,1:3,ip,elCP) = ffn1
elseif (iand(mode, CPFEM_COLLECT) /= 0_pInt) then
call random_number(rnd)
if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
CPFEM_cs(1:6,ip,elCP) = rnd * ODD_STRESS
CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6)
chosenThermal2: select case (thermal_type(material_homogenizationAt(elCP)))
case (THERMAL_conduction_ID) chosenThermal2
temperature(material_homogenizationAt(elCP))%p(thermalMapping(material_homogenizationAt(elCP))%p(ip,elCP)) = &
temperature_inp
end select chosenThermal2
materialpoint_F0(1:3,1:3,ip,elCP) = ffn
materialpoint_F(1:3,1:3,ip,elCP) = ffn1
CPFEM_calc_done = .false.
endif
!*** calculation of stress and jacobian
if (iand(mode, CPFEM_CALCRESULTS) /= 0_pInt) then if (iand(mode, CPFEM_CALCRESULTS) /= 0_pInt) then
!*** deformation gradient outdated or any actual deformation gradient differs more than relevantStrain from the stored one validCalculation: if (terminallyIll) then
validCalculation: if (terminallyIll &
.or. outdatedFFN1 &
.or. any(abs(ffn1 - materialpoint_F(1:3,1:3,ip,elCP)) > defgradTolerance)) then
if (any(abs(ffn1 - materialpoint_F(1:3,1:3,ip,elCP)) > defgradTolerance)) then
if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) then
write(6,'(a,1x,i8,1x,i2)') '<< CPFEM >> OUTDATED at elFE ip',elFE,ip
write(6,'(a,/,3(12x,3(f10.6,1x),/))') '<< CPFEM >> FFN1 old:',&
transpose(materialpoint_F(1:3,1:3,ip,elCP))
write(6,'(a,/,3(12x,3(f10.6,1x),/))') '<< CPFEM >> FFN1 now:',transpose(ffn1)
endif
outdatedFFN1 = .true.
endif
call random_number(rnd) call random_number(rnd)
if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
CPFEM_cs(1:6,ip,elCP) = ODD_STRESS * rnd CPFEM_cs(1:6,ip,elCP) = ODD_STRESS * rnd
CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6) CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6)
!*** deformation gradient is not outdated
else validCalculation else validCalculation
updateJaco = mod(cycleCounter,iJacoStiffness) == 0 updateJaco = mod(cycleCounter,iJacoStiffness) == 0
!* no parallel computation, so we use just one single elFE and ip for computation
if (.not. parallelExecution) then
FEsolving_execElem = elCP FEsolving_execElem = elCP
FEsolving_execIP = ip FEsolving_execIP = ip
if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) & if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
write(6,'(a,i8,1x,i2)') '<< CPFEM >> calculation for elFE ip ',elFE,ip write(6,'(a,i8,1x,i2)') '<< CPFEM >> calculation for elFE ip ',elFE,ip
call materialpoint_stressAndItsTangent(updateJaco, dt) call materialpoint_stressAndItsTangent(updateJaco, dt)
!* parallel computation and calulation not yet done
elseif (.not. CPFEM_calc_done) then
if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
write(6,'(a,i8,a,i8)') '<< CPFEM >> calculation for elements ',FEsolving_execElem(1),&
' to ',FEsolving_execElem(2)
call materialpoint_stressAndItsTangent(updateJaco, dt)
CPFEM_calc_done = .true.
endif
!* map stress and stiffness (or return odd values if terminally ill)
terminalIllness: if (terminallyIll) then terminalIllness: if (terminallyIll) then
call random_number(rnd) call random_number(rnd)
@ -256,7 +185,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
else terminalIllness else terminalIllness
! translate from P to CS ! translate from P to sigma
Kirchhoff = matmul(materialpoint_P(1:3,1:3,ip,elCP), transpose(materialpoint_F(1:3,1:3,ip,elCP))) Kirchhoff = matmul(materialpoint_P(1:3,1:3,ip,elCP), transpose(materialpoint_F(1:3,1:3,ip,elCP)))
J_inverse = 1.0_pReal / math_det33(materialpoint_F(1:3,1:3,ip,elCP)) J_inverse = 1.0_pReal / math_det33(materialpoint_F(1:3,1:3,ip,elCP))
CPFEM_cs(1:6,ip,elCP) = math_sym33to6(J_inverse * Kirchhoff,weighted=.false.) CPFEM_cs(1:6,ip,elCP) = math_sym33to6(J_inverse * Kirchhoff,weighted=.false.)
@ -280,7 +209,6 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
endif terminalIllness endif terminalIllness
endif validCalculation endif validCalculation
!* report stress and stiffness
if ((iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) & if ((iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
.and. ((debug_e == elCP .and. debug_i == ip) & .and. ((debug_e == elCP .and. debug_i == ip) &
.or. .not. iand(debug_level(debug_CPFEM), debug_levelSelective) /= 0_pInt)) then .or. .not. iand(debug_level(debug_CPFEM), debug_levelSelective) /= 0_pInt)) then
@ -293,35 +221,11 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
endif endif
!*** warn if stiffness close to zero
if (all(abs(CPFEM_dcsdE(1:6,1:6,ip,elCP)) < 1e-10_pReal)) call IO_warning(601,elCP,ip) if (all(abs(CPFEM_dcsdE(1:6,1:6,ip,elCP)) < 1e-10_pReal)) call IO_warning(601,elCP,ip)
!*** copy to output if using commercial FEM solver
cauchyStress = CPFEM_cs (1:6, ip,elCP) cauchyStress = CPFEM_cs (1:6, ip,elCP)
jacobian = CPFEM_dcsdE(1:6,1:6,ip,elCP) jacobian = CPFEM_dcsdE(1:6,1:6,ip,elCP)
!*** remember extreme values of stress ...
cauchyStress33 = math_6toSym33(CPFEM_cs(1:6,ip,elCP),weighted=.false.)
if (maxval(cauchyStress33) > debug_stressMax) then
debug_stressMaxLocation = [elCP, ip]
debug_stressMax = maxval(cauchyStress33)
endif
if (minval(cauchyStress33) < debug_stressMin) then
debug_stressMinLocation = [elCP, ip]
debug_stressMin = minval(cauchyStress33)
endif
!*** ... and Jacobian
jacobian3333 = math_66toSym3333(CPFEM_dcsdE(1:6,1:6,ip,elCP),weighted=.false.)
if (maxval(jacobian3333) > debug_jacobianMax) then
debug_jacobianMaxLocation = [elCP, ip]
debug_jacobianMax = maxval(jacobian3333)
endif
if (minval(jacobian3333) < debug_jacobianMin) then
debug_jacobianMinLocation = [elCP, ip]
debug_jacobianMin = minval(jacobian3333)
endif
end subroutine CPFEM_general end subroutine CPFEM_general
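With CPFEM_COLLECT gone, the remaining computation modes are renumbered but keep the bitmask pattern: each mode is one bit, combined with ior and tested with iand(mode, flag) /= 0, so CALCRESULTS and AGERESULTS can be requested in a single call. The same pattern expressed as a Python IntFlag (illustrative mirror, not part of the code base):

from enum import IntFlag

class CPFEMMode(IntFlag):
    CALCRESULTS     = 1 << 0   # 2_pInt**0_pInt
    AGERESULTS      = 1 << 1   # 2_pInt**1_pInt
    BACKUPJACOBIAN  = 1 << 2   # 2_pInt**2_pInt
    RESTOREJACOBIAN = 1 << 3   # 2_pInt**3_pInt

mode = CPFEMMode.CALCRESULTS | CPFEMMode.AGERESULTS   # ior(...) in Fortran
assert mode & CPFEMMode.AGERESULTS                    # iand(mode, flag) /= 0
assert not (mode & CPFEMMode.BACKUPJACOBIAN)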

View File

@ -43,8 +43,6 @@ module DAMASK_interface
logical, protected, public :: symmetricSolver logical, protected, public :: symmetricSolver
character(len=*), parameter, public :: INPUTFILEEXTENSION = '.dat' character(len=*), parameter, public :: INPUTFILEEXTENSION = '.dat'
logical, dimension(:,:), public, allocatable :: &
calcMode !< calculate or collect (ping pong scheme)
public :: & public :: &
DAMASK_interface_init, & DAMASK_interface_init, &
@ -185,7 +183,7 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
use CPFEM use CPFEM
implicit none implicit none
!$ include "omp_lib.h" ! the openMP function library include "omp_lib.h" ! the openMP function library
integer, intent(in) :: & ! according to MSC.Marc 2012 Manual D integer, intent(in) :: & ! according to MSC.Marc 2012 Manual D
ngens, & !< size of stress-strain law ngens, & !< size of stress-strain law
nn, & !< integration point number nn, & !< integration point number
@ -243,7 +241,18 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
real(pReal), dimension(6) :: stress real(pReal), dimension(6) :: stress
real(pReal), dimension(6,6) :: ddsdde real(pReal), dimension(6,6) :: ddsdde
integer :: computationMode, i, cp_en, node, CPnodeID integer :: computationMode, i, cp_en, node, CPnodeID
!$ integer(4) :: defaultNumThreadsInt !< default value set by Marc integer(4) :: defaultNumThreadsInt !< default value set by Marc
integer(pInt), save :: &
theInc = -1_pInt, & !< needs description
lastLovl = 0_pInt !< lovl in previous call to marc hypela2
real(pReal), save :: &
theTime = 0.0_pReal, & !< needs description
theDelta = 0.0_pReal
logical, save :: &
lastIncConverged = .false., & !< needs description
outdatedByNewInc = .false., & !< needs description
CPFEM_init_done = .false. !< remember whether init has been done already
if (iand(debug_level(debug_MARC),debug_LEVELBASIC) /= 0) then if (iand(debug_level(debug_MARC),debug_LEVELBASIC) /= 0) then
write(6,'(a,/,i8,i8,i2)') ' MSC.MARC information on shape of element(2), IP:', m, nn write(6,'(a,/,i8,i8,i2)') ' MSC.MARC information on shape of element(2), IP:', m, nn
@ -260,116 +269,76 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
transpose(ffn1) transpose(ffn1)
endif endif
!$ defaultNumThreadsInt = omp_get_num_threads() ! remember number of threads set by Marc defaultNumThreadsInt = omp_get_num_threads() ! remember number of threads set by Marc
!$ call omp_set_num_threads(DAMASK_NumThreadsInt) ! set number of threads for parallel execution set by DAMASK_NUM_THREADS call omp_set_num_threads(1) ! no openMP
if (.not. CPFEM_init_done) call CPFEM_initAll(m(1),nn) if (.not. CPFEM_init_done) then
CPFEM_init_done = .true.
call CPFEM_initAll
endif
computationMode = 0 ! save initialization value, since it does not result in any calculation computationMode = 0 ! save initialization value, since it does not result in any calculation
if (lovl == 4 ) then ! jacobian requested by marc if (lovl == 4 ) then ! jacobian requested by marc
if (timinc < theDelta .and. theInc == inc .and. lastLovl /= lovl) & ! first after cutback if (timinc < theDelta .and. theInc == inc .and. lastLovl /= lovl) & ! first after cutback
computationMode = CPFEM_RESTOREJACOBIAN computationMode = CPFEM_RESTOREJACOBIAN
elseif (lovl == 6) then ! stress requested by marc elseif (lovl == 6) then ! stress requested by marc
computationMode = CPFEM_CALCRESULTS
cp_en = mesh_FEM2DAMASK_elem(m(1)) cp_en = mesh_FEM2DAMASK_elem(m(1))
if (cptim > theTime .or. inc /= theInc) then ! reached "convergence" if (cptim > theTime .or. inc /= theInc) then ! reached "convergence"
terminallyIll = .false. terminallyIll = .false.
cycleCounter = -1 ! first calc step increments this to cycle = 0 cycleCounter = -1 ! first calc step increments this to cycle = 0
if (inc == 0) then ! >> start of analysis << if (inc == 0) then ! >> start of analysis <<
lastIncConverged = .false. ! no Jacobian backup lastIncConverged = .false.
outdatedByNewInc = .false. ! no aging of state outdatedByNewInc = .false.
calcMode = .false. ! pretend last step was collection
lastLovl = lovl ! pretend that this is NOT the first after a lovl change lastLovl = lovl ! pretend that this is NOT the first after a lovl change
write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> start of analysis..! ',m(1),nn write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> start of analysis..! ',m(1),nn
flush(6)
else if (inc - theInc > 1) then ! >> restart of broken analysis << else if (inc - theInc > 1) then ! >> restart of broken analysis <<
lastIncConverged = .false. ! no Jacobian backup lastIncConverged = .false.
outdatedByNewInc = .false. ! no aging of state outdatedByNewInc = .false.
calcMode = .true. ! pretend last step was calculation
write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> restart of analysis..! ',m(1),nn write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> restart of analysis..! ',m(1),nn
flush(6)
else ! >> just the next inc << else ! >> just the next inc <<
lastIncConverged = .true. ! request Jacobian backup lastIncConverged = .true.
outdatedByNewInc = .true. ! request aging of state outdatedByNewInc = .true.
calcMode = .true. ! assure last step was calculation
write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> new increment..! ',m(1),nn write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> new increment..! ',m(1),nn
flush(6)
endif endif
else if ( timinc < theDelta ) then ! >> cutBack << else if ( timinc < theDelta ) then ! >> cutBack <<
lastIncConverged = .false. ! no Jacobian backup lastIncConverged = .false.
outdatedByNewInc = .false. ! no aging of state outdatedByNewInc = .false.
terminallyIll = .false. terminallyIll = .false.
cycleCounter = -1 ! first calc step increments this to cycle = 0 cycleCounter = -1 ! first calc step increments this to cycle = 0
calcMode = .true. ! pretend last step was calculation
write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> cutback detected..! ',m(1),nn write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> cutback detected..! ',m(1),nn
flush(6)
endif ! convergence treatment end endif ! convergence treatment end
flush(6)
if (usePingPong) then
calcMode(nn,cp_en) = .not. calcMode(nn,cp_en) ! ping pong (calc <--> collect)
if (calcMode(nn,cp_en)) then ! now --- CALC ---
computationMode = CPFEM_CALCRESULTS
if (lastLovl /= lovl) then ! first after ping pong
call debug_reset() ! resets debugging
outdatedFFN1 = .false.
cycleCounter = cycleCounter + 1
!mesh_cellnode = mesh_build_cellnodes() ! update cell node coordinates
!call mesh_build_ipCoordinates() ! update ip coordinates
endif
if (outdatedByNewInc) then
computationMode = ior(computationMode,CPFEM_AGERESULTS) ! calc and age results
outdatedByNewInc = .false. ! reset flag
endif
else ! now --- COLLECT ---
computationMode = CPFEM_COLLECT ! plain collect
if (lastLovl /= lovl .and. .not. terminallyIll) &
call debug_info() ! first after ping pong reports (meaningful) debugging
if (lastIncConverged) then
computationMode = ior(computationMode,CPFEM_BACKUPJACOBIAN) ! collect and backup Jacobian after convergence
lastIncConverged = .false. ! reset flag
endif
!do node = 1,theMesh%elem%nNodes
!CPnodeID = mesh_element(4+node,cp_en)
!mesh_node(1:ndeg,CPnodeID) = mesh_node0(1:ndeg,CPnodeID) + numerics_unitlength * dispt(1:ndeg,node)
!enddo
endif
else ! --- PLAIN MODE ---
computationMode = CPFEM_CALCRESULTS ! always calc
if (lastLovl /= lovl) then if (lastLovl /= lovl) then
if (.not. terminallyIll) &
call debug_info() ! first reports (meaningful) debugging
call debug_reset() ! and resets debugging
outdatedFFN1 = .false.
cycleCounter = cycleCounter + 1 cycleCounter = cycleCounter + 1
!mesh_cellnode = mesh_build_cellnodes() ! update cell node coordinates !mesh_cellnode = mesh_build_cellnodes() ! update cell node coordinates
!call mesh_build_ipCoordinates() ! update ip coordinates !call mesh_build_ipCoordinates() ! update ip coordinates
endif endif
if (outdatedByNewInc) then if (outdatedByNewInc) then
computationMode = ior(computationMode,CPFEM_AGERESULTS) computationMode = ior(computationMode,CPFEM_AGERESULTS)
outdatedByNewInc = .false. ! reset flag outdatedByNewInc = .false.
endif endif
if (lastIncConverged) then if (lastIncConverged) then
computationMode = ior(computationMode,CPFEM_BACKUPJACOBIAN) ! backup Jacobian after convergence computationMode = ior(computationMode,CPFEM_BACKUPJACOBIAN)
lastIncConverged = .false. ! reset flag lastIncConverged = .false.
endif
endif endif
theTime = cptim ! record current starting time theTime = cptim
theDelta = timinc ! record current time increment theDelta = timinc
theInc = inc ! record current increment number theInc = inc
endif endif
lastLovl = lovl ! record lovl lastLovl = lovl
call CPFEM_general(computationMode,usePingPong,ffn,ffn1,t(1),timinc,m(1),nn,stress,ddsdde) call CPFEM_general(computationMode,ffn,ffn1,t(1),timinc,m(1),nn,stress,ddsdde)
d = ddsdde(1:ngens,1:ngens) d = ddsdde(1:ngens,1:ngens)
s = stress(1:ndi+nshear) s = stress(1:ndi+nshear)
g = 0.0_pReal g = 0.0_pReal
if(symmetricSolver) d = 0.5_pReal*(d+transpose(d)) if(symmetricSolver) d = 0.5_pReal*(d+transpose(d))
!$ call omp_set_num_threads(defaultNumThreadsInt) ! reset number of threads to stored default value call omp_set_num_threads(defaultNumThreadsInt) ! reset number of threads to stored default value
end subroutine hypela2 end subroutine hypela2
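The bookkeeping that replaces the ping-pong logic classifies each call by comparing Marc's current time, time increment, and increment number against the values saved from the previous call. A sketch of that decision tree in Python (variable names mirror the Fortran save variables; the branches are transcribed from the subroutine above):

def classify(cptim, timinc, inc, theTime, theDelta, theInc):
    if cptim > theTime or inc != theInc:     # reached "convergence"
        if inc == 0:
            return 'start of analysis'
        elif inc - theInc > 1:
            return 'restart of broken analysis'
        else:
            return 'new increment'
    elif timinc < theDelta:                  # time step shrank: cutback
        return 'cutback'
    return 'same increment'

assert classify(1.0, 0.10, 2, theTime=0.9, theDelta=0.1, theInc=1) == 'new increment'
assert classify(0.9, 0.05, 1, theTime=0.9, theDelta=0.1, theInc=1) == 'cutback'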

View File

@ -830,7 +830,7 @@ module subroutine plastic_dislotwin_results(instance,group)
'mobile dislocation density','1/m²') 'mobile dislocation density','1/m²')
case('rho_dip') case('rho_dip')
if(prm%sum_N_sl>0) call results_writeDataset(group,stt%rho_dip,'rho_dip',& if(prm%sum_N_sl>0) call results_writeDataset(group,stt%rho_dip,'rho_dip',&
'dislocation dipole density''1/m²') 'dislocation dipole density','1/m²')
case('gamma_sl') case('gamma_sl')
if(prm%sum_N_sl>0) call results_writeDataset(group,stt%gamma_sl,'gamma_sl',& if(prm%sum_N_sl>0) call results_writeDataset(group,stt%gamma_sl,'gamma_sl',&
'plastic shear','1') 'plastic shear','1')

View File

@ -245,7 +245,7 @@ subroutine crystallite_init
enddo enddo
!$OMP END PARALLEL DO !$OMP END PARALLEL DO
if(any(plasticState%nonlocal) .and. .not. usePingPong) call IO_error(601) ! exit if nonlocal but no ping-pong ToDo: Why not check earlier? or in nonlocal? !if(any(plasticState%nonlocal) .and. .not. usePingPong) call IO_error(601)
crystallite_partionedFp0 = crystallite_Fp0 crystallite_partionedFp0 = crystallite_Fp0
crystallite_partionedFi0 = crystallite_Fi0 crystallite_partionedFi0 = crystallite_Fi0
@ -276,9 +276,6 @@ subroutine crystallite_init
write(6,'(a42,1x,i10)') 'max # of constituents/integration point: ', cMax write(6,'(a42,1x,i10)') 'max # of constituents/integration point: ', cMax
flush(6) flush(6)
endif endif
call debug_info
call debug_reset
#endif #endif
end subroutine crystallite_init end subroutine crystallite_init
@ -287,11 +284,9 @@ end subroutine crystallite_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief calculate stress (P) !> @brief calculate stress (P)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
function crystallite_stress(dummyArgumentToPreventInternalCompilerErrorWithGCC) function crystallite_stress()
logical, dimension(discretization_nIP,discretization_nElem) :: crystallite_stress logical, dimension(discretization_nIP,discretization_nElem) :: crystallite_stress
real(pReal), intent(in), optional :: &
dummyArgumentToPreventInternalCompilerErrorWithGCC
real(pReal) :: & real(pReal) :: &
formerSubStep formerSubStep
integer :: & integer :: &

View File

@ -49,26 +49,11 @@ module debug
debug_i = 1, & debug_i = 1, &
debug_g = 1 debug_g = 1
integer, dimension(2), public :: &
debug_stressMaxLocation = 0, &
debug_stressMinLocation = 0, &
debug_jacobianMaxLocation = 0, &
debug_jacobianMinLocation = 0
real(pReal), public :: &
debug_stressMax = -huge(1.0_pReal), &
debug_stressMin = huge(1.0_pReal), &
debug_jacobianMax = -huge(1.0_pReal), &
debug_jacobianMin = huge(1.0_pReal)
#ifdef PETSc #ifdef PETSc
character(len=1024), parameter, public :: & character(len=1024), parameter, public :: &
PETSCDEBUG = ' -snes_view -snes_monitor ' PETSCDEBUG = ' -snes_view -snes_monitor '
#endif #endif
public :: debug_init, & public :: debug_init
debug_reset, &
debug_info
contains contains
@ -230,42 +215,4 @@ subroutine debug_init
end subroutine debug_init end subroutine debug_init
!--------------------------------------------------------------------------------------------------
!> @brief resets all debug values
!--------------------------------------------------------------------------------------------------
subroutine debug_reset
debug_stressMaxLocation = 0
debug_stressMinLocation = 0
debug_jacobianMaxLocation = 0
debug_jacobianMinLocation = 0
debug_stressMax = -huge(1.0_pReal)
debug_stressMin = huge(1.0_pReal)
debug_jacobianMax = -huge(1.0_pReal)
debug_jacobianMin = huge(1.0_pReal)
end subroutine debug_reset
!--------------------------------------------------------------------------------------------------
!> @brief writes debug statements to standard out
!--------------------------------------------------------------------------------------------------
subroutine debug_info
!$OMP CRITICAL (write2out)
debugOutputCPFEM: if (iand(debug_level(debug_CPFEM),debug_LEVELBASIC) /= 0 &
.and. any(debug_stressMinLocation /= 0) &
.and. any(debug_stressMaxLocation /= 0) ) then
write(6,'(2/,a,/)') ' Extreme values of returned stress and Jacobian'
write(6,'(a39)') ' value el ip'
write(6,'(a14,1x,e12.3,1x,i8,1x,i4)') ' stress min :', debug_stressMin, debug_stressMinLocation
write(6,'(a14,1x,e12.3,1x,i8,1x,i4)') ' max :', debug_stressMax, debug_stressMaxLocation
write(6,'(a14,1x,e12.3,1x,i8,1x,i4)') ' Jacobian min :', debug_jacobianMin, debug_jacobianMinLocation
write(6,'(a14,1x,e12.3,1x,i8,1x,i4,/)') ' max :', debug_jacobianMax, debug_jacobianMaxLocation
endif debugOutputCPFEM
!$OMP END CRITICAL (write2out)
end subroutine debug_info
end module debug end module debug

View File

@ -49,7 +49,7 @@ subroutine discretization_init(homogenizationAt,microstructureAt,&
IPcoords0, & IPcoords0, &
NodeCoords0 NodeCoords0
integer, optional, intent(in) :: & integer, optional, intent(in) :: &
sharedNodesBegin sharedNodesBegin !< index of first node shared among different processes (MPI)
write(6,'(/,a)') ' <<<+- discretization init -+>>>'; flush(6) write(6,'(/,a)') ' <<<+- discretization init -+>>>'; flush(6)

View File

@ -249,7 +249,8 @@ subroutine materialpoint_stressAndItsTangent(updateJaco,dt)
NiterationHomog = 0 NiterationHomog = 0
cutBackLooping: do while (.not. terminallyIll .and. & cutBackLooping: do while (.not. terminallyIll .and. &
any(subStep(:,FEsolving_execELem(1):FEsolving_execElem(2)) > num%subStepMinHomog)) any(subStep(FEsolving_execIP(1):FEsolving_execIP(2),&
FEsolving_execElem(1):FEsolving_execElem(2)) > num%subStepMinHomog))
!$OMP PARALLEL DO PRIVATE(myNgrains) !$OMP PARALLEL DO PRIVATE(myNgrains)
elementLooping1: do e = FEsolving_execElem(1),FEsolving_execElem(2) elementLooping1: do e = FEsolving_execElem(1),FEsolving_execElem(2)

View File

@ -45,9 +45,7 @@ contains
!> @brief initializes the mesh by calling all necessary private routines the mesh module !> @brief initializes the mesh by calling all necessary private routines the mesh module
!! Order and routines strongly depend on type of solver !! Order and routines strongly depend on type of solver
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine discretization_marc_init(ip,el) subroutine discretization_marc_init
integer, intent(in) :: el, ip
real(pReal), dimension(:,:), allocatable :: & real(pReal), dimension(:,:), allocatable :: &
node0_elem, & !< node x,y,z coordinates (initially!) node0_elem, & !< node x,y,z coordinates (initially!)
@ -70,7 +68,7 @@ subroutine discretization_marc_init(ip,el)
real(pReal), dimension(:,:,:,:),allocatable :: & real(pReal), dimension(:,:,:,:),allocatable :: &
unscaledNormals unscaledNormals
write(6,'(/,a)') ' <<<+- mesh init -+>>>'; flush(6) write(6,'(/,a)') ' <<<+- discretization_marc init -+>>>'; flush(6)
mesh_unitlength = numerics_unitlength ! set physical extent of a length unit in mesh mesh_unitlength = numerics_unitlength ! set physical extent of a length unit in mesh
@ -83,10 +81,6 @@ subroutine discretization_marc_init(ip,el)
FEsolving_execElem = [1,nElems] FEsolving_execElem = [1,nElems]
FEsolving_execIP = [1,elem%nIPs] FEsolving_execIP = [1,elem%nIPs]
allocate(calcMode(elem%nIPs,nElems),source=.false.) ! pretend to have collected what first call is asking (F = I)
calcMode(ip,mesh_FEM2DAMASK_elem(el)) = .true. ! first ip,el needs to be already pingponged to "calc"
allocate(cellNodeDefinition(elem%nNodes-1)) allocate(cellNodeDefinition(elem%nNodes-1))
allocate(connectivity_cell(elem%NcellNodesPerCell,elem%nIPs,nElems)) allocate(connectivity_cell(elem%NcellNodesPerCell,elem%nIPs,nElems))
call buildCells(connectivity_cell,cellNodeDefinition,& call buildCells(connectivity_cell,cellNodeDefinition,&

View File

@ -36,9 +36,6 @@ module discretization_mesh
mesh_maxNips !< max number of IPs in any CP element mesh_maxNips !< max number of IPs in any CP element
!!!! BEGIN DEPRECATED !!!!! !!!! BEGIN DEPRECATED !!!!!
integer, dimension(:,:), allocatable :: &
mesh_element !DEPRECATED
real(pReal), dimension(:,:), allocatable :: & real(pReal), dimension(:,:), allocatable :: &
mesh_ipVolume, & !< volume associated with IP (initially!) mesh_ipVolume, & !< volume associated with IP (initially!)
mesh_node0 !< node x,y,z coordinates (initially!) mesh_node0 !< node x,y,z coordinates (initially!)
@ -67,15 +64,10 @@ subroutine discretization_mesh_init(restart)
logical, intent(in) :: restart logical, intent(in) :: restart
integer, dimension(1), parameter:: FE_geomtype = [1] !< geometry type of particular element type
integer, dimension(1) :: FE_Nips !< number of IPs in a specific type of element
integer, allocatable, dimension(:) :: chunkPos integer, allocatable, dimension(:) :: chunkPos
integer :: dimPlex, & integer :: dimPlex, &
mesh_Nnodes, & !< total number of nodes in mesh mesh_Nnodes, & !< total number of nodes in mesh
j, l j, l
integer, parameter :: &
mesh_ElemType=1 !< Element type of the mesh (only support homogeneous meshes)
PetscSF :: sf PetscSF :: sf
DM :: globalMesh DM :: globalMesh
PetscInt :: nFaceSets PetscInt :: nFaceSets
@ -83,23 +75,22 @@ subroutine discretization_mesh_init(restart)
character(len=pStringLen), dimension(:), allocatable :: fileContent character(len=pStringLen), dimension(:), allocatable :: fileContent
IS :: faceSetIS IS :: faceSetIS
PetscErrorCode :: ierr PetscErrorCode :: ierr
integer, dimension(:), allocatable :: &
homogenizationAt, &
microstructureAt
write(6,'(/,a)') ' <<<+- mesh init -+>>>' write(6,'(/,a)') ' <<<+- mesh init -+>>>'
! read in file
call DMPlexCreateFromFile(PETSC_COMM_WORLD,geometryFile,PETSC_TRUE,globalMesh,ierr) call DMPlexCreateFromFile(PETSC_COMM_WORLD,geometryFile,PETSC_TRUE,globalMesh,ierr)
CHKERRQ(ierr) CHKERRQ(ierr)
! get spatial dimension (2 or 3?)
call DMGetDimension(globalMesh,dimPlex,ierr) call DMGetDimension(globalMesh,dimPlex,ierr)
CHKERRQ(ierr) CHKERRQ(ierr)
write(6,*) 'dimension',dimPlex;flush(6)
call DMGetStratumSize(globalMesh,'depth',dimPlex,mesh_NcpElemsGlobal,ierr) call DMGetStratumSize(globalMesh,'depth',dimPlex,mesh_NcpElemsGlobal,ierr)
CHKERRQ(ierr) CHKERRQ(ierr)
! get number of IDs in face sets (for boundary conditions?) ! get number of IDs in face sets (for boundary conditions?)
call DMGetLabelSize(globalMesh,'Face Sets',mesh_Nboundaries,ierr) call DMGetLabelSize(globalMesh,'Face Sets',mesh_Nboundaries,ierr)
CHKERRQ(ierr) CHKERRQ(ierr)
write(6,*) 'number of "Face Sets"',mesh_Nboundaries;flush(6)
call MPI_Bcast(mesh_Nboundaries,1,MPI_INTEGER,0,PETSC_COMM_WORLD,ierr) call MPI_Bcast(mesh_Nboundaries,1,MPI_INTEGER,0,PETSC_COMM_WORLD,ierr)
call MPI_Bcast(mesh_NcpElemsGlobal,1,MPI_INTEGER,0,PETSC_COMM_WORLD,ierr) call MPI_Bcast(mesh_NcpElemsGlobal,1,MPI_INTEGER,0,PETSC_COMM_WORLD,ierr)
call MPI_Bcast(dimPlex,1,MPI_INTEGER,0,PETSC_COMM_WORLD,ierr) call MPI_Bcast(dimPlex,1,MPI_INTEGER,0,PETSC_COMM_WORLD,ierr)
@ -154,33 +145,27 @@ subroutine discretization_mesh_init(restart)
call DMGetStratumSize(geomMesh,'depth',0,mesh_Nnodes,ierr) call DMGetStratumSize(geomMesh,'depth',0,mesh_Nnodes,ierr)
CHKERRQ(ierr) CHKERRQ(ierr)
FE_Nips(FE_geomtype(1)) = FEM_nQuadrature(dimPlex,integrationOrder) mesh_maxNips = FEM_nQuadrature(dimPlex,integrationOrder)
mesh_maxNips = FE_Nips(1)
write(6,*) 'mesh_maxNips',mesh_maxNips
call mesh_FEM_build_ipCoordinates(dimPlex,FEM_quadrature_points(dimPlex,integrationOrder)%p) call mesh_FEM_build_ipCoordinates(dimPlex,FEM_quadrature_points(dimPlex,integrationOrder)%p)
call mesh_FEM_build_ipVolumes(dimPlex) call mesh_FEM_build_ipVolumes(dimPlex)
allocate (mesh_element (4,mesh_NcpElems)); mesh_element = 0 allocate(microstructureAt(mesh_NcpElems))
allocate(homogenizationAt(mesh_NcpElems),source=1)
do j = 1, mesh_NcpElems do j = 1, mesh_NcpElems
mesh_element( 1,j) = -1 ! DEPRECATED call DMGetLabelValue(geomMesh,'material',j-1,microstructureAt(j),ierr)
mesh_element( 2,j) = mesh_elemType ! elem type
mesh_element( 3,j) = 1 ! homogenization
call DMGetLabelValue(geomMesh,'material',j-1,mesh_element(4,j),ierr)
CHKERRQ(ierr) CHKERRQ(ierr)
end do end do
if (debug_e < 1 .or. debug_e > mesh_NcpElems) & if (debug_e < 1 .or. debug_e > mesh_NcpElems) call IO_error(602,ext_msg='element')
call IO_error(602,ext_msg='element') ! selected element does not exist if (debug_i < 1 .or. debug_i > mesh_maxNips) call IO_error(602,ext_msg='IP')
if (debug_i < 1 .or. debug_i > FE_Nips(FE_geomtype(mesh_element(2,debug_e)))) &
call IO_error(602,ext_msg='IP') ! selected element does not have requested IP
FEsolving_execElem = [1,mesh_NcpElems] ! parallel loop bounds set to comprise all DAMASK elements FEsolving_execElem = [1,mesh_NcpElems] ! parallel loop bounds set to comprise all DAMASK elements
FEsolving_execIP = [1,FE_Nips(FE_geomtype(mesh_element(2,1)))] FEsolving_execIP = [1,mesh_maxNips]
allocate(mesh_node0(3,mesh_Nnodes),source=0.0_pReal) allocate(mesh_node0(3,mesh_Nnodes),source=0.0_pReal)
call discretization_init(mesh_element(3,:),mesh_element(4,:),& call discretization_init(microstructureAt,homogenizationAt,&
reshape(mesh_ipCoordinates,[3,mesh_maxNips*mesh_NcpElems]), & reshape(mesh_ipCoordinates,[3,mesh_maxNips*mesh_NcpElems]), &
mesh_node0) mesh_node0)
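The deprecated mesh_element array packed unrelated per-element attributes into numbered rows; the replacement keeps one named array per attribute, filled from the 'material' label. A sketch of the layout change with made-up values:

import numpy as np

n_elems = 4
# old: row 3 held the homogenization, row 4 the microstructure (1-based rows)
mesh_element = np.zeros((4, n_elems), dtype=int)
mesh_element[2, :] = 1                     # homogenization, hard-coded to 1
mesh_element[3, :] = [1, 1, 2, 2]          # microstructure from DMGetLabelValue
# new: self-describing arrays passed straight to discretization_init
homogenizationAt = np.ones(n_elems, dtype=int)
microstructureAt = np.array([1, 1, 2, 2])
assert np.array_equal(mesh_element[2, :], homogenizationAt)
assert np.array_equal(mesh_element[3, :], microstructureAt)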

View File

@ -28,8 +28,6 @@ module numerics
numerics_unitlength = 1.0_pReal, & !< determines the physical length of one computational length unit numerics_unitlength = 1.0_pReal, & !< determines the physical length of one computational length unit
charLength = 1.0_pReal, & !< characteristic length scale for gradient problems charLength = 1.0_pReal, & !< characteristic length scale for gradient problems
residualStiffness = 1.0e-6_pReal !< non-zero residual damage residualStiffness = 1.0e-6_pReal !< non-zero residual damage
logical, protected, public :: &
usePingPong = .true.
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! field parameters: ! field parameters:
@ -133,8 +131,6 @@ subroutine numerics_init
defgradTolerance = IO_floatValue(line,chunkPos,2) defgradTolerance = IO_floatValue(line,chunkPos,2)
case ('ijacostiffness') case ('ijacostiffness')
iJacoStiffness = IO_intValue(line,chunkPos,2) iJacoStiffness = IO_intValue(line,chunkPos,2)
case ('usepingpong')
usepingpong = IO_intValue(line,chunkPos,2) > 0
case ('unitlength') case ('unitlength')
numerics_unitlength = IO_floatValue(line,chunkPos,2) numerics_unitlength = IO_floatValue(line,chunkPos,2)
@ -221,7 +217,6 @@ subroutine numerics_init
! writing parameters to output ! writing parameters to output
write(6,'(a24,1x,es8.1)') ' defgradTolerance: ',defgradTolerance write(6,'(a24,1x,es8.1)') ' defgradTolerance: ',defgradTolerance
write(6,'(a24,1x,i8)') ' iJacoStiffness: ',iJacoStiffness write(6,'(a24,1x,i8)') ' iJacoStiffness: ',iJacoStiffness
write(6,'(a24,1x,L8)') ' use ping pong scheme: ',usepingpong
write(6,'(a24,1x,es8.1,/)')' unitlength: ',numerics_unitlength write(6,'(a24,1x,es8.1,/)')' unitlength: ',numerics_unitlength
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------

View File

@ -101,6 +101,8 @@ module quaternions
assignment(=), & assignment(=), &
conjg, aimag, & conjg, aimag, &
log, exp, & log, exp, &
abs, dot_product, &
inverse, &
real real
contains contains
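The quaternion module now also exports abs, dot_product, and inverse. For a quaternion q, inverse(q) is conjg(q) scaled by 1/abs(q)**2, so for unit quaternions it equals the conjugate. A plain-NumPy sketch of these operations (Hamilton convention; not the Fortran implementation):

import numpy as np

def conjg(q):   return q * np.array([1., -1., -1., -1.])
def abs_q(q):   return np.sqrt(np.dot(q, q))
def inverse(q): return conjg(q) / np.dot(q, q)

def mul(p, q):  # Hamilton product, used to verify the inverse
    w1, x1, y1, z1 = p
    w2, x2, y2, z2 = q
    return np.array([w1*w2 - x1*x2 - y1*y2 - z1*z2,
                     w1*x2 + x1*w2 + y1*z2 - z1*y2,
                     w1*y2 - x1*z2 + y1*w2 + z1*x2,
                     w1*z2 + x1*y2 - y1*x2 + z1*w2])

q = np.array([0.5, 0.5, 0.5, 0.5])               # unit quaternion
assert np.isclose(abs_q(q), 1.0)
assert np.allclose(mul(q, inverse(q)), [1., 0., 0., 0.])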