Merge branch 'development' into YAML-compatible-debug
commit 57e4d01a6b
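Notable changes merged from 'development': two CI jobs (Post_ParaviewRelated, Post_OrientationConversion) are dropped, four deprecated post-processing scripts (2-D binning and VTK point-cloud/rectilinear-grid/data export) are deleted, and str.format calls are converted to f-strings throughout the Python package.

Note: the dominant mechanical change below is the f-string conversion; a minimal illustration of the equivalence (illustrative label only, not taken from the diff):

    label = 'F'
    assert 'det({})'.format(label) == f'det({label})' == 'det(F)'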
@@ -192,21 +192,6 @@ Post_addGradient:
     - master
     - release
 
-Post_ParaviewRelated:
-  stage: postprocessing
-  script: ParaviewRelated/test.py
-  except:
-    - master
-    - release
-
-Post_OrientationConversion:
-  stage: postprocessing
-  script:
-    - OrientationConversion/test.py
-  except:
-    - master
-    - release
-
 Post_OrientationAverageMisorientation:
   stage: postprocessing
   script:
@@ -1,143 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Produces a binned grid of two columns from an ASCIItable, i.e. a two-dimensional probability density map.
-
-""", version = scriptID)
-
-parser.add_option('-d','--data',
-                  dest = 'data',
-                  type = 'string', nargs = 2, metavar = 'string string',
-                  help = 'column labels containing x and y ')
-parser.add_option('-w','--weight',
-                  dest = 'weight',
-                  type = 'string', metavar = 'string',
-                  help = 'column label containing weight of (x,y) point')
-parser.add_option('-b','--bins',
-                  dest = 'bins',
-                  type = 'int', nargs = 2, metavar = 'int int',
-                  help = 'number of bins in x and y direction [%default]')
-parser.add_option('-t','--type',
-                  dest = 'type',
-                  type = 'string', nargs = 3, metavar = 'string string string',
-                  help = 'type (linear/log) of x, y, and z axis [%default]')
-parser.add_option('-x','--xrange',
-                  dest = 'xrange',
-                  type = 'float', nargs = 2, metavar = 'float float',
-                  help = 'min max limits in x direction (optional)')
-parser.add_option('-y','--yrange',
-                  dest = 'yrange',
-                  type = 'float', nargs = 2, metavar = 'float float',
-                  help = 'min max limits in y direction (optional)')
-parser.add_option('-z','--zrange',
-                  dest = 'zrange',
-                  type = 'float', nargs = 2, metavar = 'float float',
-                  help = 'min max limits in z direction (optional)')
-parser.add_option('-i','--invert',
-                  dest = 'invert',
-                  action = 'store_true',
-                  help = 'invert probability density')
-parser.add_option('-r','--rownormalize',
-                  dest = 'normRow',
-                  action = 'store_true',
-                  help = 'normalize probability density in each row')
-parser.add_option('-c','--colnormalize',
-                  dest = 'normCol',
-                  action = 'store_true',
-                  help = 'normalize probability density in each column')
-
-parser.set_defaults(bins = (10,10),
-                    type = ('linear','linear','linear'),
-                    xrange = (0.0,0.0),
-                    yrange = (0.0,0.0),
-                    zrange = (0.0,0.0),
-                   )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-minmax = np.array([options.xrange,options.yrange,options.zrange])
-result = np.empty((options.bins[0],options.bins[1],3),'f')
-
-if options.data is None: parser.error('no data columns specified.')
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    data = np.hstack((table.get(options.data[0]),table.get(options.data[1])))
-
-    for c in (0,1):                                                  # check data minmax for x and y (i = 0 and 1)
-        if (minmax[c] == 0.0).all(): minmax[c] = [data[:,c].min(),data[:,c].max()]
-        if options.type[c].lower() == 'log':                         # if log scale
-            data[:,c] = np.log(data[:,c])                            # change x,y coordinates to log
-            minmax[c] = np.log(minmax[c])                            # change minmax to log, too
-
-    delta = minmax[:,1]-minmax[:,0]
-    (grid,xedges,yedges) = np.histogram2d(data[:,0],data[:,1],
-                                          bins=options.bins,
-                                          range=minmax[:2],
-                                          weights=table.get(options.weight) if options.weight else None)
-    if options.normCol:
-        for x in range(options.bins[0]):
-            sum = np.sum(grid[x,:])
-            if sum > 0.0:
-                grid[x,:] /= sum
-    if options.normRow:
-        for y in range(options.bins[1]):
-            sum = np.sum(grid[:,y])
-            if sum > 0.0:
-                grid[:,y] /= sum
-
-    if (minmax[2] == 0.0).all(): minmax[2] = [grid.min(),grid.max()]           # auto scale from data
-    if minmax[2,0] == minmax[2,1]:
-        minmax[2,0] -= 1.
-        minmax[2,1] += 1.
-    if (minmax[2] == 0.0).all():                                               # no data in grid?
-        damask.util.croak('no data found on grid...')
-        minmax[2,:] = np.array([0.0,1.0])                                      # making up arbitrary z minmax
-    if options.type[2].lower() == 'log':
-        grid = np.log(grid)
-        minmax[2] = np.log(minmax[2])
-
-    delta[2] = minmax[2,1]-minmax[2,0]
-
-    for x in range(options.bins[0]):
-        for y in range(options.bins[1]):
-            result[x,y,:] = [minmax[0,0]+delta[0]/options.bins[0]*(x+0.5),
-                             minmax[1,0]+delta[1]/options.bins[1]*(y+0.5),
-                             np.clip((grid[x,y]-minmax[2,0])/delta[2],0.0,1.0)]
-
-    for c in (0,1):
-        if options.type[c].lower() == 'log': result[:,:,c] = np.exp(result[:,:,c])
-
-    if options.invert: result[:,:,2] = 1.0 - result[:,:,2]
-
-    comments = scriptID + '\t' + ' '.join(sys.argv[1:])
-    shapes = {'bin_%s'%options.data[0]:(1,),'bin_%s'%options.data[1]:(1,),'z':(1,)}
-    table = damask.Table(result.reshape(options.bins[0]*options.bins[1],3),shapes,[comments])
-    if name:
-        outname = os.path.join(os.path.dirname(name),'binned-{}-{}_'.format(*options.data) +
-                                                     ('weighted-{}_'.format(options.weight) if options.weight else '') +
-                                                     os.path.basename(name))
-        table.to_ASCII(outname)
-    else:
-        table.to_ASCII(sys.stdout)
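Note: the deleted binning script above is essentially a CLI wrapper around np.histogram2d; its core, stripped of option handling and run on made-up input, would be:

    import numpy as np
    x, y = np.random.rand(1000), np.random.rand(1000)          # hypothetical data
    grid, xedges, yedges = np.histogram2d(x, y, bins=(10,10))  # 2-D density on a 10x10 grid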
@@ -1,65 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption,
-                      usage='%prog options [ASCIItable(s)]',
-                      description = 'Add scalars/vectors, tensors, and/or a RGB tuples from ASCIItable '
-                                  + 'to existing VTK file (.vtr/.vtu/.vtp).',
-                      version = scriptID)
-
-parser.add_option(      '--vtk',
-                  dest = 'vtk',
-                  type = 'string', metavar = 'string',
-                  help = 'VTK file name')
-parser.add_option('-d', '--data',
-                  dest = 'data',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = 'scalar/vector value(s) label(s)')
-parser.add_option('-t', '--tensor',
-                  dest = 'tensor',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = 'tensor (3x3) value label(s)')
-parser.add_option('-c', '--color',
-                  dest = 'color',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = 'RGB color tuple label')
-
-parser.set_defaults(data = [],
-                    tensor = [],
-                    color = [],
-                   )
-
-(options, filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-if not options.vtk:
-    parser.error('No VTK file specified.')
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    vtk = damask.VTK.from_file(options.vtk)
-
-    for data in options.data+options.tensor:
-        vtk.add(table.get(data),data)
-    for color in options.color:
-        vtk.add((table.get(color)*255).astype(np.uint8),color)
-
-    vtk.write(options.vtk)
@@ -1,45 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Produce a VTK point cloud dataset based on coordinates given in an ASCIItable.
-
-""", version = scriptID)
-
-parser.add_option('-p',
-                  '--pos', '--position',
-                  dest = 'pos',
-                  type = 'string', metavar = 'string',
-                  help = 'label of coordinates [%default]')
-
-parser.set_defaults(pos = 'pos',
-                   )
-
-(options, filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    v = damask.VTK.from_polyData(table.get(options.pos))
-
-    if name:
-        v.write(os.path.splitext(name)[0])
-    else:
-        sys.stdout.write(v.__repr__())
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Create regular voxel grid from points in an ASCIItable.
-
-""", version = scriptID)
-
-parser.add_option('-m',
-                  '--mode',
-                  dest = 'mode',
-                  metavar='string',
-                  type = 'choice', choices = ['cell','point'],
-                  help = 'cell-centered or point-centered coordinates')
-parser.add_option('-p',
-                  '--pos', '--position',
-                  dest = 'pos',
-                  type = 'string', metavar = 'string',
-                  help = 'label of coordinates [%default]')
-
-parser.set_defaults(mode = 'cell',
-                    pos = 'pos',
-                   )
-
-(options, filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-
-    if options.mode == 'cell':
-        grid, size, origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
-    elif options.mode == 'point':
-        grid, size, origin = damask.grid_filters.node_coord0_gridSizeOrigin(table.get(options.pos))
-
-    v = damask.VTK.from_rectilinearGrid(grid,size,origin)
-
-    if name:
-        v.write('{}_{}({})'.format(os.path.splitext(name)[0],options.pos,options.mode))
-    else:
-        sys.stdout.write(v.__repr__())
@@ -157,7 +157,7 @@ class ASCIItable():
   def head_write(self,
                  header = True):
     """Write current header information (info + labels)."""
-    head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
+    head = [f"{len(self.info)+self.__IO__['labeled']}\theader"] if header else []
     head.append(self.info)
     if self.__IO__['labeled']:
       head.append('\t'.join(map(self._quote,self.tags)))
@@ -209,7 +209,7 @@ class ASCIItable():
             labelList.append(tags[id])
           else:
             label = tags[id][2:]                                          # get label
-            while id < len(tags) and tags[id] == '{}_{}'.format(dim,label):   # check successors
+            while id < len(tags) and tags[id] == f'{dim}_{label}':            # check successors
               id += 1                                                     # next label...
               dim += 1                                                    # ...should be one higher dimension
             labelList.append(label)                                       # reached end --> store
@@ -1,4 +1,5 @@
 import numpy as np
+from . import util
 
 class Color:
   """Color representation in and conversion between different color-spaces."""
@@ -450,7 +451,7 @@ class Colormap:
 
     def rad_diff(a,b):
      return abs(a[2]-b[2])
-
+
     def adjust_hue(Msh_sat, Msh_unsat):
      """If saturation of one of the two colors is too less than the other, hue of the less."""
      if Msh_sat[0] >= Msh_unsat[0]:
@@ -502,7 +503,7 @@ class Colormap:
    [RGB] colormap for use in paraview or gmsh, or as raw string, or array.
 
    Arguments: name, format, steps, crop.
-    Format is one of (paraview, gmsh, raw, list).
+    Format is one of (paraview, gmsh, gom, raw, list).
    Crop selects a (sub)range in [-1.0,1.0].
    Generates sequential map if one limiting color is either white or black,
    diverging map otherwise.
@@ -511,23 +512,22 @@ class Colormap:
    frac = 0.5*(np.array(crop) + 1.0)                                      # rescale crop range to fractions
    colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).express_as(model).color for i in range(steps)]
    if format == 'paraview':
-      colormap = ['[\n {{\n "ColorSpace": "RGB", "Name": "{}", "DefaultMap": true,\n "RGBPoints" : ['.format(name)] \
-               + [' {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],) \
-                  for i,color in enumerate(colors[:-1])] \
-               + [' {:4d},{:8.6f},{:8.6f},{:8.6f} '.format(len(colors),colors[-1][0],colors[-1][1],colors[-1][2],)] \
+      colormap = [f'[\n {{\n "ColorSpace": "RGB", "Name": "{name}", "DefaultMap": true,\n "RGBPoints" : ['] \
+               + [f' {i:4d},{color[0]:8.6f},{color[1]:8.6f},{color[2]:8.6f}{"," if i+1<len(colors) else ""}' \
+                  for i,color in enumerate(colors)] \
               + [' ]\n }\n]']
 
    elif format == 'gmsh':
      colormap = ['View.ColorTable = {'] \
-               + [',\n'.join(['{%s}'%(','.join([str(x*255.0) for x in color])) for color in colors])] \
+               + [',\n'.join([','.join([str(x*255.0) for x in color]) for color in colors])] \
               + ['}']
 
    elif format == 'gom':
-      colormap = ['1 1 ' + str(name)
-                 + ' 9 ' + str(name)
-                 + ' 0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 '
-                 + '30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 ' + str(len(colors))
-                 + ' '.join([' 0 %s 255 1'%(' '.join([str(int(x*255.0)) for x in color])) for color in reversed(colors)])]
+      colormap = [ f'1 1 {name}'
+                 + f' 9 {name}'
+                 +  ' 0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 '
+                 + f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(colors)}'
+                 +  ' '.join([f' 0 {util.srepr((255*np.array(c)).astype(int)," ")} 255 1' for c in reversed(colors)])]
 
    elif format == 'raw':
      colormap = ['\t'.join(map(str,color)) for color in colors]
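Note: several replacement lines above and below lean on damask.util.srepr as a join helper. Judging from the call sites alone (e.g. util.srepr(self.get_grid()," x ")), a minimal stand-in would behave like:

    def srepr(items, glue='\n'):                 # assumed signature, inferred from usage in this diff
        return glue.join(str(x) for x in items)

    print(srepr([16,16,16], ' x '))              # -> 16 x 16 x 16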
@@ -43,12 +43,12 @@ class Geom:
     def __repr__(self):
         """Basic information on geometry definition."""
         return util.srepr([
-               'grid a b c: {}'.format(' x '.join(map(str,self.get_grid ()))),
-               'size x y z: {}'.format(' x '.join(map(str,self.get_size ()))),
-               'origin x y z: {}'.format(' '.join(map(str,self.get_origin()))),
-               'homogenization: {}'.format(self.get_homogenization()),
-               '# microstructures: {}'.format(len(np.unique(self.microstructure))),
-               'max microstructure: {}'.format(np.nanmax(self.microstructure)),
+               f'grid a b c: {util.srepr(self.get_grid ()," x ")}',
+               f'size x y z: {util.srepr(self.get_size ()," x ")}',
+               f'origin x y z: {util.srepr(self.get_origin()," ")}',
+               f'homogenization: {self.get_homogenization()}',
+               f'# microstructures: {self.N_microstructure}',
+               f'max microstructure: {np.nanmax(self.microstructure)}',
               ])
 
 
@@ -71,7 +71,7 @@ class Geom:
         grid_old = self.get_grid()
         size_old = self.get_size()
         origin_old = self.get_origin()
-        unique_old = len(np.unique(self.microstructure))
+        unique_old = self.N_microstructure
         max_old = np.nanmax(self.microstructure)
 
         if size is not None and rescale:
@@ -85,32 +85,32 @@ class Geom:
         elif rescale:
             self.set_size(self.get_grid()/grid_old*self.size)
 
-        message = ['grid a b c: {}'.format(' x '.join(map(str,grid_old)))]
+        message = [f'grid a b c: {util.srepr(grid_old," x ")}']
         if np.any(grid_old != self.get_grid()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('grid a b c: {}'.format(' x '.join(map(str,self.get_grid())))))
+            message.append(util.emph(f'grid a b c: {util.srepr(self.get_grid()," x ")}'))
 
-        message.append('size x y z: {}'.format(' x '.join(map(str,size_old))))
+        message.append(f'size x y z: {util.srepr(size_old," x ")}')
         if np.any(size_old != self.get_size()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('size x y z: {}'.format(' x '.join(map(str,self.get_size())))))
+            message.append(util.emph(f'size x y z: {util.srepr(self.get_size()," x ")}'))
 
-        message.append('origin x y z: {}'.format(' '.join(map(str,origin_old))))
+        message.append(f'origin x y z: {util.srepr(origin_old," ")}')
         if np.any(origin_old != self.get_origin()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('origin x y z: {}'.format(' '.join(map(str,self.get_origin())))))
+            message.append(util.emph(f'origin x y z: {util.srepr(self.get_origin()," ")}'))
 
-        message.append('homogenization: {}'.format(self.get_homogenization()))
+        message.append(f'homogenization: {self.get_homogenization()}')
 
-        message.append('# microstructures: {}'.format(unique_old))
-        if unique_old != len(np.unique(self.microstructure)):
+        message.append(f'# microstructures: {unique_old}')
+        if unique_old != self.N_microstructure:
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('# microstructures: {}'.format(len(np.unique(self.microstructure)))))
+            message.append(util.emph(f'# microstructures: {self.N_microstructure}'))
 
-        message.append('max microstructure: {}'.format(max_old))
+        message.append(f'max microstructure: {max_old}')
         if max_old != np.nanmax(self.microstructure):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('max microstructure: {}'.format(np.nanmax(self.microstructure))))
+            message.append(util.emph(f'max microstructure: {np.nanmax(self.microstructure)}'))
 
         return util.return_message(message)
 
@@ -154,9 +154,9 @@ class Geom:
         """
         if microstructure is not None:
             if len(microstructure.shape) != 3:
-                raise ValueError('Invalid microstructure shape {}'.format(microstructure.shape))
+                raise ValueError(f'Invalid microstructure shape {microstructure.shape}')
             elif microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']:
-                raise TypeError('Invalid data type {} for microstructure'.format(microstructure.dtype))
+                raise TypeError(f'Invalid microstructue data type {microstructure.dtype}')
             else:
                 self.microstructure = np.copy(microstructure)
 
@@ -175,8 +175,8 @@ class Geom:
             grid = np.asarray(self.microstructure.shape)
             self.size = grid/np.max(grid)
         else:
-            if len(size) != 3 or any(np.array(size)<=0):
-                raise ValueError('Invalid size {}'.format(size))
+            if len(size) != 3 or any(np.array(size) <= 0):
+                raise ValueError(f'Invalid size {size}')
             else:
                 self.size = np.array(size)
 
@@ -193,7 +193,7 @@ class Geom:
         """
         if origin is not None:
             if len(origin) != 3:
-                raise ValueError('Invalid origin {}'.format(origin))
+                raise ValueError(f'Invalid origin {origin}')
             else:
                 self.origin = np.array(origin)
 
@@ -210,7 +210,7 @@ class Geom:
         """
         if homogenization is not None:
             if not isinstance(homogenization,int) or homogenization < 1:
-                raise TypeError('Invalid homogenization {}'.format(homogenization))
+                raise TypeError(f'Invalid homogenization {homogenization}')
             else:
                 self.homogenization = homogenization
 
@@ -222,7 +222,7 @@ class Geom:
 
     @property
     def N_microstructure(self):
-        return len(np.unique(self.microstructure))
+        return np.unique(self.microstructure).size
 
 
     def get_microstructure(self):
@@ -257,11 +257,11 @@ class Geom:
 
     def get_header(self):
         """Return the full header (grid, size, origin, homogenization, comments)."""
-        header = ['{} header'.format(len(self.comments)+4)] + self.comments
+        header = [f'{len(self.comments)+4} header'] + self.comments
         header.append('grid a {} b {} c {}'.format(*self.get_grid()))
         header.append('size x {} y {} z {}'.format(*self.get_size()))
         header.append('origin x {} y {} z {}'.format(*self.get_origin()))
-        header.append('homogenization {}'.format(self.get_homogenization()))
+        header.append(f'homogenization {self.get_homogenization()}')
         return header
 
 
@@ -320,7 +320,7 @@ class Geom:
                 i += len(items)
 
         if i != grid.prod():
-            raise TypeError('Invalid file: expected {} entries, found {}'.format(grid.prod(),i))
+            raise TypeError(f'Invalid file: expected {grid.prod()} entries, found {i}')
 
         if not np.any(np.mod(microstructure,1) != 0.0):               # no float present
             microstructure = microstructure.astype('int')
@@ -331,6 +331,7 @@ class Geom:
     @staticmethod
     def _find_closest_seed(seeds, weights, point):
         return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
+
     @staticmethod
     def from_Laguerre_tessellation(grid,size,seeds,weights,periodic=True):
         """
@@ -373,7 +374,7 @@ class Geom:
         else:
             microstructure = microstructure.reshape(grid)
 
-        #comments = 'geom.py:from_Laguerre_tessellation v{}'.format(version)
+        #ToDo: comments = 'geom.py:from_Laguerre_tessellation v{}'.format(version)
         return Geom(microstructure+1,size,homogenization=1)
 
 
@@ -398,7 +399,7 @@ class Geom:
         KDTree = spatial.cKDTree(seeds,boxsize=size) if periodic else spatial.cKDTree(seeds)
         devNull,microstructure = KDTree.query(coords)
 
-        #comments = 'geom.py:from_Voronoi_tessellation v{}'.format(version)
+        #ToDo: comments = 'geom.py:from_Voronoi_tessellation v{}'.format(version)
         return Geom(microstructure.reshape(grid)+1,size,homogenization=1)
 
 
@@ -418,7 +419,7 @@ class Geom:
         grid = self.get_grid()
 
         if pack is None:
-            plain = grid.prod()/np.unique(self.microstructure).size < 250
+            plain = grid.prod()/self.N_microstructure < 250
         else:
             plain = not pack
 
@@ -448,11 +449,11 @@ class Geom:
                 if compressType is None:
                     f.write('\n'.join(self.get_header())+'\n')
                 elif compressType == '.':
-                    f.write('{}\n'.format(former))
+                    f.write(f'{former}\n')
                 elif compressType == 'to':
-                    f.write('{} to {}\n'.format(start,former))
+                    f.write(f'{start} to {former}\n')
                 elif compressType == 'of':
-                    f.write('{} of {}\n'.format(reps,former))
+                    f.write(f'{reps} of {former}\n')
 
                 compressType = '.'
                 start = current
@@ -461,11 +462,11 @@ class Geom:
                 former = current
 
             if compressType == '.':
-                f.write('{}\n'.format(former))
+                f.write(f'{former}\n')
             elif compressType == 'to':
-                f.write('{} to {}\n'.format(start,former))
+                f.write(f'{start} to {former}\n')
             elif compressType == 'of':
-                f.write('{} of {}\n'.format(reps,former))
+                f.write(f'{reps} of {former}\n')
 
 
     def to_vtk(self,fname=None):
@@ -511,7 +512,7 @@ class Geom:
         if not all(isinstance(d, str) for d in directions):
             raise TypeError('Directions are not of type str.')
         elif not set(directions).issubset(valid):
-            raise ValueError('Invalid direction specified {}'.format(set(directions).difference(valid)))
+            raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')
 
         limits = [None,None] if reflect else [-2,0]
         ms = self.get_microstructure()
@@ -523,7 +524,7 @@ class Geom:
         if 'x' in directions:
             ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)
 
-        #self.add_comments('geom.py:mirror v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:mirror v{}'.format(version)
         return self.update(ms,rescale=True)
 
 
@@ -537,7 +538,7 @@ class Geom:
             number of grid points in x,y,z direction.
 
         """
-        #self.add_comments('geom.py:scale v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:scale v{}'.format(version)
         return self.update(
                            ndimage.interpolation.zoom(
                                                       self.microstructure,
@@ -564,7 +565,7 @@ class Geom:
             unique, inverse = np.unique(arr, return_inverse=True)
             return unique[np.argmax(np.bincount(inverse))]
 
-        #self.add_comments('geom.py:clean v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:clean v{}'.format(version)
        return self.update(ndimage.filters.generic_filter(
                                                          self.microstructure,
                                                          mostFrequent,
@@ -579,7 +580,7 @@ class Geom:
         for i, oldID in enumerate(np.unique(self.microstructure)):
             renumbered = np.where(self.microstructure == oldID, i+1, renumbered)
 
-        #self.add_comments('geom.py:renumber v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:renumber v{}'.format(version)
         return self.update(renumbered)
 
 
@@ -614,7 +615,7 @@ class Geom:
 
         origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid
 
-        #self.add_comments('geom.py:rotate v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:rotate v{}'.format(version)
         return self.update(microstructure_in,origin=origin,rescale=True)
 
 
@@ -646,7 +647,7 @@ class Geom:
 
         canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = self.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
 
-        #self.add_comments('geom.py:canvas v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:canvas v{}'.format(version)
         return self.update(canvas,origin=self.origin+offset*self.size/self.grid,rescale=True)
 
 
@@ -666,5 +667,5 @@ class Geom:
         for from_ms,to_ms in zip(from_microstructure,to_microstructure):
             substituted[self.microstructure==from_ms] = to_ms
 
-        #self.add_comments('geom.py:substitute v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:substitute v{}'.format(version)
         return self.update(substituted)
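Note: the geom.py hunks above repeatedly swap len(np.unique(self.microstructure)) for the N_microstructure property; both count distinct microstructure indices, as a quick check on a hypothetical array shows:

    import numpy as np
    ms = np.array([[1,1],[2,3]])
    assert len(np.unique(ms)) == np.unique(ms).size == 3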
@@ -26,7 +26,7 @@ class Symmetry:
 
         """
         if symmetry is not None and symmetry.lower() not in Symmetry.lattices:
-            raise KeyError('Symmetry/crystal system "{}" is unknown'.format(symmetry))
+            raise KeyError(f'Symmetry/crystal system "{symmetry}" is unknown')
 
         self.lattice = symmetry.lower() if isinstance(symmetry,str) else symmetry
 
@@ -40,7 +40,7 @@ class Symmetry:
 
     def __repr__(self):
         """Readable string."""
-        return '{}'.format(self.lattice)
+        return f'{self.lattice}'
 
 
     def __eq__(self, other):
@@ -348,7 +348,7 @@ class Lattice:
 
     def __repr__(self):
         """Report basic lattice information."""
-        return 'Bravais lattice {} ({} symmetry)'.format(self.lattice,self.symmetry)
+        return f'Bravais lattice {self.lattice} ({self.symmetry} symmetry)'
 
 
     # Kurdjomov--Sachs orientation relationship for fcc <-> bcc transformation
@@ -613,17 +613,17 @@ class Lattice:
         try:
             relationship = models[model]
         except KeyError :
-            raise KeyError('Orientation relationship "{}" is unknown'.format(model))
+            raise KeyError(f'Orientation relationship "{model}" is unknown')
 
         if self.lattice not in relationship['mapping']:
-            raise ValueError('Relationship "{}" not supported for lattice "{}"'.format(model,self.lattice))
+            raise ValueError(f'Relationship "{model}" not supported for lattice "{self.lattice}"')
 
         r = {'lattice':Lattice((set(relationship['mapping'])-{self.lattice}).pop()),   # target lattice
              'rotations':[] }
 
         myPlane_id = relationship['mapping'][self.lattice]
         otherPlane_id = (myPlane_id+1)%2
-        myDir_id = myPlane_id +2
+        myDir_id = myPlane_id +2
         otherDir_id = otherPlane_id +2
 
         for miller in np.hstack((relationship['planes'],relationship['directions'])):
@@ -50,8 +50,7 @@ class Result:
             self.version_minor = f.attrs['DADF5-minor']
 
             if self.version_major != 0 or not 2 <= self.version_minor <= 6:
-                raise TypeError('Unsupported DADF5 version {}.{} '.format(self.version_major,
-                                                                          self.version_minor))
+                raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
 
             self.structured = 'grid' in f['geometry'].attrs.keys()
 
@@ -107,7 +106,7 @@ class Result:
         self.pick('increments',all_selected_increments)
 
         in_between = '' if len(all_selected_increments) < 3 else \
-                     ''.join(['\n{}\n ...\n'.format(inc) for inc in all_selected_increments[1:-2]])
+                     ''.join([f'\n{inc}\n ...\n' for inc in all_selected_increments[1:-2]])
 
         return util.srepr(first + in_between + last)
 
@@ -137,7 +136,7 @@ class Result:
 
         if what == 'increments':
             choice = [c if isinstance(c,str) and c.startswith('inc') else
-                      'inc{}'.format(c) for c in choice]
+                      f'inc{c}' for c in choice]
         elif what == 'times':
             what = 'increments'
             if choice == ['*']:
@@ -268,7 +267,7 @@ class Result:
     def rename(self,name_old,name_new):
         """
         Rename datasets.
-
+
         Parameters
         ----------
         name_old : str
@@ -412,21 +411,19 @@ class Result:
         message = ''
         with h5py.File(self.fname,'r') as f:
             for i in self.iterate('increments'):
-                message += '\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
+                message += f'\n{i} ({self.times[self.increments.index(i)]}s)\n'
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                     for oo in self.iterate(o):
-                        message += ' {}\n'.format(oo)
+                        message += f' {oo}\n'
                        for pp in self.iterate(p):
-                            message += ' {}\n'.format(pp)
+                            message += f' {pp}\n'
                            group = '/'.join([i,o[:-1],oo,pp])           # o[:-1]: plural/singular issue
                            for d in f[group].keys():
                                try:
                                    dataset = f['/'.join([group,d])]
+                                    unit = f" / {dataset.attrs['Unit'].decode()}" if 'Unit' in dataset.attrs else ''
                                    description = dataset.attrs['Description'].decode()
-                                    if 'Unit' in dataset.attrs:
-                                        message += ' {} / ({}): {}\n'.format(d,dataset.attrs['Unit'].decode(),description)
-                                    else:
-                                        message += ' {}: {}\n'.format(d,description)
+                                    message += f' {d}{unit}: {description}\n'
                                except KeyError:
                                    pass
         return message
@@ -528,10 +525,10 @@ class Result:
     def _add_absolute(x):
         return {
                 'data': np.abs(x['data']),
-                'label': '|{}|'.format(x['label']),
+                'label': f'|{x["label"]}|',
                 'meta': {
                          'Unit': x['meta']['Unit'],
-                         'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
+                         'Description': f"Absolute value of {x['label']} ({x['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                         }
                }
@@ -552,14 +549,14 @@ class Result:
     def _add_calculation(**kwargs):
         formula = kwargs['formula']
         for d in re.findall(r'#(.*?)#',formula):
-            formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
+            formula = formula.replace(f'#{d}#',f"kwargs['{d}']['data']")
 
         return {
                 'data': eval(formula),
                 'label': kwargs['label'],
                 'meta': {
                          'Unit': kwargs['unit'],
-                         'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
+                         'Description': f"{kwargs['description']} (formula: {kwargs['formula']})",
                          'Creator': inspect.stack()[0][3][1:]
                         }
                }
@@ -596,8 +593,9 @@ class Result:
                 'label': 'sigma',
                 'meta': {
                          'Unit': P['meta']['Unit'],
-                         'Description': 'Cauchy stress calculated from {} ({}) and {} ({})'\
-                                        .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                         'Description': "Cauchy stress calculated "
+                                        f"from {P['label']} ({P['meta']['Description']})"
+                                        f" and {F['label']} ({F['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
               }
@@ -620,10 +618,10 @@ class Result:
     def _add_determinant(T):
         return {
                 'data': np.linalg.det(T['data']),
-                'label': 'det({})'.format(T['label']),
+                'label': f"det({T['label']})",
                 'meta': {
                          'Unit': T['meta']['Unit'],
-                         'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                         'Description': f"Determinant of tensor {T['label']} ({T['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
              }
@@ -644,10 +642,10 @@ class Result:
     def _add_deviator(T):
         return {
                 'data': mechanics.deviatoric_part(T['data']),
-                'label': 's_{}'.format(T['label']),
+                'label': f"s_{T['label']}",
                 'meta': {
                          'Unit': T['meta']['Unit'],
-                         'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                         'Description': f"Deviator of tensor {T['label']} ({T['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
              }
@@ -675,10 +673,10 @@ class Result:
 
         return {
                 'data': mechanics.eigenvalues(T_sym['data'])[:,p],
-                'label': 'lambda_{}({})'.format(eigenvalue,T_sym['label']),
+                'label': f"lambda_{eigenvalue}({T_sym['label']})",
                 'meta' : {
                           'Unit': T_sym['meta']['Unit'],
-                          'Description': '{} eigenvalue of {} ({})'.format(label,T_sym['label'],T_sym['meta']['Description']),
+                          'Description': f"{label} eigenvalue of {T_sym['label']} ({T_sym['meta']['Description']})",
                           'Creator': inspect.stack()[0][3][1:]
                         }
              }
@@ -708,14 +706,14 @@ class Result:
         print('p',eigenvalue)
         return {
                 'data': mechanics.eigenvectors(T_sym['data'])[:,p],
-                'label': 'v_{}({})'.format(eigenvalue,T_sym['label']),
+                'label': f"v_{eigenvalue}({T_sym['label']})",
                 'meta' : {
                           'Unit': '1',
-                          'Description': 'Eigenvector corresponding to {} eigenvalue of {} ({})'\
-                                         .format(label,T_sym['label'],T_sym['meta']['Description']),
+                          'Description': f"Eigenvector corresponding to {label} eigenvalue"
+                                         f" of {T_sym['label']} ({T_sym['meta']['Description']})",
                           'Creator': inspect.stack()[0][3][1:]
                         }
               }
     def add_eigenvector(self,T_sym,eigenvalue='max'):
         """
         Add eigenvector of symmetric tensor.
 
@@ -774,10 +772,10 @@ class Result:
     def _add_maximum_shear(T_sym):
         return {
                 'data': mechanics.maximum_shear(T_sym['data']),
-                'label': 'max_shear({})'.format(T_sym['label']),
+                'label': f"max_shear({T_sym['label']})",
                 'meta': {
                          'Unit': T_sym['meta']['Unit'],
-                         'Description': 'Maximum shear component of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
+                         'Description': f"Maximum shear component of {T_sym['label']} ({T_sym['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
              }
@@ -800,11 +798,11 @@ class Result:
              'stress'
 
         return {
-                'data': mechanics.Mises_strain(T_sym['data']) if t=='strain' else mechanics.Mises_stress(T_sym['data']),
-                'label': '{}_vM'.format(T_sym['label']),
+                'data': (mechanics.Mises_strain if t=='strain' else mechanics.Mises_stress)(T_sym['data']),
+                'label': f"{T_sym['label']}_vM",
                 'meta': {
                          'Unit': T_sym['meta']['Unit'],
-                         'Description': 'Mises equivalent {} of {} ({})'.format(t,T_sym['label'],T_sym['meta']['Description']),
+                         'Description': f"Mises equivalent {t} of {T_sym['label']} ({T_sym['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
              }
@@ -837,10 +835,10 @@ class Result:
 
         return {
                 'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
-                'label': '|{}|_{}'.format(x['label'],o),
+                'label': f"|{x['label']}|_{o}",
                 'meta': {
                          'Unit': x['meta']['Unit'],
-                         'Description': '{}-norm of {} {} ({})'.format(o,t,x['label'],x['meta']['Description']),
+                         'Description': f"{o}-norm of {t} {x['label']} ({x['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
             }
@@ -866,19 +864,20 @@ class Result:
                 'label': 'S',
                 'meta': {
                          'Unit': P['meta']['Unit'],
-                         'Description': '2. Piola-Kirchhoff stress calculated from {} ({}) and {} ({})'\
-                                        .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                         'Description': "2. Piola-Kirchhoff stress calculated "
+                                        f"from {P['label']} ({P['meta']['Description']})"
+                                        f" and {F['label']} ({F['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
             }
     def add_PK2(self,P='P',F='F'):
         """
-        Add second Piola-Kirchhoff calculated from first Piola-Kirchhoff stress and deformation gradient.
+        Add second Piola-Kirchhoff stress calculated from first Piola-Kirchhoff stress and deformation gradient.
 
         Parameters
         ----------
         P : str, optional
-            Label first Piola-Kirchhoff stress dataset. Defaults to ‘P’.
+            Label of first Piola-Kirchhoff stress dataset. Defaults to ‘P’.
         F : str, optional
             Label of deformation gradient dataset. Defaults to ‘F’.
 
@@ -928,10 +927,10 @@ class Result:
     def _add_rotational_part(F):
         return {
                 'data': mechanics.rotational_part(F['data']),
-                'label': 'R({})'.format(F['label']),
+                'label': f"R({F['label']})",
                 'meta': {
                          'Unit': F['meta']['Unit'],
-                         'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
+                         'Description': f"Rotational part of {F['label']} ({F['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
             }
@@ -952,10 +951,10 @@ class Result:
     def _add_spherical(T):
         return {
                 'data': mechanics.spherical_part(T['data']),
-                'label': 'p_{}'.format(T['label']),
+                'label': f"p_{T['label']}",
                 'meta': {
                          'Unit': T['meta']['Unit'],
-                         'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                         'Description': f"Spherical component of tensor {T['label']} ({T['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
            }
@@ -976,10 +975,10 @@ class Result:
     def _add_strain_tensor(F,t,m):
         return {
                 'data': mechanics.strain_tensor(F['data'],t,m),
-                'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
+                'label': f"epsilon_{t}^{m}({F['label']})",
                 'meta': {
                          'Unit': F['meta']['Unit'],
-                         'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
+                         'Description': f"Strain tensor of {F['label']} ({F['meta']['Description']})",
                          'Creator': inspect.stack()[0][3][1:]
                        }
           }
@@ -1006,11 +1005,11 @@ class Result:
     @staticmethod
     def _add_stretch_tensor(F,t):
         return {
-                'data': mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
-                'label': '{}({})'.format(t,F['label']),
+                'data': (mechanics.left_stretch if t.upper() == 'V' else mechanics.right_stretch)(F['data']),
+                'label': f"{t}({F['label']})",
                 'meta': {
                          'Unit': F['meta']['Unit'],
-                         'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
+                         'Description': '{} stretch tensor of {} ({})'.format('Left' if t.upper() == 'V' else 'Right',
                                                                               F['label'],F['meta']['Description']),
                          'Creator': inspect.stack()[0][3][1:]
                        }
@@ -1046,7 +1045,7 @@ class Result:
                 r = func(**datasets_in,**args)
                 return [group,r]
             except Exception as err:
-                print('Error during calculation: {}.'.format(err))
+                print(f'Error during calculation: {err}.')
                 return None
 
 
@@ -1091,11 +1090,11 @@ class Result:
 
                     for l,v in result[1]['meta'].items():
                         dataset.attrs[l]=v.encode()
-                    creator = 'damask.Result.{} v{}'.format(dataset.attrs['Creator'].decode(),version)
+                    creator = f"damask.Result.{dataset.attrs['Creator'].decode()} v{version}"
                    dataset.attrs['Creator'] = creator.encode()
 
                except (OSError,RuntimeError) as err:
-                    print('Could not add dataset: {}.'.format(err))
+                    print(f'Could not add dataset: {err}.')
                lock.release()
 
        pool.close()
@@ -1128,7 +1127,7 @@ class Result:
        time_data = ET.SubElement(time, 'DataItem')
        time_data.attrib={'Format': 'XML',
                          'NumberType': 'Float',
-                          'Dimensions': '{}'.format(len(self.times))}
+                          'Dimensions': f'{len(self.times)}'}
        time_data.text = ' '.join(map(str,self.times))
 
        attributes = []
@@ -1169,7 +1168,7 @@ class Result:
            data_items[-1].attrib={'Format': 'HDF',
                                   'Precision': '8',
                                   'Dimensions': '{} {} {} 3'.format(*(self.grid+1))}
-            data_items[-1].text='{}:/{}/geometry/u_n'.format(os.path.split(self.fname)[1],inc)
+            data_items[-1].text=f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n'
 
            for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                for oo in getattr(self,o):
@@ -1184,15 +1183,15 @@ class Result:
                        if (shape not in [(1,), (3,), (3,3)]) or dtype != np.float64: continue
 
                        attributes.append(ET.SubElement(grid, 'Attribute'))
-                        attributes[-1].attrib={'Name': '{}'.format(name.split('/',2)[2]),
+                        attributes[-1].attrib={'Name': name.split('/',2)[2],
                                               'Center': 'Cell',
                                               'AttributeType': 'Tensor'}
                        data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
                        data_items[-1].attrib={'Format': 'HDF',
                                               'NumberType': 'Float',
-                                               'Precision': '{}'.format(prec),
+                                               'Precision': f'{prec}',
                                               'Dimensions': '{} {} {} {}'.format(*self.grid,np.prod(shape))}
-                        data_items[-1].text='{}:{}'.format(os.path.split(self.fname)[1],name)
+                        data_items[-1].text=f'{os.path.split(self.fname)[1]}:{name}'
 
        with open(self.fname.with_suffix('.xdmf').name,'w') as f:
            f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())
@@ -1270,4 +1269,4 @@ class Result:
            u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
            v.add(u,'u')
 
-            v.write('{}_inc{}'.format(self.fname.stem,inc[3:].zfill(N_digits)))
+            v.write(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}')
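Note: the Mises and stretch-tensor hunks above also rewrite "f(x) if cond else g(x)" as "(f if cond else g)(x)", selecting the function first so the argument is spelled once; a minimal illustration with stand-in functions:

    def mises_strain(t): return t        # stand-ins, not the damask.mechanics API
    def mises_stress(t): return -t
    kind = 'strain'
    assert (mises_strain if kind == 'strain' else mises_stress)(1.0) == 1.0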
@@ -119,7 +119,7 @@ class Rotation:
             else:
                 raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
         else:
-            raise TypeError('Cannot rotate {}'.format(type(other)))
+            raise TypeError(f'Cannot rotate {type(other)}')
 
 
     def _standardize(self):
@@ -4,6 +4,7 @@ import pandas as pd
 import numpy as np
 
 from . import version
+from . import util
 
 class Table:
     """Store spreadsheet-like data."""
@@ -24,7 +25,7 @@ class Table:
         """
         self.comments = [] if comments is None else [c for c in comments]
         self.data = pd.DataFrame(data=data)
-        self.shapes = shapes
+        self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() }
         self._label_condensed()
 
 
@@ -33,7 +34,7 @@ class Table:
         labels = []
         for label,shape in self.shapes.items():
             size = int(np.prod(shape))
-            labels += ['{}{}'.format('' if size == 1 else '{}_'.format(i+1),label) for i in range(size)]
+            labels += [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
         self.data.columns = labels
 
 
@@ -47,8 +48,7 @@ class Table:
 
     def _add_comment(self,label,shape,info):
         if info is not None:
-            c = '{}{}: {}'.format(label,' '+str(shape) if np.prod(shape,dtype=int) > 1 else '',info)
-            self.comments.append(c)
+            self.comments.append(f'{label}{" "+str(shape) if np.prod(shape,dtype=int) > 1 else ""}: {info}')
 
 
     @staticmethod
@@ -126,8 +126,6 @@ class Table:
             Filename or file for reading.
 
         """
-        shapes = {'eu':(3,), 'pos':(2,),
-                  'IQ':(1,), 'CI':(1,), 'ID':(1,), 'intensity':(1,), 'fit':(1,)}
         try:
             f = open(fname)
         except TypeError:
@@ -136,7 +134,7 @@ class Table:
 
         content = f.readlines()
 
-        comments = ['table.py:from_ang v {}'.format(version)]
+        comments = [f'table.py:from_ang v{version}']
         for line in content:
             if line.startswith('#'):
                 comments.append(line.strip())
@@ -144,8 +142,11 @@ class Table:
                 break
 
         data = np.loadtxt(content)
-        for c in range(data.shape[1]-10):
-            shapes['n/a_{}'.format(c+1)] = (1,)
+
+        shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
+        remainder = data.shape[1]-sum(shapes.values())
+        if remainder > 0:              # 3.8 can do: if (remainder := data.shape[1]-sum(shapes.values())) > 0
+            shapes['unknown'] = remainder
 
         return Table(data,shapes,comments)
 
@@ -234,7 +235,6 @@ class Table:
 
         """
         self.data.drop(columns=label,inplace=True)
-
         del self.shapes[label]
 
 
@@ -251,8 +251,7 @@ class Table:
 
         """
         self.data.rename(columns={label_old:label_new},inplace=True)
-        c = '{} => {}{}'.format(label_old,label_new,'' if info is None else ': {}'.format(info))
-        self.comments.append(c)
+        self.comments.append(f'{label_old} => {label_new}'+('' if info is None else f': {info}'))
         self.shapes = {(label if label != label_old else label_new):self.shapes[label] for label in self.shapes}
 
 
@@ -271,7 +270,7 @@ class Table:
         self._label_flat()
         self.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
         self._label_condensed()
-        self.comments.append('sorted by [{}]'.format(', '.join(labels)))
+        self.comments.append(f'sorted by [{", ".join(labels)}]')
 
 
     def append(self,other):
@@ -328,18 +327,18 @@ class Table:
         labels = []
         for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
             if self.shapes[l] == (1,):
-                labels.append('{}'.format(l))
+                labels.append(f'{l}')
             elif len(self.shapes[l]) == 1:
-                labels += ['{}_{}'.format(i+1,l) \
+                labels += [f'{i+1}_{l}' \
                            for i in range(self.shapes[l][0])]
             else:
-                labels += ['{}:{}_{}'.format('x'.join([str(d) for d in self.shapes[l]]),i+1,l) \
+                labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
                            for i in range(np.prod(self.shapes[l]))]
 
         if new_style:
-            header = ['# {}'.format(comment) for comment in self.comments]
+            header = [f'# {comment}' for comment in self.comments]
         else:
-            header = ['{} header'.format(len(self.comments)+1)] \
+            header = [f'{len(self.comments)+1} header'] \
                     + self.comments \
 
         try:
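Note: the Table.__init__ hunk above normalizes integer shape entries to 1-tuples, which is what lets the reworked from_ang pass plain ints; the comprehension on a hypothetical dict behaves as:

    shapes = {'eu': 3, 'pos': 2, 'IQ': 1}
    normalized = {k: (v,) if isinstance(v,int) else v for k,v in shapes.items()}
    assert normalized == {'eu': (3,), 'pos': (2,), 'IQ': (1,)}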
@@ -53,7 +53,7 @@ class Test:
     self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
 
     self.parser = OptionParser(option_class=damask.extendableOption,
-                               description = '{} (Test class version: {})'.format(self.description,damask.version),
+                               description = f'{self.description} (Test class version: {damask.version})',
                                usage = './test.py [options]')
     self.parser.add_option("-k", "--keep",
                            action = "store_true",
@@ -93,7 +93,7 @@ class Test:
     for variant,object in enumerate(self.variants):
       name = self.variantName(variant)
       if self.options.show:
-        logging.critical('{}: {}'.format(variant+1,name))
+        logging.critical(f'{variant+1}: {name}')
       elif self.options.select is not None \
            and not (name in self.options.select or str(variant+1) in self.options.select):
         pass
@@ -106,12 +106,12 @@ class Test:
           self.postprocess(variant)
 
           if self.options.update:
-            if self.update(variant) != 0: logging.critical('update for "{}" failed.'.format(name))
+            if self.update(variant) != 0: logging.critical(f'update for "{name}" failed.')
           elif not (self.options.accept or self.compare(variant)):    # no update, do comparison
             return variant+1                                          # return culprit
 
       except Exception as e:
-        logging.critical('exception during variant execution: "{}"'.format(str(e)))
+        logging.critical(f'exception during variant execution: "{e}"')
         return variant+1                                              # return culprit
     return 0
 
@@ -124,13 +124,13 @@ class Test:
     try:
       shutil.rmtree(self.dirCurrent())
     except FileNotFoundError:
-      logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
+      logging.warning(f'removal of directory "{self.dirCurrent()}" not possible...')
 
     try:
       os.mkdir(self.dirCurrent())
       return True
     except FileExistsError:
-      logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent()))
+      logging.critical(f'creation of directory "{self.dirCurrent()}" failed.')
       return False
 
   def prepareAll(self):
@@ -211,7 +211,7 @@ class Test:
     try:
       shutil.copy2(source,target)
     except FileNotFoundError:
-      logging.critical('error copying {} to {}'.format(source,target))
+      logging.critical(f'error copying {source} to {target}')
      raise FileNotFoundError
 
 
@@ -222,7 +222,7 @@ class Test:
      try:
        shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
      except FileNotFoundError:
-        logging.critical('Reference2Current: Unable to copy file "{}"'.format(f))
+        logging.critical(f'Reference2Current: Unable to copy file "{f}"')
        raise FileNotFoundError
 
 
@@ -235,7 +235,7 @@ class Test:
        shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i]))
      except FileNotFoundError:
        logging.error(os.path.join(source,f))
-        logging.critical('Base2Current: Unable to copy file "{}"'.format(f))
+        logging.critical(f'Base2Current: Unable to copy file "{f}"')
        raise FileNotFoundError
 
 
@@ -246,7 +246,7 @@ class Test:
      try:
        shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i]))
      except FileNotFoundError:
-        logging.critical('Current2Reference: Unable to copy file "{}"'.format(f))
+        logging.critical(f'Current2Reference: Unable to copy file "{f}"')
        raise FileNotFoundError
 
 
@@ -257,7 +257,7 @@ class Test:
      try:
        shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i]))
      except FileNotFoundError:
-        logging.critical('Proof2Current: Unable to copy file "{}"'.format(f))
+        logging.critical(f'Proof2Current: Unable to copy file "{f}"')
        raise FileNotFoundError
 
 
@@ -267,7 +267,7 @@ class Test:
      try:
        shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
      except FileNotFoundError:
-        logging.critical('Current2Current: Unable to copy file "{}"'.format(f))
+        logging.critical(f'Current2Current: Unable to copy file "{f}"')
        raise FileNotFoundError
 
 
@@ -302,9 +302,7 @@ class Test:
      max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
      refArrayNonZero = refArrayNonZero[curArray.nonzero()]
      curArray = curArray[curArray.nonzero()]
-      print(' ********\n * maximum relative error {} between {} and {}\n ********'.format(max_err,
-                                                                                          refArrayNonZero[max_loc],
-                                                                                          curArray[max_loc]))
+      print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********')
      return max_err
    else:
      raise Exception('mismatch in array size to compare')
@@ -350,7 +348,7 @@ class Test:
 
    for i in range(dataLength):
      if headings0[i]['shape'] != headings1[i]['shape']:
-        raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label']))
+        raise Exception(f"shape mismatch between {headings0[i]['label']} and {headings1[i]['label']}")
      shape[i] = headings0[i]['shape']
      for j in range(np.shape(shape[i])[0]):
        length[i] *= shape[i][j]
@@ -358,9 +356,7 @@ class Test:
      for j in range(np.shape(normShape[i])[0]):
        normLength[i] *= normShape[i][j]
    else:
-      raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0),
-                                                                                   len(headings1),
-                                                                                   len(normHeadings)))
+      raise Exception(f'trying to compare {len(headings0)} with {len(headings1)} normed by {len(normHeadings)} data sets')
 
    table0 = damask.ASCIItable(name=file0,readonly=True)
    table0.head_read()
@@ -372,11 +368,11 @@ class Test:
      key1 = ('1_' if length[i]>1 else '') + headings1[i]['label']
      normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label']
      if key0 not in table0.labels(raw = True):
-        raise Exception('column "{}" not found in first table...\n'.format(key0))
+        raise Exception(f'column "{key0}" not found in first table...')
      elif key1 not in table1.labels(raw = True):
-        raise Exception('column "{}" not found in second table...\n'.format(key1))
+        raise Exception(f'column "{key1}" not found in second table...')
      elif normKey not in table0.labels(raw = True):
-        raise Exception('column "{}" not found in first table...\n'.format(normKey))
+        raise Exception(f'column "{normKey}" not found in first table...')
      else:
        column[0][i] = table0.label_index(key0)
        column[1][i] = table1.label_index(key1)
@@ -404,9 +400,9 @@ class Test:
          norm[i] = [1.0 for j in range(line0-len(skipLines))]
          absTol[i] = True
          if perLine:
-            logging.warning('At least one norm of "{}" in first table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
+            logging.warning(f"At least one norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
          else:
-            logging.warning('Maximum norm of "{}" in first table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
+            logging.warning(f"Maximum norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
 
    line1 = 0
    while table1.data_read():                                          # read next data line of ASCII table
@@ -418,18 +414,14 @@ class Test:
                                           norm[i][line1-len(skipLines)])
      line1 +=1
 
-    if (line0 != line1): raise Exception('found {} lines in first table but {} in second table'.format(line0,line1))
+    if (line0 != line1): raise Exception(f'found {line0} lines in first table but {line1} in second table')
 
    logging.info(' ********')
    for i in range(dataLength):
      if absTol[i]:
-        logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i],
-                                                                             headings0[i]['label'],
-                                                                             headings1[i]['label']))
+        logging.info(f" * maximum absolute error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
      else:
-        logging.info(' * maximum relative error {} between {} and {}'.format(maxError[i],
-                                                                             headings0[i]['label'],
-                                                                             headings1[i]['label']))
+        logging.info(f" * maximum relative error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
    logging.info(' ********')
    return maxError
 
@@ -480,8 +472,8 @@ class Test:
    normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
    mean = np.amax(np.abs(np.mean(normedDelta,0)))
    std = np.amax(np.std(normedDelta,0))
-    logging.info('mean: {:f}'.format(mean))
-    logging.info('std: {:f}'.format(std))
+    logging.info(f'mean: {mean:f}')
+    logging.info(f'std: {std:f}')
 
    return (mean<meanTol) & (std < stdTol)
 
@@ -521,7 +513,7 @@ class Test:
 
    for i,(table,labels) in enumerate(zip(tables,columns)):
      if np.any(dimensions != [np.prod(table.shapes[c]) for c in labels]):   # check data object consistency
-        logging.critical('Table {} differs in data layout.'.format(files[i]))
+        logging.critical(f'Table {files[i]} differs in data layout.')
        return False
      data.append(np.hstack(list(table.get(label) for label in labels)).astype(np.float))   # store
 
@@ -537,19 +529,19 @@ class Test:
 
    for i in range(len(data)):
      data[i] /= maximum                                               # normalize each table
-      logging.info('shape of data {}: {}'.format(i,data[i].shape))
+      logging.info(f'shape of data {i}: {data[i].shape}')
 
    if debug:
      violators = np.absolute(data[0]-data[1]) > atol + rtol*np.absolute(data[1])
-      logging.info('shape of violators: {}'.format(violators.shape))
+      logging.info(f'shape of violators: {violators.shape}')
      for j,culprits in enumerate(violators):
        goodguys = np.logical_not(culprits)
        if culprits.any():
-          logging.info('{} has {}'.format(j,np.sum(culprits)))
-          logging.info('deviation: {}'.format(np.absolute(data[0][j]-data[1][j])[culprits]))
-          logging.info('data : {}'.format(np.absolute(data[1][j])[culprits]))
-          logging.info('deviation: {}'.format(np.absolute(data[0][j]-data[1][j])[goodguys]))
-          logging.info('data : {}'.format(np.absolute(data[1][j])[goodguys]))
+          logging.info(f'{j} has {np.sum(culprits)}')
+          logging.info(f'deviation: {np.absolute(data[0][j]-data[1][j])[culprits]}')
+          logging.info(f'data : {np.absolute(data[1][j])[culprits]}')
+          logging.info(f'deviation: {np.absolute(data[0][j]-data[1][j])[goodguys]}')
+          logging.info(f'data : {np.absolute(data[1][j])[goodguys]}')
 
    allclose = True                                                    # start optimistic
    for i in range(1,len(data)):
@@ -588,12 +580,12 @@ class Test:
 
if culprit == 0:
|
||||
count = len(self.variants) if self.options.select is None else len(self.options.select)
|
||||
msg = 'Test passed.' if count == 1 else 'All {} tests passed.'.format(count)
|
||||
msg = 'Test passed.' if count == 1 else f'All {count} tests passed.'
|
||||
elif culprit == -1:
|
||||
msg = 'Warning: could not start test...'
|
||||
ret = 0
|
||||
else:
|
||||
msg = 'Test "{}" failed.'.format(self.variantName(culprit-1))
|
||||
msg = f'Test "{self.variantName(culprit-1)}" failed.'
|
||||
|
||||
logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n')
|
||||
return ret
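
The recurring edit throughout this commit swaps str.format for the equivalent f-string. A minimal sketch of the equivalence (values are made up, not from the commit):

    count = 3
    assert 'All {} tests passed.'.format(count) == f'All {count} tests passed.'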

@@ -51,9 +51,11 @@ class VTK:
         """
         geom = vtk.vtkRectilinearGrid()
         geom.SetDimensions(*(grid+1))
-        geom.SetXCoordinates(np_to_vtk(np.linspace(origin[0],origin[0]+size[0],grid[0]+1),deep=True))
-        geom.SetYCoordinates(np_to_vtk(np.linspace(origin[1],origin[1]+size[1],grid[1]+1),deep=True))
-        geom.SetZCoordinates(np_to_vtk(np.linspace(origin[2],origin[2]+size[2],grid[2]+1),deep=True))
+        coord = [np_to_vtk(np.linspace(origin[i],origin[i]+size[i],grid[i]+1),deep=True) for i in [0,1,2]]
+        [coord[i].SetName(n) for i,n in enumerate(['x','y','z'])]
+        geom.SetXCoordinates(coord[0])
+        geom.SetYCoordinates(coord[1])
+        geom.SetZCoordinates(coord[2])

         return VTK(geom)
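
The rewritten constructor names the three coordinate arrays, presumably so the labels survive the write/read round trip that the new reference tests rely on. A standalone sketch of the same construction with plain vtk and numpy (the grid/size/origin values are made up):

    import numpy as np
    import vtk
    from vtk.util.numpy_support import numpy_to_vtk

    grid, size, origin = np.array([5,6,7]), np.array([.6,1.,.5]), np.zeros(3)
    rgrid = vtk.vtkRectilinearGrid()
    rgrid.SetDimensions(*(grid+1))            # grid counts cells, so nodes are grid+1
    setters = (rgrid.SetXCoordinates, rgrid.SetYCoordinates, rgrid.SetZCoordinates)
    for i,(setter,name) in enumerate(zip(setters,'xyz')):
        c = numpy_to_vtk(np.linspace(origin[i],origin[i]+size[i],grid[i]+1),deep=True)
        c.SetName(name)                       # the named arrays are the point of the change
        setter(c)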

@@ -85,7 +87,7 @@ class VTK:

         geom = vtk.vtkUnstructuredGrid()
         geom.SetPoints(vtk_nodes)
-        geom.SetCells(eval('vtk.VTK_{}'.format(cell_type.split('_',1)[-1].upper())),cells)
+        geom.SetCells(eval(f'vtk.VTK_{cell_type.split("_",1)[-1].upper()}'),cells)

         return VTK(geom)

@@ -119,7 +121,7 @@ class VTK:

        Parameters
        ----------
-       fname : str
+       fname : str or pathlib.Path
           Filename for reading. Valid extensions are .vtr, .vtu, .vtp, and .vtk.
        dataset_type : str, optional
           Name of the vtk.vtkDataSet subclass when opening an .vtk file. Valid types are vtkRectilinearGrid,

@@ -129,7 +131,7 @@ class VTK:
        ext = Path(fname).suffix
        if ext == '.vtk' or dataset_type:
            reader = vtk.vtkGenericDataObjectReader()
-           reader.SetFileName(fname)
+           reader.SetFileName(str(fname))
            reader.Update()
            if dataset_type is None:
                raise TypeError('Dataset type for *.vtk file not given.')

@@ -140,7 +142,7 @@ class VTK:
            elif dataset_type.lower().endswith('polydata'):
                geom = reader.GetPolyDataOutput()
            else:
-               raise TypeError('Unknown dataset type for vtk file {}'.format(dataset_type))
+               raise TypeError(f'Unknown dataset type {dataset_type} for vtk file')
        else:
            if ext == '.vtr':
                reader = vtk.vtkXMLRectilinearGridReader()

@@ -149,9 +151,9 @@ class VTK:
            elif ext == '.vtp':
                reader = vtk.vtkXMLPolyDataReader()
            else:
-               raise TypeError('Unknown file extension {}'.format(ext))
+               raise TypeError(f'Unknown file extension {ext}')

-           reader.SetFileName(fname)
+           reader.SetFileName(str(fname))
            reader.Update()
            geom = reader.GetOutput()

@@ -164,7 +166,7 @@ class VTK:

        Parameters
        ----------
-       fname : str
+       fname : str or pathlib.Path
           Filename for writing.

        """

@@ -178,7 +180,7 @@ class VTK:
        default_ext = writer.GetDefaultFileExtension()
        ext = Path(fname).suffix
        if ext and ext != '.'+default_ext:
-           raise ValueError('Given extension {} does not match default .{}'.format(ext,default_ext))
+           raise ValueError(f'Given extension {ext} does not match default .{default_ext}')
        writer.SetFileName(str(Path(fname).with_suffix('.'+default_ext)))
        writer.SetCompressorTypeToZLib()
        writer.SetDataModeToBinary()

@@ -214,7 +216,7 @@ class VTK:
    def __repr__(self):
        """ASCII representation of the VTK data."""
        writer = vtk.vtkDataSetWriter()
-       writer.SetHeader('# DAMASK.VTK v{}'.format(version))
+       writer.SetHeader(f'# DAMASK.VTK v{version}')
        writer.WriteToOutputStringOn()
        writer.SetInputData(self.geom)
        writer.Write()

@@ -1,6 +1,7 @@
 import re
 import os

+from damask import util

 class Section():
     def __init__(self,data = {'__order__':[]},part = ''):

@@ -27,7 +28,7 @@ class Section():
         if multiKey not in self.parameters: self.parameters[multiKey] = []
         if multiKey not in self.parameters['__order__']: self.parameters['__order__'] += [multiKey]
         self.parameters[multiKey] += [[item] for item in data] if isinstance(data, list) else [[data]]

     def data(self):
         return self.parameters

@@ -42,27 +43,27 @@ class Crystallite(Section):
     def __init__(self,data = {'__order__':[]}):
         """New material.config <crystallite> section."""
         Section.__init__(self,data)


 class Phase(Section):
     def __init__(self,data = {'__order__':[]}):
         """New material.config <Phase> section."""
         Section.__init__(self,data)


 class Microstructure(Section):
     def __init__(self,data = {'__order__':[]}):
         """New material.config <microstructure> section."""
         Section.__init__(self,data)


 class Texture(Section):
     def __init__(self,data = {'__order__':[]}):
         """New material.config <texture> section."""
         Section.__init__(self,data)

     def add_component(self,theType,properties):

         scatter = properties['scatter'] if 'scatter' in list(map(str.lower,list(properties.keys()))) else 0.0
         fraction = properties['fraction'] if 'fraction' in list(map(str.lower,list(properties.keys()))) else 1.0

@@ -81,45 +82,45 @@ class Texture(Section):
             )
           )


 class Material():
     """Read, manipulate, and write material.config files."""

     def __init__(self,verbose=True):
         """Generates ordered list of parts."""
         self.parts = [
                       'homogenization',
                       'crystallite',
                       'phase',
                       'texture',
                       'microstructure',
                      ]
-        self.data = {\
+        self.data = {
                      'homogenization': {'__order__': []},
                      'microstructure': {'__order__': []},
                      'crystallite':    {'__order__': []},
                      'phase':          {'__order__': []},
                      'texture':        {'__order__': []},
                     }
         self.verbose = verbose

     def __repr__(self):
         """Returns current data structure in material.config format."""
         me = []
         for part in self.parts:
-            if self.verbose: print('processing <{}>'.format(part))
+            if self.verbose: print(f'processing <{part}>')
             me += ['',
                    '#'*100,
-                   '<{}>'.format(part),
+                   f'<{part}>',
                    '#'*100,
                   ]
             for section in self.data[part]['__order__']:
-                me += ['[{}] {}'.format(section,'#'+'-'*max(0,96-len(section)))]
+                me += [f'[{section}] {"#"+"-"*max(0,96-len(section))}']
                 for key in self.data[part][section]['__order__']:
                     if key.startswith('(') and key.endswith(')'):        # multiple (key)
-                        me += ['{}\t{}'.format(key,' '.join(values)) for values in self.data[part][section][key]]
+                        me += [f'{key}\t{" ".join(values)}' for values in self.data[part][section][key]]
                     else:                                                # plain key
-                        me += ['{}\t{}'.format(key,' '.join(map(str,self.data[part][section][key])))]
+                        me += [f'{key}\t{util.srepr(self.data[part][section][key]," ")}']
         return '\n'.join(me) + '\n'
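
One wrinkle of these conversions: before Python 3.12 an f-string cannot repeat its own quote character inside a replacement field, which is why the converted lines switch to double quotes inside the braces. A small illustration (the section name is made up):

    section = 'Aluminum'
    line = f'[{section}] {"#"+"-"*max(0,96-len(section))}'
    assert line.startswith('[Aluminum] #----')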

     def parse(self, part=None, sections=[], content=None):

@@ -158,7 +159,7 @@ class Material():
                     self.data[part][name_section][items[0]].append(items[1:])
                 else:                                                    # plain key
                     self.data[part][name_section][items[0]] = items[1:]


     def read(self,filename=None):

@@ -174,20 +175,20 @@ class Material():
                     recursiveRead(match.group(1) if match.group(1).startswith('/') else
                                   os.path.normpath(os.path.join(os.path.dirname(filename),match.group(1))))
             return result

         c = recursiveRead(filename)
         for p in self.parts:
             self.parse(part=p, content=c)

     def write(self,filename='material.config', overwrite=False):
         """Write to material.config."""
         i = 0
         outname = filename
         while os.path.exists(outname) and not overwrite:
             i += 1
-            outname = '{}_{}'.format(filename,i)
+            outname = f'{filename}_{i}'

-        if self.verbose: print('Writing material data to {}'.format(outname))
+        if self.verbose: print(f'Writing material data to {outname}')
         with open(outname,'w') as f:
             f.write(str(self))
         return outname

@@ -196,11 +197,11 @@ class Material():
         """Add Update."""
         part = part.lower()
         section = section.lower()
-        if part not in self.parts: raise Exception('invalid part {}'.format(part))
+        if part not in self.parts: raise Exception(f'invalid part {part}')

         if not isinstance(initialData, dict):
             initialData = initialData.data()

         if section not in self.data[part]: self.data[part]['__order__'] += [section]
         if section in self.data[part] and merge:
             for existing in self.data[part][section]['__order__']:      # replace existing

@@ -215,9 +216,9 @@ class Material():
                     self.data[part][section]['__order__'] += [new]
         else:
             self.data[part][section] = initialData


     def add_microstructure(self, section='',
                            components={},                               # dict of phase,texture, and fraction lists

@@ -226,7 +227,7 @@ class Material():
         microstructure = Microstructure()
         # make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python)
         components=dict((k.lower(), v) for k,v in components.items())

         for key in ['phase','texture','fraction','crystallite']:
             if isinstance(components[key], list):
                 for i, x in enumerate(components[key]):

@@ -239,7 +240,7 @@ class Material():
                     components[key] = [components[key].lower()]
                 except AttributeError:
                     components[key] = [components[key]]

         for (phase,texture,fraction,crystallite) in zip(components['phase'],components['texture'],
                                                         components['fraction'],components['crystallite']):
             microstructure.add_multiKey('constituent','phase %i\ttexture %i\tfraction %g\ncrystallite %i'%(

@@ -247,19 +248,19 @@ class Material():
                                         self.data['texture']['__order__'].index(texture)+1,
                                         fraction,
                                         self.data['crystallite']['__order__'].index(crystallite)+1))

         self.add_section('microstructure',section,microstructure)

     def change_value(self, part=None,
                      section=None,
                      key=None,
                      value=None):
         if not isinstance(value,list):
             if not isinstance(value,str):
                 value = '%s'%value
             value = [value]
         newlen = len(value)
         oldval = self.data[part.lower()][section.lower()][key.lower()]
         oldlen = len(oldval)
         print('changing %s:%s:%s from %s to %s '%(part.lower(),section.lower(),key.lower(),oldval,value))

@@ -233,7 +233,7 @@ def cell_coord0_gridSizeOrigin(coord0,ordered=True):
     origin[_np.where(grid==1)] = 0.0

     if grid.prod() != len(coord0):
-        raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid))
+        raise ValueError(f'Data count {len(coord0)} does not match grid {grid}.')

     start = origin + delta*.5
     end   = origin - delta*.5 + size
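
Note that without the f prefix the braces print literally rather than substituting, so the replacement string must be an f-string for the message to carry the actual counts. The difference in one self-contained check:

    coord0 = list(range(12))
    plain     = 'Data count {len(coord0)} does not match grid.'     # braces stay literal
    formatted = f'Data count {len(coord0)} does not match grid.'
    assert plain != formatted and '12' in formatted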

@@ -384,7 +384,7 @@ def node_coord0_gridSizeOrigin(coord0,ordered=True):
     origin = mincorner

     if (grid+1).prod() != len(coord0):
-        raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid))
+        raise ValueError(f'Data count {len(coord0)} does not match grid {grid}.')

     atol = _np.max(size)*5e-2
     if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \

@@ -25,7 +25,7 @@ class Marc:
     def library_path(self):

         path_MSC = Environment().options['MSC_ROOT']
-        path_lib = Path('{}/mentat{}/shlib/linux64'.format(path_MSC,self.version))
+        path_lib = Path(f'{path_MSC}/mentat{self.version}/shlib/linux64')

         return path_lib if path_lib.is_dir() else None

@@ -34,7 +34,7 @@ class Marc:
     def tools_path(self):

         path_MSC = Environment().options['MSC_ROOT']
-        path_tools = Path('{}/marc{}/tools'.format(path_MSC,self.version))
+        path_tools = Path(f'{path_MSC}/marc{self.version}/tools')

         return path_tools if path_tools.is_dir() else None

@@ -51,21 +51,21 @@ class Marc:

         env = Environment()

-        user = env.root_dir/Path('src/DAMASK_marc{}'.format(self.version)).with_suffix('.f90' if compile else '.marc')
-        if not user.is_file():
-            raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),user))
+        usersub = env.root_dir/Path(f'src/DAMASK_marc{self.version}').with_suffix('.f90' if compile else '.marc')
+        if not usersub.is_file():
+            raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),usersub))

         # Define options [see Marc Installation and Operation Guide, pp 23]
-        script = 'run_damask_{}mp'.format(optimization)
+        script = f'run_damask_{optimization}mp'

         cmd = str(self.tools_path/Path(script)) + \
               ' -jid ' + model + '_' + job + \
               ' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no'

-        if compile: cmd += ' -u ' + str(user) + ' -save y'
-        else:       cmd += ' -prog ' + str(user.with_suffix(''))
+        if compile: cmd += ' -u ' + str(usersub) + ' -save y'
+        else:       cmd += ' -prog ' + str(usersub.with_suffix(''))

-        print('job submission {} compilation: {}'.format('with' if compile else 'without',user))
+        print('job submission {} compilation: {}'.format(('with' if compile else 'without'),usersub))
         if logfile: log = open(logfile, 'w')
         print(cmd)
         process = subprocess.Popen(shlex.split(cmd),stdout = log,stderr = subprocess.STDOUT)

@@ -125,7 +125,7 @@ def execute(cmd,
     stdout = stdout.decode('utf-8').replace('\x08','')
     stderr = stderr.decode('utf-8').replace('\x08','')
     if process.returncode != 0:
-        raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode))
+        raise RuntimeError(f'{cmd} failed with returncode {process.returncode}')
     return stdout, stderr
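
For context, a minimal sketch of an execute-style wrapper with the same failure behavior (the signature and defaults here are assumptions, not DAMASK's exact API):

    import shlex, subprocess

    def execute(cmd, env=None):
        # run a command, capture both streams, fail loudly on nonzero exit
        p = subprocess.run(shlex.split(cmd), capture_output=True, env=env)
        stdout = p.stdout.decode('utf-8').replace('\x08','')
        stderr = p.stderr.decode('utf-8').replace('\x08','')
        if p.returncode != 0:
            raise RuntimeError(f'{cmd} failed with returncode {p.returncode}')
        return stdout, stderr

    print(execute('echo test')[0])    # -> test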

@@ -156,7 +156,7 @@ def show_progress(iterable,N_iter=None,prefix='',bar_length=50):

 def scale_to_coprime(v):
     """Scale vector to co-prime (relatively prime) integers."""
-    MAX_DENOMINATOR = 1000
+    MAX_DENOMINATOR = 1000000

     def get_square_denominator(x):
         """Denominator of the square of a number."""

@@ -166,10 +166,13 @@ def scale_to_coprime(v):
         """Least common multiple."""
         return a * b // np.gcd(a, b)

-    denominators = [int(get_square_denominator(i)) for i in v]
-    s = reduce(lcm, denominators) ** 0.5
-    m = (np.array(v)*s).astype(np.int)
-    return m//reduce(np.gcd,m)
+    m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int)
+    m = m//reduce(np.gcd,m)
+
+    if not np.allclose(v[v.nonzero()]/m[v.nonzero()],v[v.nonzero()][0]/m[m.nonzero()][0]):
+        raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?')
+
+    return m
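
The square-denominator helper exists so entries that are only rational after squaring (such as 1/√2) can be handled; for plainly rational input the algorithm reduces to putting the fractions over a common denominator and dividing out the gcd. A simplified sketch of that core idea (not DAMASK's implementation):

    from fractions import Fraction
    from functools import reduce
    import numpy as np

    def coprime(v, max_denominator=1000000):
        fracs = [Fraction(x).limit_denominator(max_denominator) for x in v]
        common = reduce(np.lcm, [f.denominator for f in fracs])         # shared denominator
        m = np.array([f.numerator * (common // f.denominator) for f in fracs])
        return m // reduce(np.gcd, m)                                   # strip the common factor

    assert list(coprime([2/3, 1/2, 1/3])) == [4, 3, 2]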

####################################################################################################

@@ -223,21 +226,20 @@ class _ProgressBar:
         self.start_time = datetime.datetime.now()
         self.last_fraction = 0.0

-        sys.stderr.write('{} {} 0% ETA n/a'.format(self.prefix, '░'*self.bar_length))
+        sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
         sys.stderr.flush()

     def update(self,iteration):

         fraction = (iteration+1) / self.total
+        filled_length = int(self.bar_length * fraction)

-        if int(self.bar_length * fraction) > int(self.bar_length * self.last_fraction):
+        if filled_length > int(self.bar_length * self.last_fraction):
+            bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
             delta_time = datetime.datetime.now() - self.start_time
             remaining_time = (self.total - (iteration+1)) * delta_time / (iteration+1)
             remaining_time -= datetime.timedelta(microseconds=remaining_time.microseconds)   # remove μs

-            filled_length = int(self.bar_length * fraction)
-            bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
-            sys.stderr.write('\r{} {} {:>4.0%} ETA {}'.format(self.prefix, bar, fraction, remaining_time))
+            sys.stderr.write(f'\r{self.prefix} {bar} {fraction:>4.0%} ETA {remaining_time}')
             sys.stderr.flush()

         self.last_fraction = fraction
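
The ETA above simply scales the elapsed time by the ratio of remaining to completed work. The arithmetic in isolation (numbers are made up):

    import datetime
    done, total = 25, 100
    elapsed = datetime.timedelta(seconds=50)        # spent on the first 25 iterations
    remaining = (total - done) * elapsed / done     # 75 left at 2 s apiece
    assert remaining == datetime.timedelta(seconds=150)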

@@ -249,7 +251,7 @@ class _ProgressBar:

 class bcolors:
     """
-    ASCII Colors.
+    ASCII colors.

     https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py
     https://stackoverflow.com/questions/287871

@@ -1,4 +1,4 @@
-import os
+from pathlib import Path

 import pytest

@@ -15,4 +15,4 @@ def update(request):
 @pytest.fixture
 def reference_dir_base():
     """Directory containing reference results."""
-    return os.path.join(os.path.dirname(__file__),'reference')
+    return Path(__file__).parent/'reference'
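
pathlib's / operator composes paths while keeping a Path object, which is what lets dependent fixtures keep chaining (e.g. reference_dir_base/'VTK' below). A brief sketch (locations are made up):

    from pathlib import Path
    base = Path('/tmp/tests')
    ref = base/'reference'/'VTK'        # replaces os.path.join(base,'reference','VTK')
    assert ref.as_posix() == '/tmp/tests/reference/VTK'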

@@ -0,0 +1,67 @@
+<?xml version="1.0"?>
+<VTKFile type="PolyData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
+<PolyData>
+<Piece NumberOfPoints="10" NumberOfVerts="0" NumberOfLines="0" NumberOfStrips="0" NumberOfPolys="0">
+<PointData>
+<DataArray type="Float64" Name="coordinates" NumberOfComponents="3" format="binary" RangeMin="0.7453559924999299" RangeMax="2.449489742783178">
+AQAAAACAAADwAAAAagAAAA==eF5jYMAGPuyXOV4IRHvsIfQZe8u+xxZ9j1/sh/Eh9B37IjDj4f5QMLhqD6Gf2odB+Pth6iD0G3sFiLn7ofqg+j/CxOH6IfRX+xCouRYQ+6H0D/sCqH6YuRD6D1wd1B9QmsEBxgcAJsNfhw==
+<InformationKey name="L2_NORM_RANGE" location="vtkDataArray" length="2">
+<Value index="0">
+0.7453559925
+</Value>
+<Value index="1">
+2.4494897428
+</Value>
+</InformationKey>
+</DataArray>
+</PointData>
+<CellData>
+</CellData>
+<Points>
+<DataArray type="Float64" Name="Points" NumberOfComponents="3" format="binary" RangeMin="0.7453559924999299" RangeMax="2.449489742783178">
+AQAAAACAAADwAAAAagAAAA==eF5jYMAGPuyXOV4IRHvsIfQZe8u+xxZ9j1/sh/Eh9B37IjDj4f5QMLhqD6Gf2odB+Pth6iD0G3sFiLn7ofqg+j/CxOH6IfRX+xCouRYQ+6H0D/sCqH6YuRD6D1wd1B9QmsEBxgcAJsNfhw==
+<InformationKey name="L2_NORM_RANGE" location="vtkDataArray" length="2">
+<Value index="0">
+0.7453559925
+</Value>
+<Value index="1">
+2.4494897428
+</Value>
+</InformationKey>
+</DataArray>
+</Points>
+<Verts>
+<DataArray type="Int64" Name="connectivity" format="binary" RangeMin="1e+299" RangeMax="-1e+299">
+AAAAAACAAAAAAAAA
+</DataArray>
+<DataArray type="Int64" Name="offsets" format="binary" RangeMin="1e+299" RangeMax="-1e+299">
+AAAAAACAAAAAAAAA
+</DataArray>
+</Verts>
+<Lines>
+<DataArray type="Int64" Name="connectivity" format="binary" RangeMin="1e+299" RangeMax="-1e+299">
+AAAAAACAAAAAAAAA
+</DataArray>
+<DataArray type="Int64" Name="offsets" format="binary" RangeMin="1e+299" RangeMax="-1e+299">
+AAAAAACAAAAAAAAA
+</DataArray>
+</Lines>
+<Strips>
+<DataArray type="Int64" Name="connectivity" format="binary" RangeMin="1e+299" RangeMax="-1e+299">
+AAAAAACAAAAAAAAA
+</DataArray>
+<DataArray type="Int64" Name="offsets" format="binary" RangeMin="1e+299" RangeMax="-1e+299">
+AAAAAACAAAAAAAAA
+</DataArray>
+</Strips>
+<Polys>
+<DataArray type="Int64" Name="connectivity" format="binary" RangeMin="1e+299" RangeMax="-1e+299">
+AAAAAACAAAAAAAAA
+</DataArray>
+<DataArray type="Int64" Name="offsets" format="binary" RangeMin="1e+299" RangeMax="-1e+299">
+AAAAAACAAAAAAAAA
+</DataArray>
+</Polys>
+</Piece>
+</PolyData>
+</VTKFile>

@@ -0,0 +1,44 @@
+<?xml version="1.0"?>
+<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
+<RectilinearGrid WholeExtent="0 5 0 6 0 7">
+<Piece Extent="0 5 0 6 0 7">
+<PointData>
+<DataArray type="Float64" Name="node" NumberOfComponents="3" format="binary" RangeMin="0" RangeMax="1.268857754044952">
+AQAAAACAAACAHwAAywMAAA==eF51mTGKVUEQRX88izByB8byY0MDlyG4CTfgFswMDWQigw8/Ejr40E7woGkYXqLJW4LDb8qmz71VDDjvWMGhijvd0KeTr8dXn/++f/x59rwIf3j6+untw1PS34S/udez8KgP97r+///w8bwIDx/f34SHD3nU4DXxIS/CVx/2N+GrTxWfUV18PC/C132xvwlf9zV51PDck/mQF+HrfNjfhK/z2cXn273+iI/nRXj4+P4mPHzI1zqSfZEX4eu+2N+Er/uanPXl9buXn+9n5n3lRTjzvvY34cx78Pgee7ye6eN5Ec6804ecefc+NfEhL8KZd+9TE58qPqO6+HhehDPv9CFn3v189mQ+5EU48+7ns4sPefhE7ujjeRHOvNOHnHmnz6gj2Rd5Ec6804ecefc+kbtLkvcLfCb3eb/AZ3Kf90uS9+njOfN+SfI+fch93ulTEx9y5p0+7Gfe6VPFZ1QXH8+Zd+6L/cw799XFZ3ju4uM58875sJ9553x28VlzN308Z94vSd6nD7nPO/d1iI/nzDv3xX7mnfs6Ep/Tafvx8eXnl+R95UU48772N+HMe/D4Hnu8nunjeRHOvNOHnHn3PjXxIS/CmXfvUxOfKj6juvh4XoQz7/QhZ979fPZkPuRFOPPu57OLD3n4RO7o43kRzrzTh5x5p8+oI9kXeRHOvNOHnHn3PnHO3iTvK+f5fkvO9xt8Jmfeg8f32GOcs9PHc57vN8k7fciZd+9TEx9ynu/0YT/Pd/pU8RnVxcdznu/cF/t5vnNfXXyG5y4+nvN853zYz/Od89nFZz1np4/nPN9vknf6kDPv9Bl1iI/nPN+5L/bzfOe+jsTndLr/Gdh+S95XXoQz72t/E868B4/vscfrmT6eF+HMO33ImXfvUxMf8iKcefc+NfGp4jOqi4/nRTjzTh9y5t3PZ0/mQ16EM+9+Prv4kIdP5I4+nhfhzDt9yJl3+ow6kn2RF+HMO33ImXfvE/fqTfK+ct7nt+Q+v8FncuY9eHyPPca9evp4zvv8JnmnDznz7n1q4kPO+zx92M/7PH2q+Izq4uM57/PcF/t5n+e+uvgMz118POd9nvNhP+/znM8uPpE7+njO+/wmeacPOfNOn1GH+HjO+zj3xX7e57mvI/GJ6pL3lfM9rkve1/4mnHkPHr+NPca72PTxnO9xXfK+9jfhzLv3qYkPOd/j6MP+Jpx5p8/6zX2R8z2O+2J/E868r//yPY7zIed7HOfD/iaceadP5I4+nvM9rkve6UPOvNNn1CE+nvM9jvtifxPOvAf/B5cwXsg=
+<InformationKey name="L2_NORM_RANGE" location="vtkDataArray" length="2">
+<Value index="0">
+0
+</Value>
+<Value index="1">
+1.268857754
+</Value>
+</InformationKey>
+</DataArray>
+</PointData>
+<CellData>
+<DataArray type="Float64" Name="cell" NumberOfComponents="3" format="binary" RangeMin="0.10871961482881586" RangeMax="1.160792402743735">
+AQAAAACAAACwEwAAZQIAAA==eF511r2KFEEYRmHjjY2N9Ao0lQ79yQy8jBVjc29gL0Ezhc1maDBfU0FB3NVgNyqQuked1leZ96sqBpo5HRx4+wS13nn989l6vjzfzm45u/vk9+/NcvL17cuHJx8Lf3D/cD4XfvPq9vmj68vCH19vLwpf/3pvbedT8crjlccrj1ce77vtXBavPF55vPJ45fG+3/hN8crjlccrj1d+vHOb7NyKV368cyte+XrUVZ901YtXftxVL175Ss/f96dX+9MPpedwew6353B7DrdnvXJ71iu3Z73pTa/cnvXK7VlvetMrt2e9cnse79wmO7fildvzeOdWvOlt3FUvXrk9j7vqE+9uWQ/46qL0HG7P4fYcbs/h9qxXbs965fasN73plduzXrk9601veuX2rFduz+Od22TnVrxyex7v3Io3vY276sUrt+dxV33i3f37/vYcbs/h9hxuz+H2rFduz3rl9pynPYfbs165PeuV27NeuT3rlduz3j//22TnVrxyew6353B7DrdnvXJ71iu353uHa8jZl9JzuD2H23O4PYfbs165PeuV27Pe9KZXbs965fasN73plduzXrk9j3duk51b8crtebxzK970Nu6qF6/cnsdd9Yl3tzzdLtbfSs/h9hxuz+H2HG7PeuX2rFduz3rTm165PeuV27Pe9KZXbs965fY83rlNdm7FK7fn8c6teNPbuKtevHJ7HnfVJ97d8uJwDdn/KD2H23O4PYfbc7g965Xbs165PetNb3rl9qxXbs9605teuT3rldvzeOc22bkVr9yexzu34k1v46568crtedzVf/4LDINsdg==
+<InformationKey name="L2_NORM_RANGE" location="vtkDataArray" length="2">
+<Value index="0">
+0.10871961483
+</Value>
+<Value index="1">
+1.1607924027
+</Value>
+</InformationKey>
+</DataArray>
+</CellData>
+<Coordinates>
+<DataArray type="Float64" Name="x" format="binary" RangeMin="0" RangeMax="0.6">
+AQAAAACAAAAwAAAAJwAAAA==eF5jYICAHXKtrwN37LOH0Ofsua4vLrDlug7l37M3BoPH9gCdQxK6
+</DataArray>
+<DataArray type="Float64" Name="y" format="binary" RangeMin="0" RangeMax="1">
+AQAAAACAAAA4AAAAIgAAAA==eF5jYICAUDA4ag+hr9pDRB9A+U/tV4HBK6j4B3sAk7wQqg==
+</DataArray>
+<DataArray type="Float64" Name="z" format="binary" RangeMin="0" RangeMax="0.5">
+AQAAAACAAABAAAAALAAAAA==eF5jYICASSqeQLTJHkIfsr+9LReITkP5l+zB3NvXoOK37SG6HtgDANusGUo=
+</DataArray>
+</Coordinates>
+</Piece>
+</RectilinearGrid>
+</VTKFile>

@@ -6,6 +6,7 @@ import numpy as np

 from damask import Geom
 from damask import Rotation
+from damask import util


 def geom_equal(a,b):

@@ -85,8 +86,8 @@ class TestGeom:
     def test_mirror(self,default,update,reference_dir,directions,reflect):
         modified = copy.deepcopy(default)
         modified.mirror(directions,reflect)
-        tag = 'directions={}_reflect={}'.format('-'.join(directions),reflect)
-        reference = os.path.join(reference_dir,'mirror_{}.geom'.format(tag))
+        tag = f'directions={"-".join(directions)}_reflect={reflect}'
+        reference = os.path.join(reference_dir,f'mirror_{tag}.geom')
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))

@@ -94,8 +95,8 @@ class TestGeom:
     def test_clean(self,default,update,reference_dir,stencil):
         modified = copy.deepcopy(default)
         modified.clean(stencil)
-        tag = 'stencil={}'.format(stencil)
-        reference = os.path.join(reference_dir,'clean_{}.geom'.format(tag))
+        tag = f'stencil={stencil}'
+        reference = os.path.join(reference_dir,f'clean_{tag}.geom')
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))

@@ -111,8 +112,8 @@ class TestGeom:
     def test_scale(self,default,update,reference_dir,grid):
         modified = copy.deepcopy(default)
         modified.scale(grid)
-        tag = 'grid={}'.format('-'.join([str(x) for x in grid]))
-        reference = os.path.join(reference_dir,'scale_{}.geom'.format(tag))
+        tag = f'grid={util.srepr(grid,"-")}'
+        reference = os.path.join(reference_dir,f'scale_{tag}.geom')
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))

@@ -150,8 +151,8 @@ class TestGeom:
     def test_rotate(self,default,update,reference_dir,Eulers):
         modified = copy.deepcopy(default)
         modified.rotate(Rotation.from_Eulers(Eulers,degrees=True))
-        tag = 'Eulers={}-{}-{}'.format(*Eulers)
-        reference = os.path.join(reference_dir,'rotate_{}.geom'.format(tag))
+        tag = f'Eulers={util.srepr(Eulers,"-")}'
+        reference = os.path.join(reference_dir,f'rotate_{tag}.geom')
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))
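
util.srepr is used here as a one-call replacement for join-with-separator over arbitrary iterables. Its exact behavior is not shown in this diff; a minimal stand-in consistent with these call sites:

    def srepr(seq, glue='\n'):
        # join any iterable item-wise; fall back to str() for scalars
        try:
            return glue.join(str(x) for x in seq)
        except TypeError:
            return str(seq)

    assert srepr([16, 24, 32], '-') == '16-24-32'    # as in f'grid={srepr(grid,"-")}'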

@@ -38,4 +38,4 @@ class TestSymmetry:
     def test_invalid_argument(self,function):
         s = Symmetry()  # noqa
         with pytest.raises(ValueError):
-            eval('s.{}(np.ones(4))'.format(function))
+            eval(f's.{function}(np.ones(4))')

@@ -49,7 +49,7 @@ class TestOrientation:
     @pytest.mark.parametrize('model',['Bain','KS','GT','GT_prime','NW','Pitsch'])
     @pytest.mark.parametrize('lattice',['fcc','bcc'])
     def test_relationship_reference(self,update,reference_dir,model,lattice):
-        reference = os.path.join(reference_dir,'{}_{}.txt'.format(lattice,model))
+        reference = os.path.join(reference_dir,f'{lattice}_{model}.txt')
         ori = Orientation(Rotation(),lattice)
         eu = np.array([o.rotation.as_Eulers(degrees=True) for o in ori.relatedOrientations(model)])
         if update:

@@ -137,7 +137,7 @@ class TestResult:
         default.add_Cauchy('P','F')
         default.add_eigenvalue('sigma',eigenvalue)
         loc = {'sigma' :default.get_dataset_location('sigma'),
-               'lambda':default.get_dataset_location('lambda_{}(sigma)'.format(eigenvalue))}
+               'lambda':default.get_dataset_location(f'lambda_{eigenvalue}(sigma)')}
         in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True)
         in_file = default.read_dataset(loc['lambda'],0)
         assert np.allclose(in_memory,in_file)

@@ -147,7 +147,7 @@ class TestResult:
         default.add_Cauchy('P','F')
         default.add_eigenvector('sigma',eigenvalue)
         loc = {'sigma' :default.get_dataset_location('sigma'),
-               'v(sigma)':default.get_dataset_location('v_{}(sigma)'.format(eigenvalue))}
+               'v(sigma)':default.get_dataset_location(f'v_{eigenvalue}(sigma)')}
         in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx]
         in_file = default.read_dataset(loc['v(sigma)'],0)
         assert np.allclose(in_memory,in_file)

@@ -179,7 +179,7 @@ class TestResult:
         t = ['V','U'][np.random.randint(0,2)]
         m = np.random.random()*2.0 - 1.0
         default.add_strain_tensor('F',t,m)
-        label = 'epsilon_{}^{}(F)'.format(t,m)
+        label = f'epsilon_{t}^{m}(F)'
         default.add_Mises(label)
         loc = {label :default.get_dataset_location(label),
                label+'_vM':default.get_dataset_location(label+'_vM')}

@@ -248,7 +248,7 @@ class TestResult:
         t = ['V','U'][np.random.randint(0,2)]
         m = np.random.random()*2.0 - 1.0
         default.add_strain_tensor('F',t,m)
-        label = 'epsilon_{}^{}(F)'.format(t,m)
+        label = f'epsilon_{t}^{m}(F)'
         loc = {'F': default.get_dataset_location('F'),
                label: default.get_dataset_location(label)}
         in_memory = mechanics.strain_tensor(default.read_dataset(loc['F'],0),t,m)

@@ -556,7 +556,7 @@ def mul(me, other):
         else:
             raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
     else:
-        raise TypeError('Cannot rotate {}'.format(type(other)))
+        raise TypeError(f'Cannot rotate {type(other)}')


 class TestRotation:

@@ -878,7 +878,7 @@ class TestRotation:
     def test_invalid_P(self,fr,to):
         R = Rotation.from_random(np.random.randint(8,32,(3)))  # noqa
         with pytest.raises(ValueError):
-            fr(eval('R.{}()'.format(to)),P=-30)
+            fr(eval(f'R.{to}()'),P=-30)

     @pytest.mark.parametrize('shape',[None,(3,),(4,2)])
     def test_broadcast(self,shape):

@@ -1,14 +1,13 @@
-import os
-
 import pytest
 import numpy as np

 from damask import VTK
+from damask import grid_filters

 @pytest.fixture
 def reference_dir(reference_dir_base):
     """Directory containing reference results."""
-    return os.path.join(reference_dir_base,'Result')
+    return reference_dir_base/'VTK'

 class TestVTK:

@@ -18,22 +17,22 @@ class TestVTK:
         origin = np.random.random(3)
         v = VTK.from_rectilinearGrid(grid,size,origin)
         string = v.__repr__()
-        v.write(os.path.join(tmp_path,'rectilinearGrid'))
-        vtr = VTK.from_file(os.path.join(tmp_path,'rectilinearGrid.vtr'))
-        with open(os.path.join(tmp_path,'rectilinearGrid.vtk'),'w') as f:
+        v.write(tmp_path/'rectilinearGrid')
+        vtr = VTK.from_file(tmp_path/'rectilinearGrid.vtr')
+        with open(tmp_path/'rectilinearGrid.vtk','w') as f:
             f.write(string)
-        vtk = VTK.from_file(os.path.join(tmp_path,'rectilinearGrid.vtk'),'VTK_rectilinearGrid')
+        vtk = VTK.from_file(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid')
         assert(string == vtr.__repr__() == vtk.__repr__())

     def test_polyData(self,tmp_path):
         points = np.random.rand(3,100)
         v = VTK.from_polyData(points)
         string = v.__repr__()
-        v.write(os.path.join(tmp_path,'polyData'))
-        vtp = VTK.from_file(os.path.join(tmp_path,'polyData.vtp'))
-        with open(os.path.join(tmp_path,'polyData.vtk'),'w') as f:
+        v.write(tmp_path/'polyData')
+        vtp = VTK.from_file(tmp_path/'polyData.vtp')
+        with open(tmp_path/'polyData.vtk','w') as f:
             f.write(string)
-        vtk = VTK.from_file(os.path.join(tmp_path,'polyData.vtk'),'polyData')
+        vtk = VTK.from_file(tmp_path/'polyData.vtk','polyData')
         assert(string == vtp.__repr__() == vtk.__repr__())

     @pytest.mark.parametrize('cell_type,n',[

@@ -48,11 +47,11 @@ class TestVTK:
         connectivity = np.random.choice(np.arange(n),n,False).reshape(-1,n)
         v = VTK.from_unstructuredGrid(nodes,connectivity,cell_type)
         string = v.__repr__()
-        v.write(os.path.join(tmp_path,'unstructuredGrid'))
-        vtu = VTK.from_file(os.path.join(tmp_path,'unstructuredGrid.vtu'))
-        with open(os.path.join(tmp_path,'unstructuredGrid.vtk'),'w') as f:
+        v.write(tmp_path/'unstructuredGrid')
+        vtu = VTK.from_file(tmp_path/'unstructuredGrid.vtu')
+        with open(tmp_path/'unstructuredGrid.vtk','w') as f:
             f.write(string)
-        vtk = VTK.from_file(os.path.join(tmp_path,'unstructuredGrid.vtk'),'unstructuredgrid')
+        vtk = VTK.from_file(tmp_path/'unstructuredGrid.vtk','unstructuredgrid')
         assert(string == vtu.__repr__() == vtk.__repr__())

     @pytest.mark.parametrize('name,dataset_type',[('this_file_does_not_exist.vtk',None),

@@ -61,3 +60,29 @@ class TestVTK:
     def test_invalid_dataset_type(self,dataset_type,name):
         with pytest.raises(TypeError):
             VTK.from_file('this_file_does_not_exist.vtk',dataset_type)
+
+
+    def test_compare_reference_polyData(self,update,reference_dir,tmp_path):
+        points=np.dstack((np.linspace(0.,1.,10),np.linspace(0.,2.,10),np.linspace(-1.,1.,10))).squeeze()
+        polyData = VTK.from_polyData(points)
+        polyData.add(points,'coordinates')
+        if update:
+            polyData.write(reference_dir/'polyData')
+        else:
+            reference = VTK.from_file(reference_dir/'polyData.vtp')
+            assert polyData.__repr__() == reference.__repr__()
+
+    def test_compare_reference_rectilinearGrid(self,update,reference_dir,tmp_path):
+        grid = np.array([5,6,7],int)
+        size = np.array([.6,1.,.5])
+        rectilinearGrid = VTK.from_rectilinearGrid(grid,size)
+        c = grid_filters.cell_coord0(grid,size).reshape(-1,3,order='F')
+        n = grid_filters.node_coord0(grid,size).reshape(-1,3,order='F')
+        rectilinearGrid.add(c,'cell')
+        rectilinearGrid.add(n,'node')
+        if update:
+            rectilinearGrid.write(reference_dir/'rectilinearGrid')
+        else:
+            reference = VTK.from_file(reference_dir/'rectilinearGrid.vtr')
+            assert rectilinearGrid.__repr__() == reference.__repr__()
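
These two new tests follow an update-or-compare pattern: with the suite's update flag set, the stored reference is regenerated; otherwise the freshly built object must match it. The pattern in miniature, detached from VTK (path and payload are made up):

    from pathlib import Path

    def check_against_reference(text, reference, update=False):
        if update:
            reference.write_text(text)              # regenerate the stored reference
        else:
            assert text == reference.read_text()    # otherwise require an exact match

    ref = Path('/tmp/case.txt')
    check_against_reference('payload', ref, update=True)
    check_against_reference('payload', ref)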

@@ -30,8 +30,8 @@ class TestGridFilters:
         grid = np.random.randint(8,32,(3))
         size = np.random.random(3)
         origin = np.random.random(3)
-        coord0 = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode))        # noqa
-        _grid,_size,_origin = eval('grid_filters.{}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))'.format(mode))
+        coord0 = eval(f'grid_filters.{mode}_coord0(grid,size,origin)')                # noqa
+        _grid,_size,_origin = eval(f'grid_filters.{mode}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))')
         assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin)

     def test_displacement_fluct_equivalence(self):

@@ -67,8 +67,8 @@ class TestGridFilters:
         origin= np.random.random(3)
         size = np.random.random(3)                                                    # noqa
         grid = np.random.randint(8,32,(3))
-        shifted = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode))
-        unshifted = eval('grid_filters.{}_coord0(grid,size)'.format(mode))
+        shifted = eval(f'grid_filters.{mode}_coord0(grid,size,origin)')
+        unshifted = eval(f'grid_filters.{mode}_coord0(grid,size)')
         if mode == 'cell':
             assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid) +(3,)))
         elif mode == 'node':

@@ -1,11 +1,33 @@
 import pytest
+import numpy as np
 from damask import util


 class TestUtil:

     def test_execute_direct(self):
         out,err = util.execute('echo test')
         assert out=='test\n' and err==''

     def test_execute_env(self):
         out,err = util.execute('sh -c "echo $test_for_execute"',env={'test_for_execute':'test'})
         assert out=='test\n' and err==''

     def test_croak(self):
         util.croak('Burp!')
+
+    @pytest.mark.parametrize('input,output',
+        [
+         ([2,0],[1,0]),
+         ([0.5,0.5],[1,1]),
+         ([1./2.,1./3.],[3,2]),
+         ([2./3.,1./2.,1./3.],[4,3,2]),
+        ])
+    def test_scale2coprime(self,input,output):
+        assert np.allclose(util.scale_to_coprime(np.array(input)),
+                           np.array(output).astype(int))
+
+    def test_lackofprecision(self):
+        with pytest.raises(ValueError):
+            util.scale_to_coprime(np.array([1/3333,1,1]))
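
The final test pins down the new precision guard: 1/3333 squares to a fraction whose denominator exceeds MAX_DENOMINATOR, so the ratios get distorted and scale_to_coprime is expected to raise. The parametrize decorator above drives one test body with several input/output pairs; the same mechanism in a self-contained miniature:

    import pytest

    def double(x):
        return 2*x

    @pytest.mark.parametrize('given,expected',[(1,2),(3,6)])
    def test_double(given, expected):
        assert double(given) == expected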

@@ -49,7 +49,7 @@ module CPFEM
     iJacoStiffness                              !< frequency of stiffness update
 end type tNumerics

 type(tNumerics), private :: num

 public :: &
   CPFEM_general, &

@@ -36,16 +36,16 @@ end subroutine YAML_init
 !--------------------------------------------------------------------------------------------------
 recursive function parse_flow(flow_string) result(node)

   character(len=*), intent(inout) :: flow_string    !< YAML file in flow style
   class (tNode), pointer          :: node

   class (tNode), pointer :: myVal
   character(len=pStringLen) :: key

   integer :: e, &                                   ! end position of dictionary or list
              s, &                                   ! start position of dictionary or list
              d                                      ! position of key: value separator (':')

   flow_string = trim(adjustl(flow_string(:)))
   if (len_trim(flow_string) == 0) then
     node => emptyDict

@@ -23,7 +23,7 @@ module config
   config_homogenization, &
   config_texture, &
   config_crystallite

 character(len=pStringLen), public, protected, allocatable, dimension(:) :: &
   config_name_phase, &                              !< name of each phase
   config_name_homogenization, &                     !< name of each homogenization

@@ -84,7 +84,7 @@ module crystallite
       nState, &                                     !< state loop limit
       nStress                                       !< stress loop limit
     character(len=:), allocatable :: &
-      integrator                                    !< integrator scheme
+      integrator                                    !< integration scheme
     real(pReal) :: &
       subStepMinCryst, &                            !< minimum (relative) size of sub-step allowed during cutback
       subStepSizeCryst, &                           !< size of first substep when cutback

@@ -167,7 +167,7 @@ subroutine crystallite_init
   allocate(crystallite_converged(cMax,iMax,eMax), source=.true.)

   num_crystallite => numerics_root%get('crystallite',defaultVal=emptyDict)

   num%subStepMinCryst = num_crystallite%get_asFloat ('subStepMin', defaultVal=1.0e-3_pReal)
   num%subStepSizeCryst = num_crystallite%get_asFloat ('subStepSize', defaultVal=0.25_pReal)
   num%stepIncreaseCryst = num_crystallite%get_asFloat ('stepIncrease', defaultVal=1.5_pReal)

@@ -56,7 +56,6 @@ subroutine damage_local_init
   num_generic => numerics_root%get('generic',defaultVal=emptyDict)
   num%residualStiffness = num_generic%get_asFloat('residualStiffness', defaultVal=1.0e-6_pReal)
   if (num%residualStiffness < 0.0_pReal) call IO_error(301,ext_msg='residualStiffness')
-!----------------------------------------------------------------------------------------------

   Ninstance = count(damage_type == DAMAGE_local_ID)
   allocate(param(Ninstance))

@@ -102,7 +101,6 @@ function damage_local_updateState(subdt, ip, el)
   real(pReal) :: &
     phi, phiDot, dPhiDot_dPhi
-
   homog = material_homogenizationAt(el)
   offset = material_homogenizationMemberAt(ip,el)
   phi = damageState(homog)%subState0(1,offset)

@@ -53,14 +53,13 @@ subroutine damage_nonlocal_init
   integer :: Ninstance,NofMyHomog,h
   class(tNode), pointer :: &
     num_generic

   write(6,'(/,a)') ' <<<+- damage_'//DAMAGE_nonlocal_label//' init -+>>>'; flush(6)

 !------------------------------------------------------------------------------------
 ! read numerics parameter
   num_generic => numerics_root%get('generic',defaultVal= emptyDict)
   num%charLength = num_generic%get_asFloat('charLength',defaultVal=1.0_pReal)
-!------------------------------------------------------------------------------------

   Ninstance = count(damage_type == DAMAGE_nonlocal_ID)
   allocate(param(Ninstance))

@@ -73,13 +73,9 @@ program DAMASK_grid
   character(len=pStringLen) :: &
     incInfo, &
     loadcase_string
-  type :: tNumerics
-    integer :: &
-      maxCutBack, &                                 !< max number of cut backs
-      stagItMax                                     !< max number of field level staggered iterations
-  end type tNumerics
-
-  type(tNumerics) :: num
+  integer :: &
+    maxCutBack, &                                   !< max number of cut backs
+    stagItMax                                       !< max number of field level staggered iterations
   type(tLoadCase), allocatable, dimension(:) :: loadCases   !< array of all load cases
   type(tLoadCase) :: newLoadCase
   type(tSolutionState), allocatable, dimension(:) :: solres

@@ -120,11 +116,11 @@ program DAMASK_grid
 !-------------------------------------------------------------------------------------------------
 ! reading field paramters from numerics file and do sanity checks
   num_grid => numerics_root%get('grid', defaultVal=emptyDict)
-  num%stagItMax = num_grid%get_asInt('maxStaggeredIter',defaultVal=10)
-  num%maxCutBack = num_grid%get_asInt('maxCutBack',defaultVal=3)
+  stagItMax = num_grid%get_asInt('maxStaggeredIter',defaultVal=10)
+  maxCutBack = num_grid%get_asInt('maxCutBack',defaultVal=3)

-  if (num%stagItMax < 0) call IO_error(301,ext_msg='maxStaggeredIter')
-  if (num%maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')
+  if (stagItMax < 0) call IO_error(301,ext_msg='maxStaggeredIter')
+  if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')

 !--------------------------------------------------------------------------------------------------
 ! assign mechanics solver depending on selected type

@@ -455,7 +451,7 @@ program DAMASK_grid
       enddo
       stagIter = stagIter + 1
-      stagIterate = stagIter < num%stagItMax &
+      stagIterate = stagIter < stagItMax &
               .and. all(solres(:)%converged) &
              .and. .not. all(solres(:)%stagConverged)   ! stationary with respect to staggered iteration
     enddo

@@ -474,7 +470,7 @@ program DAMASK_grid
                                solres%converged, solres%iterationsNeeded
         flush(statUnit)
       endif
-    elseif (cutBackLevel < num%maxCutBack) then          ! further cutbacking tolerated?
+    elseif (cutBackLevel < maxCutBack) then              ! further cutbacking tolerated?
       cutBack = .true.
       stepFraction = (stepFraction - 1) * subStepFactor  ! adjust to new denominator
       cutBackLevel = cutBackLevel + 1

@@ -28,8 +28,6 @@ module grid_damage_spectral
       residualStiffness, &                          !< non-zero residual damage
       eps_damage_atol, &                            !< absolute tolerance for damage evolution
       eps_damage_rtol                               !< relative tolerance for damage evolution
-    character(len=:), allocatable :: &
-      petsc_options
   end type tNumerics

   type(tNumerics), private :: num

@@ -86,7 +84,6 @@ subroutine grid_damage_spectral_init
 !-------------------------------------------------------------------------------------------------
 ! read numerical parameters and do sanity checks
   num_grid => numerics_root%get('grid',defaultVal=emptyDict)
-  num%petsc_options = num_grid%get_asString('petsc_options',defaultVal='')
   num%itmax = num_grid%get_asInt ('itmax',defaultVal=250)
   num%eps_damage_atol = num_grid%get_asFloat ('eps_damage_atol',defaultVal=1.0e-2_pReal)
   num%eps_damage_rtol = num_grid%get_asFloat ('eps_damage_rtol',defaultVal=1.0e-6_pReal)

@@ -104,7 +101,7 @@ subroutine grid_damage_spectral_init
   call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-damage_snes_type newtonls -damage_snes_mf &
                                &-damage_snes_ksp_ew -damage_ksp_type fgmres',ierr)
   CHKERRQ(ierr)
-  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num%petsc_options,ierr)
+  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
   CHKERRQ(ierr)

 !--------------------------------------------------------------------------------------------------

@@ -42,11 +42,10 @@ module grid_mech_FEM
       eps_div_rtol, &                               !< relative tolerance for equilibrium
       eps_stress_atol, &                            !< absolute tolerance for fullfillment of stress BC
       eps_stress_rtol                               !< relative tolerance for fullfillment of stress BC
-    character(len=:), allocatable :: &
-      petsc_options
   end type tNumerics

 type(tNumerics), private :: num

 !--------------------------------------------------------------------------------------------------
 ! PETSc data
   DM, private :: mech_grid

@@ -126,7 +125,6 @@ subroutine grid_mech_FEM_init
 !-------------------------------------------------------------------------------------------------
 ! read numerical parameter and do sanity checks
   num_grid => numerics_root%get('grid',defaultVal=emptyDict)
-  num%petsc_options = num_grid%get_asString('petsc_options', defaultVal='')
   num%eps_div_atol = num_grid%get_asFloat ('eps_div_atol', defaultVal=1.0e-4_pReal)
   num%eps_div_rtol = num_grid%get_asFloat ('eps_div_rtol', defaultVal=5.0e-4_pReal)
   num%eps_stress_atol = num_grid%get_asFloat ('eps_stress_atol', defaultVal=1.0e3_pReal)

@@ -147,7 +145,7 @@ subroutine grid_mech_FEM_init
   call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type newtonls -mech_ksp_type fgmres &
                                &-mech_ksp_max_it 25 -mech_pc_type ml -mech_mg_levels_ksp_type chebyshev',ierr)
   CHKERRQ(ierr)
-  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num%petsc_options,ierr)
+  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
   CHKERRQ(ierr)

 !--------------------------------------------------------------------------------------------------

@@ -454,8 +452,6 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,ierr)
     divTol, &
     BCTol

-!------------------------------------------------------------------------------------
-
   err_div = fnorm*sqrt(wgt)*geomSize(1)/scaledGeomSize(1)/detJ
   divTol = max(maxval(abs(P_av))*num%eps_div_rtol ,num%eps_div_atol)
   BCTol = max(maxval(abs(P_av))*num%eps_stress_rtol,num%eps_stress_atol)

@@ -28,7 +28,7 @@ module grid_mech_spectral_basic

 !--------------------------------------------------------------------------------------------------
 ! derived types
-  type(tSolutionParams), private :: params
+  type(tSolutionParams) :: params

   type, private :: tNumerics
     logical :: update_gamma                         !< update gamma operator with current stiffness

@@ -40,33 +40,31 @@ module grid_mech_spectral_basic
       eps_div_rtol, &                               !< relative tolerance for equilibrium
       eps_stress_atol, &                            !< absolute tolerance for fullfillment of stress BC
       eps_stress_rtol                               !< relative tolerance for fullfillment of stress BC
-    character(len=:), allocatable :: &
-      petsc_options
   end type tNumerics

-  type(tNumerics), private :: num                   ! numerics parameters. Better name?
+  type(tNumerics) :: num                            ! numerics parameters. Better name?

 !--------------------------------------------------------------------------------------------------
 ! PETSc data
-  DM, private :: da
-  SNES, private :: snes
-  Vec, private :: solution_vec
+  DM :: da
+  SNES :: snes
+  Vec :: solution_vec

 !--------------------------------------------------------------------------------------------------
 ! common pointwise data
-  real(pReal), private, dimension(:,:,:,:,:), allocatable :: &
+  real(pReal), dimension(:,:,:,:,:), allocatable :: &
     F_lastInc, &                                    !< field of previous compatible deformation gradients
     Fdot                                            !< field of assumed rate of compatible deformation gradient

 !--------------------------------------------------------------------------------------------------
 ! stress, stiffness and compliance average etc.
-  real(pReal), private, dimension(3,3) :: &
+  real(pReal), dimension(3,3) :: &
     F_aimDot = 0.0_pReal, &                         !< assumed rate of average deformation gradient
     F_aim = math_I3, &                              !< current prescribed deformation gradient
     F_aim_lastInc = math_I3, &                      !< previous average deformation gradient
     P_av = 0.0_pReal                                !< average 1st Piola--Kirchhoff stress

-  character(len=pStringLen), private :: incInfo     !< time and increment information
+  character(len=pStringLen) :: incInfo              !< time and increment information
   real(pReal), private, dimension(3,3,3,3) :: &
     C_volAvg = 0.0_pReal, &                         !< current volume average stiffness
     C_volAvgLastInc = 0.0_pReal, &                  !< previous volume average stiffness

@@ -74,11 +72,11 @@ module grid_mech_spectral_basic
     C_minMaxAvgLastInc = 0.0_pReal, &               !< previous (min+max)/2 stiffness
     S = 0.0_pReal                                   !< current compliance (filled up with zeros)

-  real(pReal), private :: &
+  real(pReal) :: &
     err_BC, &                                       !< deviation from stress BC
     err_div                                         !< RMS of div of P

-  integer, private :: &
+  integer :: &
     totalIter = 0                                   !< total iteration in current increment

   public :: &

@@ -122,7 +120,6 @@ subroutine grid_mech_spectral_basic_init
 ! read numerical parameters and do sanity checks
   num_grid => numerics_root%get('grid',defaultVal=emptyDict)

-  num%petsc_options = num_grid%get_asString ('petsc_options', defaultVal='')
   num%update_gamma = num_grid%get_asBool ('update_gamma', defaultVal=.false.)
   num%eps_div_atol = num_grid%get_asFloat ('eps_div_atol', defaultVal=1.0e-4_pReal)
   num%eps_div_rtol = num_grid%get_asFloat ('eps_div_rtol', defaultVal=5.0e-4_pReal)

@@ -143,7 +140,7 @@ subroutine grid_mech_spectral_basic_init
 ! set default and user defined options for PETSc
   call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type ngmres',ierr)
   CHKERRQ(ierr)
-  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num%petsc_options,ierr)
+  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
   CHKERRQ(ierr)

 !--------------------------------------------------------------------------------------------------
@@ -29,12 +29,10 @@ module grid_mech_spectral_polarisation

 !--------------------------------------------------------------------------------------------------
 ! derived types
-  type(tSolutionParams), private :: params
+  type(tSolutionParams) :: params

-  type, private :: tNumerics
+  type :: tNumerics
     logical :: update_gamma                       !< update gamma operator with current stiffness
-    character(len=:), allocatable :: &
-      petsc_options
     integer :: &
       itmin, &                                    !< minimum number of iterations
       itmax                                       !< maximum number of iterations

@@ -46,21 +44,21 @@ module grid_mech_spectral_polarisation
       eps_stress_atol, &                          !< absolute tolerance for fulfillment of stress BC
       eps_stress_rtol                             !< relative tolerance for fulfillment of stress BC
     real(pReal) :: &
-      alpha, &                                     !< polarization scheme parameter 0.0 < alpha < 2.0. alpha = 1.0 ==> AL scheme, alpha = 2.0 ==> accelerated scheme
-      beta                                         !< polarization scheme parameter 0.0 < beta < 2.0. beta = 1.0 ==> AL scheme, beta = 2.0 ==> accelerated scheme
+      alpha, &                                    !< polarization scheme parameter 0.0 < alpha < 2.0. alpha = 1.0 ==> AL scheme, alpha = 2.0 ==> accelerated scheme
+      beta                                        !< polarization scheme parameter 0.0 < beta < 2.0. beta = 1.0 ==> AL scheme, beta = 2.0 ==> accelerated scheme
   end type tNumerics

-  type(tNumerics), private :: num                 ! numerics parameters. Better name?
+  type(tNumerics) :: num                          ! numerics parameters. Better name?

 !--------------------------------------------------------------------------------------------------
 ! PETSc data
-  DM, private :: da
-  SNES, private :: snes
-  Vec, private :: solution_vec
+  DM :: da
+  SNES :: snes
+  Vec :: solution_vec

 !--------------------------------------------------------------------------------------------------
 ! common pointwise data
-  real(pReal), private, dimension(:,:,:,:,:), allocatable :: &
+  real(pReal), dimension(:,:,:,:,:), allocatable :: &
     F_lastInc, &                                  !< field of previous compatible deformation gradients
     F_tau_lastInc, &                              !< field of previous incompatible deformation gradient
     Fdot, &                                       !< field of assumed rate of compatible deformation gradient

@@ -68,15 +66,15 @@ module grid_mech_spectral_polarisation

 !--------------------------------------------------------------------------------------------------
 ! stress, stiffness and compliance average etc.
-  real(pReal), private, dimension(3,3) :: &
+  real(pReal), dimension(3,3) :: &
     F_aimDot = 0.0_pReal, &                       !< assumed rate of average deformation gradient
     F_aim = math_I3, &                            !< current prescribed deformation gradient
     F_aim_lastInc = math_I3, &                    !< previous average deformation gradient
     F_av = 0.0_pReal, &                           !< average incompatible def grad field
     P_av = 0.0_pReal                              !< average 1st Piola--Kirchhoff stress

-  character(len=pStringLen), private :: incInfo   !< time and increment information
-  real(pReal), private, dimension(3,3,3,3) :: &
+  character(len=pStringLen) :: incInfo            !< time and increment information
+  real(pReal), dimension(3,3,3,3) :: &
     C_volAvg = 0.0_pReal, &                       !< current volume average stiffness
     C_volAvgLastInc = 0.0_pReal, &                !< previous volume average stiffness
     C_minMaxAvg = 0.0_pReal, &                    !< current (min+max)/2 stiffness

@@ -85,12 +83,12 @@ module grid_mech_spectral_polarisation
     C_scale = 0.0_pReal, &
     S_scale = 0.0_pReal

-  real(pReal), private :: &
+  real(pReal) :: &
     err_BC, &                                     !< deviation from stress BC
     err_curl, &                                   !< RMS of curl of F
     err_div                                       !< RMS of div of P

-  integer, private :: &
+  integer :: &
     totalIter = 0                                 !< total iteration in current increment

   public :: &

@@ -133,7 +131,6 @@ subroutine grid_mech_spectral_polarisation_init
 ! read numerical parameters
   num_grid => numerics_root%get('grid',defaultVal=emptyDict)

-  num%petsc_options = num_grid%get_asString('petsc_options', defaultVal='')
   num%update_gamma  = num_grid%get_asBool  ('update_gamma', defaultVal=.false.)
   num%eps_div_atol  = num_grid%get_asFloat ('eps_div_atol', defaultVal=1.0e-4_pReal)
   num%eps_div_rtol  = num_grid%get_asFloat ('eps_div_rtol', defaultVal=5.0e-4_pReal)

@@ -161,7 +158,7 @@ subroutine grid_mech_spectral_polarisation_init
 ! set default and user defined options for PETSc
   call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type ngmres',ierr)
   CHKERRQ(ierr)
-  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num%petsc_options,ierr)
+  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
   CHKERRQ(ierr)

 !--------------------------------------------------------------------------------------------------
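Note: the polarisation solver additionally exposes the alpha/beta polarization parameters documented in the type above. A hedged sketch of their numerics.yaml entries (the key names 'alpha' and 'beta' are assumed to mirror the Fortran names, since their get_asFloat reads are not part of the shown hunks; bounds and meaning come from the inline comments):

  grid:
    alpha: 1.0   # 0.0 < alpha < 2.0; 1.0 -> augmented Lagrange scheme, 2.0 -> accelerated scheme
    beta: 1.0    # 0.0 < beta < 2.0; same interpretation as alpha
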
@@ -24,40 +24,38 @@ module grid_thermal_spectral

 !--------------------------------------------------------------------------------------------------
 ! derived types
-  type(tSolutionParams), private :: params
+  type(tSolutionParams) :: params

-  type, private :: tNumerics
+  type :: tNumerics
     integer :: &
       itmax                                       !< maximum number of iterations
     real(pReal) :: &
       eps_thermal_atol, &                         !< absolute tolerance for thermal equilibrium
       eps_thermal_rtol                            !< relative tolerance for thermal equilibrium
-    character(len=:), allocatable :: &
-      petsc_options
   end type tNumerics

-  type(tNumerics), private :: num
+  type(tNumerics) :: num

 !--------------------------------------------------------------------------------------------------
 ! PETSc data
-  SNES, private :: thermal_snes
-  Vec, private :: solution_vec
-  PetscInt, private :: xstart, xend, ystart, yend, zstart, zend
-  real(pReal), private, dimension(:,:,:), allocatable :: &
+  SNES :: thermal_snes
+  Vec :: solution_vec
+  PetscInt :: xstart, xend, ystart, yend, zstart, zend
+  real(pReal), dimension(:,:,:), allocatable :: &
     T_current, &                                  !< field of current temperature
     T_lastInc, &                                  !< field of previous temperature
     T_stagInc                                     !< field of staggered temperature

 !--------------------------------------------------------------------------------------------------
 ! reference diffusion tensor, mobility etc.
-  integer, private :: totalIter = 0               !< total iteration in current increment
-  real(pReal), dimension(3,3), private :: K_ref
-  real(pReal), private :: mu_ref
+  integer :: totalIter = 0                        !< total iteration in current increment
+  real(pReal), dimension(3,3) :: K_ref
+  real(pReal) :: mu_ref

   public :: &
     grid_thermal_spectral_init, &
     grid_thermal_spectral_solution, &
     grid_thermal_spectral_forward
   private :: &
     formResidual

 contains

@@ -83,10 +81,11 @@ subroutine grid_thermal_spectral_init
 !-------------------------------------------------------------------------------------------------
 ! read numerical parameter and do sanity checks
   num_grid => numerics_root%get('grid',defaultVal=emptyDict)
-  num%petsc_options     = num_grid%get_asString('petsc_options',defaultVal='')
+  num%itmax             = num_grid%get_asInt   ('itmax', defaultVal=250)
   num%eps_thermal_atol  = num_grid%get_asFloat ('eps_thermal_atol',defaultVal=1.0e-2_pReal)
   num%eps_thermal_rtol  = num_grid%get_asFloat ('eps_thermal_rtol',defaultVal=1.0e-6_pReal)

+  if (num%itmax <= 1)                       call IO_error(301,ext_msg='itmax')
   if (num%eps_thermal_atol <= 0.0_pReal)    call IO_error(301,ext_msg='eps_thermal_atol')
   if (num%eps_thermal_rtol <= 0.0_pReal)    call IO_error(301,ext_msg='eps_thermal_rtol')

@@ -94,7 +93,8 @@ subroutine grid_thermal_spectral_init
 ! set default and user defined options for PETSc
   call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type ngmres',ierr)
   CHKERRQ(ierr)
-  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num%petsc_options,ierr)
+  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,&
+                                num_grid%get_asString('petsc_options',defaultVal=''),ierr)
   CHKERRQ(ierr)

 !--------------------------------------------------------------------------------------------------

@@ -156,8 +156,7 @@ function grid_thermal_spectral_solution(timeinc,timeinc_old) result(solution)
   real(pReal), intent(in) :: &
     timeinc, &                                    !< increment in time for current solution
     timeinc_old                                   !< increment in time of last increment
-  integer :: i, j, k, cell, &
-    itmax                                         !< maximum number of iterations
+  integer :: i, j, k, cell
   type(tSolutionState) :: solution
   class(tNode), pointer :: &
     num_grid

@@ -167,12 +166,6 @@ function grid_thermal_spectral_solution(timeinc,timeinc_old) result(solution)
   PetscErrorCode :: ierr
   SNESConvergedReason :: reason

-!-------------------------------------------------------------------
-! reading numerical parameter and do sanity check
-  num_grid => numerics_root%get('grid',defaultVal=emptyDict)
-  itmax = num_grid%get_asInt('itmax',defaultVal=250)
-  if (itmax <= 1) call IO_error(301,ext_msg='itmax')
-
   solution%converged =.false.

 !--------------------------------------------------------------------------------------------------

@@ -185,7 +178,7 @@ function grid_thermal_spectral_solution(timeinc,timeinc_old) result(solution)

   if (reason < 1) then
     solution%converged = .false.
-    solution%iterationsNeeded = itmax
+    solution%iterationsNeeded = num%itmax
   else
     solution%converged = .true.
     solution%iterationsNeeded = totalIter
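Note: a design point worth flagging in the thermal solver: itmax is now read and sanity-checked once in grid_thermal_spectral_init instead of on every grid_thermal_spectral_solution call, making num%itmax the single source of truth for the iteration cap. The corresponding numerics.yaml entries, with defaults taken from the reads above:

  grid:
    itmax: 250                 # maximum number of iterations
    eps_thermal_atol: 1.0e-2   # absolute tolerance for thermal equilibrium
    eps_thermal_rtol: 1.0e-6   # relative tolerance for thermal equilibrium
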
@@ -120,8 +120,7 @@ module spectral_utilities
     memory_efficient                              !< calculate gamma operator on the fly
   character(len=:), allocatable :: &
     spectral_derivative, &                        !< approximation used for derivatives in Fourier space
-    FFTW_plan_mode, &                             !< FFTW plan mode, see www.fftw.org
-    petsc_options
+    FFTW_plan_mode                                !< FFTW plan mode, see www.fftw.org
   end type tNumerics

   type(tNumerics), private :: num                 ! numerics parameters. Better name?

@@ -220,16 +219,16 @@ subroutine spectral_utilities_init
   if(debugPETSc) write(6,'(3(/,a),/)') &
     ' Initializing PETSc with debug options: ', &
     trim(PETScDebug), &
-    ' add more using the PETSc_Options keyword in numerics.config '; flush(6)
+    ' add more using the PETSc_Options keyword in numerics.yaml '; flush(6)

   num_grid => numerics_root%get('grid',defaultVal=emptyDict)
-  num%petsc_options = num_grid%get_asString('petsc_options',defaultVal='')

   call PETScOptionsClear(PETSC_NULL_OPTIONS,ierr)
   CHKERRQ(ierr)
   if(debugPETSc) call PETScOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr)
   CHKERRQ(ierr)
-  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num%petsc_options,ierr)
+  call PETScOptionsInsertString(PETSC_NULL_OPTIONS,&
+                                num_grid%get_asString('petsc_options',defaultVal=''),ierr)
   CHKERRQ(ierr)

   grid1Red = grid(1)/2 + 1

@@ -237,7 +236,7 @@ subroutine spectral_utilities_init

   write(6,'(/,a,3(i12))')  ' grid a b c: ', grid
   write(6,'(a,3(es12.5))') ' size x y z: ', geomSize

   num%memory_efficient      = num_grid%get_asInt   ('memory_efficient',      defaultVal=1) > 0
   num%FFTW_timelimit        = num_grid%get_asFloat ('fftw_timelimit',        defaultVal=-1.0_pReal)
   num%divergence_correction = num_grid%get_asInt   ('divergence_correction', defaultVal=2)
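Note: besides petsc_options, spectral_utilities reads its own switches from the same 'grid' section. A sketch with the defaults visible in this hunk (memory_efficient is read as an integer and interpreted as a flag; a negative fftw_timelimit is assumed to mean "no planning time limit", following FFTW convention):

  grid:
    memory_efficient: 1        # >0: compute the gamma operator on the fly
    fftw_timelimit: -1.0       # FFTW planning time budget in seconds
    divergence_correction: 2
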
@@ -191,7 +191,6 @@ subroutine homogenization_init
   num%subStepSizeHomog  = num_homogGeneric%get_asFloat('subStepSize', defaultVal=0.25_pReal)
   num%stepIncreaseHomog = num_homogGeneric%get_asFloat('stepIncrease', defaultVal=1.5_pReal)

-
   if (num%nMPstate < 1)                  call IO_error(301,ext_msg='nMPstate')
   if (num%subStepMinHomog <= 0.0_pReal)  call IO_error(301,ext_msg='subStepMinHomog')
   if (num%subStepSizeHomog <= 0.0_pReal) call IO_error(301,ext_msg='subStepSizeHomog')
@@ -110,7 +110,7 @@ module subroutine mech_RGC_init(num_homogMech,debug_homogenization)
   allocate(dependentState(Ninstance))

   num_RGC => num_homogMech%get('RGC',defaultVal=emptyDict)

   num%atol   = num_RGC%get_asFloat('atol', defaultVal=1.0e+4_pReal)
   num%rtol   = num_RGC%get_asFloat('rtol', defaultVal=1.0e-3_pReal)
   num%absMax = num_RGC%get_asFloat('amax', defaultVal=1.0e+10_pReal)

@@ -125,7 +125,6 @@ module subroutine mech_RGC_init(num_homogMech,debug_homogenization)
   num%volDiscrMod = num_RGC%get_asFloat('voldiscrepancymod', defaultVal=1.0e+12_pReal)
   num%volDiscrPow = num_RGC%get_asFloat('dicrepancypower', defaultVal=5.0_pReal)

-
   if (num%atol <= 0.0_pReal)   call IO_error(301,ext_msg='absTol_RGC')
   if (num%rtol <= 0.0_pReal)   call IO_error(301,ext_msg='relTol_RGC')
   if (num%absMax <= 0.0_pReal) call IO_error(301,ext_msg='absMax_RGC')
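Note: the RGC tolerances come from the homogenization branch of the numerics tree (num_homogMech%get('RGC')). The nesting below is an assumption for illustration; only the RGC key names and defaults are taken from this diff:

  homogenization:
    mech:
      RGC:
        atol: 1.0e+4    # absolute tolerance, interface traction residual
        rtol: 1.0e-3    # relative tolerance, interface traction residual
        amax: 1.0e+10   # upper bound on the residual
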
src/math.f90 (18 changed lines)

@@ -72,20 +72,11 @@ module math
     3,2, &
     3,3 &
     ],shape(MAPPLAIN))                            !< arrangement in Plain notation

-  type, private :: tNumerics
-    integer :: &
-      randomSeed                                  !< fixed seeding for pseudo-random number generator, Default 0: use random seed
-
-  end type
-
-  type(tNumerics), private :: num
-
   interface math_eye
     module procedure math_identity2nd
   end interface math_eye


 !---------------------------------------------------------------------------------------------------
   private :: &
     selfTest

@@ -99,7 +90,8 @@ subroutine math_init

   real(pReal), dimension(4) :: randTest
   integer :: &
-    randSize
+    randSize, &
+    randomSeed                                    !< fixed seeding for pseudo-random number generator, Default 0: use random seed
   integer, dimension(:), allocatable :: randInit
   class(tNode), pointer :: &
     num_generic

@@ -107,12 +99,12 @@ subroutine math_init
   write(6,'(/,a)') ' <<<+- math init -+>>>'; flush(6)

   num_generic => numerics_root%get('generic',defaultVal=emptyDict)
-  num%randomSeed = num_generic%get_asInt('random_seed', defaultVal = 0)
+  randomSeed = num_generic%get_asInt('random_seed', defaultVal = 0)

   call random_seed(size=randSize)
   allocate(randInit(randSize))
-  if (num%randomSeed > 0) then
-    randInit = num%randomSeed
+  if (randomSeed > 0) then
+    randInit = randomSeed
   else
     call random_seed()
     call random_seed(get = randInit)
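Note: the fixed random seed becomes a local of math_init rather than module state, since it is only needed once at startup. Its numerics.yaml entry, with the default and semantics from the reads and comments above:

  generic:
    random_seed: 0   # 0: non-deterministic seeding; >0: fixed seed for reproducible runs
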
@@ -63,13 +63,9 @@ program DAMASK_mesh
   character(len=pStringLen) :: &
     incInfo, &
     loadcase_string
-  type :: tNumerics
-    integer :: &
-      stagItMax, &                                !< max number of field-level staggered iterations
-      maxCutBack                                  !< max number of cutbacks
-  end type tNumerics
-
-  type(tNumerics) :: num
+  integer :: &
+    stagItMax, &                                  !< max number of field-level staggered iterations
+    maxCutBack                                    !< max number of cutbacks

   type(tLoadCase), allocatable, dimension(:) :: loadCases        !< array of all load cases
   type(tSolutionState), allocatable, dimension(:) :: solres

@@ -87,12 +83,12 @@ program DAMASK_mesh
 !---------------------------------------------------------------------
 ! reading field information from numerics file and do sanity checks
   num_mesh => numerics_root%get('mesh', defaultVal=emptyDict)
-  num%stagItMax  = num_mesh%get_asInt('maxStaggeredIter',defaultVal=10)
-  num%maxCutBack = num_mesh%get_asInt('maxCutBack',defaultVal=3)
+  stagItMax  = num_mesh%get_asInt('maxStaggeredIter',defaultVal=10)
+  maxCutBack = num_mesh%get_asInt('maxCutBack',defaultVal=3)

-  if (num%stagItMax < 0)  call IO_error(301,ext_msg='maxStaggeredIter')
-  if (num%maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')
+  if (stagItMax < 0)  call IO_error(301,ext_msg='maxStaggeredIter')
+  if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')

 ! reading basic information from load case file and allocate data structure containing load cases
   call DMGetDimension(geomMesh,dimPlex,ierr); CHKERRA(ierr)      !< dimension of mesh (2D or 3D)
   nActiveFields = 1

@@ -333,7 +329,7 @@ program DAMASK_mesh

   enddo
   stagIter = stagIter + 1
-  stagIterate = stagIter < num%stagItMax &
+  stagIterate = stagIter < stagItMax &
           .and. all(solres(:)%converged) &
           .and. .not. all(solres(:)%stagConverged)                ! stationary with respect to staggered iteration
   enddo

@@ -341,7 +337,7 @@ program DAMASK_mesh
 ! check solution
   cutBack = .False.
   if(.not. all(solres(:)%converged .and. solres(:)%stagConverged)) then   ! no solution found
-    if (cutBackLevel < num%maxCutBack) then                               ! do cut back
+    if (cutBackLevel < maxCutBack) then                                   ! do cut back
       write(6,'(/,a)') ' cut back detected'
       cutBack = .True.
       stepFraction = (stepFraction - 1) * subStepFactor                   ! adjust to new denominator
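Note: DAMASK_mesh drops its single-use tNumerics wrapper and keeps stagItMax/maxCutBack as plain locals. Both come from the 'mesh' section of numerics.yaml, as does structOrder read by FEM_utilities below. A sketch with the defaults from these hunks:

  mesh:
    maxStaggeredIter: 10   # max field-level staggered iterations
    maxCutBack: 3          # max number of time-step cutbacks
    structOrder: 2         # order of displacement shape functions
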
@@ -96,6 +96,7 @@ module FEM_utilities

 contains

+!ToDo: use functions in variable call
 !--------------------------------------------------------------------------------------------------
 !> @brief allocates all necessary fields, sets debug flags
 !--------------------------------------------------------------------------------------------------

@@ -106,8 +107,6 @@ subroutine FEM_utilities_init
     num_mesh, &
     debug_mesh                                    ! pointer to mesh debug options
   integer :: structOrder                          !< order of displacement shape functions
-  character(len=:), allocatable :: &
-    petsc_options
   character(len=pStringLen) :: &
     PETSCDEBUG = ' -snes_view -snes_monitor '

@@ -116,8 +115,7 @@ subroutine FEM_utilities_init
   write(6,'(/,a)') ' <<<+- DAMASK_FEM_utilities init -+>>>'

   num_mesh => numerics_root%get('mesh',defaultVal=emptyDict)
-  structOrder   = num_mesh%get_asInt   ('structOrder', defaultVal = 2)
-  petsc_options = num_mesh%get_asString('petsc_options', defaultVal='')
+  structOrder = num_mesh%get_asInt('structOrder', defaultVal = 2)

 !--------------------------------------------------------------------------------------------------
 ! set debugging parameters

@@ -127,7 +125,7 @@ subroutine FEM_utilities_init
   if(debugPETSc) write(6,'(3(/,a),/)') &
     ' Initializing PETSc with debug options: ', &
     trim(PETScDebug), &
-    ' add more using the PETSc_Options keyword in numerics.config '
+    ' add more using the PETSc_Options keyword in numerics.yaml '
   flush(6)
   call PetscOptionsClear(PETSC_NULL_OPTIONS,ierr)
   CHKERRQ(ierr)

@@ -140,7 +138,7 @@ subroutine FEM_utilities_init
     &-mech_pc_type ml -mech_mg_levels_ksp_type chebyshev &
     &-mech_mg_levels_pc_type sor -mech_pc_ml_nullspace user',ierr)
   CHKERRQ(ierr)
-  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,petsc_options,ierr)
+  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_mesh%get_asString('petsc_options',defaultVal=''),ierr)
   CHKERRQ(ierr)
   write(petsc_optionsOrder,'(a,i0)') '-mechFE_petscspace_degree ', structOrder
   call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_optionsOrder),ierr)
@@ -84,7 +84,6 @@ subroutine discretization_mesh_init(restart)
     num_mesh
-  integer :: integrationOrder                     !< order of quadrature rule required


   write(6,'(/,a)') ' <<<+- mesh init -+>>>'

 !--------------------------------------------------------------------------------
@@ -294,7 +294,6 @@ type(tSolutionState) function FEM_mech_solution( &
   character(len=*), intent(in) :: &
     incInfoIn

-
   PetscErrorCode :: ierr
   SNESConvergedReason :: reason
