Merge branch 'f-strings' into 'development'

F strings

See merge request damask/DAMASK!182
This commit is contained in:
Martin Diehl 2020-06-25 14:42:23 +02:00
commit 8ce5da55fc
20 changed files with 297 additions and 280 deletions
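
Throughout the merge the change is mechanical: every `'...'.format(...)` call becomes a PEP 498 f-string. A minimal sketch of the pattern (illustrative variable names, not repository code):

# Illustrative only -- hypothetical variables, not repository code.
count, name = 3, 'grain'

# before: explicit str.format call
old = 'found {} instances of {}'.format(count, name)

# after: equivalent f-string, evaluated in the enclosing scope
new = f'found {count} instances of {name}'

assert old == new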

View File

@@ -157,7 +157,7 @@ class ASCIItable():
     def head_write(self,
                    header = True):
         """Write current header information (info + labels)."""
-        head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
+        head = [f"{len(self.info)+self.__IO__['labeled']}\theader"] if header else []
         head.append(self.info)
         if self.__IO__['labeled']:
             head.append('\t'.join(map(self._quote,self.tags)))

@@ -209,7 +209,7 @@ class ASCIItable():
                 labelList.append(tags[id])
             else:
                 label = tags[id][2:]                                          # get label
-                while id < len(tags) and tags[id] == '{}_{}'.format(dim,label):  # check successors
+                while id < len(tags) and tags[id] == f'{dim}_{label}':        # check successors
                     id += 1                                                   # next label...
                     dim += 1                                                  # ...should be one higher dimension
                 labelList.append(label)                                       # reached end --> store
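
For reference, the grouping above collapses numbered column tags back into one vector label by probing successors with an increasing dimension counter. A self-contained sketch of the idea, assuming tags of the form '1_pos','2_pos',... (variable names mirror the diff):

# Sketch of the label grouping, assuming tags like '1_pos','2_pos','3_pos'.
tags = ['1_pos', '2_pos', '3_pos', 'IQ']
labelList, id = [], 0
while id < len(tags):
    if tags[id].startswith('1_'):
        label, dim = tags[id][2:], 1
        while id < len(tags) and tags[id] == f'{dim}_{label}':   # check successors
            id  += 1
            dim += 1
        labelList.append(label)            # 'pos' stands for three columns
    else:
        labelList.append(tags[id])
        id += 1
print(labelList)                           # ['pos', 'IQ']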

View File

@@ -1,4 +1,5 @@
 import numpy as np
+from . import util
 
 class Color:
     """Color representation in and conversion between different color-spaces."""

@@ -450,7 +451,7 @@ class Colormap:
         def rad_diff(a,b):
             return abs(a[2]-b[2])
 
         def adjust_hue(Msh_sat, Msh_unsat):
             """If saturation of one of the two colors is too less than the other, hue of the less."""
             if Msh_sat[0] >= Msh_unsat[0]:

@@ -502,7 +503,7 @@ class Colormap:
         [RGB] colormap for use in paraview or gmsh, or as raw string, or array.
         Arguments: name, format, steps, crop.
-        Format is one of (paraview, gmsh, raw, list).
+        Format is one of (paraview, gmsh, gom, raw, list).
         Crop selects a (sub)range in [-1.0,1.0].
         Generates sequential map if one limiting color is either white or black,
         diverging map otherwise.

@@ -511,23 +512,22 @@ class Colormap:
         frac = 0.5*(np.array(crop) + 1.0)                                  # rescale crop range to fractions
         colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).express_as(model).color for i in range(steps)]
 
         if format == 'paraview':
-            colormap = ['[\n {{\n "ColorSpace": "RGB", "Name": "{}", "DefaultMap": true,\n "RGBPoints" : ['.format(name)] \
-                     + [' {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],) \
-                        for i,color in enumerate(colors[:-1])] \
-                     + [' {:4d},{:8.6f},{:8.6f},{:8.6f} '.format(len(colors),colors[-1][0],colors[-1][1],colors[-1][2],)] \
+            colormap = [f'[\n {{\n "ColorSpace": "RGB", "Name": "{name}", "DefaultMap": true,\n "RGBPoints" : ['] \
+                     + [f' {i:4d},{color[0]:8.6f},{color[1]:8.6f},{color[2]:8.6f}{"," if i+1<len(colors) else ""}' \
+                        for i,color in enumerate(colors)] \
                      + [' ]\n }\n]']
 
         elif format == 'gmsh':
             colormap = ['View.ColorTable = {'] \
-                     + [',\n'.join(['{%s}'%(','.join([str(x*255.0) for x in color])) for color in colors])] \
+                     + [',\n'.join([','.join([str(x*255.0) for x in color]) for color in colors])] \
                      + ['}']
 
         elif format == 'gom':
-            colormap = ['1 1 ' + str(name)
-                      + ' 9 ' + str(name)
+            colormap = [ f'1 1 {name}'
+                       + f' 9 {name}'
                        + ' 0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 '
-                      + '30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 ' + str(len(colors))
-                      + ' '.join([' 0 %s 255 1'%(' '.join([str(int(x*255.0)) for x in color])) for color in reversed(colors)])]
+                       + f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(colors)}'
+                       + ' '.join([f' 0 {util.srepr((255*np.array(c)).astype(int)," ")} 255 1' for c in reversed(colors)])]
 
         elif format == 'raw':
             colormap = ['\t'.join(map(str,color)) for color in colors]
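
The paraview branch above is more than a syntax swap: the old code formatted all but the last color with a trailing comma and appended the last entry separately, while the new comprehension decides the comma inside the f-string, so the generated JSON points list never ends in a comma. A minimal sketch of that pattern:

# Minimal sketch of the trailing-comma pattern used above.
colors = [(0.0, 0.0, 0.0), (0.5, 0.5, 0.5), (1.0, 1.0, 1.0)]
lines = [f'{i},{c[0]},{c[1]},{c[2]}{"," if i+1 < len(colors) else ""}'
         for i, c in enumerate(colors)]
print('\n'.join(lines))    # the last line carries no trailing comma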

View File

@@ -43,12 +43,12 @@ class Geom:
     def __repr__(self):
         """Basic information on geometry definition."""
         return util.srepr([
-            'grid a b c: {}'.format(' x '.join(map(str,self.get_grid ()))),
-            'size x y z: {}'.format(' x '.join(map(str,self.get_size ()))),
-            'origin x y z: {}'.format(' '.join(map(str,self.get_origin()))),
-            'homogenization: {}'.format(self.get_homogenization()),
-            '# microstructures: {}'.format(len(np.unique(self.microstructure))),
-            'max microstructure: {}'.format(np.nanmax(self.microstructure)),
+            f'grid a b c: {util.srepr(self.get_grid ()," x ")}',
+            f'size x y z: {util.srepr(self.get_size ()," x ")}',
+            f'origin x y z: {util.srepr(self.get_origin()," ")}',
+            f'homogenization: {self.get_homogenization()}',
+            f'# microstructures: {self.N_microstructure}',
+            f'max microstructure: {np.nanmax(self.microstructure)}',
         ])

@@ -71,7 +71,7 @@ class Geom:
         grid_old   = self.get_grid()
         size_old   = self.get_size()
         origin_old = self.get_origin()
-        unique_old = len(np.unique(self.microstructure))
+        unique_old = self.N_microstructure
         max_old    = np.nanmax(self.microstructure)
 
         if size is not None and rescale:

@@ -85,32 +85,32 @@ class Geom:
         elif rescale:
             self.set_size(self.get_grid()/grid_old*self.size)
 
-        message = ['grid a b c: {}'.format(' x '.join(map(str,grid_old)))]
+        message = [f'grid a b c: {util.srepr(grid_old," x ")}']
         if np.any(grid_old != self.get_grid()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('grid a b c: {}'.format(' x '.join(map(str,self.get_grid())))))
+            message.append(util.emph(f'grid a b c: {util.srepr(self.get_grid()," x ")}'))
 
-        message.append('size x y z: {}'.format(' x '.join(map(str,size_old))))
+        message.append(f'size x y z: {util.srepr(size_old," x ")}')
         if np.any(size_old != self.get_size()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('size x y z: {}'.format(' x '.join(map(str,self.get_size())))))
+            message.append(util.emph(f'size x y z: {util.srepr(self.get_size()," x ")}'))
 
-        message.append('origin x y z: {}'.format(' '.join(map(str,origin_old))))
+        message.append(f'origin x y z: {util.srepr(origin_old," ")}')
         if np.any(origin_old != self.get_origin()):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('origin x y z: {}'.format(' '.join(map(str,self.get_origin())))))
+            message.append(util.emph(f'origin x y z: {util.srepr(self.get_origin()," ")}'))
 
-        message.append('homogenization: {}'.format(self.get_homogenization()))
+        message.append(f'homogenization: {self.get_homogenization()}')
 
-        message.append('# microstructures: {}'.format(unique_old))
-        if unique_old != len(np.unique(self.microstructure)):
+        message.append(f'# microstructures: {unique_old}')
+        if unique_old != self.N_microstructure:
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('# microstructures: {}'.format(len(np.unique(self.microstructure)))))
+            message.append(util.emph(f'# microstructures: {self.N_microstructure}'))
 
-        message.append('max microstructure: {}'.format(max_old))
+        message.append(f'max microstructure: {max_old}')
         if max_old != np.nanmax(self.microstructure):
             message[-1] = util.delete(message[-1])
-            message.append(util.emph('max microstructure: {}'.format(np.nanmax(self.microstructure))))
+            message.append(util.emph(f'max microstructure: {np.nanmax(self.microstructure)}'))
 
         return util.return_message(message)
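
These messages lean on util.srepr with an explicit glue string where the old code used ' x '.join(map(str,...)). Judging from its use in this diff, srepr joins the string forms of an iterable; a hedged stand-in follows (the real damask.util implementation may differ in details):

# Hedged stand-in for damask.util.srepr as used above; the real
# implementation may differ (e.g. in how scalars are handled).
def srepr(arg, glue='\n'):
    """Join the string representations of the items in arg with glue."""
    return glue.join(str(x) for x in arg)

print(srepr((16, 32, 32), ' x '))    # '16 x 32 x 32'
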
@@ -154,9 +154,9 @@ class Geom:
         """
         if microstructure is not None:
             if len(microstructure.shape) != 3:
-                raise ValueError('Invalid microstructure shape {}'.format(microstructure.shape))
+                raise ValueError(f'Invalid microstructure shape {microstructure.shape}')
             elif microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']:
-                raise TypeError('Invalid data type {} for microstructure'.format(microstructure.dtype))
+                raise TypeError(f'Invalid microstructue data type {microstructure.dtype}')
             else:
                 self.microstructure = np.copy(microstructure)

@@ -175,8 +175,8 @@ class Geom:
             grid = np.asarray(self.microstructure.shape)
             self.size = grid/np.max(grid)
         else:
-            if len(size) != 3 or any(np.array(size)<=0):
-                raise ValueError('Invalid size {}'.format(size))
+            if len(size) != 3 or any(np.array(size) <= 0):
+                raise ValueError(f'Invalid size {size}')
             else:
                 self.size = np.array(size)

@@ -193,7 +193,7 @@ class Geom:
         """
         if origin is not None:
             if len(origin) != 3:
-                raise ValueError('Invalid origin {}'.format(origin))
+                raise ValueError(f'Invalid origin {origin}')
             else:
                 self.origin = np.array(origin)

@@ -210,7 +210,7 @@ class Geom:
         """
         if homogenization is not None:
             if not isinstance(homogenization,int) or homogenization < 1:
-                raise TypeError('Invalid homogenization {}'.format(homogenization))
+                raise TypeError(f'Invalid homogenization {homogenization}')
             else:
                 self.homogenization = homogenization

@@ -222,7 +222,7 @@ class Geom:
     @property
     def N_microstructure(self):
-        return len(np.unique(self.microstructure))
+        return np.unique(self.microstructure).size
 
     def get_microstructure(self):

@@ -257,11 +257,11 @@ class Geom:
     def get_header(self):
         """Return the full header (grid, size, origin, homogenization, comments)."""
-        header = ['{} header'.format(len(self.comments)+4)] + self.comments
+        header = [f'{len(self.comments)+4} header'] + self.comments
         header.append('grid a {} b {} c {}'.format(*self.get_grid()))
         header.append('size x {} y {} z {}'.format(*self.get_size()))
         header.append('origin x {} y {} z {}'.format(*self.get_origin()))
-        header.append('homogenization {}'.format(self.get_homogenization()))
+        header.append(f'homogenization {self.get_homogenization()}')
         return header
@@ -320,7 +320,7 @@ class Geom:
             i += len(items)
 
         if i != grid.prod():
-            raise TypeError('Invalid file: expected {} entries, found {}'.format(grid.prod(),i))
+            raise TypeError(f'Invalid file: expected {grid.prod()} entries, found {i}')
 
         if not np.any(np.mod(microstructure,1) != 0.0):                    # no float present
             microstructure = microstructure.astype('int')

@@ -331,6 +331,7 @@ class Geom:
     @staticmethod
     def _find_closest_seed(seeds, weights, point):
         return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
+
     @staticmethod
     def from_Laguerre_tessellation(grid,size,seeds,weights,periodic=True):
         """

@@ -373,7 +374,7 @@ class Geom:
         else:
             microstructure = microstructure.reshape(grid)
 
-        #comments = 'geom.py:from_Laguerre_tessellation v{}'.format(version)
+        #ToDo: comments = 'geom.py:from_Laguerre_tessellation v{}'.format(version)
         return Geom(microstructure+1,size,homogenization=1)

@@ -398,7 +399,7 @@ class Geom:
         KDTree = spatial.cKDTree(seeds,boxsize=size) if periodic else spatial.cKDTree(seeds)
         devNull,microstructure = KDTree.query(coords)
 
-        #comments = 'geom.py:from_Voronoi_tessellation v{}'.format(version)
+        #ToDo: comments = 'geom.py:from_Voronoi_tessellation v{}'.format(version)
         return Geom(microstructure.reshape(grid)+1,size,homogenization=1)
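
For context, the Voronoi construction shown above relies on scipy's cKDTree, whose boxsize argument makes the nearest-seed query periodic; each grid point receives the index of its closest seed. A self-contained sketch under assumed inputs:

# Self-contained sketch of the Voronoi assignment above (assumed inputs).
import numpy as np
from scipy import spatial

size  = np.array([1.0, 1.0, 1.0])
seeds = np.random.rand(10, 3) * size                 # 10 random seed points
x, y, z = [np.linspace(0, s, 16, endpoint=False) for s in size]
coords = np.stack(np.meshgrid(x, y, z, indexing='ij'), -1).reshape(-1, 3)

tree = spatial.cKDTree(seeds, boxsize=size)          # periodic in all directions
_, microstructure = tree.query(coords)               # nearest-seed ID per point
print(microstructure.reshape(16, 16, 16).shape)      # (16, 16, 16)
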
@@ -418,7 +419,7 @@ class Geom:
         grid = self.get_grid()
 
         if pack is None:
-            plain = grid.prod()/np.unique(self.microstructure).size < 250
+            plain = grid.prod()/self.N_microstructure < 250
         else:
             plain = not pack

@@ -448,11 +449,11 @@ class Geom:
                     if compressType is None:
                         f.write('\n'.join(self.get_header())+'\n')
                     elif compressType == '.':
-                        f.write('{}\n'.format(former))
+                        f.write(f'{former}\n')
                     elif compressType == 'to':
-                        f.write('{} to {}\n'.format(start,former))
+                        f.write(f'{start} to {former}\n')
                     elif compressType == 'of':
-                        f.write('{} of {}\n'.format(reps,former))
+                        f.write(f'{reps} of {former}\n')
 
                     compressType = '.'
                     start = current

@@ -461,11 +462,11 @@ class Geom:
                 former = current
 
             if compressType == '.':
-                f.write('{}\n'.format(former))
+                f.write(f'{former}\n')
             elif compressType == 'to':
-                f.write('{} to {}\n'.format(start,former))
+                f.write(f'{start} to {former}\n')
             elif compressType == 'of':
-                f.write('{} of {}\n'.format(reps,former))
+                f.write(f'{reps} of {former}\n')
 
     def to_vtk(self,fname=None):
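
The compressType branches implement the run-length notation of the legacy geom format: a repeated value appears to be written as 'n of v', a run of consecutive IDs as 'a to b', and anything else verbatim. A hedged sketch of the reading side shows what these lines encode:

# Hedged sketch: expanding the compressed geom notation written above.
def expand(line):
    """Expand one line of the compressed geom format into grain IDs."""
    words = line.split()
    if len(words) == 3 and words[1] == 'of':             # '4 of 3' -> 3 3 3 3
        return [int(words[2])] * int(words[0])
    if len(words) == 3 and words[1] == 'to':             # '1 to 4' -> 1 2 3 4
        return list(range(int(words[0]), int(words[2]) + 1))
    return [int(w) for w in words]                       # plain listing

print(expand('4 of 3'), expand('1 to 4'), expand('7'))
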
@@ -511,7 +512,7 @@ class Geom:
         if not all(isinstance(d, str) for d in directions):
             raise TypeError('Directions are not of type str.')
         elif not set(directions).issubset(valid):
-            raise ValueError('Invalid direction specified {}'.format(set(directions).difference(valid)))
+            raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')
 
         limits = [None,None] if reflect else [-2,0]
         ms = self.get_microstructure()

@@ -523,7 +524,7 @@ class Geom:
         if 'x' in directions:
             ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)
 
-        #self.add_comments('geom.py:mirror v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:mirror v{}'.format(version)
         return self.update(ms,rescale=True)

@@ -537,7 +538,7 @@ class Geom:
             number of grid points in x,y,z direction.
 
         """
-        #self.add_comments('geom.py:scale v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:scale v{}'.format(version)
         return self.update(
                            ndimage.interpolation.zoom(
                                                       self.microstructure,

@@ -564,7 +565,7 @@ class Geom:
             unique, inverse = np.unique(arr, return_inverse=True)
             return unique[np.argmax(np.bincount(inverse))]
 
-        #self.add_comments('geom.py:clean v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:clean v{}'.format(version)
         return self.update(ndimage.filters.generic_filter(
                                                           self.microstructure,
                                                           mostFrequent,

@@ -579,7 +580,7 @@ class Geom:
         for i, oldID in enumerate(np.unique(self.microstructure)):
             renumbered = np.where(self.microstructure == oldID, i+1, renumbered)
 
-        #self.add_comments('geom.py:renumber v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:renumber v{}'.format(version)
         return self.update(renumbered)

@@ -614,7 +615,7 @@ class Geom:
         origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid
 
-        #self.add_comments('geom.py:rotate v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:rotate v{}'.format(version)
         return self.update(microstructure_in,origin=origin,rescale=True)

@@ -646,7 +647,7 @@ class Geom:
         canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = self.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
 
-        #self.add_comments('geom.py:canvas v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:canvas v{}'.format(version)
         return self.update(canvas,origin=self.origin+offset*self.size/self.grid,rescale=True)

@@ -666,5 +667,5 @@ class Geom:
         for from_ms,to_ms in zip(from_microstructure,to_microstructure):
             substituted[self.microstructure==from_ms] = to_ms
 
-        #self.add_comments('geom.py:substitute v{}'.format(version)
+        #ToDo: self.add_comments('geom.py:substitute v{}'.format(version)
         return self.update(substituted)

View File

@@ -26,7 +26,7 @@ class Symmetry:
         """
         if symmetry is not None and symmetry.lower() not in Symmetry.lattices:
-            raise KeyError('Symmetry/crystal system "{}" is unknown'.format(symmetry))
+            raise KeyError(f'Symmetry/crystal system "{symmetry}" is unknown')
 
         self.lattice = symmetry.lower() if isinstance(symmetry,str) else symmetry

@@ -40,7 +40,7 @@ class Symmetry:
     def __repr__(self):
         """Readable string."""
-        return '{}'.format(self.lattice)
+        return f'{self.lattice}'
 
     def __eq__(self, other):

@@ -348,7 +348,7 @@ class Lattice:
     def __repr__(self):
         """Report basic lattice information."""
-        return 'Bravais lattice {} ({} symmetry)'.format(self.lattice,self.symmetry)
+        return f'Bravais lattice {self.lattice} ({self.symmetry} symmetry)'
 
     # Kurdjomov--Sachs orientation relationship for fcc <-> bcc transformation

@@ -613,17 +613,17 @@ class Lattice:
         try:
             relationship = models[model]
         except KeyError :
-            raise KeyError('Orientation relationship "{}" is unknown'.format(model))
+            raise KeyError(f'Orientation relationship "{model}" is unknown')
 
         if self.lattice not in relationship['mapping']:
-            raise ValueError('Relationship "{}" not supported for lattice "{}"'.format(model,self.lattice))
+            raise ValueError(f'Relationship "{model}" not supported for lattice "{self.lattice}"')
 
         r = {'lattice':Lattice((set(relationship['mapping'])-{self.lattice}).pop()),   # target lattice
              'rotations':[] }
 
         myPlane_id = relationship['mapping'][self.lattice]
         otherPlane_id = (myPlane_id+1)%2
         myDir_id = myPlane_id +2
         otherDir_id = otherPlane_id +2
 
         for miller in np.hstack((relationship['planes'],relationship['directions'])):

View File

@@ -50,8 +50,7 @@ class Result:
             self.version_minor = f.attrs['DADF5-minor']
 
             if self.version_major != 0 or not 2 <= self.version_minor <= 6:
-                raise TypeError('Unsupported DADF5 version {}.{} '.format(self.version_major,
-                                                                          self.version_minor))
+                raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
 
             self.structured = 'grid' in f['geometry'].attrs.keys()

@@ -107,7 +106,7 @@ class Result:
         self.pick('increments',all_selected_increments)
 
         in_between = '' if len(all_selected_increments) < 3 else \
-                     ''.join(['\n{}\n ...\n'.format(inc) for inc in all_selected_increments[1:-2]])
+                     ''.join([f'\n{inc}\n ...\n' for inc in all_selected_increments[1:-2]])
 
         return util.srepr(first + in_between + last)

@@ -137,7 +136,7 @@ class Result:
         if what == 'increments':
             choice = [c if isinstance(c,str) and c.startswith('inc') else
-                      'inc{}'.format(c) for c in choice]
+                      f'inc{c}' for c in choice]
         elif what == 'times':
             what = 'increments'
             if choice == ['*']:

@@ -268,7 +267,7 @@ class Result:
     def rename(self,name_old,name_new):
         """
         Rename datasets.
 
         Parameters
         ----------
         name_old : str

@@ -412,21 +411,19 @@ class Result:
         message = ''
         with h5py.File(self.fname,'r') as f:
             for i in self.iterate('increments'):
-                message += '\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
+                message += f'\n{i} ({self.times[self.increments.index(i)]}s)\n'
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                     for oo in self.iterate(o):
-                        message += ' {}\n'.format(oo)
+                        message += f' {oo}\n'
                         for pp in self.iterate(p):
-                            message += ' {}\n'.format(pp)
+                            message += f' {pp}\n'
                             group = '/'.join([i,o[:-1],oo,pp])              # o[:-1]: plural/singular issue
                             for d in f[group].keys():
                                 try:
                                     dataset = f['/'.join([group,d])]
+                                    unit = f" / {dataset.attrs['Unit'].decode()}" if 'Unit' in dataset.attrs else ''
                                     description = dataset.attrs['Description'].decode()
-                                    if 'Unit' in dataset.attrs:
-                                        message += ' {} / ({}): {}\n'.format(d,dataset.attrs['Unit'].decode(),description)
-                                    else:
-                                        message += ' {}: {}\n'.format(d,description)
+                                    message += f' {d}{unit}: {description}\n'
                                 except KeyError:
                                     pass
         return message
@@ -528,10 +525,10 @@ class Result:
     def _add_absolute(x):
         return {
             'data': np.abs(x['data']),
-            'label': '|{}|'.format(x['label']),
+            'label': f'|{x["label"]}|',
             'meta': {
                 'Unit': x['meta']['Unit'],
-                'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
+                'Description': f"Absolute value of {x['label']} ({x['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -552,14 +549,14 @@ class Result:
     def _add_calculation(**kwargs):
         formula = kwargs['formula']
         for d in re.findall(r'#(.*?)#',formula):
-            formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
+            formula = formula.replace(f'#{d}#',f"kwargs['{d}']['data']")
 
         return {
             'data': eval(formula),
             'label': kwargs['label'],
             'meta': {
                 'Unit': kwargs['unit'],
-                'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
+                'Description': f"{kwargs['description']} (formula: {kwargs['formula']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }
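
The replace loop rewrites every '#label#' token in the user formula into a lookup on the datasets passed via kwargs before eval runs. A minimal reproduction of just that substitution step:

# Minimal reproduction of the '#label#' substitution above.
import re

formula = 'np.log(#F#) + #P#'
for d in re.findall(r'#(.*?)#', formula):
    formula = formula.replace(f'#{d}#', f"kwargs['{d}']['data']")
print(formula)    # np.log(kwargs['F']['data']) + kwargs['P']['data']
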
@@ -596,8 +593,9 @@ class Result:
             'label': 'sigma',
             'meta': {
                 'Unit': P['meta']['Unit'],
-                'Description': 'Cauchy stress calculated from {} ({}) and {} ({})'\
-                               .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                'Description': "Cauchy stress calculated "
+                               f"from {P['label']} ({P['meta']['Description']})"
+                               f" and {F['label']} ({F['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -620,10 +618,10 @@ class Result:
     def _add_determinant(T):
         return {
             'data': np.linalg.det(T['data']),
-            'label': 'det({})'.format(T['label']),
+            'label': f"det({T['label']})",
             'meta': {
                 'Unit': T['meta']['Unit'],
-                'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                'Description': f"Determinant of tensor {T['label']} ({T['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -644,10 +642,10 @@ class Result:
     def _add_deviator(T):
         return {
             'data': mechanics.deviatoric_part(T['data']),
-            'label': 's_{}'.format(T['label']),
+            'label': f"s_{T['label']}",
             'meta': {
                 'Unit': T['meta']['Unit'],
-                'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                'Description': f"Deviator of tensor {T['label']} ({T['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -675,10 +673,10 @@ class Result:
         return {
             'data': mechanics.eigenvalues(T_sym['data'])[:,p],
-            'label': 'lambda_{}({})'.format(eigenvalue,T_sym['label']),
+            'label': f"lambda_{eigenvalue}({T_sym['label']})",
             'meta' : {
                 'Unit': T_sym['meta']['Unit'],
-                'Description': '{} eigenvalue of {} ({})'.format(label,T_sym['label'],T_sym['meta']['Description']),
+                'Description': f"{label} eigenvalue of {T_sym['label']} ({T_sym['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -708,14 +706,14 @@ class Result:
         print('p',eigenvalue)
         return {
             'data': mechanics.eigenvectors(T_sym['data'])[:,p],
-            'label': 'v_{}({})'.format(eigenvalue,T_sym['label']),
+            'label': f"v_{eigenvalue}({T_sym['label']})",
             'meta' : {
                 'Unit': '1',
-                'Description': 'Eigenvector corresponding to {} eigenvalue of {} ({})'\
-                               .format(label,T_sym['label'],T_sym['meta']['Description']),
+                'Description': f"Eigenvector corresponding to {label} eigenvalue"
+                               f" of {T_sym['label']} ({T_sym['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }
 
     def add_eigenvector(self,T_sym,eigenvalue='max'):
         """
         Add eigenvector of symmetric tensor.

@@ -774,10 +772,10 @@ class Result:
     def _add_maximum_shear(T_sym):
         return {
             'data': mechanics.maximum_shear(T_sym['data']),
-            'label': 'max_shear({})'.format(T_sym['label']),
+            'label': f"max_shear({T_sym['label']})",
             'meta': {
                 'Unit': T_sym['meta']['Unit'],
-                'Description': 'Maximum shear component of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
+                'Description': f"Maximum shear component of {T_sym['label']} ({T_sym['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -800,11 +798,11 @@ class Result:
                 'stress'
         return {
-            'data': mechanics.Mises_strain(T_sym['data']) if t=='strain' else mechanics.Mises_stress(T_sym['data']),
-            'label': '{}_vM'.format(T_sym['label']),
+            'data': (mechanics.Mises_strain if t=='strain' else mechanics.Mises_stress)(T_sym['data']),
+            'label': f"{T_sym['label']}_vM",
             'meta': {
                 'Unit': T_sym['meta']['Unit'],
-                'Description': 'Mises equivalent {} of {} ({})'.format(t,T_sym['label'],T_sym['meta']['Description']),
+                'Description': f"Mises equivalent {t} of {T_sym['label']} ({T_sym['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -837,10 +835,10 @@ class Result:
         return {
             'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
-            'label': '|{}|_{}'.format(x['label'],o),
+            'label': f"|{x['label']}|_{o}",
             'meta': {
                 'Unit': x['meta']['Unit'],
-                'Description': '{}-norm of {} {} ({})'.format(o,t,x['label'],x['meta']['Description']),
+                'Description': f"{o}-norm of {t} {x['label']} ({x['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -866,19 +864,20 @@ class Result:
             'label': 'S',
             'meta': {
                 'Unit': P['meta']['Unit'],
-                'Description': '2. Piola-Kirchhoff stress calculated from {} ({}) and {} ({})'\
-                               .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                'Description': "2. Piola-Kirchhoff stress calculated "
+                               f"from {P['label']} ({P['meta']['Description']})"
+                               f" and {F['label']} ({F['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }
 
     def add_PK2(self,P='P',F='F'):
         """
-        Add second Piola-Kirchhoff calculated from first Piola-Kirchhoff stress and deformation gradient.
+        Add second Piola-Kirchhoff stress calculated from first Piola-Kirchhoff stress and deformation gradient.
 
         Parameters
         ----------
         P : str, optional
-            Label first Piola-Kirchhoff stress dataset. Defaults to P.
+            Label of first Piola-Kirchhoff stress dataset. Defaults to P.
         F : str, optional
             Label of deformation gradient dataset. Defaults to F.
@@ -928,10 +927,10 @@ class Result:
     def _add_rotational_part(F):
         return {
             'data': mechanics.rotational_part(F['data']),
-            'label': 'R({})'.format(F['label']),
+            'label': f"R({F['label']})",
             'meta': {
                 'Unit': F['meta']['Unit'],
-                'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
+                'Description': f"Rotational part of {F['label']} ({F['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
            }
        }

@@ -952,10 +951,10 @@ class Result:
     def _add_spherical(T):
         return {
             'data': mechanics.spherical_part(T['data']),
-            'label': 'p_{}'.format(T['label']),
+            'label': f"p_{T['label']}",
             'meta': {
                 'Unit': T['meta']['Unit'],
-                'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                'Description': f"Spherical component of tensor {T['label']} ({T['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -976,10 +975,10 @@ class Result:
     def _add_strain_tensor(F,t,m):
         return {
             'data': mechanics.strain_tensor(F['data'],t,m),
-            'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
+            'label': f"epsilon_{t}^{m}({F['label']})",
             'meta': {
                 'Unit': F['meta']['Unit'],
-                'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
+                'Description': f"Strain tensor of {F['label']} ({F['meta']['Description']})",
                 'Creator': inspect.stack()[0][3][1:]
             }
         }

@@ -1006,11 +1005,11 @@ class Result:
     @staticmethod
     def _add_stretch_tensor(F,t):
         return {
-            'data': mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
-            'label': '{}({})'.format(t,F['label']),
+            'data': (mechanics.left_stretch if t.upper() == 'V' else mechanics.right_stretch)(F['data']),
+            'label': f"{t}({F['label']})",
             'meta': {
                 'Unit': F['meta']['Unit'],
-                'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
+                'Description': '{} stretch tensor of {} ({})'.format('Left' if t.upper() == 'V' else 'Right',
                                                                     F['label'],F['meta']['Description']),
                 'Creator': inspect.stack()[0][3][1:]
             }

@@ -1046,7 +1045,7 @@ class Result:
                 r = func(**datasets_in,**args)
                 return [group,r]
             except Exception as err:
-                print('Error during calculation: {}.'.format(err))
+                print(f'Error during calculation: {err}.')
                 return None
@@ -1091,11 +1090,11 @@ class Result:
                     for l,v in result[1]['meta'].items():
                         dataset.attrs[l]=v.encode()
-                    creator = 'damask.Result.{} v{}'.format(dataset.attrs['Creator'].decode(),version)
+                    creator = f"damask.Result.{dataset.attrs['Creator'].decode()} v{version}"
                     dataset.attrs['Creator'] = creator.encode()
 
                 except (OSError,RuntimeError) as err:
-                    print('Could not add dataset: {}.'.format(err))
+                    print(f'Could not add dataset: {err}.')
                 lock.release()
 
         pool.close()

@@ -1128,7 +1127,7 @@ class Result:
         time_data = ET.SubElement(time, 'DataItem')
         time_data.attrib={'Format': 'XML',
                           'NumberType': 'Float',
-                          'Dimensions': '{}'.format(len(self.times))}
+                          'Dimensions': f'{len(self.times)}'}
         time_data.text = ' '.join(map(str,self.times))
 
         attributes = []

@@ -1169,7 +1168,7 @@ class Result:
             data_items[-1].attrib={'Format': 'HDF',
                                    'Precision': '8',
                                    'Dimensions': '{} {} {} 3'.format(*(self.grid+1))}
-            data_items[-1].text='{}:/{}/geometry/u_n'.format(os.path.split(self.fname)[1],inc)
+            data_items[-1].text=f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n'
 
             for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                 for oo in getattr(self,o):

@@ -1184,15 +1183,15 @@ class Result:
                             if (shape not in [(1,), (3,), (3,3)]) or dtype != np.float64: continue
 
                             attributes.append(ET.SubElement(grid, 'Attribute'))
-                            attributes[-1].attrib={'Name': '{}'.format(name.split('/',2)[2]),
+                            attributes[-1].attrib={'Name': name.split('/',2)[2],
                                                    'Center': 'Cell',
                                                    'AttributeType': 'Tensor'}
                             data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
                             data_items[-1].attrib={'Format': 'HDF',
                                                    'NumberType': 'Float',
-                                                   'Precision': '{}'.format(prec),
+                                                   'Precision': f'{prec}',
                                                    'Dimensions': '{} {} {} {}'.format(*self.grid,np.prod(shape))}
-                            data_items[-1].text='{}:{}'.format(os.path.split(self.fname)[1],name)
+                            data_items[-1].text=f'{os.path.split(self.fname)[1]}:{name}'
 
         with open(self.fname.with_suffix('.xdmf').name,'w') as f:
             f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())

@@ -1270,4 +1269,4 @@ class Result:
             u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
             v.add(u,'u')
 
-            v.write('{}_inc{}'.format(self.fname.stem,inc[3:].zfill(N_digits)))
+            v.write(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}')

View File

@@ -119,7 +119,7 @@ class Rotation:
             else:
                 raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
         else:
-            raise TypeError('Cannot rotate {}'.format(type(other)))
+            raise TypeError(f'Cannot rotate {type(other)}')
 
     def _standardize(self):

View File

@@ -4,6 +4,7 @@ import pandas as pd
 import numpy as np
 
 from . import version
+from . import util
 
 class Table:
     """Store spreadsheet-like data."""

@@ -24,7 +25,7 @@ class Table:
         """
         self.comments = [] if comments is None else [c for c in comments]
         self.data = pd.DataFrame(data=data)
-        self.shapes = shapes
+        self.shapes = { k:(v,) if isinstance(v,(np.int,int)) else v for k,v in shapes.items() }
         self._label_condensed()

@@ -33,7 +34,7 @@ class Table:
         labels = []
         for label,shape in self.shapes.items():
             size = int(np.prod(shape))
-            labels += ['{}{}'.format('' if size == 1 else '{}_'.format(i+1),label) for i in range(size)]
+            labels += [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
         self.data.columns = labels
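
The expansion above turns each shaped column into numbered scalar columns, adding an index prefix only when the shape holds more than one entry. A worked example with a scalar and a 3-vector:

# Worked example of the flat-label expansion above.
import numpy as np

shapes = {'CI': (1,), 'eu': (3,)}
labels = []
for label, shape in shapes.items():
    size = int(np.prod(shape))
    labels += [('' if size == 1 else f'{i+1}_') + label for i in range(size)]
print(labels)    # ['CI', '1_eu', '2_eu', '3_eu']
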
@@ -47,8 +48,7 @@ class Table:
     def _add_comment(self,label,shape,info):
         if info is not None:
-            c = '{}{}: {}'.format(label,' '+str(shape) if np.prod(shape,dtype=int) > 1 else '',info)
-            self.comments.append(c)
+            self.comments.append(f'{label}{" "+str(shape) if np.prod(shape,dtype=int) > 1 else ""}: {info}')
 
     @staticmethod

@@ -126,8 +126,6 @@ class Table:
             Filename or file for reading.
 
         """
-        shapes = {'eu':(3,), 'pos':(2,),
-                  'IQ':(1,), 'CI':(1,), 'ID':(1,), 'intensity':(1,), 'fit':(1,)}
         try:
             f = open(fname)
         except TypeError:

@@ -136,7 +134,7 @@ class Table:
         content = f.readlines()
 
-        comments = ['table.py:from_ang v {}'.format(version)]
+        comments = [f'table.py:from_ang v{version}']
         for line in content:
             if line.startswith('#'):
                 comments.append(line.strip())

@@ -144,8 +142,11 @@ class Table:
                 break
 
         data = np.loadtxt(content)
-        for c in range(data.shape[1]-10):
-            shapes['n/a_{}'.format(c+1)] = (1,)
+
+        shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
+        remainder = data.shape[1]-sum(shapes.values())
+        if remainder > 0:            # 3.8 can do: if (remainder := data.shape[1]-sum(shapes.values())) > 0
+            shapes['unknown'] = remainder
 
         return Table(data,shapes,comments)
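
The in-line comment already names the Python 3.8 spelling; with an assignment expression the temporary binds inside the condition. A sketch of that variant (3.8+ only, with a stand-in for data.shape[1]):

# Python 3.8+ variant hinted at in the comment above.
shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
n_cols = 12                        # stand-in for data.shape[1]
if (remainder := n_cols - sum(shapes.values())) > 0:
    shapes['unknown'] = remainder
print(shapes['unknown'])           # 2
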
@@ -234,7 +235,6 @@ class Table:
         """
         self.data.drop(columns=label,inplace=True)
         del self.shapes[label]

@@ -251,8 +251,7 @@ class Table:
         """
         self.data.rename(columns={label_old:label_new},inplace=True)
-        c = '{} => {}{}'.format(label_old,label_new,'' if info is None else ': {}'.format(info))
-        self.comments.append(c)
+        self.comments.append(f'{label_old} => {label_new}'+('' if info is None else f': {info}'))
         self.shapes = {(label if label != label_old else label_new):self.shapes[label] for label in self.shapes}

@@ -271,7 +270,7 @@ class Table:
         self._label_flat()
         self.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
         self._label_condensed()
-        self.comments.append('sorted by [{}]'.format(', '.join(labels)))
+        self.comments.append(f'sorted by [{", ".join(labels)}]')
 
     def append(self,other):

@@ -328,18 +327,18 @@ class Table:
         labels = []
         for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
             if self.shapes[l] == (1,):
-                labels.append('{}'.format(l))
+                labels.append(f'{l}')
             elif len(self.shapes[l]) == 1:
-                labels += ['{}_{}'.format(i+1,l) \
+                labels += [f'{i+1}_{l}' \
                            for i in range(self.shapes[l][0])]
             else:
-                labels += ['{}:{}_{}'.format('x'.join([str(d) for d in self.shapes[l]]),i+1,l) \
+                labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
                            for i in range(np.prod(self.shapes[l]))]
 
         if new_style:
-            header = ['# {}'.format(comment) for comment in self.comments]
+            header = [f'# {comment}' for comment in self.comments]
         else:
-            header = ['{} header'.format(len(self.comments)+1)] \
+            header = [f'{len(self.comments)+1} header'] \
                      + self.comments \
 
         try:

View File

@@ -53,7 +53,7 @@ class Test:
         self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
 
         self.parser = OptionParser(option_class=damask.extendableOption,
-                                   description = '{} (Test class version: {})'.format(self.description,damask.version),
+                                   description = f'{self.description} (Test class version: {damask.version})',
                                    usage = './test.py [options]')
         self.parser.add_option("-k", "--keep",
                                action = "store_true",

@@ -93,7 +93,7 @@ class Test:
         for variant,object in enumerate(self.variants):
             name = self.variantName(variant)
             if self.options.show:
-                logging.critical('{}: {}'.format(variant+1,name))
+                logging.critical(f'{variant+1}: {name}')
             elif self.options.select is not None \
                  and not (name in self.options.select or str(variant+1) in self.options.select):
                 pass

@@ -106,12 +106,12 @@ class Test:
                     self.postprocess(variant)
 
                     if self.options.update:
-                        if self.update(variant) != 0: logging.critical('update for "{}" failed.'.format(name))
+                        if self.update(variant) != 0: logging.critical(f'update for "{name}" failed.')
                     elif not (self.options.accept or self.compare(variant)):   # no update, do comparison
                         return variant+1                                       # return culprit
 
                 except Exception as e:
-                    logging.critical('exception during variant execution: "{}"'.format(str(e)))
+                    logging.critical(f'exception during variant execution: "{e}"')
                     return variant+1                                           # return culprit
         return 0

@@ -124,13 +124,13 @@ class Test:
         try:
             shutil.rmtree(self.dirCurrent())
         except FileNotFoundError:
-            logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
+            logging.warning(f'removal of directory "{self.dirCurrent()}" not possible...')
 
         try:
             os.mkdir(self.dirCurrent())
             return True
         except FileExistsError:
-            logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent()))
+            logging.critical(f'creation of directory "{self.dirCurrent()}" failed.')
             return False
 
     def prepareAll(self):
@@ -211,7 +211,7 @@ class Test:
         try:
             shutil.copy2(source,target)
         except FileNotFoundError:
-            logging.critical('error copying {} to {}'.format(source,target))
+            logging.critical(f'error copying {source} to {target}')
             raise FileNotFoundError

@@ -222,7 +222,7 @@ class Test:
             try:
                 shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
             except FileNotFoundError:
-                logging.critical('Reference2Current: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Reference2Current: Unable to copy file "{f}"')
                 raise FileNotFoundError

@@ -235,7 +235,7 @@ class Test:
                 shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i]))
             except FileNotFoundError:
                 logging.error(os.path.join(source,f))
-                logging.critical('Base2Current: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Base2Current: Unable to copy file "{f}"')
                 raise FileNotFoundError

@@ -246,7 +246,7 @@ class Test:
             try:
                 shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i]))
             except FileNotFoundError:
-                logging.critical('Current2Reference: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Current2Reference: Unable to copy file "{f}"')
                 raise FileNotFoundError

@@ -257,7 +257,7 @@ class Test:
             try:
                 shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i]))
             except FileNotFoundError:
-                logging.critical('Proof2Current: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Proof2Current: Unable to copy file "{f}"')
                 raise FileNotFoundError

@@ -267,7 +267,7 @@ class Test:
             try:
                 shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
             except FileNotFoundError:
-                logging.critical('Current2Current: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Current2Current: Unable to copy file "{f}"')
                 raise FileNotFoundError
@@ -302,9 +302,7 @@ class Test:
             max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
             refArrayNonZero = refArrayNonZero[curArray.nonzero()]
             curArray = curArray[curArray.nonzero()]
-            print(' ********\n * maximum relative error {} between {} and {}\n ********'.format(max_err,
-                                                                                                refArrayNonZero[max_loc],
-                                                                                                curArray[max_loc]))
+            print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********')
             return max_err
         else:
             raise Exception('mismatch in array size to compare')

@@ -350,7 +348,7 @@ class Test:
         for i in range(dataLength):
             if headings0[i]['shape'] != headings1[i]['shape']:
-                raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label']))
+                raise Exception(f"shape mismatch between {headings0[i]['label']} and {headings1[i]['label']}")
             shape[i] = headings0[i]['shape']
             for j in range(np.shape(shape[i])[0]):
                 length[i] *= shape[i][j]

@@ -358,9 +356,7 @@ class Test:
             for j in range(np.shape(normShape[i])[0]):
                 normLength[i] *= normShape[i][j]
         else:
-            raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0),
-                                                                                         len(headings1),
-                                                                                         len(normHeadings)))
+            raise Exception(f'trying to compare {len(headings0)} with {len(headings1)} normed by {len(normHeadings)} data sets')
 
         table0 = damask.ASCIItable(name=file0,readonly=True)
         table0.head_read()
@@ -372,11 +368,11 @@ class Test:
             key1 = ('1_' if length[i]>1 else '') + headings1[i]['label']
             normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label']
             if key0 not in table0.labels(raw = True):
-                raise Exception('column "{}" not found in first table...\n'.format(key0))
+                raise Exception(f'column "{key0}" not found in first table...')
             elif key1 not in table1.labels(raw = True):
-                raise Exception('column "{}" not found in second table...\n'.format(key1))
+                raise Exception(f'column "{key1}" not found in second table...')
             elif normKey not in table0.labels(raw = True):
-                raise Exception('column "{}" not found in first table...\n'.format(normKey))
+                raise Exception(f'column "{normKey}" not found in first table...')
             else:
                 column[0][i] = table0.label_index(key0)
                 column[1][i] = table1.label_index(key1)

@@ -404,9 +400,9 @@ class Test:
                     norm[i] = [1.0 for j in range(line0-len(skipLines))]
                     absTol[i] = True
                     if perLine:
-                        logging.warning('At least one norm of "{}" in first table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
+                        logging.warning(f"At least one norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
                     else:
-                        logging.warning('Maximum norm of "{}" in first table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
+                        logging.warning(f"Maximum norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
 
         line1 = 0
         while table1.data_read():                                          # read next data line of ASCII table

@@ -418,18 +414,14 @@ class Test:
                                        norm[i][line1-len(skipLines)])
             line1 +=1
 
-        if (line0 != line1): raise Exception('found {} lines in first table but {} in second table'.format(line0,line1))
+        if (line0 != line1): raise Exception(f'found {line0} lines in first table but {line1} in second table')
 
         logging.info(' ********')
         for i in range(dataLength):
             if absTol[i]:
-                logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i],
-                                                                                     headings0[i]['label'],
-                                                                                     headings1[i]['label']))
+                logging.info(f" * maximum absolute error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
             else:
-                logging.info(' * maximum relative error {} between {} and {}'.format(maxError[i],
-                                                                                     headings0[i]['label'],
-                                                                                     headings1[i]['label']))
+                logging.info(f" * maximum relative error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
         logging.info(' ********')
         return maxError
@ -480,8 +472,8 @@ class Test:
normedDelta = np.where(normBy>preFilter,delta/normBy,0.0) normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
mean = np.amax(np.abs(np.mean(normedDelta,0))) mean = np.amax(np.abs(np.mean(normedDelta,0)))
std = np.amax(np.std(normedDelta,0)) std = np.amax(np.std(normedDelta,0))
logging.info('mean: {:f}'.format(mean)) logging.info(f'mean: {mean:f}')
logging.info('std: {:f}'.format(std)) logging.info(f'std: {std:f}')
return (mean<meanTol) & (std < stdTol) return (mean<meanTol) & (std < stdTol)
@ -521,7 +513,7 @@ class Test:
for i,(table,labels) in enumerate(zip(tables,columns)): for i,(table,labels) in enumerate(zip(tables,columns)):
if np.any(dimensions != [np.prod(table.shapes[c]) for c in labels]): # check data object consistency if np.any(dimensions != [np.prod(table.shapes[c]) for c in labels]): # check data object consistency
logging.critical('Table {} differs in data layout.'.format(files[i])) logging.critical(f'Table {files[i]} differs in data layout.')
return False return False
data.append(np.hstack(list(table.get(label) for label in labels)).astype(np.float)) # store data.append(np.hstack(list(table.get(label) for label in labels)).astype(np.float)) # store
@ -537,19 +529,19 @@ class Test:
for i in range(len(data)): for i in range(len(data)):
data[i] /= maximum # normalize each table data[i] /= maximum # normalize each table
logging.info('shape of data {}: {}'.format(i,data[i].shape)) logging.info(f'shape of data {i}: {data[i].shape}')
if debug: if debug:
violators = np.absolute(data[0]-data[1]) > atol + rtol*np.absolute(data[1]) violators = np.absolute(data[0]-data[1]) > atol + rtol*np.absolute(data[1])
logging.info('shape of violators: {}'.format(violators.shape)) logging.info(f'shape of violators: {violators.shape}')
for j,culprits in enumerate(violators): for j,culprits in enumerate(violators):
goodguys = np.logical_not(culprits) goodguys = np.logical_not(culprits)
if culprits.any(): if culprits.any():
logging.info('{} has {}'.format(j,np.sum(culprits))) logging.info(f'{j} has {np.sum(culprits)}')
logging.info('deviation: {}'.format(np.absolute(data[0][j]-data[1][j])[culprits])) logging.info(f'deviation: {np.absolute(data[0][j]-data[1][j])[culprits]}')
logging.info('data : {}'.format(np.absolute(data[1][j])[culprits])) logging.info(f'data : {np.absolute(data[1][j])[culprits]}')
logging.info('deviation: {}'.format(np.absolute(data[0][j]-data[1][j])[goodguys])) logging.info(f'deviation: {np.absolute(data[0][j]-data[1][j])[goodguys]}')
logging.info('data : {}'.format(np.absolute(data[1][j])[goodguys])) logging.info(f'data : {np.absolute(data[1][j])[goodguys]}')
allclose = True # start optimistic allclose = True # start optimistic
for i in range(1,len(data)): for i in range(1,len(data)):
@ -588,12 +580,12 @@ class Test:
if culprit == 0: if culprit == 0:
count = len(self.variants) if self.options.select is None else len(self.options.select) count = len(self.variants) if self.options.select is None else len(self.options.select)
msg = 'Test passed.' if count == 1 else 'All {} tests passed.'.format(count) msg = 'Test passed.' if count == 1 else f'All {count} tests passed.'
elif culprit == -1: elif culprit == -1:
msg = 'Warning: could not start test...' msg = 'Warning: could not start test...'
ret = 0 ret = 0
else: else:
msg = 'Test "{}" failed.'.format(self.variantName(culprit-1)) msg = f'Test "{self.variantName(culprit-1)}" failed.'
logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n') logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n')
return ret return ret
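The change above is representative of the whole merge: positional str.format calls become f-strings, so the template and its data can no longer drift apart. A minimal standalone illustration (not taken from the DAMASK sources):

count = 3
variant = 'large_grid'

msg_old = 'Test "{}" failed after {} runs.'.format(variant, count)  # arguments trail the template
msg_new = f'Test "{variant}" failed after {count} runs.'            # expressions inlined at use

assert msg_old == msg_new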
View File
@ -85,7 +85,7 @@ class VTK:
geom = vtk.vtkUnstructuredGrid() geom = vtk.vtkUnstructuredGrid()
geom.SetPoints(vtk_nodes) geom.SetPoints(vtk_nodes)
geom.SetCells(eval('vtk.VTK_{}'.format(cell_type.split('_',1)[-1].upper())),cells) geom.SetCells(eval(f'vtk.VTK_{cell_type.split("_",1)[-1].upper()}'),cells)
return VTK(geom) return VTK(geom)
@ -140,7 +140,7 @@ class VTK:
elif dataset_type.lower().endswith('polydata'): elif dataset_type.lower().endswith('polydata'):
geom = reader.GetPolyDataOutput() geom = reader.GetPolyDataOutput()
else: else:
raise TypeError('Unknown dataset type for vtk file {}'.format(dataset_type)) raise TypeError(f'Unknown dataset type {dataset_type} for vtk file')
else: else:
if ext == '.vtr': if ext == '.vtr':
reader = vtk.vtkXMLRectilinearGridReader() reader = vtk.vtkXMLRectilinearGridReader()
@ -149,7 +149,7 @@ class VTK:
elif ext == '.vtp': elif ext == '.vtp':
reader = vtk.vtkXMLPolyDataReader() reader = vtk.vtkXMLPolyDataReader()
else: else:
raise TypeError('Unknown file extension {}'.format(ext)) raise TypeError(f'Unknown file extension {ext}')
reader.SetFileName(fname) reader.SetFileName(fname)
reader.Update() reader.Update()
@ -178,7 +178,7 @@ class VTK:
default_ext = writer.GetDefaultFileExtension() default_ext = writer.GetDefaultFileExtension()
ext = Path(fname).suffix ext = Path(fname).suffix
if ext and ext != '.'+default_ext: if ext and ext != '.'+default_ext:
raise ValueError('Given extension {} does not match default .{}'.format(ext,default_ext)) raise ValueError(f'Given extension {ext} does not match default .{default_ext}')
writer.SetFileName(str(Path(fname).with_suffix('.'+default_ext))) writer.SetFileName(str(Path(fname).with_suffix('.'+default_ext)))
writer.SetCompressorTypeToZLib() writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary() writer.SetDataModeToBinary()
@ -214,7 +214,7 @@ class VTK:
def __repr__(self): def __repr__(self):
"""ASCII representation of the VTK data.""" """ASCII representation of the VTK data."""
writer = vtk.vtkDataSetWriter() writer = vtk.vtkDataSetWriter()
writer.SetHeader('# DAMASK.VTK v{}'.format(version)) writer.SetHeader(f'# DAMASK.VTK v{version}')
writer.WriteToOutputStringOn() writer.WriteToOutputStringOn()
writer.SetInputData(self.geom) writer.SetInputData(self.geom)
writer.Write() writer.Write()
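The SetCells call above looks the VTK cell-type constant up by name via eval on a constructed string. A sketch of the same lookup using getattr, assuming the vtk bindings expose constants such as vtk.VTK_HEXAHEDRON (they do for the standard cell types):

import vtk

def vtk_cell_constant(cell_type):
    # 'vtk_HEXAHEDRON' or 'hexahedron' -> vtk.VTK_HEXAHEDRON; avoids eval on a built string
    return getattr(vtk, f'VTK_{cell_type.split("_",1)[-1].upper()}')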
View File
@ -1,6 +1,7 @@
import re import re
import os import os
from damask import util
class Section(): class Section():
def __init__(self,data = {'__order__':[]},part = ''): def __init__(self,data = {'__order__':[]},part = ''):
@ -27,7 +28,7 @@ class Section():
if multiKey not in self.parameters: self.parameters[multiKey] = [] if multiKey not in self.parameters: self.parameters[multiKey] = []
if multiKey not in self.parameters['__order__']: self.parameters['__order__'] += [multiKey] if multiKey not in self.parameters['__order__']: self.parameters['__order__'] += [multiKey]
self.parameters[multiKey] += [[item] for item in data] if isinstance(data, list) else [[data]] self.parameters[multiKey] += [[item] for item in data] if isinstance(data, list) else [[data]]
def data(self): def data(self):
return self.parameters return self.parameters
@ -42,27 +43,27 @@ class Crystallite(Section):
def __init__(self,data = {'__order__':[]}): def __init__(self,data = {'__order__':[]}):
"""New material.config <crystallite> section.""" """New material.config <crystallite> section."""
Section.__init__(self,data) Section.__init__(self,data)
class Phase(Section): class Phase(Section):
def __init__(self,data = {'__order__':[]}): def __init__(self,data = {'__order__':[]}):
"""New material.config <Phase> section.""" """New material.config <Phase> section."""
Section.__init__(self,data) Section.__init__(self,data)
class Microstructure(Section): class Microstructure(Section):
def __init__(self,data = {'__order__':[]}): def __init__(self,data = {'__order__':[]}):
"""New material.config <microstructure> section.""" """New material.config <microstructure> section."""
Section.__init__(self,data) Section.__init__(self,data)
class Texture(Section): class Texture(Section):
def __init__(self,data = {'__order__':[]}): def __init__(self,data = {'__order__':[]}):
"""New material.config <texture> section.""" """New material.config <texture> section."""
Section.__init__(self,data) Section.__init__(self,data)
def add_component(self,theType,properties): def add_component(self,theType,properties):
scatter = properties['scatter'] if 'scatter' in list(map(str.lower,list(properties.keys()))) else 0.0 scatter = properties['scatter'] if 'scatter' in list(map(str.lower,list(properties.keys()))) else 0.0
fraction = properties['fraction'] if 'fraction' in list(map(str.lower,list(properties.keys()))) else 1.0 fraction = properties['fraction'] if 'fraction' in list(map(str.lower,list(properties.keys()))) else 1.0
@ -81,45 +82,45 @@ class Texture(Section):
) )
) )
class Material(): class Material():
"""Read, manipulate, and write material.config files.""" """Read, manipulate, and write material.config files."""
def __init__(self,verbose=True): def __init__(self,verbose=True):
"""Generates ordered list of parts.""" """Generates ordered list of parts."""
self.parts = [ self.parts = [
'homogenization', 'homogenization',
'crystallite', 'crystallite',
'phase', 'phase',
'texture', 'texture',
'microstructure', 'microstructure',
] ]
self.data = {\ self.data = {
'homogenization': {'__order__': []}, 'homogenization': {'__order__': []},
'microstructure': {'__order__': []}, 'microstructure': {'__order__': []},
'crystallite': {'__order__': []}, 'crystallite': {'__order__': []},
'phase': {'__order__': []}, 'phase': {'__order__': []},
'texture': {'__order__': []}, 'texture': {'__order__': []},
} }
self.verbose = verbose self.verbose = verbose
def __repr__(self): def __repr__(self):
"""Returns current data structure in material.config format.""" """Returns current data structure in material.config format."""
me = [] me = []
for part in self.parts: for part in self.parts:
if self.verbose: print('processing <{}>'.format(part)) if self.verbose: print(f'processing <{part}>')
me += ['', me += ['',
'#'*100, '#'*100,
'<{}>'.format(part), f'<{part}>',
'#'*100, '#'*100,
] ]
for section in self.data[part]['__order__']: for section in self.data[part]['__order__']:
me += ['[{}] {}'.format(section,'#'+'-'*max(0,96-len(section)))] me += [f'[{section}] {"#"+"-"*max(0,96-len(section))}']
for key in self.data[part][section]['__order__']: for key in self.data[part][section]['__order__']:
if key.startswith('(') and key.endswith(')'): # multiple (key) if key.startswith('(') and key.endswith(')'): # multiple (key)
me += ['{}\t{}'.format(key,' '.join(values)) for values in self.data[part][section][key]] me += [f'{key}\t{" ".join(values)}' for values in self.data[part][section][key]]
else: # plain key else: # plain key
me += ['{}\t{}'.format(key,' '.join(map(str,self.data[part][section][key])))] me += [f'{key}\t{util.srepr(self.data[part][section][key]," ")}']
return '\n'.join(me) + '\n' return '\n'.join(me) + '\n'
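util.srepr is used above as a join that also casts each item to str. A minimal stand-in with the behaviour its call sites assume (an assumption for illustration, not the DAMASK implementation):

def srepr(arg, glue='\n'):
    # join an iterable item by item; fall back to str() for a scalar
    if hasattr(arg, '__iter__') and not isinstance(arg, str):
        return glue.join(str(x) for x in arg)
    return str(arg)

assert srepr([1, 2.5, 'x'], ' ') == '1 2.5 x'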
def parse(self, part=None, sections=[], content=None): def parse(self, part=None, sections=[], content=None):
@ -158,7 +159,7 @@ class Material():
self.data[part][name_section][items[0]].append(items[1:]) self.data[part][name_section][items[0]].append(items[1:])
else: # plain key else: # plain key
self.data[part][name_section][items[0]] = items[1:] self.data[part][name_section][items[0]] = items[1:]
def read(self,filename=None): def read(self,filename=None):
@ -174,20 +175,20 @@ class Material():
recursiveRead(match.group(1) if match.group(1).startswith('/') else recursiveRead(match.group(1) if match.group(1).startswith('/') else
os.path.normpath(os.path.join(os.path.dirname(filename),match.group(1)))) os.path.normpath(os.path.join(os.path.dirname(filename),match.group(1))))
return result return result
c = recursiveRead(filename) c = recursiveRead(filename)
for p in self.parts: for p in self.parts:
self.parse(part=p, content=c) self.parse(part=p, content=c)
def write(self,filename='material.config', overwrite=False): def write(self,filename='material.config', overwrite=False):
"""Write to material.config.""" """Write to material.config."""
i = 0 i = 0
outname = filename outname = filename
while os.path.exists(outname) and not overwrite: while os.path.exists(outname) and not overwrite:
i += 1 i += 1
outname = '{}_{}'.format(filename,i) outname = f'{filename}_{i}'
if self.verbose: print('Writing material data to {}'.format(outname)) if self.verbose: print(f'Writing material data to {outname}')
with open(outname,'w') as f: with open(outname,'w') as f:
f.write(str(self)) f.write(str(self))
return outname return outname
@ -196,11 +197,11 @@ class Material():
"""Add Update.""" """Add Update."""
part = part.lower() part = part.lower()
section = section.lower() section = section.lower()
if part not in self.parts: raise Exception('invalid part {}'.format(part)) if part not in self.parts: raise Exception(f'invalid part {part}')
if not isinstance(initialData, dict): if not isinstance(initialData, dict):
initialData = initialData.data() initialData = initialData.data()
if section not in self.data[part]: self.data[part]['__order__'] += [section] if section not in self.data[part]: self.data[part]['__order__'] += [section]
if section in self.data[part] and merge: if section in self.data[part] and merge:
for existing in self.data[part][section]['__order__']: # replace existing for existing in self.data[part][section]['__order__']: # replace existing
@ -215,9 +216,9 @@ class Material():
self.data[part][section]['__order__'] += [new] self.data[part][section]['__order__'] += [new]
else: else:
self.data[part][section] = initialData self.data[part][section] = initialData
def add_microstructure(self, section='', def add_microstructure(self, section='',
components={}, # dict of phase,texture, and fraction lists components={}, # dict of phase,texture, and fraction lists
@ -226,7 +227,7 @@ class Material():
microstructure = Microstructure() microstructure = Microstructure()
# make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python) # make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python)
components=dict((k.lower(), v) for k,v in components.items()) components=dict((k.lower(), v) for k,v in components.items())
for key in ['phase','texture','fraction','crystallite']: for key in ['phase','texture','fraction','crystallite']:
if isinstance(components[key], list): if isinstance(components[key], list):
for i, x in enumerate(components[key]): for i, x in enumerate(components[key]):
@ -239,7 +240,7 @@ class Material():
components[key] = [components[key].lower()] components[key] = [components[key].lower()]
except AttributeError: except AttributeError:
components[key] = [components[key]] components[key] = [components[key]]
for (phase,texture,fraction,crystallite) in zip(components['phase'],components['texture'], for (phase,texture,fraction,crystallite) in zip(components['phase'],components['texture'],
components['fraction'],components['crystallite']): components['fraction'],components['crystallite']):
microstructure.add_multiKey('constituent','phase %i\ttexture %i\tfraction %g\ncrystallite %i'%( microstructure.add_multiKey('constituent','phase %i\ttexture %i\tfraction %g\ncrystallite %i'%(
@ -247,19 +248,19 @@ class Material():
self.data['texture']['__order__'].index(texture)+1, self.data['texture']['__order__'].index(texture)+1,
fraction, fraction,
self.data['crystallite']['__order__'].index(crystallite)+1)) self.data['crystallite']['__order__'].index(crystallite)+1))
self.add_section('microstructure',section,microstructure) self.add_section('microstructure',section,microstructure)
def change_value(self, part=None, def change_value(self, part=None,
section=None, section=None,
key=None, key=None,
value=None): value=None):
if not isinstance(value,list): if not isinstance(value,list):
if not isinstance(value,str): if not isinstance(value,str):
value = '%s'%value value = '%s'%value
value = [value] value = [value]
newlen = len(value) newlen = len(value)
oldval = self.data[part.lower()][section.lower()][key.lower()] oldval = self.data[part.lower()][section.lower()][key.lower()]
oldlen = len(oldval) oldlen = len(oldval)
print('changing %s:%s:%s from %s to %s '%(part.lower(),section.lower(),key.lower(),oldval,value)) print('changing %s:%s:%s from %s to %s '%(part.lower(),section.lower(),key.lower(),oldval,value))
View File
@ -233,7 +233,7 @@ def cell_coord0_gridSizeOrigin(coord0,ordered=True):
origin[_np.where(grid==1)] = 0.0 origin[_np.where(grid==1)] = 0.0
if grid.prod() != len(coord0): if grid.prod() != len(coord0):
raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid)) raise ValueError(f'Data count {len(coord0)} does not match grid {grid}.')
start = origin + delta*.5 start = origin + delta*.5
end = origin - delta*.5 + size end = origin - delta*.5 + size
@ -384,7 +384,7 @@ def node_coord0_gridSizeOrigin(coord0,ordered=True):
origin = mincorner origin = mincorner
if (grid+1).prod() != len(coord0): if (grid+1).prod() != len(coord0):
raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid)) raise ValueError(f'Data count {len(coord0)} does not match grid {grid}.')
atol = _np.max(size)*5e-2 atol = _np.max(size)*5e-2
if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \ if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \
View File
@ -25,7 +25,7 @@ class Marc:
def library_path(self): def library_path(self):
path_MSC = Environment().options['MSC_ROOT'] path_MSC = Environment().options['MSC_ROOT']
path_lib = Path('{}/mentat{}/shlib/linux64'.format(path_MSC,self.version)) path_lib = Path(f'{path_MSC}/mentat{self.version}/shlib/linux64')
return path_lib if path_lib.is_dir() else None return path_lib if path_lib.is_dir() else None
@ -34,7 +34,7 @@ class Marc:
def tools_path(self): def tools_path(self):
path_MSC = Environment().options['MSC_ROOT'] path_MSC = Environment().options['MSC_ROOT']
path_tools = Path('{}/marc{}/tools'.format(path_MSC,self.version)) path_tools = Path(f'{path_MSC}/marc{self.version}/tools')
return path_tools if path_tools.is_dir() else None return path_tools if path_tools.is_dir() else None
@ -51,21 +51,21 @@ class Marc:
env = Environment() env = Environment()
user = env.root_dir/Path('src/DAMASK_marc{}'.format(self.version)).with_suffix('.f90' if compile else '.marc') usersub = env.root_dir/Path(f'src/DAMASK_marc{self.version}').with_suffix('.f90' if compile else '.marc')
if not user.is_file(): if not usersub.is_file():
raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),user)) raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),usersub))
# Define options [see Marc Installation and Operation Guide, pp 23] # Define options [see Marc Installation and Operation Guide, pp 23]
script = 'run_damask_{}mp'.format(optimization) script = f'run_damask_{optimization}mp'
cmd = str(self.tools_path/Path(script)) + \ cmd = str(self.tools_path/Path(script)) + \
' -jid ' + model + '_' + job + \ ' -jid ' + model + '_' + job + \
' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no' ' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no'
if compile: cmd += ' -u ' + str(user) + ' -save y' if compile: cmd += ' -u ' + str(usersub) + ' -save y'
else: cmd += ' -prog ' + str(user.with_suffix('')) else: cmd += ' -prog ' + str(usersub.with_suffix(''))
print('job submission {} compilation: {}'.format('with' if compile else 'without',user)) print('job submission {} compilation: {}'.format(('with' if compile else 'without'),usersub))
if logfile: log = open(logfile, 'w') if logfile: log = open(logfile, 'w')
print(cmd) print(cmd)
process = subprocess.Popen(shlex.split(cmd),stdout = log,stderr = subprocess.STDOUT) process = subprocess.Popen(shlex.split(cmd),stdout = log,stderr = subprocess.STDOUT)
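For illustration, the assembled command line takes roughly this shape; all values below are hypothetical (the real ones come from Environment() and the MSC_ROOT option):

optimization, model, job = 'h', 'tensile', 'run1'
tools = '/opt/msc/marc2019.1/tools'
cmd = (f'{tools}/run_damask_{optimization}mp'
       f' -jid {model}_{job}'
       ' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no')
print(cmd)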
View File
@ -125,7 +125,7 @@ def execute(cmd,
stdout = stdout.decode('utf-8').replace('\x08','') stdout = stdout.decode('utf-8').replace('\x08','')
stderr = stderr.decode('utf-8').replace('\x08','') stderr = stderr.decode('utf-8').replace('\x08','')
if process.returncode != 0: if process.returncode != 0:
raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode)) raise RuntimeError(f'{cmd} failed with returncode {process.returncode}')
return stdout, stderr return stdout, stderr
@ -156,7 +156,7 @@ def show_progress(iterable,N_iter=None,prefix='',bar_length=50):
def scale_to_coprime(v): def scale_to_coprime(v):
"""Scale vector to co-prime (relatively prime) integers.""" """Scale vector to co-prime (relatively prime) integers."""
MAX_DENOMINATOR = 1000 MAX_DENOMINATOR = 1000000
def get_square_denominator(x): def get_square_denominator(x):
"""Denominator of the square of a number.""" """Denominator of the square of a number."""
@ -166,10 +166,13 @@ def scale_to_coprime(v):
"""Least common multiple.""" """Least common multiple."""
return a * b // np.gcd(a, b) return a * b // np.gcd(a, b)
denominators = [int(get_square_denominator(i)) for i in v] m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int)
s = reduce(lcm, denominators) ** 0.5 m = m//reduce(np.gcd,m)
m = (np.array(v)*s).astype(np.int)
return m//reduce(np.gcd,m) if not np.allclose(v[v.nonzero()]/m[v.nonzero()],v[v.nonzero()][0]/m[m.nonzero()][0]):
raise ValueError(f'Invalid result {m} for input {v}. Insufficient precision?')
return m
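The reworked routine multiplies the vector by the square root of the least common multiple of the squared denominators, divides out the remaining gcd, and only then checks that the integer result still has the ratios of the input. A self-contained sketch of that algorithm (mirroring the code above, not a verified copy of it):

from fractions import Fraction
from functools import reduce
import numpy as np

def scale_to_coprime(v, max_denominator=1000000):
    def square_denominator(x):
        # denominator of x**2 as a limited-precision rational
        return Fraction(x**2).limit_denominator(max_denominator).denominator
    def lcm(a, b):
        return a * b // np.gcd(a, b)
    v = np.asarray(v, dtype=float)
    m = (v * reduce(lcm, (int(square_denominator(x)) for x in v)) ** 0.5).astype(int)
    m = m // reduce(np.gcd, m)
    nz = v != 0
    if not np.allclose(v[nz] / m[nz], v[nz][0] / m[nz][0]):
        raise ValueError(f'invalid result {m} for input {v}, insufficient precision?')
    return m

print(scale_to_coprime([2/3, 1/2, 1/3]))   # [4 3 2]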
#################################################################################################### ####################################################################################################
@ -223,21 +226,20 @@ class _ProgressBar:
self.start_time = datetime.datetime.now() self.start_time = datetime.datetime.now()
self.last_fraction = 0.0 self.last_fraction = 0.0
sys.stderr.write('{} {} 0% ETA n/a'.format(self.prefix, '░'*self.bar_length)) sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
sys.stderr.flush() sys.stderr.flush()
def update(self,iteration): def update(self,iteration):
fraction = (iteration+1) / self.total fraction = (iteration+1) / self.total
filled_length = int(self.bar_length * fraction)
if int(self.bar_length * fraction) > int(self.bar_length * self.last_fraction): if filled_length > int(self.bar_length * self.last_fraction):
bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
delta_time = datetime.datetime.now() - self.start_time delta_time = datetime.datetime.now() - self.start_time
remaining_time = (self.total - (iteration+1)) * delta_time / (iteration+1) remaining_time = (self.total - (iteration+1)) * delta_time / (iteration+1)
remaining_time -= datetime.timedelta(microseconds=remaining_time.microseconds) # remove μs remaining_time -= datetime.timedelta(microseconds=remaining_time.microseconds) # remove μs
sys.stderr.write(f'\r{self.prefix} {bar} {fraction:>4.0%} ETA {remaining_time}')
filled_length = int(self.bar_length * fraction)
bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
sys.stderr.write('\r{} {} {:>4.0%} ETA {}'.format(self.prefix, bar, fraction, remaining_time))
sys.stderr.flush() sys.stderr.flush()
self.last_fraction = fraction self.last_fraction = fraction
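The reordered update only redraws when the integer fill length grows; the ETA is the elapsed time scaled by the ratio of remaining to completed iterations, with sub-second noise stripped. The arithmetic in isolation, with hypothetical numbers:

import datetime

total, iteration = 200, 49                     # 50 of 200 iterations done
delta_time = datetime.timedelta(seconds=10)    # elapsed so far (made up)

remaining = (total - (iteration+1)) * delta_time / (iteration+1)
remaining -= datetime.timedelta(microseconds=remaining.microseconds)
print(remaining)                               # 0:00:30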
@ -249,7 +251,7 @@ class _ProgressBar:
class bcolors: class bcolors:
""" """
ASCII Colors. ASCII colors.
https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py
https://stackoverflow.com/questions/287871 https://stackoverflow.com/questions/287871
View File
@ -6,6 +6,7 @@ import numpy as np
from damask import Geom from damask import Geom
from damask import Rotation from damask import Rotation
from damask import util
def geom_equal(a,b): def geom_equal(a,b):
@ -85,8 +86,8 @@ class TestGeom:
def test_mirror(self,default,update,reference_dir,directions,reflect): def test_mirror(self,default,update,reference_dir,directions,reflect):
modified = copy.deepcopy(default) modified = copy.deepcopy(default)
modified.mirror(directions,reflect) modified.mirror(directions,reflect)
tag = 'directions={}_reflect={}'.format('-'.join(directions),reflect) tag = f'directions={"-".join(directions)}_reflect={reflect}'
reference = os.path.join(reference_dir,'mirror_{}.geom'.format(tag)) reference = os.path.join(reference_dir,f'mirror_{tag}.geom')
if update: modified.to_file(reference) if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference)) assert geom_equal(modified,Geom.from_file(reference))
@ -94,8 +95,8 @@ class TestGeom:
def test_clean(self,default,update,reference_dir,stencil): def test_clean(self,default,update,reference_dir,stencil):
modified = copy.deepcopy(default) modified = copy.deepcopy(default)
modified.clean(stencil) modified.clean(stencil)
tag = 'stencil={}'.format(stencil) tag = f'stencil={stencil}'
reference = os.path.join(reference_dir,'clean_{}.geom'.format(tag)) reference = os.path.join(reference_dir,f'clean_{tag}.geom')
if update: modified.to_file(reference) if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference)) assert geom_equal(modified,Geom.from_file(reference))
@ -111,8 +112,8 @@ class TestGeom:
def test_scale(self,default,update,reference_dir,grid): def test_scale(self,default,update,reference_dir,grid):
modified = copy.deepcopy(default) modified = copy.deepcopy(default)
modified.scale(grid) modified.scale(grid)
tag = 'grid={}'.format('-'.join([str(x) for x in grid])) tag = f'grid={util.srepr(grid,"-")}'
reference = os.path.join(reference_dir,'scale_{}.geom'.format(tag)) reference = os.path.join(reference_dir,f'scale_{tag}.geom')
if update: modified.to_file(reference) if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference)) assert geom_equal(modified,Geom.from_file(reference))
@ -150,8 +151,8 @@ class TestGeom:
def test_rotate(self,default,update,reference_dir,Eulers): def test_rotate(self,default,update,reference_dir,Eulers):
modified = copy.deepcopy(default) modified = copy.deepcopy(default)
modified.rotate(Rotation.from_Eulers(Eulers,degrees=True)) modified.rotate(Rotation.from_Eulers(Eulers,degrees=True))
tag = 'Eulers={}-{}-{}'.format(*Eulers) tag = f'Eulers={util.srepr(Eulers,"-")}'
reference = os.path.join(reference_dir,'rotate_{}.geom'.format(tag)) reference = os.path.join(reference_dir,f'rotate_{tag}.geom')
if update: modified.to_file(reference) if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference)) assert geom_equal(modified,Geom.from_file(reference))
View File
@ -38,4 +38,4 @@ class TestSymmetry:
def test_invalid_argument(self,function): def test_invalid_argument(self,function):
s = Symmetry() # noqa s = Symmetry() # noqa
with pytest.raises(ValueError): with pytest.raises(ValueError):
eval('s.{}(np.ones(4))'.format(function)) eval(f's.{function}(np.ones(4))')
View File
@ -49,7 +49,7 @@ class TestOrientation:
@pytest.mark.parametrize('model',['Bain','KS','GT','GT_prime','NW','Pitsch']) @pytest.mark.parametrize('model',['Bain','KS','GT','GT_prime','NW','Pitsch'])
@pytest.mark.parametrize('lattice',['fcc','bcc']) @pytest.mark.parametrize('lattice',['fcc','bcc'])
def test_relationship_reference(self,update,reference_dir,model,lattice): def test_relationship_reference(self,update,reference_dir,model,lattice):
reference = os.path.join(reference_dir,'{}_{}.txt'.format(lattice,model)) reference = os.path.join(reference_dir,f'{lattice}_{model}.txt')
ori = Orientation(Rotation(),lattice) ori = Orientation(Rotation(),lattice)
eu = np.array([o.rotation.as_Eulers(degrees=True) for o in ori.relatedOrientations(model)]) eu = np.array([o.rotation.as_Eulers(degrees=True) for o in ori.relatedOrientations(model)])
if update: if update:
View File
@ -137,7 +137,7 @@ class TestResult:
default.add_Cauchy('P','F') default.add_Cauchy('P','F')
default.add_eigenvalue('sigma',eigenvalue) default.add_eigenvalue('sigma',eigenvalue)
loc = {'sigma' :default.get_dataset_location('sigma'), loc = {'sigma' :default.get_dataset_location('sigma'),
'lambda':default.get_dataset_location('lambda_{}(sigma)'.format(eigenvalue))} 'lambda':default.get_dataset_location(f'lambda_{eigenvalue}(sigma)')}
in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True) in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True)
in_file = default.read_dataset(loc['lambda'],0) in_file = default.read_dataset(loc['lambda'],0)
assert np.allclose(in_memory,in_file) assert np.allclose(in_memory,in_file)
@ -147,7 +147,7 @@ class TestResult:
default.add_Cauchy('P','F') default.add_Cauchy('P','F')
default.add_eigenvector('sigma',eigenvalue) default.add_eigenvector('sigma',eigenvalue)
loc = {'sigma' :default.get_dataset_location('sigma'), loc = {'sigma' :default.get_dataset_location('sigma'),
'v(sigma)':default.get_dataset_location('v_{}(sigma)'.format(eigenvalue))} 'v(sigma)':default.get_dataset_location(f'v_{eigenvalue}(sigma)')}
in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx] in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx]
in_file = default.read_dataset(loc['v(sigma)'],0) in_file = default.read_dataset(loc['v(sigma)'],0)
assert np.allclose(in_memory,in_file) assert np.allclose(in_memory,in_file)
@ -179,7 +179,7 @@ class TestResult:
t = ['V','U'][np.random.randint(0,2)] t = ['V','U'][np.random.randint(0,2)]
m = np.random.random()*2.0 - 1.0 m = np.random.random()*2.0 - 1.0
default.add_strain_tensor('F',t,m) default.add_strain_tensor('F',t,m)
label = 'epsilon_{}^{}(F)'.format(t,m) label = f'epsilon_{t}^{m}(F)'
default.add_Mises(label) default.add_Mises(label)
loc = {label :default.get_dataset_location(label), loc = {label :default.get_dataset_location(label),
label+'_vM':default.get_dataset_location(label+'_vM')} label+'_vM':default.get_dataset_location(label+'_vM')}
@ -248,7 +248,7 @@ class TestResult:
t = ['V','U'][np.random.randint(0,2)] t = ['V','U'][np.random.randint(0,2)]
m = np.random.random()*2.0 - 1.0 m = np.random.random()*2.0 - 1.0
default.add_strain_tensor('F',t,m) default.add_strain_tensor('F',t,m)
label = 'epsilon_{}^{}(F)'.format(t,m) label = f'epsilon_{t}^{m}(F)'
loc = {'F': default.get_dataset_location('F'), loc = {'F': default.get_dataset_location('F'),
label: default.get_dataset_location(label)} label: default.get_dataset_location(label)}
in_memory = mechanics.strain_tensor(default.read_dataset(loc['F'],0),t,m) in_memory = mechanics.strain_tensor(default.read_dataset(loc['F'],0),t,m)
View File
@ -556,7 +556,7 @@ def mul(me, other):
else: else:
raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors') raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
else: else:
raise TypeError('Cannot rotate {}'.format(type(other))) raise TypeError(f'Cannot rotate {type(other)}')
class TestRotation: class TestRotation:
@ -878,7 +878,7 @@ class TestRotation:
def test_invalid_P(self,fr,to): def test_invalid_P(self,fr,to):
R = Rotation.from_random(np.random.randint(8,32,(3))) # noqa R = Rotation.from_random(np.random.randint(8,32,(3))) # noqa
with pytest.raises(ValueError): with pytest.raises(ValueError):
fr(eval('R.{}()'.format(to)),P=-30) fr(eval(f'R.{to}()'),P=-30)
@pytest.mark.parametrize('shape',[None,(3,),(4,2)]) @pytest.mark.parametrize('shape',[None,(3,),(4,2)])
def test_broadcast(self,shape): def test_broadcast(self,shape):
View File
@ -30,8 +30,8 @@ class TestGridFilters:
grid = np.random.randint(8,32,(3)) grid = np.random.randint(8,32,(3))
size = np.random.random(3) size = np.random.random(3)
origin = np.random.random(3) origin = np.random.random(3)
coord0 = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode)) # noqa coord0 = eval(f'grid_filters.{mode}_coord0(grid,size,origin)') # noqa
_grid,_size,_origin = eval('grid_filters.{}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))'.format(mode)) _grid,_size,_origin = eval(f'grid_filters.{mode}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))')
assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin) assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin)
def test_displacement_fluct_equivalence(self): def test_displacement_fluct_equivalence(self):
@ -67,8 +67,8 @@ class TestGridFilters:
origin= np.random.random(3) origin= np.random.random(3)
size = np.random.random(3) # noqa size = np.random.random(3) # noqa
grid = np.random.randint(8,32,(3)) grid = np.random.randint(8,32,(3))
shifted = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode)) shifted = eval(f'grid_filters.{mode}_coord0(grid,size,origin)')
unshifted = eval('grid_filters.{}_coord0(grid,size)'.format(mode)) unshifted = eval(f'grid_filters.{mode}_coord0(grid,size)')
if mode == 'cell': if mode == 'cell':
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid) +(3,))) assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid) +(3,)))
elif mode == 'node': elif mode == 'node':
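The two assertions differ because a cell grid keeps grid points per direction while a node grid carries one extra plane per direction, so the origin shift must broadcast to different shapes. A small sketch of those shapes (following the cell/node convention used above):

import numpy as np

grid   = np.array([4, 2, 3])
origin = np.random.random(3)

shift_cell = np.broadcast_to(origin, tuple(grid)   + (3,))  # cell centers
shift_node = np.broadcast_to(origin, tuple(grid+1) + (3,))  # nodes: one extra plane each
print(shift_cell.shape, shift_node.shape)                   # (4, 2, 3, 3) (5, 3, 4, 3)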
View File
@ -1,11 +1,33 @@
import pytest
import numpy as np
from damask import util from damask import util
class TestUtil: class TestUtil:
def test_execute_direct(self): def test_execute_direct(self):
out,err = util.execute('echo test') out,err = util.execute('echo test')
assert out=='test\n' and err=='' assert out=='test\n' and err==''
def test_execute_env(self): def test_execute_env(self):
out,err = util.execute('sh -c "echo $test_for_execute"',env={'test_for_execute':'test'}) out,err = util.execute('sh -c "echo $test_for_execute"',env={'test_for_execute':'test'})
assert out=='test\n' and err=='' assert out=='test\n' and err==''
def test_croak(self):
util.croak('Burp!')
@pytest.mark.parametrize('input,output',
[
([2,0],[1,0]),
([0.5,0.5],[1,1]),
([1./2.,1./3.],[3,2]),
([2./3.,1./2.,1./3.],[4,3,2]),
])
def test_scale2coprime(self,input,output):
assert np.allclose(util.scale_to_coprime(np.array(input)),
np.array(output).astype(int))
def test_lackofprecision(self):
with pytest.raises(ValueError):
util.scale_to_coprime(np.array([1/3333,1,1]))