more f-stringing
parent 966b6f8007
commit 14d3b7e66d
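The conversion is mechanical throughout: positional str.format calls become f-strings (PEP 498, Python >= 3.6), inlining each expression at its point of use. A minimal sketch of the before/after pattern, with illustrative names:

    label = 'sigma'
    unit  = 'Pa'
    # before: positional str.format
    description = 'Cauchy stress {} ({})'.format(label,unit)
    # after: f-string, expression inlined where it is used
    description = f'Cauchy stress {label} ({unit})'
    assert description == 'Cauchy stress sigma (Pa)'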
@@ -1,4 +1,5 @@
 import numpy as np
+from . import util

 class Color:
     """Color representation in and conversion between different color-spaces."""

@@ -525,8 +526,8 @@ class Colormap:
             colormap = [ f'1 1 {name}'
                        + f' 9 {name}'
                        + ' 0 1 0 3 0 0 -1 9 \\ 0 0 0 255 255 255 0 0 255 '
-                       + '30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 ' + str(len(colors))
-                       + ' '.join([' 0 {} 255 1'.format(' '.join([str(int(x*255.0)) for x in color])) for color in reversed(colors)])]
+                       + f'30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 {len(colors)}'
+                       + ' '.join([f' 0 {util.srepr((255*np.array(c)).astype(int)," ")} 255 1' for c in reversed(colors)])]

         elif format == 'raw':
             colormap = ['\t'.join(map(str,color)) for color in colors]
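Several hunks also swap hand-rolled join/str constructs for util.srepr, hence the new import above. A rough sketch of the behavior assumed of util.srepr(arg,glue), not its actual implementation:

    import numpy as np

    def srepr(arg, glue='\n'):
        # assumed behavior: join the string representations of an
        # iterable's items with glue; scalars fall back to str()
        if hasattr(arg,'__iter__') and not isinstance(arg,str):
            return glue.join(str(x) for x in arg)
        return str(arg)

    print(srepr((255*np.array([0.2,0.4,0.6])).astype(int),' '))    # 51 102 153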
@@ -257,11 +257,11 @@ class Geom:

     def get_header(self):
         """Return the full header (grid, size, origin, homogenization, comments)."""
-        header = ['{} header'.format(len(self.comments)+4)] + self.comments
+        header = [f'{len(self.comments)+4} header'] + self.comments
         header.append('grid a {} b {} c {}'.format(*self.get_grid()))
         header.append('size x {} y {} z {}'.format(*self.get_size()))
         header.append('origin x {} y {} z {}'.format(*self.get_origin()))
-        header.append('homogenization {}'.format(self.get_homogenization()))
+        header.append(f'homogenization {self.get_homogenization()}')
         return header


@@ -374,7 +374,6 @@ class Geom:
         else:
             microstructure = microstructure.reshape(grid)

-        #comments = 'geom.py:from_Laguerre_tessellation v{}'.format(version)
         return Geom(microstructure+1,size,homogenization=1)


@@ -399,7 +398,6 @@ class Geom:
         KDTree = spatial.cKDTree(seeds,boxsize=size) if periodic else spatial.cKDTree(seeds)
         devNull,microstructure = KDTree.query(coords)

-        #comments = 'geom.py:from_Voronoi_tessellation v{}'.format(version)
         return Geom(microstructure.reshape(grid)+1,size,homogenization=1)


@@ -524,7 +522,6 @@ class Geom:
         if 'x' in directions:
             ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)

-        #self.add_comments('geom.py:mirror v{}'.format(version)
         return self.update(ms,rescale=True)


@@ -538,7 +535,6 @@ class Geom:
             number of grid points in x,y,z direction.

         """
-        #self.add_comments('geom.py:scale v{}'.format(version)
         return self.update(
                            ndimage.interpolation.zoom(
                                                       self.microstructure,

@@ -565,7 +561,6 @@ class Geom:
             unique, inverse = np.unique(arr, return_inverse=True)
             return unique[np.argmax(np.bincount(inverse))]

-        #self.add_comments('geom.py:clean v{}'.format(version)
         return self.update(ndimage.filters.generic_filter(
                                                           self.microstructure,
                                                           mostFrequent,

@@ -580,7 +575,6 @@ class Geom:
         for i, oldID in enumerate(np.unique(self.microstructure)):
             renumbered = np.where(self.microstructure == oldID, i+1, renumbered)

-        #self.add_comments('geom.py:renumber v{}'.format(version)
         return self.update(renumbered)


@@ -615,7 +609,6 @@ class Geom:

         origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid

-        #self.add_comments('geom.py:rotate v{}'.format(version)
         return self.update(microstructure_in,origin=origin,rescale=True)


@@ -647,7 +640,6 @@ class Geom:

         canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = self.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]

-        #self.add_comments('geom.py:canvas v{}'.format(version)
         return self.update(canvas,origin=self.origin+offset*self.size/self.grid,rescale=True)


@@ -667,5 +659,4 @@ class Geom:
         for from_ms,to_ms in zip(from_microstructure,to_microstructure):
             substituted[self.microstructure==from_ms] = to_ms

-        #self.add_comments('geom.py:substitute v{}'.format(version)
         return self.update(substituted)
@@ -26,7 +26,7 @@ class Symmetry:

         """
         if symmetry is not None and symmetry.lower() not in Symmetry.lattices:
-            raise KeyError('Symmetry/crystal system "{}" is unknown'.format(symmetry))
+            raise KeyError(f'Symmetry/crystal system "{symmetry}" is unknown')

         self.lattice = symmetry.lower() if isinstance(symmetry,str) else symmetry

@@ -40,7 +40,7 @@ class Symmetry:

     def __repr__(self):
         """Readable string."""
-        return '{}'.format(self.lattice)
+        return f'{self.lattice}'


     def __eq__(self, other):

@@ -348,7 +348,7 @@ class Lattice:

     def __repr__(self):
         """Report basic lattice information."""
-        return 'Bravais lattice {} ({} symmetry)'.format(self.lattice,self.symmetry)
+        return f'Bravais lattice {self.lattice} ({self.symmetry} symmetry)'


     # Kurdjomov--Sachs orientation relationship for fcc <-> bcc transformation

@@ -613,10 +613,10 @@ class Lattice:
         try:
             relationship = models[model]
         except KeyError :
-            raise KeyError('Orientation relationship "{}" is unknown'.format(model))
+            raise KeyError(f'Orientation relationship "{model}" is unknown')

         if self.lattice not in relationship['mapping']:
-            raise ValueError('Relationship "{}" not supported for lattice "{}"'.format(model,self.lattice))
+            raise ValueError(f'Relationship "{model}" not supported for lattice "{self.lattice}"')

         r = {'lattice':Lattice((set(relationship['mapping'])-{self.lattice}).pop()),               # target lattice
              'rotations':[] }
@@ -50,8 +50,7 @@ class Result:
             self.version_minor = f.attrs['DADF5-minor']

             if self.version_major != 0 or not 2 <= self.version_minor <= 6:
-                raise TypeError('Unsupported DADF5 version {}.{} '.format(self.version_major,
-                                                                          self.version_minor))
+                raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')

             self.structured = 'grid' in f['geometry'].attrs.keys()

@@ -107,7 +106,7 @@ class Result:
         self.pick('increments',all_selected_increments)

         in_between = '' if len(all_selected_increments) < 3 else \
-                     ''.join(['\n{}\n  ...\n'.format(inc) for inc in all_selected_increments[1:-2]])
+                     ''.join([f'\n{inc}\n  ...\n' for inc in all_selected_increments[1:-2]])

         return util.srepr(first + in_between + last)

@@ -137,7 +136,7 @@ class Result:

         if what == 'increments':
             choice = [c if isinstance(c,str) and c.startswith('inc') else
-                      'inc{}'.format(c) for c in choice]
+                      f'inc{c}' for c in choice]
         elif what == 'times':
             what = 'increments'
             if choice == ['*']:

@@ -412,21 +411,19 @@ class Result:
         message = ''
         with h5py.File(self.fname,'r') as f:
             for i in self.iterate('increments'):
-                message += '\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
+                message += f'\n{i} ({self.times[self.increments.index(i)]}s)\n'
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                     for oo in self.iterate(o):
-                        message += '  {}\n'.format(oo)
+                        message += f'  {oo}\n'
                         for pp in self.iterate(p):
-                            message += '    {}\n'.format(pp)
+                            message += f'    {pp}\n'
                             group = '/'.join([i,o[:-1],oo,pp])                                      # o[:-1]: plural/singular issue
                             for d in f[group].keys():
                                 try:
                                     dataset = f['/'.join([group,d])]
+                                    unit = f" / {dataset.attrs['Unit'].decode()}" if 'Unit' in dataset.attrs else ''
                                     description = dataset.attrs['Description'].decode()
-                                    if 'Unit' in dataset.attrs:
-                                        message += '      {} / ({}): {}\n'.format(d,dataset.attrs['Unit'].decode(),description)
-                                    else:
-                                        message += '      {}: {}\n'.format(d,description)
+                                    message += f'      {d}{unit}: {description}\n'
                                 except KeyError:
                                     pass
         return message

@@ -528,10 +525,10 @@ class Result:
     def _add_absolute(x):
         return {
                 'data':  np.abs(x['data']),
-                'label': '|{}|'.format(x['label']),
+                'label': f'|{x["label"]}|',
                 'meta':  {
                          'Unit':        x['meta']['Unit'],
-                         'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
+                         'Description': f"Absolute value of {x['label']} ({x['meta']['Description']})",
                          'Creator':     inspect.stack()[0][3][1:]
                         }
                }

@@ -552,14 +549,14 @@ class Result:
     def _add_calculation(**kwargs):
         formula = kwargs['formula']
         for d in re.findall(r'#(.*?)#',formula):
-            formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
+            formula = formula.replace(f'#{d}#',f"kwargs['{d}']['data']")

         return {
                 'data':  eval(formula),
                 'label': kwargs['label'],
                 'meta':  {
                          'Unit':        kwargs['unit'],
-                         'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
+                         'Description': f"{kwargs['description']} (formula: {kwargs['formula']})",
                          'Creator':     inspect.stack()[0][3][1:]
                         }
                }

@@ -596,8 +593,9 @@ class Result:
                 'label': 'sigma',
                 'meta':  {
                          'Unit':        P['meta']['Unit'],
-                         'Description': 'Cauchy stress calculated from {} ({}) and {} ({})'\
-                                        .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                         'Description': "Cauchy stress calculated "
+                                        f"from {P['label']} ({P['meta']['Description']})"
+                                        f" and {F['label']} ({F['meta']['Description']})",
                          'Creator':     inspect.stack()[0][3][1:]
                         }
                }

@@ -620,10 +618,10 @@ class Result:
     def _add_determinant(T):
         return {
                 'data':  np.linalg.det(T['data']),
-                'label': 'det({})'.format(T['label']),
+                'label': f"det({T['label']})",
                 'meta':  {
                          'Unit':        T['meta']['Unit'],
-                         'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                         'Description': f"Determinant of tensor {T['label']} ({T['meta']['Description']})",
                          'Creator':     inspect.stack()[0][3][1:]
                         }
                }

@@ -644,10 +642,10 @@ class Result:
     def _add_deviator(T):
         return {
                 'data':  mechanics.deviatoric_part(T['data']),
-                'label': 's_{}'.format(T['label']),
+                'label': f"s_{T['label']}",
                 'meta':  {
                          'Unit':        T['meta']['Unit'],
-                         'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                         'Description': f"Deviator of tensor {T['label']} ({T['meta']['Description']})",
                          'Creator':     inspect.stack()[0][3][1:]
                         }
                }

@@ -675,10 +673,10 @@ class Result:

         return {
                 'data': mechanics.eigenvalues(T_sym['data'])[:,p],
-                'label': 'lambda_{}({})'.format(eigenvalue,T_sym['label']),
+                'label': f"lambda_{eigenvalue}({T_sym['label']})",
                 'meta' : {
                          'Unit':        T_sym['meta']['Unit'],
-                         'Description': '{} eigenvalue of {} ({})'.format(label,T_sym['label'],T_sym['meta']['Description']),
+                         'Description': f"{label} eigenvalue of {T_sym['label']} ({T_sym['meta']['Description']})",
                          'Creator':     inspect.stack()[0][3][1:]
                         }
                }

@@ -708,11 +706,11 @@ class Result:
         print('p',eigenvalue)
         return {
                 'data': mechanics.eigenvectors(T_sym['data'])[:,p],
-                'label': 'v_{}({})'.format(eigenvalue,T_sym['label']),
+                'label': f"v_{eigenvalue}({T_sym['label']})",
                 'meta' : {
                          'Unit':        '1',
-                         'Description': 'Eigenvector corresponding to {} eigenvalue of {} ({})'\
-                                        .format(label,T_sym['label'],T_sym['meta']['Description']),
+                         'Description': f"Eigenvector corresponding to {label} eigenvalue"
+                                        f" of {T_sym['label']} ({T_sym['meta']['Description']})",
                          'Creator':     inspect.stack()[0][3][1:]
                         }
                }

@@ -774,10 +772,10 @@ class Result:
     def _add_maximum_shear(T_sym):
         return {
                 'data': mechanics.maximum_shear(T_sym['data']),
-                'label': 'max_shear({})'.format(T_sym['label']),
+                'label': f"max_shear({T_sym['label']})",
                 'meta': {
                         'Unit':        T_sym['meta']['Unit'],
-                        'Description': 'Maximum shear component of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
+                        'Description': f"Maximum shear component of {T_sym['label']} ({T_sym['meta']['Description']})",
                         'Creator':     inspect.stack()[0][3][1:]
                        }
                }

@@ -800,11 +798,11 @@ class Result:
               'stress'

         return {
-                'data': mechanics.Mises_strain(T_sym['data']) if t=='strain' else mechanics.Mises_stress(T_sym['data']),
-                'label': '{}_vM'.format(T_sym['label']),
+                'data': (mechanics.Mises_strain if t=='strain' else mechanics.Mises_stress)(T_sym['data']),
+                'label': f"{T_sym['label']}_vM",
                 'meta': {
                         'Unit':        T_sym['meta']['Unit'],
-                        'Description': 'Mises equivalent {} of {} ({})'.format(t,T_sym['label'],T_sym['meta']['Description']),
+                        'Description': f"Mises equivalent {t} of {T_sym['label']} ({T_sym['meta']['Description']})",
                         'Creator':     inspect.stack()[0][3][1:]
                        }
                }

@@ -837,10 +835,10 @@ class Result:

         return {
                 'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
-                'label': '|{}|_{}'.format(x['label'],o),
+                'label': f"|{x['label']}|_{o}",
                 'meta': {
                         'Unit':        x['meta']['Unit'],
-                        'Description': '{}-norm of {} {} ({})'.format(o,t,x['label'],x['meta']['Description']),
+                        'Description': f"{o}-norm of {t} {x['label']} ({x['meta']['Description']})",
                         'Creator':     inspect.stack()[0][3][1:]
                        }
                }

@@ -866,19 +864,20 @@ class Result:
                 'label': 'S',
                 'meta': {
                         'Unit':        P['meta']['Unit'],
-                        'Description': '2. Piola-Kirchhoff stress calculated from {} ({}) and {} ({})'\
-                                       .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                        'Description': "2. Piola-Kirchhoff stress calculated "
+                                       f"from {P['label']} ({P['meta']['Description']})"
+                                       f" and {F['label']} ({F['meta']['Description']})",
                         'Creator':     inspect.stack()[0][3][1:]
                        }
                }

     def add_PK2(self,P='P',F='F'):
         """
-        Add second Piola-Kirchhoff calculated from first Piola-Kirchhoff stress and deformation gradient.
+        Add second Piola-Kirchhoff stress calculated from first Piola-Kirchhoff stress and deformation gradient.

         Parameters
         ----------
         P : str, optional
-            Label first Piola-Kirchhoff stress dataset. Defaults to ‘P’.
+            Label of first Piola-Kirchhoff stress dataset. Defaults to ‘P’.
         F : str, optional
             Label of deformation gradient dataset. Defaults to ‘F’.

@@ -928,10 +927,10 @@ class Result:
     def _add_rotational_part(F):
         return {
                 'data': mechanics.rotational_part(F['data']),
-                'label': 'R({})'.format(F['label']),
+                'label': f"R({F['label']})",
                 'meta': {
                         'Unit':        F['meta']['Unit'],
-                        'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
+                        'Description': f"Rotational part of {F['label']} ({F['meta']['Description']})",
                         'Creator':     inspect.stack()[0][3][1:]
                        }
                }

@@ -952,10 +951,10 @@ class Result:
     def _add_spherical(T):
         return {
                 'data': mechanics.spherical_part(T['data']),
-                'label': 'p_{}'.format(T['label']),
+                'label': f"p_{T['label']}",
                 'meta': {
                         'Unit':        T['meta']['Unit'],
-                        'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']),
+                        'Description': f"Spherical component of tensor {T['label']} ({T['meta']['Description']})",
                         'Creator':     inspect.stack()[0][3][1:]
                        }
                }

@@ -976,10 +975,10 @@ class Result:
     def _add_strain_tensor(F,t,m):
         return {
                 'data': mechanics.strain_tensor(F['data'],t,m),
-                'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
+                'label': f"epsilon_{t}^{m}({F['label']})",
                 'meta': {
                         'Unit':        F['meta']['Unit'],
-                        'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
+                        'Description': f"Strain tensor of {F['label']} ({F['meta']['Description']})",
                         'Creator':     inspect.stack()[0][3][1:]
                        }
                }

@@ -1006,11 +1005,11 @@ class Result:
     @staticmethod
     def _add_stretch_tensor(F,t):
         return {
-                'data': mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
-                'label': '{}({})'.format(t,F['label']),
+                'data': (mechanics.left_stretch if t.upper() == 'V' else mechanics.right_stretch)(F['data']),
+                'label': f"{t}({F['label']})",
                 'meta': {
                         'Unit': F['meta']['Unit'],
-                        'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
+                        'Description': '{} stretch tensor of {} ({})'.format('Left' if t.upper() == 'V' else 'Right',
                                                                              F['label'],F['meta']['Description']),
                         'Creator': inspect.stack()[0][3][1:]
                        }

@@ -1046,7 +1045,7 @@ class Result:
             r = func(**datasets_in,**args)
             return [group,r]
         except Exception as err:
-            print('Error during calculation: {}.'.format(err))
+            print(f'Error during calculation: {err}.')
             return None


@@ -1091,11 +1090,11 @@ class Result:

                     for l,v in result[1]['meta'].items():
                         dataset.attrs[l]=v.encode()
-                    creator = 'damask.Result.{} v{}'.format(dataset.attrs['Creator'].decode(),version)
+                    creator = f"damask.Result.{dataset.attrs['Creator'].decode()} v{version}"
                     dataset.attrs['Creator'] = creator.encode()

                 except (OSError,RuntimeError) as err:
-                    print('Could not add dataset: {}.'.format(err))
+                    print(f'Could not add dataset: {err}.')
                 lock.release()

         pool.close()

@@ -1128,7 +1127,7 @@ class Result:
         time_data = ET.SubElement(time, 'DataItem')
         time_data.attrib={'Format':     'XML',
                           'NumberType': 'Float',
-                          'Dimensions': '{}'.format(len(self.times))}
+                          'Dimensions': f'{len(self.times)}'}
         time_data.text = ' '.join(map(str,self.times))

         attributes = []

@@ -1169,7 +1168,7 @@ class Result:
             data_items[-1].attrib={'Format': 'HDF',
                                    'Precision': '8',
                                    'Dimensions': '{} {} {} 3'.format(*(self.grid+1))}
-            data_items[-1].text='{}:/{}/geometry/u_n'.format(os.path.split(self.fname)[1],inc)
+            data_items[-1].text=f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n'

             for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
                 for oo in getattr(self,o):

@@ -1184,15 +1183,15 @@ class Result:
                         if (shape not in [(1,), (3,), (3,3)]) or dtype != np.float64: continue

                         attributes.append(ET.SubElement(grid, 'Attribute'))
-                        attributes[-1].attrib={'Name': '{}'.format(name.split('/',2)[2]),
+                        attributes[-1].attrib={'Name': name.split('/',2)[2],
                                                'Center': 'Cell',
                                                'AttributeType': 'Tensor'}
                         data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
                         data_items[-1].attrib={'Format': 'HDF',
                                                'NumberType': 'Float',
-                                               'Precision': '{}'.format(prec),
+                                               'Precision': f'{prec}',
                                                'Dimensions': '{} {} {} {}'.format(*self.grid,np.prod(shape))}
-                        data_items[-1].text='{}:{}'.format(os.path.split(self.fname)[1],name)
+                        data_items[-1].text=f'{os.path.split(self.fname)[1]}:{name}'

         with open(self.fname.with_suffix('.xdmf').name,'w') as f:
             f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())

@@ -1270,4 +1269,4 @@ class Result:
         u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
         v.add(u,'u')

-        v.write('{}_inc{}'.format(self.fname.stem,inc[3:].zfill(N_digits)))
+        v.write(f'{self.fname.stem}_inc{inc[3:].zfill(N_digits)}')
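The _add_Mises and _add_stretch_tensor hunks above go one step beyond f-stringing: instead of conditionally calling one of two functions on a repeated argument expression, they select the function first and apply it once, as in (f if cond else g)(x). A small self-contained illustration of that refactor (math.floor/math.ceil stand in for the mechanics functions):

    import math

    def rounded(direction, x):
        # pick the callable first, then apply it once:
        # the argument expression appears a single time
        return (math.floor if direction == 'down' else math.ceil)(x)

    assert rounded('down', 2.7) == 2
    assert rounded('up',   2.3) == 3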
@@ -136,7 +136,7 @@ class Table:

         content = f.readlines()

-        comments = [f'table.py:from_ang v {version}']
+        comments = [f'table.py:from_ang v{version}']
         for line in content:
             if line.startswith('#'):
                 comments.append(line.strip())
@@ -53,7 +53,7 @@ class Test:
         self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))

         self.parser = OptionParser(option_class=damask.extendableOption,
-                                   description = '{} (Test class version: {})'.format(self.description,damask.version),
+                                   description = f'{self.description} (Test class version: {damask.version})',
                                    usage = './test.py [options]')
         self.parser.add_option("-k", "--keep",
                                action = "store_true",

@@ -93,7 +93,7 @@ class Test:
             for variant,object in enumerate(self.variants):
                 name = self.variantName(variant)
                 if self.options.show:
-                    logging.critical('{}: {}'.format(variant+1,name))
+                    logging.critical(f'{variant+1}: {name}')
                 elif self.options.select is not None \
                      and not (name in self.options.select or str(variant+1) in self.options.select):
                     pass

@@ -106,12 +106,12 @@ class Test:
                     self.postprocess(variant)

                     if self.options.update:
-                        if self.update(variant) != 0: logging.critical('update for "{}" failed.'.format(name))
+                        if self.update(variant) != 0: logging.critical(f'update for "{name}" failed.')
                     elif not (self.options.accept or self.compare(variant)):                        # no update, do comparison
                         return variant+1                                                            # return culprit

             except Exception as e:
-                logging.critical('exception during variant execution: "{}"'.format(str(e)))
+                logging.critical(f'exception during variant execution: "{e}"')
                 return variant+1                                                                    # return culprit
         return 0

@@ -124,13 +124,13 @@ class Test:
         try:
             shutil.rmtree(self.dirCurrent())
         except FileNotFoundError:
-            logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
+            logging.warning(f'removal of directory "{self.dirCurrent()}" not possible...')

         try:
             os.mkdir(self.dirCurrent())
             return True
         except FileExistsError:
-            logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent()))
+            logging.critical(f'creation of directory "{self.dirCurrent()}" failed.')
             return False

     def prepareAll(self):

@@ -211,7 +211,7 @@ class Test:
         try:
             shutil.copy2(source,target)
         except FileNotFoundError:
-            logging.critical('error copying {} to {}'.format(source,target))
+            logging.critical(f'error copying {source} to {target}')
             raise FileNotFoundError


@@ -222,7 +222,7 @@ class Test:
             try:
                 shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
             except FileNotFoundError:
-                logging.critical('Reference2Current: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Reference2Current: Unable to copy file "{f}"')
                 raise FileNotFoundError


@@ -235,7 +235,7 @@ class Test:
                 shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i]))
             except FileNotFoundError:
                 logging.error(os.path.join(source,f))
-                logging.critical('Base2Current: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Base2Current: Unable to copy file "{f}"')
                 raise FileNotFoundError


@@ -246,7 +246,7 @@ class Test:
             try:
                 shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i]))
             except FileNotFoundError:
-                logging.critical('Current2Reference: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Current2Reference: Unable to copy file "{f}"')
                 raise FileNotFoundError


@@ -257,7 +257,7 @@ class Test:
             try:
                 shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i]))
             except FileNotFoundError:
-                logging.critical('Proof2Current: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Proof2Current: Unable to copy file "{f}"')
                 raise FileNotFoundError


@@ -267,7 +267,7 @@ class Test:
             try:
                 shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
             except FileNotFoundError:
-                logging.critical('Current2Current: Unable to copy file "{}"'.format(f))
+                logging.critical(f'Current2Current: Unable to copy file "{f}"')
                 raise FileNotFoundError


@@ -302,9 +302,7 @@ class Test:
             max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
             refArrayNonZero = refArrayNonZero[curArray.nonzero()]
             curArray = curArray[curArray.nonzero()]
-            print(' ********\n * maximum relative error {} between {} and {}\n ********'.format(max_err,
-                                                                                                refArrayNonZero[max_loc],
-                                                                                                curArray[max_loc]))
+            print(f' ********\n * maximum relative error {max_err} between {refArrayNonZero[max_loc]} and {curArray[max_loc]}\n ********')
             return max_err
         else:
             raise Exception('mismatch in array size to compare')

@@ -350,7 +348,7 @@ class Test:

         for i in range(dataLength):
             if headings0[i]['shape'] != headings1[i]['shape']:
-                raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label']))
+                raise Exception(f"shape mismatch between {headings0[i]['label']} and {headings1[i]['label']}")
             shape[i] = headings0[i]['shape']
             for j in range(np.shape(shape[i])[0]):
                 length[i] *= shape[i][j]

@@ -358,9 +356,7 @@ class Test:
             for j in range(np.shape(normShape[i])[0]):
                 normLength[i] *= normShape[i][j]
         else:
-            raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0),
-                                                                                         len(headings1),
-                                                                                         len(normHeadings)))
+            raise Exception(f'trying to compare {len(headings0)} with {len(headings1)} normed by {len(normHeadings)} data sets')

         table0 = damask.ASCIItable(name=file0,readonly=True)
         table0.head_read()

@@ -372,11 +368,11 @@ class Test:
             key1    = ('1_' if     length[i]>1 else '') +     headings1[i]['label']
             normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label']
             if key0 not in table0.labels(raw = True):
-                raise Exception('column "{}" not found in first table...\n'.format(key0))
+                raise Exception(f'column "{key0}" not found in first table...')
             elif key1 not in table1.labels(raw = True):
-                raise Exception('column "{}" not found in second table...\n'.format(key1))
+                raise Exception(f'column "{key1}" not found in second table...')
             elif normKey not in table0.labels(raw = True):
-                raise Exception('column "{}" not found in first table...\n'.format(normKey))
+                raise Exception(f'column "{normKey}" not found in first table...')
             else:
                 column[0][i] = table0.label_index(key0)
                 column[1][i] = table1.label_index(key1)

@@ -404,9 +400,9 @@ class Test:
                     norm[i] = [1.0 for j in range(line0-len(skipLines))]
                     absTol[i] = True
                     if perLine:
-                        logging.warning('At least one norm of "{}" in first table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
+                        logging.warning(f"At least one norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")
                     else:
-                        logging.warning('Maximum norm of "{}" in first table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
+                        logging.warning(f"Maximum norm of \"{headings0[i]['label']}\" in first table is 0.0, using absolute tolerance")

         line1 = 0
         while table1.data_read():                                                                   # read next data line of ASCII table

@@ -418,18 +414,14 @@ class Test:
                                           norm[i][line1-len(skipLines)])
             line1 +=1

-        if (line0 != line1): raise Exception('found {} lines in first table but {} in second table'.format(line0,line1))
+        if (line0 != line1): raise Exception(f'found {line0} lines in first table but {line1} in second table')

         logging.info(' ********')
         for i in range(dataLength):
             if absTol[i]:
-                logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i],
-                                                                                     headings0[i]['label'],
-                                                                                     headings1[i]['label']))
+                logging.info(f" * maximum absolute error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
             else:
-                logging.info(' * maximum relative error {} between {} and {}'.format(maxError[i],
-                                                                                     headings0[i]['label'],
-                                                                                     headings1[i]['label']))
+                logging.info(f" * maximum relative error {maxError[i]} between {headings0[i]['label']} and {headings1[i]['label']}")
         logging.info(' ********')
         return maxError

@@ -480,8 +472,8 @@ class Test:
         normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
         mean = np.amax(np.abs(np.mean(normedDelta,0)))
         std = np.amax(np.std(normedDelta,0))
-        logging.info('mean: {:f}'.format(mean))
-        logging.info('std: {:f}'.format(std))
+        logging.info(f'mean: {mean:f}')
+        logging.info(f'std: {std:f}')

         return (mean<meanTol) & (std < stdTol)

@@ -521,7 +513,7 @@ class Test:

         for i,(table,labels) in enumerate(zip(tables,columns)):
             if np.any(dimensions != [np.prod(table.shapes[c]) for c in labels]):                    # check data object consistency
-                logging.critical('Table {} differs in data layout.'.format(files[i]))
+                logging.critical(f'Table {files[i]} differs in data layout.')
                 return False
             data.append(np.hstack(list(table.get(label) for label in labels)).astype(np.float))     # store

@@ -537,19 +529,19 @@ class Test:

         for i in range(len(data)):
             data[i] /= maximum                                                                      # normalize each table
-            logging.info('shape of data {}: {}'.format(i,data[i].shape))
+            logging.info(f'shape of data {i}: {data[i].shape}')

         if debug:
             violators = np.absolute(data[0]-data[1]) > atol + rtol*np.absolute(data[1])
-            logging.info('shape of violators: {}'.format(violators.shape))
+            logging.info(f'shape of violators: {violators.shape}')
             for j,culprits in enumerate(violators):
                 goodguys = np.logical_not(culprits)
                 if culprits.any():
-                    logging.info('{} has {}'.format(j,np.sum(culprits)))
-                    logging.info('deviation: {}'.format(np.absolute(data[0][j]-data[1][j])[culprits]))
-                    logging.info('data     : {}'.format(np.absolute(data[1][j])[culprits]))
-                    logging.info('deviation: {}'.format(np.absolute(data[0][j]-data[1][j])[goodguys]))
-                    logging.info('data     : {}'.format(np.absolute(data[1][j])[goodguys]))
+                    logging.info(f'{j} has {np.sum(culprits)}')
+                    logging.info(f'deviation: {np.absolute(data[0][j]-data[1][j])[culprits]}')
+                    logging.info(f'data     : {np.absolute(data[1][j])[culprits]}')
+                    logging.info(f'deviation: {np.absolute(data[0][j]-data[1][j])[goodguys]}')
+                    logging.info(f'data     : {np.absolute(data[1][j])[goodguys]}')

         allclose = True                                                                             # start optimistic
         for i in range(1,len(data)):

@@ -588,12 +580,12 @@ class Test:

         if culprit == 0:
             count = len(self.variants) if self.options.select is None else len(self.options.select)
-            msg = 'Test passed.' if count == 1 else 'All {} tests passed.'.format(count)
+            msg = 'Test passed.' if count == 1 else f'All {count} tests passed.'
        elif culprit == -1:
             msg = 'Warning: could not start test...'
             ret = 0
         else:
-            msg = 'Test "{}" failed.'.format(self.variantName(culprit-1))
+            msg = f'Test "{self.variantName(culprit-1)}" failed.'

         logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n')
         return ret
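A quoting detail visible in the hunks above: before Python 3.12, the expression part of an f-string may not reuse the outer quote character and may not contain a backslash, which is why the rewrites mix single and double quotes and keep escapes like \" in the literal part only. In isolation:

    x = {'label': 'F'}
    print(f"det({x['label']})")         # double quotes outside, single inside
    print(f"norm of \"{x['label']}\"")  # escape lives in the literal part only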
@@ -1,6 +1,7 @@
 import re
 import os

+from damask import util

 class Section():
   def __init__(self,data = {'__order__':[]},part = ''):

@@ -94,7 +95,7 @@ class Material():
                   'texture',
                   'microstructure',
                  ]
-    self.data = {\
+    self.data = {
             'homogenization': {'__order__': []},
             'microstructure': {'__order__': []},
             'crystallite':    {'__order__': []},

@@ -107,19 +108,19 @@ class Material():
     """Returns current data structure in material.config format."""
     me = []
     for part in self.parts:
-      if self.verbose: print('processing <{}>'.format(part))
+      if self.verbose: print(f'processing <{part}>')
       me += ['',
              '#'*100,
-             '<{}>'.format(part),
+             f'<{part}>',
              '#'*100,
             ]
       for section in self.data[part]['__order__']:
-        me += ['[{}] {}'.format(section,'#'+'-'*max(0,96-len(section)))]
+        me += [f'[{section}] {"#"+"-"*max(0,96-len(section))}']
         for key in self.data[part][section]['__order__']:
           if key.startswith('(') and key.endswith(')'):                                             # multiple (key)
-            me += ['{}\t{}'.format(key,' '.join(values)) for values in self.data[part][section][key]]
+            me += [f'{key}\t{" ".join(values)}' for values in self.data[part][section][key]]
           else:                                                                                     # plain key
-            me += ['{}\t{}'.format(key,' '.join(map(str,self.data[part][section][key])))]
+            me += [f'{key}\t{util.srepr(self.data[part][section][key]," ")}']
     return '\n'.join(me) + '\n'

   def parse(self, part=None, sections=[], content=None):

@@ -185,9 +186,9 @@ class Material():
     outname = filename
     while os.path.exists(outname) and not overwrite:
       i += 1
-      outname = '{}_{}'.format(filename,i)
+      outname = f'{filename}_{i}'

-    if self.verbose: print('Writing material data to {}'.format(outname))
+    if self.verbose: print(f'Writing material data to {outname}')
     with open(outname,'w') as f:
       f.write(str(self))
     return outname

@@ -196,7 +197,7 @@ class Material():
     """Add Update."""
     part = part.lower()
     section = section.lower()
-    if part not in self.parts: raise Exception('invalid part {}'.format(part))
+    if part not in self.parts: raise Exception(f'invalid part {part}')

     if not isinstance(initialData, dict):
       initialData = initialData.data()
@@ -25,7 +25,7 @@ class Marc:
     def library_path(self):

         path_MSC = Environment().options['MSC_ROOT']
-        path_lib = Path('{}/mentat{}/shlib/linux64'.format(path_MSC,self.version))
+        path_lib = Path(f'{path_MSC}/mentat{self.version}/shlib/linux64')

         return path_lib if path_lib.is_dir() else None

@@ -34,7 +34,7 @@ class Marc:
     def tools_path(self):

         path_MSC = Environment().options['MSC_ROOT']
-        path_tools = Path('{}/marc{}/tools'.format(path_MSC,self.version))
+        path_tools = Path(f'{path_MSC}/marc{self.version}/tools')

         return path_tools if path_tools.is_dir() else None

@@ -51,21 +51,21 @@ class Marc:

         env = Environment()

-        user = env.root_dir/Path('src/DAMASK_marc{}'.format(self.version)).with_suffix('.f90' if compile else '.marc')
-        if not user.is_file():
-            raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),user))
+        usersub = env.root_dir/Path(f'src/DAMASK_marc{self.version}').with_suffix('.f90' if compile else '.marc')
+        if not usersub.is_file():
+            raise FileNotFoundError("DAMASK4Marc ({}) '{}' not found".format(('source' if compile else 'binary'),usersub))

         # Define options [see Marc Installation and Operation Guide, pp 23]
-        script = 'run_damask_{}mp'.format(optimization)
+        script = f'run_damask_{optimization}mp'

         cmd = str(self.tools_path/Path(script)) + \
               ' -jid ' + model + '_' + job + \
               ' -nprocd 1 -autorst 0 -ci n -cr n -dcoup 0 -b no -v no'

-        if compile: cmd += ' -u ' + str(user) + ' -save y'
-        else:       cmd += ' -prog ' + str(user.with_suffix(''))
+        if compile: cmd += ' -u ' + str(usersub) + ' -save y'
+        else:       cmd += ' -prog ' + str(usersub.with_suffix(''))

-        print('job submission {} compilation: {}'.format('with' if compile else 'without',user))
+        print('job submission {} compilation: {}'.format(('with' if compile else 'without'),usersub))
         if logfile: log = open(logfile, 'w')
         print(cmd)
         process = subprocess.Popen(shlex.split(cmd),stdout = log,stderr = subprocess.STDOUT)
@@ -236,7 +236,7 @@ class _ProgressBar:
             delta_time = datetime.datetime.now() - self.start_time
             remaining_time = (self.total - (iteration+1)) * delta_time / (iteration+1)
             remaining_time -= datetime.timedelta(microseconds=remaining_time.microseconds)          # remove μs
-            sys.stderr.write('\r{self.prefix} {bar} {fraction:>4.0%} ETA {remaining_time}')
+            sys.stderr.write(f'\r{self.prefix} {bar} {fraction:>4.0%} ETA {remaining_time}')
             sys.stderr.flush()

         self.last_fraction = fraction
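The _ProgressBar hunk above is a genuine bug fix rather than a style change: the original string literal lacked the f prefix, so the braces were written to stderr verbatim instead of being interpolated. The pitfall in isolation:

    prefix = 'damask'
    print('\r{prefix} 42%')     # missing f: prints the braces literally
    print(f'\r{prefix} 42%')    # f-string: interpolates prefix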
@@ -6,6 +6,7 @@ import numpy as np

 from damask import Geom
 from damask import Rotation
+from damask import util


 def geom_equal(a,b):

@@ -85,8 +86,8 @@ class TestGeom:
     def test_mirror(self,default,update,reference_dir,directions,reflect):
         modified = copy.deepcopy(default)
         modified.mirror(directions,reflect)
-        tag = 'directions={}_reflect={}'.format('-'.join(directions),reflect)
-        reference = os.path.join(reference_dir,'mirror_{}.geom'.format(tag))
+        tag = f'directions={"-".join(directions)}_reflect={reflect}'
+        reference = os.path.join(reference_dir,f'mirror_{tag}.geom')
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))

@@ -94,8 +95,8 @@ class TestGeom:
     def test_clean(self,default,update,reference_dir,stencil):
         modified = copy.deepcopy(default)
         modified.clean(stencil)
-        tag = 'stencil={}'.format(stencil)
-        reference = os.path.join(reference_dir,'clean_{}.geom'.format(tag))
+        tag = f'stencil={stencil}'
+        reference = os.path.join(reference_dir,f'clean_{tag}.geom')
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))

@@ -111,8 +112,8 @@ class TestGeom:
     def test_scale(self,default,update,reference_dir,grid):
         modified = copy.deepcopy(default)
         modified.scale(grid)
-        tag = 'grid={}'.format('-'.join([str(x) for x in grid]))
-        reference = os.path.join(reference_dir,'scale_{}.geom'.format(tag))
+        tag = f'grid={util.srepr(grid,"-")}'
+        reference = os.path.join(reference_dir,f'scale_{tag}.geom')
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))

@@ -150,8 +151,8 @@ class TestGeom:
     def test_rotate(self,default,update,reference_dir,Eulers):
         modified = copy.deepcopy(default)
         modified.rotate(Rotation.from_Eulers(Eulers,degrees=True))
-        tag = 'Eulers={}-{}-{}'.format(*Eulers)
-        reference = os.path.join(reference_dir,'rotate_{}.geom'.format(tag))
+        tag = f'Eulers={util.srepr(Eulers,"-")}'
+        reference = os.path.join(reference_dir,f'rotate_{tag}.geom')
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))
@@ -38,4 +38,4 @@ class TestSymmetry:
     def test_invalid_argument(self,function):
         s = Symmetry()                                                                              # noqa
         with pytest.raises(ValueError):
-            eval('s.{}(np.ones(4))'.format(function))
+            eval(f's.{function}(np.ones(4))')
@@ -49,7 +49,7 @@ class TestOrientation:
     @pytest.mark.parametrize('model',['Bain','KS','GT','GT_prime','NW','Pitsch'])
     @pytest.mark.parametrize('lattice',['fcc','bcc'])
     def test_relationship_reference(self,update,reference_dir,model,lattice):
-        reference = os.path.join(reference_dir,'{}_{}.txt'.format(lattice,model))
+        reference = os.path.join(reference_dir,f'{lattice}_{model}.txt')
         ori = Orientation(Rotation(),lattice)
         eu = np.array([o.rotation.as_Eulers(degrees=True) for o in ori.relatedOrientations(model)])
         if update:
@@ -137,7 +137,7 @@ class TestResult:
         default.add_Cauchy('P','F')
         default.add_eigenvalue('sigma',eigenvalue)
         loc = {'sigma' :default.get_dataset_location('sigma'),
-               'lambda':default.get_dataset_location('lambda_{}(sigma)'.format(eigenvalue))}
+               'lambda':default.get_dataset_location(f'lambda_{eigenvalue}(sigma)')}
         in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True)
         in_file   = default.read_dataset(loc['lambda'],0)
         assert np.allclose(in_memory,in_file)

@@ -147,7 +147,7 @@ class TestResult:
         default.add_Cauchy('P','F')
         default.add_eigenvector('sigma',eigenvalue)
         loc = {'sigma'   :default.get_dataset_location('sigma'),
-               'v(sigma)':default.get_dataset_location('v_{}(sigma)'.format(eigenvalue))}
+               'v(sigma)':default.get_dataset_location(f'v_{eigenvalue}(sigma)')}
         in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx]
         in_file   = default.read_dataset(loc['v(sigma)'],0)
         assert np.allclose(in_memory,in_file)

@@ -179,7 +179,7 @@ class TestResult:
         t = ['V','U'][np.random.randint(0,2)]
         m = np.random.random()*2.0 - 1.0
         default.add_strain_tensor('F',t,m)
-        label = 'epsilon_{}^{}(F)'.format(t,m)
+        label = f'epsilon_{t}^{m}(F)'
         default.add_Mises(label)
         loc = {label      :default.get_dataset_location(label),
                label+'_vM':default.get_dataset_location(label+'_vM')}

@@ -248,7 +248,7 @@ class TestResult:
         t = ['V','U'][np.random.randint(0,2)]
         m = np.random.random()*2.0 - 1.0
         default.add_strain_tensor('F',t,m)
-        label = 'epsilon_{}^{}(F)'.format(t,m)
+        label = f'epsilon_{t}^{m}(F)'
         loc = {'F':   default.get_dataset_location('F'),
                label: default.get_dataset_location(label)}
         in_memory = mechanics.strain_tensor(default.read_dataset(loc['F'],0),t,m)
@@ -556,7 +556,7 @@ def mul(me, other):
             else:
                 raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
         else:
-            raise TypeError('Cannot rotate {}'.format(type(other)))
+            raise TypeError(f'Cannot rotate {type(other)}')


 class TestRotation:

@@ -878,7 +878,7 @@ class TestRotation:
     def test_invalid_P(self,fr,to):
         R = Rotation.from_random(np.random.randint(8,32,(3)))                                       # noqa
         with pytest.raises(ValueError):
-            fr(eval('R.{}()'.format(to)),P=-30)
+            fr(eval(f'R.{to}()'),P=-30)

     @pytest.mark.parametrize('shape',[None,(3,),(4,2)])
     def test_broadcast(self,shape):
@@ -30,8 +30,8 @@ class TestGridFilters:
         grid   = np.random.randint(8,32,(3))
         size   = np.random.random(3)
         origin = np.random.random(3)
-        coord0 = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode))                      # noqa
-        _grid,_size,_origin = eval('grid_filters.{}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))'.format(mode))
+        coord0 = eval(f'grid_filters.{mode}_coord0(grid,size,origin)')                              # noqa
+        _grid,_size,_origin = eval(f'grid_filters.{mode}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))')
         assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin)

     def test_displacement_fluct_equivalence(self):

@@ -67,8 +67,8 @@ class TestGridFilters:
         origin= np.random.random(3)
         size  = np.random.random(3)                                                                 # noqa
         grid  = np.random.randint(8,32,(3))
-        shifted   = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode))
-        unshifted = eval('grid_filters.{}_coord0(grid,size)'.format(mode))
+        shifted   = eval(f'grid_filters.{mode}_coord0(grid,size,origin)')
+        unshifted = eval(f'grid_filters.{mode}_coord0(grid,size)')
         if   mode == 'cell':
             assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid)  +(3,)))
         elif mode == 'node':