Merge branch 'development' into modularize-mech

commit 8007c47b48
@@ -24,6 +24,20 @@ class ConfigMaterial(Config):
         super().save(fname,**kwargs)


+    @classmethod
+    def load(cls,fname='material.yaml'):
+        """
+        Load from yaml file.
+
+        Parameters
+        ----------
+        fname : file, str, or pathlib.Path, optional
+            Filename or file for reading. Defaults to 'material.yaml'.
+
+        """
+        return super(ConfigMaterial,cls).load(fname)
+
+
     @staticmethod
     def from_table(table,constituents={},**kwargs):
         """
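For orientation, a minimal usage sketch of the round trip this classmethod enables (not part of the commit; file names are placeholders and assume a DAMASK install where ConfigMaterial.load/save behave as in the hunk above):

from damask import ConfigMaterial

mat = ConfigMaterial.load('material.yaml')   # parse the YAML file into a ConfigMaterial
print(list(mat['phase']))                    # e.g. ['pheno_fcc', 'pheno_bcc']
mat.save('material_edited.yaml')             # write the (possibly edited) configuration back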
@@ -21,6 +21,7 @@ from . import grid_filters
 from . import mechanics
 from . import util

+h5py3 = h5py.__version__[0] == '3'

 class Result:
     """
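The new module-level flag covers the h5py API change this merge adapts to: under h5py 3 string attributes come back as str, under h5py 2 as bytes. A minimal sketch of the pattern, assuming a result file and attribute path that are placeholders only:

import h5py

h5py3 = h5py.__version__[0] == '3'

with h5py.File('result.hdf5','r') as f:              # placeholder file name
    raw  = f['path/to/some_dataset'].attrs['Unit']    # placeholder dataset path
    unit = raw if h5py3 else raw.decode()             # bytes under h5py 2, str under h5py 3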
@@ -93,7 +94,7 @@ class Result:


     def __repr__(self):
-        """Show selected data."""
+        """Show summary of file content."""
         all_selected_increments = self.selection['increments']

         self.pick('increments',all_selected_increments[0:1])
@@ -280,7 +281,8 @@ class Result:
             for path_old in self.get_dataset_location(name_old):
                 path_new = os.path.join(os.path.dirname(path_old),name_new)
                 f[path_new] = f[path_old]
-                f[path_new].attrs['Renamed'] = 'Original name: {}'.encode()
+                f[path_new].attrs['Renamed'] = f'Original name: {name_old}' if h5py3 else \
+                                               f'Original name: {name_old}'.encode()
                 del f[path_old]
         else:
             raise PermissionError('Rename operation not permitted')
@@ -422,8 +424,13 @@ class Result:
                 for d in f[group].keys():
                     try:
                         dataset = f['/'.join([group,d])]
-                        unit = f" / {dataset.attrs['Unit'].decode()}" if 'Unit' in dataset.attrs else ''
-                        description = dataset.attrs['Description'].decode()
+                        if 'Unit' in dataset.attrs:
+                            unit = f" / {dataset.attrs['Unit']}" if h5py3 else \
+                                   f" / {dataset.attrs['Unit'].decode()}"
+                        else:
+                            unit = ''
+                        description = dataset.attrs['Description'] if h5py3 else \
+                                      dataset.attrs['Description'].decode()
                         message += f'  {d}{unit}: {description}\n'
                     except KeyError:
                         pass
@@ -463,7 +470,8 @@ class Result:
     def get_crystal_structure(self):        # ToDo: extension to multi constituents/phase
         """Info about the crystal structure."""
         with h5py.File(self.fname,'r') as f:
-            return f[self.get_dataset_location('orientation')[0]].attrs['Lattice'].astype('str')  # np.bytes_ to string
+            return f[self.get_dataset_location('O')[0]].attrs['Lattice'] if h5py3 else \
+                   f[self.get_dataset_location('O')[0]].attrs['Lattice'].decode()


     def enable_user_function(self,func):
@@ -788,20 +796,26 @@ class Result:


     @staticmethod
-    def _add_Mises(T_sym):
-        t = 'strain' if T_sym['meta']['Unit'] == '1' else \
-            'stress'
+    def _add_Mises(T_sym,kind):
+        k = kind
+        if k is None:
+            if T_sym['meta']['Unit'] == '1':
+                k = 'strain'
+            elif T_sym['meta']['Unit'] == 'Pa':
+                k = 'stress'
+        if k not in ['stress', 'strain']:
+            raise ValueError(f'invalid von Mises kind {kind}')

         return {
-                'data':  (mechanics.Mises_strain if t=='strain' else mechanics.Mises_stress)(T_sym['data']),
+                'data':  (mechanics.Mises_strain if k=='strain' else mechanics.Mises_stress)(T_sym['data']),
                 'label': f"{T_sym['label']}_vM",
                 'meta':  {
                           'Unit':        T_sym['meta']['Unit'],
-                          'Description': f"Mises equivalent {t} of {T_sym['label']} ({T_sym['meta']['Description']})",
+                          'Description': f"Mises equivalent {k} of {T_sym['label']} ({T_sym['meta']['Description']})",
                           'Creator':     'add_Mises'
                          }
                }
-    def add_Mises(self,T_sym):
+    def add_Mises(self,T_sym,kind=None):
         """
         Add the equivalent Mises stress or strain of a symmetric tensor.

@@ -809,9 +823,12 @@ class Result:
         ----------
         T_sym : str
             Label of symmetric tensorial stress or strain dataset.
+        kind : {'stress', 'strain', None}, optional
+            Kind of the von Mises equivalent. Defaults to None, in which case
+            it is selected based on the unit of the dataset ('1' -> strain, 'Pa' -> stress).

         """
-        self._add_generic_pointwise(self._add_Mises,{'T_sym':T_sym})
+        self._add_generic_pointwise(self._add_Mises,{'T_sym':T_sym},{'kind':kind})


     @staticmethod
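A usage sketch of the extended interface (not from the commit; 'my_job.hdf5' is a placeholder result file and the derived-dataset label is illustrative, mirroring the add_calculation pattern used in the new tests further down):

from damask import Result

r = Result('my_job.hdf5')                                   # placeholder DADF5 result file
r.add_Cauchy('P','F')                                       # adds the Cauchy stress dataset 'sigma'
r.add_Mises('sigma')                                        # kind inferred from the unit: 'Pa' -> stress
r.add_calculation('sigma_MPa','#sigma#*1e-6',unit='MPa')    # derived dataset with an unrecognized unit
r.add_Mises('sigma_MPa',kind='stress')                      # so the kind is stated explicitly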
@@ -1035,7 +1052,7 @@ class Result:
                 loc  = f[group+'/'+label]
                 datasets_in[arg]={'data' :loc[()],
                                   'label':label,
-                                  'meta': {k:v.decode() for k,v in loc.attrs.items()}}
+                                  'meta': {k:(v if h5py3 else v.decode()) for k,v in loc.attrs.items()}}
             lock.release()
             r = func(**datasets_in,**args)
             return [group,r]
@@ -1080,17 +1097,21 @@ class Result:
                     if self._allow_modification and result[0]+'/'+result[1]['label'] in f:
                         dataset = f[result[0]+'/'+result[1]['label']]
                         dataset[...] = result[1]['data']
-                        dataset.attrs['Overwritten'] = 'Yes'.encode()
+                        dataset.attrs['Overwritten'] = 'Yes' if h5py3 else \
+                                                       'Yes'.encode()
                     else:
                         dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])

                     now = datetime.datetime.now().astimezone()
-                    dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
+                    dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
+                                               now.strftime('%Y-%m-%d %H:%M:%S%z').encode()

                     for l,v in result[1]['meta'].items():
-                        dataset.attrs[l]=v.encode()
-                    creator = f"damask.Result.{dataset.attrs['Creator'].decode()} v{damask.version}"
-                    dataset.attrs['Creator'] = creator.encode()
+                        dataset.attrs[l]=v if h5py3 else v.encode()
+                    creator = dataset.attrs['Creator'] if h5py3 else \
+                              dataset.attrs['Creator'].decode()
+                    dataset.attrs['Creator'] = f"damask.Result.{creator} v{damask.version}" if h5py3 else \
+                                               f"damask.Result.{creator} v{damask.version}".encode()

                 except (OSError,RuntimeError) as err:
                     print(f'Could not add dataset: {err}.')
@@ -197,11 +197,10 @@ class VTK:
         elif isinstance(self.vtk_data,vtk.vtkPolyData):
             writer = vtk.vtkXMLPolyDataWriter()

-        default_ext = writer.GetDefaultFileExtension()
+        default_ext = '.'+writer.GetDefaultFileExtension()
         ext = Path(fname).suffix
-        if ext and ext != '.'+default_ext:
-            raise ValueError(f'Given extension "{ext}" does not match default ".{default_ext}"')
-        writer.SetFileName(str(Path(fname).with_suffix('.'+default_ext)))
+        writer.SetFileName(str(fname)+(default_ext if default_ext != ext else ''))

         if compress:
             writer.SetCompressorTypeToZLib()
         else:
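A small sketch of the behaviour this hunk changes: save() now appends the writer's default extension instead of rejecting a mismatched one (not from the commit; file names are placeholders):

import numpy as np
from damask import VTK

v = VTK.from_poly_data(np.random.rand(10,3))
v.save('points')        # written as points.vtp (default extension appended)
v.save('points.txt')    # written as points.txt.vtp rather than raising ValueError
v.save('points.vtp')    # extension already matches, kept as is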
Binary file not shown.
@@ -1,126 +0,0 @@
-<homogenization>
-[none]
-mech            none
-ngrains         1
-
-<texture>
-[Grain1]
-(gauss) phi1 358.98 Phi 65.62 phi2 24.48
-[Grain2]
-(gauss) phi1 121.05 Phi 176.11 phi2 295.73
-[Grain3]
-(gauss) phi1 43.79 Phi 113.76 phi2 345.90
-[Grain4]
-(gauss) phi1 265.15 Phi 62.52 phi2 299.71
-[Grain5]
-(gauss) phi1 221.23 Phi 26.54 phi2 207.05
-[Grain6]
-(gauss) phi1 249.81 Phi 61.47 phi2 152.14
-[Grain7]
-(gauss) phi1 332.45 Phi 99.16 phi2 345.34
-[Grain8]
-(gauss) phi1 312.27 Phi 118.27 phi2 181.59
-[Grain9]
-(gauss) phi1 303.10 Phi 48.21 phi2 358.03
-[Grain10]
-(gauss) phi1 338.26 Phi 48.11 phi2 176.78
-[Grain11]
-(gauss) phi1 115.17 Phi 56.54 phi2 223.84
-[Grain12]
-(gauss) phi1 281.04 Phi 97.48 phi2 27.94
-
-<microstructure>
-[Grain1]
-crystallite 1
-(constituent) phase 1 texture 1 fraction 1.0
-[Grain2]
-crystallite 1
-(constituent) phase 1 texture 2 fraction 1.0
-[Grain3]
-crystallite 1
-(constituent) phase 1 texture 3 fraction 1.0
-[Grain4]
-crystallite 1
-(constituent) phase 1 texture 4 fraction 1.0
-[Grain5]
-crystallite 1
-(constituent) phase 1 texture 5 fraction 1.0
-[Grain6]
-crystallite 1
-(constituent) phase 1 texture 6 fraction 1.0
-[Grain7]
-crystallite 1
-(constituent) phase 2 texture 7 fraction 1.0
-[Grain8]
-crystallite 1
-(constituent) phase 2 texture 8 fraction 1.0
-[Grain9]
-crystallite 1
-(constituent) phase 2 texture 9 fraction 1.0
-[Grain10]
-crystallite 1
-(constituent) phase 2 texture 10 fraction 1.0
-[Grain11]
-crystallite 1
-(constituent) phase 2 texture 11 fraction 1.0
-[Grain12]
-crystallite 1
-(constituent) phase 2 texture 12 fraction 1.0
-
-<phase>
-[pheno_fcc]
-elasticity            hooke
-plasticity            phenopowerlaw
-
-(output) orientation    # quaternion
-(output) F              # deformation gradient tensor
-(output) Fe             # elastic deformation gradient tensor
-(output) Fp             # plastic deformation gradient tensor
-(output) P              # first Piola-Kirchhoff stress tensor
-(output) Lp             # plastic velocity gradient tensor
-
-
-lattice_structure     fcc
-Nslip                 12   # per family
-Ntwin                  0   # per family
-
-c11                   106.75e9
-c12                   60.41e9
-c44                   28.34e9
-
-gdot0_slip            0.001
-n_slip                20
-tau0_slip             31e6   # per family
-tausat_slip           63e6   # per family
-a_slip                2.25
-h0_slipslip           75e6
-interaction_slipslip  1 1 1.4 1.4 1.4 1.4
-atol_resistance       1
-
-[pheno_bcc]
-elasticity            hooke
-plasticity            phenopowerlaw
-
-(output) orientation    # quaternion
-(output) F              # deformation gradient tensor
-(output) Fe             # elastic deformation gradient tensor
-(output) Fp             # plastic deformation gradient tensor
-(output) P              # first Piola-Kirchhoff stress tensor
-(output) Lp             # plastic velocity gradient tensor
-
-
-lattice_structure     bcc
-Nslip                 12   # per family
-
-c11                   106.75e9
-c12                   60.41e9
-c44                   28.34e9
-
-gdot0_slip            0.001
-n_slip                20
-tau0_slip             31e6   # per family
-tausat_slip           63e6   # per family
-a_slip                2.25
-h0_slipslip           75e6
-interaction_slipslip  1 1 1.4 1.4 1.4 1.4
-atol_resistance       1
@@ -0,0 +1,103 @@
+---
+homogenization:
+  SX:
+    N_constituents: 1
+    mech: {type: none}
+
+phase:
+  pheno_fcc:
+    elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke}
+    generic:
+      output: [F, P, F_e, F_p, L_p, O]
+    lattice: fcc
+    plasticity:
+      N_sl: [12]
+      a_sl: 2.25
+      atol_xi: 1.0
+      dot_gamma_0_sl: 0.001
+      h_0_sl_sl: 75e6
+      h_sl_sl: [1, 1, 1.4, 1.4, 1.4, 1.4]
+      n_sl: 20
+      output: [xi_sl]
+      type: phenopowerlaw
+      xi_0_sl: [31e6]
+      xi_inf_sl: [63e6]
+  pheno_bcc:
+    elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke}
+    generic:
+      output: [F, P, F_e, F_p, L_p, O]
+    lattice: bcc
+    plasticity:
+      N_sl: [12]
+      a_sl: 2.25
+      atol_xi: 1.0
+      dot_gamma_0_sl: 0.001
+      h_0_sl_sl: 75e6
+      h_sl_sl: [1, 1, 1.4, 1.4, 1.4, 1.4]
+      n_sl: 20
+      output: [xi_sl]
+      type: phenopowerlaw
+      xi_0_sl: [31e6]
+      xi_inf_sl: [63e6]
+
+material:
+  - constituents:
+      - fraction: 1.0
+        O: [0.8229200444892315, 0.5284940239127993, -0.11958598847729246, 0.17086795611292308]
+        phase: pheno_fcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.029934934533052786, -0.0463822071939717, 0.9983440440417412, 0.01617900728410769]
+        phase: pheno_fcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.5285808688806949, 0.7326575088838098, 0.4051997815944012, 0.1401013087924221]
+        phase: pheno_fcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.1839974517790312, 0.49550065903084944, -0.1541415483910751, -0.8347840545305227]
+        phase: pheno_fcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.8055693100147384, -0.22778497057116814, -0.028331746016454287, 0.5462320075864553]
+        phase: pheno_fcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.8025842700117737, -0.33640019337884963, -0.3847408071640489, 0.3076815085881779]
+        phase: pheno_fcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.6048933483394416, 0.7565005822419409, -0.08545681892422426, -0.2334695661144201]
+        phase: pheno_bcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.2012339360745425, -0.3580127491130033, -0.7798091137625135, 0.47247171400774884]
+        phase: pheno_bcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.7949688202267222, 0.3623793306926909, -0.18836147613310203, -0.4485819321629098]
+        phase: pheno_bcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.19733162113429173, -0.06559103894055797, -0.40230149937129567, 0.8915781236183501]
+        phase: pheno_bcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.8659916384140512, -0.2761459420825848, 0.38479354764225004, -0.1604238964779258]
+        phase: pheno_bcc
+    homogenization: SX
+  - constituents:
+      - fraction: 1.0
+        O: [0.5951846978175659, 0.4476701545571293, -0.6038886363266418, -0.2840160613735736]
+        phase: pheno_bcc
+    homogenization: SX
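A quick sanity check of the new-style configuration above, assuming it is saved as 'material.yaml' and PyYAML is available (illustration only, not DAMASK's own parser):

import yaml

with open('material.yaml') as f:
    cfg = yaml.safe_load(f)

print(list(cfg['phase']))                               # ['pheno_fcc', 'pheno_bcc']
print(len(cfg['material']))                             # 12 material points, one constituent each
print(cfg['material'][0]['constituents'][0]['phase'])   # 'pheno_fcc'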
@@ -1 +0,0 @@
-fdot * 0 0 0 1.0e-3 0 0 0 * stress 0 * * * * * * * 0 time 20 incs 40 freq 4
@@ -0,0 +1,14 @@
+---
+
+step:
+  - discretization:
+      t: 20
+      N: 40
+    f_out: 4
+    mech:
+      dot_F: [x, 0, 0,
+              0, 1.0e-3, 0,
+              0, 0, x]
+      P: [0, x, x,
+          x, x, x,
+          x, x, 0]
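The same applies to the new load case; a minimal parsing sketch, assuming the file is saved as 'load.yaml' ('x' marks components left unconstrained; illustration only, not DAMASK's solver input handling):

import yaml

with open('load.yaml') as f:
    load = yaml.safe_load(f)

step = load['step'][0]
print(step['discretization'])   # {'t': 20, 'N': 40}
print(step['mech']['dot_F'])    # mixed numbers and 'x' placeholders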
@@ -94,11 +94,11 @@ class TestResult:
         default.pick('invalid',True)

     def test_add_absolute(self,default):
-        default.add_absolute('Fe')
-        loc = {'Fe':   default.get_dataset_location('Fe'),
-               '|Fe|': default.get_dataset_location('|Fe|')}
-        in_memory = np.abs(default.read_dataset(loc['Fe'],0))
-        in_file   = default.read_dataset(loc['|Fe|'],0)
+        default.add_absolute('F_e')
+        loc = {'F_e':   default.get_dataset_location('F_e'),
+               '|F_e|': default.get_dataset_location('|F_e|')}
+        in_memory = np.abs(default.read_dataset(loc['F_e'],0))
+        in_file   = default.read_dataset(loc['|F_e|'],0)
         assert np.allclose(in_memory,in_file)

     @pytest.mark.parametrize('mode',['direct','function'])
@@ -168,8 +168,8 @@ class TestResult:

     @pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]])
     def test_add_IPF_color(self,default,d):
-        default.add_IPF_color('orientation',d)
-        loc = {'orientation': default.get_dataset_location('orientation'),
+        default.add_IPF_color('O',d)
+        loc = {'orientation': default.get_dataset_location('O'),
               'color':       default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))}
         qu = default.read_dataset(loc['orientation']).view(np.double).reshape(-1,4)
         crystal_structure = default.get_crystal_structure()
@@ -210,6 +210,22 @@ class TestResult:
         in_file   = default.read_dataset(loc['sigma_vM'],0)
         assert np.allclose(in_memory,in_file)

+    def test_add_Mises_invalid(self,default):
+        default.add_Cauchy('P','F')
+        default.add_calculation('sigma_y','#sigma#',unit='y')
+        default.add_Mises('sigma_y')
+        assert default.get_dataset_location('sigma_y_vM') == []
+
+    def test_add_Mises_stress_strain(self,default):
+        default.add_Cauchy('P','F')
+        default.add_calculation('sigma_y','#sigma#',unit='y')
+        default.add_calculation('sigma_x','#sigma#',unit='x')
+        default.add_Mises('sigma_y',kind='strain')
+        default.add_Mises('sigma_x',kind='stress')
+        loc = {'y': default.get_dataset_location('sigma_y_vM'),
+               'x': default.get_dataset_location('sigma_x_vM')}
+        assert not np.allclose(default.read_dataset(loc['y'],0),default.read_dataset(loc['x'],0))
+
     def test_add_norm(self,default):
         default.add_norm('F',1)
         loc = {'F': default.get_dataset_location('F'),
|
@ -231,8 +247,8 @@ class TestResult:
|
||||||
@pytest.mark.parametrize('polar',[True,False])
|
@pytest.mark.parametrize('polar',[True,False])
|
||||||
def test_add_pole(self,default,polar):
|
def test_add_pole(self,default,polar):
|
||||||
pole = np.array([1.,0.,0.])
|
pole = np.array([1.,0.,0.])
|
||||||
default.add_pole('orientation',pole,polar)
|
default.add_pole('O',pole,polar)
|
||||||
loc = {'orientation': default.get_dataset_location('orientation'),
|
loc = {'orientation': default.get_dataset_location('O'),
|
||||||
'pole': default.get_dataset_location('p^{}_[1 0 0)'.format(u'rφ' if polar else 'xy'))}
|
'pole': default.get_dataset_location('p^{}_[1 0 0)'.format(u'rφ' if polar else 'xy'))}
|
||||||
rot = Rotation(default.read_dataset(loc['orientation']).view(np.double))
|
rot = Rotation(default.read_dataset(loc['orientation']).view(np.double))
|
||||||
rotated_pole = rot * np.broadcast_to(pole,rot.shape+(3,))
|
rotated_pole = rot * np.broadcast_to(pole,rot.shape+(3,))
|
||||||
|
@@ -296,7 +312,11 @@ class TestResult:
         default.add_Cauchy()
         loc = default.get_dataset_location('sigma')
         with h5py.File(default.fname,'r') as f:
+            # h5py3 compatibility
+            try:
                 created_first = f[loc[0]].attrs['Created'].decode()
+            except AttributeError:
+                created_first = f[loc[0]].attrs['Created']
         created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')

         if overwrite == 'on':
|
@ -305,9 +325,16 @@ class TestResult:
|
||||||
default.disallow_modification()
|
default.disallow_modification()
|
||||||
|
|
||||||
time.sleep(2.)
|
time.sleep(2.)
|
||||||
|
try:
|
||||||
default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
|
default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
with h5py.File(default.fname,'r') as f:
|
with h5py.File(default.fname,'r') as f:
|
||||||
|
# h5py3 compatibility
|
||||||
|
try:
|
||||||
created_second = f[loc[0]].attrs['Created'].decode()
|
created_second = f[loc[0]].attrs['Created'].decode()
|
||||||
|
except AttributeError:
|
||||||
|
created_second = f[loc[0]].attrs['Created']
|
||||||
created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
|
created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
|
||||||
if overwrite == 'on':
|
if overwrite == 'on':
|
||||||
assert created_first < created_second and np.allclose(default.read_dataset(loc),311.)
|
assert created_first < created_second and np.allclose(default.read_dataset(loc),311.)
|
||||||
|
|
|
@@ -85,6 +85,12 @@ class TestVTK:
             assert(False)


+    @pytest.mark.parametrize('fname',['a','a.vtp','a.b','a.b.vtp'])
+    def test_filename_variations(self,tmp_path,fname):
+        points = np.random.rand(102,3)
+        v = VTK.from_poly_data(points)
+        v.save(tmp_path/fname)
+
     @pytest.mark.parametrize('name,dataset_type',[('this_file_does_not_exist.vtk', None),
                                                   ('this_file_does_not_exist.vtk','vtk'),
                                                   ('this_file_does_not_exist.vtx', None)])
@@ -92,9 +98,10 @@ class TestVTK:
         with pytest.raises(TypeError):
             VTK.load(name,dataset_type)

-    def test_invalid_extension_write(self,default):
-        with pytest.raises(ValueError):
-            default.save('default.txt')
+    def test_add_extension(self,tmp_path,default):
+        default.save(tmp_path/'default.txt',parallel=False)
+        assert os.path.isfile(tmp_path/'default.txt.vtr')
+

     def test_invalid_get(self,default):
         with pytest.raises(ValueError):
@@ -296,6 +296,8 @@ subroutine HDF5_addAttribute_str(loc_id,attrLabel,attrValue,path)
  logical     :: attrExists
  integer     :: hdferr
  character(len=:), allocatable :: p
+ character(len=:,kind=C_CHAR), allocatable,target :: attrValue_
+ type(c_ptr), target, dimension(1) :: ptr

  if (present(path)) then
    p = trim(path)
@@ -303,12 +305,13 @@ subroutine HDF5_addAttribute_str(loc_id,attrLabel,attrValue,path)
    p = '.'
  endif

+ attrValue_ = trim(attrValue)//C_NULL_CHAR
+ ptr(1) = c_loc(attrValue_)
+
  call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
  if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5screate_f')
- call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr)
+ call h5tcopy_f(H5T_STRING, type_id, hdferr)
  if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5tcopy_f')
- call h5tset_size_f(type_id, int(len_trim(attrValue),HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5tset_size_f')
  call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
  if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5aexists_by_name_f')
  if (attrExists) then
@@ -317,7 +320,7 @@ subroutine HDF5_addAttribute_str(loc_id,attrLabel,attrValue,path)
  endif
  call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),type_id,space_id,attr_id,hdferr)
  if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5acreate_f')
- call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr)
+ call h5awrite_f(attr_id, type_id, c_loc(ptr(1)), hdferr)
  if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5awrite_f')
  call h5aclose_f(attr_id,hdferr)
  if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5aclose_f')
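With H5T_STRING and a C pointer, the attribute is now written as a variable-length string; read back through h5py it arrives as str under version 3 and as bytes under version 2, which is what the h5py3 branches on the Python side absorb. A reading sketch with placeholder file and attribute names:

import h5py

with h5py.File('result.hdf5','r') as f:                      # placeholder file
    raw  = f.attrs['Description']                            # placeholder attribute
    text = raw if isinstance(raw,str) else raw.decode()      # str (h5py 3) or bytes (h5py 2)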
@@ -436,7 +439,7 @@ subroutine HDF5_addAttribute_int_array(loc_id,attrLabel,attrValue,path)
  array_size = size(attrValue,kind=HSIZE_T)

  call h5screate_simple_f(1, array_size, space_id, hdferr, array_size)
- if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5screate_f')
+ if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5screate_simple_f')
  call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
  if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
  if (attrExists) then
@@ -480,7 +483,7 @@ subroutine HDF5_addAttribute_real_array(loc_id,attrLabel,attrValue,path)
  array_size = size(attrValue,kind=HSIZE_T)

  call h5screate_simple_f(1, array_size, space_id, hdferr, array_size)
- if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5screate_f')
+ if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5screate_simple_f')
  call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
  if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
  if (attrExists) then
@@ -736,7 +736,7 @@ subroutine crystallite_results
  integer :: p,o
  real(pReal), allocatable, dimension(:,:,:) :: selected_tensors
  type(rotation), allocatable, dimension(:)  :: selected_rotations
- character(len=pStringLen)                  :: group,structureLabel
+ character(len=:), allocatable              :: group,structureLabel

  do p=1,size(material_name_phase)
    group = trim('current/constituent')//'/'//trim(material_name_phase(p))//'/generic'
@@ -529,7 +529,7 @@ subroutine homogenization_results
    material_homogenization_type => homogenization_type

  integer :: p
- character(len=pStringLen) :: group_base,group
+ character(len=:), allocatable :: group_base,group

  !real(pReal), dimension(:,:,:), allocatable :: temp
