Merge branch 'incs-no-leading-zero' into 'development'
Incs no leading zero

See merge request damask/DAMASK!108
Commit 5a6111ec69
@@ -47,6 +47,8 @@ for filename in options.filenames:
   coords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)
 
+  N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
+  N_digits = 5 # hack to keep test intact
   for i,inc in enumerate(results.iter_visible('increments')):
     print('Output step {}/{}'.format(i+1,len(results.increments)))

@@ -92,5 +94,6 @@ for filename in options.filenames:
     dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
     if not os.path.isdir(dirname):
       os.mkdir(dirname,0o755)
-    file_out = '{}_{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],inc)
+    file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],
+                                     inc[3:].zfill(N_digits))
     np.savetxt(os.path.join(dirname,file_out),data,header=header,comments='')

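For orientation, a minimal sketch of the naming scheme introduced above, with invented increment labels and an invented base name ('geom_load'): N_digits is derived from the largest increment so the zero-padded output file names still sort correctly even though the HDF5 group names no longer carry leading zeros.

  import numpy as np

  increments = ['inc0', 'inc10', 'inc100']                           # hypothetical DADF5 group names without leading zeros
  N_digits   = int(np.floor(np.log10(int(increments[-1][3:])))) + 1  # width of the largest increment number

  for inc in increments:
      file_out = '{}_inc{}.txt'.format('geom_load', inc[3:].zfill(N_digits))
      print(file_out)   # geom_load_inc000.txt, geom_load_inc010.txt, geom_load_inc100.txt
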
@@ -66,7 +66,7 @@ for filename in options.filenames:
       for i in f['/geometry/T_c']:
         grid.InsertNextCell(vtk.VTK_HEXAHEDRON,8,i-1)
 
-
+  N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
   for i,inc in enumerate(results.iter_visible('increments')):
     print('Output step {}/{}'.format(i+1,len(results.increments)))
     vtk_data = []

@@ -132,7 +132,9 @@ for filename in options.filenames:
     dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
     if not os.path.isdir(dirname):
       os.mkdir(dirname,0o755)
-    file_out = '{}_{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],inc,writer.GetDefaultFileExtension())
+    file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],
+                                    inc[3:].zfill(N_digits),
+                                    writer.GetDefaultFileExtension())
 
     writer.SetCompressorTypeToZLib()
     writer.SetDataModeToBinary()

@@ -52,6 +52,7 @@ for filename in options.filenames:
     Polydata.SetVerts(Vertices)
     Polydata.Modified()
 
+  N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
   for i,inc in enumerate(results.iter_visible('increments')):
     print('Output step {}/{}'.format(i+1,len(results.increments)))
     vtk_data = []

@@ -111,7 +112,9 @@ for filename in options.filenames:
     dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
     if not os.path.isdir(dirname):
       os.mkdir(dirname,0o755)
-    file_out = '{}_{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],inc,writer.GetDefaultFileExtension())
+    file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],
+                                    inc[3:].zfill(N_digits),
+                                    writer.GetDefaultFileExtension())
 
     writer.SetCompressorTypeToZLib()
     writer.SetDataModeToBinary()

@@ -30,7 +30,14 @@ class DADF5():
     """
     with h5py.File(fname,'r') as f:
 
-      if f.attrs['DADF5-major'] != 0 or not 2 <= f.attrs['DADF5-minor'] <= 3:
+      try:
+        self.version_major = f.attrs['DADF5_version_major']
+        self.version_minor = f.attrs['DADF5_version_minor']
+      except KeyError:
+        self.version_major = f.attrs['DADF5-major']
+        self.version_minor = f.attrs['DADF5-minor']
+
+      if self.version_major != 0 or not 2 <= self.version_minor <= 4:
         raise TypeError('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version']))
 
       self.structured = 'grid' in f['geometry'].attrs.keys()

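The same fallback can be written as a stand-alone helper; a sketch under the assumption that only the two attribute conventions shown above exist (the file name is a placeholder).

  import h5py

  def dadf5_version(fname):
      """Return (major,minor), accepting both the old 'DADF5-*' and the new 'DADF5_version_*' attributes."""
      with h5py.File(fname,'r') as f:
          try:
              return f.attrs['DADF5_version_major'], f.attrs['DADF5_version_minor']
          except KeyError:
              return f.attrs['DADF5-major'], f.attrs['DADF5-minor']
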
@@ -40,8 +47,9 @@ class DADF5():
       self.size = f['geometry'].attrs['size']
 
       r=re.compile('inc[0-9]+')
-      self.increments = [i for i in f.keys() if r.match(i)]
-      self.times = [round(f[i].attrs['time/s'],12) for i in self.increments]
+      increments_unsorted = {int(i[3:]):i for i in f.keys() if r.match(i)}
+      self.increments = [increments_unsorted[i] for i in sorted(increments_unsorted)]
+      self.times = [round(f[i].attrs['time/s'],12) for i in self.increments]
 
       self.Nmaterialpoints, self.Nconstituents = np.shape(f['mapping/cellResults/constituent'])
       self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])]

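The extra sorting step matters precisely because the group names lose their leading zeros: plain string sorting is no longer chronological. A small illustration with invented labels:

  labels = ['inc0', 'inc2', 'inc10']               # hypothetical group names without leading zeros

  print(sorted(labels))                            # ['inc0', 'inc10', 'inc2']  -- lexicographic, wrong order
  unsorted = {int(l[3:]): l for l in labels}       # key on the numeric part, as in __init__ above
  print([unsorted[k] for k in sorted(unsorted)])   # ['inc0', 'inc2', 'inc10']  -- chronological
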
@@ -165,7 +173,10 @@ class DADF5():
         end increment (included)
 
     """
-    self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','set')
+    if self.version_minor >= 4:
+      self.__manage_visible([    'inc{}'.format(i) for i in range(start,end+1)],'increments','set')
+    else:
+      self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','set')
 
 
   def add_by_increment(self,start,end):

@@ -180,7 +191,10 @@ class DADF5():
         end increment (included)
 
     """
-    self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','add')
+    if self.version_minor >= 4:
+      self.__manage_visible([    'inc{}'.format(i) for i in range(start,end+1)],'increments','add')
+    else:
+      self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','add')
 
 
   def del_by_increment(self,start,end):

@@ -195,7 +209,10 @@ class DADF5():
         end increment (included)
 
     """
-    self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','del')
+    if self.version_minor >= 4:
+      self.__manage_visible([    'inc{}'.format(i) for i in range(start,end+1)],'increments','del')
+    else:
+      self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','del')
 
 
   def iter_visible(self,what):

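The same version branch appears in set_by_increment, add_by_increment and del_by_increment; condensed into a hypothetical helper, the label construction it performs looks like this:

  def increment_labels(start, end, version_minor):
      # no leading zeros from DADF5 layout 0.4 on; zero-padded to five digits before that
      if version_minor >= 4:
          return ['inc{}'.format(i) for i in range(start, end + 1)]
      return ['inc{:05d}'.format(i) for i in range(start, end + 1)]

  print(increment_labels(0, 2, 4))   # ['inc0', 'inc1', 'inc2']
  print(increment_labels(0, 2, 3))   # ['inc00000', 'inc00001', 'inc00002']
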
@@ -420,6 +437,76 @@ class DADF5():
     else:
       with h5py.File(self.fname,'r') as f:
         return f['geometry/x_c'][()]
 
 
+  def add_absolute(self,x):
+    """
+    Add absolute value.
+
+    Parameters
+    ----------
+    x : str
+      Label of the dataset containing a scalar, vector, or tensor.
+
+    """
+    def __add_absolute(x):
+
+      return {
+        'data': np.abs(x['data']),
+        'label': '|{}|'.format(x['label']),
+        'meta': {
+          'Unit': x['meta']['Unit'],
+          'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
+          'Creator': 'dadf5.py:add_abs v{}'.format(version)
+          }
+        }
+
+    requested = [{'label':x,'arg':'x'}]
+
+    self.__add_generic_pointwise(__add_absolute,requested)
+
+
+  def add_calculation(self,formula,label,unit='n/a',description=None,vectorized=True):
+    """
+    Add result of a general formula.
+
+    Parameters
+    ----------
+    formula : str
+      Formula, refer to datasets by ‘#Label#‘.
+    label : str
+      Label of the dataset containing the result of the calculation.
+    unit : str, optional
+      Physical unit of the result.
+    description : str, optional
+      Human readable description of the result.
+    vectorized : bool, optional
+      Indicate whether the formula is written in vectorized form. Default is ‘True’.
+
+    """
+    if vectorized is not True:
+      raise NotImplementedError
+
+    def __add_calculation(**kwargs):
+
+      formula = kwargs['formula']
+      for d in re.findall(r'#(.*?)#',formula):
+        formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
+
+      return {
+        'data': eval(formula),
+        'label': kwargs['label'],
+        'meta': {
+          'Unit': kwargs['unit'],
+          'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
+          'Creator': 'dadf5.py:add_calculation v{}'.format(version)
+          }
+        }
+
+    requested = [{'label':d,'arg':d} for d in set(re.findall(r'#(.*?)#',formula))] # datasets used in the formula
+    pass_through = {'formula':formula,'label':label,'unit':unit,'description':description}
+
+    self.__add_generic_pointwise(__add_calculation,requested,pass_through)
+
+
   def add_Cauchy(self,P='P',F='F'):

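As a usage sketch of the formula mechanism in add_calculation (dataset labels and values invented): every '#Label#' token is rewritten into a lookup on the supplied arrays before the expression is evaluated.

  import re
  import numpy as np

  kwargs  = {'F': {'data': np.eye(3)[None,...]},          # stand-ins for datasets read from a DADF5 file
             'P': {'data': 2.0*np.eye(3)[None,...]}}
  formula = '0.5*(#P#+#F#)'

  for d in re.findall(r'#(.*?)#',formula):
      formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))

  print(formula)         # 0.5*(kwargs['P']['data']+kwargs['F']['data'])
  print(eval(formula))   # element-wise result, here 1.5*identity
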
@@ -453,6 +540,90 @@ class DADF5():
     self.__add_generic_pointwise(__add_Cauchy,requested)
 
 
+  def add_determinant(self,x):
+    """
+    Add the determinant of a tensor.
+
+    Parameters
+    ----------
+    x : str
+      Label of the dataset containing a tensor.
+
+    """
+    def __add_determinant(x):
+
+      return {
+        'data': np.linalg.det(x['data']),
+        'label': 'det({})'.format(x['label']),
+        'meta': {
+          'Unit': x['meta']['Unit'],
+          'Description': 'Determinant of tensor {} ({})'.format(x['label'],x['meta']['Description']),
+          'Creator': 'dadf5.py:add_determinant v{}'.format(version)
+          }
+        }
+
+    requested = [{'label':x,'arg':'x'}]
+
+    self.__add_generic_pointwise(__add_determinant,requested)
+
+
+  def add_deviator(self,x):
+    """
+    Add the deviatoric part of a tensor.
+
+    Parameters
+    ----------
+    x : str
+      Label of the dataset containing a tensor.
+
+    """
+    def __add_deviator(x):
+
+      if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])):
+        raise ValueError
+
+      return {
+        'data': mechanics.deviatoric_part(x['data']),
+        'label': 's_{}'.format(x['label']),
+        'meta': {
+          'Unit': x['meta']['Unit'],
+          'Description': 'Deviator of tensor {} ({})'.format(x['label'],x['meta']['Description']),
+          'Creator': 'dadf5.py:add_deviator v{}'.format(version)
+          }
+        }
+
+    requested = [{'label':x,'arg':'x'}]
+
+    self.__add_generic_pointwise(__add_deviator,requested)
+
+
+  def add_maximum_shear(self,x):
+    """
+    Add maximum shear components of symmetric tensor.
+
+    Parameters
+    ----------
+    x : str
+      Label of the dataset containing a symmetric tensor.
+
+    """
+    def __add_maximum_shear(x):
+
+      return {
+        'data': mechanics.maximum_shear(x['data']),
+        'label': 'max_shear({})'.format(x['label']),
+        'meta': {
+          'Unit': x['meta']['Unit'],
+          'Description': 'Maximum shear component of of {} ({})'.format(x['label'],x['meta']['Description']),
+          'Creator': 'dadf5.py:add_maximum_shear v{}'.format(version)
+          }
+        }
+
+    requested = [{'label':x,'arg':'x'}]
+
+    self.__add_generic_pointwise(__add_maximum_shear,requested)
+
+
   def add_Mises(self,x):
     """
     Add the equivalent Mises stress or strain of a symmetric tensor.

@@ -523,58 +694,33 @@ class DADF5():
     self.__add_generic_pointwise(__add_norm,requested,{'ord':ord})
 
 
-  def add_absolute(self,x):
+  def add_principal_components(self,x):
     """
-    Add absolute value.
+    Add principal components of symmetric tensor.
+
+    The principal components are sorted in descending order, each repeated according to its multiplicity.
 
     Parameters
     ----------
     x : str
-      Label of the dataset containing a scalar, vector, or tensor.
+      Label of the dataset containing a symmetric tensor.
 
     """
-    def __add_absolute(x):
+    def __add_principal_components(x):
 
       return {
-        'data': np.abs(x['data']),
-        'label': '|{}|'.format(x['label']),
+        'data': mechanics.principal_components(x['data']),
+        'label': 'lambda_{}'.format(x['label']),
         'meta': {
           'Unit': x['meta']['Unit'],
-          'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
-          'Creator': 'dadf5.py:add_abs v{}'.format(version)
+          'Description': 'Pricipal components of {} ({})'.format(x['label'],x['meta']['Description']),
+          'Creator': 'dadf5.py:add_principal_components v{}'.format(version)
           }
         }
 
     requested = [{'label':x,'arg':'x'}]
 
-    self.__add_generic_pointwise(__add_absolute,requested)
-
-
-  def add_determinant(self,x):
-    """
-    Add the determinant of a tensor.
-
-    Parameters
-    ----------
-    x : str
-      Label of the dataset containing a tensor.
-
-    """
-    def __add_determinant(x):
-
-      return {
-        'data': np.linalg.det(x['data']),
-        'label': 'det({})'.format(x['label']),
-        'meta': {
-          'Unit': x['meta']['Unit'],
-          'Description': 'Determinant of tensor {} ({})'.format(x['label'],x['meta']['Description']),
-          'Creator': 'dadf5.py:add_determinant v{}'.format(version)
-          }
-        }
-
-    requested = [{'label':x,'arg':'x'}]
-
-    self.__add_generic_pointwise(__add_determinant,requested)
+    self.__add_generic_pointwise(__add_principal_components,requested)
 
 
   def add_spherical(self,x):

@@ -607,79 +753,6 @@ class DADF5():
     self.__add_generic_pointwise(__add_spherical,requested)
 
 
-  def add_deviator(self,x):
-    """
-    Add the deviatoric part of a tensor.
-
-    Parameters
-    ----------
-    x : str
-      Label of the dataset containing a tensor.
-
-    """
-    def __add_deviator(x):
-
-      if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])):
-        raise ValueError
-
-      return {
-        'data': mechanics.deviatoric_part(x['data']),
-        'label': 's_{}'.format(x['label']),
-        'meta': {
-          'Unit': x['meta']['Unit'],
-          'Description': 'Deviator of tensor {} ({})'.format(x['label'],x['meta']['Description']),
-          'Creator': 'dadf5.py:add_deviator v{}'.format(version)
-          }
-        }
-
-    requested = [{'label':x,'arg':'x'}]
-
-    self.__add_generic_pointwise(__add_deviator,requested)
-
-
-  def add_calculation(self,formula,label,unit='n/a',description=None,vectorized=True):
-    """
-    Add result of a general formula.
-
-    Parameters
-    ----------
-    formula : str
-      Formula, refer to datasets by ‘#Label#‘.
-    label : str
-      Label of the dataset containing the result of the calculation.
-    unit : str, optional
-      Physical unit of the result.
-    description : str, optional
-      Human readable description of the result.
-    vectorized : bool, optional
-      Indicate whether the formula is written in vectorized form. Default is ‘True’.
-
-    """
-    if vectorized is not True:
-      raise NotImplementedError
-
-    def __add_calculation(**kwargs):
-
-      formula = kwargs['formula']
-      for d in re.findall(r'#(.*?)#',formula):
-        formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
-
-      return {
-        'data': eval(formula),
-        'label': kwargs['label'],
-        'meta': {
-          'Unit': kwargs['unit'],
-          'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
-          'Creator': 'dadf5.py:add_calculation v{}'.format(version)
-          }
-        }
-
-    requested = [{'label':d,'arg':d} for d in set(re.findall(r'#(.*?)#',formula))] # datasets used in the formula
-    pass_through = {'formula':formula,'label':label,'unit':unit,'description':description}
-
-    self.__add_generic_pointwise(__add_calculation,requested,pass_through)
 
 
   def add_strain_tensor(self,F='F',t='U',m=0):
     """
     Add strain tensor calculated from a deformation gradient.

@@ -712,62 +785,6 @@ class DADF5():
     requested = [{'label':F,'arg':'F'}]
 
     self.__add_generic_pointwise(__add_strain_tensor,requested,{'t':t,'m':m})
 
 
-  def add_principal_components(self,x):
-    """
-    Add principal components of symmetric tensor.
-
-    The principal components are sorted in descending order, each repeated according to its multiplicity.
-
-    Parameters
-    ----------
-    x : str
-      Label of the dataset containing a symmetric tensor.
-
-    """
-    def __add_principal_components(x):
-
-      return {
-        'data': mechanics.principal_components(x['data']),
-        'label': 'lambda_{}'.format(x['label']),
-        'meta': {
-          'Unit': x['meta']['Unit'],
-          'Description': 'Pricipal components of {} ({})'.format(x['label'],x['meta']['Description']),
-          'Creator': 'dadf5.py:add_principal_components v{}'.format(version)
-          }
-        }
-
-    requested = [{'label':x,'arg':'x'}]
-
-    self.__add_generic_pointwise(__add_principal_components,requested)
-
-
-  def add_maximum_shear(self,x):
-    """
-    Add maximum shear components of symmetric tensor.
-
-    Parameters
-    ----------
-    x : str
-      Label of the dataset containing a symmetric tensor.
-
-    """
-    def __add_maximum_shear(x):
-
-      return {
-        'data': mechanics.maximum_shear(x['data']),
-        'label': 'max_shear({})'.format(x['label']),
-        'meta': {
-          'Unit': x['meta']['Unit'],
-          'Description': 'Maximum shear component of of {} ({})'.format(x['label'],x['meta']['Description']),
-          'Creator': 'dadf5.py:add_maximum_shear v{}'.format(version)
-          }
-        }
-
-    requested = [{'label':x,'arg':'x'}]
-
-    self.__add_generic_pointwise(__add_maximum_shear,requested)
 
 
   def __add_generic_pointwise(self,func,datasets_requested,extra_args={}):

Binary file not shown.
@@ -23,13 +23,20 @@ def reference_dir(reference_dir_base):
 
 
 class TestDADF5:
-  def test_add_deviator(self,default):
-    default.add_deviator('P')
-    loc = {'P' :default.get_dataset_location('P'),
-           's_P':default.get_dataset_location('s_P')}
-    in_memory = mechanics.deviatoric_part(default.read_dataset(loc['P'],0))
-    in_file = default.read_dataset(loc['s_P'],0)
+
+  def test_time_increments(self,default):
+    shape = default.read_dataset(default.get_dataset_location('F'),0).shape
+    default.set_by_time(0.0,20.0)
+    for i in default.iter_visible('increments'):
+      assert shape == default.read_dataset(default.get_dataset_location('F'),0).shape
+
+
+  def test_add_absolute(self,default):
+    default.add_absolute('Fe')
+    loc = {'Fe': default.get_dataset_location('Fe'),
+           '|Fe|': default.get_dataset_location('|Fe|')}
+    in_memory = np.abs(default.read_dataset(loc['Fe'],0))
+    in_file = default.read_dataset(loc['|Fe|'],0)
+    assert np.allclose(in_memory,in_file)
 
 
   def test_add_Cauchy(self,default):

@@ -42,22 +49,30 @@ class TestDADF5:
     in_file = default.read_dataset(loc['sigma'],0)
     assert np.allclose(in_memory,in_file)
 
-  def test_add_absolute(self,default):
-    default.add_absolute('Fe')
-    loc = {'Fe': default.get_dataset_location('Fe'),
-           '|Fe|': default.get_dataset_location('|Fe|')}
-    in_memory = np.abs(default.read_dataset(loc['Fe'],0))
-    in_file = default.read_dataset(loc['|Fe|'],0)
-    assert np.allclose(in_memory,in_file)
-
   def test_add_determinant(self,default):
     default.add_determinant('P')
-    loc = {'P': default.get_dataset_location('P'),
-           'det(P)': default.get_dataset_location('det(P)')}
-    in_memory = np.linalg.det(default.read_dataset(loc['P'],0)).reshape(-1,1)
+    loc = {'P': default.get_dataset_location('P'),
+           'det(P)':default.get_dataset_location('det(P)')}
+    in_memory = np.linalg.det(default.read_dataset(loc['P'],0)).reshape((-1,1))
     in_file = default.read_dataset(loc['det(P)'],0)
     assert np.allclose(in_memory,in_file)
 
+  def test_add_deviator(self,default):
+    default.add_deviator('P')
+    loc = {'P' :default.get_dataset_location('P'),
+           's_P':default.get_dataset_location('s_P')}
+    in_memory = mechanics.deviatoric_part(default.read_dataset(loc['P'],0))
+    in_file = default.read_dataset(loc['s_P'],0)
+    assert np.allclose(in_memory,in_file)
+
   def test_add_norm(self,default):
     default.add_norm('F',1)
     loc = {'F': default.get_dataset_location('F'),
            '|F|_1':default.get_dataset_location('|F|_1')}
     in_memory = np.linalg.norm(default.read_dataset(loc['F'],0),ord=1,axis=(1,2),keepdims=True)
     in_file = default.read_dataset(loc['|F|_1'],0)
     assert np.allclose(in_memory,in_file)
 
   def test_add_spherical(self,default):
     default.add_spherical('P')
     loc = {'P': default.get_dataset_location('P'),

@@ -69,10 +69,9 @@ subroutine results_init
   write(6,'(a)') ' https://doi.org/10.1007/s40192-017-0084-5'
 
   resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
-  call HDF5_addAttribute(resultsFile,'DADF5-version',0.3_pReal)
-  call HDF5_addAttribute(resultsFile,'DADF5-major',0)
-  call HDF5_addAttribute(resultsFile,'DADF5-minor',3)
-  call HDF5_addAttribute(resultsFile,'DAMASK',DAMASKVERSION)
+  call HDF5_addAttribute(resultsFile,'DADF5_version_major',0)
+  call HDF5_addAttribute(resultsFile,'DADF5_version_minor',4)
+  call HDF5_addAttribute(resultsFile,'DAMASK_version',DAMASKVERSION)
   call get_command(commandLine)
   call HDF5_addAttribute(resultsFile,'call',trim(commandLine))
   call HDF5_closeGroup(results_addGroup('mapping'))

@@ -111,7 +110,7 @@ subroutine results_addIncrement(inc,time)
   real(pReal), intent(in) :: time
   character(len=pStringLen) :: incChar
 
-  write(incChar,'(i5.5)') inc ! allow up to 99999 increments
+  write(incChar,'(i10)') inc
   call HDF5_closeGroup(results_addGroup(trim('inc'//trim(adjustl(incChar)))))
   call results_setLink(trim('inc'//trim(adjustl(incChar))),'current')
   call HDF5_addAttribute(resultsFile,'time/s',time,trim('inc'//trim(adjustl(incChar))))

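A rough Python analogue of the Fortran format change above, to show the resulting group names (increment value invented): the old edit descriptor zero-padded to five digits and capped the increment count at 99999, the new one just prints the number and relies on adjustl to strip the leading blanks.

  inc = 10
  print('inc' + '{:05d}'.format(inc))           # inc00010  -- old layout, write(incChar,'(i5.5)')
  print('inc' + '{:10d}'.format(inc).strip())   # inc10     -- new layout, write(incChar,'(i10)') + adjustl
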