Merge branch 'misc-improvements' into less-shell-scripts

Martin Diehl 2020-05-26 07:24:03 +02:00
commit 908788271a
17 changed files with 179 additions and 109 deletions

View File

@ -110,7 +110,7 @@ for executable in icc icpc ifort ;do
done
firstLevel "MPI Wrappers"
for executable in mpicc mpiCC mpiicc mpic++ mpicpc mpicxx mpifort mpif90 mpif77; do
for executable in mpicc mpiCC mpiicc mpic++ mpiicpc mpicxx mpifort mpiifort mpif90 mpif77; do
getDetails $executable '-show'
done
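The loop above queries each MPI wrapper with '-show' to report the underlying compiler call. For reference, the same query can be reproduced outside the script; a minimal Python sketch (assumes the wrappers are on PATH and skips those that are not installed):

    import shutil
    import subprocess

    for wrapper in ['mpicc','mpiicc','mpiifort']:   # subset of the wrappers listed above
        if shutil.which(wrapper):                   # only query wrappers that are installed
            # '-show' prints the underlying compiler command line without compiling anything
            details = subprocess.run([wrapper,'-show'],capture_output=True,text=True).stdout.strip()
            print(wrapper,details)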

@ -1 +1 @@
Subproject commit 72d526e5750366a9efe4d1fd9d92e0d1ecd2cd38
Subproject commit 8dde2a68538b7cffbe9d370e2b60be90a31627ab

View File

@ -1 +1 @@
v2.0.3-2504-gcee9daff
v2.0.3-2514-g873b9fa8

View File

@ -1,6 +1,9 @@
###################################################################################################
# GNU Compiler
###################################################################################################
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "GCC Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-fopenmp")

View File

@ -1,6 +1,10 @@
###################################################################################################
# Intel Compiler
###################################################################################################
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 18.0)
message (FATAL_ERROR "Intel Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-qopenmp -parallel")
endif ()

View File

@ -1,4 +1,4 @@
"""Main aggregator."""
"""Tools for pre and post processing of DAMASK simulations."""
import os as _os
import re as _re

View File

@ -10,7 +10,6 @@ from . import VTK
from . import util
from . import Environment
from . import grid_filters
from . import Rotation
class Geom:
@ -243,7 +242,7 @@ class Geom:
def get_grid(self):
"""Return the grid discretization."""
return np.array(self.microstructure.shape)
return np.asarray(self.microstructure.shape)
def get_homogenization(self):
@ -605,7 +604,7 @@ class Geom:
origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid
#self.add_comments('geom.py:renumber v{}'.format(version)
#self.add_comments('geom.py:rotate v{}'.format(version)
return self.update(microstructure_in,origin=origin,rescale=True)
@ -613,11 +612,10 @@ class Geom:
"""Crop or enlarge/pad microstructure."""
if fill is None: fill = np.nanmax(self.microstructure) + 1
if offset is None: offset = 0
dtype = float if np.isnan(fill) or int(fill) != fill or self.microstructure.dtype==np.float else int
dtype = float if int(fill) != fill or self.microstructure.dtype==np.float else int
canvas = np.full(self.grid if grid is None else grid,
fill if fill is not None else np.nanmax(self.microstructure)+1,
self.microstructure.dtype)
fill if fill is not None else np.nanmax(self.microstructure)+1,dtype)
l = np.clip( offset, 0,np.minimum(self.grid +offset,grid)) # noqa
r = np.clip( offset+self.grid,0,np.minimum(self.grid*2+offset,grid))
@ -626,7 +624,7 @@ class Geom:
canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = self.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
#self.add_comments('geom.py:renumber v{}'.format(version)
#self.add_comments('geom.py:canvas v{}'.format(version)
return self.update(canvas,origin=self.origin+offset*self.size/self.grid,rescale=True)
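The clipped bounds above determine which slab of the old microstructure ends up where on the new canvas. A 1-D numpy sketch of the same arithmetic; the source-side bounds L and R are not visible in this hunk, so their symmetric form below is an assumption:

    import numpy as np

    grid_old = np.array([4])              # old grid size
    grid_new = np.array([6])              # requested canvas size
    offset   = np.array([2])              # shift of the old data on the new canvas

    l = np.clip( offset,          0,np.minimum(grid_old  +offset,grid_new))  # target start
    r = np.clip( offset+grid_old, 0,np.minimum(grid_old*2+offset,grid_new))  # target end
    L = np.clip(-offset,          0,np.minimum(grid_new  -offset,grid_old))  # source start (assumed)
    R = np.clip(-offset+grid_new, 0,np.minimum(grid_new*2-offset,grid_old))  # source end   (assumed)

    canvas = np.full(grid_new,-1)
    canvas[l[0]:r[0]] = np.arange(grid_old[0])[L[0]:R[0]]
    print(canvas)                         # [-1 -1  0  1  2  3]: old data placed at the offset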
@ -636,4 +634,5 @@ class Geom:
for from_ms,to_ms in zip(from_microstructure,to_microstructure):
substituted[self.microstructure==from_ms] = to_ms
#self.add_comments('geom.py:substitute v{}'.format(version)
return self.update(substituted)

View File

@ -229,19 +229,20 @@ class Symmetry:
Return inverse pole figure color if requested.
Bases are computed from
basis = {'cubic' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
[1.,0.,1.]/np.sqrt(2.), # direction of green
[1.,1.,1.]/np.sqrt(3.)]).T), # direction of blue
'hexagonal' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
[1.,0.,0.], # direction of green
[np.sqrt(3.),1.,0.]/np.sqrt(4.)]).T), # direction of blue
'tetragonal' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
[1.,0.,0.], # direction of green
[1.,1.,0.]/np.sqrt(2.)]).T), # direction of blue
'orthorhombic' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
[1.,0.,0.], # direction of green
[0.,1.,0.]]).T), # direction of blue
}
>>> basis = {'cubic' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
... [1.,0.,1.]/np.sqrt(2.), # direction of green
... [1.,1.,1.]/np.sqrt(3.)]).T), # direction of blue
... 'hexagonal' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
... [1.,0.,0.], # direction of green
... [np.sqrt(3.),1.,0.]/np.sqrt(4.)]).T), # direction of blue
... 'tetragonal' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
... [1.,0.,0.], # direction of green
... [1.,1.,0.]/np.sqrt(2.)]).T), # direction of blue
... 'orthorhombic': np.linalg.inv(np.array([[0.,0.,1.], # direction of red
... [1.,0.,0.], # direction of green
... [0.,1.,0.]]).T), # direction of blue
... }
"""
if self.lattice == 'cubic':
basis = {'improper':np.array([ [-1. , 0. , 1. ],

View File

@ -8,6 +8,7 @@ class Orientation:
Crystallographic orientation.
A crystallographic orientation contains a rotation and a lattice.
"""
__slots__ = ['rotation','lattice']
@ -49,8 +50,10 @@ class Orientation:
Disorientation between myself and given other orientation.
Rotation axis falls into SST if SST == True.
(Currently requires same symmetry for both orientations.
Look into A. Heinz and P. Neumann 1991 for cases with differing sym.)
Currently requires same symmetry for both orientations.
Look into A. Heinz and P. Neumann 1991 for cases with differing sym.
"""
if self.lattice.symmetry != other.lattice.symmetry:
raise NotImplementedError('disorientation between different symmetry classes not supported yet.')

View File

@ -1,7 +1,9 @@
import multiprocessing
import re
import inspect
import glob
import os
import datetime
import xml.etree.ElementTree as ET
import xml.dom.minidom
from functools import partial
@ -88,6 +90,8 @@ class Result:
self.fname = os.path.abspath(fname)
self._allow_overwrite = False
def __repr__(self):
"""Show selected data."""
@ -142,6 +146,7 @@ class Result:
choice = []
for c in iterator:
idx = np.searchsorted(self.times,c)
if idx >= len(self.times): continue
if np.isclose(c,self.times[idx]):
choice.append(self.increments[idx])
elif np.isclose(c,self.times[idx+1]):
@ -162,6 +167,16 @@ class Result:
self.selection[what] = diff_sorted
def enable_overwrite(self):
print(util.bcolors().WARNING,util.bcolors().BOLD,
'Warning: Enabled overwrite of existing datasets!',
util.bcolors().ENDC)
self._allow_overwrite = True
def disable_overwrite(self):
self._allow_overwrite = False
def incs_in_range(self,start,end):
selected = []
for i,inc in enumerate([int(i[3:]) for i in self.increments]):
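Typical use of the new overwrite switch, mirroring the test added later in this commit (a sketch; 'my_job.hdf5' stands for an existing DAMASK results file):

    import damask

    r = damask.Result('my_job.hdf5')      # placeholder file name
    r.add_Cauchy()                        # creates the dataset 'sigma'
    r.enable_overwrite()                  # prints a warning, then permits replacing existing datasets
    r.add_Cauchy()                        # recomputes 'sigma' and overwrites it in place
    r.disable_overwrite()                 # restore the default, protected behaviour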
@ -322,9 +337,10 @@ class Result:
Return groups that contain all requested datasets.
Only groups within
- inc?????/constituent/*_*/*
- inc?????/materialpoint/*_*/*
- inc?????/geometry/*
- inc*/constituent/*/*
- inc*/materialpoint/*/*
- inc*/geometry/*
are considered as they contain user-relevant data.
Single strings will be treated as list with one entry.
@ -488,7 +504,7 @@ class Result:
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'result.py:add_abs v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_absolute(self,x):
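The new 'Creator' entries take the name of the currently executing private helper from the call stack and strip its leading underscore, which yields the public method name without hard-coding it. A standalone illustration:

    import inspect

    def _add_absolute():
        # inspect.stack()[0] is the current frame record; element [3] is the function name
        return inspect.stack()[0][3][1:]   # drop the leading '_'

    print(_add_absolute())                 # prints 'add_absolute'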
@ -516,7 +532,7 @@ class Result:
'meta': {
'Unit': kwargs['unit'],
'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
'Creator': 'result.py:add_calculation v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_calculation(self,label,formula,unit='n/a',description=None,vectorized=True):
@ -555,7 +571,7 @@ class Result:
'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],
P['meta']['Description'])+\
'and {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_Cauchy v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_Cauchy(self,P='P',F='F'):
@ -581,7 +597,7 @@ class Result:
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_determinant v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_determinant(self,T):
@ -605,7 +621,7 @@ class Result:
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_deviator v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_deviator(self,T):
@ -629,7 +645,7 @@ class Result:
'meta' : {
'Unit': T_sym['meta']['Unit'],
'Description': 'Eigenvalues of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_eigenvalues v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_eigenvalues(self,T_sym):
@ -653,7 +669,7 @@ class Result:
'meta' : {
'Unit': '1',
'Description': 'Eigenvectors of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_eigenvectors v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_eigenvectors(self,T_sym):
@ -689,7 +705,7 @@ class Result:
'Unit': 'RGB (8bit)',
'Lattice': lattice,
'Description': 'Inverse Pole Figure (IPF) colors along sample direction [{} {} {}]'.format(*m),
'Creator': 'result.py:add_IPFcolor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_IPFcolor(self,q,l):
@ -715,7 +731,7 @@ class Result:
'meta': {
'Unit': T_sym['meta']['Unit'],
'Description': 'Maximum shear component of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_maximum_shear v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_maximum_shear(self,T_sym):
@ -742,7 +758,7 @@ class Result:
'meta': {
'Unit': T_sym['meta']['Unit'],
'Description': 'Mises equivalent {} of {} ({})'.format(t,T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_Mises v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_Mises(self,T_sym):
@ -778,7 +794,7 @@ class Result:
'meta': {
'Unit': x['meta']['Unit'],
'Description': '{}-norm of {} {} ({})'.format(o,t,x['label'],x['meta']['Description']),
'Creator': 'result.py:add_norm v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_norm(self,x,ord=None):
@ -806,7 +822,7 @@ class Result:
'Description': '2. Kirchhoff stress calculated from {} ({}) '.format(P['label'],
P['meta']['Description'])+\
'and {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_PK2 v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_PK2(self,P='P',F='F'):
@ -839,10 +855,10 @@ class Result:
'data': coords,
'label': 'p^{}_[{} {} {})'.format(u'' if polar else 'xy',*m),
'meta' : {
'Unit': '1',
'Unit': '1',
'Description': '{} coordinates of stereographic projection of pole (direction/plane) in crystal frame'\
.format('Polar' if polar else 'Cartesian'),
'Creator' : 'result.py:add_pole v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_pole(self,q,p,polar=False):
@ -870,7 +886,7 @@ class Result:
'meta': {
'Unit': F['meta']['Unit'],
'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_rotational_part v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_rotational_part(self,F):
@ -894,7 +910,7 @@ class Result:
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_spherical v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_spherical(self,T):
@ -918,7 +934,7 @@ class Result:
'meta': {
'Unit': F['meta']['Unit'],
'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_strain_tensor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_strain_tensor(self,F='F',t='V',m=0.0):
@ -950,7 +966,7 @@ class Result:
'Unit': F['meta']['Unit'],
'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
F['label'],F['meta']['Description']),
'Creator': 'result.py:add_stretch_tensor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_stretch_tensor(self,F='F',t='V'):
@ -1013,11 +1029,23 @@ class Result:
continue
lock.acquire()
with h5py.File(self.fname, 'a') as f:
try: # ToDo: Replace if exists?
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
try:
if self._allow_overwrite and result[0]+'/'+result[1]['label'] in f:
dataset = f[result[0]+'/'+result[1]['label']]
dataset[...] = result[1]['data']
dataset.attrs['Overwritten'] = 'Yes'.encode()
else:
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
now = datetime.datetime.now().astimezone()
dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
for l,v in result[1]['meta'].items():
dataset.attrs[l]=v.encode()
except OSError as err:
creator = 'damask.Result.{} v{}'.format(dataset.attrs['Creator'].decode(),version)
dataset.attrs['Creator'] = creator.encode()
except (OSError,RuntimeError) as err:
print('Could not add dataset: {}.'.format(err))
lock.release()
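The h5py side of the overwrite logic in isolation; a minimal sketch with placeholder file and dataset names, assuming the replacement data has the same shape as the existing dataset:

    import datetime
    import h5py
    import numpy as np

    data = np.arange(6.).reshape(2,3)
    with h5py.File('scratch.hdf5','a') as f:                  # placeholder file
        path = 'inc00000/constituent/1_example/x'             # placeholder dataset path
        if path in f:
            dataset = f[path]
            dataset[...] = data                               # write into the existing dataset
            dataset.attrs['Overwritten'] = 'Yes'.encode()
        else:
            group,label = path.rsplit('/',1)
            dataset = f.require_group(group).create_dataset(label,data=data)
        now = datetime.datetime.now().astimezone()
        dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z').encode()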

View File

@ -13,30 +13,31 @@ class Rotation:
u"""
Orientation stored with functionality for conversion to different representations.
The following conventions apply:
- coordinate frames are right-handed.
- a rotation angle ω is taken to be positive for a counterclockwise rotation
when viewing from the end point of the rotation axis towards the origin.
- rotations will be interpreted in the passive sense.
- Euler angle triplets are implemented using the Bunge convention,
with the angular ranges as [0, 2π],[0, π],[0, 2π].
- the rotation angle ω is limited to the interval [0, π].
- the real part of a quaternion is positive, Re(q) > 0
- P = -1 (as default).
Examples
--------
Rotate vector "a" (defined in coordinate system "A") to
coordinates "b" expressed in system "B":
- b = Q @ a
- b = np.dot(Q.asMatrix(),a)
References
----------
D. Rowenhorst et al., Modelling and Simulation in Materials Science and Engineering 23:083501, 2015
https://doi.org/10.1088/0965-0393/23/8/083501
Conventions
-----------
Convention 1: Coordinate frames are right-handed.
Convention 2: A rotation angle ω is taken to be positive for a counterclockwise rotation
when viewing from the end point of the rotation axis towards the origin.
Convention 3: Rotations will be interpreted in the passive sense.
Convention 4: Euler angle triplets are implemented using the Bunge convention,
with the angular ranges as [0, 2π],[0, π],[0, 2π].
Convention 5: The rotation angle ω is limited to the interval [0, π].
Convention 6: the real part of a quaternion is positive, Re(q) > 0
Convention 7: P = -1 (as default).
Usage
-----
Vector "a" (defined in coordinate system "A") is passively rotated
resulting in new coordinates "b" when expressed in system "B".
b = Q @ a
b = np.dot(Q.as_matrix(),a)
"""
__slots__ = ['quaternion']
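A small numeric illustration of the passive convention stated above, in plain numpy and independent of the class: for a frame B obtained by rotating frame A counterclockwise by 90° about z, the coordinates of a fixed vector transform as b = Q @ a.

    import numpy as np

    Q = np.array([[ 0., 1., 0.],          # passive rotation matrix, 90 degrees about z
                  [-1., 0., 0.],
                  [ 0., 0., 1.]])
    a = np.array([1.,0.,0.])              # vector expressed in frame A
    b = Q @ a                             # the same vector expressed in frame B: [0.,-1.,0.]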
@ -161,10 +162,10 @@ class Rotation:
if self.shape == ():
q = np.broadcast_to(self.quaternion,shape+(4,))
else:
q = np.block([np.broadcast_to(self.quaternion[...,0:1],shape),
np.broadcast_to(self.quaternion[...,1:2],shape),
np.broadcast_to(self.quaternion[...,2:3],shape),
np.broadcast_to(self.quaternion[...,3:4],shape)]).reshape(shape+(4,))
q = np.block([np.broadcast_to(self.quaternion[...,0:1],shape).reshape(shape+(1,)),
np.broadcast_to(self.quaternion[...,1:2],shape).reshape(shape+(1,)),
np.broadcast_to(self.quaternion[...,2:3],shape).reshape(shape+(1,)),
np.broadcast_to(self.quaternion[...,3:4],shape).reshape(shape+(1,))])
return self.__class__(q)
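Why the added reshape matters: np.block concatenates along the last existing axis, so without first reshaping each broadcast component to shape+(1,), the four quaternion components would be glued side by side instead of forming a trailing axis of length 4. A sketch of the pattern:

    import numpy as np

    q     = np.random.rand(2,4)           # two quaternions
    shape = (2,3)                          # target shape: each quaternion repeated 3 times

    q_b = np.block([np.broadcast_to(q[...,i:i+1],shape).reshape(shape+(1,)) for i in range(4)])

    assert q_b.shape == shape+(4,)
    assert np.allclose(q_b[:,0,:],q)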

View File

@ -120,9 +120,9 @@ class VTK:
Parameters
----------
fname : str
Filename for reading. Valid extensions are *.vtr, *.vtu, *.vtp, and *.vtk.
Filename for reading. Valid extensions are .vtr, .vtu, .vtp, and .vtk.
dataset_type : str, optional
Name of the vtk.vtkDataSet subclass when opening an *.vtk file. Valid types are vtkRectilinearGrid,
Name of the vtk.vtkDataSet subclass when opening an .vtk file. Valid types are vtkRectilinearGrid,
vtkUnstructuredGrid, and vtkPolyData.
"""

View File

@ -4,14 +4,13 @@ import pytest
import numpy as np
from damask import Symmetry
from damask import Lattice
class TestSymmetry:
@pytest.mark.parametrize('invalid_symmetry',['fcc','bcc','hello'])
def test_invalid_symmetry(self,invalid_symmetry):
with pytest.raises(KeyError):
s = Symmetry(invalid_symmetry)
s = Symmetry(invalid_symmetry) # noqa
def test_equal(self):
symmetry = random.choice(Symmetry.lattices)
@ -37,6 +36,6 @@ class TestSymmetry:
@pytest.mark.parametrize('function',['inFZ','inDisorientationSST'])
def test_invalid_argument(self,function):
s = Symmetry()
s = Symmetry() # noqa
with pytest.raises(ValueError):
eval('s.{}(np.ones(4))'.format(function))

View File

@ -1,8 +1,11 @@
import time
import shutil
import os
from datetime import datetime
import pytest
import numpy as np
import h5py
import damask
from damask import Result
@ -80,10 +83,6 @@ class TestResult:
with pytest.raises(AttributeError):
default.pick('invalid',True)
def test_add_invalid(self,default):
with pytest.raises(Exception):
default.add_calculation('#invalid#*2')
def test_add_absolute(self,default):
default.add_absolute('Fe')
loc = {'Fe': default.get_dataset_location('Fe'),
@ -263,6 +262,37 @@ class TestResult:
in_file = default.read_dataset(loc['V(F)'],0)
assert np.allclose(in_memory,in_file)
def test_add_invalid(self,default):
with pytest.raises(TypeError):
default.add_calculation('#invalid#*2')
@pytest.mark.parametrize('overwrite',['off','on'])
def test_add_overwrite(self,default,overwrite):
default.pick('times',default.times_in_range(0,np.inf)[-1])
default.add_Cauchy()
loc = default.get_dataset_location('sigma')
print(loc)
with h5py.File(default.fname,'r') as f:
created_first = f[loc[0]].attrs['Created'].decode()
created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')
if overwrite == 'on':
default.enable_overwrite()
else:
default.disable_overwrite()
time.sleep(2.)
default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
with h5py.File(default.fname,'r') as f:
created_second = f[loc[0]].attrs['Created'].decode()
created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
if overwrite == 'on':
assert created_first < created_second and np.allclose(default.read_dataset(loc),311.)
else:
assert created_first == created_second and not np.allclose(default.read_dataset(loc),311.)
@pytest.mark.parametrize('output',['F',[],['F','P']])
def test_vtk(self,default,output):
def test_vtk(self,tmp_path,default,output):
os.chdir(tmp_path)
default.to_vtk(output)

View File

@ -873,7 +873,7 @@ class TestRotation:
rot.shape + (np.random.randint(8,32),)
rot_broadcast = rot.broadcast_to(tuple(new_shape))
for i in range(rot_broadcast.shape[-1]):
assert (rot_broadcast.quaternion[...,i,:], rot.quaternion)
assert np.allclose(rot_broadcast.quaternion[...,i,:], rot.quaternion)
@pytest.mark.parametrize('function,invalid',[(Rotation.from_quaternion, np.array([-1,0,0,0])),

View File

@ -9,8 +9,6 @@
!> by DAMASK. Interpreting the command line arguments to get load case, geometry file,
!> and working directory.
!--------------------------------------------------------------------------------------------------
#define GCC_MIN 6
#define INTEL_MIN 1700
#define PETSC_MAJOR 3
#define PETSC_MINOR_MIN 10
#define PETSC_MINOR_MAX 13
@ -50,29 +48,6 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine DAMASK_interface_init
#include <petsc/finclude/petscsys.h>
#if defined(__GFORTRAN__) && __GNUC__<GCC_MIN
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
=============== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ======================
================== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ===================
===================== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ================
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
#endif
#if defined(__INTEL_COMPILER) && __INTEL_COMPILER<INTEL_MIN
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
================= THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION =======================
==================== THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION ====================
======================= THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION =================
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
#endif
#if PETSC_VERSION_MAJOR!=3 || PETSC_VERSION_MINOR<PETSC_MINOR_MIN || PETSC_VERSION_MINOR>PETSC_MINOR_MAX
===================================================================================================

View File

@ -311,6 +311,8 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIuni
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeScalarDataset_real
@ -340,6 +342,8 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIuni
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeVectorDataset_real
@ -391,6 +395,8 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeTensorDataset_real
@ -421,6 +427,8 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeVectorDataset_int
@ -451,6 +459,8 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeTensorDataset_int
@ -481,6 +491,8 @@ subroutine results_writeScalarDataset_rotation(group,dataset,label,description,l
call HDF5_addAttribute(groupHandle,'Lattice',lattice_structure,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeScalarDataset_rotation
@ -756,6 +768,21 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
end subroutine results_mapping_materialpoint
!--------------------------------------------------------------------------------------------------
!> @brief current date and time (including time zone information)
!--------------------------------------------------------------------------------------------------
character(len=24) function now()
character(len=5) :: zone
integer, dimension(8) :: values
call date_and_time(values=values,zone=zone)
write(now,'(i4.4,5(a,i2.2),a)') &
values(1),'-',values(2),'-',values(3),' ',values(5),':',values(6),':',values(7),zone
end function now
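The string written by now() is exactly what the Python side (the Result class and its test above) parses back with datetime.strptime; for instance, with illustrative values:

    from datetime import datetime
    stamp = datetime.strptime('2020-05-26 07:24:03+0200','%Y-%m-%d %H:%M:%S%z')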
!!--------------------------------------------------------------------------------------------------
!!> @brief adds the backward mapping from spatial position and constituent ID to results
!!--------------------------------------------------------------------------------------------------