Merge remote-tracking branch 'origin/development' into internal-restructure
commit 8d57252a54
@@ -84,7 +84,7 @@ for executable in python python3; do
 done
 secondLevel "Details on $DEFAULT_PYTHON:"
 echo $(ls -la $(which $DEFAULT_PYTHON))
-for module in numpy scipy pandas;do
+for module in numpy scipy pandas matplotlib yaml h5py;do
 thirdLevel $module
 $DEFAULT_PYTHON -c "import $module; \
 print('Version: {}'.format($module.__version__)); \
@@ -94,10 +94,6 @@ thirdLevel vtk
 $DEFAULT_PYTHON -c "import vtk; \
 print('Version: {}'.format(vtk.vtkVersion.GetVTKVersion())); \
 print('Location: {}'.format(vtk.__file__))"
-thirdLevel h5py
-$DEFAULT_PYTHON -c "import h5py; \
-print('Version: {}'.format(h5py.version.version)); \
-print('Location: {}'.format(h5py.__file__))"

 firstLevel "GNU Compiler Collection"
 for executable in gcc g++ gfortran ;do
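Note: the dedicated h5py block removed above becomes redundant once h5py joins the module loop. A minimal Python sketch of the same per-module report, assuming the listed modules are importable under these names:

import importlib

# module list mirrors the updated shell loop; 'yaml' is the import name used there
for name in ['numpy', 'scipy', 'pandas', 'matplotlib', 'yaml', 'h5py']:
    try:
        mod = importlib.import_module(name)
        print(f"{name}: version {getattr(mod, '__version__', 'unknown')}, location {mod.__file__}")
    except ImportError:
        print(f"{name}: not importable")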
PRIVATE (submodule)
@@ -1 +1 @@
-Subproject commit 2ed5cd4ba97b44ad9c8b61ced94060aee57a2dd8
+Subproject commit 3efdf7dd9de96fe6c55240ecf6d0d78d9d0e36ec
@@ -103,7 +103,6 @@ class ConfigMaterial(Config):
 """Check for completeness."""
 ok = True
 for top_level in ['homogenization','phase','material']:
-# ToDo: With python 3.8 as prerequisite we can shorten with :=
 ok &= top_level in self
 if top_level not in self: print(f'{top_level} entry missing')

@@ -203,7 +202,7 @@ class ConfigMaterial(Config):
 """
 dup = self.copy()
 for i,m in enumerate(dup['material']):
-if ID and i not in ID: continue
+if ID is not None and i not in ID: continue
 for c in m['constituents']:
 if constituent is not None and c not in constituent: continue
 try:
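Note: the explicit `is not None` test matters when an empty selection is passed; a plain truthiness test would treat it like "no filter requested". A tiny sketch with hypothetical values:

def select(indices, ID=None):
    # new behaviour: only skip filtering when ID was genuinely not given
    if ID is not None:
        return [i for i in indices if i in ID]
    return list(indices)

print(select(range(3)))        # [0, 1, 2] -> no filter requested
print(select(range(3), []))    # []        -> empty selection keeps nothing
# with `if ID and i not in ID: continue`, the empty list would fall through and everything would be kept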
@@ -227,7 +226,7 @@ class ConfigMaterial(Config):
 """
 dup = self.copy()
 for i,m in enumerate(dup['material']):
-if ID and i not in ID: continue
+if ID is not None and i not in ID: continue
 try:
 m['homogenization'] = mapping[m['homogenization']]
 except KeyError:
@@ -249,7 +248,7 @@ class ConfigMaterial(Config):
 Examples
 --------
 >>> import damask
->>> O = damask.Rotation.from_random(3).as_quaternion()
+>>> O = damask.Rotation.from_random(3)
 >>> phase = ['Aluminum','Steel','Aluminum']
 >>> m = damask.ConfigMaterial().material_add(constituents={'phase':phase,'O':O},
 ... homogenization='SX')
@@ -202,7 +202,7 @@ class Grid:
 Geometry file to read.

 """
-warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0', DeprecationWarning)
+warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0', DeprecationWarning,2)
 try:
 f = open(fname)
 except TypeError:
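Note: the added third argument to warnings.warn is the stacklevel, so the deprecation warning is attributed to the caller rather than to the damask internals. A standalone sketch (function name hypothetical):

import warnings

def load_ascii_geom_like():
    # stacklevel 2 points the reported location at whoever called this function
    warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0',
                  DeprecationWarning, 2)

warnings.simplefilter('always')
load_ascii_geom_like()  # the warning now names this call site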
@@ -541,7 +541,7 @@ class Grid:
 Compress geometry with 'x of y' and 'a to b'.

 """
-warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0', DeprecationWarning)
+warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0', DeprecationWarning,2)
 header = [f'{len(self.comments)+4} header'] + self.comments \
 + ['grid a {} b {} c {}'.format(*self.cells),
 'size x {} y {} z {}'.format(*self.size),
@@ -760,7 +760,7 @@ class Grid:

 """
 if fill is None: fill = np.nanmax(self.material) + 1
-dtype = float if np.isnan(fill) or int(fill) != fill or self.material.dtype==np.float else int
+dtype = float if isinstance(fill,float) or self.material.dtype in np.sctypes['float'] else int

 material = self.material
 # These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
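Note: the rewritten dtype test drops the np.float alias (deprecated in NumPy 1.20) and simply asks whether either the fill value or the stored material is floating point. A sketch of the same decision; np.sctypes is available in the NumPy versions current here but was removed in NumPy 2.0:

import numpy as np

def fill_dtype(fill, material):
    # float if the fill value is a float or the grid already stores floats, else int
    return float if isinstance(fill, float) or material.dtype in np.sctypes['float'] else int

print(fill_dtype(1,   np.zeros(3, dtype=int)))         # <class 'int'>
print(fill_dtype(1.5, np.zeros(3, dtype=int)))         # <class 'float'>
print(fill_dtype(1,   np.zeros(3, dtype=np.float32)))  # <class 'float'>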
@@ -125,9 +125,18 @@ class Rotation:
 return np.logical_not(self==other)


+def __array__(self):
+"""Initializer for numpy."""
+return self.quaternion
+
+
+@property
+def size(self):
+return self.quaternion[...,0].size
+
 @property
 def shape(self):
-return self.quaternion.shape[:-1]
+return self.quaternion[...,0].shape


 def __len__(self):
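Note: the new __array__ hook is what lets NumPy turn a Rotation directly into its quaternion array; the simplified ConfigMaterial docstring above and test_to_numpy below rely on it. A short sketch, assuming the damask package is importable:

import numpy as np
import damask

r = damask.Rotation.from_random(3)
q = np.array(r)                        # invokes Rotation.__array__
assert q.shape == (3, 4)               # one unit quaternion per rotation
assert np.all(q == r.as_quaternion())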
@@ -246,8 +246,8 @@ class VTK:
 raise ValueError('No label defined for numpy.ndarray')

 N_data = data.shape[0]
-d = np_to_vtk((data.astype(np.float32) if data.dtype in [np.float64, np.float128]
-else data).reshape(N_data,-1),deep=True) # avoid large files
+d = np_to_vtk((data.astype(np.single) if data.dtype in [np.double, np.longdouble] else
+data).reshape(N_data,-1),deep=True) # avoid large files
 d.SetName(label)

 if N_data == N_points:
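Note: the condition is rewritten with portable dtype names (np.float128 is not defined on every platform); doubles are still downcast to single precision before the VTK conversion to keep files small. A NumPy-only sketch of the cast logic; np_to_vtk itself comes from vtk.util.numpy_support and is not exercised here:

import numpy as np

def prepare_for_vtk(data):
    N_data = data.shape[0]
    # downcast double/extended precision to single precision, leave other dtypes alone
    out = data.astype(np.single) if data.dtype in [np.double, np.longdouble] else data
    return out.reshape(N_data, -1)

print(prepare_for_vtk(np.zeros((5, 3), dtype=np.double)).dtype)  # float32
print(prepare_for_vtk(np.zeros((5, 3), dtype=np.int64)).dtype)   # int64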
@@ -183,7 +183,7 @@ def scale_to_coprime(v):
 # Python 3.9 provides math.lcm, see https://stackoverflow.com/questions/51716916.
 return a * b // np.gcd(a, b)

-m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(np.int)
+m = (np.array(v) * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v)) ** 0.5).astype(int)
 m = m//reduce(np.gcd,m)

 with np.errstate(invalid='ignore'):
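Note: only the cast changes (built-in int instead of the deprecated np.int alias); the local lcm helper remains because math.lcm needs Python 3.9. The identity it relies on:

import numpy as np
from functools import reduce

def lcm(a, b):
    # least common multiple via lcm(a, b) = a * b / gcd(a, b)
    return a * b // np.gcd(a, b)

print(lcm(4, 6))               # 12
print(reduce(lcm, [2, 3, 4]))  # 12, reduce extends the pairwise lcm to a sequence
# on Python >= 3.9, math.lcm(2, 3, 4) gives the same result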
@@ -6,28 +6,29 @@ with open(Path(__file__).parent/'damask/VERSION') as f:
 version = re.sub(r'(-([^-]*)).*$',r'.\2',re.sub(r'^v(\d+\.\d+(\.\d+)?)',r'\1',f.readline().strip()))

 setuptools.setup(
-name="damask",
+name='damask',
 version=version,
-author="The DAMASK team",
+author='The DAMASK team',
-author_email="damask@mpie.de",
+author_email='damask@mpie.de',
-description="DAMASK library",
+description='DAMASK library',
-long_description="Python library for pre and post processing of DAMASK simulations",
+long_description='Python library for pre and post processing of DAMASK simulations',
-url="https://damask.mpie.de",
+url='https://damask.mpie.de',
 packages=setuptools.find_packages(),
 include_package_data=True,
+python_requires = '>=3.6',
 install_requires = [
-"pandas", # requires numpy
+'pandas>=0.24', # requires numpy
-"scipy",
+'scipy>=1.2',
-"h5py", # requires numpy
+'h5py>=2.9', # requires numpy
-"vtk",
+'vtk>=8.1',
-"matplotlib", # requires numpy, pillow
+'matplotlib>=3.0', # requires numpy, pillow
-"pyaml"
+'pyaml>=3.12'
 ],
 classifiers = [
-"Intended Audience :: Science/Research",
+'Intended Audience :: Science/Research',
-"Topic :: Scientific/Engineering",
+'Topic :: Scientific/Engineering',
-"Programming Language :: Python :: 3",
+'Programming Language :: Python :: 3',
-"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
+'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
-"Operating System :: OS Independent",
+'Operating System :: OS Independent',
 ],
 )
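Note: a small convenience sketch for checking an environment against the new minimum versions; the pin list is copied from the diff, and importlib.metadata needs Python 3.8+, so this is not part of setup.py itself:

from importlib.metadata import version, PackageNotFoundError

pins = {'pandas': '0.24', 'scipy': '1.2', 'h5py': '2.9',
        'vtk': '8.1', 'matplotlib': '3.0', 'pyaml': '3.12'}

for package, minimum in pins.items():
    try:
        print(f'{package}: installed {version(package)}, required >= {minimum}')
    except PackageNotFoundError:
        print(f'{package}: not installed, required >= {minimum}')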
@@ -347,7 +347,7 @@ class TestGrid:
 @pytest.mark.parametrize('approach',['Laguerre','Voronoi'])
 def test_tessellate_bicrystal(self,approach):
 cells = np.random.randint(5,10,3)*2
-size = cells.astype(np.float)
+size = cells.astype(float)
 seeds = np.vstack((size*np.array([0.5,0.25,0.5]),size*np.array([0.5,0.75,0.5])))
 material = np.zeros(cells)
 material[:,cells[1]//2:,:] = 1
@@ -689,6 +689,10 @@ class TestRotation:
 with pytest.raises(TypeError):
 Rotation(np.ones(3))

+def test_to_numpy(self):
+r = Rotation.from_random(np.random.randint(0,10,4))
+assert np.all(r.as_quaternion() == np.array(r))
+
 @pytest.mark.parametrize('degrees',[True,False])
 def test_Eulers(self,set_of_rotations,degrees):
 for rot in set_of_rotations:
|
@ -804,7 +808,11 @@ class TestRotation:
|
||||||
r = Rotation.from_random()
|
r = Rotation.from_random()
|
||||||
assert r == ~~r
|
assert r == ~~r
|
||||||
|
|
||||||
@pytest.mark.parametrize('shape',[None,1,(1,),(4,2),(1,1,1)])
|
@pytest.mark.parametrize('shape',[1,(1,),(4,2),(1,1,1),tuple(np.random.randint(0,10,4))])
|
||||||
|
def test_size(self,shape):
|
||||||
|
assert Rotation.from_random(shape).size == np.prod(shape)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('shape',[None,1,(1,),(4,2),(1,1,1),tuple(np.random.randint(0,10,4))])
|
||||||
def test_shape(self,shape):
|
def test_shape(self,shape):
|
||||||
r = Rotation.from_random(shape=shape)
|
r = Rotation.from_random(shape=shape)
|
||||||
assert r.shape == (shape if isinstance(shape,tuple) else (shape,) if shape else ())
|
assert r.shape == (shape if isinstance(shape,tuple) else (shape,) if shape else ())
|
||||||
|
|
|
@@ -75,7 +75,6 @@ end subroutine CPFEM_initAll
 subroutine CPFEM_init

 integer(HID_T) :: fileHandle
-character(len=pStringLen) :: fileName


 print'(/,a)', ' <<<+- CPFEM init -+>>>'; flush(IO_STDOUT)
@@ -83,8 +82,8 @@ subroutine CPFEM_init

 if (interface_restartInc > 0) then
 print'(/,a,i0,a)', ' reading restart information of increment from file'; flush(IO_STDOUT)
-write(fileName,'(a,i0,a)') trim(getSolverJobName())//'_',worldrank,'.hdf5'
-fileHandle = HDF5_openFile(fileName)
+fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')

 call homogenization_restartRead(fileHandle)
 call phase_restartRead(fileHandle)
@@ -101,13 +100,11 @@ end subroutine CPFEM_init
 subroutine CPFEM_restartWrite

 integer(HID_T) :: fileHandle
-character(len=pStringLen) :: fileName


 print*, ' writing field and constitutive data required for restart to file';flush(IO_STDOUT)

-write(fileName,'(a,i0,a)') trim(getSolverJobName())//'_',worldrank,'.hdf5'
-fileHandle = HDF5_openFile(fileName,'a')
+fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')

 call homogenization_restartWrite(fileHandle)
 call phase_restartWrite(fileHandle)
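Note: with the per-rank file name gone, all ranks share a single <job>_restart.hdf5. A hedged h5py sketch for inspecting such a file from Python; the file name stem stands in for getSolverJobName() and the group layout is whatever the solver wrote:

import h5py

with h5py.File('job_restart.hdf5', 'r') as f:   # 'job' is a placeholder job name
    f.visit(print)                              # list every group/dataset in the restart file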
@@ -71,6 +71,12 @@ module HDF5_utilities
 module procedure HDF5_addAttribute_real_array
 end interface HDF5_addAttribute

+#ifdef PETSc
+logical, parameter, private :: parallel_default = .true.
+#else
+logical, parameter, private :: parallel_default = .false.
+#endif
+
 contains


@@ -105,16 +111,16 @@ end subroutine HDF5_utilities_init
 !--------------------------------------------------------------------------------------------------
 !> @brief open and initializes HDF5 output file
 !--------------------------------------------------------------------------------------------------
-integer(HID_T) function HDF5_openFile(fileName,mode,parallel)
+integer(HID_T) function HDF5_openFile(fileName,mode)

 character(len=*), intent(in) :: fileName
 character, intent(in), optional :: mode
-logical, intent(in), optional :: parallel

 character :: m
 integer(HID_T) :: plist_id
 integer :: hdferr


 if (present(mode)) then
 m = mode
 else
@@ -125,10 +131,8 @@ integer(HID_T) function HDF5_openFile(fileName,mode,parallel)
 if(hdferr < 0) error stop 'HDF5 error'

 #ifdef PETSc
-if (present(parallel)) then; if (parallel) then
-call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
-if(hdferr < 0) error stop 'HDF5 error'
-endif; endif
+call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
+if(hdferr < 0) error stop 'HDF5 error'
 #endif

 if (m == 'w') then
@@ -547,7 +551,7 @@ subroutine HDF5_read_real1(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
@@ -587,7 +591,7 @@ subroutine HDF5_read_real2(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
@@ -627,7 +631,7 @@ subroutine HDF5_read_real3(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
@@ -667,7 +671,7 @@ subroutine HDF5_read_real4(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
@@ -707,7 +711,7 @@ subroutine HDF5_read_real5(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
@@ -747,7 +751,7 @@ subroutine HDF5_read_real6(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
@@ -787,7 +791,7 @@ subroutine HDF5_read_real7(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
@@ -829,7 +833,7 @@ subroutine HDF5_read_int1(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
@@ -869,7 +873,7 @@ subroutine HDF5_read_int2(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
@@ -909,7 +913,7 @@ subroutine HDF5_read_int3(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
@@ -949,7 +953,7 @@ subroutine HDF5_read_int4(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
@@ -989,7 +993,7 @@ subroutine HDF5_read_int5(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
@@ -1029,7 +1033,7 @@ subroutine HDF5_read_int6(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
@@ -1069,7 +1073,7 @@ subroutine HDF5_read_int7(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,parallel)
 else
 call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,parallel_default)
 endif

 call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
@@ -1109,7 +1113,7 @@ subroutine HDF5_write_real1(loc_id,dataset,datasetName,parallel)
 myStart, totalShape,loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape,loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+myStart, totalShape,loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1150,7 +1154,7 @@ subroutine HDF5_write_real2(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1191,7 +1195,7 @@ subroutine HDF5_write_real3(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1232,7 +1236,7 @@ subroutine HDF5_write_real4(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1274,7 +1278,7 @@ subroutine HDF5_write_real5(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1315,7 +1319,7 @@ subroutine HDF5_write_real6(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1356,7 +1360,7 @@ subroutine HDF5_write_real7(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_DOUBLE,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1398,7 +1402,7 @@ subroutine HDF5_write_int1(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1439,7 +1443,7 @@ subroutine HDF5_write_int2(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1480,7 +1484,7 @@ subroutine HDF5_write_int3(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1521,7 +1525,7 @@ subroutine HDF5_write_int4(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1562,7 +1566,7 @@ subroutine HDF5_write_int5(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1603,7 +1607,7 @@ subroutine HDF5_write_int6(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -1644,7 +1648,7 @@ subroutine HDF5_write_int7(loc_id,dataset,datasetName,parallel)
 myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel)
 else
 call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+myStart, totalShape, loc_id,myShape,datasetName,H5T_NATIVE_INTEGER,parallel_default)
 endif

 if (product(totalShape) /= 0) then
@@ -225,8 +225,8 @@ program DAMASK_grid
 loadCases(l)%t = step_discretization%get_asFloat('t')
 loadCases(l)%N = step_discretization%get_asInt ('N')
 loadCases(l)%r = step_discretization%get_asFloat('r', defaultVal= 1.0_pReal)
-loadCases(l)%f_restart = step_discretization%get_asInt ('f_restart', defaultVal=huge(0))

+loadCases(l)%f_restart = load_step%get_asInt('f_restart', defaultVal=huge(0))
 loadCases(l)%f_out = load_step%get_asInt('f_out', defaultVal=1)
 loadCases(l)%estimate_rate = (load_step%get_asBool('estimate_rate',defaultVal=.true.) .and. &
 merge(.true.,.false.,l > 1))
@@ -68,8 +68,11 @@ subroutine discretization_grid_init(restart)

 print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(IO_STDOUT)

-if(worldrank == 0) call readVTR(grid,geomSize,origin,materialAt_global)
+if(worldrank == 0) then
+call readVTR(grid,geomSize,origin,materialAt_global)
+else
+allocate(materialAt_global(0)) ! needed for IntelMPI
+endif

 call MPI_Bcast(grid,3,MPI_INTEGER,0,PETSC_COMM_WORLD, ierr)
 if (ierr /= 0) error stop 'MPI error'
@@ -108,8 +108,6 @@ subroutine grid_mechanical_FEM_init
 u_current,u_lastInc
 PetscInt, dimension(0:worldsize-1) :: localK
 integer(HID_T) :: fileHandle, groupHandle
-character(len=pStringLen) :: &
-fileName
 class(tNode), pointer :: &
 num_grid, &
 debug_grid
@@ -234,8 +232,7 @@ subroutine grid_mechanical_FEM_init
 restartRead: if (interface_restartInc > 0) then
 print'(/,a,i0,a)', ' reading restart data of increment ', interface_restartInc, ' from file'

-write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
-fileHandle = HDF5_openFile(fileName)
+fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
 groupHandle = HDF5_openGroup(fileHandle,'solver')

 call HDF5_read(groupHandle,P_aim, 'P_aim')
@@ -422,7 +419,7 @@ subroutine grid_mechanical_FEM_restartWrite
 PetscErrorCode :: ierr
 integer(HID_T) :: fileHandle, groupHandle
 PetscScalar, dimension(:,:,:,:), pointer :: u_current,u_lastInc
-character(len=pStringLen) :: fileName

 call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
 CHKERRQ(ierr)
@@ -431,8 +428,7 @@ subroutine grid_mechanical_FEM_restartWrite

 print*, 'writing solver data required for restart to file'; flush(IO_STDOUT)

-write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
-fileHandle = HDF5_openFile(fileName,'w')
+fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
 groupHandle = HDF5_addGroup(fileHandle,'solver')

 call HDF5_write(groupHandle,P_aim, 'P_aim')
@@ -99,8 +99,6 @@ subroutine grid_mechanical_spectral_basic_init
 PetscInt, dimension(0:worldsize-1) :: localK
 integer(HID_T) :: fileHandle, groupHandle
 integer :: fileUnit
-character(len=pStringLen) :: &
-fileName
 class (tNode), pointer :: &
 num_grid, &
 debug_grid
@@ -182,8 +180,7 @@ subroutine grid_mechanical_spectral_basic_init
 restartRead: if (interface_restartInc > 0) then
 print'(/,a,i0,a)', ' reading restart data of increment ', interface_restartInc, ' from file'

-write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
-fileHandle = HDF5_openFile(fileName)
+fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
 groupHandle = HDF5_openGroup(fileHandle,'solver')

 call HDF5_read(groupHandle,P_aim, 'P_aim')
@@ -365,14 +362,12 @@ subroutine grid_mechanical_spectral_basic_restartWrite
 PetscErrorCode :: ierr
 integer(HID_T) :: fileHandle, groupHandle
 PetscScalar, dimension(:,:,:,:), pointer :: F
-character(len=pStringLen) :: fileName

 call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)

 print*, 'writing solver data required for restart to file'; flush(IO_STDOUT)

-write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
-fileHandle = HDF5_openFile(fileName,'w')
+fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
 groupHandle = HDF5_addGroup(fileHandle,'solver')

 call HDF5_write(groupHandle,P_aim, 'P_aim')
@@ -112,8 +112,6 @@ subroutine grid_mechanical_spectral_polarisation_init
 PetscInt, dimension(0:worldsize-1) :: localK
 integer(HID_T) :: fileHandle, groupHandle
 integer :: fileUnit
-character(len=pStringLen) :: &
-fileName
 class (tNode), pointer :: &
 num_grid, &
 debug_grid
@@ -204,8 +202,7 @@ subroutine grid_mechanical_spectral_polarisation_init
 restartRead: if (interface_restartInc > 0) then
 print'(/,a,i0,a)', ' reading restart data of increment ', interface_restartInc, ' from file'

-write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
-fileHandle = HDF5_openFile(fileName)
+fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
 groupHandle = HDF5_openGroup(fileHandle,'solver')

 call HDF5_read(groupHandle,P_aim, 'P_aim')
@@ -419,7 +416,6 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite
 PetscErrorCode :: ierr
 integer(HID_T) :: fileHandle, groupHandle
 PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau
-character(len=pStringLen) :: fileName

 call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
 F => FandF_tau(0: 8,:,:,:)
@@ -427,8 +423,7 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite

 print*, 'writing solver data required for restart to file'; flush(IO_STDOUT)

-write(fileName,'(a,a,i0,a)') trim(getSolverJobName()),'_',worldrank,'.hdf5'
-fileHandle = HDF5_openFile(fileName,'w')
+fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
 groupHandle = HDF5_addGroup(fileHandle,'solver')

 call HDF5_write(groupHandle,F_aim, 'P_aim')
@@ -451,7 +451,7 @@ subroutine homogenization_restartWrite(fileHandle)

 groupHandle(2) = HDF5_addGroup(groupHandle(1),material_name_homogenization(ho))

-call HDF5_read(groupHandle(2),homogState(ho)%state,'omega') ! ToDo: should be done by mech
+call HDF5_write(groupHandle(2),homogState(ho)%state,'omega') ! ToDo: should be done by mech

 call HDF5_closeGroup(groupHandle(2))

@@ -478,7 +478,7 @@ subroutine homogenization_restartRead(fileHandle)

 groupHandle(2) = HDF5_openGroup(groupHandle(1),material_name_homogenization(ho))

-call HDF5_write(groupHandle(2),homogState(ho)%state,'omega') ! ToDo: should be done by mech
+call HDF5_read(groupHandle(2),homogState(ho)%state,'omega') ! ToDo: should be done by mech

 call HDF5_closeGroup(groupHandle(2))

@@ -65,7 +65,7 @@ subroutine results_init(restart)
 print*, 'https://doi.org/10.1007/s40192-017-0084-5'//IO_EOL

 if(.not. restart) then
-resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
+resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','w')
 call results_addAttribute('DADF5_version_major',0)
 call results_addAttribute('DADF5_version_minor',11)
 call results_addAttribute('DAMASK_version',DAMASKVERSION)
@@ -83,7 +83,7 @@ end subroutine results_init
 !--------------------------------------------------------------------------------------------------
 subroutine results_openJobFile

-resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','a',.true.)
+resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','a')

 end subroutine results_openJobFile

@@ -289,11 +289,7 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIuni

 groupHandle = results_openGroup(group)

-#ifdef PETSc
-call HDF5_write(groupHandle,dataset,label,.true.)
-#else
-call HDF5_write(groupHandle,dataset,label,.false.)
-#endif
+call HDF5_write(groupHandle,dataset,label)

 if (HDF5_objectExists(groupHandle,label)) &
 call HDF5_addAttribute(groupHandle,'Description',description,label)
@@ -320,11 +316,7 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIuni

 groupHandle = results_openGroup(group)

-#ifdef PETSc
-call HDF5_write(groupHandle,dataset,label,.true.)
-#else
-call HDF5_write(groupHandle,dataset,label,.false.)
-#endif
+call HDF5_write(groupHandle,dataset,label)

 if (HDF5_objectExists(groupHandle,label)) &
 call HDF5_addAttribute(groupHandle,'Description',description,label)
@@ -362,7 +354,7 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
 endif

 if(transposed_) then
-if(size(dataset,1) /= size(dataset,2)) call IO_error(0,ext_msg='transpose non-symmetric tensor')
+if(size(dataset,1) /= size(dataset,2)) error stop 'transpose non-symmetric tensor'
 allocate(dataset_transposed,mold=dataset)
 do i=1,size(dataset_transposed,3)
 dataset_transposed(:,:,i) = transpose(dataset(:,:,i))
@@ -373,11 +365,7 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni

 groupHandle = results_openGroup(group)

-#ifdef PETSc
-call HDF5_write(groupHandle,dataset_transposed,label,.true.)
-#else
-call HDF5_write(groupHandle,dataset_transposed,label,.false.)
-#endif
+call HDF5_write(groupHandle,dataset_transposed,label)

 if (HDF5_objectExists(groupHandle,label)) &
 call HDF5_addAttribute(groupHandle,'Description',description,label)
@@ -405,11 +393,7 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit

 groupHandle = results_openGroup(group)

-#ifdef PETSc
-call HDF5_write(groupHandle,dataset,label,.true.)
-#else
-call HDF5_write(groupHandle,dataset,label,.false.)
-#endif
+call HDF5_write(groupHandle,dataset,label)

 if (HDF5_objectExists(groupHandle,label)) &
 call HDF5_addAttribute(groupHandle,'Description',description,label)
@@ -437,11 +421,7 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit

 groupHandle = results_openGroup(group)

-#ifdef PETSc
-call HDF5_write(groupHandle,dataset,label,.true.)
-#else
-call HDF5_write(groupHandle,dataset,label,.false.)
-#endif
+call HDF5_write(groupHandle,dataset,label)

 if (HDF5_objectExists(groupHandle,label)) &
 call HDF5_addAttribute(groupHandle,'Description',description,label)
@@ -577,7 +557,7 @@ subroutine results_mapping_phase(phaseAt,memberAtLocal,label)

 !--------------------------------------------------------------------------------------------------
 ! write the components of the compound type individually
-call h5pset_preserve_f(plist_id, .TRUE., hdferr)
+call h5pset_preserve_f(plist_id, .true., hdferr)
 if(hdferr < 0) error stop 'HDF5 error'

 loc_id = results_openGroup('/mapping')
@@ -733,7 +713,8 @@ subroutine results_mapping_homogenization(homogenizationAt,memberAtLocal,label)

 !--------------------------------------------------------------------------------------------------
 ! write the components of the compound type individually
-call h5pset_preserve_f(plist_id, .TRUE., hdferr)
+call h5pset_preserve_f(plist_id, .true., hdferr)
+if(hdferr < 0) error stop 'HDF5 error'

 loc_id = results_openGroup('/mapping')
 call h5dcreate_f(loc_id, 'homogenization', dtype_id, filespace_id, dset_id, hdferr)