Merge branch 'store-sim-setup' into 'development'

store information on slip and twin systems

See merge request damask/DAMASK!422
Vitesh 2021-08-03 13:16:59 +00:00
commit 4a8c836117
16 changed files with 301 additions and 91 deletions

@@ -1 +1 @@
-Subproject commit 72c58103860e127d37ccf3a06827331de29406ca
+Subproject commit 9699f20f21f8a5f532c735a1aa9daeba395da94d

View File

@@ -99,8 +99,10 @@ class Result:
self.version_major = f.attrs['DADF5_version_major']
self.version_minor = f.attrs['DADF5_version_minor']
-if self.version_major != 0 or not 12 <= self.version_minor <= 13:
+if self.version_major != 0 or not 12 <= self.version_minor <= 14:
raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
+if self.version_major == 0 and self.version_minor < 14:
+    self.export_setup = None
self.structured = 'cells' in f['geometry'].attrs.keys()
@@ -1395,7 +1397,7 @@ class Result:
def export_XDMF(self,output='*'):
"""
-Write XDMF file to directly visualize data in DADF5 file.
+Write XDMF file to directly visualize data from DADF5 file.
The XDMF format is only supported for structured grids
with single phase and single constituent.
@@ -1748,3 +1750,32 @@ class Result:
if flatten: r = util.dict_flatten(r)
return None if (type(r) == dict and r == {}) else r
def export_setup(self,output='*',overwrite=False):
"""
Export configuration files.
Parameters
----------
output : (list of) str, optional
Names of the datasets to export to the file.
Defaults to '*', in which case all datasets are exported.
overwrite : boolean, optional
Overwrite existing configuration files.
Defaults to False.
"""
def export(name,obj,output,overwrite):
if type(obj) == h5py.Dataset and _match(output,[name]):
d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode()
if not Path(name).exists() or overwrite:
with open(name,'w') as f_out: f_out.write(obj[()].decode())
print(f"Exported {d} to '{name}'.")
else:
print(f"'{name}' exists, {d} not exported.")
elif type(obj) == h5py.Group:
os.makedirs(name, exist_ok=True)
with h5py.File(self.fname,'r') as f_in:
f_in['setup'].visititems(partial(export,output=output,overwrite=overwrite))
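For context, a minimal usage sketch of the new Result.export_setup() method (the result file name 'my_simulation.hdf5' is made up for illustration; exported files are written relative to the working directory, as the open(name,'w') call above suggests):

import damask

r = damask.Result('my_simulation.hdf5')
r.export_setup()                                          # export all stored setup files
r.export_setup(output='material.yaml',overwrite=True)     # re-export only material.yaml, replacing an existing copy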

View File

@@ -4,22 +4,23 @@
!> @brief CPFEM engine
!--------------------------------------------------------------------------------------------------
module CPFEM
+use DAMASK_interface
use prec
-use math
+use IO
-use rotations
use YAML_types
use YAML_parse
-use discretization_marc
-use material
-use config
-use homogenization
-use IO
-use discretization
-use DAMASK_interface
use HDF5_utilities
use results
+use config
+use math
+use rotations
use lattice
+use material
use phase
+use homogenization
+use discretization
+use discretization_marc
implicit none
private
@@ -68,7 +69,7 @@ contains
!--------------------------------------------------------------------------------------------------
-!> @brief call all module initializations
+!> @brief Initialize all modules.
!--------------------------------------------------------------------------------------------------
subroutine CPFEM_initAll
@@ -77,13 +78,13 @@ subroutine CPFEM_initAll
call IO_init
call YAML_types_init
call YAML_parse_init
+call HDF5_utilities_init
+call results_init(.false.)
call config_init
call math_init
call rotations_init
-call HDF5_utilities_init
-call results_init(.false.)
-call discretization_marc_init
call lattice_init
+call discretization_marc_init
call material_init(.false.)
call phase_init
call homogenization_init

View File

@@ -4,28 +4,29 @@
!> @brief needs a good name and description
!--------------------------------------------------------------------------------------------------
module CPFEM2
-use prec
use parallelization
-use config
+use DAMASK_interface
-use math
+use prec
-use rotations
+use IO
use YAML_types
use YAML_parse
-use material
-use lattice
-use IO
-use base64
-use DAMASK_interface
-use discretization
use HDF5
use HDF5_utilities
use results
-use homogenization
+use config
+use math
+use rotations
+use lattice
+use material
use phase
+use homogenization
+use discretization
#if defined(MESH)
use FEM_quadrature
use discretization_mesh
#elif defined(GRID)
+use base64
use discretization_grid
#endif
@@ -36,7 +37,7 @@ contains
!--------------------------------------------------------------------------------------------------
-!> @brief call all module initializations
+!> @brief Initialize all modules.
!--------------------------------------------------------------------------------------------------
subroutine CPFEM_initAll
@@ -44,18 +45,19 @@ subroutine CPFEM_initAll
call DAMASK_interface_init ! Spectral and FEM interface to commandline
call prec_init
call IO_init
-call base64_init
+#if defined(MESH)
-#ifdef MESH
call FEM_quadrature_init
+#elif defined(GRID)
+call base64_init
#endif
call YAML_types_init
call YAML_parse_init
+call HDF5_utilities_init
+call results_init(restart=interface_restartInc>0)
call config_init
call math_init
call rotations_init
call lattice_init
-call HDF5_utilities_init
-call results_init(restart=interface_restartInc>0)
#if defined(MESH)
call discretization_mesh_init(restart=interface_restartInc>0)
#elif defined(GRID)

View File

@@ -85,6 +85,7 @@ module HDF5_utilities
HDF5_utilities_init, &
HDF5_read, &
HDF5_write, &
+HDF5_write_str, &
HDF5_addAttribute, &
HDF5_addGroup, &
HDF5_openGroup, &
@@ -128,10 +129,11 @@ end subroutine HDF5_utilities_init
!--------------------------------------------------------------------------------------------------
!> @brief open and initializes HDF5 output file
!--------------------------------------------------------------------------------------------------
-integer(HID_T) function HDF5_openFile(fileName,mode)
+integer(HID_T) function HDF5_openFile(fileName,mode,parallel)
character(len=*), intent(in) :: fileName
character, intent(in), optional :: mode
+logical, intent(in), optional :: parallel
character :: m
integer(HID_T) :: plist_id
@@ -148,7 +150,11 @@ integer(HID_T) function HDF5_openFile(fileName,mode)
if(hdferr < 0) error stop 'HDF5 error'
#ifdef PETSC
-call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
+if (present(parallel)) then
+if (parallel) call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
+else
+call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
+endif
if(hdferr < 0) error stop 'HDF5 error'
#endif
@@ -1467,6 +1473,48 @@ subroutine HDF5_write_real7(dataset,loc_id,datasetName,parallel)
end subroutine HDF5_write_real7
!--------------------------------------------------------------------------------------------------
!> @brief Write dataset of type string (scalar).
!> @details Not collective, must be called by one process at a time.
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_str(dataset,loc_id,datasetName)
character(len=*), intent(in) :: dataset
integer(HID_T), intent(in) :: loc_id
character(len=*), intent(in) :: datasetName !< name of the dataset in the file
integer(HID_T) :: filetype_id, space_id, dataset_id
integer :: hdferr
character(len=len_trim(dataset)+1,kind=C_CHAR), dimension(1), target :: dataset_
type(C_PTR), target, dimension(1) :: ptr
dataset_(1) = trim(dataset)//C_NULL_CHAR
ptr(1) = c_loc(dataset_(1))
call h5tcopy_f(H5T_STRING, filetype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tset_size_f(filetype_id, int(len(dataset_),HSIZE_T), hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5screate_f(H5S_SCALAR_F, space_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5dcreate_f(loc_id, datasetName, H5T_STRING, space_id, dataset_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5dwrite_f(dataset_id, H5T_STRING, c_loc(ptr), hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5dclose_f(dataset_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(space_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(filetype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_write_str
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type integer with 1 dimension
!--------------------------------------------------------------------------------------------------
@@ -1872,7 +1920,7 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T
!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties (is collective when reading in parallel)
+! creating a property list for transfer properties (is collective when writing in parallel)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
#ifdef PETSC
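For readers of the resulting DADF5 files, a small h5py sketch (not part of this commit) of how a string dataset written by HDF5_write_str can be read back; the job name 'my_simulation' and the presence of setup/material.yaml are assumptions based on the config.f90 changes further down:

import h5py

with h5py.File('my_simulation.hdf5','r') as f:
    print(list(f['setup']))                     # names of the stored configuration files
    ds = f['setup/material.yaml']
    print(ds.attrs['description'])              # metadata attached via executionStamp
    print(ds[()].decode())                      # fixed-length byte string -> text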

View File

@@ -119,27 +119,28 @@ function IO_read(fileName) result(fileContent)
character(len=:), allocatable :: fileContent
integer :: &
-fileLength, &
fileUnit, &
myStat
+integer(pI64) :: &
+fileLength
inquire(file = fileName, size=fileLength)
open(newunit=fileUnit, file=fileName, access='stream',&
status='old', position='rewind', action='read',iostat=myStat)
-if(myStat /= 0) call IO_error(100,ext_msg=trim(fileName))
+if (myStat /= 0) call IO_error(100,ext_msg=trim(fileName))
allocate(character(len=fileLength)::fileContent)
-if(fileLength==0) then
+if (fileLength==0) then
close(fileUnit)
return
endif
read(fileUnit,iostat=myStat) fileContent
-if(myStat /= 0) call IO_error(102,ext_msg=trim(fileName))
+if (myStat /= 0) call IO_error(102,ext_msg=trim(fileName))
close(fileUnit)
if (scan(fileContent(:index(fileContent,LF)),CR//LF) /= 0) fileContent = CRLF2LF(fileContent)
-if(fileContent(fileLength:fileLength) /= IO_EOL) fileContent = fileContent//IO_EOL ! ensure EOL@EOF
+if (fileContent(fileLength:fileLength) /= IO_EOL) fileContent = fileContent//IO_EOL ! ensure EOL@EOF
end function IO_read

View File

@@ -216,7 +216,13 @@ subroutine inputRead(elem,node0_elem,connectivity_elem,materialAt)
mapElemSet !< list of elements in elementSet
-inputFile = IO_readlines(trim(getSolverJobName())//trim(InputFileExtension))
+call results_openJobFile
+call results_writeDataset_str(IO_read(trim(getSolverJobName())//InputFileExtension), 'setup', &
+trim(getSolverJobName())//InputFileExtension, &
+'MSC.Marc input deck')
+call results_closeJobFile
+inputFile = IO_readlines(trim(getSolverJobName())//InputFileExtension)
call inputRead_fileFormat(fileFormatVersion, &
inputFile)
call inputRead_tableStyles(initialcondTableStyle,hypoelasticTableStyle, &

View File

@@ -14,7 +14,7 @@ module YAML_parse
public :: &
YAML_parse_init, &
-YAML_parse_file
+YAML_parse_str
contains
@@ -29,16 +29,16 @@ end subroutine YAML_parse_init
!--------------------------------------------------------------------------------------------------
-!> @brief Parse a YAML file into a structure of nodes.
+!> @brief Parse a YAML string into a structure of nodes.
!--------------------------------------------------------------------------------------------------
-function YAML_parse_file(fname) result(node)
+function YAML_parse_str(str) result(node)
-character(len=*), intent(in) :: fname
+character(len=*), intent(in) :: str
class (tNode), pointer :: node
-node => parse_flow(to_flow(IO_read(fname)))
+node => parse_flow(to_flow(str))
-end function YAML_parse_file
+end function YAML_parse_str
!--------------------------------------------------------------------------------------------------

View File

@@ -7,18 +7,18 @@
#include "IO.f90"
#include "YAML_types.f90"
#include "YAML_parse.f90"
+#include "HDF5_utilities.f90"
+#include "results.f90"
#include "config.f90"
#include "LAPACK_interface.f90"
#include "math.f90"
#include "rotations.f90"
+#include "lattice.f90"
#include "element.f90"
-#include "HDF5_utilities.f90"
-#include "results.f90"
#include "geometry_plastic_nonlocal.f90"
#include "discretization.f90"
#include "Marc/discretization_Marc.f90"
#include "material.f90"
-#include "lattice.f90"
#include "phase.f90"
#include "phase_mechanical.f90"
#include "phase_mechanical_elastic.f90"

View File

@@ -8,7 +8,8 @@ module config
use IO
use YAML_parse
use YAML_types
+use results
+use parallelization
implicit none
private
@@ -31,6 +32,7 @@ subroutine config_init
print'(/,a)', ' <<<+- config init -+>>>'; flush(IO_STDOUT)
call parse_material
+call parse_numerics
call parse_debug
@@ -41,15 +43,25 @@ end subroutine config_init
!--------------------------------------------------------------------------------------------------
!> @brief Read material.yaml or <jobname>.yaml.
!--------------------------------------------------------------------------------------------------
-subroutine parse_material
+subroutine parse_material()
logical :: fileExists
+character(len=:), allocatable :: fileContent
inquire(file='material.yaml',exist=fileExists)
if(.not. fileExists) call IO_error(100,ext_msg='material.yaml')
-print*, 'reading material.yaml'; flush(IO_STDOUT)
-config_material => YAML_parse_file('material.yaml')
+if (worldrank == 0) then
+print*, 'reading material.yaml'; flush(IO_STDOUT)
+fileContent = IO_read('material.yaml')
+call results_openJobFile(parallel=.false.)
+call results_writeDataset_str(fileContent,'setup','material.yaml','main configuration')
+call results_closeJobFile
+endif
+call parallelization_bcast_str(fileContent)
+config_material => YAML_parse_str(fileContent)
end subroutine parse_material
@@ -57,16 +69,28 @@ end subroutine parse_material
!--------------------------------------------------------------------------------------------------
!> @brief Read numerics.yaml.
!--------------------------------------------------------------------------------------------------
-subroutine parse_numerics
+subroutine parse_numerics()
-logical :: fexist
+logical :: fileExists
+character(len=:), allocatable :: fileContent
config_numerics => emptyDict
-inquire(file='numerics.yaml', exist=fexist)
-if (fexist) then
+inquire(file='numerics.yaml', exist=fileExists)
-print*, 'reading numerics.yaml'; flush(IO_STDOUT)
+if (fileExists) then
-config_numerics => YAML_parse_file('numerics.yaml')
+if (worldrank == 0) then
+print*, 'reading numerics.yaml'; flush(IO_STDOUT)
+fileContent = IO_read('numerics.yaml')
+call results_openJobFile(parallel=.false.)
+call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration')
+call results_closeJobFile
+endif
+call parallelization_bcast_str(fileContent)
+config_numerics => YAML_parse_str(fileContent)
endif
end subroutine parse_numerics
@@ -75,17 +99,29 @@ end subroutine parse_numerics
!--------------------------------------------------------------------------------------------------
!> @brief Read debug.yaml.
!--------------------------------------------------------------------------------------------------
-subroutine parse_debug
+subroutine parse_debug()
-logical :: fexist
+logical :: fileExists
+character(len=:), allocatable :: fileContent
config_debug => emptyDict
-inquire(file='debug.yaml', exist=fexist)
-fileExists: if (fexist) then
+inquire(file='debug.yaml', exist=fileExists)
-print*, 'reading debug.yaml'; flush(IO_STDOUT)
+if (fileExists) then
-config_debug => YAML_parse_file('debug.yaml')
-endif fileExists
+if (worldrank == 0) then
+print*, 'reading debug.yaml'; flush(IO_STDOUT)
+fileContent = IO_read('debug.yaml')
+call results_openJobFile(parallel=.false.)
+call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration')
+call results_closeJobFile
+endif
+call parallelization_bcast_str(fileContent)
+config_debug => YAML_parse_str(fileContent)
+endif
end subroutine parse_debug

View File

@@ -107,6 +107,8 @@ program DAMASK_grid
step_bc, &
step_mech, &
step_discretization
+character(len=:), allocatable :: &
+fileContent, fname
!--------------------------------------------------------------------------------------------------
! init DAMASK (all modules)
@@ -127,7 +129,17 @@ program DAMASK_grid
if (stagItMax < 0) call IO_error(301,ext_msg='maxStaggeredIter')
if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')
-config_load => YAML_parse_file(trim(interface_loadFile))
+if (worldrank == 0) then
+fileContent = IO_read(interface_loadFile)
+fname = interface_loadFile
+if (scan(fname,'/') /= 0) fname = fname(scan(fname,'/',.true.)+1:)
+call results_openJobFile(parallel=.false.)
+call results_writeDataset_str(fileContent,'setup',fname,'load case definition (grid solver)')
+call results_closeJobFile
+endif
+call parallelization_bcast_str(fileContent)
+config_load => YAML_parse_str(fileContent)
solver => config_load%get('solver')
!--------------------------------------------------------------------------------------------------

View File

@@ -68,11 +68,21 @@ subroutine discretization_grid_init(restart)
devNull, z, z_offset
integer, dimension(worldsize) :: &
displs, sendcounts
+character(len=:), allocatable :: &
+fileContent, fname
print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(IO_STDOUT)
if(worldrank == 0) then
-call readVTI(grid,geomSize,origin,materialAt_global)
+fileContent = IO_read(interface_geomFile)
+call readVTI(grid,geomSize,origin,materialAt_global,fileContent)
+fname = interface_geomFile
+if (scan(fname,'/') /= 0) fname = fname(scan(fname,'/',.true.)+1:)
+call results_openJobFile(parallel=.false.)
+call results_writeDataset_str(fileContent,'setup',fname,'geometry definition (grid solver)')
+call results_closeJobFile
else
allocate(materialAt_global(0)) ! needed for IntelMPI
endif
@@ -157,7 +167,8 @@ end subroutine discretization_grid_init
!> @brief Parse vtk image data (.vti)
!> @details https://vtk.org/Wiki/VTK_XML_Formats
!--------------------------------------------------------------------------------------------------
-subroutine readVTI(grid,geomSize,origin,material)
+subroutine readVTI(grid,geomSize,origin,material, &
+fileContent)
integer, dimension(3), intent(out) :: &
grid ! grid (across all processes!)
@@ -166,28 +177,19 @@ subroutine readVTI(grid,geomSize,origin,material)
origin ! origin (across all processes!)
integer, dimension(:), intent(out), allocatable :: &
material
+character(len=*), intent(in) :: &
+fileContent
-character(len=:), allocatable :: fileContent, dataType, headerType
+character(len=:), allocatable :: dataType, headerType
logical :: inFile,inImage,gotCellData,compressed
-integer :: fileUnit, myStat
integer(pI64) :: &
-fileLength, & !< length of the geom file (in characters)
startPos, endPos, &
s
grid = -1
geomSize = -1.0_pReal
-!--------------------------------------------------------------------------------------------------
-! read raw data as stream
-inquire(file = trim(interface_geomFile), size=fileLength)
-open(newunit=fileUnit, file=trim(interface_geomFile), access='stream',&
-status='old', position='rewind', action='read',iostat=myStat)
-if(myStat /= 0) call IO_error(100,ext_msg=trim(interface_geomFile))
-allocate(character(len=fileLength)::fileContent)
-read(fileUnit) fileContent
-close(fileUnit)
inFile = .false.
inImage = .false.
gotCelldata = .false.

View File

@@ -1258,7 +1258,7 @@ subroutine selfTest
error stop 'math_sym33to6/math_6toSym33'
call random_number(t66)
-if(any(dNeq(math_sym3333to66(math_66toSym3333(t66)),t66))) &
+if(any(dNeq(math_sym3333to66(math_66toSym3333(t66)),t66,1.0e-15_pReal))) &
error stop 'math_sym3333to66/math_66toSym3333'
call random_number(v6)

View File

@@ -24,9 +24,18 @@ module parallelization
worldrank = 0, & !< MPI worldrank (/=0 for MPI simulations only)
worldsize = 1 !< MPI worldsize (/=1 for MPI simulations only)
-#ifdef PETSC
+#ifndef PETSC
+public :: parallelization_bcast_str
+contains
+subroutine parallelization_bcast_str(string)
+character(len=:), allocatable, intent(inout) :: string
+end subroutine parallelization_bcast_str
+#else
public :: &
-parallelization_init
+parallelization_init, &
+parallelization_bcast_str
contains
@@ -101,6 +110,27 @@ subroutine parallelization_init
!$ call omp_set_num_threads(OMP_NUM_THREADS)
end subroutine parallelization_init
!--------------------------------------------------------------------------------------------------
!> @brief Broadcast a string from process 0.
!--------------------------------------------------------------------------------------------------
subroutine parallelization_bcast_str(string)
character(len=:), allocatable, intent(inout) :: string
integer :: strlen, ierr ! pI64 for strlen not supported by MPI
if (worldrank == 0) strlen = len(string)
call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD, ierr)
if (worldrank /= 0) allocate(character(len=strlen)::string)
call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD, ierr)
end subroutine parallelization_bcast_str
#endif
end module parallelization
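The same length-first broadcast pattern, sketched in Python with mpi4py (not DAMASK code; the file name 'material.yaml' is only an example): rank 0 reads the file, the byte count is broadcast so the other ranks can allocate a buffer, then the characters follow.

from mpi4py import MPI

comm = MPI.COMM_WORLD

def bcast_str(string=None):
    data = string.encode() if comm.rank == 0 else None
    length = comm.bcast(len(data) if comm.rank == 0 else None, root=0)   # broadcast the length first
    buf = bytearray(data) if comm.rank == 0 else bytearray(length)       # receiving ranks allocate
    comm.Bcast(buf, root=0)                                              # broadcast the characters
    return buf.decode()

content = open('material.yaml').read() if comm.rank == 0 else None
content = bcast_str(content)
print(comm.rank, len(content))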

View File

@@ -89,6 +89,7 @@ end subroutine prec_init
! replaces "==" but for certain (relative) tolerance. Counterpart to dNeq
! https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/
! AlmostEqualRelative
+! ToDo: Use 'spacing': https://gcc.gnu.org/onlinedocs/gfortran/SPACING.html#SPACING
!--------------------------------------------------------------------------------------------------
logical elemental pure function dEq(a,b,tol)

View File

@@ -52,6 +52,7 @@ module results
results_openGroup, &
results_closeGroup, &
results_writeDataset, &
+results_writeDataset_str, &
results_setLink, &
results_addAttribute, &
results_removeLink, &
@@ -64,16 +65,20 @@ subroutine results_init(restart)
logical, intent(in) :: restart
character(len=pPathLen) :: commandLine
+integer :: hdferr
+integer(HID_T) :: group_id
+character(len=:), allocatable :: date
print'(/,a)', ' <<<+- results init -+>>>'; flush(IO_STDOUT)
print*, 'M. Diehl et al., Integrating Materials and Manufacturing Innovation 6(1):83–91, 2017'
print*, 'https://doi.org/10.1007/s40192-017-0084-5'//IO_EOL
-if(.not. restart) then
+if (.not. restart) then
resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','w')
call results_addAttribute('DADF5_version_major',0)
-call results_addAttribute('DADF5_version_minor',13)
+call results_addAttribute('DADF5_version_minor',14)
call get_command_argument(0,commandLine)
call results_addAttribute('creator',trim(commandLine)//' '//DAMASKVERSION)
call results_addAttribute('created',now())
@@ -81,18 +86,34 @@ subroutine results_init(restart)
call results_addAttribute('call',trim(commandLine))
call results_closeGroup(results_addGroup('cell_to'))
call results_addAttribute('description','mappings to place data in space','cell_to')
-call results_closeJobFile
+call results_closeGroup(results_addGroup('setup'))
+call results_addAttribute('description','input data used to run the simulation','setup')
+else
+date = now()
+call results_openJobFile
+call get_command(commandLine)
+call results_addAttribute('call (restart at '//date//')',trim(commandLine))
+call h5gmove_f(resultsFile,'setup','tmp',hdferr)
+call results_addAttribute('description','input data used to run the simulation up to restart at '//date,'tmp')
+call results_closeGroup(results_addGroup('setup'))
+call results_addAttribute('description','input data used to run the simulation','setup')
+call h5gmove_f(resultsFile,'tmp','setup/previous',hdferr)
endif
+call results_closeJobFile
end subroutine results_init
!--------------------------------------------------------------------------------------------------
!> @brief opens the results file to append data
!--------------------------------------------------------------------------------------------------
-subroutine results_openJobFile
+subroutine results_openJobFile(parallel)
-resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','a')
+logical, intent(in), optional :: parallel
+resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','a',parallel)
end subroutine results_openJobFile
@@ -297,6 +318,25 @@ subroutine results_removeLink(link)
end subroutine results_removeLink
!--------------------------------------------------------------------------------------------------
!> @brief Store string dataset.
!> @details Not collective, must be called by one process at a time.
!--------------------------------------------------------------------------------------------------
subroutine results_writeDataset_str(dataset,group,label,description)
character(len=*), intent(in) :: label,group,description,dataset
integer(HID_T) :: groupHandle
groupHandle = results_openGroup(group)
call HDF5_write_str(dataset,groupHandle,label)
call executionStamp(group//'/'//label,description)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeDataset_str
!--------------------------------------------------------------------------------------------------
!> @brief Store real scalar dataset with associated metadata.
!--------------------------------------------------------------------------------------------------