From 58c1f5fdc1f4ac58b5456b9282376e2a0b174acc Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 26 Jul 2021 22:39:23 +0200 Subject: [PATCH 01/14] write string, only from one MPI process needed to store simulation setup. I don't see a situation where strings are distributed over multiple processes --- src/HDF5_utilities.f90 | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index c981fad53..7de6be431 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -85,6 +85,7 @@ module HDF5_utilities HDF5_utilities_init, & HDF5_read, & HDF5_write, & + HDF5_write_str, & HDF5_addAttribute, & HDF5_addGroup, & HDF5_openGroup, & @@ -1467,6 +1468,37 @@ subroutine HDF5_write_real7(dataset,loc_id,datasetName,parallel) end subroutine HDF5_write_real7 +subroutine HDF5_write_str(dataset,loc_id,datasetName) + + character(len=*), intent(in) :: dataset + integer(HID_T), intent(in) :: loc_id + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + + INTEGER(HID_T) :: filetype_id, space_id, dataset_id + INTEGER :: hdferr + + character(len=len_trim(dataset)+1,kind=C_CHAR), dimension(1), target :: dataset_ + type(C_PTR), target, dimension(1) :: ptr + + + dataset_(1) = trim(dataset)//C_NULL_CHAR + ptr(1) = c_loc(dataset_(1)) + + call h5tcopy_f(H5T_STRING, filetype_id, hdferr) + call h5tset_size_f(filetype_id, int(len(dataset_),HSIZE_T), hdferr) + + call h5screate_f(H5S_SCALAR_F, space_id, hdferr) + call h5dcreate_f(loc_id, datasetName, H5T_STRING, space_id, dataset_id, hdferr) + + call h5dwrite_f(dataset_id, H5T_STRING, c_loc(ptr), hdferr); + + call h5dclose_f(dataset_id, hdferr) + call h5sclose_f(space_id, hdferr) + call h5tclose_f(filetype_id, hdferr) + +end subroutine HDF5_write_str + + !-------------------------------------------------------------------------------------------------- !> @brief write dataset of type integer with 1 dimension !-------------------------------------------------------------------------------------------------- @@ -1872,7 +1904,7 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, & integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T !------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties (is collective when reading in parallel) +! 
creating a property list for transfer properties (is collective when writing in parallel) call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) if(hdferr < 0) error stop 'HDF5 error' #ifdef PETSC From 820d590b6b8a5096226f65029111f2475089119a Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 07:25:44 +0200 Subject: [PATCH 02/14] write string to result requires to open file without MPI support --- src/HDF5_utilities.f90 | 9 +++++++-- src/results.f90 | 27 +++++++++++++++++++++++++-- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 7de6be431..bd88feee2 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -129,10 +129,11 @@ end subroutine HDF5_utilities_init !-------------------------------------------------------------------------------------------------- !> @brief open and initializes HDF5 output file !-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_openFile(fileName,mode) +integer(HID_T) function HDF5_openFile(fileName,mode,parallel) character(len=*), intent(in) :: fileName character, intent(in), optional :: mode + logical, intent(in), optional :: parallel character :: m integer(HID_T) :: plist_id @@ -149,7 +150,11 @@ integer(HID_T) function HDF5_openFile(fileName,mode) if(hdferr < 0) error stop 'HDF5 error' #ifdef PETSC - call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) + if (present(parallel)) then + if (parallel) call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) + else + call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) + endif if(hdferr < 0) error stop 'HDF5 error' #endif diff --git a/src/results.f90 b/src/results.f90 index 94625a4b9..ef6298151 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -52,6 +52,7 @@ module results results_openGroup, & results_closeGroup, & results_writeDataset, & + results_writeDataset_str, & results_setLink, & results_addAttribute, & results_removeLink, & @@ -90,9 +91,12 @@ end subroutine results_init !-------------------------------------------------------------------------------------------------- !> @brief opens the results file to append data !-------------------------------------------------------------------------------------------------- -subroutine results_openJobFile +subroutine results_openJobFile(parallel) - resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','a') + logical, intent(in), optional :: parallel + + + resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','a',parallel) end subroutine results_openJobFile @@ -297,6 +301,25 @@ subroutine results_removeLink(link) end subroutine results_removeLink +!-------------------------------------------------------------------------------------------------- +!> @brief Store string dataset. +!> @details Not collective, must be called by one process at at time. 
+!-------------------------------------------------------------------------------------------------- +subroutine results_writeDataset_str(dataset,group,label,description) + + character(len=*), intent(in) :: label,group,description,dataset + + integer(HID_T) :: groupHandle + + + groupHandle = results_openGroup(group) + call HDF5_write_str(dataset,groupHandle,label) + call executionStamp(group//'/'//label,description) + call HDF5_closeGroup(groupHandle) + +end subroutine results_writeDataset_str + + !-------------------------------------------------------------------------------------------------- !> @brief Store real scalar dataset with associated metadata. !-------------------------------------------------------------------------------------------------- From f20302e0eed91f63043a8f97971ee26365879d56 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 07:27:18 +0200 Subject: [PATCH 03/14] early initialization of results need to be done before config if config writes to it --- src/CPFEM.f90 | 27 ++++++++++++++------------- src/CPFEM2.f90 | 34 ++++++++++++++++++---------------- 2 files changed, 32 insertions(+), 29 deletions(-) diff --git a/src/CPFEM.f90 b/src/CPFEM.f90 index aa532859a..4123af37e 100644 --- a/src/CPFEM.f90 +++ b/src/CPFEM.f90 @@ -4,22 +4,23 @@ !> @brief CPFEM engine !-------------------------------------------------------------------------------------------------- module CPFEM + use DAMASK_interface use prec - use math - use rotations + use IO use YAML_types use YAML_parse - use discretization_marc - use material - use config - use homogenization - use IO - use discretization - use DAMASK_interface use HDF5_utilities use results + use config + use math + use rotations use lattice + use material use phase + use homogenization + + use discretization + use discretization_marc implicit none private @@ -68,7 +69,7 @@ contains !-------------------------------------------------------------------------------------------------- -!> @brief call all module initializations +!> @brief Initialize all modules. !-------------------------------------------------------------------------------------------------- subroutine CPFEM_initAll @@ -77,13 +78,13 @@ subroutine CPFEM_initAll call IO_init call YAML_types_init call YAML_parse_init + call HDF5_utilities_init + call results_init(.false.) call config_init call math_init call rotations_init - call HDF5_utilities_init - call results_init(.false.) - call discretization_marc_init call lattice_init + call discretization_marc_init call material_init(.false.) 
call phase_init call homogenization_init diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90 index ea5820852..2bb8420b9 100644 --- a/src/CPFEM2.f90 +++ b/src/CPFEM2.f90 @@ -4,28 +4,29 @@ !> @brief needs a good name and description !-------------------------------------------------------------------------------------------------- module CPFEM2 - use prec use parallelization - use config - use math - use rotations + use DAMASK_interface + use prec + use IO use YAML_types use YAML_parse - use material - use lattice - use IO - use base64 - use DAMASK_interface - use discretization use HDF5 use HDF5_utilities use results - use homogenization + use config + use math + use rotations + use lattice + use material use phase + use homogenization + + use discretization #if defined(MESH) use FEM_quadrature use discretization_mesh #elif defined(GRID) + use base64 use discretization_grid #endif @@ -36,7 +37,7 @@ contains !-------------------------------------------------------------------------------------------------- -!> @brief call all module initializations +!> @brief Initialize all modules. !-------------------------------------------------------------------------------------------------- subroutine CPFEM_initAll @@ -44,18 +45,19 @@ subroutine CPFEM_initAll call DAMASK_interface_init ! Spectral and FEM interface to commandline call prec_init call IO_init - call base64_init -#ifdef MESH +#if defined(MESH) call FEM_quadrature_init +#elif defined(GRID) + call base64_init #endif call YAML_types_init call YAML_parse_init + call HDF5_utilities_init + call results_init(restart=interface_restartInc>0) call config_init call math_init call rotations_init call lattice_init - call HDF5_utilities_init - call results_init(restart=interface_restartInc>0) #if defined(MESH) call discretization_mesh_init(restart=interface_restartInc>0) #elif defined(GRID) From 3b06498c2f1b3d9d563e7675a7f25ab9032b87bf Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 07:28:35 +0200 Subject: [PATCH 04/14] make results self-contained for reproducibility ToDo: same functionality for load and geom --- src/config.f90 | 45 ++++++++++++++++++++++++++++++++++----------- src/results.f90 | 2 ++ 2 files changed, 36 insertions(+), 11 deletions(-) diff --git a/src/config.f90 b/src/config.f90 index 7d75cb444..8ee25182f 100644 --- a/src/config.f90 +++ b/src/config.f90 @@ -8,7 +8,8 @@ module config use IO use YAML_parse use YAML_types - + use results + use parallelization implicit none private @@ -31,6 +32,7 @@ subroutine config_init print'(/,a)', ' <<<+- config init -+>>>'; flush(IO_STDOUT) + call parse_material call parse_numerics call parse_debug @@ -41,14 +43,21 @@ end subroutine config_init !-------------------------------------------------------------------------------------------------- !> @brief Read material.yaml or .yaml. !-------------------------------------------------------------------------------------------------- -subroutine parse_material +subroutine parse_material() logical :: fileExists + character(len=:), allocatable :: fileContent inquire(file='material.yaml',exist=fileExists) if(.not. fileExists) call IO_error(100,ext_msg='material.yaml') print*, 'reading material.yaml'; flush(IO_STDOUT) + fileContent = IO_read('material.yaml') + if (worldrank == 0) then + call results_openJobFile(parallel=.false.) 
+ call results_writeDataset_str(fileContent,'setup','material.yaml','DAMASK main configuration') + call results_closeJobFile + endif config_material => YAML_parse_file('material.yaml') end subroutine parse_material @@ -57,15 +66,22 @@ end subroutine parse_material !-------------------------------------------------------------------------------------------------- !> @brief Read numerics.yaml. !-------------------------------------------------------------------------------------------------- -subroutine parse_numerics +subroutine parse_numerics() - logical :: fexist + logical :: fileExists + character(len=:), allocatable :: fileContent config_numerics => emptyDict - inquire(file='numerics.yaml', exist=fexist) - if (fexist) then + inquire(file='numerics.yaml', exist=fileExists) + if (fileExists) then print*, 'reading numerics.yaml'; flush(IO_STDOUT) + fileContent = IO_read('numerics.yaml') + if (worldrank == 0) then + call results_openJobFile(parallel=.false.) + call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration (optional)') + call results_closeJobFile + endif config_numerics => YAML_parse_file('numerics.yaml') endif @@ -75,17 +91,24 @@ end subroutine parse_numerics !-------------------------------------------------------------------------------------------------- !> @brief Read debug.yaml. !-------------------------------------------------------------------------------------------------- -subroutine parse_debug +subroutine parse_debug() - logical :: fexist + logical :: fileExists + character(len=:), allocatable :: fileContent config_debug => emptyDict - inquire(file='debug.yaml', exist=fexist) - fileExists: if (fexist) then + inquire(file='debug.yaml', exist=fileExists) + if (fileExists) then print*, 'reading debug.yaml'; flush(IO_STDOUT) + fileContent = IO_read('debug.yaml') + if (worldrank == 0) then + call results_openJobFile(parallel=.false.) 
+ call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration (optional)') + call results_closeJobFile + endif config_debug => YAML_parse_file('debug.yaml') - endif fileExists + endif end subroutine parse_debug diff --git a/src/results.f90 b/src/results.f90 index ef6298151..8b04800ef 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -82,6 +82,8 @@ subroutine results_init(restart) call results_addAttribute('call',trim(commandLine)) call results_closeGroup(results_addGroup('cell_to')) call results_addAttribute('description','mappings to place data in space','cell_to') + call results_closeGroup(results_addGroup('setup')) + call results_addAttribute('description','input data used to run the simulation','setup') call results_closeJobFile endif From e01d271ee4c668fc87c58cfcebf3e8f3d35676c1 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 07:43:35 +0200 Subject: [PATCH 05/14] store geometry (for full reproducibility) --- src/grid/discretization_grid.f90 | 39 ++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/src/grid/discretization_grid.f90 b/src/grid/discretization_grid.f90 index 049b3b60d..f94c9921b 100644 --- a/src/grid/discretization_grid.f90 +++ b/src/grid/discretization_grid.f90 @@ -68,11 +68,28 @@ subroutine discretization_grid_init(restart) devNull, z, z_offset integer, dimension(worldsize) :: & displs, sendcounts + integer :: fileUnit, myStat + integer(pI64) :: & + fileLength + character(len=:), allocatable :: & + fileContent + print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(IO_STDOUT) + if(worldrank == 0) then - call readVTI(grid,geomSize,origin,materialAt_global) + inquire(file = trim(interface_geomFile), size=fileLength) + open(newunit=fileUnit, file=trim(interface_geomFile), access='stream',& + status='old', position='rewind', action='read',iostat=myStat) + if(myStat /= 0) call IO_error(100,ext_msg=trim(interface_geomFile)) + allocate(character(len=fileLength)::fileContent) + read(fileUnit) fileContent + close(fileUnit) + call readVTI(grid,geomSize,origin,materialAt_global,fileContent) + call results_openJobFile(parallel=.false.) + call results_writeDataset_str(fileContent,'setup',interface_geomFile,'geometry definition (grid solver)') + call results_closeJobFile else allocate(materialAt_global(0)) ! needed for IntelMPI endif @@ -157,7 +174,8 @@ end subroutine discretization_grid_init !> @brief Parse vtk image data (.vti) !> @details https://vtk.org/Wiki/VTK_XML_Formats !-------------------------------------------------------------------------------------------------- -subroutine readVTI(grid,geomSize,origin,material) +subroutine readVTI(grid,geomSize,origin,material, & + fileContent) integer, dimension(3), intent(out) :: & grid ! grid (across all processes!) @@ -166,28 +184,19 @@ subroutine readVTI(grid,geomSize,origin,material) origin ! origin (across all processes!) integer, dimension(:), intent(out), allocatable :: & material + character(len=*), intent(in) :: & + fileContent - character(len=:), allocatable :: fileContent, dataType, headerType + character(len=:), allocatable :: dataType, headerType logical :: inFile,inImage,gotCellData,compressed - integer :: fileUnit, myStat integer(pI64) :: & - fileLength, & !< length of the geom file (in characters) startPos, endPos, & s + grid = -1 geomSize = -1.0_pReal -!-------------------------------------------------------------------------------------------------- -! 
read raw data as stream - inquire(file = trim(interface_geomFile), size=fileLength) - open(newunit=fileUnit, file=trim(interface_geomFile), access='stream',& - status='old', position='rewind', action='read',iostat=myStat) - if(myStat /= 0) call IO_error(100,ext_msg=trim(interface_geomFile)) - allocate(character(len=fileLength)::fileContent) - read(fileUnit) fileContent - close(fileUnit) - inFile = .false. inImage = .false. gotCelldata = .false. From ddb0429a1d5451237e69202c0e312bc8e8c984d8 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 07:57:04 +0200 Subject: [PATCH 06/14] store load case (full reproducibility for grid solver) --- src/IO.f90 | 3 ++- src/grid/DAMASK_grid.f90 | 8 ++++++++ src/grid/discretization_grid.f90 | 11 +---------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/IO.f90 b/src/IO.f90 index 399e2e6df..961e92d63 100644 --- a/src/IO.f90 +++ b/src/IO.f90 @@ -119,9 +119,10 @@ function IO_read(fileName) result(fileContent) character(len=:), allocatable :: fileContent integer :: & - fileLength, & fileUnit, & myStat + integer(pI64) :: & + fileLength inquire(file = fileName, size=fileLength) diff --git a/src/grid/DAMASK_grid.f90 b/src/grid/DAMASK_grid.f90 index 9c31b6f26..800003552 100644 --- a/src/grid/DAMASK_grid.f90 +++ b/src/grid/DAMASK_grid.f90 @@ -107,6 +107,8 @@ program DAMASK_grid step_bc, & step_mech, & step_discretization + character(len=:), allocatable :: & + fileContent !-------------------------------------------------------------------------------------------------- ! init DAMASK (all modules) @@ -127,6 +129,12 @@ program DAMASK_grid if (stagItMax < 0) call IO_error(301,ext_msg='maxStaggeredIter') if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack') + if (worldrank == 0) then + fileContent = IO_read(interface_loadFile) + call results_openJobFile(parallel=.false.) + call results_writeDataset_str(fileContent,'setup',interface_loadFile,'load case definition (grid solver)') + call results_closeJobFile + endif config_load => YAML_parse_file(trim(interface_loadFile)) solver => config_load%get('solver') diff --git a/src/grid/discretization_grid.f90 b/src/grid/discretization_grid.f90 index f94c9921b..8592f7963 100644 --- a/src/grid/discretization_grid.f90 +++ b/src/grid/discretization_grid.f90 @@ -68,9 +68,6 @@ subroutine discretization_grid_init(restart) devNull, z, z_offset integer, dimension(worldsize) :: & displs, sendcounts - integer :: fileUnit, myStat - integer(pI64) :: & - fileLength character(len=:), allocatable :: & fileContent @@ -79,13 +76,7 @@ subroutine discretization_grid_init(restart) if(worldrank == 0) then - inquire(file = trim(interface_geomFile), size=fileLength) - open(newunit=fileUnit, file=trim(interface_geomFile), access='stream',& - status='old', position='rewind', action='read',iostat=myStat) - if(myStat /= 0) call IO_error(100,ext_msg=trim(interface_geomFile)) - allocate(character(len=fileLength)::fileContent) - read(fileUnit) fileContent - close(fileUnit) + fileContent = IO_read(interface_geomFile) call readVTI(grid,geomSize,origin,materialAt_global,fileContent) call results_openJobFile(parallel=.false.) 
call results_writeDataset_str(fileContent,'setup',interface_geomFile,'geometry definition (grid solver)') From 367c088b1661a817d6fe95aa39ddcf294b968804 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 08:00:24 +0200 Subject: [PATCH 07/14] sorting according to dependency (no Makefile) --- src/commercialFEM_fileList.f90 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/commercialFEM_fileList.f90 b/src/commercialFEM_fileList.f90 index 5fe754ce3..e1d53ca83 100644 --- a/src/commercialFEM_fileList.f90 +++ b/src/commercialFEM_fileList.f90 @@ -7,18 +7,18 @@ #include "IO.f90" #include "YAML_types.f90" #include "YAML_parse.f90" +#include "HDF5_utilities.f90" +#include "results.f90" #include "config.f90" #include "LAPACK_interface.f90" #include "math.f90" #include "rotations.f90" +#include "lattice.f90" #include "element.f90" -#include "HDF5_utilities.f90" -#include "results.f90" #include "geometry_plastic_nonlocal.f90" #include "discretization.f90" #include "Marc/discretization_Marc.f90" #include "material.f90" -#include "lattice.f90" #include "phase.f90" #include "phase_mechanical.f90" #include "phase_mechanical_elastic.f90" From 812b0f07f5fc8037df99cf62f26c9c7bccb9ed3e Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 08:35:52 +0200 Subject: [PATCH 08/14] read file only once (per process) --- src/YAML_parse.f90 | 12 ++++++------ src/config.f90 | 19 ++++++++++++++++--- src/grid/DAMASK_grid.f90 | 4 ++-- 3 files changed, 24 insertions(+), 11 deletions(-) diff --git a/src/YAML_parse.f90 b/src/YAML_parse.f90 index d7cfa016f..2df09936d 100644 --- a/src/YAML_parse.f90 +++ b/src/YAML_parse.f90 @@ -14,7 +14,7 @@ module YAML_parse public :: & YAML_parse_init, & - YAML_parse_file + YAML_parse_str contains @@ -29,16 +29,16 @@ end subroutine YAML_parse_init !-------------------------------------------------------------------------------------------------- -!> @brief Parse a YAML file into a a structure of nodes. +!> @brief Parse a YAML string into a a structure of nodes. !-------------------------------------------------------------------------------------------------- -function YAML_parse_file(fname) result(node) +function YAML_parse_str(str) result(node) - character(len=*), intent(in) :: fname + character(len=*), intent(in) :: str class (tNode), pointer :: node - node => parse_flow(to_flow(IO_read(fname))) + node => parse_flow(to_flow(str)) -end function YAML_parse_file +end function YAML_parse_str !-------------------------------------------------------------------------------------------------- diff --git a/src/config.f90 b/src/config.f90 index 8ee25182f..b80658213 100644 --- a/src/config.f90 +++ b/src/config.f90 @@ -51,14 +51,17 @@ subroutine parse_material() inquire(file='material.yaml',exist=fileExists) if(.not. fileExists) call IO_error(100,ext_msg='material.yaml') + print*, 'reading material.yaml'; flush(IO_STDOUT) fileContent = IO_read('material.yaml') + if (worldrank == 0) then call results_openJobFile(parallel=.false.) 
call results_writeDataset_str(fileContent,'setup','material.yaml','DAMASK main configuration') call results_closeJobFile endif - config_material => YAML_parse_file('material.yaml') + + config_material => YAML_parse_str(fileContent) end subroutine parse_material @@ -73,16 +76,21 @@ subroutine parse_numerics() config_numerics => emptyDict + inquire(file='numerics.yaml', exist=fileExists) if (fileExists) then + print*, 'reading numerics.yaml'; flush(IO_STDOUT) fileContent = IO_read('numerics.yaml') + if (worldrank == 0) then call results_openJobFile(parallel=.false.) call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration (optional)') call results_closeJobFile endif - config_numerics => YAML_parse_file('numerics.yaml') + + config_numerics => YAML_parse_str(fileContent) + endif end subroutine parse_numerics @@ -98,16 +106,21 @@ subroutine parse_debug() config_debug => emptyDict + inquire(file='debug.yaml', exist=fileExists) if (fileExists) then + print*, 'reading debug.yaml'; flush(IO_STDOUT) fileContent = IO_read('debug.yaml') + if (worldrank == 0) then call results_openJobFile(parallel=.false.) call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration (optional)') call results_closeJobFile endif - config_debug => YAML_parse_file('debug.yaml') + + config_debug => YAML_parse_str(fileContent) + endif end subroutine parse_debug diff --git a/src/grid/DAMASK_grid.f90 b/src/grid/DAMASK_grid.f90 index 800003552..65c050b0c 100644 --- a/src/grid/DAMASK_grid.f90 +++ b/src/grid/DAMASK_grid.f90 @@ -129,13 +129,13 @@ program DAMASK_grid if (stagItMax < 0) call IO_error(301,ext_msg='maxStaggeredIter') if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack') + fileContent = IO_read(interface_loadFile) if (worldrank == 0) then - fileContent = IO_read(interface_loadFile) call results_openJobFile(parallel=.false.) call results_writeDataset_str(fileContent,'setup',interface_loadFile,'load case definition (grid solver)') call results_closeJobFile endif - config_load => YAML_parse_file(trim(interface_loadFile)) + config_load => YAML_parse_str(fileContent) solver => config_load%get('solver') !-------------------------------------------------------------------------------------------------- From b9d4eb23ccebc8d6d4ecbfc02b1c8166743db7e3 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 08:54:17 +0200 Subject: [PATCH 09/14] only rank 0 reads file for MPI --- src/config.f90 | 18 +++++++++--------- src/grid/DAMASK_grid.f90 | 4 +++- src/parallelization.f90 | 34 ++++++++++++++++++++++++++++++++-- 3 files changed, 44 insertions(+), 12 deletions(-) diff --git a/src/config.f90 b/src/config.f90 index b80658213..8f5680158 100644 --- a/src/config.f90 +++ b/src/config.f90 @@ -52,14 +52,14 @@ subroutine parse_material() inquire(file='material.yaml',exist=fileExists) if(.not. fileExists) call IO_error(100,ext_msg='material.yaml') - print*, 'reading material.yaml'; flush(IO_STDOUT) - fileContent = IO_read('material.yaml') - if (worldrank == 0) then + print*, 'reading material.yaml'; flush(IO_STDOUT) + fileContent = IO_read('material.yaml') call results_openJobFile(parallel=.false.) 
call results_writeDataset_str(fileContent,'setup','material.yaml','DAMASK main configuration') call results_closeJobFile endif + call parallelization_bcast_str(fileContent) config_material => YAML_parse_str(fileContent) @@ -80,14 +80,14 @@ subroutine parse_numerics() inquire(file='numerics.yaml', exist=fileExists) if (fileExists) then - print*, 'reading numerics.yaml'; flush(IO_STDOUT) - fileContent = IO_read('numerics.yaml') - if (worldrank == 0) then + print*, 'reading numerics.yaml'; flush(IO_STDOUT) + fileContent = IO_read('numerics.yaml') call results_openJobFile(parallel=.false.) call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration (optional)') call results_closeJobFile endif + call parallelization_bcast_str(fileContent) config_numerics => YAML_parse_str(fileContent) @@ -110,14 +110,14 @@ subroutine parse_debug() inquire(file='debug.yaml', exist=fileExists) if (fileExists) then - print*, 'reading debug.yaml'; flush(IO_STDOUT) - fileContent = IO_read('debug.yaml') - if (worldrank == 0) then + print*, 'reading debug.yaml'; flush(IO_STDOUT) + fileContent = IO_read('debug.yaml') call results_openJobFile(parallel=.false.) call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration (optional)') call results_closeJobFile endif + call parallelization_bcast_str(fileContent) config_debug => YAML_parse_str(fileContent) diff --git a/src/grid/DAMASK_grid.f90 b/src/grid/DAMASK_grid.f90 index 65c050b0c..89521a223 100644 --- a/src/grid/DAMASK_grid.f90 +++ b/src/grid/DAMASK_grid.f90 @@ -129,12 +129,14 @@ program DAMASK_grid if (stagItMax < 0) call IO_error(301,ext_msg='maxStaggeredIter') if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack') - fileContent = IO_read(interface_loadFile) if (worldrank == 0) then + fileContent = IO_read(interface_loadFile) call results_openJobFile(parallel=.false.) call results_writeDataset_str(fileContent,'setup',interface_loadFile,'load case definition (grid solver)') call results_closeJobFile endif + + call parallelization_bcast_str(fileContent) config_load => YAML_parse_str(fileContent) solver => config_load%get('solver') diff --git a/src/parallelization.f90 b/src/parallelization.f90 index 534478cef..31da0f969 100644 --- a/src/parallelization.f90 +++ b/src/parallelization.f90 @@ -24,9 +24,18 @@ module parallelization worldrank = 0, & !< MPI worldrank (/=0 for MPI simulations only) worldsize = 1 !< MPI worldsize (/=1 for MPI simulations only) -#ifdef PETSC +#ifndef PETSC +public :: parallelization_broadcast_str + +contains +subroutine parallelization_bcast_str(string) + character(len=*), allocatable, intent(inout) :: string +end subroutine parallelization_bcast_str + +#else public :: & - parallelization_init + parallelization_init, & + parallelization_bcast_str contains @@ -101,6 +110,27 @@ subroutine parallelization_init !$ call omp_set_num_threads(OMP_NUM_THREADS) end subroutine parallelization_init + + +!-------------------------------------------------------------------------------------------------- +!> @brief Broadcast a string from process 0. +!-------------------------------------------------------------------------------------------------- +subroutine parallelization_bcast_str(string) + + character(len=:), allocatable, intent(inout) :: string + + integer :: strlen, ierr ! 
pI64 for strlen not supported by MPI + + + if (worldrank == 0) strlen = len(string,pI64) + call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD, ierr) + if (worldrank /= 0) allocate(character(len=strlen)::string) + + call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD, ierr) + + +end subroutine parallelization_bcast_str + #endif end module parallelization From 26ae352a4c35c52edf99ea384f8deed2f808f85f Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 09:31:57 +0200 Subject: [PATCH 10/14] make setup data easily accessible to the user --- PRIVATE | 2 +- python/damask/_result.py | 32 ++++++++++++++++++++++++++++++-- src/parallelization.f90 | 4 ++-- src/results.f90 | 2 +- 4 files changed, 34 insertions(+), 6 deletions(-) diff --git a/PRIVATE b/PRIVATE index 72c581038..7d48d8158 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 72c58103860e127d37ccf3a06827331de29406ca +Subproject commit 7d48d8158c1b2c0ab8bdd1c5d2baa3d020ae006d diff --git a/python/damask/_result.py b/python/damask/_result.py index 1fa376f63..0f69c4c5f 100644 --- a/python/damask/_result.py +++ b/python/damask/_result.py @@ -99,8 +99,10 @@ class Result: self.version_major = f.attrs['DADF5_version_major'] self.version_minor = f.attrs['DADF5_version_minor'] - if self.version_major != 0 or not 12 <= self.version_minor <= 13: + if self.version_major != 0 or not 12 <= self.version_minor <= 14: raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}') + if self.version_major == 0 and self.version_minor < 14: + self.export_setup = None self.structured = 'cells' in f['geometry'].attrs.keys() @@ -1395,7 +1397,7 @@ class Result: def export_XDMF(self,output='*'): """ - Write XDMF file to directly visualize data in DADF5 file. + Write XDMF file to directly visualize data from DADF5 file. The XDMF format is only supported for structured grids with single phase and single constituent. @@ -1748,3 +1750,29 @@ class Result: if flatten: r = util.dict_flatten(r) return None if (type(r) == dict and r == {}) else r + + + def export_setup(self,output='*',overwrite=False): + """ + Export configuration files. + + Parameters + ---------- + output : (list of) str, optional + Names of the datasets to export to the file. + Defaults to '*', in which case all datasets are exported. + overwrite : boolean, optional + Overwrite existing configuration files. + Defaults to False. 
+ + """ + with h5py.File(self.fname,'r') as f_in: + for out in _match(output,f_in['setup'].keys()): + description = f_in['/'.join(('setup',out))].attrs['description'] + if not h5py3: description = description.decode() + if not Path(out).exists() or overwrite: + with open(out,'w') as f_out: + f_out.write(f_in['/'.join(('setup',out))][()].decode()) + print(f"exported {description} to '{out}'") + else: + print(f"'{out}' exists, {description} not exported") diff --git a/src/parallelization.f90 b/src/parallelization.f90 index 31da0f969..a5e89561c 100644 --- a/src/parallelization.f90 +++ b/src/parallelization.f90 @@ -25,11 +25,11 @@ module parallelization worldsize = 1 !< MPI worldsize (/=1 for MPI simulations only) #ifndef PETSC -public :: parallelization_broadcast_str +public :: parallelization_bcast_str contains subroutine parallelization_bcast_str(string) - character(len=*), allocatable, intent(inout) :: string + character(len=:), allocatable, intent(inout) :: string end subroutine parallelization_bcast_str #else diff --git a/src/results.f90 b/src/results.f90 index 8b04800ef..9c4507938 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -74,7 +74,7 @@ subroutine results_init(restart) if(.not. restart) then resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','w') call results_addAttribute('DADF5_version_major',0) - call results_addAttribute('DADF5_version_minor',13) + call results_addAttribute('DADF5_version_minor',14) call get_command_argument(0,commandLine) call results_addAttribute('creator',trim(commandLine)//' '//DAMASKVERSION) call results_addAttribute('created',now()) From 64d52dbbf47632891f08b61a686b18be2de7fc0c Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 09:46:51 +0200 Subject: [PATCH 11/14] store input deck, MSC.Marc now 100% reproducible --- src/Marc/discretization_Marc.f90 | 8 +++++++- src/parallelization.f90 | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/Marc/discretization_Marc.f90 b/src/Marc/discretization_Marc.f90 index d93eea2d5..fd6b8699d 100644 --- a/src/Marc/discretization_Marc.f90 +++ b/src/Marc/discretization_Marc.f90 @@ -216,7 +216,13 @@ subroutine inputRead(elem,node0_elem,connectivity_elem,materialAt) mapElemSet !< list of elements in elementSet - inputFile = IO_readlines(trim(getSolverJobName())//trim(InputFileExtension)) + call results_openJobFile + call results_writeDataset_str(IO_read(trim(getSolverJobName())//InputFileExtension), 'setup', & + trim(getSolverJobName())//InputFileExtension, & + 'MSC.Marc input deck') + call results_closeJobFile + + inputFile = IO_readlines(trim(getSolverJobName())//InputFileExtension) call inputRead_fileFormat(fileFormatVersion, & inputFile) call inputRead_tableStyles(initialcondTableStyle,hypoelasticTableStyle, & diff --git a/src/parallelization.f90 b/src/parallelization.f90 index a5e89561c..0f8b1b8f8 100644 --- a/src/parallelization.f90 +++ b/src/parallelization.f90 @@ -122,7 +122,7 @@ subroutine parallelization_bcast_str(string) integer :: strlen, ierr ! 
pI64 for strlen not supported by MPI - if (worldrank == 0) strlen = len(string,pI64) + if (worldrank == 0) strlen = string call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD, ierr) if (worldrank /= 0) allocate(character(len=strlen)::string) From a6f7e4f1a6cc2759450d2ef7fccd84c482fe072c Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 09:50:23 +0200 Subject: [PATCH 12/14] play it safe --- src/HDF5_utilities.f90 | 31 +++++++++++++++++++------------ src/parallelization.f90 | 2 +- 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index bd88feee2..2d2632e13 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -1479,27 +1479,34 @@ subroutine HDF5_write_str(dataset,loc_id,datasetName) integer(HID_T), intent(in) :: loc_id character(len=*), intent(in) :: datasetName !< name of the dataset in the file - INTEGER(HID_T) :: filetype_id, space_id, dataset_id - INTEGER :: hdferr - + integer(HID_T) :: filetype_id, space_id, dataset_id + integer :: hdferr character(len=len_trim(dataset)+1,kind=C_CHAR), dimension(1), target :: dataset_ type(C_PTR), target, dimension(1) :: ptr - + dataset_(1) = trim(dataset)//C_NULL_CHAR ptr(1) = c_loc(dataset_(1)) - - call h5tcopy_f(H5T_STRING, filetype_id, hdferr) - call h5tset_size_f(filetype_id, int(len(dataset_),HSIZE_T), hdferr) - - call h5screate_f(H5S_SCALAR_F, space_id, hdferr) - call h5dcreate_f(loc_id, datasetName, H5T_STRING, space_id, dataset_id, hdferr) - call h5dwrite_f(dataset_id, H5T_STRING, c_loc(ptr), hdferr); - + call h5tcopy_f(H5T_STRING, filetype_id, hdferr) + if(hdferr < 0) error stop 'HDF5 error' + call h5tset_size_f(filetype_id, int(len(dataset_),HSIZE_T), hdferr) + if(hdferr < 0) error stop 'HDF5 error' + + call h5screate_f(H5S_SCALAR_F, space_id, hdferr) + if(hdferr < 0) error stop 'HDF5 error' + call h5dcreate_f(loc_id, datasetName, H5T_STRING, space_id, dataset_id, hdferr) + if(hdferr < 0) error stop 'HDF5 error' + + call h5dwrite_f(dataset_id, H5T_STRING, c_loc(ptr), hdferr) + if(hdferr < 0) error stop 'HDF5 error' + call h5dclose_f(dataset_id, hdferr) + if(hdferr < 0) error stop 'HDF5 error' call h5sclose_f(space_id, hdferr) + if(hdferr < 0) error stop 'HDF5 error' call h5tclose_f(filetype_id, hdferr) + if(hdferr < 0) error stop 'HDF5 error' end subroutine HDF5_write_str diff --git a/src/parallelization.f90 b/src/parallelization.f90 index 0f8b1b8f8..8ea22ef0a 100644 --- a/src/parallelization.f90 +++ b/src/parallelization.f90 @@ -122,7 +122,7 @@ subroutine parallelization_bcast_str(string) integer :: strlen, ierr ! pI64 for strlen not supported by MPI - if (worldrank == 0) strlen = string + if (worldrank == 0) strlen = len(string) call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD, ierr) if (worldrank /= 0) allocate(character(len=strlen)::string) From 044a0489446f2b18051c528e7cfac304d03ff7d7 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 27 Jul 2021 11:59:46 +0200 Subject: [PATCH 13/14] taking care of corner cases (e.g. 
restart) adjusting tests to take care of new 'setup' group --- PRIVATE | 2 +- src/HDF5_utilities.f90 | 4 ++++ src/IO.f90 | 8 ++++---- src/config.f90 | 2 +- src/grid/DAMASK_grid.f90 | 6 ++++-- src/grid/discretization_grid.f90 | 6 ++++-- src/math.f90 | 2 +- src/prec.f90 | 1 + src/results.f90 | 19 +++++++++++++++++-- 9 files changed, 37 insertions(+), 13 deletions(-) diff --git a/PRIVATE b/PRIVATE index 7d48d8158..9699f20f2 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 7d48d8158c1b2c0ab8bdd1c5d2baa3d020ae006d +Subproject commit 9699f20f21f8a5f532c735a1aa9daeba395da94d diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 2d2632e13..fa62e4840 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -1473,6 +1473,10 @@ subroutine HDF5_write_real7(dataset,loc_id,datasetName,parallel) end subroutine HDF5_write_real7 +!-------------------------------------------------------------------------------------------------- +!> @brief Write dataset of type string (scalar). +!> @details Not collective, must be called by one process at at time. +!-------------------------------------------------------------------------------------------------- subroutine HDF5_write_str(dataset,loc_id,datasetName) character(len=*), intent(in) :: dataset diff --git a/src/IO.f90 b/src/IO.f90 index 961e92d63..a32bd5ef0 100644 --- a/src/IO.f90 +++ b/src/IO.f90 @@ -128,19 +128,19 @@ function IO_read(fileName) result(fileContent) inquire(file = fileName, size=fileLength) open(newunit=fileUnit, file=fileName, access='stream',& status='old', position='rewind', action='read',iostat=myStat) - if(myStat /= 0) call IO_error(100,ext_msg=trim(fileName)) + if (myStat /= 0) call IO_error(100,ext_msg=trim(fileName)) allocate(character(len=fileLength)::fileContent) - if(fileLength==0) then + if (fileLength==0) then close(fileUnit) return endif read(fileUnit,iostat=myStat) fileContent - if(myStat /= 0) call IO_error(102,ext_msg=trim(fileName)) + if (myStat /= 0) call IO_error(102,ext_msg=trim(fileName)) close(fileUnit) if (scan(fileContent(:index(fileContent,LF)),CR//LF) /= 0) fileContent = CRLF2LF(fileContent) - if(fileContent(fileLength:fileLength) /= IO_EOL) fileContent = fileContent//IO_EOL ! ensure EOL@EOF + if (fileContent(fileLength:fileLength) /= IO_EOL) fileContent = fileContent//IO_EOL ! ensure EOL@EOF end function IO_read diff --git a/src/config.f90 b/src/config.f90 index 8f5680158..c460ce7c5 100644 --- a/src/config.f90 +++ b/src/config.f90 @@ -56,7 +56,7 @@ subroutine parse_material() print*, 'reading material.yaml'; flush(IO_STDOUT) fileContent = IO_read('material.yaml') call results_openJobFile(parallel=.false.) - call results_writeDataset_str(fileContent,'setup','material.yaml','DAMASK main configuration') + call results_writeDataset_str(fileContent,'setup','material.yaml','main configuration') call results_closeJobFile endif call parallelization_bcast_str(fileContent) diff --git a/src/grid/DAMASK_grid.f90 b/src/grid/DAMASK_grid.f90 index 89521a223..b6e45a75b 100644 --- a/src/grid/DAMASK_grid.f90 +++ b/src/grid/DAMASK_grid.f90 @@ -108,7 +108,7 @@ program DAMASK_grid step_mech, & step_discretization character(len=:), allocatable :: & - fileContent + fileContent, fname !-------------------------------------------------------------------------------------------------- ! 
init DAMASK (all modules) @@ -131,8 +131,10 @@ program DAMASK_grid if (worldrank == 0) then fileContent = IO_read(interface_loadFile) + fname = interface_loadFile + if (scan(fname,'/') /= 0) fname = fname(scan(fname,'/',.true.)+1:) call results_openJobFile(parallel=.false.) - call results_writeDataset_str(fileContent,'setup',interface_loadFile,'load case definition (grid solver)') + call results_writeDataset_str(fileContent,'setup',fname,'load case definition (grid solver)') call results_closeJobFile endif diff --git a/src/grid/discretization_grid.f90 b/src/grid/discretization_grid.f90 index 8592f7963..1c88e5817 100644 --- a/src/grid/discretization_grid.f90 +++ b/src/grid/discretization_grid.f90 @@ -69,7 +69,7 @@ subroutine discretization_grid_init(restart) integer, dimension(worldsize) :: & displs, sendcounts character(len=:), allocatable :: & - fileContent + fileContent, fname print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(IO_STDOUT) @@ -78,8 +78,10 @@ subroutine discretization_grid_init(restart) if(worldrank == 0) then fileContent = IO_read(interface_geomFile) call readVTI(grid,geomSize,origin,materialAt_global,fileContent) + fname = interface_geomFile + if (scan(fname,'/') /= 0) fname = fname(scan(fname,'/',.true.)+1:) call results_openJobFile(parallel=.false.) - call results_writeDataset_str(fileContent,'setup',interface_geomFile,'geometry definition (grid solver)') + call results_writeDataset_str(fileContent,'setup',fname,'geometry definition (grid solver)') call results_closeJobFile else allocate(materialAt_global(0)) ! needed for IntelMPI diff --git a/src/math.f90 b/src/math.f90 index a48f44b93..052ff7fa0 100644 --- a/src/math.f90 +++ b/src/math.f90 @@ -1209,7 +1209,7 @@ subroutine selfTest error stop 'math_sym33to6/math_6toSym33' call random_number(t66) - if(any(dNeq(math_sym3333to66(math_66toSym3333(t66)),t66))) & + if(any(dNeq(math_sym3333to66(math_66toSym3333(t66)),t66,1.0e-15_pReal))) & error stop 'math_sym3333to66/math_66toSym3333' call random_number(v6) diff --git a/src/prec.f90 b/src/prec.f90 index 3090eb67c..4b70ffbfa 100644 --- a/src/prec.f90 +++ b/src/prec.f90 @@ -89,6 +89,7 @@ end subroutine prec_init ! replaces "==" but for certain (relative) tolerance. Counterpart to dNeq ! https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/ ! AlmostEqualRelative +! ToDo: Use 'spacing': https://gcc.gnu.org/onlinedocs/gfortran/SPACING.html#SPACING !-------------------------------------------------------------------------------------------------- logical elemental pure function dEq(a,b,tol) diff --git a/src/results.f90 b/src/results.f90 index 9c4507938..5553a883a 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -65,13 +65,17 @@ subroutine results_init(restart) logical, intent(in) :: restart character(len=pPathLen) :: commandLine + integer :: hdferr + integer(HID_T) :: group_id + character(len=:), allocatable :: date + print'(/,a)', ' <<<+- results init -+>>>'; flush(IO_STDOUT) print*, 'M. Diehl et al., Integrating Materials and Manufacturing Innovation 6(1):83–91, 2017' print*, 'https://doi.org/10.1007/s40192-017-0084-5'//IO_EOL - if(.not. restart) then + if (.not. 
restart) then resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','w') call results_addAttribute('DADF5_version_major',0) call results_addAttribute('DADF5_version_minor',14) @@ -84,9 +88,20 @@ subroutine results_init(restart) call results_addAttribute('description','mappings to place data in space','cell_to') call results_closeGroup(results_addGroup('setup')) call results_addAttribute('description','input data used to run the simulation','setup') - call results_closeJobFile + else + date = now() + call results_openJobFile + call get_command(commandLine) + call results_addAttribute('call (restart at '//date//')',trim(commandLine)) + call h5gmove_f(resultsFile,'setup','tmp',hdferr) + call results_addAttribute('description','input data used to run the simulation (backup from restart at '//date//')','tmp') + call results_closeGroup(results_addGroup('setup')) + call results_addAttribute('description','input data used to run the simulation','setup') + call h5gmove_f(resultsFile,'tmp','setup/backup',hdferr) endif + call results_closeJobFile + end subroutine results_init From e8312a49ed3a509e8995e808e590c676344d6287 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 2 Aug 2021 10:38:59 +0200 Subject: [PATCH 14/14] polishing --- python/damask/_result.py | 21 ++++++++++++--------- src/config.f90 | 4 ++-- src/results.f90 | 4 ++-- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/python/damask/_result.py b/python/damask/_result.py index 0f69c4c5f..d502969a1 100644 --- a/python/damask/_result.py +++ b/python/damask/_result.py @@ -1766,13 +1766,16 @@ class Result: Defaults to False. """ - with h5py.File(self.fname,'r') as f_in: - for out in _match(output,f_in['setup'].keys()): - description = f_in['/'.join(('setup',out))].attrs['description'] - if not h5py3: description = description.decode() - if not Path(out).exists() or overwrite: - with open(out,'w') as f_out: - f_out.write(f_in['/'.join(('setup',out))][()].decode()) - print(f"exported {description} to '{out}'") + def export(name,obj,output,overwrite): + if type(obj) == h5py.Dataset and _match(output,[name]): + d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode() + if not Path(name).exists() or overwrite: + with open(name,'w') as f_out: f_out.write(obj[()].decode()) + print(f"Exported {d} to '{name}'.") else: - print(f"'{out}' exists, {description} not exported") + print(f"'{name}' exists, {d} not exported.") + elif type(obj) == h5py.Group: + os.makedirs(name, exist_ok=True) + + with h5py.File(self.fname,'r') as f_in: + f_in['setup'].visititems(partial(export,output=output,overwrite=overwrite)) diff --git a/src/config.f90 b/src/config.f90 index c460ce7c5..ecde0831c 100644 --- a/src/config.f90 +++ b/src/config.f90 @@ -84,7 +84,7 @@ subroutine parse_numerics() print*, 'reading numerics.yaml'; flush(IO_STDOUT) fileContent = IO_read('numerics.yaml') call results_openJobFile(parallel=.false.) - call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration (optional)') + call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration') call results_closeJobFile endif call parallelization_bcast_str(fileContent) @@ -114,7 +114,7 @@ subroutine parse_debug() print*, 'reading debug.yaml'; flush(IO_STDOUT) fileContent = IO_read('debug.yaml') call results_openJobFile(parallel=.false.) 
- call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration (optional)') + call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration') call results_closeJobFile endif call parallelization_bcast_str(fileContent) diff --git a/src/results.f90 b/src/results.f90 index 5553a883a..f6bc4a045 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -94,10 +94,10 @@ subroutine results_init(restart) call get_command(commandLine) call results_addAttribute('call (restart at '//date//')',trim(commandLine)) call h5gmove_f(resultsFile,'setup','tmp',hdferr) - call results_addAttribute('description','input data used to run the simulation (backup from restart at '//date//')','tmp') + call results_addAttribute('description','input data used to run the simulation up to restart at '//date,'tmp') call results_closeGroup(results_addGroup('setup')) call results_addAttribute('description','input data used to run the simulation','setup') - call h5gmove_f(resultsFile,'tmp','setup/backup',hdferr) + call h5gmove_f(resultsFile,'tmp','setup/previous',hdferr) endif call results_closeJobFile
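
The setup data stored by this series can be retrieved from the result file afterwards. The following is a minimal usage sketch, not part of the patches: it assumes a DADF5 file (version 0.14) written with these changes and uses the Result.export_setup() method added in PATCH 10/14 and reworked in PATCH 14/14; the file name 'job.hdf5' is a placeholder.

    # usage sketch; 'job.hdf5' is a placeholder for a result file written with these patches
    import h5py
    from damask import Result

    r = Result('job.hdf5')
    r.export_setup()                                        # export all stored setup files to the current directory
    r.export_setup(output='material.yaml', overwrite=True)  # re-export a single file, overwriting an existing copy

    # equivalent low-level access to the strings stored in the 'setup' group
    with h5py.File('job.hdf5', 'r') as f:
        for name, obj in f['setup'].items():
            d = obj.attrs['description']
            print(name, '-', d if isinstance(d, str) else d.decode())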