From 809365cd726348ca5bf915a778abd6bd13a440aa Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 14 Aug 2021 13:01:37 +0200 Subject: [PATCH 1/8] reference for 1.4/1 --- examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml | 3 +++ examples/config/phase/mechanical/plastic/phenopowerlaw_Au.yaml | 3 +++ examples/config/phase/mechanical/plastic/phenopowerlaw_Cu.yaml | 3 +++ .../mechanical/plastic/phenopowerlaw_DP-steel-ferrite.yaml | 3 +++ 4 files changed, 12 insertions(+) diff --git a/examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml b/examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml index 8a0692c9b..5024f99e7 100644 --- a/examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml +++ b/examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml @@ -4,6 +4,9 @@ references: Acta Metallurgica 8(3):187-199, 1960, https://doi.org/10.1016/0001-6160(60)90127-9, fitted from Fig. 5 + - U.F. Kocks, + Metallurgical and Materials Transactions B 1:1121–1143, 1970, + https://doi.org/10.1007/BF02900224 output: [xi_sl, gamma_sl] N_sl: [12] n_sl: 20 diff --git a/examples/config/phase/mechanical/plastic/phenopowerlaw_Au.yaml b/examples/config/phase/mechanical/plastic/phenopowerlaw_Au.yaml index b5a034c60..ac9c93ccf 100644 --- a/examples/config/phase/mechanical/plastic/phenopowerlaw_Au.yaml +++ b/examples/config/phase/mechanical/plastic/phenopowerlaw_Au.yaml @@ -6,6 +6,9 @@ references: - I. Kovács and G.Vörös, International Journal of Plasticity 12:35-43, 1996, https://doi.org/10.1016/S0749-6419(95)00043-7 + - U.F. Kocks, + Metallurgical and Materials Transactions B 1:1121–1143, 1970, + https://doi.org/10.1007/BF02900224 output: [xi_sl, gamma_sl] N_sl: [12] n_sl: 83.3 diff --git a/examples/config/phase/mechanical/plastic/phenopowerlaw_Cu.yaml b/examples/config/phase/mechanical/plastic/phenopowerlaw_Cu.yaml index 23fd38acd..4047703cb 100644 --- a/examples/config/phase/mechanical/plastic/phenopowerlaw_Cu.yaml +++ b/examples/config/phase/mechanical/plastic/phenopowerlaw_Cu.yaml @@ -4,6 +4,9 @@ references: Transactions of the Japan Institute of Metals 16(10):629-640, 1975, https://doi.org/10.2320/matertrans1960.16.629, fitted from Fig. 3b + - U.F. Kocks, + Metallurgical and Materials Transactions B 1:1121–1143, 1970, + https://doi.org/10.1007/BF02900224 output: [xi_sl, gamma_sl] N_sl: [12] n_sl: 20 diff --git a/examples/config/phase/mechanical/plastic/phenopowerlaw_DP-steel-ferrite.yaml b/examples/config/phase/mechanical/plastic/phenopowerlaw_DP-steel-ferrite.yaml index 40812c89d..5218e42a8 100644 --- a/examples/config/phase/mechanical/plastic/phenopowerlaw_DP-steel-ferrite.yaml +++ b/examples/config/phase/mechanical/plastic/phenopowerlaw_DP-steel-ferrite.yaml @@ -3,6 +3,9 @@ references: - C.C. Tasan et al., Acta Materialia 81:386-400, 2014, https://doi.org/10.1016/j.actamat.2014.07.071 + - U.F. 
Kocks, + Metallurgical and Materials Transactions B 1:1121–1143, 1970, + https://doi.org/10.1007/BF02900224 output: [xi_sl, gamma_sl] N_sl: [12, 12] n_sl: 20 From 0107acff16bf9c2adb9894cd87a1c7b53823e400 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 14 Aug 2021 13:12:21 +0200 Subject: [PATCH 2/8] original data was given in min^-1 --- examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml b/examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml index 5024f99e7..cc4b56eb6 100644 --- a/examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml +++ b/examples/config/phase/mechanical/plastic/phenopowerlaw_Al.yaml @@ -15,4 +15,4 @@ h_0_sl-sl: 1.7e+8 xi_0_sl: [5.0e+6] xi_inf_sl: [37.5e+6] h_sl-sl: [1, 1, 1.4, 1.4, 1.4, 1.4, 1.4] -dot_gamma_0_sl: 4.5e-3 +dot_gamma_0_sl: 7.5e-5 From 595cc4e59e3938fa28e5627069c43fb88f6e586b Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 14 Aug 2021 15:07:01 +0200 Subject: [PATCH 3/8] always chunk data for small datasets: - only one chunk - no compression --- python/damask/_result.py | 14 +++++----- src/HDF5_utilities.f90 | 58 +++++++++++++++++++++++----------------- 2 files changed, 42 insertions(+), 30 deletions(-) diff --git a/python/damask/_result.py b/python/damask/_result.py index a65ede773..5612df855 100644 --- a/python/damask/_result.py +++ b/python/damask/_result.py @@ -1355,15 +1355,17 @@ class Result: dataset[...] = result['data'] dataset.attrs['overwritten'] = True else: + shape = result['data'].shape if result['data'].size >= chunk_size*2: - shape = result['data'].shape chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:] - dataset = f[group].create_dataset(result['label'],data=result['data'], - maxshape=shape, chunks=chunks, - compression='gzip', compression_opts=6, - shuffle=True,fletcher32=True) + compression = ('gzip',6) else: - dataset = f[group].create_dataset(result['label'],data=result['data']) + chunks = shape + compression = (None,None) + dataset = f[group].create_dataset(result['label'],data=result['data'], + maxshape=shape, chunks=chunks, + compression=compression[0], compression_opts=compression[1], + shuffle=True,fletcher32=True) now = datetime.datetime.now().astimezone() dataset.attrs['created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \ diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index d3af4f7b5..aa83b146b 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -77,10 +77,12 @@ module HDF5_utilities end interface HDF5_addAttribute #ifdef PETSC - logical, parameter, private :: parallel_default = .true. + logical, parameter :: parallel_default = .true. #else - logical, parameter, private :: parallel_default = .false. + logical, parameter :: parallel_default = .false. 
#endif + logical :: compression_possible + public :: & HDF5_utilities_init, & HDF5_read, & @@ -103,26 +105,33 @@ contains !-------------------------------------------------------------------------------------------------- subroutine HDF5_utilities_init - integer :: hdferr + integer :: hdferr, HDF5_major, HDF5_minor, HDF5_release, deflate_info integer(SIZE_T) :: typeSize + print'(/,a)', ' <<<+- HDF5_Utilities init -+>>>' -!-------------------------------------------------------------------------------------------------- -!initialize HDF5 library and check if integer and float type size match + call h5open_f(hdferr) - if(hdferr < 0) error stop 'HDF5 error' + if (hdferr < 0) error stop 'HDF5 error' call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr) - if(hdferr < 0) error stop 'HDF5 error' + if (hdferr < 0) error stop 'HDF5 error' if (int(bit_size(0),SIZE_T)/=typeSize*8) & error stop 'Default integer size does not match H5T_NATIVE_INTEGER' call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr) - if(hdferr < 0) error stop 'HDF5 error' + if (hdferr < 0) error stop 'HDF5 error' if (int(storage_size(0.0_pReal),SIZE_T)/=typeSize*8) & error stop 'pReal does not match H5T_NATIVE_DOUBLE' + call H5get_libversion_f(HDF5_major,HDF5_minor,HDF5_release,hdferr) + if (hdferr < 0) error stop 'HDF5 error' + call H5Zget_filter_info_f(H5Z_FILTER_DEFLATE_F,deflate_info,hdferr) + if (hdferr < 0) error stop 'HDF5 error' + compression_possible = (HDF5_major == 1 .and. HDF5_minor >= 12) .and. & ! https://forum.hdfgroup.org/t/6186 + ior(H5Z_FILTER_ENCODE_ENABLED_F,deflate_info) > 0 + end subroutine HDF5_utilities_init @@ -1907,11 +1916,12 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, & totalShape !< shape of the dataset (all processes) integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id - integer, dimension(worldsize) :: writeSize !< contribution of all processes + integer, dimension(worldsize) :: writeSize !< contribution of all processes integer(HID_T) :: dcpl - integer :: ierr, hdferr, HDF5_major, HDF5_minor, HDF5_release + integer :: ierr, hdferr integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T + !------------------------------------------------------------------------------------------------- ! creating a property list for transfer properties (is collective when writing in parallel) call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) @@ -1938,23 +1948,23 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, & totalShape = [myShape(1:ubound(myShape,1)-1),int(sum(writeSize),HSIZE_T)] !-------------------------------------------------------------------------------------------------- -! compress (and chunk) larger datasets +! chunk dataset, enable compression for larger datasets call h5pcreate_f(H5P_DATASET_CREATE_F, dcpl, hdferr) - if(hdferr < 0) error stop 'HDF5 error' - if(product(totalShape) >= chunkSize*2_HSIZE_T) then - call H5get_libversion_f(HDF5_major,HDF5_minor,HDF5_release,hdferr) + if (hdferr < 0) error stop 'HDF5 error' + + call h5pset_shuffle_f(dcpl, hdferr) + if (hdferr < 0) error stop 'HDF5 error' + call h5pset_Fletcher32_f(dcpl,hdferr) + if (hdferr < 0) error stop 'HDF5 error' + + if (product(totalShape) >= chunkSize*2_HSIZE_T) then + call h5pset_chunk_f(dcpl, size(totalShape), getChunks(totalShape,chunkSize), hdferr) if (hdferr < 0) error stop 'HDF5 error' - if (HDF5_major == 1 .and. HDF5_minor >= 12) then ! 
https://forum.hdfgroup.org/t/6186 - call h5pset_chunk_f(dcpl, size(totalShape), getChunks(totalShape,chunkSize), hdferr) - if (hdferr < 0) error stop 'HDF5 error' - call h5pset_shuffle_f(dcpl, hdferr) - if (hdferr < 0) error stop 'HDF5 error' - call h5pset_deflate_f(dcpl, 6, hdferr) - if (hdferr < 0) error stop 'HDF5 error' - call h5pset_Fletcher32_f(dcpl,hdferr) - if (hdferr < 0) error stop 'HDF5 error' - endif + if (compression_possible) call h5pset_deflate_f(dcpl, 6, hdferr) + else + call h5pset_chunk_f(dcpl, size(totalShape), totalShape, hdferr) endif + if (hdferr < 0) error stop 'HDF5 error' !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) and in file (global shape) From 30d9f57db08c363857beaf812c01233d6be976d8 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 14 Aug 2021 16:28:51 +0200 Subject: [PATCH 4/8] also compress string datasets results in different type (numpy string instead of numpy object), tests need to be adjusted --- PRIVATE | 2 +- python/damask/_result.py | 2 +- src/HDF5_utilities.f90 | 40 +++++++++++++++++++++++++++++----------- 3 files changed, 31 insertions(+), 13 deletions(-) diff --git a/PRIVATE b/PRIVATE index 4ce625b4a..bea0b1c2e 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit 4ce625b4ac0da9d490620f8cf1694d0a057cfa47 +Subproject commit bea0b1c2e3ead08d0d2f1e5b4561bfe6251052a4 diff --git a/python/damask/_result.py b/python/damask/_result.py index 5612df855..6138e72fc 100644 --- a/python/damask/_result.py +++ b/python/damask/_result.py @@ -1761,7 +1761,7 @@ class Result: if type(obj) == h5py.Dataset and _match(output,[name]): d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode() if not Path(name).exists() or overwrite: - with open(name,'w') as f_out: f_out.write(obj[()].decode()) + with open(name,'w') as f_out: f_out.write(obj[0].decode()) print(f"Exported {d} to '{name}'.") else: print(f"'{name}' exists, {d} not exported.") diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index aa83b146b..30b877216 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -136,7 +136,7 @@ end subroutine HDF5_utilities_init !-------------------------------------------------------------------------------------------------- -!> @brief open and initializes HDF5 output file +!> @brief Open and initialize HDF5 file. !-------------------------------------------------------------------------------------------------- integer(HID_T) function HDF5_openFile(fileName,mode,parallel) @@ -1485,32 +1485,50 @@ subroutine HDF5_write_str(dataset,loc_id,datasetName) integer(HID_T), intent(in) :: loc_id character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(HID_T) :: filetype_id, space_id, dataset_id + integer(HID_T) :: filetype_id, memtype_id, space_id, dataset_id, dcpl integer :: hdferr - character(len=len_trim(dataset)+1,kind=C_CHAR), dimension(1), target :: dataset_ - type(C_PTR), target, dimension(1) :: ptr + character(len=len_trim(dataset),kind=C_CHAR), target :: dataset_ - dataset_(1) = trim(dataset)//C_NULL_CHAR - ptr(1) = c_loc(dataset_(1)) + dataset_ = trim(dataset) - call h5tcopy_f(H5T_STRING, filetype_id, hdferr) + call h5tcopy_f(H5T_C_S1, filetype_id, hdferr) if(hdferr < 0) error stop 'HDF5 error' - call h5tset_size_f(filetype_id, int(len(dataset_),HSIZE_T), hdferr) + call h5tset_size_f(filetype_id, int(len(dataset_)+1,HSIZE_T), hdferr) ! 
+1 for NULL if(hdferr < 0) error stop 'HDF5 error' - call h5screate_f(H5S_SCALAR_F, space_id, hdferr) + call H5Tcopy_f(H5T_FORTRAN_S1, memtype_id, hdferr) if(hdferr < 0) error stop 'HDF5 error' - call h5dcreate_f(loc_id, datasetName, H5T_STRING, space_id, dataset_id, hdferr) + call H5Tset_size_f(memtype_id, int(len(dataset_),HSIZE_T), hdferr) if(hdferr < 0) error stop 'HDF5 error' - call h5dwrite_f(dataset_id, H5T_STRING, c_loc(ptr), hdferr) + call h5pcreate_f(H5P_DATASET_CREATE_F, dcpl, hdferr) + if (hdferr < 0) error stop 'HDF5 error' + call h5pset_chunk_f(dcpl, 1, [1_HSIZE_T], hdferr) + if (hdferr < 0) error stop 'HDF5 error' + call h5pset_shuffle_f(dcpl, hdferr) + if (hdferr < 0) error stop 'HDF5 error' + call h5pset_Fletcher32_f(dcpl,hdferr) + if (hdferr < 0) error stop 'HDF5 error' + if (compression_possible .and. len(dataset) > 1024*256) call h5pset_deflate_f(dcpl, 6, hdferr) + if (hdferr < 0) error stop 'HDF5 error' + + call h5screate_simple_f(1, [1_HSIZE_T], space_id, hdferr) + if(hdferr < 0) error stop 'HDF5 error' + CALL h5dcreate_f(loc_id, datasetName, filetype_id, space_id, dataset_id, hdferr, dcpl) if(hdferr < 0) error stop 'HDF5 error' + call h5dwrite_f(dataset_id, memtype_id, c_loc(dataset_(1:1)), hdferr) + if(hdferr < 0) error stop 'HDF5 error' + + call h5pclose_f(dcpl, hdferr) + if(hdferr < 0) error stop 'HDF5 error' call h5dclose_f(dataset_id, hdferr) if(hdferr < 0) error stop 'HDF5 error' call h5sclose_f(space_id, hdferr) if(hdferr < 0) error stop 'HDF5 error' + call h5tclose_f(memtype_id, hdferr) + if(hdferr < 0) error stop 'HDF5 error' call h5tclose_f(filetype_id, hdferr) if(hdferr < 0) error stop 'HDF5 error' From 1963343cd8e0388f5af0f75b61c8f1d185f3e72e Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 14 Aug 2021 20:01:25 +0200 Subject: [PATCH 5/8] avoid issues with empty files optional files (numerics.yaml, debug.yaml) can be empty --- PRIVATE | 2 +- src/config.f90 | 16 ++++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/PRIVATE b/PRIVATE index bea0b1c2e..ad4a685d4 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit bea0b1c2e3ead08d0d2f1e5b4561bfe6251052a4 +Subproject commit ad4a685d454271a400914c0334c017e3ac0dfc99 diff --git a/src/config.f90 b/src/config.f90 index ecde0831c..0dfb8c878 100644 --- a/src/config.f90 +++ b/src/config.f90 @@ -83,9 +83,11 @@ subroutine parse_numerics() if (worldrank == 0) then print*, 'reading numerics.yaml'; flush(IO_STDOUT) fileContent = IO_read('numerics.yaml') - call results_openJobFile(parallel=.false.) - call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration') - call results_closeJobFile + if (len(fileContent) > 0) then + call results_openJobFile(parallel=.false.) + call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration') + call results_closeJobFile + endif endif call parallelization_bcast_str(fileContent) @@ -113,9 +115,11 @@ subroutine parse_debug() if (worldrank == 0) then print*, 'reading debug.yaml'; flush(IO_STDOUT) fileContent = IO_read('debug.yaml') - call results_openJobFile(parallel=.false.) - call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration') - call results_closeJobFile + if (len(fileContent) > 0) then + call results_openJobFile(parallel=.false.) 
+ call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration') + call results_closeJobFile + endif endif call parallelization_bcast_str(fileContent) From 497bf2c6455c6370041f69c22dbb973fe29d2e24 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 14 Aug 2021 20:44:59 +0200 Subject: [PATCH 6/8] avoid errors for empty datasets empty dataset cannot be chunked, but filters can only be applied to chunked datasets --- src/HDF5_utilities.f90 | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 30b877216..540ce6781 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -1970,20 +1970,22 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, & call h5pcreate_f(H5P_DATASET_CREATE_F, dcpl, hdferr) if (hdferr < 0) error stop 'HDF5 error' - call h5pset_shuffle_f(dcpl, hdferr) - if (hdferr < 0) error stop 'HDF5 error' - call h5pset_Fletcher32_f(dcpl,hdferr) - if (hdferr < 0) error stop 'HDF5 error' - - if (product(totalShape) >= chunkSize*2_HSIZE_T) then - call h5pset_chunk_f(dcpl, size(totalShape), getChunks(totalShape,chunkSize), hdferr) + if (product(totalShape) > 0) then + call h5pset_shuffle_f(dcpl, hdferr) + if (hdferr < 0) error stop 'HDF5 error' + call h5pset_Fletcher32_f(dcpl,hdferr) if (hdferr < 0) error stop 'HDF5 error' - if (compression_possible) call h5pset_deflate_f(dcpl, 6, hdferr) - else - call h5pset_chunk_f(dcpl, size(totalShape), totalShape, hdferr) - endif - if (hdferr < 0) error stop 'HDF5 error' + if (product(totalShape) >= chunkSize*2_HSIZE_T) then + call h5pset_chunk_f(dcpl, size(totalShape), getChunks(totalShape,chunkSize), hdferr) + if (hdferr < 0) error stop 'HDF5 error' + if (compression_possible) call h5pset_deflate_f(dcpl, 6, hdferr) + else + call h5pset_chunk_f(dcpl, size(totalShape), totalShape, hdferr) + endif + if (hdferr < 0) error stop 'HDF5 error' + endif + !-------------------------------------------------------------------------------------------------- ! 
create dataspace in memory (local shape) and in file (global shape) call h5screate_simple_f(size(myShape), myShape, memspace_id, hdferr, myShape) From 4160c4fdb45e0be4c131a090b0ca8003cc9d981b Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 15 Aug 2021 08:04:27 +0200 Subject: [PATCH 7/8] fix for parallel HDF5 if filters are applied, writing from one process does not work if the file is opened for parallel write --- PRIVATE | 2 +- cmake/Compiler-GNU.cmake | 4 ++-- src/DAMASK_interface.f90 | 4 ++-- src/grid/grid_mech_FEM.f90 | 22 ++++++++++-------- src/grid/grid_mech_spectral_basic.f90 | 24 ++++++++++++-------- src/grid/grid_mech_spectral_polarisation.f90 | 22 ++++++++++-------- 6 files changed, 45 insertions(+), 33 deletions(-) diff --git a/PRIVATE b/PRIVATE index ad4a685d4..7d783328f 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit ad4a685d454271a400914c0334c017e3ac0dfc99 +Subproject commit 7d783328ff5deac313eb7de951d51d19b5883b84 diff --git a/cmake/Compiler-GNU.cmake b/cmake/Compiler-GNU.cmake index b9a7406dc..d0e7d81a2 100644 --- a/cmake/Compiler-GNU.cmake +++ b/cmake/Compiler-GNU.cmake @@ -12,9 +12,9 @@ endif () if (OPTIMIZATION STREQUAL "OFF") set (OPTIMIZATION_FLAGS "-O0") elseif (OPTIMIZATION STREQUAL "DEFENSIVE") - set (OPTIMIZATION_FLAGS "-O2") + set (OPTIMIZATION_FLAGS "-O2 -mtune=generic") elseif (OPTIMIZATION STREQUAL "AGGRESSIVE") - set (OPTIMIZATION_FLAGS "-O3 -ffast-math -funroll-loops -ftree-vectorize") + set (OPTIMIZATION_FLAGS "-O3 -march=native -ffast-math -funroll-loops -ftree-vectorize") endif () set (STANDARD_CHECK "-std=f2018 -pedantic-errors" ) diff --git a/src/DAMASK_interface.f90 b/src/DAMASK_interface.f90 index ca3179afc..7bfe93f9a 100644 --- a/src/DAMASK_interface.f90 +++ b/src/DAMASK_interface.f90 @@ -189,10 +189,10 @@ subroutine DAMASK_interface_init if (len_trim(workingDirArg) > 0) & print'(a)', ' Working dir argument: '//trim(workingDirArg) print'(a)', ' Geometry argument: '//trim(geometryArg) - print'(a)', ' Loadcase argument: '//trim(loadcaseArg) + print'(a)', ' Load case argument: '//trim(loadcaseArg) print'(/,a)', ' Working directory: '//getCWD() print'(a)', ' Geometry file: '//interface_geomFile - print'(a)', ' Loadcase file: '//interface_loadFile + print'(a)', ' Load case file: '//interface_loadFile print'(a)', ' Solver job name: '//getSolverJobName() if (interface_restartInc > 0) & print'(a,i6.6)', ' Restart from increment: ', interface_restartInc diff --git a/src/grid/grid_mech_FEM.f90 b/src/grid/grid_mech_FEM.f90 index 9cccd2fc0..ca616dffb 100644 --- a/src/grid/grid_mech_FEM.f90 +++ b/src/grid/grid_mech_FEM.f90 @@ -446,22 +446,26 @@ subroutine grid_mechanical_FEM_restartWrite fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w') groupHandle = HDF5_addGroup(fileHandle,'solver') - - call HDF5_write(P_aim,groupHandle,'P_aim',.false.) - call HDF5_write(F_aim,groupHandle,'F_aim',.false.) - call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) - call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.) call HDF5_write(F,groupHandle,'F') call HDF5_write(F_lastInc,groupHandle,'F_lastInc') call HDF5_write(u_current,groupHandle,'u') call HDF5_write(u_lastInc,groupHandle,'u_lastInc') - - call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.) - call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) - call HDF5_closeGroup(groupHandle) call HDF5_closeFile(fileHandle) + if (worldrank == 0) then + fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a',.false.) 
+ groupHandle = HDF5_openGroup(fileHandle,'solver') + call HDF5_write(P_aim,groupHandle,'P_aim',.false.) + call HDF5_write(F_aim,groupHandle,'F_aim',.false.) + call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) + call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.) + call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.) + call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) + call HDF5_closeGroup(groupHandle) + call HDF5_closeFile(fileHandle) + endif + call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr) CHKERRQ(ierr) call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr) diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90 index 6f382d639..b033a7b29 100644 --- a/src/grid/grid_mech_spectral_basic.f90 +++ b/src/grid/grid_mech_spectral_basic.f90 @@ -389,21 +389,25 @@ subroutine grid_mechanical_spectral_basic_restartWrite fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w') groupHandle = HDF5_addGroup(fileHandle,'solver') - - call HDF5_write(P_aim,groupHandle,'P_aim',.false.) - call HDF5_write(F_aim,groupHandle,'F_aim',.false.) - call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) - call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.) call HDF5_write(F,groupHandle,'F') call HDF5_write(F_lastInc,groupHandle,'F_lastInc') - - call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.) - call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) - call HDF5_write(C_minMaxAvg,groupHandle,'C_minMaxAvg',.false.) - call HDF5_closeGroup(groupHandle) call HDF5_closeFile(fileHandle) + if (worldrank == 0) then + fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a',.false.) + groupHandle = HDF5_openGroup(fileHandle,'solver') + call HDF5_write(P_aim,groupHandle,'P_aim',.false.) + call HDF5_write(F_aim,groupHandle,'F_aim',.false.) + call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) + call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.) + call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.) + call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) + call HDF5_write(C_minMaxAvg,groupHandle,'C_minMaxAvg',.false.) + call HDF5_closeGroup(groupHandle) + call HDF5_closeFile(fileHandle) + endif + if (num%update_gamma) call utilities_saveReferenceStiffness call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90 index 7a4ae7595..8258ad43d 100644 --- a/src/grid/grid_mech_spectral_polarisation.f90 +++ b/src/grid/grid_mech_spectral_polarisation.f90 @@ -445,22 +445,26 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w') groupHandle = HDF5_addGroup(fileHandle,'solver') - - call HDF5_write(F_aim,groupHandle,'P_aim',.false.) - call HDF5_write(F_aim,groupHandle,'F_aim',.false.) - call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) - call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.) call HDF5_write(F,groupHandle,'F') call HDF5_write(F_lastInc,groupHandle,'F_lastInc') call HDF5_write(F_tau,groupHandle,'F_tau') call HDF5_write(F_tau_lastInc,groupHandle,'F_tau_lastInc') - - call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.) - call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) 
- call HDF5_closeGroup(groupHandle) call HDF5_closeFile(fileHandle) + if (worldrank == 0) then + fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a',.false.) + groupHandle = HDF5_openGroup(fileHandle,'solver') + call HDF5_write(F_aim,groupHandle,'P_aim',.false.) + call HDF5_write(F_aim,groupHandle,'F_aim',.false.) + call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) + call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.) + call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.) + call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) + call HDF5_closeGroup(groupHandle) + call HDF5_closeFile(fileHandle) + endif + if(num%update_gamma) call utilities_saveReferenceStiffness call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) From 5b63dc1e5e72a4a624c5997693ceaafcde298961 Mon Sep 17 00:00:00 2001 From: Test User Date: Mon, 16 Aug 2021 19:23:46 +0200 Subject: [PATCH 8/8] [skip ci] updated version information after successful test of v3.0.0-alpha4-308-gb79fc5c09 --- python/damask/VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/damask/VERSION b/python/damask/VERSION index e35e43420..5ea374cfd 100644 --- a/python/damask/VERSION +++ b/python/damask/VERSION @@ -1 +1 @@ -v3.0.0-alpha4-298-g01d545861 +v3.0.0-alpha4-308-gb79fc5c09
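
Note on the dataset-creation policy in this series: PATCH 3/8 changes both the Python and the Fortran side so that every dataset is chunked — small datasets get a single chunk covering the whole array and no compression, while larger datasets get roughly 1 MiB chunks with gzip (level 6) plus the shuffle and Fletcher32 filters — and PATCH 6/8 additionally skips all filters for empty datasets, since an empty dataset cannot be chunked. The following standalone h5py sketch mirrors the Python-side branch from _result.py; the file name, function name, demo array, and the CHUNK_SIZE value (chosen here to mirror the Fortran constant chunkSize = 1024**2/8 elements) are illustrative assumptions, not part of the patch.

# Minimal sketch of the chunking/compression policy from PATCH 3/8 (python/damask/_result.py).
# Names 'out.h5', 'demo', write_dataset, and the CHUNK_SIZE value are illustrative.
import h5py
import numpy as np

CHUNK_SIZE = 1024**2 // 8   # elements per chunk (assumption: ~1 MiB of float64, as in the Fortran constant)

def write_dataset(f, name, data):
    """Write data as a chunked dataset; enable gzip only if it spans at least two chunks."""
    shape = data.shape
    if data.size >= CHUNK_SIZE * 2:
        # large dataset: chunks of roughly CHUNK_SIZE elements along the first axis, gzip level 6
        chunks = (CHUNK_SIZE // int(np.prod(shape[1:], dtype=np.int64)),) + shape[1:]
        compression, opts = 'gzip', 6
    else:
        # small dataset: a single chunk covering the whole array, no compression
        chunks = shape
        compression, opts = None, None
    return f.create_dataset(name, data=data,
                            maxshape=shape, chunks=chunks,
                            compression=compression, compression_opts=opts,
                            shuffle=True, fletcher32=True)

with h5py.File('out.h5', 'w') as f:
    write_dataset(f, 'demo', np.random.rand(4, 3, 3))   # small: one chunk, uncompressed

Keeping even small datasets chunked (rather than contiguous) means the shuffle and Fletcher32 filters can always be applied, which is the design choice that PATCH 6/8 then has to guard for the zero-size case on the Fortran side.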