Merge remote-tracking branch 'origin/development' into YAML-bug-fix

commit 93d23fa6bb
Sharan Roongta, 2021-08-17 18:57:14 +02:00

13 changed files with 140 additions and 80 deletions

@@ -12,9 +12,9 @@ endif ()
 if (OPTIMIZATION STREQUAL "OFF")
   set (OPTIMIZATION_FLAGS "-O0")
 elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
-  set (OPTIMIZATION_FLAGS "-O2")
+  set (OPTIMIZATION_FLAGS "-O2 -mtune=generic")
 elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
-  set (OPTIMIZATION_FLAGS "-O3 -ffast-math -funroll-loops -ftree-vectorize")
+  set (OPTIMIZATION_FLAGS "-O3 -march=native -ffast-math -funroll-loops -ftree-vectorize")
 endif ()
 set (STANDARD_CHECK "-std=f2018 -pedantic-errors" )

@@ -4,6 +4,9 @@ references:
   Acta Metallurgica 8(3):187-199, 1960,
   https://doi.org/10.1016/0001-6160(60)90127-9,
   fitted from Fig. 5
+- U.F. Kocks,
+  Metallurgical and Materials Transactions B 1:1121-1143, 1970,
+  https://doi.org/10.1007/BF02900224
 output: [xi_sl, gamma_sl]
 N_sl: [12]
 n_sl: 20
@@ -12,4 +15,4 @@ h_0_sl-sl: 1.7e+8
 xi_0_sl: [5.0e+6]
 xi_inf_sl: [37.5e+6]
 h_sl-sl: [1, 1, 1.4, 1.4, 1.4, 1.4, 1.4]
-dot_gamma_0_sl: 4.5e-3
+dot_gamma_0_sl: 7.5e-5

@@ -6,6 +6,9 @@ references:
 - I. Kovács and G. Vörös,
   International Journal of Plasticity 12:35-43, 1996,
   https://doi.org/10.1016/S0749-6419(95)00043-7
+- U.F. Kocks,
+  Metallurgical and Materials Transactions B 1:1121-1143, 1970,
+  https://doi.org/10.1007/BF02900224
 output: [xi_sl, gamma_sl]
 N_sl: [12]
 n_sl: 83.3

@@ -4,6 +4,9 @@ references:
   Transactions of the Japan Institute of Metals 16(10):629-640, 1975,
   https://doi.org/10.2320/matertrans1960.16.629,
   fitted from Fig. 3b
+- U.F. Kocks,
+  Metallurgical and Materials Transactions B 1:1121-1143, 1970,
+  https://doi.org/10.1007/BF02900224
 output: [xi_sl, gamma_sl]
 N_sl: [12]
 n_sl: 20

@@ -3,6 +3,9 @@ references:
 - C.C. Tasan et al.,
   Acta Materialia 81:386-400, 2014,
   https://doi.org/10.1016/j.actamat.2014.07.071
+- U.F. Kocks,
+  Metallurgical and Materials Transactions B 1:1121-1143, 1970,
+  https://doi.org/10.1007/BF02900224
 output: [xi_sl, gamma_sl]
 N_sl: [12, 12]
 n_sl: 20

@@ -1 +1 @@
-v3.0.0-alpha4-298-g01d545861
+v3.0.0-alpha4-308-gb79fc5c09

@@ -1355,15 +1355,17 @@ class Result:
                     dataset[...] = result['data']
                     dataset.attrs['overwritten'] = True
                 else:
+                    shape = result['data'].shape
                     if result['data'].size >= chunk_size*2:
-                        shape = result['data'].shape
                         chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
-                        dataset = f[group].create_dataset(result['label'],data=result['data'],
-                                                          maxshape=shape, chunks=chunks,
-                                                          compression='gzip', compression_opts=6,
-                                                          shuffle=True,fletcher32=True)
+                        compression = ('gzip',6)
                     else:
-                        dataset = f[group].create_dataset(result['label'],data=result['data'])
+                        chunks = shape
+                        compression = (None,None)
+                    dataset = f[group].create_dataset(result['label'],data=result['data'],
+                                                      maxshape=shape, chunks=chunks,
+                                                      compression=compression[0], compression_opts=compression[1],
+                                                      shuffle=True,fletcher32=True)

                 now = datetime.datetime.now().astimezone()
                 dataset.attrs['created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
@@ -1759,7 +1761,7 @@ class Result:
             if type(obj) == h5py.Dataset and _match(output,[name]):
                 d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode()
                 if not Path(name).exists() or overwrite:
-                    with open(name,'w') as f_out: f_out.write(obj[()].decode())
+                    with open(name,'w') as f_out: f_out.write(obj[0].decode())
                     print(f"Exported {d} to '{name}'.")
                 else:
                     print(f"'{name}' exists, {d} not exported.")

@@ -189,10 +189,10 @@ subroutine DAMASK_interface_init
   if (len_trim(workingDirArg) > 0) &
     print'(a)', ' Working dir argument: '//trim(workingDirArg)
   print'(a)',   ' Geometry argument: '//trim(geometryArg)
-  print'(a)',   ' Loadcase argument: '//trim(loadcaseArg)
+  print'(a)',   ' Load case argument: '//trim(loadcaseArg)
   print'(/,a)', ' Working directory: '//getCWD()
   print'(a)',   ' Geometry file: '//interface_geomFile
-  print'(a)',   ' Loadcase file: '//interface_loadFile
+  print'(a)',   ' Load case file: '//interface_loadFile
   print'(a)',   ' Solver job name: '//getSolverJobName()
   if (interface_restartInc > 0) &
     print'(a,i6.6)', ' Restart from increment: ', interface_restartInc

@@ -77,10 +77,12 @@ module HDF5_utilities
 end interface HDF5_addAttribute

 #ifdef PETSC
-  logical, parameter, private :: parallel_default = .true.
+  logical, parameter :: parallel_default = .true.
 #else
-  logical, parameter, private :: parallel_default = .false.
+  logical, parameter :: parallel_default = .false.
 #endif
+  logical :: compression_possible

 public :: &
   HDF5_utilities_init, &
   HDF5_read, &
@@ -103,31 +105,38 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_utilities_init

-  integer :: hdferr
+  integer :: hdferr, HDF5_major, HDF5_minor, HDF5_release, deflate_info
   integer(SIZE_T) :: typeSize

   print'(/,a)', ' <<<+- HDF5_Utilities init -+>>>'

-!--------------------------------------------------------------------------------------------------
-!initialize HDF5 library and check if integer and float type size match
   call h5open_f(hdferr)
-  if(hdferr < 0) error stop 'HDF5 error'
+  if (hdferr < 0) error stop 'HDF5 error'
   call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr)
-  if(hdferr < 0) error stop 'HDF5 error'
+  if (hdferr < 0) error stop 'HDF5 error'
   if (int(bit_size(0),SIZE_T)/=typeSize*8) &
     error stop 'Default integer size does not match H5T_NATIVE_INTEGER'
   call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr)
-  if(hdferr < 0) error stop 'HDF5 error'
+  if (hdferr < 0) error stop 'HDF5 error'
   if (int(storage_size(0.0_pReal),SIZE_T)/=typeSize*8) &
     error stop 'pReal does not match H5T_NATIVE_DOUBLE'
+  call H5get_libversion_f(HDF5_major,HDF5_minor,HDF5_release,hdferr)
+  if (hdferr < 0) error stop 'HDF5 error'
+  call H5Zget_filter_info_f(H5Z_FILTER_DEFLATE_F,deflate_info,hdferr)
+  if (hdferr < 0) error stop 'HDF5 error'
+  compression_possible = (HDF5_major == 1 .and. HDF5_minor >= 12) .and. &                           ! https://forum.hdfgroup.org/t/6186
+                         ior(H5Z_FILTER_ENCODE_ENABLED_F,deflate_info) > 0

 end subroutine HDF5_utilities_init

 !--------------------------------------------------------------------------------------------------
-!> @brief open and initializes HDF5 output file
+!> @brief Open and initialize HDF5 file.
 !--------------------------------------------------------------------------------------------------
 integer(HID_T) function HDF5_openFile(fileName,mode,parallel)
@@ -1476,32 +1485,50 @@ subroutine HDF5_write_str(dataset,loc_id,datasetName)
   integer(HID_T), intent(in) :: loc_id
   character(len=*), intent(in) :: datasetName                                                       !< name of the dataset in the file

-  integer(HID_T) :: filetype_id, space_id, dataset_id
+  integer(HID_T) :: filetype_id, memtype_id, space_id, dataset_id, dcpl
   integer :: hdferr
-  character(len=len_trim(dataset)+1,kind=C_CHAR), dimension(1), target :: dataset_
-  type(C_PTR), target, dimension(1) :: ptr
+  character(len=len_trim(dataset),kind=C_CHAR), target :: dataset_

-  dataset_(1) = trim(dataset)//C_NULL_CHAR
-  ptr(1) = c_loc(dataset_(1))
+  dataset_ = trim(dataset)

-  call h5tcopy_f(H5T_STRING, filetype_id, hdferr)
+  call h5tcopy_f(H5T_C_S1, filetype_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-  call h5tset_size_f(filetype_id, int(len(dataset_),HSIZE_T), hdferr)
+  call h5tset_size_f(filetype_id, int(len(dataset_)+1,HSIZE_T), hdferr)                             ! +1 for NULL
   if(hdferr < 0) error stop 'HDF5 error'
-  call h5screate_f(H5S_SCALAR_F, space_id, hdferr)
+  call H5Tcopy_f(H5T_FORTRAN_S1, memtype_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-  call h5dcreate_f(loc_id, datasetName, H5T_STRING, space_id, dataset_id, hdferr)
+  call H5Tset_size_f(memtype_id, int(len(dataset_),HSIZE_T), hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-  call h5dwrite_f(dataset_id, H5T_STRING, c_loc(ptr), hdferr)
+  call h5pcreate_f(H5P_DATASET_CREATE_F, dcpl, hdferr)
+  if (hdferr < 0) error stop 'HDF5 error'
+  call h5pset_chunk_f(dcpl, 1, [1_HSIZE_T], hdferr)
+  if (hdferr < 0) error stop 'HDF5 error'
+  call h5pset_shuffle_f(dcpl, hdferr)
+  if (hdferr < 0) error stop 'HDF5 error'
+  call h5pset_Fletcher32_f(dcpl,hdferr)
+  if (hdferr < 0) error stop 'HDF5 error'
+  if (compression_possible .and. len(dataset) > 1024*256) call h5pset_deflate_f(dcpl, 6, hdferr)
+  if (hdferr < 0) error stop 'HDF5 error'
+
+  call h5screate_simple_f(1, [1_HSIZE_T], space_id, hdferr)
+  if(hdferr < 0) error stop 'HDF5 error'
+  CALL h5dcreate_f(loc_id, datasetName, filetype_id, space_id, dataset_id, hdferr, dcpl)
   if(hdferr < 0) error stop 'HDF5 error'
+  call h5dwrite_f(dataset_id, memtype_id, c_loc(dataset_(1:1)), hdferr)
+  if(hdferr < 0) error stop 'HDF5 error'

+  call h5pclose_f(dcpl, hdferr)
+  if(hdferr < 0) error stop 'HDF5 error'
   call h5dclose_f(dataset_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
   call h5sclose_f(space_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
+  call h5tclose_f(memtype_id, hdferr)
+  if(hdferr < 0) error stop 'HDF5 error'
   call h5tclose_f(filetype_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
@@ -1907,11 +1934,12 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
                                 totalShape                                                          !< shape of the dataset (all processes)

   integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id
   integer, dimension(worldsize) :: writeSize                                                        !< contribution of all processes
   integer(HID_T) :: dcpl
-  integer :: ierr, hdferr, HDF5_major, HDF5_minor, HDF5_release
+  integer :: ierr, hdferr
   integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T

 !-------------------------------------------------------------------------------------------------
 ! creating a property list for transfer properties (is collective when writing in parallel)
   call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
@@ -1938,24 +1966,26 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
   totalShape = [myShape(1:ubound(myShape,1)-1),int(sum(writeSize),HSIZE_T)]

 !--------------------------------------------------------------------------------------------------
-! compress (and chunk) larger datasets
+! chunk dataset, enable compression for larger datasets
   call h5pcreate_f(H5P_DATASET_CREATE_F, dcpl, hdferr)
-  if(hdferr < 0) error stop 'HDF5 error'
-  if(product(totalShape) >= chunkSize*2_HSIZE_T) then
-    call H5get_libversion_f(HDF5_major,HDF5_minor,HDF5_release,hdferr)
+  if (hdferr < 0) error stop 'HDF5 error'
+  if (product(totalShape) > 0) then
+    call h5pset_shuffle_f(dcpl, hdferr)
     if (hdferr < 0) error stop 'HDF5 error'
-    if (HDF5_major == 1 .and. HDF5_minor >= 12) then                                                ! https://forum.hdfgroup.org/t/6186
+    call h5pset_Fletcher32_f(dcpl,hdferr)
+    if (hdferr < 0) error stop 'HDF5 error'
+    if (product(totalShape) >= chunkSize*2_HSIZE_T) then
       call h5pset_chunk_f(dcpl, size(totalShape), getChunks(totalShape,chunkSize), hdferr)
       if (hdferr < 0) error stop 'HDF5 error'
-      call h5pset_shuffle_f(dcpl, hdferr)
-      if (hdferr < 0) error stop 'HDF5 error'
-      call h5pset_deflate_f(dcpl, 6, hdferr)
-      if (hdferr < 0) error stop 'HDF5 error'
-      call h5pset_Fletcher32_f(dcpl,hdferr)
-      if (hdferr < 0) error stop 'HDF5 error'
+      if (compression_possible) call h5pset_deflate_f(dcpl, 6, hdferr)
+    else
+      call h5pset_chunk_f(dcpl, size(totalShape), totalShape, hdferr)
     endif
+    if (hdferr < 0) error stop 'HDF5 error'
   endif

 !--------------------------------------------------------------------------------------------------
 ! create dataspace in memory (local shape) and in file (global shape)
   call h5screate_simple_f(size(myShape), myShape, memspace_id, hdferr, myShape)
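
Note: HDF5_utilities_init now probes the library once and stores the result in compression_possible, which the write routines consult before requesting the deflate filter. A rough h5py translation of that probe, for illustration only (the helper name is an assumption; the committed code is the Fortran above):

import h5py
from h5py import h5z

def deflate_available():
    """True if the gzip (deflate) filter can encode and the HDF5 library
    is at least 1.12 (cf. https://forum.hdfgroup.org/t/6186)."""
    major, minor, _ = h5py.h5.get_libversion()
    filter_info = h5z.get_filter_info(h5z.FILTER_DEFLATE)
    encode_enabled = bool(filter_info & h5z.FILTER_CONFIG_ENCODE_ENABLED)
    return (major, minor) >= (1, 12) and encode_enabled

print(deflate_available())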

@@ -83,9 +83,11 @@ subroutine parse_numerics()
   if (worldrank == 0) then
     print*, 'reading numerics.yaml'; flush(IO_STDOUT)
     fileContent = IO_read('numerics.yaml')
-    call results_openJobFile(parallel=.false.)
-    call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration')
-    call results_closeJobFile
+    if (len(fileContent) > 0) then
+      call results_openJobFile(parallel=.false.)
+      call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration')
+      call results_closeJobFile
+    endif
   endif

   call parallelization_bcast_str(fileContent)
@@ -113,9 +115,11 @@ subroutine parse_debug()
   if (worldrank == 0) then
     print*, 'reading debug.yaml'; flush(IO_STDOUT)
     fileContent = IO_read('debug.yaml')
-    call results_openJobFile(parallel=.false.)
-    call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration')
-    call results_closeJobFile
+    if (len(fileContent) > 0) then
+      call results_openJobFile(parallel=.false.)
+      call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration')
+      call results_closeJobFile
+    endif
   endif

   call parallelization_bcast_str(fileContent)

@@ -446,22 +446,26 @@ subroutine grid_mechanical_FEM_restartWrite
   fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
   groupHandle = HDF5_addGroup(fileHandle,'solver')
-  call HDF5_write(P_aim,groupHandle,'P_aim',.false.)
-  call HDF5_write(F_aim,groupHandle,'F_aim',.false.)
-  call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
-  call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.)
   call HDF5_write(F,groupHandle,'F')
   call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
   call HDF5_write(u_current,groupHandle,'u')
   call HDF5_write(u_lastInc,groupHandle,'u_lastInc')
-  call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.)
-  call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)
+  if (worldrank == 0) then
+    fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a',.false.)
+    groupHandle = HDF5_openGroup(fileHandle,'solver')
+    call HDF5_write(P_aim,groupHandle,'P_aim',.false.)
+    call HDF5_write(F_aim,groupHandle,'F_aim',.false.)
+    call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
+    call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.)
+    call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.)
+    call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
+    call HDF5_closeGroup(groupHandle)
+    call HDF5_closeFile(fileHandle)
+  endif
   call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
   CHKERRQ(ierr)
   call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
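
Note: restart writing is now split into a collective part (the large per-rank fields F, F_lastInc, u, u_lastInc) and a rank-0-only part for the small global quantities; the same pattern appears in the two spectral solvers below. A minimal mpi4py/h5py sketch of the idea (file name, shapes, and a parallel-enabled h5py build are assumptions, not part of this commit):

from mpi4py import MPI
import numpy as np
import h5py

comm = MPI.COMM_WORLD

# collective part: every rank writes its own slice of the large field data
with h5py.File('job_restart.hdf5', 'w', driver='mpio', comm=comm) as f:
    solver = f.create_group('solver')
    F = solver.create_dataset('F', (comm.size, 3, 3), dtype='f8')
    F[comm.rank] = np.eye(3)            # each rank fills its part independently

# serial part: only rank 0 reopens the file to append small global quantities
if comm.rank == 0:
    with h5py.File('job_restart.hdf5', 'a') as f:
        f['solver'].create_dataset('F_aim', data=np.eye(3))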

@@ -389,21 +389,25 @@ subroutine grid_mechanical_spectral_basic_restartWrite
   fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
   groupHandle = HDF5_addGroup(fileHandle,'solver')
-  call HDF5_write(P_aim,groupHandle,'P_aim',.false.)
-  call HDF5_write(F_aim,groupHandle,'F_aim',.false.)
-  call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
-  call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.)
   call HDF5_write(F,groupHandle,'F')
   call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
-  call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.)
-  call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
-  call HDF5_write(C_minMaxAvg,groupHandle,'C_minMaxAvg',.false.)
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)
+  if (worldrank == 0) then
+    fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a',.false.)
+    groupHandle = HDF5_openGroup(fileHandle,'solver')
+    call HDF5_write(P_aim,groupHandle,'P_aim',.false.)
+    call HDF5_write(F_aim,groupHandle,'F_aim',.false.)
+    call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
+    call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.)
+    call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.)
+    call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
+    call HDF5_write(C_minMaxAvg,groupHandle,'C_minMaxAvg',.false.)
+    call HDF5_closeGroup(groupHandle)
+    call HDF5_closeFile(fileHandle)
+  endif
   if (num%update_gamma) call utilities_saveReferenceStiffness
   call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)

@@ -445,22 +445,26 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite
   fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
   groupHandle = HDF5_addGroup(fileHandle,'solver')
-  call HDF5_write(F_aim,groupHandle,'P_aim',.false.)
-  call HDF5_write(F_aim,groupHandle,'F_aim',.false.)
-  call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
-  call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.)
   call HDF5_write(F,groupHandle,'F')
   call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
   call HDF5_write(F_tau,groupHandle,'F_tau')
   call HDF5_write(F_tau_lastInc,groupHandle,'F_tau_lastInc')
-  call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.)
-  call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)
+  if (worldrank == 0) then
+    fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a',.false.)
+    groupHandle = HDF5_openGroup(fileHandle,'solver')
+    call HDF5_write(F_aim,groupHandle,'P_aim',.false.)
+    call HDF5_write(F_aim,groupHandle,'F_aim',.false.)
+    call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
+    call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.)
+    call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.)
+    call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
+    call HDF5_closeGroup(groupHandle)
+    call HDF5_closeFile(fileHandle)
+  endif
   if(num%update_gamma) call utilities_saveReferenceStiffness
   call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)