From 58f2a25ffd2c17762d1b4493f0e0a80651673d72 Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Thu, 15 Nov 2018 06:49:31 +0100
Subject: [PATCH 01/89] clearer separation of tasks

1) general HDF5_utilities (for results, restart, ...)
2) results related helpers (based on HDF5_utilities)
---
 src/results.f90 | 2341 +++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 2341 insertions(+)
 create mode 100644 src/results.f90

diff --git a/src/results.f90 b/src/results.f90
new file mode 100644
index 000000000..43a7a26e8
--- /dev/null
+++ b/src/results.f90
@@ -0,0 +1,2341 @@
+!--------------------------------------------------------------------------------------------------
+!> @author Vitesh Shah, Max-Planck-Institut für Eisenforschung GmbH
+!> @author Yi-Chin Yang, Max-Planck-Institut für Eisenforschung GmbH
+!> @author Jennifer Nastola, Max-Planck-Institut für Eisenforschung GmbH
+!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
+!--------------------------------------------------------------------------------------------------
+module HDF5_Utilities
+ use prec
+ use IO
+ use HDF5
+#ifdef PETSc
+ use PETSC
+#endif
+
+ implicit none
+ private
+ integer(HID_T), public, protected :: tempCoordinates, tempResults
+ integer(HID_T), private :: resultsFile, currentIncID, plist_id
+ integer(pInt), private :: currentInc
+
+!--------------------------------------------------------------------------------------------------
+!> @brief reads pInt or pReal data of defined shape from file
+!--------------------------------------------------------------------------------------------------
+ interface HDF5_read
+   module procedure HDF5_read_pReal_1
+   module procedure HDF5_read_pReal_2
+   module procedure HDF5_read_pReal_3
+   module procedure HDF5_read_pReal_4
+   module procedure HDF5_read_pReal_5
+   module procedure HDF5_read_pReal_6
+   module procedure HDF5_read_pReal_7
+
+   module procedure HDF5_read_pInt_1
+   module procedure HDF5_read_pInt_2
+   module procedure HDF5_read_pInt_3
+   module procedure HDF5_read_pInt_4
+   module procedure HDF5_read_pInt_5
+   module procedure HDF5_read_pInt_6
+   module procedure HDF5_read_pInt_7                                                        ! reading data with more than 7 dimensions fails: the call to h5dread_f does not work
+
+ end interface HDF5_read
+
+!--------------------------------------------------------------------------------------------------
+!> @brief writes pInt or pReal data of defined shape to file
+!--------------------------------------------------------------------------------------------------
+ interface HDF5_write
+   module procedure HDF5_write_pReal1
+   module procedure HDF5_write_pReal2
+   module procedure HDF5_write_pReal3
+   module procedure HDF5_write_pReal4
+   module procedure HDF5_write_pReal5
+   module procedure HDF5_write_pReal6
+   module procedure HDF5_write_pReal7
+
+   module procedure HDF5_write_pInt1
+   module procedure HDF5_write_pInt2
+   module procedure HDF5_write_pInt3
+   module procedure HDF5_write_pInt4
+   module procedure HDF5_write_pInt5
+   module procedure HDF5_write_pInt6
+   module procedure HDF5_write_pInt7                                                        ! writing data with more than 7 dimensions fails: the call to h5dwrite_f does not work
+
+ end interface HDF5_write
+
+ public :: &
+   HDF5_Utilities_init, &
+   HDF5_mappingPhase, &
+   HDF5_mappingHomog, &
+   HDF5_mappingCrystallite, &
+   HDF5_backwardMappingPhase, &
+   HDF5_backwardMappingHomog, &
+   HDF5_backwardMappingCrystallite, &
+   HDF5_mappingCells, &
+   HDF5_addGroup, &
+   HDF5_closeGroup, &
+   HDF5_openGroup, &
+   HDF5_openGroup2, &
+   HDF5_forwardResults, &
+   HDF5_writeVectorDataset, &
+   HDF5_writeScalarDataset, 
&
+   HDF5_writeTensorDataset, &
+   HDF5_closeJobFile, &
+   HDF5_removeLink, &
+   HDF5_createFile, &
+   HDF5_closeFile, &
+   HDF5_addGroup2, &
+   HDF5_openFile, &
+   HDF5_read, &
+   HDF5_write
+contains
+
+subroutine HDF5_Utilities_init
+ use, intrinsic :: &
+   iso_fortran_env                                                                          ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
+
+ implicit none
+ integer :: hdferr
+ integer(SIZE_T) :: typeSize
+
+ write(6,'(/,a)') ' <<<+- HDF5_Utilities init -+>>>'
+#include "compilation_info.f90"
+
+ !currentInc = -1_pInt                                                                      ToDo
+ !call HDF5_createJobFile                                                                   ToDo
+
+!--------------------------------------------------------------------------------------------------
+! initialize HDF5 library and check if integer and float type sizes match
+ call h5open_f(hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5open_f')
+ call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5tget_size_f (int)')
+ if (int(pInt,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pInt does not match H5T_NATIVE_INTEGER')
+ call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5tget_size_f (double)')
+ if (int(pReal,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pReal does not match H5T_NATIVE_DOUBLE')
+
+end subroutine HDF5_Utilities_init
+
+!--------------------------------------------------------------------------------------------------
+!> @brief creates and initializes the HDF5 output file for the current job
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_createJobFile
+ use hdf5
+ use DAMASK_interface, only: &
+   getSolverJobName
+
+ implicit none
+ integer :: hdferr
+ character(len=1024) :: path
+#ifdef PETSc
+#include
+
+ call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5pcreate_f')
+ call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5pset_fapl_mpio_f')
+#endif
+
+!--------------------------------------------------------------------------------------------------
+! 
open file
+ path = trim(getSolverJobName())//'.'//'hdf5'
+ !call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr)
+ call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr,access_prp = plist_id)
+ if (hdferr < 0) call IO_error(100_pInt,ext_msg=path)
+ call HDF5_addStringAttribute(resultsFile,'createdBy',DAMASKVERSION)
+ call h5pclose_f(plist_id, hdferr)                                                          ! close the no longer needed file access property list
+
+end subroutine HDF5_createJobFile
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief creates a new HDF5 output file and returns its handle
+!--------------------------------------------------------------------------------------------------
+integer(HID_T) function HDF5_createFile(path)
+ use hdf5
+ use DAMASK_interface, only: &
+   getSolverJobName
+
+ implicit none
+ integer :: hdferr
+ character(len=*), intent(in) :: path
+#ifdef PETSc
+#include
+#endif
+ call h5open_f(hdferr)                                                                      ! DANGEROUS: (re)initializes the HDF5 library
+#ifdef PETSc
+ call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createFile: h5pcreate_f')
+ call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createFile: h5pset_fapl_mpio_f')
+#endif
+!--------------------------------------------------------------------------------------------------
+! create a file
+ !call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr)
+ call h5fcreate_f(path,H5F_ACC_TRUNC_F,HDF5_createFile,hdferr,access_prp = plist_id)
+ if (hdferr < 0) call IO_error(100_pInt,ext_msg=path)
+ !call HDF5_addStringAttribute(HDF5_createFile,'createdBy',DAMASKVERSION)
+ call h5pclose_f(plist_id, hdferr)                                                          ! close the no longer needed file access property list
+
+end function HDF5_createFile
+
+!--------------------------------------------------------------------------------------------------
+!> @brief closes the job's HDF5 output file after removing the link to the current increment
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_closeJobFile()
+ use hdf5
+
+ implicit none
+ integer :: hdferr
+ call HDF5_removeLink('current')
+ call h5fclose_f(resultsFile,hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeJobFile: h5fclose_f',el=hdferr)
+! 
call h5close_f(hdferr)
+
+end subroutine HDF5_closeJobFile
+
+!--------------------------------------------------------------------------------------------------
+!> @brief opens an existing HDF5 file or creates a new one, depending on the access mode
+!--------------------------------------------------------------------------------------------------
+integer(HID_T) function HDF5_openFile(fileName,mode)
+
+ implicit none
+ character(len=*), intent(in) :: fileName
+ character, intent(in), optional :: mode
+ character :: m
+ integer :: hdferr
+
+ if (present(mode)) then
+   m = mode
+ else
+   m = 'r'
+ endif
+
+ if (m == 'w') then
+   call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f',el=hdferr)
+ elseif(m == 'a') then
+   call h5fopen_f(fileName,H5F_ACC_RDWR_F,HDF5_openFile,hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)',el=hdferr)
+ elseif(m == 'r') then
+   call h5fopen_f(fileName,H5F_ACC_RDONLY_F,HDF5_openFile,hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)',el=hdferr)
+ else
+   call IO_error(1_pInt,ext_msg='HDF5_openFile: unknown access mode '//m,el=hdferr)
+ endif
+
+end function HDF5_openFile
+
+!--------------------------------------------------------------------------------------------------
+!> @brief closes an HDF5 file identified by its handle
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_closeFile(fileHandle)
+
+ implicit none
+ integer :: hdferr
+ integer(HID_T), intent(in) :: fileHandle
+ call h5fclose_f(fileHandle,hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeFile: h5fclose_f',el=hdferr)
+
+end subroutine HDF5_closeFile
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a new group to the results file
+!--------------------------------------------------------------------------------------------------
+integer(HID_T) function HDF5_addGroup(groupName)
+ use hdf5
+
+ implicit none
+ character(len=*), intent(in) :: groupName
+ integer :: hdferr
+
+ call h5gcreate_f(resultsFile, trim(groupName), HDF5_addGroup, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')')
+
+end function HDF5_addGroup
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a new group to the given file handle (in addition to HDF5_addGroup, which always uses the results file)
+!--------------------------------------------------------------------------------------------------
+integer(HID_T) function HDF5_addGroup2(fileHandle,groupName)
+ use hdf5
+
+ implicit none
+ character(len=*), intent(in) :: groupName
+ integer(HID_T), intent(in) :: fileHandle
+ integer :: hdferr
+
+ call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup2, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup2: h5gcreate_f ('//trim(groupName)//')')
+
+end function HDF5_addGroup2
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief opens an existing group in the results file
+!--------------------------------------------------------------------------------------------------
+integer(HID_T) function HDF5_openGroup(groupName)
+ use hdf5
+
+ implicit none
+ character(len=*), intent(in) :: groupName
+ integer :: hdferr
+
+ call h5gopen_f(resultsFile, trim(groupName), HDF5_openGroup, hdferr)
+ if (hdferr < 0) call 
IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(groupName)//')') + +end function HDF5_openGroup + +!-------------------------------------------------------------------------------------------------- +!> @brief open an existing group of a file +!-------------------------------------------------------------------------------------------------- +integer(HID_T) function HDF5_openGroup2(FileReadID,groupName) + use hdf5 + + implicit none + character(len=*), intent(in) :: groupName + integer :: hdferr + integer(HID_T), intent(in) :: FileReadID + + call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5gopen_f ('//trim(groupName)//')') + +end function HDF5_openGroup2 + +!-------------------------------------------------------------------------------------------------- +!> @brief set link to object in results file +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_setLink(path,link) + use hdf5 + + implicit none + character(len=*), intent(in) :: path, link + integer :: hdferr + logical :: linkExists + + call h5lexists_f(resultsFile, link,linkExists, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link)//')') + if (linkExists) then + call h5ldelete_f(resultsFile,link, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5ldelete_soft_f ('//trim(link)//')') + endif + call h5lcreate_soft_f(path, resultsFile, link, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lcreate_soft_f ('//trim(path)//' '//trim(link)//')') + +end subroutine HDF5_setLink + +!-------------------------------------------------------------------------------------------------- +!> @brief remove link to an object +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_removeLink(link) + use hdf5 + + implicit none + character(len=*), intent(in) :: link + integer :: hdferr + + call h5ldelete_f(resultsFile,link, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_removeLink: h5ldelete_soft_f ('//trim(link)//')') + +end subroutine HDF5_removeLink + +!-------------------------------------------------------------------------------------------------- +!> @brief close a group +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_closeGroup(ID) + use hdf5 + + implicit none + integer(HID_T), intent(in) :: ID + integer :: hdferr + + call h5gclose_f(ID, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_closeGroup: h5gclose_f (el is ID)', el = int(ID,pInt)) + +end subroutine HDF5_closeGroup + +!-------------------------------------------------------------------------------------------------- +!> @brief adds a StringAttribute to the results file +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_addStringAttribute(entity,attrLabel,attrValue) + use hdf5 + + implicit none + integer(HID_T), intent(in) :: entity + character(len=*), intent(in) :: attrLabel, attrValue + integer :: hdferr + integer(HID_T) :: attr_id, space_id, type_id + + call h5screate_f(H5S_SCALAR_F,space_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5screate_f') + call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr) + if (hdferr < 0) call 
IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tcopy_f') + call h5tset_size_f(type_id, int(len(trim(attrValue)),HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tset_size_f') + call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5acreate_f') + call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5awrite_f') + call h5aclose_f(attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5aclose_f') + call h5tclose_f(type_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tclose_f') + call h5sclose_f(space_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5sclose_f') + +end subroutine HDF5_addStringAttribute + +!-------------------------------------------------------------------------------------------------- +!> @brief adds the unique mapping from spatial position and constituent ID to results +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_mappingPhase(mapping,mapping2,Nconstituents,material_phase,phase_name,dataspace_size,mpiOffset,mpiOffset_phase) + use hdf5 + + implicit none + integer(pInt), intent(in) :: Nconstituents, dataspace_size, mpiOffset + integer(pInt), intent(in), dimension(:) :: mapping, mapping2 + character(len=*), intent(in), dimension(:) :: phase_name + integer(pInt), intent(in), dimension(:) :: mpiOffset_phase + integer(pInt), intent(in), dimension(:,:,:) :: material_phase + + character(len=len(phase_name(1))), dimension(:), allocatable :: namesNA + character(len=len(phase_name(1))) :: a + character(len=*), parameter :: n = "NULL" + + integer(pInt) :: hdferr, NmatPoints, i, j, k + integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, name_id, position_id, plist_id, memspace + integer(HID_T) :: dt5_id ! Memory datatype identifier + integer(SIZE_T) :: typesize, type_sizec, type_sizei, type_size + + integer(HSIZE_T), dimension(2) :: counter + integer(HSSIZE_T), dimension(2) :: fileOffset + integer(pInt), dimension(:,:), allocatable :: arrOffset + + a = n + allocate(namesNA(0:size(phase_name)),source=[a,phase_name]) + NmatPoints = size(mapping,1)/Nconstituents + mapping_ID = HDF5_openGroup("current/mapGeometry") + + allocate(arrOffset(Nconstituents,NmatPoints)) + do i=1_pInt, NmatPoints + do k=1_pInt, Nconstituents + do j=1_pInt, size(phase_name) + if(material_phase(k,1,i) == j) & + arrOffset(k,i) = mpiOffset_phase(j) + enddo + enddo + enddo + +!-------------------------------------------------------------------------------------------------- +! create dataspace + call h5screate_simple_f(2, int([Nconstituents,dataspace_size],HSIZE_T), space_id, hdferr, & + int([Nconstituents,dataspace_size],HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping') + +!-------------------------------------------------------------------------------------------------- +! compound type + ! First calculate total size by calculating sizes of each member + ! 
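+ ! The file datatype packs a fixed-length string ("Name", type_sizec bytes) next to a
+ ! 32-bit little-endian integer ("Position"): type_size = type_sizec + type_sizei.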
+ CALL h5tcopy_f(H5T_NATIVE_CHARACTER, dt5_id, hdferr) + typesize = len(phase_name(1)) + CALL h5tset_size_f(dt5_id, typesize, hdferr) + CALL h5tget_size_f(dt5_id, type_sizec, hdferr) + CALL h5tget_size_f(H5T_STD_I32LE,type_sizei, hdferr) + type_size = type_sizec + type_sizei + call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping: h5tcreate_f dtype_id') + + call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt5_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tinsert_f 0') + call h5tinsert_f(dtype_id, "Position", type_sizec, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tinsert_f 2') + +!-------------------------------------------------------------------------------------------------- +! create Dataset + call h5dcreate_f(mapping_id, 'constitutive', dtype_id, space_id, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase') + +!-------------------------------------------------------------------------------------------------- +! Create memory types (one compound datatype for each member) + call h5tcreate_f(H5T_COMPOUND_F, int(type_sizec,SIZE_T), name_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tcreate_f instance_id') + call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt5_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tinsert_f instance_id') + + call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tcreate_f position_id') + call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tinsert_f position_id') + +!-------------------------------------------------------------------------------------------------- +! Define and select hyperslabs + counter(1) = Nconstituents ! how big i am + counter(2) = NmatPoints + fileOffset(1) = 0 ! where i start to write my data + fileOffset(2) = mpiOffset + + call h5screate_simple_f(2, counter, memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5screate_simple_f') + call h5dget_space_f(dset_id, space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5dget_space_f') + call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- + ! Create property list for collective dataset write +#ifdef PETSc + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5pcreate_f') + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5pset_dxpl_mpio_f') +#endif + +!-------------------------------------------------------------------------------------------------- +! write data by fields in the datatype. Fields order is not important. 
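+ ! Each member is written with its member-specific memory datatype: names are looked up
+ ! from namesNA via the mapping array; positions are zero-based and shifted by arrOffset
+ ! into the global (MPI-parallel) numbering.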
+ call h5dwrite_f(dset_id, name_id, reshape(namesNA(mapping),[Nconstituents,NmatPoints]), & + int([Nconstituents, dataspace_size],HSIZE_T), hdferr, & + file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5dwrite_f position_id') + + call h5dwrite_f(dset_id, position_id, reshape(mapping2-1_pInt,[Nconstituents,NmatPoints])+arrOffset, & + int([Nconstituents, dataspace_size],HSIZE_T), hdferr, & + file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5dwrite_f instance_id') + +!-------------------------------------------------------------------------------------------------- +! close types, dataspaces + call h5tclose_f(dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tclose_f dtype_id') + call h5tclose_f(position_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tclose_f position_id') + call h5tclose_f(name_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tclose_f name_id ') + call h5tclose_f(dt5_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tclose_f dt5_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5dclose_f') + call h5sclose_f(space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5sclose_f space_id') + call h5sclose_f(memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5sclose_f memspace') + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5pclose_f') + call HDF5_closeGroup(mapping_ID) + +end subroutine HDF5_mappingPhase + +!-------------------------------------------------------------------------------------------------- +!> @brief adds the backward mapping from spatial position and constituent ID to results +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_backwardMappingPhase(material_phase,phasememberat,phase_name,dataspace_size,mpiOffset,mpiOffset_phase) + use hdf5 + + implicit none + integer(pInt), intent(in), dimension(:,:,:) :: material_phase, phasememberat + character(len=*), intent(in), dimension(:) :: phase_name + integer(pInt), intent(in), dimension(:) :: dataspace_size, mpiOffset_phase + integer(pInt), intent(in) :: mpiOffset + + integer(pInt) :: hdferr, NmatPoints, Nconstituents, i, j + integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, position_id, plist_id, memspace + integer(SIZE_T) :: type_size + + integer(pInt), dimension(:,:), allocatable :: arr + + integer(HSIZE_T), dimension(1) :: counter + integer(HSSIZE_T), dimension(1) :: fileOffset + + character(len=64) :: phaseID + + Nconstituents = size(phasememberat,1) + NmatPoints = count(material_phase /=0_pInt)/Nconstituents + + allocate(arr(2,NmatPoints*Nconstituents)) + + do i=1_pInt, NmatPoints + do j=Nconstituents-1_pInt, 0_pInt, -1_pInt + arr(1,Nconstituents*i-j) = i-1_pInt + enddo + enddo + arr(2,:) = pack(material_phase,material_phase/=0_pInt) + + do i=1_pInt, size(phase_name) + write(phaseID, '(i0)') i + mapping_ID = HDF5_openGroup('/current/constitutive/'//trim(phaseID)//'_'//phase_name(i)) + NmatPoints = count(material_phase == i) + +!-------------------------------------------------------------------------------------------------- + ! 
create dataspace + call h5screate_simple_f(1, int([dataspace_size(i)],HSIZE_T), space_id, hdferr, & + int([dataspace_size(i)],HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping') + +!-------------------------------------------------------------------------------------------------- + ! compound type + call h5tget_size_f(H5T_STD_I32LE, type_size, hdferr) + call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping: h5tcreate_f dtype_id') + + call h5tinsert_f(dtype_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tinsert_f 0') + +!-------------------------------------------------------------------------------------------------- + ! create Dataset + call h5dcreate_f(mapping_id, 'mapGeometry', dtype_id, space_id, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase') + +!-------------------------------------------------------------------------------------------------- + ! Create memory types (one compound datatype for each member) + call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tcreate_f position_id') + call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tinsert_f position_id') + +!-------------------------------------------------------------------------------------------------- + ! Define and select hyperslabs + counter = NmatPoints ! how big i am + fileOffset = mpiOffset_phase(i) ! where i start to write my data + + call h5screate_simple_f(1, counter, memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5screate_simple_f') + call h5dget_space_f(dset_id, space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5dget_space_f') + call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- + ! Create property list for collective dataset write +#ifdef PETSc + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5pcreate_f') + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5pset_dxpl_mpio_f') +#endif + +!-------------------------------------------------------------------------------------------------- + ! write data by fields in the datatype. Fields order is not important. 
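+ ! Only the points belonging to phase i are packed out of arr(1,:) and shifted by
+ ! mpiOffset so that the per-phase group stores globally consistent positions.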
+ call h5dwrite_f(dset_id, position_id, pack(arr(1,:),arr(2,:)==i)+mpiOffset, int([dataspace_size(i)],HSIZE_T),& + hdferr, file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5dwrite_f instance_id') + +!-------------------------------------------------------------------------------------------------- + !close types, dataspaces + call h5tclose_f(dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tclose_f dtype_id') + call h5tclose_f(position_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tclose_f position_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5dclose_f') + call h5sclose_f(space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5sclose_f space_id') + call h5sclose_f(memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5sclose_f memspace') + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5pclose_f') + call HDF5_closeGroup(mapping_ID) + + enddo + +end subroutine HDF5_backwardMappingPhase + +!-------------------------------------------------------------------------------------------------- +!> @brief adds the unique mapping from spatial position and constituent ID to results +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_mappingHomog(material_homog,homogmemberat,homogenization_name,dataspace_size,mpiOffset,mpiOffset_homog) + use hdf5 + + implicit none + integer(pInt), intent(in), dimension(:,:) :: material_homog, homogmemberat + character(len=*), intent(in), dimension(:) :: homogenization_name + integer(pInt), intent(in), dimension(:) :: mpiOffset_homog + integer(pInt), intent(in) :: dataspace_size, mpiOffset + + integer(pInt) :: hdferr, NmatPoints, i, j + integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, name_id, position_id, plist_id, memspace + + integer(HID_T) :: dt5_id ! Memory datatype identifier + integer(SIZE_T) :: typesize, type_sizec, type_sizei, type_size + + integer(HSIZE_T), dimension(1) :: counter + integer(HSSIZE_T), dimension(1) :: fileOffset + integer(pInt), dimension(:), allocatable :: arrOffset + + NmatPoints = count(material_homog /=0_pInt) + mapping_ID = HDF5_openGroup("current/mapGeometry") + + allocate(arrOffset(NmatPoints)) + do i=1_pInt, NmatPoints + do j=1_pInt, size(homogenization_name) + if(material_homog(1,i) == j) & + arrOffset(i) = mpiOffset_homog(j) + enddo + enddo + +!-------------------------------------------------------------------------------------------------- +! create dataspace + call h5screate_simple_f(1, int([dataspace_size],HSIZE_T), space_id, hdferr, & + int([dataspace_size],HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping') + +!-------------------------------------------------------------------------------------------------- +! compound type + ! First calculate total size by calculating sizes of each member + ! 
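+ ! Same two-member Name/Position layout as for the phase mapping above.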
+ CALL h5tcopy_f(H5T_NATIVE_CHARACTER, dt5_id, hdferr) + typesize = len(homogenization_name(1)) + CALL h5tset_size_f(dt5_id, typesize, hdferr) + CALL h5tget_size_f(dt5_id, type_sizec, hdferr) + CALL h5tget_size_f(H5T_STD_I32LE,type_sizei, hdferr) + type_size = type_sizec + type_sizei + call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping: h5tcreate_f dtype_id') + + call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt5_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tinsert_f 0') + call h5tinsert_f(dtype_id, "Position", type_sizec, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tinsert_f 2') + +!-------------------------------------------------------------------------------------------------- +! create Dataset + call h5dcreate_f(mapping_id, 'homogenization', dtype_id, space_id, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog') + +!-------------------------------------------------------------------------------------------------- +! Create memory types (one compound datatype for each member) + call h5tcreate_f(H5T_COMPOUND_F, int(type_sizec,SIZE_T), name_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tcreate_f instance_id') + call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt5_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tinsert_f instance_id') + + call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tcreate_f position_id') + call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tinsert_f position_id') + +!-------------------------------------------------------------------------------------------------- +! Define and select hyperslabs + counter = NmatPoints ! how big i am + fileOffset = mpiOffset ! where i start to write my data + + call h5screate_simple_f(1, counter, memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5screate_simple_f') + call h5dget_space_f(dset_id, space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5dget_space_f') + call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! Create property list for collective dataset write +#ifdef PETSc + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5pcreate_f') + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5pset_dxpl_mpio_f') +#endif + +!-------------------------------------------------------------------------------------------------- +! write data by fields in the datatype. Fields order is not important. 
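+ ! The name member is gathered by indexing homogenization_name with the packed
+ ! homogenization IDs; the position member is zero-based plus the per-homogenization
+ ! MPI offset from arrOffset.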
+ call h5dwrite_f(dset_id, name_id, homogenization_name(pack(material_homog,material_homog/=0_pInt)), & + int([dataspace_size],HSIZE_T), hdferr, file_space_id = space_id, & + mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5dwrite_f position_id') + + call h5dwrite_f(dset_id, position_id, pack(homogmemberat-1_pInt,homogmemberat/=0_pInt) + arrOffset, & + int([dataspace_size],HSIZE_T), hdferr, file_space_id = space_id, & + mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5dwrite_f instance_id') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces +call h5tclose_f(dtype_id, hdferr) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tclose_f dtype_id') +call h5tclose_f(position_id, hdferr) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tclose_f position_id') +call h5tclose_f(name_id, hdferr) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tclose_f name_id ') +call h5tclose_f(dt5_id, hdferr) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tclose_f dt5_id') +call h5dclose_f(dset_id, hdferr) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5dclose_f') +call h5sclose_f(space_id, hdferr) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5sclose_f space_id') +call h5sclose_f(memspace, hdferr) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5sclose_f memspace') +call h5pclose_f(plist_id, hdferr) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5pclose_f') +call HDF5_closeGroup(mapping_ID) + +end subroutine HDF5_mappingHomog + +!-------------------------------------------------------------------------------------------------- +!> @brief adds the backward mapping from spatial position and constituent ID to results +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_backwardMappingHomog(material_homog,homogmemberat,homogenization_name,dataspace_size,mpiOffset,mpiOffset_homog) + use hdf5 + + implicit none + integer(pInt), intent(in), dimension(:,:) :: material_homog, homogmemberat + character(len=*), intent(in), dimension(:) :: homogenization_name + integer(pInt), intent(in), dimension(:) :: dataspace_size, mpiOffset_homog + integer(pInt), intent(in) :: mpiOffset + + integer(pInt) :: hdferr, NmatPoints, i + integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, position_id, plist_id, memspace + integer(SIZE_T) :: type_size + + integer(pInt), dimension(:,:), allocatable :: arr + + integer(HSIZE_T), dimension(1) :: counter + integer(HSSIZE_T), dimension(1) :: fileOffset + + character(len=64) :: homogID + + NmatPoints = count(material_homog /=0_pInt) + allocate(arr(2,NmatPoints)) + + arr(1,:) = (/(i, i=0_pint,NmatPoints-1_pInt)/) + arr(2,:) = pack(material_homog,material_homog/=0_pInt) + + do i=1_pInt, size(homogenization_name) + write(homogID, '(i0)') i + mapping_ID = HDF5_openGroup('/current/homogenization/'//trim(homogID)//'_'//homogenization_name(i)) + +!-------------------------------------------------------------------------------------------------- + ! 
create dataspace + call h5screate_simple_f(1, int([dataspace_size(i)],HSIZE_T), space_id, hdferr, & + int([dataspace_size(i)],HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping') + +!-------------------------------------------------------------------------------------------------- + ! compound type + call h5tget_size_f(H5T_STD_I32LE, type_size, hdferr) + call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping: h5tcreate_f dtype_id') + + call h5tinsert_f(dtype_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tinsert_f 0') + +!-------------------------------------------------------------------------------------------------- + ! create Dataset + call h5dcreate_f(mapping_id, 'mapGeometry', dtype_id, space_id, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog') + +!-------------------------------------------------------------------------------------------------- + ! Create memory types (one compound datatype for each member) + call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tcreate_f position_id') + call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tinsert_f position_id') + +!-------------------------------------------------------------------------------------------------- + ! Define and select hyperslabs + counter = NmatPoints ! how big i am + fileOffset = mpiOffset_homog(i) ! where i start to write my data + + call h5screate_simple_f(1, counter, memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5screate_simple_f') + call h5dget_space_f(dset_id, space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5dget_space_f') + call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- + ! Create property list for collective dataset write +#ifdef PETSc + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5pcreate_f') + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5pset_dxpl_mpio_f') +#endif + +!-------------------------------------------------------------------------------------------------- + ! write data by fields in the datatype. Fields order is not important. 
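+ ! As for the phases: pack out the points of homogenization i and shift them by
+ ! mpiOffset before the collective write.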
+ call h5dwrite_f(dset_id, position_id, pack(arr(1,:),arr(2,:)==i)+mpiOffset,int([dataspace_size(i)],HSIZE_T),& + hdferr, file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5dwrite_f instance_id') + +!-------------------------------------------------------------------------------------------------- + !close types, dataspaces + call h5tclose_f(dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tclose_f dtype_id') + call h5tclose_f(position_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tclose_f position_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5dclose_f') + call h5sclose_f(space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5sclose_f space_id') + call h5sclose_f(memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5sclose_f memspace') + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5pclose_f') + call HDF5_closeGroup(mapping_ID) + + enddo + +end subroutine HDF5_backwardMappingHomog + +!-------------------------------------------------------------------------------------------------- +!> @brief adds the unique mapping from spatial position and constituent ID to results +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_mappingCrystallite(crystalliteAt,crystmemberAt,crystallite_name,dataspace_size,mpiOffset,mpiOffset_cryst) + use hdf5 + + implicit none + integer(pInt), intent(in), dimension(:,:) :: crystalliteAt + integer(pInt), intent(in), dimension(:,:,:) :: crystmemberAt + character(len=*), intent(in), dimension(:) :: crystallite_name + integer(pInt), intent(in), dimension(:) :: mpiOffset_cryst + integer(pInt), intent(in) :: dataspace_size, mpiOffset + + integer :: hdferr + integer(pInt) :: NmatPoints, Nconstituents, i, j + integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, name_id, plist_id, memspace + + integer(HID_T), dimension(:), allocatable :: position_id + + integer(HID_T) :: dt5_id ! Memory datatype identifier + integer(SIZE_T) :: typesize, type_sizec, type_sizei, type_size + + integer(HSIZE_T), dimension(1) :: counter + integer(HSSIZE_T), dimension(1) :: fileOffset + integer(pInt), dimension(:), allocatable :: arrOffset + + character(len=64) :: m + + Nconstituents = size(crystmemberAt,1) + NmatPoints = count(crystalliteAt /=0_pInt) + mapping_ID = HDF5_openGroup("current/mapGeometry") + + allocate(position_id(Nconstituents)) + + allocate(arrOffset(NmatPoints)) + do i=1_pInt, NmatPoints + do j=1_pInt, size(crystallite_name) + if(crystalliteAt(1,i) == j) & + arrOffset(i) = Nconstituents*mpiOffset_cryst(j) + enddo + enddo + +!-------------------------------------------------------------------------------------------------- +! create dataspace + call h5screate_simple_f(1, int([dataspace_size],HSIZE_T), space_id, hdferr, & + int([dataspace_size],HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping') + +!-------------------------------------------------------------------------------------------------- +! compound type + ! First calculate total size by calculating sizes of each member + ! 
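+ ! Here the compound type carries one "Position N" integer per constituent behind the
+ ! name string: type_size = type_sizec + type_sizei*Nconstituents.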
+ CALL h5tcopy_f(H5T_NATIVE_CHARACTER, dt5_id, hdferr) + typesize = len(crystallite_name(1)) + CALL h5tset_size_f(dt5_id, typesize, hdferr) + CALL h5tget_size_f(dt5_id, type_sizec, hdferr) + CALL h5tget_size_f(H5T_STD_I32LE, type_sizei, hdferr) + type_size = type_sizec + type_sizei*Nconstituents + call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping: h5tcreate_f dtype_id') + + call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt5_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f 0') + do i=1_pInt, Nconstituents + write(m, '(i0)') i + call h5tinsert_f(dtype_id, "Position "//trim(m), type_sizec+(i-1)*type_sizei, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f 2 '//trim(m)) + enddo + +!-------------------------------------------------------------------------------------------------- +! create Dataset + call h5dcreate_f(mapping_id, 'crystallite', dtype_id, space_id, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite') + +!-------------------------------------------------------------------------------------------------- +! Create memory types (one compound datatype for each member) + call h5tcreate_f(H5T_COMPOUND_F, int(type_sizec,SIZE_T), name_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f instance_id') + call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt5_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f instance_id') + + do i=1_pInt, Nconstituents + write(m, '(i0)') i + call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id(i), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f position_id') + call h5tinsert_f(position_id(i), "Position "//trim(m), 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f position_id') + enddo + +!-------------------------------------------------------------------------------------------------- +! Define and select hyperslabs + counter = NmatPoints ! how big i am + fileOffset = mpiOffset ! where i start to write my data + + call h5screate_simple_f(1, counter, memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5screate_simple_f') + call h5dget_space_f(dset_id, space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dget_space_f') + call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- + ! Create property list for collective dataset write +#ifdef PETSc + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5pcreate_f') + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5pset_dxpl_mpio_f') +#endif + +!-------------------------------------------------------------------------------------------------- +! write data by fields in the datatype. Fields order is not important. 
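+ ! The name member is written once; the Nconstituents position members are written in
+ ! separate h5dwrite_f calls, one per member-specific memory datatype.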
+ call h5dwrite_f(dset_id, name_id, crystallite_name(pack(crystalliteAt,crystalliteAt/=0_pInt)), & + int([dataspace_size],HSIZE_T), hdferr, file_space_id = space_id, & + mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dwrite_f position_id') + + do i=1_pInt, Nconstituents + call h5dwrite_f(dset_id, position_id(i), pack(crystmemberAt(i,:,:)-1_pInt,crystmemberAt(i,:,:)/=0_pInt)+arrOffset,& + int([dataspace_size],HSIZE_T), hdferr, file_space_id = space_id, & + mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dwrite_f instance_id') + enddo + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5tclose_f(dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f dtype_id') + do i=1_pInt, Nconstituents + call h5tclose_f(position_id(i), hdferr) + enddo + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f position_id') + call h5tclose_f(name_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f name_id') + call h5tclose_f(dt5_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f dt5_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dclose_f') + call h5sclose_f(space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5sclose_f space_id') + call h5sclose_f(memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5sclose_f memspace') + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5pclose_f') + call HDF5_closeGroup(mapping_ID) + +end subroutine HDF5_mappingCrystallite + + +!-------------------------------------------------------------------------------------------------- +!> @brief adds the backward mapping from spatial position and constituent ID to results +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_backwardMappingCrystallite(crystalliteAt,crystmemberAt,crystallite_name,dataspace_size,mpiOffset,mpiOffset_cryst) + use hdf5 + + implicit none + integer(pInt), intent(in), dimension(:,:) :: crystalliteAt + integer(pInt), intent(in), dimension(:,:,:) :: crystmemberAt + character(len=*), intent(in), dimension(:) :: crystallite_name + integer(pInt), intent(in), dimension(:) :: dataspace_size, mpiOffset_cryst + integer(pInt), intent(in) :: mpiOffset + + integer :: hdferr + integer(pInt) :: NmatPoints, Nconstituents, i, j + integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, position_id, plist_id, memspace + integer(SIZE_T) :: type_size + + integer(pInt), dimension(:,:), allocatable :: h_arr, arr + + integer(HSIZE_T), dimension(1) :: counter + integer(HSSIZE_T), dimension(1) :: fileOffset + + character(len=64) :: crystallID + + Nconstituents = size(crystmemberAt,1) + NmatPoints = count(crystalliteAt /=0_pInt) + + allocate(h_arr(2,NmatPoints)) + allocate(arr(2,Nconstituents*NmatPoints)) + + h_arr(1,:) = (/(i, i=0_pInt,NmatPoints-1_pInt)/) + h_arr(2,:) = pack(crystalliteAt,crystalliteAt/=0_pInt) + + do i=1_pInt, NmatPoints + do j=Nconstituents-1_pInt, 0_pInt, -1_pInt + arr(1,Nconstituents*i-j) = h_arr(1,i) + arr(2,Nconstituents*i-j) = h_arr(2,i) + enddo + enddo + + do i=1_pInt, 
size(crystallite_name) + if (crystallite_name(i) == 'none') cycle + write(crystallID, '(i0)') i + mapping_ID = HDF5_openGroup('/current/crystallite/'//trim(crystallID)//'_'//crystallite_name(i)) + NmatPoints = count(crystalliteAt == i) + +!-------------------------------------------------------------------------------------------------- + ! create dataspace + call h5screate_simple_f(1, int([Nconstituents*dataspace_size(i)],HSIZE_T), space_id, hdferr, & + int([Nconstituents*dataspace_size(i)],HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping') + +!-------------------------------------------------------------------------------------------------- + ! compound type + call h5tget_size_f(H5T_STD_I32LE, type_size, hdferr) + call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping: h5tcreate_f dtype_id') + + call h5tinsert_f(dtype_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tinsert_f 0') + +!-------------------------------------------------------------------------------------------------- + ! create Dataset + call h5dcreate_f(mapping_id, 'mapGeometry', dtype_id, space_id, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite') + +!-------------------------------------------------------------------------------------------------- + ! Create memory types + call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tcreate_f position_id') + call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tinsert_f position_id') + +!-------------------------------------------------------------------------------------------------- + ! Define and select hyperslabs + counter = Nconstituents*NmatPoints ! how big i am + fileOffset = Nconstituents*mpiOffset_cryst(i) ! where i start to write my data + + call h5screate_simple_f(1, counter, memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5screate_simple_f') + call h5dget_space_f(dset_id, space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5dget_space_f') + call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- + ! Create property list for collective dataset write +#ifdef PETSc + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5pcreate_f') + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5pset_dxpl_mpio_f') +#endif + +!-------------------------------------------------------------------------------------------------- + ! write data by fields in the datatype. Fields order is not important. 
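+ ! Pack out the points of crystallite i (replicated once per constituent in arr) and
+ ! shift them by mpiOffset before the collective write.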
+ call h5dwrite_f(dset_id, position_id, pack(arr(1,:),arr(2,:)==i) + mpiOffset,&
+                 int([Nconstituents*dataspace_size(i)],HSIZE_T), hdferr, file_space_id = space_id, &
+                 mem_space_id = memspace, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5dwrite_f instance_id')
+
+!--------------------------------------------------------------------------------------------------
+ !close types, dataspaces
+ call h5tclose_f(dtype_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tclose_f dtype_id')
+ call h5tclose_f(position_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tclose_f position_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5sclose_f space_id')
+ call h5sclose_f(memspace, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5sclose_f memspace')
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5pclose_f')
+ call HDF5_closeGroup(mapping_ID)
+
+ enddo
+
+end subroutine HDF5_backwardMappingCrystallite
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds the unique cell to node mapping
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_mappingCells(mapping)
+ use hdf5
+
+ implicit none
+ integer(pInt), intent(in), dimension(:) :: mapping
+
+ integer :: hdferr, Nnodes
+ integer(HID_T) :: mapping_id, dset_id, space_id
+
+ Nnodes = size(mapping)
+ mapping_ID = HDF5_openGroup("mapping")
+
+!--------------------------------------------------------------------------------------------------
+! create dataspace
+ call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, &
+                         int([Nnodes],HSIZE_T))
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5screate_simple_f')
+
+!--------------------------------------------------------------------------------------------------
+! create Dataset
+ call h5dcreate_f(mapping_id, "Cell",H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells')
+
+!--------------------------------------------------------------------------------------------------
+! write data (a plain integer dataset, no compound fields)
+ call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, mapping, int([Nnodes],HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5dwrite_f instance_id')
+
+!--------------------------------------------------------------------------------------------------
+!close types, dataspaces
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5sclose_f')
+ call HDF5_closeGroup(mapping_ID)
+
+end subroutine HDF5_mappingCells
+
+!--------------------------------------------------------------------------------------------------
+!> @brief creates a new 3D tensor dataset in the given group location !!! ToDo: really necessary? 
+!-------------------------------------------------------------------------------------------------- +subroutine HDF5_addTensor3DDataset(group,Nnodes,tensorSize,label,SIunit) + use hdf5 + + implicit none + integer(HID_T), intent(in) :: group + integer(pInt), intent(in) :: Nnodes, tensorSize + character(len=*), intent(in) :: SIunit, label + + integer :: hdferr + integer(HID_T) :: space_id, dset_id + integer(HSIZE_T), dimension(3) :: dataShape + + dataShape = int([tensorSize,tensorSize,Nnodes], HSIZE_T) + +!-------------------------------------------------------------------------------------------------- +! create dataspace + call h5screate_simple_f(3, dataShape, space_id, hdferr, dataShape) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5screate_simple_f') + +!-------------------------------------------------------------------------------------------------- +! create Dataset + call h5dcreate_f(group, trim(label),H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5dcreate_f') + call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit)) + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5dclose_f') + call h5sclose_f(space_id, hdferr) + +end subroutine HDF5_addTensor3DDataset + +!-------------------------------------------------------------------------------------------------- +!> @brief creates a new vector dataset in the given group location !!!TODO: really necessary? +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_writeVectorDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset) + use hdf5 + + implicit none + integer(HID_T), intent(in) :: group + character(len=*), intent(in) :: SIunit,label + integer(pInt), intent(in) :: dataspace_size, mpiOffset + real(pReal), intent(in), dimension(:,:) :: dataset + + integer :: hdferr, vectorSize + integer(HID_T) :: dset_id, space_id, memspace, plist_id + + integer(HSIZE_T), dimension(2) :: counter + integer(HSSIZE_T), dimension(2) :: fileOffset + + if(any(shape(dataset) == 0)) return + + vectorSize = size(dataset,1) + + call HDF5_addVectorDataset(group,dataspace_size,vectorSize,label,SIunit) ! here nNodes need to be global + call h5dopen_f(group, label, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dopen_f') + + ! Define and select hyperslabs + counter(1) = vectorSize ! how big i am + counter(2) = size(dataset,2) + fileOffset(1) = 0 ! where i start to write my data + fileOffset(2) = mpiOffset + + call h5screate_simple_f(2, counter, memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5screate_simple_f') + call h5dget_space_f(dset_id, space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dget_space_f') + call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5sselect_hyperslab_f') + + ! 
Create property list for collective dataset write +#ifdef PETSc + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5pcreate_f') + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5pset_dxpl_mpio_f') +#endif + + ! Write the dataset collectively + call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([vectorSize, dataspace_size],HSIZE_T), hdferr, & + file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dwrite_f') + + call h5sclose_f(space_id, hdferr) + call h5sclose_f(memspace, hdferr) + call h5dclose_f(dset_id, hdferr) + call h5pclose_f(plist_id, hdferr) + +end subroutine HDF5_writeVectorDataset + +!-------------------------------------------------------------------------------------------------- +!> @brief creates a new tensor dataset in the given group location +! by default, a 3x3 tensor is assumed !!!TODO: really necessary? +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_writeTensorDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset) + use hdf5 + + implicit none + integer(HID_T), intent(in) :: group + character(len=*), intent(in) :: SIunit,label + integer(pInt), intent(in) :: dataspace_size, mpiOffset + real(pReal), intent(in), dimension(:,:,:) :: dataset + + integer :: hdferr, tensorSize + integer(HID_T) :: dset_id, space_id, memspace, plist_id + + integer(HSIZE_T), dimension(3) :: counter + integer(HSSIZE_T), dimension(3) :: fileOffset + + if(any(shape(dataset) == 0)) return + + tensorSize = size(dataset,1) + + call HDF5_addTensor3DDataset(group,dataspace_size,tensorSize,label,SIunit) + call h5dopen_f(group, label, dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dopen_f') + + ! Define and select hyperslabs + counter(1) = tensorSize ! how big i am + counter(2) = tensorSize + counter(3) = size(dataset,3) + fileOffset(1) = 0 ! where i start to write my data + fileOffset(2) = 0 + fileOffset(3) = mpiOffset + + call h5screate_simple_f(3, counter, memspace, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5screate_simple_f') + call h5dget_space_f(dset_id, space_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dget_space_f') + call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5sselect_hyperslab_f') + + ! Create property list for collective dataset write +#ifdef PETSc + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5pcreate_f') + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5pset_dxpl_mpio_f') +#endif + + ! 
Write the dataset collectively
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([tensorSize,tensorSize,dataspace_size],HSIZE_T), hdferr, &
+                 file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dwrite_f')
+
+ call h5sclose_f(space_id, hdferr)
+ call h5sclose_f(memspace, hdferr)
+ call h5dclose_f(dset_id, hdferr)
+ call h5pclose_f(plist_id, hdferr)
+
+end subroutine HDF5_writeTensorDataset
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a new vector dataset to the given group location
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_addVectorDataset(group,nnodes,vectorSize,label,SIunit)
+ use hdf5
+
+ implicit none
+ integer(HID_T), intent(in) :: group
+ integer(pInt), intent(in) :: nnodes,vectorSize
+ character(len=*), intent(in) :: SIunit,label
+
+ integer :: hdferr
+ integer(HID_T) :: space_id, dset_id
+
+!--------------------------------------------------------------------------------------------------
+! create dataspace
+ call h5screate_simple_f(2, int([vectorSize,nnodes],HSIZE_T), space_id, hdferr, &
+                         int([vectorSize,nnodes],HSIZE_T))
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5screate_simple_f')
+
+!--------------------------------------------------------------------------------------------------
+! create Dataset
+ call h5dcreate_f(group, trim(label), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5dcreate_f')
+ call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit))
+
+!--------------------------------------------------------------------------------------------------
+!close types, dataspaces
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+
+end subroutine HDF5_addVectorDataset
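For orientation, a minimal sketch of how a caller might drive the vector writer. The file name, group name, field, node count, and offset below are assumptions for illustration, not names defined by this module. In a parallel run, dataspace_size is the global node count and mpiOffset is the first node owned by the calling rank; the serial case shown here writes everything at offset 0.

program write_vector_sketch
 use prec
 use HDF5
 use HDF5_Utilities
 implicit none
 real(pReal), dimension(3,10) :: v                 ! 3-vectors at 10 nodes (assumed)
 integer(HID_T) :: file_id, group_id

 v = 0.0_pReal
 file_id  = HDF5_openFile('geometry.hdf5','w')     ! name and mode flag assumed
 group_id = HDF5_addGroup2(file_id,'nodal')        ! group name assumed
 call HDF5_writeVectorDataset(group_id,v,'velocity','m/s',10_pInt,0_pInt)
 call HDF5_closeGroup(group_id)
 call HDF5_closeFile(file_id)
end program write_vector_sketch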
+!--------------------------------------------------------------------------------------------------
+!> @brief writes to a new scalar dataset in the given group location
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_writeScalarDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset)
+ use hdf5
+
+ implicit none
+ integer(HID_T), intent(in) :: group
+ character(len=*), intent(in) :: SIunit,label
+ integer(pInt), intent(in) :: dataspace_size, mpiOffset
+ real(pReal), intent(in), dimension(:) :: dataset
+
+ integer :: hdferr, nNodes
+ integer(HID_T) :: dset_id, space_id, memspace, plist_id
+
+ integer(HSIZE_T), dimension(1) :: counter
+ integer(HSIZE_T), dimension(1) :: fileOffset
+
+ nNodes = size(dataset)
+ if (nNodes < 1) return
+
+ call HDF5_addScalarDataset(group,dataspace_size,label,SIunit)
+ call h5dopen_f(group, label, dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dopen_f')
+
+ ! Define and select hyperslabs
+ counter = size(dataset)  ! how big i am
+ fileOffset = mpiOffset   ! where i start to write my data
+
+ call h5screate_simple_f(1, counter, memspace, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5screate_simple_f')
+ call h5dget_space_f(dset_id, space_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dget_space_f')
+ call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5sselect_hyperslab_f')
+
+ ! Create property list for collective dataset write
+#ifdef PETSc
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5pcreate_f')
+ call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5pset_dxpl_mpio_f')
+#endif
+
+ ! Write the dataset collectively
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([dataspace_size],HSIZE_T), hdferr, &
+                 file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dwrite_f')
+
+ call h5sclose_f(space_id, hdferr)
+ call h5sclose_f(memspace, hdferr)
+ call h5dclose_f(dset_id, hdferr)
+ call h5pclose_f(plist_id, hdferr)
+
+end subroutine HDF5_writeScalarDataset
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for reading dataset of the type pReal with 1 dimension
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_read_pReal_1(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(out), dimension(:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape
+ integer :: hdferr
+ integer(HID_T) :: dset_id
+ myShape = shape(dataset)
+
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dopen_f')
+ call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dread_f')
+ call h5dclose_f(dset_id,hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dclose_f')
+
+end subroutine HDF5_read_pReal_1
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for reading dataset of the type pReal with 2 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_read_pReal_2(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(out), dimension(:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id
+ myShape = shape(dataset)
+
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dopen_f')
+ call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dread_f')
+ call h5dclose_f(dset_id,hdferr)
+ if (hdferr /= 0)
call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dclose_f') + +end subroutine HDF5_read_pReal_2 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pReal with 3 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal_3(dataset,loc_id,datasetName) + + implicit none + real(pReal), intent(out), dimension(:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dclose_f') + +end subroutine HDF5_read_pReal_3 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pReal with 4 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal_4(dataset,loc_id,datasetName) + + implicit none + real(pReal), intent(out), dimension(:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dclose_f') + +end subroutine HDF5_read_pReal_4 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pReal with 5 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) + + implicit none + real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dclose_f') + +end subroutine HDF5_read_pReal_5 + 
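The rank-specific readers are reached through the generic HDF5_read interface, which dispatches on type and rank. A minimal usage sketch follows; the file name, group name, dataset name, and field shape are assumptions for illustration, not names defined by this module.

program read_sketch
 use prec
 use HDF5
 use HDF5_Utilities
 implicit none
 real(pReal), dimension(3,3,100) :: F               ! per-point tensor field (assumed shape)
 integer(HID_T) :: file_id, group_id

 file_id  = HDF5_openFile('restart.hdf5','r')       ! name and mode flag assumed
 group_id = HDF5_openGroup2(file_id,'constituent')  ! group name assumed
 call HDF5_read(F,group_id,'F')                     ! resolves to HDF5_read_pReal_3
 call HDF5_closeGroup(group_id)
 call HDF5_closeFile(file_id)
end program read_sketch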
+!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pReal with 6 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal_6(dataset,loc_id,datasetName) + + implicit none + real(pReal), intent(out), dimension(:,:,:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dclose_f') + +end subroutine HDF5_read_pReal_6 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pReal with 7 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal_7(dataset,loc_id,datasetName) + + implicit none + real(pReal), intent(out), dimension(:,:,:,:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dclose_f') + +end subroutine HDF5_read_pReal_7 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pInt with 1 dimension +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt_1(dataset,loc_id,datasetName) + + implicit none + integer(pInt), intent(out), dimension(:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dclose_f') + +end subroutine HDF5_read_pInt_1 + +!-------------------------------------------------------------------------------------------------- 
+!> @brief subroutine for reading dataset of the type pInt with 2 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt_2(dataset,loc_id,datasetName) + + implicit none + integer(pInt), intent(out), dimension(:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dclose_f') + +end subroutine HDF5_read_pInt_2 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pInt with 3 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt_3(dataset,loc_id,datasetName) + + implicit none + integer(pInt), intent(out), dimension(:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dclose_f') + +end subroutine HDF5_read_pInt_3 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pInt with 4 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt_4(dataset,loc_id,datasetName) + + implicit none + integer(pInt), intent(out), dimension(:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dclose_f') + +end subroutine HDF5_read_pInt_4 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pInt with 5 dimensions 
+!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt_5(dataset,loc_id,datasetName) + + implicit none + integer(pInt), intent(out), dimension(:,:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dclose_f') + +end subroutine HDF5_read_pInt_5 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pInt with 6 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt_6(dataset,loc_id,datasetName) + + implicit none + integer(pInt), intent(out), dimension(:,:,:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dclose_f') + +end subroutine HDF5_read_pInt_6 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of the type pInt with 7 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName) + + implicit none + integer(pInt), intent(out), dimension(:,:,:,:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + integer(pInt),dimension(:), allocatable :: myShape + + integer :: hdferr + integer(HID_T) :: dset_id + myShape = shape(dataset) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dopen_f') + call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dread_f') + call h5dclose_f(dset_id,hdferr) + if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dclose_f') + +end subroutine HDF5_read_pInt_7 + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for writing dataset of the type pReal with 1 dimensions 
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pReal1(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(in), dimension(:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape1: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape1: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape1: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape1: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape1: h5sclose_f')
+
+end subroutine HDF5_write_pReal1
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pReal with 2 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pReal2(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(in), dimension(:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape2: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape2: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape2: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape2: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape2: h5sclose_f')
+
+end subroutine HDF5_write_pReal2
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pReal with 3 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pReal3(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(in), dimension(:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape3: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape3: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape3: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape3: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape3: h5sclose_f')
+
+end subroutine HDF5_write_pReal3
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pReal with 4 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pReal4(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(in), dimension(:,:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape4: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape4: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape4: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape4: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape4: h5sclose_f')
+
+end subroutine HDF5_write_pReal4
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pReal with 5 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pReal5(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(in), dimension(:,:,:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape5: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape5: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape5: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape5: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape5: h5sclose_f')
+
+end subroutine HDF5_write_pReal5
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pReal with 6 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pReal6(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(in), dimension(:,:,:,:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape6: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape6: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape6: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape6: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape6: h5sclose_f')
+
+end subroutine HDF5_write_pReal6
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pReal with 7 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pReal7(dataset,loc_id,datasetName)
+
+ implicit none
+ real(pReal), intent(in), dimension(:,:,:,:,:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape7: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape7: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape7: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape7: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pReal_shape7: h5sclose_f')
+
+end subroutine HDF5_write_pReal7
+
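The write side mirrors the read side: the generic HDF5_write interface dispatches on type and rank. A sketch with assumed names follows; nothing below is defined by this module except the interface itself.

program write_sketch
 use prec
 use HDF5
 use HDF5_Utilities
 implicit none
 integer(pInt), dimension(100) :: phaseAt           ! per-point phase index (assumed)
 integer(HID_T) :: file_id

 phaseAt = 1_pInt
 file_id = HDF5_openFile('restart.hdf5','w')        ! name and mode flag assumed
 call HDF5_write(phaseAt,file_id,'phaseAt')         ! resolves to HDF5_write_pInt1
 call HDF5_closeFile(file_id)
end program write_sketch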
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pInt with 1 dimension
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pInt1(dataset,loc_id,datasetName)
+
+ implicit none
+ integer(pInt), intent(in), dimension(:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape1: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape1: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape1: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape1: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape1: h5sclose_f')
+
+end subroutine HDF5_write_pInt1
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pInt with 2 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pInt2(dataset,loc_id,datasetName)
+
+ implicit none
+ integer(pInt), intent(in), dimension(:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape2: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape2: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape2: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape2: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape2: h5sclose_f')
+
+end subroutine HDF5_write_pInt2
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pInt with 3 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pInt3(dataset,loc_id,datasetName)
+
+ implicit none
+ integer(pInt), intent(in), dimension(:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape3: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape3: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape3: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape3: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape3: h5sclose_f')
+
+end subroutine HDF5_write_pInt3
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pInt with 4 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pInt4(dataset,loc_id,datasetName)
+
+ implicit none
+ integer(pInt), intent(in), dimension(:,:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape4: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape4: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape4: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape4: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape4: h5sclose_f')
+
+end subroutine HDF5_write_pInt4
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pInt with 5 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pInt5(dataset,loc_id,datasetName)
+
+ implicit none
+ integer(pInt), intent(in), dimension(:,:,:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape5: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape5: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape5: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape5: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape5: h5sclose_f')
+
+end subroutine HDF5_write_pInt5
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pInt with 6 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pInt6(dataset,loc_id,datasetName)
+
+ implicit none
+ integer(pInt), intent(in), dimension(:,:,:,:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape6: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape6: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape6: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape6: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape6: h5sclose_f')
+
+end subroutine HDF5_write_pInt6
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for writing dataset of the type pInt with 7 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_write_pInt7(dataset,loc_id,datasetName)
+
+ implicit none
+ integer(pInt), intent(in), dimension(:,:,:,:,:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id !< file or group handle
+ character(len=*), intent(in) :: datasetName !< name of the dataset in the file
+ integer(pInt),dimension(:), allocatable :: myShape !< shape of the dataset
+
+ integer :: hdferr
+ integer(HID_T) :: dset_id, space_id
+ myShape = shape(dataset)
+
+ call h5screate_simple_f(size(myShape), int(myShape,HSIZE_T), space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape7: h5screate_simple_f')
+ call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape7: h5dcreate_f')
+ call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(myShape,HSIZE_T), hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape7: h5dwrite_f')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape7: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+ if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_write_pInt_shape7: h5sclose_f')
+
+end subroutine HDF5_write_pInt7
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a new scalar dataset to the given group location
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_addScalarDataset(group,nnodes,label,SIunit)
+ use hdf5
+
+ implicit none
+ integer(HID_T), intent(in) :: group
+ integer(pInt), intent(in) :: nnodes
+ character(len=*), intent(in) :: SIunit,label
+
+ integer :: hdferr
+ integer(HID_T) :: space_id, dset_id
+
+!--------------------------------------------------------------------------------------------------
+! create dataspace
+ call h5screate_simple_f(1, int([nnodes],HSIZE_T), space_id, hdferr, &
+                         int([nnodes],HSIZE_T))
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5screate_simple_f')
+
+!--------------------------------------------------------------------------------------------------
+! create Dataset
+ call h5dcreate_f(group, trim(label),H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dcreate_f')
+ call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit))
+
+!--------------------------------------------------------------------------------------------------
+!close types, dataspaces
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dclose_f')
+ call h5sclose_f(space_id, hdferr)
+
+end subroutine HDF5_addScalarDataset
+
+!--------------------------------------------------------------------------------------------------
+!> @brief copies the current temp results to the actual results file
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_forwardResults(time)
+ use hdf5
+ use IO, only: &
+   IO_intOut
+
+ implicit none
+ integer :: hdferr
+ integer(HID_T) :: currentIncID
+ real(pReal), intent(in) :: time
+ character(len=1024) :: myName
+
+ currentInc = currentInc + 1_pInt
+ write(6,*) 'forward results';flush(6)
+ write(myName,'(a,'//IO_intOut(currentInc)//')') 'inc',currentInc
+ currentIncID = HDF5_addGroup(myName)
+ call HDF5_setLink(myName,'current')
+!
call HDF5_flush(resultsFile) + call HDF5_closeGroup(currentIncID) + +end subroutine HDF5_forwardResults + +end module HDF5_Utilities \ No newline at end of file From b48bd3a08223d794fe52368bfdbefe08dbbf4bcd Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 15 Nov 2018 07:07:59 +0100 Subject: [PATCH 02/89] keep only general functionality in HDF5_results --- src/HDF5_utilities.f90 | 1267 +--------------------------------------- 1 file changed, 3 insertions(+), 1264 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 43a7a26e8..1193eb25d 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -63,25 +63,9 @@ module HDF5_Utilities end interface HDF5_write public :: & - HDF5_Utilities_init, & - HDF5_mappingPhase, & - HDF5_mappingHomog, & - HDF5_mappingCrystallite, & - HDF5_backwardMappingPhase, & - HDF5_backwardMappingHomog, & - HDF5_backwardMappingCrystallite, & - HDF5_mappingCells, & - HDF5_addGroup ,& + HDF5_utilities_init, & HDF5_closeGroup ,& - HDF5_openGroup, & HDF5_openGroup2, & - HDF5_forwardResults, & - HDF5_writeVectorDataset, & - HDF5_writeScalarDataset, & - HDF5_writeTensorDataset, & - HDF5_closeJobFile, & - HDF5_removeLink, & - HDF5_createFile, & HDF5_closeFile, & HDF5_addGroup2, & HDF5_openFile, & @@ -100,9 +84,6 @@ subroutine HDF5_Utilities_init write(6,'(/,a)') ' <<<+- HDF5_Utilities init -+>>>' #include "compilation_info.f90" - !currentInc = -1_pInt ToDo - !call HDF5_createJobFile ToDo - !-------------------------------------------------------------------------------------------------- !initialize HDF5 library and check if integer and float type size match call h5open_f(hdferr) @@ -116,36 +97,7 @@ subroutine HDF5_Utilities_init end subroutine HDF5_Utilities_init -!-------------------------------------------------------------------------------------------------- -!> @brief creates and initializes HDF5 output files -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_createJobFile - use hdf5 - use DAMASK_interface, only: & - getSolverJobName - implicit none - integer :: hdferr - character(len=1024) :: path -#ifdef PETSc -#include - - call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pcreate_f') - call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pset_fapl_mpio_f') -#endif - -!-------------------------------------------------------------------------------------------------- -! 
open file - path = trim(getSolverJobName())//'.'//'hdf5' - !call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr) - call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr,access_prp = plist_id) - if (hdferr < 0) call IO_error(100_pInt,ext_msg=path) - call HDF5_addStringAttribute(resultsFile,'createdBy',DAMASKVERSION) - call h5pclose_f(plist_id, hdferr) !neu - -end subroutine HDF5_createJobFile !-------------------------------------------------------------------------------------------------- @@ -180,20 +132,7 @@ end subroutine HDF5_createJobFile end function HDF5_createFile -!-------------------------------------------------------------------------------------------------- -!> @brief close the opened HDF5 output file -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_closeJobFile() - use hdf5 - implicit none - integer :: hdferr - call HDF5_removeLink('current') - call h5fclose_f(resultsFile,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeJobFile: h5fclose_f',el=hdferr) -! call h5close_f(hdferr) - -end subroutine HDF5_closeJobFile !-------------------------------------------------------------------------------------------------- !> @brief open and initializes HDF5 output file @@ -227,6 +166,7 @@ integer(HID_T) function HDF5_openFile(fileName,mode) end function HDF5_openFile + !-------------------------------------------------------------------------------------------------- !> @brief close the opened HDF5 output file !-------------------------------------------------------------------------------------------------- @@ -240,21 +180,6 @@ subroutine HDF5_closeFile(fileHandle) end subroutine HDF5_closeFile -!-------------------------------------------------------------------------------------------------- -!> @brief adds a new group to the results file -!-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_addGroup(groupName) - use hdf5 - - implicit none - character(len=*), intent(in) :: groupName - integer :: hdferr - - call h5gcreate_f(resultsFile, trim(groupName), HDF5_addGroup, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')') - -end function HDF5_addGroup - !-------------------------------------------------------------------------------------------------- !> @brief adds a new group to the fileHandle (additional to addGroup2) @@ -273,21 +198,6 @@ integer(HID_T) function HDF5_addGroup2(fileHandle,groupName) end function HDF5_addGroup2 -!-------------------------------------------------------------------------------------------------- -!> @brief open a group from the results file -!-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_openGroup(groupName) - use hdf5 - - implicit none - character(len=*), intent(in) :: groupName - integer :: hdferr - - call h5gopen_f(resultsFile, trim(groupName), HDF5_openGroup, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(groupName)//')') - -end function HDF5_openGroup - !-------------------------------------------------------------------------------------------------- !> @brief open an existing group of a file !-------------------------------------------------------------------------------------------------- @@ -304,42 +214,6 @@ integer(HID_T) function HDF5_openGroup2(FileReadID,groupName) end function 
HDF5_openGroup2 -!-------------------------------------------------------------------------------------------------- -!> @brief set link to object in results file -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_setLink(path,link) - use hdf5 - - implicit none - character(len=*), intent(in) :: path, link - integer :: hdferr - logical :: linkExists - - call h5lexists_f(resultsFile, link,linkExists, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link)//')') - if (linkExists) then - call h5ldelete_f(resultsFile,link, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5ldelete_soft_f ('//trim(link)//')') - endif - call h5lcreate_soft_f(path, resultsFile, link, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lcreate_soft_f ('//trim(path)//' '//trim(link)//')') - -end subroutine HDF5_setLink - -!-------------------------------------------------------------------------------------------------- -!> @brief remove link to an object -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_removeLink(link) - use hdf5 - - implicit none - character(len=*), intent(in) :: link - integer :: hdferr - - call h5ldelete_f(resultsFile,link, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_removeLink: h5ldelete_soft_f ('//trim(link)//')') - -end subroutine HDF5_removeLink !-------------------------------------------------------------------------------------------------- !> @brief close a group @@ -356,1084 +230,6 @@ subroutine HDF5_closeGroup(ID) end subroutine HDF5_closeGroup -!-------------------------------------------------------------------------------------------------- -!> @brief adds a StringAttribute to the results file -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_addStringAttribute(entity,attrLabel,attrValue) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: entity - character(len=*), intent(in) :: attrLabel, attrValue - integer :: hdferr - integer(HID_T) :: attr_id, space_id, type_id - - call h5screate_f(H5S_SCALAR_F,space_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5screate_f') - call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tcopy_f') - call h5tset_size_f(type_id, int(len(trim(attrValue)),HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tset_size_f') - call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5acreate_f') - call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5awrite_f') - call h5aclose_f(attr_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5aclose_f') - call h5tclose_f(type_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tclose_f') - call h5sclose_f(space_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5sclose_f') - -end subroutine HDF5_addStringAttribute - -!-------------------------------------------------------------------------------------------------- -!> @brief adds the unique mapping from spatial position and 
constituent ID to results -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_mappingPhase(mapping,mapping2,Nconstituents,material_phase,phase_name,dataspace_size,mpiOffset,mpiOffset_phase) - use hdf5 - - implicit none - integer(pInt), intent(in) :: Nconstituents, dataspace_size, mpiOffset - integer(pInt), intent(in), dimension(:) :: mapping, mapping2 - character(len=*), intent(in), dimension(:) :: phase_name - integer(pInt), intent(in), dimension(:) :: mpiOffset_phase - integer(pInt), intent(in), dimension(:,:,:) :: material_phase - - character(len=len(phase_name(1))), dimension(:), allocatable :: namesNA - character(len=len(phase_name(1))) :: a - character(len=*), parameter :: n = "NULL" - - integer(pInt) :: hdferr, NmatPoints, i, j, k - integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, name_id, position_id, plist_id, memspace - integer(HID_T) :: dt5_id ! Memory datatype identifier - integer(SIZE_T) :: typesize, type_sizec, type_sizei, type_size - - integer(HSIZE_T), dimension(2) :: counter - integer(HSSIZE_T), dimension(2) :: fileOffset - integer(pInt), dimension(:,:), allocatable :: arrOffset - - a = n - allocate(namesNA(0:size(phase_name)),source=[a,phase_name]) - NmatPoints = size(mapping,1)/Nconstituents - mapping_ID = HDF5_openGroup("current/mapGeometry") - - allocate(arrOffset(Nconstituents,NmatPoints)) - do i=1_pInt, NmatPoints - do k=1_pInt, Nconstituents - do j=1_pInt, size(phase_name) - if(material_phase(k,1,i) == j) & - arrOffset(k,i) = mpiOffset_phase(j) - enddo - enddo - enddo - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(2, int([Nconstituents,dataspace_size],HSIZE_T), space_id, hdferr, & - int([Nconstituents,dataspace_size],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping') - -!-------------------------------------------------------------------------------------------------- -! compound type - ! First calculate total size by calculating sizes of each member - ! - CALL h5tcopy_f(H5T_NATIVE_CHARACTER, dt5_id, hdferr) - typesize = len(phase_name(1)) - CALL h5tset_size_f(dt5_id, typesize, hdferr) - CALL h5tget_size_f(dt5_id, type_sizec, hdferr) - CALL h5tget_size_f(H5T_STD_I32LE,type_sizei, hdferr) - type_size = type_sizec + type_sizei - call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping: h5tcreate_f dtype_id') - - call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt5_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tinsert_f 0') - call h5tinsert_f(dtype_id, "Position", type_sizec, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tinsert_f 2') - -!-------------------------------------------------------------------------------------------------- -! create Dataset - call h5dcreate_f(mapping_id, 'constitutive', dtype_id, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase') - -!-------------------------------------------------------------------------------------------------- -! 
Create memory types (one compound datatype for each member) - call h5tcreate_f(H5T_COMPOUND_F, int(type_sizec,SIZE_T), name_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tcreate_f instance_id') - call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt5_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tinsert_f instance_id') - - call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tcreate_f position_id') - call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tinsert_f position_id') - -!-------------------------------------------------------------------------------------------------- -! Define and select hyperslabs - counter(1) = Nconstituents ! how big i am - counter(2) = NmatPoints - fileOffset(1) = 0 ! where i start to write my data - fileOffset(2) = mpiOffset - - call h5screate_simple_f(2, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5pset_dxpl_mpio_f') -#endif - -!-------------------------------------------------------------------------------------------------- -! write data by fields in the datatype. Fields order is not important. - call h5dwrite_f(dset_id, name_id, reshape(namesNA(mapping),[Nconstituents,NmatPoints]), & - int([Nconstituents, dataspace_size],HSIZE_T), hdferr, & - file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5dwrite_f position_id') - - call h5dwrite_f(dset_id, position_id, reshape(mapping2-1_pInt,[Nconstituents,NmatPoints])+arrOffset, & - int([Nconstituents, dataspace_size],HSIZE_T), hdferr, & - file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5dwrite_f instance_id') - -!-------------------------------------------------------------------------------------------------- -! 
close types, dataspaces - call h5tclose_f(dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tclose_f dtype_id') - call h5tclose_f(position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tclose_f position_id') - call h5tclose_f(name_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tclose_f name_id ') - call h5tclose_f(dt5_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5tclose_f dt5_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5dclose_f') - call h5sclose_f(space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5sclose_f space_id') - call h5sclose_f(memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5sclose_f memspace') - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingPhase: h5pclose_f') - call HDF5_closeGroup(mapping_ID) - -end subroutine HDF5_mappingPhase - -!-------------------------------------------------------------------------------------------------- -!> @brief adds the backward mapping from spatial position and constituent ID to results -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_backwardMappingPhase(material_phase,phasememberat,phase_name,dataspace_size,mpiOffset,mpiOffset_phase) - use hdf5 - - implicit none - integer(pInt), intent(in), dimension(:,:,:) :: material_phase, phasememberat - character(len=*), intent(in), dimension(:) :: phase_name - integer(pInt), intent(in), dimension(:) :: dataspace_size, mpiOffset_phase - integer(pInt), intent(in) :: mpiOffset - - integer(pInt) :: hdferr, NmatPoints, Nconstituents, i, j - integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, position_id, plist_id, memspace - integer(SIZE_T) :: type_size - - integer(pInt), dimension(:,:), allocatable :: arr - - integer(HSIZE_T), dimension(1) :: counter - integer(HSSIZE_T), dimension(1) :: fileOffset - - character(len=64) :: phaseID - - Nconstituents = size(phasememberat,1) - NmatPoints = count(material_phase /=0_pInt)/Nconstituents - - allocate(arr(2,NmatPoints*Nconstituents)) - - do i=1_pInt, NmatPoints - do j=Nconstituents-1_pInt, 0_pInt, -1_pInt - arr(1,Nconstituents*i-j) = i-1_pInt - enddo - enddo - arr(2,:) = pack(material_phase,material_phase/=0_pInt) - - do i=1_pInt, size(phase_name) - write(phaseID, '(i0)') i - mapping_ID = HDF5_openGroup('/current/constitutive/'//trim(phaseID)//'_'//phase_name(i)) - NmatPoints = count(material_phase == i) - -!-------------------------------------------------------------------------------------------------- - ! create dataspace - call h5screate_simple_f(1, int([dataspace_size(i)],HSIZE_T), space_id, hdferr, & - int([dataspace_size(i)],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping') - -!-------------------------------------------------------------------------------------------------- - ! 
compound type - call h5tget_size_f(H5T_STD_I32LE, type_size, hdferr) - call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping: h5tcreate_f dtype_id') - - call h5tinsert_f(dtype_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tinsert_f 0') - -!-------------------------------------------------------------------------------------------------- - ! create Dataset - call h5dcreate_f(mapping_id, 'mapGeometry', dtype_id, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase') - -!-------------------------------------------------------------------------------------------------- - ! Create memory types (one compound datatype for each member) - call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tcreate_f position_id') - call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tinsert_f position_id') - -!-------------------------------------------------------------------------------------------------- - ! Define and select hyperslabs - counter = NmatPoints ! how big i am - fileOffset = mpiOffset_phase(i) ! where i start to write my data - - call h5screate_simple_f(1, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5pset_dxpl_mpio_f') -#endif - -!-------------------------------------------------------------------------------------------------- - ! write data by fields in the datatype. Fields order is not important. 
- call h5dwrite_f(dset_id, position_id, pack(arr(1,:),arr(2,:)==i)+mpiOffset, int([dataspace_size(i)],HSIZE_T),& - hdferr, file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5dwrite_f instance_id') - -!-------------------------------------------------------------------------------------------------- - !close types, dataspaces - call h5tclose_f(dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tclose_f dtype_id') - call h5tclose_f(position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5tclose_f position_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5dclose_f') - call h5sclose_f(space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5sclose_f space_id') - call h5sclose_f(memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5sclose_f memspace') - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingPhase: h5pclose_f') - call HDF5_closeGroup(mapping_ID) - - enddo - -end subroutine HDF5_backwardMappingPhase - -!-------------------------------------------------------------------------------------------------- -!> @brief adds the unique mapping from spatial position and constituent ID to results -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_mappingHomog(material_homog,homogmemberat,homogenization_name,dataspace_size,mpiOffset,mpiOffset_homog) - use hdf5 - - implicit none - integer(pInt), intent(in), dimension(:,:) :: material_homog, homogmemberat - character(len=*), intent(in), dimension(:) :: homogenization_name - integer(pInt), intent(in), dimension(:) :: mpiOffset_homog - integer(pInt), intent(in) :: dataspace_size, mpiOffset - - integer(pInt) :: hdferr, NmatPoints, i, j - integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, name_id, position_id, plist_id, memspace - - integer(HID_T) :: dt5_id ! Memory datatype identifier - integer(SIZE_T) :: typesize, type_sizec, type_sizei, type_size - - integer(HSIZE_T), dimension(1) :: counter - integer(HSSIZE_T), dimension(1) :: fileOffset - integer(pInt), dimension(:), allocatable :: arrOffset - - NmatPoints = count(material_homog /=0_pInt) - mapping_ID = HDF5_openGroup("current/mapGeometry") - - allocate(arrOffset(NmatPoints)) - do i=1_pInt, NmatPoints - do j=1_pInt, size(homogenization_name) - if(material_homog(1,i) == j) & - arrOffset(i) = mpiOffset_homog(j) - enddo - enddo - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(1, int([dataspace_size],HSIZE_T), space_id, hdferr, & - int([dataspace_size],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping') - -!-------------------------------------------------------------------------------------------------- -! compound type - ! First calculate total size by calculating sizes of each member - ! 
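! Note: the member sizes are queried from the library (h5tget_size_f) rather than
! hard-coded, so the compound layout stays consistent if the string length or integer
! width changes. Members are then inserted at explicit byte offsets, giving a packed
! layout — a sketch, assuming a name of sizeC bytes followed by one integer of sizeI bytes:
!
!   call h5tcreate_f(H5T_COMPOUND_F, sizeC+sizeI, dtype_id, hdferr)
!   call h5tinsert_f(dtype_id, "Name",     0_SIZE_T, str_id,        hdferr) ! bytes [0,sizeC)
!   call h5tinsert_f(dtype_id, "Position", sizeC,    H5T_STD_I32LE, hdferr) ! bytes [sizeC,sizeC+sizeI)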
- CALL h5tcopy_f(H5T_NATIVE_CHARACTER, dt5_id, hdferr) - typesize = len(homogenization_name(1)) - CALL h5tset_size_f(dt5_id, typesize, hdferr) - CALL h5tget_size_f(dt5_id, type_sizec, hdferr) - CALL h5tget_size_f(H5T_STD_I32LE,type_sizei, hdferr) - type_size = type_sizec + type_sizei - call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping: h5tcreate_f dtype_id') - - call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt5_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tinsert_f 0') - call h5tinsert_f(dtype_id, "Position", type_sizec, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tinsert_f 2') - -!-------------------------------------------------------------------------------------------------- -! create Dataset - call h5dcreate_f(mapping_id, 'homogenization', dtype_id, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog') - -!-------------------------------------------------------------------------------------------------- -! Create memory types (one compound datatype for each member) - call h5tcreate_f(H5T_COMPOUND_F, int(type_sizec,SIZE_T), name_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tcreate_f instance_id') - call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt5_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tinsert_f instance_id') - - call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tcreate_f position_id') - call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tinsert_f position_id') - -!-------------------------------------------------------------------------------------------------- -! Define and select hyperslabs - counter = NmatPoints ! how big i am - fileOffset = mpiOffset ! where i start to write my data - - call h5screate_simple_f(1, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5pset_dxpl_mpio_f') -#endif - -!-------------------------------------------------------------------------------------------------- -! write data by fields in the datatype. Fields order is not important. 
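! Note: each h5dwrite_f below goes through a one-member memory datatype (name_id or
! position_id), so HDF5 matches the field by name and the order of the two writes does
! not matter. A minimal sketch, with hypothetical buffers nameBuf and posBuf:
!
!   call h5dwrite_f(dset_id, name_id,     nameBuf, dims, hdferr)  ! fills the "Name" field
!   call h5dwrite_f(dset_id, position_id, posBuf,  dims, hdferr)  ! fills the "Position" field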
- call h5dwrite_f(dset_id, name_id, homogenization_name(pack(material_homog,material_homog/=0_pInt)), & - int([dataspace_size],HSIZE_T), hdferr, file_space_id = space_id, & - mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5dwrite_f position_id') - - call h5dwrite_f(dset_id, position_id, pack(homogmemberat-1_pInt,homogmemberat/=0_pInt) + arrOffset, & - int([dataspace_size],HSIZE_T), hdferr, file_space_id = space_id, & - mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5dwrite_f instance_id') - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces -call h5tclose_f(dtype_id, hdferr) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tclose_f dtype_id') -call h5tclose_f(position_id, hdferr) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tclose_f position_id') -call h5tclose_f(name_id, hdferr) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tclose_f name_id ') -call h5tclose_f(dt5_id, hdferr) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5tclose_f dt5_id') -call h5dclose_f(dset_id, hdferr) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5dclose_f') -call h5sclose_f(space_id, hdferr) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5sclose_f space_id') -call h5sclose_f(memspace, hdferr) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5sclose_f memspace') -call h5pclose_f(plist_id, hdferr) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomog: h5pclose_f') -call HDF5_closeGroup(mapping_ID) - -end subroutine HDF5_mappingHomog - -!-------------------------------------------------------------------------------------------------- -!> @brief adds the backward mapping from spatial position and constituent ID to results -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_backwardMappingHomog(material_homog,homogmemberat,homogenization_name,dataspace_size,mpiOffset,mpiOffset_homog) - use hdf5 - - implicit none - integer(pInt), intent(in), dimension(:,:) :: material_homog, homogmemberat - character(len=*), intent(in), dimension(:) :: homogenization_name - integer(pInt), intent(in), dimension(:) :: dataspace_size, mpiOffset_homog - integer(pInt), intent(in) :: mpiOffset - - integer(pInt) :: hdferr, NmatPoints, i - integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, position_id, plist_id, memspace - integer(SIZE_T) :: type_size - - integer(pInt), dimension(:,:), allocatable :: arr - - integer(HSIZE_T), dimension(1) :: counter - integer(HSSIZE_T), dimension(1) :: fileOffset - - character(len=64) :: homogID - - NmatPoints = count(material_homog /=0_pInt) - allocate(arr(2,NmatPoints)) - - arr(1,:) = (/(i, i=0_pint,NmatPoints-1_pInt)/) - arr(2,:) = pack(material_homog,material_homog/=0_pInt) - - do i=1_pInt, size(homogenization_name) - write(homogID, '(i0)') i - mapping_ID = HDF5_openGroup('/current/homogenization/'//trim(homogID)//'_'//homogenization_name(i)) - -!-------------------------------------------------------------------------------------------------- - ! 
create dataspace - call h5screate_simple_f(1, int([dataspace_size(i)],HSIZE_T), space_id, hdferr, & - int([dataspace_size(i)],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping') - -!-------------------------------------------------------------------------------------------------- - ! compound type - call h5tget_size_f(H5T_STD_I32LE, type_size, hdferr) - call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping: h5tcreate_f dtype_id') - - call h5tinsert_f(dtype_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tinsert_f 0') - -!-------------------------------------------------------------------------------------------------- - ! create Dataset - call h5dcreate_f(mapping_id, 'mapGeometry', dtype_id, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog') - -!-------------------------------------------------------------------------------------------------- - ! Create memory types (one compound datatype for each member) - call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tcreate_f position_id') - call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tinsert_f position_id') - -!-------------------------------------------------------------------------------------------------- - ! Define and select hyperslabs - counter = NmatPoints ! how big i am - fileOffset = mpiOffset_homog(i) ! where i start to write my data - - call h5screate_simple_f(1, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5pset_dxpl_mpio_f') -#endif - -!-------------------------------------------------------------------------------------------------- - ! write data by fields in the datatype. Fields order is not important. 
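! Note: pack() gathers the local positions of all points belonging to instance i, and
! adding mpiOffset shifts them from process-local to global numbering. A self-contained
! demonstration of the pack() semantics used below (hypothetical values):
!
!   program pack_demo
!     implicit none
!     integer :: inst(5) = [1,2,1,3,1]      ! instance id per point
!     integer :: pos(5)  = [0,1,2,3,4]      ! local position per point
!     print *, pack(pos, inst == 1)         ! prints 0 2 4
!   end program pack_demo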
- call h5dwrite_f(dset_id, position_id, pack(arr(1,:),arr(2,:)==i)+mpiOffset,int([dataspace_size(i)],HSIZE_T),& - hdferr, file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5dwrite_f instance_id') - -!-------------------------------------------------------------------------------------------------- - !close types, dataspaces - call h5tclose_f(dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tclose_f dtype_id') - call h5tclose_f(position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5tclose_f position_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5dclose_f') - call h5sclose_f(space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5sclose_f space_id') - call h5sclose_f(memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5sclose_f memspace') - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingHomog: h5pclose_f') - call HDF5_closeGroup(mapping_ID) - - enddo - -end subroutine HDF5_backwardMappingHomog - -!-------------------------------------------------------------------------------------------------- -!> @brief adds the unique mapping from spatial position and constituent ID to results -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_mappingCrystallite(crystalliteAt,crystmemberAt,crystallite_name,dataspace_size,mpiOffset,mpiOffset_cryst) - use hdf5 - - implicit none - integer(pInt), intent(in), dimension(:,:) :: crystalliteAt - integer(pInt), intent(in), dimension(:,:,:) :: crystmemberAt - character(len=*), intent(in), dimension(:) :: crystallite_name - integer(pInt), intent(in), dimension(:) :: mpiOffset_cryst - integer(pInt), intent(in) :: dataspace_size, mpiOffset - - integer :: hdferr - integer(pInt) :: NmatPoints, Nconstituents, i, j - integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, name_id, plist_id, memspace - - integer(HID_T), dimension(:), allocatable :: position_id - - integer(HID_T) :: dt5_id ! Memory datatype identifier - integer(SIZE_T) :: typesize, type_sizec, type_sizei, type_size - - integer(HSIZE_T), dimension(1) :: counter - integer(HSSIZE_T), dimension(1) :: fileOffset - integer(pInt), dimension(:), allocatable :: arrOffset - - character(len=64) :: m - - Nconstituents = size(crystmemberAt,1) - NmatPoints = count(crystalliteAt /=0_pInt) - mapping_ID = HDF5_openGroup("current/mapGeometry") - - allocate(position_id(Nconstituents)) - - allocate(arrOffset(NmatPoints)) - do i=1_pInt, NmatPoints - do j=1_pInt, size(crystallite_name) - if(crystalliteAt(1,i) == j) & - arrOffset(i) = Nconstituents*mpiOffset_cryst(j) - enddo - enddo - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(1, int([dataspace_size],HSIZE_T), space_id, hdferr, & - int([dataspace_size],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping') - -!-------------------------------------------------------------------------------------------------- -! compound type - ! First calculate total size by calculating sizes of each member - ! 
- CALL h5tcopy_f(H5T_NATIVE_CHARACTER, dt5_id, hdferr) - typesize = len(crystallite_name(1)) - CALL h5tset_size_f(dt5_id, typesize, hdferr) - CALL h5tget_size_f(dt5_id, type_sizec, hdferr) - CALL h5tget_size_f(H5T_STD_I32LE, type_sizei, hdferr) - type_size = type_sizec + type_sizei*Nconstituents - call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeMapping: h5tcreate_f dtype_id') - - call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt5_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f 0') - do i=1_pInt, Nconstituents - write(m, '(i0)') i - call h5tinsert_f(dtype_id, "Position "//trim(m), type_sizec+(i-1)*type_sizei, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f 2 '//trim(m)) - enddo - -!-------------------------------------------------------------------------------------------------- -! create Dataset - call h5dcreate_f(mapping_id, 'crystallite', dtype_id, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite') - -!-------------------------------------------------------------------------------------------------- -! Create memory types (one compound datatype for each member) - call h5tcreate_f(H5T_COMPOUND_F, int(type_sizec,SIZE_T), name_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f instance_id') - call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt5_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f instance_id') - - do i=1_pInt, Nconstituents - write(m, '(i0)') i - call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id(i), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f position_id') - call h5tinsert_f(position_id(i), "Position "//trim(m), 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f position_id') - enddo - -!-------------------------------------------------------------------------------------------------- -! Define and select hyperslabs - counter = NmatPoints ! how big i am - fileOffset = mpiOffset ! where i start to write my data - - call h5screate_simple_f(1, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5pset_dxpl_mpio_f') -#endif - -!-------------------------------------------------------------------------------------------------- -! write data by fields in the datatype. Fields order is not important. 
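! Note: the crystallite mapping stores one "Position <i>" member per constituent, hence
! the loop below issues one field write per constituent. With the packed layout set up
! above (sizeC bytes of name, sizeI bytes per integer member), the byte offsets are:
!
!   "Name"        -> 0
!   "Position i"  -> sizeC + (i-1)*sizeI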
- call h5dwrite_f(dset_id, name_id, crystallite_name(pack(crystalliteAt,crystalliteAt/=0_pInt)), & - int([dataspace_size],HSIZE_T), hdferr, file_space_id = space_id, & - mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dwrite_f position_id') - - do i=1_pInt, Nconstituents - call h5dwrite_f(dset_id, position_id(i), pack(crystmemberAt(i,:,:)-1_pInt,crystmemberAt(i,:,:)/=0_pInt)+arrOffset,& - int([dataspace_size],HSIZE_T), hdferr, file_space_id = space_id, & - mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dwrite_f instance_id') - enddo - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5tclose_f(dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f dtype_id') - do i=1_pInt, Nconstituents - call h5tclose_f(position_id(i), hdferr) - enddo - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f position_id') - call h5tclose_f(name_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f name_id') - call h5tclose_f(dt5_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f dt5_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dclose_f') - call h5sclose_f(space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5sclose_f space_id') - call h5sclose_f(memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5sclose_f memspace') - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5pclose_f') - call HDF5_closeGroup(mapping_ID) - -end subroutine HDF5_mappingCrystallite - - -!-------------------------------------------------------------------------------------------------- -!> @brief adds the backward mapping from spatial position and constituent ID to results -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_backwardMappingCrystallite(crystalliteAt,crystmemberAt,crystallite_name,dataspace_size,mpiOffset,mpiOffset_cryst) - use hdf5 - - implicit none - integer(pInt), intent(in), dimension(:,:) :: crystalliteAt - integer(pInt), intent(in), dimension(:,:,:) :: crystmemberAt - character(len=*), intent(in), dimension(:) :: crystallite_name - integer(pInt), intent(in), dimension(:) :: dataspace_size, mpiOffset_cryst - integer(pInt), intent(in) :: mpiOffset - - integer :: hdferr - integer(pInt) :: NmatPoints, Nconstituents, i, j - integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, position_id, plist_id, memspace - integer(SIZE_T) :: type_size - - integer(pInt), dimension(:,:), allocatable :: h_arr, arr - - integer(HSIZE_T), dimension(1) :: counter - integer(HSSIZE_T), dimension(1) :: fileOffset - - character(len=64) :: crystallID - - Nconstituents = size(crystmemberAt,1) - NmatPoints = count(crystalliteAt /=0_pInt) - - allocate(h_arr(2,NmatPoints)) - allocate(arr(2,Nconstituents*NmatPoints)) - - h_arr(1,:) = (/(i, i=0_pInt,NmatPoints-1_pInt)/) - h_arr(2,:) = pack(crystalliteAt,crystalliteAt/=0_pInt) - - do i=1_pInt, NmatPoints - do j=Nconstituents-1_pInt, 0_pInt, -1_pInt - arr(1,Nconstituents*i-j) = h_arr(1,i) - arr(2,Nconstituents*i-j) = h_arr(2,i) - enddo - enddo - - do i=1_pInt, 
size(crystallite_name) - if (crystallite_name(i) == 'none') cycle - write(crystallID, '(i0)') i - mapping_ID = HDF5_openGroup('/current/crystallite/'//trim(crystallID)//'_'//crystallite_name(i)) - NmatPoints = count(crystalliteAt == i) - -!-------------------------------------------------------------------------------------------------- - ! create dataspace - call h5screate_simple_f(1, int([Nconstituents*dataspace_size(i)],HSIZE_T), space_id, hdferr, & - int([Nconstituents*dataspace_size(i)],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping') - -!-------------------------------------------------------------------------------------------------- - ! compound type - call h5tget_size_f(H5T_STD_I32LE, type_size, hdferr) - call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeBackwardMapping: h5tcreate_f dtype_id') - - call h5tinsert_f(dtype_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tinsert_f 0') - -!-------------------------------------------------------------------------------------------------- - ! create Dataset - call h5dcreate_f(mapping_id, 'mapGeometry', dtype_id, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite') - -!-------------------------------------------------------------------------------------------------- - ! Create memory types - call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tcreate_f position_id') - call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tinsert_f position_id') - -!-------------------------------------------------------------------------------------------------- - ! Define and select hyperslabs - counter = Nconstituents*NmatPoints ! how big i am - fileOffset = Nconstituents*mpiOffset_cryst(i) ! where i start to write my data - - call h5screate_simple_f(1, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5pset_dxpl_mpio_f') -#endif - -!-------------------------------------------------------------------------------------------------- - ! write data by fields in the datatype. Fields order is not important. 
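! Note: arr holds Nconstituents consecutive entries per material point; for point i the
! index Nconstituents*i-j with j = Nconstituents-1,...,0 covers the slots
! Nconstituents*(i-1)+1 .. Nconstituents*i. Worked example for Nconstituents = 2:
! point 1 fills slots 1 and 2, point 2 fills slots 3 and 4. The pack()+mpiOffset below
! then selects the entries belonging to crystallite i in global numbering.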
- call h5dwrite_f(dset_id, position_id, pack(arr(1,:),arr(2,:)==i) + mpiOffset,& - int([Nconstituents*dataspace_size(i)],HSIZE_T), hdferr, file_space_id = space_id, & - mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5dwrite_f instance_id') - -!-------------------------------------------------------------------------------------------------- - !close types, dataspaces - call h5tclose_f(dtype_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tclose_f dtype_id') - call h5tclose_f(position_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5tclose_f position_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5dclose_f') - call h5sclose_f(space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5sclose_f space_id') - call h5sclose_f(memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5sclose_f memspace') - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_backwardMappingCrystallite: h5pclose_f') - call HDF5_closeGroup(mapping_ID) - - enddo - -end subroutine HDF5_backwardMappingCrystallite - -!-------------------------------------------------------------------------------------------------- -!> @brief adds the unique cell to node mapping -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_mappingCells(mapping) - use hdf5 - - implicit none - integer(pInt), intent(in), dimension(:) :: mapping - - integer :: hdferr, Nnodes - integer(HID_T) :: mapping_id, dset_id, space_id - - Nnodes=size(mapping) - mapping_ID = HDF5_openGroup("mapping") - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, & - int([Nnodes],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5screate_simple_f') - -!-------------------------------------------------------------------------------------------------- -! create Dataset - call h5dcreate_f(mapping_id, "Cell",H5T_NATIVE_INTEGER, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells') - -!-------------------------------------------------------------------------------------------------- -! write data by fields in the datatype. Fields order is not important. - call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, mapping, int([Nnodes],HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5dwrite_f instance_id') - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5dclose_f') - call h5sclose_f(space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5sclose_f') - call HDF5_closeGroup(mapping_ID) - -end subroutine HDF5_mappingCells - -!-------------------------------------------------------------------------------------------------- -!> @brief creates a new 3D Tensor dataset in the given group location !!!TODO: really necessary? 
-!-------------------------------------------------------------------------------------------------- -subroutine HDF5_addTensor3DDataset(group,Nnodes,tensorSize,label,SIunit) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - integer(pInt), intent(in) :: Nnodes, tensorSize - character(len=*), intent(in) :: SIunit, label - - integer :: hdferr - integer(HID_T) :: space_id, dset_id - integer(HSIZE_T), dimension(3) :: dataShape - - dataShape = int([tensorSize,tensorSize,Nnodes], HSIZE_T) - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(3, dataShape, space_id, hdferr, dataShape) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5screate_simple_f') - -!-------------------------------------------------------------------------------------------------- -! create Dataset - call h5dcreate_f(group, trim(label),H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5dcreate_f') - call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit)) - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5dclose_f') - call h5sclose_f(space_id, hdferr) - -end subroutine HDF5_addTensor3DDataset - -!-------------------------------------------------------------------------------------------------- -!> @brief creates a new vector dataset in the given group location !!!TODO: really necessary? -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_writeVectorDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - character(len=*), intent(in) :: SIunit,label - integer(pInt), intent(in) :: dataspace_size, mpiOffset - real(pReal), intent(in), dimension(:,:) :: dataset - - integer :: hdferr, vectorSize - integer(HID_T) :: dset_id, space_id, memspace, plist_id - - integer(HSIZE_T), dimension(2) :: counter - integer(HSSIZE_T), dimension(2) :: fileOffset - - if(any(shape(dataset) == 0)) return - - vectorSize = size(dataset,1) - - call HDF5_addVectorDataset(group,dataspace_size,vectorSize,label,SIunit) ! here nNodes need to be global - call h5dopen_f(group, label, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dopen_f') - - ! Define and select hyperslabs - counter(1) = vectorSize ! how big i am - counter(2) = size(dataset,2) - fileOffset(1) = 0 ! where i start to write my data - fileOffset(2) = mpiOffset - - call h5screate_simple_f(2, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5sselect_hyperslab_f') - - ! 
Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5pset_dxpl_mpio_f') -#endif - - ! Write the dataset collectively - call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([vectorSize, dataspace_size],HSIZE_T), hdferr, & - file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dwrite_f') - - call h5sclose_f(space_id, hdferr) - call h5sclose_f(memspace, hdferr) - call h5dclose_f(dset_id, hdferr) - call h5pclose_f(plist_id, hdferr) - -end subroutine HDF5_writeVectorDataset - -!-------------------------------------------------------------------------------------------------- -!> @brief creates a new tensor dataset in the given group location -! by default, a 3x3 tensor is assumed !!!TODO: really necessary? -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_writeTensorDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - character(len=*), intent(in) :: SIunit,label - integer(pInt), intent(in) :: dataspace_size, mpiOffset - real(pReal), intent(in), dimension(:,:,:) :: dataset - - integer :: hdferr, tensorSize - integer(HID_T) :: dset_id, space_id, memspace, plist_id - - integer(HSIZE_T), dimension(3) :: counter - integer(HSSIZE_T), dimension(3) :: fileOffset - - if(any(shape(dataset) == 0)) return - - tensorSize = size(dataset,1) - - call HDF5_addTensor3DDataset(group,dataspace_size,tensorSize,label,SIunit) - call h5dopen_f(group, label, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dopen_f') - - ! Define and select hyperslabs - counter(1) = tensorSize ! how big i am - counter(2) = tensorSize - counter(3) = size(dataset,3) - fileOffset(1) = 0 ! where i start to write my data - fileOffset(2) = 0 - fileOffset(3) = mpiOffset - - call h5screate_simple_f(3, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5sselect_hyperslab_f') - - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5pset_dxpl_mpio_f') -#endif - - ! 
Write the dataset collectively - call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([tensorSize, dataspace_size],HSIZE_T), hdferr, & - file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dwrite_f') - - call h5sclose_f(space_id, hdferr) - call h5sclose_f(memspace, hdferr) - call h5dclose_f(dset_id, hdferr) - call h5pclose_f(plist_id, hdferr) - - end subroutine HDF5_writeTensorDataset - -!-------------------------------------------------------------------------------------------------- -!> @brief adds a new vector dataset to the given group location -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_addVectorDataset(group,nnodes,vectorSize,label,SIunit) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - integer(pInt), intent(in) :: nnodes,vectorSize - character(len=*), intent(in) :: SIunit,label - - integer :: hdferr - integer(HID_T) :: space_id, dset_id - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(2, int([vectorSize,Nnodes],HSIZE_T), space_id, hdferr, & - int([vectorSize,Nnodes],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5screate_simple_f') - -!-------------------------------------------------------------------------------------------------- -! create Dataset - call h5dcreate_f(group, trim(label), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5dcreate_f') - call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit)) - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5dclose_f') - call h5sclose_f(space_id, hdferr) - -end subroutine HDF5_addVectorDataset - -!-------------------------------------------------------------------------------------------------- -!> @brief writes to a new scalar dataset in the given group location -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_writeScalarDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - character(len=*), intent(in) :: SIunit,label - integer(pInt), intent(in) :: dataspace_size, mpiOffset - real(pReal), intent(in), dimension(:) :: dataset - - integer :: hdferr, nNodes - integer(HID_T) :: dset_id, space_id, memspace, plist_id - - integer(HSIZE_T), dimension(1) :: counter - integer(HSIZE_T), dimension(1) :: fileOffset - - nNodes = size(dataset) - if (nNodes < 1) return - - call HDF5_addScalarDataset(group,dataspace_size,label,SIunit) - call h5dopen_f(group, label, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dopen_f') - - ! Define and select hyperslabs - counter = size(dataset) ! how big i am - fileOffset = mpiOffset ! 
where i start to write my data - - call h5screate_simple_f(1, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5sselect_hyperslab_f') - - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5pset_dxpl_mpio_f') -#endif - - ! Write the dataset collectively - call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([dataspace_size],HSIZE_T), hdferr, & - file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dwrite_f') - - call h5sclose_f(space_id, hdferr) - call h5sclose_f(memspace, hdferr) - call h5dclose_f(dset_id, hdferr) - call h5pclose_f(plist_id, hdferr) - -end subroutine HDF5_writeScalarDataset !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of the type pReal with 1 dimension @@ -2280,62 +1076,5 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_HDF5_write_pInt7: h5sclose_f' end subroutine HDF5_write_pInt7 -!-------------------------------------------------------------------------------------------------- -!> @brief adds a new scalar dataset to the given group location -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_addScalarDataset(group,nnodes,label,SIunit) - use hdf5 - implicit none - integer(HID_T), intent(in) :: group - integer(pInt), intent(in) :: nnodes - character(len=*), intent(in) :: SIunit,label - - integer :: hdferr - integer(HID_T) :: space_id, dset_id - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, & - int([Nnodes],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5screate_simple_f') - -!-------------------------------------------------------------------------------------------------- -! 
create Dataset - call h5dcreate_f(group, trim(label),H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dcreate_f') - call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit)) - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dclose_f') - call h5sclose_f(space_id, hdferr) - -end subroutine HDF5_addScalarDataset - -!-------------------------------------------------------------------------------------------------- -!> @brief copies the current temp results to the actual results file -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_forwardResults(time) - use hdf5 - use IO, only: & - IO_intOut - - implicit none - integer :: hdferr - integer(HID_T) :: currentIncID - real(pReal), intent(in) :: time - character(len=1024) :: myName - - currentInc = currentInc +1_pInt - write(6,*) 'forward results';flush(6) - write(myName,'(a,'//IO_intOut(currentInc)//')') 'inc',currentInc - currentIncID = HDF5_addGroup(myName) - call HDF5_setLink(myName,'current') -! call HDF5_flush(resultsFile) - call HDF5_closeGroup(currentIncID) - -end subroutine HDF5_forwardResults - -end module HDF5_Utilities \ No newline at end of file +end module HDF5_Utilities From 9b32fe6dbd2582893c83bd127d2b7ef070f83b74 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 15 Nov 2018 07:28:34 +0100 Subject: [PATCH 03/89] MPI file access needed for output --- src/HDF5_utilities.f90 | 59 ++++++++++++------------------------------ 1 file changed, 16 insertions(+), 43 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 1193eb25d..b1f32e805 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -4,7 +4,7 @@ !> @author Jennifer Nastola, Max-Planck-Institut für Eisenforschung GmbH !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH !-------------------------------------------------------------------------------------------------- -module HDF5_Utilities +module HDF5_utilities use prec use IO use HDF5 @@ -14,9 +14,6 @@ module HDF5_Utilities implicit none private - integer(HID_T), public, protected :: tempCoordinates, tempResults - integer(HID_T), private :: resultsFile, currentIncID, plist_id - integer(pInt), private :: currentInc !-------------------------------------------------------------------------------------------------- !> @brief reads pInt or pReal data of defined shape from file @@ -73,7 +70,7 @@ module HDF5_Utilities HDF5_write contains -subroutine HDF5_Utilities_init +subroutine HDF5_utilities_init use, intrinsic :: & iso_fortran_env ! 
to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment) @@ -95,54 +92,21 @@ subroutine HDF5_Utilities_init if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5tget_size_f (double)') if (int(pReal,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pReal does not match H5T_NATIVE_DOUBLE') -end subroutine HDF5_Utilities_init - - - - -!-------------------------------------------------------------------------------------------------- -!> @brief creates and initializes HDF5 output files -!-------------------------------------------------------------------------------------------------- - integer(HID_T) function HDF5_createFile(path) - use hdf5 - use DAMASK_interface, only: & - getSolverJobName - - implicit none - integer :: hdferr - integer(SIZE_T) :: typeSize - character(len=*), intent(in) :: path -#ifdef PETSc -#include -#endif - call h5open_f(hdferr) !############################################################ DANGEROUS -#ifdef PETSc - call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pcreate_f') - call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pset_fapl_mpio_f') -#endif -!-------------------------------------------------------------------------------------------------- -! create a file - !call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr) - call h5fcreate_f(path,H5F_ACC_TRUNC_F,HDF5_createFile,hdferr,access_prp = plist_id) - if (hdferr < 0) call IO_error(100_pInt,ext_msg=path) - !call HDF5_addStringAttribute(HDF5_createFile,'createdBy',DAMASKVERSION) - call h5pclose_f(plist_id, hdferr) !neu - -end function HDF5_createFile - +end subroutine HDF5_utilities_init !-------------------------------------------------------------------------------------------------- !> @brief open and initializes HDF5 output file !-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_openFile(fileName,mode) +integer(HID_T) function HDF5_openFile(fileName,mode,parallel) implicit none character(len=*), intent(in) :: fileName character, intent(in), optional :: mode + logical, intent(in), optional :: parallel + character :: m + integer(HID_T) :: plist_id integer :: hdferr if (present(mode)) then @@ -151,6 +115,15 @@ integer(HID_T) function HDF5_openFile(fileName,mode) m = 'r' endif +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pcreate_f') + call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pset_fapl_mpio_f') + endif; endif +#endif + if (m == 'w') then call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f',el=hdferr) From d5963b403a98b410c18cadc44207c29b02e0b152 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 16 Nov 2018 07:05:43 +0100 Subject: [PATCH 04/89] setting the property when opening the file --- src/HDF5_utilities.f90 | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index b1f32e805..4c29fe980 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -115,28 +115,32 @@ integer(HID_T) 
function HDF5_openFile(fileName,mode,parallel) m = 'r' endif + call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr) + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pcreate_f') + #ifdef PETSc if (present(parallel)) then; if (parallel) then - call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pcreate_f') call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pset_fapl_mpio_f') + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pset_fapl_mpio_f') endif; endif #endif - if (m == 'w') then - call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f',el=hdferr) + if (m == 'w') then + call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr,access_prp = plist_id) + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f',el=hdferr) elseif(m == 'a') then - call h5fopen_f(fileName,H5F_ACC_RDWR_F,HDF5_openFile,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)',el=hdferr) + call h5fopen_f(fileName,H5F_ACC_RDWR_F,HDF5_openFile,hdferr,access_prp = plist_id) + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)',el=hdferr) elseif(m == 'r') then - call h5fopen_f(fileName,H5F_ACC_RDONLY_F,HDF5_openFile,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)',el=hdferr) + call h5fopen_f(fileName,H5F_ACC_RDONLY_F,HDF5_openFile,hdferr,access_prp = plist_id) + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)',el=hdferr) else - call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f unknown access mode',el=hdferr) + call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f unknown access mode') endif + call h5pclose_f(plist_id, hdferr) + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pclose_f') + end function HDF5_openFile From d8a425b4643be043eb2161b4f4c753b5197718e4 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 17 Nov 2018 15:00:51 +0100 Subject: [PATCH 05/89] prevent error h5close_f will give an error if h5open_f was not called. according to the manual, calling h5open_f multiple time is not a problem --- src/quit.f90 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/quit.f90 b/src/quit.f90 index 4219830a5..ad61943e4 100644 --- a/src/quit.f90 +++ b/src/quit.f90 @@ -23,6 +23,8 @@ subroutine quit(stop_id) integer(pInt) :: error = 0_pInt PetscErrorCode :: ierr = 0 + call h5open_f(hdferr) + if (hdferr /= 0) write(6,'(a,i5)') ' Error in h5open_f',hdferr ! 
prevents error if not opened yet call h5close_f(hdferr) if (hdferr /= 0) write(6,'(a,i5)') ' Error in h5close_f',hdferr From 73ca289322f38e748b71ba761e38cb69f6d01ee3 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 17 Nov 2018 16:50:19 +0100 Subject: [PATCH 06/89] F_aim was wrong in case of MPI parallelization --- src/spectral_mech_Basic.f90 | 5 +++++ src/spectral_mech_Polarisation.f90 | 6 +++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/spectral_mech_Basic.f90 b/src/spectral_mech_Basic.f90 index d6f353c91..003c9820d 100644 --- a/src/spectral_mech_Basic.f90 +++ b/src/spectral_mech_Basic.f90 @@ -80,6 +80,7 @@ subroutine basic_init #endif use IO, only: & IO_intOut, & + IO_error, & IO_read_realFile, & IO_timeStamp use debug, only: & @@ -173,7 +174,11 @@ subroutine basic_init call IO_read_realFile(777,'F_aimDot',trim(getSolverJobName()),size(F_aimDot)) read (777,rec=1) F_aimDot; close (777) F_aim = reshape(sum(sum(sum(F,dim=4),dim=3),dim=2) * wgt, [3,3]) ! average of F + call MPI_Allreduce(MPI_IN_PLACE,F_aim,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr) + if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='F_aim') F_aim_lastInc = sum(sum(sum(F_lastInc,dim=5),dim=4),dim=3) * wgt ! average of F_lastInc + call MPI_Allreduce(MPI_IN_PLACE,F_aim_lastInc,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr) + if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='F_aim_lastInc') elseif (restartInc == 0_pInt) then restart F_lastInc = spread(spread(spread(math_I3,3,grid(1)),4,grid(2)),5,grid3) ! initialize to identity F = reshape(F_lastInc,[9,grid(1),grid(2),grid3]) diff --git a/src/spectral_mech_Polarisation.f90 b/src/spectral_mech_Polarisation.f90 index 04f51cb35..b1da2a3f0 100644 --- a/src/spectral_mech_Polarisation.f90 +++ b/src/spectral_mech_Polarisation.f90 @@ -78,7 +78,6 @@ contains !-------------------------------------------------------------------------------------------------- !> @brief allocates all necessary fields and fills them with data, potentially from restart info -!> @todo use sourced allocation, e.g. allocate(Fdot,source = F_lastInc) !-------------------------------------------------------------------------------------------------- subroutine Polarisation_init #if defined(__GFORTRAN__) || __INTEL_COMPILER >= 1800 @@ -88,6 +87,7 @@ subroutine Polarisation_init #endif use IO, only: & IO_intOut, & + IO_error, & IO_read_realFile, & IO_timeStamp use debug, only: & @@ -191,7 +191,11 @@ subroutine Polarisation_init call IO_read_realFile(777,'F_aimDot',trim(getSolverJobName()),size(F_aimDot)) read (777,rec=1) F_aimDot; close (777) F_aim = reshape(sum(sum(sum(F,dim=4),dim=3),dim=2) * wgt, [3,3]) ! average of F + call MPI_Allreduce(MPI_IN_PLACE,F_aim,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr) + if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='F_aim') F_aim_lastInc = sum(sum(sum(F_lastInc,dim=5),dim=4),dim=3) * wgt ! average of F_lastInc + call MPI_Allreduce(MPI_IN_PLACE,F_aim_lastInc,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr) + if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='F_aim_lastInc') elseif (restartInc == 0_pInt) then restart F_lastInc = spread(spread(spread(math_I3,3,grid(1)),4,grid(2)),5,grid3) ! 
initialize to identity F = reshape(F_lastInc,[9,grid(1),grid(2),grid3]) From 8b1785c05ccca92fadb1339255bf2d13a334feec Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 09:47:50 +0100 Subject: [PATCH 07/89] parallel writing for pReal --- src/HDF5_utilities.f90 | 582 ++++++++++++++++++++++++++++++++--------- 1 file changed, 463 insertions(+), 119 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 4c29fe980..e36e39e29 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -542,265 +542,609 @@ subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName) end subroutine HDF5_read_pInt_7 + !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for writing dataset of the type pReal with 1 dimensions +!> @brief subroutine for writing dataset of type pReal with 1 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal1(dataset,loc_id,datasetName) +subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 2 dimensions +!> @brief subroutine for writing dataset of type pReal with 2 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal2(dataset,loc_id,datasetName) +subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 3 dimensions +!> @brief subroutine for writing dataset of type pReal with 3 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal3(dataset,loc_id,datasetName) +subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! 
@brief subroutine for writing dataset of the type pReal with 4 dimensions +!> @brief subroutine for writing dataset of type pReal with 4 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal4(dataset,loc_id,datasetName) +subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 5 dimensions +!> @brief subroutine for writing dataset of type pReal with 5 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal5(dataset,loc_id,datasetName) +subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 6 dimensions +!> @brief subroutine for writing dataset of type pReal with 6 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal6(dataset,loc_id,datasetName) +subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 7 dimensions +!> @brief subroutine for writing dataset of type pReal with 7 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal7(dataset,loc_id,datasetName) +subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! 
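The bodies of the rewritten write routines are elided in this capture, but the signature change tells the story: each `HDF5_write_pReal*` variant gains an optional `parallel` flag and pulls `worldrank`/`worldsize` from `numerics`, so that every rank can write its slab into one globally laid-out dataset. A sketch of the underlying collective-write pattern for the 1-dimensional case, assuming an MPI-enabled HDF5 build (`write_real1_parallel` and its argument list are illustrative, not the module's actual interface):

    subroutine write_real1_parallel(loc_id,datasetName,dataset,comm)
      use hdf5
      use mpi
      implicit none
      integer(HID_T),   intent(in) :: loc_id               ! file or group handle
      character(len=*), intent(in) :: datasetName
      real(8),          intent(in) :: dataset(:)           ! this rank's slab of the global vector
      integer,          intent(in) :: comm

      integer :: hdferr, ierr, worldrank, worldsize
      integer, allocatable :: counts(:)
      integer(HID_T) :: dset_id, filespace, memspace, plist_id
      integer(HSIZE_T) :: localShape(1), globalShape(1), offset(1)

      call MPI_Comm_rank(comm,worldrank,ierr)
      call MPI_Comm_size(comm,worldsize,ierr)
      allocate(counts(worldsize))
      call MPI_Allgather(size(dataset),1,MPI_INTEGER,counts,1,MPI_INTEGER,comm,ierr)

      localShape(1)  = int(size(dataset),HSIZE_T)
      globalShape(1) = int(sum(counts),HSIZE_T)
      offset(1)      = int(sum(counts(1:worldrank)),HSIZE_T) ! slabs of lower ranks come first

      call h5pcreate_f(H5P_DATASET_XFER_F,plist_id,hdferr)    ! request a collective transfer
      call h5pset_dxpl_mpio_f(plist_id,H5FD_MPIO_COLLECTIVE_F,hdferr)
      call h5screate_simple_f(1,globalShape,filespace,hdferr) ! global layout in the file
      call h5screate_simple_f(1,localShape,memspace,hdferr)   ! local layout in memory
      call h5dcreate_f(loc_id,datasetName,H5T_NATIVE_DOUBLE,filespace,dset_id,hdferr)
      call h5sselect_hyperslab_f(filespace,H5S_SELECT_SET_F,offset,localShape,hdferr)
      call h5dwrite_f(dset_id,H5T_NATIVE_DOUBLE,dataset,localShape,hdferr, &
                      mem_space_id=memspace,file_space_id=filespace,xfer_prp=plist_id)

      call h5dclose_f(dset_id,hdferr);  call h5sclose_f(filespace,hdferr)
      call h5sclose_f(memspace,hdferr); call h5pclose_f(plist_id,hdferr)
    end subroutine write_real1_parallel

The higher-dimensional variants differ only in that the last dimension is the decomposed one, so only the offset and the extent along that dimension change per rank.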
@brief subroutine for writing dataset of the type pInt with 1 dimensions !-------------------------------------------------------------------------------------------------- From 366c63e273c2d61779d5a6907c69c5d3b63f31c9 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 11:21:43 +0100 Subject: [PATCH 08/89] CONFIG is a keyword for Cmake --- src/CMakeLists.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 2e4462243..3bb72bb04 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -41,9 +41,9 @@ add_library(DEBUG OBJECT "debug.f90") add_dependencies(DEBUG NUMERICS) list(APPEND OBJECTFILES $) -add_library(CONFIG OBJECT "config.f90") -add_dependencies(CONFIG DEBUG) -list(APPEND OBJECTFILES $) +add_library(DAMASK_CONFIG OBJECT "config.f90") +add_dependencies(DAMASK_CONFIG DEBUG) +list(APPEND OBJECTFILES $) add_library(FEsolving OBJECT "FEsolving.f90") add_dependencies(FEsolving DEBUG) @@ -68,7 +68,7 @@ elseif (PROJECT_NAME STREQUAL "DAMASK_FEM") endif() add_library(MATERIAL OBJECT "material.f90") -add_dependencies(MATERIAL MESH CONFIG) +add_dependencies(MATERIAL MESH DAMASK_CONFIG) list(APPEND OBJECTFILES $) add_library(DAMASK_HELPERS OBJECT "lattice.f90") From 35211a8468cb8028c19df0431d2d5b2eaa55b846 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 11:24:55 +0100 Subject: [PATCH 09/89] compilation order should reflect dependency --- src/CMakeLists.txt | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 3bb72bb04..6e5a808df 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -29,16 +29,16 @@ add_library(IO OBJECT "IO.f90") add_dependencies(IO DAMASK_INTERFACE) list(APPEND OBJECTFILES $) -add_library(HDF5_UTILITIES OBJECT "HDF5_utilities.f90") -add_dependencies(HDF5_UTILITIES IO) -list(APPEND OBJECTFILES $) - add_library(NUMERICS OBJECT "numerics.f90") -add_dependencies(NUMERICS HDF5_UTILITIES) +add_dependencies(NUMERICS IO) list(APPEND OBJECTFILES $) +add_library(HDF5_UTILITIES OBJECT "HDF5_utilities.f90") +add_dependencies(HDF5_UTILITIES NUMERICS) +list(APPEND OBJECTFILES $) + add_library(DEBUG OBJECT "debug.f90") -add_dependencies(DEBUG NUMERICS) +add_dependencies(DEBUG HDF5_UTILITIES) list(APPEND OBJECTFILES $) add_library(DAMASK_CONFIG OBJECT "config.f90") From fb5b1bfd8e6a5117c9a0f51e8b2765636b71ffcc Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 11:32:53 +0100 Subject: [PATCH 10/89] compile results module --- src/CMakeLists.txt | 16 +- src/results.f90 | 919 +-------------------------------------------- 2 files changed, 17 insertions(+), 918 deletions(-) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 6e5a808df..90fb291fd 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -33,20 +33,24 @@ add_library(NUMERICS OBJECT "numerics.f90") add_dependencies(NUMERICS IO) list(APPEND OBJECTFILES $) -add_library(HDF5_UTILITIES OBJECT "HDF5_utilities.f90") -add_dependencies(HDF5_UTILITIES NUMERICS) -list(APPEND OBJECTFILES $) - add_library(DEBUG OBJECT "debug.f90") -add_dependencies(DEBUG HDF5_UTILITIES) +add_dependencies(DEBUG NUMERICS) list(APPEND OBJECTFILES $) add_library(DAMASK_CONFIG OBJECT "config.f90") add_dependencies(DAMASK_CONFIG DEBUG) list(APPEND OBJECTFILES $) +add_library(HDF5_UTILITIES OBJECT "HDF5_utilities.f90") +add_dependencies(HDF5_UTILITIES DAMASK_CONFIG) +list(APPEND OBJECTFILES $) + +add_library(RESULTS OBJECT "results.f90") +add_dependencies(RESULTS 
HDF5_UTILITIES) +list(APPEND OBJECTFILES $) + add_library(FEsolving OBJECT "FEsolving.f90") -add_dependencies(FEsolving DEBUG) +add_dependencies(FEsolving RESULTS) list(APPEND OBJECTFILES $) add_library(DAMASK_MATH OBJECT "math.f90") diff --git a/src/results.f90 b/src/results.f90 index 43a7a26e8..855fc5128 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -4,7 +4,7 @@ !> @author Jennifer Nastola, Max-Planck-Institut für Eisenforschung GmbH !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH !-------------------------------------------------------------------------------------------------- -module HDF5_Utilities +module results use prec use IO use HDF5 @@ -18,52 +18,9 @@ module HDF5_Utilities integer(HID_T), private :: resultsFile, currentIncID, plist_id integer(pInt), private :: currentInc -!-------------------------------------------------------------------------------------------------- -!> @brief reads pInt or pReal data of defined shape from file -!-------------------------------------------------------------------------------------------------- - interface HDF5_read - module procedure HDF5_read_pReal_1 - module procedure HDF5_read_pReal_2 - module procedure HDF5_read_pReal_3 - module procedure HDF5_read_pReal_4 - module procedure HDF5_read_pReal_5 - module procedure HDF5_read_pReal_6 - module procedure HDF5_read_pReal_7 - - module procedure HDF5_read_pInt_1 - module procedure HDF5_read_pInt_2 - module procedure HDF5_read_pInt_3 - module procedure HDF5_read_pInt_4 - module procedure HDF5_read_pInt_5 - module procedure HDF5_read_pInt_6 - module procedure HDF5_read_pInt_7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK - - end interface HDF5_read - -!-------------------------------------------------------------------------------------------------- -!> @brief writes pInt or pReal data of defined shape to file -!-------------------------------------------------------------------------------------------------- - interface HDF5_write - module procedure HDF5_write_pReal1 - module procedure HDF5_write_pReal2 - module procedure HDF5_write_pReal3 - module procedure HDF5_write_pReal4 - module procedure HDF5_write_pReal5 - module procedure HDF5_write_pReal6 - module procedure HDF5_write_pReal7 - - module procedure HDF5_write_pInt1 - module procedure HDF5_write_pInt2 - module procedure HDF5_write_pInt3 - module procedure HDF5_write_pInt4 - module procedure HDF5_write_pInt5 - module procedure HDF5_write_pInt6 - module procedure HDF5_write_pInt7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK - - end interface HDF5_write public :: & - HDF5_Utilities_init, & + results_init, & HDF5_mappingPhase, & HDF5_mappingHomog, & HDF5_mappingCrystallite, & @@ -83,38 +40,21 @@ module HDF5_Utilities HDF5_removeLink, & HDF5_createFile, & HDF5_closeFile, & - HDF5_addGroup2, & - HDF5_openFile, & - HDF5_read, & - HDF5_write + HDF5_addGroup2 contains -subroutine HDF5_Utilities_init +subroutine results_init use, intrinsic :: & iso_fortran_env ! 
to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment) implicit none - integer :: hdferr - integer(SIZE_T) :: typeSize write(6,'(/,a)') ' <<<+- HDF5_Utilities init -+>>>' #include "compilation_info.f90" - !currentInc = -1_pInt ToDo - !call HDF5_createJobFile ToDo + currentInc = -1_pInt -!-------------------------------------------------------------------------------------------------- -!initialize HDF5 library and check if integer and float type size match - call h5open_f(hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5open_f') - call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5tget_size_f (int)') - if (int(pInt,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pInt does not match H5T_NATIVE_INTEGER') - call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5tget_size_f (double)') - if (int(pReal,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pReal does not match H5T_NATIVE_DOUBLE') - -end subroutine HDF5_Utilities_init +end subroutine results_init !-------------------------------------------------------------------------------------------------- !> @brief creates and initializes HDF5 output files @@ -1435,851 +1375,6 @@ subroutine HDF5_writeScalarDataset(group,dataset,label,SIunit,dataspace_size,mpi end subroutine HDF5_writeScalarDataset -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 1 dimension -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_1(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dclose_f') - -end subroutine HDF5_read_pReal_1 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 2 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_2(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call 
IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dclose_f') - -end subroutine HDF5_read_pReal_2 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 3 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_3(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dclose_f') - -end subroutine HDF5_read_pReal_3 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 4 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_4(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dclose_f') - -end subroutine HDF5_read_pReal_4 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 5 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if 
(hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dclose_f') - -end subroutine HDF5_read_pReal_5 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 6 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_6(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dclose_f') - -end subroutine HDF5_read_pReal_6 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 7 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_7(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dclose_f') - -end subroutine HDF5_read_pReal_7 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 1 dimension -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_1(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dclose_f') - -end subroutine 
HDF5_read_pInt_1 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 2 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_2(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dclose_f') - -end subroutine HDF5_read_pInt_2 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 3 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_3(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dclose_f') - -end subroutine HDF5_read_pInt_3 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 4 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_4(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dclose_f') - -end subroutine HDF5_read_pInt_4 - 
-!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 5 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_5(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dclose_f') - -end subroutine HDF5_read_pInt_5 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 6 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_6(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dclose_f') - -end subroutine HDF5_read_pInt_6 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 7 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dclose_f') - -end subroutine HDF5_read_pInt_7 - 
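All of the read variants removed here (and their `pReal` counterparts above) follow one open/read/close pattern, with the transfer size taken from the shape of the dummy argument. A minimal standalone version of that pattern, with placeholder file and dataset names:

    program read_demo
      use hdf5
      implicit none
      integer :: hdferr
      integer(HID_T) :: file_id, dset_id
      integer, dimension(4) :: dataset                     ! the buffer dictates the shape
      integer(HSIZE_T), dimension(1) :: myShape

      myShape = int(shape(dataset),HSIZE_T)
      call h5open_f(hdferr)
      call h5fopen_f('job.hdf5',H5F_ACC_RDONLY_F,file_id,hdferr)
      call h5dopen_f(file_id,'mapping',dset_id,hdferr)     ! open an existing dataset
      call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,myShape,hdferr)
      if (hdferr /= 0) error stop 'h5dread_f failed'
      call h5dclose_f(dset_id,hdferr)
      call h5fclose_f(file_id,hdferr)
      call h5close_f(hdferr)
    end program read_demo

Because the shape comes from the caller's buffer rather than from the file, a mismatch between the two is not caught here; that is a known limitation of this simple pattern.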
-!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for writing dataset of the type pReal with 1 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal1(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 2 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal2(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 3 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal3(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 4 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal4(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 5 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal5(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pReal with 6 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal6(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! 
@brief subroutine for writing dataset of the type pReal with 7 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pReal7(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 1 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt1(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 2 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt2(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 3 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt3(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 4 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt4(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 5 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt5(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! 
@brief subroutine for writing dataset of the type pInt with 6 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt6(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 7 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt7(dataset,loc_id,datasetName) - - implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - - integer(pInt), dimension(:), allocatable :: myShape ! @brief adds a new scalar dataset to the given group location !-------------------------------------------------------------------------------------------------- @@ -2338,4 +1433,4 @@ subroutine HDF5_forwardResults(time) end subroutine HDF5_forwardResults -end module HDF5_Utilities \ No newline at end of file +end module results From dfd624e520ca5b77660beb35a6b2a42208781ea7 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 11:58:49 +0100 Subject: [PATCH 11/89] create results file --- src/CPFEM2.f90 | 3 + src/results.f90 | 209 ++++++------------------------------------------ 2 files changed, 29 insertions(+), 183 deletions(-) diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90 index 2aed858a7..126e9240b 100644 --- a/src/CPFEM2.f90 +++ b/src/CPFEM2.f90 @@ -39,6 +39,8 @@ subroutine CPFEM_initAll() material_init use HDF5_utilities, only: & HDF5_utilities_init + use results, only: & + results_init use lattice, only: & lattice_init use constitutive, only: & @@ -73,6 +75,7 @@ subroutine CPFEM_initAll() call lattice_init call material_init call HDF5_utilities_init + call results_init call constitutive_init call crystallite_init call homogenization_init diff --git a/src/results.f90 b/src/results.f90 index 855fc5128..f667edb10 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -8,6 +8,7 @@ module results use prec use IO use HDF5 + use HDF5_utilities #ifdef PETSc use PETSC #endif @@ -31,187 +32,29 @@ module results HDF5_addGroup ,& HDF5_closeGroup ,& HDF5_openGroup, & - HDF5_openGroup2, & HDF5_forwardResults, & HDF5_writeVectorDataset, & HDF5_writeScalarDataset, & HDF5_writeTensorDataset, & HDF5_closeJobFile, & - HDF5_removeLink, & - HDF5_createFile, & - HDF5_closeFile, & - HDF5_addGroup2 + HDF5_removeLink contains subroutine results_init use, intrinsic :: & iso_fortran_env ! 
to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment) - + use DAMASK_interface, only: & + getSolverJobName implicit none - write(6,'(/,a)') ' <<<+- HDF5_Utilities init -+>>>' + write(6,'(/,a)') ' <<<+- results init -+>>>' #include "compilation_info.f90" currentInc = -1_pInt + call HDF5_closeFile(HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)) end subroutine results_init -!-------------------------------------------------------------------------------------------------- -!> @brief creates and initializes HDF5 output files -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_createJobFile - use hdf5 - use DAMASK_interface, only: & - getSolverJobName - - implicit none - integer :: hdferr - character(len=1024) :: path -#ifdef PETSc -#include - - call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pcreate_f') - call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pset_fapl_mpio_f') -#endif - -!-------------------------------------------------------------------------------------------------- -! open file - path = trim(getSolverJobName())//'.'//'hdf5' - !call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr) - call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr,access_prp = plist_id) - if (hdferr < 0) call IO_error(100_pInt,ext_msg=path) - call HDF5_addStringAttribute(resultsFile,'createdBy',DAMASKVERSION) - call h5pclose_f(plist_id, hdferr) !neu - -end subroutine HDF5_createJobFile - - -!-------------------------------------------------------------------------------------------------- -!> @brief creates and initializes HDF5 output files -!-------------------------------------------------------------------------------------------------- - integer(HID_T) function HDF5_createFile(path) - use hdf5 - use DAMASK_interface, only: & - getSolverJobName - - implicit none - integer :: hdferr - integer(SIZE_T) :: typeSize - character(len=*), intent(in) :: path -#ifdef PETSc -#include -#endif - call h5open_f(hdferr) !############################################################ DANGEROUS -#ifdef PETSc - call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pcreate_f') - call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_Utilities_init: h5pset_fapl_mpio_f') -#endif -!-------------------------------------------------------------------------------------------------- -! 
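The job-file handling being consolidated here hinges on one detail visible in the deleted code: under PETSc, the file is created with an MPI-IO file-access property list, so all ranks share a single results file instead of each writing its own. The essential sequence as a self-contained sketch (requires an HDF5 build with parallel support; 'job.hdf5' is a placeholder):

    program parallel_file_demo
      use hdf5
      use mpi
      implicit none
      integer :: hdferr, ierr
      integer(HID_T) :: plist_id, file_id

      call MPI_Init(ierr)
      call h5open_f(hdferr)                                 ! initialize the HDF5 library
      call h5pcreate_f(H5P_FILE_ACCESS_F,plist_id,hdferr)   ! file-access property list
      call h5pset_fapl_mpio_f(plist_id,MPI_COMM_WORLD,MPI_INFO_NULL,hdferr)
      call h5fcreate_f('job.hdf5',H5F_ACC_TRUNC_F,file_id,hdferr,access_prp=plist_id)
      call h5pclose_f(plist_id,hdferr)                      ! the open file keeps the access mode
      call h5fclose_f(file_id,hdferr)
      call h5close_f(hdferr)
      call MPI_Finalize(ierr)
    end program parallel_file_demo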
create a file - !call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr) - call h5fcreate_f(path,H5F_ACC_TRUNC_F,HDF5_createFile,hdferr,access_prp = plist_id) - if (hdferr < 0) call IO_error(100_pInt,ext_msg=path) - !call HDF5_addStringAttribute(HDF5_createFile,'createdBy',DAMASKVERSION) - call h5pclose_f(plist_id, hdferr) !neu - -end function HDF5_createFile - -!-------------------------------------------------------------------------------------------------- -!> @brief close the opened HDF5 output file -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_closeJobFile() - use hdf5 - - implicit none - integer :: hdferr - call HDF5_removeLink('current') - call h5fclose_f(resultsFile,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeJobFile: h5fclose_f',el=hdferr) -! call h5close_f(hdferr) - -end subroutine HDF5_closeJobFile - -!-------------------------------------------------------------------------------------------------- -!> @brief open and initializes HDF5 output file -!-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_openFile(fileName,mode) - - implicit none - character(len=*), intent(in) :: fileName - character, intent(in), optional :: mode - character :: m - integer :: hdferr - - if (present(mode)) then - m = mode - else - m = 'r' - endif - - if (m == 'w') then - call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f',el=hdferr) - elseif(m == 'a') then - call h5fopen_f(fileName,H5F_ACC_RDWR_F,HDF5_openFile,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)',el=hdferr) - elseif(m == 'r') then - call h5fopen_f(fileName,H5F_ACC_RDONLY_F,HDF5_openFile,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)',el=hdferr) - else - call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f unknown access mode',el=hdferr) - endif - -end function HDF5_openFile - -!-------------------------------------------------------------------------------------------------- -!> @brief close the opened HDF5 output file -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_closeFile(fileHandle) - - implicit none - integer :: hdferr - integer(HID_T), intent(in) :: fileHandle - call h5fclose_f(fileHandle,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeFile: h5fclose_f',el=hdferr) - -end subroutine HDF5_closeFile - -!-------------------------------------------------------------------------------------------------- -!> @brief adds a new group to the results file -!-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_addGroup(groupName) - use hdf5 - - implicit none - character(len=*), intent(in) :: groupName - integer :: hdferr - - call h5gcreate_f(resultsFile, trim(groupName), HDF5_addGroup, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')') - -end function HDF5_addGroup - - -!-------------------------------------------------------------------------------------------------- -!> @brief adds a new group to the fileHandle (additional to addGroup2) -!-------------------------------------------------------------------------------------------------- -integer(HID_T) function 
HDF5_addGroup2(fileHandle,groupName) - use hdf5 - - implicit none - character(len=*), intent(in) :: groupName - integer(HID_T), intent(in) :: fileHandle - integer :: hdferr - - call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup2, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup2: h5gcreate_f ('//trim(groupName)//')') - -end function HDF5_addGroup2 - !-------------------------------------------------------------------------------------------------- !> @brief open a group from the results file @@ -229,20 +72,34 @@ integer(HID_T) function HDF5_openGroup(groupName) end function HDF5_openGroup !-------------------------------------------------------------------------------------------------- -!> @brief open an existing group of a file +!> @brief close the opened HDF5 output file !-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_openGroup2(FileReadID,groupName) +subroutine HDF5_closeJobFile() + use hdf5 + + implicit none + integer :: hdferr + call HDF5_removeLink('current') + call h5fclose_f(resultsFile,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeJobFile: h5fclose_f',el=hdferr) +! call h5close_f(hdferr) + +end subroutine HDF5_closeJobFile + +!-------------------------------------------------------------------------------------------------- +!> @brief adds a new group to the results file +!-------------------------------------------------------------------------------------------------- +integer(HID_T) function HDF5_addGroup(groupName) use hdf5 implicit none character(len=*), intent(in) :: groupName integer :: hdferr - integer(HID_T), intent(in) :: FileReadID - call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5gopen_f ('//trim(groupName)//')') + call h5gcreate_f(resultsFile, trim(groupName), HDF5_addGroup, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')') -end function HDF5_openGroup2 +end function HDF5_addGroup !-------------------------------------------------------------------------------------------------- !> @brief set link to object in results file @@ -281,20 +138,6 @@ subroutine HDF5_removeLink(link) end subroutine HDF5_removeLink -!-------------------------------------------------------------------------------------------------- -!> @brief close a group -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_closeGroup(ID) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: ID - integer :: hdferr - - call h5gclose_f(ID, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_closeGroup: h5gclose_f (el is ID)', el = int(ID,pInt)) - -end subroutine HDF5_closeGroup !-------------------------------------------------------------------------------------------------- !> @brief adds a StringAttribute to the results file From fcb14f6099478d7b0c22c6d1081458d20e9aed36 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 12:04:08 +0100 Subject: [PATCH 12/89] was never used --- processing/post/ascii2hdf5.py | 142 ---------------------------------- 1 file changed, 142 deletions(-) delete mode 100755 processing/post/ascii2hdf5.py diff --git a/processing/post/ascii2hdf5.py b/processing/post/ascii2hdf5.py deleted file mode 100755 index effac981b..000000000 --- a/processing/post/ascii2hdf5.py +++ /dev/null @@ -1,142 +0,0 @@ 
-#!/usr/bin/env python2.7 -# -*- coding: UTF-8 no BOM -*- - -# ------------------------------------------------------------------- # -# NOTE: # -# 1. Not all output is defined in the DS_HDF5.xml, please add new # -# new one to the system wide definition file # -# /lib/damask/DS_HDF5.xml # -# or specify your own when initializing HDF5 class # -# 2. Somehow the point cloud structure cannot be properly handled # -# by Xdmf, which is a descriptive wrapper for visualizing HDF5 # -# using Paraview. The current solution is using cell structured # -# HDF5 so that Xdmf can describe the data shape as a rectangular # -# mesh rather than polyvertex. # -# TODO: # -# 1. remove the ._tmp file, basically need a way to # -# just load data from ASCII table. # -# 2. a progress monitor when transferring data from ASCII table # -# to HDF5. # -# 3. a more flexible way handle the data structure rather than a # -# xml file. # -# ------------------------------------------------------------------- # - -import os -import damask -import numpy as np -from optparse import OptionParser - - -scriptName = os.path.splitext(os.path.basename(__file__))[0] -scriptID = ' '.join([scriptName, damask.version]) - - -# ----- helper function ----- # -def get_rectMshVectors(xyz_array, posNum): - """Get Vx, Vy, Vz for rectLinear grid""" - # need some improvement, and only works for rectangular grid - v = sorted(list(set(xyz_array[:, posNum]))) - v_interval = (v[2]+v[1])/2.0 - (v[1]+v[0])/2.0 - v_start = (v[1]+v[0])/2.0 - v_interval - v_end = (v[-1]+v[-2])/2.0 + v_interval - V = np.linspace(v_start, v_end, len(v)+1) - return V - - -# ----- MAIN ---- # -desp_msg = "Convert DAMASK ascii table to HDF5 file" -parser = OptionParser(option_class=damask.extendableOption, - usage='%prog options [file[s]]', - description=desp_msg, - version=scriptID) -parser.add_option('-D', '--DefinitionFile', - dest='storage definition file', - type='string', - metavar='string', - help='definition file for H5 data storage') -parser.add_option('-p', '--pos', '--position', - dest='pos', - type='string', metavar='string', - help='label of coordinates [%default]') - -parser.set_defaults(DefinitionFile='default', - pos='pos') - -(options, filenames) = parser.parse_args() - -filename = filenames[0] - -if options.DefinitionFile == 'default': - defFile = None -else: - defFile = options.DefinitionFile - -# ----- read in data using DAMASK ASCII table class ----- # -asciiTable = damask.ASCIItable(name=filename, buffered=False) -asciiTable.head_read() -asciiTable.data_readArray() -incNum = int(asciiTable.data[asciiTable.label_index('inc'), 0]) -fullTable = np.copy(asciiTable.data) # deep copy all data, just to be safe -labels = asciiTable.labels() -labels_idx = [asciiTable.label_index(label) for label in labels] -featuresDim = [labels_idx[i+1] - labels_idx[i] for i in range(len(labels)-1)] -featuresDim.append(fullTable.shape[1] - labels_idx[-1]) - -# ----- figure out size and grid ----- # -pos_idx = asciiTable.label_index('pos') -xyz_array = asciiTable.data[:, pos_idx:pos_idx+3] -Vx = get_rectMshVectors(xyz_array, 0) -Vy = get_rectMshVectors(xyz_array, 1) -Vz = get_rectMshVectors(xyz_array, 2) -# use the dimension of the rectangular grid to reshape all other data -mshGridDim = [len(Vx)-1, len(Vy)-1, len(Vz)-1] - -# ----- compose cmd log ----- # -cmd_log = " ".join([scriptID, filename]) - -# ----- create a new HDF5 file and save the data -----# -# force remove existing HDF5 file -h5fName = filename.replace(".txt", ".h5") -try: - os.remove(h5fName) -except OSError: - pass 
-h5f = damask.H5Table(h5fName, - new_file=True, - dsXMLFile=defFile) -# adding increment number as root level attributes -h5f.add_attr('inc', incNum) -# add the mesh grid data now -h5f.add_data("Vx", Vx, cmd_log=cmd_log) -h5f.add_data("Vy", Vy, cmd_log=cmd_log) -h5f.add_data("Vz", Vz, cmd_log=cmd_log) - -# add the rest of data from table -labelsProcessed = ['inc'] -for fi in range(len(labels)): - featureName = labels[fi] - # remove trouble maker "("" and ")" from label/feature name - if "(" in featureName: - featureName = featureName.replace("(", "") - if ")" in featureName: - featureName = featureName.replace(")", "") - # skip increment and duplicated columns in the ASCII table - if featureName in labelsProcessed: - continue - - featureIdx = labels_idx[fi] - featureDim = featuresDim[fi] - # grab the data hook - dataset = fullTable[:, featureIdx:featureIdx+featureDim] - # mapping 2D data onto a 3D rectangular mesh to get 4D data - # WARNING: In paraview, the data for a recmesh is mapped as: - # --> len(z), len(y), len(x), size(data) - # dataset = dataset.reshape((mshGridDim[0], - # mshGridDim[1], - # mshGridDim[2], - # dataset.shape[1])) - # write out data - print("adding {}...".format(featureName)) - h5f.add_data(featureName, dataset, cmd_log=cmd_log) - # write down the processed label - labelsProcessed.append(featureName) From 2be01e7bea6c5b5aba08d86aedbb681c9288cb9c Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 12:04:49 +0100 Subject: [PATCH 13/89] results files should not be part of the repository --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 22c568409..2a118ef29 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.pyc *.mod *.o +*.hdf5 *.exe *.bak *~ From dd6f6bba9d108dbf294c6772b397360585d43b79 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 12:41:05 +0100 Subject: [PATCH 14/89] planning the forwarding of the results --- src/DAMASK_spectral.f90 | 5 ++ src/HDF5_utilities.f90 | 71 ++++++++++++++++++++++++- src/results.f90 | 112 ++++++++++++++-------------------------- 3 files changed, 113 insertions(+), 75 deletions(-) diff --git a/src/DAMASK_spectral.f90 b/src/DAMASK_spectral.f90 index d6827543a..1e75f2761 100644 --- a/src/DAMASK_spectral.f90 +++ b/src/DAMASK_spectral.f90 @@ -80,6 +80,7 @@ program DAMASK_spectral use spectral_mech_Polarisation use spectral_damage use spectral_thermal + use results implicit none @@ -157,6 +158,10 @@ program DAMASK_spectral write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" + + call results_openJobFile() + call results_addIncrement() + call results_closeJobFile() !-------------------------------------------------------------------------------------------------- ! 
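Patch 14's subject, "planning the forwarding of the results", refers to the scheme sketched by the soon-to-be-deleted `HDF5_forwardResults` further below: every increment gets its own group, and a link named `current` is repointed at the newest one so downstream tools can always read a fixed path. A hedged reconstruction of that bookkeeping with plain HDF5 calls (`forward_to` is an illustrative name, not part of the module):

    subroutine forward_to(file_id,incName)
      use hdf5
      implicit none
      integer(HID_T),   intent(in) :: file_id
      character(len=*), intent(in) :: incName               ! e.g. 'inc42'
      integer        :: hdferr
      integer(HID_T) :: group_id
      logical        :: exists

      call h5gcreate_f(file_id,incName,group_id,hdferr)     ! new increment group
      call h5gclose_f(group_id,hdferr)
      call h5lexists_f(file_id,'current',exists,hdferr)
      if (exists) call h5ldelete_f(file_id,'current',hdferr)! drop the stale link
      call h5lcreate_soft_f(incName,file_id,'current',hdferr)
    end subroutine forward_to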
initialize field solver information nActiveFields = 1 diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index e36e39e29..bef73f30f 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -61,11 +61,13 @@ module HDF5_utilities public :: & HDF5_utilities_init, & + HDF5_openFile, & + HDF5_closeFile, & + HDF5_addStringAttribute, & + HDF5_addIntegerAttribute, & HDF5_closeGroup ,& HDF5_openGroup2, & - HDF5_closeFile, & HDF5_addGroup2, & - HDF5_openFile, & HDF5_read, & HDF5_write contains @@ -208,6 +210,71 @@ subroutine HDF5_closeGroup(ID) end subroutine HDF5_closeGroup +!-------------------------------------------------------------------------------------------------- +!> @brief adds a StringAttribute to the results file +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_addStringAttribute(entity,attrLabel,attrValue) + use hdf5 + + implicit none + integer(HID_T), intent(in) :: entity + character(len=*), intent(in) :: attrLabel, attrValue + integer :: hdferr + integer(HID_T) :: attr_id, space_id, type_id + + call h5screate_f(H5S_SCALAR_F,space_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5screate_f') + call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5tcopy_f') + call h5tset_size_f(type_id, int(len(trim(attrValue)),HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5tset_size_f') + call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5acreate_f') + call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5awrite_f') + call h5aclose_f(attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5aclose_f') + call h5tclose_f(type_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5tclose_f') + call h5sclose_f(space_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5sclose_f') + +end subroutine HDF5_addStringAttribute + + +!-------------------------------------------------------------------------------------------------- +!> @brief adds a StringAttribute to the results file +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue) + use hdf5 + + implicit none + integer(HID_T), intent(in) :: entity + character(len=*), intent(in) :: attrLabel + integer(pInt), intent(in) :: attrValue + integer :: hdferr + integer(HID_T) :: attr_id, space_id, type_id + + call h5screate_f(H5S_SCALAR_F,space_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5screate_f') + call h5tcopy_f(H5T_NATIVE_Integer, type_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5tcopy_f') + call h5tset_size_f(type_id, 1_HSIZE_T, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5tset_size_f') + call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5acreate_f') + call h5awrite_f(attr_id, type_id, attrValue, int([1],HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: 
h5awrite_f') + call h5aclose_f(attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5aclose_f') + call h5tclose_f(type_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5tclose_f') + call h5sclose_f(space_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5sclose_f') + +end subroutine HDF5_addIntegerAttribute + + !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of the type pReal with 1 dimension !-------------------------------------------------------------------------------------------------- diff --git a/src/results.f90 b/src/results.f90 index f667edb10..2a695c55c 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -17,11 +17,13 @@ module results private integer(HID_T), public, protected :: tempCoordinates, tempResults integer(HID_T), private :: resultsFile, currentIncID, plist_id - integer(pInt), private :: currentInc public :: & results_init, & + results_openJobFile, & + results_closeJobFile, & + results_addIncrement, & HDF5_mappingPhase, & HDF5_mappingHomog, & HDF5_mappingCrystallite, & @@ -32,11 +34,9 @@ module results HDF5_addGroup ,& HDF5_closeGroup ,& HDF5_openGroup, & - HDF5_forwardResults, & HDF5_writeVectorDataset, & HDF5_writeScalarDataset, & HDF5_writeTensorDataset, & - HDF5_closeJobFile, & HDF5_removeLink contains @@ -50,12 +50,47 @@ subroutine results_init write(6,'(/,a)') ' <<<+- results init -+>>>' #include "compilation_info.f90" - currentInc = -1_pInt call HDF5_closeFile(HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)) end subroutine results_init +!-------------------------------------------------------------------------------------------------- +!> @brief opens the results file to append data +!-------------------------------------------------------------------------------------------------- +subroutine results_openJobFile() + use DAMASK_interface, only: & + getSolverJobName + implicit none + + resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','a',.true.) 
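`results_addIncrement` below exercises the new attribute helper by stamping a test attribute onto the file root. Note that `HDF5_addIntegerAttribute` above copies `H5T_NATIVE_INTEGER` and then calls `h5tset_size_f(type_id,1_HSIZE_T,...)`, which (if it succeeds) shrinks the copied integer type to a single byte; writing directly with the native integer type, as sketched here, sidesteps that truncation (`stamp_increment` and the attribute name are illustrative):

    subroutine stamp_increment(file_id,inc)
      use hdf5
      implicit none
      integer(HID_T), intent(in) :: file_id
      integer,        intent(in) :: inc
      integer :: hdferr
      integer(HID_T) :: space_id, attr_id

      call h5screate_f(H5S_SCALAR_F,space_id,hdferr)        ! scalar dataspace
      call h5acreate_f(file_id,'currentInc',H5T_NATIVE_INTEGER,space_id,attr_id,hdferr)
      call h5awrite_f(attr_id,H5T_NATIVE_INTEGER,inc,int([1],HSIZE_T),hdferr)
      call h5aclose_f(attr_id,hdferr)
      call h5sclose_f(space_id,hdferr)
    end subroutine stamp_increment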
+
+end subroutine results_openJobFile
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief closes the results file
+!--------------------------------------------------------------------------------------------------
+subroutine results_closeJobFile()
+ use DAMASK_interface, only: &
+ getSolverJobName
+ implicit none
+
+ call HDF5_closeFile(resultsFile)
+
+end subroutine results_closeJobFile
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a new increment to the results file
+!--------------------------------------------------------------------------------------------------
+subroutine results_addIncrement()
+ implicit none
+
+ call HDF5_addIntegerAttribute(resultsFile,'test',1)
+
+end subroutine results_addIncrement
+
 !--------------------------------------------------------------------------------------------------
 !> @brief open a group from the results file
 !--------------------------------------------------------------------------------------------------
@@ -71,20 +106,6 @@ integer(HID_T) function HDF5_openGroup(groupName)
 end function HDF5_openGroup
-!--------------------------------------------------------------------------------------------------
-!> @brief close the opened HDF5 output file
-!--------------------------------------------------------------------------------------------------
-subroutine HDF5_closeJobFile()
- use hdf5
-
- implicit none
- integer :: hdferr
- call HDF5_removeLink('current')
- call h5fclose_f(resultsFile,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeJobFile: h5fclose_f',el=hdferr)
-! call h5close_f(hdferr)
-
-end subroutine HDF5_closeJobFile
 !--------------------------------------------------------------------------------------------------
 !> @brief adds a new group to the results file
@@ -139,37 +160,6 @@ subroutine HDF5_removeLink(link)
 end subroutine HDF5_removeLink
-!--------------------------------------------------------------------------------------------------
-!> @brief adds a StringAttribute to the results file
-!--------------------------------------------------------------------------------------------------
-subroutine HDF5_addStringAttribute(entity,attrLabel,attrValue)
- use hdf5
-
- implicit none
- integer(HID_T), intent(in) :: entity
- character(len=*), intent(in) :: attrLabel, attrValue
- integer :: hdferr
- integer(HID_T) :: attr_id, space_id, type_id
-
- call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5screate_f')
- call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tcopy_f')
- call h5tset_size_f(type_id, int(len(trim(attrValue)),HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tset_size_f')
- call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5acreate_f')
- call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5awrite_f')
- call h5aclose_f(attr_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5aclose_f')
- call h5tclose_f(type_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tclose_f')
- call h5sclose_f(space_id,hdferr)
- if (hdferr < 0) call 
IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5sclose_f') - -end subroutine HDF5_addStringAttribute - !-------------------------------------------------------------------------------------------------- !> @brief adds the unique mapping from spatial position and constituent ID to results !-------------------------------------------------------------------------------------------------- @@ -1252,28 +1242,4 @@ subroutine HDF5_addScalarDataset(group,nnodes,label,SIunit) end subroutine HDF5_addScalarDataset -!-------------------------------------------------------------------------------------------------- -!> @brief copies the current temp results to the actual results file -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_forwardResults(time) - use hdf5 - use IO, only: & - IO_intOut - - implicit none - integer :: hdferr - integer(HID_T) :: currentIncID - real(pReal), intent(in) :: time - character(len=1024) :: myName - - currentInc = currentInc +1_pInt - write(6,*) 'forward results';flush(6) - write(myName,'(a,'//IO_intOut(currentInc)//')') 'inc',currentInc - currentIncID = HDF5_addGroup(myName) - call HDF5_setLink(myName,'current') -! call HDF5_flush(resultsFile) - call HDF5_closeGroup(currentIncID) - -end subroutine HDF5_forwardResults - end module results From 2b96ea3da52a4987b6d1976ab3d2b2d2f477975b Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 18 Nov 2018 20:17:07 +0100 Subject: [PATCH 15/89] testing restart with MPI --- .gitlab-ci.yml | 9 +++++++++ PRIVATE | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 5fafb19c0..186e73dc5 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -321,6 +321,15 @@ Spectral_MPI: - master - release +SpectralAll_restartMPI: + stage: spectral + script: + - module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel + - SpectralAll_restartMPI/test.py + except: + - master + - release + Plasticity_DetectChanges: stage: spectral script: Plasticity_DetectChanges/test.py diff --git a/PRIVATE b/PRIVATE index ee5a63d34..d3bc62220 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit ee5a63d34abbac295207354fddf30e6d7cc258cd +Subproject commit d3bc62220544da0a3198c521e0f73fa07898d357 From c0481307ee166ede1432565d5d5b914c5ba45eb3 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Tue, 20 Nov 2018 11:54:51 +0100 Subject: [PATCH 16/89] Changed the intent of input argument of dataset to inout --- src/HDF5_utilities.f90 | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index bef73f30f..c44b56729 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -619,7 +619,7 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) worldsize implicit none - real(pReal), intent(out), dimension(:) :: dataset + real(pReal), intent(inout), dimension(:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel @@ -705,7 +705,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) worldsize implicit none - real(pReal), intent(out), dimension(:,:) :: dataset + real(pReal), intent(inout), dimension(:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: 
parallel @@ -791,7 +791,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) worldsize implicit none - real(pReal), intent(out), dimension(:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel @@ -877,7 +877,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) worldsize implicit none - real(pReal), intent(out), dimension(:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel @@ -963,7 +963,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) worldsize implicit none - real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel @@ -1049,7 +1049,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) worldsize implicit none - real(pReal), intent(out), dimension(:,:,:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel @@ -1135,7 +1135,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) worldsize implicit none - real(pReal), intent(out), dimension(:,:,:,:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel @@ -1218,7 +1218,7 @@ end subroutine HDF5_write_pReal7 subroutine HDF5_write_pInt1(dataset,loc_id,datasetName) implicit none - integer(pInt), intent(out), dimension(:) :: dataset + integer(pInt), intent(inout), dimension(:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file @@ -1254,7 +1254,7 @@ end subroutine HDF5_write_pInt1 subroutine HDF5_write_pInt2(dataset,loc_id,datasetName) implicit none - integer(pInt), intent(out), dimension(:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file @@ -1290,7 +1290,7 @@ end subroutine HDF5_write_pInt2 subroutine HDF5_write_pInt3(dataset,loc_id,datasetName) implicit none - integer(pInt), intent(out), dimension(:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file @@ -1326,7 +1326,7 @@ end subroutine HDF5_write_pInt3 subroutine HDF5_write_pInt4(dataset,loc_id,datasetName) implicit none - integer(pInt), intent(out), dimension(:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) 
:: datasetName !< name of the dataset in the file @@ -1362,7 +1362,7 @@ end subroutine HDF5_write_pInt4 subroutine HDF5_write_pInt5(dataset,loc_id,datasetName) implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file @@ -1434,7 +1434,7 @@ end subroutine HDF5_write_pInt6 subroutine HDF5_write_pInt7(dataset,loc_id,datasetName) implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file From 50a1ba62876cc7ed8a3ab5c09811956e125435f7 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 20 Nov 2018 15:26:49 +0100 Subject: [PATCH 17/89] hdferr needs to be integer type during compile time of HDF5 library --- src/HDF5_utilities.f90 | 114 ++++++++++++++++++++++------------------- 1 file changed, 60 insertions(+), 54 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index c44b56729..f30781fc7 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -14,6 +14,8 @@ module HDF5_utilities implicit none private + integer(pInt), parameter, private :: & + HDF5_ERR_TYPE = 4_pInt !< kind of the integer return in the HDF5 library !-------------------------------------------------------------------------------------------------- !> @brief reads pInt or pReal data of defined shape from file @@ -77,8 +79,8 @@ subroutine HDF5_utilities_init iso_fortran_env ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment) implicit none - integer :: hdferr - integer(SIZE_T) :: typeSize + integer(HDF5_ERR_TYPE) :: hdferr + integer(SIZE_T) :: typeSize write(6,'(/,a)') ' <<<+- HDF5_Utilities init -+>>>' #include "compilation_info.f90" @@ -109,7 +111,7 @@ integer(HID_T) function HDF5_openFile(fileName,mode,parallel) character :: m integer(HID_T) :: plist_id - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr if (present(mode)) then m = mode @@ -129,13 +131,13 @@ integer(HID_T) function HDF5_openFile(fileName,mode,parallel) if (m == 'w') then call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr,access_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f',el=hdferr) + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f') elseif(m == 'a') then call h5fopen_f(fileName,H5F_ACC_RDWR_F,HDF5_openFile,hdferr,access_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)',el=hdferr) + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)') elseif(m == 'r') then call h5fopen_f(fileName,H5F_ACC_RDONLY_F,HDF5_openFile,hdferr,access_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)',el=hdferr) + if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)') else call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f unknown access mode') endif @@ -152,10 +154,11 @@ end function HDF5_openFile subroutine HDF5_closeFile(fileHandle) implicit none - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T), intent(in) :: fileHandle + call h5fclose_f(fileHandle,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeFile: 
h5fclose_f',el=hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeFile: h5fclose_f') end subroutine HDF5_closeFile @@ -164,12 +167,11 @@ end subroutine HDF5_closeFile !> @brief adds a new group to the fileHandle (additional to addGroup2) !-------------------------------------------------------------------------------------------------- integer(HID_T) function HDF5_addGroup2(fileHandle,groupName) - use hdf5 implicit none character(len=*), intent(in) :: groupName integer(HID_T), intent(in) :: fileHandle - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup2, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup2: h5gcreate_f ('//trim(groupName)//')') @@ -181,11 +183,10 @@ end function HDF5_addGroup2 !> @brief open an existing group of a file !-------------------------------------------------------------------------------------------------- integer(HID_T) function HDF5_openGroup2(FileReadID,groupName) - use hdf5 implicit none character(len=*), intent(in) :: groupName - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T), intent(in) :: FileReadID call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr) @@ -198,11 +199,10 @@ end function HDF5_openGroup2 !> @brief close a group !-------------------------------------------------------------------------------------------------- subroutine HDF5_closeGroup(ID) - use hdf5 implicit none integer(HID_T), intent(in) :: ID - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr call h5gclose_f(ID, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_closeGroup: h5gclose_f (el is ID)', el = int(ID,pInt)) @@ -214,12 +214,11 @@ end subroutine HDF5_closeGroup !> @brief adds a StringAttribute to the results file !-------------------------------------------------------------------------------------------------- subroutine HDF5_addStringAttribute(entity,attrLabel,attrValue) - use hdf5 implicit none integer(HID_T), intent(in) :: entity character(len=*), intent(in) :: attrLabel, attrValue - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: attr_id, space_id, type_id call h5screate_f(H5S_SCALAR_F,space_id,hdferr) @@ -246,13 +245,12 @@ end subroutine HDF5_addStringAttribute !> @brief adds a StringAttribute to the results file !-------------------------------------------------------------------------------------------------- subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue) - use hdf5 implicit none integer(HID_T), intent(in) :: entity character(len=*), intent(in) :: attrLabel integer(pInt), intent(in) :: attrValue - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: attr_id, space_id, type_id call h5screate_f(H5S_SCALAR_F,space_id,hdferr) @@ -285,7 +283,7 @@ subroutine HDF5_read_pReal_1(dataset,loc_id,datasetName) integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -309,8 +307,8 @@ subroutine HDF5_read_pReal_2(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr - integer(HID_T) :: dset_id + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id myShape = shape(dataset) call 
h5dopen_f(loc_id,datasetName,dset_id,hdferr) @@ -332,7 +330,7 @@ subroutine HDF5_read_pReal_3(dataset,loc_id,datasetName) integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -356,7 +354,7 @@ subroutine HDF5_read_pReal_4(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -380,7 +378,7 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -404,7 +402,7 @@ subroutine HDF5_read_pReal_6(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -428,7 +426,7 @@ subroutine HDF5_read_pReal_7(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -452,7 +450,7 @@ subroutine HDF5_read_pInt_1(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -476,7 +474,7 @@ subroutine HDF5_read_pInt_2(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -500,7 +498,7 @@ subroutine HDF5_read_pInt_3(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -524,7 +522,7 @@ subroutine HDF5_read_pInt_4(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -548,7 +546,7 @@ subroutine HDF5_read_pInt_5(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -572,7 +570,7 @@ subroutine HDF5_read_pInt_6(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -596,7 
+594,7 @@ subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer :: hdferr + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id myShape = shape(dataset) @@ -629,11 +627,12 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes - integer :: hdferr,ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(1) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr) + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- ! determine shape of dataset @@ -715,11 +714,12 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes - integer :: hdferr,ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(2) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr) + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- ! determine shape of dataset @@ -801,11 +801,12 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes - integer :: hdferr,ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(3) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr) + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- ! determine shape of dataset @@ -887,11 +888,12 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes - integer :: hdferr,ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(4) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr) + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- ! 
determine shape of dataset @@ -973,11 +975,12 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes - integer :: hdferr,ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(5) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr) + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- ! determine shape of dataset @@ -1059,11 +1062,12 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes - integer :: hdferr,ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(6) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr) + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- ! determine shape of dataset @@ -1145,11 +1149,12 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes - integer :: hdferr,ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(7) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr) + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- ! determine shape of dataset @@ -1223,7 +1228,8 @@ subroutine HDF5_write_pInt1(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt), dimension(:), allocatable :: myShape ! 
Date: Tue, 20 Nov 2018 15:27:32 +0100 Subject: [PATCH 18/89] respect dependencies of inclusion --- src/commercialFEM_fileList.f90 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/commercialFEM_fileList.f90 b/src/commercialFEM_fileList.f90 index 36f0244ef..7ab022d5a 100644 --- a/src/commercialFEM_fileList.f90 +++ b/src/commercialFEM_fileList.f90 @@ -4,12 +4,12 @@ !> @details List of files needed by MSC.Marc, Abaqus/Explicit, and Abaqus/Standard !-------------------------------------------------------------------------------------------------- #include "IO.f90" -#ifdef DAMASKHDF5 -#include "HDF5_utilities.f90" -#endif #include "numerics.f90" #include "debug.f90" #include "config.f90" +#ifdef DAMASKHDF5 +#include "HDF5_utilities.f90" +#endif #include "math.f90" #include "FEsolving.f90" #include "mesh.f90" From 32b62da84f2336b22cc407a04ab9d7cf6c54fe45 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 20 Nov 2018 16:05:59 +0100 Subject: [PATCH 19/89] compile MSC.Marc with HDF5 --- .../2018.1/Marc_tools/include_linux64 | 29 ++++++++++--------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/installation/mods_MarcMentat/2018.1/Marc_tools/include_linux64 b/installation/mods_MarcMentat/2018.1/Marc_tools/include_linux64 index 22b306cb1..53052b065 100644 --- a/installation/mods_MarcMentat/2018.1/Marc_tools/include_linux64 +++ b/installation/mods_MarcMentat/2018.1/Marc_tools/include_linux64 @@ -63,7 +63,14 @@ else INTEGER_PATH=/$MARC_INTEGER_SIZE fi -FCOMP=ifort +if test "$DAMASK_HDF5" = "ON";then + H5FC="$(h5fc -show)" + HDF5_LIB=${$H5FC//ifort/} + FCOMP=$H5FC +else + FCOMP=ifort +fi + INTELPATH="/opt/intel/compilers_and_libraries_2017/linux" # find the root directory of the compiler installation: @@ -535,23 +542,17 @@ else DAMASKVERSION="'N/A'" fi -if test "$DAMASK_HDF5" = "ON";then - DFCOMP="$(h5fc -show) -DDAMASKHDF5" -else - DFCOMP=$FCOMP -fi -# # DAMASK compiler calls: additional flags are in line 2 OpenMP flags in line 3 -DFORTLOWMP="$DFCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O0 $I8FFLAGS -I$MARC_SOURCE/common \ +DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O0 $I8FFLAGS -I$MARC_SOURCE/common \ -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \ -qopenmp -qopenmp-threadprivate=compat\ $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD" -DFORTRANMP="$DFCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O1 $I8FFLAGS -I$MARC_SOURCE/common \ +DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O1 $I8FFLAGS -I$MARC_SOURCE/common \ -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \ -qopenmp -qopenmp-threadprivate=compat\ $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD" -DFORTHIGHMP="$DFCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias -O2 $I8FFLAGS -I$MARC_SOURCE/common \ +DFORTHIGHMP="$FCOMP -c -implicitnone 
-stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias -O2 $I8FFLAGS -I$MARC_SOURCE/common \ -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \ -qopenmp -qopenmp-threadprivate=compat\ $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD" @@ -570,15 +571,15 @@ then fi # DAMASK compiler calls: additional flags are in line 2 OpenMP flags in line 3 - DFORTLOWMP="$DFCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \ + DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \ -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \ -qopenmp -qopenmp-threadprivate=compat\ $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD" - DFORTRANMP="$DFCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \ + DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \ -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \ -qopenmp -qopenmp-threadprivate=compat\ $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD" - DFORTHIGHMP="$DFCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias $I8FFLAGS -I$MARC_SOURCE/common \ + DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias $I8FFLAGS -I$MARC_SOURCE/common \ -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \ -qopenmp -qopenmp-threadprivate=compat\ $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD" @@ -744,7 +745,7 @@ SECLIBS="-L$MARC_LIB -llapi" SOLVERLIBS="${BCSSOLVERLIBS} ${VKISOLVERLIBS} ${CASISOLVERLIBS} ${MF2SOLVERLIBS} \ $MKLLIB -L$MARC_MKL -liomp5 \ - $MARC_LIB/blas_src.a ${ACSI_LIB}/ACSI_MarcLib.a $KDTREE2_LIB/kdtree2.a " + $MARC_LIB/blas_src.a ${ACSI_LIB}/ACSI_MarcLib.a $KDTREE2_LIB/kdtree2.a $HDF5_LIB " SOLVERLIBS_DLL=${SOLVERLIBS} if test "$AEM_DLL" -eq 1 From aea5730c940b3682315bbef08cae300e88f15569 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 20 Nov 2018 21:54:55 +0100 Subject: [PATCH 20/89] don't disturb the search routines of MSC.Marc --- .../2018.1/Marc_tools/include_linux64 | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/installation/mods_MarcMentat/2018.1/Marc_tools/include_linux64 b/installation/mods_MarcMentat/2018.1/Marc_tools/include_linux64 index 53052b065..53eef9d83 100644 --- a/installation/mods_MarcMentat/2018.1/Marc_tools/include_linux64 +++ 
b/installation/mods_MarcMentat/2018.1/Marc_tools/include_linux64 @@ -63,14 +63,6 @@ else INTEGER_PATH=/$MARC_INTEGER_SIZE fi -if test "$DAMASK_HDF5" = "ON";then - H5FC="$(h5fc -show)" - HDF5_LIB=${$H5FC//ifort/} - FCOMP=$H5FC -else - FCOMP=ifort -fi - INTELPATH="/opt/intel/compilers_and_libraries_2017/linux" # find the root directory of the compiler installation: @@ -106,6 +98,16 @@ else FCOMPROOT= fi +# DAMASK uses the HDF5 compiler wrapper around the Intel compiler +if test "$DAMASK_HDF5" = "ON";then + H5FC="$(h5fc -shlib -show)" + HDF5_LIB=${H5FC//ifort/} + FCOMP="$H5FC -DDAMASKHDF5" + echo $FCOMP +else + FCOMP=ifort +fi + # AEM if test "$MARCDLLOUTDIR" = ""; then DLLOUTDIR="$MARC_LIB" From d110534eca8cb63c56f969fcc0d496836c37ce51 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 20 Nov 2018 22:00:08 +0100 Subject: [PATCH 21/89] draft (no license for checking) --- installation/mods_Abaqus/abaqus_v6.env | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/installation/mods_Abaqus/abaqus_v6.env b/installation/mods_Abaqus/abaqus_v6.env index d09257a9d..33c13c2ee 100644 --- a/installation/mods_Abaqus/abaqus_v6.env +++ b/installation/mods_Abaqus/abaqus_v6.env @@ -12,9 +12,15 @@ # import os, re, glob, driverUtils from damask import version as DAMASKVERSION +from damask import Environment +myEnv = damask.Environment() -# Use the version in $PATH -fortCmd = "ifort" +if myEnv.options['DAMASK_HDF5'] == 'ON': + # use hdf5 compiler wrapper in $PATH + fortCmd = os.popen('h5fc -shlib -show').read().replace('\n','') +else + # Use the version in $PATH + fortCmd = "ifort" # -free to use free-format FORTRAN 90 syntax # -O <0-3> optimization level From f51aafabdd832b4e01b8a8dad19d4662488a7686 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 21 Nov 2018 00:00:18 +0100 Subject: [PATCH 22/89] using hdf5 wrapper for compilation of subroutines --- installation/mods_Abaqus/abaqus_v6.env | 10 +++++++--- installation/mods_Abaqus/abaqus_v6_debug.env | 14 ++++++++++++-- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/installation/mods_Abaqus/abaqus_v6.env b/installation/mods_Abaqus/abaqus_v6.env index 33c13c2ee..0b4a7fd43 100644 --- a/installation/mods_Abaqus/abaqus_v6.env +++ b/installation/mods_Abaqus/abaqus_v6.env @@ -13,12 +13,14 @@ import os, re, glob, driverUtils from damask import version as DAMASKVERSION from damask import Environment -myEnv = damask.Environment() +myEnv = Environment() if myEnv.options['DAMASK_HDF5'] == 'ON': # use hdf5 compiler wrapper in $PATH - fortCmd = os.popen('h5fc -shlib -show').read().replace('\n','') -else + fortCmd = os.popen('h5fc -shlib -show').read().replace('\n','') # complicated way needed to pass in DAMASKVERSION string + link_sl += fortCmd.split()[1:] + fortCmd +=" -DDAMASKHDF5" +else: # Use the version in $PATH fortCmd = "ifort" @@ -56,4 +58,6 @@ ask_delete=OFF # Remove the temporary names from the namespace del fortCmd +del Environment +del myEnv del DAMASKVERSION diff --git a/installation/mods_Abaqus/abaqus_v6_debug.env b/installation/mods_Abaqus/abaqus_v6_debug.env index de5189a52..c967c1e65 100644 --- a/installation/mods_Abaqus/abaqus_v6_debug.env +++ b/installation/mods_Abaqus/abaqus_v6_debug.env @@ -12,9 +12,17 @@ # import os, re, glob, driverUtils from damask import version as DAMASKVERSION +from damask import Environment +myEnv = Environment() -# Use the version in $PATH -fortCmd = "ifort" +if myEnv.options['DAMASK_HDF5'] == 'ON': + # use hdf5 compiler wrapper in $PATH + fortCmd = os.popen('h5fc -shlib 
-show').read().replace('\n','') # complicated way needed to pass in DAMASKVERSION string + link_sl += fortCmd.split()[1:] + fortCmd +=" -DDAMASKHDF5" +else: + # Use the version in $PATH + fortCmd = "ifort" # -free to use free-format FORTRAN 90 syntax # -O <0-3> optimization level @@ -55,4 +63,6 @@ ask_delete=OFF # Remove the temporary names from the namespace del fortCmd +del Environment +del myEnv del DAMASKVERSION From d00e3105ed655f68a5a4baba9faf70e3cd8f1f6d Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Wed, 21 Nov 2018 16:10:17 +0100 Subject: [PATCH 23/89] Made hdferr < 0 and removed the unnecessary call to h5getspace in hyperslab --- src/HDF5_utilities.f90 | 250 +++++++++++++++++++---------------------- 1 file changed, 118 insertions(+), 132 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index f30781fc7..c0ee3d472 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -120,30 +120,30 @@ integer(HID_T) function HDF5_openFile(fileName,mode,parallel) endif call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pcreate_f') #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pset_fapl_mpio_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pset_fapl_mpio_f') endif; endif #endif if (m == 'w') then call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr,access_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f') elseif(m == 'a') then call h5fopen_f(fileName,H5F_ACC_RDWR_F,HDF5_openFile,hdferr,access_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)') elseif(m == 'r') then call h5fopen_f(fileName,H5F_ACC_RDONLY_F,HDF5_openFile,hdferr,access_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)') else call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f unknown access mode') endif call h5pclose_f(plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pclose_f') end function HDF5_openFile @@ -288,11 +288,11 @@ subroutine HDF5_read_pReal_1(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dclose_f') end subroutine HDF5_read_pReal_1 @@ -312,11 +312,11 @@ subroutine HDF5_read_pReal_2(dataset,loc_id,datasetName) myShape = shape(dataset) 
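! note (sketch, not part of this patch): every HDF5_read_pReal_N/HDF5_read_pInt_N
! variant follows the same three-step pattern, forwarding the shape of the
! in-memory buffer to the library, e.g. for pReal data:
!   call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
!   call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr)
!   call h5dclose_f(dset_id,hdferr)
! HDF5 signals failure with a negative hdferr, while some calls legitimately
! return a positive value on success; hence the checks below are relaxed
! from '/= 0' to '< 0'.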
call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dclose_f') end subroutine HDF5_read_pReal_2 @@ -335,11 +335,11 @@ subroutine HDF5_read_pReal_3(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dclose_f') end subroutine HDF5_read_pReal_3 @@ -359,11 +359,11 @@ subroutine HDF5_read_pReal_4(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dclose_f') end subroutine HDF5_read_pReal_4 @@ -383,11 +383,11 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dclose_f') end subroutine HDF5_read_pReal_5 @@ -407,11 +407,11 @@ subroutine HDF5_read_pReal_6(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call 
IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dclose_f') end subroutine HDF5_read_pReal_6 @@ -431,11 +431,11 @@ subroutine HDF5_read_pReal_7(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dclose_f') end subroutine HDF5_read_pReal_7 @@ -455,11 +455,11 @@ subroutine HDF5_read_pInt_1(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dclose_f') end subroutine HDF5_read_pInt_1 @@ -479,11 +479,11 @@ subroutine HDF5_read_pInt_2(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dclose_f') end subroutine HDF5_read_pInt_2 @@ -503,11 +503,11 @@ subroutine HDF5_read_pInt_3(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dclose_f') + if (hdferr < 0) call 
IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dclose_f') end subroutine HDF5_read_pInt_3 @@ -527,11 +527,11 @@ subroutine HDF5_read_pInt_4(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dclose_f') end subroutine HDF5_read_pInt_4 @@ -551,11 +551,11 @@ subroutine HDF5_read_pInt_5(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dclose_f') end subroutine HDF5_read_pInt_5 @@ -575,11 +575,11 @@ subroutine HDF5_read_pInt_6(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dclose_f') end subroutine HDF5_read_pInt_6 @@ -599,11 +599,11 @@ subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName) myShape = shape(dataset) call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dopen_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dread_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dread_f') call h5dclose_f(dset_id,hdferr) - if (hdferr /= 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dclose_f') + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dclose_f') end subroutine HDF5_read_pInt_7 @@ -642,7 +642,7 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr /= 0) call 
IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5pset_dxpl_mpio_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal1: MPI_allreduce') endif; endif @@ -655,42 +655,40 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & int(localShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5screate_simple_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5screate_simple_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5screate_simple_f/filespace_id') !-------------------------------------------------------------------------------------------------- ! create dataset call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dcreate_f') !-------------------------------------------------------------------------------------------------- ! select a hyperslab (the portion of the current process) in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dget_space_f') call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sselect_hyperslab_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sselect_hyperslab_f') !-------------------------------------------------------------------------------------------------- ! 
write call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dcreate_f') !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/memspace_id') end subroutine HDF5_write_pReal1 @@ -729,7 +727,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5pset_dxpl_mpio_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal2: MPI_allreduce') endif; endif @@ -742,42 +740,40 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & int(localShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/filespace_id') !-------------------------------------------------------------------------------------------------- ! create dataset call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dcreate_f') !-------------------------------------------------------------------------------------------------- ! 
select a hyperslab (the portion of the current process) in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dget_space_f') call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sselect_hyperslab_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sselect_hyperslab_f') !-------------------------------------------------------------------------------------------------- ! write call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dcreate_f') !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/memspace_id') end subroutine HDF5_write_pReal2 @@ -816,7 +812,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5pset_dxpl_mpio_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal3: MPI_allreduce') endif; endif @@ -829,42 +825,40 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & int(localShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/filespace_id') !-------------------------------------------------------------------------------------------------- ! 
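Reusing filespace_id from h5screate_simple_f for the hyperslab selection, instead of re-querying it with h5dget_space_f, removes a redundant call and avoids overwriting (and thereby leaking) the dataspace handle created a few lines earlier. A condensed view of the resulting sequence, with names taken from the surrounding code:

  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)   ! select directly on the dataspace used for creation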
create dataset call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dcreate_f') !-------------------------------------------------------------------------------------------------- ! select a hyperslab (the portion of the current process) in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dget_space_f') call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sselect_hyperslab_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sselect_hyperslab_f') !-------------------------------------------------------------------------------------------------- ! write call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dcreate_f') !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/memspace_id') end subroutine HDF5_write_pReal3 @@ -903,7 +897,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5pset_dxpl_mpio_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal4: MPI_allreduce') endif; endif @@ -916,42 +910,40 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & int(localShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! 
create dataspace in file (global shape) call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/filespace_id') !-------------------------------------------------------------------------------------------------- ! create dataset call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dcreate_f') !-------------------------------------------------------------------------------------------------- ! select a hyperslab (the portion of the current process) in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dget_space_f') call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sselect_hyperslab_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sselect_hyperslab_f') !-------------------------------------------------------------------------------------------------- ! write call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dcreate_f') !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/memspace_id') end subroutine HDF5_write_pReal4 @@ -990,7 +982,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5pset_dxpl_mpio_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal5: MPI_allreduce') endif; endif @@ -1003,42 +995,40 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) ! 
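Each process contributes a slab along the last dimension, so its file offset is the summed extent of all lower-ranked processes; a single in-place allreduce gathers those extents. A sketch of the bookkeeping behind myStart and globalShape, assuming (as in these routines) that only the last dimension varies across processes:

  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(size(localShape))                                ! my extent of the slowest dimension
  call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)
  myStart     = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)                       ! file offset of my hyperslab (4D case)
  globalShape = [localShape(1:size(localShape)-1),sum(outputSize)]                      ! shape of the dataset all processes share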
create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & int(localShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/filespace_id') !-------------------------------------------------------------------------------------------------- ! create dataset call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dcreate_f') !-------------------------------------------------------------------------------------------------- ! select a hyperslab (the portion of the current process) in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dget_space_f') call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sselect_hyperslab_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sselect_hyperslab_f') !-------------------------------------------------------------------------------------------------- ! 
write call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dcreate_f') !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/memspace_id') end subroutine HDF5_write_pReal5 @@ -1077,7 +1067,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5pset_dxpl_mpio_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal6: MPI_allreduce') endif; endif @@ -1090,42 +1080,40 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & int(localShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5screate_simple_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5screate_simple_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5screate_simple_f/filespace_id') !-------------------------------------------------------------------------------------------------- ! create dataset call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dcreate_f') !-------------------------------------------------------------------------------------------------- ! 
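The transfer property list is what makes h5dwrite_f collective: every rank participates in each write, which MPI-IO can aggregate. Without PETSc, or with parallel absent or false, the list keeps its default independent mode, so the same code path also works serially. Condensed from the #ifdef PETSc blocks above:

  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)                 ! default: independent transfer
#ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)    ! upgrade to collective MPI-IO
  endif; endif
#endif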
select a hyperslab (the portion of the current process) in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dget_space_f') call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sselect_hyperslab_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sselect_hyperslab_f') !-------------------------------------------------------------------------------------------------- ! write call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dcreate_f') !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/memspace_id') end subroutine HDF5_write_pReal6 @@ -1164,7 +1152,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5pset_dxpl_mpio_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal7: MPI_allreduce') endif; endif @@ -1177,42 +1165,40 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & int(localShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5screate_simple_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5screate_simple_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5screate_simple_f/filespace_id') !-------------------------------------------------------------------------------------------------- ! 
create dataset call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dcreate_f') !-------------------------------------------------------------------------------------------------- ! select a hyperslab (the portion of the current process) in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dget_space_f') call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sselect_hyperslab_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sselect_hyperslab_f') !-------------------------------------------------------------------------------------------------- ! write call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dcreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dcreate_f') !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr /= 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/memspace_id') end subroutine HDF5_write_pReal7 From 5cc6d86c6104f4ae4b3c23c69c12093cdc649af8 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Wed, 21 Nov 2018 16:57:36 +0100 Subject: [PATCH 24/89] Added the parallelized functionality for integer datatypes (works for groups?) 
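The pInt writers gaining the parallel argument below follow the pReal template; besides the worldrank/worldsize bookkeeping, the substantive difference is the datatype handle used for creation and writing. A sketch of the shared core, assuming the integer path mirrors the real-valued one:

  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
  call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(globalShape,HSIZE_T), hdferr, &
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)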
--- src/HDF5_utilities.f90 | 563 +++++++++++++++++++++++++++++++++-------- 1 file changed, 457 insertions(+), 106 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index c0ee3d472..c6543ffa4 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -1206,255 +1206,606 @@ end subroutine HDF5_write_pReal7 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for writing dataset of the type pInt with 1 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt1(dataset,loc_id,datasetName) +subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none integer(pInt), intent(inout), dimension(:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 2 dimensions +!> @brief subroutine for writing dataset of type pInt with 2 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt2(dataset,loc_id,datasetName) +subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none integer(pInt), intent(inout), dimension(:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 3 dimensions +!> @brief subroutine for writing dataset of type pInt with 3 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt3(dataset,loc_id,datasetName) +subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none integer(pInt), intent(inout), dimension(:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 4 dimensions +!> @brief subroutine for writing dataset of type pInt with 4 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt4(dataset,loc_id,datasetName) +subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none integer(pInt), intent(inout), dimension(:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! 
@brief subroutine for writing dataset of the type pInt with 5 dimensions +!> @brief subroutine for writing dataset of type pInt with 5 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt5(dataset,loc_id,datasetName) +subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none integer(pInt), intent(inout), dimension(:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 6 dimensions +!> @brief subroutine for writing dataset of type pInt with 6 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt6(dataset,loc_id,datasetName) +subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! @brief subroutine for writing dataset of the type pInt with 7 dimensions +!> @brief subroutine for writing dataset of type pInt with 7 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_write_pInt7(dataset,loc_id,datasetName) +subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none integer(pInt), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: myShape ! 
Date: Wed, 21 Nov 2018 19:35:37 +0100 Subject: [PATCH 25/89] Parallel works for groups and included write function for integer data --- src/HDF5_utilities.f90 | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index c6543ffa4..cda585363 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -166,13 +166,17 @@ end subroutine HDF5_closeFile !-------------------------------------------------------------------------------------------------- !> @brief adds a new group to the fileHandle (additional to addGroup2) !-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_addGroup2(fileHandle,groupName) +integer(HID_T) function HDF5_addGroup2(fileHandle,groupName,parallel) implicit none character(len=*), intent(in) :: groupName integer(HID_T), intent(in) :: fileHandle integer(HDF5_ERR_TYPE) :: hdferr + logical,intent(in), optional :: parallel + + integer(HID_T) :: plist_id,gapl_id + call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup2, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup2: h5gcreate_f ('//trim(groupName)//')') From 8b4781cf285f9d8f74b56b14fbdd3d48655556b6 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 23 Nov 2018 05:37:31 +0100 Subject: [PATCH 26/89] no need to repeat code --- src/constitutive.f90 | 3 +- src/plastic_kinematichardening.f90 | 53 +++++++++--------------------- 2 files changed, 16 insertions(+), 40 deletions(-) diff --git a/src/constitutive.f90 b/src/constitutive.f90 index c4d2cacbf..820715d80 100644 --- a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -516,8 +516,7 @@ subroutine constitutive_LpAndItsTangents(Lp, dLp_dS, dLp_dFi, S6, Fi, ipc, ip, e call plastic_phenopowerlaw_LpAndItsTangent (Lp,dLp_dMp, Mp,instance,of) case (PLASTICITY_KINEHARDENING_ID) plasticityType - call plastic_kinehardening_LpAndItsTangent (Lp,dLp_dMp99, math_Mandel33to6(Mp),ipc,ip,el) - dLp_dMp = math_Plain99to3333(dLp_dMp99) ! 
ToDo: We revert here the last statement in plastic_xx_LpAndItsTanget + call plastic_kinehardening_LpAndItsTangent (Lp,dLp_dMp, math_Mandel33to6(Mp),ipc,ip,el) case (PLASTICITY_NONLOCAL_ID) plasticityType call plastic_nonlocal_LpAndItsTangent (Lp,dLp_dMp99, math_Mandel33to6(Mp), & diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 5089cd5ca..2eb6ac4aa 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -140,18 +140,17 @@ subroutine plastic_kinehardening_init(fileUnit) IO_timeStamp, & IO_EOF use material, only: & - PLASTICITY_kinehardening_label, & - PLASTICITY_kinehardening_ID, & phase_plasticity, & phase_plasticityInstance, & phase_Noutput, & + material_allocatePlasticState, & + PLASTICITY_kinehardening_label, & + PLASTICITY_kinehardening_ID, & material_phase, & plasticState use config, only: & MATERIAL_partPhase use lattice - use numerics,only: & - numerics_integrator implicit none integer(pInt), intent(in) :: fileUnit @@ -422,29 +421,11 @@ subroutine plastic_kinehardening_init(fileUnit) + nSlip !< accumulated shear at last switch of stress sense sizeState = sizeDotState + sizeDeltaState - plasticState(phase)%sizeState = sizeState - plasticState(phase)%sizeDotState = sizeDotState - plasticState(phase)%sizeDeltaState = sizeDeltaState - plasticState(phase)%offsetDeltaState = sizeDotState - plasticState(phase)%sizePostResults = plastic_kinehardening_sizePostResults(instance) - plasticState(phase)%nSlip = nSlip - - allocate(plasticState(phase)%state0 ( sizeState,NipcMyPhase), source=0.0_pReal) - allocate(plasticState(phase)%partionedState0 ( sizeState,NipcMyPhase), source=0.0_pReal) - allocate(plasticState(phase)%subState0 ( sizeState,NipcMyPhase), source=0.0_pReal) - allocate(plasticState(phase)%state ( sizeState,NipcMyPhase), source=0.0_pReal) - allocate(plasticState(phase)%aTolState (sizeDotState), source=0.0_pReal) - allocate(plasticState(phase)%dotState (sizeDotState,NipcMyPhase), source=0.0_pReal) - allocate(plasticState(phase)%deltaState (sizeDeltaState,NipcMyPhase), source=0.0_pReal) ! 
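The block of repeated allocations is replaced by one call to material_allocatePlasticState. Its body is not part of this patch; judging from the call site, it is expected to centralize the size bookkeeping and the state/dotState/deltaState allocations that every plasticity module previously spelled out, roughly like this (a hypothetical sketch, signature inferred from the call):

  subroutine material_allocatePlasticState(phase,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState,&
                                           Nslip,Ntwin,Ntrans)
    integer(pInt), intent(in) :: phase, NipcMyPhase, sizeState, sizeDotState, sizeDeltaState, &
                                 Nslip, Ntwin, Ntrans
    plasticState(phase)%sizeState    = sizeState
    plasticState(phase)%sizeDotState = sizeDotState
    plasticState(phase)%nSlip        = Nslip
    allocate(plasticState(phase)%state     (sizeState,     NipcMyPhase), source=0.0_pReal)
    allocate(plasticState(phase)%dotState  (sizeDotState,  NipcMyPhase), source=0.0_pReal)
    allocate(plasticState(phase)%deltaState(sizeDeltaState,NipcMyPhase), source=0.0_pReal)
    ! ... integrator-specific work arrays as before ...
  end subroutine material_allocatePlasticState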
allocate space for deltaState - if (any(numerics_integrator == 1_pInt)) then - allocate(plasticState(phase)%previousDotState (sizeDotState,NipcMyPhase),source=0.0_pReal) - allocate(plasticState(phase)%previousDotState2(sizeDotState,NipcMyPhase),source=0.0_pReal) - endif - if (any(numerics_integrator == 4_pInt)) & - allocate(plasticState(phase)%RK4dotState (sizeDotState,NipcMyPhase), source=0.0_pReal) - if (any(numerics_integrator == 5_pInt)) & - allocate(plasticState(phase)%RKCK45dotState (6,sizeDotState,NipcMyPhase), source=0.0_pReal) + call material_allocatePlasticState(phase,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & + nSlip,0_pInt,0_pInt) + plasticState(phase)%sizePostResults = plastic_kinehardening_sizePostResults(instance) + offset_slip = plasticState(phase)%nSlip+plasticState(phase)%nTwin+2_pInt plasticState(phase)%slipRate => & plasticState(phase)%dotState(offset_slip+1:offset_slip+plasticState(phase)%nSlip,1:NipcMyPhase) @@ -611,7 +592,7 @@ end subroutine plastic_kinehardening_shearRates !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dTstar99, & +subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & Tstar_v,ipc,ip,el) use prec, only: & dNeq0 @@ -639,8 +620,8 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dTstar99, & implicit none real(pReal), dimension(3,3), intent(out) :: & Lp !< plastic velocity gradient - real(pReal), dimension(9,9), intent(out) :: & - dLp_dTstar99 !< derivative of Lp with respect to 2nd Piola Kirchhoff stress + real(pReal), dimension(3,3,3,3), intent(out) :: & + dLp_dMp !< derivative of Lp with respect to the Mandel stress integer(pInt), intent(in) :: & ipc, & !< component-ID of integration point @@ -661,8 +642,6 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dTstar99, & tau_pos,tau_neg real(pReal) :: & dgdot_dtau_pos,dgdot_dtau_neg - real(pReal), dimension(3,3,3,3) :: & - dLp_dTstar3333 !< derivative of Lp with respect to Tstar as 4th order tensor real(pReal), dimension(3,3,2) :: & nonSchmid_tensor @@ -671,8 +650,7 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dTstar99, & instance = phase_plasticityInstance(ph) Lp = 0.0_pReal - dLp_dTstar3333 = 0.0_pReal - dLp_dTstar99 = 0.0_pReal + dLp_dMp = 0.0_pReal call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & Tstar_v,ph,instance,of) @@ -702,22 +680,21 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dTstar99, & if (dNeq0(gdot_pos(j))) then dgdot_dtau_pos = gdot_pos(j)*param(instance)%n_slip/(tau_pos(j)-state(instance)%crss_back(j,of)) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & - dLp_dTstar3333(k,l,m,n) = & - dLp_dTstar3333(k,l,m,n) + dgdot_dtau_pos*lattice_Sslip(k,l,1,index_myFamily+i,ph)* & + dLp_dMp(k,l,m,n) = & + dLp_dMp(k,l,m,n) + dgdot_dtau_pos*lattice_Sslip(k,l,1,index_myFamily+i,ph)* & nonSchmid_tensor(m,n,1) endif if (dNeq0(gdot_neg(j))) then dgdot_dtau_neg = gdot_neg(j)*param(instance)%n_slip/(tau_neg(j)-state(instance)%crss_back(j,of)) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & - dLp_dTstar3333(k,l,m,n) = & - dLp_dTstar3333(k,l,m,n) + dgdot_dtau_neg*lattice_Sslip(k,l,1,index_myFamily+i,ph)* & + dLp_dMp(k,l,m,n) = & + dLp_dMp(k,l,m,n) + 
dgdot_dtau_neg*lattice_Sslip(k,l,1,index_myFamily+i,ph)* & nonSchmid_tensor(m,n,2) endif enddo slipSystems enddo slipFamilies - dLp_dTstar99 = math_Plain3333to99(dLp_dTstar3333) end subroutine plastic_kinehardening_LpAndItsTangent From 6df68d9428b3b9f16da40a4275dc9406242f1ae1 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 23 Nov 2018 06:04:39 +0100 Subject: [PATCH 27/89] no need to constantly convert 3x3 matrix <-> 6 vector --- src/constitutive.f90 | 8 ++--- src/plastic_kinematichardening.f90 | 52 ++++++++++++++++-------------- 2 files changed, 31 insertions(+), 29 deletions(-) diff --git a/src/constitutive.f90 b/src/constitutive.f90 index 820715d80..6fd0161f9 100644 --- a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -516,7 +516,7 @@ subroutine constitutive_LpAndItsTangents(Lp, dLp_dS, dLp_dFi, S6, Fi, ipc, ip, e call plastic_phenopowerlaw_LpAndItsTangent (Lp,dLp_dMp, Mp,instance,of) case (PLASTICITY_KINEHARDENING_ID) plasticityType - call plastic_kinehardening_LpAndItsTangent (Lp,dLp_dMp, math_Mandel33to6(Mp),ipc,ip,el) + call plastic_kinehardening_LpAndItsTangent (Lp,dLp_dMp, Mp,ipc,ip,el) case (PLASTICITY_NONLOCAL_ID) plasticityType call plastic_nonlocal_LpAndItsTangent (Lp,dLp_dMp99, math_Mandel33to6(Mp), & @@ -918,7 +918,7 @@ subroutine constitutive_collectDotState(S6, FeArray, Fi, FpArray, subdt, subfrac call plastic_phenopowerlaw_dotState(Mp,instance,of) case (PLASTICITY_KINEHARDENING_ID) plasticityType - call plastic_kinehardening_dotState(math_Mandel33to6(Mp),ipc,ip,el) + call plastic_kinehardening_dotState(Mp,ipc,ip,el) case (PLASTICITY_DISLOTWIN_ID) plasticityType call plastic_dislotwin_dotState (math_Mandel33to6(Mp),temperature(ho)%p(tme), & @@ -1012,7 +1012,7 @@ subroutine constitutive_collectDeltaState(S6, Fe, Fi, ipc, ip, el) plasticityType: select case (phase_plasticity(material_phase(ipc,ip,el))) case (PLASTICITY_KINEHARDENING_ID) plasticityType - call plastic_kinehardening_deltaState(math_Mandel33to6(Mstar),ipc,ip,el) + call plastic_kinehardening_deltaState(Mstar,ipc,ip,el) case (PLASTICITY_NONLOCAL_ID) plasticityType call plastic_nonlocal_deltaState(math_Mandel33to6(Mstar),ip,el) @@ -1141,7 +1141,7 @@ function constitutive_postResults(S6, Fi, FeArray, ipc, ip, el) plastic_phenopowerlaw_postResults(Mp,instance,of) case (PLASTICITY_KINEHARDENING_ID) plasticityType constitutive_postResults(startPos:endPos) = & - plastic_kinehardening_postResults(S6,ipc,ip,el) + plastic_kinehardening_postResults(Mp,ipc,ip,el) case (PLASTICITY_DISLOTWIN_ID) plasticityType constitutive_postResults(startPos:endPos) = & plastic_dislotwin_postResults(S6,temperature(ho)%p(tme),ipc,ip,el) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 2eb6ac4aa..858e71a84 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -534,17 +534,18 @@ end subroutine plastic_kinehardening_init !> @brief calculation of shear rates (\dot \gamma) !-------------------------------------------------------------------------------------------------- subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Tstar_v,ph,instance,of) + Mp,ph,instance,of) + use math use lattice, only: & lattice_NslipSystem, & - lattice_Sslip_v, & + lattice_Sslip, & lattice_maxNslipFamily, & lattice_NnonSchmid implicit none - real(pReal), dimension(6), intent(in) :: & - Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation + real(pReal), dimension(3,3), intent(in) :: & + Mp integer(pInt), intent(in) :: & ph, & !< phase ID 
instance, & !< instance of that phase @@ -565,13 +566,13 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & index_myFamily = sum(lattice_NslipSystem(1:f-1_pInt,ph)) ! at which index starts my family slipSystems: do i = 1_pInt,plastic_kinehardening_Nslip(f,instance) j = j + 1_pInt - tau_pos(j) = dot_product(Tstar_v,lattice_Sslip_v(1:6,1,index_myFamily+i,ph)) + tau_pos(j) = math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,1,index_myFamily+i,ph)) tau_neg(j) = tau_pos(j) nonSchmidSystems: do k = 1,lattice_NnonSchmid(ph) tau_pos(j) = tau_pos(j) + param(instance)%nonSchmidCoeff(k)* & - dot_product(Tstar_v,lattice_Sslip_v(1:6,2*k+0,index_myFamily+i,ph)) + math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,2*k+0,index_myFamily+i,ph)) tau_neg(j) = tau_neg(j) + param(instance)%nonSchmidCoeff(k)* & - dot_product(Tstar_v,lattice_Sslip_v(1:6,2*k+1,index_myFamily+i,ph)) + math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,2*k+1,index_myFamily+i,ph)) enddo nonSchmidSystems enddo slipSystems enddo slipFamilies @@ -593,7 +594,7 @@ end subroutine plastic_kinehardening_shearRates !> @brief calculates plastic velocity gradient and its tangent !-------------------------------------------------------------------------------------------------- subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & - Tstar_v,ipc,ip,el) + Mp,ipc,ip,el) use prec, only: & dNeq0 use debug, only: & @@ -627,8 +628,8 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & ipc, & !< component-ID of integration point ip, & !< integration point el !< element - real(pReal), dimension(6), intent(in) :: & - Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation + real(pReal), dimension(3,3), intent(in) :: & + Mp integer(pInt) :: & instance, & @@ -653,7 +654,7 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & dLp_dMp = 0.0_pReal call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Tstar_v,ph,instance,of) + Mp,ph,instance,of) j = 0_pInt ! reading and marking the starting index for each slip family @@ -701,7 +702,7 @@ end subroutine plastic_kinehardening_LpAndItsTangent !-------------------------------------------------------------------------------------------------- !> @brief calculates (instantaneous) incremental change of microstructure !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_deltaState(Tstar_v,ipc,ip,el) +subroutine plastic_kinehardening_deltaState(Mp,ipc,ip,el) use prec, only: & dNeq, & dEq0 @@ -719,8 +720,8 @@ subroutine plastic_kinehardening_deltaState(Tstar_v,ipc,ip,el) phase_plasticityInstance implicit none - real(pReal), dimension(6), intent(in):: & - Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation + real(pReal), dimension(3,3), intent(in) :: & + Mp integer(pInt), intent(in) :: & ipc, & !< component-ID of integration point ip, & !< integration point @@ -740,7 +741,7 @@ subroutine plastic_kinehardening_deltaState(Tstar_v,ipc,ip,el) instance = phase_plasticityInstance(ph) call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Tstar_v,ph,instance,of) + Mp,ph,instance,of) sense = merge(state(instance)%sense(:,of), & ! keep existing... sign(1.0_pReal,gdot_pos+gdot_neg), & ! ...or have a defined dEq0(gdot_pos+gdot_neg,1e-10_pReal)) ! 
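With both the stress and the Schmid matrices kept as 3x3 tensors, the resolved shear stress becomes the double contraction tau = M_ij S_ij instead of a Mandel 6-vector dot product. A minimal sketch of the helper, assuming math_mul33xx33 does nothing beyond this contraction:

  pure function math_mul33xx33(A,B)
    real(pReal), dimension(3,3), intent(in) :: A,B
    real(pReal) :: math_mul33xx33
    math_mul33xx33 = sum(A*B)            ! A_ij B_ij, summed over both indices
  end function math_mul33xx33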
current sense of shear direction @@ -781,7 +782,7 @@ end subroutine plastic_kinehardening_deltaState !-------------------------------------------------------------------------------------------------- !> @brief calculates the rate of change of microstructure !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_dotState(Tstar_v,ipc,ip,el) +subroutine plastic_kinehardening_dotState(Mp,ipc,ip,el) use lattice, only: & lattice_maxNslipFamily use material, only: & @@ -790,8 +791,8 @@ subroutine plastic_kinehardening_dotState(Tstar_v,ipc,ip,el) phase_plasticityInstance implicit none - real(pReal), dimension(6), intent(in) :: & - Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation, vector form + real(pReal), dimension(3,3), intent(in) :: & + Mp integer(pInt), intent(in) :: & ipc, & !< component-ID of integration point ip, & !< integration point @@ -815,7 +816,7 @@ subroutine plastic_kinehardening_dotState(Tstar_v,ipc,ip,el) dotState(instance)%sumGamma(of) = 0.0_pReal call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Tstar_v,ph,instance,of) + Mp,ph,instance,of) j = 0_pInt slipFamilies: do f = 1_pInt,lattice_maxNslipFamily @@ -848,19 +849,20 @@ end subroutine plastic_kinehardening_dotState !-------------------------------------------------------------------------------------------------- !> @brief return array of constitutive results !-------------------------------------------------------------------------------------------------- -function plastic_kinehardening_postResults(Tstar_v,ipc,ip,el) +function plastic_kinehardening_postResults(Mp,ipc,ip,el) + use math use material, only: & material_phase, & phaseAt, phasememberAt, & phase_plasticityInstance use lattice, only: & - lattice_Sslip_v, & + lattice_Sslip, & lattice_maxNslipFamily, & lattice_NslipSystem implicit none - real(pReal), dimension(6), intent(in) :: & - Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation + real(pReal), dimension(3,3), intent(in) :: & + Mp integer(pInt), intent(in) :: & ipc, & !< component-ID of integration point ip, & !< integration point @@ -889,7 +891,7 @@ function plastic_kinehardening_postResults(Tstar_v,ipc,ip,el) c = 0_pInt call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Tstar_v,ph,instance,of) + Mp,ph,instance,of) outputsLoop: do o = 1_pInt,plastic_kinehardening_Noutput(instance) select case(param(instance)%outputID(o)) @@ -932,7 +934,7 @@ function plastic_kinehardening_postResults(Tstar_v,ipc,ip,el) slipSystems: do i = 1_pInt,plastic_kinehardening_Nslip(f,instance) j = j + 1_pInt plastic_kinehardening_postResults(c+j) = & - dot_product(Tstar_v,lattice_Sslip_v(1:6,1,index_myFamily+i,ph)) + math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,1,index_myFamily+i,ph)) enddo slipSystems enddo slipFamilies c = c + nSlip From c7fb868b40b9846a5057325837d4434b792ba197 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 23 Nov 2018 07:01:04 +0100 Subject: [PATCH 28/89] state layout was broken --- src/plastic_kinematichardening.f90 | 1 + 1 file changed, 1 insertion(+) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 858e71a84..30088ac4f 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -424,6 +424,7 @@ subroutine plastic_kinehardening_init(fileUnit) call material_allocatePlasticState(phase,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & nSlip,0_pInt,0_pInt) + 
plasticState(phase)%offsetDeltaState = sizeDotState plasticState(phase)%sizePostResults = plastic_kinehardening_sizePostResults(instance) offset_slip = plasticState(phase)%nSlip+plasticState(phase)%nTwin+2_pInt From 0e55bd61401c5bbe3b7e89d037c807375314caa1 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Fri, 23 Nov 2018 15:49:43 +0100 Subject: [PATCH 29/89] started read parallelization but getting errors --- src/HDF5_utilities.f90 | 71 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 64 insertions(+), 7 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index cda585363..500be4278 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -374,7 +374,10 @@ end subroutine HDF5_read_pReal_4 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of the type pReal with 5 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) +subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset @@ -382,16 +385,70 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) + logical, intent(in), optional :: parallel + integer :: ierr + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + + integer(HDF5_ERR_TYPE) :: hdferr + integer(HSIZE_T), dimension(5) :: myStart + + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id + + myShape = shape(dataset) + + + localShape = shape(dataset) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(5) + +!>>>>>>>>>!New additions + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + write(6,*) plist_id +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal5: MPI_allreduce') + endif; endif +#endif + + myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:4),sum(readSize)] + +!>>>>>>>>>!New additions +!------------------------------------------------------------------------------------------------- +! Open the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr) if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) +!------------------------------------------------------------------------------------------------- +! get the dataspace_id of the dataset + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_5: h5dget_space_f') +!------------------------------------------------------------------------------------------------- +! 
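As committed, the read path below selects a hyperslab in the file but passes no memory dataspace, and debug write(6,*) statements remain, consistent with the 'getting errors' caveat in the commit message. A sketch of the piece that appears to be missing, assuming the read should mirror the write:

  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr)      ! describe the local buffer
  call h5dread_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(localShape,HSIZE_T), hdferr, &
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)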
select hyperslab (part to be read by the current process) + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart,int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_5: h5sselect_hyperslab_f') + write(6,*) filespace_id +!------------------------------------------------------------------------------------------------- +! read the part of the file + call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr, & + file_space_id = filespace_id, xfer_prp = plist_id) if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dclose_f') + +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dclose_f') + !call h5sclose_f(filespace_id, hdferr) + !if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/filespace_id') + !call h5sclose_f(memspace_id, hdferr) + !if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal_5 From e7581f06d95a0062211a9dbd5789bd1fda3f7507 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Fri, 23 Nov 2018 17:54:02 +0100 Subject: [PATCH 30/89] Fixed error with parallel write --- src/HDF5_utilities.f90 | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index cda585363..57c505645 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -381,11 +381,18 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape + + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id myShape = shape(dataset) + +!-------------------------------------------------------------------------------------------------- +!creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) From c0ce95183c1927aa42c3b00a7782420e38a2aa2c Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 25 Nov 2018 10:59:36 +0100 Subject: [PATCH 31/89] can be easily calculated during post processing and does not have to be a state --- src/plastic_kinematichardening.f90 | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 30088ac4f..0fc1d1464 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -40,7 +40,6 @@ module plastic_kinehardening chi0_ID, & !< backstress at last switch of stress sense (positive?) gamma0_ID, & !< accumulated shear at last switch of stress sense (at current switch?) 
accshear_ID, & - sumGamma_ID, & shearrate_ID, & resolvedstress_ID @@ -260,9 +259,6 @@ subroutine plastic_kinehardening_init(fileUnit) case ('accumulatedshear') output_ID = accshear_ID - case ('totalshear') - output_ID = sumGamma_ID - case ('shearrate') output_ID = shearrate_ID @@ -399,8 +395,6 @@ subroutine plastic_kinehardening_init(fileUnit) shearrate_ID, & resolvedstress_ID) mySize = nSlip - case(sumGamma_ID) - mySize = 1_pInt case default end select @@ -920,10 +914,6 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = state(instance)%accshear(:,of) c = c + nSlip - case (sumGamma_ID) - plastic_kinehardening_postResults(c+1_pInt) = state(instance)%sumGamma(of) - c = c + 1_pInt - case (shearrate_ID) plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = gdot_pos+gdot_neg c = c + nSlip From 917453d1918fcb3af6889703b5ed0cf31cd017b2 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 25 Nov 2018 11:14:09 +0100 Subject: [PATCH 32/89] polishing --- src/plastic_kinematichardening.f90 | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 0fc1d1464..457ec3b48 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -1,9 +1,9 @@ !-------------------------------------------------------------------------------------------------- !> @author Philip Eisenlohr, Michigan State University !> @author Zhuowen Zhao, Michigan State University -!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH -!> @brief Introducing Voce-type kinematic hardening rule into crystal plasticity -!! formulation using a power law fitting +!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH +!> @brief Phenomenological crystal plasticity using a power law formulation for the shear rates +!! and a Voce-type kinematic hardening rule !-------------------------------------------------------------------------------------------------- module plastic_kinehardening use prec, only: & @@ -33,19 +33,19 @@ module plastic_kinehardening enum, bind(c) - enumerator :: undefined_ID, & - crss_ID, & !< critical resolved stress - crss_back_ID, & !< critical resolved back stress - sense_ID, & !< sense of acting shear stress (-1 or +1) - chi0_ID, & !< backstress at last switch of stress sense (positive?) - gamma0_ID, & !< accumulated shear at last switch of stress sense (at current switch?) - accshear_ID, & - shearrate_ID, & - resolvedstress_ID - + enumerator :: & + undefined_ID, & + crss_ID, & !< critical resolved stress + crss_back_ID, & !< critical resolved back stress + sense_ID, & !< sense of acting shear stress (-1 or +1) + chi0_ID, & !< backstress at last switch of stress sense (positive?) + gamma0_ID, & !< accumulated shear at last switch of stress sense (at current switch?) 
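Removing sumGamma (and the 'totalshear' output) from the state drops no information: the total shear is the sum of the per-system accumulated shears and can be rebuilt in post-processing. A one-line sketch, assuming accshear keeps the per-system values:

  sumGamma = sum(state(instance)%accshear(:,of))     ! recovers the former 'totalshear' result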
+ accshear_ID, & + shearrate_ID, & + resolvedstress_ID end enum - - + + type, private :: tParameters !< container type for internal constitutive parameters integer(kind(undefined_ID)), dimension(:), allocatable, private :: & outputID !< ID of each post result output From 6f93f8de04ffe49a34c5f95fa6f7a504703042f2 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 25 Nov 2018 19:37:46 +0100 Subject: [PATCH 33/89] starting to introduce parallel structure for new style parameter reading --- src/plastic_kinematichardening.f90 | 105 +++++++++++++++++++++++++++-- 1 file changed, 98 insertions(+), 7 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 457ec3b48..11f49202a 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -47,9 +47,6 @@ module plastic_kinehardening type, private :: tParameters !< container type for internal constitutive parameters - integer(kind(undefined_ID)), dimension(:), allocatable, private :: & - outputID !< ID of each post result output - real(pReal) :: & gdot0, & !< reference shear strain rate for slip (input parameter) n_slip, & !< stress exponent for slip (input parameter) @@ -67,9 +64,21 @@ module plastic_kinehardening tau1_b, & interaction_slipslip, & !< latent hardening matrix nonSchmidCoeff + + real(pReal), allocatable, dimension(:,:,:) :: & + Schmid_slip, & + Schmid_twin, & + nonSchmid_pos, & + nonSchmid_neg real(pReal), dimension(:,:), allocatable, private :: & hardeningMatrix_SlipSlip + integer(pInt) :: & + totalNslip !< total number of active slip system + integer(pInt), allocatable, dimension(:) :: & + Nslip !< number of active slip systems for each family + integer(kind(undefined_ID)), allocatable, dimension(:) :: & + outputID !< ID of each post result output end type type, private :: tKinehardeningState @@ -86,8 +95,9 @@ module plastic_kinehardening end type type(tParameters), dimension(:), allocatable, private :: & - param !< containers of constitutive parameters (len Ninstance) - + param, & !< containers of constitutive parameters (len Ninstance) + paramNew ! 
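The reworded module header names the two ingredients of the model explicitly; in symbols, the flow rule being described is (a sketch with generic symbols, the code's parameter names differ):

  \dot\gamma^\alpha = \dot\gamma_0 \left|\frac{\tau^\alpha - \chi^\alpha}{\tau_c^\alpha}\right|^{n}\,\mathrm{sgn}\!\left(\tau^\alpha - \chi^\alpha\right)

with tau^alpha the resolved shear stress, chi^alpha the back stress evolving by a Voce-type rule, and tau_c^alpha the critical resolved shear stress of slip system alpha.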
temp + type(tKinehardeningState), allocatable, dimension(:), private :: & dotState, & deltaState, & @@ -148,6 +158,7 @@ subroutine plastic_kinehardening_init(fileUnit) material_phase, & plasticState use config, only: & + config_phase, & MATERIAL_partPhase use lattice @@ -158,11 +169,12 @@ subroutine plastic_kinehardening_init(fileUnit) integer(kind(undefined_ID)) :: & output_ID integer(pInt) :: & - o, j, k, f, & + o, i,j, k, f, p, & phase, & instance, & maxNinstance, & NipcMyPhase, & + outputSize, & Nchunks_SlipSlip = 0_pInt, Nchunks_SlipFamilies = 0_pInt, & Nchunks_nonSchmid = 0_pInt, & offset_slip, index_myFamily, index_otherFamily, & @@ -172,12 +184,21 @@ subroutine plastic_kinehardening_init(fileUnit) sizeState, & sizeDeltaState + integer(pInt), dimension(0), parameter :: emptyIntArray = [integer(pInt)::] + real(pReal), dimension(0), parameter :: emptyRealArray = [real(pReal)::] + character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::] + real(pReal), dimension(:), allocatable :: tempPerSlip + integer(kind(undefined_ID)) :: & + outputID !< ID of each post result output + character(len=65536), dimension(:), allocatable :: & + outputs character(len=65536) :: & tag = '', & line = '', & - extmsg = '' + extmsg = '', & + structure = '' write(6,'(/,a)') ' <<<+- constitutive_'//PLASTICITY_KINEHARDENING_label//' init -+>>>' write(6,'(a15,a)') ' Current time: ',IO_timeStamp() @@ -198,7 +219,77 @@ subroutine plastic_kinehardening_init(fileUnit) allocate(plastic_kinehardening_Nslip(lattice_maxNslipFamily,maxNinstance), source=0_pInt) allocate(plastic_kinehardening_totalNslip(maxNinstance), source=0_pInt) allocate(param(maxNinstance)) ! one container of parameters per instance + allocate(paramNew(maxNinstance)) ! one container of parameters per instance + + do p = 1_pInt, size(phase_plasticityInstance) + if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) cycle + associate(prm => paramNew(phase_plasticityInstance(p)), & + dot => dotState(phase_plasticityInstance(p)), & + stt => state(phase_plasticityInstance(p))) + + structure = config_phase(p)%getString('lattice_structure') + +!-------------------------------------------------------------------------------------------------- +! optional parameters that need to be defined + prm%aTolResistance = config_phase(p)%getFloat('atol_resistance',defaultVal=1.0_pReal) + prm%aTolShear = config_phase(p)%getFloat('atol_shear', defaultVal=1.0e-6_pReal) + +!-------------------------------------------------------------------------------------------------- +! slip related parameters + prm%Nslip = config_phase(p)%getInts('nslip',defaultVal=emptyIntArray) + prm%totalNslip = sum(prm%Nslip) + slipActive: if (prm%totalNslip > 0_pInt) then + prm%Schmid_slip = lattice_SchmidMatrix_slip(prm%Nslip,structure(1:3),& + config_phase(p)%getFloat('c/a',defaultVal=0.0_pReal)) + if(structure=='bcc') then + prm%nonSchmidCoeff = config_phase(p)%getFloats('nonschmid_coefficients',& + defaultVal = emptyRealArray) + prm%nonSchmid_pos = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,+1_pInt) + prm%nonSchmid_neg = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,-1_pInt) + else + prm%nonSchmid_pos = prm%Schmid_slip + prm%nonSchmid_neg = prm%Schmid_slip + endif + !prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & + ! config_phase(p)%getFloats('interaction_slipslip'), & + ! structure(1:3)) + endif slipActive + +!-------------------------------------------------------------------------------------------------- +! 
output parameters
+    outputs = config_phase(p)%getStrings('(output)',defaultVal=emptyStringArray)
+    allocate(prm%outputID(0))
+    do i=1_pInt, size(outputs)
+       outputID = undefined_ID
+       select case(outputs(i))
+         case ('resistance')
+           outputID = merge(crss_ID,undefined_ID,prm%totalNslip>0_pInt)
+           outputSize = prm%totalNslip
+         case ('accumulatedshear')
+           outputID = merge(accshear_ID,undefined_ID,prm%totalNslip>0_pInt)
+           outputSize = prm%totalNslip
+         case ('shearrate')
+           outputID = merge(shearrate_ID,undefined_ID,prm%totalNslip>0_pInt)
+           outputSize = prm%totalNslip
+         case ('resolvedstress')
+           outputID = merge(resolvedstress_ID,undefined_ID,prm%totalNslip>0_pInt)
+           outputSize = prm%totalNslip
+
+       end select
+
+       if (outputID /= undefined_ID) then
+         plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i)
+         plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = outputSize
+         prm%outputID = [prm%outputID , outputID]
+      endif
+
+    end do
+
+  end associate
+ end do
+
+
 rewind(fileUnit)
 phase = 0_pInt
 do while (trim(line) /= IO_EOF .and. IO_lc(IO_getTag(line,'<','>')) /= material_partPhase) ! wind forward to

From 9f81fa8e9a902af78de75e8638f9322cb4075f29 Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Sun, 25 Nov 2018 21:39:59 +0100
Subject: [PATCH 34/89] prevent segmentation fault; make post_results better
 readable

---
 src/plastic_kinematichardening.f90 | 33 +++++++++++++++--------------
 1 file changed, 17 insertions(+), 16 deletions(-)

diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90
index 11f49202a..bd97c7de3 100644
--- a/src/plastic_kinematichardening.f90
+++ b/src/plastic_kinematichardening.f90
@@ -219,7 +219,11 @@ subroutine plastic_kinehardening_init(fileUnit)
 allocate(plastic_kinehardening_Nslip(lattice_maxNslipFamily,maxNinstance), source=0_pInt)
 allocate(plastic_kinehardening_totalNslip(maxNinstance), source=0_pInt)
 allocate(param(maxNinstance)) ! one container of parameters per instance
- allocate(paramNew(maxNinstance)) ! one container of parameters per instance
+ allocate(paramNew(maxNinstance))
+ allocate(state(maxNinstance))
+ allocate(state0(maxNinstance))
+ allocate(dotState(maxNinstance))
+ allocate(deltaState(maxNinstance))

 do p = 1_pInt, size(phase_plasticityInstance)
 if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) cycle
@@ -435,10 +439,7 @@ subroutine plastic_kinehardening_init(fileUnit)

!--------------------------------------------------------------------------------------------------
! allocation of variables whose size depends on the total number of active slip systems
- allocate(state(maxNinstance))
- allocate(state0(maxNinstance))
- allocate(dotState(maxNinstance))
- allocate(deltaState(maxNinstance))
+

 initializeInstances: do phase = 1_pInt, size(phase_plasticity) ! 
loop through all phases in material.config @@ -935,7 +936,7 @@ end subroutine plastic_kinehardening_dotState !-------------------------------------------------------------------------------------------------- !> @brief return array of constitutive results !-------------------------------------------------------------------------------------------------- -function plastic_kinehardening_postResults(Mp,ipc,ip,el) +function plastic_kinehardening_postResults(Mp,ipc,ip,el) result(postResults) use math use material, only: & material_phase, & @@ -955,7 +956,7 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) el !< element !< microstructure state real(pReal), dimension(plastic_kinehardening_sizePostResults(phase_plasticityInstance(material_phase(ipc,ip,el)))) :: & - plastic_kinehardening_postResults + postResults integer(pInt) :: & instance,ph, of, & @@ -973,7 +974,7 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) nSlip = plastic_kinehardening_totalNslip(instance) - plastic_kinehardening_postResults = 0.0_pReal + postResults = 0.0_pReal c = 0_pInt call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & @@ -982,31 +983,31 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) outputsLoop: do o = 1_pInt,plastic_kinehardening_Noutput(instance) select case(param(instance)%outputID(o)) case (crss_ID) - plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = state(instance)%crss(:,of) + postResults(c+1_pInt:c+nSlip) = state(instance)%crss(:,of) c = c + nSlip case(crss_back_ID) - plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = state(instance)%crss_back(:,of) + postResults(c+1_pInt:c+nSlip) = state(instance)%crss_back(:,of) c = c + nSlip case (sense_ID) - plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = state(instance)%sense(:,of) + postResults(c+1_pInt:c+nSlip) = state(instance)%sense(:,of) c = c + nSlip case (chi0_ID) - plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = state(instance)%chi0(:,of) + postResults(c+1_pInt:c+nSlip) = state(instance)%chi0(:,of) c = c + nSlip case (gamma0_ID) - plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = state(instance)%gamma0(:,of) + postResults(c+1_pInt:c+nSlip) = state(instance)%gamma0(:,of) c = c + nSlip case (accshear_ID) - plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = state(instance)%accshear(:,of) + postResults(c+1_pInt:c+nSlip) = state(instance)%accshear(:,of) c = c + nSlip case (shearrate_ID) - plastic_kinehardening_postResults(c+1_pInt:c+nSlip) = gdot_pos+gdot_neg + postResults(c+1_pInt:c+nSlip) = gdot_pos+gdot_neg c = c + nSlip case (resolvedstress_ID) @@ -1015,7 +1016,7 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) index_myFamily = sum(lattice_NslipSystem(1:f-1_pInt,ph)) ! 
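!--------------------------------------------------------------------------------------------------
! [editor's note, not part of the patch] Two readability devices in the rewrite above: the
! function result is renamed via result(postResults), so the body assigns to a short local name
! instead of repeating the full function name, and the running counter c packs each requested
! output into the next nSlip entries of the flat result array before being advanced.
!--------------------------------------------------------------------------------------------------
! 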
at which index starts my family slipSystems: do i = 1_pInt,plastic_kinehardening_Nslip(f,instance) j = j + 1_pInt - plastic_kinehardening_postResults(c+j) = & + postResults(c+j) = & math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,1,index_myFamily+i,ph)) enddo slipSystems enddo slipFamilies From 4c46f3daa591df7603bf6d0acdb6b9bfd47a1067 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 26 Nov 2018 00:14:27 +0100 Subject: [PATCH 35/89] adopting argument parsing to Phenopowerlaw --- src/plastic_kinematichardening.f90 | 184 +++++++++++++++-------------- 1 file changed, 93 insertions(+), 91 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index bd97c7de3..6fb469b92 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -229,6 +229,7 @@ subroutine plastic_kinehardening_init(fileUnit) if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) cycle associate(prm => paramNew(phase_plasticityInstance(p)), & dot => dotState(phase_plasticityInstance(p)), & + delta => deltaState(phase_plasticityInstance(p)), & stt => state(phase_plasticityInstance(p))) structure = config_phase(p)%getString('lattice_structure') @@ -238,6 +239,10 @@ subroutine plastic_kinehardening_init(fileUnit) prm%aTolResistance = config_phase(p)%getFloat('atol_resistance',defaultVal=1.0_pReal) prm%aTolShear = config_phase(p)%getFloat('atol_shear', defaultVal=1.0e-6_pReal) + ! sanity checks + if (prm%aTolResistance <= 0.0_pReal) extmsg = trim(extmsg)//'aTolresistance ' + if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//'aTolShear ' + !-------------------------------------------------------------------------------------------------- ! slip related parameters prm%Nslip = config_phase(p)%getInts('nslip',defaultVal=emptyIntArray) @@ -254,6 +259,16 @@ subroutine plastic_kinehardening_init(fileUnit) prm%nonSchmid_pos = prm%Schmid_slip prm%nonSchmid_neg = prm%Schmid_slip endif + + prm%crss0 = config_phase(p)%getFloats('crss0', requiredShape=shape(prm%Nslip)) + prm%tau1 = config_phase(p)%getFloats('tau1', requiredShape=shape(prm%Nslip)) + prm%tau1_b = config_phase(p)%getFloats('tau1_b', requiredShape=shape(prm%Nslip)) + prm%theta0 = config_phase(p)%getFloats('theta0', requiredShape=shape(prm%Nslip)) + prm%theta1 = config_phase(p)%getFloats('theta1', requiredShape=shape(prm%Nslip)) + prm%theta0_b = config_phase(p)%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) + prm%theta1_b = config_phase(p)%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) + + !prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & ! config_phase(p)%getFloats('interaction_slipslip'), & ! structure(1:3)) @@ -282,13 +297,85 @@ subroutine plastic_kinehardening_init(fileUnit) end select - if (outputID /= undefined_ID) then - plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i) - plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = outputSize - prm%outputID = [prm%outputID , outputID] - endif + !if (outputID /= undefined_ID) then + ! plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i) + ! plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = outputSize + ! prm%outputID = [prm%outputID , outputID] + !endif end do + nslip = prm%totalNslip +!-------------------------------------------------------------------------------------------------- +! 
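!--------------------------------------------------------------------------------------------------
! [editor's note, not part of the patch] The config_phase(p)%get* calls above pull keywords from
! the phase section of material.config. A hypothetical section that this new-style parser would
! accept (keys taken from the calls in this file; all values purely illustrative):
!
!   [BCC_example]
!   plasticity           kinehardening
!   lattice_structure    bcc
!   nslip                12
!   crss0                100.0e6
!   tau1                 50.0e6
!   tau1_b               50.0e6
!   theta0               1.0e9
!   theta1               1.0e8
!   theta0_b             1.0e9
!   theta1_b             1.0e8
!   gdot0                0.001
!   n_slip               20
!   atol_resistance      1.0
!   (output)             resistance
!   (output)             accumulatedshear
!
! requiredShape=shape(prm%Nslip) enforces one value per active slip family for crss0..theta1_b.
!--------------------------------------------------------------------------------------------------
! 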
allocate state arrays + sizeDotState = nSlip & !< crss + + nSlip & !< crss_back + + nSlip & !< accumulated (absolute) shear + + 1_pInt !< sum(gamma) + + sizeDeltaState = nSlip & !< sense of acting shear stress (-1 or +1) + + nSlip & !< backstress at last switch of stress sense + + nSlip !< accumulated shear at last switch of stress sense + + sizeState = sizeDotState + sizeDeltaState + NipcMyPhase = count(material_phase == p) ! number of IPCs containing my phase + call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & + nSlip,0_pInt,0_pInt) + plasticState(p)%offsetDeltaState = sizeDotState + + + endindex = 0_pInt + o = endIndex ! offset of dotstate index relative to state index + + startIndex = endIndex + 1_pInt + endIndex = endIndex + nSlip + stt%crss => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) + dot%crss => plasticState(p)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) + plasticState(p)%aTolState(startIndex-o:endIndex-o) = prm%aTolResistance + +! ............................................. + startIndex = endIndex + 1_pInt + endIndex = endIndex + nSlip + stt%crss_back => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) + dot%crss_back => plasticState(p)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) + plasticState(p)%aTolState(startIndex-o:endIndex-o) = prm%aTolResistance + +! ............................................. + startIndex = endIndex + 1_pInt + endIndex = endIndex + nSlip + stt%accshear => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) + dot%accshear => plasticState(p)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) + plasticState(p)%aTolState(startIndex-o:endIndex-o) = prm%aTolShear + +! ............................................. + startIndex = endIndex + 1_pInt + endIndex = endIndex + 1_pInt + stt%sumGamma => plasticState(p)%state (startIndex ,1:NipcMyPhase) + dot%sumGamma => plasticState(p)%dotState (startIndex-o ,1:NipcMyPhase) + plasticState(p)%aTolState(startIndex-o:endIndex-o) =prm%aTolShear + +!---------------------------------------------------------------------------------------------- +!locally define deltaState alias + o = endIndex + +! ............................................. + startIndex = endIndex + 1_pInt + endIndex = endIndex + nSlip + stt%sense => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) + delta%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) + +! ............................................. + startIndex = endIndex + 1_pInt + endIndex = endIndex + nSlip + stt%chi0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) + delta%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) + + +! ............................................. + startIndex = endIndex + 1_pInt + endIndex = endIndex + nSlip + stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) + delta%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) + end associate end do @@ -464,9 +551,7 @@ subroutine plastic_kinehardening_init(fileUnit) if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & .and. param(instance)%tau1_b(1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' if (param(instance)%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' - if (param(instance)%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' - if (param(instance)%aTolResistance <= 0.0_pReal) param(instance)%aTolResistance = 1.0_pReal ! 
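!--------------------------------------------------------------------------------------------------
! [editor's note, not part of the patch] Note the change in failure policy: the lines removed
! here silently reset an out-of-range tolerance to its default, whereas the new-style sanity
! checks above append the offending keyword to extmsg and abort through IO_error, so invalid
! input now fails loudly instead of being corrected behind the user's back.
!--------------------------------------------------------------------------------------------------
! 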
default absolute tolerance 1 Pa - if (param(instance)%aTolShear <= 0.0_pReal) param(instance)%aTolShear = 1.0e-6_pReal ! default absolute tolerance 1e-6 + if (param(instance)%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' if (extmsg /= '') then extmsg = trim(extmsg)//' ('//PLASTICITY_KINEHARDENING_label//')' ! prepare error message identifier call IO_error(211_pInt,ip=instance,ext_msg=extmsg) @@ -495,22 +580,7 @@ subroutine plastic_kinehardening_init(fileUnit) plastic_kinehardening_sizePostResults(instance) = plastic_kinehardening_sizePostResults(instance) + mySize endif outputFound enddo outputsLoop -!-------------------------------------------------------------------------------------------------- -! allocate state arrays - sizeDotState = nSlip & !< crss - + nSlip & !< crss_back - + nSlip & !< accumulated (absolute) shear - + 1_pInt !< sum(gamma) - - sizeDeltaState = nSlip & !< sense of acting shear stress (-1 or +1) - + nSlip & !< backstress at last switch of stress sense - + nSlip !< accumulated shear at last switch of stress sense - sizeState = sizeDotState + sizeDeltaState - - call material_allocatePlasticState(phase,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & - nSlip,0_pInt,0_pInt) - plasticState(phase)%offsetDeltaState = sizeDotState plasticState(phase)%sizePostResults = plastic_kinehardening_sizePostResults(instance) offset_slip = plasticState(phase)%nSlip+plasticState(phase)%nTwin+2_pInt @@ -534,84 +604,16 @@ subroutine plastic_kinehardening_init(fileUnit) enddo; enddo enddo; enddo -!---------------------------------------------------------------------------------------------- -!locally define dotState alias - endindex = 0_pInt o = endIndex ! offset of dotstate index relative to state index startIndex = endIndex + 1_pInt endIndex = endIndex + nSlip - state (instance)%crss => plasticState(phase)%state (startIndex :endIndex ,1:NipcMyPhase) state0 (instance)%crss => plasticState(phase)%state0 (startIndex :endIndex ,1:NipcMyPhase) - dotState(instance)%crss => plasticState(phase)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) state0(instance)%crss = spread(math_expand(param(instance)%crss0,& plastic_kinehardening_Nslip(:,instance)), & 2, NipcMyPhase) - plasticState(phase)%aTolState(startIndex-o:endIndex-o) = param(instance)%aTolResistance - -! ............................................. - startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip - state (instance)%crss_back => plasticState(phase)%state (startIndex :endIndex ,1:NipcMyPhase) - state0 (instance)%crss_back => plasticState(phase)%state0 (startIndex :endIndex ,1:NipcMyPhase) - dotState(instance)%crss_back => plasticState(phase)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) - - state0(instance)%crss_back = 0.0_pReal - plasticState(phase)%aTolState(startIndex-o:endIndex-o) = param(instance)%aTolResistance - -! ............................................. - startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip - state (instance)%accshear => plasticState(phase)%state (startIndex :endIndex ,1:NipcMyPhase) - state0 (instance)%accshear => plasticState(phase)%state0 (startIndex :endIndex ,1:NipcMyPhase) - dotState(instance)%accshear => plasticState(phase)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) - - state0(instance)%accshear = 0.0_pReal - plasticState(phase)%aTolState(startIndex-o:endIndex-o) = param(instance)%aTolShear - -! ............................................. 
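!--------------------------------------------------------------------------------------------------
! [editor's note, not part of the patch] The startIndex/endIndex blocks (added in the new-style
! init above, removed from the old one here) map named state variables onto contiguous slices of
! one flat plasticState array, so the integrator advances a single array while the constitutive
! code works with meaningful names. A minimal self-contained sketch of the technique (names
! illustrative, not the DAMASK API):
!
!   real(pReal), dimension(:,:), allocatable, target :: flatState          ! (sizeState,Nipc)
!   real(pReal), dimension(:,:), pointer             :: crss, crss_back
!
!   allocate(flatState(2*nSlip,Nipc), source=0.0_pReal)
!   crss      => flatState(      1:  nSlip,:)                              ! first nSlip rows
!   crss_back => flatState(nSlip+1:2*nSlip,:)                              ! next nSlip rows
!
! Writing through crss or crss_back updates flatState in place; dotState and deltaState use the
! same pattern shifted by the offset o, keeping rate and increment entries index-aligned with
! their state entries.
!--------------------------------------------------------------------------------------------------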
- startIndex = endIndex + 1_pInt - endIndex = endIndex + 1_pInt - state (instance)%sumGamma => plasticState(phase)%state (startIndex ,1:NipcMyPhase) - state0 (instance)%sumGamma => plasticState(phase)%state0 (startIndex ,1:NipcMyPhase) - dotState(instance)%sumGamma => plasticState(phase)%dotState (startIndex-o ,1:NipcMyPhase) - - state0(instance)%sumGamma = 0.0_pReal - plasticState(phase)%aTolState(startIndex-o:endIndex-o) = param(instance)%aTolShear - -!---------------------------------------------------------------------------------------------- -!locally define deltaState alias - o = endIndex - -! ............................................. - startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip - state (instance)%sense => plasticState(phase)%state (startIndex :endIndex ,1:NipcMyPhase) - state0 (instance)%sense => plasticState(phase)%state0 (startIndex :endIndex ,1:NipcMyPhase) - deltaState(instance)%sense => plasticState(phase)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) - - state0(instance)%sense = 0.0_pReal - -! ............................................. - startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip - state (instance)%chi0 => plasticState(phase)%state (startIndex :endIndex ,1:NipcMyPhase) - state0 (instance)%chi0 => plasticState(phase)%state0 (startIndex :endIndex ,1:NipcMyPhase) - deltaState(instance)%chi0 => plasticState(phase)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) - - state0(instance)%chi0 = 0.0_pReal - -! ............................................. - startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip - state (instance)%gamma0 => plasticState(phase)%state (startIndex :endIndex ,1:NipcMyPhase) - state0 (instance)%gamma0 => plasticState(phase)%state0 (startIndex :endIndex ,1:NipcMyPhase) - deltaState(instance)%gamma0 => plasticState(phase)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) - - state0(instance)%gamma0 = 0.0_pReal - endif myPhase2 enddo initializeInstances From 8f59a40f482ef1a0e6d32c6007d951e77c751172 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 26 Nov 2018 01:34:38 +0100 Subject: [PATCH 36/89] using new parameter structure for output --- src/plastic_kinematichardening.f90 | 102 +++++++---------------------- 1 file changed, 23 insertions(+), 79 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 6fb469b92..5db5a3b5a 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -12,9 +12,6 @@ module plastic_kinehardening implicit none private - integer(pInt), dimension(:), allocatable, public, protected :: & - plastic_kinehardening_sizePostResults !< cumulative size of post results - integer(pInt), dimension(:,:), allocatable, target, public :: & plastic_kinehardening_sizePostResult !< size of each post result output @@ -210,7 +207,6 @@ subroutine plastic_kinehardening_init(fileUnit) if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0_pInt) & write(6,'(a,1x,i5,/)') '# instances:',maxNinstance - allocate(plastic_kinehardening_sizePostResults(maxNinstance), source=0_pInt) allocate(plastic_kinehardening_sizePostResult(maxval(phase_Noutput),maxNinstance), & source=0_pInt) allocate(plastic_kinehardening_output(maxval(phase_Noutput),maxNinstance)) @@ -224,9 +220,9 @@ subroutine plastic_kinehardening_init(fileUnit) allocate(state0(maxNinstance)) allocate(dotState(maxNinstance)) allocate(deltaState(maxNinstance)) - do p = 1_pInt, size(phase_plasticityInstance) if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) 
cycle + instance = phase_plasticityInstance(p) ! which instance of my phase associate(prm => paramNew(phase_plasticityInstance(p)), & dot => dotState(phase_plasticityInstance(p)), & delta => deltaState(phase_plasticityInstance(p)), & @@ -294,16 +290,30 @@ subroutine plastic_kinehardening_init(fileUnit) case ('resolvedstress') outputID = merge(resolvedstress_ID,undefined_ID,prm%totalNslip>0_pInt) outputSize = prm%totalNslip + case ('backstress') + outputID = merge(crss_back_ID,undefined_ID,prm%totalNslip>0_pInt) + outputSize = prm%totalNslip + case ('sense') + outputID = merge(sense_ID,undefined_ID,prm%totalNslip>0_pInt) + outputSize = prm%totalNslip + case ('chi0') + outputID = merge(chi0_ID,undefined_ID,prm%totalNslip>0_pInt) + outputSize = prm%totalNslip + case ('gamma0') + outputID = merge(gamma0_ID,undefined_ID,prm%totalNslip>0_pInt) + outputSize = prm%totalNslip end select - !if (outputID /= undefined_ID) then - ! plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i) - ! plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = outputSize - ! prm%outputID = [prm%outputID , outputID] - !endif + if (outputID /= undefined_ID) then + plastic_kinehardening_Noutput(instance) = plastic_kinehardening_Noutput(instance) + 1_pInt + plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i) + plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = outputSize + prm%outputID = [prm%outputID , outputID] + endif end do +param(instance)%outputID = prm%outputID nslip = prm%totalNslip !-------------------------------------------------------------------------------------------------- ! allocate state arrays @@ -320,6 +330,7 @@ subroutine plastic_kinehardening_init(fileUnit) NipcMyPhase = count(material_phase == p) ! number of IPCs containing my phase call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & nSlip,0_pInt,0_pInt) + plasticState(p)%sizePostResults = sum(plastic_kinehardening_sizePostResult(:,phase_plasticityInstance(p))) plasticState(p)%offsetDeltaState = sizeDotState @@ -412,7 +423,6 @@ subroutine plastic_kinehardening_init(fileUnit) allocate(param(instance)%nonSchmidCoeff(Nchunks_nonSchmid), source=0.0_pReal) if(allocated(tempPerSlip)) deallocate(tempPerSlip) allocate(tempPerSlip(Nchunks_SlipFamilies)) - allocate(param(instance)%outputID(0)) endif cycle ! skip to next line endif @@ -420,41 +430,7 @@ subroutine plastic_kinehardening_init(fileUnit) chunkPos = IO_stringPos(line) tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt)) ! extract key select case(tag) - case ('(output)') - output_ID = undefined_ID - select case(IO_lc(IO_stringValue(line,chunkPos,2_pInt))) - case ('resistance') - output_ID = crss_ID - case ('backstress') - output_ID = crss_back_ID - - case ('sense') - output_ID = sense_ID - - case ('chi0') - output_ID = chi0_ID - - case ('gamma0') - output_ID = gamma0_ID - - case ('accumulatedshear') - output_ID = accshear_ID - - case ('shearrate') - output_ID = shearrate_ID - - case ('resolvedstress') - output_ID = resolvedstress_ID - - end select - - if (output_ID /= undefined_ID) then - plastic_kinehardening_Noutput(instance) = plastic_kinehardening_Noutput(instance) + 1_pInt - plastic_kinehardening_output(plastic_kinehardening_Noutput(instance),instance) = & - IO_lc(IO_stringValue(line,chunkPos,2_pInt)) - param(instance)%outputID = [param(instance)%outputID, output_ID] - endif !-------------------------------------------------------------------------------------------------- ! 
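!--------------------------------------------------------------------------------------------------
! [editor's note, not part of the patch] plastic_kinehardening_sizePostResults is retired; the
! per-instance result size is now sum(plastic_kinehardening_sizePostResult(:,instance)), filled
! one output at a time in the loop above. Worked example (values illustrative): with nslip = 12
! and the outputs resistance, backstress, accumulatedshear and shearrate requested, each
! contributes outputSize = totalNslip = 12, giving 4 * 12 = 48 result entries per material point.
!--------------------------------------------------------------------------------------------------
! 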
parameters depending on number of slip families @@ -511,12 +487,6 @@ subroutine plastic_kinehardening_init(fileUnit) case ('n_slip') param(instance)%n_slip = IO_floatValue(line,chunkPos,2_pInt) - - case ('atol_resistance') - param(instance)%aTolResistance = IO_floatValue(line,chunkPos,2_pInt) - - case ('atol_shear') - param(instance)%aTolShear = IO_floatValue(line,chunkPos,2_pInt) case default @@ -556,32 +526,7 @@ subroutine plastic_kinehardening_init(fileUnit) extmsg = trim(extmsg)//' ('//PLASTICITY_KINEHARDENING_label//')' ! prepare error message identifier call IO_error(211_pInt,ip=instance,ext_msg=extmsg) endif - - -!-------------------------------------------------------------------------------------------------- -! Determine size of postResults array - - outputsLoop: do o = 1_pInt,plastic_kinehardening_Noutput(instance) - select case(param(instance)%outputID(o)) - case(crss_ID, & !< critical resolved stress - crss_back_ID, & !< critical resolved back stress - sense_ID, & !< sense of acting shear stress (-1 or +1) - chi0_ID, & !< backstress at last switch of stress sense - gamma0_ID, & !< accumulated shear at last switch of stress sense - accshear_ID, & - shearrate_ID, & - resolvedstress_ID) - mySize = nSlip - case default - end select - - outputFound: if (mySize > 0_pInt) then - plastic_kinehardening_sizePostResult(o,instance) = mySize - plastic_kinehardening_sizePostResults(instance) = plastic_kinehardening_sizePostResults(instance) + mySize - endif outputFound - enddo outputsLoop - - plasticState(phase)%sizePostResults = plastic_kinehardening_sizePostResults(instance) + offset_slip = plasticState(phase)%nSlip+plasticState(phase)%nTwin+2_pInt plasticState(phase)%slipRate => & @@ -957,9 +902,8 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) result(postResults) ip, & !< integration point el !< element !< microstructure state - real(pReal), dimension(plastic_kinehardening_sizePostResults(phase_plasticityInstance(material_phase(ipc,ip,el)))) :: & + real(pReal), dimension(sum(plastic_kinehardening_sizePostResult(:,phase_plasticityInstance(material_phase(ipc,ip,el))))) :: & postResults - integer(pInt) :: & instance,ph, of, & nSlip,& From c63d297145fd5efa7ffa06f9413d02d84f6a1e8d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 26 Nov 2018 07:10:43 +0100 Subject: [PATCH 37/89] copied from phenopowerlaw --- src/plastic_kinematichardening.f90 | 79 ++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 5db5a3b5a..777e7e16b 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -265,6 +265,11 @@ subroutine plastic_kinehardening_init(fileUnit) prm%theta1_b = config_phase(p)%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) + prm%gdot0 = config_phase(p)%getFloat('gdot0') + prm%n_slip = config_phase(p)%getFloat('n_slip') + + + !prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & ! config_phase(p)%getFloats('interaction_slipslip'), & ! structure(1:3)) @@ -973,4 +978,78 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) result(postResults) end function plastic_kinehardening_postResults + +!-------------------------------------------------------------------------------------------------- +!> @brief calculates shear rates on slip systems and derivatives with respect to resolved stress +!> @details: Shear rates are calculated only optionally. NOTE: Against the common convention, the +!> result (i.e. 
intent(out)) variables are the last to have the optional arguments at the end +!-------------------------------------------------------------------------------------------------- +pure subroutine kinetics(prm,stt,of,Mp,gdot_pos,gdot_neg, & + dgdot_dtau_pos,dgdot_dtau_neg) + use prec, only: & + dNeq0 + use math, only: & + math_mul33xx33 + + implicit none + type(tParameters), intent(in) :: & + prm + type(tKinehardeningState), intent(in) :: & + stt + integer(pInt), intent(in) :: & + of + real(pReal), dimension(prm%totalNslip), intent(out) :: & + gdot_pos, & + gdot_neg + real(pReal), dimension(prm%totalNslip), optional, intent(out) :: & + dgdot_dtau_pos, & + dgdot_dtau_neg + real(pReal), dimension(3,3), intent(in) :: & + Mp + + real(pReal), dimension(prm%totalNslip) :: & + tau_pos, & + tau_neg + integer(pInt) :: i + logical :: nonSchmidActive + + nonSchmidActive = size(prm%nonSchmidCoeff) > 0_pInt + + do i = 1_pInt, prm%totalNslip + tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) + tau_neg(i) = merge(math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)), & + 0.0_pReal, nonSchmidActive) + enddo + + where(dNeq0(tau_pos)) + gdot_pos = prm%gdot0 * merge(0.5_pReal,1.0_pReal, nonSchmidActive) & ! 1/2 if non-Schmid active + * sign(abs((tau_pos-stt%crss_back(:,of))/stt%crss(:,of))**prm%n_slip, tau_pos-stt%crss_back(:,of)) + else where + gdot_pos = 0.0_pReal + end where + + where(dNeq0(tau_neg)) + gdot_pos = prm%gdot0 * 0.5_pReal & ! only used if non-Schmid active, always 1/2 + * sign(abs((tau_pos-stt%crss_back(:,of))/stt%crss(:,of))**prm%n_slip, tau_pos-stt%crss_back(:,of)) + else where + gdot_neg = 0.0_pReal + end where + + if (present(dgdot_dtau_pos)) then + where(dNeq0(gdot_pos)) + !dgdot_dtau_slip_pos = gdot_slip_pos*prm%n_slip/tau_slip_pos + else where + dgdot_dtau_pos = 0.0_pReal + end where + endif + if (present(dgdot_dtau_neg)) then + where(dNeq0(gdot_neg)) + ! 
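!--------------------------------------------------------------------------------------------------
! [editor's note, not part of the patch] Only the derivatives dgdot_dtau_pos/neg are optional
! here; the shear rates themselves are always returned. In this notation the kinetic law reads,
! per slip system a (a condensed statement, with the back-stress shift written explicitly the
! way the follow-up commit [PATCH 39/89] does):
!
!   \dot{\gamma}^a = \dot{\gamma}_0 \left| (\tau^a - \chi^a)/g^a \right|^{n}
!                    \operatorname{sign}(\tau^a - \chi^a)
!
! where tau is the (non-)Schmid resolved stress (tau_pos/tau_neg), chi the back stress
! (stt%crss_back), g the slip resistance (stt%crss), gdot0 the reference rate and n the stress
! exponent; the merge(0.5,1.0) factor halves both contributions when non-Schmid terms are
! active. The tangent follows analytically as d\dot{\gamma}/d\tau = n \dot{\gamma}/(\tau - \chi).
! Note also that the tau_neg branch above assigns gdot_pos, a copy-paste slip that
! [PATCH 39/89] corrects to gdot_neg.
!--------------------------------------------------------------------------------------------------
+          ! 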
dgdot_dtau_slip_neg = gdot_slip_neg*prm%n_slip/tau_slip_neg + else where + dgdot_dtau_neg = 0.0_pReal + end where + endif + +end subroutine kinetics + end module plastic_kinehardening From ef1e9cce0df29fc0b080466ca960f58eba991b33 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Mon, 26 Nov 2018 15:38:31 +0100 Subject: [PATCH 38/89] Functionality to avoid creating datasets in HDF5 of zero dimensions --- src/HDF5_utilities.f90 | 220 ++++++++++++++++++++++++++++------------- 1 file changed, 151 insertions(+), 69 deletions(-) mode change 100644 => 100755 src/HDF5_utilities.f90 diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 old mode 100644 new mode 100755 index 57c505645..3d882da7b --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -374,7 +374,10 @@ end subroutine HDF5_read_pReal_4 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of the type pReal with 5 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) +subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset @@ -383,8 +386,16 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) integer(pInt),dimension(:), allocatable :: myShape integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id + logical, intent(in), optional :: parallel + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer(HSIZE_T), dimension(5) :: myStart + myShape = shape(dataset) @@ -392,13 +403,44 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) !creating a property list for transfer properties call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - +!-------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(5) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal5: MPI_allreduce') + endif; endif +#endif + myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:4),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! 
open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr) if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) +!-------------------------------------------------------------------------------------------------- +!get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dget_space_f') +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5sselect_hyperslab_f') +!-------------------------------------------------------------------------------------------------- +! read + call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T),hdferr) if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dclose_f') +!-------------------------------------------------------------------------------------------------- +! close property lists and datatypes + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dclose_f') end subroutine HDF5_read_pReal_5 @@ -618,7 +660,6 @@ subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName) end subroutine HDF5_read_pInt_7 - !-------------------------------------------------------------------------------------------------- !> @brief subroutine for writing dataset of type pReal with 1 dimensions !-------------------------------------------------------------------------------------------------- @@ -638,26 +679,29 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(1) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(1) -#ifdef PETSc + #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal1: MPI_allreduce') endif; endif -#endif + #endif myStart = int([sum(outputSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:0),sum(outputSize)] @@ -693,15 +737,15 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5sclose_f/memspace_id') -end subroutine HDF5_write_pReal1 +end subroutine HDF5_write_PReal1 !-------------------------------------------------------------------------------------------------- @@ -723,16 +767,19 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(2) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(2) #ifdef PETSc @@ -740,7 +787,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal2: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt2: MPI_allreduce') endif; endif #endif myStart = int([0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -808,16 +855,19 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(3) :: myStart +!------------------------------------------------------------------------------------------------- +! 
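!--------------------------------------------------------------------------------------------------
! [editor's note, not part of the patch] All HDF5_write_pReal*/pInt* routines now share one
! parallel recipe, and the HDF5_read_* routines use its mirror image: each rank contributes the
! extent of its last array dimension, MPI_allreduce over the per-rank sizes yields the global
! extent and every rank's offset, and each rank then writes (or reads) only its own hyperslab.
! For example, three ranks holding 4, 2 and 3 slices write at offsets 0, 4 and 6 of a dataset of
! global extent 9. A condensed sketch of the shared preamble (error handling omitted):
!
!   localShape = shape(dataset)
!   if (any(localShape == 0)) return                        ! this commit: skip zero-size data
!   allocate(outputSize(worldsize), source = 0_pInt)
!   outputSize(worldrank+1) = localShape(ndim)              ! my share of the last dimension
!   call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)
!   myStart     = [0,...,0, sum(outputSize(1:worldrank))]   ! hyperslab offset (last dim only)
!   globalShape = [localShape(1:ndim-1), sum(outputSize)]
!
! Two caveats worth noting (observations, not verified fixes): the early return sits before
! collective HDF5/MPI calls, which is only safe if all ranks skip together, and several pReal
! routines now carry pInt labels in their error messages, which looks like a copy-paste slip.
!--------------------------------------------------------------------------------------------------
! 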
determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(3) #ifdef PETSc @@ -825,7 +875,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal3: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt3: MPI_allreduce') endif; endif #endif myStart = int([0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -893,16 +943,19 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(4) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(4) #ifdef PETSc @@ -910,7 +963,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal4: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt4: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -978,16 +1031,19 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(5) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! 
determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(5) #ifdef PETSc @@ -995,7 +1051,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal5: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt5: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1063,16 +1119,19 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(6) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(6) #ifdef PETSc @@ -1080,7 +1139,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal6: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt6: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1148,16 +1207,19 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(7) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! 
determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(7) #ifdef PETSc @@ -1165,7 +1227,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal7: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt7: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1214,8 +1276,10 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) end subroutine HDF5_write_pReal7 + + !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for writing dataset of the type pInt with 1 dimensions +!> @brief subroutine for writing dataset of type pInt with 1 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel) use numerics, only: & @@ -1238,12 +1302,14 @@ subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(1) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(1) #ifdef PETSc @@ -1324,12 +1390,14 @@ subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(2) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! 
determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(2) #ifdef PETSc @@ -1410,12 +1478,14 @@ subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(3) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(3) #ifdef PETSc @@ -1496,12 +1566,14 @@ subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(4) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(4) #ifdef PETSc @@ -1582,12 +1654,14 @@ subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(5) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! 
determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(5) #ifdef PETSc @@ -1668,12 +1742,14 @@ subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(6) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(6) #ifdef PETSc @@ -1754,12 +1830,14 @@ subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(7) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(7) #ifdef PETSc @@ -1820,3 +1898,7 @@ end module HDF5_Utilities + + + + From bfad81848ae044312c2a6cf2cf3c2f98e15a2346 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 26 Nov 2018 22:36:32 +0100 Subject: [PATCH 39/89] kinetics similar to phenopowerlaw --- src/plastic_kinematichardening.f90 | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 777e7e16b..590267890 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -1021,30 +1021,33 @@ pure subroutine kinetics(prm,stt,of,Mp,gdot_pos,gdot_neg, & 0.0_pReal, nonSchmidActive) enddo + tau_pos = tau_pos - stt%crss_back(:,of) + tau_neg = tau_neg - stt%crss_back(:,of) + where(dNeq0(tau_pos)) - gdot_pos = prm%gdot0 * merge(0.5_pReal,1.0_pReal, nonSchmidActive) & ! 1/2 if non-Schmid active - * sign(abs((tau_pos-stt%crss_back(:,of))/stt%crss(:,of))**prm%n_slip, tau_pos-stt%crss_back(:,of)) + gdot_pos = prm%gdot0 * merge(0.5_pReal,1.0_pReal, nonSchmidActive) & ! 1/2 if non-Schmid active + * sign(abs(tau_pos/stt%crss(:,of))**prm%n_slip, tau_pos) else where gdot_pos = 0.0_pReal end where where(dNeq0(tau_neg)) - gdot_pos = prm%gdot0 * 0.5_pReal & ! 
only used if non-Schmid active, always 1/2 - * sign(abs((tau_pos-stt%crss_back(:,of))/stt%crss(:,of))**prm%n_slip, tau_pos-stt%crss_back(:,of)) + gdot_neg = prm%gdot0 * 0.5_pReal & ! only used if non-Schmid active, always 1/2 + * sign(abs(tau_neg/stt%crss(:,of))**prm%n_slip, tau_neg) else where gdot_neg = 0.0_pReal end where if (present(dgdot_dtau_pos)) then where(dNeq0(gdot_pos)) - !dgdot_dtau_slip_pos = gdot_slip_pos*prm%n_slip/tau_slip_pos + dgdot_dtau_pos = gdot_pos*prm%n_slip/tau_pos else where dgdot_dtau_pos = 0.0_pReal end where endif if (present(dgdot_dtau_neg)) then where(dNeq0(gdot_neg)) - ! dgdot_dtau_slip_neg = gdot_slip_neg*prm%n_slip/tau_slip_neg + dgdot_dtau_neg = gdot_neg*prm%n_slip/tau_neg else where dgdot_dtau_neg = 0.0_pReal end where From 0265732e084f8e3c9cdcd286fb1b06a5fc0d99d6 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Tue, 27 Nov 2018 18:50:51 +0100 Subject: [PATCH 40/89] Parallel writing and reading of integer datasets working --- src/HDF5_utilities.f90 | 1401 ++++++++++++++++++++++++++++++++-------- 1 file changed, 1130 insertions(+), 271 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 3d882da7b..01ca3407d 100755 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -5,11 +5,11 @@ !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH !-------------------------------------------------------------------------------------------------- module HDF5_utilities - use prec - use IO - use HDF5 + use prec + use IO + use HDF5 #ifdef PETSc - use PETSC + use PETSC #endif implicit none @@ -21,21 +21,21 @@ module HDF5_utilities !> @brief reads pInt or pReal data of defined shape from file !-------------------------------------------------------------------------------------------------- interface HDF5_read - module procedure HDF5_read_pReal_1 - module procedure HDF5_read_pReal_2 - module procedure HDF5_read_pReal_3 - module procedure HDF5_read_pReal_4 - module procedure HDF5_read_pReal_5 - module procedure HDF5_read_pReal_6 - module procedure HDF5_read_pReal_7 + module procedure HDF5_read_pReal1 + module procedure HDF5_read_pReal2 + module procedure HDF5_read_pReal3 + module procedure HDF5_read_pReal4 + module procedure HDF5_read_pReal5 + module procedure HDF5_read_pReal6 + module procedure HDF5_read_pReal7 - module procedure HDF5_read_pInt_1 - module procedure HDF5_read_pInt_2 - module procedure HDF5_read_pInt_3 - module procedure HDF5_read_pInt_4 - module procedure HDF5_read_pInt_5 - module procedure HDF5_read_pInt_6 - module procedure HDF5_read_pInt_7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK + module procedure HDF5_read_pInt1 + module procedure HDF5_read_pInt2 + module procedure HDF5_read_pInt3 + module procedure HDF5_read_pInt4 + module procedure HDF5_read_pInt5 + module procedure HDF5_read_pInt6 + module procedure HDF5_read_pInt7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK end interface HDF5_read @@ -277,135 +277,392 @@ subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue) end subroutine HDF5_addIntegerAttribute -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 1 dimension -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_1(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:) :: dataset - integer(HID_T), intent(in) :: loc_id !< 
file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dclose_f') - -end subroutine HDF5_read_pReal_1 !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 2 dimensions +!> @brief subroutine for reading dataset of type pReal with 1 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_2(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dclose_f') - -end subroutine HDF5_read_pReal_2 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 3 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_3(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dclose_f') - -end subroutine HDF5_read_pReal_3 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 4 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_4(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or 
group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dclose_f') - -end subroutine HDF5_read_pReal_4 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 5 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal1(dataset,loc_id,datasetName,parallel) use numerics, only: & worldrank, & worldsize implicit none - real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id logical, intent(in), optional :: parallel - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr + integer(pInt), dimension(:), allocatable :: & globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) readSize !< contribution of all processes - integer(HSIZE_T), dimension(5) :: myStart - - myShape = shape(dataset) + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(1) :: myStart - -!-------------------------------------------------------------------------------------------------- -!creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(1) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total read size over all processes
+ if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal1: MPI_allreduce')
+ endif; endif
+#endif
+ myStart = int([sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:0),sum(readSize)]
+
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_all_coll_metadata_ops_f')
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
+ file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+!close property lists, dataset, and file dataspace
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pReal1
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for reading dataset of type pReal with 2 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_read_pReal2(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
+
+ implicit none
+ real(pReal), intent(inout), dimension(:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id                      !< file or group handle
+ character(len=*), intent(in) :: datasetName               !< name of the dataset in the file
+ logical, intent(in), optional :: parallel
+
+
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                                          !< shape of the dataset (all processes)
+   localShape, &                                           !< shape of the dataset (this process)
+   readSize                                                !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(2) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! 
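worked example for the slab arithmetic used by all of these readers: each process fills only
+! its own slot of the zero-initialised readSize, so the MPI_SUM allreduce acts as an allgather;
+! with three processes holding 2, 3 and 4 entries of a rank-1 dataset, readSize becomes [2,3,4],
+! and process 1 (worldrank = 1) reads with myStart = [2] into globalShape = [9], i.e. a disjoint
+! hyperslab along the last dimension of one shared global dataspace.
+!
+!-------------------------------------------------------------------------------------------------
+! 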
determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(2) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal2: MPI_allreduce') + endif; endif +#endif + myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:1),sum(readSize)] + + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_all_coll_metadata_ops_f') +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pReal2 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pReal with 3 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal3(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize + + implicit none + real(pReal), intent(inout), dimension(:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel + + + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(3) :: myStart + +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(3) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal3: MPI_allreduce') + endif; endif +#endif + myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:2),sum(readSize)] + + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_all_coll_metadata_ops_f') +! 
open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! read + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pReal3 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pReal with 4 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal4(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize + + implicit none + real(pReal), intent(inout), dimension(:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel + + + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(4) :: myStart + +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! 
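note: two different property lists are in play in these readers: plist_id (H5P_DATASET_XFER)
+! controls how the raw data transfer itself is performed (independent vs. collective MPI-IO),
+! while aplist_id (H5P_DATASET_ACCESS) controls how the dataset is opened and how its metadata
+! is accessed.
+!
+!-------------------------------------------------------------------------------------------------
+! 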
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(4) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal4: MPI_allreduce') + endif; endif +#endif + myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:3),sum(readSize)] + + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_all_coll_metadata_ops_f') +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pReal4 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pReal with 5 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize + + implicit none + real(pReal), intent(inout), dimension(:,:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel + + + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(5) :: myStart + +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- allocate(readSize(worldsize), source = 0_pInt) readSize(worldrank+1) = localShape(5) #ifdef PETSc @@ -419,246 +676,845 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:4),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_all_coll_metadata_ops_f') ! 
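note: h5pset_all_coll_metadata_ops_f (available since HDF5 1.10) forces every metadata read
! issued through this access property list to be collective, which keeps all MPI ranks in step
! when the dataset is opened; the list is then handed to h5dopen_f as its dapl_id argument.
! 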
open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') + call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dopen_f') + !-------------------------------------------------------------------------------------------------- -!get the space_id of dataset in the file +! get the space_id of dataset in the file call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dget_space_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dget_space_f') + !-------------------------------------------------------------------------------------------------- ! select a hyperslab (the portion of the current process) in the file call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5sselect_hyperslab_f') + !-------------------------------------------------------------------------------------------------- ! read - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dread_f') + !-------------------------------------------------------------------------------------------------- -! close property lists and datatypes +!close types, dataspaces call h5pclose_f(plist_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: plist_id') call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pReal5 -end subroutine HDF5_read_pReal_5 !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 6 dimensions +!> @brief subroutine for reading dataset of type pReal with 6 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_6(dataset,loc_id,datasetName) +subroutine HDF5_read_pReal6(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none - real(pReal), intent(out), dimension(:,:,:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape + logical, intent(in), optional :: parallel - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call 
IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dclose_f') + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(6) :: myStart + +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(6) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal6: MPI_allreduce') + endif; endif +#endif + myStart = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:5),sum(readSize)] -end subroutine HDF5_read_pReal_6 !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 7 dimensions +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_all_coll_metadata_ops_f') +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dopen_f') + !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_7(dataset,loc_id,datasetName) +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pReal6 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pReal with 7 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal7(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none - real(pReal), intent(out), dimension(:,:,:,:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape + logical, intent(in), optional :: parallel - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dclose_f') + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(7) :: myStart + +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! 
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(7) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal7: MPI_allreduce') + endif; endif +#endif + myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:6),sum(readSize)] -end subroutine HDF5_read_pReal_7 !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 1 dimension +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_all_coll_metadata_ops_f') +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dopen_f') + !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_1(dataset,loc_id,datasetName) +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read
+ call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
+ file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+!close property lists, dataset, and file dataspace
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pReal7
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for reading dataset of type pInt with 1 dimension
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
+
+ implicit none
+ integer(pInt), intent(inout), dimension(:) :: dataset
+ integer(HID_T), intent(in) :: loc_id                      !< file or group handle
+ character(len=*), intent(in) :: datasetName               !< name of the dataset in the file
+ logical, intent(in), optional :: parallel
+
+
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                                          !< shape of the dataset (all processes)
+   localShape, &                                           !< shape of the dataset (this process)
+   readSize                                                !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(1) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
!--------------------------------------------------------------------------------------------------
+! 
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_2(dataset,loc_id,datasetName) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(1) + +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt1: MPI_allreduce') + endif; endif +#endif + + myStart = int([sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:0),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_all_coll_metadata_ops_f') + +!-------------------------------------------------------------------------------------------------- +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
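note: the hyperslab selected above belongs to the file dataspace, so it must reach h5dread_f
+! through the file_space_id argument; an equivalent, more explicit form would also create a
+! matching memory dataspace for the local buffer (a sketch, using the already declared but
+! otherwise unused memspace_id):
+!
+!   call h5screate_simple_f(1, int(localShape,HSIZE_T), memspace_id, hdferr)
+!   call h5dread_f(dset_id, H5T_NATIVE_INTEGER, dataset, int(localShape,HSIZE_T), hdferr, &
+!                  mem_space_id = memspace_id, file_space_id = filespace_id, xfer_prp = plist_id)
+!
+!--------------------------------------------------------------------------------------------------
+! 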
read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+ file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+!close property lists, dataset, and file dataspace
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt1
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for reading dataset of type pInt with 2 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
+
+ implicit none
+ integer(pInt), intent(inout), dimension(:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id                      !< file or group handle
+ character(len=*), intent(in) :: datasetName               !< name of the dataset in the file
+ logical, intent(in), optional :: parallel
+
+
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                                          !< shape of the dataset (all processes)
+   localShape, &                                           !< shape of the dataset (this process)
+   readSize                                                !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(2) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
!--------------------------------------------------------------------------------------------------
+! 
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_3(dataset,loc_id,datasetName) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(2) + +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt2: MPI_allreduce') + endif; endif +#endif + + myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:1),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_all_coll_metadata_ops_f') + +!-------------------------------------------------------------------------------------------------- +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+ file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+!close property lists, dataset, and file dataspace
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt2
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief subroutine for reading dataset of type pInt with 3 dimensions
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
+
+ implicit none
+ integer(pInt), intent(inout), dimension(:,:,:) :: dataset
+ integer(HID_T), intent(in) :: loc_id                      !< file or group handle
+ character(len=*), intent(in) :: datasetName               !< name of the dataset in the file
+ logical, intent(in), optional :: parallel
+
+
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                                          !< shape of the dataset (all processes)
+   localShape, &                                           !< shape of the dataset (this process)
+   readSize                                                !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(3) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
!--------------------------------------------------------------------------------------------------
+! 
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_4(dataset,loc_id,datasetName) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(3) + +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt3: MPI_allreduce') + endif; endif +#endif + + myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:2),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_all_coll_metadata_ops_f') + +!-------------------------------------------------------------------------------------------------- +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pInt3 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pInt with 4 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none - integer(pInt), intent(out), dimension(:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape + logical, intent(in), optional :: parallel - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dclose_f') + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(4) :: myStart -end subroutine HDF5_read_pInt_4 +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 5 dimensions +! 
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_5(dataset,loc_id,datasetName) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(4) + +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt4: MPI_allreduce') + endif; endif +#endif + + myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:3),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_all_coll_metadata_ops_f') + +!-------------------------------------------------------------------------------------------------- +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pInt4 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pInt with 5 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape + logical, intent(in), optional :: parallel - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dclose_f') + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(5) :: myStart -end subroutine HDF5_read_pInt_5 +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 6 dimensions +! 
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_6(dataset,loc_id,datasetName) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(5) + +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt5: MPI_allreduce') + endif; endif +#endif + + myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:4),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_all_coll_metadata_ops_f') + +!-------------------------------------------------------------------------------------------------- +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pInt5 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pInt with 6 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape + logical, intent(in), optional :: parallel - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dclose_f') + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(6) :: myStart -end subroutine HDF5_read_pInt_6 +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pInt with 7 dimensions +! 
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(6) + +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt6: MPI_allreduce') + endif; endif +#endif + + myStart = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:5),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_all_coll_metadata_ops_f') + +!-------------------------------------------------------------------------------------------------- +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pInt6 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pInt with 7 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none - integer(pInt), intent(out), dimension(:,:,:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape + logical, intent(in), optional :: parallel - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dclose_f') + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(7) :: myStart -end subroutine HDF5_read_pInt_7 +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- +! 
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(7) + +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt7: MPI_allreduce') + endif; endif +#endif + + myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:6),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_all_coll_metadata_ops_f') + +!-------------------------------------------------------------------------------------------------- +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pInt7 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for writing dataset of type pReal with 1 dimensions @@ -667,7 +1523,7 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) use numerics, only: & worldrank, & worldsize - + implicit none real(pReal), intent(inout), dimension(:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle @@ -694,14 +1550,14 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(1) - #ifdef PETSc +#ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal1: MPI_allreduce') endif; endif - #endif +#endif myStart = int([sum(outputSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:0),sum(outputSize)] @@ -787,7 +1643,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt2: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal2: MPI_allreduce') endif; endif #endif myStart = int([0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -875,7 +1731,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
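!--------------------------------------------------------------------------------------------------
! [editorial sketch, not part of the patch series] The transfer property list prepared in each of
! these routines is the single switch between independent and collective data movement; it is
! only flipped inside the #ifdef PETSc guard, so serial builds keep the HDF5 default. Reduced to
! its core:

 integer(HID_T)         :: xferPlist
 integer(HDF5_ERR_TYPE) :: hdferr

 call h5pcreate_f(H5P_DATASET_XFER_F, xferPlist, hdferr)            ! default mode: independent I/O
 call h5pset_dxpl_mpio_f(xferPlist, H5FD_MPIO_COLLECTIVE_F, hdferr) ! all ranks join each data access
 ! ... pass xferPlist as xfer_prp to h5dread_f/h5dwrite_f ...
 call h5pclose_f(xferPlist, hdferr)
!--------------------------------------------------------------------------------------------------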
get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt3: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal3: MPI_allreduce') endif; endif #endif myStart = int([0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -963,7 +1819,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt4: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal4: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1051,7 +1907,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt5: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal5: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1139,7 +1995,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt6: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal6: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1227,7 +2083,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt7: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal7: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1896,6 +2752,9 @@ end subroutine HDF5_write_pInt7 end module HDF5_Utilities +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
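!--------------------------------------------------------------------------------------------------
! [editorial note, not part of the patch series] The patch that follows replaces MPI_LONG with
! MPI_INT in all of these reductions. readSize/outputSize are integer(pInt), a 32-bit kind, while
! MPI_LONG describes a C long (commonly 64 bit), so the collective would interpret the buffer
! with the wrong element width. A sketch with a matching handle, reusing the declarations of the
! surrounding routines (MPI_INTEGER is the Fortran default-integer handle; MPI_INT, as used in
! the patch, is the C int handle of the same width on common ABIs):

 call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,ierr)
!--------------------------------------------------------------------------------------------------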
+ + From d33df38b429ebf2749a2a6c381881cdd2ce994b8 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Tue, 27 Nov 2018 18:52:54 +0100 Subject: [PATCH 41/89] made it non-executable --- src/HDF5_utilities.f90 | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 src/HDF5_utilities.f90 diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 old mode 100755 new mode 100644 From ab59274c357c40be21ebba6c9572df37f16dce78 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Fri, 30 Nov 2018 10:16:04 +0100 Subject: [PATCH 42/89] Able to read the independent datasets (outside the groups) in parallel --- src/HDF5_utilities.f90 | 144 ++++++++++++++++++++++++++++++++--------- 1 file changed, 115 insertions(+), 29 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 01ca3407d..60d4c705d 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -193,7 +193,19 @@ integer(HID_T) function HDF5_openGroup2(FileReadID,groupName) integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T), intent(in) :: FileReadID - call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr) + integer(HID_T) :: aplist_id + + !------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pcreate_f ('//trim(groupName)//')') + !------------------------------------------------------------------------------------------------- +! setting I/O mode to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')') + !------------------------------------------------------------------------------------------------- +! opening the group + call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr, gapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5gopen_f ('//trim(groupName)//')') end function HDF5_openGroup2 @@ -321,14 +333,18 @@ subroutine HDF5_read_pReal1(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal1: MPI_allreduce') endif; endif #endif myStart = int([sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:0),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! 
set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -408,14 +424,18 @@ subroutine HDF5_read_pReal2(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal2: MPI_allreduce') endif; endif #endif myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:1),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -495,14 +515,18 @@ subroutine HDF5_read_pReal3(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal3: MPI_allreduce') endif; endif #endif myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:2),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -582,14 +606,19 @@ subroutine HDF5_read_pReal4(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal4: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:3),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -669,13 +698,19 @@ subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal5: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:4),sum(readSize)] + + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective @@ -698,7 +733,7 @@ subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id,mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -756,14 +791,18 @@ subroutine HDF5_read_pReal6(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal6: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:5),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -843,14 +882,19 @@ subroutine HDF5_read_pReal7(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal7: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:6),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -931,7 +975,7 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt1: MPI_allreduce') endif; endif #endif @@ -939,10 +983,16 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) myStart = int([sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:0),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! 
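!--------------------------------------------------------------------------------------------------
! [editorial sketch, not part of the patch series] The h5screate_simple_f additions above give
! h5dread_f an explicit memory dataspace. Without one, HDF5 derives the memory layout from the
! file dataspace -- here the global shape -- although each rank's buffer only holds its local
! slab. The matched pair of selections, using the variables of the surrounding routines:

 call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr)  ! memory: local slab
 call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, &                    ! file: slab at myStart
                            int(localShape,HSIZE_T), hdferr)
 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int(localShape,HSIZE_T), hdferr, &
                mem_space_id = memspace_id, file_space_id = filespace_id, xfer_prp = plist_id)
!--------------------------------------------------------------------------------------------------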
set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_all_coll_metadata_ops_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_all_coll_metadata_ops_f') !-------------------------------------------------------------------------------------------------- ! open the dataset in the file @@ -1021,7 +1071,7 @@ subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt2: MPI_allreduce') endif; endif #endif @@ -1029,10 +1079,16 @@ subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel) myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:1),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_all_coll_metadata_ops_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_all_coll_metadata_ops_f') !-------------------------------------------------------------------------------------------------- ! open the dataset in the file @@ -1111,7 +1167,7 @@ subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt3: MPI_allreduce') endif; endif #endif @@ -1119,10 +1175,16 @@ subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel) myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:2),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! 
set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_all_coll_metadata_ops_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_all_coll_metadata_ops_f') !-------------------------------------------------------------------------------------------------- ! open the dataset in the file @@ -1201,7 +1263,7 @@ subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt4: MPI_allreduce') endif; endif #endif @@ -1209,10 +1271,16 @@ subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:3),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_all_coll_metadata_ops_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_all_coll_metadata_ops_f') !-------------------------------------------------------------------------------------------------- ! open the dataset in the file @@ -1291,7 +1359,7 @@ subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt5: MPI_allreduce') endif; endif #endif @@ -1299,10 +1367,16 @@ subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:4),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! 
set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_all_coll_metadata_ops_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_all_coll_metadata_ops_f') !-------------------------------------------------------------------------------------------------- ! open the dataset in the file @@ -1381,7 +1455,7 @@ subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt6: MPI_allreduce') endif; endif #endif @@ -1389,10 +1463,16 @@ subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:5),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_all_coll_metadata_ops_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_all_coll_metadata_ops_f') !-------------------------------------------------------------------------------------------------- ! open the dataset in the file @@ -1471,7 +1551,7 @@ subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt7: MPI_allreduce') endif; endif #endif @@ -1479,10 +1559,16 @@ subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:6),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! 
set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_all_coll_metadata_ops_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_all_coll_metadata_ops_f') !-------------------------------------------------------------------------------------------------- ! open the dataset in the file From e3d8022776640180b0b2e024171351f19af89718 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Fri, 30 Nov 2018 17:33:30 +0100 Subject: [PATCH 43/89] Read and write works for all types of data in parallel with test module passed --- src/HDF5_utilities.f90 | 317 ++++++++++++++++++----------------------- 1 file changed, 140 insertions(+), 177 deletions(-) mode change 100644 => 100755 src/HDF5_utilities.f90 diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 old mode 100644 new mode 100755 index ba04773d2..28a9fbde0 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -175,9 +175,19 @@ integer(HID_T) function HDF5_addGroup2(fileHandle,groupName,parallel) logical,intent(in), optional :: parallel - integer(HID_T) :: plist_id,gapl_id + integer(HID_T) :: plist_id,gapl_id, gcpl_id, aplist_id - call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup2, hdferr) + !------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pcreate_f ('//trim(groupName)//')') + !------------------------------------------------------------------------------------------------- +! setting I/O mode to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')') + !------------------------------------------------------------------------------------------------- +! Create group + call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup2, hdferr, OBJECT_NAMELEN_DEFAULT_F,gapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup2: h5gcreate_f ('//trim(groupName)//')') end function HDF5_addGroup2 @@ -194,6 +204,8 @@ integer(HID_T) function HDF5_openGroup2(FileReadID,groupName) integer(HID_T), intent(in) :: FileReadID integer(HID_T) :: aplist_id + logical :: is_collective + !------------------------------------------------------------------------------------------------- ! creating a property list for data access properties @@ -201,7 +213,7 @@ integer(HID_T) function HDF5_openGroup2(FileReadID,groupName) if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pcreate_f ('//trim(groupName)//')') !------------------------------------------------------------------------------------------------- ! setting I/O mode to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + call h5pget_all_coll_metadata_ops_f(aplist_id, is_collective, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')') !------------------------------------------------------------------------------------------------- ! 
opening the group @@ -340,15 +352,17 @@ subroutine HDF5_read_pReal1(dataset,loc_id,datasetName,parallel) myStart = int([sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:0),sum(readSize)] - !-------------------------------------------------------------------------------------------------- + +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_all_coll_metadata_ops_f') +!-------------------------------------------------------------------------------------------------- ! open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dopen_f') @@ -366,7 +380,7 @@ subroutine HDF5_read_pReal1(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -377,6 +391,8 @@ subroutine HDF5_read_pReal1(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal1 @@ -431,15 +447,17 @@ subroutine HDF5_read_pReal2(dataset,loc_id,datasetName,parallel) myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:1),sum(readSize)] - !-------------------------------------------------------------------------------------------------- + +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! 
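!--------------------------------------------------------------------------------------------------
! [editorial sketch, not part of the patch series] Groups now get the same treatment as datasets:
! an access property list with collective metadata reads is created first and handed to
! h5gcreate_f/h5gopen_f. (HDF5_openGroup2 above calls h5pget_all_coll_metadata_ops_f, which only
! queries the flag; presumably h5pset_all_coll_metadata_ops_f, as in HDF5_addGroup2, was meant.)
! In isolation, with 'mapping' as a placeholder group name:

 integer(HID_T) :: gapl_id, groupHandle

 call h5pcreate_f(H5P_GROUP_ACCESS_F, gapl_id, hdferr)
 call h5pset_all_coll_metadata_ops_f(gapl_id, .true., hdferr)   ! metadata reads become collective
 call h5gopen_f(fileHandle, 'mapping', groupHandle, hdferr, gapl_id = gapl_id)
 ! ... use groupHandle ...
 call h5gclose_f(groupHandle, hdferr)
 call h5pclose_f(gapl_id, hdferr)
!--------------------------------------------------------------------------------------------------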
set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_all_coll_metadata_ops_f') +!-------------------------------------------------------------------------------------------------- ! open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dopen_f') @@ -452,24 +470,26 @@ subroutine HDF5_read_pReal2(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! select a hyperslab (the portion of the current process) in the file call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5sselect_hyperslab_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5sselect_hyperslab_f') !-------------------------------------------------------------------------------------------------- ! read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dread_f') + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dread_f') !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/memspace_id') -end subroutine HDF5_read_pReal3 +end subroutine HDF5_read_pReal2 !-------------------------------------------------------------------------------------------------- @@ -522,15 +542,17 @@ subroutine HDF5_read_pReal3(dataset,loc_id,datasetName,parallel) myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:2),sum(readSize)] - !-------------------------------------------------------------------------------------------------- + +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! 
set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_all_coll_metadata_ops_f') +!-------------------------------------------------------------------------------------------------- ! open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dopen_f') @@ -548,7 +570,7 @@ subroutine HDF5_read_pReal3(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -559,6 +581,8 @@ subroutine HDF5_read_pReal3(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal3 @@ -613,16 +637,17 @@ subroutine HDF5_read_pReal4(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:3),sum(readSize)] - !-------------------------------------------------------------------------------------------------- + +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_all_coll_metadata_ops_f') +!-------------------------------------------------------------------------------------------------- ! open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dopen_f') @@ -640,7 +665,7 @@ subroutine HDF5_read_pReal4(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -651,6 +676,8 @@ subroutine HDF5_read_pReal4(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal4 @@ -705,17 +732,17 @@ subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:4),sum(readSize)] - - !-------------------------------------------------------------------------------------------------- + +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_all_coll_metadata_ops_f') +!-------------------------------------------------------------------------------------------------- ! open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dopen_f') @@ -733,7 +760,7 @@ subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id,mem_space_id = memspace_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -744,6 +771,8 @@ subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal5 @@ -798,15 +827,17 @@ subroutine HDF5_read_pReal6(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:5),sum(readSize)] - !-------------------------------------------------------------------------------------------------- + +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_all_coll_metadata_ops_f') +!-------------------------------------------------------------------------------------------------- ! open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dopen_f') @@ -824,7 +855,7 @@ subroutine HDF5_read_pReal6(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -835,6 +866,8 @@ subroutine HDF5_read_pReal6(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal6 @@ -889,16 +922,17 @@ subroutine HDF5_read_pReal7(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:6),sum(readSize)] - !-------------------------------------------------------------------------------------------------- + +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_all_coll_metadata_ops_f') +!-------------------------------------------------------------------------------------------------- ! open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dopen_f') @@ -916,7 +950,7 @@ subroutine HDF5_read_pReal7(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -927,6 +961,8 @@ subroutine HDF5_read_pReal7(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal7 @@ -975,7 +1011,7 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt1: MPI_allreduce') endif; endif #endif @@ -983,12 +1019,11 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) myStart = int([sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:0),sum(readSize)] - !-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) +if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1012,7 +1047,7 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
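All seven pReal readers above now follow one collective-read recipe: build a memory dataspace of the local shape, select this rank's hyperslab in the file dataspace, and hand both to h5dread_f together with a collective transfer property list. A condensed, self-contained sketch of that recipe for a rank-1 array follows; the subroutine name, its argument list, and the bare stop error handling are illustrative assumptions, not code from the patch:

subroutine read_slab_collective(dataset,loc_id,datasetName,offset,globalLen)
 use HDF5
 implicit none
 real(8),          intent(out), dimension(:) :: dataset    ! this rank's slab of the global array (assumed name)
 integer(HID_T),   intent(in) :: loc_id                    ! open file or group handle
 character(len=*), intent(in) :: datasetName
 integer(HSIZE_T), intent(in) :: offset                    ! global position where this rank's slab starts
 integer(HSIZE_T), intent(in) :: globalLen                 ! total length summed over all ranks
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
 integer        :: hdferr

 call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)    ! transfer property list ...
#ifdef PETSc
 call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) ! ... switched to collective MPI-IO
#endif
 call h5screate_simple_f(1, int(shape(dataset),HSIZE_T), memspace_id, hdferr) ! memory covers the local shape only
 call h5dopen_f(loc_id, datasetName, dset_id, hdferr)
 call h5dget_space_f(dset_id, filespace_id, hdferr)
 call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, [offset], &
                            int(shape(dataset),HSIZE_T), hdferr)    ! this rank's slice of the file
 call h5dread_f(dset_id, H5T_NATIVE_DOUBLE, dataset, [globalLen], hdferr, &
                file_space_id = filespace_id, xfer_prp = plist_id, &
                mem_space_id = memspace_id)                ! both dataspaces passed, as in the hunks above
 if (hdferr < 0) stop 'h5dread_f failed'
 call h5pclose_f(plist_id, hdferr)                         ! release everything that was opened
 call h5dclose_f(dset_id, hdferr)
 call h5sclose_f(filespace_id, hdferr)
 call h5sclose_f(memspace_id, hdferr)
end subroutine read_slab_collective

The rank-2 to rank-7 variants differ only in how myStart and globalShape pad the leading dimensions with zeros, which is why the hunks above are near-identical.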
@@ -975,7 +1011,7 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt1: MPI_allreduce') endif; endif #endif @@ -983,12 +1019,11 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) myStart = int([sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:0),sum(readSize)] - !-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1012,7 +1047,7 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- !
read call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & - mem_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -1023,6 +1058,8 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt1 @@ -1071,7 +1108,7 @@ subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt2: MPI_allreduce') endif; endif #endif @@ -1079,12 +1116,11 @@ subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel) myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:1),sum(readSize)] - !-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1108,7 +1144,7 @@ subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- !
read call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & - mem_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -1119,6 +1155,8 @@ subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt2
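The hyperslab coordinates used by every reader follow directly from the per-rank sizes gathered by the MPI_allreduce calls above: a rank's slab along the slowest dimension starts where the slabs of all lower ranks end, and the global shape replaces the local extent of that dimension by the total. A worked sketch of the arithmetic (the rank count and sizes are assumed example values, not from the patch):

program slab_offsets
 implicit none
 integer, parameter :: worldsize = 4, worldrank = 2        ! assumed: 4 MPI ranks, this one is rank 2
 integer, dimension(worldsize) :: readSize = [10,12,9,11]  ! per-rank extent of the last dimension, as after MPI_allreduce
 integer, dimension(3) :: localShape = [3,3,9]             ! rank 2 holds 9 entries, matching readSize(worldrank+1)
 integer, dimension(3) :: myStart, globalShape

 myStart     = [0, 0, sum(readSize(1:worldrank))]          ! = [0,0,22]: the slabs of ranks 0 and 1 come first
 globalShape = [localShape(1:2), sum(readSize)]            ! = [3,3,42]; the patch converts both to HSIZE_T
 print *, myStart, globalShape
end program slab_offsets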
@@ -1167,7 +1205,7 @@ subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt3: MPI_allreduce') endif; endif #endif @@ -1175,12 +1213,11 @@ subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel) myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:2),sum(readSize)] - !-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1204,7 +1241,7 @@ subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- !
read call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -1215,6 +1252,8 @@ subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt3 @@ -1263,7 +1302,7 @@ subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt4: MPI_allreduce') endif; endif #endif @@ -1271,12 +1310,11 @@ subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:3),sum(readSize)] - !-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1293,89 +1331,16 @@ subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dget_space_f') !-------------------------------------------------------------------------------------------------- -<<<<<<< HEAD -subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize -======= ! select a hyperslab (the portion of the current process) in the file call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sselect_hyperslab_f') ->>>>>>> ab59274c357c40be21ebba6c9572df37f16dce78 !-------------------------------------------------------------------------------------------------- !
read call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dread_f') -<<<<<<< HEAD - logical, intent(in), optional :: parallel - integer :: ierr - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - - integer(HDF5_ERR_TYPE) :: hdferr - integer(HSIZE_T), dimension(5) :: myStart - - integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id - - myShape = shape(dataset) - - - localShape = shape(dataset) - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(5) - -!>>>>>>>>>!New additions - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - write(6,*) plist_id #ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal5: MPI_allreduce') - endif; endif -#endif - - myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:4),sum(readSize)] - -!>>>>>>>>>!New additions -!------------------------------------------------------------------------------------------------- -! Open the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') -!------------------------------------------------------------------------------------------------- -! get the dataspace_id of the dataset - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_5: h5dget_space_f') -!------------------------------------------------------------------------------------------------- -! select hyperslab (part to be read by the current process) - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart,int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_5: h5sselect_hyperslab_f') - write(6,*) filespace_id -!------------------------------------------------------------------------------------------------- -!
read the part of the file - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr, & - file_space_id = filespace_id, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') - -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dclose_f') - !call h5sclose_f(filespace_id, hdferr) - !if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/filespace_id') - !call h5sclose_f(memspace_id, hdferr) - !if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/memspace_id') ======= !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) @@ -1384,9 +1349,10 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt4 ->>>>>>> ab59274c357c40be21ebba6c9572df37f16dce78 !-------------------------------------------------------------------------------------------------- @@ -1433,7 +1399,7 @@ subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt5: MPI_allreduce') endif; endif #endif @@ -1441,12 +1407,11 @@ subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:4),sum(readSize)] - !-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1470,7 +1435,7 @@ subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- !
read call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -1481,6 +1446,8 @@ subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt5 @@ -1529,7 +1496,7 @@ subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt6: MPI_allreduce') endif; endif #endif @@ -1537,12 +1504,11 @@ subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:5),sum(readSize)] - !-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1566,7 +1532,7 @@ subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- !
read call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -1577,6 +1543,8 @@ subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt6 @@ -1625,7 +1593,7 @@ subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt7: MPI_allreduce') endif; endif #endif @@ -1633,12 +1601,11 @@ subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:6),sum(readSize)] - !-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5screate_simple_f/memspace_id') - + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1662,7 +1629,7 @@ subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- !
read call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -1673,6 +1640,8 @@ subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dclose_f') call h5sclose_f(filespace_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/filespace_id') + call h5sclose_f(memspace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt7 @@ -1714,7 +1683,7 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal1: MPI_allreduce') endif; endif #endif @@ -1802,7 +1771,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal2: MPI_allreduce') endif; endif #endif @@ -1890,7 +1859,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal3: MPI_allreduce') endif; endif #endif @@ -1978,7 +1947,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) !
get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal4: MPI_allreduce') endif; endif #endif @@ -2066,7 +2035,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal5: MPI_allreduce') endif; endif #endif @@ -2154,7 +2123,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal6: MPI_allreduce') endif; endif #endif @@ -2242,7 +2211,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal7: MPI_allreduce') endif; endif #endif @@ -2332,7 +2301,7 @@ subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt1: MPI_allreduce') endif; endif #endif @@ -2420,7 +2389,7 @@ subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt2: MPI_allreduce') endif; endif #endif @@ -2508,7 +2477,7 @@ subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt3: MPI_allreduce') endif; endif #endif @@ -2596,7 +2565,7 @@ subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt4: MPI_allreduce') endif; endif #endif @@ -2684,7 +2653,7 @@ subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt5: MPI_allreduce') endif; endif #endif @@ -2772,7 +2741,7 @@ subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt6: MPI_allreduce') endif; endif #endif @@ -2860,7 +2829,7 @@ subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt7: MPI_allreduce') endif; endif #endif @@ -2915,9 +2884,3 @@ end module HDF5_Utilities !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
- - - - - - From 801e472497e08fc8edf9728708d72786191c1aa7 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Fri, 30 Nov 2018 17:34:26 +0100 Subject: [PATCH 44/89] Made it non-executable --- src/HDF5_utilities.f90 | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 src/HDF5_utilities.f90 diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 old mode 100755 new mode 100644 From ecb00af1470bb122437e9c5bdc3f0b891096d184 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 4 Dec 2018 23:09:25 +0100 Subject: [PATCH 45/89] cleaning and separating functions --- src/CPFEM2.f90 | 49 +++++++++++++------------- src/HDF5_utilities.f90 | 78 +++++++++++++++++++++--------------------- src/results.f90 | 43 +++++++++-------------- 3 files changed, 82 insertions(+), 88 deletions(-) diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90 index 126e9240b..54774cf59 100644 --- a/src/CPFEM2.f90 +++ b/src/CPFEM2.f90 @@ -108,8 +108,7 @@ subroutine CPFEM_init debug_levelBasic, & debug_levelExtensive use FEsolving, only: & - restartRead, & - modelName + restartRead use material, only: & material_phase, & homogState, & @@ -128,16 +127,17 @@ subroutine CPFEM_init use hdf5 use HDF5_utilities, only: & HDF5_openFile, & - HDF5_openGroup2, & + HDF5_closeFile, & + HDF5_openGroup, & + HDF5_closeGroup, & HDF5_read use DAMASK_interface, only: & getSolverJobName implicit none - integer(pInt) :: k,l,m,ph,homog + integer(pInt) :: ph,homog character(len=1024) :: rankStr, PlasticItem, HomogItem - integer(HID_T) :: fileReadID, groupPlasticID, groupHomogID - integer :: hdferr + integer(HID_T) :: fileHandle, groupPlasticID, groupHomogID mainProcess: if (worldrank == 0) then write(6,'(/,a)') ' <<<+- CPFEM init -+>>>' @@ -155,28 +155,33 @@ subroutine CPFEM_init write(rankStr,'(a1,i0)')'_',worldrank - fileReadID = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5') + fileHandle = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5') - call HDF5_read(material_phase, fileReadID,'recordedPhase') - call HDF5_read(crystallite_F0, fileReadID,'convergedF') - call HDF5_read(crystallite_Fp0, fileReadID,'convergedFp') - call HDF5_read(crystallite_Fi0, fileReadID,'convergedFi') - call HDF5_read(crystallite_Lp0, fileReadID,'convergedLp') - call HDF5_read(crystallite_Li0, fileReadID,'convergedLi') - call HDF5_read(crystallite_dPdF0, fileReadID,'convergeddPdF') - call HDF5_read(crystallite_Tstar0_v,fileReadID,'convergedTstar') + call HDF5_read(material_phase, fileHandle,'recordedPhase') + call HDF5_read(crystallite_F0, fileHandle,'convergedF') + call HDF5_read(crystallite_Fp0, fileHandle,'convergedFp') + call HDF5_read(crystallite_Fi0, fileHandle,'convergedFi') + call HDF5_read(crystallite_Lp0, fileHandle,'convergedLp') + call HDF5_read(crystallite_Li0, fileHandle,'convergedLi') + call HDF5_read(crystallite_dPdF0, fileHandle,'convergeddPdF') + call HDF5_read(crystallite_Tstar0_v,fileHandle,'convergedTstar') - groupPlasticID = HDF5_openGroup2(fileReadID,'PlasticPhases') + groupPlasticID = HDF5_openGroup(fileHandle,'PlasticPhases') do ph = 1_pInt,size(phase_plasticity) write(PlasticItem,*) ph,'_' call HDF5_read(plasticState(ph)%state0,groupPlasticID,trim(PlasticItem)//'convergedStateConst') enddo + call HDF5_closeGroup(groupPlasticID) - groupHomogID = HDF5_openGroup2(fileReadID,'HomogStates') + groupHomogID = HDF5_openGroup(fileHandle,'HomogStates') do homog = 1_pInt, material_Nhomogenization write(HomogItem,*) homog,'_' call HDF5_read(homogState(homog)%state0, 
groupHomogID,trim(HomogItem)//'convergedStateHomog') enddo + call HDF5_closeGroup(groupHomogID) + + + call HDF5_closeFile(fileHandle) restartRead = .false. endif @@ -234,8 +239,8 @@ subroutine CPFEM_age() use HDF5_utilities, only: & HDF5_openFile, & HDF5_closeFile, & + HDF5_addGroup, & HDF5_closeGroup, & - HDF5_addGroup2, & HDF5_write use hdf5 use DAMASK_interface, only: & @@ -243,11 +248,9 @@ subroutine CPFEM_age() implicit none - integer(pInt) :: i, k, l, m, ph, homog, mySource + integer(pInt) :: i, ph, homog, mySource character(len=32) :: rankStr, PlasticItem, HomogItem integer(HID_T) :: fileHandle, groupPlastic, groupHomog - integer :: hdferr - integer(HSIZE_T) :: hdfsize if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) & write(6,'(a)') '<< CPFEM >> aging states' @@ -291,14 +294,14 @@ if (restartWrite) then call HDF5_write(crystallite_dPdF0, fileHandle,'convergeddPdF') call HDF5_write(crystallite_Tstar0_v,fileHandle,'convergedTstar') - groupPlastic = HDF5_addGroup2(fileHandle,'PlasticPhases') + groupPlastic = HDF5_addGroup(fileHandle,'PlasticPhases') do ph = 1_pInt,size(phase_plasticity) write(PlasticItem,*) ph,'_' call HDF5_write(plasticState(ph)%state0,groupPlastic,trim(PlasticItem)//'convergedStateConst') enddo call HDF5_closeGroup(groupPlastic) - groupHomog = HDF5_addGroup2(fileHandle,'HomogStates') + groupHomog = HDF5_addGroup(fileHandle,'HomogStates') do homog = 1_pInt, material_Nhomogenization write(HomogItem,*) homog,'_' call HDF5_write(homogState(homog)%state0,groupHomog,trim(HomogItem)//'convergedStateHomog') diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 28a9fbde0..144bc9098 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -68,8 +68,8 @@ module HDF5_utilities HDF5_addStringAttribute, & HDF5_addIntegerAttribute, & HDF5_closeGroup ,& - HDF5_openGroup2, & - HDF5_addGroup2, & + HDF5_openGroup, & + HDF5_addGroup, & HDF5_read, & HDF5_write contains @@ -154,9 +154,10 @@ end function HDF5_openFile subroutine HDF5_closeFile(fileHandle) implicit none - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T), intent(in) :: fileHandle + integer(HDF5_ERR_TYPE) :: hdferr + call h5fclose_f(fileHandle,hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeFile: h5fclose_f') @@ -164,63 +165,66 @@ end subroutine HDF5_closeFile !-------------------------------------------------------------------------------------------------- -!> @brief adds a new group to the fileHandle (additional to addGroup2) +!> @brief adds a new group to the fileHandle !-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_addGroup2(fileHandle,groupName,parallel) +integer(HID_T) function HDF5_addGroup(fileHandle,groupName) implicit none - character(len=*), intent(in) :: groupName integer(HID_T), intent(in) :: fileHandle + character(len=*), intent(in) :: groupName + integer(HDF5_ERR_TYPE) :: hdferr - - logical,intent(in), optional :: parallel - - integer(HID_T) :: plist_id,gapl_id, gcpl_id, aplist_id + integer(HID_T) :: aplist_id !------------------------------------------------------------------------------------------------- -! creating a property list for data access properties + ! 
creating a property list for data access properties call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pcreate_f ('//trim(groupName)//')') - !------------------------------------------------------------------------------------------------- -! setting I/O mode to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')') - !------------------------------------------------------------------------------------------------- -! Create group - call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup2, hdferr, OBJECT_NAMELEN_DEFAULT_F,gapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup2: h5gcreate_f ('//trim(groupName)//')') + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5pcreate_f ('//trim(groupName)//')') -end function HDF5_addGroup2 + !------------------------------------------------------------------------------------------------- + ! setting I/O mode to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')') + + !------------------------------------------------------------------------------------------------- + ! Create group + call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup, hdferr, OBJECT_NAMELEN_DEFAULT_F,gapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')') + +end function HDF5_addGroup !-------------------------------------------------------------------------------------------------- !> @brief open an existing group of a file !-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_openGroup2(FileReadID,groupName) +integer(HID_T) function HDF5_openGroup(fileHandle,groupName) implicit none + integer(HID_T), intent(in) :: fileHandle character(len=*), intent(in) :: groupName - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T), intent(in) :: FileReadID + + integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: aplist_id logical :: is_collective !------------------------------------------------------------------------------------------------- -! creating a property list for data access properties + ! creating a property list for data access properties call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pcreate_f ('//trim(groupName)//')') - !------------------------------------------------------------------------------------------------- -! setting I/O mode to collective - call h5pget_all_coll_metadata_ops_f(aplist_id, is_collective, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')') - !------------------------------------------------------------------------------------------------- -! opening the group - call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr, gapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5gopen_f ('//trim(groupName)//')') + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5pcreate_f ('//trim(groupName)//')') -end function HDF5_openGroup2 + !------------------------------------------------------------------------------------------------- + ! setting I/O mode to collective + call h5pget_all_coll_metadata_ops_f(aplist_id, is_collective, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5pget_all_coll_metadata_ops_f ('//trim(groupName)//')') + + !------------------------------------------------------------------------------------------------- + !
opening the group - call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr, gapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5gopen_f ('//trim(groupName)//')') + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5pcreate_f ('//trim(groupName)//')') -end function HDF5_openGroup2 + !------------------------------------------------------------------------------------------------- + ! setting I/O mode to collective + call h5pget_all_coll_metadata_ops_f(aplist_id, is_collective, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')') + + !------------------------------------------------------------------------------------------------- + ! opening the group + call h5gopen_f(fileHandle, trim(groupName), HDF5_openGroup, hdferr, gapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(groupName)//')') + +end function HDF5_openGroup !-------------------------------------------------------------------------------------------------- @@ -2880,7 +2884,3 @@ end subroutine HDF5_write_pInt7 end module HDF5_Utilities - -!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - - diff --git a/src/results.f90 b/src/results.f90 index 2a695c55c..ae78ab8c1 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -31,9 +31,8 @@ module results HDF5_backwardMappingHomog, & HDF5_backwardMappingCrystallite, & HDF5_mappingCells, & - HDF5_addGroup ,& - HDF5_closeGroup ,& - HDF5_openGroup, & + results_addGroup, & + results_openGroup, & HDF5_writeVectorDataset, & HDF5_writeScalarDataset, & HDF5_writeTensorDataset, & @@ -72,8 +71,6 @@ end subroutine results_openJobFile !> @brief closes the results file !-------------------------------------------------------------------------------------------------- subroutine results_closeJobFile() - use DAMASK_interface, only: & - getSolverJobName implicit none call HDF5_closeFile(resultsFile) @@ -94,33 +91,27 @@ end subroutine results_addIncrement !-------------------------------------------------------------------------------------------------- !> @brief open a group from the results file !-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_openGroup(groupName) - use hdf5 +integer(HID_T) function results_openGroup(groupName) implicit none character(len=*), intent(in) :: groupName - integer :: hdferr + + results_openGroup = HDF5_openGroup(resultsFile,groupName) - call h5gopen_f(resultsFile, trim(groupName), HDF5_openGroup, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(groupName)//')') - -end function HDF5_openGroup +end function results_openGroup !-------------------------------------------------------------------------------------------------- !> @brief adds a new group to the results file !-------------------------------------------------------------------------------------------------- -integer(HID_T) function HDF5_addGroup(groupName) - use hdf5 +integer(HID_T) function results_addGroup(groupName) implicit none character(len=*), intent(in) :: groupName - integer :: hdferr + + results_addGroup = HDF5_addGroup(resultsFile,groupName) - call h5gcreate_f(resultsFile, trim(groupName), HDF5_addGroup, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')') - -end function HDF5_addGroup +end function results_addGroup 
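Client code of the results module now goes through these thin wrappers and never touches resultsFile directly. A usage sketch, to be placed inside a unit that uses both results and HDF5_utilities; the group path is an assumed example, and closing the returned handle still goes through HDF5_utilities' public HDF5_closeGroup:

 integer(HID_T) :: grp

 call results_openJobFile()                       ! opens <solverJobName>.hdf5, handle stays module-private
 grp = results_addGroup('current/constitutive')   ! delegates to HDF5_addGroup(resultsFile,...)
 call HDF5_closeGroup(grp)
 call results_closeJobFile()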
!-------------------------------------------------------------------------------------------------- !> @brief set link to object in results file @@ -189,7 +180,7 @@ subroutine HDF5_mappingPhase(mapping,mapping2,Nconstituents,material_phase,phase a = n allocate(namesNA(0:size(phase_name)),source=[a,phase_name]) NmatPoints = size(mapping,1)/Nconstituents - mapping_ID = HDF5_openGroup("current/mapGeometry") + mapping_ID = results_openGroup("current/mapGeometry") allocate(arrOffset(Nconstituents,NmatPoints)) do i=1_pInt, NmatPoints @@ -336,7 +327,7 @@ subroutine HDF5_backwardMappingPhase(material_phase,phasememberat,phase_name,dat do i=1_pInt, size(phase_name) write(phaseID, '(i0)') i - mapping_ID = HDF5_openGroup('/current/constitutive/'//trim(phaseID)//'_'//phase_name(i)) + mapping_ID = results_openGroup('/current/constitutive/'//trim(phaseID)//'_'//phase_name(i)) NmatPoints = count(material_phase == i) !-------------------------------------------------------------------------------------------------- @@ -436,7 +427,7 @@ subroutine HDF5_mappingHomog(material_homog,homogmemberat,homogenization_name,da integer(pInt), dimension(:), allocatable :: arrOffset NmatPoints = count(material_homog /=0_pInt) - mapping_ID = HDF5_openGroup("current/mapGeometry") + mapping_ID = results_openGroup("current/mapGeometry") allocate(arrOffset(NmatPoints)) do i=1_pInt, NmatPoints @@ -573,7 +564,7 @@ subroutine HDF5_backwardMappingHomog(material_homog,homogmemberat,homogenization do i=1_pInt, size(homogenization_name) write(homogID, '(i0)') i - mapping_ID = HDF5_openGroup('/current/homogenization/'//trim(homogID)//'_'//homogenization_name(i)) + mapping_ID = results_openGroup('/current/homogenization/'//trim(homogID)//'_'//homogenization_name(i)) !-------------------------------------------------------------------------------------------------- ! create dataspace @@ -679,7 +670,7 @@ subroutine HDF5_mappingCrystallite(crystalliteAt,crystmemberAt,crystallite_name, Nconstituents = size(crystmemberAt,1) NmatPoints = count(crystalliteAt /=0_pInt) - mapping_ID = HDF5_openGroup("current/mapGeometry") + mapping_ID = results_openGroup("current/mapGeometry") allocate(position_id(Nconstituents)) @@ -842,7 +833,7 @@ subroutine HDF5_backwardMappingCrystallite(crystalliteAt,crystmemberAt,crystalli do i=1_pInt, size(crystallite_name) if (crystallite_name(i) == 'none') cycle write(crystallID, '(i0)') i - mapping_ID = HDF5_openGroup('/current/crystallite/'//trim(crystallID)//'_'//crystallite_name(i)) + mapping_ID = results_openGroup('/current/crystallite/'//trim(crystallID)//'_'//crystallite_name(i)) NmatPoints = count(crystalliteAt == i) !-------------------------------------------------------------------------------------------------- @@ -933,7 +924,7 @@ subroutine HDF5_mappingCells(mapping) integer(HID_T) :: mapping_id, dset_id, space_id Nnodes=size(mapping) - mapping_ID = HDF5_openGroup("mapping") + mapping_ID = results_openGroup("mapping") !-------------------------------------------------------------------------------------------------- ! 
create dataspace From adffe41ffe5534d0a1adb8f5334736c44d05925d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 4 Dec 2018 23:55:39 +0100 Subject: [PATCH 46/89] writing group structure in file root --- src/CPFEM2.f90 | 156 ++++++++++++++++++++++------------------ src/DAMASK_spectral.f90 | 4 +- src/constitutive.f90 | 11 ++- 3 files changed, 101 insertions(+), 70 deletions(-) diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90 index 54774cf59..731fcf231 100644 --- a/src/CPFEM2.f90 +++ b/src/CPFEM2.f90 @@ -10,8 +10,8 @@ module CPFEM2 public :: & CPFEM_age, & - CPFEM_initAll - + CPFEM_initAll, & + CPFEM_results contains @@ -20,8 +20,7 @@ contains !-------------------------------------------------------------------------------------------------- subroutine CPFEM_initAll() use prec, only: & - pInt - use prec, only: & + pInt, & prec_init use numerics, only: & numerics_init @@ -139,12 +138,10 @@ subroutine CPFEM_init character(len=1024) :: rankStr, PlasticItem, HomogItem integer(HID_T) :: fileHandle, groupPlasticID, groupHomogID - mainProcess: if (worldrank == 0) then - write(6,'(/,a)') ' <<<+- CPFEM init -+>>>' - write(6,'(a15,a)') ' Current time: ',IO_timeStamp() + write(6,'(/,a)') ' <<<+- CPFEM init -+>>>' + write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" - flush(6) - endif mainProcess + flush(6) ! *** restore the last converged values of each essential variable from the binary file if (restartRead) then @@ -188,6 +185,7 @@ subroutine CPFEM_init end subroutine CPFEM_init + !-------------------------------------------------------------------------------------------------- !> @brief forwards data after successful increment !-------------------------------------------------------------------------------------------------- @@ -247,74 +245,96 @@ subroutine CPFEM_age() getSolverJobName implicit none - integer(pInt) :: i, ph, homog, mySource character(len=32) :: rankStr, PlasticItem, HomogItem integer(HID_T) :: fileHandle, groupPlastic, groupHomog -if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) & - write(6,'(a)') '<< CPFEM >> aging states' + if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) & + write(6,'(a)') '<< CPFEM >> aging states' - crystallite_F0 = crystallite_partionedF ! crystallite deformation (_subF is perturbed...) - crystallite_Fp0 = crystallite_Fp ! crystallite plastic deformation - crystallite_Lp0 = crystallite_Lp ! crystallite plastic velocity - crystallite_Fi0 = crystallite_Fi ! crystallite intermediate deformation - crystallite_Li0 = crystallite_Li ! crystallite intermediate velocity - crystallite_dPdF0 = crystallite_dPdF ! crystallite stiffness - crystallite_Tstar0_v = crystallite_Tstar_v ! crystallite 2nd Piola Kirchhoff stress - - forall (i = 1:size(plasticState)) plasticState(i)%state0 = plasticState(i)%state ! copy state in this lengthy way because: A component cannot be an array if the encompassing structure is an array - - do i = 1, size(sourceState) - do mySource = 1,phase_Nsources(i) - sourceState(i)%p(mySource)%state0 = sourceState(i)%p(mySource)%state ! 
copy state in this lengthy way because: A component cannot be an array if the encompassing structure is an array - enddo; enddo - - do homog = 1_pInt, material_Nhomogenization - homogState (homog)%state0 = homogState (homog)%state - thermalState (homog)%state0 = thermalState (homog)%state - damageState (homog)%state0 = damageState (homog)%state - vacancyfluxState (homog)%state0 = vacancyfluxState (homog)%state - hydrogenfluxState(homog)%state0 = hydrogenfluxState(homog)%state - enddo + crystallite_F0 = crystallite_partionedF + crystallite_Fp0 = crystallite_Fp + crystallite_Lp0 = crystallite_Lp + crystallite_Fi0 = crystallite_Fi + crystallite_Li0 = crystallite_Li + crystallite_dPdF0 = crystallite_dPdF + crystallite_Tstar0_v = crystallite_Tstar_v + + forall (i = 1:size(plasticState)) plasticState(i)%state0 = plasticState(i)%state ! copy state in this lengthy way because: A component cannot be an array if the encompassing structure is an array + + do i = 1, size(sourceState) + do mySource = 1,phase_Nsources(i) + sourceState(i)%p(mySource)%state0 = sourceState(i)%p(mySource)%state ! copy state in this lengthy way because: A component cannot be an array if the encompassing structure is an array + enddo; enddo + + do homog = 1_pInt, material_Nhomogenization + homogState (homog)%state0 = homogState (homog)%state + thermalState (homog)%state0 = thermalState (homog)%state + damageState (homog)%state0 = damageState (homog)%state + vacancyfluxState (homog)%state0 = vacancyfluxState (homog)%state + hydrogenfluxState(homog)%state0 = hydrogenfluxState(homog)%state + enddo -if (restartWrite) then - if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) & - write(6,'(a)') '<< CPFEM >> writing restart variables of last converged step to hdf5 file' - write(rankStr,'(a1,i0)')'_',worldrank + if (restartWrite) then + if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) & + write(6,'(a)') '<< CPFEM >> writing restart variables of last converged step to hdf5 file' + + write(rankStr,'(a1,i0)')'_',worldrank + fileHandle = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5','w') + + call HDF5_write(material_phase, fileHandle,'recordedPhase') + call HDF5_write(crystallite_F0, fileHandle,'convergedF') + call HDF5_write(crystallite_Fp0, fileHandle,'convergedFp') + call HDF5_write(crystallite_Fi0, fileHandle,'convergedFi') + call HDF5_write(crystallite_Lp0, fileHandle,'convergedLp') + call HDF5_write(crystallite_Li0, fileHandle,'convergedLi') + call HDF5_write(crystallite_dPdF0, fileHandle,'convergeddPdF') + call HDF5_write(crystallite_Tstar0_v,fileHandle,'convergedTstar') + + groupPlastic = HDF5_addGroup(fileHandle,'PlasticPhases') + do ph = 1_pInt,size(phase_plasticity) + write(PlasticItem,*) ph,'_' + call HDF5_write(plasticState(ph)%state0,groupPlastic,trim(PlasticItem)//'convergedStateConst') + enddo + call HDF5_closeGroup(groupPlastic) - fileHandle = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5','w') - - call HDF5_write(material_phase, fileHandle,'recordedPhase') - call HDF5_write(crystallite_F0, fileHandle,'convergedF') - call HDF5_write(crystallite_Fp0, fileHandle,'convergedFp') - call HDF5_write(crystallite_Fi0, fileHandle,'convergedFi') - call HDF5_write(crystallite_Lp0, fileHandle,'convergedLp') - call HDF5_write(crystallite_Li0, fileHandle,'convergedLi') - call HDF5_write(crystallite_dPdF0, fileHandle,'convergeddPdF') - call HDF5_write(crystallite_Tstar0_v,fileHandle,'convergedTstar') - - groupPlastic = HDF5_addGroup(fileHandle,'PlasticPhases') - do ph = 
1_pInt,size(phase_plasticity) - write(PlasticItem,*) ph,'_' - call HDF5_write(plasticState(ph)%state0,groupPlastic,trim(PlasticItem)//'convergedStateConst') - enddo - call HDF5_closeGroup(groupPlastic) + groupHomog = HDF5_addGroup(fileHandle,'HomogStates') + do homog = 1_pInt, material_Nhomogenization + write(HomogItem,*) homog,'_' + call HDF5_write(homogState(homog)%state0,groupHomog,trim(HomogItem)//'convergedStateHomog') + enddo + call HDF5_closeGroup(groupHomog) + + call HDF5_closeFile(fileHandle) + restartWrite = .false. + endif - groupHomog = HDF5_addGroup(fileHandle,'HomogStates') - do homog = 1_pInt, material_Nhomogenization - write(HomogItem,*) homog,'_' - call HDF5_write(homogState(homog)%state0,groupHomog,trim(HomogItem)//'convergedStateHomog') - enddo - call HDF5_closeGroup(groupHomog) - - call HDF5_closeFile(fileHandle) - restartWrite = .false. -endif - -if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) & - write(6,'(a)') '<< CPFEM >> done aging states' + if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) & + write(6,'(a)') '<< CPFEM >> done aging states' end subroutine CPFEM_age +!-------------------------------------------------------------------------------------------------- +!> @brief triggers writing of the results +!-------------------------------------------------------------------------------------------------- +subroutine CPFEM_results(inc) + use prec, only: & + pInt + use results + use HDF5_utilities + use constitutive, only: & + constitutive_results + + implicit none + integer(pInt), intent(in) :: inc + character(len=16) :: incChar + + call results_openJobFile + write(incChar,*) inc + call HDF5_closeGroup(results_addGroup(trim('inc'//trim(adjustl(incChar))))) + call constitutive_results() + call results_closeJobFile + +end subroutine CPFEM_results + end module CPFEM2 diff --git a/src/DAMASK_spectral.f90 b/src/DAMASK_spectral.f90 index 1e75f2761..74e81f126 100644 --- a/src/DAMASK_spectral.f90 +++ b/src/DAMASK_spectral.f90 @@ -46,7 +46,8 @@ program DAMASK_spectral grid, & geomSize use CPFEM2, only: & - CPFEM_initAll + CPFEM_initAll, & + CPFEM_results use FEsolving, only: & restartWrite, & restartInc @@ -601,6 +602,7 @@ program DAMASK_spectral if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write') enddo fileOffset = fileOffset + sum(outputSize) ! forward to current file position + call CPFEM_results(inc) endif if ( loadCases(currentLoadCase)%restartFrequency > 0_pInt & ! writing of restart info requested ... .and. mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! ... 
and at frequency of writing restart information diff --git a/src/constitutive.f90 b/src/constitutive.f90 index eca8af08a..cbb072471 100644 --- a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -25,7 +25,8 @@ module constitutive constitutive_SandItsTangents, & constitutive_collectDotState, & constitutive_collectDeltaState, & - constitutive_postResults + constitutive_postResults, & + constitutive_results private :: & constitutive_hooke_SandItsTangents @@ -1179,4 +1180,12 @@ function constitutive_postResults(S6, Fi, FeArray, ipc, ip, el) end function constitutive_postResults + +!-------------------------------------------------------------------------------------------------- +!> @brief contains the constitutive equation for calculating the velocity gradient +!-------------------------------------------------------------------------------------------------- +subroutine constitutive_results() + +end subroutine constitutive_results + end module constitutive From 0ed1bd11bd0e307401aba34a2a1947e22e895156 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 5 Dec 2018 13:51:24 +0100 Subject: [PATCH 47/89] need to write out total increments otherwise, group/folder of the same name would exist --- src/DAMASK_spectral.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/DAMASK_spectral.f90 b/src/DAMASK_spectral.f90 index 74e81f126..781598f3d 100644 --- a/src/DAMASK_spectral.f90 +++ b/src/DAMASK_spectral.f90 @@ -602,7 +602,7 @@ program DAMASK_spectral if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write') enddo fileOffset = fileOffset + sum(outputSize) ! forward to current file position - call CPFEM_results(inc) + call CPFEM_results(totalIncsCounter) endif if ( loadCases(currentLoadCase)%restartFrequency > 0_pInt & ! writing of restart info requested ... .and. mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! ... 
and at frequency of writing restart information From cb28d10d79d38694348e416f8fdc0f0d8ba70edb Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 12 Dec 2018 06:40:57 +0100 Subject: [PATCH 48/89] dummy structure to write plasticity results --- src/constitutive.f90 | 16 ++++++++++++- src/plastic_phenopowerlaw.f90 | 11 ++++++++- src/results.f90 | 42 +++++------------------------------ 3 files changed, 30 insertions(+), 39 deletions(-) diff --git a/src/constitutive.f90 b/src/constitutive.f90 index cbb072471..61bb55542 100644 --- a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -1182,9 +1182,23 @@ end function constitutive_postResults !-------------------------------------------------------------------------------------------------- -!> @brief contains the constitutive equation for calculating the velocity gradient +!> @brief writes constitutive results to HDF5 output file !-------------------------------------------------------------------------------------------------- subroutine constitutive_results() + use material, only: & + PLASTICITY_ISOTROPIC_ID, & + PLASTICITY_PHENOPOWERLAW_ID, & + PLASTICITY_KINEHARDENING_ID, & + PLASTICITY_DISLOTWIN_ID, & + PLASTICITY_DISLOUCLA_ID, & + PLASTICITY_NONLOCAL_ID +#if defined(PETSc) || defined(DAMASKHDF5) + use plastic_phenopowerlaw, only: & + plastic_phenopowerlaw_results + + call plastic_phenopowerlaw_results +#endif + end subroutine constitutive_results diff --git a/src/plastic_phenopowerlaw.f90 b/src/plastic_phenopowerlaw.f90 index 57d48d109..ebfab0560 100644 --- a/src/plastic_phenopowerlaw.f90 +++ b/src/plastic_phenopowerlaw.f90 @@ -99,7 +99,8 @@ module plastic_phenopowerlaw plastic_phenopowerlaw_init, & plastic_phenopowerlaw_LpAndItsTangent, & plastic_phenopowerlaw_dotState, & - plastic_phenopowerlaw_postResults + plastic_phenopowerlaw_postResults, & + plastic_phenopowerlaw_results contains @@ -745,4 +746,12 @@ function plastic_phenopowerlaw_postResults(Mp,instance,of) result(postResults) end function plastic_phenopowerlaw_postResults +!-------------------------------------------------------------------------------------------------- +!> @brief writes results to HDF5 output file +!-------------------------------------------------------------------------------------------------- +subroutine plastic_phenopowerlaw_results() +#if defined(PETSc) || defined(DAMASKHDF5) +#endif +end subroutine plastic_phenopowerlaw_results + end module plastic_phenopowerlaw diff --git a/src/results.f90 b/src/results.f90 index ae78ab8c1..aff53b1ba 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -33,7 +33,7 @@ module results HDF5_mappingCells, & results_addGroup, & results_openGroup, & - HDF5_writeVectorDataset, & + results_writeVectorDataset, & HDF5_writeScalarDataset, & HDF5_writeTensorDataset, & HDF5_removeLink @@ -988,16 +988,16 @@ subroutine HDF5_addTensor3DDataset(group,Nnodes,tensorSize,label,SIunit) end subroutine HDF5_addTensor3DDataset + !-------------------------------------------------------------------------------------------------- -!> @brief creates a new vector dataset in the given group location !!!TODO: really necessary? 
+!> @brief creates a new vector dataset in the given group location !-------------------------------------------------------------------------------------------------- -subroutine HDF5_writeVectorDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset) +subroutine results_writeVectorDataset(group,dataset,label,SIunit) use hdf5 implicit none integer(HID_T), intent(in) :: group character(len=*), intent(in) :: SIunit,label - integer(pInt), intent(in) :: dataspace_size, mpiOffset real(pReal), intent(in), dimension(:,:) :: dataset integer :: hdferr, vectorSize @@ -1009,43 +1009,11 @@ subroutine HDF5_writeVectorDataset(group,dataset,label,SIunit,dataspace_size,mpi if(any(shape(dataset) == 0)) return vectorSize = size(dataset,1) - - call HDF5_addVectorDataset(group,dataspace_size,vectorSize,label,SIunit) ! here nNodes need to be global call h5dopen_f(group, label, dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dopen_f') - ! Define and select hyperslabs - counter(1) = vectorSize ! how big i am - counter(2) = size(dataset,2) - fileOffset(1) = 0 ! where i start to write my data - fileOffset(2) = mpiOffset - call h5screate_simple_f(2, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5sselect_hyperslab_f') - - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5pset_dxpl_mpio_f') -#endif - - ! 
Write the dataset collectively - call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([vectorSize, dataspace_size],HSIZE_T), hdferr, & - file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dwrite_f') - - call h5sclose_f(space_id, hdferr) - call h5sclose_f(memspace, hdferr) - call h5dclose_f(dset_id, hdferr) - call h5pclose_f(plist_id, hdferr) - -end subroutine HDF5_writeVectorDataset +end subroutine results_writeVectorDataset !-------------------------------------------------------------------------------------------------- !> @brief creates a new tensor dataset in the given group location From fd2d4d856bdca634111370c9537eed4ae8e19643 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 12 Dec 2018 07:45:20 +0100 Subject: [PATCH 49/89] cleaned + suggested structure to write data --- src/CPFEM2.f90 | 3 +- src/HDF5_utilities.f90 | 26 +++- src/constitutive.f90 | 19 ++- src/plastic_phenopowerlaw.f90 | 23 ++- src/results.f90 | 267 ++-------------------------------- 5 files changed, 78 insertions(+), 260 deletions(-) diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90 index 731fcf231..e22909231 100644 --- a/src/CPFEM2.f90 +++ b/src/CPFEM2.f90 @@ -331,7 +331,8 @@ subroutine CPFEM_results(inc) call results_openJobFile write(incChar,*) inc - call HDF5_closeGroup(results_addGroup(trim('inc'//trim(adjustl(incChar))))) + call HDF5_closeGroup(results_addGroup(trim('inc'//trim(adjustl(incChar))))) + call results_setLink(trim('inc'//trim(adjustl(incChar))),'current') call constitutive_results() call results_closeJobFile diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 144bc9098..32747218c 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -13,7 +13,7 @@ module HDF5_utilities #endif implicit none - private + public integer(pInt), parameter, private :: & HDF5_ERR_TYPE = 4_pInt !< kind of the integer return in the HDF5 library @@ -71,7 +71,8 @@ module HDF5_utilities HDF5_openGroup, & HDF5_addGroup, & HDF5_read, & - HDF5_write + HDF5_write, & + HDF5_setLink contains subroutine HDF5_utilities_init @@ -304,7 +305,28 @@ subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue) end subroutine HDF5_addIntegerAttribute +!-------------------------------------------------------------------------------------------------- +!> @brief set link to object in results file +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_setLink(fileHandle,path,link) + use hdf5 + implicit none + character(len=*), intent(in) :: path, link + integer(HID_T), intent(in) :: fileHandle + integer(HDF5_ERR_TYPE) :: hdferr + logical :: linkExists + + call h5lexists_f(fileHandle, link,linkExists, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link)//')') + if (linkExists) then + call h5ldelete_f(fileHandle,link, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5ldelete_soft_f ('//trim(link)//')') + endif + call h5lcreate_soft_f(path, fileHandle, link, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lcreate_soft_f ('//trim(path)//' '//trim(link)//')') + +end subroutine HDF5_setLink !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pReal with 1 dimensions diff --git a/src/constitutive.f90 b/src/constitutive.f90 index 61bb55542..db90bfc20 100644 --- 
a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -1193,10 +1193,25 @@ subroutine constitutive_results() PLASTICITY_DISLOUCLA_ID, & PLASTICITY_NONLOCAL_ID #if defined(PETSc) || defined(DAMASKHDF5) + use results + use HDF5_utilities + use config, only: & + config_name_phase => phase_name ! anticipate logical name + use material, only: & + material_phase_plasticity_type => phase_plasticity use plastic_phenopowerlaw, only: & plastic_phenopowerlaw_results - - call plastic_phenopowerlaw_results + + implicit none + integer(pInt) :: p + call HDF5_closeGroup(results_addGroup('current/phase')) + do p=1,size(config_name_phase) + call HDF5_closeGroup(results_addGroup('current/phase/'//trim(config_name_phase(p)))) + if (material_phase_plasticity_type(p) == PLASTICITY_PHENOPOWERLAW_ID) then + call plastic_phenopowerlaw_results(p,'current/phase/'//trim(config_name_phase(p))) + endif + enddo + #endif diff --git a/src/plastic_phenopowerlaw.f90 b/src/plastic_phenopowerlaw.f90 index ebfab0560..677d2872c 100644 --- a/src/plastic_phenopowerlaw.f90 +++ b/src/plastic_phenopowerlaw.f90 @@ -749,8 +749,29 @@ end function plastic_phenopowerlaw_postResults !-------------------------------------------------------------------------------------------------- !> @brief writes results to HDF5 output file !-------------------------------------------------------------------------------------------------- -subroutine plastic_phenopowerlaw_results() +subroutine plastic_phenopowerlaw_results(instance,group) #if defined(PETSc) || defined(DAMASKHDF5) + use results + + implicit none + integer(pInt), intent(in) :: instance + character(len=*) :: group + integer(pInt) :: o + + associate(prm => param(instance), stt => state(instance)) + outputsLoop: do o = 1_pInt,size(prm%outputID) + select case(prm%outputID(o)) + case (resistance_slip_ID) + call results_writeVectorDataset(group,stt%xi_slip,'xi_slip','Pa') + case (accumulatedshear_slip_ID) + call results_writeVectorDataset(group,stt%gamma_slip,'gamma_slip','1/s') + end select + enddo outputsLoop + end associate + !results_writeVectorDataset +#else + integer(pInt), intent(in) :: instance + character(len=*) :: group #endif end subroutine plastic_phenopowerlaw_results diff --git a/src/results.f90 b/src/results.f90 index aff53b1ba..5fe35f0ee 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -34,8 +34,7 @@ module results results_addGroup, & results_openGroup, & results_writeVectorDataset, & - HDF5_writeScalarDataset, & - HDF5_writeTensorDataset, & + results_setLink, & HDF5_removeLink contains @@ -116,24 +115,16 @@ end function results_addGroup !-------------------------------------------------------------------------------------------------- !> @brief set link to object in results file !-------------------------------------------------------------------------------------------------- -subroutine HDF5_setLink(path,link) - use hdf5 +subroutine results_setLink(path,link) + use hdf5_utilities, only: & + HDF5_setLink implicit none character(len=*), intent(in) :: path, link - integer :: hdferr - logical :: linkExists - call h5lexists_f(resultsFile, link,linkExists, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link)//')') - if (linkExists) then - call h5ldelete_f(resultsFile,link, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5ldelete_soft_f ('//trim(link)//')') - endif - call h5lcreate_soft_f(path, resultsFile, link, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lcreate_soft_f 
('//trim(path)//' '//trim(link)//')') + call HDF5_setLink(resultsFile,path,link) -end subroutine HDF5_setLink +end subroutine results_setLink !-------------------------------------------------------------------------------------------------- !> @brief remove link to an object @@ -952,253 +943,21 @@ subroutine HDF5_mappingCells(mapping) end subroutine HDF5_mappingCells -!-------------------------------------------------------------------------------------------------- -!> @brief creates a new 3D Tensor dataset in the given group location !!!TODO: really necessary? -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_addTensor3DDataset(group,Nnodes,tensorSize,label,SIunit) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - integer(pInt), intent(in) :: Nnodes, tensorSize - character(len=*), intent(in) :: SIunit, label - - integer :: hdferr - integer(HID_T) :: space_id, dset_id - integer(HSIZE_T), dimension(3) :: dataShape - - dataShape = int([tensorSize,tensorSize,Nnodes], HSIZE_T) - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(3, dataShape, space_id, hdferr, dataShape) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5screate_simple_f') - -!-------------------------------------------------------------------------------------------------- -! create Dataset - call h5dcreate_f(group, trim(label),H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5dcreate_f') - call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit)) - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addTensor3DDataset: h5dclose_f') - call h5sclose_f(space_id, hdferr) - -end subroutine HDF5_addTensor3DDataset - - !-------------------------------------------------------------------------------------------------- !> @brief creates a new vector dataset in the given group location !-------------------------------------------------------------------------------------------------- subroutine results_writeVectorDataset(group,dataset,label,SIunit) - use hdf5 implicit none - integer(HID_T), intent(in) :: group - character(len=*), intent(in) :: SIunit,label - real(pReal), intent(in), dimension(:,:) :: dataset - - integer :: hdferr, vectorSize - integer(HID_T) :: dset_id, space_id, memspace, plist_id - - integer(HSIZE_T), dimension(2) :: counter - integer(HSSIZE_T), dimension(2) :: fileOffset - - if(any(shape(dataset) == 0)) return - - vectorSize = size(dataset,1) - call h5dopen_f(group, label, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeVectorDataset: h5dopen_f') - + character(len=*), intent(in) :: SIunit,label,group + real(pReal), intent(inout), dimension(:,:) :: dataset + integer(HID_T) :: groupHandle + + groupHandle = results_openGroup(group) + call HDF5_write(dataset,groupHandle,label) + call HDF5_closeGroup(groupHandle) end subroutine results_writeVectorDataset -!-------------------------------------------------------------------------------------------------- -!> @brief creates a new tensor dataset in the given group location -! by default, a 3x3 tensor is assumed !!!TODO: really necessary? 
-!-------------------------------------------------------------------------------------------------- -subroutine HDF5_writeTensorDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - character(len=*), intent(in) :: SIunit,label - integer(pInt), intent(in) :: dataspace_size, mpiOffset - real(pReal), intent(in), dimension(:,:,:) :: dataset - - integer :: hdferr, tensorSize - integer(HID_T) :: dset_id, space_id, memspace, plist_id - - integer(HSIZE_T), dimension(3) :: counter - integer(HSSIZE_T), dimension(3) :: fileOffset - - if(any(shape(dataset) == 0)) return - - tensorSize = size(dataset,1) - - call HDF5_addTensor3DDataset(group,dataspace_size,tensorSize,label,SIunit) - call h5dopen_f(group, label, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dopen_f') - - ! Define and select hyperslabs - counter(1) = tensorSize ! how big i am - counter(2) = tensorSize - counter(3) = size(dataset,3) - fileOffset(1) = 0 ! where i start to write my data - fileOffset(2) = 0 - fileOffset(3) = mpiOffset - - call h5screate_simple_f(3, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5sselect_hyperslab_f') - - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5pset_dxpl_mpio_f') -#endif - - ! Write the dataset collectively - call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([tensorSize, dataspace_size],HSIZE_T), hdferr, & - file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeTensorDataset: h5dwrite_f') - - call h5sclose_f(space_id, hdferr) - call h5sclose_f(memspace, hdferr) - call h5dclose_f(dset_id, hdferr) - call h5pclose_f(plist_id, hdferr) - - end subroutine HDF5_writeTensorDataset - -!-------------------------------------------------------------------------------------------------- -!> @brief adds a new vector dataset to the given group location -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_addVectorDataset(group,nnodes,vectorSize,label,SIunit) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - integer(pInt), intent(in) :: nnodes,vectorSize - character(len=*), intent(in) :: SIunit,label - - integer :: hdferr - integer(HID_T) :: space_id, dset_id - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(2, int([vectorSize,Nnodes],HSIZE_T), space_id, hdferr, & - int([vectorSize,Nnodes],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5screate_simple_f') - -!-------------------------------------------------------------------------------------------------- -! 
create Dataset - call h5dcreate_f(group, trim(label), H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5dcreate_f') - call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit)) - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addVectorDataset: h5dclose_f') - call h5sclose_f(space_id, hdferr) - -end subroutine HDF5_addVectorDataset - -!-------------------------------------------------------------------------------------------------- -!> @brief writes to a new scalar dataset in the given group location -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_writeScalarDataset(group,dataset,label,SIunit,dataspace_size,mpiOffset) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - character(len=*), intent(in) :: SIunit,label - integer(pInt), intent(in) :: dataspace_size, mpiOffset - real(pReal), intent(in), dimension(:) :: dataset - - integer :: hdferr, nNodes - integer(HID_T) :: dset_id, space_id, memspace, plist_id - - integer(HSIZE_T), dimension(1) :: counter - integer(HSIZE_T), dimension(1) :: fileOffset - - nNodes = size(dataset) - if (nNodes < 1) return - - call HDF5_addScalarDataset(group,dataspace_size,label,SIunit) - call h5dopen_f(group, label, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dopen_f') - - ! Define and select hyperslabs - counter = size(dataset) ! how big i am - fileOffset = mpiOffset ! where i start to write my data - - call h5screate_simple_f(1, counter, memspace, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5screate_simple_f') - call h5dget_space_f(dset_id, space_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dget_space_f') - call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5sselect_hyperslab_f') - - ! Create property list for collective dataset write -#ifdef PETSc - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5pcreate_f') - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5pset_dxpl_mpio_f') -#endif - - ! 
Write the dataset collectively - call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, dataset, int([dataspace_size],HSIZE_T), hdferr, & - file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_writeScalarDataset: h5dwrite_f') - - call h5sclose_f(space_id, hdferr) - call h5sclose_f(memspace, hdferr) - call h5dclose_f(dset_id, hdferr) - call h5pclose_f(plist_id, hdferr) - -end subroutine HDF5_writeScalarDataset - -!-------------------------------------------------------------------------------------------------- -!> @brief adds a new scalar dataset to the given group location -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_addScalarDataset(group,nnodes,label,SIunit) - use hdf5 - - implicit none - integer(HID_T), intent(in) :: group - integer(pInt), intent(in) :: nnodes - character(len=*), intent(in) :: SIunit,label - - integer :: hdferr - integer(HID_T) :: space_id, dset_id - -!-------------------------------------------------------------------------------------------------- -! create dataspace - call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, & - int([Nnodes],HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5screate_simple_f') - -!-------------------------------------------------------------------------------------------------- -! create Dataset - call h5dcreate_f(group, trim(label),H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dcreate_f') - call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit)) - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dclose_f') - call h5sclose_f(space_id, hdferr) - -end subroutine HDF5_addScalarDataset end module results From dfafddec57b329c23bd13da9868f526220b3b275 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 13 Dec 2018 06:59:56 +0100 Subject: [PATCH 50/89] simplifying --- src/plastic_kinematichardening.f90 | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 590267890..8fde0e54e 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -930,31 +930,31 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) result(postResults) call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & Mp,ph,instance,of) - + associate( prm => paramNew(instance), stt => state(instance)) outputsLoop: do o = 1_pInt,plastic_kinehardening_Noutput(instance) - select case(param(instance)%outputID(o)) + select case(prm%outputID(o)) case (crss_ID) - postResults(c+1_pInt:c+nSlip) = state(instance)%crss(:,of) + postResults(c+1_pInt:c+nSlip) = stt%crss(:,of) c = c + nSlip case(crss_back_ID) - postResults(c+1_pInt:c+nSlip) = state(instance)%crss_back(:,of) + postResults(c+1_pInt:c+nSlip) = stt%crss_back(:,of) c = c + nSlip case (sense_ID) - postResults(c+1_pInt:c+nSlip) = state(instance)%sense(:,of) + postResults(c+1_pInt:c+nSlip) = stt%sense(:,of) c = c + nSlip case (chi0_ID) - postResults(c+1_pInt:c+nSlip) = state(instance)%chi0(:,of) + postResults(c+1_pInt:c+nSlip) = stt%chi0(:,of) c = c + nSlip case (gamma0_ID) - postResults(c+1_pInt:c+nSlip) = state(instance)%gamma0(:,of) + 
postResults(c+1_pInt:c+nSlip) = stt%gamma0(:,of) c = c + nSlip case (accshear_ID) - postResults(c+1_pInt:c+nSlip) = state(instance)%accshear(:,of) + postResults(c+1_pInt:c+nSlip) = stt%accshear(:,of) c = c + nSlip case (shearrate_ID) @@ -975,6 +975,7 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) result(postResults) end select enddo outputsLoop + end associate end function plastic_kinehardening_postResults @@ -984,8 +985,7 @@ end function plastic_kinehardening_postResults !> @details: Shear rates are calculated only optionally. NOTE: Against the common convention, the !> result (i.e. intent(out)) variables are the last to have the optional arguments at the end !-------------------------------------------------------------------------------------------------- -pure subroutine kinetics(prm,stt,of,Mp,gdot_pos,gdot_neg, & - dgdot_dtau_pos,dgdot_dtau_neg) +pure subroutine kinetics(prm,stt,of,Mp,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) use prec, only: & dNeq0 use math, only: & From 6b5131e0f3ac494a198ff1d04b8cd128ad3f413d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 13 Dec 2018 07:06:12 +0100 Subject: [PATCH 51/89] no need to have as a separate state --- src/plastic_kinematichardening.f90 | 39 +++++++++++++----------------- 1 file changed, 17 insertions(+), 22 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 8fde0e54e..bfb80cd7a 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -87,8 +87,6 @@ module plastic_kinehardening gamma0, & !< accumulated shear at last switch of stress sense accshear !< accumulated (absolute) shear - real(pReal), pointer, dimension(:) :: & !< scalars along NipcMyInstance - sumGamma !< accumulated shear across all systems end type type(tParameters), dimension(:), allocatable, private :: & @@ -258,11 +256,11 @@ subroutine plastic_kinehardening_init(fileUnit) prm%crss0 = config_phase(p)%getFloats('crss0', requiredShape=shape(prm%Nslip)) prm%tau1 = config_phase(p)%getFloats('tau1', requiredShape=shape(prm%Nslip)) - prm%tau1_b = config_phase(p)%getFloats('tau1_b', requiredShape=shape(prm%Nslip)) - prm%theta0 = config_phase(p)%getFloats('theta0', requiredShape=shape(prm%Nslip)) - prm%theta1 = config_phase(p)%getFloats('theta1', requiredShape=shape(prm%Nslip)) - prm%theta0_b = config_phase(p)%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) - prm%theta1_b = config_phase(p)%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) + prm%tau1_b = config_phase(p)%getFloats('tau1_b', requiredShape=shape(prm%Nslip)) + prm%theta0 = config_phase(p)%getFloats('theta0', requiredShape=shape(prm%Nslip)) + prm%theta1 = config_phase(p)%getFloats('theta1', requiredShape=shape(prm%Nslip)) + prm%theta0_b = config_phase(p)%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) + prm%theta1_b = config_phase(p)%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) prm%gdot0 = config_phase(p)%getFloat('gdot0') @@ -324,8 +322,7 @@ param(instance)%outputID = prm%outputID ! 
allocate state arrays sizeDotState = nSlip & !< crss + nSlip & !< crss_back - + nSlip & !< accumulated (absolute) shear - + 1_pInt !< sum(gamma) + + nSlip !< accumulated (absolute) shear sizeDeltaState = nSlip & !< sense of acting shear stress (-1 or +1) + nSlip & !< backstress at last switch of stress sense @@ -362,13 +359,6 @@ param(instance)%outputID = prm%outputID dot%accshear => plasticState(p)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) plasticState(p)%aTolState(startIndex-o:endIndex-o) = prm%aTolShear -! ............................................. - startIndex = endIndex + 1_pInt - endIndex = endIndex + 1_pInt - stt%sumGamma => plasticState(p)%state (startIndex ,1:NipcMyPhase) - dot%sumGamma => plasticState(p)%dotState (startIndex-o ,1:NipcMyPhase) - plasticState(p)%aTolState(startIndex-o:endIndex-o) =prm%aTolShear - !---------------------------------------------------------------------------------------------- !locally define deltaState alias o = endIndex @@ -846,16 +836,22 @@ subroutine plastic_kinehardening_dotState(Mp,ipc,ip,el) real(pReal), dimension(plastic_kinehardening_totalNslip(phase_plasticityInstance(material_phase(ipc,ip,el)))) :: & gdot_pos,gdot_neg, & tau_pos,tau_neg + real(pReal) :: & + sumGamma of = phasememberAt(ipc,ip,el) ph = phaseAt(ipc,ip,el) instance = phase_plasticityInstance(ph) nSlip = plastic_kinehardening_totalNslip(instance) - - dotState(instance)%sumGamma(of) = 0.0_pReal + + associate( prm => paramNew(instance), stt => state(instance), dot => dotState(instance)) + call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & Mp,ph,instance,of) + + dot%accshear(:,of) = abs(gdot_pos+gdot_neg) + sumGamma = sum(stt%accshear(:,of)) j = 0_pInt slipFamilies: do f = 1_pInt,lattice_maxNslipFamily @@ -865,8 +861,8 @@ subroutine plastic_kinehardening_dotState(Mp,ipc,ip,el) dot_product(param(instance)%hardeningMatrix_SlipSlip(j,1:nSlip),abs(gdot_pos+gdot_neg)) * & ( param(instance)%theta1(f) + & (param(instance)%theta0(f) - param(instance)%theta1(f) & - + param(instance)%theta0(f)*param(instance)%theta1(f)*state(instance)%sumGamma(of)/param(instance)%tau1(f)) & - *exp(-state(instance)%sumGamma(of)*param(instance)%theta0(f)/param(instance)%tau1(f)) & ! V term depending on the harding law + + param(instance)%theta0(f)*param(instance)%theta1(f)*sumGamma/param(instance)%tau1(f)) & + *exp(-sumGamma*param(instance)%theta0(f)/param(instance)%tau1(f)) & ! V term depending on the harding law ) dotState(instance)%crss_back(j,of) = & ! evolution of back stress resistance j state(instance)%sense(j,of)*abs(gdot_pos(j)+gdot_neg(j)) * & @@ -878,10 +874,9 @@ subroutine plastic_kinehardening_dotState(Mp,ipc,ip,el) *param(instance)%theta0_b(f)/(param(instance)%tau1_b(f)+state(instance)%chi0(j,of))) & ) ! 
V term depending on the harding law for back stress - dotState(instance)%accshear(j,of) = abs(gdot_pos(j)+gdot_neg(j)) - dotState(instance)%sumGamma(of) = dotState(instance)%sumGamma(of) + dotState(instance)%accshear(j,of) enddo slipSystems enddo slipFamilies + end associate end subroutine plastic_kinehardening_dotState From a7351deab073ee36dd3fe7ed8193a3e48f3fc5b8 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 13 Dec 2018 07:31:56 +0100 Subject: [PATCH 52/89] simplified --- src/plastic_kinematichardening.f90 | 200 ++++++++++------------------- 1 file changed, 66 insertions(+), 134 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index bfb80cd7a..e5b6547cf 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -59,9 +59,9 @@ module plastic_kinehardening theta1_b, & !< asymptotic hardening rate of back stress for each slip > tau1, & tau1_b, & - interaction_slipslip, & !< latent hardening matrix nonSchmidCoeff - + real(pReal), dimension(:,:), allocatable, private :: & + interaction_slipslip !< latent hardening matrix real(pReal), allocatable, dimension(:,:,:) :: & Schmid_slip, & Schmid_twin, & @@ -253,6 +253,9 @@ subroutine plastic_kinehardening_init(fileUnit) prm%nonSchmid_pos = prm%Schmid_slip prm%nonSchmid_neg = prm%Schmid_slip endif + prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & + config_phase(p)%getFloats('interaction_slipslip'), & + structure(1:3)) prm%crss0 = config_phase(p)%getFloats('crss0', requiredShape=shape(prm%Nslip)) prm%tau1 = config_phase(p)%getFloats('tau1', requiredShape=shape(prm%Nslip)) @@ -262,15 +265,20 @@ subroutine plastic_kinehardening_init(fileUnit) prm%theta0_b = config_phase(p)%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) prm%theta1_b = config_phase(p)%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) + ! expand: family => system + prm%crss0 = math_expand(prm%crss0, prm%Nslip) + prm%tau1 = math_expand(prm%tau1,prm%Nslip) + prm%tau1_b = math_expand(prm%tau1_b, prm%Nslip) + prm%theta0 = math_expand(prm%theta0,prm%Nslip) + prm%theta1 = math_expand(prm%theta1,prm%Nslip) + prm%theta0_b = math_expand(prm%theta0_b,prm%Nslip) + prm%theta1_b = math_expand(prm%theta1_b,prm%Nslip) prm%gdot0 = config_phase(p)%getFloat('gdot0') - prm%n_slip = config_phase(p)%getFloat('n_slip') + prm%n_slip = config_phase(p)%getFloat('n_slip') - !prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & - ! config_phase(p)%getFloats('interaction_slipslip'), & - ! structure(1:3)) endif slipActive @@ -414,7 +422,6 @@ param(instance)%outputID = prm%outputID allocate(param(instance)%theta1 (Nchunks_SlipFamilies), source=0.0_pReal) allocate(param(instance)%theta0_b(Nchunks_SlipFamilies), source=0.0_pReal) allocate(param(instance)%theta1_b(Nchunks_SlipFamilies), source=0.0_pReal) - allocate(param(instance)%interaction_slipslip(Nchunks_SlipSlip), source=0.0_pReal) allocate(param(instance)%nonSchmidCoeff(Nchunks_nonSchmid), source=0.0_pReal) if(allocated(tempPerSlip)) deallocate(tempPerSlip) allocate(tempPerSlip(Nchunks_SlipFamilies)) @@ -464,12 +471,6 @@ param(instance)%outputID = prm%outputID !-------------------------------------------------------------------------------------------------- ! 
parameters depending on number of interactions - case ('interaction_slipslip') - if (chunkPos(1) < 1_pInt + Nchunks_SlipSlip) & - call IO_warning(52_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_KINEHARDENING_label//')') - do j = 1_pInt, Nchunks_SlipSlip - param(instance)%interaction_slipslip(j) = IO_floatValue(line,chunkPos,1_pInt+j) - enddo case ('nonschmidcoeff') if (chunkPos(1) < 1_pInt + Nchunks_nonSchmid) & call IO_warning(52_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_KINEHARDENING_label//')') @@ -529,20 +530,6 @@ param(instance)%outputID = prm%outputID plasticState(phase)%accumulatedSlip => & plasticState(phase)%state(offset_slip+1:offset_slip+plasticState(phase)%nSlip,1:NipcMyPhase) - allocate(param(instance)%hardeningMatrix_SlipSlip(nSlip,nSlip), source=0.0_pReal) - do f = 1_pInt,lattice_maxNslipFamily ! >>> interaction slip -- X - index_myFamily = sum(plastic_kinehardening_Nslip(1:f-1_pInt,instance)) - do j = 1_pInt,plastic_kinehardening_Nslip(f,instance) ! loop over (active) systems in my family (slip) - do o = 1_pInt,lattice_maxNslipFamily - index_otherFamily = sum(plastic_kinehardening_Nslip(1:o-1_pInt,instance)) - do k = 1_pInt,plastic_kinehardening_Nslip(o,instance) ! loop over (active) systems in other family (slip) - param(instance)%hardeningMatrix_SlipSlip(index_myFamily+j,index_otherFamily+k) = & - param(instance)%interaction_SlipSlip(lattice_interactionSlipSlip( & - sum(lattice_NslipSystem(1:f-1,phase))+j, & - sum(lattice_NslipSystem(1:o-1,phase))+k, & - phase)) - enddo; enddo - enddo; enddo endindex = 0_pInt o = endIndex ! offset of dotstate index relative to state index @@ -626,23 +613,6 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & Mp,ipc,ip,el) use prec, only: & dNeq0 - use debug, only: & - debug_level, & - debug_constitutive, & - debug_levelExtensive, & - debug_levelSelective, & - debug_e, & - debug_i, & - debug_g - use math, only: & - math_Plain3333to99, & - math_Mandel6to33, & - math_transpose33 - use lattice, only: & - lattice_Sslip, & !< schmid matrix - lattice_maxNslipFamily, & - lattice_NslipSystem, & - lattice_NnonSchmid use material, only: & phaseAt, phasememberAt, & phase_plasticityInstance @@ -662,7 +632,6 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & integer(pInt) :: & instance, & - index_myFamily, & f,i,j,k,l,m,n, & of, & ph @@ -672,59 +641,41 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & tau_pos,tau_neg real(pReal) :: & dgdot_dtau_pos,dgdot_dtau_neg - real(pReal), dimension(3,3,2) :: & - nonSchmid_tensor ph = phaseAt(ipc,ip,el) !< figures phase for each material point of = phasememberAt(ipc,ip,el) !< index of the positions of each constituent of material point, phasememberAt is a function in material that helps figure them out instance = phase_plasticityInstance(ph) + associate(prm => paramNew(instance), stt => state(instance)) Lp = 0.0_pReal dLp_dMp = 0.0_pReal call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & Mp,ph,instance,of) + tau_pos = tau_pos - stt%crss_back(:,of) + tau_neg = tau_neg - stt%crss_back(:,of) + do j = 1_pInt, prm%totalNslip - j = 0_pInt ! reading and marking the starting index for each slip family - slipFamilies: do f = 1_pInt,lattice_maxNslipFamily - index_myFamily = sum(lattice_NslipSystem(1:f-1_pInt,ph)) ! at which index starts my family - slipSystems: do i = 1_pInt,plastic_kinehardening_Nslip(f,instance) - j = j + 1_pInt - - ! 
build nonSchmid tensor - nonSchmid_tensor(1:3,1:3,1) = lattice_Sslip(1:3,1:3,1,index_myFamily+i,ph) - nonSchmid_tensor(1:3,1:3,2) = nonSchmid_tensor(1:3,1:3,1) - do k = 1,lattice_NnonSchmid(ph) - nonSchmid_tensor(1:3,1:3,1) = & - nonSchmid_tensor(1:3,1:3,1) + param(instance)%nonSchmidCoeff(k) * & - lattice_Sslip(1:3,1:3,2*k,index_myFamily+i,ph) - nonSchmid_tensor(1:3,1:3,2) = & - nonSchmid_tensor(1:3,1:3,2) + param(instance)%nonSchmidCoeff(k) * & - lattice_Sslip(1:3,1:3,2*k+1,index_myFamily+i,ph) - enddo - - Lp = Lp + (gdot_pos(j)+gdot_neg(j))*lattice_Sslip(1:3,1:3,1,index_myFamily+i,ph) ! sum of all gdot*SchmidTensor gives Lp + Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) ! sum of all gdot*SchmidTensor gives Lp ! Calculation of the tangent of Lp ! sensitivity of Lp if (dNeq0(gdot_pos(j))) then - dgdot_dtau_pos = gdot_pos(j)*param(instance)%n_slip/(tau_pos(j)-state(instance)%crss_back(j,of)) + dgdot_dtau_pos = gdot_pos(j)*param(instance)%n_slip/tau_pos(j) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & dLp_dMp(k,l,m,n) = & - dLp_dMp(k,l,m,n) + dgdot_dtau_pos*lattice_Sslip(k,l,1,index_myFamily+i,ph)* & - nonSchmid_tensor(m,n,1) + dLp_dMp(k,l,m,n) + dgdot_dtau_pos*prm%Schmid_slip(k,l,j)*prm%nonSchmid_pos(m,n,j) endif if (dNeq0(gdot_neg(j))) then - dgdot_dtau_neg = gdot_neg(j)*param(instance)%n_slip/(tau_neg(j)-state(instance)%crss_back(j,of)) + dgdot_dtau_neg = gdot_neg(j)*param(instance)%n_slip/tau_neg(j) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & dLp_dMp(k,l,m,n) = & - dLp_dMp(k,l,m,n) + dgdot_dtau_neg*lattice_Sslip(k,l,1,index_myFamily+i,ph)* & - nonSchmid_tensor(m,n,2) + dLp_dMp(k,l,m,n) + dgdot_dtau_neg*prm%Schmid_slip(k,l,j)*prm%nonSchmid_neg(m,n,j) endif - enddo slipSystems - enddo slipFamilies + enddo +end associate end subroutine plastic_kinehardening_LpAndItsTangent @@ -735,14 +686,6 @@ subroutine plastic_kinehardening_deltaState(Mp,ipc,ip,el) use prec, only: & dNeq, & dEq0 - use debug, only: & - debug_level, & - debug_constitutive, & - debug_levelExtensive, & - debug_levelSelective, & - debug_e, & - debug_i, & - debug_g use material, only: & phaseAt, & phasememberAt, & @@ -776,33 +719,32 @@ subroutine plastic_kinehardening_deltaState(Mp,ipc,ip,el) dEq0(gdot_pos+gdot_neg,1e-10_pReal)) ! current sense of shear direction #ifdef DEBUG - if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & - .and. ((el == debug_e .and. ip == debug_i .and. ipc == debug_g) & - .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then - write(6,'(a)') '======= kinehardening delta state =======' - endif +! if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & +! .and. ((el == debug_e .and. ip == debug_i .and. ipc == debug_g) & +! .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then +! write(6,'(a)') '======= kinehardening delta state =======' +! endif #endif + +#ifdef DEBUG +! if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & +! .and. ((el == debug_e .and. ip == debug_i .and. ipc == debug_g) & +! .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then +! write(6,'(i2,1x,f7.4,1x,f7.4)') j,sense(j),state(instance)%sense(j,of) +! endif +#endif !-------------------------------------------------------------------------------------------------- ! switch in sense of shear? 
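For the rewrite below: deltaState entries are added onto the stored state by the state integrator, which is why a detected reversal is stored as the difference to the currently held value (sense minus the old sense) rather than as the new value itself. A scalar sketch of the per-system bookkeeping that the following where construct performs (illustrative only; sense_switch_example and its local names are hypothetical):

 subroutine sense_switch_example(sense_new)
  use prec, only: pReal, dNeq

  implicit none
  real(pReal), intent(in) :: sense_new                          ! current sense of shear, -1.0 or +1.0
  real(pReal) :: sense_old = 1.0_pReal, chi0_old = 0.0_pReal, & ! snapshots stored at the last switch
                 gamma0_old = 0.0_pReal
  real(pReal) :: crss_back = 5.0e6_pReal, accshear = 0.02_pReal ! current back stress and accumulated shear
  real(pReal) :: delta_sense, delta_chi0, delta_gamma0          ! deltaState entries (added onto the state)

  if (dNeq(sense_new,sense_old,0.1_pReal)) then                 ! direction flipped (tolerance absorbs sense == 0)
    delta_sense  = sense_new - sense_old                        ! integrator adds this, yielding sense_new
    delta_chi0   = abs(crss_back) - chi0_old                    ! snapshot back stress magnitude at the switch
    delta_gamma0 = accshear - gamma0_old                        ! snapshot accumulated shear at the switch
  else
    delta_sense  = 0.0_pReal                                    ! no switch: stored snapshots stay put
    delta_chi0   = 0.0_pReal
    delta_gamma0 = 0.0_pReal
  endif

 end subroutine sense_switch_example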
- do j = 1,plastic_kinehardening_totalNslip(instance) -#ifdef DEBUG - if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & - .and. ((el == debug_e .and. ip == debug_i .and. ipc == debug_g) & - .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then - write(6,'(i2,1x,f7.4,1x,f7.4)') j,sense(j),state(instance)%sense(j,of) - endif -#endif - if (dNeq(sense(j),state(instance)%sense(j,of),0.1_pReal)) then - deltaState(instance)%sense (j,of) = sense(j) - state(instance)%sense(j,of) ! switch sense - deltaState(instance)%chi0 (j,of) = abs(state(instance)%crss_back(j,of)) - state(instance)%chi0(j,of) ! remember current backstress magnitude - deltaState(instance)%gamma0(j,of) = state(instance)%accshear(j,of) - state(instance)%gamma0(j,of) ! remember current accumulated shear - else - deltaState(instance)%sense (j,of) = 0.0_pReal ! no change - deltaState(instance)%chi0 (j,of) = 0.0_pReal - deltaState(instance)%gamma0(j,of) = 0.0_pReal - endif - enddo + where(dNeq(sense,state(instance)%sense(:,of),0.1_pReal)) + deltaState(instance)%sense (:,of) = sense - state(instance)%sense(:,of) ! switch sense + deltaState(instance)%chi0 (:,of) = abs(state(instance)%crss_back(:,of)) - state(instance)%chi0(:,of) ! remember current backstress magnitude + deltaState(instance)%gamma0(:,of) = state(instance)%accshear(:,of) - state(instance)%gamma0(:,of) ! remember current accumulated shear + else where + deltaState(instance)%sense (:,of) = 0.0_pReal ! no change + deltaState(instance)%chi0 (:,of) = 0.0_pReal + deltaState(instance)%gamma0(:,of) = 0.0_pReal + end where end subroutine plastic_kinehardening_deltaState @@ -852,30 +794,26 @@ subroutine plastic_kinehardening_dotState(Mp,ipc,ip,el) dot%accshear(:,of) = abs(gdot_pos+gdot_neg) sumGamma = sum(stt%accshear(:,of)) - - j = 0_pInt - slipFamilies: do f = 1_pInt,lattice_maxNslipFamily - slipSystems: do i = 1_pInt,plastic_kinehardening_Nslip(f,instance) - j = j+1_pInt - dotState(instance)%crss(j,of) = & ! evolution of slip resistance j - dot_product(param(instance)%hardeningMatrix_SlipSlip(j,1:nSlip),abs(gdot_pos+gdot_neg)) * & - ( param(instance)%theta1(f) + & - (param(instance)%theta0(f) - param(instance)%theta1(f) & - + param(instance)%theta0(f)*param(instance)%theta1(f)*sumGamma/param(instance)%tau1(f)) & - *exp(-sumGamma*param(instance)%theta0(f)/param(instance)%tau1(f)) & ! V term depending on the harding law + + do j = 1_pInt, prm%totalNslip + dot%crss(j,of) = & ! evolution of slip resistance j + dot_product(prm%interaction_SlipSlip(j,:),abs(gdot_pos+gdot_neg)) * & + ( prm%theta1(j) + & + (prm%theta0(j) - prm%theta1(j) & + + prm%theta0(j)*prm%theta1(j)*sumGamma/prm%tau1(j)) & + *exp(-sumGamma*prm%theta0(j)/prm%tau1(j)) & ! V term depending on the harding law ) - dotState(instance)%crss_back(j,of) = & ! evolution of back stress resistance j - state(instance)%sense(j,of)*abs(gdot_pos(j)+gdot_neg(j)) * & - ( param(instance)%theta1_b(f) + & - (param(instance)%theta0_b(f) - param(instance)%theta1_b(f) & - + param(instance)%theta0_b(f)*param(instance)%theta1_b(f)/(param(instance)%tau1_b(f)+state(instance)%chi0(j,of)) & - *(state(instance)%accshear(j,of)-state(instance)%gamma0(j,of))) & + dot%crss_back(j,of) = & ! 
evolution of back stress resistance j + stt%sense(j,of)*abs(gdot_pos(j)+gdot_neg(j)) * & + ( prm%theta1_b(j) + & + (prm%theta0_b(j) - prm%theta1_b(j) & + + prm%theta0_b(j)*prm%theta1_b(j)/(prm%tau1_b(j)+stt%chi0(j,of)) & + *(stt%accshear(j,of)-state(instance)%gamma0(j,of))) & *exp(-(state(instance)%accshear(j,of)-state(instance)%gamma0(j,of)) & - *param(instance)%theta0_b(f)/(param(instance)%tau1_b(f)+state(instance)%chi0(j,of))) & + *prm%theta0_b(j)/(prm%tau1_b(j)+state(instance)%chi0(j,of))) & ) ! V term depending on the harding law for back stress - enddo slipSystems - enddo slipFamilies + enddo end associate end subroutine plastic_kinehardening_dotState @@ -957,15 +895,9 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) result(postResults) c = c + nSlip case (resolvedstress_ID) - j = 0_pInt - slipFamilies: do f = 1_pInt,lattice_maxNslipFamily - index_myFamily = sum(lattice_NslipSystem(1:f-1_pInt,ph)) ! at which index starts my family - slipSystems: do i = 1_pInt,plastic_kinehardening_Nslip(f,instance) - j = j + 1_pInt - postResults(c+j) = & - math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,1,index_myFamily+i,ph)) - enddo slipSystems - enddo slipFamilies + do j = 1_pInt, prm%totalNslip + postResults(c+j) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,j)) + enddo c = c + nSlip end select From d99778dd9625b5b0f47352d9c574935645ed565f Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 13 Dec 2018 08:43:26 +0100 Subject: [PATCH 53/89] further cleaning --- src/plastic_kinematichardening.f90 | 34 +++++++----------------------- 1 file changed, 8 insertions(+), 26 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index e5b6547cf..6a2fd97dd 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -416,12 +416,6 @@ param(instance)%outputID = prm%outputID Nchunks_SlipSlip = maxval(lattice_interactionSlipSlip(:,:,phase)) Nchunks_nonSchmid = lattice_NnonSchmid(phase) allocate(param(instance)%crss0 (Nchunks_SlipFamilies), source=0.0_pReal) - allocate(param(instance)%tau1 (Nchunks_SlipFamilies), source=0.0_pReal) - allocate(param(instance)%tau1_b (Nchunks_SlipFamilies), source=0.0_pReal) - allocate(param(instance)%theta0 (Nchunks_SlipFamilies), source=0.0_pReal) - allocate(param(instance)%theta1 (Nchunks_SlipFamilies), source=0.0_pReal) - allocate(param(instance)%theta0_b(Nchunks_SlipFamilies), source=0.0_pReal) - allocate(param(instance)%theta1_b(Nchunks_SlipFamilies), source=0.0_pReal) allocate(param(instance)%nonSchmidCoeff(Nchunks_nonSchmid), source=0.0_pReal) if(allocated(tempPerSlip)) deallocate(tempPerSlip) allocate(tempPerSlip(Nchunks_SlipFamilies)) @@ -455,18 +449,6 @@ param(instance)%outputID = prm%outputID select case(tag) case ('crss0') param(instance)%crss0(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) - case ('tau1') - param(instance)%tau1(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) - case ('tau1_b') - param(instance)%tau1_b(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) - case ('theta0') - param(instance)%theta0(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) - case ('theta1') - param(instance)%theta1(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) - case ('theta0_b') - param(instance)%theta0_b(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) - case ('theta1_b') - param(instance)%theta1_b(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) end select 
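A note on the family/system convention adopted in PATCH 52 above: hardening parameters such as crss0 enter material.config once per slip family, while the state and dot-state arrays run over the individual slip systems, so math_expand replicates each family value across the systems of its family before use. A minimal sketch (illustrative only; expand_example and the numbers are hypothetical):

 subroutine expand_example()
  use prec, only: pInt, pReal
  use math, only: math_expand

  implicit none
  integer(pInt), dimension(2) :: Nslip = [12_pInt, 6_pInt]                  ! systems per active family
  real(pReal), dimension(2) :: crss0_family = [31.0e6_pReal, 63.0e6_pReal]  ! per-family input (Pa)
  real(pReal), dimension(:), allocatable :: crss0

  crss0 = math_expand(crss0_family,Nslip)  ! size 18: entries 1:12 hold 31 MPa, entries 13:18 hold 63 MPa

 end subroutine expand_example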
!-------------------------------------------------------------------------------------------------- @@ -510,14 +492,14 @@ param(instance)%outputID = prm%outputID !-------------------------------------------------------------------------------------------------- ! sanity checks - if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & - .and. param(instance)%crss0 (1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' crss0' - if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & - .and. param(instance)%tau1 (1:nSlipFamilies) <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1' - if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & - .and. param(instance)%tau1_b(1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' - if (param(instance)%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' - if (param(instance)%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' + ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & + ! .and. param(instance)%crss0 (1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' crss0' + ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & + ! .and. param(instance)%tau1 (1:nSlipFamilies) <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1' + ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & + ! .and. param(instance)%tau1_b(1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' + ! if (param(instance)%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' + ! if (param(instance)%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' if (extmsg /= '') then extmsg = trim(extmsg)//' ('//PLASTICITY_KINEHARDENING_label//')' ! prepare error message identifier call IO_error(211_pInt,ip=instance,ext_msg=extmsg) From 98cc79d629ced8e7e8c11ab658525a4364544e44 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 13 Dec 2018 09:08:18 +0100 Subject: [PATCH 54/89] ph not needed any more --- src/plastic_kinematichardening.f90 | 64 +++++++++--------------------- 1 file changed, 18 insertions(+), 46 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 6a2fd97dd..582511064 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -450,22 +450,7 @@ param(instance)%outputID = prm%outputID case ('crss0') param(instance)%crss0(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) end select - -!-------------------------------------------------------------------------------------------------- -! 
parameters depending on number of interactions - case ('nonschmidcoeff') - if (chunkPos(1) < 1_pInt + Nchunks_nonSchmid) & - call IO_warning(52_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_KINEHARDENING_label//')') - do j = 1_pInt,Nchunks_nonSchmid - param(instance)%nonSchmidCoeff(j) = IO_floatValue(line,chunkPos,1_pInt+j) - enddo -!-------------------------------------------------------------------------------------------------- - case ('gdot0') - param(instance)%gdot0 = IO_floatValue(line,chunkPos,2_pInt) - - case ('n_slip') - param(instance)%n_slip = IO_floatValue(line,chunkPos,2_pInt) - + case default end select @@ -532,7 +517,7 @@ end subroutine plastic_kinehardening_init !> @brief calculation of shear rates (\dot \gamma) !-------------------------------------------------------------------------------------------------- subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,ph,instance,of) + Mp,instance,of) use math use lattice, only: & @@ -545,7 +530,6 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & real(pReal), dimension(3,3), intent(in) :: & Mp integer(pInt), intent(in) :: & - ph, & !< phase ID instance, & !< instance of that phase of !< index of phaseMember real(pReal), dimension(plastic_kinehardening_totalNslip(instance)), intent(out) :: & @@ -557,34 +541,22 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & integer(pInt) :: & index_myFamily, & f,i,j,k + associate(prm => paramNew(instance), stt => state(instance)) + do i = 1_pInt, prm%totalNslip + tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) + tau_neg(i) = math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)) + enddo - - j = 0_pInt - slipFamilies: do f = 1_pInt,lattice_maxNslipFamily - index_myFamily = sum(lattice_NslipSystem(1:f-1_pInt,ph)) ! at which index starts my family - slipSystems: do i = 1_pInt,plastic_kinehardening_Nslip(f,instance) - j = j + 1_pInt - tau_pos(j) = math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,1,index_myFamily+i,ph)) - tau_neg(j) = tau_pos(j) - nonSchmidSystems: do k = 1,lattice_NnonSchmid(ph) - tau_pos(j) = tau_pos(j) + param(instance)%nonSchmidCoeff(k)* & - math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,2*k+0,index_myFamily+i,ph)) - tau_neg(j) = tau_neg(j) + param(instance)%nonSchmidCoeff(k)* & - math_mul33xx33(Mp,lattice_Sslip(1:3,1:3,2*k+1,index_myFamily+i,ph)) - enddo nonSchmidSystems - enddo slipSystems - enddo slipFamilies - - gdot_pos = 0.5_pReal * param(instance)%gdot0 * & + gdot_pos = 0.5_pReal * prm%gdot0 * & (abs(tau_pos-state(instance)%crss_back(:,of))/ & - state(instance)%crss(:,of))**param(instance)%n_slip & + state(instance)%crss(:,of))**prm%n_slip & *sign(1.0_pReal,tau_pos-state(instance)%crss_back(:,of)) - gdot_neg = 0.5_pReal * param(instance)%gdot0 * & + gdot_neg = 0.5_pReal * prm%gdot0 * & (abs(tau_neg-state(instance)%crss_back(:,of))/ & - state(instance)%crss(:,of))**param(instance)%n_slip & + state(instance)%crss(:,of))**prm%n_slip & *sign(1.0_pReal,tau_neg-state(instance)%crss_back(:,of)) - +end associate end subroutine plastic_kinehardening_shearRates @@ -633,7 +605,7 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & dLp_dMp = 0.0_pReal call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,ph,instance,of) + Mp,instance,of) tau_pos = tau_pos - stt%crss_back(:,of) tau_neg = tau_neg - stt%crss_back(:,of) @@ -643,14 +615,14 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & ! Calculation of the tangent of Lp ! 
sensitivity of Lp if (dNeq0(gdot_pos(j))) then - dgdot_dtau_pos = gdot_pos(j)*param(instance)%n_slip/tau_pos(j) + dgdot_dtau_pos = gdot_pos(j)*prm%n_slip/tau_pos(j) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & dLp_dMp(k,l,m,n) = & dLp_dMp(k,l,m,n) + dgdot_dtau_pos*prm%Schmid_slip(k,l,j)*prm%nonSchmid_pos(m,n,j) endif if (dNeq0(gdot_neg(j))) then - dgdot_dtau_neg = gdot_neg(j)*param(instance)%n_slip/tau_neg(j) + dgdot_dtau_neg = gdot_neg(j)*prm%n_slip/tau_neg(j) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & dLp_dMp(k,l,m,n) = & dLp_dMp(k,l,m,n) + dgdot_dtau_neg*prm%Schmid_slip(k,l,j)*prm%nonSchmid_neg(m,n,j) @@ -695,7 +667,7 @@ subroutine plastic_kinehardening_deltaState(Mp,ipc,ip,el) instance = phase_plasticityInstance(ph) call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,ph,instance,of) + Mp,instance,of) sense = merge(state(instance)%sense(:,of), & ! keep existing... sign(1.0_pReal,gdot_pos+gdot_neg), & ! ...or have a defined dEq0(gdot_pos+gdot_neg,1e-10_pReal)) ! current sense of shear direction @@ -772,7 +744,7 @@ subroutine plastic_kinehardening_dotState(Mp,ipc,ip,el) call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,ph,instance,of) + Mp,instance,of) dot%accshear(:,of) = abs(gdot_pos+gdot_neg) sumGamma = sum(stt%accshear(:,of)) @@ -844,7 +816,7 @@ function plastic_kinehardening_postResults(Mp,ipc,ip,el) result(postResults) c = 0_pInt call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,ph,instance,of) + Mp,instance,of) associate( prm => paramNew(instance), stt => state(instance)) outputsLoop: do o = 1_pInt,plastic_kinehardening_Noutput(instance) select case(prm%outputID(o)) From 416d3411c1e6ea4b106917dc91983c84d69ff967 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 13 Dec 2018 09:34:40 +0100 Subject: [PATCH 55/89] leaner APIs --- src/constitutive.f90 | 27 +++-- src/plastic_kinematichardening.f90 | 160 +++++++++-------------------- 2 files changed, 69 insertions(+), 118 deletions(-) diff --git a/src/constitutive.f90 b/src/constitutive.f90 index 6fd0161f9..a8e57034b 100644 --- a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -516,7 +516,9 @@ subroutine constitutive_LpAndItsTangents(Lp, dLp_dS, dLp_dFi, S6, Fi, ipc, ip, e call plastic_phenopowerlaw_LpAndItsTangent (Lp,dLp_dMp, Mp,instance,of) case (PLASTICITY_KINEHARDENING_ID) plasticityType - call plastic_kinehardening_LpAndItsTangent (Lp,dLp_dMp, Mp,ipc,ip,el) + of = phasememberAt(ipc,ip,el) + instance = phase_plasticityInstance(material_phase(ipc,ip,el)) + call plastic_kinehardening_LpAndItsTangent (Lp,dLp_dMp, Mp,instance,of) case (PLASTICITY_NONLOCAL_ID) plasticityType call plastic_nonlocal_LpAndItsTangent (Lp,dLp_dMp99, math_Mandel33to6(Mp), & @@ -918,7 +920,9 @@ subroutine constitutive_collectDotState(S6, FeArray, Fi, FpArray, subdt, subfrac call plastic_phenopowerlaw_dotState(Mp,instance,of) case (PLASTICITY_KINEHARDENING_ID) plasticityType - call plastic_kinehardening_dotState(Mp,ipc,ip,el) + of = phasememberAt(ipc,ip,el) + instance = phase_plasticityInstance(material_phase(ipc,ip,el)) + call plastic_kinehardening_dotState(Mp,instance,of) case (PLASTICITY_DISLOTWIN_ID) plasticityType call plastic_dislotwin_dotState (math_Mandel33to6(Mp),temperature(ho)%p(tme), & @@ -972,6 +976,8 @@ subroutine constitutive_collectDeltaState(S6, Fe, Fi, ipc, ip, el) math_Mandel33to6, & math_mul33x33 use material, only: & + phasememberAt, & + phase_plasticityInstance, & phase_plasticity, & 
phase_source, & phase_Nsources, & @@ -1003,19 +1009,22 @@ subroutine constitutive_collectDeltaState(S6, Fe, Fi, ipc, ip, el) Fe, & !< elastic deformation gradient Fi !< intermediate deformation gradient real(pReal), dimension(3,3) :: & - Mstar + Mp integer(pInt) :: & - s !< counter in source loop + s, & !< counter in source loop + instance, of - Mstar = math_mul33x33(math_mul33x33(transpose(Fi),Fi),math_Mandel6to33(S6)) + Mp = math_mul33x33(math_mul33x33(transpose(Fi),Fi),math_Mandel6to33(S6)) plasticityType: select case (phase_plasticity(material_phase(ipc,ip,el))) case (PLASTICITY_KINEHARDENING_ID) plasticityType - call plastic_kinehardening_deltaState(Mstar,ipc,ip,el) + of = phasememberAt(ipc,ip,el) + instance = phase_plasticityInstance(material_phase(ipc,ip,el)) + call plastic_kinehardening_deltaState(Mp,instance,of) case (PLASTICITY_NONLOCAL_ID) plasticityType - call plastic_nonlocal_deltaState(math_Mandel33to6(Mstar),ip,el) + call plastic_nonlocal_deltaState(math_Mandel33to6(Mp),ip,el) end select plasticityType @@ -1140,8 +1149,10 @@ function constitutive_postResults(S6, Fi, FeArray, ipc, ip, el) constitutive_postResults(startPos:endPos) = & plastic_phenopowerlaw_postResults(Mp,instance,of) case (PLASTICITY_KINEHARDENING_ID) plasticityType + of = phasememberAt(ipc,ip,el) + instance = phase_plasticityInstance(material_phase(ipc,ip,el)) constitutive_postResults(startPos:endPos) = & - plastic_kinehardening_postResults(Mp,ipc,ip,el) + plastic_kinehardening_postResults(Mp,instance,of) case (PLASTICITY_DISLOTWIN_ID) plasticityType constitutive_postResults(startPos:endPos) = & plastic_dislotwin_postResults(S6,temperature(ho)%p(tme),ipc,ip,el) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 582511064..1c533b0b2 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -520,11 +520,6 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & Mp,instance,of) use math - use lattice, only: & - lattice_NslipSystem, & - lattice_Sslip, & - lattice_maxNslipFamily, & - lattice_NnonSchmid implicit none real(pReal), dimension(3,3), intent(in) :: & @@ -532,7 +527,7 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & integer(pInt), intent(in) :: & instance, & !< instance of that phase of !< index of phaseMember - real(pReal), dimension(plastic_kinehardening_totalNslip(instance)), intent(out) :: & + real(pReal), dimension(paramNew(instance)%totalNslip), intent(out) :: & gdot_pos, & !< shear rates from positive line segments gdot_neg, & !< shear rates from negative line segments tau_pos, & !< shear stress on positive line segments @@ -563,43 +558,32 @@ end subroutine plastic_kinehardening_shearRates !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp, & - Mp,ipc,ip,el) +subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) use prec, only: & dNeq0 - use material, only: & - phaseAt, phasememberAt, & - phase_plasticityInstance - + implicit none real(pReal), dimension(3,3), intent(out) :: & Lp !< plastic velocity gradient real(pReal), dimension(3,3,3,3), intent(out) :: & dLp_dMp !< derivative of Lp with respect to the Mandel stress - integer(pInt), intent(in) :: & - ipc, & !< component-ID 
of integration point - ip, & !< integration point - el !< element real(pReal), dimension(3,3), intent(in) :: & - Mp + Mp !< Mandel stress + integer(pInt), intent(in) :: & + instance, & + of integer(pInt) :: & - instance, & - f,i,j,k,l,m,n, & - of, & - ph + f,i,j,k,l,m,n + - real(pReal), dimension(plastic_kinehardening_totalNslip(phase_plasticityInstance(phaseAt(ipc,ip,el)))) :: & + real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & tau_pos,tau_neg real(pReal) :: & dgdot_dtau_pos,dgdot_dtau_neg - ph = phaseAt(ipc,ip,el) !< figures phase for each material point - of = phasememberAt(ipc,ip,el) !< index of the positions of each constituent of material point, phasememberAt is a function in material that helps figure them out - instance = phase_plasticityInstance(ph) - associate(prm => paramNew(instance), stt => state(instance)) Lp = 0.0_pReal dLp_dMp = 0.0_pReal @@ -636,35 +620,22 @@ end subroutine plastic_kinehardening_LpAndItsTangent !-------------------------------------------------------------------------------------------------- !> @brief calculates (instantaneous) incremental change of microstructure !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_deltaState(Mp,ipc,ip,el) +subroutine plastic_kinehardening_deltaState(Mp,instance,of) use prec, only: & dNeq, & dEq0 - use material, only: & - phaseAt, & - phasememberAt, & - phase_plasticityInstance - + implicit none - real(pReal), dimension(3,3), intent(in) :: & - Mp - integer(pInt), intent(in) :: & - ipc, & !< component-ID of integration point - ip, & !< integration point - el !< element - real(pReal), dimension(plastic_kinehardening_totalNslip(phase_plasticityInstance(phaseAt(ipc,ip,el)))) :: & + real(pReal), dimension(3,3), intent(in) :: & + Mp !< Mandel stress + integer(pInt), intent(in) :: & + instance, & + of + + real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & tau_pos,tau_neg, & sense - integer(pInt) :: & - ph, & - instance, & !< instance of my instance (unique number of my constitutive model) - of, & - j !< shortcut notation for offset position in state array - - ph = phaseAt(ipc,ip,el) - of = phasememberAt(ipc,ip,el) ! 
phasememberAt should be tackled by material and be renamed to material_phasemember - instance = phase_plasticityInstance(ph) call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & Mp,instance,of) @@ -707,38 +678,24 @@ end subroutine plastic_kinehardening_deltaState !-------------------------------------------------------------------------------------------------- !> @brief calculates the rate of change of microstructure !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_dotState(Mp,ipc,ip,el) - use lattice, only: & - lattice_maxNslipFamily - use material, only: & - material_phase, & - phaseAt, phasememberAt, & - phase_plasticityInstance +subroutine plastic_kinehardening_dotState(Mp,instance,of) implicit none real(pReal), dimension(3,3), intent(in) :: & - Mp + Mp !< Mandel stress integer(pInt), intent(in) :: & - ipc, & !< component-ID of integration point - ip, & !< integration point - el !< element !< microstructure state + instance, & + of !< element !< microstructure state integer(pInt) :: & - instance,ph, & - f,i,j, & - nSlip, & - of + f,i,j - real(pReal), dimension(plastic_kinehardening_totalNslip(phase_plasticityInstance(material_phase(ipc,ip,el)))) :: & + real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & tau_pos,tau_neg real(pReal) :: & sumGamma - of = phasememberAt(ipc,ip,el) - ph = phaseAt(ipc,ip,el) - instance = phase_plasticityInstance(ph) - nSlip = plastic_kinehardening_totalNslip(instance) associate( prm => paramNew(instance), stt => state(instance), dot => dotState(instance)) @@ -775,84 +732,67 @@ end subroutine plastic_kinehardening_dotState !-------------------------------------------------------------------------------------------------- !> @brief return array of constitutive results !-------------------------------------------------------------------------------------------------- -function plastic_kinehardening_postResults(Mp,ipc,ip,el) result(postResults) - use math - use material, only: & - material_phase, & - phaseAt, phasememberAt, & - phase_plasticityInstance - use lattice, only: & - lattice_Sslip, & - lattice_maxNslipFamily, & - lattice_NslipSystem +function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) + use math, only: & + math_mul33xx33 implicit none real(pReal), dimension(3,3), intent(in) :: & - Mp + Mp !< Mandel stress integer(pInt), intent(in) :: & - ipc, & !< component-ID of integration point - ip, & !< integration point - el !< element !< microstructure state + instance, & + of - real(pReal), dimension(sum(plastic_kinehardening_sizePostResult(:,phase_plasticityInstance(material_phase(ipc,ip,el))))) :: & + real(pReal), dimension(sum(plastic_kinehardening_sizePostResult(:,instance))) :: & postResults integer(pInt) :: & - instance,ph, of, & - nSlip,& - o,f,i,c,j,& - index_myFamily + o,c,f,j - real(pReal), dimension(plastic_kinehardening_totalNslip(phase_plasticityInstance(material_phase(ipc,ip,el)))) :: & + real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & tau_pos,tau_neg - of = phasememberAt(ipc,ip,el) - ph = phaseAt(ipc,ip,el) - instance = phase_plasticityInstance(ph) - - nSlip = plastic_kinehardening_totalNslip(instance) - postResults = 0.0_pReal c = 0_pInt call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of) + Mp,instance,of) associate( prm => paramNew(instance), stt => state(instance)) outputsLoop: do o = 
1_pInt,plastic_kinehardening_Noutput(instance) select case(prm%outputID(o)) case (crss_ID) - postResults(c+1_pInt:c+nSlip) = stt%crss(:,of) - c = c + nSlip + postResults(c+1_pInt:c+prm%totalNslip) = stt%crss(:,of) + c = c + prm%totalNslip case(crss_back_ID) - postResults(c+1_pInt:c+nSlip) = stt%crss_back(:,of) - c = c + nSlip + postResults(c+1_pInt:c+prm%totalNslip) = stt%crss_back(:,of) + c = c + prm%totalNslip case (sense_ID) - postResults(c+1_pInt:c+nSlip) = stt%sense(:,of) - c = c + nSlip + postResults(c+1_pInt:c+prm%totalNslip) = stt%sense(:,of) + c = c + prm%totalNslip case (chi0_ID) - postResults(c+1_pInt:c+nSlip) = stt%chi0(:,of) - c = c + nSlip + postResults(c+1_pInt:c+prm%totalNslip) = stt%chi0(:,of) + c = c + prm%totalNslip case (gamma0_ID) - postResults(c+1_pInt:c+nSlip) = stt%gamma0(:,of) - c = c + nSlip + postResults(c+1_pInt:c+prm%totalNslip) = stt%gamma0(:,of) + c = c + prm%totalNslip case (accshear_ID) - postResults(c+1_pInt:c+nSlip) = stt%accshear(:,of) - c = c + nSlip + postResults(c+1_pInt:c+prm%totalNslip) = stt%accshear(:,of) + c = c + prm%totalNslip case (shearrate_ID) - postResults(c+1_pInt:c+nSlip) = gdot_pos+gdot_neg - c = c + nSlip + postResults(c+1_pInt:c+prm%totalNslip) = gdot_pos+gdot_neg + c = c + prm%totalNslip case (resolvedstress_ID) do j = 1_pInt, prm%totalNslip postResults(c+j) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,j)) enddo - c = c + nSlip + c = c + prm%totalNslip end select enddo outputsLoop From 20671b8ed38427ee0f41e82464549b289902c890 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 13 Dec 2018 09:51:43 +0100 Subject: [PATCH 56/89] cleaning --- src/constitutive.f90 | 2 +- src/plastic_kinematichardening.f90 | 135 +++++------------------------ 2 files changed, 23 insertions(+), 114 deletions(-) diff --git a/src/constitutive.f90 b/src/constitutive.f90 index a8e57034b..28d95f4ea 100644 --- a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -162,7 +162,7 @@ subroutine constitutive_init() if (any(phase_plasticity == PLASTICITY_NONE_ID)) call plastic_none_init if (any(phase_plasticity == PLASTICITY_ISOTROPIC_ID)) call plastic_isotropic_init if (any(phase_plasticity == PLASTICITY_PHENOPOWERLAW_ID)) call plastic_phenopowerlaw_init - if (any(phase_plasticity == PLASTICITY_KINEHARDENING_ID)) call plastic_kinehardening_init(FILEUNIT) + if (any(phase_plasticity == PLASTICITY_KINEHARDENING_ID)) call plastic_kinehardening_init if (any(phase_plasticity == PLASTICITY_DISLOTWIN_ID)) call plastic_dislotwin_init(FILEUNIT) if (any(phase_plasticity == PLASTICITY_DISLOUCLA_ID)) call plastic_disloucla_init(FILEUNIT) if (any(phase_plasticity == PLASTICITY_NONLOCAL_ID)) then diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 1c533b0b2..85daab322 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -118,7 +118,7 @@ contains !> @brief module initialization !> @details reads in material parameters, allocates arrays, and does sanity checks !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_init(fileUnit) +subroutine plastic_kinehardening_init use, intrinsic :: iso_fortran_env ! 
to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment) use prec, only: & dEq0 @@ -127,22 +127,10 @@ subroutine plastic_kinehardening_init(fileUnit) debug_constitutive,& debug_levelBasic use math, only: & - math_Mandel3333to66, & - math_Voigt66to3333, & math_expand use IO, only: & - IO_read, & - IO_lc, & - IO_getTag, & - IO_isBlank, & - IO_stringPos, & - IO_stringValue, & - IO_floatValue, & - IO_intValue, & - IO_warning, & IO_error, & - IO_timeStamp, & - IO_EOF + IO_timeStamp use material, only: & phase_plasticity, & phase_plasticityInstance, & @@ -158,23 +146,19 @@ subroutine plastic_kinehardening_init(fileUnit) use lattice implicit none - integer(pInt), intent(in) :: fileUnit - integer(pInt), allocatable, dimension(:) :: chunkPos integer(kind(undefined_ID)) :: & output_ID integer(pInt) :: & - o, i,j, k, f, p, & + o, i, p, & phase, & instance, & maxNinstance, & NipcMyPhase, & outputSize, & - Nchunks_SlipSlip = 0_pInt, Nchunks_SlipFamilies = 0_pInt, & - Nchunks_nonSchmid = 0_pInt, & - offset_slip, index_myFamily, index_otherFamily, & + offset_slip, & startIndex, endIndex, & - mySize, nSlip, nSlipFamilies, & + nSlip, & sizeDotState, & sizeState, & sizeDeltaState @@ -183,7 +167,6 @@ subroutine plastic_kinehardening_init(fileUnit) real(pReal), dimension(0), parameter :: emptyRealArray = [real(pReal)::] character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::] - real(pReal), dimension(:), allocatable :: tempPerSlip integer(kind(undefined_ID)) :: & outputID !< ID of each post result output @@ -191,7 +174,6 @@ subroutine plastic_kinehardening_init(fileUnit) outputs character(len=65536) :: & tag = '', & - line = '', & extmsg = '', & structure = '' @@ -266,7 +248,7 @@ subroutine plastic_kinehardening_init(fileUnit) prm%theta1_b = config_phase(p)%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) ! expand: family => system - prm%crss0 = math_expand(prm%crss0, prm%Nslip) + !prm%crss0 = math_expand(prm%crss0, prm%Nslip) prm%tau1 = math_expand(prm%tau1,prm%Nslip) prm%tau1_b = math_expand(prm%tau1_b, prm%Nslip) prm%theta0 = math_expand(prm%theta0,prm%Nslip) @@ -277,8 +259,6 @@ subroutine plastic_kinehardening_init(fileUnit) prm%gdot0 = config_phase(p)%getFloat('gdot0') prm%n_slip = config_phase(p)%getFloat('n_slip') - - endif slipActive @@ -394,85 +374,13 @@ param(instance)%outputID = prm%outputID end associate end do - - rewind(fileUnit) - phase = 0_pInt - do while (trim(line) /= IO_EOF .and. IO_lc(IO_getTag(line,'<','>')) /= material_partPhase) ! wind forward to - line = IO_read(fileUnit) - enddo - - parsingFile: do while (trim(line) /= IO_EOF) ! read through sections of phase part - line = IO_read(fileUnit) - if (IO_isBlank(line)) cycle ! skip empty lines - if (IO_getTag(line,'<','>') /= '') then ! stop at next part - line = IO_read(fileUnit, .true.) ! reset IO_read - exit - endif - if (IO_getTag(line,'[',']') /= '') then ! next phase - phase = phase + 1_pInt ! advance phase section counter - if (phase_plasticity(phase) == PLASTICITY_KINEHARDENING_ID) then - instance = phase_plasticityInstance(phase) ! count instances of my constitutive law - Nchunks_SlipFamilies = count(lattice_NslipSystem(:,phase) > 0_pInt) ! 
maximum number of slip families according to lattice type of current phase - Nchunks_SlipSlip = maxval(lattice_interactionSlipSlip(:,:,phase)) - Nchunks_nonSchmid = lattice_NnonSchmid(phase) - allocate(param(instance)%crss0 (Nchunks_SlipFamilies), source=0.0_pReal) - allocate(param(instance)%nonSchmidCoeff(Nchunks_nonSchmid), source=0.0_pReal) - if(allocated(tempPerSlip)) deallocate(tempPerSlip) - allocate(tempPerSlip(Nchunks_SlipFamilies)) - endif - cycle ! skip to next line - endif - if (phase > 0_pInt ) then; if (phase_plasticity(phase) == PLASTICITY_KINEHARDENING_ID) then ! one of my phases. Do not short-circuit here (.and. between if-statements), it's not safe in Fortran - chunkPos = IO_stringPos(line) - tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt)) ! extract key - select case(tag) - - -!-------------------------------------------------------------------------------------------------- -! parameters depending on number of slip families - case ('nslip') - if (chunkPos(1) < Nchunks_SlipFamilies + 1_pInt) & - call IO_warning(50_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_KINEHARDENING_label//')') - if (chunkPos(1) > Nchunks_SlipFamilies + 1_pInt) & - call IO_error(150_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_KINEHARDENING_label//')') - Nchunks_SlipFamilies = chunkPos(1) - 1_pInt ! user specified number of (possibly) active slip families (e.g. 6 0 6 --> 3) - do j = 1_pInt, Nchunks_SlipFamilies - plastic_kinehardening_Nslip(j,instance) = IO_intValue(line,chunkPos,1_pInt+j) - enddo - - case ('crss0','tau1','tau1_b','theta0','theta1','theta0_b','theta1_b') - tempPerSlip = 0.0_pReal - do j = 1_pInt, Nchunks_SlipFamilies - if (plastic_kinehardening_Nslip(j,instance) > 0_pInt) & - tempPerSlip(j) = IO_floatValue(line,chunkPos,1_pInt+j) - enddo - select case(tag) - case ('crss0') - param(instance)%crss0(1:Nchunks_SlipFamilies) = tempPerSlip(1:Nchunks_SlipFamilies) - end select - - case default - - end select - endif; endif - enddo parsingFile !-------------------------------------------------------------------------------------------------- ! allocation of variables whose size depends on the total number of active slip systems - - - initializeInstances: do phase = 1_pInt, size(phase_plasticity) ! loop through all phases in material.config myPhase2: if (phase_plasticity(phase) == PLASTICITY_KINEHARDENING_ID) then ! only consider my phase NipcMyPhase = count(material_phase == phase) ! number of IPCs containing my phase instance = phase_plasticityInstance(phase) ! which instance of my phase - plastic_kinehardening_Nslip(1:lattice_maxNslipFamily,instance) = & - min(lattice_NslipSystem(1:lattice_maxNslipFamily,phase),& ! limit active slip systems per family to min of available and requested - plastic_kinehardening_Nslip(1:lattice_maxNslipFamily,instance)) - - plastic_kinehardening_totalNslip(instance) = sum(plastic_kinehardening_Nslip(:,instance)) ! how many slip systems altogether - nSlipFamilies = count(plastic_kinehardening_Nslip(:,instance) > 0_pInt) - nSlip = plastic_kinehardening_totalNslip(instance) ! total number of active slip systems !-------------------------------------------------------------------------------------------------- ! 
sanity checks @@ -491,7 +399,7 @@ param(instance)%outputID = prm%outputID endif - offset_slip = plasticState(phase)%nSlip+plasticState(phase)%nTwin+2_pInt + offset_slip = plasticState(phase)%nSlip plasticState(phase)%slipRate => & plasticState(phase)%dotState(offset_slip+1:offset_slip+plasticState(phase)%nSlip,1:NipcMyPhase) plasticState(phase)%accumulatedSlip => & @@ -502,11 +410,11 @@ param(instance)%outputID = prm%outputID o = endIndex ! offset of dotstate index relative to state index startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip + endIndex = endIndex + paramNew(instance)%totalNslip state0 (instance)%crss => plasticState(phase)%state0 (startIndex :endIndex ,1:NipcMyPhase) - state0(instance)%crss = spread(math_expand(param(instance)%crss0,& - plastic_kinehardening_Nslip(:,instance)), & + state0(instance)%crss = spread(math_expand(paramNew(instance)%crss0,& + paramNew(instance)%Nslip), & 2, NipcMyPhase) endif myPhase2 enddo initializeInstances @@ -534,22 +442,25 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & tau_neg !< shear stress on negative line segments integer(pInt) :: & - index_myFamily, & - f,i,j,k + i + associate(prm => paramNew(instance), stt => state(instance)) do i = 1_pInt, prm%totalNslip tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) tau_neg(i) = math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)) enddo + + tau_pos = tau_pos - stt%crss_back(:,of) + tau_neg = tau_neg - stt%crss_back(:,of) gdot_pos = 0.5_pReal * prm%gdot0 * & - (abs(tau_pos-state(instance)%crss_back(:,of))/ & + (abs(tau_pos)/ & state(instance)%crss(:,of))**prm%n_slip & - *sign(1.0_pReal,tau_pos-state(instance)%crss_back(:,of)) + *sign(1.0_pReal,tau_pos) gdot_neg = 0.5_pReal * prm%gdot0 * & - (abs(tau_neg-state(instance)%crss_back(:,of))/ & + (abs(tau_neg)/ & state(instance)%crss(:,of))**prm%n_slip & - *sign(1.0_pReal,tau_neg-state(instance)%crss_back(:,of)) + *sign(1.0_pReal,tau_neg) end associate end subroutine plastic_kinehardening_shearRates @@ -575,7 +486,7 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) of integer(pInt) :: & - f,i,j,k,l,m,n + j,k,l,m,n real(pReal), dimension(paramNew(instance)%totalNslip) :: & @@ -590,8 +501,6 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & Mp,instance,of) - tau_pos = tau_pos - stt%crss_back(:,of) - tau_neg = tau_neg - stt%crss_back(:,of) do j = 1_pInt, prm%totalNslip @@ -688,7 +597,7 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) of !< element !< microstructure state integer(pInt) :: & - f,i,j + j real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & @@ -746,7 +655,7 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) real(pReal), dimension(sum(plastic_kinehardening_sizePostResult(:,instance))) :: & postResults integer(pInt) :: & - o,c,f,j + o,c,j real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & From 3352cbac4bbb15488b4d1f1ca7cf58dbbacb4b90 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Thu, 13 Dec 2018 10:51:35 +0100 Subject: [PATCH 57/89] segmentation fault expected instance but passed in phase --- src/constitutive.f90 | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/constitutive.f90 b/src/constitutive.f90 index db90bfc20..f85641f8d 100644 --- a/src/constitutive.f90 +++ b/src/constitutive.f90 @@ -1197,7 +1197,9 @@ subroutine constitutive_results() 
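Editor's note on this fix: constitutive_results indexed phenopowerlaw's per-instance arrays with the phase number p, which is only valid while every phase uses the same plasticity model. A hypothetical three-phase configuration shows how the two index spaces diverge:

! phase 1: isotropic      ->  phase_plasticityInstance(1) = 1   (1st isotropic instance)
! phase 2: phenopowerlaw  ->  phase_plasticityInstance(2) = 1   (1st phenopowerlaw instance)
! phase 3: phenopowerlaw  ->  phase_plasticityInstance(3) = 2   (2nd phenopowerlaw instance)
!
! plastic_phenopowerlaw_results(3,...) would address instance 3 of an array holding
! only 2 instances -> out-of-bounds access, hence the reported segmentation fault;
! plastic_phenopowerlaw_results(phase_plasticityInstance(3),...) stays in bounds.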
use HDF5_utilities use config, only: & config_name_phase => phase_name ! anticipate logical name + use material, only: & + phase_plasticityInstance, & material_phase_plasticity_type => phase_plasticity use plastic_phenopowerlaw, only: & plastic_phenopowerlaw_results @@ -1208,7 +1210,7 @@ subroutine constitutive_results() do p=1,size(config_name_phase) call HDF5_closeGroup(results_addGroup('current/phase/'//trim(config_name_phase(p)))) if (material_phase_plasticity_type(p) == PLASTICITY_PHENOPOWERLAW_ID) then - call plastic_phenopowerlaw_results(p,'current/phase/'//trim(config_name_phase(p))) + call plastic_phenopowerlaw_results(phase_plasticityInstance(p),'current/phase/'//trim(config_name_phase(p))) endif enddo From 7e41ae264d42cbcbdbfd096fdb55b96989e99b67 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Fri, 14 Dec 2018 11:35:41 +0100 Subject: [PATCH 58/89] Made changes with the calling signature --- src/CPFEM2.f90 | 40 ++-- src/HDF5_utilities.f90 | 401 +++++++++++++++++++++++------------------ src/results.f90 | 2 +- 3 files changed, 243 insertions(+), 200 deletions(-) mode change 100644 => 100755 src/CPFEM2.f90 mode change 100644 => 100755 src/HDF5_utilities.f90 mode change 100644 => 100755 src/results.f90 diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90 old mode 100644 new mode 100755 index e22909231..b7de1d346 --- a/src/CPFEM2.f90 +++ b/src/CPFEM2.f90 @@ -154,26 +154,26 @@ subroutine CPFEM_init fileHandle = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5') - call HDF5_read(material_phase, fileHandle,'recordedPhase') - call HDF5_read(crystallite_F0, fileHandle,'convergedF') - call HDF5_read(crystallite_Fp0, fileHandle,'convergedFp') - call HDF5_read(crystallite_Fi0, fileHandle,'convergedFi') - call HDF5_read(crystallite_Lp0, fileHandle,'convergedLp') - call HDF5_read(crystallite_Li0, fileHandle,'convergedLi') - call HDF5_read(crystallite_dPdF0, fileHandle,'convergeddPdF') - call HDF5_read(crystallite_Tstar0_v,fileHandle,'convergedTstar') + call HDF5_read(fileHandle,material_phase,'recordedPhase') + call HDF5_read(fileHandle, crystallite_F0,'convergedF') + call HDF5_read(fileHandle, crystallite_Fp0,'convergedFp') + call HDF5_read(fileHandle, crystallite_Fi0,'convergedFi') + call HDF5_read(fileHandle, crystallite_Lp0,'convergedLp') + call HDF5_read(fileHandle, crystallite_Li0,'convergedLi') + call HDF5_read(fileHandle, crystallite_dPdF0, 'convergeddPdF') + call HDF5_read(fileHandle,crystallite_Tstar0_v,'convergedTstar') groupPlasticID = HDF5_openGroup(fileHandle,'PlasticPhases') do ph = 1_pInt,size(phase_plasticity) write(PlasticItem,*) ph,'_' - call HDF5_read(plasticState(ph)%state0,groupPlasticID,trim(PlasticItem)//'convergedStateConst') + call HDF5_read(groupPlasticID,plasticState(ph)%state0,trim(PlasticItem)//'convergedStateConst') enddo call HDF5_closeGroup(groupPlasticID) groupHomogID = HDF5_openGroup(fileHandle,'HomogStates') do homog = 1_pInt, material_Nhomogenization write(HomogItem,*) homog,'_' - call HDF5_read(homogState(homog)%state0, groupHomogID,trim(HomogItem)//'convergedStateHomog') + call HDF5_read(groupHomogID,homogState(homog)%state0, trim(HomogItem)//'convergedStateHomog') enddo call HDF5_closeGroup(groupHomogID) @@ -282,26 +282,26 @@ subroutine CPFEM_age() write(rankStr,'(a1,i0)')'_',worldrank fileHandle = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5','w') - call HDF5_write(material_phase, fileHandle,'recordedPhase') - call HDF5_write(crystallite_F0, fileHandle,'convergedF') - call HDF5_write(crystallite_Fp0, fileHandle,'convergedFp') - 
call HDF5_write(crystallite_Fi0, fileHandle,'convergedFi') - call HDF5_write(crystallite_Lp0, fileHandle,'convergedLp') - call HDF5_write(crystallite_Li0, fileHandle,'convergedLi') - call HDF5_write(crystallite_dPdF0, fileHandle,'convergeddPdF') - call HDF5_write(crystallite_Tstar0_v,fileHandle,'convergedTstar') + call HDF5_write(fileHandle, material_phase,'recordedPhase') + call HDF5_write(fileHandle, crystallite_F0,'convergedF') + call HDF5_write(fileHandle, crystallite_Fp0,'convergedFp') + call HDF5_write(fileHandle, crystallite_Fi0,'convergedFi') + call HDF5_write(fileHandle, crystallite_Lp0,'convergedLp') + call HDF5_write(fileHandle, crystallite_Li0,'convergedLi') + call HDF5_write(fileHandle, crystallite_dPdF0,'convergeddPdF') + call HDF5_write(fileHandle,crystallite_Tstar0_v,'convergedTstar') groupPlastic = HDF5_addGroup(fileHandle,'PlasticPhases') do ph = 1_pInt,size(phase_plasticity) write(PlasticItem,*) ph,'_' - call HDF5_write(plasticState(ph)%state0,groupPlastic,trim(PlasticItem)//'convergedStateConst') + call HDF5_write(groupPlastic,plasticState(ph)%state0,trim(PlasticItem)//'convergedStateConst') enddo call HDF5_closeGroup(groupPlastic) groupHomog = HDF5_addGroup(fileHandle,'HomogStates') do homog = 1_pInt, material_Nhomogenization write(HomogItem,*) homog,'_' - call HDF5_write(homogState(homog)%state0,groupHomog,trim(HomogItem)//'convergedStateHomog') + call HDF5_write(groupHomog,homogState(homog)%state0,trim(HomogItem)//'convergedStateHomog') enddo call HDF5_closeGroup(groupHomog) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 old mode 100644 new mode 100755 index 32747218c..d6c3748f7 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -331,7 +331,7 @@ end subroutine HDF5_setLink !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pReal with 1 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal1(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal1(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -416,9 +416,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simpl call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal1 @@ -426,7 +426,7 @@ end subroutine HDF5_read_pReal1 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pReal with 2 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal2(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal2(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -511,9 +511,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simpl call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call 
IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal2 @@ -521,7 +521,7 @@ end subroutine HDF5_read_pReal2 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pReal with 3 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal3(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal3(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -606,9 +606,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simpl call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal3 @@ -616,7 +616,7 @@ end subroutine HDF5_read_pReal3 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pReal with 4 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal4(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal4(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -701,9 +701,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simpl call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal4 @@ -711,7 +711,7 @@ end subroutine HDF5_read_pReal4 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pReal with 5 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal5(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -796,9 +796,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simpl call h5dclose_f(dset_id, hdferr) if (hdferr 
< 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal5 @@ -806,7 +806,7 @@ end subroutine HDF5_read_pReal5 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pReal with 6 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal6(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal6(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -891,9 +891,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simpl call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal6 @@ -901,7 +901,7 @@ end subroutine HDF5_read_pReal6 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pReal with 7 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal7(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal7(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -986,9 +986,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simpl call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/memspace_id') end subroutine HDF5_read_pReal7 @@ -996,7 +996,7 @@ end subroutine HDF5_read_pReal7 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pInt with 1 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pInt1(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -1083,9 +1083,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple call h5dclose_f(dset_id, hdferr) if 
(hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt1 @@ -1093,7 +1093,7 @@ end subroutine HDF5_read_pInt1 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pInt with 2 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pInt2(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -1180,9 +1180,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt2 @@ -1190,7 +1190,7 @@ end subroutine HDF5_read_pInt2 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pInt with 3 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pInt3(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -1277,9 +1277,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt3 @@ -1287,7 +1287,7 @@ end subroutine HDF5_read_pInt3 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pInt with 4 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pInt4(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -1374,9 +1374,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple call h5dclose_f(dset_id, hdferr) if (hdferr 
< 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt4 @@ -1384,7 +1384,7 @@ end subroutine HDF5_read_pInt4 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pInt with 5 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pInt5(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -1471,9 +1471,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt5 @@ -1481,7 +1481,7 @@ end subroutine HDF5_read_pInt5 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pInt with 6 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pInt6(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -1568,9 +1568,9 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple call h5dclose_f(dset_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sclose_f/memspace_id') end subroutine HDF5_read_pInt6 @@ -1578,7 +1578,7 @@ end subroutine HDF5_read_pInt6 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of type pInt with 7 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pInt7(loc_id,dataset,datasetName,parallel) use numerics, only: & worldrank, & worldsize @@ -1665,20 +1665,20 @@ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple call h5dclose_f(dset_id, hdferr) if (hdferr < 0) 
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/filespace_id')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/memspace_id')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sclose_f/memspace_id')
 
 end subroutine HDF5_read_pInt7
 
 
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pReal with 1 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pReal1(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
- 
+
  implicit none
  real(pReal), intent(inout), dimension(:) :: dataset
  integer(HID_T), intent(in) :: loc_id !< file or group handle
@@ -1700,11 +1700,14 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel)
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return
 
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(1)
+
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
@@ -1713,6 +1716,7 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel)
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal1: MPI_allreduce')
  endif; endif
 #endif
+
  myStart = int([sum(outputSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:0),sum(outputSize)]
 
@@ -1721,19 +1725,18 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -1743,26 +1746,26 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: plist_id')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: plist_id')
  call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5dclose_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5sclose_f/filespace_id')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5sclose_f/memspace_id')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/memspace_id')
 
-end subroutine HDF5_write_PReal1
+end subroutine HDF5_write_pReal1
 
 
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pReal with 2 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pReal2(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -1788,11 +1791,14 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel)
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return
 
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(2)
+
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
@@ -1801,6 +1807,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel)
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal2: MPI_allreduce')
  endif; endif
 #endif
+
  myStart = int([0,sum(outputSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:1),sum(outputSize)]
 
@@ -1809,19 +1816,18 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -1831,7 +1837,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
@@ -1840,9 +1846,9 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel)
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/memspace_id')
 
 end subroutine HDF5_write_pReal2
 
@@ -1850,7 +1856,7 @@ end subroutine HDF5_write_pReal2
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pReal with 3 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pReal3(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -1876,11 +1882,14 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel)
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return
 
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(3)
+
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
@@ -1889,6 +1898,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel)
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal3: MPI_allreduce')
  endif; endif
 #endif
+
  myStart = int([0,0,sum(outputSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:2),sum(outputSize)]
 
@@ -1897,19 +1907,18 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -1919,7 +1928,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
@@ -1928,9 +1937,9 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel)
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/memspace_id')
 
 end subroutine HDF5_write_pReal3
 
@@ -1938,7 +1947,7 @@ end subroutine HDF5_write_pReal3
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pReal with 4 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pReal4(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -1964,11 +1973,14 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel)
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return
 
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(4)
+
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
@@ -1977,6 +1989,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel)
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal4: MPI_allreduce')
  endif; endif
 #endif
+
  myStart = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:3),sum(outputSize)]
 
@@ -1985,19 +1998,18 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2007,7 +2019,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
@@ -2016,9 +2028,9 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel)
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/memspace_id')
 
 end subroutine HDF5_write_pReal4
 
@@ -2026,7 +2038,7 @@ end subroutine HDF5_write_pReal4
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pReal with 5 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pReal5(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2052,11 +2064,14 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel)
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return
 
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(5)
+
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
@@ -2065,6 +2080,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel)
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal5: MPI_allreduce')
  endif; endif
 #endif
+
  myStart = int([0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:4),sum(outputSize)]
 
@@ -2073,19 +2089,18 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2095,7 +2110,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
@@ -2104,9 +2119,9 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel)
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/memspace_id')
 
 end subroutine HDF5_write_pReal5
 
@@ -2114,7 +2129,7 @@ end subroutine HDF5_write_pReal5
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pReal with 6 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pReal6(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2140,11 +2155,14 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel)
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return
 
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(6)
+
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
@@ -2153,6 +2171,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel)
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal6: MPI_allreduce')
  endif; endif
 #endif
+
  myStart = int([0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:5),sum(outputSize)]
 
@@ -2161,19 +2180,18 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2183,7 +2201,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
@@ -2192,9 +2210,9 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel)
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/memspace_id')
 
 end subroutine HDF5_write_pReal6
 
@@ -2202,7 +2220,7 @@ end subroutine HDF5_write_pReal6
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pReal with 7 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pReal7(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2228,11 +2246,14 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel)
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return
 
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(7)
+
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
@@ -2241,6 +2262,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel)
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal7: MPI_allreduce')
  endif; endif
 #endif
+
  myStart = int([0,0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:6),sum(outputSize)]
 
@@ -2249,19 +2271,18 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2271,7 +2292,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
@@ -2280,9 +2301,9 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel)
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/memspace_id')
 
 end subroutine HDF5_write_pReal7
 
 
@@ -2292,7 +2313,7 @@ end subroutine HDF5_write_pReal7
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pInt with 1 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pInt1(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2316,8 +2337,10 @@ subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel)
 !-------------------------------------------------------------------------------------------------
 ! determine shape of dataset
  localShape = shape(dataset)
 if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
@@ -2326,9 +2349,9 @@ subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel)
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt1: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([sum(outputSize(1:worldrank))],HSIZE_T)
@@ -2339,19 +2362,18 @@ subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2361,14 +2383,14 @@ subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
@@ -2380,7 +2402,7 @@ end subroutine HDF5_write_pInt1
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pInt with 2 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pInt2(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2404,8 +2426,10 @@ subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel)
 !-------------------------------------------------------------------------------------------------
 ! determine shape of dataset
  localShape = shape(dataset)
 if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
@@ -2414,9 +2438,9 @@ subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel)
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt2: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -2427,19 +2451,18 @@ subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2449,14 +2472,14 @@ subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
@@ -2468,7 +2491,7 @@ end subroutine HDF5_write_pInt2
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pInt with 3 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pInt3(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2492,8 +2515,10 @@ subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel)
 !-------------------------------------------------------------------------------------------------
 ! determine shape of dataset
  localShape = shape(dataset)
 if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
@@ -2502,9 +2527,9 @@ subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel)
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt3: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -2515,19 +2540,18 @@ subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2537,14 +2561,14 @@ subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
@@ -2556,7 +2580,7 @@ end subroutine HDF5_write_pInt3
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pInt with 4 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pInt4(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2580,8 +2604,10 @@ subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel)
 !-------------------------------------------------------------------------------------------------
 ! determine shape of dataset
  localShape = shape(dataset)
 if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
@@ -2590,9 +2616,9 @@ subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel)
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt4: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -2603,19 +2629,18 @@ subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2625,14 +2650,14 @@ subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
@@ -2644,7 +2669,7 @@ end subroutine HDF5_write_pInt4
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pInt with 5 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pInt5(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2668,8 +2693,10 @@ subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel)
 !-------------------------------------------------------------------------------------------------
 ! determine shape of dataset
  localShape = shape(dataset)
 if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
@@ -2678,9 +2705,9 @@ subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel)
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt5: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -2691,19 +2718,18 @@ subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2713,14 +2739,14 @@ subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
@@ -2732,7 +2758,7 @@ end subroutine HDF5_write_pInt5
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pInt with 6 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pInt6(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2756,8 +2782,10 @@ subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel)
 !-------------------------------------------------------------------------------------------------
 ! determine shape of dataset
  localShape = shape(dataset)
 if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
@@ -2766,9 +2794,9 @@ subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel)
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt6: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -2779,19 +2807,18 @@ subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2801,14 +2828,14 @@ subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
@@ -2820,7 +2847,7 @@ end subroutine HDF5_write_pInt6
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pInt with 7 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel)
+subroutine HDF5_write_pInt7(loc_id,dataset,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize
@@ -2844,8 +2871,10 @@ subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel)
 !-------------------------------------------------------------------------------------------------
 ! determine shape of dataset
  localShape = shape(dataset)
 if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 !--------------------------------------------------------------------------------------------------
@@ -2854,9 +2883,9 @@ subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel)
 #ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt7: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -2867,19 +2896,18 @@ subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel)
 ! create dataspace in memory (local shape)
  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
                          int(localShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5screate_simple_f/memspace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in file (global shape)
  call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
                          int(globalShape,HSIZE_T))
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5screate_simple_f/filespace_id')
 
 !--------------------------------------------------------------------------------------------------
 ! create dataset
  call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dcreate_f')
-
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
@@ -2889,14 +2917,14 @@ subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel)
 ! write
  call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                  file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dcreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dwrite_f')
 
 !--------------------------------------------------------------------------------------------------
 !close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/filespace_id')
  call h5sclose_f(memspace_id, hdferr)
@@ -2906,3 +2934,3 @@ end subroutine HDF5_write_pInt7
 
 end module HDF5_Utilities
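The hunks above and the results.f90 call site below both reflect the same API change running through this series: every HDF5_write (and HDF5_read) call now takes the file or group handle first and the data second. A minimal usage sketch of the new calling convention follows; it is not part of the patch, and the file name, group label, and the assumption that HDF5_openFile takes only a path are illustrative:

program demo_write
 use prec
 use HDF5, only: HID_T
 use HDF5_utilities
 implicit none
 integer(HID_T) :: fileHandle, groupHandle
 real(pReal), dimension(3,8) :: nodalData

 nodalData   = 0.0_pReal
 fileHandle  = HDF5_openFile('results.hdf5')           ! assumed signature; a mode flag may be required
 groupHandle = HDF5_addGroup(fileHandle,'example')     ! hypothetical group name
 call HDF5_write(groupHandle, nodalData, 'nodalData')  ! handle first, then dataset, then dataset name
 call HDF5_closeGroup(groupHandle)
 call HDF5_closeFile(fileHandle)
end program demo_write

The rank of nodalData (two) selects HDF5_write_pReal2 through the generic interface; the optional parallel flag is omitted, so the plain serial path is taken.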
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/results.f90 b/src/results.f90
old mode 100644
new mode 100755
index 5fe35f0ee..e8c5477f0
--- a/src/results.f90
+++ b/src/results.f90
@@ -954,7 +954,7 @@ subroutine results_writeVectorDataset(group,dataset,label,SIunit)
 integer(HID_T) :: groupHandle
 groupHandle = results_openGroup(group)
- call HDF5_write(dataset,groupHandle,label)
+ call HDF5_write(groupHandle,dataset,label)
 call HDF5_closeGroup(groupHandle)
 end subroutine results_writeVectorDataset

From 669d0c6c8f88b671275ebb4aa9d315c9e6ea3a84 Mon Sep 17 00:00:00 2001
From: Vitesh Shah
Date: Fri, 14 Dec 2018 11:37:44 +0100
Subject: [PATCH 59/89] made it nonexecutable

---
 src/CPFEM2.f90 | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 src/CPFEM2.f90

diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90
old mode 100755
new mode 100644

From d2c7b33cf6707aeaaa26ae618e456a492f462ba5 Mon Sep 17 00:00:00 2001
From: Vitesh Shah
Date: Fri, 14 Dec 2018 11:39:08 +0100
Subject: [PATCH 60/89] New files made nonexecutable

---
 src/HDF5_utilities.f90 | 0
 src/results.f90 | 0
 2 files changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 src/HDF5_utilities.f90
 mode change 100755 => 100644 src/results.f90

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
old mode 100755
new mode 100644
diff --git a/src/results.f90 b/src/results.f90
old mode 100755
new mode 100644

From 3e38c4ef8cb1b8740f2cb7357cde0b838846e8e2 Mon Sep 17 00:00:00 2001
From: Vitesh Shah
Date: Sat, 15 Dec 2018 17:21:03 +0100
Subject: [PATCH 61/89] The attribute interface works for single-processor
 output and single-valued attributes

---
 src/HDF5_utilities.f90 | 146 ++++++++++++++++++++++++++---------
 1 file changed, 93 insertions(+), 53 deletions(-)

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index d6c3748f7..c24df5bf8 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -60,13 +60,22 @@ module HDF5_utilities
 module procedure HDF5_write_pInt7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK
 end interface HDF5_write
-
+!--------------------------------------------------------------------------------------------------
+!> @brief attaches attributes of type char,pInt or pReal to a file/dataset/group
+!--------------------------------------------------------------------------------------------------
+ interface HDF5_attributes
+ module procedure HDF5_addStringAttribute
+ module procedure HDF5_addIntegerAttribute
+ module procedure HDF5_addRealAttribute
+ end interface HDF5_attributes
+!--------------------------------------------------------------------------------------------------
 public :: &
 HDF5_utilities_init, &
 HDF5_openFile, &
 HDF5_closeFile, &
 HDF5_addStringAttribute, &
 HDF5_addIntegerAttribute, &
+ HDF5_addRealAttribute, &
 HDF5_closeGroup ,&
 HDF5_openGroup, &
 HDF5_addGroup, &
@@ -275,7 +284,7 @@ end subroutine HDF5_addStringAttribute
!--------------------------------------------------------------------------------------------------
-!> @brief adds a StringAttribute to the results file
+!> @brief adds an IntegerAttribute to the results file
!--------------------------------------------------------------------------------------------------
subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue)
@@ -288,7 +297,7 @@ subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue)
 call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5screate_f')
- call 
h5tcopy_f(H5T_NATIVE_Integer, type_id, hdferr) + call h5tcopy_f(H5T_NATIVE_INTEGER, type_id, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5tcopy_f') call h5tset_size_f(type_id, 1_HSIZE_T, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5tset_size_f') @@ -305,6 +314,37 @@ subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue) end subroutine HDF5_addIntegerAttribute +!-------------------------------------------------------------------------------------------------- +!> @brief adds a Real number Attribute to the results file +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_addRealAttribute(entity,attrLabel,attrValue) + + implicit none + integer(HID_T), intent(in) :: entity + character(len=*), intent(in) :: attrLabel + real(pReal), intent(in) :: attrValue + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: attr_id, space_id, type_id + + call h5screate_f(H5S_SCALAR_F,space_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5screate_f') + call h5tcopy_f(H5T_NATIVE_DOUBLE, type_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5tcopy_f') + call h5tset_size_f(type_id, 8_HSIZE_T, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5tset_size_f') + call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5acreate_f') + call h5awrite_f(attr_id, type_id, attrValue, int([1],HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5awrite_f') + call h5aclose_f(attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5aclose_f') + call h5tclose_f(type_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5tclose_f') + call h5sclose_f(space_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5sclose_f') + +end subroutine HDF5_addRealAttribute + !-------------------------------------------------------------------------------------------------- !> @brief set link to object in results file !-------------------------------------------------------------------------------------------------- @@ -382,8 +422,8 @@ subroutine HDF5_read_pReal1(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -477,8 +517,8 @@ subroutine HDF5_read_pReal2(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -572,8 +612,8 @@ subroutine HDF5_read_pReal3(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -667,8 +707,8 @@ subroutine HDF5_read_pReal4(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -762,8 +802,8 @@ subroutine HDF5_read_pReal5(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -857,8 +897,8 @@ subroutine HDF5_read_pReal6(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -952,8 +992,8 @@ subroutine HDF5_read_pReal7(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1048,8 +1088,8 @@ subroutine HDF5_read_pInt1(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1145,8 +1185,8 @@ subroutine HDF5_read_pInt2(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1242,8 +1282,8 @@ subroutine HDF5_read_pInt3(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1339,8 +1379,8 @@ subroutine HDF5_read_pInt4(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1436,8 +1476,8 @@ subroutine HDF5_read_pInt5(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1533,8 +1573,8 @@ subroutine HDF5_read_pInt6(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1630,8 +1670,8 @@ subroutine HDF5_read_pInt7(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
create dataspace in memory (local shape) call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) -if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -1729,7 +1769,7 @@ subroutine HDF5_write_pReal1(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dget_space_f') @@ -1820,7 +1860,7 @@ subroutine HDF5_write_pReal2(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dget_space_f') @@ -1911,7 +1951,7 @@ subroutine HDF5_write_pReal3(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dget_space_f') @@ -2002,7 +2042,7 @@ subroutine HDF5_write_pReal4(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dget_space_f') @@ -2093,7 +2133,7 @@ subroutine HDF5_write_pReal5(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dget_space_f') @@ -2184,7 +2224,7 @@ subroutine HDF5_write_pReal6(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! 
create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dget_space_f') @@ -2275,7 +2315,7 @@ subroutine HDF5_write_pReal7(loc_id,dataset,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dget_space_f') @@ -2337,7 +2377,7 @@ subroutine HDF5_write_pInt1(loc_id,dataset,datasetName,parallel) !------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) -if (any(localShape(1:size(localShape)) == 0)) return + if (any(localShape(1:size(localShape)) == 0)) return !------------------------------------------------------------------------------------------------- ! creating a property list for transfer properties @@ -2366,7 +2406,7 @@ if (any(localShape(1:size(localShape)) == 0)) return !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dget_space_f') @@ -2426,7 +2466,7 @@ subroutine HDF5_write_pInt2(loc_id,dataset,datasetName,parallel) !------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) -if (any(localShape(1:size(localShape)) == 0)) return + if (any(localShape(1:size(localShape)) == 0)) return !------------------------------------------------------------------------------------------------- ! creating a property list for transfer properties @@ -2455,7 +2495,7 @@ if (any(localShape(1:size(localShape)) == 0)) return !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dget_space_f') @@ -2515,7 +2555,7 @@ subroutine HDF5_write_pInt3(loc_id,dataset,datasetName,parallel) !------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) -if (any(localShape(1:size(localShape)) == 0)) return + if (any(localShape(1:size(localShape)) == 0)) return !------------------------------------------------------------------------------------------------- ! 
creating a property list for transfer properties @@ -2544,7 +2584,7 @@ if (any(localShape(1:size(localShape)) == 0)) return !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dget_space_f') @@ -2604,7 +2644,7 @@ subroutine HDF5_write_pInt4(loc_id,dataset,datasetName,parallel) !------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) -if (any(localShape(1:size(localShape)) == 0)) return + if (any(localShape(1:size(localShape)) == 0)) return !------------------------------------------------------------------------------------------------- ! creating a property list for transfer properties @@ -2633,7 +2673,7 @@ if (any(localShape(1:size(localShape)) == 0)) return !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dget_space_f') @@ -2693,7 +2733,7 @@ subroutine HDF5_write_pInt5(loc_id,dataset,datasetName,parallel) !------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) -if (any(localShape(1:size(localShape)) == 0)) return + if (any(localShape(1:size(localShape)) == 0)) return !------------------------------------------------------------------------------------------------- ! creating a property list for transfer properties @@ -2722,7 +2762,7 @@ if (any(localShape(1:size(localShape)) == 0)) return !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dget_space_f') @@ -2782,7 +2822,7 @@ subroutine HDF5_write_pInt6(loc_id,dataset,datasetName,parallel) !------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) -if (any(localShape(1:size(localShape)) == 0)) return + if (any(localShape(1:size(localShape)) == 0)) return !------------------------------------------------------------------------------------------------- ! creating a property list for transfer properties @@ -2811,7 +2851,7 @@ if (any(localShape(1:size(localShape)) == 0)) return !-------------------------------------------------------------------------------------------------- ! 
create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dget_space_f') @@ -2871,7 +2911,7 @@ subroutine HDF5_write_pInt7(loc_id,dataset,datasetName,parallel) !------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) -if (any(localShape(1:size(localShape)) == 0)) return + if (any(localShape(1:size(localShape)) == 0)) return !------------------------------------------------------------------------------------------------- ! creating a property list for transfer properties @@ -2900,7 +2940,7 @@ if (any(localShape(1:size(localShape)) == 0)) return !-------------------------------------------------------------------------------------------------- ! create dataspace in file (global shape) - call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & + call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, & int(globalShape,HSIZE_T)) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dget_space_f') @@ -2935,7 +2975,7 @@ end subroutine HDF5_write_pInt7 end module HDF5_Utilities -!!!!!!!!!!!! + From 36c7157ee956869085356d3f1a962867f471d24f Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 17 Dec 2018 08:33:46 +0100 Subject: [PATCH 62/89] vectorized --- src/plastic_kinematichardening.f90 | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 85daab322..53cd2b08e 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -504,9 +504,8 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) do j = 1_pInt, prm%totalNslip - Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) ! sum of all gdot*SchmidTensor gives Lp + Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) - ! Calculation of the tangent of Lp ! sensitivity of Lp if (dNeq0(gdot_pos(j))) then dgdot_dtau_pos = gdot_pos(j)*prm%n_slip/tau_pos(j) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & @@ -616,24 +615,22 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) sumGamma = sum(stt%accshear(:,of)) do j = 1_pInt, prm%totalNslip - dot%crss(j,of) = & ! evolution of slip resistance j - dot_product(prm%interaction_SlipSlip(j,:),abs(gdot_pos+gdot_neg)) * & - ( prm%theta1(j) + & - (prm%theta0(j) - prm%theta1(j) & + dot%crss(j,of) = & + dot_product(prm%interaction_SlipSlip(j,:),dot%accshear(:,of)) * & + ( prm%theta1(j) + (prm%theta0(j) - prm%theta1(j) & + prm%theta0(j)*prm%theta1(j)*sumGamma/prm%tau1(j)) & *exp(-sumGamma*prm%theta0(j)/prm%tau1(j)) & ! V term depending on the harding law ) - dot%crss_back(j,of) = & ! evolution of back stress resistance j - stt%sense(j,of)*abs(gdot_pos(j)+gdot_neg(j)) * & - ( prm%theta1_b(j) + & - (prm%theta0_b(j) - prm%theta1_b(j) & - + prm%theta0_b(j)*prm%theta1_b(j)/(prm%tau1_b(j)+stt%chi0(j,of)) & - *(stt%accshear(j,of)-state(instance)%gamma0(j,of))) & - *exp(-(state(instance)%accshear(j,of)-state(instance)%gamma0(j,of)) & - *prm%theta0_b(j)/(prm%tau1_b(j)+state(instance)%chi0(j,of))) & - ) ! 
V term depending on the harding law for back stress
+ enddo
+ dot%crss_back(:,of) = &
+ stt%sense(:,of)*dot%accshear(:,of) * &
+ ( prm%theta1_b + &
+ (prm%theta0_b - prm%theta1_b &
+ + prm%theta0_b*prm%theta1_b/(prm%tau1_b+stt%chi0(:,of))*(stt%accshear(:,of)-stt%gamma0(:,of))&
+ ) &
+ *exp(-(stt%accshear(:,of)-stt%gamma0(:,of)) *prm%theta0_b/(prm%tau1_b+stt%chi0(:,of))) &
+ ) ! V term depending on the hardening law for back stress
- enddo
 end associate
 end subroutine plastic_kinehardening_dotState

From b2062f2a1218ea0635ba9e4fc29c9db0904f4c4e Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Mon, 17 Dec 2018 16:11:01 +0100
Subject: [PATCH 63/89] labels were stored including [] and comments

---
 src/config.f90 | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/config.f90 b/src/config.f90
index 7ae800f30..3fa90684b 100644
--- a/src/config.f90
+++ b/src/config.f90
@@ -1,4 +1,4 @@
-!--------------------------------------------------------------------------------------------------
+!-------------------------------------------------------------------------------------------------
 !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
 !> @brief Reads in the material configuration from file
 !> @details Reads the material configuration file, where solverJobName.materialConfig takes
@@ -220,7 +220,7 @@ subroutine parseFile(sectionNames,part,line, &
 partPosition = [partPosition, i] ! needed when actually storing content
 do i = 1_pInt, size(partPosition) -1_pInt
- sectionNames(i) = trim(adjustl(fileContent(partPosition(i))))
+ sectionNames(i) = trim(adjustl(IO_getTag(fileContent(partPosition(i)),'[',']')))
 do j = partPosition(i) + 1_pInt, partPosition(i+1) -1_pInt
 call part(i)%add(trim(adjustl(fileContent(j))))
 enddo

From 3f3e23c2c89839f55122124095b41ce3fb070de6 Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Mon, 17 Dec 2018 16:15:16 +0100
Subject: [PATCH 64/89] polished addAttribute and used it to store metadata

---
 src/CPFEM2.f90 | 37 ++++----
 src/DAMASK_spectral.f90 | 4 +-
 src/HDF5_utilities.f90 | 168 ++++++++++++++++++++++------------
 src/plastic_phenopowerlaw.f90 | 3 +-
 src/results.f90 | 65 +++++++------
 5 files changed, 164 insertions(+), 113 deletions(-)

diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90
index b7de1d346..50d9cd312 100644
--- a/src/CPFEM2.f90
+++ b/src/CPFEM2.f90
@@ -154,13 +154,13 @@ subroutine CPFEM_init
 fileHandle = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5')
- call HDF5_read(fileHandle,material_phase,'recordedPhase')
- call HDF5_read(fileHandle, crystallite_F0,'convergedF')
- call HDF5_read(fileHandle, crystallite_Fp0,'convergedFp')
- call HDF5_read(fileHandle, crystallite_Fi0,'convergedFi')
- call HDF5_read(fileHandle, crystallite_Lp0,'convergedLp')
- call HDF5_read(fileHandle, crystallite_Li0,'convergedLi')
- call HDF5_read(fileHandle, crystallite_dPdF0, 'convergeddPdF')
+ call HDF5_read(fileHandle,material_phase, 'recordedPhase')
+ call HDF5_read(fileHandle,crystallite_F0, 'convergedF')
+ call HDF5_read(fileHandle,crystallite_Fp0, 'convergedFp')
+ call HDF5_read(fileHandle,crystallite_Fi0, 'convergedFi')
+ call HDF5_read(fileHandle,crystallite_Lp0, 'convergedLp')
+ call HDF5_read(fileHandle,crystallite_Li0, 'convergedLi')
+ call HDF5_read(fileHandle,crystallite_dPdF0, 'convergeddPdF')
 call HDF5_read(fileHandle,crystallite_Tstar0_v,'convergedTstar')
 groupPlasticID = HDF5_openGroup(fileHandle,'PlasticPhases')
@@ -282,13 +282,13 @@ subroutine CPFEM_age()
 write(rankStr,'(a1,i0)')'_',worldrank
 fileHandle = 
HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5','w') - call HDF5_write(fileHandle, material_phase,'recordedPhase') - call HDF5_write(fileHandle, crystallite_F0,'convergedF') - call HDF5_write(fileHandle, crystallite_Fp0,'convergedFp') - call HDF5_write(fileHandle, crystallite_Fi0,'convergedFi') - call HDF5_write(fileHandle, crystallite_Lp0,'convergedLp') - call HDF5_write(fileHandle, crystallite_Li0,'convergedLi') - call HDF5_write(fileHandle, crystallite_dPdF0,'convergeddPdF') + call HDF5_write(fileHandle,material_phase, 'recordedPhase') + call HDF5_write(fileHandle,crystallite_F0, 'convergedF') + call HDF5_write(fileHandle,crystallite_Fp0, 'convergedFp') + call HDF5_write(fileHandle,crystallite_Fi0, 'convergedFi') + call HDF5_write(fileHandle,crystallite_Lp0, 'convergedLp') + call HDF5_write(fileHandle,crystallite_Li0, 'convergedLi') + call HDF5_write(fileHandle,crystallite_dPdF0, 'convergeddPdF') call HDF5_write(fileHandle,crystallite_Tstar0_v,'convergedTstar') groupPlastic = HDF5_addGroup(fileHandle,'PlasticPhases') @@ -317,7 +317,7 @@ end subroutine CPFEM_age !-------------------------------------------------------------------------------------------------- !> @brief triggers writing of the results !-------------------------------------------------------------------------------------------------- -subroutine CPFEM_results(inc) +subroutine CPFEM_results(inc,time) use prec, only: & pInt use results @@ -327,13 +327,12 @@ subroutine CPFEM_results(inc) implicit none integer(pInt), intent(in) :: inc - character(len=16) :: incChar + real(pReal), intent(in) :: time call results_openJobFile - write(incChar,*) inc - call HDF5_closeGroup(results_addGroup(trim('inc'//trim(adjustl(incChar))))) - call results_setLink(trim('inc'//trim(adjustl(incChar))),'current') + call results_addIncrement(inc,time) call constitutive_results() + call results_removeLink('current') ! put this into closeJobFile call results_closeJobFile end subroutine CPFEM_results diff --git a/src/DAMASK_spectral.f90 b/src/DAMASK_spectral.f90 index 781598f3d..fca67c97d 100644 --- a/src/DAMASK_spectral.f90 +++ b/src/DAMASK_spectral.f90 @@ -161,7 +161,6 @@ program DAMASK_spectral call results_openJobFile() - call results_addIncrement() call results_closeJobFile() !-------------------------------------------------------------------------------------------------- ! initialize field solver information @@ -426,6 +425,7 @@ program DAMASK_spectral writeUndeformed: if (interface_restartInc < 1_pInt) then write(6,'(1/,a)') ' ... writing initial configuration to file ........................' + call CPFEM_results(0_pInt,0.0_pReal) do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, & ! QUESTION: why not starting i at 0 instead of murky 1? min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt) @@ -602,7 +602,7 @@ program DAMASK_spectral if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write') enddo fileOffset = fileOffset + sum(outputSize) ! forward to current file position - call CPFEM_results(totalIncsCounter) + call CPFEM_results(totalIncsCounter,time) endif if ( loadCases(currentLoadCase)%restartFrequency > 0_pInt & ! writing of restart info requested ... .and. mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! ... 
and at frequency of writing restart information
diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index c24df5bf8..c04694265 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -35,7 +35,7 @@ module HDF5_utilities
 module procedure HDF5_read_pInt4
 module procedure HDF5_read_pInt5
 module procedure HDF5_read_pInt6
- module procedure HDF5_read_pInt7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK
+ module procedure HDF5_read_pInt7
 end interface HDF5_read
@@ -57,25 +57,26 @@ module HDF5_utilities
 module procedure HDF5_write_pInt4
 module procedure HDF5_write_pInt5
 module procedure HDF5_write_pInt6
- module procedure HDF5_write_pInt7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK
+ module procedure HDF5_write_pInt7
 end interface HDF5_write
+
!--------------------------------------------------------------------------------------------------
 !> @brief attaches attributes of type char,pInt or pReal to a file/dataset/group
!--------------------------------------------------------------------------------------------------
- interface HDF5_attributes
- module procedure HDF5_addStringAttribute
- module procedure HDF5_addIntegerAttribute
- module procedure HDF5_addRealAttribute
- end interface HDF5_attributes
+ interface HDF5_addAttribute
+ module procedure HDF5_addAttribute_str
+ module procedure HDF5_addAttribute_pInt
+ module procedure HDF5_addAttribute_pReal
+ end interface HDF5_addAttribute
+
+
!--------------------------------------------------------------------------------------------------
 public :: &
 HDF5_utilities_init, &
 HDF5_openFile, &
 HDF5_closeFile, &
- HDF5_addStringAttribute, &
- HDF5_addIntegerAttribute, &
- HDF5_addRealAttribute, &
+ HDF5_addAttribute, &
 HDF5_closeGroup ,&
 HDF5_openGroup, &
 HDF5_addGroup, &
@@ -253,118 +254,165 @@ end subroutine HDF5_closeGroup
!--------------------------------------------------------------------------------------------------
-!> @brief adds a StringAttribute to the results file
+!> @brief adds a string attribute to the path given relative to the location
!--------------------------------------------------------------------------------------------------
-subroutine HDF5_addStringAttribute(entity,attrLabel,attrValue)
+subroutine HDF5_addAttribute_str(loc_id,attrLabel,attrValue,path)
 implicit none
- integer(HID_T), intent(in) :: entity
+ integer(HID_T), intent(in) :: loc_id
 character(len=*), intent(in) :: attrLabel, attrValue
+ character(len=*), intent(in), optional :: path
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: attr_id, space_id, type_id
+ logical :: attrExists
+ character(len=256) :: p
+
+ if (present(path)) then
+ p = trim(path)
+ else
+ p = '.' 
+ endif
 call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5screate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5screate_f')
 call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5tcopy_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5tcopy_f')
 call h5tset_size_f(type_id, int(len(trim(attrValue)),HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5tset_size_f')
- call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5acreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5tset_size_f')
+ call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5aexists_by_name_f')
+ if (attrExists) then
+ call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5adelete_by_name_f')
+ endif
+ call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),type_id,space_id,attr_id,hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5acreate_f')
 call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5awrite_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5awrite_f')
 call h5aclose_f(attr_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5aclose_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5aclose_f')
 call h5tclose_f(type_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5tclose_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5tclose_f')
 call h5sclose_f(space_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addStringAttribute: h5sclose_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5sclose_f')
-end subroutine HDF5_addStringAttribute
+end subroutine HDF5_addAttribute_str
!--------------------------------------------------------------------------------------------------
-!> @brief adds an IntegerAttribute to the results file
+!> @brief adds an integer attribute to the path given relative to the location
!--------------------------------------------------------------------------------------------------
-subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue)
+subroutine HDF5_addAttribute_pInt(loc_id,attrLabel,attrValue,path)
 implicit none
- integer(HID_T), intent(in) :: entity
+ integer(HID_T), intent(in) :: loc_id
 character(len=*), intent(in) :: attrLabel
 integer(pInt), intent(in) :: attrValue
+ character(len=*), intent(in), optional :: path
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: attr_id, space_id, type_id
+ logical :: attrExists
+ character(len=256) :: p
+
+ if (present(path)) then
+ p = trim(path)
+ else
+ p = '.' 
+ endif
 call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5screate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5screate_f')
 call h5tcopy_f(H5T_NATIVE_INTEGER, type_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5tcopy_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5tcopy_f')
 call h5tset_size_f(type_id, 1_HSIZE_T, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5tset_size_f')
- call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5acreate_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5tset_size_f')
+ call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5aexists_by_name_f')
+ if (attrExists) then
+ call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5adelete_by_name_f')
+ endif
+ call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),type_id,space_id,attr_id,hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5acreate_f')
 call h5awrite_f(attr_id, type_id, attrValue, int([1],HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5awrite_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5awrite_f')
 call h5aclose_f(attr_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5aclose_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5aclose_f')
 call h5tclose_f(type_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5tclose_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5tclose_f')
 call h5sclose_f(space_id,hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addIntegerAttribute: h5sclose_f')
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5sclose_f')
+
+end subroutine HDF5_addAttribute_pInt
-end subroutine HDF5_addIntegerAttribute
!--------------------------------------------------------------------------------------------------
-!> @brief adds a Real number Attribute to the results file
+!> @brief adds a real attribute to the path given relative to the location
!--------------------------------------------------------------------------------------------------
-subroutine HDF5_addRealAttribute(entity,attrLabel,attrValue)
+subroutine HDF5_addAttribute_pReal(loc_id,attrLabel,attrValue,path)
 implicit none
- integer(HID_T), intent(in) :: entity
+ integer(HID_T), intent(in) :: loc_id
 character(len=*), intent(in) :: attrLabel
- real(pReal), intent(in) :: attrValue
+ real(pReal), intent(in) :: attrValue
+ character(len=*), intent(in), optional :: path
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: attr_id, space_id, type_id
+ logical :: attrExists
+ character(len=256) :: p
+
+ if (present(path)) then
+ p = trim(path)
+ else
+ p = '.' 
+ endif call h5screate_f(H5S_SCALAR_F,space_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5screate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5screate_f') call h5tcopy_f(H5T_NATIVE_DOUBLE, type_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5tcopy_f') - call h5tset_size_f(type_id, 8_HSIZE_T, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5tset_size_f') - call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5acreate_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5tcopy_f') + call h5tset_size_f(type_id, 8_HSIZE_T, hdferr) ! ToDo + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5tset_size_f') + call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5aexists_by_name_f') + if (attrExists) then + call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5adelete_by_name_f') + endif + call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),type_id,space_id,attr_id,hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5acreate_f') call h5awrite_f(attr_id, type_id, attrValue, int([1],HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5awrite_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5awrite_f') call h5aclose_f(attr_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5aclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5aclose_f') call h5tclose_f(type_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5tclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5tclose_f') call h5sclose_f(space_id,hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addRealAttribute: h5sclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5sclose_f') + +end subroutine HDF5_addAttribute_pReal -end subroutine HDF5_addRealAttribute !-------------------------------------------------------------------------------------------------- !> @brief set link to object in results file !-------------------------------------------------------------------------------------------------- -subroutine HDF5_setLink(fileHandle,path,link) +subroutine HDF5_setLink(loc_id,target_name,link_name) use hdf5 implicit none - character(len=*), intent(in) :: path, link - integer(HID_T), intent(in) :: fileHandle + character(len=*), intent(in) :: target_name, link_name + integer(HID_T), intent(in) :: loc_id integer(HDF5_ERR_TYPE) :: hdferr logical :: linkExists - call h5lexists_f(fileHandle, link,linkExists, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link)//')') + call h5lexists_f(loc_id, link_name,linkExists, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link_name)//')') if (linkExists) then - call h5ldelete_f(fileHandle,link, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5ldelete_soft_f ('//trim(link)//')') + call h5ldelete_f(loc_id,link_name, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 
'HDF5_setLink: h5ldelete_soft_f ('//trim(link_name)//')') endif - call h5lcreate_soft_f(path, fileHandle, link, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lcreate_soft_f ('//trim(path)//' '//trim(link)//')') + call h5lcreate_soft_f(target_name, loc_id, link_name, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lcreate_soft_f ('//trim(target_name)//' '//trim(link_name)//')') end subroutine HDF5_setLink diff --git a/src/plastic_phenopowerlaw.f90 b/src/plastic_phenopowerlaw.f90 index 677d2872c..719292ac5 100644 --- a/src/plastic_phenopowerlaw.f90 +++ b/src/plastic_phenopowerlaw.f90 @@ -757,7 +757,7 @@ subroutine plastic_phenopowerlaw_results(instance,group) integer(pInt), intent(in) :: instance character(len=*) :: group integer(pInt) :: o - + associate(prm => param(instance), stt => state(instance)) outputsLoop: do o = 1_pInt,size(prm%outputID) select case(prm%outputID(o)) @@ -768,7 +768,6 @@ subroutine plastic_phenopowerlaw_results(instance,group) end select enddo outputsLoop end associate - !results_writeVectorDataset #else integer(pInt), intent(in) :: instance character(len=*) :: group diff --git a/src/results.f90 b/src/results.f90 index e8c5477f0..b1329a477 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -24,18 +24,11 @@ module results results_openJobFile, & results_closeJobFile, & results_addIncrement, & - HDF5_mappingPhase, & - HDF5_mappingHomog, & - HDF5_mappingCrystallite, & - HDF5_backwardMappingPhase, & - HDF5_backwardMappingHomog, & - HDF5_backwardMappingCrystallite, & - HDF5_mappingCells, & results_addGroup, & results_openGroup, & results_writeVectorDataset, & results_setLink, & - HDF5_removeLink + results_removeLink contains subroutine results_init @@ -62,7 +55,9 @@ subroutine results_openJobFile() implicit none resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','a',.true.) 
- + call HDF5_addAttribute(resultsFile,'DADF5version',0.1_pReal) + call HDF5_addAttribute(resultsFile,'DAMASKversion',DAMASKVERSION) + end subroutine results_openJobFile @@ -80,10 +75,16 @@ end subroutine results_closeJobFile !-------------------------------------------------------------------------------------------------- !> @brief closes the results file !-------------------------------------------------------------------------------------------------- -subroutine results_addIncrement() +subroutine results_addIncrement(inc,time) + implicit none + integer(pInt), intent(in) :: inc + real(pReal), intent(in) :: time + character(len=pStringLen) :: incChar - call HDF5_addIntegerAttribute(resultsFile,'test',1) + call HDF5_closeGroup(results_addGroup(trim('inc'//trim(adjustl(incChar))))) + call results_setLink(trim('inc'//trim(adjustl(incChar))),'current') + call HDF5_addAttribute(resultsFile,'time/s',time,trim('inc'//trim(adjustl(incChar)))) end subroutine results_addIncrement @@ -112,6 +113,7 @@ integer(HID_T) function results_addGroup(groupName) end function results_addGroup + !-------------------------------------------------------------------------------------------------- !> @brief set link to object in results file !-------------------------------------------------------------------------------------------------- @@ -126,10 +128,11 @@ subroutine results_setLink(path,link) end subroutine results_setLink + !-------------------------------------------------------------------------------------------------- !> @brief remove link to an object !-------------------------------------------------------------------------------------------------- -subroutine HDF5_removeLink(link) +subroutine results_removeLink(link) use hdf5 implicit none @@ -137,9 +140,27 @@ subroutine HDF5_removeLink(link) integer :: hdferr call h5ldelete_f(resultsFile,link, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_removeLink: h5ldelete_soft_f ('//trim(link)//')') + if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'results_removeLink: h5ldelete_soft_f ('//trim(link)//')') -end subroutine HDF5_removeLink +end subroutine results_removeLink + + +!-------------------------------------------------------------------------------------------------- +!> @brief stores a vector dataset in a group +!-------------------------------------------------------------------------------------------------- +subroutine results_writeVectorDataset(group,dataset,label,SIunit) + + implicit none + character(len=*), intent(in) :: SIunit,label,group + real(pReal), intent(inout), dimension(:,:) :: dataset + integer(HID_T) :: groupHandle + + groupHandle = results_openGroup(group) + call HDF5_write(groupHandle,dataset,label) + call HDF5_addAttribute(groupHandle,'Unit',SIunit,label) + call HDF5_closeGroup(groupHandle) + +end subroutine results_writeVectorDataset !-------------------------------------------------------------------------------------------------- @@ -943,21 +964,5 @@ subroutine HDF5_mappingCells(mapping) end subroutine HDF5_mappingCells -!-------------------------------------------------------------------------------------------------- -!> @brief creates a new vector dataset in the given group location -!-------------------------------------------------------------------------------------------------- -subroutine results_writeVectorDataset(group,dataset,label,SIunit) - - implicit none - character(len=*), intent(in) :: SIunit,label,group - real(pReal), intent(inout), dimension(:,:) :: dataset - integer(HID_T) :: groupHandle - - 
groupHandle = results_openGroup(group)
- call HDF5_write(groupHandle,dataset,label)
- call HDF5_closeGroup(groupHandle)
-
-end subroutine results_writeVectorDataset
-
 end module results

From d00154299bd584859aff1fe948e175c95b38be7a Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Mon, 17 Dec 2018 20:13:45 +0100
Subject: [PATCH 65/89] missing information on increment caused an HDF5 error

mistook a write() statement for a left-over debug message

---
 src/results.f90 | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/results.f90 b/src/results.f90
index b1329a477..ccb3ec13c 100644
--- a/src/results.f90
+++ b/src/results.f90
@@ -82,6 +82,7 @@ subroutine results_addIncrement(inc,time)
 real(pReal), intent(in) :: time
 character(len=pStringLen) :: incChar
+ write(incChar,*) inc
 call HDF5_closeGroup(results_addGroup(trim('inc'//trim(adjustl(incChar)))))
 call results_setLink(trim('inc'//trim(adjustl(incChar))),'current')
 call HDF5_addAttribute(resultsFile,'time/s',time,trim('inc'//trim(adjustl(incChar))))

From 27322847a48c21d9ade9fc83642b0bc3bf52d53e Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Mon, 17 Dec 2018 21:30:10 +0100
Subject: [PATCH 66/89] reporting command line call

---
 src/results.f90 | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/results.f90 b/src/results.f90
index ccb3ec13c..718a5dbd9 100644
--- a/src/results.f90
+++ b/src/results.f90
@@ -52,11 +52,15 @@ end subroutine results_init
 subroutine results_openJobFile()
 use DAMASK_interface, only: &
 getSolverJobName
+
 implicit none
+ character(len=pStringLen) :: commandLine
 resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','a',.true.)
- call HDF5_addAttribute(resultsFile,'DADF5version',0.1_pReal)
- call HDF5_addAttribute(resultsFile,'DAMASKversion',DAMASKVERSION)
+ call HDF5_addAttribute(resultsFile,'DADF5',0.1_pReal)
+ call HDF5_addAttribute(resultsFile,'DAMASK',DAMASKVERSION)
+ call get_command(commandLine)
+ call HDF5_addAttribute(resultsFile,'call',trim(commandLine))
 end subroutine results_openJobFile

From 63c417fbe0d960f4af3590bf69f0de5b6100978c Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Tue, 18 Dec 2018 06:34:18 +0100
Subject: [PATCH 67/89] failed if dataset does not exist

empty datasets are not written out

---
 src/HDF5_utilities.f90 | 39 ++++++++++++++++++++++++++++++++++-----
 src/results.f90 | 3 ++-
 2 files changed, 36 insertions(+), 6 deletions(-)

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index c04694265..2a05f101c 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -82,7 +82,8 @@ module HDF5_utilities
 HDF5_addGroup, &
 HDF5_read, &
 HDF5_write, &
- HDF5_setLink
+ HDF5_setLink, &
+ HDF5_objectExists
 contains
 subroutine HDF5_utilities_init
@@ -241,18 +242,46 @@ end function HDF5_openGroup
!--------------------------------------------------------------------------------------------------
 !> @brief close a group
!--------------------------------------------------------------------------------------------------
-subroutine HDF5_closeGroup(ID)
+subroutine HDF5_closeGroup(group_id)
 implicit none
- integer(HID_T), intent(in) :: ID
+ integer(HID_T), intent(in) :: group_id
 integer(HDF5_ERR_TYPE) :: hdferr
- call h5gclose_f(ID, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_closeGroup: h5gclose_f (el is ID)', el = int(ID,pInt))
+ call h5gclose_f(group_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_closeGroup: h5gclose_f (el is ID)', el = int(group_id,pInt))
 end subroutine HDF5_closeGroup
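Patches 61 through 67 together settle the metadata workflow: HDF5_write returns without creating anything when the process-local dataset shape contains a zero, and the new HDF5_objectExists (added directly below) lets callers attach attributes only to objects that actually made it into the file, which is how results_writeVectorDataset now guards its 'Unit' attribute. A minimal sketch of the resulting calling pattern; the subroutine name, the handle argument, and the dataset labels here are illustrative only and not part of these patches:

subroutine demo_guardedOutput(fid)                       ! illustrative wrapper, not in the patch series
 use prec, only: &
   pReal
 use HDF5, only: &
   HID_T
 use HDF5_utilities

 implicit none
 integer(HID_T), intent(in) :: fid                       ! an open handle from HDF5_openFile

 real(pReal), dimension(3,0) :: empty                    ! zero-size local shape: HDF5_write returns early
 real(pReal), dimension(3,5) :: v

 v = 1.0_pReal
 call HDF5_write(fid,empty,'emptySet')                   ! no dataset 'emptySet' appears in the file
 call HDF5_write(fid,v,'velocity')
 if (HDF5_objectExists(fid,'velocity')) &                ! attribute only on objects that exist
   call HDF5_addAttribute(fid,'Unit','m/s','velocity')

end subroutine demo_guardedOutput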
+!--------------------------------------------------------------------------------------------------
+!> @brief check whether a group or a dataset exists
+!--------------------------------------------------------------------------------------------------
+logical function HDF5_objectExists(loc_id,path)
+
+ implicit none
+ integer(HID_T), intent(in) :: loc_id
+ character(len=*), intent(in), optional :: path
+ integer(HDF5_ERR_TYPE) :: hdferr
+ character(len=256) :: p
+
+ if (present(path)) then
+ p = trim(path)
+ else
+ p = '.'
+ endif
+
+ call h5lexists_f(loc_id, p, HDF5_objectExists, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_objectExists: h5lexists_f')
+
+ if(HDF5_objectExists) then
+ call h5oexists_by_name_f(loc_id, p, HDF5_objectExists, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_objectExists: h5oexists_by_name_f')
+ endif
+
+end function HDF5_objectExists
+
+
 !--------------------------------------------------------------------------------------------------
 !> @brief adds a string attribute to the path given relative to the location
 !--------------------------------------------------------------------------------------------------
diff --git a/src/results.f90 b/src/results.f90 index 718a5dbd9..d38178993 100644 --- a/src/results.f90 +++ b/src/results.f90 @@ -162,7 +162,8 @@ subroutine results_writeVectorDataset(group,dataset,label,SIunit) groupHandle = results_openGroup(group) call HDF5_write(groupHandle,dataset,label) - call HDF5_addAttribute(groupHandle,'Unit',SIunit,label) + if (HDF5_objectExists(groupHandle,label)) & + call HDF5_addAttribute(groupHandle,'Unit',SIunit,label) call HDF5_closeGroup(groupHandle) end subroutine results_writeVectorDataset From 0e4dede6062ca314443285c4718ea733c31f84bb Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 18 Dec 2018 09:52:13 +0100 Subject: [PATCH 68/89] slowly approaching style of phenopowerlaw --- src/plastic_kinematichardening.f90 | 36 ++++++++++++++---------------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 53cd2b08e..a06ccfeef 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -491,8 +491,7 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & - tau_pos,tau_neg - real(pReal) :: & + tau_pos,tau_neg, & dgdot_dtau_pos,dgdot_dtau_neg associate(prm => paramNew(instance), stt => state(instance)) @@ -501,25 +500,24 @@ call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & Mp,instance,of) + where (dNeq0(gdot_pos)) + dgdot_dtau_pos = gdot_pos*prm%n_slip/tau_pos + else where + dgdot_dtau_pos = 0.0_pReal + end where + +where (dNeq0(gdot_neg)) + dgdot_dtau_neg = gdot_neg*prm%n_slip/tau_neg +else where + dgdot_dtau_neg = 0.0_pReal +end where do j = 1_pInt, prm%totalNslip - - Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) - - if (dNeq0(gdot_pos(j))) then - dgdot_dtau_pos = gdot_pos(j)*prm%n_slip/tau_pos(j) - forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & - dLp_dMp(k,l,m,n) = & - dLp_dMp(k,l,m,n) + dgdot_dtau_pos*prm%Schmid_slip(k,l,j)*prm%nonSchmid_pos(m,n,j) - endif - - if (dNeq0(gdot_neg(j))) then - dgdot_dtau_neg = gdot_neg(j)*prm%n_slip/tau_neg(j) - forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) 
& - dLp_dMp(k,l,m,n) = & - dLp_dMp(k,l,m,n) + dgdot_dtau_neg*prm%Schmid_slip(k,l,j)*prm%nonSchmid_neg(m,n,j) - endif - + Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) + forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & + dLp_dMp(k,l,m,n) = dLp_dMp(k,l,m,n) & + + dgdot_dtau_pos(j)*prm%Schmid_slip(k,l,j)*prm%nonSchmid_pos(m,n,j) & + + dgdot_dtau_neg(j)*prm%Schmid_slip(k,l,j)*prm%nonSchmid_neg(m,n,j) enddo end associate From 89196b953ff4437eda32f359fb2e86f6d4da2923 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 19 Dec 2018 06:57:28 +0100 Subject: [PATCH 69/89] simplified --- src/plastic_kinematichardening.f90 | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index a06ccfeef..ce2ede265 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -453,14 +453,8 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & tau_pos = tau_pos - stt%crss_back(:,of) tau_neg = tau_neg - stt%crss_back(:,of) - gdot_pos = 0.5_pReal * prm%gdot0 * & - (abs(tau_pos)/ & - state(instance)%crss(:,of))**prm%n_slip & - *sign(1.0_pReal,tau_pos) - gdot_neg = 0.5_pReal * prm%gdot0 * & - (abs(tau_neg)/ & - state(instance)%crss(:,of))**prm%n_slip & - *sign(1.0_pReal,tau_neg) + gdot_pos = sign(0.5_pReal * prm%gdot0 *(abs(tau_pos)/ state(instance)%crss(:,of))**prm%n_slip,tau_pos) + gdot_neg = sign(0.5_pReal * prm%gdot0 *(abs(tau_neg)/ state(instance)%crss(:,of))**prm%n_slip,tau_neg) end associate end subroutine plastic_kinehardening_shearRates From 2476dd4d8b3ae4625d33713cac51dbe577bc972d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Wed, 19 Dec 2018 07:30:43 +0100 Subject: [PATCH 70/89] shearRates more similar to kinetics --- src/plastic_kinematichardening.f90 | 38 ++++++++++++++++++------------ 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index ce2ede265..98ef866ac 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -425,8 +425,9 @@ end subroutine plastic_kinehardening_init !> @brief calculation of shear rates (\dot \gamma) !-------------------------------------------------------------------------------------------------- subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of) - + Mp,instance,of, dgdot_dtau_pos, & + dgdot_dtau_neg) + use prec use math implicit none @@ -440,6 +441,9 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & gdot_neg, & !< shear rates from negative line segments tau_pos, & !< shear stress on positive line segments tau_neg !< shear stress on negative line segments + real(pReal), dimension(paramNew(instance)%totalNslip), intent(out),optional :: & + dgdot_dtau_pos, & + dgdot_dtau_neg integer(pInt) :: & i @@ -455,7 +459,22 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & gdot_pos = sign(0.5_pReal * prm%gdot0 *(abs(tau_pos)/ state(instance)%crss(:,of))**prm%n_slip,tau_pos) gdot_neg = sign(0.5_pReal * prm%gdot0 *(abs(tau_neg)/ state(instance)%crss(:,of))**prm%n_slip,tau_neg) - + + if (present(dgdot_dtau_pos)) then + where(dNeq0(gdot_pos)) + dgdot_dtau_pos = gdot_pos*prm%n_slip/tau_pos + else where + dgdot_dtau_pos = 0.0_pReal + end where + endif + if (present(dgdot_dtau_neg)) then + where(dNeq0(gdot_neg)) + dgdot_dtau_neg = gdot_neg*prm%n_slip/tau_neg + else 
where + dgdot_dtau_neg = 0.0_pReal + end where + endif + end associate end subroutine plastic_kinehardening_shearRates @@ -493,18 +512,7 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) dLp_dMp = 0.0_pReal call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of) - where (dNeq0(gdot_pos)) - dgdot_dtau_pos = gdot_pos*prm%n_slip/tau_pos - else where - dgdot_dtau_pos = 0.0_pReal - end where - -where (dNeq0(gdot_neg)) - dgdot_dtau_neg = gdot_neg*prm%n_slip/tau_neg -else where - dgdot_dtau_neg = 0.0_pReal -end where + Mp,instance,of,dgdot_dtau_pos,dgdot_dtau_neg) do j = 1_pInt, prm%totalNslip Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) From 9094bb9a646e0560edd33314adf85baf75a85a45 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 21 Dec 2018 22:41:39 +0100 Subject: [PATCH 71/89] private functions at the end --- src/plastic_kinematichardening.f90 | 123 +++++++++++++++-------------- 1 file changed, 63 insertions(+), 60 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 98ef866ac..03713d300 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -21,8 +21,6 @@ module plastic_kinehardening integer(pInt), dimension(:), allocatable, target, public :: & plastic_kinehardening_Noutput !< number of outputs per instance - integer(pInt), dimension(:), allocatable, public, protected :: & - plastic_kinehardening_totalNslip !< no. of slip system used in simulation integer(pInt), dimension(:,:), allocatable, private :: & @@ -193,13 +191,14 @@ subroutine plastic_kinehardening_init plastic_kinehardening_output = '' allocate(plastic_kinehardening_Noutput(maxNinstance), source=0_pInt) allocate(plastic_kinehardening_Nslip(lattice_maxNslipFamily,maxNinstance), source=0_pInt) - allocate(plastic_kinehardening_totalNslip(maxNinstance), source=0_pInt) + allocate(param(maxNinstance)) ! one container of parameters per instance allocate(paramNew(maxNinstance)) allocate(state(maxNinstance)) allocate(state0(maxNinstance)) allocate(dotState(maxNinstance)) allocate(deltaState(maxNinstance)) + do p = 1_pInt, size(phase_plasticityInstance) if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) cycle instance = phase_plasticityInstance(p) ! 
which instance of my phase @@ -421,63 +420,6 @@ param(instance)%outputID = prm%outputID end subroutine plastic_kinehardening_init -!-------------------------------------------------------------------------------------------------- -!> @brief calculation of shear rates (\dot \gamma) -!-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of, dgdot_dtau_pos, & - dgdot_dtau_neg) - use prec - use math - - implicit none - real(pReal), dimension(3,3), intent(in) :: & - Mp - integer(pInt), intent(in) :: & - instance, & !< instance of that phase - of !< index of phaseMember - real(pReal), dimension(paramNew(instance)%totalNslip), intent(out) :: & - gdot_pos, & !< shear rates from positive line segments - gdot_neg, & !< shear rates from negative line segments - tau_pos, & !< shear stress on positive line segments - tau_neg !< shear stress on negative line segments - real(pReal), dimension(paramNew(instance)%totalNslip), intent(out),optional :: & - dgdot_dtau_pos, & - dgdot_dtau_neg - - integer(pInt) :: & - i - - associate(prm => paramNew(instance), stt => state(instance)) - do i = 1_pInt, prm%totalNslip - tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) - tau_neg(i) = math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)) - enddo - - tau_pos = tau_pos - stt%crss_back(:,of) - tau_neg = tau_neg - stt%crss_back(:,of) - - gdot_pos = sign(0.5_pReal * prm%gdot0 *(abs(tau_pos)/ state(instance)%crss(:,of))**prm%n_slip,tau_pos) - gdot_neg = sign(0.5_pReal * prm%gdot0 *(abs(tau_neg)/ state(instance)%crss(:,of))**prm%n_slip,tau_neg) - - if (present(dgdot_dtau_pos)) then - where(dNeq0(gdot_pos)) - dgdot_dtau_pos = gdot_pos*prm%n_slip/tau_pos - else where - dgdot_dtau_pos = 0.0_pReal - end where - endif - if (present(dgdot_dtau_neg)) then - where(dNeq0(gdot_neg)) - dgdot_dtau_neg = gdot_neg*prm%n_slip/tau_neg - else where - dgdot_dtau_neg = 0.0_pReal - end where - endif - -end associate -end subroutine plastic_kinehardening_shearRates - !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent @@ -525,6 +467,7 @@ end associate end subroutine plastic_kinehardening_LpAndItsTangent + !-------------------------------------------------------------------------------------------------- !> @brief calculates (instantaneous) incremental change of microstructure !-------------------------------------------------------------------------------------------------- @@ -635,6 +578,7 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) end subroutine plastic_kinehardening_dotState + !-------------------------------------------------------------------------------------------------- !> @brief return array of constitutive results !-------------------------------------------------------------------------------------------------- @@ -782,4 +726,63 @@ pure subroutine kinetics(prm,stt,of,Mp,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dt end subroutine kinetics + +!-------------------------------------------------------------------------------------------------- +!> @brief calculation of shear rates (\dot \gamma) +!-------------------------------------------------------------------------------------------------- +subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & + Mp,instance,of, dgdot_dtau_pos, & + dgdot_dtau_neg) + use prec + use math + + implicit none + 
real(pReal), dimension(3,3), intent(in) :: & + Mp + integer(pInt), intent(in) :: & + instance, & !< instance of that phase + of !< index of phaseMember + real(pReal), dimension(paramNew(instance)%totalNslip), intent(out) :: & + gdot_pos, & !< shear rates from positive line segments + gdot_neg, & !< shear rates from negative line segments + tau_pos, & !< shear stress on positive line segments + tau_neg !< shear stress on negative line segments + real(pReal), dimension(paramNew(instance)%totalNslip), intent(out),optional :: & + dgdot_dtau_pos, & + dgdot_dtau_neg + + integer(pInt) :: & + i + + associate(prm => paramNew(instance), stt => state(instance)) + do i = 1_pInt, prm%totalNslip + tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) + tau_neg(i) = math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)) + enddo + + tau_pos = tau_pos - stt%crss_back(:,of) + tau_neg = tau_neg - stt%crss_back(:,of) + + gdot_pos = sign(0.5_pReal * prm%gdot0 *(abs(tau_pos)/ state(instance)%crss(:,of))**prm%n_slip,tau_pos) + gdot_neg = sign(0.5_pReal * prm%gdot0 *(abs(tau_neg)/ state(instance)%crss(:,of))**prm%n_slip,tau_neg) + + if (present(dgdot_dtau_pos)) then + where(dNeq0(gdot_pos)) + dgdot_dtau_pos = gdot_pos*prm%n_slip/tau_pos + else where + dgdot_dtau_pos = 0.0_pReal + end where + endif + if (present(dgdot_dtau_neg)) then + where(dNeq0(gdot_neg)) + dgdot_dtau_neg = gdot_neg*prm%n_slip/tau_neg + else where + dgdot_dtau_neg = 0.0_pReal + end where + endif + +end associate + +end subroutine plastic_kinehardening_shearRates + end module plastic_kinehardening From b3d14b00b655cc201545f5978cf4417690bb782d Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Fri, 21 Dec 2018 23:07:31 +0100 Subject: [PATCH 72/89] cleaning --- src/plastic_kinematichardening.f90 | 142 ++++++++++++----------------- 1 file changed, 60 insertions(+), 82 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 03713d300..edd0d3a8e 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -47,8 +47,6 @@ module plastic_kinehardening n_slip, & !< stress exponent for slip (input parameter) aTolResistance, & aTolShear - - real(pReal), dimension(:), allocatable, private :: & crss0, & !< initial critical shear stress for slip (input parameter, per family) theta0, & !< initial hardening rate of forward stress for each slip @@ -58,16 +56,13 @@ module plastic_kinehardening tau1, & tau1_b, & nonSchmidCoeff - real(pReal), dimension(:,:), allocatable, private :: & + real(pReal), dimension(:,:), allocatable, private :: & interaction_slipslip !< latent hardening matrix real(pReal), allocatable, dimension(:,:,:) :: & Schmid_slip, & Schmid_twin, & nonSchmid_pos, & nonSchmid_neg - - real(pReal), dimension(:,:), allocatable, private :: & - hardeningMatrix_SlipSlip integer(pInt) :: & totalNslip !< total number of active slip system integer(pInt), allocatable, dimension(:) :: & @@ -151,7 +146,7 @@ subroutine plastic_kinehardening_init o, i, p, & phase, & instance, & - maxNinstance, & + Ninstance, & NipcMyPhase, & outputSize, & offset_slip, & @@ -179,25 +174,25 @@ subroutine plastic_kinehardening_init write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" - maxNinstance = int(count(phase_plasticity == PLASTICITY_KINEHARDENING_ID),pInt) - if (maxNinstance == 0_pInt) return + Ninstance = int(count(phase_plasticity == PLASTICITY_KINEHARDENING_ID),pInt) + if (Ninstance == 0_pInt) return if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 
0_pInt) & - write(6,'(a,1x,i5,/)') '# instances:',maxNinstance + write(6,'(a,1x,i5,/)') '# instances:',Ninstance - allocate(plastic_kinehardening_sizePostResult(maxval(phase_Noutput),maxNinstance), & + allocate(plastic_kinehardening_sizePostResult(maxval(phase_Noutput),Ninstance), & source=0_pInt) - allocate(plastic_kinehardening_output(maxval(phase_Noutput),maxNinstance)) + allocate(plastic_kinehardening_output(maxval(phase_Noutput),Ninstance)) plastic_kinehardening_output = '' - allocate(plastic_kinehardening_Noutput(maxNinstance), source=0_pInt) - allocate(plastic_kinehardening_Nslip(lattice_maxNslipFamily,maxNinstance), source=0_pInt) + allocate(plastic_kinehardening_Noutput(Ninstance), source=0_pInt) + allocate(plastic_kinehardening_Nslip(lattice_maxNslipFamily,Ninstance), source=0_pInt) - allocate(param(maxNinstance)) ! one container of parameters per instance - allocate(paramNew(maxNinstance)) - allocate(state(maxNinstance)) - allocate(state0(maxNinstance)) - allocate(dotState(maxNinstance)) - allocate(deltaState(maxNinstance)) + allocate(param(Ninstance)) ! one container of parameters per instance + allocate(paramNew(Ninstance)) + allocate(state(Ninstance)) + allocate(state0(Ninstance)) + allocate(dotState(Ninstance)) + allocate(deltaState(Ninstance)) do p = 1_pInt, size(phase_plasticityInstance) if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) cycle @@ -303,73 +298,59 @@ subroutine plastic_kinehardening_init endif end do -param(instance)%outputID = prm%outputID + param(instance)%outputID = prm%outputID nslip = prm%totalNslip !-------------------------------------------------------------------------------------------------- ! allocate state arrays - sizeDotState = nSlip & !< crss - + nSlip & !< crss_back - + nSlip !< accumulated (absolute) shear - - sizeDeltaState = nSlip & !< sense of acting shear stress (-1 or +1) - + nSlip & !< backstress at last switch of stress sense - + nSlip !< accumulated shear at last switch of stress sense + NipcMyPhase = count(material_phase == p) ! number of constituents with my phase + sizeDotState = size(['crss ','crss_back', 'accshear ']) * prm%TotalNslip + sizeDeltaState = size(['sense ', 'chi0 ', 'gamma0']) * prm%TotalNslip + sizeState = sizeDotState + sizeDeltaState - sizeState = sizeDotState + sizeDeltaState - NipcMyPhase = count(material_phase == p) ! number of IPCs containing my phase - call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & - nSlip,0_pInt,0_pInt) - plasticState(p)%sizePostResults = sum(plastic_kinehardening_sizePostResult(:,phase_plasticityInstance(p))) - plasticState(p)%offsetDeltaState = sizeDotState + call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & + nSlip,0_pInt,0_pInt) + + plasticState(p)%sizePostResults = sum(plastic_kinehardening_sizePostResult(:,phase_plasticityInstance(p))) + plasticState(p)%offsetDeltaState = sizeDotState - endindex = 0_pInt - o = endIndex ! 
offset of dotstate index relative to state index + startIndex = 1_pInt + endIndex = nSlip + stt%crss => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) + dot%crss => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) + plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance - startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip - stt%crss => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) - dot%crss => plasticState(p)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) - plasticState(p)%aTolState(startIndex-o:endIndex-o) = prm%aTolResistance - -! ............................................. startIndex = endIndex + 1_pInt endIndex = endIndex + nSlip - stt%crss_back => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) - dot%crss_back => plasticState(p)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) - plasticState(p)%aTolState(startIndex-o:endIndex-o) = prm%aTolResistance + stt%crss_back => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) + dot%crss_back => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) + plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance -! ............................................. startIndex = endIndex + 1_pInt endIndex = endIndex + nSlip - stt%accshear => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) - dot%accshear => plasticState(p)%dotState (startIndex-o:endIndex-o,1:NipcMyPhase) - plasticState(p)%aTolState(startIndex-o:endIndex-o) = prm%aTolShear + stt%accshear => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) + dot%accshear => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) + plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolShear !---------------------------------------------------------------------------------------------- !locally define deltaState alias o = endIndex -! ............................................. startIndex = endIndex + 1_pInt endIndex = endIndex + nSlip stt%sense => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) delta%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) -! ............................................. startIndex = endIndex + 1_pInt endIndex = endIndex + nSlip stt%chi0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) delta%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) - -! ............................................. startIndex = endIndex + 1_pInt endIndex = endIndex + nSlip stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) delta%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) - end associate end do @@ -558,21 +539,17 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) sumGamma = sum(stt%accshear(:,of)) do j = 1_pInt, prm%totalNslip - dot%crss(j,of) = & - dot_product(prm%interaction_SlipSlip(j,:),dot%accshear(:,of)) * & - ( prm%theta1(j) + (prm%theta0(j) - prm%theta1(j) & - + prm%theta0(j)*prm%theta1(j)*sumGamma/prm%tau1(j)) & - *exp(-sumGamma*prm%theta0(j)/prm%tau1(j)) & ! 
V term depending on the harding law - ) + dot%crss(j,of) = dot_product(prm%interaction_SlipSlip(j,:),dot%accshear(:,of)) & + * ( prm%theta1(j) + prm%theta0(j) - prm%theta1(j) & + + prm%theta0(j)*prm%theta1(j)*sumGamma/prm%tau1(j)*exp(-sumGamma*prm%theta0(j)/prm%tau1(j)) & + ) enddo - dot%crss_back(:,of) = & - stt%sense(:,of)*dot%accshear(:,of) * & + dot%crss_back(:,of) = stt%sense(:,of)*dot%accshear(:,of) * & ( prm%theta1_b + & - (prm%theta0_b - prm%theta1_b & - + prm%theta0_b*prm%theta1_b/(prm%tau1_b+stt%chi0(:,of))*(stt%accshear(:,of)-stt%gamma0(:,of))& - ) & - *exp(-(stt%accshear(:,of)-stt%gamma0(:,of)) *prm%theta0_b/(prm%tau1_b+stt%chi0(:,of))) & - ) ! V term depending on the harding law for back stress + (prm%theta0_b - prm%theta1_b & + + prm%theta0_b*prm%theta1_b/(prm%tau1_b+stt%chi0(:,of))*(stt%accshear(:,of)-stt%gamma0(:,of))& + ) *exp(-(stt%accshear(:,of)-stt%gamma0(:,of)) *prm%theta0_b/(prm%tau1_b+stt%chi0(:,of))) & + ) end associate @@ -656,34 +633,34 @@ end function plastic_kinehardening_postResults !> @details: Shear rates are calculated only optionally. NOTE: Against the common convention, the !> result (i.e. intent(out)) variables are the last to have the optional arguments at the end !-------------------------------------------------------------------------------------------------- -pure subroutine kinetics(prm,stt,of,Mp,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) +pure subroutine kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) use prec, only: & dNeq0 use math, only: & math_mul33xx33 implicit none - type(tParameters), intent(in) :: & - prm - type(tKinehardeningState), intent(in) :: & - stt - integer(pInt), intent(in) :: & - of - real(pReal), dimension(prm%totalNslip), intent(out) :: & - gdot_pos, & - gdot_neg - real(pReal), dimension(prm%totalNslip), optional, intent(out) :: & - dgdot_dtau_pos, & - dgdot_dtau_neg real(pReal), dimension(3,3), intent(in) :: & Mp + integer(pInt), intent(in) :: & + instance, & + of + real(pReal), dimension(paramNew(instance)%totalNslip), intent(out) :: & + gdot_pos, & + gdot_neg + real(pReal), dimension(paramNew(instance)%totalNslip), optional, intent(out) :: & + dgdot_dtau_pos, & + dgdot_dtau_neg - real(pReal), dimension(prm%totalNslip) :: & + + real(pReal), dimension(paramNew(instance)%totalNslip) :: & tau_pos, & tau_neg integer(pInt) :: i logical :: nonSchmidActive + associate( prm => paramNew(instance), stt => state(instance)) + nonSchmidActive = size(prm%nonSchmidCoeff) > 0_pInt do i = 1_pInt, prm%totalNslip @@ -723,6 +700,7 @@ pure subroutine kinetics(prm,stt,of,Mp,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dt dgdot_dtau_neg = 0.0_pReal end where endif + end associate end subroutine kinetics From 8a27431c6d9e29b8977d786d88cfb350e0637731 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 22 Dec 2018 13:28:16 +0100 Subject: [PATCH 73/89] bugfix bracket falsely removed in last commit --- src/plastic_kinematichardening.f90 | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index edd0d3a8e..8cbf80726 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -540,8 +540,9 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) do j = 1_pInt, prm%totalNslip dot%crss(j,of) = dot_product(prm%interaction_SlipSlip(j,:),dot%accshear(:,of)) & - * ( prm%theta1(j) + prm%theta0(j) - prm%theta1(j) & - + prm%theta0(j)*prm%theta1(j)*sumGamma/prm%tau1(j)*exp(-sumGamma*prm%theta0(j)/prm%tau1(j)) & + * 
( prm%theta1(j) & + + (prm%theta0(j) - prm%theta1(j) + prm%theta0(j)*prm%theta1(j)*sumGamma/prm%tau1(j)) & + *exp(-sumGamma*prm%theta0(j)/prm%tau1(j)) & ) enddo dot%crss_back(:,of) = stt%sense(:,of)*dot%accshear(:,of) * & From c97a46826a943f7c7ae0eee09e3fb6ab6047f64f Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 22 Dec 2018 14:11:34 +0100 Subject: [PATCH 74/89] simplified --- src/plastic_kinematichardening.f90 | 95 ++++++++++-------------------- 1 file changed, 32 insertions(+), 63 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 8cbf80726..2a9245140 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -89,8 +89,7 @@ module plastic_kinehardening type(tKinehardeningState), allocatable, dimension(:), private :: & dotState, & deltaState, & - state, & - state0 + state public :: & @@ -144,7 +143,6 @@ subroutine plastic_kinehardening_init output_ID integer(pInt) :: & o, i, p, & - phase, & instance, & Ninstance, & NipcMyPhase, & @@ -190,7 +188,6 @@ subroutine plastic_kinehardening_init allocate(param(Ninstance)) ! one container of parameters per instance allocate(paramNew(Ninstance)) allocate(state(Ninstance)) - allocate(state0(Ninstance)) allocate(dotState(Ninstance)) allocate(deltaState(Ninstance)) @@ -242,7 +239,7 @@ subroutine plastic_kinehardening_init prm%theta1_b = config_phase(p)%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) ! expand: family => system - !prm%crss0 = math_expand(prm%crss0, prm%Nslip) + prm%crss0 = math_expand(prm%crss0, prm%Nslip) prm%tau1 = math_expand(prm%tau1,prm%Nslip) prm%tau1_b = math_expand(prm%tau1_b, prm%Nslip) prm%theta0 = math_expand(prm%theta0,prm%Nslip) @@ -304,7 +301,7 @@ subroutine plastic_kinehardening_init ! allocate state arrays NipcMyPhase = count(material_phase == p) ! number of constituents with my phase sizeDotState = size(['crss ','crss_back', 'accshear ']) * prm%TotalNslip - sizeDeltaState = size(['sense ', 'chi0 ', 'gamma0']) * prm%TotalNslip + sizeDeltaState = size(['sense ', 'chi0 ', 'gamma0' ]) * prm%TotalNslip sizeState = sizeDotState + sizeDeltaState call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & @@ -318,6 +315,7 @@ subroutine plastic_kinehardening_init endIndex = nSlip stt%crss => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) dot%crss => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) + stt%crss = spread(prm%crss0, 2, NipcMyPhase) plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance startIndex = endIndex + 1_pInt @@ -350,17 +348,19 @@ subroutine plastic_kinehardening_init endIndex = endIndex + nSlip stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) delta%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) + + plasticState(p)%state0 = plasticState(p)%state ! ToDo: this could be done centrally + + offset_slip = plasticState(p)%nSlip + plasticState(p)%slipRate => & + plasticState(p)%dotState(offset_slip+1:offset_slip+plasticState(p)%nSlip,1:NipcMyPhase) + plasticState(p)%accumulatedSlip => & + plasticState(p)%state(offset_slip+1:offset_slip+plasticState(p)%nSlip,1:NipcMyPhase) + end associate end do - - -!-------------------------------------------------------------------------------------------------- -! allocation of variables whose size depends on the total number of active slip systems - initializeInstances: do phase = 1_pInt, size(phase_plasticity) ! 
loop through all phases in material.config - myPhase2: if (phase_plasticity(phase) == PLASTICITY_KINEHARDENING_ID) then ! only consider my phase - NipcMyPhase = count(material_phase == phase) ! number of IPCs containing my phase - instance = phase_plasticityInstance(phase) ! which instance of my phase + end subroutine plastic_kinehardening_init !-------------------------------------------------------------------------------------------------- ! sanity checks @@ -373,33 +373,7 @@ subroutine plastic_kinehardening_init ! .and. param(instance)%tau1_b(1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' ! if (param(instance)%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' ! if (param(instance)%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' - if (extmsg /= '') then - extmsg = trim(extmsg)//' ('//PLASTICITY_KINEHARDENING_label//')' ! prepare error message identifier - call IO_error(211_pInt,ip=instance,ext_msg=extmsg) - endif - - - offset_slip = plasticState(phase)%nSlip - plasticState(phase)%slipRate => & - plasticState(phase)%dotState(offset_slip+1:offset_slip+plasticState(phase)%nSlip,1:NipcMyPhase) - plasticState(phase)%accumulatedSlip => & - plasticState(phase)%state(offset_slip+1:offset_slip+plasticState(phase)%nSlip,1:NipcMyPhase) - - endindex = 0_pInt - o = endIndex ! offset of dotstate index relative to state index - - startIndex = endIndex + 1_pInt - endIndex = endIndex + paramNew(instance)%totalNslip - state0 (instance)%crss => plasticState(phase)%state0 (startIndex :endIndex ,1:NipcMyPhase) - - state0(instance)%crss = spread(math_expand(paramNew(instance)%crss0,& - paramNew(instance)%Nslip), & - 2, NipcMyPhase) - endif myPhase2 - enddo initializeInstances - -end subroutine plastic_kinehardening_init !-------------------------------------------------------------------------------------------------- @@ -427,15 +401,13 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & - tau_pos,tau_neg, & dgdot_dtau_pos,dgdot_dtau_neg associate(prm => paramNew(instance), stt => state(instance)) Lp = 0.0_pReal dLp_dMp = 0.0_pReal - call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of,dgdot_dtau_pos,dgdot_dtau_neg) + call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) do j = 1_pInt, prm%totalNslip Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) @@ -466,11 +438,9 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) real(pReal), dimension(paramNew(instance)%totalNslip) :: & gdot_pos,gdot_neg, & - tau_pos,tau_neg, & sense - call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of) + call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg) sense = merge(state(instance)%sense(:,of), & ! keep existing... sign(1.0_pReal,gdot_pos+gdot_neg), & ! ...or have a defined dEq0(gdot_pos+gdot_neg,1e-10_pReal)) ! 
current sense of shear direction @@ -523,18 +493,14 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) j real(pReal), dimension(paramNew(instance)%totalNslip) :: & - gdot_pos,gdot_neg, & - tau_pos,tau_neg + gdot_pos,gdot_neg real(pReal) :: & sumGamma associate( prm => paramNew(instance), stt => state(instance), dot => dotState(instance)) - - call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of) - + call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg) dot%accshear(:,of) = abs(gdot_pos+gdot_neg) sumGamma = sum(stt%accshear(:,of)) @@ -577,15 +543,16 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) o,c,j real(pReal), dimension(paramNew(instance)%totalNslip) :: & - gdot_pos,gdot_neg, & - tau_pos,tau_neg + gdot_pos,gdot_neg postResults = 0.0_pReal c = 0_pInt - call plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of) + associate( prm => paramNew(instance), stt => state(instance)) + + call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg) + outputsLoop: do o = 1_pInt,plastic_kinehardening_Noutput(instance) select case(prm%outputID(o)) case (crss_ID) @@ -613,10 +580,12 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) c = c + prm%totalNslip case (shearrate_ID) + postResults(c+1_pInt:c+prm%totalNslip) = gdot_pos+gdot_neg c = c + prm%totalNslip case (resolvedstress_ID) + do j = 1_pInt, prm%totalNslip postResults(c+j) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,j)) enddo @@ -709,9 +678,8 @@ end subroutine kinetics !-------------------------------------------------------------------------------------------------- !> @brief calculation of shear rates (\dot \gamma) !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & - Mp,instance,of, dgdot_dtau_pos, & - dgdot_dtau_neg) +subroutine plastic_kinehardening_shearRates(Mp,instance,of, & + gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) use prec use math @@ -723,13 +691,13 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & of !< index of phaseMember real(pReal), dimension(paramNew(instance)%totalNslip), intent(out) :: & gdot_pos, & !< shear rates from positive line segments - gdot_neg, & !< shear rates from negative line segments - tau_pos, & !< shear stress on positive line segments - tau_neg !< shear stress on negative line segments + gdot_neg !< shear rates from negative line segments real(pReal), dimension(paramNew(instance)%totalNslip), intent(out),optional :: & dgdot_dtau_pos, & dgdot_dtau_neg - + real(pReal), dimension(paramNew(instance)%totalNslip) :: & + tau_pos, & !< shear stress on positive line segments + tau_neg !< shear stress on negative line segments integer(pInt) :: & i @@ -741,6 +709,7 @@ subroutine plastic_kinehardening_shearRates(gdot_pos,gdot_neg,tau_pos,tau_neg, & tau_pos = tau_pos - stt%crss_back(:,of) tau_neg = tau_neg - stt%crss_back(:,of) + gdot_pos = sign(0.5_pReal * prm%gdot0 *(abs(tau_pos)/ state(instance)%crss(:,of))**prm%n_slip,tau_pos) gdot_neg = sign(0.5_pReal * prm%gdot0 *(abs(tau_neg)/ state(instance)%crss(:,of))**prm%n_slip,tau_neg) From b46a5b3135883e18e241d8a16da3a29bd866190e Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 22 Dec 2018 14:47:02 +0100 Subject: [PATCH 75/89] save space --- src/plastic_kinematichardening.f90 | 18 +++++++++++------- 1 file changed, 11 
insertions(+), 7 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 2a9245140..8188c1480 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -440,6 +440,8 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) gdot_pos,gdot_neg, & sense + associate( prm => paramNew(instance), stt => state(instance), del => deltaState(instance)) + call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg) sense = merge(state(instance)%sense(:,of), & ! keep existing... sign(1.0_pReal,gdot_pos+gdot_neg), & ! ...or have a defined @@ -463,15 +465,17 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) #endif !-------------------------------------------------------------------------------------------------- ! switch in sense of shear? - where(dNeq(sense,state(instance)%sense(:,of),0.1_pReal)) - deltaState(instance)%sense (:,of) = sense - state(instance)%sense(:,of) ! switch sense - deltaState(instance)%chi0 (:,of) = abs(state(instance)%crss_back(:,of)) - state(instance)%chi0(:,of) ! remember current backstress magnitude - deltaState(instance)%gamma0(:,of) = state(instance)%accshear(:,of) - state(instance)%gamma0(:,of) ! remember current accumulated shear + where(dNeq(sense,stt%sense(:,of),0.1_pReal)) + del%sense (:,of) = sense - stt%sense(:,of) ! switch sense + del%chi0 (:,of) = abs(stt%crss_back(:,of)) - stt%chi0(:,of) ! remember current backstress magnitude + del%gamma0(:,of) = stt%accshear(:,of) - stt%gamma0(:,of) ! remember current accumulated shear else where - deltaState(instance)%sense (:,of) = 0.0_pReal ! no change - deltaState(instance)%chi0 (:,of) = 0.0_pReal - deltaState(instance)%gamma0(:,of) = 0.0_pReal + del%sense (:,of) = 0.0_pReal + del%chi0 (:,of) = 0.0_pReal + del%gamma0(:,of) = 0.0_pReal end where + + end associate end subroutine plastic_kinehardening_deltaState From e5ef7edbd2f80b1689525b2341f045837932c3fe Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sat, 22 Dec 2018 15:22:41 +0100 Subject: [PATCH 76/89] kinetics similar to phenopowerlaw --- src/plastic_kinematichardening.f90 | 138 +++++++---------------------- 1 file changed, 32 insertions(+), 106 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 8188c1480..58c8c4529 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -83,8 +83,7 @@ module plastic_kinehardening end type type(tParameters), dimension(:), allocatable, private :: & - param, & !< containers of constitutive parameters (len Ninstance) - paramNew ! temp + param !< containers of constitutive parameters (len Ninstance) type(tKinehardeningState), allocatable, dimension(:), private :: & dotState, & @@ -99,7 +98,7 @@ module plastic_kinehardening plastic_kinehardening_deltaState, & plastic_kinehardening_postResults private :: & - plastic_kinehardening_shearRates + kinetics contains @@ -149,7 +148,6 @@ subroutine plastic_kinehardening_init outputSize, & offset_slip, & startIndex, endIndex, & - nSlip, & sizeDotState, & sizeState, & sizeDeltaState @@ -164,7 +162,6 @@ subroutine plastic_kinehardening_init character(len=65536), dimension(:), allocatable :: & outputs character(len=65536) :: & - tag = '', & extmsg = '', & structure = '' @@ -186,7 +183,6 @@ subroutine plastic_kinehardening_init allocate(plastic_kinehardening_Nslip(lattice_maxNslipFamily,Ninstance), source=0_pInt) allocate(param(Ninstance)) ! 
one container of parameters per instance - allocate(paramNew(Ninstance)) allocate(state(Ninstance)) allocate(dotState(Ninstance)) allocate(deltaState(Ninstance)) @@ -194,7 +190,7 @@ subroutine plastic_kinehardening_init do p = 1_pInt, size(phase_plasticityInstance) if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) cycle instance = phase_plasticityInstance(p) ! which instance of my phase - associate(prm => paramNew(phase_plasticityInstance(p)), & + associate(prm => param(phase_plasticityInstance(p)), & dot => dotState(phase_plasticityInstance(p)), & delta => deltaState(phase_plasticityInstance(p)), & stt => state(phase_plasticityInstance(p))) @@ -295,8 +291,7 @@ subroutine plastic_kinehardening_init endif end do - param(instance)%outputID = prm%outputID - nslip = prm%totalNslip + !-------------------------------------------------------------------------------------------------- ! allocate state arrays NipcMyPhase = count(material_phase == p) ! number of constituents with my phase @@ -305,27 +300,27 @@ subroutine plastic_kinehardening_init sizeState = sizeDotState + sizeDeltaState call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & - nSlip,0_pInt,0_pInt) + prm%totalNslip,0_pInt,0_pInt) plasticState(p)%sizePostResults = sum(plastic_kinehardening_sizePostResult(:,phase_plasticityInstance(p))) plasticState(p)%offsetDeltaState = sizeDotState startIndex = 1_pInt - endIndex = nSlip + endIndex = prm%totalNslip stt%crss => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) dot%crss => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) stt%crss = spread(prm%crss0, 2, NipcMyPhase) plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip + endIndex = endIndex + prm%totalNslip stt%crss_back => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) dot%crss_back => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip + endIndex = endIndex + prm%totalNslip stt%accshear => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) dot%accshear => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolShear @@ -335,17 +330,17 @@ subroutine plastic_kinehardening_init o = endIndex startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip + endIndex = endIndex + prm%totalNslip stt%sense => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) delta%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip + endIndex = endIndex + prm%totalNslip stt%chi0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) delta%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) startIndex = endIndex + 1_pInt - endIndex = endIndex + nSlip + endIndex = endIndex + prm%totalNslip stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) delta%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) @@ -399,15 +394,15 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) j,k,l,m,n - real(pReal), dimension(paramNew(instance)%totalNslip) :: & + real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg, & dgdot_dtau_pos,dgdot_dtau_neg - associate(prm => paramNew(instance), stt => state(instance)) + associate(prm => 
param(instance), stt => state(instance)) Lp = 0.0_pReal dLp_dMp = 0.0_pReal - call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) + call kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) do j = 1_pInt, prm%totalNslip Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) @@ -436,33 +431,30 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) instance, & of - real(pReal), dimension(paramNew(instance)%totalNslip) :: & + real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg, & sense - associate( prm => paramNew(instance), stt => state(instance), del => deltaState(instance)) + associate( prm => param(instance), stt => state(instance), del => deltaState(instance)) - call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg) + call kinetics(Mp,instance,of,gdot_pos,gdot_neg) sense = merge(state(instance)%sense(:,of), & ! keep existing... sign(1.0_pReal,gdot_pos+gdot_neg), & ! ...or have a defined dEq0(gdot_pos+gdot_neg,1e-10_pReal)) ! current sense of shear direction #ifdef DEBUG -! if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & +! if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & ! ToDo: We need an inverse mapping of ->el, ip, co ! .and. ((el == debug_e .and. ip == debug_i .and. ipc == debug_g) & ! .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then ! write(6,'(a)') '======= kinehardening delta state =======' ! endif -#endif - - -#ifdef DEBUG ! if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & ! .and. ((el == debug_e .and. ip == debug_i .and. ipc == debug_g) & ! .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then ! write(6,'(i2,1x,f7.4,1x,f7.4)') j,sense(j),state(instance)%sense(j,of) ! endif #endif + !-------------------------------------------------------------------------------------------------- ! switch in sense of shear? 
where(dNeq(sense,stt%sense(:,of),0.1_pReal)) @@ -480,7 +472,6 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) end subroutine plastic_kinehardening_deltaState - !-------------------------------------------------------------------------------------------------- !> @brief calculates the rate of change of microstructure !-------------------------------------------------------------------------------------------------- @@ -491,20 +482,19 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) Mp !< Mandel stress integer(pInt), intent(in) :: & instance, & - of !< element !< microstructure state + of integer(pInt) :: & j - - real(pReal), dimension(paramNew(instance)%totalNslip) :: & + real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg real(pReal) :: & sumGamma - associate( prm => paramNew(instance), stt => state(instance), dot => dotState(instance)) + associate( prm => param(instance), stt => state(instance), dot => dotState(instance)) - call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg) + call kinetics(Mp,instance,of,gdot_pos,gdot_neg) dot%accshear(:,of) = abs(gdot_pos+gdot_neg) sumGamma = sum(stt%accshear(:,of)) @@ -546,16 +536,16 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) integer(pInt) :: & o,c,j - real(pReal), dimension(paramNew(instance)%totalNslip) :: & + real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg postResults = 0.0_pReal c = 0_pInt - associate( prm => paramNew(instance), stt => state(instance)) + associate( prm => param(instance), stt => state(instance)) - call plastic_kinehardening_shearRates(Mp,instance,of,gdot_pos,gdot_neg) + call kinetics(Mp,instance,of,gdot_pos,gdot_neg) outputsLoop: do o = 1_pInt,plastic_kinehardening_Noutput(instance) select case(prm%outputID(o)) @@ -584,12 +574,10 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) c = c + prm%totalNslip case (shearrate_ID) - postResults(c+1_pInt:c+prm%totalNslip) = gdot_pos+gdot_neg c = c + prm%totalNslip case (resolvedstress_ID) - do j = 1_pInt, prm%totalNslip postResults(c+j) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,j)) enddo @@ -619,33 +607,30 @@ pure subroutine kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_d integer(pInt), intent(in) :: & instance, & of - real(pReal), dimension(paramNew(instance)%totalNslip), intent(out) :: & + real(pReal), dimension(param(instance)%totalNslip), intent(out) :: & gdot_pos, & gdot_neg - real(pReal), dimension(paramNew(instance)%totalNslip), optional, intent(out) :: & + real(pReal), dimension(param(instance)%totalNslip), optional, intent(out) :: & dgdot_dtau_pos, & dgdot_dtau_neg - real(pReal), dimension(paramNew(instance)%totalNslip) :: & + real(pReal), dimension(param(instance)%totalNslip) :: & tau_pos, & tau_neg integer(pInt) :: i logical :: nonSchmidActive - associate( prm => paramNew(instance), stt => state(instance)) + associate( prm => param(instance), stt => state(instance)) nonSchmidActive = size(prm%nonSchmidCoeff) > 0_pInt do i = 1_pInt, prm%totalNslip - tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) - tau_neg(i) = merge(math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)), & + tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) - stt%crss_back(i,of) + tau_neg(i) = merge(math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)) - stt%crss_back(i,of), & 0.0_pReal, nonSchmidActive) enddo - tau_pos = tau_pos - stt%crss_back(:,of) - tau_neg = tau_neg - stt%crss_back(:,of) - where(dNeq0(tau_pos)) 
gdot_pos = prm%gdot0 * merge(0.5_pReal,1.0_pReal, nonSchmidActive) & ! 1/2 if non-Schmid active * sign(abs(tau_pos/stt%crss(:,of))**prm%n_slip, tau_pos) @@ -678,63 +663,4 @@ pure subroutine kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_d end subroutine kinetics - -!-------------------------------------------------------------------------------------------------- -!> @brief calculation of shear rates (\dot \gamma) -!-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_shearRates(Mp,instance,of, & - gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) - use prec - use math - - implicit none - real(pReal), dimension(3,3), intent(in) :: & - Mp - integer(pInt), intent(in) :: & - instance, & !< instance of that phase - of !< index of phaseMember - real(pReal), dimension(paramNew(instance)%totalNslip), intent(out) :: & - gdot_pos, & !< shear rates from positive line segments - gdot_neg !< shear rates from negative line segments - real(pReal), dimension(paramNew(instance)%totalNslip), intent(out),optional :: & - dgdot_dtau_pos, & - dgdot_dtau_neg - real(pReal), dimension(paramNew(instance)%totalNslip) :: & - tau_pos, & !< shear stress on positive line segments - tau_neg !< shear stress on negative line segments - integer(pInt) :: & - i - - associate(prm => paramNew(instance), stt => state(instance)) - do i = 1_pInt, prm%totalNslip - tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) - tau_neg(i) = math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)) - enddo - - tau_pos = tau_pos - stt%crss_back(:,of) - tau_neg = tau_neg - stt%crss_back(:,of) - - - gdot_pos = sign(0.5_pReal * prm%gdot0 *(abs(tau_pos)/ state(instance)%crss(:,of))**prm%n_slip,tau_pos) - gdot_neg = sign(0.5_pReal * prm%gdot0 *(abs(tau_neg)/ state(instance)%crss(:,of))**prm%n_slip,tau_neg) - - if (present(dgdot_dtau_pos)) then - where(dNeq0(gdot_pos)) - dgdot_dtau_pos = gdot_pos*prm%n_slip/tau_pos - else where - dgdot_dtau_pos = 0.0_pReal - end where - endif - if (present(dgdot_dtau_neg)) then - where(dNeq0(gdot_neg)) - dgdot_dtau_neg = gdot_neg*prm%n_slip/tau_neg - else where - dgdot_dtau_neg = 0.0_pReal - end where - endif - -end associate - -end subroutine plastic_kinehardening_shearRates - end module plastic_kinehardening From fa88065591695e82c3e93f8d0c9c771a760adfa1 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Tue, 25 Dec 2018 14:20:01 +0100 Subject: [PATCH 77/89] small polishing --- src/plastic_kinematichardening.f90 | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 58c8c4529..d8076d1a9 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -374,9 +374,7 @@ subroutine plastic_kinehardening_init !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent !-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) - use prec, only: & - dNeq0 +pure subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) implicit none real(pReal), dimension(3,3), intent(out) :: & @@ -392,13 +390,12 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) integer(pInt) :: & j,k,l,m,n - - real(pReal), dimension(param(instance)%totalNslip) :: & 
gdot_pos,gdot_neg, & dgdot_dtau_pos,dgdot_dtau_neg associate(prm => param(instance), stt => state(instance)) + Lp = 0.0_pReal dLp_dMp = 0.0_pReal @@ -411,7 +408,8 @@ subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) + dgdot_dtau_pos(j)*prm%Schmid_slip(k,l,j)*prm%nonSchmid_pos(m,n,j) & + dgdot_dtau_neg(j)*prm%Schmid_slip(k,l,j)*prm%nonSchmid_neg(m,n,j) enddo -end associate + + end associate end subroutine plastic_kinehardening_LpAndItsTangent @@ -628,7 +626,7 @@ pure subroutine kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_d do i = 1_pInt, prm%totalNslip tau_pos(i) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,i)) - stt%crss_back(i,of) tau_neg(i) = merge(math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,i)) - stt%crss_back(i,of), & - 0.0_pReal, nonSchmidActive) + 0.0_pReal, nonSchmidActive) enddo where(dNeq0(tau_pos)) From 2000eff578f0efd6873be85127802c541ff393d4 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 30 Dec 2018 15:39:48 +0100 Subject: [PATCH 78/89] re-enabled debug --- src/plastic_kinematichardening.f90 | 47 ++++++++++++++++++++---------- 1 file changed, 32 insertions(+), 15 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index d8076d1a9..b30131535 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -64,7 +64,8 @@ module plastic_kinehardening nonSchmid_pos, & nonSchmid_neg integer(pInt) :: & - totalNslip !< total number of active slip system + totalNslip, & !< total number of active slip system + of_debug = 0_pInt integer(pInt), allocatable, dimension(:) :: & Nslip !< number of active slip systems for each family integer(kind(undefined_ID)), allocatable, dimension(:) :: & @@ -114,6 +115,12 @@ subroutine plastic_kinehardening_init use prec, only: & dEq0 use debug, only: & +#ifdef DEBUG + debug_e, & + debug_i, & + debug_g, & + debug_levelExtensive, & +#endif debug_level, & debug_constitutive,& debug_levelBasic @@ -123,6 +130,9 @@ subroutine plastic_kinehardening_init IO_error, & IO_timeStamp use material, only: & +#ifdef DEBUG + phasememberAt, & +#endif phase_plasticity, & phase_plasticityInstance, & phase_Noutput, & @@ -195,8 +205,13 @@ subroutine plastic_kinehardening_init delta => deltaState(phase_plasticityInstance(p)), & stt => state(phase_plasticityInstance(p))) - structure = config_phase(p)%getString('lattice_structure') +#ifdef DEBUG + if (p==material_phase(debug_g,debug_i,debug_e)) then + prm%of_debug = phasememberAt(debug_g,debug_i,debug_e) + endif +#endif + structure = config_phase(p)%getString('lattice_structure') !-------------------------------------------------------------------------------------------------- ! optional parameters that need to be defined prm%aTolResistance = config_phase(p)%getFloat('atol_resistance',defaultVal=1.0_pReal) @@ -421,7 +436,14 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) use prec, only: & dNeq, & dEq0 - +#ifdef DEBUG + use debug, only: & + debug_level, & + debug_constitutive,& + debug_levelExtensive, & + debug_levelSelective +#endif + implicit none real(pReal), dimension(3,3), intent(in) :: & Mp !< Mandel stress @@ -441,16 +463,12 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) dEq0(gdot_pos+gdot_neg,1e-10_pReal)) ! current sense of shear direction #ifdef DEBUG -! if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & ! ToDo: We need an inverse mapping of ->el, ip, co -! .and. ((el == debug_e .and. ip == debug_i .and. ipc == debug_g) & -! .or. .not. 
iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then -! write(6,'(a)') '======= kinehardening delta state =======' -! endif -! if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & -! .and. ((el == debug_e .and. ip == debug_i .and. ipc == debug_g) & -! .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then -! write(6,'(i2,1x,f7.4,1x,f7.4)') j,sense(j),state(instance)%sense(j,of) -! endif + if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & + .and. (of == prm%of_debug & + .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then + write(6,'(a)') '======= kinehardening delta state =======' + write(6,*) sense,state(instance)%sense(:,of) + endif #endif !-------------------------------------------------------------------------------------------------- ! switch in sense of shear? @@ -537,9 +555,8 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg - postResults = 0.0_pReal - c = 0_pInt + postResults = 0.0_pReal + c = 0_pInt associate( prm => param(instance), stt => state(instance)) From 8277e960c081c62cd9d8fd7aa44edb47be8e35be Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 6 Jan 2019 15:07:50 +0100 Subject: [PATCH 79/89] [skip ci] updated version information after successful test of v2.0.2-1291-g19df6f8a --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 8d5912448..6efd0b994 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-1277-g53bc24cc +v2.0.2-1291-g19df6f8a From ebf028421b9968400a17727f56381358ff0c6c5e Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 6 Jan 2019 20:57:40 +0100 Subject: [PATCH 80/89] corrected unit --- src/plastic_phenopowerlaw.f90 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/plastic_phenopowerlaw.f90 b/src/plastic_phenopowerlaw.f90 index 5df23da94..0fe63737e 100644 --- a/src/plastic_phenopowerlaw.f90 +++ b/src/plastic_phenopowerlaw.f90 @@ -595,7 +595,7 @@ subroutine plastic_phenopowerlaw_results(instance,group) case (resistance_slip_ID) call results_writeVectorDataset(group,stt%xi_slip,'xi_slip','Pa') case (accumulatedshear_slip_ID) - call results_writeVectorDataset(group,stt%gamma_slip,'gamma_slip','1/s') + call results_writeVectorDataset(group,stt%gamma_slip,'gamma_slip','-') end select enddo outputsLoop end associate From 15d1789a195d5bdfcff5f2f10d69993c179023d4 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 6 Jan 2019 21:18:35 +0100 Subject: [PATCH 81/89] following best practice from phenopowerlaw --- src/plastic_kinematichardening.f90 | 341 +++++++++++++---------------- src/plastic_phenopowerlaw.f90 | 13 +- 2 files changed, 164 insertions(+), 190 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index b30131535..20a09c7e9 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -3,52 +3,41 @@ !> @author Zhuowen Zhao, Michigan State University !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH !> @brief Phenomenological crystal plasticity using a power law formulation for the shear rates -!! 
and a Voce-type kinematic hardening rule !-------------------------------------------------------------------------------------------------- module plastic_kinehardening use prec, only: & pReal,& pInt - + implicit none private integer(pInt), dimension(:,:), allocatable, target, public :: & plastic_kinehardening_sizePostResult !< size of each post result output - character(len=64), dimension(:,:), allocatable, target, public :: & plastic_kinehardening_output !< name of each post result output - - integer(pInt), dimension(:), allocatable, target, public :: & - plastic_kinehardening_Noutput !< number of outputs per instance - - - - integer(pInt), dimension(:,:), allocatable, private :: & - plastic_kinehardening_Nslip !< active number of slip systems per family (input parameter, per family) - enum, bind(c) enumerator :: & undefined_ID, & - crss_ID, & !< critical resolved stress - crss_back_ID, & !< critical resolved back stress - sense_ID, & !< sense of acting shear stress (-1 or +1) - chi0_ID, & !< backstress at last switch of stress sense (positive?) - gamma0_ID, & !< accumulated shear at last switch of stress sense (at current switch?) + crss_ID, & !< critical resolved stress + crss_back_ID, & !< critical resolved back stress + sense_ID, & !< sense of acting shear stress (-1 or +1) + chi0_ID, & !< backstress at last switch of stress sense (positive?) + gamma0_ID, & !< accumulated shear at last switch of stress sense (at current switch?) accshear_ID, & shearrate_ID, & resolvedstress_ID end enum - - type, private :: tParameters !< container type for internal constitutive parameters + type, private :: tParameters real(pReal) :: & gdot0, & !< reference shear strain rate for slip (input parameter) n_slip, & !< stress exponent for slip (input parameter) aTolResistance, & aTolShear - real(pReal), dimension(:), allocatable, private :: & - crss0, & !< initial critical shear stress for slip (input parameter, per family) + real(pReal), allocatable, dimension(:) :: & + crss0, & !< initial critical shear stress for slip (input parameter, per family) theta0, & !< initial hardening rate of forward stress for each slip theta1, & !< asymptotic hardening rate of forward stress for each slip > theta0_b, & !< initial hardening rate of back stress for each slip > @@ -56,8 +45,8 @@ module plastic_kinehardening tau1, & tau1_b, & nonSchmidCoeff - real(pReal), dimension(:,:), allocatable, private :: & - interaction_slipslip !< latent hardening matrix + real(pReal), allocatable, dimension(:,:) :: & + interaction_slipslip !< slip resistance from slip activity real(pReal), allocatable, dimension(:,:,:) :: & Schmid_slip, & Schmid_twin, & @@ -72,6 +61,8 @@ module plastic_kinehardening outputID !< ID of each post result output end type + type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) + type, private :: tKinehardeningState real(pReal), pointer, dimension(:,:) :: & !< vectors along NipcMyInstance crss, & !< critical resolved stress @@ -80,18 +71,13 @@ module plastic_kinehardening chi0, & !< backstress at last switch of stress sense gamma0, & !< accumulated shear at last switch of stress sense accshear !< accumulated (absolute) shear - end type - type(tParameters), dimension(:), allocatable, private :: & - param !< containers of constitutive parameters (len Ninstance) - type(tKinehardeningState), allocatable, dimension(:), private :: & dotState, & deltaState, & state - public :: & plastic_kinehardening_init, & plastic_kinehardening_LpAndItsTangent, & 
@@ -105,15 +91,19 @@ module plastic_kinehardening contains - !-------------------------------------------------------------------------------------------------- !> @brief module initialization !> @details reads in material parameters, allocates arrays, and does sanity checks !-------------------------------------------------------------------------------------------------- subroutine plastic_kinehardening_init - use, intrinsic :: iso_fortran_env ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment) +#if defined(__GFORTRAN__) || __INTEL_COMPILER >= 1800 + use, intrinsic :: iso_fortran_env, only: & + compiler_version, & + compiler_options +#endif use prec, only: & - dEq0 + dEq0, & + pStringLen use debug, only: & #ifdef DEBUG debug_e, & @@ -142,68 +132,55 @@ subroutine plastic_kinehardening_init material_phase, & plasticState use config, only: & - config_phase, & - MATERIAL_partPhase + MATERIAL_partPhase, & + config_phase use lattice implicit none - - integer(kind(undefined_ID)) :: & - output_ID integer(pInt) :: & - o, i, p, & - instance, & Ninstance, & - NipcMyPhase, & - outputSize, & - offset_slip, & - startIndex, endIndex, & - sizeDotState, & - sizeState, & - sizeDeltaState + p, i, o, & + NipcMyPhase, outputSize, & + sizeState, sizeDeltaState, sizeDotState, & + startIndex, endIndex integer(pInt), dimension(0), parameter :: emptyIntArray = [integer(pInt)::] real(pReal), dimension(0), parameter :: emptyRealArray = [real(pReal)::] character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::] integer(kind(undefined_ID)) :: & - outputID !< ID of each post result output - - character(len=65536), dimension(:), allocatable :: & - outputs - character(len=65536) :: & - extmsg = '', & - structure = '' + outputID - write(6,'(/,a)') ' <<<+- constitutive_'//PLASTICITY_KINEHARDENING_label//' init -+>>>' + character(len=pStringLen) :: & + structure = '',& + extmsg = '' + character(len=65536), dimension(:), allocatable :: & + outputs + + write(6,'(/,a)') ' <<<+- plastic_'//PLASTICITY_KINEHARDENING_label//' init -+>>>' write(6,'(a15,a)') ' Current time: ',IO_timeStamp() #include "compilation_info.f90" Ninstance = int(count(phase_plasticity == PLASTICITY_KINEHARDENING_ID),pInt) - if (Ninstance == 0_pInt) return - if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0_pInt) & - write(6,'(a,1x,i5,/)') '# instances:',Ninstance - - allocate(plastic_kinehardening_sizePostResult(maxval(phase_Noutput),Ninstance), & - source=0_pInt) + write(6,'(a16,1x,i5,/)') '# instances:',Ninstance + + allocate(plastic_kinehardening_sizePostResult(maxval(phase_Noutput),Ninstance),source=0_pInt) allocate(plastic_kinehardening_output(maxval(phase_Noutput),Ninstance)) - plastic_kinehardening_output = '' - allocate(plastic_kinehardening_Noutput(Ninstance), source=0_pInt) - allocate(plastic_kinehardening_Nslip(lattice_maxNslipFamily,Ninstance), source=0_pInt) - - allocate(param(Ninstance)) ! one container of parameters per instance + plastic_kinehardening_output = '' + + allocate(param(Ninstance)) allocate(state(Ninstance)) allocate(dotState(Ninstance)) allocate(deltaState(Ninstance)) - + do p = 1_pInt, size(phase_plasticityInstance) if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) cycle - instance = phase_plasticityInstance(p) ! 
which instance of my phase associate(prm => param(phase_plasticityInstance(p)), & dot => dotState(phase_plasticityInstance(p)), & delta => deltaState(phase_plasticityInstance(p)), & - stt => state(phase_plasticityInstance(p))) + stt => state(phase_plasticityInstance(p)),& + config => config_phase(p)) #ifdef DEBUG if (p==material_phase(debug_g,debug_i,debug_e)) then @@ -211,11 +188,12 @@ subroutine plastic_kinehardening_init endif #endif - structure = config_phase(p)%getString('lattice_structure') + structure = config%getString('lattice_structure') + !-------------------------------------------------------------------------------------------------- ! optional parameters that need to be defined - prm%aTolResistance = config_phase(p)%getFloat('atol_resistance',defaultVal=1.0_pReal) - prm%aTolShear = config_phase(p)%getFloat('atol_shear', defaultVal=1.0e-6_pReal) + prm%aTolResistance = config%getFloat('atol_resistance',defaultVal=1.0_pReal) + prm%aTolShear = config%getFloat('atol_shear', defaultVal=1.0e-6_pReal) ! sanity checks if (prm%aTolResistance <= 0.0_pReal) extmsg = trim(extmsg)//'aTolresistance ' @@ -223,13 +201,13 @@ subroutine plastic_kinehardening_init !-------------------------------------------------------------------------------------------------- ! slip related parameters - prm%Nslip = config_phase(p)%getInts('nslip',defaultVal=emptyIntArray) + prm%Nslip = config%getInts('nslip',defaultVal=emptyIntArray) prm%totalNslip = sum(prm%Nslip) slipActive: if (prm%totalNslip > 0_pInt) then prm%Schmid_slip = lattice_SchmidMatrix_slip(prm%Nslip,structure(1:3),& - config_phase(p)%getFloat('c/a',defaultVal=0.0_pReal)) + config%getFloat('c/a',defaultVal=0.0_pReal)) if(structure=='bcc') then - prm%nonSchmidCoeff = config_phase(p)%getFloats('nonschmid_coefficients',& + prm%nonSchmidCoeff = config%getFloats('nonschmid_coefficients',& defaultVal = emptyRealArray) prm%nonSchmid_pos = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,+1_pInt) prm%nonSchmid_neg = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,-1_pInt) @@ -238,16 +216,16 @@ subroutine plastic_kinehardening_init prm%nonSchmid_neg = prm%Schmid_slip endif prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & - config_phase(p)%getFloats('interaction_slipslip'), & + config%getFloats('interaction_slipslip'), & structure(1:3)) - prm%crss0 = config_phase(p)%getFloats('crss0', requiredShape=shape(prm%Nslip)) - prm%tau1 = config_phase(p)%getFloats('tau1', requiredShape=shape(prm%Nslip)) - prm%tau1_b = config_phase(p)%getFloats('tau1_b', requiredShape=shape(prm%Nslip)) - prm%theta0 = config_phase(p)%getFloats('theta0', requiredShape=shape(prm%Nslip)) - prm%theta1 = config_phase(p)%getFloats('theta1', requiredShape=shape(prm%Nslip)) - prm%theta0_b = config_phase(p)%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) - prm%theta1_b = config_phase(p)%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) + prm%crss0 = config%getFloats('crss0', requiredShape=shape(prm%Nslip)) + prm%tau1 = config%getFloats('tau1', requiredShape=shape(prm%Nslip)) + prm%tau1_b = config%getFloats('tau1_b', requiredShape=shape(prm%Nslip)) + prm%theta0 = config%getFloats('theta0', requiredShape=shape(prm%Nslip)) + prm%theta1 = config%getFloats('theta1', requiredShape=shape(prm%Nslip)) + prm%theta0_b = config%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) + prm%theta1_b = config%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) ! 
expand: family => system prm%crss0 = math_expand(prm%crss0, prm%Nslip) @@ -258,15 +236,27 @@ subroutine plastic_kinehardening_init prm%theta0_b = math_expand(prm%theta0_b,prm%Nslip) prm%theta1_b = math_expand(prm%theta1_b,prm%Nslip) - prm%gdot0 = config_phase(p)%getFloat('gdot0') - prm%n_slip = config_phase(p)%getFloat('n_slip') + prm%gdot0 = config%getFloat('gdot0') + prm%n_slip = config%getFloat('n_slip') + +!-------------------------------------------------------------------------------------------------- +! sanity checks + + ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & + ! .and. param(instance)%crss0 (1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' crss0' + ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & + ! .and. param(instance)%tau1 (1:nSlipFamilies) <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1' + ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & + ! .and. param(instance)%tau1_b(1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' + ! if (param(instance)%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' + ! if (param(instance)%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' endif slipActive !-------------------------------------------------------------------------------------------------- ! output pararameters - outputs = config_phase(p)%getStrings('(output)',defaultVal=emptyStringArray) + outputs = config%getStrings('(output)',defaultVal=emptyStringArray) allocate(prm%outputID(0)) do i=1_pInt, size(outputs) outputID = undefined_ID @@ -299,7 +289,6 @@ subroutine plastic_kinehardening_init end select if (outputID /= undefined_ID) then - plastic_kinehardening_Noutput(instance) = plastic_kinehardening_Noutput(instance) + 1_pInt plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i) plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = outputSize prm%outputID = [prm%outputID , outputID] @@ -309,90 +298,71 @@ subroutine plastic_kinehardening_init !-------------------------------------------------------------------------------------------------- ! allocate state arrays - NipcMyPhase = count(material_phase == p) ! 
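! Each requested output above is validated with merge(): the identifier
! survives only if at least one slip system is active, otherwise it collapses
! to undefined_ID and the request is dropped. A minimal sketch of the idiom,
! shown for a hypothetical 'resistance' request:
!   outputID = merge(crss_ID, undefined_ID, prm%totalNslip > 0_pInt)
!   if (outputID /= undefined_ID) prm%outputID = [prm%outputID, outputID]
! The array constructor on the second line grows prm%outputID by one entry
! per accepted output.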
number of constituents with my phase + NipcMyPhase = count(material_phase == p) sizeDotState = size(['crss ','crss_back', 'accshear ']) * prm%TotalNslip sizeDeltaState = size(['sense ', 'chi0 ', 'gamma0' ]) * prm%TotalNslip sizeState = sizeDotState + sizeDeltaState call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & prm%totalNslip,0_pInt,0_pInt) - plasticState(p)%sizePostResults = sum(plastic_kinehardening_sizePostResult(:,phase_plasticityInstance(p))) plasticState(p)%offsetDeltaState = sizeDotState - - startIndex = 1_pInt - endIndex = prm%totalNslip - stt%crss => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) - dot%crss => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) - stt%crss = spread(prm%crss0, 2, NipcMyPhase) - plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance - - startIndex = endIndex + 1_pInt - endIndex = endIndex + prm%totalNslip - stt%crss_back => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) - dot%crss_back => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) - plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance - - startIndex = endIndex + 1_pInt - endIndex = endIndex + prm%totalNslip - stt%accshear => plasticState(p)%state (startIndex:endIndex,1:NipcMyPhase) - dot%accshear => plasticState(p)%dotState (startIndex:endIndex,1:NipcMyPhase) - plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolShear - -!---------------------------------------------------------------------------------------------- -!locally define deltaState alias - o = endIndex - - startIndex = endIndex + 1_pInt - endIndex = endIndex + prm%totalNslip - stt%sense => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) - delta%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) - - startIndex = endIndex + 1_pInt - endIndex = endIndex + prm%totalNslip - stt%chi0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) - delta%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) - - startIndex = endIndex + 1_pInt - endIndex = endIndex + prm%totalNslip - stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,1:NipcMyPhase) - delta%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,1:NipcMyPhase) - - plasticState(p)%state0 = plasticState(p)%state ! ToDo: this could be done centrally - - - offset_slip = plasticState(p)%nSlip - plasticState(p)%slipRate => & - plasticState(p)%dotState(offset_slip+1:offset_slip+plasticState(p)%nSlip,1:NipcMyPhase) - plasticState(p)%accumulatedSlip => & - plasticState(p)%state(offset_slip+1:offset_slip+plasticState(p)%nSlip,1:NipcMyPhase) - - end associate - end do - end subroutine plastic_kinehardening_init - !-------------------------------------------------------------------------------------------------- -! sanity checks - - ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & - ! .and. param(instance)%crss0 (1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' crss0' - ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & - ! .and. param(instance)%tau1 (1:nSlipFamilies) <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1' - ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt & - ! .and. param(instance)%tau1_b(1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' - ! if (param(instance)%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' - ! if (param(instance)%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' +! 
locally defined state aliases and initialization of state0 and aTolState
+   startIndex = 1_pInt
+   endIndex   = prm%totalNslip
+   stt%crss => plasticState(p)%state   (startIndex:endIndex,:)
+   dot%crss => plasticState(p)%dotState(startIndex:endIndex,:)
+   stt%crss = spread(prm%crss0, 2, NipcMyPhase)
+   plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance
+
+   startIndex = endIndex + 1_pInt
+   endIndex   = endIndex + prm%totalNslip
+   stt%crss_back => plasticState(p)%state   (startIndex:endIndex,:)
+   dot%crss_back => plasticState(p)%dotState(startIndex:endIndex,:)
+   plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance
+
+   startIndex = endIndex + 1_pInt
+   endIndex   = endIndex + prm%totalNslip
+   stt%accshear => plasticState(p)%state   (startIndex:endIndex,:)
+   dot%accshear => plasticState(p)%dotState(startIndex:endIndex,:)
+   plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolShear
+   ! global alias
+   plasticState(p)%slipRate        => plasticState(p)%dotState(startIndex:endIndex,:)
+   plasticState(p)%accumulatedSlip => plasticState(p)%state(startIndex:endIndex,:)
+
+   o = plasticState(p)%offsetDeltaState
+   startIndex = endIndex + 1_pInt
+   endIndex   = endIndex + prm%totalNslip
+   stt%sense   => plasticState(p)%state     (startIndex  :endIndex  ,:)
+   delta%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,:)
+
+   startIndex = endIndex + 1_pInt
+   endIndex   = endIndex + prm%totalNslip
+   stt%chi0   => plasticState(p)%state     (startIndex  :endIndex  ,:)
+   delta%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:)
+
+   startIndex = endIndex + 1_pInt
+   endIndex   = endIndex + prm%totalNslip
+   stt%gamma0   => plasticState(p)%state     (startIndex  :endIndex  ,:)
+   delta%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:)
+
+   plasticState(p)%state0 = plasticState(p)%state !
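! For reference, the resulting column layout of plasticState(p)%state
! (N = prm%totalNslip; deltaState rows are shifted by o = 3N):
!   rows    1..N    crss       (integrated via dotState)
!   rows  N+1..2N   crss_back  (integrated via dotState)
!   rows 2N+1..3N   accshear   (integrated via dotState)
!   rows 3N+1..4N   sense      (updated via deltaState)
!   rows 4N+1..5N   chi0       (updated via deltaState)
!   rows 5N+1..6N   gamma0     (updated via deltaState)
! which is why the deltaState pointers use the bounds startIndex-o:endIndex-o.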
ToDo: this could be done centrally + + end associate + enddo + +end subroutine plastic_kinehardening_init !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent !-------------------------------------------------------------------------------------------------- pure subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) - + implicit none - real(pReal), dimension(3,3), intent(out) :: & + real(pReal), dimension(3,3), intent(out) :: & Lp !< plastic velocity gradient real(pReal), dimension(3,3,3,3), intent(out) :: & dLp_dMp !< derivative of Lp with respect to the Mandel stress @@ -404,24 +374,24 @@ pure subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) of integer(pInt) :: & - j,k,l,m,n + i,k,l,m,n real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg, & dgdot_dtau_pos,dgdot_dtau_neg - associate(prm => param(instance), stt => state(instance)) - - Lp = 0.0_pReal + Lp = 0.0_pReal dLp_dMp = 0.0_pReal + associate(prm => param(instance), stt => state(instance)) + call kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) - do j = 1_pInt, prm%totalNslip - Lp = Lp + (gdot_pos(j)+gdot_neg(j))*prm%Schmid_slip(1:3,1:3,j) + do i = 1_pInt, prm%totalNslip + Lp = Lp + (gdot_pos(i)+gdot_neg(i))*prm%Schmid_slip(1:3,1:3,i) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & dLp_dMp(k,l,m,n) = dLp_dMp(k,l,m,n) & - + dgdot_dtau_pos(j)*prm%Schmid_slip(k,l,j)*prm%nonSchmid_pos(m,n,j) & - + dgdot_dtau_neg(j)*prm%Schmid_slip(k,l,j)*prm%nonSchmid_neg(m,n,j) + + dgdot_dtau_pos(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_pos(m,n,i) & + + dgdot_dtau_neg(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_neg(m,n,i) enddo end associate @@ -447,7 +417,7 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) implicit none real(pReal), dimension(3,3), intent(in) :: & Mp !< Mandel stress - integer(pInt), intent(in) :: & + integer(pInt), intent(in) :: & instance, & of @@ -455,7 +425,7 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) gdot_pos,gdot_neg, & sense - associate( prm => param(instance), stt => state(instance), del => deltaState(instance)) + associate(prm => param(instance), stt => state(instance), del => deltaState(instance)) call kinetics(Mp,instance,of,gdot_pos,gdot_neg) sense = merge(state(instance)%sense(:,of), & ! keep existing... 
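! kinetics() returns the signed shear rates; their sum fixes the current
! sense of shear. The merge above keeps the stored sense wherever the net
! rate is numerically zero (dEq0 with 1e-10 tolerance), so that noise cannot
! trigger a spurious load reversal. Only a genuine sign change, tested
! against a 0.1 tolerance further down in deltaState, then snapshots the
! backstress magnitude and accumulated shear; conceptually:
!   sense_new = merge(sense_old, sign(1.0_pReal,gdot), dEq0(gdot,1e-10_pReal))
!   if (dNeq(sense_new,sense_old,0.1_pReal)) record chi0 and gamma0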
@@ -496,12 +466,13 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) implicit none real(pReal), dimension(3,3), intent(in) :: & Mp !< Mandel stress - integer(pInt), intent(in) :: & + integer(pInt), intent(in) :: & instance, & of + integer(pInt) :: & - j + i real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg real(pReal) :: & @@ -514,11 +485,11 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) dot%accshear(:,of) = abs(gdot_pos+gdot_neg) sumGamma = sum(stt%accshear(:,of)) - do j = 1_pInt, prm%totalNslip - dot%crss(j,of) = dot_product(prm%interaction_SlipSlip(j,:),dot%accshear(:,of)) & - * ( prm%theta1(j) & - + (prm%theta0(j) - prm%theta1(j) + prm%theta0(j)*prm%theta1(j)*sumGamma/prm%tau1(j)) & - *exp(-sumGamma*prm%theta0(j)/prm%tau1(j)) & + do i = 1_pInt, prm%totalNslip + dot%crss(i,of) = dot_product(prm%interaction_SlipSlip(i,:),dot%accshear(:,of)) & + * ( prm%theta1(i) & + + (prm%theta0(i) - prm%theta1(i) + prm%theta0(i)*prm%theta1(i)*sumGamma/prm%tau1(i)) & + *exp(-sumGamma*prm%theta0(i)/prm%tau1(i)) & ) enddo dot%crss_back(:,of) = stt%sense(:,of)*dot%accshear(:,of) * & @@ -550,19 +521,18 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) real(pReal), dimension(sum(plastic_kinehardening_sizePostResult(:,instance))) :: & postResults integer(pInt) :: & - o,c,j - + o,c,i real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg c = 0_pInt - associate( prm => param(instance), stt => state(instance)) + associate(prm => param(instance), stt => state(instance)) call kinetics(Mp,instance,of,gdot_pos,gdot_neg) - outputsLoop: do o = 1_pInt,plastic_kinehardening_Noutput(instance) + outputsLoop: do o = 1_pInt,size(prm%outputID) select case(prm%outputID(o)) case (crss_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%crss(:,of) @@ -593,13 +563,14 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) c = c + prm%totalNslip case (resolvedstress_ID) - do j = 1_pInt, prm%totalNslip - postResults(c+j) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,j)) + do i = 1_pInt, prm%totalNslip + postResults(c+i) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,i)) enddo c = c + prm%totalNslip - + end select enddo outputsLoop + end associate end function plastic_kinehardening_postResults @@ -607,25 +578,27 @@ end function plastic_kinehardening_postResults !-------------------------------------------------------------------------------------------------- !> @brief calculates shear rates on slip systems and derivatives with respect to resolved stress -!> @details: Shear rates are calculated only optionally. NOTE: Against the common convention, the -!> result (i.e. intent(out)) variables are the last to have the optional arguments at the end +!> @details: Shear rates are calculated only optionally. +! NOTE: Against the common convention, the result (i.e. intent(out)) variables are the last to +! 
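! Written out, the forward-resistance evolution above is a Voce-type law:
! per system i, with total accumulated shear Gamma = sum(stt%accshear(:,of)),
!   crss_dot(i) = sum_j h(i,j) * |gdot(j)|
!                 * [ theta1 + (theta0 - theta1 + theta0*theta1*Gamma/tau1)
!                              * exp(-Gamma*theta0/tau1) ]
! and the back-stress rate has the same shape, but with theta0_b/theta1_b,
! the shifted saturation tau1_b + chi0, and the shear accumulated since the
! last load reversal, accshear - gamma0, in place of Gamma.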
have the optional arguments at the end !-------------------------------------------------------------------------------------------------- -pure subroutine kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) +pure subroutine kinetics(Mp,instance,of, & + gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) use prec, only: & dNeq0 use math, only: & math_mul33xx33 implicit none - real(pReal), dimension(3,3), intent(in) :: & - Mp - integer(pInt), intent(in) :: & - instance, & + real(pReal), dimension(3,3), intent(in) :: & + Mp !< Mandel stress + integer(pInt), intent(in) :: & + instance, & of real(pReal), dimension(param(instance)%totalNslip), intent(out) :: & gdot_pos, & gdot_neg - real(pReal), dimension(param(instance)%totalNslip), optional, intent(out) :: & + real(pReal), dimension(param(instance)%totalNslip), intent(out), optional :: & dgdot_dtau_pos, & dgdot_dtau_neg @@ -636,7 +609,7 @@ pure subroutine kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_d integer(pInt) :: i logical :: nonSchmidActive - associate( prm => param(instance), stt => state(instance)) + associate(prm => param(instance), stt => state(instance)) nonSchmidActive = size(prm%nonSchmidCoeff) > 0_pInt diff --git a/src/plastic_phenopowerlaw.f90 b/src/plastic_phenopowerlaw.f90 index 1e42876f9..16aac1ead 100644 --- a/src/plastic_phenopowerlaw.f90 +++ b/src/plastic_phenopowerlaw.f90 @@ -77,6 +77,7 @@ module plastic_phenopowerlaw type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) + type, private :: tPhenopowerlawState real(pReal), pointer, dimension(:,:) :: & xi_slip, & @@ -392,9 +393,9 @@ end subroutine plastic_phenopowerlaw_init !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent !> @details asumme that deformation by dislocation glide affects twinned and untwinned volume -! equally (Taylor assumption). Twinning happens only in untwinned volume ( +! equally (Taylor assumption). Twinning happens only in untwinned volume !-------------------------------------------------------------------------------------------------- -subroutine plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) +pure subroutine plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) implicit none real(pReal), dimension(3,3), intent(out) :: & @@ -411,11 +412,11 @@ subroutine plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) integer(pInt) :: & i,k,l,m,n real(pReal), dimension(param(instance)%totalNslip) :: & - dgdot_dtauslip_pos,dgdot_dtauslip_neg, & - gdot_slip_pos,gdot_slip_neg + gdot_slip_pos,gdot_slip_neg, & + dgdot_dtauslip_pos,dgdot_dtauslip_neg real(pReal), dimension(param(instance)%totalNtwin) :: & gdot_twin,dgdot_dtautwin - + Lp = 0.0_pReal dLp_dMp = 0.0_pReal @@ -626,7 +627,7 @@ pure subroutine kinetics_slip(Mp,instance,of, & end where where(dNeq0(tau_slip_neg)) - gdot_slip_neg = 0.5_pReal*prm%gdot0_slip & + gdot_slip_neg = prm%gdot0_slip * 0.5_pReal & ! 
only used if non-Schmid active, always 1/2 * sign(abs(tau_slip_neg/stt%xi_slip(:,of))**prm%n_slip, tau_slip_neg) else where gdot_slip_neg = 0.0_pReal From aa5d3bf9a3214d47e538850959e92076d5dbd313 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 7 Jan 2019 07:07:55 +0100 Subject: [PATCH 82/89] simplifications --- src/plastic_kinematichardening.f90 | 103 ++++++++++++----------------- 1 file changed, 43 insertions(+), 60 deletions(-) diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 20a09c7e9..f514ac78d 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -178,7 +178,7 @@ subroutine plastic_kinehardening_init if (phase_plasticity(p) /= PLASTICITY_KINEHARDENING_ID) cycle associate(prm => param(phase_plasticityInstance(p)), & dot => dotState(phase_plasticityInstance(p)), & - delta => deltaState(phase_plasticityInstance(p)), & + dlt => deltaState(phase_plasticityInstance(p)), & stt => state(phase_plasticityInstance(p)),& config => config_phase(p)) @@ -196,8 +196,8 @@ subroutine plastic_kinehardening_init prm%aTolShear = config%getFloat('atol_shear', defaultVal=1.0e-6_pReal) ! sanity checks - if (prm%aTolResistance <= 0.0_pReal) extmsg = trim(extmsg)//'aTolresistance ' - if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//'aTolShear ' + if (prm%aTolResistance <= 0.0_pReal) extmsg = trim(extmsg)//' aTolresistance' + if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//' aTolShear' !-------------------------------------------------------------------------------------------------- ! slip related parameters @@ -208,7 +208,7 @@ subroutine plastic_kinehardening_init config%getFloat('c/a',defaultVal=0.0_pReal)) if(structure=='bcc') then prm%nonSchmidCoeff = config%getFloats('nonschmid_coefficients',& - defaultVal = emptyRealArray) + defaultVal = emptyRealArray) prm%nonSchmid_pos = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,+1_pInt) prm%nonSchmid_neg = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,-1_pInt) else @@ -219,25 +219,27 @@ subroutine plastic_kinehardening_init config%getFloats('interaction_slipslip'), & structure(1:3)) - prm%crss0 = config%getFloats('crss0', requiredShape=shape(prm%Nslip)) - prm%tau1 = config%getFloats('tau1', requiredShape=shape(prm%Nslip)) - prm%tau1_b = config%getFloats('tau1_b', requiredShape=shape(prm%Nslip)) - prm%theta0 = config%getFloats('theta0', requiredShape=shape(prm%Nslip)) - prm%theta1 = config%getFloats('theta1', requiredShape=shape(prm%Nslip)) - prm%theta0_b = config%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) - prm%theta1_b = config%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) + prm%crss0 = config%getFloats('crss0', requiredShape=shape(prm%Nslip)) + prm%tau1 = config%getFloats('tau1', requiredShape=shape(prm%Nslip)) + prm%tau1_b = config%getFloats('tau1_b', requiredShape=shape(prm%Nslip)) + prm%theta0 = config%getFloats('theta0', requiredShape=shape(prm%Nslip)) + prm%theta1 = config%getFloats('theta1', requiredShape=shape(prm%Nslip)) + prm%theta0_b = config%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) + prm%theta1_b = config%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) + + prm%gdot0 = config%getFloat('gdot0') + prm%n_slip = config%getFloat('n_slip') ! 
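! The factor 1/2 noted above is systematic: with non-Schmid terms active,
! every system is evaluated once with the positive and once with the negative
! stress projection and each branch carries half the reference rate; without
! them, the negative branch is switched off via merge(...,0.0_pReal,
! nonSchmidActive) and the positive branch carries the full rate. A sketch of
! the underlying power law, using hypothetical scalar names:
!   gdot = merge(0.5_pReal,1.0_pReal,nonSchmidActive) * gdot0 &
!        * sign( abs(tau/crss)**n , tau )
! In kinehardening, tau already contains the back-stress correction.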
expand: family => system - prm%crss0 = math_expand(prm%crss0, prm%Nslip) - prm%tau1 = math_expand(prm%tau1,prm%Nslip) - prm%tau1_b = math_expand(prm%tau1_b, prm%Nslip) - prm%theta0 = math_expand(prm%theta0,prm%Nslip) - prm%theta1 = math_expand(prm%theta1,prm%Nslip) + prm%crss0 = math_expand(prm%crss0, prm%Nslip) + prm%tau1 = math_expand(prm%tau1, prm%Nslip) + prm%tau1_b = math_expand(prm%tau1_b, prm%Nslip) + prm%theta0 = math_expand(prm%theta0, prm%Nslip) + prm%theta1 = math_expand(prm%theta1, prm%Nslip) prm%theta0_b = math_expand(prm%theta0_b,prm%Nslip) prm%theta1_b = math_expand(prm%theta1_b,prm%Nslip) - prm%gdot0 = config%getFloat('gdot0') - prm%n_slip = config%getFloat('n_slip') + !-------------------------------------------------------------------------------------------------- ! sanity checks @@ -260,31 +262,25 @@ subroutine plastic_kinehardening_init allocate(prm%outputID(0)) do i=1_pInt, size(outputs) outputID = undefined_ID + outputSize = prm%totalNslip select case(outputs(i)) + case ('resistance') outputID = merge(crss_ID,undefined_ID,prm%totalNslip>0_pInt) - outputSize = prm%totalNslip case ('accumulatedshear') outputID = merge(accshear_ID,undefined_ID,prm%totalNslip>0_pInt) - outputSize = prm%totalNslip case ('shearrate') outputID = merge(shearrate_ID,undefined_ID,prm%totalNslip>0_pInt) - outputSize = prm%totalNslip case ('resolvedstress') outputID = merge(resolvedstress_ID,undefined_ID,prm%totalNslip>0_pInt) - outputSize = prm%totalNslip case ('backstress') outputID = merge(crss_back_ID,undefined_ID,prm%totalNslip>0_pInt) - outputSize = prm%totalNslip case ('sense') outputID = merge(sense_ID,undefined_ID,prm%totalNslip>0_pInt) - outputSize = prm%totalNslip case ('chi0') outputID = merge(chi0_ID,undefined_ID,prm%totalNslip>0_pInt) - outputSize = prm%totalNslip case ('gamma0') outputID = merge(gamma0_ID,undefined_ID,prm%totalNslip>0_pInt) - outputSize = prm%totalNslip end select @@ -335,18 +331,18 @@ subroutine plastic_kinehardening_init o = plasticState(p)%offsetDeltaState startIndex = endIndex + 1_pInt endIndex = endIndex + prm%totalNslip - stt%sense => plasticState(p)%state (startIndex :endIndex ,:) - delta%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) + stt%sense => plasticState(p)%state (startIndex :endIndex ,:) + dlt%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) startIndex = endIndex + 1_pInt endIndex = endIndex + prm%totalNslip - stt%chi0 => plasticState(p)%state (startIndex :endIndex ,:) - delta%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) + stt%chi0 => plasticState(p)%state (startIndex :endIndex ,:) + dlt%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) startIndex = endIndex + 1_pInt endIndex = endIndex + prm%totalNslip - stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,:) - delta%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) + stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,:) + dlt%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) plasticState(p)%state0 = plasticState(p)%state ! ToDo: this could be done centrally @@ -425,7 +421,7 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) gdot_pos,gdot_neg, & sense - associate(prm => param(instance), stt => state(instance), del => deltaState(instance)) + associate(prm => param(instance), stt => state(instance), dlt => deltaState(instance)) call kinetics(Mp,instance,of,gdot_pos,gdot_neg) sense = merge(state(instance)%sense(:,of), & ! keep existing... 
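! The family => system expansion above turns one value per slip family into
! one value per slip system. A minimal sketch of the semantics assumed for
! math_expand (hypothetical standalone version):
!   pure function expand(what,how) result(expanded)
!     real(pReal),    dimension(:), intent(in) :: what  ! per-family values
!     integer(pInt),  dimension(:), intent(in) :: how   ! systems per family
!     real(pReal),    dimension(sum(how))      :: expanded
!     integer(pInt) :: i
!     do i = 1_pInt, size(how)
!       expanded(sum(how(1:i-1))+1:sum(how(1:i))) = what(i)
!     enddo
!   end function expand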
@@ -444,13 +440,13 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) !-------------------------------------------------------------------------------------------------- ! switch in sense of shear? where(dNeq(sense,stt%sense(:,of),0.1_pReal)) - del%sense (:,of) = sense - stt%sense(:,of) ! switch sense - del%chi0 (:,of) = abs(stt%crss_back(:,of)) - stt%chi0(:,of) ! remember current backstress magnitude - del%gamma0(:,of) = stt%accshear(:,of) - stt%gamma0(:,of) ! remember current accumulated shear + dlt%sense (:,of) = sense - stt%sense(:,of) ! switch sense + dlt%chi0 (:,of) = abs(stt%crss_back(:,of)) - stt%chi0(:,of) ! remember current backstress magnitude + dlt%gamma0(:,of) = stt%accshear(:,of) - stt%gamma0(:,of) ! remember current accumulated shear else where - del%sense (:,of) = 0.0_pReal - del%chi0 (:,of) = 0.0_pReal - del%gamma0(:,of) = 0.0_pReal + dlt%sense (:,of) = 0.0_pReal + dlt%chi0 (:,of) = 0.0_pReal + dlt%gamma0(:,of) = 0.0_pReal end where end associate @@ -470,7 +466,6 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) instance, & of - integer(pInt) :: & i real(pReal), dimension(param(instance)%totalNslip) :: & @@ -478,8 +473,7 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) real(pReal) :: & sumGamma - - associate( prm => param(instance), stt => state(instance), dot => dotState(instance)) + associate(prm => param(instance), stt => state(instance), dot => dotState(instance)) call kinetics(Mp,instance,of,gdot_pos,gdot_neg) dot%accshear(:,of) = abs(gdot_pos+gdot_neg) @@ -489,7 +483,7 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) dot%crss(i,of) = dot_product(prm%interaction_SlipSlip(i,:),dot%accshear(:,of)) & * ( prm%theta1(i) & + (prm%theta0(i) - prm%theta1(i) + prm%theta0(i)*prm%theta1(i)*sumGamma/prm%tau1(i)) & - *exp(-sumGamma*prm%theta0(i)/prm%tau1(i)) & + * exp(-sumGamma*prm%theta0(i)/prm%tau1(i)) & ) enddo dot%crss_back(:,of) = stt%sense(:,of)*dot%accshear(:,of) * & @@ -534,41 +528,30 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) outputsLoop: do o = 1_pInt,size(prm%outputID) select case(prm%outputID(o)) + case (crss_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%crss(:,of) - c = c + prm%totalNslip - case(crss_back_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%crss_back(:,of) - c = c + prm%totalNslip - case (sense_ID) - postResults(c+1_pInt:c+prm%totalNslip) = stt%sense(:,of) - c = c + prm%totalNslip - + postResults(c+1_pInt:c+prm%totalNslip) = stt%sense(:,of) case (chi0_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%chi0(:,of) - c = c + prm%totalNslip - case (gamma0_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%gamma0(:,of) - c = c + prm%totalNslip - case (accshear_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%accshear(:,of) - c = c + prm%totalNslip - case (shearrate_ID) postResults(c+1_pInt:c+prm%totalNslip) = gdot_pos+gdot_neg - c = c + prm%totalNslip - case (resolvedstress_ID) do i = 1_pInt, prm%totalNslip postResults(c+i) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,i)) enddo - c = c + prm%totalNslip - + end select + + c = c + prm%totalNslip + enddo outputsLoop end associate From 705d55a3a5bca7c18ef2352daa688ab9ea46bf93 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 7 Jan 2019 07:20:13 +0100 Subject: [PATCH 83/89] re-enabled sanity checks + slight adjustments to layout --- src/plastic_kinematichardening.f90 | 70 +++++++++++++++--------------- src/plastic_phenopowerlaw.f90 | 9 ++-- 2 files changed, 41 insertions(+), 38 deletions(-) diff --git 
a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90
index f514ac78d..559f305ff 100644
--- a/src/plastic_kinematichardening.f90
+++ b/src/plastic_kinematichardening.f90
@@ -7,7 +7,7 @@
 !--------------------------------------------------------------------------------------------------
 module plastic_kinehardening
  use prec, only: &
-   pReal,&
+   pReal, &
    pInt

  implicit none
@@ -243,19 +243,21 @@ subroutine plastic_kinehardening_init
 !--------------------------------------------------------------------------------------------------
 ! sanity checks
-
-  ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt &
-  !         .and. param(instance)%crss0 (1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' crss0'
-  ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt &
-  !         .and. param(instance)%tau1 (1:nSlipFamilies) <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1'
-  ! if (any(plastic_kinehardening_Nslip (1:nSlipFamilies,instance) > 0_pInt &
-  !         .and. param(instance)%tau1_b(1:nSlipFamilies) < 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b'
-  ! if (param(instance)%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0'
-  ! if (param(instance)%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip'
+   if (prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0'
+   if (prm%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip'
+   if (any(prm%crss0 <= 0.0_pReal)) extmsg = trim(extmsg)//' crss0'
+   if (any(prm%tau1 <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1'
+   if (any(prm%tau1_b <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b'
+
+   !ToDo: Any sensible checks for theta?

  endif slipActive
-
+
+!--------------------------------------------------------------------------------------------------
+! exit if any parameter is out of range
+  if (extmsg /= '') &
+    call IO_error(211_pInt,ext_msg=trim(extmsg)//'('//PLASTICITY_KINEHARDENING_label//')')
+
 !--------------------------------------------------------------------------------------------------
 ! output pararameters
  outputs = config%getStrings('(output)',defaultVal=emptyStringArray)
@@ -308,21 +310,21 @@ subroutine plastic_kinehardening_init
 ! locally defined state aliases and initialization of state0 and aTolState
    startIndex = 1_pInt
    endIndex   = prm%totalNslip
-   stt%crss => plasticState(p)%state   (startIndex:endIndex,:)
-   dot%crss => plasticState(p)%dotState(startIndex:endIndex,:)
+   stt%crss => plasticState(p)%state   (startIndex:endIndex,:)
    stt%crss = spread(prm%crss0, 2, NipcMyPhase)
+   dot%crss => plasticState(p)%dotState(startIndex:endIndex,:)
    plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance

    startIndex = endIndex + 1_pInt
    endIndex   = endIndex + prm%totalNslip
-   stt%crss_back => plasticState(p)%state (startIndex:endIndex,:)
-   dot%crss_back => plasticState(p)%dotState(startIndex:endIndex,:)
+   stt%crss_back => plasticState(p)%state   (startIndex:endIndex,:)
+   dot%crss_back => plasticState(p)%dotState(startIndex:endIndex,:)
    plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance

    startIndex = endIndex + 1_pInt
    endIndex   = endIndex + prm%totalNslip
-   stt%accshear => plasticState(p)%state (startIndex:endIndex,:)
-   dot%accshear => plasticState(p)%dotState(startIndex:endIndex,:)
+   stt%accshear => plasticState(p)%state   (startIndex:endIndex,:)
+   dot%accshear => plasticState(p)%dotState(startIndex:endIndex,:)
    plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolShear
 !
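! Accumulating every offending keyword into extmsg and aborting once keeps
! the error report complete: a single IO_error(211) call lists all bad
! parameters instead of stopping at the first. A sketch of the pattern with
! one hypothetical check:
!   extmsg = ''
!   if (prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0'
!   if (extmsg /= '') &
!     call IO_error(211_pInt,ext_msg=trim(extmsg)//'('//PLASTICITY_KINEHARDENING_label//')')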
global alias plasticState(p)%slipRate => plasticState(p)%dotState(startIndex:endIndex,:) @@ -331,18 +333,18 @@ subroutine plastic_kinehardening_init o = plasticState(p)%offsetDeltaState startIndex = endIndex + 1_pInt endIndex = endIndex + prm%totalNslip - stt%sense => plasticState(p)%state (startIndex :endIndex ,:) - dlt%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) + stt%sense => plasticState(p)%state (startIndex :endIndex ,:) + dlt%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) startIndex = endIndex + 1_pInt endIndex = endIndex + prm%totalNslip - stt%chi0 => plasticState(p)%state (startIndex :endIndex ,:) - dlt%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) + stt%chi0 => plasticState(p)%state (startIndex :endIndex ,:) + dlt%chi0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) startIndex = endIndex + 1_pInt endIndex = endIndex + prm%totalNslip - stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,:) - dlt%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) + stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,:) + dlt%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) plasticState(p)%state0 = plasticState(p)%state ! ToDo: this could be done centrally @@ -508,33 +510,33 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) implicit none real(pReal), dimension(3,3), intent(in) :: & Mp !< Mandel stress - integer(pInt), intent(in) :: & + integer(pInt), intent(in) :: & instance, & of real(pReal), dimension(sum(plastic_kinehardening_sizePostResult(:,instance))) :: & postResults + integer(pInt) :: & o,c,i real(pReal), dimension(param(instance)%totalNslip) :: & gdot_pos,gdot_neg - c = 0_pInt associate(prm => param(instance), stt => state(instance)) - + call kinetics(Mp,instance,of,gdot_pos,gdot_neg) - + outputsLoop: do o = 1_pInt,size(prm%outputID) select case(prm%outputID(o)) - + case (crss_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%crss(:,of) case(crss_back_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%crss_back(:,of) case (sense_ID) - postResults(c+1_pInt:c+prm%totalNslip) = stt%sense(:,of) + postResults(c+1_pInt:c+prm%totalNslip) = stt%sense(:,of) case (chi0_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%chi0(:,of) case (gamma0_ID) @@ -547,7 +549,7 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) do i = 1_pInt, prm%totalNslip postResults(c+i) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,i)) enddo - + end select c = c + prm%totalNslip @@ -568,7 +570,7 @@ end function plastic_kinehardening_postResults pure subroutine kinetics(Mp,instance,of, & gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) use prec, only: & - dNeq0 + dNeq0 use math, only: & math_mul33xx33 @@ -578,14 +580,14 @@ pure subroutine kinetics(Mp,instance,of, & integer(pInt), intent(in) :: & instance, & of - real(pReal), dimension(param(instance)%totalNslip), intent(out) :: & + + real(pReal), intent(out), dimension(param(instance)%totalNslip) :: & gdot_pos, & gdot_neg - real(pReal), dimension(param(instance)%totalNslip), intent(out), optional :: & + real(pReal), intent(out), optional, dimension(param(instance)%totalNslip) :: & dgdot_dtau_pos, & dgdot_dtau_neg - real(pReal), dimension(param(instance)%totalNslip) :: & tau_pos, & tau_neg diff --git a/src/plastic_phenopowerlaw.f90 b/src/plastic_phenopowerlaw.f90 index e2b56cce6..51ffd6eff 100644 --- a/src/plastic_phenopowerlaw.f90 +++ b/src/plastic_phenopowerlaw.f90 @@ -191,7 +191,7 @@ subroutine plastic_phenopowerlaw_init 
prm%aTolResistance = config%getFloat('atol_resistance',defaultVal=1.0_pReal) prm%aTolShear = config%getFloat('atol_shear', defaultVal=1.0e-6_pReal) prm%aTolTwinfrac = config%getFloat('atol_twinfrac', defaultVal=1.0e-6_pReal) - + ! sanity checks if (prm%aTolResistance <= 0.0_pReal) extmsg = trim(extmsg)//' aTolresistance' if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//' aTolShear' @@ -392,7 +392,7 @@ end subroutine plastic_phenopowerlaw_init !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent -!> @details asumme that deformation by dislocation glide affects twinned and untwinned volume +!> @details asummes that deformation by dislocation glide affects twinned and untwinned volume ! equally (Taylor assumption). Twinning happens only in untwinned volume !-------------------------------------------------------------------------------------------------- pure subroutine plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) @@ -523,7 +523,7 @@ function plastic_phenopowerlaw_postResults(Mp,instance,of) result(postResults) of real(pReal), dimension(sum(plastic_phenopowerlaw_sizePostResult(:,instance))) :: & - postResults + postResults integer(pInt) :: & o,c,i @@ -595,13 +595,14 @@ pure subroutine kinetics_slip(Mp,instance,of, & integer(pInt), intent(in) :: & instance, & of - + real(pReal), intent(out), dimension(param(instance)%totalNslip) :: & gdot_slip_pos, & gdot_slip_neg real(pReal), intent(out), optional, dimension(param(instance)%totalNslip) :: & dgdot_dtau_slip_pos, & dgdot_dtau_slip_neg + real(pReal), dimension(param(instance)%totalNslip) :: & tau_slip_pos, & tau_slip_neg From 4037085f6c29854a7c277c0e7583c1cb41fa6d32 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 7 Jan 2019 07:24:02 +0100 Subject: [PATCH 84/89] whitespace not needed --- src/plastic_disloUCLA.f90 | 30 ++++++++--------- src/plastic_isotropic.f90 | 54 +++++++++++++++--------------- src/plastic_kinematichardening.f90 | 36 ++++++++++---------- src/plastic_phenopowerlaw.f90 | 28 ++++++++-------- 4 files changed, 74 insertions(+), 74 deletions(-) diff --git a/src/plastic_disloUCLA.f90 b/src/plastic_disloUCLA.f90 index 67adb083b..15c050934 100644 --- a/src/plastic_disloUCLA.f90 +++ b/src/plastic_disloUCLA.f90 @@ -227,7 +227,7 @@ subroutine plastic_disloUCLA_init() prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & config%getFloats('interaction_slipslip'), & structure(1:3)) - prm%rho0 = config%getFloats('rhoedge0', requiredShape=shape(prm%Nslip)) + prm%rho0 = config%getFloats('rhoedge0', requiredShape=shape(prm%Nslip)) prm%rhoDip0 = config%getFloats('rhoedgedip0', requiredShape=shape(prm%Nslip)) prm%v0 = config%getFloats('v0', requiredShape=shape(prm%Nslip)) prm%burgers = config%getFloats('slipburgers', requiredShape=shape(prm%Nslip)) @@ -268,7 +268,7 @@ subroutine plastic_disloUCLA_init() prm%clambda = math_expand(prm%clambda, prm%Nslip) prm%atomicVolume = math_expand(prm%atomicVolume, prm%Nslip) prm%minDipDistance = math_expand(prm%minDipDistance, prm%Nslip) - + prm%tau0 = prm%tau_peierls + prm%SolidSolutionStrength ! sanity checks @@ -280,7 +280,7 @@ subroutine plastic_disloUCLA_init() if (any(prm%tau_peierls < 0.0_pReal)) extmsg = trim(extmsg)//' tau_peierls' if ( prm%D0 <= 0.0_pReal) extmsg = trim(extmsg)//' d0' if ( prm%Qsd <= 0.0_pReal) extmsg = trim(extmsg)//' qsd' - + !if (plastic_disloUCLA_CAtomicVolume(instance) <= 0.0_pReal) & ! 
call IO_error(211_pInt,el=instance,ext_msg='cAtomicVolume ('//PLASTICITY_DISLOUCLA_label//')') else slipActive @@ -338,7 +338,7 @@ subroutine plastic_disloUCLA_init() plasticState(p)%sizePostResults = sum(plastic_disloUCLA_sizePostResult(:,phase_plasticityInstance(p))) allocate(prm%forestProjectionEdge(prm%totalNslip,prm%totalNslip),source = 0.0_pReal) - + i = 0_pInt mySlipFamilies: do f = 1_pInt,size(prm%Nslip,1) index_myFamily = sum(prm%Nslip(1:f-1_pInt)) @@ -416,7 +416,7 @@ subroutine plastic_disloUCLA_dependentState(instance,of) dst%mfp(:,of) = prm%grainSize/(1.0_pReal+prm%grainSize*dst%dislocationSpacing(:,of)/prm%Clambda) dst%dislocationSpacing(:,of) = dst%mfp(:,of) ! ToDo: Hack to recover wrong behavior for the moment - + end associate @@ -450,7 +450,7 @@ pure subroutine plastic_disloUCLA_LpAndItsTangent(Lp,dLp_dMp,Mp,Temperature,inst Lp = 0.0_pReal dLp_dMp = 0.0_pReal - + associate(prm => param(instance)) call kinetics(Mp,Temperature,instance,of,gdot_slip_pos,gdot_slip_neg,dgdot_dtauslip_pos,dgdot_dtauslip_neg) @@ -461,7 +461,7 @@ pure subroutine plastic_disloUCLA_LpAndItsTangent(Lp,dLp_dMp,Mp,Temperature,inst + dgdot_dtauslip_pos(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_pos(m,n,i) & + dgdot_dtauslip_neg(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_neg(m,n,i) enddo slipSystems - + end associate end subroutine plastic_disloUCLA_LpAndItsTangent @@ -600,12 +600,12 @@ function plastic_disloUCLA_postResults(Mp,Temperature,instance,of) result(postRe end function plastic_disloUCLA_postResults -!-------------------------------------------------------------------------------------------------- +!-------------------------------------------------------------------------------------------------- !> @brief Shear rates on slip systems, their derivatives with respect to resolved stress and the ! resolved stresss -!> @details Derivatives and resolved stress are calculated only optionally. -! NOTE: Against the common convention, the result (i.e. intent(out)) variables are the last to -! have the optional arguments at the end +!> @details Derivatives and resolved stress are calculated only optionally. +! NOTE: Against the common convention, the result (i.e. intent(out)) variables are the last to +! 
have the optional arguments at the end !-------------------------------------------------------------------------------------------------- pure subroutine kinetics(Mp,Temperature,instance,of, & gdot_slip_pos,gdot_slip_neg,dgdot_dtauslip_pos,dgdot_dtauslip_neg,tau_slip_pos1,tau_slip_neg1) @@ -642,13 +642,13 @@ pure subroutine kinetics(Mp,Temperature,instance,of, & integer(pInt) :: j associate(prm => param(instance), stt => state(instance), dst => dependentState(instance)) - + do j = 1_pInt, prm%totalNslip tau_slip_pos(j) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,j)) tau_slip_neg(j) = math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,j)) enddo - - + + if (present(tau_slip_pos1)) tau_slip_pos1 = tau_slip_pos if (present(tau_slip_neg1)) tau_slip_neg1 = tau_slip_neg @@ -735,7 +735,7 @@ pure subroutine kinetics(Mp,Temperature,instance,of, & ) & /(2.0_pReal*prm%burgers**2.0_pReal*tau_slip_neg & + prm%omega * prm%B* effectiveLength**2.0_pReal* needsGoodName )**2.0_pReal - + dgdot_dtauslip_neg = DotGamma0 * dvel_slip * 0.5_pReal else where significantNegativeTau2 dgdot_dtauslip_neg = 0.0_pReal diff --git a/src/plastic_isotropic.f90 b/src/plastic_isotropic.f90 index c7d92651a..4f96a5648 100644 --- a/src/plastic_isotropic.f90 +++ b/src/plastic_isotropic.f90 @@ -44,7 +44,7 @@ module plastic_isotropic aTolShear integer(pInt) :: & of_debug = 0_pInt - integer(kind(undefined_ID)), allocatable, dimension(:) :: & + integer(kind(undefined_ID)), allocatable, dimension(:) :: & outputID logical :: & dilatation @@ -119,7 +119,7 @@ subroutine plastic_isotropic_init() p, i, & NipcMyPhase, & sizeState, sizeDotState - + character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::] integer(kind(undefined_ID)) :: & @@ -154,7 +154,7 @@ subroutine plastic_isotropic_init() dot => dotState(phase_plasticityInstance(p)), & stt => state(phase_plasticityInstance(p)), & config => config_phase(p)) - + #ifdef DEBUG if (p==material_phase(debug_g,debug_i,debug_e)) then prm%of_debug = phasememberAt(debug_g,debug_i,debug_e) @@ -175,22 +175,22 @@ subroutine plastic_isotropic_init() prm%a = config%getFloat('a') prm%aTolFlowStress = config%getFloat('atol_flowstress',defaultVal=1.0_pReal) prm%aTolShear = config%getFloat('atol_shear', defaultVal=1.0e-6_pReal) - + prm%dilatation = config%keyExists('/dilatation/') !-------------------------------------------------------------------------------------------------- ! 
sanity checks extmsg = '' - if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//'aTolShear ' - if (prm%tau0 < 0.0_pReal) extmsg = trim(extmsg)//'tau0 ' - if (prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//'gdot0 ' - if (prm%n <= 0.0_pReal) extmsg = trim(extmsg)//'n ' + if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//'aTolShear ' + if (prm%tau0 < 0.0_pReal) extmsg = trim(extmsg)//'tau0 ' + if (prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//'gdot0 ' + if (prm%n <= 0.0_pReal) extmsg = trim(extmsg)//'n ' if (prm%tausat <= prm%tau0) extmsg = trim(extmsg)//'tausat ' - if (prm%a <= 0.0_pReal) extmsg = trim(extmsg)//'a ' - if (prm%fTaylor <= 0.0_pReal) extmsg = trim(extmsg)//'m ' + if (prm%a <= 0.0_pReal) extmsg = trim(extmsg)//'a ' + if (prm%fTaylor <= 0.0_pReal) extmsg = trim(extmsg)//'m ' if (prm%aTolFlowstress <= 0.0_pReal) extmsg = trim(extmsg)//'atol_flowstress ' - if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//'atol_shear ' - + if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//'atol_shear ' + !-------------------------------------------------------------------------------------------------- ! exit if any parameter is out of range if (extmsg /= '') & @@ -242,7 +242,7 @@ subroutine plastic_isotropic_init() ! global alias plasticState(p)%slipRate => plasticState(p)%dotState(2:2,1:NipcMyPhase) plasticState(p)%accumulatedSlip => plasticState(p)%state (2:2,1:NipcMyPhase) - + plasticState(p)%state0 = plasticState(p)%state ! ToDo: this could be done centrally end associate @@ -289,15 +289,15 @@ subroutine plastic_isotropic_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) k, l, m, n associate(prm => param(instance), stt => state(instance)) - + Mp_dev = math_deviatoric33(Mp) squarenorm_Mp_dev = math_mul33xx33(Mp_dev,Mp_dev) - norm_Mp_dev = sqrt(squarenorm_Mp_dev) + norm_Mp_dev = sqrt(squarenorm_Mp_dev) if (norm_Mp_dev > 0.0_pReal) then gamma_dot = prm%gdot0 * (sqrt(1.5_pReal) * norm_Mp_dev/(prm%fTaylor*stt%flowstress(of))) **prm%n - Lp = Mp_dev/norm_Mp_dev * gamma_dot/prm%fTaylor + Lp = Mp_dev/norm_Mp_dev * gamma_dot/prm%fTaylor #ifdef DEBUG if (iand(debug_level(debug_constitutive), debug_levelExtensive) /= 0_pInt & .and. (of == prm%of_debug .or. .not. iand(debug_level(debug_constitutive),debug_levelSelective) /= 0_pInt)) then @@ -318,7 +318,7 @@ subroutine plastic_isotropic_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) Lp = 0.0_pReal dLp_dMp = 0.0_pReal end if - + end associate end subroutine plastic_isotropic_LpAndItsTangent @@ -338,7 +338,7 @@ subroutine plastic_isotropic_LiAndItsTangent(Li,dLi_dTstar,Tstar,instance,of) Li !< inleastic velocity gradient real(pReal), dimension(3,3,3,3), intent(out) :: & dLi_dTstar !< derivative of Li with respect to the Mandel stress - + real(pReal), dimension(3,3), intent(in) :: & Tstar !< Mandel stress ToDo: Mi? integer(pInt), intent(in) :: & @@ -355,10 +355,10 @@ subroutine plastic_isotropic_LiAndItsTangent(Li,dLi_dTstar,Tstar,instance,of) k, l, m, n associate(prm => param(instance), stt => state(instance)) - + Tstar_sph = math_spherical33(Tstar) squarenorm_Tstar_sph = math_mul33xx33(Tstar_sph,Tstar_sph) - norm_Tstar_sph = sqrt(squarenorm_Tstar_sph) + norm_Tstar_sph = sqrt(squarenorm_Tstar_sph) if (prm%dilatation .and. norm_Tstar_sph > 0.0_pReal) then ! 
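! In the deviatoric branch above this is the classical J2 flow rule; with
! Taylor factor M = prm%fTaylor and flow stress sigma_f = stt%flowstress(of):
!   gamma_dot = gdot0 * ( sqrt(3/2) * ||dev(Mp)|| / (M*sigma_f) )**n
!   Lp        = dev(Mp)/||dev(Mp)|| * gamma_dot/M
! The spherical branch below applies the same power law to the pressure part
! of the stress and is only active when the /dilatation/ key is set: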
no stress or J2 plastitiy --> Li and its derivative are zero gamma_dot = prm%gdot0 * (sqrt(1.5_pReal) * norm_Tstar_sph /(prm%fTaylor*stt%flowstress(of))) **prm%n @@ -404,15 +404,15 @@ subroutine plastic_isotropic_dotState(Mp,instance,of) norm_Mp !< norm of the (deviatoric) Mandel stress associate(prm => param(instance), stt => state(instance), dot => dotState(instance)) - + if (prm%dilatation) then norm_Mp = sqrt(math_mul33xx33(Mp,Mp)) else norm_Mp = sqrt(math_mul33xx33(math_deviatoric33(Mp),math_deviatoric33(Mp))) endif - + gamma_dot = prm%gdot0 * (sqrt(1.5_pReal) * norm_Mp /(prm%fTaylor*stt%flowstress(of))) **prm%n - + if (abs(gamma_dot) > 1e-12_pReal) then if (dEq0(prm%tausat_SinhFitA)) then saturation = prm%tausat @@ -431,7 +431,7 @@ subroutine plastic_isotropic_dotState(Mp,instance,of) dot%flowstress (of) = hardening * gamma_dot dot%accumulatedShear(of) = gamma_dot - + end associate end subroutine plastic_isotropic_dotState @@ -461,13 +461,13 @@ function plastic_isotropic_postResults(Mp,instance,of) result(postResults) o,c associate(prm => param(instance), stt => state(instance)) - + if (prm%dilatation) then norm_Mp = sqrt(math_mul33xx33(Mp,Mp)) else norm_Mp = sqrt(math_mul33xx33(math_deviatoric33(Mp),math_deviatoric33(Mp))) endif - + c = 0_pInt outputsLoop: do o = 1_pInt,size(prm%outputID) @@ -483,7 +483,7 @@ function plastic_isotropic_postResults(Mp,instance,of) result(postResults) end select enddo outputsLoop - + end associate end function plastic_isotropic_postResults diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 559f305ff..f8add7937 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -40,7 +40,7 @@ module plastic_kinehardening crss0, & !< initial critical shear stress for slip (input parameter, per family) theta0, & !< initial hardening rate of forward stress for each slip theta1, & !< asymptotic hardening rate of forward stress for each slip > - theta0_b, & !< initial hardening rate of back stress for each slip > + theta0_b, & !< initial hardening rate of back stress for each slip > theta1_b, & !< asymptotic hardening rate of back stress for each slip > tau1, & tau1_b, & @@ -226,7 +226,7 @@ subroutine plastic_kinehardening_init prm%theta1 = config%getFloats('theta1', requiredShape=shape(prm%Nslip)) prm%theta0_b = config%getFloats('theta0_b', requiredShape=shape(prm%Nslip)) prm%theta1_b = config%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) - + prm%gdot0 = config%getFloat('gdot0') prm%n_slip = config%getFloat('n_slip') @@ -266,7 +266,7 @@ subroutine plastic_kinehardening_init outputID = undefined_ID outputSize = prm%totalNslip select case(outputs(i)) - + case ('resistance') outputID = merge(crss_ID,undefined_ID,prm%totalNslip>0_pInt) case ('accumulatedshear') @@ -316,7 +316,7 @@ subroutine plastic_kinehardening_init plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance startIndex = endIndex + 1_pInt - endIndex = endIndex + prm%totalNslip + endIndex = endIndex + prm%totalNslip stt%crss_back => plasticState(p)%state (startIndex:endIndex,:) dot%crss_back => plasticState(p)%dotState(startIndex:endIndex,:) plasticState(p)%aTolState(startIndex:endIndex) = prm%aTolResistance @@ -335,7 +335,7 @@ subroutine plastic_kinehardening_init endIndex = endIndex + prm%totalNslip stt%sense => plasticState(p)%state (startIndex :endIndex ,:) dlt%sense => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) - + startIndex = endIndex + 1_pInt endIndex = endIndex + prm%totalNslip stt%chi0 => 
plasticState(p)%state (startIndex :endIndex ,:) @@ -345,7 +345,7 @@ subroutine plastic_kinehardening_init endIndex = endIndex + prm%totalNslip stt%gamma0 => plasticState(p)%state (startIndex :endIndex ,:) dlt%gamma0 => plasticState(p)%deltaState(startIndex-o:endIndex-o,:) - + plasticState(p)%state0 = plasticState(p)%state ! ToDo: this could be done centrally end associate @@ -391,7 +391,7 @@ pure subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) + dgdot_dtau_pos(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_pos(m,n,i) & + dgdot_dtau_neg(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_neg(m,n,i) enddo - + end associate end subroutine plastic_kinehardening_LpAndItsTangent @@ -424,10 +424,10 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) sense associate(prm => param(instance), stt => state(instance), dlt => deltaState(instance)) - + call kinetics(Mp,instance,of,gdot_pos,gdot_neg) sense = merge(state(instance)%sense(:,of), & ! keep existing... - sign(1.0_pReal,gdot_pos+gdot_neg), & ! ...or have a defined + sign(1.0_pReal,gdot_pos+gdot_neg), & ! ...or have a defined dEq0(gdot_pos+gdot_neg,1e-10_pReal)) ! current sense of shear direction #ifdef DEBUG @@ -450,7 +450,7 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) dlt%chi0 (:,of) = 0.0_pReal dlt%gamma0(:,of) = 0.0_pReal end where - + end associate end subroutine plastic_kinehardening_deltaState @@ -474,13 +474,13 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) gdot_pos,gdot_neg real(pReal) :: & sumGamma - + associate(prm => param(instance), stt => state(instance), dot => dotState(instance)) call kinetics(Mp,instance,of,gdot_pos,gdot_neg) dot%accshear(:,of) = abs(gdot_pos+gdot_neg) - sumGamma = sum(stt%accshear(:,of)) - + sumGamma = sum(stt%accshear(:,of)) + do i = 1_pInt, prm%totalNslip dot%crss(i,of) = dot_product(prm%interaction_SlipSlip(i,:),dot%accshear(:,of)) & * ( prm%theta1(i) & @@ -493,8 +493,8 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) (prm%theta0_b - prm%theta1_b & + prm%theta0_b*prm%theta1_b/(prm%tau1_b+stt%chi0(:,of))*(stt%accshear(:,of)-stt%gamma0(:,of))& ) *exp(-(stt%accshear(:,of)-stt%gamma0(:,of)) *prm%theta0_b/(prm%tau1_b+stt%chi0(:,of))) & - ) - + ) + end associate end subroutine plastic_kinehardening_dotState @@ -551,9 +551,9 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) enddo end select - + c = c + prm%totalNslip - + enddo outputsLoop end associate @@ -595,7 +595,7 @@ pure subroutine kinetics(Mp,instance,of, & logical :: nonSchmidActive associate(prm => param(instance), stt => state(instance)) - + nonSchmidActive = size(prm%nonSchmidCoeff) > 0_pInt do i = 1_pInt, prm%totalNslip diff --git a/src/plastic_phenopowerlaw.f90 b/src/plastic_phenopowerlaw.f90 index 51ffd6eff..abcb10bdb 100644 --- a/src/plastic_phenopowerlaw.f90 +++ b/src/plastic_phenopowerlaw.f90 @@ -221,7 +221,7 @@ subroutine plastic_phenopowerlaw_init prm%xi_slip_sat = config%getFloats('tausat_slip', requiredSize=size(prm%Nslip)) prm%H_int = config%getFloats('h_int', requiredSize=size(prm%Nslip), & defaultVal=[(0.0_pReal,i=1_pInt,size(prm%Nslip))]) - + prm%gdot0_slip = config%getFloat('gdot0_slip') prm%n_slip = config%getFloat('n_slip') prm%a_slip = config%getFloat('a_slip') @@ -392,7 +392,7 @@ end subroutine plastic_phenopowerlaw_init !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent -!> @details asummes that deformation by dislocation 
glide affects twinned and untwinned volume
+!> @details assumes that deformation by dislocation glide affects twinned and untwinned volume
! equally (Taylor assumption). Twinning happens only in untwinned volume
!--------------------------------------------------------------------------------------------------
pure subroutine plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of)
@@ -419,9 +419,9 @@ pure subroutine plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of)
Lp = 0.0_pReal
dLp_dMp = 0.0_pReal
- +
associate(prm => param(instance))
- +
call kinetics_slip(Mp,instance,of,gdot_slip_pos,gdot_slip_neg,dgdot_dtauslip_pos,dgdot_dtauslip_neg)
slipSystems: do i = 1_pInt, prm%totalNslip
Lp = Lp + (gdot_slip_pos(i)+gdot_slip_neg(i))*prm%Schmid_slip(1:3,1:3,i)
@@ -438,7 +438,7 @@ pure subroutine plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of)
dLp_dMp(k,l,m,n) = dLp_dMp(k,l,m,n) &
+ dgdot_dtautwin(i)*prm%Schmid_twin(k,l,i)*prm%Schmid_twin(m,n,i)
enddo twinSystems
- +
end associate
end subroutine plastic_phenopowerlaw_LpAndItsTangent
@@ -578,8 +578,8 @@ end function plastic_phenopowerlaw_postResults
!--------------------------------------------------------------------------------------------------
!> @brief Shear rates on slip systems and their derivatives with respect to resolved stress
-!> @details Derivatives are calculated only optionally.
-! NOTE: Against the common convention, the result (i.e. intent(out)) variables are the last to
+!> @details Derivatives are calculated only optionally.
+! NOTE: Against the common convention, the result (i.e. intent(out)) variables are the last to
! have the optional arguments at the end
!--------------------------------------------------------------------------------------------------
pure subroutine kinetics_slip(Mp,instance,of, &
@@ -610,7 +610,7 @@ pure subroutine kinetics_slip(Mp,instance,of, &
logical :: nonSchmidActive
associate(prm => param(instance), stt => state(instance))
- +
nonSchmidActive = size(prm%nonSchmidCoeff) > 0_pInt
do i = 1_pInt, prm%totalNslip
@@ -656,7 +656,7 @@ end subroutine kinetics_slip
!> @brief Shear rates on twin systems and their derivatives with respect to resolved stress.
! twinning is assumed to take place only in untwinned volume.
!> @details Derivatives are calculated only optionally.
-! NOTE: Against the common convention, the result (i.e. intent(out)) variables are the last to
+! NOTE: Against the common convention, the result (i.e. intent(out)) variables are the last to
! have the optional arguments at the end.
!--------------------------------------------------------------------------------------------------
pure subroutine kinetics_twin(Mp,instance,of,&
@@ -672,7 +672,7 @@ pure subroutine kinetics_twin(Mp,instance,of,&
integer(pInt), intent(in) :: &
instance, &
of
- +
real(pReal), dimension(param(instance)%totalNtwin), intent(out) :: &
gdot_twin
real(pReal), dimension(param(instance)%totalNtwin), intent(out), optional :: &
@@ -681,17 +681,17 @@ pure subroutine kinetics_twin(Mp,instance,of,&
real(pReal), dimension(param(instance)%totalNtwin) :: &
tau_twin
integer(pInt) :: i
- +
associate(prm => param(instance), stt => state(instance))
do i = 1_pInt, prm%totalNtwin
tau_twin(i) = math_mul33xx33(Mp,prm%Schmid_twin(1:3,1:3,i))
enddo
- +
where(tau_twin > 0.0_pReal)
gdot_twin = (1.0_pReal-sum(stt%gamma_twin(:,of)/prm%gamma_twin_char)) & !
only twin in untwinned volume fraction * prm%gdot0_twin*(abs(tau_twin)/stt%xi_twin(:,of))**prm%n_twin - else where + else where gdot_twin = 0.0_pReal end where @@ -702,7 +702,7 @@ pure subroutine kinetics_twin(Mp,instance,of,& dgdot_dtau_twin = 0.0_pReal end where endif - + end associate end subroutine kinetics_twin From e06fc036c5a20da2375baff49dd79f72417596ed Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 7 Jan 2019 07:36:11 +0100 Subject: [PATCH 85/89] have dotState after Lp/Li --- src/plastic_disloUCLA.f90 | 60 ++++++++++---------- src/plastic_kinematichardening.f90 | 88 +++++++++++++++--------------- src/plastic_none.f90 | 1 - 3 files changed, 74 insertions(+), 75 deletions(-) diff --git a/src/plastic_disloUCLA.f90 b/src/plastic_disloUCLA.f90 index 15c050934..734d077e3 100644 --- a/src/plastic_disloUCLA.f90 +++ b/src/plastic_disloUCLA.f90 @@ -393,36 +393,6 @@ subroutine plastic_disloUCLA_init() end subroutine plastic_disloUCLA_init -!-------------------------------------------------------------------------------------------------- -!> @brief calculates derived quantities from state -!-------------------------------------------------------------------------------------------------- -subroutine plastic_disloUCLA_dependentState(instance,of) - - implicit none - integer(pInt), intent(in) :: instance, of - - integer(pInt) :: & - i - - associate(prm => param(instance), stt => state(instance),dst => dependentState(instance)) - - forall (i = 1_pInt:prm%totalNslip) - dst%dislocationSpacing(i,of) = sqrt(dot_product(stt%rhoEdge(:,of)+stt%rhoEdgeDip(:,of), & - prm%forestProjectionEdge(:,i))) - dst%threshold_stress(i,of) = prm%mu*prm%burgers(i) & - * sqrt(dot_product(stt%rhoEdge(:,of)+stt%rhoEdgeDip(:,of), & - prm%interaction_SlipSlip(i,:))) - end forall - - dst%mfp(:,of) = prm%grainSize/(1.0_pReal+prm%grainSize*dst%dislocationSpacing(:,of)/prm%Clambda) - dst%dislocationSpacing(:,of) = dst%mfp(:,of) ! ToDo: Hack to recover wrong behavior for the moment - - end associate - - -end subroutine plastic_disloUCLA_dependentState - - !-------------------------------------------------------------------------------------------------- !> @brief calculates plastic velocity gradient and its tangent !-------------------------------------------------------------------------------------------------- @@ -531,6 +501,36 @@ subroutine plastic_disloUCLA_dotState(Mp,Temperature,instance,of) end subroutine plastic_disloUCLA_dotState +!-------------------------------------------------------------------------------------------------- +!> @brief calculates derived quantities from state +!-------------------------------------------------------------------------------------------------- +subroutine plastic_disloUCLA_dependentState(instance,of) + + implicit none + integer(pInt), intent(in) :: instance, of + + integer(pInt) :: & + i + + associate(prm => param(instance), stt => state(instance),dst => dependentState(instance)) + + forall (i = 1_pInt:prm%totalNslip) + dst%dislocationSpacing(i,of) = sqrt(dot_product(stt%rhoEdge(:,of)+stt%rhoEdgeDip(:,of), & + prm%forestProjectionEdge(:,i))) + dst%threshold_stress(i,of) = prm%mu*prm%burgers(i) & + * sqrt(dot_product(stt%rhoEdge(:,of)+stt%rhoEdgeDip(:,of), & + prm%interaction_SlipSlip(i,:))) + end forall + + dst%mfp(:,of) = prm%grainSize/(1.0_pReal+prm%grainSize*dst%dislocationSpacing(:,of)/prm%Clambda) + dst%dislocationSpacing(:,of) = dst%mfp(:,of) ! 
ToDo: Hack to recover wrong behavior for the moment + + end associate + + +end subroutine plastic_disloUCLA_dependentState + + !-------------------------------------------------------------------------------------------------- !> @brief return array of constitutive results !-------------------------------------------------------------------------------------------------- diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index f8add7937..20d748a88 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -397,6 +397,50 @@ pure subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) end subroutine plastic_kinehardening_LpAndItsTangent +!-------------------------------------------------------------------------------------------------- +!> @brief calculates the rate of change of microstructure +!-------------------------------------------------------------------------------------------------- +subroutine plastic_kinehardening_dotState(Mp,instance,of) + + implicit none + real(pReal), dimension(3,3), intent(in) :: & + Mp !< Mandel stress + integer(pInt), intent(in) :: & + instance, & + of + + integer(pInt) :: & + i + real(pReal), dimension(param(instance)%totalNslip) :: & + gdot_pos,gdot_neg + real(pReal) :: & + sumGamma + + associate(prm => param(instance), stt => state(instance), dot => dotState(instance)) + + call kinetics(Mp,instance,of,gdot_pos,gdot_neg) + dot%accshear(:,of) = abs(gdot_pos+gdot_neg) + sumGamma = sum(stt%accshear(:,of)) + + do i = 1_pInt, prm%totalNslip + dot%crss(i,of) = dot_product(prm%interaction_SlipSlip(i,:),dot%accshear(:,of)) & + * ( prm%theta1(i) & + + (prm%theta0(i) - prm%theta1(i) + prm%theta0(i)*prm%theta1(i)*sumGamma/prm%tau1(i)) & + * exp(-sumGamma*prm%theta0(i)/prm%tau1(i)) & + ) + enddo + dot%crss_back(:,of) = stt%sense(:,of)*dot%accshear(:,of) * & + ( prm%theta1_b + & + (prm%theta0_b - prm%theta1_b & + + prm%theta0_b*prm%theta1_b/(prm%tau1_b+stt%chi0(:,of))*(stt%accshear(:,of)-stt%gamma0(:,of))& + ) *exp(-(stt%accshear(:,of)-stt%gamma0(:,of)) *prm%theta0_b/(prm%tau1_b+stt%chi0(:,of))) & + ) + + end associate + +end subroutine plastic_kinehardening_dotState + + !-------------------------------------------------------------------------------------------------- !> @brief calculates (instantaneous) incremental change of microstructure !-------------------------------------------------------------------------------------------------- @@ -456,50 +500,6 @@ subroutine plastic_kinehardening_deltaState(Mp,instance,of) end subroutine plastic_kinehardening_deltaState -!-------------------------------------------------------------------------------------------------- -!> @brief calculates the rate of change of microstructure -!-------------------------------------------------------------------------------------------------- -subroutine plastic_kinehardening_dotState(Mp,instance,of) - - implicit none - real(pReal), dimension(3,3), intent(in) :: & - Mp !< Mandel stress - integer(pInt), intent(in) :: & - instance, & - of - - integer(pInt) :: & - i - real(pReal), dimension(param(instance)%totalNslip) :: & - gdot_pos,gdot_neg - real(pReal) :: & - sumGamma - - associate(prm => param(instance), stt => state(instance), dot => dotState(instance)) - - call kinetics(Mp,instance,of,gdot_pos,gdot_neg) - dot%accshear(:,of) = abs(gdot_pos+gdot_neg) - sumGamma = sum(stt%accshear(:,of)) - - do i = 1_pInt, prm%totalNslip - dot%crss(i,of) = dot_product(prm%interaction_SlipSlip(i,:),dot%accshear(:,of)) & 
- * ( prm%theta1(i) & - + (prm%theta0(i) - prm%theta1(i) + prm%theta0(i)*prm%theta1(i)*sumGamma/prm%tau1(i)) & - * exp(-sumGamma*prm%theta0(i)/prm%tau1(i)) & - ) - enddo - dot%crss_back(:,of) = stt%sense(:,of)*dot%accshear(:,of) * & - ( prm%theta1_b + & - (prm%theta0_b - prm%theta1_b & - + prm%theta0_b*prm%theta1_b/(prm%tau1_b+stt%chi0(:,of))*(stt%accshear(:,of)-stt%gamma0(:,of))& - ) *exp(-(stt%accshear(:,of)-stt%gamma0(:,of)) *prm%theta0_b/(prm%tau1_b+stt%chi0(:,of))) & - ) - - end associate - -end subroutine plastic_kinehardening_dotState - - !-------------------------------------------------------------------------------------------------- !> @brief return array of constitutive results !-------------------------------------------------------------------------------------------------- diff --git a/src/plastic_none.f90 b/src/plastic_none.f90 index 2c6ca6e93..0b3df43ef 100644 --- a/src/plastic_none.f90 +++ b/src/plastic_none.f90 @@ -60,7 +60,6 @@ subroutine plastic_none_init !-------------------------------------------------------------------------------------------------- ! allocate state arrays NipcMyPhase = count(material_phase == p) - call material_allocatePlasticState(p,NipcMyPhase,0_pInt,0_pInt,0_pInt, & 0_pInt,0_pInt,0_pInt) plasticState(p)%sizePostResults = 0_pInt From 995122504ebac50a9ec8f596bf2c5c14c6f78664 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 7 Jan 2019 08:04:02 +0100 Subject: [PATCH 86/89] cross comparing --- src/plastic_disloUCLA.f90 | 58 +++++++++++----------- src/plastic_isotropic.f90 | 70 +++++++++++++-------------- src/plastic_kinematichardening.f90 | 77 +++++++++++++++--------------- src/plastic_phenopowerlaw.f90 | 17 ++++--- 4 files changed, 111 insertions(+), 111 deletions(-) diff --git a/src/plastic_disloUCLA.f90 b/src/plastic_disloUCLA.f90 index 734d077e3..c9a885e68 100644 --- a/src/plastic_disloUCLA.f90 +++ b/src/plastic_disloUCLA.f90 @@ -77,8 +77,6 @@ module plastic_disloUCLA dipoleformation end type !< container type for internal constitutive parameters - type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) - type, private :: tDisloUCLAState real(pReal), pointer, dimension(:,:) :: & rhoEdge, & @@ -93,6 +91,8 @@ module plastic_disloUCLA threshold_stress end type tDisloUCLAdependentState + + type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) type(tDisloUCLAState ), allocatable, dimension(:), private :: & dotState, & state @@ -110,6 +110,7 @@ module plastic_disloUCLA contains + !-------------------------------------------------------------------------------------------------- !> @brief module initialization !> @details reads in material parameters, allocates arrays, and does sanity checks @@ -152,7 +153,7 @@ subroutine plastic_disloUCLA_init() f,j,k,o, & Ninstance, & p, i, & - NipcMyPhase, outputSize, & + NipcMyPhase, & sizeState, sizeDotState, & startIndex, endIndex @@ -217,7 +218,7 @@ subroutine plastic_disloUCLA_init() config%getFloat('c/a',defaultVal=0.0_pReal)) if(structure=='bcc') then prm%nonSchmidCoeff = config%getFloats('nonschmid_coefficients',& - defaultVal = emptyRealArray) + defaultVal = emptyRealArray) prm%nonSchmid_pos = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,+1_pInt) prm%nonSchmid_neg = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,-1_pInt) else @@ -272,17 +273,17 @@ subroutine plastic_disloUCLA_init() prm%tau0 = prm%tau_peierls + prm%SolidSolutionStrength ! 
sanity checks - if (any(prm%rho0 < 0.0_pReal)) extmsg = trim(extmsg)//' rhoedge0' - if (any(prm%rhoDip0 < 0.0_pReal)) extmsg = trim(extmsg)//' rhoedgedip0' - if (any(prm%v0 < 0.0_pReal)) extmsg = trim(extmsg)//' v0' - if (any(prm%burgers <= 0.0_pReal)) extmsg = trim(extmsg)//' slipburgers' - if (any(prm%H0kp <= 0.0_pReal)) extmsg = trim(extmsg)//' qedge' - if (any(prm%tau_peierls < 0.0_pReal)) extmsg = trim(extmsg)//' tau_peierls' - if ( prm%D0 <= 0.0_pReal) extmsg = trim(extmsg)//' d0' - if ( prm%Qsd <= 0.0_pReal) extmsg = trim(extmsg)//' qsd' + if ( prm%D0 <= 0.0_pReal) extmsg = trim(extmsg)//' d0' + if ( prm%Qsd <= 0.0_pReal) extmsg = trim(extmsg)//' qsd' + if (any(prm%rho0 < 0.0_pReal)) extmsg = trim(extmsg)//' rhoedge0' + if (any(prm%rhoDip0 < 0.0_pReal)) extmsg = trim(extmsg)//' rhoedgedip0' + if (any(prm%v0 < 0.0_pReal)) extmsg = trim(extmsg)//' v0' + if (any(prm%burgers <= 0.0_pReal)) extmsg = trim(extmsg)//' slipburgers' + if (any(prm%H0kp <= 0.0_pReal)) extmsg = trim(extmsg)//' qedge' + if (any(prm%tau_peierls < 0.0_pReal)) extmsg = trim(extmsg)//' tau_peierls' + if (any(prm%minDipDistance <= 0.0_pReal)) extmsg = trim(extmsg)//' cedgedipmindistance or slipburgers' + if (any(prm%atomicVolume <= 0.0_pReal)) extmsg = trim(extmsg)//' catomicvolume or slipburgers' - !if (plastic_disloUCLA_CAtomicVolume(instance) <= 0.0_pReal) & - ! call IO_error(211_pInt,el=instance,ext_msg='cAtomicVolume ('//PLASTICITY_DISLOUCLA_label//')') else slipActive allocate(prm%rho0(0)) allocate(prm%rhoDip0(0)) @@ -299,7 +300,6 @@ subroutine plastic_disloUCLA_init() allocate(prm%outputID(0)) do i=1_pInt, size(outputs) outputID = undefined_ID - outputSize = prm%totalNslip select case(trim(outputs(i))) case ('edge_density') @@ -321,7 +321,7 @@ subroutine plastic_disloUCLA_init() if (outputID /= undefined_ID) then plastic_disloUCLA_output(i,phase_plasticityInstance(p)) = outputs(i) - plastic_disloUCLA_sizePostResult(i,phase_plasticityInstance(p)) = outputSize + plastic_disloUCLA_sizePostResult(i,phase_plasticityInstance(p)) = prm%totalNslip prm%outputID = [prm%outputID, outputID] endif @@ -329,7 +329,7 @@ subroutine plastic_disloUCLA_init() !-------------------------------------------------------------------------------------------------- ! allocate state arrays - NipcMyPhase = count(material_phase==p) + NipcMyPhase = count(material_phase == p) sizeDotState = int(size(['rhoEdge ','rhoEdgeDip ','accshearslip']),pInt) * prm%totalNslip sizeState = sizeDotState @@ -375,7 +375,7 @@ subroutine plastic_disloUCLA_init() endIndex = endIndex + prm%totalNslip stt%accshear_slip=>plasticState(p)%state(startIndex:endIndex,:) dot%accshear_slip=>plasticState(p)%dotState(startIndex:endIndex,:) - plasticState(p)%aTolState(startIndex:endIndex) = 1e6_pReal + plasticState(p)%aTolState(startIndex:endIndex) = 1e6_pReal !ToDo: better make optional parameter ! 
global alias plasticState(p)%slipRate => plasticState(p)%dotState(startIndex:endIndex,:) plasticState(p)%accumulatedSlip => plasticState(p)%state(startIndex:endIndex,:) @@ -415,8 +415,8 @@ pure subroutine plastic_disloUCLA_LpAndItsTangent(Lp,dLp_dMp,Mp,Temperature,inst integer(pInt) :: & i,k,l,m,n real(pReal), dimension(param(instance)%totalNslip) :: & - dgdot_dtauslip_pos,dgdot_dtauslip_neg, & - gdot_slip_pos,gdot_slip_neg + gdot_slip_pos,gdot_slip_neg, & + dgdot_dtauslip_pos,dgdot_dtauslip_neg Lp = 0.0_pReal dLp_dMp = 0.0_pReal @@ -424,13 +424,13 @@ pure subroutine plastic_disloUCLA_LpAndItsTangent(Lp,dLp_dMp,Mp,Temperature,inst associate(prm => param(instance)) call kinetics(Mp,Temperature,instance,of,gdot_slip_pos,gdot_slip_neg,dgdot_dtauslip_pos,dgdot_dtauslip_neg) - slipSystems: do i = 1_pInt, prm%totalNslip + do i = 1_pInt, prm%totalNslip Lp = Lp + (gdot_slip_pos(i)+gdot_slip_neg(i))*prm%Schmid_slip(1:3,1:3,i) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & dLp_dMp(k,l,m,n) = dLp_dMp(k,l,m,n) & + dgdot_dtauslip_pos(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_pos(m,n,i) & + dgdot_dtauslip_neg(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_neg(m,n,i) - enddo slipSystems + enddo end associate @@ -449,12 +449,13 @@ subroutine plastic_disloUCLA_dotState(Mp,Temperature,instance,of) math_clip implicit none - real(pReal), dimension(3,3), intent(in):: & + real(pReal), dimension(3,3), intent(in) :: & Mp !< Mandel stress - real(pReal), intent(in) :: & + real(pReal), intent(in) :: & temperature !< temperature - integer(pInt), intent(in) :: & - instance, of + integer(pInt), intent(in) :: & + instance, & + of real(pReal) :: & VacancyDiffusion @@ -507,7 +508,9 @@ end subroutine plastic_disloUCLA_dotState subroutine plastic_disloUCLA_dependentState(instance,of) implicit none - integer(pInt), intent(in) :: instance, of + integer(pInt), intent(in) :: & + instance, & + of integer(pInt) :: & i @@ -527,7 +530,6 @@ subroutine plastic_disloUCLA_dependentState(instance,of) end associate - end subroutine plastic_disloUCLA_dependentState diff --git a/src/plastic_isotropic.f90 b/src/plastic_isotropic.f90 index 4f96a5648..219226ad4 100644 --- a/src/plastic_isotropic.f90 +++ b/src/plastic_isotropic.f90 @@ -50,14 +50,14 @@ module plastic_isotropic dilatation end type - type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) - type, private :: tIsotropicState real(pReal), pointer, dimension(:) :: & flowstress, & accumulatedShear end type + + type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) type(tIsotropicState), allocatable, dimension(:), private :: & dotState, & state @@ -140,8 +140,8 @@ subroutine plastic_isotropic_init() if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0_pInt) & write(6,'(a16,1x,i5,/)') '# instances:',Ninstance - allocate(plastic_isotropic_sizePostResult(maxval(phase_Noutput), Ninstance),source=0_pInt) - allocate(plastic_isotropic_output(maxval(phase_Noutput), Ninstance)) + allocate(plastic_isotropic_sizePostResult(maxval(phase_Noutput),Ninstance),source=0_pInt) + allocate(plastic_isotropic_output(maxval(phase_Noutput),Ninstance)) plastic_isotropic_output = '' allocate(param(Ninstance)) @@ -161,35 +161,35 @@ subroutine plastic_isotropic_init() endif #endif - prm%tau0 = config%getFloat('tau0') - prm%tausat = config%getFloat('tausat') - prm%gdot0 = config%getFloat('gdot0') - prm%n = config%getFloat('n') - prm%h0 = 
config%getFloat('h0') - prm%fTaylor = config%getFloat('m') - prm%h0_slopeLnRate = config%getFloat('h0_slopelnrate', defaultVal=0.0_pReal) - prm%tausat_SinhFitA = config%getFloat('tausat_sinhfita',defaultVal=0.0_pReal) - prm%tausat_SinhFitB = config%getFloat('tausat_sinhfitb',defaultVal=0.0_pReal) - prm%tausat_SinhFitC = config%getFloat('tausat_sinhfitc',defaultVal=0.0_pReal) - prm%tausat_SinhFitD = config%getFloat('tausat_sinhfitd',defaultVal=0.0_pReal) - prm%a = config%getFloat('a') - prm%aTolFlowStress = config%getFloat('atol_flowstress',defaultVal=1.0_pReal) - prm%aTolShear = config%getFloat('atol_shear', defaultVal=1.0e-6_pReal) + prm%tau0 = config%getFloat('tau0') + prm%tausat = config%getFloat('tausat') + prm%gdot0 = config%getFloat('gdot0') + prm%n = config%getFloat('n') + prm%h0 = config%getFloat('h0') + prm%fTaylor = config%getFloat('m') + prm%h0_slopeLnRate = config%getFloat('h0_slopelnrate', defaultVal=0.0_pReal) + prm%tausat_SinhFitA = config%getFloat('tausat_sinhfita',defaultVal=0.0_pReal) + prm%tausat_SinhFitB = config%getFloat('tausat_sinhfitb',defaultVal=0.0_pReal) + prm%tausat_SinhFitC = config%getFloat('tausat_sinhfitc',defaultVal=0.0_pReal) + prm%tausat_SinhFitD = config%getFloat('tausat_sinhfitd',defaultVal=0.0_pReal) + prm%a = config%getFloat('a') + prm%aTolFlowStress = config%getFloat('atol_flowstress',defaultVal=1.0_pReal) + prm%aTolShear = config%getFloat('atol_shear', defaultVal=1.0e-6_pReal) prm%dilatation = config%keyExists('/dilatation/') !-------------------------------------------------------------------------------------------------- ! sanity checks extmsg = '' - if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//'aTolShear ' - if (prm%tau0 < 0.0_pReal) extmsg = trim(extmsg)//'tau0 ' - if (prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//'gdot0 ' - if (prm%n <= 0.0_pReal) extmsg = trim(extmsg)//'n ' - if (prm%tausat <= prm%tau0) extmsg = trim(extmsg)//'tausat ' - if (prm%a <= 0.0_pReal) extmsg = trim(extmsg)//'a ' - if (prm%fTaylor <= 0.0_pReal) extmsg = trim(extmsg)//'m ' - if (prm%aTolFlowstress <= 0.0_pReal) extmsg = trim(extmsg)//'atol_flowstress ' - if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//'atol_shear ' + if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//' aTolShear' + if (prm%tau0 < 0.0_pReal) extmsg = trim(extmsg)//' tau0' + if (prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' + if (prm%n <= 0.0_pReal) extmsg = trim(extmsg)//' n' + if (prm%tausat <= prm%tau0) extmsg = trim(extmsg)//' tausat' + if (prm%a <= 0.0_pReal) extmsg = trim(extmsg)//' a' + if (prm%fTaylor <= 0.0_pReal) extmsg = trim(extmsg)//' m' + if (prm%aTolFlowstress <= 0.0_pReal) extmsg = trim(extmsg)//' atol_flowstress' + if (prm%aTolShear <= 0.0_pReal) extmsg = trim(extmsg)//' atol_shear' !-------------------------------------------------------------------------------------------------- ! exit if any parameter is out of range @@ -231,17 +231,17 @@ subroutine plastic_isotropic_init() !-------------------------------------------------------------------------------------------------- ! 
locally defined state aliases and initialization of state0 and aTolState - stt%flowstress => plasticState(p)%state (1,1:NipcMyPhase) + stt%flowstress => plasticState(p)%state (1,:) stt%flowstress = prm%tau0 - dot%flowstress => plasticState(p)%dotState (1,1:NipcMyPhase) - plasticState(p)%aTolState(1) = prm%aTolFlowstress + dot%flowstress => plasticState(p)%dotState(1,:) + plasticState(p)%aTolState(1) = prm%aTolFlowstress - stt%accumulatedShear => plasticState(p)%state (2,1:NipcMyPhase) - dot%accumulatedShear => plasticState(p)%dotState (2,1:NipcMyPhase) - plasticState(p)%aTolState(2) = prm%aTolShear + stt%accumulatedShear => plasticState(p)%state (2,:) + dot%accumulatedShear => plasticState(p)%dotState(2,:) + plasticState(p)%aTolState(2) = prm%aTolShear ! global alias - plasticState(p)%slipRate => plasticState(p)%dotState(2:2,1:NipcMyPhase) - plasticState(p)%accumulatedSlip => plasticState(p)%state (2:2,1:NipcMyPhase) + plasticState(p)%slipRate => plasticState(p)%dotState(2:2,:) + plasticState(p)%accumulatedSlip => plasticState(p)%state (2:2,:) plasticState(p)%state0 = plasticState(p)%state ! ToDo: this could be done centrally diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 20d748a88..8576d5425 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -61,8 +61,6 @@ module plastic_kinehardening outputID !< ID of each post result output end type - type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) - type, private :: tKinehardeningState real(pReal), pointer, dimension(:,:) :: & !< vectors along NipcMyInstance crss, & !< critical resolved stress @@ -73,6 +71,8 @@ module plastic_kinehardening accshear !< accumulated (absolute) shear end type + + type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) type(tKinehardeningState), allocatable, dimension(:), private :: & dotState, & deltaState, & @@ -87,7 +87,6 @@ module plastic_kinehardening private :: & kinetics - contains @@ -140,7 +139,7 @@ subroutine plastic_kinehardening_init integer(pInt) :: & Ninstance, & p, i, o, & - NipcMyPhase, outputSize, & + NipcMyPhase, & sizeState, sizeDeltaState, sizeDotState, & startIndex, endIndex @@ -243,11 +242,11 @@ subroutine plastic_kinehardening_init !-------------------------------------------------------------------------------------------------- ! sanity checks - if (prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' - if (prm%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' + if ( prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' + if ( prm%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' if (any(prm%crss0 <= 0.0_pReal)) extmsg = trim(extmsg)//' crss0' - if (any(prm%tau1 <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1' - if (any(prm%tau1_b <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' + if (any(prm%tau1 <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1' + if (any(prm%tau1_b <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' !ToDo: Any sensible checks for theta? 
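! Aside (illustrative sketch, not part of the patch): the sanity-check hunk
! above follows a pattern shared by all plasticity modules: every offending
! key is appended to extmsg, and one IO_error call then reports them all. A
! minimal self-contained analogue, with all parameter values invented and a
! plain write standing in for IO_error(211_pInt,...):
program sanity_check_sketch
  implicit none
  integer,     parameter    :: pReal = selected_real_kind(15)
  real(pReal)               :: gdot0, n
  real(pReal), dimension(3) :: crss0
  character(len=256)        :: extmsg

  gdot0 = 1.0e-3_pReal                                            ! valid
  n     = 20.0_pReal                                              ! valid
  crss0 = [31.0e6_pReal, -1.0_pReal, 63.0e6_pReal]                ! one invalid entry

  extmsg = ''
  if (     gdot0 <= 0.0_pReal)  extmsg = trim(extmsg)//' gdot0'   ! collect every offender ...
  if (     n     <= 0.0_pReal)  extmsg = trim(extmsg)//' n_slip'
  if (any(crss0  <= 0.0_pReal)) extmsg = trim(extmsg)//' crss0'

  if (extmsg /= '') &                                             ! ... and report them in one go
    write(6,'(a)') 'out of range:'//trim(extmsg)
end program sanity_check_sketch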
@@ -264,41 +263,40 @@ subroutine plastic_kinehardening_init allocate(prm%outputID(0)) do i=1_pInt, size(outputs) outputID = undefined_ID - outputSize = prm%totalNslip select case(outputs(i)) - case ('resistance') - outputID = merge(crss_ID,undefined_ID,prm%totalNslip>0_pInt) - case ('accumulatedshear') - outputID = merge(accshear_ID,undefined_ID,prm%totalNslip>0_pInt) - case ('shearrate') - outputID = merge(shearrate_ID,undefined_ID,prm%totalNslip>0_pInt) - case ('resolvedstress') - outputID = merge(resolvedstress_ID,undefined_ID,prm%totalNslip>0_pInt) - case ('backstress') - outputID = merge(crss_back_ID,undefined_ID,prm%totalNslip>0_pInt) - case ('sense') - outputID = merge(sense_ID,undefined_ID,prm%totalNslip>0_pInt) - case ('chi0') - outputID = merge(chi0_ID,undefined_ID,prm%totalNslip>0_pInt) - case ('gamma0') - outputID = merge(gamma0_ID,undefined_ID,prm%totalNslip>0_pInt) + case ('resistance') + outputID = merge(crss_ID,undefined_ID,prm%totalNslip>0_pInt) + case ('accumulatedshear') + outputID = merge(accshear_ID,undefined_ID,prm%totalNslip>0_pInt) + case ('shearrate') + outputID = merge(shearrate_ID,undefined_ID,prm%totalNslip>0_pInt) + case ('resolvedstress') + outputID = merge(resolvedstress_ID,undefined_ID,prm%totalNslip>0_pInt) + case ('backstress') + outputID = merge(crss_back_ID,undefined_ID,prm%totalNslip>0_pInt) + case ('sense') + outputID = merge(sense_ID,undefined_ID,prm%totalNslip>0_pInt) + case ('chi0') + outputID = merge(chi0_ID,undefined_ID,prm%totalNslip>0_pInt) + case ('gamma0') + outputID = merge(gamma0_ID,undefined_ID,prm%totalNslip>0_pInt) - end select + end select - if (outputID /= undefined_ID) then - plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i) - plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = outputSize - prm%outputID = [prm%outputID , outputID] - endif + if (outputID /= undefined_ID) then + plastic_kinehardening_output(i,phase_plasticityInstance(p)) = outputs(i) + plastic_kinehardening_sizePostResult(i,phase_plasticityInstance(p)) = prm%totalNslip + prm%outputID = [prm%outputID , outputID] + endif - end do + enddo !-------------------------------------------------------------------------------------------------- ! allocate state arrays NipcMyPhase = count(material_phase == p) - sizeDotState = size(['crss ','crss_back', 'accshear ']) * prm%TotalNslip - sizeDeltaState = size(['sense ', 'chi0 ', 'gamma0' ]) * prm%TotalNslip + sizeDotState = size(['crss ','crss_back', 'accshear ']) * prm%totalNslip + sizeDeltaState = size(['sense ', 'chi0 ', 'gamma0' ]) * prm%totalNslip sizeState = sizeDotState + sizeDeltaState call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, & @@ -349,6 +347,7 @@ subroutine plastic_kinehardening_init plasticState(p)%state0 = plasticState(p)%state ! 
ToDo: this could be done centrally end associate + enddo end subroutine plastic_kinehardening_init @@ -380,7 +379,7 @@ pure subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) Lp = 0.0_pReal dLp_dMp = 0.0_pReal - associate(prm => param(instance), stt => state(instance)) + associate(prm => param(instance)) call kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) @@ -411,10 +410,11 @@ subroutine plastic_kinehardening_dotState(Mp,instance,of) integer(pInt) :: & i - real(pReal), dimension(param(instance)%totalNslip) :: & - gdot_pos,gdot_neg real(pReal) :: & sumGamma + real(pReal), dimension(param(instance)%totalNslip) :: & + gdot_pos,gdot_neg + associate(prm => param(instance), stt => state(instance), dot => dotState(instance)) @@ -526,8 +526,6 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) associate(prm => param(instance), stt => state(instance)) - call kinetics(Mp,instance,of,gdot_pos,gdot_neg) - outputsLoop: do o = 1_pInt,size(prm%outputID) select case(prm%outputID(o)) @@ -544,6 +542,7 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) case (accshear_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%accshear(:,of) case (shearrate_ID) + call kinetics(Mp,instance,of,gdot_pos,gdot_neg) postResults(c+1_pInt:c+prm%totalNslip) = gdot_pos+gdot_neg case (resolvedstress_ID) do i = 1_pInt, prm%totalNslip diff --git a/src/plastic_phenopowerlaw.f90 b/src/plastic_phenopowerlaw.f90 index abcb10bdb..b6387d88f 100644 --- a/src/plastic_phenopowerlaw.f90 +++ b/src/plastic_phenopowerlaw.f90 @@ -74,9 +74,6 @@ module plastic_phenopowerlaw outputID !< ID of each post result output end type !< container type for internal constitutive parameters - type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) - - type, private :: tPhenopowerlawState real(pReal), pointer, dimension(:,:) :: & xi_slip, & @@ -85,6 +82,8 @@ module plastic_phenopowerlaw gamma_twin end type + + type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance) type(tPhenopowerlawState), allocatable, dimension(:), private :: & dotState, & state @@ -233,9 +232,9 @@ subroutine plastic_phenopowerlaw_init prm%H_int = math_expand(prm%H_int, prm%Nslip) ! sanity checks - if (prm%gdot0_slip <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0_slip' - if (prm%a_slip <= 0.0_pReal) extmsg = trim(extmsg)//' a_slip' - if (prm%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' + if ( prm%gdot0_slip <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0_slip' + if ( prm%a_slip <= 0.0_pReal) extmsg = trim(extmsg)//' a_slip' + if ( prm%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' if (any(prm%xi_slip_0 <= 0.0_pReal)) extmsg = trim(extmsg)//' xi_slip_0' if (any(prm%xi_slip_sat < prm%xi_slip_0)) extmsg = trim(extmsg)//' xi_slip_sat' else slipActive @@ -342,8 +341,8 @@ subroutine plastic_phenopowerlaw_init !-------------------------------------------------------------------------------------------------- ! 
allocate state arrays NipcMyPhase = count(material_phase == p) - sizeDotState = size(['tau_slip ','gamma_slip']) * prm%TotalNslip & - + size(['tau_twin ','gamma_twin']) * prm%TotalNtwin + sizeDotState = size(['tau_slip ','gamma_slip']) * prm%totalNslip & + + size(['tau_twin ','gamma_twin']) * prm%totalNtwin sizeState = sizeDotState call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0_pInt, & @@ -466,7 +465,7 @@ subroutine plastic_phenopowerlaw_dotState(Mp,instance,of) left_SlipSlip,right_SlipSlip, & gdot_slip_pos,gdot_slip_neg - associate(prm => param(instance), stt => state(instance), dot => dotState(instance)) + associate(prm => param(instance), stt => state(instance), dot => dotState(instance)) sumGamma = sum(stt%gamma_slip(:,of)) sumF = sum(stt%gamma_twin(:,of)/prm%gamma_twin_char) From 996d686a8968adcfae69f5997412666d38926f65 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 7 Jan 2019 08:15:27 +0100 Subject: [PATCH 87/89] slip suffix not needed for slip only models --- src/plastic_disloUCLA.f90 | 151 ++++++++++++++--------------- src/plastic_kinematichardening.f90 | 45 +++++---- 2 files changed, 97 insertions(+), 99 deletions(-) diff --git a/src/plastic_disloUCLA.f90 b/src/plastic_disloUCLA.f90 index c9a885e68..9d8703277 100644 --- a/src/plastic_disloUCLA.f90 +++ b/src/plastic_disloUCLA.f90 @@ -63,8 +63,7 @@ module plastic_disloUCLA interaction_SlipSlip, & !< slip resistance from slip activity forestProjectionEdge real(pReal), allocatable, dimension(:,:,:) :: & - Schmid_slip, & - Schmid_twin, & + Schmid, & nonSchmid_pos, & nonSchmid_neg integer(pInt) :: & @@ -81,7 +80,7 @@ module plastic_disloUCLA real(pReal), pointer, dimension(:,:) :: & rhoEdge, & rhoEdgeDip, & - accshear_slip + accshear end type type, private :: tDisloUCLAdependentState @@ -214,16 +213,16 @@ subroutine plastic_disloUCLA_init() prm%Nslip = config%getInts('nslip',defaultVal=emptyIntArray) prm%totalNslip = sum(prm%Nslip) slipActive: if (prm%totalNslip > 0_pInt) then - prm%Schmid_slip = lattice_SchmidMatrix_slip(prm%Nslip,structure(1:3),& - config%getFloat('c/a',defaultVal=0.0_pReal)) + prm%Schmid = lattice_SchmidMatrix_slip(prm%Nslip,structure(1:3),& + config%getFloat('c/a',defaultVal=0.0_pReal)) if(structure=='bcc') then prm%nonSchmidCoeff = config%getFloats('nonschmid_coefficients',& defaultVal = emptyRealArray) prm%nonSchmid_pos = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,+1_pInt) prm%nonSchmid_neg = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,-1_pInt) else - prm%nonSchmid_pos = prm%Schmid_slip - prm%nonSchmid_neg = prm%Schmid_slip + prm%nonSchmid_pos = prm%Schmid + prm%nonSchmid_neg = prm%Schmid endif prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & config%getFloats('interaction_slipslip'), & @@ -373,8 +372,8 @@ subroutine plastic_disloUCLA_init() startIndex = endIndex + 1_pInt endIndex = endIndex + prm%totalNslip - stt%accshear_slip=>plasticState(p)%state(startIndex:endIndex,:) - dot%accshear_slip=>plasticState(p)%dotState(startIndex:endIndex,:) + stt%accshear=>plasticState(p)%state(startIndex:endIndex,:) + dot%accshear=>plasticState(p)%dotState(startIndex:endIndex,:) plasticState(p)%aTolState(startIndex:endIndex) = 1e6_pReal !ToDo: better make optional parameter ! 
global alias plasticState(p)%slipRate => plasticState(p)%dotState(startIndex:endIndex,:) @@ -415,21 +414,21 @@ pure subroutine plastic_disloUCLA_LpAndItsTangent(Lp,dLp_dMp,Mp,Temperature,inst integer(pInt) :: & i,k,l,m,n real(pReal), dimension(param(instance)%totalNslip) :: & - gdot_slip_pos,gdot_slip_neg, & - dgdot_dtauslip_pos,dgdot_dtauslip_neg + gdot_pos,gdot_neg, & + dgdot_dtau_pos,dgdot_dtau_neg Lp = 0.0_pReal dLp_dMp = 0.0_pReal associate(prm => param(instance)) - call kinetics(Mp,Temperature,instance,of,gdot_slip_pos,gdot_slip_neg,dgdot_dtauslip_pos,dgdot_dtauslip_neg) + call kinetics(Mp,Temperature,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) do i = 1_pInt, prm%totalNslip - Lp = Lp + (gdot_slip_pos(i)+gdot_slip_neg(i))*prm%Schmid_slip(1:3,1:3,i) + Lp = Lp + (gdot_pos(i)+gdot_neg(i))*prm%Schmid(1:3,1:3,i) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & dLp_dMp(k,l,m,n) = dLp_dMp(k,l,m,n) & - + dgdot_dtauslip_pos(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_pos(m,n,i) & - + dgdot_dtauslip_neg(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_neg(m,n,i) + + dgdot_dtau_pos(i) * prm%Schmid(k,l,i) * prm%nonSchmid_pos(m,n,i) & + + dgdot_dtau_neg(i) * prm%Schmid(k,l,i) * prm%nonSchmid_neg(m,n,i) enddo end associate @@ -460,29 +459,29 @@ subroutine plastic_disloUCLA_dotState(Mp,Temperature,instance,of) real(pReal) :: & VacancyDiffusion real(pReal), dimension(param(instance)%totalNslip) :: & - gdot_slip_pos, gdot_slip_neg,& - tau_slip_pos,& - tau_slip_neg, & + gdot_pos, gdot_neg,& + tau_pos,& + tau_neg, & DotRhoDipFormation, ClimbVelocity, EdgeDipDistance, & DotRhoEdgeDipClimb associate(prm => param(instance), stt => state(instance),dot => dotState(instance), dst => dependentState(instance)) call kinetics(Mp,Temperature,instance,of,& - gdot_slip_pos,gdot_slip_neg, & - tau_slip_pos1 = tau_slip_pos,tau_slip_neg1 = tau_slip_neg) + gdot_pos,gdot_neg, & + tau_pos1 = tau_pos,tau_neg1 = tau_neg) - dot%accshear_slip(:,of) = (gdot_slip_pos+gdot_slip_neg) ! ToDo: needs to be abs + dot%accshear(:,of) = (gdot_pos+gdot_neg) ! ToDo: needs to be abs VacancyDiffusion = prm%D0*exp(-prm%Qsd/(kB*Temperature)) - where(dEq0(tau_slip_pos)) ! ToDo: use avg of pos and neg + where(dEq0(tau_pos)) ! ToDo: use avg of pos and neg DotRhoDipFormation = 0.0_pReal DotRhoEdgeDipClimb = 0.0_pReal else where - EdgeDipDistance = math_clip((3.0_pReal*prm%mu*prm%burgers)/(16.0_pReal*PI*abs(tau_slip_pos)), & + EdgeDipDistance = math_clip((3.0_pReal*prm%mu*prm%burgers)/(16.0_pReal*PI*abs(tau_pos)), & prm%minDipDistance, & ! lower limit dst%mfp(:,of)) ! upper limit - DotRhoDipFormation = merge(((2.0_pReal*EdgeDipDistance)/prm%burgers)* stt%rhoEdge(:,of)*abs(dot%accshear_slip(:,of)), & ! ToDo: ignore region of spontaneous annihilation + DotRhoDipFormation = merge(((2.0_pReal*EdgeDipDistance)/prm%burgers)* stt%rhoEdge(:,of)*abs(dot%accshear(:,of)), & ! ToDo: ignore region of spontaneous annihilation 0.0_pReal, & prm%dipoleformation) ClimbVelocity = (3.0_pReal*prm%mu*VacancyDiffusion*prm%atomicVolume/(2.0_pReal*pi*kB*Temperature)) & @@ -490,11 +489,11 @@ subroutine plastic_disloUCLA_dotState(Mp,Temperature,instance,of) DotRhoEdgeDipClimb = (4.0_pReal*ClimbVelocity*stt%rhoEdgeDip(:,of))/(EdgeDipDistance-prm%minDipDistance) ! ToDo: Discuss with Franz: Stress dependency? end where - dot%rhoEdge(:,of) = abs(dot%accshear_slip(:,of))/(prm%burgers*dst%mfp(:,of)) & ! multiplication + dot%rhoEdge(:,of) = abs(dot%accshear(:,of))/(prm%burgers*dst%mfp(:,of)) & ! 
multiplication - DotRhoDipFormation & - - (2.0_pReal*prm%minDipDistance)/prm%burgers*stt%rhoEdge(:,of)*abs(dot%accshear_slip(:,of)) !* Spontaneous annihilation of 2 single edge dislocations + - (2.0_pReal*prm%minDipDistance)/prm%burgers*stt%rhoEdge(:,of)*abs(dot%accshear(:,of)) !* Spontaneous annihilation of 2 single edge dislocations dot%rhoEdgeDip(:,of) = DotRhoDipFormation & - - (2.0_pReal*prm%minDipDistance)/prm%burgers*stt%rhoEdgeDip(:,of)*abs(dot%accshear_slip(:,of)) & !* Spontaneous annihilation of a single edge dislocation with a dipole constituent + - (2.0_pReal*prm%minDipDistance)/prm%burgers*stt%rhoEdgeDip(:,of)*abs(dot%accshear(:,of)) & !* Spontaneous annihilation of a single edge dislocation with a dipole constituent - DotRhoEdgeDipClimb end associate @@ -558,7 +557,7 @@ function plastic_disloUCLA_postResults(Mp,Temperature,instance,of) result(postRe integer(pInt) :: & o,c,i real(pReal), dimension(param(instance)%totalNslip) :: & - gdot_slip_pos,gdot_slip_neg + gdot_pos,gdot_neg c = 0_pInt @@ -572,10 +571,10 @@ function plastic_disloUCLA_postResults(Mp,Temperature,instance,of) result(postRe case (rhoDip_ID) postResults(c+1_pInt:c+prm%totalNslip) = stt%rhoEdgeDip(1_pInt:prm%totalNslip,of) case (shearrate_ID) - call kinetics(Mp,Temperature,instance,of,gdot_slip_pos,gdot_slip_neg) - postResults(c+1:c+prm%totalNslip) = gdot_slip_pos + gdot_slip_neg + call kinetics(Mp,Temperature,instance,of,gdot_pos,gdot_neg) + postResults(c+1:c+prm%totalNslip) = gdot_pos + gdot_neg case (accumulatedshear_ID) - postResults(c+1_pInt:c+prm%totalNslip) = stt%accshear_slip(1_pInt:prm%totalNslip, of) + postResults(c+1_pInt:c+prm%totalNslip) = stt%accshear(1_pInt:prm%totalNslip, of) case (mfp_ID) postResults(c+1_pInt:c+prm%totalNslip) = dst%mfp(1_pInt:prm%totalNslip, of) case (thresholdstress_ID) @@ -610,7 +609,7 @@ end function plastic_disloUCLA_postResults ! have the optional arguments at the end !-------------------------------------------------------------------------------------------------- pure subroutine kinetics(Mp,Temperature,instance,of, & - gdot_slip_pos,gdot_slip_neg,dgdot_dtauslip_pos,dgdot_dtauslip_neg,tau_slip_pos1,tau_slip_neg1) + gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg,tau_pos1,tau_neg1) use prec, only: & tol_math_check, & dEq, dNeq0 @@ -628,119 +627,119 @@ pure subroutine kinetics(Mp,Temperature,instance,of, & of real(pReal), intent(out), dimension(param(instance)%totalNslip) :: & - gdot_slip_pos, & - gdot_slip_neg + gdot_pos, & + gdot_neg real(pReal), intent(out), optional, dimension(param(instance)%totalNslip) :: & - dgdot_dtauslip_pos, & - dgdot_dtauslip_neg, & - tau_slip_pos1, & - tau_slip_neg1 + dgdot_dtau_pos, & + dgdot_dtau_neg, & + tau_pos1, & + tau_neg1 real(pReal), dimension(param(instance)%totalNslip) :: & StressRatio, & StressRatio_p,StressRatio_pminus1, & - dvel_slip, vel_slip, & - tau_slip_pos,tau_slip_neg, & + dvel, vel, & + tau_pos,tau_neg, & needsGoodName ! ToDo: @Karo: any idea? 
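! Aside (illustrative sketch, not part of the patch): the significantPositiveTau
! and significantNegativeTau blocks below share one guarded, vectorised pattern:
! kinetics are evaluated only where |tau| exceeds the threshold stress by
! tol_math_check, and the else-where branch zeroes all inactive systems. A
! minimal self-contained analogue, with a plain power law standing in for the
! kink-pair velocity purely for brevity:
program guarded_kinetics_sketch
  implicit none
  integer,     parameter :: pReal = selected_real_kind(15)
  real(pReal), parameter :: tol   = 1.0e-12_pReal
  real(pReal), dimension(4) :: tau, threshold, gdot

  tau       = [5.0_pReal, -40.0_pReal, 80.0_pReal, 0.0_pReal]*1.0e6_pReal  ! resolved stresses
  threshold = 30.0e6_pReal                                                 ! uniform threshold stress

  where(abs(tau)-threshold > tol)                                          ! active systems only
    gdot = 1.0e-3_pReal * sign((abs(tau)/threshold)**20, tau)
  else where
    gdot = 0.0_pReal                                                       ! inactive systems do not shear
  end where

  print *, gdot                                                            ! zero on systems 1 and 4
end program guarded_kinetics_sketch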
integer(pInt) :: j associate(prm => param(instance), stt => state(instance), dst => dependentState(instance)) do j = 1_pInt, prm%totalNslip - tau_slip_pos(j) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,j)) - tau_slip_neg(j) = math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,j)) + tau_pos(j) = math_mul33xx33(Mp,prm%nonSchmid_pos(1:3,1:3,j)) + tau_neg(j) = math_mul33xx33(Mp,prm%nonSchmid_neg(1:3,1:3,j)) enddo - if (present(tau_slip_pos1)) tau_slip_pos1 = tau_slip_pos - if (present(tau_slip_neg1)) tau_slip_neg1 = tau_slip_neg + if (present(tau_pos1)) tau_pos1 = tau_pos + if (present(tau_neg1)) tau_neg1 = tau_neg associate(BoltzmannRatio => prm%H0kp/(kB*Temperature), & DotGamma0 => stt%rhoEdge(:,of)*prm%burgers*prm%v0, & effectiveLength => dst%mfp(:,of) - prm%w) - significantPositiveTau: where(abs(tau_slip_pos)-dst%threshold_stress(:,of) > tol_math_check) - StressRatio = (abs(tau_slip_pos)-dst%threshold_stress(:,of))/prm%tau0 + significantPositiveTau: where(abs(tau_pos)-dst%threshold_stress(:,of) > tol_math_check) + StressRatio = (abs(tau_pos)-dst%threshold_stress(:,of))/prm%tau0 StressRatio_p = StressRatio** prm%p StressRatio_pminus1 = StressRatio**(prm%p-1.0_pReal) needsGoodName = exp(-BoltzmannRatio*(1-StressRatio_p) ** prm%q) - vel_slip = 2.0_pReal*prm%burgers * prm%kink_height * prm%omega & - * effectiveLength * tau_slip_pos * needsGoodName & - / ( 2.0_pReal*(prm%burgers**2.0_pReal)*tau_slip_pos & + vel = 2.0_pReal*prm%burgers * prm%kink_height * prm%omega & + * effectiveLength * tau_pos * needsGoodName & + / ( 2.0_pReal*(prm%burgers**2.0_pReal)*tau_pos & + prm%omega * prm%B * effectiveLength**2.0_pReal* needsGoodName & ) - gdot_slip_pos = DotGamma0 * sign(vel_slip,tau_slip_pos) * 0.5_pReal + gdot_pos = DotGamma0 * sign(vel,tau_pos) * 0.5_pReal else where significantPositiveTau - gdot_slip_pos = 0.0_pReal + gdot_pos = 0.0_pReal end where significantPositiveTau - if (present(dgdot_dtauslip_pos)) then - significantPositiveTau2: where(abs(tau_slip_pos)-dst%threshold_stress(:,of) > tol_math_check) - dvel_slip = 2.0_pReal*prm%burgers * prm%kink_height * prm%omega* effectiveLength & + if (present(dgdot_dtau_pos)) then + significantPositiveTau2: where(abs(tau_pos)-dst%threshold_stress(:,of) > tol_math_check) + dvel = 2.0_pReal*prm%burgers * prm%kink_height * prm%omega* effectiveLength & * ( & - (needsGoodName + tau_slip_pos * abs(needsGoodName)*BoltzmannRatio*prm%p & + (needsGoodName + tau_pos * abs(needsGoodName)*BoltzmannRatio*prm%p & * prm%q/prm%tau0 & * StressRatio_pminus1*(1-StressRatio_p)**(prm%q-1.0_pReal) & ) & - * ( 2.0_pReal*(prm%burgers**2.0_pReal)*tau_slip_pos & + * ( 2.0_pReal*(prm%burgers**2.0_pReal)*tau_pos & + prm%omega * prm%B* effectiveLength **2.0_pReal* needsGoodName & ) & - - tau_slip_pos * needsGoodName * (2.0_pReal*prm%burgers**2.0_pReal & + - tau_pos * needsGoodName * (2.0_pReal*prm%burgers**2.0_pReal & + prm%omega * prm%B *effectiveLength **2.0_pReal& * (abs(needsGoodName)*BoltzmannRatio*prm%p *prm%q/prm%tau0 & *StressRatio_pminus1*(1-StressRatio_p)**(prm%q-1.0_pReal) )& ) & ) & - /(2.0_pReal*prm%burgers**2.0_pReal*tau_slip_pos & + /(2.0_pReal*prm%burgers**2.0_pReal*tau_pos & + prm%omega * prm%B* effectiveLength**2.0_pReal* needsGoodName )**2.0_pReal - dgdot_dtauslip_pos = DotGamma0 * dvel_slip* 0.5_pReal + dgdot_dtau_pos = DotGamma0 * dvel* 0.5_pReal else where significantPositiveTau2 - dgdot_dtauslip_pos = 0.0_pReal + dgdot_dtau_pos = 0.0_pReal end where significantPositiveTau2 endif - significantNegativeTau: where(abs(tau_slip_neg)-dst%threshold_stress(:,of) > 
tol_math_check) - StressRatio = (abs(tau_slip_neg)-dst%threshold_stress(:,of))/prm%tau0 + significantNegativeTau: where(abs(tau_neg)-dst%threshold_stress(:,of) > tol_math_check) + StressRatio = (abs(tau_neg)-dst%threshold_stress(:,of))/prm%tau0 StressRatio_p = StressRatio** prm%p StressRatio_pminus1 = StressRatio**(prm%p-1.0_pReal) needsGoodName = exp(-BoltzmannRatio*(1-StressRatio_p) ** prm%q) - vel_slip = 2.0_pReal*prm%burgers * prm%kink_height * prm%omega & - * effectiveLength * tau_slip_neg * needsGoodName & - / ( 2.0_pReal*(prm%burgers**2.0_pReal)*tau_slip_neg & + vel = 2.0_pReal*prm%burgers * prm%kink_height * prm%omega & + * effectiveLength * tau_neg * needsGoodName & + / ( 2.0_pReal*(prm%burgers**2.0_pReal)*tau_neg & + prm%omega * prm%B * effectiveLength**2.0_pReal* needsGoodName & ) - gdot_slip_neg = DotGamma0 * sign(vel_slip,tau_slip_neg) * 0.5_pReal + gdot_neg = DotGamma0 * sign(vel,tau_neg) * 0.5_pReal else where significantNegativeTau - gdot_slip_neg = 0.0_pReal + gdot_neg = 0.0_pReal end where significantNegativeTau - if (present(dgdot_dtauslip_neg)) then - significantNegativeTau2: where(abs(tau_slip_neg)-dst%threshold_stress(:,of) > tol_math_check) - dvel_slip = 2.0_pReal*prm%burgers * prm%kink_height * prm%omega* effectiveLength & + if (present(dgdot_dtau_neg)) then + significantNegativeTau2: where(abs(tau_neg)-dst%threshold_stress(:,of) > tol_math_check) + dvel = 2.0_pReal*prm%burgers * prm%kink_height * prm%omega* effectiveLength & * ( & - (needsGoodName + tau_slip_neg * abs(needsGoodName)*BoltzmannRatio*prm%p & + (needsGoodName + tau_neg * abs(needsGoodName)*BoltzmannRatio*prm%p & * prm%q/prm%tau0 & * StressRatio_pminus1*(1-StressRatio_p)**(prm%q-1.0_pReal) & ) & - * ( 2.0_pReal*(prm%burgers**2.0_pReal)*tau_slip_neg & + * ( 2.0_pReal*(prm%burgers**2.0_pReal)*tau_neg & + prm%omega * prm%B* effectiveLength **2.0_pReal* needsGoodName & ) & - - tau_slip_neg * needsGoodName * (2.0_pReal*prm%burgers**2.0_pReal & + - tau_neg * needsGoodName * (2.0_pReal*prm%burgers**2.0_pReal & + prm%omega * prm%B *effectiveLength **2.0_pReal& * (abs(needsGoodName)*BoltzmannRatio*prm%p *prm%q/prm%tau0 & *StressRatio_pminus1*(1-StressRatio_p)**(prm%q-1.0_pReal) )& ) & ) & - /(2.0_pReal*prm%burgers**2.0_pReal*tau_slip_neg & + /(2.0_pReal*prm%burgers**2.0_pReal*tau_neg & + prm%omega * prm%B* effectiveLength**2.0_pReal* needsGoodName )**2.0_pReal - dgdot_dtauslip_neg = DotGamma0 * dvel_slip * 0.5_pReal + dgdot_dtau_neg = DotGamma0 * dvel * 0.5_pReal else where significantNegativeTau2 - dgdot_dtauslip_neg = 0.0_pReal + dgdot_dtau_neg = 0.0_pReal end where significantNegativeTau2 end if end associate diff --git a/src/plastic_kinematichardening.f90 b/src/plastic_kinematichardening.f90 index 8576d5425..fe7fa5ef1 100644 --- a/src/plastic_kinematichardening.f90 +++ b/src/plastic_kinematichardening.f90 @@ -32,24 +32,23 @@ module plastic_kinehardening type, private :: tParameters real(pReal) :: & - gdot0, & !< reference shear strain rate for slip (input parameter) - n_slip, & !< stress exponent for slip (input parameter) + gdot0, & !< reference shear strain rate for slip + n, & !< stress exponent for slip aTolResistance, & aTolShear real(pReal), allocatable, dimension(:) :: & - crss0, & !< initial critical shear stress for slip (input parameter, per family) + crss0, & !< initial critical shear stress for slip theta0, & !< initial hardening rate of forward stress for each slip - theta1, & !< asymptotic hardening rate of forward stress for each slip > - theta0_b, & !< initial hardening rate of back stress 
for each slip > - theta1_b, & !< asymptotic hardening rate of back stress for each slip > + theta1, & !< asymptotic hardening rate of forward stress for each slip + theta0_b, & !< initial hardening rate of back stress for each slip + theta1_b, & !< asymptotic hardening rate of back stress for each slip tau1, & tau1_b, & nonSchmidCoeff real(pReal), allocatable, dimension(:,:) :: & interaction_slipslip !< slip resistance from slip activity real(pReal), allocatable, dimension(:,:,:) :: & - Schmid_slip, & - Schmid_twin, & + Schmid, & nonSchmid_pos, & nonSchmid_neg integer(pInt) :: & @@ -203,16 +202,16 @@ subroutine plastic_kinehardening_init prm%Nslip = config%getInts('nslip',defaultVal=emptyIntArray) prm%totalNslip = sum(prm%Nslip) slipActive: if (prm%totalNslip > 0_pInt) then - prm%Schmid_slip = lattice_SchmidMatrix_slip(prm%Nslip,structure(1:3),& - config%getFloat('c/a',defaultVal=0.0_pReal)) + prm%Schmid = lattice_SchmidMatrix_slip(prm%Nslip,structure(1:3),& + config%getFloat('c/a',defaultVal=0.0_pReal)) if(structure=='bcc') then prm%nonSchmidCoeff = config%getFloats('nonschmid_coefficients',& defaultVal = emptyRealArray) prm%nonSchmid_pos = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,+1_pInt) prm%nonSchmid_neg = lattice_nonSchmidMatrix(prm%Nslip,prm%nonSchmidCoeff,-1_pInt) else - prm%nonSchmid_pos = prm%Schmid_slip - prm%nonSchmid_neg = prm%Schmid_slip + prm%nonSchmid_pos = prm%Schmid + prm%nonSchmid_neg = prm%Schmid endif prm%interaction_SlipSlip = lattice_interaction_SlipSlip(prm%Nslip, & config%getFloats('interaction_slipslip'), & @@ -227,7 +226,7 @@ subroutine plastic_kinehardening_init prm%theta1_b = config%getFloats('theta1_b', requiredShape=shape(prm%Nslip)) prm%gdot0 = config%getFloat('gdot0') - prm%n_slip = config%getFloat('n_slip') + prm%n = config%getFloat('n_slip') ! expand: family => system prm%crss0 = math_expand(prm%crss0, prm%Nslip) @@ -242,8 +241,8 @@ subroutine plastic_kinehardening_init !-------------------------------------------------------------------------------------------------- ! 
sanity checks - if ( prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' - if ( prm%n_slip <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' + if ( prm%gdot0 <= 0.0_pReal) extmsg = trim(extmsg)//' gdot0' + if ( prm%n <= 0.0_pReal) extmsg = trim(extmsg)//' n_slip' if (any(prm%crss0 <= 0.0_pReal)) extmsg = trim(extmsg)//' crss0' if (any(prm%tau1 <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1' if (any(prm%tau1_b <= 0.0_pReal)) extmsg = trim(extmsg)//' tau1_b' @@ -384,11 +383,11 @@ pure subroutine plastic_kinehardening_LpAndItsTangent(Lp,dLp_dMp,Mp,instance,of) call kinetics(Mp,instance,of,gdot_pos,gdot_neg,dgdot_dtau_pos,dgdot_dtau_neg) do i = 1_pInt, prm%totalNslip - Lp = Lp + (gdot_pos(i)+gdot_neg(i))*prm%Schmid_slip(1:3,1:3,i) + Lp = Lp + (gdot_pos(i)+gdot_neg(i))*prm%Schmid(1:3,1:3,i) forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) & dLp_dMp(k,l,m,n) = dLp_dMp(k,l,m,n) & - + dgdot_dtau_pos(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_pos(m,n,i) & - + dgdot_dtau_neg(i) * prm%Schmid_slip(k,l,i) * prm%nonSchmid_neg(m,n,i) + + dgdot_dtau_pos(i) * prm%Schmid(k,l,i) * prm%nonSchmid_pos(m,n,i) & + + dgdot_dtau_neg(i) * prm%Schmid(k,l,i) * prm%nonSchmid_neg(m,n,i) enddo end associate @@ -546,7 +545,7 @@ function plastic_kinehardening_postResults(Mp,instance,of) result(postResults) postResults(c+1_pInt:c+prm%totalNslip) = gdot_pos+gdot_neg case (resolvedstress_ID) do i = 1_pInt, prm%totalNslip - postResults(c+i) = math_mul33xx33(Mp,prm%Schmid_slip(1:3,1:3,i)) + postResults(c+i) = math_mul33xx33(Mp,prm%Schmid(1:3,1:3,i)) enddo end select @@ -605,28 +604,28 @@ pure subroutine kinetics(Mp,instance,of, & where(dNeq0(tau_pos)) gdot_pos = prm%gdot0 * merge(0.5_pReal,1.0_pReal, nonSchmidActive) & ! 1/2 if non-Schmid active - * sign(abs(tau_pos/stt%crss(:,of))**prm%n_slip, tau_pos) + * sign(abs(tau_pos/stt%crss(:,of))**prm%n, tau_pos) else where gdot_pos = 0.0_pReal end where where(dNeq0(tau_neg)) gdot_neg = prm%gdot0 * 0.5_pReal & ! only used if non-Schmid active, always 1/2 - * sign(abs(tau_neg/stt%crss(:,of))**prm%n_slip, tau_neg) + * sign(abs(tau_neg/stt%crss(:,of))**prm%n, tau_neg) else where gdot_neg = 0.0_pReal end where if (present(dgdot_dtau_pos)) then where(dNeq0(gdot_pos)) - dgdot_dtau_pos = gdot_pos*prm%n_slip/tau_pos + dgdot_dtau_pos = gdot_pos*prm%n/tau_pos else where dgdot_dtau_pos = 0.0_pReal end where endif if (present(dgdot_dtau_neg)) then where(dNeq0(gdot_neg)) - dgdot_dtau_neg = gdot_neg*prm%n_slip/tau_neg + dgdot_dtau_neg = gdot_neg*prm%n/tau_neg else where dgdot_dtau_neg = 0.0_pReal end where From 13c64d79a5bbd500aceca91cbea4ff4c1993970b Mon Sep 17 00:00:00 2001 From: Test User Date: Mon, 7 Jan 2019 21:51:08 +0100 Subject: [PATCH 88/89] [skip ci] updated version information after successful test of v2.0.2-1347-gd0a06607 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 6efd0b994..f9287287d 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-1291-g19df6f8a +v2.0.2-1347-gd0a06607 From 8f18581b91ad244194b73043fff2364bc87edbb2 Mon Sep 17 00:00:00 2001 From: Test User Date: Tue, 8 Jan 2019 05:25:01 +0100 Subject: [PATCH 89/89] [skip ci] updated version information after successful test of v2.0.2-1389-g070952db --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index f9287287d..87227cb88 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-1347-gd0a06607 +v2.0.2-1389-g070952db
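! Closing aside (illustrative sketch, not part of the patches): throughout the
! refactoring above, every plasticity module wires its named state (crss,
! crss_back, accshear, ...) as pointer views into one flat plasticState array,
! so state integration stays generic while the constitutive routines keep
! meaningful names. A minimal self-contained sketch of that aliasing pattern,
! with all sizes invented for the example:
program state_alias_sketch
  implicit none
  integer, parameter :: pReal = selected_real_kind(15)
  integer, parameter :: Nslip = 12, NipcMyPhase = 4
  real(pReal), dimension(:,:), allocatable, target :: state
  real(pReal), dimension(:,:), pointer             :: crss, crss_back, accshear
  integer :: startIndex, endIndex

  allocate(state(3*Nslip,NipcMyPhase))
  state = 0.0_pReal

  startIndex = 1;            endIndex = Nslip
  crss      => state(startIndex:endIndex,:)          ! critical resolved shear stress
  startIndex = endIndex + 1; endIndex = endIndex + Nslip
  crss_back => state(startIndex:endIndex,:)          ! back stress
  startIndex = endIndex + 1; endIndex = endIndex + Nslip
  accshear  => state(startIndex:endIndex,:)          ! accumulated (absolute) shear

  crss = 31.0e6_pReal                                ! writing through the alias ...
  print *, state(1,1), state(Nslip+1,1)              ! ... lands in the flat array
end program state_alias_sketch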