parent f36db86b3c
commit 12e7922faf
@@ -10,8 +10,9 @@ module HDF5_utilities
 #include <petsc/finclude/petscsys.h>
 use PETScSys
 #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
-use MPI
+use MPI_f08
 #endif
+use MPI, only: MPI_INFO_NULL_F90 => MPI_INFO_NULL
 #endif

 use prec
@@ -162,9 +163,9 @@ integer(HID_T) function HDF5_openFile(fileName,mode,parallel)
 character, intent(in), optional :: mode
 logical, intent(in), optional :: parallel

 character :: m
 integer(HID_T) :: plist_id
 integer :: hdferr


 if (present(mode)) then
@@ -178,9 +179,9 @@ integer(HID_T) function HDF5_openFile(fileName,mode,parallel)

 #ifdef PETSC
 if (present(parallel)) then
-if (parallel) call H5Pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
+if (parallel) call H5Pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL_F90, hdferr)
 else
-call H5Pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
+call H5Pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL_F90, hdferr)
 end if
 if(hdferr < 0) error stop 'HDF5 error'
 #endif
@@ -1860,7 +1861,7 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
 globalShape !< shape of the dataset (all processes)
 integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id

-integer, dimension(worldsize) :: &
+integer(MPI_INTEGER_KIND), dimension(worldsize) :: &
 readSize !< contribution of all processes
 integer :: hdferr
 integer(MPI_INTEGER_KIND) :: err_MPI
@@ -1871,13 +1872,13 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
 if(hdferr < 0) error stop 'HDF5 error'

 !--------------------------------------------------------------------------------------------------
-readSize = 0
-readSize(worldrank+1) = int(localShape(ubound(localShape,1)))
+readSize = 0_MPI_INTEGER_KIND
+readSize(worldrank+1) = int(localShape(ubound(localShape,1)),MPI_INTEGER_KIND)
 #ifdef PETSC
 if (parallel) then
 call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
 if(hdferr < 0) error stop 'HDF5 error'
-call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,err_MPI) ! get total output size over each process
+call MPI_Allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get total output size over each process
 if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 end if
 #endif
@@ -1954,8 +1955,8 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
 totalShape !< shape of the dataset (all processes)
 integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id

-integer, dimension(worldsize) :: writeSize !< contribution of all processes
+integer(MPI_INTEGER_KIND), dimension(worldsize) :: writeSize !< contribution of all processes
 integer(HID_T) :: dcpl
 integer :: hdferr
 integer(MPI_INTEGER_KIND) :: err_MPI
 integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T
@@ -1974,11 +1975,11 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &

 !--------------------------------------------------------------------------------------------------
 ! determine the global data layout among all processes
-writeSize = 0
-writeSize(worldrank+1) = int(myShape(ubound(myShape,1)))
+writeSize = 0_MPI_INTEGER_KIND
+writeSize(worldrank+1) = int(myShape(ubound(myShape,1)),MPI_INTEGER_KIND)
 #ifdef PETSC
 if (parallel) then
-call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,err_MPI) ! get total output size over each process
+call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get total output size over each process
 if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 end if
 #endif

@@ -27,7 +27,7 @@ module function thermalexpansion_init(kinematics_length) result(myKinematics)
 integer, intent(in) :: kinematics_length
 logical, dimension(:,:), allocatable :: myKinematics

-integer :: Ninstances,p,i,k
+integer :: Ninstances, p, k
 class(tNode), pointer :: &
 phases, &
 phase, &
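
For readers unfamiliar with the use-renaming applied in the first hunk, here is a minimal, hypothetical sketch; it is not taken from the repository, and the program name and print statement are illustrative only. It assumes an MPI installation that ships both the legacy mpi module and the Fortran 2008 mpi_f08 module, built with a wrapper such as mpifort. MPI_INFO_NULL from mpi_f08 is a derived-type handle (type(MPI_Info)), while the HDF5 Fortran routine H5Pset_fapl_mpio_f still takes a plain integer info argument, so the integer constant from the legacy module is imported under the new name MPI_INFO_NULL_F90.

! Minimal sketch, not part of the commit.
! Assumes an MPI library providing both the legacy 'mpi' and the 'mpi_f08' modules.
program info_handle_demo
  use mpi_f08                                        ! modern bindings: handles are derived types
  use mpi, only: MPI_INFO_NULL_F90 => MPI_INFO_NULL  ! legacy integer handle, renamed to avoid a name clash

  implicit none

  call MPI_Init()                                    ! ierror argument is optional in mpi_f08

  ! MPI_INFO_NULL (from mpi_f08) is type(MPI_Info); MPI_INFO_NULL_F90 is a plain
  ! integer and can be passed to interfaces that still expect integer handles,
  ! such as HDF5's H5Pset_fapl_mpio_f.
  print '(a,i0)', 'integer MPI_INFO_NULL handle: ', MPI_INFO_NULL_F90

  call MPI_Finalize()
end program info_handle_demo

The readSize/writeSize changes follow the same theme: the buffers and the literals assigned to them get the explicit kind MPI_INTEGER_KIND, presumably so that the arguments passed to MPI_Allreduce match the integer kind the MPI interfaces expect.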