fix bug in MPI output

Pratheek Shanthraj 2016-04-12 11:05:01 +02:00
parent c8b3f08743
commit 8b27de7d16
1 changed file with 1 addition and 1 deletion

@@ -427,7 +427,7 @@ program DAMASK_spectral
 ! prepare MPI parallel out (including opening of file)
 allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
 outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
-call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
+call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
 if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce')
 call MPI_file_open(PETSC_COMM_WORLD, &
   trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', &
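
Background on the fix: outputSize is declared with kind MPI_OFFSET_KIND (an 8-byte integer on common platforms), but the allreduce previously declared its elements as MPI_INT (4 bytes), so the datatype did not match the buffer and the gathered sizes were corrupted. Below is a minimal, self-contained sketch of the corrected pattern; it is not DAMASK code, and it uses MPI_INTEGER8 as a portable Fortran stand-in for the 8-byte datatype where the commit itself switches to MPI_LONG.

! Minimal sketch (not DAMASK code): every rank contributes its local output
! size as an INTEGER(MPI_OFFSET_KIND), and an in-place allreduce with MPI_SUM
! gives each rank the full per-rank size table for computing file offsets.
program mpi_output_size_sketch
  use mpi
  implicit none
  integer :: ierr, worldrank, worldsize
  integer(MPI_OFFSET_KIND), allocatable :: outputSize(:)

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, worldrank, ierr)
  call MPI_Comm_size(MPI_COMM_WORLD, worldsize, ierr)

  allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
  outputSize(worldrank+1) = 1000_MPI_OFFSET_KIND * (worldrank + 1)   ! stand-in for the local result size

  ! The buffer elements are 8-byte integers, so the declared MPI datatype must
  ! be 8 bytes wide as well; MPI_INT (4 bytes) silently mismatches the buffer.
  ! MPI_INTEGER8 is used here; the commit itself uses MPI_LONG.
  call MPI_Allreduce(MPI_IN_PLACE, outputSize, worldsize, MPI_INTEGER8, &
                     MPI_SUM, MPI_COMM_WORLD, ierr)

  if (worldrank == 0) print *, 'per-rank output sizes:', outputSize
  call MPI_Finalize(ierr)
end program mpi_output_size_sketch

On MPI 2.2 and newer, the predefined MPI_OFFSET datatype matches INTEGER(MPI_OFFSET_KIND) directly and avoids assuming an 8-byte width.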