diff --git a/src/grid/grid_mech_FEM.f90 b/src/grid/grid_mech_FEM.f90
index 4bf2c6658..2980eb65d 100644
--- a/src/grid/grid_mech_FEM.f90
+++ b/src/grid/grid_mech_FEM.f90
@@ -235,10 +235,18 @@ subroutine grid_mechanical_FEM_init
   fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
   groupHandle = HDF5_openGroup(fileHandle,'solver')
 
-  call HDF5_read(groupHandle,P_aim,        'P_aim')
-  call HDF5_read(groupHandle,F_aim,        'F_aim')
-  call HDF5_read(groupHandle,F_aim_lastInc,'F_aim_lastInc')
-  call HDF5_read(groupHandle,F_aimDot,     'F_aimDot')
+  call HDF5_read(groupHandle,P_aim,        'P_aim',.false.)
+  call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aim,        'F_aim',.false.)
+  call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aim_lastInc,'F_aim_lastInc',.false.)
+  call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aimDot,     'F_aimDot',.false.)
+  call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
   call HDF5_read(groupHandle,F,            'F')
   call HDF5_read(groupHandle,F_lastInc,    'F_lastInc')
   call HDF5_read(groupHandle,u_current,    'u')
@@ -262,7 +270,11 @@ subroutine grid_mechanical_FEM_init
   restartRead2: if (interface_restartInc > 0) then
     print'(a,i0,a)', ' reading more restart data of increment ', interface_restartInc, ' from file'
-    call HDF5_read(groupHandle,C_volAvg,       'C_volAvg')
-    call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc')
+    call HDF5_read(groupHandle,C_volAvg,       'C_volAvg',.false.)
+    call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+    if(ierr /=0) error stop 'MPI error'
+    call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc',.false.)
+    call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+    if(ierr /=0) error stop 'MPI error'
     call HDF5_closeGroup(groupHandle)
     call HDF5_closeFile(fileHandle)
 
diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90
index 8249a2503..b2d84a66f 100644
--- a/src/grid/grid_mech_spectral_basic.f90
+++ b/src/grid/grid_mech_spectral_basic.f90
@@ -183,10 +183,18 @@ subroutine grid_mechanical_spectral_basic_init
   fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
   groupHandle = HDF5_openGroup(fileHandle,'solver')
 
-  call HDF5_read(groupHandle,P_aim,        'P_aim')
-  call HDF5_read(groupHandle,F_aim,        'F_aim')
-  call HDF5_read(groupHandle,F_aim_lastInc,'F_aim_lastInc')
-  call HDF5_read(groupHandle,F_aimDot,     'F_aimDot')
+  call HDF5_read(groupHandle,P_aim,        'P_aim',.false.)
+  call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aim,        'F_aim',.false.)
+  call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aim_lastInc,'F_aim_lastInc',.false.)
+  call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aimDot,     'F_aimDot',.false.)
+  call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
   call HDF5_read(groupHandle,F,        'F')
   call HDF5_read(groupHandle,F_lastInc,'F_lastInc')
 
@@ -204,8 +212,12 @@ subroutine grid_mechanical_spectral_basic_init
 
   restartRead2: if (interface_restartInc > 0) then
     print'(a,i0,a)', ' reading more restart data of increment ', interface_restartInc, ' from file'
-    call HDF5_read(groupHandle,C_volAvg,       'C_volAvg')
-    call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc')
+    call HDF5_read(groupHandle,C_volAvg,       'C_volAvg',.false.)
+    call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+    if(ierr /=0) error stop 'MPI error'
+    call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc',.false.)
+    call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+    if(ierr /=0) error stop 'MPI error'
     call HDF5_closeGroup(groupHandle)
     call HDF5_closeFile(fileHandle)
 
diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90
index 676252e6d..47ea30d11 100644
--- a/src/grid/grid_mech_spectral_polarisation.f90
+++ b/src/grid/grid_mech_spectral_polarisation.f90
@@ -205,10 +205,18 @@ subroutine grid_mechanical_spectral_polarisation_init
   fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
   groupHandle = HDF5_openGroup(fileHandle,'solver')
 
-  call HDF5_read(groupHandle,P_aim,        'P_aim')
-  call HDF5_read(groupHandle,F_aim,        'F_aim')
-  call HDF5_read(groupHandle,F_aim_lastInc,'F_aim_lastInc')
-  call HDF5_read(groupHandle,F_aimDot,     'F_aimDot')
+  call HDF5_read(groupHandle,P_aim,        'P_aim',.false.)
+  call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aim,        'F_aim',.false.)
+  call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aim_lastInc,'F_aim_lastInc',.false.)
+  call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
+  call HDF5_read(groupHandle,F_aimDot,     'F_aimDot',.false.)
+  call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+  if(ierr /=0) error stop 'MPI error'
   call HDF5_read(groupHandle,F,            'F')
   call HDF5_read(groupHandle,F_lastInc,    'F_lastInc')
   call HDF5_read(groupHandle,F_tau,        'F_tau')
@@ -231,7 +239,11 @@ subroutine grid_mechanical_spectral_polarisation_init
   restartRead2: if (interface_restartInc > 0) then
     print'(a,i0,a)', ' reading more restart data of increment ', interface_restartInc, ' from file'
-    call HDF5_read(groupHandle,C_volAvg,       'C_volAvg')
-    call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc')
+    call HDF5_read(groupHandle,C_volAvg,       'C_volAvg',.false.)
+    call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+    if(ierr /=0) error stop 'MPI error'
+    call HDF5_read(groupHandle,C_volAvgLastInc,'C_volAvgLastInc',.false.)
+    call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
+    if(ierr /=0) error stop 'MPI error'
     call HDF5_closeGroup(groupHandle)
     call HDF5_closeFile(fileHandle)
 
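Note: every hunk above applies the same pattern. The dataset is read on MPI rank 0
only (that is what the new .false. argument to HDF5_read appears to select) and
then mirrored to the remaining ranks with MPI_Bcast, with error stop aborting on a
non-zero return code. The standalone sketch below illustrates this
read-on-root/broadcast pattern; it is not DAMASK code. The program name and the
variable aim are made up, the rank-0 assignment merely stands in for the root-only
HDF5 read, and MPI_DOUBLE_PRECISION is used as the standard Fortran datatype
handle in place of the MPI_DOUBLE seen in the patch.

! Minimal sketch of the read-on-root/broadcast pattern, assuming an MPI
! Fortran environment; not DAMASK code.
program read_root_then_bcast
  use mpi
  implicit none

  real(kind(1.0d0)) :: aim(3,3)  ! stand-in for a tensor such as F_aim
  integer :: ierr, rank, i

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD,rank,ierr)

  aim = 0.0d0
  if (rank == 0) aim = reshape([(dble(i), i=1,9)],[3,3])  ! "read" on root only

  ! mirror the 9 entries from rank 0 to all ranks, as the patch does
  call MPI_Bcast(aim,9,MPI_DOUBLE_PRECISION,0,MPI_COMM_WORLD,ierr)
  if (ierr /= 0) error stop 'MPI error'

  print '(a,i0,a,f4.1)', 'rank ', rank, ': aim(3,3) = ', aim(3,3)

  call MPI_Finalize(ierr)
end program read_root_then_bcast

Built with mpifort and run under mpirun -n 4, every rank reports aim(3,3) = 9.0,
confirming that the non-root ranks received rank 0's data.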