Merge branch 'mpi-single-process-read' into 'development'
MPI single process read of grid

See merge request damask/DAMASK!277
commit 523a0979eb
@@ -56,25 +56,33 @@ subroutine discretization_grid_init(restart)
     myGrid                                                                       !< domain grid of this process
 
   integer, dimension(:), allocatable :: &
-    materialAt
+    materialAt, materialAt_global
 
   integer :: &
     j, &
-    debug_element, &
-    debug_ip
+    debug_element, debug_ip, &
+    ierr
   integer(C_INTPTR_T) :: &
     devNull, z, z_offset
+  integer, dimension(worldsize) :: &
+    displs, sendcounts
 
   print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(IO_STDOUT)
 
-  call readVTR(grid,geomSize,origin,materialAt)
+  if(worldrank == 0) call readVTR(grid,geomSize,origin,materialAt_global)
+
+  call MPI_Bcast(grid,3,MPI_INTEGER,0,PETSC_COMM_WORLD, ierr)
+  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Bcast(geomSize,3,MPI_DOUBLE,0,PETSC_COMM_WORLD, ierr)
+  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Bcast(origin,3,MPI_DOUBLE,0,PETSC_COMM_WORLD, ierr)
+  if (ierr /= 0) error stop 'MPI error'
 
   print'(/,a,3(i12 ))', ' grid a b c: ', grid
   print'(a,3(es12.5))', ' size x y z: ', geomSize
   print'(a,3(es12.5))', ' origin x y z: ', origin
 
-!--------------------------------------------------------------------------------------------------
-! grid solver specific quantities
   if(worldsize>grid(3)) call IO_error(894, ext_msg='number of processes exceeds grid(3)')
 
   call fftw_mpi_init
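The hunk above replaces the per-process VTR read with a single read on rank 0 followed by MPI broadcasts of the header data (grid, geomSize, origin). As a rough illustration only, not part of the commit and with made-up placeholder values standing in for what readVTR returns, the read-then-broadcast pattern looks like this in isolation:

  program bcast_sketch
    ! Minimal standalone sketch: rank 0 "reads" the header data, all other
    ! ranks receive copies via MPI_Bcast. Values and names are hypothetical.
    use MPI
    implicit none

    integer                :: ierr, worldrank
    integer,  dimension(3) :: grid
    real(8),  dimension(3) :: geomSize, origin

    call MPI_Init(ierr)
    call MPI_Comm_rank(MPI_COMM_WORLD, worldrank, ierr)

    if (worldrank == 0) then                              ! stand-in for readVTR on rank 0
      grid     = [16, 16, 16]
      geomSize = [1.0d0, 1.0d0, 1.0d0]
      origin   = [0.0d0, 0.0d0, 0.0d0]
    end if

    call MPI_Bcast(grid,     3, MPI_INTEGER,          0, MPI_COMM_WORLD, ierr)
    if (ierr /= 0) error stop 'MPI error'
    call MPI_Bcast(geomSize, 3, MPI_DOUBLE_PRECISION, 0, MPI_COMM_WORLD, ierr)
    if (ierr /= 0) error stop 'MPI error'
    call MPI_Bcast(origin,   3, MPI_DOUBLE_PRECISION, 0, MPI_COMM_WORLD, ierr)
    if (ierr /= 0) error stop 'MPI error'

    call MPI_Finalize(ierr)
  end program bcast_sketch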
@@ -93,14 +101,14 @@ subroutine discretization_grid_init(restart)
   myGrid = [grid(1:2),grid3]
   mySize = [geomSize(1:2),size3]
 
-!--------------------------------------------------------------------------------------------------
-! debug parameters
-  debug_element = config_debug%get_asInt('element',defaultVal=1)
-  debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1)
+  call MPI_Gather(product(grid(1:2))*grid3Offset,1,MPI_INTEGER,displs, 1,MPI_INTEGER,0,PETSC_COMM_WORLD,ierr)
+  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Gather(product(myGrid), 1,MPI_INTEGER,sendcounts,1,MPI_INTEGER,0,PETSC_COMM_WORLD,ierr)
+  if (ierr /= 0) error stop 'MPI error'
 
-!--------------------------------------------------------------------------------------------------
-! general discretization
-  materialAt = materialAt(product(grid(1:2))*grid3Offset+1:product(grid(1:2))*(grid3Offset+grid3)) ! reallocate/shrink in case of MPI
+  allocate(materialAt(product(myGrid)))
+  call MPI_scatterv(materialAt_global,sendcounts,displs,MPI_INTEGER,materialAt,size(materialAt),MPI_INTEGER,0,PETSC_COMM_WORLD,ierr)
+  if (ierr /= 0) error stop 'MPI error'
 
   call discretization_init(materialAt, &
                            IPcoordinates0(myGrid,mySize,grid3Offset), &
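This hunk distributes the material IDs that rank 0 read: each rank reports its slab offset and cell count to rank 0 via MPI_Gather, and rank 0 then hands every rank its slice of materialAt_global via MPI_Scatterv, replacing the old shrink-in-place of a fully replicated array. A self-contained sketch of that pattern follows; it is illustrative only and not part of the commit. The even split of the z dimension is a simplifying assumption here, whereas the actual code takes its slab bounds (grid3, grid3Offset) from the FFTW MPI decomposition.

  program scatterv_sketch
    ! Minimal standalone sketch: gather per-rank offsets/counts on rank 0,
    ! then scatter the globally read cell data. Names are hypothetical.
    use MPI
    implicit none

    integer            :: ierr, worldrank, worldsize, i
    integer, parameter :: nx = 4, ny = 4, nzPerRank = 2   ! assumed even z-split
    integer            :: myCells, myOffset
    integer, dimension(:), allocatable :: displs, sendcounts, materialAt_global, materialAt

    call MPI_Init(ierr)
    call MPI_Comm_rank(MPI_COMM_WORLD, worldrank, ierr)
    call MPI_Comm_size(MPI_COMM_WORLD, worldsize, ierr)

    myCells  = nx*ny*nzPerRank                            ! cells owned by this rank
    myOffset = worldrank*myCells                          ! flat offset of this rank's slab

    allocate(displs(worldsize), sendcounts(worldsize))    ! significant on rank 0 only
    call MPI_Gather(myOffset, 1, MPI_INTEGER, displs,     1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
    if (ierr /= 0) error stop 'MPI error'
    call MPI_Gather(myCells,  1, MPI_INTEGER, sendcounts, 1, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
    if (ierr /= 0) error stop 'MPI error'

    if (worldrank == 0) then                              ! stand-in for the data read by readVTR
      allocate(materialAt_global(myCells*worldsize))
      materialAt_global = [(i, i=1, size(materialAt_global))]
    else
      allocate(materialAt_global(0))                      ! send buffer unused off rank 0
    end if

    allocate(materialAt(myCells))
    call MPI_Scatterv(materialAt_global, sendcounts, displs, MPI_INTEGER, &
                      materialAt, myCells, MPI_INTEGER, 0, MPI_COMM_WORLD, ierr)
    if (ierr /= 0) error stop 'MPI error'

    call MPI_Finalize(ierr)
  end program scatterv_sketch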
@@ -131,10 +139,12 @@ subroutine discretization_grid_init(restart)
   call geometry_plastic_nonlocal_setIPareaNormal (cellSurfaceNormal(product(myGrid)))
   call geometry_plastic_nonlocal_setIPneighborhood(IPneighborhood(myGrid))
 
 !--------------------------------------------------------------------------------------------------
-! sanity checks for debugging
-  if (debug_element < 1 .or. debug_element > product(myGrid)) call IO_error(602,ext_msg='element') ! selected element does not exist
-  if (debug_ip /= 1) call IO_error(602,ext_msg='IP') ! selected IP does not exist
+! debug parameters
+  debug_element = config_debug%get_asInt('element',defaultVal=1)
+  if (debug_element < 1 .or. debug_element > product(myGrid)) call IO_error(602,ext_msg='element')
+  debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1)
+  if (debug_ip /= 1) call IO_error(602,ext_msg='IP')
 
 end subroutine discretization_grid_init
 