Merge branch 'parallel-mesh' into 'development'

avoid deadlock for MPI runs

See merge request damask/DAMASK!526
Philip Eisenlohr 2022-02-17 21:37:37 +00:00
commit 66e4632655
2 changed files with 5 additions and 6 deletions
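
This fixes the classic deadlock class for collective operations: every rank of the communicator must enter the call, but the old code guarded collective PETSc operations behind a rank-dependent condition (see the hunks below). A minimal, self-contained sketch of the failure mode, in plain MPI rather than DAMASK/PETSc code (program and variable names are illustrative only):

program collective_deadlock
  use MPI_f08
  implicit none
  integer :: rank

  call MPI_Init()
  call MPI_Comm_rank(MPI_COMM_WORLD, rank)

  ! BROKEN (left commented out so this sketch actually runs): MPI_Barrier
  ! is collective, so guarding it by rank leaves rank 0 blocked inside the
  ! barrier while all other ranks have already moved past it.
  ! if (rank == 0) call MPI_Barrier(MPI_COMM_WORLD)

  ! CORRECT: branch only on values that are identical on every rank
  ! (cf. worldsize in the diff below), so all ranks take the same path.
  call MPI_Barrier(MPI_COMM_WORLD)

  call MPI_Finalize()
end program collective_deadlock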

@@ -1 +1 @@
-Subproject commit 0d639a9ba41db279b0d2825c8e8eddf0ccd91326
+Subproject commit e663a548b10ee3f9ced35c5a5105bd6824d3eebf

src/mesh/discretization_mesh.f90

@@ -49,9 +49,6 @@ module discretization_mesh
     mesh_ipVolume, &                                    !< volume associated with IP (initially!)
     mesh_node0                                          !< node x,y,z coordinates (initially!)
-  real(pReal), pointer, dimension(:) :: &
-    mesh_node0_temp
   real(pReal), dimension(:,:,:), allocatable :: &
     mesh_ipCoordinates                                  !< IP x,y,z coordinates (after deformation!)
@@ -88,6 +85,8 @@ subroutine discretization_mesh_init(restart)
     num_mesh
   integer :: p_i, dim                                   !< integration order (quadrature rule)
   type(tVec) :: coords_node0
+  real(pReal), pointer, dimension(:) :: &
+    mesh_node0_temp

   print'(/,1x,a)', '<<<+- discretization_mesh init -+>>>'
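Note: the two hunks above are one move. mesh_node0_temp is only a scratch pointer that VecGetArrayF90 (see the last hunk) aims at the PETSc coordinate data, so its declaration shifts from module scope into discretization_mesh_init, presumably because the only routine that uses it is the init routine itself and module-level state would just go stale.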
@@ -124,7 +123,7 @@ subroutine discretization_mesh_init(restart)
   dimPlex = int(dim,pPETSCINT)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  if (worldrank == 0) then
+  if (worldsize == 1) then
     call DMClone(globalMesh,geomMesh,err_PETSc)
   else
     call DMPlexDistribute(globalMesh,0_pPETSCINT,sf,geomMesh,err_PETSc)
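Note: this is the deadlock fix proper. worldrank differs between ranks, so under the old condition rank 0 called DMClone while every other rank entered DMPlexDistribute, a collective operation that then waited forever for rank 0. worldsize is identical on all ranks: a serial run clones the mesh, and in a parallel run every rank participates in the distribution.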
@@ -155,7 +154,7 @@ subroutine discretization_mesh_init(restart)
   CHKERRQ(err_PETSc)
 ! Get initial nodal coordinates
-  call DMGetCoordinates(geomMesh,coords_node0,err_PETSc)
+  call DMGetCoordinatesLocal(geomMesh,coords_node0,err_PETSc)
   CHKERRQ(err_PETSc)
   call VecGetArrayF90(coords_node0, mesh_node0_temp,err_PETSc)
   CHKERRQ(err_PETSc)
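Note: DMGetCoordinatesLocal returns each rank's local coordinate vector directly, whereas DMGetCoordinates hands out the global coordinate vector, which may have to be assembled collectively; after distribution, the local variant is safe to call per rank and also covers the nodes of the local partition. A sketch of the access pattern, including the VecRestoreArrayF90 that the hunk truncates (assumed PETSc Fortran idiom, not the full DAMASK routine):

! Read the rank-local node coordinates, then release the array view again.
call DMGetCoordinatesLocal(geomMesh, coords_node0, err_PETSc)   ! per-rank coordinate Vec
CHKERRQ(err_PETSc)
call VecGetArrayF90(coords_node0, mesh_node0_temp, err_PETSc)   ! Fortran pointer into the Vec
CHKERRQ(err_PETSc)
! ... copy mesh_node0_temp into mesh_node0 ...
call VecRestoreArrayF90(coords_node0, mesh_node0_temp, err_PETSc)
CHKERRQ(err_PETSc)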