support for PETSc with 64bit integers

compiles, but untested
Martin Diehl 2021-12-21 23:01:00 +01:00
parent b4f3ac4577
commit 1ddf1e5694
6 changed files with 42 additions and 33 deletions
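The change applies one pattern throughout: derive an integer kind pPetscInt that matches PETSc's PetscInt (32 or 64 bit, depending on how PETSc was configured) and explicitly convert every integer actual argument at PETSc call sites to that kind, using int(...,pPetscInt) for variables and the _pPetscInt suffix for literals. A minimal standalone sketch of the idea (module and routine names here are illustrative, not from the commit; it assumes PETSc's Fortran include files are available and the source is preprocessed):

module prec_sketch
#include <petsc/finclude/petscsys.h>
  use PETScSys
  implicit none
  PetscInt, private :: dummy                        ! PetscInt is 32 or 64 bit, fixed at PETSc configure time
  integer, parameter :: pPetscInt = kind(dummy)     ! Fortran kind that matches PetscInt
end module prec_sketch

subroutine petsc_call_sketch(grid3)
  use prec_sketch
  implicit none
  integer, intent(in) :: grid3                      ! default-kind integer from the host code
  integer(pPetscInt)  :: n

  n = int(grid3,pPetscInt)                          ! explicit conversion instead of relying on default kinds
  ! literals carry the kind suffix as well, e.g. 1_pPetscInt, 0_pPetscInt
end subroutine petsc_call_sketch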

@@ -108,16 +108,16 @@ subroutine grid_damage_spectral_init()
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,damage_snes,ierr); CHKERRQ(ierr)
   call SNESSetOptionsPrefix(damage_snes,'damage_',ierr);CHKERRQ(ierr)
-  localK = 0
-  localK(worldrank) = grid3
+  localK = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3D(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                 ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                     ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                              ! global grid
-         1, 1, worldsize, &
-         1, 0, &                                                                 ! #dof (damage phase field), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                           ! local grid
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         1_pPetscInt, 0_pPetscInt, &                                             ! #dof (phi, scalar), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &             ! local grid
          damage_grid,ierr)                                                       ! handle, error
   CHKERRQ(ierr)
   call SNESSetDM(damage_snes,damage_grid,ierr); CHKERRQ(ierr)                    ! connect snes to da
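The kind suffixes and conversions are not cosmetic: with 64-bit PETSc integers, a default-kind actual argument such as 1 or grid(1) either fails to compile (where the PETSc Fortran modules provide an explicit interface) or silently passes an integer of the wrong size. A generic illustration of the kind rule, independent of PETSc:

! Generic illustration (not PETSc code): an explicit interface with a
! 64-bit integer dummy argument only accepts actual arguments of that kind.
program kind_mismatch_demo
  use, intrinsic :: ISO_Fortran_env, only: INT64
  implicit none

  call takes_int64(1_INT64)                ! ok: kinds match
  call takes_int64(int(42,INT64))          ! ok: explicit conversion of a default-kind value
  ! call takes_int64(1)                    ! would not compile: default integer /= INT64

contains

  subroutine takes_int64(n)
    integer(INT64), intent(in) :: n
    print *, n
  end subroutine takes_int64

end program kind_mismatch_demo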

@@ -164,16 +164,16 @@ subroutine grid_mechanical_FEM_init
   CHKERRQ(ierr)
   call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',ierr)
   CHKERRQ(ierr)
-  localK = 0
-  localK(worldrank) = grid3
+  localK = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3d(PETSC_COMM_WORLD, &
          DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, &
          DMDA_STENCIL_BOX, &
-         grid(1),grid(2),grid(3), &
-         1, 1, worldsize, &
-         3, 1, &
-         [grid(1)],[grid(2)],localK, &
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         3_pPetscInt, 1_pPetscInt, &                                             ! #dof (u, vector), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &             ! local grid
          mechanical_grid,ierr)
   CHKERRQ(ierr)
   call SNESSetDM(mechanical_snes,mechanical_grid,ierr)
@@ -196,7 +196,7 @@ subroutine grid_mechanical_FEM_init
   CHKERRQ(ierr)
   call SNESSetConvergenceTest(mechanical_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr)   ! specify custom convergence check function "_converged"
   CHKERRQ(ierr)
-  call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1), ierr)                                ! ignore linear solve failures
+  call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1_pPetscInt), ierr)                      ! ignore linear solve failures
   CHKERRQ(ierr)
   call SNESSetFromOptions(mechanical_snes,ierr)                                                     ! pull it all together with additional cli arguments
   CHKERRQ(ierr)
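huge takes its result kind from its argument, so huge(1_pPetscInt) yields the largest representable PetscInt rather than the largest default integer, matching what the routine expects. A generic illustration:

! Generic illustration: huge() follows the kind of its argument.
program huge_kind_demo
  use, intrinsic :: ISO_Fortran_env, only: INT64
  implicit none

  print *, huge(1)                         ! largest default integer, typically 2147483647
  print *, huge(1_INT64)                   ! largest 64-bit integer, 9223372036854775807
end program huge_kind_demo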
@@ -687,7 +687,7 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
              matmul(transpose(BMatFull), &
                     matmul(reshape(reshape(homogenization_dPdF(1:3,1:3,1:3,1:3,ele), &
                                            shape=[3,3,3,3], order=[2,1,4,3]),shape=[9,9]),BMatFull))*detJ
-    call MatSetValuesStencil(Jac,24,row,24,col,K_ele,ADD_VALUES,ierr)
+    call MatSetValuesStencil(Jac,24_pPETScInt,row,24_pPetscInt,col,K_ele,ADD_VALUES,ierr)
     CHKERRQ(ierr)
   enddo; enddo; enddo
   call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
@@ -700,7 +700,7 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
   diag = (C_volAvg(1,1,1,1)/delta(1)**2 + &
           C_volAvg(2,2,2,2)/delta(2)**2 + &
           C_volAvg(3,3,3,3)/delta(3)**2)*detJ
-  call MatZeroRowsColumns(Jac,size(rows),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)
+  call MatZeroRowsColumns(Jac,size(rows,kind=pPetscInt),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)
   CHKERRQ(ierr)
   call DMGetGlobalVector(da_local,coordinates,ierr); CHKERRQ(ierr)
   call DMDAVecGetArrayF90(da_local,coordinates,x_scal,ierr); CHKERRQ(ierr)
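Similarly, the kind= argument of size makes the intrinsic return its result directly in the requested integer kind, so no separate int() conversion is needed. A generic illustration:

! Generic illustration: size() with an explicit result kind (Fortran 2003).
program size_kind_demo
  use, intrinsic :: ISO_Fortran_env, only: INT64
  implicit none
  integer        :: a(24)
  integer(INT64) :: n

  n = size(a,kind=INT64)                   ! result already has kind INT64
  print *, n
end program size_kind_demo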

@@ -159,16 +159,16 @@ subroutine grid_mechanical_spectral_basic_init
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
   call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr)
-  localK = 0
-  localK(worldrank) = grid3
+  localK = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3d(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                 ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                     ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                              ! global grid
-         1 , 1, worldsize, &
-         9, 0, &                                                                 ! #dof (F tensor), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                           ! local grid
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         9_pPetscInt, 0_pPetscInt, &                                             ! #dof (F, tensor), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &             ! local grid
          da,ierr)                                                                ! handle, error
   CHKERRQ(ierr)
   call SNESSetDM(snes,da,ierr); CHKERRQ(ierr)                                    ! connect snes to da

@@ -179,16 +179,16 @@ subroutine grid_mechanical_spectral_polarisation_init
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
   call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr)
-  localK = 0
-  localK(worldrank) = grid3
+  localK = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3d(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                 ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                     ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                              ! global grid
-         1 , 1, worldsize, &
-         18, 0, &                                                                ! #dof (F tensor), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                           ! local grid
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         18_pPetscInt, 0_pPetscInt, &                                            ! #dof (2xtensor), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &             ! local grid
          da,ierr)                                                                ! handle, error
   CHKERRQ(ierr)
   call SNESSetDM(snes,da,ierr); CHKERRQ(ierr)                                    ! connect snes to da

@@ -103,16 +103,16 @@ subroutine grid_thermal_spectral_init(T_0)
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,thermal_snes,ierr); CHKERRQ(ierr)
   call SNESSetOptionsPrefix(thermal_snes,'thermal_',ierr);CHKERRQ(ierr)
-  localK = 0
-  localK(worldrank) = grid3
+  localK = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3D(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                 ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                     ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                              ! global grid
-         1, 1, worldsize, &
-         1, 0, &                                                                 ! #dof (T field), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                           ! local grid
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         1_pPetscInt, 0_pPetscInt, &                                             ! #dof (T, scalar), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &             ! local grid
          thermal_grid,ierr)                                                      ! handle, error
   CHKERRQ(ierr)
   call SNESSetDM(thermal_snes,thermal_grid,ierr); CHKERRQ(ierr)                  ! connect snes to da

@@ -10,6 +10,11 @@ module prec
   use, intrinsic :: IEEE_arithmetic
   use, intrinsic :: ISO_C_binding
+#ifdef PETSC
+#include <petsc/finclude/petscsys.h>
+  use PETScSys
+#endif

   implicit none
   public
@@ -21,6 +26,10 @@ module prec
   integer, parameter :: pInt = pI64
 #else
   integer, parameter :: pInt = pI32
+#endif
+#ifdef PETSC
+  PetscInt, private :: dummy
+  integer, parameter :: pPetscInt = kind(dummy)
 #endif
   integer, parameter :: pStringLen = 256                                         !< default string length
   integer, parameter :: pPathLen   = 4096                                        !< maximum length of a path name on linux
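With the kind in place, a quick way to confirm which integer width a given build selected is to query the storage size of a pPetscInt value (a hypothetical check, not part of the commit; the subroutine name is illustrative):

! Hypothetical check: report the bit width of PETSc integers as seen by the host code.
! A PETSc build with 64-bit indices should report 64, a default build 32.
! (pPetscInt exists only when the code is compiled with PETSC defined.)
subroutine report_petsc_int_kind()
  use prec, only: pPetscInt
  implicit none

  print '(a,i0,a)', 'PetscInt has ', storage_size(0_pPetscInt), ' bits'
end subroutine report_petsc_int_kind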