From 1ddf1e569499829bba4ccbff288dad3711cd62cf Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Tue, 21 Dec 2021 23:01:00 +0100
Subject: [PATCH] support for PETSc with 64bit integers

compiles, but untested
---
 src/grid/grid_damage_spectral.f90            | 12 ++++++------
 src/grid/grid_mech_FEM.f90                   | 18 +++++++++---------
 src/grid/grid_mech_spectral_basic.f90        | 12 ++++++------
 src/grid/grid_mech_spectral_polarisation.f90 | 12 ++++++------
 src/grid/grid_thermal_spectral.f90           | 12 ++++++------
 src/prec.f90                                 |  9 +++++++++
 6 files changed, 42 insertions(+), 33 deletions(-)

diff --git a/src/grid/grid_damage_spectral.f90 b/src/grid/grid_damage_spectral.f90
index ef229368e..69370b211 100644
--- a/src/grid/grid_damage_spectral.f90
+++ b/src/grid/grid_damage_spectral.f90
@@ -108,16 +108,16 @@ subroutine grid_damage_spectral_init()
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,damage_snes,ierr); CHKERRQ(ierr)
   call SNESSetOptionsPrefix(damage_snes,'damage_',ierr);CHKERRQ(ierr)
-  localK            = 0
-  localK(worldrank) = grid3
+  localK            = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3D(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                                   ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                                       ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                                                ! global grid
-         1, 1, worldsize, &
-         1, 0, &                                                                                   ! #dof (damage phase field), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                                             ! local grid
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                   ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         1_pPetscInt, 0_pPetscInt, &                                                               ! #dof (phi, scalar), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                               ! local grid
          damage_grid,ierr)                                                                         ! handle, error
   CHKERRQ(ierr)
   call SNESSetDM(damage_snes,damage_grid,ierr); CHKERRQ(ierr)                                      ! connect snes to da
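Note on the conversion pattern used in this and all following hunks: every integer
actual argument that reaches a PETSc routine is converted to the PetscInt kind,
because PetscInt is a 64-bit integer when PETSc is configured with
--with-64-bit-indices, while DAMASK's grid variables are default (32-bit) integers.
A minimal, self-contained sketch of the idea (illustration only, not part of the
patch; pWide and takes_wide are hypothetical stand-ins for pPetscInt and a PETSc
routine such as DMDACreate3d):

    ! sketch: explicit int(...,kind) conversion at a call site whose dummy
    ! arguments have a wider integer kind than the caller's variables
    program sketch_kind_cast
      implicit none
      integer, parameter :: pWide = selected_int_kind(18)    ! stand-in for pPetscInt
      integer :: grid(3) = [64,64,64]                        ! default kind, as in DAMASK

      call takes_wide(int(grid(1),pWide),int(grid(2),pWide),int(grid(3),pWide))

    contains

      subroutine takes_wide(nx,ny,nz)                        ! stand-in for a PETSc routine
        integer(pWide), intent(in) :: nx,ny,nz
        print '(3i6)', nx,ny,nz
      end subroutine takes_wide

    end program sketch_kind_cast

Without the explicit int(), a call through an explicit interface such as
DMDACreate3d would not compile under 64-bit PETSc, since arguments of default kind
and of PetscInt kind are distinct types to Fortran's argument matching.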
diff --git a/src/grid/grid_mech_FEM.f90 b/src/grid/grid_mech_FEM.f90
index e1dc3dabe..c4a8588d6 100644
--- a/src/grid/grid_mech_FEM.f90
+++ b/src/grid/grid_mech_FEM.f90
@@ -164,16 +164,16 @@ subroutine grid_mechanical_FEM_init
   CHKERRQ(ierr)
   call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',ierr)
   CHKERRQ(ierr)
-  localK            = 0
-  localK(worldrank) = grid3
+  localK            = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3d(PETSC_COMM_WORLD, &
          DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, &
          DMDA_STENCIL_BOX, &
-         grid(1),grid(2),grid(3), &
-         1, 1, worldsize, &
-         3, 1, &
-         [grid(1)],[grid(2)],localK, &
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                   ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         3_pPetscInt, 1_pPetscInt, &                                                               ! #dof (u, vector), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                               ! local grid
          mechanical_grid,ierr)
   CHKERRQ(ierr)
   call SNESSetDM(mechanical_snes,mechanical_grid,ierr)
@@ -196,7 +196,7 @@ subroutine grid_mechanical_FEM_init
   CHKERRQ(ierr)
   call SNESSetConvergenceTest(mechanical_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr)  ! specify custom convergence check function "_converged"
   CHKERRQ(ierr)
-  call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1), ierr)                               ! ignore linear solve failures
+  call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1_pPetscInt), ierr)                     ! ignore linear solve failures
   CHKERRQ(ierr)
   call SNESSetFromOptions(mechanical_snes,ierr)                                                    ! pull it all together with additional cli arguments
   CHKERRQ(ierr)
@@ -687,7 +687,7 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
                   matmul(transpose(BMatFull), &
                          matmul(reshape(reshape(homogenization_dPdF(1:3,1:3,1:3,1:3,ele), &
                                                 shape=[3,3,3,3], order=[2,1,4,3]),shape=[9,9]),BMatFull))*detJ
-    call MatSetValuesStencil(Jac,24,row,24,col,K_ele,ADD_VALUES,ierr)
+    call MatSetValuesStencil(Jac,24_pPetscInt,row,24_pPetscInt,col,K_ele,ADD_VALUES,ierr)
     CHKERRQ(ierr)
   enddo; enddo; enddo
   call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
@@ -700,7 +700,7 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
     diag = (C_volAvg(1,1,1,1)/delta(1)**2 + &
             C_volAvg(2,2,2,2)/delta(2)**2 + &
             C_volAvg(3,3,3,3)/delta(3)**2)*detJ
-    call MatZeroRowsColumns(Jac,size(rows),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)
+    call MatZeroRowsColumns(Jac,size(rows,kind=pPetscInt),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)
     CHKERRQ(ierr)
     call DMGetGlobalVector(da_local,coordinates,ierr); CHKERRQ(ierr)
     call DMDAVecGetArrayF90(da_local,coordinates,x_scal,ierr); CHKERRQ(ierr)
diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90
index 66b703ed6..0a267ecca 100644
--- a/src/grid/grid_mech_spectral_basic.f90
+++ b/src/grid/grid_mech_spectral_basic.f90
@@ -159,16 +159,16 @@ subroutine grid_mechanical_spectral_basic_init
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
   call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr)
-  localK            = 0
-  localK(worldrank) = grid3
+  localK            = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3d(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                                   ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                                       ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                                                ! global grid
-         1 , 1, worldsize, &
-         9, 0, &                                                                                   ! #dof (F tensor), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                                             ! local grid
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                   ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         9_pPetscInt, 0_pPetscInt, &                                                               ! #dof (F, tensor), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                               ! local grid
          da,ierr)                                                                                  ! handle, error
   CHKERRQ(ierr)
   call SNESSetDM(snes,da,ierr); CHKERRQ(ierr)                                                      ! connect snes to da
diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90
index 3d524029c..5bfce5e3f 100644
--- a/src/grid/grid_mech_spectral_polarisation.f90
+++ b/src/grid/grid_mech_spectral_polarisation.f90
@@ -179,16 +179,16 @@ subroutine grid_mechanical_spectral_polarisation_init
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
   call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr)
-  localK            = 0
-  localK(worldrank) = grid3
+  localK            = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3d(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                                   ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                                       ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                                                ! global grid
-         1 , 1, worldsize, &
-         18, 0, &                                                                                  ! #dof (F tensor), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                                             ! local grid
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                   ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         18_pPetscInt, 0_pPetscInt, &                                                              ! #dof (2xtensor), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                               ! local grid
          da,ierr)                                                                                  ! handle, error
   CHKERRQ(ierr)
   call SNESSetDM(snes,da,ierr); CHKERRQ(ierr)                                                      ! connect snes to da
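The same reasoning governs the literal constants in the hunks above: an unsuffixed
literal such as 24 or huge(1) is of default integer kind, so the patch writes
24_pPetscInt, 0_pPetscInt, and huge(1_pPetscInt) to obtain values of PetscInt kind.
A short sketch of the difference (illustration only; pWide again stands in for
pPetscInt):

    ! sketch: a literal's kind is part of its type, and huge() reports the
    ! largest value representable in the kind of its argument
    program sketch_literal_kinds
      implicit none
      integer, parameter :: pWide = selected_int_kind(18)  ! stand-in for pPetscInt
      print *, huge(1)                                     ! default kind: 2147483647
      print *, huge(1_pWide)                               ! 64 bit: 9223372036854775807
      print *, kind(24), kind(24_pWide)                    ! two different kinds
    end program sketch_literal_kinds

Likewise, size(rows,kind=pPetscInt) asks the size intrinsic itself for a result of
PetscInt kind instead of converting its default-kind result afterwards.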
diff --git a/src/grid/grid_thermal_spectral.f90 b/src/grid/grid_thermal_spectral.f90
index 67c7ba1c3..548fa4b3c 100644
--- a/src/grid/grid_thermal_spectral.f90
+++ b/src/grid/grid_thermal_spectral.f90
@@ -103,16 +103,16 @@ subroutine grid_thermal_spectral_init(T_0)
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,thermal_snes,ierr); CHKERRQ(ierr)
   call SNESSetOptionsPrefix(thermal_snes,'thermal_',ierr);CHKERRQ(ierr)
-  localK            = 0
-  localK(worldrank) = grid3
+  localK            = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
   call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
   call DMDACreate3D(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                                   ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                                       ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                                                ! global grid
-         1, 1, worldsize, &
-         1, 0, &                                                                                   ! #dof (T field), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                                             ! local grid
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                   ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         1_pPetscInt, 0_pPetscInt, &                                                               ! #dof (T, scalar), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                               ! local grid
          thermal_grid,ierr)                                                                        ! handle, error
   CHKERRQ(ierr)
   call SNESSetDM(thermal_snes,thermal_grid,ierr); CHKERRQ(ierr)                                    ! connect snes to da
diff --git a/src/prec.f90 b/src/prec.f90
index 843033219..fe3e7fc20 100644
--- a/src/prec.f90
+++ b/src/prec.f90
@@ -10,6 +10,11 @@ module prec
   use, intrinsic :: IEEE_arithmetic
   use, intrinsic :: ISO_C_binding

+#ifdef PETSC
+#include <petsc/finclude/petscsys.h>
+  use PETScSys
+#endif
+
   implicit none
   public

@@ -21,6 +26,10 @@ module prec
     integer,     parameter :: pInt = pI64
 #else
     integer,     parameter :: pInt = pI32
+#endif
+#ifdef PETSC
+  PetscInt,    private   :: dummy
+  integer,     parameter :: pPetscInt = kind(dummy)
 #endif
   integer,     parameter :: pStringLen = 256                                                       !< default string length
   integer,     parameter :: pPathLen   = 4096                                                      !< maximum length of a path name on linux
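The pPetscInt constant added to prec.f90 is obtained by probing PETSc rather than by
hard-coding a kind: one throwaway variable is declared with the library's PetscInt
type and its kind is captured with the kind() intrinsic, so the constant stays
correct for both 32- and 64-bit PETSc builds without further preprocessor logic.
A standalone sketch of the trick (illustration only; WideInt plays the role of the
library-defined type):

    ! sketch: capture the kind of a library-defined integer type as a
    ! named constant usable in int(...) conversions and literal suffixes
    program sketch_kind_probe
      implicit none
      integer, parameter :: WideInt = selected_int_kind(18)  ! pretend this comes from a library header
      integer(WideInt)   :: dummy                            ! one unused variable of the library's type
      integer, parameter :: pWide = kind(dummy)              ! its kind, as a named constant
      print *, 'pWide =', pWide                              ! e.g. 8 with 64-bit integers
    end program sketch_kind_probe

One caveat the patch leaves open (it is explicitly marked untested): the
MPI_Allreduce calls still describe localK, now a PetscInt array, with the
MPI_INTEGER datatype, which corresponds to the default Fortran integer; under
64-bit PETSc the MPI datatype would have to change to match.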