From 09b0cc31015227b4239732c658deb174cd2ce2f3 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 16 Jul 2023 03:14:49 +0200 Subject: [PATCH 1/4] avoid duplicated storage of solution vector/T polished variable names and simplified expressions --- src/grid/grid_thermal_spectral.f90 | 155 +++++++++++++++-------------- 1 file changed, 78 insertions(+), 77 deletions(-) diff --git a/src/grid/grid_thermal_spectral.f90 b/src/grid/grid_thermal_spectral.f90 index 1c3f2129a..0c7cf3a54 100644 --- a/src/grid/grid_thermal_spectral.f90 +++ b/src/grid/grid_thermal_spectral.f90 @@ -46,9 +46,8 @@ module grid_thermal_spectral !-------------------------------------------------------------------------------------------------- ! PETSc data SNES :: SNES_thermal - Vec :: solution_vec + Vec :: T_PETSc real(pREAL), dimension(:,:,:), allocatable :: & - T, & !< field of current temperature T_lastInc, & !< field of previous temperature T_stagInc, & !< field of staggered temperature dotT_lastInc @@ -73,8 +72,8 @@ subroutine grid_thermal_spectral_init() PetscInt, dimension(0:worldsize-1) :: localK integer :: i, j, k, ce - DM :: thermal_grid - real(pREAL), dimension(:,:,:), pointer :: T_PETSc + DM :: DM_thermal + real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed integer(MPI_INTEGER_KIND) :: err_MPI PetscErrorCode :: err_PETSc integer(HID_T) :: fileHandle, groupHandle @@ -108,13 +107,6 @@ subroutine grid_thermal_spectral_init() call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asStr('petsc_options',defaultVal=''),err_PETSc) CHKERRQ(err_PETSc) -!-------------------------------------------------------------------------------------------------- -! init fields - T = discretization_grid_getInitialCondition('T') - T_lastInc = T - T_stagInc = T - dotT_lastInc = 0.0_pREAL * T - !-------------------------------------------------------------------------------------------------- ! initialize solver specific parts of PETSc call SNESCreate(PETSC_COMM_WORLD,SNES_thermal,err_PETSc) @@ -132,21 +124,23 @@ subroutine grid_thermal_spectral_init() 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), & 1_pPetscInt, 0_pPetscInt, & ! #dof (T, scalar), ghost boundary width (domain overlap) [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells - thermal_grid,err_PETSc) ! handle, error + DM_thermal,err_PETSc) ! handle, error CHKERRQ(err_PETSc) - call DMsetFromOptions(thermal_grid,err_PETSc) + call DMsetFromOptions(DM_thermal,err_PETSc) CHKERRQ(err_PETSc) - call DMsetUp(thermal_grid,err_PETSc) + call DMsetUp(DM_thermal,err_PETSc) CHKERRQ(err_PETSc) - call DMCreateGlobalVector(thermal_grid,solution_vec,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor) + call DMCreateGlobalVector(DM_thermal,T_PETSc,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor) CHKERRQ(err_PETSc) - call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector + call DMDASNESSetFunctionLocal(DM_thermal,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector CHKERRQ(err_PETSc) - call SNESSetDM(SNES_thermal,thermal_grid,err_PETSc) + call SNESSetDM(SNES_thermal,DM_thermal,err_PETSc) CHKERRQ(err_PETSc) call SNESSetFromOptions(SNES_thermal,err_PETSc) ! pull it all together with additional CLI arguments CHKERRQ(err_PETSc) + call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! 
returns 0-indexed T + CHKERRQ(err_PETSc) restartRead: if (CLI_restartInc > 0) then print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc @@ -158,20 +152,23 @@ subroutine grid_thermal_spectral_init() T = reshape(tempN,[cells(1),cells(2),cells3]) call HDF5_read(tempN,groupHandle,'T_lastInc',.false.) T_lastInc = reshape(tempN,[cells(1),cells(2),cells3]) + T_stagInc = T_lastInc call HDF5_read(tempN,groupHandle,'dotT_lastInc',.false.) dotT_lastInc = reshape(tempN,[cells(1),cells(2),cells3]) + else + T = discretization_grid_getInitialCondition('T') + T_lastInc = T(0:,0:,0:) + T_stagInc = T_lastInc + dotT_lastInc = 0.0_pREAL * T_lastInc end if restartRead ce = 0 - do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1) + do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0, cells(1)-1 ce = ce + 1 call homogenization_thermal_setField(T(i,j,k),0.0_pREAL,ce) end do; end do; end do - call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc) - CHKERRQ(err_PETSc) - T_PETSc = T - call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc) + call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc) CHKERRQ(err_PETSc) call updateReference() @@ -186,37 +183,43 @@ function grid_thermal_spectral_solution(Delta_t) result(solution) real(pREAL), intent(in) :: & Delta_t !< increment in time for current solution + integer :: i, j, k, ce type(tSolutionState) :: solution PetscInt :: devNull PetscReal :: T_min, T_max, stagNorm - + DM :: DM_thermal + real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed integer(MPI_INTEGER_KIND) :: err_MPI PetscErrorCode :: err_PETSc SNESConvergedReason :: reason + solution%converged = .false. !-------------------------------------------------------------------------------------------------- ! set module wide availabe data params%Delta_t = Delta_t - call SNESSolve(SNES_thermal,PETSC_NULL_VEC,solution_vec,err_PETSc) + call SNESSolve(SNES_thermal,PETSC_NULL_VEC,T_PETSc,err_PETSc) CHKERRQ(err_PETSc) call SNESGetConvergedReason(SNES_thermal,reason,err_PETSc) CHKERRQ(err_PETSc) - if (reason < 1) then - solution%converged = .false. - solution%iterationsNeeded = num%itmax - else - solution%converged = .true. - solution%iterationsNeeded = totalIter - end if + solution%converged = reason > 0 + solution%iterationsNeeded = merge(totalIter,num%itmax,solution%converged) + + call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc) + CHKERRQ(err_PETSc) + call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T + CHKERRQ(err_PETSc) + stagNorm = maxval(abs(T - T_stagInc)) + T_min = minval(T) + T_max = maxval(T) call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' - solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T)) + solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*T_max) call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' T_stagInc = T @@ -224,15 +227,14 @@ function grid_thermal_spectral_solution(Delta_t) result(solution) !-------------------------------------------------------------------------------------------------- ! 
updating thermal state ce = 0 - do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) + do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0, cells(1)-1 ce = ce + 1 - call homogenization_thermal_setField(T(i,j,k),(T(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce) + call homogenization_thermal_setField(T(i,j,k),(T(i,j,k)-T_lastInc(i+1,j+1,k+1))/params%Delta_t,ce) end do; end do; end do - call VecMin(solution_vec,devNull,T_min,err_PETSc) - CHKERRQ(err_PETSc) - call VecMax(solution_vec,devNull,T_max,err_PETSc) + call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc) CHKERRQ(err_PETSc) + if (solution%converged) & print'(/,1x,a)', '... thermal conduction converged ..................................' print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm @@ -243,42 +245,40 @@ end function grid_thermal_spectral_solution !-------------------------------------------------------------------------------------------------- -!> @brief forwarding routine +!> @brief Set DAMASK data to current solver status. !-------------------------------------------------------------------------------------------------- subroutine grid_thermal_spectral_forward(cutBack) logical, intent(in) :: cutBack integer :: i, j, k, ce - DM :: dm_local - real(pREAL), dimension(:,:,:), pointer :: T_PETSc + DM :: DM_thermal + real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed PetscErrorCode :: err_PETSc - if (cutBack) then - T = T_lastInc - T_stagInc = T_lastInc + call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc) + CHKERRQ(err_PETSc) + call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T + CHKERRQ(err_PETSc) -!-------------------------------------------------------------------------------------------------- -! reverting thermal field state - call SNESGetDM(SNES_thermal,dm_local,err_PETSc) - CHKERRQ(err_PETSc) - call DMDAVecGetArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc) !< get the data out of PETSc to work with - CHKERRQ(err_PETSc) - T_PETSc = T - call DMDAVecRestoreArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc) - CHKERRQ(err_PETSc) + if (cutBack) then ce = 0 do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) ce = ce + 1 - call homogenization_thermal_setField(T(i,j,k),dotT_lastInc(i,j,k),ce) + call homogenization_thermal_setField(T_lastInc(i,j,k),dotT_lastInc(i,j,k),ce) end do; end do; end do + T = T_lastInc + T_stagInc = T_lastInc else dotT_lastInc = (T - T_lastInc)/params%Delta_t T_lastInc = T call updateReference() end if + call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc) + CHKERRQ(err_PETSc) + end subroutine grid_thermal_spectral_forward @@ -288,13 +288,13 @@ end subroutine grid_thermal_spectral_forward subroutine grid_thermal_spectral_restartWrite() PetscErrorCode :: err_PETSc - DM :: dm_local + DM :: DM_thermal integer(HID_T) :: fileHandle, groupHandle - real(pREAL), dimension(:,:,:), pointer :: T + real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed - call SNESGetDM(SNES_thermal,dm_local,err_PETSc); + call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc) CHKERRQ(err_PETSc) - call DMDAVecGetArrayReadF90(dm_local,solution_vec,T,err_PETSc); + call DMDAVecGetArrayReadF90(DM_thermal,T_PETSc,T,err_PETSc) ! 
returns 0-indexed T CHKERRQ(err_PETSc) print'(1x,a)', 'saving thermal solver data required for restart'; flush(IO_STDOUT) @@ -307,7 +307,7 @@ subroutine grid_thermal_spectral_restartWrite() call HDF5_closeGroup(groupHandle) call HDF5_closeFile(fileHandle) - call DMDAVecRestoreArrayReadF90(dm_local,solution_vec,T,err_PETSc); + call DMDAVecRestoreArrayReadF90(DM_thermal,T_PETSc,T,err_PETSc); CHKERRQ(err_PETSc) end subroutine grid_thermal_spectral_restartWrite @@ -324,7 +324,7 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc) real(pREAL), dimension(cells(1),cells(2),cells3), intent(in) :: & x_scal real(pREAL), dimension(cells(1),cells(2),cells3), intent(out) :: & - r !< residual + r !< residual PetscObject :: dummy PetscErrorCode, intent(out) :: err_PETSc @@ -332,25 +332,26 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc) real(pREAL), dimension(3,cells(1),cells(2),cells3) :: vectorField - T = x_scal - vectorField = utilities_ScalarGradient(T) - ce = 0 - do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) - ce = ce + 1 - vectorField(1:3,i,j,k) = matmul(homogenization_K_T(ce) - K_ref, vectorField(1:3,i,j,k)) - end do; end do; end do - r = utilities_VectorDivergence(vectorField) - ce = 0 - do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) - ce = ce + 1 - r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_T(ce)) & - + homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T(i,j,k)) & - + mu_ref*T(i,j,k) - end do; end do; end do + associate(T => x_scal) + vectorField = utilities_ScalarGradient(T) + ce = 0 + do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) + ce = ce + 1 + vectorField(1:3,i,j,k) = matmul(homogenization_K_T(ce) - K_ref, vectorField(1:3,i,j,k)) + end do; end do; end do + r = utilities_VectorDivergence(vectorField) + ce = 0 + do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) + ce = ce + 1 + r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_T(ce)) & + + homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T(i,j,k)) & + + mu_ref*T(i,j,k) + end do; end do; end do - r = T & - - utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t) - err_PETSc = 0 + r = T & + - utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t) + err_PETSc = 0 + end associate end subroutine formResidual From b54cf03d6d018c5841066c750268343f9583bb14 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 16 Jul 2023 06:35:38 +0200 Subject: [PATCH 2/4] allgather instead of allreduce+sum with contribution from 1 proc --- src/HDF5_utilities.f90 | 29 +++++++------- src/grid/DAMASK_grid.f90 | 2 +- src/grid/discretization_grid.f90 | 12 +++--- src/grid/grid_damage_spectral.f90 | 15 ++++---- src/grid/grid_mech_FEM.f90 | 17 ++++----- src/grid/grid_mech_spectral_basic.f90 | 15 ++++---- src/grid/grid_mech_spectral_polarisation.f90 | 15 ++++---- src/grid/grid_thermal_spectral.f90 | 17 ++++----- src/result.f90 | 40 +++++++++----------- 9 files changed, 75 insertions(+), 87 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 857fd30d1..b87dbad3d 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -1836,15 +1836,13 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_ integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in) :: parallel - integer(HSIZE_T), intent(in), dimension(:) :: & - localShape - integer(HSIZE_T), intent(out), dimension(size(localShape,1)):: & + integer(HSIZE_T), 
intent(in), dimension(:) :: localShape + integer(HSIZE_T), intent(out), dimension(size(localShape)) :: & myStart, & globalShape !< shape of the dataset (all processes) integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(MPI_INTEGER_KIND), dimension(worldsize) :: & - readSize !< contribution of all processes + integer(MPI_INTEGER_KIND), dimension(worldsize) :: readSize !< contribution of all processes integer :: hdferr integer(MPI_INTEGER_KIND) :: err_MPI @@ -1860,7 +1858,8 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_ if (parallel) then call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) call HDF5_chkerr(hdferr) - call MPI_Allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get total output size over each process + call MPI_Allgather(int(localShape(ubound(localShape,1)),MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& + readSize,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' end if #endif @@ -1930,15 +1929,14 @@ end subroutine finalize_read !-------------------------------------------------------------------------------------------------- subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, & myStart, totalShape, & - loc_id,myShape,datasetName,datatype,parallel) + loc_id,localShape,datasetName,datatype,parallel) integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in) :: parallel integer(HID_T), intent(in) :: datatype - integer(HSIZE_T), intent(in), dimension(:) :: & - myShape - integer(HSIZE_T), intent(out), dimension(size(myShape,1)):: & + integer(HSIZE_T), intent(in), dimension(:) :: localShape + integer(HSIZE_T), intent(out), dimension(size(localShape)) :: & myStart, & totalShape !< shape of the dataset (all processes) integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id @@ -1964,16 +1962,17 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, & !-------------------------------------------------------------------------------------------------- ! determine the global data layout among all processes writeSize = 0_MPI_INTEGER_KIND - writeSize(worldrank+1) = int(myShape(ubound(myShape,1)),MPI_INTEGER_KIND) + writeSize(worldrank+1) = int(localShape(ubound(localShape,1)),MPI_INTEGER_KIND) #ifdef PETSC if (parallel) then - call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get total output size over each process + call MPI_Allgather(int(localShape(ubound(localShape,1)),MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& + writeSize,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' end if #endif myStart = int(0,HSIZE_T) myStart(ubound(myStart)) = int(sum(writeSize(1:worldrank)),HSIZE_T) - totalShape = [myShape(1:ubound(myShape,1)-1),int(sum(writeSize),HSIZE_T)] + totalShape = [localShape(1:ubound(localShape,1)-1),int(sum(writeSize),HSIZE_T)] !-------------------------------------------------------------------------------------------------- ! chunk dataset, enable compression for larger datasets @@ -2001,7 +2000,7 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, & !-------------------------------------------------------------------------------------------------- ! 
create dataspace in memory (local shape) and in file (global shape) - call H5Screate_simple_f(size(myShape), myShape, memspace_id, hdferr, myShape) + call H5Screate_simple_f(size(localShape), localShape, memspace_id, hdferr, localShape) call HDF5_chkerr(hdferr) call H5Screate_simple_f(size(totalShape), totalShape, filespace_id, hdferr, totalShape) call HDF5_chkerr(hdferr) @@ -2010,7 +2009,7 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, & ! create dataset in the file and select a hyperslab from it (the portion of the current process) call H5Dcreate_f(loc_id, trim(datasetName), datatype, filespace_id, dset_id, hdferr, dcpl) call HDF5_chkerr(hdferr) - call H5Sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, myShape, hdferr) + call H5Sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, localShape, hdferr) call HDF5_chkerr(hdferr) call H5Pclose_f(dcpl , hdferr) diff --git a/src/grid/DAMASK_grid.f90 b/src/grid/DAMASK_grid.f90 index 443f69f9c..8efb2358a 100644 --- a/src/grid/DAMASK_grid.f90 +++ b/src/grid/DAMASK_grid.f90 @@ -366,7 +366,7 @@ program DAMASK_grid end if Delta_t = Delta_t * real(subStepFactor,pREAL)**real(-cutBackLevel,pREAL) ! depending on cut back level, decrease time step - skipping: if (totalIncsCounter <= CLI_restartInc) then ! not yet at restart inc? + skipping: if (totalIncsCounter <= CLI_restartInc) then ! not yet at restart inc? t = t + Delta_t ! just advance time, skip already performed calculation guess = .true. ! QUESTION:why forced guessing instead of inheriting loadcase preference else skipping diff --git a/src/grid/discretization_grid.f90 b/src/grid/discretization_grid.f90 index e77a173e3..2cb5dbf9f 100644 --- a/src/grid/discretization_grid.f90 +++ b/src/grid/discretization_grid.f90 @@ -68,7 +68,7 @@ subroutine discretization_grid_init(restart) j integer(MPI_INTEGER_KIND) :: err_MPI integer(C_INTPTR_T) :: & - devNull, z, z_offset + devNull, cells3_, cells3Offset_ integer, dimension(worldsize) :: & displs, sendcounts character(len=:), allocatable :: & @@ -113,12 +113,12 @@ subroutine discretization_grid_init(restart) call fftw_mpi_init() devNull = fftw_mpi_local_size_3d(int(cells(3),C_INTPTR_T),int(cells(2),C_INTPTR_T),int(cells(1)/2+1,C_INTPTR_T), & PETSC_COMM_WORLD, & - z, & ! domain cells size along z - z_offset) ! domain cells offset along z - if (z==0_C_INTPTR_T) call IO_error(894, ext_msg='Cannot distribute MPI processes') + cells3_, & ! domain cells size along z + cells3Offset_) ! 
domain cells offset along z + if (cells3_==0_C_INTPTR_T) call IO_error(894, ext_msg='Cannot distribute MPI processes') - cells3 = int(z) - cells3Offset = int(z_offset) + cells3 = int(cells3_) + cells3Offset = int(cells3Offset_) size3 = geomSize(3)*real(cells3,pREAL) /real(cells(3),pREAL) size3Offset = geomSize(3)*real(cells3Offset,pREAL)/real(cells(3),pREAL) myGrid = [cells(1:2),cells3] diff --git a/src/grid/grid_damage_spectral.f90 b/src/grid/grid_damage_spectral.f90 index 90680daea..a2e60cb01 100644 --- a/src/grid/grid_damage_spectral.f90 +++ b/src/grid/grid_damage_spectral.f90 @@ -72,7 +72,7 @@ contains !-------------------------------------------------------------------------------------------------- subroutine grid_damage_spectral_init() - PetscInt, dimension(0:worldsize-1) :: localK + integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global integer :: i, j, k, ce DM :: damage_grid real(pREAL), dimension(:,:,:), pointer :: phi_PETSc @@ -129,17 +129,16 @@ subroutine grid_damage_spectral_init() CHKERRQ(err_PETSc) call SNESSetOptionsPrefix(SNES_damage,'damage_',err_PETSc) CHKERRQ(err_PETSc) - localK = 0_pPetscInt - localK(worldrank) = int(cells3,pPetscInt) - call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) + call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& + cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' call DMDACreate3D(PETSC_COMM_WORLD, & DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point - int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells - 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), & - 1_pPetscInt, 0_pPetscInt, & ! #dof (phi, scalar), ghost boundary width (domain overlap) - [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells + int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells + 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & + 1_pPETSCINT, 0_pPETSCINT, & ! #dof (phi, scalar), ghost boundary width (domain overlap) + [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],int(cells3_global,pPETSCINT), & ! local cells damage_grid,err_PETSc) ! 
handle, error CHKERRQ(err_PETSc) call DMsetFromOptions(damage_grid,err_PETSc) diff --git a/src/grid/grid_mech_FEM.f90 b/src/grid/grid_mech_FEM.f90 index 17bf01ac0..14f750d73 100644 --- a/src/grid/grid_mech_FEM.f90 +++ b/src/grid/grid_mech_FEM.f90 @@ -115,7 +115,7 @@ subroutine grid_mechanical_FEM_init integer(MPI_INTEGER_KIND) :: err_MPI PetscScalar, pointer, dimension(:,:,:,:) :: & u,u_lastInc - PetscInt, dimension(0:worldsize-1) :: localK + integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global integer(HID_T) :: fileHandle, groupHandle type(tDict), pointer :: & num_grid @@ -167,17 +167,16 @@ subroutine grid_mechanical_FEM_init CHKERRQ(err_PETSc) call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc) CHKERRQ(err_PETSc) - localK = 0_pPetscInt - localK(worldrank) = int(cells3,pPetscInt) - call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) + call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& + cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' call DMDACreate3d(PETSC_COMM_WORLD, & DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, & DMDA_STENCIL_BOX, & - int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells - 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), & - 3_pPetscInt, 1_pPetscInt, & ! #dof (u, vector), ghost boundary width (domain overlap) - [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells + int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells + 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & + 3_pPETSCINT, 1_pPETSCINT, & ! #dof (u, vector), ghost boundary width (domain overlap) + [int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells mechanical_grid,err_PETSc) CHKERRQ(err_PETSc) call DMsetFromOptions(mechanical_grid,err_PETSc) @@ -198,7 +197,7 @@ subroutine grid_mechanical_FEM_init CHKERRQ(err_PETSc) call SNESSetConvergenceTest(SNES_mechanical,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "_converged" CHKERRQ(err_PETSc) - call SNESSetMaxLinearSolveFailures(SNES_mechanical, huge(1_pPetscInt), err_PETSc) ! ignore linear solve failures + call SNESSetMaxLinearSolveFailures(SNES_mechanical, huge(1_pPETSCINT), err_PETSc) ! ignore linear solve failures CHKERRQ(err_PETSc) call SNESSetDM(SNES_mechanical,mechanical_grid,err_PETSc) CHKERRQ(err_PETSc) diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90 index 4bd9c5a96..0ada208c1 100644 --- a/src/grid/grid_mech_spectral_basic.f90 +++ b/src/grid/grid_mech_spectral_basic.f90 @@ -110,7 +110,7 @@ subroutine grid_mechanical_spectral_basic_init() integer(MPI_INTEGER_KIND) :: err_MPI real(pREAL), pointer, dimension(:,:,:,:) :: & F ! 
pointer to solution data - PetscInt, dimension(0:worldsize-1) :: localK + integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global real(pREAL), dimension(3,3,product(cells(1:2))*cells3) :: temp33n integer(HID_T) :: fileHandle, groupHandle type(tDict), pointer :: & @@ -166,17 +166,16 @@ subroutine grid_mechanical_spectral_basic_init() CHKERRQ(err_PETSc) call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc) CHKERRQ(err_PETSc) - localK = 0_pPetscInt - localK(worldrank) = int(cells3,pPetscInt) - call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) + call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& + cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' call DMDACreate3d(PETSC_COMM_WORLD, & DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point - int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells - 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), & - 9_pPetscInt, 0_pPetscInt, & ! #dof (F, tensor), ghost boundary width (domain overlap) - [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells + int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells + 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & + 9_pPETSCINT, 0_pPETSCINT, & ! #dof (F, tensor), ghost boundary width (domain overlap) + [int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells da,err_PETSc) ! handle, error CHKERRQ(err_PETSc) call DMsetFromOptions(da,err_PETSc) diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90 index 60e7d676e..bc1c0484c 100644 --- a/src/grid/grid_mech_spectral_polarisation.f90 +++ b/src/grid/grid_mech_spectral_polarisation.f90 @@ -123,7 +123,7 @@ subroutine grid_mechanical_spectral_polarisation_init() FandF_tau, & ! overall pointer to solution data F, & ! specific (sub)pointer F_tau ! specific (sub)pointer - PetscInt, dimension(0:worldsize-1) :: localK + integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global real(pREAL), dimension(3,3,product(cells(1:2))*cells3) :: temp33n integer(HID_T) :: fileHandle, groupHandle type(tDict), pointer :: & @@ -187,17 +187,16 @@ subroutine grid_mechanical_spectral_polarisation_init() CHKERRQ(err_PETSc) call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc) CHKERRQ(err_PETSc) - localK = 0_pPetscInt - localK(worldrank) = int(cells3,pPetscInt) - call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) + call MPI_Allgather(int(cells3,pPetscInt),1_MPI_INTEGER_KIND,MPI_INTEGER,& + cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' call DMDACreate3d(PETSC_COMM_WORLD, & DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point - int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells - 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), & - 18_pPetscInt, 0_pPetscInt, & ! #dof (2xtensor), ghost boundary width (domain overlap) - [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! 
local cells + int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells + 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & + 18_pPETSCINT, 0_pPETSCINT, & ! #dof (2xtensor), ghost boundary width (domain overlap) + [int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells da,err_PETSc) ! handle, error CHKERRQ(err_PETSc) call DMsetFromOptions(da,err_PETSc) diff --git a/src/grid/grid_thermal_spectral.f90 b/src/grid/grid_thermal_spectral.f90 index 0c7cf3a54..e8fd0c914 100644 --- a/src/grid/grid_thermal_spectral.f90 +++ b/src/grid/grid_thermal_spectral.f90 @@ -70,7 +70,7 @@ contains !-------------------------------------------------------------------------------------------------- subroutine grid_thermal_spectral_init() - PetscInt, dimension(0:worldsize-1) :: localK + integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global integer :: i, j, k, ce DM :: DM_thermal real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed @@ -113,17 +113,16 @@ subroutine grid_thermal_spectral_init() CHKERRQ(err_PETSc) call SNESSetOptionsPrefix(SNES_thermal,'thermal_',err_PETSc) CHKERRQ(err_PETSc) - localK = 0_pPetscInt - localK(worldrank) = int(cells3,pPetscInt) - call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) + call MPI_Allgather(int(cells3,pPETSCINT),1_MPI_INTEGER_KIND,MPI_INTEGER,& + cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' call DMDACreate3D(PETSC_COMM_WORLD, & DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point - int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells - 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), & - 1_pPetscInt, 0_pPetscInt, & ! #dof (T, scalar), ghost boundary width (domain overlap) - [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells + int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells + 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & + 1_pPETSCINT, 0_pPETSCINT, & ! #dof (T, scalar), ghost boundary width (domain overlap) + [int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells DM_thermal,err_PETSc) ! handle, error CHKERRQ(err_PETSc) call DMsetFromOptions(DM_thermal,err_PETSc) @@ -214,9 +213,9 @@ function grid_thermal_spectral_solution(Delta_t) result(solution) call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! 
returns 0-indexed T CHKERRQ(err_PETSc) - stagNorm = maxval(abs(T - T_stagInc)) T_min = minval(T) T_max = maxval(T) + stagNorm = maxval(abs(T - T_stagInc)) call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*T_max) diff --git a/src/result.f90 b/src/result.f90 index de9db4091..242bdbe28 100644 --- a/src/result.f90 +++ b/src/result.f90 @@ -495,10 +495,9 @@ subroutine result_mapping_phase(ID,entry,label) integer, dimension(:,:), intent(in) :: entry !< phase entry at (co,ce) character(len=*), dimension(:), intent(in) :: label !< label of each phase section - integer(pI64), dimension(size(entry,1),size(entry,2)) :: & - entryGlobal + integer(pI64), dimension(size(entry,1),size(entry,2)) :: entryGlobal integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process - integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process + integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: writeSize !< amount of data written per process integer(HSIZE_T), dimension(2) :: & myShape, & !< shape of the dataset (this process) myOffset, & @@ -521,21 +520,19 @@ subroutine result_mapping_phase(ID,entry,label) integer(MPI_INTEGER_KIND) :: err_MPI - writeSize = 0 - writeSize(worldrank) = size(entry(1,:)) ! total number of entries of this process - call H5Pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) call HDF5_chkerr(hdferr) #ifndef PETSC - entryGlobal = int(entry -1,pI64) ! 0-based + entryGlobal = int(entry-1,pI64) ! 0-based + writeSize(0) = size(entry,dim=2,kind=MPI_INTEGER_KIND) ! total number of entries of this process #else !-------------------------------------------------------------------------------------------------- ! MPI settings and communication call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) call HDF5_chkerr(hdferr) - - call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get output at each process + call MPI_Allgather(size(entry,dim=2,kind=MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& + writeSize,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) call parallelization_chkerr(err_MPI) entryOffset = 0_pI64 @@ -554,9 +551,9 @@ subroutine result_mapping_phase(ID,entry,label) end do #endif - myShape = int([size(ID,1),writeSize(worldrank)], HSIZE_T) - myOffset = int([0,sum(writeSize(0:worldrank-1))], HSIZE_T) - totalShape = int([size(ID,1),sum(writeSize)], HSIZE_T) + myShape = int([size(ID,1,MPI_INTEGER_KIND),writeSize(worldrank)], HSIZE_T) + myOffset = int([0_MPI_INTEGER_KIND,sum(writeSize(0:worldrank-1))], HSIZE_T) + totalShape = int([size(ID,1,MPI_INTEGER_KIND),sum(writeSize)], HSIZE_T) !--------------------------------------------------------------------------------------------------- ! 
compound type: label(ID) + entry @@ -651,10 +648,9 @@ subroutine result_mapping_homogenization(ID,entry,label) integer, dimension(:), intent(in) :: entry !< homogenization entry at (ce) character(len=*), dimension(:), intent(in) :: label !< label of each homogenization section - integer(pI64), dimension(size(entry,1)) :: & - entryGlobal + integer(pI64), dimension(size(entry,1)) :: entryGlobal integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process - integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process + integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: writeSize !< amount of data written per process integer(HSIZE_T), dimension(1) :: & myShape, & !< shape of the dataset (this process) myOffset, & @@ -677,31 +673,29 @@ subroutine result_mapping_homogenization(ID,entry,label) integer(MPI_INTEGER_KIND) :: err_MPI - writeSize = 0 - writeSize(worldrank) = size(entry) ! total number of entries of this process - call H5Pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) call HDF5_chkerr(hdferr) #ifndef PETSC - entryGlobal = int(entry -1,pI64) ! 0-based + entryGlobal = int(entry-1,pI64) + writeSize(0) = size(entry,kind=MPI_INTEGER_KIND) ! total number of entries of this process ! 0-based #else !-------------------------------------------------------------------------------------------------- ! MPI settings and communication call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) call HDF5_chkerr(hdferr) - - call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get output at each process + call MPI_Allgather(size(entry,kind=MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& + writeSize,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) call parallelization_chkerr(err_MPI) entryOffset = 0_pI64 - do ce = 1, size(ID,1) + do ce = 1, size(ID) entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1_pI64 end do call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INTEGER8,MPI_SUM,MPI_COMM_WORLD,err_MPI)! get offset at each process call parallelization_chkerr(err_MPI) entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2) - do ce = 1, size(ID,1) + do ce = 1, size(ID) entryGlobal(ce) = int(entry(ce),pI64) -1_pI64 + entryOffset(ID(ce),worldrank) end do #endif From 8682df0e861d3e40f4a52d1af5fe1f54092449a2 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Sun, 16 Jul 2023 13:17:57 +0200 Subject: [PATCH 3/4] avoiding double storage of phi general adjustments of damage solver to follow thermal solver --- src/grid/grid_damage_spectral.f90 | 154 +++++++++++++++-------------- src/grid/grid_thermal_spectral.f90 | 5 +- 2 files changed, 81 insertions(+), 78 deletions(-) diff --git a/src/grid/grid_damage_spectral.f90 b/src/grid/grid_damage_spectral.f90 index a2e60cb01..5f88e283f 100644 --- a/src/grid/grid_damage_spectral.f90 +++ b/src/grid/grid_damage_spectral.f90 @@ -47,9 +47,8 @@ module grid_damage_spectral !-------------------------------------------------------------------------------------------------- ! 
PETSc data SNES :: SNES_damage - Vec :: solution_vec + Vec :: phi_PETSc real(pREAL), dimension(:,:,:), allocatable :: & - phi, & !< field of current damage phi_lastInc, & !< field of previous damage phi_stagInc !< field of staggered damage @@ -74,8 +73,8 @@ subroutine grid_damage_spectral_init() integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global integer :: i, j, k, ce - DM :: damage_grid - real(pREAL), dimension(:,:,:), pointer :: phi_PETSc + DM :: DM_damage + real(pREAL), dimension(:,:,:), pointer :: phi ! 0-indexed Vec :: uBound, lBound integer(MPI_INTEGER_KIND) :: err_MPI PetscErrorCode :: err_PETSc @@ -87,6 +86,7 @@ subroutine grid_damage_spectral_init() character(len=pSTRLEN) :: & snes_type + print'(/,1x,a)', '<<<+- grid_spectral_damage init -+>>>' print'(/,1x,a)', 'P. Shanthraj et al., Handbook of Mechanics of Materials, 2019' @@ -117,12 +117,6 @@ subroutine grid_damage_spectral_init() call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asStr('petsc_options',defaultVal=''),err_PETSc) CHKERRQ(err_PETSc) -!-------------------------------------------------------------------------------------------------- -! init fields - phi = discretization_grid_getInitialCondition('phi') - phi_lastInc = phi - phi_stagInc = phi - !-------------------------------------------------------------------------------------------------- ! initialize solver specific parts of PETSc call SNESCreate(PETSC_COMM_WORLD,SNES_damage,err_PETSc) @@ -139,17 +133,17 @@ subroutine grid_damage_spectral_init() 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & 1_pPETSCINT, 0_pPETSCINT, & ! #dof (phi, scalar), ghost boundary width (domain overlap) [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],int(cells3_global,pPETSCINT), & ! local cells - damage_grid,err_PETSc) ! handle, error + DM_damage,err_PETSc) ! handle, error CHKERRQ(err_PETSc) - call DMsetFromOptions(damage_grid,err_PETSc) + call DMsetFromOptions(DM_damage,err_PETSc) CHKERRQ(err_PETSc) - call DMsetUp(damage_grid,err_PETSc) + call DMsetUp(DM_damage,err_PETSc) CHKERRQ(err_PETSc) - call DMCreateGlobalVector(damage_grid,solution_vec,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor) + call DMCreateGlobalVector(DM_damage,phi_PETSc,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor) CHKERRQ(err_PETSc) - call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector + call DMDASNESSetFunctionLocal(DM_damage,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector CHKERRQ(err_PETSc) - call SNESSetDM(SNES_damage,damage_grid,err_PETSc) + call SNESSetDM(SNES_damage,DM_damage,err_PETSc) CHKERRQ(err_PETSc) call SNESSetFromOptions(SNES_damage,err_PETSc) ! pull it all together with additional CLI arguments CHKERRQ(err_PETSc) @@ -157,9 +151,9 @@ subroutine grid_damage_spectral_init() CHKERRQ(err_PETSc) if (trim(snes_type) == 'vinewtonrsls' .or. & trim(snes_type) == 'vinewtonssls') then - call DMGetGlobalVector(damage_grid,lBound,err_PETSc) + call DMGetGlobalVector(DM_damage,lBound,err_PETSc) CHKERRQ(err_PETSc) - call DMGetGlobalVector(damage_grid,uBound,err_PETSc) + call DMGetGlobalVector(DM_damage,uBound,err_PETSc) CHKERRQ(err_PETSc) call VecSet(lBound,0.0_pREAL,err_PETSc) CHKERRQ(err_PETSc) @@ -167,12 +161,15 @@ subroutine grid_damage_spectral_init() CHKERRQ(err_PETSc) call SNESVISetVariableBounds(SNES_damage,lBound,uBound,err_PETSc) ! 
variable bounds for variational inequalities CHKERRQ(err_PETSc) - call DMRestoreGlobalVector(damage_grid,lBound,err_PETSc) + call DMRestoreGlobalVector(DM_damage,lBound,err_PETSc) CHKERRQ(err_PETSc) - call DMRestoreGlobalVector(damage_grid,uBound,err_PETSc) + call DMRestoreGlobalVector(DM_damage,uBound,err_PETSc) CHKERRQ(err_PETSc) end if + call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) ! returns 0-indexed phi + CHKERRQ(err_PETSc) + restartRead: if (CLI_restartInc > 0) then print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc @@ -183,18 +180,20 @@ subroutine grid_damage_spectral_init() phi = reshape(tempN,[cells(1),cells(2),cells3]) call HDF5_read(tempN,groupHandle,'phi_lastInc',.false.) phi_lastInc = reshape(tempN,[cells(1),cells(2),cells3]) + phi_stagInc = phi_lastInc + else + phi = discretization_grid_getInitialCondition('phi') + phi_lastInc = phi(0:,0:,0:) + phi_stagInc = phi_lastInc end if restartRead ce = 0 - do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1) + do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0, cells(1)-1 ce = ce + 1 call homogenization_set_phi(phi(i,j,k),ce) end do; end do; end do - call DMDAVecGetArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc) - CHKERRQ(err_PETSc) - phi_PETSc = phi - call DMDAVecRestoreArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc) + call DMDAVecRestoreArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) CHKERRQ(err_PETSc) call updateReference() @@ -209,37 +208,43 @@ function grid_damage_spectral_solution(Delta_t) result(solution) real(pREAL), intent(in) :: & Delta_t !< increment in time for current solution + integer :: i, j, k, ce type(tSolutionState) :: solution PetscInt :: devNull PetscReal :: phi_min, phi_max, stagNorm - + DM :: DM_damage + real(pREAL), dimension(:,:,:), pointer :: phi ! 0-indexed integer(MPI_INTEGER_KIND) :: err_MPI PetscErrorCode :: err_PETSc SNESConvergedReason :: reason + solution%converged = .false. !-------------------------------------------------------------------------------------------------- ! set module wide availabe data params%Delta_t = Delta_t - call SNESSolve(SNES_damage,PETSC_NULL_VEC,solution_vec,err_PETSc) + call SNESSolve(SNES_damage,PETSC_NULL_VEC,phi_PETSc,err_PETSc) CHKERRQ(err_PETSc) call SNESGetConvergedReason(SNES_damage,reason,err_PETSc) CHKERRQ(err_PETSc) - if (reason < 1) then - solution%converged = .false. - solution%iterationsNeeded = num%itmax - else - solution%converged = .true. - solution%iterationsNeeded = totalIter - end if + solution%converged = reason > 0 + solution%iterationsNeeded = merge(totalIter,num%itmax,solution%converged) + + call SNESGetDM(SNES_damage,DM_damage,err_PETSc) + CHKERRQ(err_PETSc) + call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) ! 
returns 0-indexed phi + CHKERRQ(err_PETSc) + + phi_min = minval(phi) + phi_max = maxval(phi) stagNorm = maxval(abs(phi - phi_stagInc)) call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' - solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi)) + solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*phi_max) call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' phi_stagInc = phi @@ -247,15 +252,14 @@ function grid_damage_spectral_solution(Delta_t) result(solution) !-------------------------------------------------------------------------------------------------- ! updating damage state ce = 0 - do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) + do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0,cells(1)-1 ce = ce + 1 call homogenization_set_phi(phi(i,j,k),ce) end do; end do; end do - call VecMin(solution_vec,devNull,phi_min,err_PETSc) - CHKERRQ(err_PETSc) - call VecMax(solution_vec,devNull,phi_max,err_PETSc) + call DMDAVecRestoreArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) CHKERRQ(err_PETSc) + if (solution%converged) & print'(/,1x,a)', '... nonlocal damage converged .....................................' print'(/,1x,a,f8.6,2x,f8.6,2x,e11.4)', 'Minimum|Maximum|Delta Damage = ', phi_min, phi_max, stagNorm @@ -266,35 +270,31 @@ end function grid_damage_spectral_solution !-------------------------------------------------------------------------------------------------- -!> @brief spectral damage forwarding routine +!> @brief Set DAMASK data to current solver status. !-------------------------------------------------------------------------------------------------- subroutine grid_damage_spectral_forward(cutBack) logical, intent(in) :: cutBack integer :: i, j, k, ce - DM :: dm_local - real(pREAL), dimension(:,:,:), pointer :: phi_PETSc + DM :: DM_damage + real(pREAL), dimension(:,:,:), pointer :: phi ! 0-indexed PetscErrorCode :: err_PETSc + call SNESGetDM(SNES_damage,DM_damage,err_PETSc) + CHKERRQ(err_PETSc) + call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) ! returns 0-indexed T + CHKERRQ(err_PETSc) + if (cutBack) then - phi = phi_lastInc - phi_stagInc = phi_lastInc -!-------------------------------------------------------------------------------------------------- -! reverting damage field state - call SNESGetDM(SNES_damage,dm_local,err_PETSc) - CHKERRQ(err_PETSc) - call DMDAVecGetArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc) !< get the data out of PETSc to work with - CHKERRQ(err_PETSc) - phi_PETSc = phi - call DMDAVecRestoreArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc) - CHKERRQ(err_PETSc) ce = 0 do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) ce = ce + 1 - call homogenization_set_phi(phi(i,j,k),ce) + call homogenization_set_phi(phi_lastInc(i,j,k),ce) end do; end do; end do + phi = phi_lastInc + phi_stagInc = phi_lastInc else phi_lastInc = phi call updateReference() @@ -309,13 +309,14 @@ end subroutine grid_damage_spectral_forward subroutine grid_damage_spectral_restartWrite() PetscErrorCode :: err_PETSc - DM :: dm_local + DM :: DM_damage integer(HID_T) :: fileHandle, groupHandle - PetscScalar, dimension(:,:,:), pointer :: phi + real(pREAL), dimension(:,:,:), pointer :: phi ! 
0-indexed - call SNESGetDM(SNES_damage,dm_local,err_PETSc); + + call SNESGetDM(SNES_damage,DM_damage,err_PETSc) CHKERRQ(err_PETSc) - call DMDAVecGetArrayReadF90(dm_local,solution_vec,phi,err_PETSc); + call DMDAVecGetArrayReadF90(DM_damage,phi_PETSc,phi,err_PETSc) ! returns 0-indexed T CHKERRQ(err_PETSc) print'(1x,a)', 'saving damage solver data required for restart'; flush(IO_STDOUT) @@ -327,7 +328,7 @@ subroutine grid_damage_spectral_restartWrite() call HDF5_closeGroup(groupHandle) call HDF5_closeFile(fileHandle) - call DMDAVecRestoreArrayReadF90(dm_local,solution_vec,phi,err_PETSc); + call DMDAVecRestoreArrayReadF90(DM_damage,phi_PETSc,phi,err_PETSc); CHKERRQ(err_PETSc) end subroutine grid_damage_spectral_restartWrite @@ -351,24 +352,25 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc) real(pREAL), dimension(3,cells(1),cells(2),cells3) :: vectorField - phi = x_scal - vectorField = utilities_ScalarGradient(phi) - ce = 0 - do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) - ce = ce + 1 - vectorField(1:3,i,j,k) = matmul(homogenization_K_phi(ce) - K_ref, vectorField(1:3,i,j,k)) - end do; end do; end do - r = utilities_VectorDivergence(vectorField) - ce = 0 - do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) - ce = ce + 1 - r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi(i,j,k),ce)) & - + homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi(i,j,k)) & - + mu_ref*phi(i,j,k) - end do; end do; end do + associate(phi => x_scal) + vectorField = utilities_ScalarGradient(phi) + ce = 0 + do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) + ce = ce + 1 + vectorField(1:3,i,j,k) = matmul(homogenization_K_phi(ce) - K_ref, vectorField(1:3,i,j,k)) + end do; end do; end do + r = utilities_VectorDivergence(vectorField) + ce = 0 + do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1) + ce = ce + 1 + r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi(i,j,k),ce)) & + + homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi(i,j,k)) & + + mu_ref*phi(i,j,k) + end do; end do; end do - r = max(min(utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t),phi_lastInc),num%phi_min) & - - phi + r = max(min(utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t),phi_lastInc),num%phi_min) & + - phi + end associate err_PETSc = 0 end subroutine formResidual diff --git a/src/grid/grid_thermal_spectral.f90 b/src/grid/grid_thermal_spectral.f90 index e8fd0c914..a3e2ae673 100644 --- a/src/grid/grid_thermal_spectral.f90 +++ b/src/grid/grid_thermal_spectral.f90 @@ -81,6 +81,7 @@ subroutine grid_thermal_spectral_init() type(tDict), pointer :: & num_grid + print'(/,1x,a)', '<<<+- grid_thermal_spectral init -+>>>' print'(/,1x,a)', 'P. Shanthraj et al., Handbook of Mechanics of Materials, 2019' @@ -291,6 +292,7 @@ subroutine grid_thermal_spectral_restartWrite() integer(HID_T) :: fileHandle, groupHandle real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed + call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc) CHKERRQ(err_PETSc) call DMDAVecGetArrayReadF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T @@ -312,7 +314,6 @@ subroutine grid_thermal_spectral_restartWrite() end subroutine grid_thermal_spectral_restartWrite - !-------------------------------------------------------------------------------------------------- !> @brief Construct the residual vector. 
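Background on the construct used in the residual routines of this series: the incoming PETSc solution array x_scal is no longer copied into a module-level field (the former T or phi arrays); it is aliased with an ASSOCIATE block, so only one copy of the state exists. A minimal, self-contained sketch of that Fortran idiom, independent of DAMASK and PETSc — program name and values are illustrative only:

  program associate_demo
    implicit none
    real :: x_scal(2,2,2)

    x_scal = 300.0
    associate (T => x_scal)                ! T aliases x_scal: no copy, no duplicated storage
      print *, 'mean value:', sum(T)/real(size(T))
    end associate
  end program associate_demo

Inside the block, T behaves like the original array but occupies no additional memory, which is what the "avoid duplicated storage" commits rely on in the residual routine below.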
!-------------------------------------------------------------------------------------------------- @@ -349,8 +350,8 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc) r = T & - utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t) - err_PETSc = 0 end associate + err_PETSc = 0 end subroutine formResidual From ddeb21872800fa17e3509af37c8009bd079b2704 Mon Sep 17 00:00:00 2001 From: Martin Diehl Date: Mon, 17 Jul 2023 02:52:26 +0200 Subject: [PATCH 4/4] unified naming scheme with damage/thermal --- src/grid/grid_mech_FEM.f90 | 76 ++++++++++---------- src/grid/grid_mech_spectral_basic.f90 | 50 ++++++------- src/grid/grid_mech_spectral_polarisation.f90 | 50 ++++++------- 3 files changed, 88 insertions(+), 88 deletions(-) diff --git a/src/grid/grid_mech_FEM.f90 b/src/grid/grid_mech_FEM.f90 index 14f750d73..f7f72dd9f 100644 --- a/src/grid/grid_mech_FEM.f90 +++ b/src/grid/grid_mech_FEM.f90 @@ -52,9 +52,9 @@ module grid_mechanical_FEM !-------------------------------------------------------------------------------------------------- ! PETSc data - DM :: mechanical_grid - SNES :: SNES_mechanical - Vec :: solution_current, solution_lastInc, solution_rate + DM :: DM_mech + SNES :: SNES_mech + Vec :: u_PETSc, u_lastInc_PETSc, uDot_PETSc !-------------------------------------------------------------------------------------------------- ! common pointwise data @@ -163,9 +163,9 @@ subroutine grid_mechanical_FEM_init !-------------------------------------------------------------------------------------------------- ! initialize solver specific parts of PETSc - call SNESCreate(PETSC_COMM_WORLD,SNES_mechanical,err_PETSc) + call SNESCreate(PETSC_COMM_WORLD,SNES_mech,err_PETSc) CHKERRQ(err_PETSc) - call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc) + call SNESSetOptionsPrefix(SNES_mech,'mechanical_',err_PETSc) CHKERRQ(err_PETSc) call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) @@ -177,44 +177,44 @@ subroutine grid_mechanical_FEM_init 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & 3_pPETSCINT, 1_pPETSCINT, & ! #dof (u, vector), ghost boundary width (domain overlap) [int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! 
local cells - mechanical_grid,err_PETSc) + DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call DMsetFromOptions(mechanical_grid,err_PETSc) + call DMsetFromOptions(DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call DMsetUp(mechanical_grid,err_PETSc) + call DMsetUp(DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call DMDASetUniformCoordinates(mechanical_grid,0.0_pREAL,geomSize(1),0.0_pREAL,geomSize(2),0.0_pREAL,geomSize(3),err_PETSc) + call DMDASetUniformCoordinates(DM_mech,0.0_pREAL,geomSize(1),0.0_pREAL,geomSize(2),0.0_pREAL,geomSize(3),err_PETSc) CHKERRQ(err_PETSc) - call DMCreateGlobalVector(mechanical_grid,solution_current,err_PETSc) + call DMCreateGlobalVector(DM_mech,u_PETSc,err_PETSc) CHKERRQ(err_PETSc) - call DMCreateGlobalVector(mechanical_grid,solution_lastInc,err_PETSc) + call DMCreateGlobalVector(DM_mech,u_lastInc_PETSc,err_PETSc) CHKERRQ(err_PETSc) - call DMCreateGlobalVector(mechanical_grid,solution_rate ,err_PETSc) + call DMCreateGlobalVector(DM_mech,uDot_PETSc,err_PETSc) CHKERRQ(err_PETSc) - call DMSNESSetFunctionLocal(mechanical_grid,formResidual,PETSC_NULL_SNES,err_PETSc) + call DMSNESSetFunctionLocal(DM_mech,formResidual,PETSC_NULL_SNES,err_PETSc) CHKERRQ(err_PETSc) - call DMSNESSetJacobianLocal(mechanical_grid,formJacobian,PETSC_NULL_SNES,err_PETSc) + call DMSNESSetJacobianLocal(DM_mech,formJacobian,PETSC_NULL_SNES,err_PETSc) CHKERRQ(err_PETSc) - call SNESSetConvergenceTest(SNES_mechanical,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "_converged" + call SNESSetConvergenceTest(SNES_mech,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "_converged" CHKERRQ(err_PETSc) - call SNESSetMaxLinearSolveFailures(SNES_mechanical, huge(1_pPETSCINT), err_PETSc) ! ignore linear solve failures + call SNESSetMaxLinearSolveFailures(SNES_mech, huge(1_pPETSCINT), err_PETSc) ! ignore linear solve failures CHKERRQ(err_PETSc) - call SNESSetDM(SNES_mechanical,mechanical_grid,err_PETSc) + call SNESSetDM(SNES_mech,DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call SNESSetFromOptions(SNES_mechanical,err_PETSc) ! pull it all together with additional cli arguments + call SNESSetFromOptions(SNES_mech,err_PETSc) ! pull it all together with additional cli arguments CHKERRQ(err_PETSc) !-------------------------------------------------------------------------------------------------- ! init fields - call VecSet(solution_current,0.0_pREAL,err_PETSc) + call VecSet(u_PETSc,0.0_pREAL,err_PETSc) CHKERRQ(err_PETSc) - call VecSet(solution_lastInc,0.0_pREAL,err_PETSc) + call VecSet(u_lastInc_PETSc,0.0_pREAL,err_PETSc) CHKERRQ(err_PETSc) - call VecSet(solution_rate ,0.0_pREAL,err_PETSc) + call VecSet(uDot_PETSc ,0.0_pREAL,err_PETSc) CHKERRQ(err_PETSc) - call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc) + call DMDAVecGetArrayF90(DM_mech,u_PETSc,u,err_PETSc) CHKERRQ(err_PETSc) - call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc) + call DMDAVecGetArrayF90(DM_mech,u_lastInc_PETSc,u_lastInc,err_PETSc) CHKERRQ(err_PETSc) delta = geomSize/real(cells,pREAL) ! grid spacing @@ -271,9 +271,9 @@ subroutine grid_mechanical_FEM_init call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, & ! stress field, stress avg, global average of stiffness and (min+max)/2 F, & ! target F 0.0_pREAL) ! 
time increment - call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc) + call DMDAVecRestoreArrayF90(DM_mech,u_PETSc,u,err_PETSc) CHKERRQ(err_PETSc) - call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc) + call DMDAVecRestoreArrayF90(DM_mech,u_lastInc_PETSc,u_lastInc,err_PETSc) CHKERRQ(err_PETSc) restartRead2: if (CLI_restartInc > 0) then @@ -315,9 +315,9 @@ function grid_mechanical_FEM_solution(incInfoIn) result(solution) ! update stiffness (and gamma operator) S = utilities_maskedCompliance(params%rotation_BC,params%stress_mask,C_volAvg) - call SNESsolve(SNES_mechanical,PETSC_NULL_VEC,solution_current,err_PETSc) + call SNESsolve(SNES_mech,PETSC_NULL_VEC,u_PETSc,err_PETSc) CHKERRQ(err_PETSc) - call SNESGetConvergedReason(SNES_mechanical,reason,err_PETSc) + call SNESGetConvergedReason(SNES_mech,reason,err_PETSc) CHKERRQ(err_PETSc) solution%converged = reason > 0 @@ -374,15 +374,15 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai end if if (guess) then - call VecWAXPY(solution_rate,-1.0_pREAL,solution_lastInc,solution_current,err_PETSc) + call VecWAXPY(uDot_PETSc,-1.0_pREAL,u_lastInc_PETSc,u_PETSc,err_PETSc) CHKERRQ(err_PETSc) - call VecScale(solution_rate,1.0_pREAL/Delta_t_old,err_PETSc) + call VecScale(uDot_PETSc,1.0_pREAL/Delta_t_old,err_PETSc) CHKERRQ(err_PETSc) else - call VecSet(solution_rate,0.0_pREAL,err_PETSc) + call VecSet(uDot_PETSc,0.0_pREAL,err_PETSc) CHKERRQ(err_PETSc) end if - call VecCopy(solution_current,solution_lastInc,err_PETSc) + call VecCopy(u_PETSc,u_lastInc_PETSc,err_PETSc) CHKERRQ(err_PETSc) F_lastInc = F @@ -398,7 +398,7 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai if (stress_BC%myType=='dot_P') P_aim = P_aim & + merge(.0_pREAL,stress_BC%values,stress_BC%mask)*Delta_t - call VecAXPY(solution_current,Delta_t,solution_rate,err_PETSc) + call VecAXPY(u_PETSc,Delta_t,uDot_PETSc,err_PETSc) CHKERRQ(err_PETSc) !-------------------------------------------------------------------------------------------------- @@ -430,9 +430,9 @@ subroutine grid_mechanical_FEM_restartWrite() PetscScalar, dimension(:,:,:,:), pointer :: u,u_lastInc - call DMDAVecGetArrayReadF90(mechanical_grid,solution_current,u,err_PETSc) + call DMDAVecGetArrayReadF90(DM_mech,u_PETSc,u,err_PETSc) CHKERRQ(err_PETSc) - call DMDAVecGetArrayReadF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc) + call DMDAVecGetArrayReadF90(DM_mech,u_lastInc_PETSc,u_lastInc,err_PETSc) CHKERRQ(err_PETSc) print'(1x,a)', 'saving solver data required for restart'; flush(IO_STDOUT) @@ -459,9 +459,9 @@ subroutine grid_mechanical_FEM_restartWrite() call HDF5_closeFile(fileHandle) end if - call DMDAVecRestoreArrayReadF90(mechanical_grid,solution_current,u,err_PETSc) + call DMDAVecRestoreArrayReadF90(DM_mech,u_PETSc,u,err_PETSc) CHKERRQ(err_PETSc) - call DMDAVecRestoreArrayReadF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc) + call DMDAVecRestoreArrayReadF90(DM_mech,u_lastInc_PETSc,u_lastInc,err_PETSc) CHKERRQ(err_PETSc) end subroutine grid_mechanical_FEM_restartWrite @@ -531,9 +531,9 @@ subroutine formResidual(da_local,x_local, & integer(MPI_INTEGER_KIND) :: err_MPI real(pREAL), dimension(3,3,3,3) :: devNull - call SNESGetNumberFunctionEvals(SNES_mechanical,nfuncs,err_PETSc) + call SNESGetNumberFunctionEvals(SNES_mech,nfuncs,err_PETSc) CHKERRQ(err_PETSc) - call SNESGetIterationNumber(SNES_mechanical,PETScIter,err_PETSc) + call SNESGetIterationNumber(SNES_mech,PETScIter,err_PETSc) CHKERRQ(err_PETSc) 
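A note on the MPI_Allgather pattern visible in the init routines above (introduced in patch 2 and kept here): previously every rank wrote its local cells3 into one slot of a zero-initialized array of length worldsize and that array was summed with MPI_Allreduce; MPI_Allgather expresses the same "one contribution per rank, everybody receives the ordered list" operation directly. A standalone sketch of the two equivalent idioms — program name, values, and the mpifort/mpirun invocation are illustrative assumptions, not taken from DAMASK:

  ! build: mpifort allgather_demo.f90 -o allgather_demo ; run: mpirun -np 4 ./allgather_demo
  program allgather_demo
    use MPI_f08
    implicit none
    integer :: cells3, rank, nprocs, err
    integer, allocatable :: cells3_global(:)

    call MPI_Init(err)
    call MPI_Comm_rank(MPI_COMM_WORLD,rank,err)
    call MPI_Comm_size(MPI_COMM_WORLD,nprocs,err)
    allocate(cells3_global(0:nprocs-1))
    cells3 = 10 + rank                                      ! stand-in for the local slab size
    ! old idiom: cells3_global = 0; cells3_global(rank) = cells3; MPI_Allreduce(...,MPI_SUM,...)
    ! new idiom: gather exactly one element per rank, already in rank order
    call MPI_Allgather(cells3,1,MPI_INTEGER,cells3_global,1,MPI_INTEGER,MPI_COMM_WORLD,err)
    if (rank == 0) print *, 'cells3 per rank:', cells3_global
    call MPI_Finalize(err)
  end program allgather_demo
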
diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90 index 0ada208c1..c35c4f8e0 100644 --- a/src/grid/grid_mech_spectral_basic.f90 +++ b/src/grid/grid_mech_spectral_basic.f90 @@ -51,9 +51,9 @@ module grid_mechanical_spectral_basic !-------------------------------------------------------------------------------------------------- ! PETSc data - DM :: da - SNES :: SNES_mechanical - Vec :: solution_vec + DM :: DM_mech + SNES :: SNES_mech + Vec :: F_PETSc !-------------------------------------------------------------------------------------------------- ! common pointwise data @@ -162,9 +162,9 @@ subroutine grid_mechanical_spectral_basic_init() !-------------------------------------------------------------------------------------------------- ! initialize solver specific parts of PETSc - call SNESCreate(PETSC_COMM_WORLD,SNES_mechanical,err_PETSc) + call SNESCreate(PETSC_COMM_WORLD,SNES_mech,err_PETSc) CHKERRQ(err_PETSc) - call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc) + call SNESSetOptionsPrefix(SNES_mech,'mechanical_',err_PETSc) CHKERRQ(err_PETSc) call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,& cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) @@ -176,26 +176,26 @@ subroutine grid_mechanical_spectral_basic_init() 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & 9_pPETSCINT, 0_pPETSCINT, & ! #dof (F, tensor), ghost boundary width (domain overlap) [int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells - da,err_PETSc) ! handle, error + DM_mech,err_PETSc) ! handle, error CHKERRQ(err_PETSc) - call DMsetFromOptions(da,err_PETSc) + call DMsetFromOptions(DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call DMsetUp(da,err_PETSc) + call DMsetUp(DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call DMcreateGlobalVector(da,solution_vec,err_PETSc) ! global solution vector (cells x 9, i.e. every def grad tensor) + call DMcreateGlobalVector(DM_mech,F_PETSc,err_PETSc) ! global solution vector (cells x 9, i.e. every def grad tensor) CHKERRQ(err_PETSc) - call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector + call DMDASNESsetFunctionLocal(DM_mech,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector CHKERRQ(err_PETSc) - call SNESsetConvergenceTest(SNES_mechanical,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged" + call SNESsetConvergenceTest(SNES_mech,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged" CHKERRQ(err_PETSc) - call SNESSetDM(SNES_mechanical,da,err_PETSc) + call SNESSetDM(SNES_mech,DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call SNESsetFromOptions(SNES_mechanical,err_PETSc) ! pull it all together with additional CLI arguments + call SNESsetFromOptions(SNES_mech,err_PETSc) ! pull it all together with additional CLI arguments CHKERRQ(err_PETSc) !-------------------------------------------------------------------------------------------------- ! init fields - call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc) ! places pointer on PETSc data + call DMDAVecGetArrayF90(DM_mech,F_PETSc,F,err_PETSc) ! 
places pointer on PETSc data CHKERRQ(err_PETSc) restartRead: if (CLI_restartInc > 0) then @@ -231,7 +231,7 @@ subroutine grid_mechanical_spectral_basic_init() call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2 reshape(F,shape(F_lastInc)), & ! target F 0.0_pREAL) ! time increment - call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc) ! deassociate pointer + call DMDAVecRestoreArrayF90(DM_mech,F_PETSc,F,err_PETSc) ! deassociate pointer CHKERRQ(err_PETSc) restartRead2: if (CLI_restartInc > 0) then @@ -280,9 +280,9 @@ function grid_mechanical_spectral_basic_solution(incInfoIn) result(solution) S = utilities_maskedCompliance(params%rotation_BC,params%stress_mask,C_volAvg) if (num%update_gamma) call utilities_updateGamma(C_minMaxAvg) - call SNESsolve(SNES_mechanical,PETSC_NULL_VEC,solution_vec,err_PETSc) + call SNESsolve(SNES_mech,PETSC_NULL_VEC,F_PETSc,err_PETSc) CHKERRQ(err_PETSc) - call SNESGetConvergedReason(SNES_mechanical,reason,err_PETSc) + call SNESGetConvergedReason(SNES_mech,reason,err_PETSc) CHKERRQ(err_PETSc) solution%converged = reason > 0 @@ -317,7 +317,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_ real(pREAL), pointer, dimension(:,:,:,:) :: F - call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc) + call DMDAVecGetArrayF90(DM_mech,F_PETSc,F,err_PETSc) CHKERRQ(err_PETSc) if (cutBack) then @@ -361,7 +361,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_ F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, & ! estimate of F at end of time+Delta_t that matches rotated F_aim on average rotation_BC%rotate(F_aim,active=.true.)),[9,cells(1),cells(2),cells3]) - call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc) + call DMDAVecRestoreArrayF90(DM_mech,F_PETSc,F,err_PETSc) CHKERRQ(err_PETSc) !-------------------------------------------------------------------------------------------------- @@ -381,10 +381,10 @@ subroutine grid_mechanical_spectral_basic_updateCoords() PetscErrorCode :: err_PETSc real(pREAL), dimension(:,:,:,:), pointer :: F - call DMDAVecGetArrayReadF90(da,solution_vec,F,err_PETSc) + call DMDAVecGetArrayReadF90(DM_mech,F_PETSc,F,err_PETSc) CHKERRQ(err_PETSc) call utilities_updateCoords(reshape(F,[3,3,size(F,2),size(F,3),size(F,4)])) - call DMDAVecRestoreArrayReadF90(da,solution_vec,F,err_PETSc) + call DMDAVecRestoreArrayReadF90(DM_mech,F_PETSc,F,err_PETSc) CHKERRQ(err_PETSc) end subroutine grid_mechanical_spectral_basic_updateCoords @@ -399,7 +399,7 @@ subroutine grid_mechanical_spectral_basic_restartWrite() integer(HID_T) :: fileHandle, groupHandle real(pREAL), dimension(:,:,:,:), pointer :: F - call DMDAVecGetArrayReadF90(da,solution_vec,F,err_PETSc) + call DMDAVecGetArrayReadF90(DM_mech,F_PETSc,F,err_PETSc) CHKERRQ(err_PETSc) if (num%update_gamma) C_minMaxAvgRestart = C_minMaxAvg @@ -427,7 +427,7 @@ subroutine grid_mechanical_spectral_basic_restartWrite() call HDF5_closeFile(fileHandle) end if - call DMDAVecRestoreArrayReadF90(da,solution_vec,F,err_PETSc) + call DMDAVecRestoreArrayReadF90(DM_mech,F_PETSc,F,err_PETSc) CHKERRQ(err_PETSc) end subroutine grid_mechanical_spectral_basic_restartWrite @@ -498,9 +498,9 @@ subroutine formResidual(residual_subdomain, F, & integer(MPI_INTEGER_KIND) :: err_MPI - call SNESGetNumberFunctionEvals(SNES_mechanical,nfuncs,err_PETSc) + call SNESGetNumberFunctionEvals(SNES_mech,nfuncs,err_PETSc) CHKERRQ(err_PETSc) - call 
SNESGetIterationNumber(SNES_mechanical,PETScIter,err_PETSc) + call SNESGetIterationNumber(SNES_mech,PETScIter,err_PETSc) CHKERRQ(err_PETSc) if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90 index bc1c0484c..a5ad09969 100644 --- a/src/grid/grid_mech_spectral_polarisation.f90 +++ b/src/grid/grid_mech_spectral_polarisation.f90 @@ -56,9 +56,9 @@ module grid_mechanical_spectral_polarisation !-------------------------------------------------------------------------------------------------- ! PETSc data - DM :: da - SNES :: SNES_mechanical - Vec :: solution_vec + DM :: DM_mech + SNES :: SNES_mech + Vec :: FandF_tau_PETSc !-------------------------------------------------------------------------------------------------- ! common pointwise data @@ -183,9 +183,9 @@ subroutine grid_mechanical_spectral_polarisation_init() !-------------------------------------------------------------------------------------------------- ! initialize solver specific parts of PETSc - call SNESCreate(PETSC_COMM_WORLD,SNES_mechanical,err_PETSc) + call SNESCreate(PETSC_COMM_WORLD,SNES_mech,err_PETSc) CHKERRQ(err_PETSc) - call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc) + call SNESSetOptionsPrefix(SNES_mech,'mechanical_',err_PETSc) CHKERRQ(err_PETSc) call MPI_Allgather(int(cells3,pPetscInt),1_MPI_INTEGER_KIND,MPI_INTEGER,& cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI) @@ -197,26 +197,26 @@ subroutine grid_mechanical_spectral_polarisation_init() 1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), & 18_pPETSCINT, 0_pPETSCINT, & ! #dof (2xtensor), ghost boundary width (domain overlap) [int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells - da,err_PETSc) ! handle, error + DM_mech,err_PETSc) ! handle, error CHKERRQ(err_PETSc) - call DMsetFromOptions(da,err_PETSc) + call DMsetFromOptions(DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call DMsetUp(da,err_PETSc) + call DMsetUp(DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call DMcreateGlobalVector(da,solution_vec,err_PETSc) ! global solution vector (cells x 18, i.e. every def grad tensor) + call DMcreateGlobalVector(DM_mech,FandF_tau_PETSc,err_PETSc) ! global solution vector (cells x 18, i.e. every def grad tensor) CHKERRQ(err_PETSc) - call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector + call DMDASNESsetFunctionLocal(DM_mech,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector CHKERRQ(err_PETSc) - call SNESsetConvergenceTest(SNES_mechanical,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged" + call SNESsetConvergenceTest(SNES_mech,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged" CHKERRQ(err_PETSc) - call SNESSetDM(SNES_mechanical,da,err_PETSc) + call SNESSetDM(SNES_mech,DM_mech,err_PETSc) CHKERRQ(err_PETSc) - call SNESsetFromOptions(SNES_mechanical,err_PETSc) ! pull it all together with additional CLI arguments + call SNESsetFromOptions(SNES_mech,err_PETSc) ! pull it all together with additional CLI arguments CHKERRQ(err_PETSc) !-------------------------------------------------------------------------------------------------- ! init fields - call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc) ! 
places pointer on PETSc data + call DMDAVecGetArrayF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) ! places pointer on PETSc data CHKERRQ(err_PETSc) F => FandF_tau(0: 8,:,:,:) F_tau => FandF_tau(9:17,:,:,:) @@ -260,7 +260,7 @@ subroutine grid_mechanical_spectral_polarisation_init() call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2 reshape(F,shape(F_lastInc)), & ! target F 0.0_pREAL) ! time increment - call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc) ! deassociate pointer + call DMDAVecRestoreArrayF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) ! deassociate pointer CHKERRQ(err_PETSc) restartRead2: if (CLI_restartInc > 0) then @@ -315,9 +315,9 @@ function grid_mechanical_spectral_polarisation_solution(incInfoIn) result(soluti S_scale = math_invSym3333(C_minMaxAvg) end if - call SNESSolve(SNES_mechanical,PETSC_NULL_VEC,solution_vec,err_PETSc) + call SNESSolve(SNES_mech,PETSC_NULL_VEC,FandF_tau_PETSc,err_PETSc) CHKERRQ(err_PETSc) - call SNESGetConvergedReason(SNES_mechanical,reason,err_PETSc) + call SNESGetConvergedReason(SNES_mech,reason,err_PETSc) CHKERRQ(err_PETSc) solution%converged = reason > 0 @@ -354,7 +354,7 @@ subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,D real(pREAL), dimension(3,3) :: F_lambda33 - call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc) + call DMDAVecGetArrayF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) CHKERRQ(err_PETSc) F => FandF_tau(0: 8,:,:,:) F_tau => FandF_tau(9:17,:,:,:) @@ -418,7 +418,7 @@ subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,D end do; end do; end do end if - call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc) + call DMDAVecRestoreArrayF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) CHKERRQ(err_PETSc) !-------------------------------------------------------------------------------------------------- @@ -438,10 +438,10 @@ subroutine grid_mechanical_spectral_polarisation_updateCoords() PetscErrorCode :: err_PETSc real(pREAL), dimension(:,:,:,:), pointer :: FandF_tau - call DMDAVecGetArrayReadF90(da,solution_vec,FandF_tau,err_PETSc) + call DMDAVecGetArrayReadF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) CHKERRQ(err_PETSc) call utilities_updateCoords(reshape(FandF_tau(0:8,:,:,:),[3,3,size(FandF_tau,2),size(FandF_tau,3),size(FandF_tau,4)])) - call DMDAVecRestoreArrayReadF90(da,solution_vec,FandF_tau,err_PETSc) + call DMDAVecRestoreArrayReadF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) CHKERRQ(err_PETSc) end subroutine grid_mechanical_spectral_polarisation_updateCoords @@ -456,7 +456,7 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite() integer(HID_T) :: fileHandle, groupHandle real(pREAL), dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau - call DMDAVecGetArrayReadF90(da,solution_vec,FandF_tau,err_PETSc) + call DMDAVecGetArrayReadF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) CHKERRQ(err_PETSc) F => FandF_tau(0: 8,:,:,:) F_tau => FandF_tau(9:17,:,:,:) @@ -488,7 +488,7 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite() call HDF5_closeFile(fileHandle) end if - call DMDAVecRestoreArrayReadF90(da,solution_vec,FandF_tau,err_PETSc) + call DMDAVecRestoreArrayReadF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) CHKERRQ(err_PETSc) end subroutine grid_mechanical_spectral_polarisation_restartWrite @@ -576,9 +576,9 @@ subroutine formResidual(residual_subdomain, FandF_tau, & call 
MPI_Allreduce(MPI_IN_PLACE,F_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error' - call SNESGetNumberFunctionEvals(SNES_mechanical,nfuncs,err_PETSc) + call SNESGetNumberFunctionEvals(SNES_mech,nfuncs,err_PETSc) CHKERRQ(err_PETSc) - call SNESGetIterationNumber(SNES_mechanical,PETScIter,err_PETSc) + call SNESGetIterationNumber(SNES_mech,PETScIter,err_PETSc) CHKERRQ(err_PETSc) if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
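
The same storage-saving pattern runs through all three solvers: the field (F, F together with F_tau, or T) lives only in the DMDA-managed global vector, and each routine borrows a 0-indexed Fortran pointer into it via DMDAVecGetArrayF90 (or the read-only variant), works in place, and restores the pointer afterwards. A minimal sketch of that borrow/restore cycle follows, assuming the PETSc Fortran bindings and DAMASK's prec module; the subroutine name and the in-place operation are illustrative only.

! Illustration only: borrow a pointer into a DMDA global vector, operate in place,
! return it. DM_mech/F_PETSc mirror the names introduced in this patch; the
! subroutine itself and the scaling are hypothetical.
subroutine scaleField(DM_mech,F_PETSc,factor,err_PETSc)
#include <petsc/finclude/petscdmda.h>
  use petscdmda
  use prec, only: pREAL

  implicit none
  DM,             intent(in)    :: DM_mech
  Vec,            intent(inout) :: F_PETSc
  real(pREAL),    intent(in)    :: factor
  PetscErrorCode, intent(out)   :: err_PETSc

  real(pREAL), dimension(:,:,:,:), pointer :: F                                     ! 0-indexed view of the local block (dof, x, y, z)


  call DMDAVecGetArrayF90(DM_mech,F_PETSc,F,err_PETSc)                              ! places pointer on PETSc data, no copy
  CHKERRQ(err_PETSc)
  F = F*factor                                                                      ! operate directly on PETSc storage
  call DMDAVecRestoreArrayF90(DM_mech,F_PETSc,F,err_PETSc)                          ! deassociate pointer; F is invalid afterwards
  CHKERRQ(err_PETSc)

end subroutine scaleField

Every Get must be paired with the matching Restore before the vector is handed back to SNES; the restore call is what makes the in-place modification visible to PETSc.
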