avoid (x/y/z)end and (x/y/z)start, same information is in grid
parent 96ff3314a3
commit 25ddec76e7
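For context, a minimal standalone sketch of the bookkeeping this commit drops: assuming the DMDA is decomposed only along the third (z) axis, as these grid solvers appear to set it up, DMDAGetCorners reports a corner of (0,0,grid3Offset) and widths of (grid(1),grid(2),grid3), so the (x/y/z)start/(x/y/z)end variables never carry information that grid, grid3 and grid3Offset do not already hold. The plain-Fortran program below (made-up sizes, no PETSc) just spells out that equivalence.

! Standalone sketch, not part of the commit: made-up sizes, no PETSc.
program corners_vs_grid
  implicit none

  integer, dimension(3) :: grid = [4,5,6]   ! global grid (example values)
  integer :: grid3, grid3Offset             ! local z-extent and z-offset of one rank
  integer :: xstart, ystart, zstart, xend, yend, zend

  grid3       = 3                           ! pretend this rank owns 3 z-layers ...
  grid3Offset = 2                           ! ... starting at global layer 2 (0-based)

  ! what DMDAGetCorners would report for a z-only decomposition:
  ! corner = (0,0,grid3Offset), widths = (grid(1),grid(2),grid3)
  xstart = 0;            xend = xstart + grid(1) - 1
  ystart = 0;            yend = ystart + grid(2) - 1
  zstart = grid3Offset;  zend = zstart + grid3   - 1

  ! the slice (xstart:xend,ystart:yend,zstart:zend) therefore always spans
  ! grid(1) x grid(2) x grid3 points, i.e. the whole local field
  print '(a,3i4)', 'extents from start/end:', xend-xstart+1, yend-ystart+1, zend-zstart+1
  print '(a,3i4)', 'extents from grid     :', grid(1), grid(2), grid3
end program corners_vs_grid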
@@ -41,7 +41,6 @@ module grid_damage_spectral
 ! PETSc data
   SNES :: damage_snes
   Vec :: solution_vec
-  PetscInt :: xstart, xend, ystart, yend, zstart, zend
   real(pReal), dimension(:,:,:), allocatable :: &
     phi_current, &                                  !< field of current damage
     phi_lastInc, &                                  !< field of previous damage
@@ -143,11 +142,6 @@ subroutine grid_damage_spectral_init()
 
 !--------------------------------------------------------------------------------------------------
 ! init fields
-  call DMDAGetCorners(damage_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)
-  CHKERRQ(err_PETSc)
-  xend = xstart + xend - 1
-  yend = ystart + yend - 1
-  zend = zstart + zend - 1
   allocate(phi_current(grid(1),grid(2),grid3), source=1.0_pReal)
   allocate(phi_lastInc(grid(1),grid(2),grid3), source=1.0_pReal)
   allocate(phi_stagInc(grid(1),grid(2),grid3), source=1.0_pReal)
@@ -239,7 +233,7 @@ subroutine grid_damage_spectral_forward(cutBack)
     call SNESGetDM(damage_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc)
     call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,err_PETSc)               !< get the data out of PETSc to work with
     CHKERRQ(err_PETSc)
-    x_scal(xstart:xend,ystart:yend,zstart:zend) = phi_current
+    x_scal = phi_current
     call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,err_PETSc)
     CHKERRQ(err_PETSc)
     ce = 0
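A note on why the plain assignments introduced here (x_scal = phi_current, and the analogous T_current copies further down) are safe: Fortran whole-array assignment only requires the two arrays to have the same shape, not the same bounds, so the array exposed by DMDAVecGetArrayF90 (whose bounds start at the DMDA corner) can be filled without spelling out xstart:xend etc. A standalone sketch with made-up sizes:

! Standalone sketch, not part of the commit: made-up sizes, no PETSc.
program bounds_free_assignment
  implicit none

  real, allocatable :: x_like(:,:,:)   ! stand-in for the array DMDAVecGetArrayF90 returns
  real, allocatable :: phi_like(:,:,:) ! stand-in for phi_current / T_current

  allocate(x_like(0:3, 0:4, 2:4))      ! bounds as a DMDA corner would give them
  allocate(phi_like(4, 5, 3))          ! same shape, default 1-based bounds

  phi_like = 1.0
  x_like   = phi_like                  ! legal: only the shapes must conform

  print '(a,l2)', 'copied despite different bounds:', all(x_like == 1.0)
end program bounds_free_assignment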
@@ -560,14 +560,14 @@ subroutine formResidual(da_local,x_local, &
 !--------------------------------------------------------------------------------------------------
 ! get deformation gradient
   call DMDAVecGetArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
-  do k = zstart+1, zend+1; do j = ystart+1, yend+1; do i = xstart+1, xend+1
+  do k = zstart+1, zend+1; do j = 1, grid(2); do i = 1, grid(1)
     ctr = 0
     do kk = -1, 0; do jj = -1, 0; do ii = -1, 0
       ctr = ctr + 1
       x_elem(ctr,1:3) = x_scal(0:2,i+ii,j+jj,k+kk)
     enddo; enddo; enddo
-    ii = i-xstart; jj = j-ystart; kk = k-zstart
-    F(1:3,1:3,ii,jj,kk) = params%rotation_BC%rotate(F_aim,active=.true.) + transpose(matmul(BMat,x_elem))
+    kk = k-zstart
+    F(1:3,1:3,i,j,kk) = params%rotation_BC%rotate(F_aim,active=.true.) + transpose(matmul(BMat,x_elem))
   enddo; enddo; enddo
   call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
 
@@ -590,15 +590,15 @@ subroutine formResidual(da_local,x_local, &
   call DMDAVecGetArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
   call DMDAVecGetArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
   ele = 0
-  do k = zstart+1, zend+1; do j = ystart+1, yend+1; do i = xstart+1, xend+1
+  do k = zstart+1, zend+1; do j = 1, grid(2); do i = 1, grid(1)
     ctr = 0
     do kk = -1, 0; do jj = -1, 0; do ii = -1, 0
       ctr = ctr + 1
       x_elem(ctr,1:3) = x_scal(0:2,i+ii,j+jj,k+kk)
     enddo; enddo; enddo
-    ii = i-xstart; jj = j-ystart; kk = k-zstart
+    kk = k-zstart
     ele = ele + 1
-    f_elem = matmul(transpose(BMat),transpose(P_current(1:3,1:3,ii,jj,kk)))*detJ + &
+    f_elem = matmul(transpose(BMat),transpose(P_current(1:3,1:3,i,j,kk)))*detJ + &
              matmul(HGMat,x_elem)*(homogenization_dPdF(1,1,1,1,ele) + &
                                    homogenization_dPdF(2,2,2,2,ele) + &
                                    homogenization_dPdF(3,3,3,3,ele))/3.0_pReal
@@ -712,9 +712,9 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,err_PETSc)
   call DMDAVecGetArrayF90(da_local,coordinates,x_scal,err_PETSc)
   CHKERRQ(err_PETSc)
   ele = 0
-  do k = zstart+1, zend+1; do j = ystart+1, yend+1; do i = xstart+1, xend+1
+  do k = grid3offset, grid3offset+grid3-1; do j = 0, grid(2)-1; do i = 0, grid(1)-1
     ele = ele + 1
-    x_scal(0:2,i-1,j-1,k-1) = discretization_IPcoords(1:3,ele)
+    x_scal(0:2,i,j,k) = discretization_IPcoords(1:3,ele)
   enddo; enddo; enddo
   call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,err_PETSc)
   CHKERRQ(err_PETSc)                                              ! initialize to undeformed coordinates (ToDo: use ip coordinates)
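For the formJacobian loop above, the rewrite just moves from 1-based counters with a -1 shift at the array access to looping directly over the 0-based global indices that DMDAVecGetArrayF90 exposes; both forms visit the same entries, assuming zstart equals grid3offset under the z-only decomposition. A standalone sketch with made-up sizes:

! Standalone sketch, not part of the commit: made-up sizes, no PETSc.
program zero_based_loop
  implicit none

  integer, dimension(3) :: grid = [2,2,4]   ! global grid (example values)
  integer :: grid3, grid3Offset             ! local z-extent and z-offset of one rank
  integer :: i, j, k, n_old, n_new, sum_old, sum_new

  grid3       = 2
  grid3Offset = 1

  ! old style: 1-based counters, indices shifted by -1 at the array access
  n_old = 0; sum_old = 0
  do k = grid3Offset+1, grid3Offset+grid3; do j = 1, grid(2); do i = 1, grid(1)
    n_old   = n_old + 1
    sum_old = sum_old + (i-1) + 10*(j-1) + 100*(k-1)   ! was x_scal(0:2,i-1,j-1,k-1)
  enddo; enddo; enddo

  ! new style: loop directly over the 0-based global indices
  n_new = 0; sum_new = 0
  do k = grid3Offset, grid3Offset+grid3-1; do j = 0, grid(2)-1; do i = 0, grid(1)-1
    n_new   = n_new + 1
    sum_new = sum_new + i + 10*j + 100*k               ! is  x_scal(0:2,i,j,k)
  enddo; enddo; enddo

  print '(a,2l2)', 'same count, same index set:', n_old == n_new, sum_old == sum_new
end program zero_based_loop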
@@ -40,7 +40,6 @@ module grid_thermal_spectral
 ! PETSc data
   SNES :: thermal_snes
   Vec :: solution_vec
-  PetscInt :: xstart, xend, ystart, yend, zstart, zend
   real(pReal), dimension(:,:,:), allocatable :: &
     T_current, &                                    !< field of current temperature
     T_lastInc, &                                    !< field of previous temperature
@@ -128,11 +127,6 @@ subroutine grid_thermal_spectral_init(T_0)
 
 !--------------------------------------------------------------------------------------------------
 ! init fields
-  call DMDAGetCorners(thermal_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)
-  CHKERRQ(err_PETSc)
-  xend = xstart + xend - 1
-  yend = ystart + yend - 1
-  zend = zstart + zend - 1
   allocate(T_current(grid(1),grid(2),grid3), source=0.0_pReal)
   allocate(T_lastInc(grid(1),grid(2),grid3), source=0.0_pReal)
   allocate(T_stagInc(grid(1),grid(2),grid3), source=0.0_pReal)
@@ -148,7 +142,7 @@ subroutine grid_thermal_spectral_init(T_0)
 
   call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
   CHKERRQ(err_PETSc)
-  T_PETSc(xstart:xend,ystart:yend,zstart:zend) = T_current
+  T_PETSc = T_current
   call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
   CHKERRQ(err_PETSc)
@@ -239,7 +233,7 @@ subroutine grid_thermal_spectral_forward(cutBack)
     CHKERRQ(err_PETSc)
     call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,err_PETSc)               !< get the data out of PETSc to work with
     CHKERRQ(err_PETSc)
-    x_scal(xstart:xend,ystart:yend,zstart:zend) = T_current
+    x_scal = T_current
     call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,err_PETSc)
     CHKERRQ(err_PETSc)
     ce = 0
@@ -114,7 +114,7 @@ subroutine parallelization_init
   if (worldrank /= 0) then
     close(OUTPUT_UNIT)                                            ! disable output
     write(rank_str,'(i4.4)') worldrank                            ! use for MPI debug filenames
-    open(OUTPUT_UNIT,file='/dev/null',status='replace')           ! close() alone will leave some temp files in cwd
+    open(OUTPUT_UNIT,file='./log.'//rank_str,status='replace')    ! close() alone will leave some temp files in cwd
   endif
 
 !$ call get_environment_variable(name='OMP_NUM_THREADS',value=NumThreadsString,STATUS=got_env)