more specific name

'interface' can be an interface to anything; 'CLI' is an established abbreviation for 'command line interface'.
parent 0e65d44bdc
commit b80b406ad5
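The change is a pure rename: every public 'interface_*' entity and the 'DAMASK_interface_init' entry point of module 'DAMASK_interface' gain a 'CLI_' prefix, and all call sites are updated to match. A minimal before/after sketch of a caller (the surrounding program is illustrative only, not part of the commit):

    ! illustrative caller; module and entity names are taken from the diff below
    program show_rename
      use DAMASK_interface   ! module name is unchanged by this commit

      ! before this commit:
      !   call DAMASK_interface_init
      !   print *, interface_geomFile, interface_loadFile, interface_restartInc

      call CLI_init          ! renamed from DAMASK_interface_init
      print *, CLI_geomFile, CLI_loadFile, CLI_restartInc
    end program show_rename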
@@ -3,11 +3,7 @@
 !> @author Pratheek Shanthraj, Max-Planck-Institut für Eisenforschung GmbH
 !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
 !> @author Philip Eisenlohr, Max-Planck-Institut für Eisenforschung GmbH
-!> @brief Interfacing between the PETSc-based solvers and the material subroutines provided
-!! by DAMASK
-!> @details Interfacing between the PETSc-based solvers and the material subroutines provided
-!> by DAMASK. Interpreting the command line arguments to get load case, geometry file,
-!> and working directory.
+!> @brief Parse command line interface for PETSc-based solvers
 !--------------------------------------------------------------------------------------------------
 #define PETSC_MAJOR 3
 #define PETSC_MINOR_MIN 12

@@ -25,14 +21,14 @@ module DAMASK_interface
   implicit none
   private
   integer, public, protected :: &
-    interface_restartInc = 0  !< Increment at which calculation starts
+    CLI_restartInc = 0  !< Increment at which calculation starts
   character(len=:), allocatable, public, protected :: &
-    interface_geomFile, &  !< parameter given for geometry file
-    interface_loadFile     !< parameter given for load case file
+    CLI_geomFile, &  !< parameter given for geometry file
+    CLI_loadFile     !< parameter given for load case file

   public :: &
     getSolverJobName, &
-    DAMASK_interface_init
+    CLI_init

 contains


@@ -40,7 +36,7 @@ contains
 !> @brief initializes the solver by interpreting the command line arguments. Also writes
 !! information on computation to screen
 !--------------------------------------------------------------------------------------------------
-subroutine DAMASK_interface_init
+subroutine CLI_init
 #include <petsc/finclude/petscsys.h>

 #if PETSC_VERSION_MAJOR!=3 || PETSC_VERSION_MINOR<PETSC_MINOR_MIN || PETSC_VERSION_MINOR>PETSC_MINOR_MAX

@@ -156,8 +152,8 @@ subroutine DAMASK_interface_init
         call get_command_argument(i+1,workingDirArg,status=err)
       case ('-r', '--rs', '--restart')
         call get_command_argument(i+1,arg,status=err)
-        read(arg,*,iostat=stat) interface_restartInc
-        if (interface_restartInc < 0 .or. stat /=0) then
+        read(arg,*,iostat=stat) CLI_restartInc
+        if (CLI_restartInc < 0 .or. stat /=0) then
           print'(/,a)', ' ERROR: Could not parse restart increment: '//trim(arg)
           call quit(1)
         endif
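Aside: the '--restart' handling above follows the usual Fortran pattern of parsing an integer with read(...,iostat=...) and rejecting both unparseable and negative values. A self-contained sketch of that pattern (program name and the hard-coded argument are hypothetical):

    ! minimal sketch of the validation pattern used in CLI_init above
    program parse_restart_demo
      implicit none
      character(len=32) :: arg
      integer :: restartInc, stat

      arg = '25'  ! stands in for the value fetched via get_command_argument
      read(arg,*,iostat=stat) restartInc
      if (stat /= 0 .or. restartInc < 0) then
        print '(/,a)', ' ERROR: Could not parse restart increment: '//trim(arg)
        stop 1
      end if
      print '(a,i0)', ' restart increment: ', restartInc
    end program parse_restart_demo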
@@ -171,8 +167,8 @@ subroutine DAMASK_interface_init
   endif

   if (len_trim(workingDirArg) > 0) call setWorkingDirectory(trim(workingDirArg))
-  interface_geomFile = getGeometryFile(geometryArg)
-  interface_loadFile = getLoadCaseFile(loadCaseArg)
+  CLI_geomFile = getGeometryFile(geometryArg)
+  CLI_loadFile = getLoadCaseFile(loadCaseArg)

   call get_command(commandLine)
   print'(/,a)', ' Host name: '//getHostName()

@@ -184,13 +180,13 @@ subroutine DAMASK_interface_init
   print'(a)', ' Geometry argument: '//trim(geometryArg)
   print'(a)', ' Load case argument: '//trim(loadcaseArg)
   print'(/,a)', ' Working directory: '//getCWD()
-  print'(a)', ' Geometry file: '//interface_geomFile
-  print'(a)', ' Load case file: '//interface_loadFile
+  print'(a)', ' Geometry file: '//CLI_geomFile
+  print'(a)', ' Load case file: '//CLI_loadFile
   print'(a)', ' Solver job name: '//getSolverJobName()
-  if (interface_restartInc > 0) &
-    print'(a,i6.6)', ' Restart from increment: ', interface_restartInc
+  if (CLI_restartInc > 0) &
+    print'(a,i6.6)', ' Restart from increment: ', CLI_restartInc

-end subroutine DAMASK_interface_init
+end subroutine CLI_init


 !--------------------------------------------------------------------------------------------------

@@ -229,15 +225,15 @@ function getSolverJobName()
   character(len=:), allocatable :: getSolverJobName
   integer :: posExt,posSep

-  posExt = scan(interface_geomFile,'.',back=.true.)
-  posSep = scan(interface_geomFile,'/',back=.true.)
+  posExt = scan(CLI_geomFile,'.',back=.true.)
+  posSep = scan(CLI_geomFile,'/',back=.true.)

-  getSolverJobName = interface_geomFile(posSep+1:posExt-1)
+  getSolverJobName = CLI_geomFile(posSep+1:posExt-1)

-  posExt = scan(interface_loadFile,'.',back=.true.)
-  posSep = scan(interface_loadFile,'/',back=.true.)
+  posExt = scan(CLI_loadFile,'.',back=.true.)
+  posSep = scan(CLI_loadFile,'/',back=.true.)

-  getSolverJobName = getSolverJobName//'_'//interface_loadFile(posSep+1:posExt-1)
+  getSolverJobName = getSolverJobName//'_'//CLI_loadFile(posSep+1:posExt-1)

 end function getSolverJobName
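Aside: getSolverJobName is only touched by the rename, but its slicing logic explains the job names used later for output and restart files: the geometry and load-case basenames are stripped of directory and extension and joined with '_'. A standalone sketch with hypothetical file names ('examples/polycrystal.vti' and 'tension.yaml' are illustrative, not from the commit):

    ! mirrors the scan/slice logic of getSolverJobName with made-up inputs
    program jobname_demo
      implicit none
      character(len=:), allocatable :: geomFile, loadFile, jobName
      integer :: posExt, posSep

      geomFile = 'examples/polycrystal.vti'  ! stands in for CLI_geomFile
      loadFile = 'tension.yaml'              ! stands in for CLI_loadFile

      posExt = scan(geomFile,'.',back=.true.)
      posSep = scan(geomFile,'/',back=.true.)  ! 0 if no '/' present
      jobName = geomFile(posSep+1:posExt-1)    ! 'polycrystal'

      posExt = scan(loadFile,'.',back=.true.)
      posSep = scan(loadFile,'/',back=.true.)
      jobName = jobName//'_'//loadFile(posSep+1:posExt-1)

      print '(a)', jobName                     ! prints 'polycrystal_tension'
    end program jobname_demo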
@@ -43,7 +43,7 @@ contains
 subroutine CPFEM_initAll

   call parallelization_init
-  call DAMASK_interface_init  ! Spectral and FEM interface to commandline
+  call CLI_init               ! Spectral and FEM interface to commandline
   call signals_init
   call prec_init
   call IO_init

@@ -55,18 +55,18 @@ subroutine CPFEM_initAll
   call YAML_types_init
   call YAML_parse_init
   call HDF5_utilities_init
-  call results_init(restart=interface_restartInc>0)
+  call results_init(restart=CLI_restartInc>0)
   call config_init
   call math_init
   call rotations_init
   call polynomials_init
   call lattice_init
 #if defined(MESH)
-  call discretization_mesh_init(restart=interface_restartInc>0)
+  call discretization_mesh_init(restart=CLI_restartInc>0)
 #elif defined(GRID)
-  call discretization_grid_init(restart=interface_restartInc>0)
+  call discretization_grid_init(restart=CLI_restartInc>0)
 #endif
-  call material_init(restart=interface_restartInc>0)
+  call material_init(restart=CLI_restartInc>0)
   call phase_init
   call homogenization_init
   call CPFEM_init

@@ -86,7 +86,7 @@ subroutine CPFEM_init
   print'(/,1x,a)', '<<<+- CPFEM init -+>>>'; flush(IO_STDOUT)


-  if (interface_restartInc > 0) then
+  if (CLI_restartInc > 0) then
     print'(/,a,i0,a)', ' reading restart information of increment from file'; flush(IO_STDOUT)

     fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
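Aside: the restart file opened in CPFEM_init above is always named getSolverJobName()//'_restart.hdf5'; with the hypothetical names from the job-name sketch earlier, that would be 'polycrystal_tension_restart.hdf5'. Since this commit only renames variables, no on-disk file names change.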
@@ -134,8 +134,8 @@ program DAMASK_grid
   if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')

   if (worldrank == 0) then
-    fileContent = IO_read(interface_loadFile)
-    fname = interface_loadFile
+    fileContent = IO_read(CLI_loadFile)
+    fname = CLI_loadFile
     if (scan(fname,'/') /= 0) fname = fname(scan(fname,'/',.true.)+1:)
     call results_openJobFile(parallel=.false.)
     call results_writeDataset_str(fileContent,'setup',fname,'load case definition (grid solver)')

@@ -315,7 +315,7 @@ program DAMASK_grid
 !--------------------------------------------------------------------------------------------------
 ! write header of output file
   if (worldrank == 0) then
-    writeHeader: if (interface_restartInc < 1) then
+    writeHeader: if (CLI_restartInc < 1) then
       open(newunit=statUnit,file=trim(getSolverJobName())//'.sta',form='FORMATTED',status='REPLACE')
       write(statUnit,'(a)') 'Increment Time CutbackLevel Converged IterationsNeeded'  ! statistics file
     else writeHeader

@@ -324,7 +324,7 @@ program DAMASK_grid
     endif writeHeader
   endif

-  writeUndeformed: if (interface_restartInc < 1) then
+  writeUndeformed: if (CLI_restartInc < 1) then
     print'(/,1x,a)', '... writing initial configuration to file .................................'
     flush(IO_STDOUT)
     call CPFEM_results(0,0.0_pReal)

@@ -348,7 +348,7 @@ program DAMASK_grid
     endif
     Delta_t = Delta_t * real(subStepFactor,pReal)**real(-cutBackLevel,pReal)  ! depending on cut back level, decrease time step

-    skipping: if (totalIncsCounter <= interface_restartInc) then  ! not yet at restart inc?
+    skipping: if (totalIncsCounter <= CLI_restartInc) then        ! not yet at restart inc?
       t = t + Delta_t  ! just advance time, skip already performed calculation
       guess = .true.   ! QUESTION:why forced guessing instead of inheriting loadcase preference
     else skipping
@@ -76,14 +76,14 @@ subroutine discretization_grid_init(restart)


   if (worldrank == 0) then
-    fileContent = IO_read(interface_geomFile)
+    fileContent = IO_read(CLI_geomFile)
     call VTI_readCellsSizeOrigin(cells,geomSize,origin,fileContent)
     materialAt_global = VTI_readDataset_int(fileContent,'material') + 1
     if (any(materialAt_global < 1)) &
       call IO_error(180,ext_msg='material ID < 1')
     if (size(materialAt_global) /= product(cells)) &
       call IO_error(180,ext_msg='mismatch in # of material IDs and cells')
-    fname = interface_geomFile
+    fname = CLI_geomFile
     if (scan(fname,'/') /= 0) fname = fname(scan(fname,'/',.true.)+1:)
     call results_openJobFile(parallel=.false.)
     call results_writeDataset_str(fileContent,'setup',fname,'geometry definition (grid solver)')

@@ -329,7 +329,7 @@ function discretization_grid_getInitialCondition(label) result(ic)
     displs, sendcounts

   if (worldrank == 0) then
-    ic_global = VTI_readDataset_real(IO_read(interface_geomFile),label)
+    ic_global = VTI_readDataset_real(IO_read(CLI_geomFile),label)
   else
     allocate(ic_global(0))  ! needed for IntelMPI
   endif
@@ -231,8 +231,8 @@ subroutine grid_mechanical_FEM_init

 !--------------------------------------------------------------------------------------------------
 ! init fields
-  restartRead: if (interface_restartInc > 0) then
-    print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
+  restartRead: if (CLI_restartInc > 0) then
+    print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'

     fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
     groupHandle = HDF5_openGroup(fileHandle,'solver')

@@ -254,7 +254,7 @@ subroutine grid_mechanical_FEM_init
     call HDF5_read(u_current,groupHandle,'u')
     call HDF5_read(u_lastInc,groupHandle,'u_lastInc')

-  elseif (interface_restartInc == 0) then restartRead
+  elseif (CLI_restartInc == 0) then restartRead
     F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3)  ! initialize to identity
     F = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3)
   endif restartRead

@@ -269,8 +269,8 @@ subroutine grid_mechanical_FEM_init
   call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
   CHKERRQ(err_PETSc)

-  restartRead2: if (interface_restartInc > 0) then
-    print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
+  restartRead2: if (CLI_restartInc > 0) then
+    print'(1x,a,i0,a)', 'reading more restart data of increment ', CLI_restartInc, ' from file'
     call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
     call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
     if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
@@ -201,8 +201,8 @@ subroutine grid_mechanical_spectral_basic_init
   call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc)  ! places pointer on PETSc data
   CHKERRQ(err_PETSc)

-  restartRead: if (interface_restartInc > 0) then
-    print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
+  restartRead: if (CLI_restartInc > 0) then
+    print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'

     fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
     groupHandle = HDF5_openGroup(fileHandle,'solver')

@@ -222,7 +222,7 @@ subroutine grid_mechanical_spectral_basic_init
     call HDF5_read(F,groupHandle,'F')
     call HDF5_read(F_lastInc,groupHandle,'F_lastInc')

-  elseif (interface_restartInc == 0) then restartRead
+  elseif (CLI_restartInc == 0) then restartRead
     F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3)  ! initialize to identity
     F = reshape(F_lastInc,[9,cells(1),cells(2),cells3])
   end if restartRead

@@ -235,8 +235,8 @@ subroutine grid_mechanical_spectral_basic_init
   call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc)  ! deassociate pointer
   CHKERRQ(err_PETSc)

-  restartRead2: if (interface_restartInc > 0) then
-    print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
+  restartRead2: if (CLI_restartInc > 0) then
+    print'(1x,a,i0,a)', 'reading more restart data of increment ', CLI_restartInc, ' from file'
     call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
     call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
     if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
@@ -223,8 +223,8 @@ subroutine grid_mechanical_spectral_polarisation_init
   F => FandF_tau(0: 8,:,:,:)
   F_tau => FandF_tau(9:17,:,:,:)

-  restartRead: if (interface_restartInc > 0) then
-    print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
+  restartRead: if (CLI_restartInc > 0) then
+    print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'

     fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
     groupHandle = HDF5_openGroup(fileHandle,'solver')

@@ -246,7 +246,7 @@ subroutine grid_mechanical_spectral_polarisation_init
     call HDF5_read(F_tau,groupHandle,'F_tau')
     call HDF5_read(F_tau_lastInc,groupHandle,'F_tau_lastInc')

-  elseif (interface_restartInc == 0) then restartRead
+  elseif (CLI_restartInc == 0) then restartRead
     F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3)  ! initialize to identity
     F = reshape(F_lastInc,[9,cells(1),cells(2),cells3])
     F_tau = 2.0_pReal*F

@@ -261,8 +261,8 @@ subroutine grid_mechanical_spectral_polarisation_init
   call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)  ! deassociate pointer
   CHKERRQ(err_PETSc)

-  restartRead2: if (interface_restartInc > 0) then
-    print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
+  restartRead2: if (CLI_restartInc > 0) then
+    print'(1x,a,i0,a)', 'reading more restart data of increment ', CLI_restartInc, ' from file'
     call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
     call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
     if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
@@ -140,8 +140,8 @@ subroutine grid_thermal_spectral_init()
   CHKERRQ(err_PETSc)


-  restartRead: if (interface_restartInc > 0) then
-    print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
+  restartRead: if (CLI_restartInc > 0) then
+    print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'

     fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
     groupHandle = HDF5_openGroup(fileHandle,'solver')