Merge branch 'language-polish' into 'development'

Additional language polish

See merge request damask/DAMASK!765
Franz Roters 2023-06-28 10:23:00 +00:00
commit a8875677cd
11 changed files with 80 additions and 80 deletions


@@ -66,7 +66,7 @@ subroutine CLI_init()
  ! http://patorjk.com/software/taag/#p=display&f=Lean&t=DAMASK%203
  #ifdef DEBUG
- print*, achar(27)//'[31m'
+ print '(a)', achar(27)//'[31m'
  print '(1x,a,/)', 'debug version - debug version - debug version - debug version - debug version'
  #else
  print '(a)', achar(27)//'[94m'
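
Note: the change above swaps list-directed output (print*) for an explicit (a) edit descriptor. A minimal, self-contained sketch of the difference, assuming any standard Fortran compiler (the program name and message text are illustrative only, not part of DAMASK):

! list-directed vs. formatted output of an ANSI colour escape
program print_style
  implicit none

  ! list-directed output: each record normally starts with a blank and the
  ! exact layout is processor dependent, so the escape code may shift columns
  print *, achar(27)//'[31m'//'red text (list-directed)'//achar(27)//'[0m'

  ! formatted output with an (a) edit descriptor: the string is written verbatim,
  ! so the escape sequence starts in column 1 as intended
  print '(a)', achar(27)//'[31m'//'red text (formatted)'//achar(27)//'[0m'
end program print_style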


@@ -343,7 +343,7 @@ program DAMASK_grid
  end if
  writeUndeformed: if (CLI_restartInc < 1) then
- print'(/,1x,a)', '... writing initial configuration to file .................................'
+ print'(/,1x,a)', '... saving initial configuration ..........................................'
  flush(IO_STDOUT)
  call materialpoint_result(0,0.0_pREAL)
  end if writeUndeformed
@@ -462,15 +462,15 @@ program DAMASK_grid
  cutBackLevel = max(0, cutBackLevel - 1) ! try half number of subincs next inc
  if (all(solres(:)%converged)) then
- print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' converged'
+ print'(/,1x,a,1x,i0,1x,a)', 'increment', totalIncsCounter, 'converged'
  else
- print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' NOT converged'
+ print'(/,1x,a,1x,i0,1x,a)', 'increment', totalIncsCounter, 'NOT converged'
  end if; flush(IO_STDOUT)
  call MPI_Allreduce(signal_SIGUSR1,sig,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  if (mod(inc,loadCases(l)%f_out) == 0 .or. sig) then
- print'(/,1x,a)', '... writing results to file ...............................................'
+ print'(/,1x,a)', '... saving results ........................................................'
  flush(IO_STDOUT)
  call materialpoint_result(totalIncsCounter,t)
  end if
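
Note: the polished messages above move the spacing out of the string literals and into the format, where 1x emits a blank, i0 prints the integer at minimal width, and / starts with an empty record. A minimal sketch of the two styles, assuming a standard Fortran compiler (the variable name totalIncsCounter is borrowed from the code above; the value is made up):

! old vs. new convergence report -- both print ' increment 42 converged'
! after an empty line
program report_increment
  implicit none
  integer :: totalIncsCounter = 42

  ! old style: blanks baked into the string literals
  print '(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' converged'

  ! new style: blanks supplied by 1x edit descriptors, literals stay trimmed
  print '(/,1x,a,1x,i0,1x,a)', 'increment', totalIncsCounter, 'converged'
end program report_increment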


@@ -175,7 +175,7 @@ subroutine grid_damage_spectral_init()
  end if
  restartRead: if (CLI_restartInc > 0) then
- print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'
+ print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc
  fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
  groupHandle = HDF5_openGroup(fileHandle,'solver')


@@ -236,7 +236,7 @@ subroutine grid_mechanical_FEM_init
  !--------------------------------------------------------------------------------------------------
  ! init fields
  restartRead: if (CLI_restartInc > 0) then
- print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'
+ print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc
  fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
  groupHandle = HDF5_openGroup(fileHandle,'solver')
@@ -278,7 +278,7 @@ subroutine grid_mechanical_FEM_init
  CHKERRQ(err_PETSc)
  restartRead2: if (CLI_restartInc > 0) then
- print'(1x,a,i0,a)', 'reading more restart data of increment ', CLI_restartInc, ' from file'
+ print'(1x,a,1x,i0)', 'loading additional restart data of increment', CLI_restartInc
  call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
  call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
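
Note: the restartRead2 block above reads C_volAvg (a 3x3x3x3 stiffness, 81 entries) through DAMASK's HDF5 wrapper and then broadcasts it to all ranks. A minimal, self-contained sketch of that broadcast idiom, using plain mpi_f08 bindings and default kinds instead of DAMASK's pREAL/MPI_INTEGER_KIND (the values are placeholders):

! broadcast a rank-0 copy of a 3x3x3x3 tensor to all MPI processes
program bcast_stiffness
  use mpi_f08
  implicit none
  double precision :: C_volAvg(3,3,3,3)
  integer :: err_MPI, rank

  call MPI_Init(err_MPI)
  call MPI_Comm_rank(MPI_COMM_WORLD, rank, err_MPI)

  C_volAvg = 0.0d0
  if (rank == 0) C_volAvg = 1.0d0               ! stand-in for data read from the restart file

  ! all 81 entries travel from rank 0 to every other rank
  call MPI_Bcast(C_volAvg, 81, MPI_DOUBLE_PRECISION, 0, MPI_COMM_WORLD, err_MPI)
  if (err_MPI /= 0) error stop 'MPI error'

  call MPI_Finalize(err_MPI)
end program bcast_stiffness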


@@ -200,7 +200,7 @@ subroutine grid_mechanical_spectral_basic_init()
  CHKERRQ(err_PETSc)
  restartRead: if (CLI_restartInc > 0) then
- print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'
+ print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc
  fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
  groupHandle = HDF5_openGroup(fileHandle,'solver')
@@ -236,7 +236,7 @@ subroutine grid_mechanical_spectral_basic_init()
  CHKERRQ(err_PETSc)
  restartRead2: if (CLI_restartInc > 0) then
- print'(1x,a,i0,a)', 'reading more restart data of increment ', CLI_restartInc, ' from file'
+ print'(1x,a,1x,i0)', 'loading additional restart data of increment', CLI_restartInc
  call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
  call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'


@@ -223,7 +223,7 @@ subroutine grid_mechanical_spectral_polarisation_init()
  F_tau => FandF_tau(9:17,:,:,:)
  restartRead: if (CLI_restartInc > 0) then
- print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'
+ print '(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc
  fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
  groupHandle = HDF5_openGroup(fileHandle,'solver')
@@ -265,7 +265,7 @@ subroutine grid_mechanical_spectral_polarisation_init()
  CHKERRQ(err_PETSc)
  restartRead2: if (CLI_restartInc > 0) then
- print'(1x,a,i0,a)', 'reading more restart data of increment ', CLI_restartInc, ' from file'
+ print '(1x,a,1x,i0)', 'loading additional restart data of increment', CLI_restartInc
  call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
  call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'


@@ -149,7 +149,7 @@ subroutine grid_thermal_spectral_init()
  restartRead: if (CLI_restartInc > 0) then
- print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'
+ print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc
  fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
  groupHandle = HDF5_openGroup(fileHandle,'solver')


@@ -91,7 +91,7 @@ subroutine materialpoint_init()
  if (CLI_restartInc > 0) then
- print'(/,a,i0,a)', ' reading restart information of increment from file'; flush(IO_STDOUT)
+ print'(/,1x,a,1x,i0)', 'loading restart information of increment',CLI_restartInc; flush(IO_STDOUT)
  fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
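
Note: the progress message above is followed by flush(IO_STDOUT) so that it shows up before the potentially long restart read rather than whenever the runtime drains its output buffer. A minimal sketch of the same idiom, assuming iso_fortran_env's output_unit in place of DAMASK's IO_STDOUT (the increment number is made up):

! make a progress message visible immediately
program flush_progress
  use iso_fortran_env, only: output_unit
  implicit none

  print '(/,1x,a,1x,i0)', 'loading restart information of increment', 5
  flush(output_unit)   ! push the buffered record to the terminal before the slow work starts
end program flush_progress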


@@ -204,7 +204,7 @@ program DAMASK_mesh
  errorID = 0
  checkLoadcases: do currentLoadCase = 1, size(loadCases)
  write (loadcase_string, '(i0)' ) currentLoadCase
- print'(/,1x,a,i0)', 'load case: ', currentLoadCase
+ print'(/,1x,a,1x,i0)', 'load case:', currentLoadCase
  if (.not. loadCases(currentLoadCase)%followFormerTrajectory) &
  print'(2x,a)', 'drop guessing along trajectory'
  print'(2x,a)', 'Field '//trim(FIELD_MECH_label)
@@ -238,7 +238,7 @@ program DAMASK_mesh
  write(statUnit,'(a)') 'Increment Time CutbackLevel Converged IterationsNeeded' ! statistics file
  end if
- print'(/,1x,a)', '... writing initial configuration to file .................................'
+ print'(/,1x,a)', '... saving initial configuration ..........................................'
  flush(IO_STDOUT)
  call materialpoint_result(0,0.0_pREAL)
@@ -318,13 +318,13 @@ program DAMASK_mesh
  cutBackLevel = max(0, cutBackLevel - 1) ! try half number of subincs next inc
  if (all(solres(:)%converged)) then
- print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' converged'
+ print'(/,1x,a,1x,i0,1x,a)', 'increment', totalIncsCounter, 'converged'
  else
- print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' NOT converged'
+ print'(/,1x,a,1x,i0,1x,a)', 'increment', totalIncsCounter, 'NOT converged'
  end if; flush(IO_STDOUT)
  if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0) then ! at output frequency
- print'(/,1x,a)', '... writing results to file ...............................................'
+ print'(/,1x,a)', '... saving results ........................................................'
  call FEM_mechanical_updateCoords()
  call materialpoint_result(totalIncsCounter,time)
  end if