Merge branch 'development' of magit1.mpie.de:damask/DAMASK into miscImprovements

Martin Diehl 2016-09-03 14:35:53 +02:00
commit c05307a273
12 changed files with 474 additions and 298 deletions

View File

@@ -1 +1 @@
-v2.0.1-43-g64ac05f
+v2.0.1-59-g4b02a55

View File

@@ -81,7 +81,7 @@ program DAMASK_spectral
 use spectral_mech_Polarisation
 use spectral_damage
 use spectral_thermal
 implicit none
@@ -93,9 +93,9 @@ program DAMASK_spectral
 logical, dimension(9) :: temp_maskVector = .false. !< temporarily from loadcase file when reading in tensors
 integer(pInt), parameter :: FILEUNIT = 234_pInt !< file unit, DAMASK IO does not support newunit feature
 integer(pInt), allocatable, dimension(:) :: chunkPos
 integer(pInt) :: &
 N_t = 0_pInt, & !< # of time indicators found in load case file
 N_n = 0_pInt, & !< # of increment specifiers found in load case file
 N_def = 0_pInt !< # of rate of deformation specifiers found in load case file
 character(len=65536) :: &
@@ -105,7 +105,7 @@ program DAMASK_spectral
 ! loop variables, convergence etc.
 real(pReal), dimension(3,3), parameter :: &
 ones = 1.0_pReal, &
 zeros = 0.0_pReal
 integer(pInt), parameter :: &
 subStepFactor = 2_pInt !< for each substep, divide the last time increment by 2.0
 real(pReal) :: &
@@ -150,6 +150,7 @@ program DAMASK_spectral
 MPI_file_get_position, &
 MPI_file_write, &
 MPI_abort, &
+MPI_finalize, &
 MPI_allreduce, &
 PETScFinalize
@@ -159,7 +160,7 @@ program DAMASK_spectral
 write(6,'(/,a)') ' <<<+- DAMASK_spectral init -+>>>'
 write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
 #include "compilation_info.f90"
 !--------------------------------------------------------------------------------------------------
 ! initialize field solver information
 nActiveFields = 1
@@ -192,14 +193,14 @@ program DAMASK_spectral
 call IO_error(error_ID=837_pInt,ext_msg = trim(loadCaseFile)) ! error message for incomplete loadcase
 allocate (loadCases(N_n)) ! array of load cases
 loadCases%P%myType='p'
 do i = 1, size(loadCases)
 allocate(loadCases(i)%ID(nActiveFields))
 field = 1
 loadCases(i)%ID(field) = FIELD_MECH_ID ! mechanical active by default
 thermalActive: if (any(thermal_type == THERMAL_conduction_ID)) then
 field = field + 1
 loadCases(i)%ID(field) = FIELD_THERMAL_ID
 endif thermalActive
 damageActive: if (any(damage_type == DAMAGE_nonlocal_ID)) then
 field = field + 1
@@ -230,12 +231,10 @@ program DAMASK_spectral
 endif
 do j = 1_pInt, 9_pInt
 temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not a *
-enddo
-do j = 1_pInt,9_pInt
 if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable
 enddo
 loadCases(currentLoadCase)%deformation%maskLogical = & ! logical mask in 3x3 notation
 transpose(reshape(temp_maskVector,[ 3,3]))
 loadCases(currentLoadCase)%deformation%maskFloat = & ! float (1.0/0.0) mask in 3x3 notation
 merge(ones,zeros,loadCases(currentLoadCase)%deformation%maskLogical)
 loadCases(currentLoadCase)%deformation%values = math_plain9to33(temp_valueVector) ! values in 3x3 notation
@@ -243,8 +242,6 @@ program DAMASK_spectral
 temp_valueVector = 0.0_pReal
 do j = 1_pInt, 9_pInt
 temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not an asterisk
-enddo
-do j = 1_pInt,9_pInt
 if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable
 enddo
 loadCases(currentLoadCase)%P%maskLogical = transpose(reshape(temp_maskVector,[ 3,3]))
@@ -259,10 +256,10 @@ program DAMASK_spectral
 loadCases(currentLoadCase)%incs = IO_intValue(line,chunkPos,i+1_pInt)
 loadCases(currentLoadCase)%logscale = 1_pInt
 case('freq','frequency','outputfreq') ! frequency of result writings
 loadCases(currentLoadCase)%outputfrequency = IO_intValue(line,chunkPos,i+1_pInt)
 case('r','restart','restartwrite') ! frequency of writing restart information
 loadCases(currentLoadCase)%restartfrequency = &
 max(0_pInt,IO_intValue(line,chunkPos,i+1_pInt))
 case('guessreset','dropguessing')
 loadCases(currentLoadCase)%followFormerTrajectory = .false. ! do not continue to predict deformation along former trajectory
 case('euler') ! rotation of currentLoadCase given in euler angles
@@ -271,10 +268,10 @@ program DAMASK_spectral
 k = 1_pInt ! assuming keyword indicating degree/radians present
 select case (IO_lc(IO_stringValue(line,chunkPos,i+1_pInt)))
 case('deg','degree')
 case('rad','radian') ! don't convert from degree to radian
 l = 0_pInt
 case default
 k = 0_pInt
 end select
 do j = 1_pInt, 3_pInt
 temp_valueVector(j) = IO_floatValue(line,chunkPos,i+k+j)
@@ -289,7 +286,7 @@ program DAMASK_spectral
 loadCases(currentLoadCase)%rotation = math_plain9to33(temp_valueVector)
 end select
 enddo; enddo
 close(FILEUNIT)
 !--------------------------------------------------------------------------------------------------
 ! consistency checks and output of load case
@@ -301,14 +298,14 @@ program DAMASK_spectral
 write(6,'(1x,a,i6)') 'load case: ', currentLoadCase
 if (.not. loadCases(currentLoadCase)%followFormerTrajectory) &
 write(6,'(2x,a)') 'drop guessing along trajectory'
-if (loadCases(currentLoadCase)%deformation%myType=='l') then
+if (loadCases(currentLoadCase)%deformation%myType == 'l') then
 do j = 1_pInt, 3_pInt
 if (any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .true.) .and. &
 any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .false.)) &
 errorID = 832_pInt ! each row should be either fully or not at all defined
 enddo
 write(6,'(2x,a)') 'velocity gradient:'
-else if (loadCases(currentLoadCase)%deformation%myType=='f') then
+else if (loadCases(currentLoadCase)%deformation%myType == 'f') then
 write(6,'(2x,a)') 'deformation gradient at end of load case:'
 else
 write(6,'(2x,a)') 'deformation gradient rate:'
@@ -317,12 +314,12 @@ program DAMASK_spectral
 if(loadCases(currentLoadCase)%deformation%maskLogical(i,j)) then
 write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%deformation%values(i,j)
 else
 write(6,'(2x,12a)',advance='no') ' * '
 endif
 enddo; write(6,'(/)',advance='no')
 enddo
 if (any(loadCases(currentLoadCase)%P%maskLogical .eqv. &
 loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only
 if (any(loadCases(currentLoadCase)%P%maskLogical .and. &
 transpose(loadCases(currentLoadCase)%P%maskLogical) .and. &
 reshape([ .false.,.true.,.true.,.true.,.false.,.true.,.true.,.true.,.false.],[ 3,3]))) &
@@ -332,12 +329,12 @@ program DAMASK_spectral
 if(loadCases(currentLoadCase)%P%maskLogical(i,j)) then
 write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%P%values(i,j)*1e-9_pReal
 else
 write(6,'(2x,12a)',advance='no') ' * '
 endif
 enddo; write(6,'(/)',advance='no')
 enddo
 if (any(abs(math_mul33x33(loadCases(currentLoadCase)%rotation, &
-math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) >&
+math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) > &
 reshape(spread(tol_math_check,1,9),[ 3,3]))&
 .or. abs(math_det33(loadCases(currentLoadCase)%rotation)) > &
 1.0_pReal + tol_math_check) errorID = 846_pInt ! given rotation matrix contains strain
@@ -358,7 +355,7 @@ program DAMASK_spectral
 endif
 !--------------------------------------------------------------------------------------------------
 ! doing initialization depending on selected solver
 call Utilities_init()
 do field = 1, nActiveFields
 select case (loadCases(1)%ID(field))
@@ -370,26 +367,26 @@ program DAMASK_spectral
 if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0 .and. worldrank == 0_pInt) &
 call IO_warning(42_pInt, ext_msg='debug Divergence')
 call AL_init
 case (DAMASK_spectral_SolverPolarisation_label)
 if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0 .and. worldrank == 0_pInt) &
 call IO_warning(42_pInt, ext_msg='debug Divergence')
 call Polarisation_init
 case default
-call IO_error(error_ID = 891, ext_msg = trim(spectral_solver))
+call IO_error(error_ID = 891_pInt, ext_msg = trim(spectral_solver))
 end select
 case(FIELD_THERMAL_ID)
 call spectral_thermal_init
 case(FIELD_DAMAGE_ID)
 call spectral_damage_init()
 end select
 enddo
 !--------------------------------------------------------------------------------------------------
 ! write header of output file
 if (worldrank == 0) then
@@ -408,7 +405,7 @@ program DAMASK_spectral
 write(resUnit) 'logscales:', loadCases%logscale
 write(resUnit) 'increments:', loadCases%incs ! one entry per LoadCase
 write(resUnit) 'startingIncrement:', restartInc - 1_pInt ! start with writing out the previous inc
 write(resUnit) 'eoh'
 close(resUnit) ! end of header
 open(newunit=statUnit,file=trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//&
 '.sta',form='FORMATTED',status='REPLACE')
@@ -427,29 +424,30 @@ program DAMASK_spectral
 allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
 outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
 call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_allreduce')
 call MPI_file_open(PETSC_COMM_WORLD, &
 trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', &
 MPI_MODE_WRONLY + MPI_MODE_APPEND, &
 MPI_INFO_NULL, &
 resUnit, &
 ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_open')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_open')
 call MPI_file_get_position(resUnit,fileOffset,ierr) ! get offset from header
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_get_position')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_get_position')
 fileOffset = fileOffset + sum(outputSize(1:worldrank)) ! offset of my process in file (header + processes before me)
 call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_seek')
 if (.not. appendToOutFile) then ! if not restarting, write 0th increment
-do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
-outputIndex=int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
+do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
+outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
 min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
-call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
-[(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
-(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
+call MPI_file_write(resUnit, &
+reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), &
+[(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
+(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults, &
 MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write')
 enddo
 fileOffset = fileOffset + sum(outputSize) ! forward to current file position
 if (worldrank == 0) &
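
The hunk above pairs every MPI call with an explicit error check and slices each rank's output into pieces small enough for one MPI_file_write: the per-rank sizes are shared via MPI_allreduce, each rank seeks past the ranks before it, then writes chunk by chunk. A minimal sketch of the same pattern in Python with mpi4py follows; it only illustrates the idea, so the file name and array size are made up and this is not DAMASK code.

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank, size = comm.Get_rank(), comm.Get_size()

data = np.random.rand(1000000)                       # this rank's results (illustrative)
outputSize = np.zeros(size, dtype=np.int64)
outputSize[rank] = data.nbytes
comm.Allreduce(MPI.IN_PLACE, outputSize, op=MPI.SUM) # every rank learns all sizes

fh = MPI.File.Open(comm, 'results.bin', MPI.MODE_WRONLY | MPI.MODE_CREATE)
offset = int(outputSize[:rank].sum())                # my slot starts after earlier ranks

maxBytesOut = 2**31 - 1                              # stay below the C int count limit
chunk = maxBytesOut // data.itemsize                 # elements per write
for start in range(0, data.size, chunk):
    part = data[start:start + chunk]
    fh.Write_at(offset, part)                        # explicit-offset write
    offset += part.nbytes
fh.Close()

Write_at stands in for the seek-then-write of the original; the chunking mirrors what maxByteOut does in the Fortran code.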
@@ -458,7 +456,7 @@ program DAMASK_spectral
 !--------------------------------------------------------------------------------------------------
 ! loopping over loadcases
 loadCaseLooping: do currentLoadCase = 1_pInt, size(loadCases)
 time0 = time ! currentLoadCase start time
 guess = loadCases(currentLoadCase)%followFormerTrajectory ! change of load case? homogeneous guess for the first inc
 !--------------------------------------------------------------------------------------------------
@@ -472,9 +470,9 @@ program DAMASK_spectral
 if (loadCases(currentLoadCase)%logscale == 0_pInt) then ! linear scale
 timeinc = loadCases(currentLoadCase)%time/real(loadCases(currentLoadCase)%incs,pReal) ! only valid for given linear time scale. will be overwritten later in case loglinear scale is used
 else
 if (currentLoadCase == 1_pInt) then ! 1st currentLoadCase of logarithmic scale
 if (inc == 1_pInt) then ! 1st inc of 1st currentLoadCase of logarithmic scale
 timeinc = loadCases(1)%time*(2.0_pReal**real( 1_pInt-loadCases(1)%incs ,pReal)) ! assume 1st inc is equal to 2nd
 else ! not-1st inc of 1st currentLoadCase of logarithmic scale
 timeinc = loadCases(1)%time*(2.0_pReal**real(inc-1_pInt-loadCases(1)%incs ,pReal))
 endif
@@ -488,16 +486,16 @@ program DAMASK_spectral
 endif
 timeinc = timeinc / 2.0_pReal**real(cutBackLevel,pReal) ! depending on cut back level, decrease time step
-forwarding: if(totalIncsCounter >= restartInc) then
+forwarding: if (totalIncsCounter >= restartInc) then
 stepFraction = 0_pInt
 !--------------------------------------------------------------------------------------------------
 ! loop over sub incs
 subIncLooping: do while (stepFraction/subStepFactor**cutBackLevel <1_pInt)
 time = time + timeinc ! forward time
 stepFraction = stepFraction + 1_pInt
 remainingLoadCaseTime = time0 - time + loadCases(currentLoadCase)%time + timeInc
 !--------------------------------------------------------------------------------------------------
 ! report begin of new increment
 if (worldrank == 0) then
@@ -515,7 +513,7 @@ program DAMASK_spectral
 ',a,'//IO_intOut(stepFraction)//',a,'//IO_intOut(subStepFactor**cutBackLevel)//')') &
 'Increment ',totalIncsCounter,'/',sum(loadCases%incs),&
 '-',stepFraction, '/', subStepFactor**cutBackLevel
 endif
 !--------------------------------------------------------------------------------------------------
 ! forward fields
@@ -541,18 +539,18 @@ program DAMASK_spectral
 F_BC = loadCases(currentLoadCase)%deformation, &
 P_BC = loadCases(currentLoadCase)%P, &
 rotation_BC = loadCases(currentLoadCase)%rotation)
 end select
 case(FIELD_THERMAL_ID)
 call spectral_thermal_forward (&
 guess,timeinc,timeIncOld,remainingLoadCaseTime)
 case(FIELD_DAMAGE_ID)
 call spectral_damage_forward (&
 guess,timeinc,timeIncOld,remainingLoadCaseTime)
 end select
 enddo
 !--------------------------------------------------------------------------------------------------
 ! solve fields
 stagIter = 0_pInt
@@ -568,43 +566,43 @@ program DAMASK_spectral
 P_BC = loadCases(currentLoadCase)%P, &
 F_BC = loadCases(currentLoadCase)%deformation, &
 rotation_BC = loadCases(currentLoadCase)%rotation)
 case (DAMASK_spectral_SolverAL_label)
 solres(field) = AL_solution (&
 incInfo,guess,timeinc,timeIncOld,remainingLoadCaseTime, &
 P_BC = loadCases(currentLoadCase)%P, &
 F_BC = loadCases(currentLoadCase)%deformation, &
 rotation_BC = loadCases(currentLoadCase)%rotation)
 case (DAMASK_spectral_SolverPolarisation_label)
 solres(field) = Polarisation_solution (&
 incInfo,guess,timeinc,timeIncOld,remainingLoadCaseTime, &
 P_BC = loadCases(currentLoadCase)%P, &
 F_BC = loadCases(currentLoadCase)%deformation, &
 rotation_BC = loadCases(currentLoadCase)%rotation)
 end select
 case(FIELD_THERMAL_ID)
 solres(field) = spectral_thermal_solution (&
 guess,timeinc,timeIncOld,remainingLoadCaseTime)
 case(FIELD_DAMAGE_ID)
 solres(field) = spectral_damage_solution (&
 guess,timeinc,timeIncOld,remainingLoadCaseTime)
 end select
-if(.not. solres(field)%converged) exit ! no solution found
+if (.not. solres(field)%converged) exit ! no solution found
 enddo
 stagIter = stagIter + 1_pInt
 stagIterate = stagIter < stagItMax .and. &
 all(solres(:)%converged) .and. &
 .not. all(solres(:)%stagConverged)
 enddo
 !--------------------------------------------------------------------------------------------------
 ! check solution
 cutBack = .False.
 if(solres(1)%termIll .or. .not. all(solres(:)%converged .and. solres(:)%stagConverged)) then ! no solution found
 if (cutBackLevel < maxCutBack) then ! do cut back
 if (worldrank == 0) write(6,'(/,a)') ' cut back detected'
@@ -617,8 +615,8 @@ program DAMASK_spectral
 call IO_warning(850_pInt)
 call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written
 elseif (continueCalculation == 1_pInt) then
 guess = .true. ! accept non converged BVP solution
 else ! default behavior, exit if spectral solver does not converge
 call IO_warning(850_pInt)
 call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written
 endif
@@ -630,8 +628,8 @@ program DAMASK_spectral
 write(statUnit,*) totalIncsCounter, time, cutBackLevel, &
 solres%converged, solres%iterationsNeeded ! write statistics about accepted solution
 flush(statUnit)
 endif
 endif
 enddo subIncLooping
 cutBackLevel = max(0_pInt, cutBackLevel - 1_pInt) ! try half number of subincs next inc
 if(all(solres(:)%converged)) then ! report converged inc
@@ -662,11 +660,11 @@ program DAMASK_spectral
 enddo
 fileOffset = fileOffset + sum(outputSize) ! forward to current file position
 endif
 if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. & ! at frequency of writing restart information set restart parameter for FEsolving
 mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! first call to CPFEM_general will write?
 restartWrite = .true.
 lastRestartWritten = inc
 endif
 else forwarding
 time = time + timeinc
 guess = .true.
@@ -698,7 +696,7 @@ program DAMASK_spectral
 call AL_destroy()
 case (DAMASK_spectral_SolverPolarisation_label)
 call Polarisation_destroy()
 end select
 case(FIELD_THERMAL_ID)
 call spectral_thermal_destroy()
 case(FIELD_DAMAGE_ID)
@@ -709,6 +707,13 @@ program DAMASK_spectral
 call PETScFinalize(ierr); CHKERRQ(ierr)
+#ifdef _OPENMP
+call MPI_finalize(i)
+if (i /= 0_pInt) then
+call IO_error(error_ID=894, el=i, ext_msg="Finalize()")
+endif
+#endif
 if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged
 call quit(0_pInt) ! no complains ;)
@@ -719,7 +724,7 @@ end program DAMASK_spectral
 !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
 !> @brief quit subroutine to mimic behavior of FEM solvers
 !> @details exits the Spectral solver and reports time and duration. Exit code 0 signals
 !> everything went fine. Exit code 1 signals an error, message according to IO_error. Exit code
 !> 2 signals no converged solution and increment of last saved restart information is written to
 !> stderr. Exit code 3 signals no severe problems, but some increments did not converge
 !--------------------------------------------------------------------------------------------------
@@ -739,7 +744,7 @@ subroutine quit(stop_id)
 write(6,'(a,2(i2.2,a),i2.2)') 'Time: ',dateAndTime(5),':',&
 dateAndTime(6),':',&
 dateAndTime(7)
 if (stop_id == 0_pInt) stop 0 ! normal termination
 if (stop_id < 0_pInt) then ! terminally ill, restart might help
 write(0,'(a,i6)') 'restart information available at ', stop_id*(-1_pInt)
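
For context on the exit codes documented above, a wrapper script can branch on the return value of the solver process. A hedged sketch in Python follows; the executable name and command-line flags are assumptions for illustration, not taken from this diff.

import subprocess

ret = subprocess.call(['DAMASK_spectral', '--load', 'tension.load',
                       '--geom', 'polycrystal.geom'])
if ret == 0:
    print('finished cleanly')
elif ret == 3:
    print('finished, but some increments did not converge')
else:
    print('error or no converged solution; restart info on stderr')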

View File

@@ -257,10 +257,10 @@ COMPILE_OPTIONS_gfortran :=-DDAMASKVERSION=\"${DAMASKVERSION}\"\
 #-Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions.
 #-Wstrict-overflow:
-DEBUG_OPTIONS_gfortran :=-g\
-                         -fbacktrace\
-                         -fdump-core\
-                         -fcheck=all\
+DEBUG_OPTIONS_gfortran :=-g \
+                         -fbacktrace \
+                         -fdump-core \
+                         -fcheck=all \
 -ffpe-trap=invalid,zero,overflow
 ###################################################################################################
@@ -300,37 +300,60 @@ COMPILE =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$
 COMPILE_MAXOPTI =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) $(COMPILE_OPTIONS_$(F90)) $(INCLUDE_DIRS) $(PRECISION_$(F90))
 ###################################################################################################
 SOURCE_FILES = \
-source_thermal_dissipation.o source_thermal_externalheat.o \
-source_damage_isoBrittle.o source_damage_isoDuctile.o source_damage_anisoBrittle.o source_damage_anisoDuctile.o \
-source_vacancy_phenoplasticity.o source_vacancy_irradiation.o source_vacancy_thermalfluc.o
+source_thermal_dissipation.o \
+source_thermal_externalheat.o \
+source_damage_isoBrittle.o \
+source_damage_isoDuctile.o \
+source_damage_anisoBrittle.o \
+source_damage_anisoDuctile.o \
+source_vacancy_phenoplasticity.o \
+source_vacancy_irradiation.o \
+source_vacancy_thermalfluc.o
 KINEMATICS_FILES = \
-kinematics_cleavage_opening.o kinematics_slipplane_opening.o \
+kinematics_cleavage_opening.o \
+kinematics_slipplane_opening.o \
 kinematics_thermal_expansion.o \
-kinematics_vacancy_strain.o kinematics_hydrogen_strain.o
+kinematics_vacancy_strain.o \
+kinematics_hydrogen_strain.o
 PLASTIC_FILES = \
-plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o \
-plastic_phenopowerlaw.o plastic_titanmod.o plastic_nonlocal.o plastic_none.o \
+plastic_dislotwin.o \
+plastic_disloUCLA.o \
+plastic_isotropic.o \
+plastic_phenopowerlaw.o \
+plastic_titanmod.o \
+plastic_nonlocal.o \
+plastic_none.o \
 plastic_phenoplus.o
 THERMAL_FILES = \
-thermal_isothermal.o thermal_adiabatic.o thermal_conduction.o
+thermal_isothermal.o \
+thermal_adiabatic.o \
+thermal_conduction.o
 DAMAGE_FILES = \
-damage_none.o damage_local.o damage_nonlocal.o
+damage_none.o \
+damage_local.o \
+damage_nonlocal.o
 VACANCYFLUX_FILES = \
-vacancyflux_isoconc.o vacancyflux_isochempot.o vacancyflux_cahnhilliard.o
+vacancyflux_isoconc.o \
+vacancyflux_isochempot.o \
+vacancyflux_cahnhilliard.o
 POROSITY_FILES = \
-porosity_none.o porosity_phasefield.o
+porosity_none.o \
+porosity_phasefield.o
 HYDROGENFLUX_FILES = \
-hydrogenflux_isoconc.o hydrogenflux_cahnhilliard.o
+hydrogenflux_isoconc.o \
+hydrogenflux_cahnhilliard.o
 HOMOGENIZATION_FILES = \
-homogenization_RGC.o homogenization_isostrain.o homogenization_none.o
+homogenization_RGC.o \
+homogenization_isostrain.o \
+homogenization_none.o
 #####################
 # Spectral Solver
@@ -351,11 +374,28 @@ DAMASK_spectral.o: INTERFACENAME := spectral_interface.f90
 SPECTRAL_SOLVER_FILES = spectral_mech_AL.o spectral_mech_Basic.o spectral_mech_Polarisation.o \
 spectral_thermal.o spectral_damage.o
-SPECTRAL_FILES = C_routines.o system_routines.o prec.o DAMASK_interface.o IO.o numerics.o debug.o math.o \
-FEsolving.o mesh.o material.o lattice.o \
-$(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \
+SPECTRAL_FILES = C_routines.o \
+system_routines.o \
+prec.o \
+DAMASK_interface.o \
+IO.o \
+numerics.o \
+debug.o \
+math.o \
+FEsolving.o \
+mesh.o \
+material.o \
+lattice.o \
+$(SOURCE_FILES) \
+$(KINEMATICS_FILES) \
+$(PLASTIC_FILES) \
+constitutive.o \
 crystallite.o \
-$(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \
+$(THERMAL_FILES) \
+$(DAMAGE_FILES) \
+$(VACANCYFLUX_FILES) \
+$(HYDROGENFLUX_FILES) \
+$(POROSITY_FILES) \
 $(HOMOGENIZATION_FILES) homogenization.o \
 CPFEM2.o \
 spectral_utilities.o \
@@ -401,14 +441,31 @@ DAMASK_FEM.exe: INCLUDE_DIRS += -I./
 FEM_SOLVER_FILES = FEM_mech.o FEM_thermal.o FEM_damage.o FEM_vacancyflux.o FEM_porosity.o FEM_hydrogenflux.o
-FEM_FILES = prec.o DAMASK_interface.o FEZoo.o IO.o numerics.o debug.o math.o \
-FEsolving.o mesh.o material.o lattice.o \
-$(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \
+FEM_FILES = prec.o \
+DAMASK_interface.o \
+FEZoo.o \
+IO.o \
+numerics.o \
+debug.o \
+math.o \
+FEsolving.o \
+mesh.o \
+material.o \
+lattice.o \
+$(SOURCE_FILES) \
+$(KINEMATICS_FILES) \
+$(PLASTIC_FILES) \
+constitutive.o \
 crystallite.o \
-$(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \
+$(THERMAL_FILES) \
+$(DAMAGE_FILES) \
+$(VACANCYFLUX_FILES) \
+$(HYDROGENFLUX_FILES) \
+$(POROSITY_FILES) \
 $(HOMOGENIZATION_FILES) homogenization.o \
 CPFEM.o \
-FEM_utilities.o $(FEM_SOLVER_FILES)
+FEM_utilities.o \
+$(FEM_SOLVER_FILES)
 DAMASK_FEM.exe: DAMASK_FEM_driver.o
 $(PREFIX) $(LINKERNAME) $(OPENMP_FLAG_$(F90)) $(LINK_OPTIONS_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) \
@@ -658,8 +715,8 @@ tidy:
 @rm -rf *.inst.f90 # for instrumentation
 @rm -rf *.pomp.f90 # for instrumentation
 @rm -rf *.pp.f90 # for instrumentation
-@rm -rf *.pdb # for instrumnentation
-@rm -rf *.opari.inc # for instrumnentation
+@rm -rf *.pdb # for instrumentation
+@rm -rf *.opari.inc # for instrumentation
 .PHONY: cleanDAMASK
 cleanDAMASK:

View File

@@ -11,7 +11,15 @@ bin_link = { \
 ],
 }
-MarcReleases =[2011,2012,2013,2013.1,2014,2014.2,2015]
+MarcReleases =[ \
+               '2011',
+               '2012',
+               '2013',
+               '2013.1',
+               '2014',
+               '2014.2',
+               '2015',
+              ]
 damaskEnv = damask.Environment()
 baseDir = damaskEnv.relPath('code/')
@@ -20,27 +28,41 @@ binDir = damaskEnv.options['DAMASK_BIN']
 if not os.path.isdir(binDir):
   os.mkdir(binDir)
-for dir in bin_link:
-  for file in bin_link[dir]:
-    src = os.path.abspath(os.path.join(baseDir,dir,file))
-    if os.path.exists(src):
-      sym_link = os.path.abspath(os.path.join(binDir,\
-                                              {True: dir,
-                                               False:os.path.splitext(file)[0]}[file == '']))
-      if os.path.lexists(sym_link): os.remove(sym_link)
-      os.symlink(src,sym_link)
-      sys.stdout.write(sym_link+' -> '+src+'\n')
+sys.stdout.write('\nsymbolic linking...\n')
+for subDir in bin_link:
+  theDir = os.path.abspath(os.path.join(baseDir,subDir))
+  sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n')
+  for theFile in bin_link[subDir]:
+    theName,theExt = os.path.splitext(theFile)
+    src = os.path.abspath(os.path.join(theDir,theFile))
+    if os.path.exists(src):
+      sym_link = os.path.abspath(os.path.join(binDir,subDir if theFile == '' else theName))
+      if os.path.lexists(sym_link):
+        os.remove(sym_link)
+        output = theName+damask.util.deemph(theExt)
+      else:
+        output = damask.util.emph(theName)+damask.util.deemph(theExt)
+      sys.stdout.write(damask.util.deemph('... ')+output+'\n')
+      os.symlink(src,sym_link)
+sys.stdout.write('\nMSC.Marc versioning...\n\n')
+theMaster = 'DAMASK_marc.f90'
 for version in MarcReleases:
-  src = os.path.abspath(os.path.join(baseDir,'DAMASK_marc.f90'))
+  src = os.path.abspath(os.path.join(baseDir,theMaster))
   if os.path.exists(src):
-    sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc'+str(version)+'.f90'))
+    sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc{}.f90'.format(version)))
     if os.path.lexists(sym_link):
       os.remove(sym_link)
-      sys.stdout.write(sym_link)
+      output = version
     else:
-      sys.stdout.write(damask.util.emph(sym_link))
-    os.symlink(src,sym_link)
-    sys.stdout.write(' -> '+src+'\n')
+      output = damask.util.emph(version)
+    sys.stdout.write(' '+output+'\n')
+    os.symlink(theMaster,sym_link)
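
The versioning loop above boils down to one DAMASK_marc<version>.f90 symlink per supported Marc release, all pointing at the master source file. A standalone sketch of that pattern, with paths relative to the current directory for brevity:

import os

theMaster = 'DAMASK_marc.f90'
MarcReleases = ['2011', '2012', '2013', '2013.1', '2014', '2014.2', '2015']

for version in MarcReleases:
    sym_link = 'DAMASK_marc{}.f90'.format(version)
    if os.path.lexists(sym_link):  # replace stale links instead of failing
        os.remove(sym_link)
    os.symlink(theMaster, sym_link)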

View File

@@ -13,23 +13,47 @@ if not os.path.isdir(binDir):
   os.mkdir(binDir)
 #define ToDo list
-processing_subDirs = ['pre','post','misc',]
-processing_extensions = ['.py','.sh',]
+processing_subDirs = ['pre',
+                      'post',
+                      'misc',
+                     ]
+processing_extensions = ['.py',
+                         '.sh',
+                        ]
+sys.stdout.write('\nsymbolic linking...\n')
 for subDir in processing_subDirs:
   theDir = os.path.abspath(os.path.join(baseDir,subDir))
+  sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n')
   for theFile in os.listdir(theDir):
-    if os.path.splitext(theFile)[1] in processing_extensions: # only consider files with proper extensions
+    theName,theExt = os.path.splitext(theFile)
+    if theExt in processing_extensions: # only consider files with proper extensions
       src = os.path.abspath(os.path.join(theDir,theFile))
-      sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0]))
+      sym_link = os.path.abspath(os.path.join(binDir,theName))
       if os.path.lexists(sym_link):
         os.remove(sym_link)
-        sys.stdout.write(sym_link)
+        output = theName+damask.util.deemph(theExt)
       else:
-        sys.stdout.write(damask.util.emph(sym_link))
+        output = damask.util.emph(theName)+damask.util.deemph(theExt)
+      sys.stdout.write(damask.util.deemph('... ')+output+'\n')
       os.symlink(src,sym_link)
-      sys.stdout.write(' -> '+src+'\n')
+sys.stdout.write('\npruning broken links...\n')
+brokenLinks = 0
+for filename in os.listdir(binDir):
+  path = os.path.join(binDir,filename)
+  if os.path.islink(path) and not os.path.exists(path):
+    sys.stdout.write(' '+damask.util.delete(path)+'\n')
+    os.remove(path)
+    brokenLinks += 1
+sys.stdout.write(('none.' if brokenLinks == 0 else '')+'\n')
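
The pruning pass added at the end of this script relies on one idiom: os.path.exists() follows symlinks, so a path that is a link (os.path.islink) but does not "exist" is dangling. A minimal self-contained sketch, assuming binDir is the directory of symlinks:

import os

def prune_broken_links(binDir):
    """Remove symlinks in binDir whose targets no longer exist."""
    removed = 0
    for name in os.listdir(binDir):
        path = os.path.join(binDir, name)
        # islink + not exists is true exactly for dangling symlinks
        if os.path.islink(path) and not os.path.exists(path):
            os.remove(path)
            removed += 1
    return removed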

View File

@@ -16,41 +16,48 @@ class Test():
   """
   variants = []
-  def __init__(self,test_description):
-    logger = logging.getLogger()
-    logger.setLevel(0)
+  def __init__(self,description = ''):
     fh = logging.FileHandler('test.log') # create file handler which logs even debug messages
     fh.setLevel(logging.DEBUG)
-    full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')
-    fh.setFormatter(full)
+    fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s'))
     ch = logging.StreamHandler(stream=sys.stdout) # create console handler with a higher log level
     ch.setLevel(logging.INFO)
-    # create formatter and add it to the handlers
-    plain = logging.Formatter('%(message)s')
-    ch.setFormatter(plain)
-    # add the handlers to the logger
+    ch.setFormatter(logging.Formatter('%(message)s'))
+    logger = logging.getLogger()
     logger.addHandler(fh)
     logger.addHandler(ch)
+    logger.setLevel(0)
-    logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n' \
-                 +'----------------------------------------------------------------\n' \
-                 +'| '+test_description+'\n' \
-                 +'----------------------------------------------------------------')
+    logging.info('\n'.join(['+'*40,
+                            '-'*40,
+                            '| '+description,
+                            '-'*40,
+                           ]))
     self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
-    self.parser = OptionParser(
-                  description = test_description+' (using class: {})'.format(damask.version),
-                  usage='./test.py [options]')
-    self.updateRequested = False
-    self.parser.add_option("-d", "--debug", action="store_true",\
-                           dest="debug",\
-                           help="debug run, don't calculate but use existing results")
-    self.parser.add_option("-p", "--pass", action="store_true",\
-                           dest="accept",\
-                           help="calculate results but always consider test as successfull")
-    self.parser.set_defaults(debug=False,
-                             accept=False)
+    self.parser = OptionParser(description = '{} (using class: {})'.format(description,damask.version),
+                               usage = './test.py [options]')
+    self.parser.add_option("-d", "--debug",
+                           action = "store_true",
+                           dest = "debug",
+                           help = "debug run, don't calculate but use existing results")
+    self.parser.add_option("-p", "--pass",
+                           action = "store_true",
+                           dest = "accept",
+                           help = "calculate results but always consider test as successfull")
+    self.parser.add_option("-u", "--update",
+                           action = "store_true",
+                           dest = "update",
+                           help = "use current test results as new reference"
+                          )
+    self.parser.set_defaults(debug = False,
+                             accept = False,
+                             update = False,
+                            )
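
The rewritten __init__ sets up the common two-handler logging scheme: a file handler records everything down to DEBUG in test.log, while the console handler shows only INFO and above. Stripped of the class around it, the setup looks like this:

import logging, sys

fh = logging.FileHandler('test.log')           # logs even debug messages
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s'))

ch = logging.StreamHandler(stream=sys.stdout)  # console gets the short form
ch.setLevel(logging.INFO)
ch.setFormatter(logging.Formatter('%(message)s'))

logger = logging.getLogger()
logger.addHandler(fh)
logger.addHandler(ch)
logger.setLevel(0)                             # let the handlers do the filtering

logging.debug('file only')
logging.info('file and console')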
   def execute(self):
     """Run all variants and report first failure."""
@@ -65,15 +72,17 @@ class Test():
         return variant+1 # return culprit
       return 0
     else:
-      if not self.testPossible(): return -1
+      if not self.feasible(): return -1
       self.clean()
       self.prepareAll()
-      for variant in xrange(len(self.variants)):
+      for variant,name in enumerate(self.variants):
         try:
           self.prepare(variant)
           self.run(variant)
           self.postprocess(variant)
-          if self.updateRequested: # update requested
+          if self.options.update: # update requested
             self.update(variant)
           elif not (self.options.accept or self.compare(variant)): # no update, do comparison
             return variant+1 # return culprit
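
The loop change above swaps xrange(len(self.variants)) for enumerate(self.variants), which yields the index and the variant together and also survives the removal of xrange in Python 3. In isolation (the variant names are made up):

variants = ['small strain', 'large strain', 'restart']  # hypothetical list

for variant, name in enumerate(variants):
    print('running variant {}: {}'.format(variant + 1, name))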
@@ -81,11 +90,11 @@ class Test():
           logging.critical('\nWARNING:\n {}\n'.format(e))
           return variant+1 # return culprit
     return 0
-  def testPossible(self):
-    """Check if test is possible or not (e.g. no license available)."""
+  def feasible(self):
+    """Check whether test is possible or not (e.g. no license available)."""
     return True
   def clean(self):
     """Delete directory tree containing current results."""
     status = True
@@ -103,7 +112,7 @@ class Test():
     status = status and False
     return status
   def prepareAll(self):
     """Do all necessary preparations for the whole test"""
     return True
@@ -111,7 +120,7 @@ class Test():
   def prepare(self,variant):
     """Do all necessary preparations for the run of each test variant"""
     return True
   def run(self,variant):
     """Execute the requested test variant."""
@@ -143,17 +152,17 @@ class Test():
     """Directory containing current results of the test."""
     return os.path.normpath(os.path.join(self.dirBase,'current/'))
   def dirProof(self):
     """Directory containing human readable proof of correctness for the test."""
     return os.path.normpath(os.path.join(self.dirBase,'proof/'))
   def fileInRoot(self,dir,file):
     """Path to a file in the root directory of DAMASK."""
     return os.path.join(damask.Environment().rootDir(),dir,file)
   def fileInReference(self,file):
     """Path to a file in the refrence directory for the test."""
     return os.path.join(self.dirReference(),file)
@@ -163,7 +172,7 @@ class Test():
     """Path to a file in the current results directory for the test."""
     return os.path.join(self.dirCurrent(),file)
   def fileInProof(self,file):
     """Path to a file in the proof directory for the test."""
     return os.path.join(self.dirProof(),file)
@@ -180,58 +189,58 @@ class Test():
     for source,target in zip(map(mapA,A),map(mapB,B)):
       try:
         shutil.copy2(source,target)
       except:
         logging.critical('error copying {} to {}'.format(source,target))
   def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]):
     if len(targetfiles) == 0: targetfiles = sourcefiles
     for i,file in enumerate(sourcefiles):
       try:
         shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
       except:
         logging.critical('Reference2Current: Unable to copy file "{}"'.format(file))
   def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):
     source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir))
     if len(targetfiles) == 0: targetfiles = sourcefiles
     for i,file in enumerate(sourcefiles):
       try:
         shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i]))
       except:
         logging.error(os.path.join(source,file))
         logging.critical('Base2Current: Unable to copy file "{}"'.format(file))
   def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]):
     if len(targetfiles) == 0: targetfiles = sourcefiles
     for i,file in enumerate(sourcefiles):
       try:
         shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i]))
       except:
         logging.critical('Current2Reference: Unable to copy file "{}"'.format(file))
   def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]):
     if len(targetfiles) == 0: targetfiles = sourcefiles
     for i,file in enumerate(sourcefiles):
       try:
         shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i]))
       except:
         logging.critical('Proof2Current: Unable to copy file "{}"'.format(file))
   def copy_Current2Current(self,sourcefiles=[],targetfiles=[]):
     for i,file in enumerate(sourcefiles):
       try:
         shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
       except:
         logging.critical('Current2Current: Unable to copy file "{}"'.format(file))
@@ -243,11 +252,11 @@ class Test():
     logging.info(error)
     logging.debug(out)
     return out,error
   def compare_Array(self,File1,File2):
     import numpy as np
@@ -278,28 +287,28 @@ class Test():
   def compare_ArrayRefCur(self,ref,cur=''):
     if cur =='': cur = ref
     refName = self.fileInReference(ref)
     curName = self.fileInCurrent(cur)
     return self.compare_Array(refName,curName)
   def compare_ArrayCurCur(self,cur0,cur1):
     cur0Name = self.fileInCurrent(cur0)
     cur1Name = self.fileInCurrent(cur1)
     return self.compare_Array(cur0Name,cur1Name)
   def compare_Table(self,headings0,file0,headings1,file1,normHeadings='',normType=None,
                     absoluteTolerance=False,perLine=False,skipLines=[]):
     import numpy as np
     logging.info('\n '.join(['comparing ASCII Tables',file0,file1]))
     if normHeadings == '': normHeadings = headings0
     # check if comparison is possible and determine lenght of columns
     if len(headings0) == len(headings1) == len(normHeadings):
       dataLength = len(headings0)
       length = [1 for i in xrange(dataLength)]
       shape = [[] for i in xrange(dataLength)]
@@ -307,14 +316,14 @@ class Test():
       maxError = [0.0 for i in xrange(dataLength)]
       absTol = [absoluteTolerance for i in xrange(dataLength)]
       column = [[1 for i in xrange(dataLength)] for j in xrange(2)]
       norm = [[] for i in xrange(dataLength)]
       normLength = [1 for i in xrange(dataLength)]
       normShape = [[] for i in xrange(dataLength)]
       normColumn = [1 for i in xrange(dataLength)]
       for i in xrange(dataLength):
         if headings0[i]['shape'] != headings1[i]['shape']:
           raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label']))
         shape[i] = headings0[i]['shape']
         for j in xrange(np.shape(shape[i])[0]):
@@ -330,7 +339,7 @@ class Test():
      table0 = damask.ASCIItable(name=file0,readonly=True)
      table0.head_read()
      table1 = damask.ASCIItable(name=file1,readonly=True)
      table1.head_read()
      for i in xrange(dataLength):
        key0 = ('1_' if length[i]>1 else '') + headings0[i]['label']
@@ -346,7 +355,7 @@ class Test():
        column[0][i] = table0.label_index(key0)
        column[1][i] = table1.label_index(key1)
        normColumn[i] = table0.label_index(normKey)
      line0 = 0
      while table0.data_read():                                    # read next data line of ASCII table
        if line0 not in skipLines:
@@ -361,7 +370,7 @@ class Test():
          else:
            norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType))
        line0 += 1
      for i in xrange(dataLength):
        if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))]
        data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]])
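The hunk above collects one norm value per data line and, without perLine, falls back to the per-column maximum. A minimal sketch of that idea on made-up numbers, outside the ASCIItable machinery:

import numpy as np

# per-line norms of two hypothetical 2-component column groups
rows = np.array([[3., 4.],
                 [6., 8.]])
norm = np.array([np.linalg.norm(r) for r in rows])
print(norm)                  # [ 5. 10.] -> one norm per line (perLine=True)
print(np.max(norm))          # 10.0      -> single shared norm (perLine=False)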
@@ -432,14 +441,14 @@ class Test():
      logging.info(files[i]+':'+','.join(columns[i]))
    if len(files) < 2: return True                                 # single table is always close to itself...
    data = []
    for table,labels in zip(tables,columns):
      table.data_readArray(labels)
      data.append(table.data)
      table.close()
    for i in xrange(1,len(data)):
      delta = data[i]-data[i-1]
      normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5
@@ -448,7 +457,7 @@ class Test():
    std = np.amax(np.std(normedDelta,0))
    logging.info('mean: {:f}'.format(mean))
    logging.info('std: {:f}'.format(std))
    return (mean<meanTol) & (std < stdTol)
@@ -458,17 +467,13 @@ class Test():
                     columns = [None],                             # list of list of column labels (per file)
                     rtol = 1e-5,
                     atol = 1e-8,
-                    preFilter = -1.0,
-                    postFilter = -1.0,
                     debug = False):
-    """
-    compare tables with np.allclose
-    threshold can be used to ignore small values (a negative number disables this feature)
-    """
+    """compare tables with np.allclose"""
    if not (isinstance(files, Iterable) and not isinstance(files, str)):   # check whether list of files is requested
      files = [str(files)]
+    if len(files) < 2: return True                                 # single table is always close to itself...
    tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
    for table in tables:
      table.head_read()
@@ -477,7 +482,7 @@ class Test():
      columns = columns[:len(files)]                                # truncate to same length as files
    for i,column in enumerate(columns):
-      if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all
+      if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, use all
    logging.info('comparing ASCIItables')
    for i in xrange(len(columns)):
@@ -487,44 +492,44 @@ class Test():
      )
      logging.info(files[i]+':'+','.join(columns[i]))
-    if len(files) < 2: return True                                 # single table is always close to itself...
-    maximum = np.zeros(len(columns[0]),dtype='f')
-    data = []
-    for table,labels in zip(tables,columns):
+    # peek into the ASCII table to figure out real table size
+    # the cryptic table header does not share the same size as real table
+    table.data_readArray(columns[0])
+    maximum = np.zeros(table.data.shape[1], dtype='f')
+    data = []                                                      # list of feature table extracted from each file (ASCII table)
+    for table, labels in zip(tables, columns):
      table.data_readArray(labels)
-      data.append(np.where(np.abs(table.data)<preFilter,np.zeros_like(table.data),table.data))
-      maximum += np.abs(table.data).max(axis=0)
+      for label in labels:
+        idx = table.label_indexrange(label)
+        maximum[idx] = np.maximum(maximum[idx],
+                                  np.amax(np.linalg.norm(table.data[:,idx],axis=1)))
+      data.append(table.data)
      table.close()
-    maximum /= len(tables)
-    maximum = np.where(maximum >0.0, maximum, 1)                   # avoid div by zero for empty columns
+    maximum = np.where(maximum > 0.0, maximum, 1)                  # avoid div by zero for empty columns
+    # normalize each table
    for i in xrange(len(data)):
      data[i] /= maximum
-    mask = np.zeros_like(table.data,dtype='bool')
-    for table in data:
-      mask |= np.where(np.abs(table)<postFilter,True,False)        # mask out (all) tiny values
-    allclose = np.absolute(data[0]-data[1]) <= (atol + rtol*np.absolute(data[1]))
-    for ok,valA,valB in zip(allclose,data[0],data[1]):
-      logging.debug('{}:\n {}\n{}\n'.format(ok,valA,valB))
+    if debug:
+      logging.debug(str(maximum))
    allclose = True                                                 # start optimistic
    for i in xrange(1,len(data)):
-      if debug:
-        t0 = np.where(mask,0.0,data[i-1])
-        t1 = np.where(mask,0.0,data[i  ])
-        j = np.argmin(np.abs(t1)*rtol+atol-np.abs(t0-t1))
-        logging.info('{:f}'.format(np.amax(np.abs(t0-t1)/(np.abs(t1)*rtol+atol))))
-        logging.info('{:f} {:f}'.format((t0*maximum).flatten()[j],(t1*maximum).flatten()[j]))
-      allclose &= np.allclose(np.where(mask,0.0,data[i-1]),
-                              np.where(mask,0.0,data[i  ]),rtol,atol)   # accumulate "pessimism"
+      allclose &= np.allclose(data[i-1],data[i],rtol,atol)         # accumulate "pessimism"
    return allclose
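The rewritten compare_Tables drops the pre/post filters in favor of a single per-column normalization followed by np.allclose. A minimal sketch of that normalize-then-compare idea on made-up arrays (the names here are illustrative, not part of the Test class):

import numpy as np

a = np.array([[1.0,        200.0],
              [2.0,        400.0]])
b = np.array([[1.0 + 1e-7, 200.0 + 1e-4],
              [2.0,        400.0]])

maximum = np.maximum(np.abs(a).max(axis=0), np.abs(b).max(axis=0))
maximum = np.where(maximum > 0.0, maximum, 1.0)     # avoid div by zero for empty columns

allclose = True                                     # start optimistic
allclose &= np.allclose(a/maximum, b/maximum, rtol=1e-5, atol=1e-8)
print(allclose)                                     # True: columns agree after scaling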
  def compare_TableRefCur(self,headingsRef,ref,headingsCur='',cur='',normHeadings='',normType=None,\
                          absoluteTolerance=False,perLine=False,skipLines=[]):
    if cur == '': cur = ref
    if headingsCur == '': headingsCur = headingsRef
    refName = self.fileInReference(ref)
@@ -532,10 +537,10 @@ class Test():
    return self.compare_Table(headingsRef,refName,headingsCur,curName,normHeadings,normType,
                              absoluteTolerance,perLine,skipLines)
  def compare_TableCurCur(self,headingsCur0,Cur0,Cur1,headingsCur1='',normHeadings='',normType=None,\
                          absoluteTolerance=False,perLine=False,skipLines=[]):
    if headingsCur1 == '': headingsCur1 = headingsCur0
    cur0Name = self.fileInCurrent(Cur0)
    cur1Name = self.fileInCurrent(Cur1)
@@ -544,15 +549,17 @@ class Test():
  def report_Success(self,culprit):
+    ret = culprit
    if culprit == 0:
-      logging.critical(('The test' if len(self.variants) == 1 else 'All {} tests'.format(len(self.variants))) + ' passed')
-      logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
-      return 0
-    if culprit == -1:
-      logging.warning('Warning: Could not start test')
-      return 0
+      msg = 'The test passed' if len(self.variants) == 1 \
+            else 'All {} tests passed.'.format(len(self.variants))
+    elif culprit == -1:
+      msg = 'Warning: Could not start test...'
+      ret = 0
    else:
-      logging.critical(' ********\n * Test {} failed...\n ********'.format(culprit))
-      logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
-      return culprit
+      msg = ' * Test "{}" failed.'.format(self.variants[culprit-1])
+    logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n')
+    return ret
View File
@@ -20,6 +20,7 @@ class bcolors:
  FAIL = '\033[91m'
  ENDC = '\033[0m'
  BOLD = '\033[1m'
+  DIM = '\033[2m'
  UNDERLINE = '\033[4m'
  def disable(self):
@@ -70,9 +71,19 @@ def report_geom(info,
# -----------------------------
def emph(what):
-  """emphasizes string on screen"""
+  """boldens string"""
  return bcolors.BOLD+srepr(what)+bcolors.ENDC
+
+# -----------------------------
+def deemph(what):
+  """dims string"""
+  return bcolors.DIM+srepr(what)+bcolors.ENDC
+
+# -----------------------------
+def delete(what):
+  """dims string"""
+  return bcolors.DIM+srepr(what)+bcolors.ENDC
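For context, BOLD and DIM above are ANSI SGR escape sequences that the terminal interprets, with ENDC resetting all attributes. A tiny standalone demonstration (any ANSI-capable terminal):

BOLD, DIM, ENDC = '\033[1m', '\033[2m', '\033[0m'   # same codes as bcolors
print(BOLD + 'emphasized' + ENDC + ' vs ' + DIM + 'de-emphasized' + ENDC)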
# -----------------------------
def execute(cmd,
            streamIn = None,
View File
@@ -2,6 +2,7 @@
# -*- coding: UTF-8 no BOM -*-
import os,sys
+import math                                                       # noqa
import numpy as np
from optparse import OptionParser
import damask
@@ -14,7 +15,7 @@ scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Replace all rows for which column 'label' has identical values by a single row containing their average.
+Apply a user-specified function to condense all rows for which column 'label' has identical values into a single row.
Output table will contain as many rows as there are different (unique) values in the grouping column.
Examples:
@@ -25,11 +26,33 @@ parser.add_option('-l','--label',
                  dest = 'label',
                  type = 'string', metavar = 'string',
                  help = 'column label for grouping rows')
+parser.add_option('-f','--function',
+                  dest = 'function',
+                  type = 'string', metavar = 'string',
+                  help = 'mapping function [%default]')
+parser.add_option('-a','--all',
+                  dest = 'all',
+                  action = 'store_true',
+                  help = 'apply mapping function also to grouping column')
+
+parser.set_defaults(function = 'np.average')
+
(options,filenames) = parser.parse_args()
+
+funcModule,funcName = options.function.split('.')
+
+try:
+  mapFunction = getattr(locals().get(funcModule) or
+                        globals().get(funcModule) or
+                        __import__(funcModule),
+                        funcName)
+except:
+  mapFunction = None
+
if options.label is None:
  parser.error('no grouping column specified.')
+if not hasattr(mapFunction,'__call__'):
+  parser.error('function "{}" is not callable.'.format(options.function))
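The getattr chain above resolves a dotted name such as 'np.average' against the local scope, then the global scope, and finally a fresh import. A hedged sketch of the same lookup as a standalone helper (resolve is a hypothetical name, not part of the script):

import numpy as np

def resolve(qualified):
  module, func = qualified.split('.')
  # try locals, then globals, then import the module on demand
  scope = locals().get(module) or globals().get(module) or __import__(module)
  return getattr(scope, func)

print(resolve('np.average')([1.0, 2.0, 3.0]))       # 2.0, found via globals()
print(resolve('math.sqrt')(9.0))                    # 3.0, imported on the fly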
# --- loop over input files -------------------------------------------------------------------------
@@ -38,10 +61,6 @@ if filenames == []: filenames = [None]
for name in filenames:
  try: table = damask.ASCIItable(name = name,
-                                outname = os.path.join(
-                                                       os.path.split(name)[0],
-                                                       options.label+'_averaged_'+os.path.split(name)[1]
-                                                      ) if name else name,
                                 buffered = False)
  except: continue
  damask.util.report(scriptName,name)
@@ -53,6 +72,8 @@ for name in filenames:
    damask.util.croak('column {} is not of scalar dimension.'.format(options.label))
    table.close(dismiss = True)                                    # close ASCIItable and remove empty file
    continue
+  else:
+    grpColumn = table.label_index(options.label)
# ------------------------------------------ assemble info ---------------------------------------
@@ -64,17 +85,17 @@ for name in filenames:
  table.data_readArray()
  rows,cols = table.data.shape
-  table.data = table.data[np.lexsort([table.data[:,table.label_index(options.label)]])]
-  values,index = np.unique(table.data[:,table.label_index(options.label)], return_index = True)
-  index = np.append(index,rows)
-  avgTable = np.empty((len(values), cols))
-  for j in xrange(cols) :
-    for i in xrange(len(values)) :
-      avgTable[i,j] = np.average(table.data[index[i]:index[i+1],j])
-  table.data = avgTable
+  table.data = table.data[np.lexsort([table.data[:,grpColumn]])]          # sort data by grpColumn
+  values,index = np.unique(table.data[:,grpColumn], return_index = True)  # unique grpColumn values and their positions
+  index = np.append(index,rows)                                           # add termination position
+  grpTable = np.empty((len(values), cols))                                # initialize output
+  for i in xrange(len(values)):                                           # iterate over groups (unique values in grpColumn)
+    grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]])  # apply mapping function
+    if not options.all: grpTable[i,grpColumn] = table.data[index[i],grpColumn]        # restore grouping column value
+  table.data = grpTable
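A minimal sketch of the grouping logic above on made-up data: sort by the grouping column, find each group's start with np.unique, and condense each slice of rows with the mapping function:

import numpy as np

data = np.array([[1., 10.],
                 [2., 20.],
                 [1., 30.]])
grpColumn = 0
data = data[np.lexsort([data[:,grpColumn]])]                    # sort by grouping column
values,index = np.unique(data[:,grpColumn], return_index=True)  # group start positions
index = np.append(index, data.shape[0])                         # termination position
grouped = np.array([np.apply_along_axis(np.average, 0, data[index[i]:index[i+1]])
                    for i in range(len(values))])
print(grouped)                                                  # [[ 1. 20.] [ 2. 20.]]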
# ------------------------------------------ output result -------------------------------
View File
@@ -3,6 +3,7 @@
import os,vtk
import damask
+import numpy as np
from collections import defaultdict
from optparse import OptionParser
@@ -37,12 +38,17 @@ parser.add_option('-v', '--vector',
                  dest = 'vector',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'vector value label(s)')
+parser.add_option('-t', '--tensor',
+                  dest = 'tensor',
+                  action = 'extend', metavar = '<string LIST>',
+                  help = 'tensor (3x3) value label(s)')
parser.add_option('-c', '--color', dest='color', action='extend',
                  metavar ='<string LIST>',
                  help = 'RGB color tuples')
parser.set_defaults(scalar = [],
                    vector = [],
+                    tensor = [],
                    color = [],
                    inplace = False,
                    render = False,
@@ -94,9 +100,10 @@ for name in filenames:
  errors = []
  VTKarray = {}
  active = defaultdict(list)
  for datatype,dimension,label in [['scalar',1,options.scalar],
                                   ['vector',3,options.vector],
+                                   ['tensor',9,options.tensor],
                                   ['color',3,options.color],
                                  ]:
    for i,dim in enumerate(table.label_dimension(label)):
@@ -107,7 +114,7 @@ for name in filenames:
      remarks.append('adding {} "{}"...'.format(datatype,me))
      active[datatype].append(me)
-      if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
+      if datatype in ['scalar','vector', 'tensor']: VTKarray[me] = vtk.vtkDoubleArray()
      elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray()
      VTKarray[me].SetNumberOfComponents(dimension)
@@ -119,20 +126,24 @@ for name in filenames:
      table.close(dismiss = True)
      continue
# ------------------------------------------ process data ---------------------------------------
  while table.data_read():                                         # read next data line of ASCII table
    for datatype,labels in active.items():                         # loop over scalar,color
      for me in labels:                                            # loop over all requested items
-        theData = [table.data[i] for i in table.label_indexrange(me)]         # read strings
-        if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
-        elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
-        elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
+        theData = [float(table.data[i]) for i in table.label_indexrange(me)]  # read strings
+        if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*x),theData))
+        elif datatype == 'scalar': VTKarray[me].InsertNextValue(theData[0])
+        elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*theData)
+        elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+
+                                                                       np.array(theData) \
+                                                                        .reshape(3,3).T \
+                                                                        .reshape(9)))
  table.input_close()                                              # close input ASCII table
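The tensor branch above symmetrizes the incoming 3x3 tensor as 0.5*(T + T^T) before handing nine components to VTK. The same arithmetic in isolation (no VTK required):

import numpy as np

theData = [float(s) for s in '1 2 3 4 5 6 7 8 9'.split()]       # nine flat components
T = np.array(theData)
sym = 0.5*(T + T.reshape(3,3).T.reshape(9))                     # average T with its transpose
print(sym.reshape(3,3))                                         # off-diagonal pairs averaged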
# ------------------------------------------ add data ---------------------------------------
  for datatype,labels in active.items():                           # loop over scalar,color
    if datatype == 'color':
@@ -145,7 +156,7 @@ for name in filenames:
  Polydata.Modified()
  if vtk.VTK_MAJOR_VERSION <= 5: Polydata.Update()
# ------------------------------------------ output result ---------------------------------------
  writer = vtk.vtkXMLPolyDataWriter()
  writer.SetDataModeToBinary()
@@ -155,7 +166,7 @@ for name in filenames:
  else: writer.SetInputData(Polydata)
  writer.Write()
# ------------------------------------------ render result ---------------------------------------
if options.render:
  mapper = vtk.vtkDataSetMapper()
@@ -179,7 +190,7 @@ if options.render:
  iren = vtk.vtkRenderWindowInteractor()
  iren.SetRenderWindow(renWin)
  iren.Initialize()
  renWin.Render()
  iren.Start()
View File
@@ -3,6 +3,7 @@
import os,vtk
import damask
+import numpy as np
from collections import defaultdict
from optparse import OptionParser
@@ -38,6 +39,10 @@ parser.add_option('-v', '--vector',
                  dest = 'vector',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'vector value label(s)')
+parser.add_option('-t', '--tensor',
+                  dest = 'tensor',
+                  action = 'extend', metavar = '<string LIST>',
+                  help = 'tensor (3x3) value label(s)')
parser.add_option('-c', '--color',
                  dest = 'color',
                  action = 'extend', metavar = '<string LIST>',
@@ -45,6 +50,7 @@ parser.add_option('-c', '--color',
parser.set_defaults(scalar = [],
                    vector = [],
+                    tensor = [],
                    color = [],
                    inplace = False,
                    render = False,
@@ -92,9 +98,10 @@ for name in filenames:
  errors = []
  VTKarray = {}
  active = defaultdict(list)
  for datatype,dimension,label in [['scalar',1,options.scalar],
                                   ['vector',3,options.vector],
+                                   ['tensor',9,options.tensor],
                                   ['color',3,options.color],
                                  ]:
    for i,dim in enumerate(table.label_dimension(label)):
@@ -105,7 +112,7 @@ for name in filenames:
      remarks.append('adding {} "{}"...'.format(datatype,me))
      active[datatype].append(me)
-      if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
+      if datatype in ['scalar','vector','tensor']: VTKarray[me] = vtk.vtkDoubleArray()
      elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray()
      VTKarray[me].SetNumberOfComponents(dimension)
@@ -117,7 +124,7 @@ for name in filenames:
    table.close(dismiss = True)
    continue
# ------------------------------------------ process data ---------------------------------------
  datacount = 0
@@ -128,12 +135,16 @@ for name in filenames:
      for me in labels:                                            # loop over all requested items
        theData = [table.data[i] for i in table.label_indexrange(me)]  # read strings
        if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
-        elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
        elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
+        elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
+        elif datatype == 'tensor': VTKarray[me].InsertNextTuple9(*0.5*(np.array(theData)+
+                                                                       np.array(theData) \
+                                                                        .reshape(3,3).T \
+                                                                        .reshape(9)))
  table.close()                                                    # close input ASCII table
# ------------------------------------------ add data ---------------------------------------
  if datacount == Npoints: mode = 'point'
  elif datacount == Ncells: mode = 'cell'
@@ -154,7 +165,7 @@ for name in filenames:
  rGrid.Modified()
  if vtk.VTK_MAJOR_VERSION <= 5: rGrid.Update()
# ------------------------------------------ output result ---------------------------------------
  writer = vtk.vtkXMLRectilinearGridWriter()
  writer.SetDataModeToBinary()
@@ -164,7 +175,7 @@ for name in filenames:
  else: writer.SetInputData(rGrid)
  writer.Write()
# ------------------------------------------ render result ---------------------------------------
if options.render:
  mapper = vtk.vtkDataSetMapper()
@@ -188,7 +199,7 @@ if options.render:
  iren = vtk.vtkRenderWindowInteractor()
  iren.SetRenderWindow(renWin)
  iren.Initialize()
  renWin.Render()
  iren.Start()
View File
@@ -79,9 +79,9 @@ for name in filenames:
  coords = [np.unique(table.data[:,i]) for i in xrange(3)]
  if options.mode == 'cell':
-    coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + len(coords[i]) > 1]] + \
+    coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \
              [coords[i][j-1] + coords[i][j] for j in xrange(1,len(coords[i]))] + \
-              [3.0 * coords[i][-1] - coords[i][-1 - (len(coords[i]) > 1)]]) for i in xrange(3)]
+              [3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in xrange(3)]
  grid = np.array(map(len,coords),'i')
  N = grid.prod() if options.mode == 'point' else (grid-1).prod()
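The hunk above builds N+1 node positions from N cell centers, taking midpoints in the interior and extrapolating linearly at both ends; the added int() casts make the 0/1 index offset explicit. A sketch with hypothetical centers:

import numpy as np

c = np.array([0.5, 1.5, 2.5])                       # hypothetical cell centers
nodes = 0.5 * np.array([3.0*c[0] - c[0 + int(len(c) > 1)]] +
                       [c[j-1] + c[j] for j in range(1, len(c))] +
                       [3.0*c[-1] - c[-1 - int(len(c) > 1)]])
print(nodes)                                        # [ 0. 1. 2. 3.] -> 4 nodes for 3 cells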
View File
@@ -30,30 +30,37 @@ parser.add_option('-s', '--substitute',
                  dest = 'substitute',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'substitutions of microstructure indices from,to,from,to,...')
+parser.add_option('--float',
+                  dest = 'real',
+                  action = 'store_true',
+                  help = 'use float input')
parser.set_defaults(origin = (0.0,0.0,0.0),
                    microstructure = 0,
                    substitute = [],
+                    real = False,
                   )
(options, filenames) = parser.parse_args()
+datatype = 'f' if options.real else 'i'
sub = {}
for i in xrange(len(options.substitute)/2):                        # split substitution list into "from" -> "to"
  sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1])
# --- loop over input files ----------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False, labeled = False)
+  try: table = damask.ASCIItable(name = name,
+                                 buffered = False,
+                                 labeled = False)
  except: continue
  damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------
  table.head_read()
  info,extra_header = table.head_getGeom()
@@ -73,9 +80,9 @@ for name in filenames:
    table.close(dismiss = True)
    continue
# --- read data ----------------------------------------------------------------------------------
-  microstructure = table.microstructure_read(info['grid'])          # read microstructure
+  microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure
# --- do work ------------------------------------------------------------------------------------
@@ -90,9 +97,9 @@ for name in filenames:
  substituted += options.microstructure                            # shift microstructure indices
  newInfo['origin'] = info['origin'] + options.origin
-  newInfo['microstructures'] = substituted.max()
+  newInfo['microstructures'] = len(np.unique(substituted))
# --- report -------------------------------------------------------------------------------------
  remarks = []
  if (any(newInfo['origin'] != info['origin'])):
@@ -101,7 +108,7 @@ for name in filenames:
    remarks.append('--> microstructures: %i'%newInfo['microstructures'])
  if remarks != []: damask.util.croak(remarks)
# --- write header -------------------------------------------------------------------------------
  table.labels_clear()
  table.info_clear()
@@ -116,12 +123,12 @@ for name in filenames:
                    ])
  table.head_write()
# --- write microstructure information -----------------------------------------------------------
-  formatwidth = int(math.floor(math.log10(microstructure.max())+1))
+  format = '%g' if options.real else '%{}i'.format(int(math.floor(math.log10(microstructure.max())+1)))
  table.data = substituted.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
-  table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
+  table.data_writeArray(format,delimiter = ' ')
# --- output finalization ------------------------------------------------------------------------
  table.close()                                                    # close ASCII table
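The format change above derives the integer field width from the largest microstructure index (digits = floor(log10(max)) + 1) and falls back to the generic '%g' for float input. The same computation in isolation:

import math

maximum = 42                                        # hypothetical largest index
real = False
fmt = '%g' if real else '%{}i'.format(int(math.floor(math.log10(maximum)+1)))
print(fmt)                                          # '%2i'
print(fmt % 7)                                      # ' 7' -> right-aligned to two digits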