Merge branch 'development' of magit1.mpie.de:damask/DAMASK into development
commit f8901220ab
@@ -41,7 +41,7 @@ if [ ! -z "$PS1" ]; then
 echo Max-Planck-Institut für Eisenforschung GmbH, Düsseldorf
 echo https://damask.mpie.de
 echo
-echo Using environment with ...
+echo "Using environment with ..."
 echo "DAMASK             $DAMASK_ROOT"
 echo "Spectral Solver    $SOLVER"
 echo "Post Processing    $PROCESSING"
@@ -81,7 +81,7 @@ program DAMASK_spectral
 use spectral_mech_Polarisation
 use spectral_damage
 use spectral_thermal

 implicit none
@@ -93,9 +93,9 @@ program DAMASK_spectral
 logical, dimension(9) :: temp_maskVector = .false.             !< temporarily from loadcase file when reading in tensors
 integer(pInt), parameter :: FILEUNIT = 234_pInt                !< file unit, DAMASK IO does not support newunit feature
 integer(pInt), allocatable, dimension(:) :: chunkPos

 integer(pInt) :: &
   N_t   = 0_pInt, &                                            !< # of time indicators found in load case file
   N_n   = 0_pInt, &                                            !< # of increment specifiers found in load case file
   N_def = 0_pInt                                               !< # of rate of deformation specifiers found in load case file
 character(len=65536) :: &
@@ -105,7 +105,7 @@ program DAMASK_spectral
 ! loop variables, convergence etc.
 real(pReal), dimension(3,3), parameter :: &
   ones  = 1.0_pReal, &
   zeros = 0.0_pReal
 integer(pInt), parameter :: &
   subStepFactor = 2_pInt                                       !< for each substep, divide the last time increment by 2.0
 real(pReal) :: &
@@ -150,18 +150,17 @@ program DAMASK_spectral
   MPI_file_get_position, &
   MPI_file_write, &
   MPI_abort, &
   MPI_finalize, &
   MPI_allreduce, &
   PETScFinalize

 !--------------------------------------------------------------------------------------------------
 ! init DAMASK (all modules)
 call CPFEM_initAll(el = 1_pInt, ip = 1_pInt)
 mainProcess: if (worldrank == 0) then
   write(6,'(/,a)') ' <<<+- DAMASK_spectral init -+>>>'
   write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
 endif mainProcess

 !--------------------------------------------------------------------------------------------------
 ! initialize field solver information
 nActiveFields = 1
@@ -194,19 +193,19 @@ program DAMASK_spectral
   call IO_error(error_ID=837_pInt,ext_msg = trim(loadCaseFile))      ! error message for incomplete loadcase
 allocate (loadCases(N_n))                                            ! array of load cases
 loadCases%P%myType='p'

 do i = 1, size(loadCases)
   allocate(loadCases(i)%ID(nActiveFields))
   field = 1
   loadCases(i)%ID(field) = FIELD_MECH_ID                             ! mechanical active by default
-  if (any(thermal_type == THERMAL_conduction_ID)) then               ! thermal field active
+  thermalActive: if (any(thermal_type == THERMAL_conduction_ID)) then
     field = field + 1
     loadCases(i)%ID(field) = FIELD_THERMAL_ID
-  endif
-  if (any(damage_type == DAMAGE_nonlocal_ID)) then                   ! damage field active
+  endif thermalActive
+  damageActive: if (any(damage_type == DAMAGE_nonlocal_ID)) then
     field = field + 1
     loadCases(i)%ID(field) = FIELD_DAMAGE_ID
-  endif
+  endif damageActive
 enddo

 !--------------------------------------------------------------------------------------------------
@@ -232,12 +231,10 @@ program DAMASK_spectral
 endif
 do j = 1_pInt, 9_pInt
   temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*'      ! true if not a *
 enddo
 do j = 1_pInt,9_pInt
   if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable
 enddo
 loadCases(currentLoadCase)%deformation%maskLogical = &               ! logical mask in 3x3 notation
   transpose(reshape(temp_maskVector,[ 3,3]))
 loadCases(currentLoadCase)%deformation%maskFloat = &                 ! float (1.0/0.0) mask in 3x3 notation
   merge(ones,zeros,loadCases(currentLoadCase)%deformation%maskLogical)
 loadCases(currentLoadCase)%deformation%values = math_plain9to33(temp_valueVector) ! values in 3x3 notation
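The hunk above reads nine `*`-or-number tokens into a logical vector, reshapes it row-wise into 3x3, and derives a 1.0/0.0 float mask with `merge`. A minimal standalone sketch of that pattern, using default `real`/`logical` kinds instead of DAMASK's `pReal`/`pInt` and a hypothetical mask:

    program mask_demo
      implicit none
      logical :: maskVector(9), maskLogical(3,3)
      real    :: ones(3,3), zeros(3,3), maskFloat(3,3)
      ones  = 1.0
      zeros = 0.0
      maskVector = [ .true., .false., .false., &
                     .false., .true., .false., &
                     .false., .false., .true. ]          ! '*' entries become .false.
      maskLogical = transpose(reshape(maskVector,[3,3])) ! row-major 9-vector -> 3x3
      maskFloat   = merge(ones, zeros, maskLogical)      ! 1.0 where masked, 0.0 elsewhere
      print '(3f4.1)', transpose(maskFloat)
    end program mask_demo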
@@ -245,8 +242,6 @@ program DAMASK_spectral
 temp_valueVector = 0.0_pReal
 do j = 1_pInt, 9_pInt
   temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*'      ! true if not an asterisk
 enddo
 do j = 1_pInt,9_pInt
   if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable
 enddo
 loadCases(currentLoadCase)%P%maskLogical = transpose(reshape(temp_maskVector,[ 3,3]))
@@ -261,10 +256,10 @@ program DAMASK_spectral
   loadCases(currentLoadCase)%incs = IO_intValue(line,chunkPos,i+1_pInt)
   loadCases(currentLoadCase)%logscale = 1_pInt
 case('freq','frequency','outputfreq')                                ! frequency of result writings
   loadCases(currentLoadCase)%outputfrequency = IO_intValue(line,chunkPos,i+1_pInt)
 case('r','restart','restartwrite')                                   ! frequency of writing restart information
   loadCases(currentLoadCase)%restartfrequency = &
     max(0_pInt,IO_intValue(line,chunkPos,i+1_pInt))
 case('guessreset','dropguessing')
   loadCases(currentLoadCase)%followFormerTrajectory = .false.        ! do not continue to predict deformation along former trajectory
 case('euler')                                                        ! rotation of currentLoadCase given in euler angles
@@ -273,10 +268,10 @@ program DAMASK_spectral
 k = 1_pInt                                                           ! assuming keyword indicating degree/radians present
 select case (IO_lc(IO_stringValue(line,chunkPos,i+1_pInt)))
   case('deg','degree')
   case('rad','radian')                                               ! don't convert from degree to radian
     l = 0_pInt
   case default
     k = 0_pInt
 end select
 do j = 1_pInt, 3_pInt
   temp_valueVector(j) = IO_floatValue(line,chunkPos,i+k+j)
@@ -291,7 +286,7 @@ program DAMASK_spectral
   loadCases(currentLoadCase)%rotation = math_plain9to33(temp_valueVector)
 end select
 enddo; enddo
 close(FILEUNIT)

 !--------------------------------------------------------------------------------------------------
 ! consistency checks and output of load case
@@ -303,14 +298,14 @@ program DAMASK_spectral
 write(6,'(1x,a,i6)') 'load case: ', currentLoadCase
 if (.not. loadCases(currentLoadCase)%followFormerTrajectory) &
   write(6,'(2x,a)') 'drop guessing along trajectory'
-if (loadCases(currentLoadCase)%deformation%myType=='l') then
+if (loadCases(currentLoadCase)%deformation%myType == 'l') then
   do j = 1_pInt, 3_pInt
     if (any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .true.) .and. &
         any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .false.)) &
       errorID = 832_pInt                                             ! each row should be either fully or not at all defined
   enddo
   write(6,'(2x,a)') 'velocity gradient:'
-else if (loadCases(currentLoadCase)%deformation%myType=='f') then
+else if (loadCases(currentLoadCase)%deformation%myType == 'f') then
   write(6,'(2x,a)') 'deformation gradient at end of load case:'
 else
   write(6,'(2x,a)') 'deformation gradient rate:'
@@ -319,12 +314,12 @@ program DAMASK_spectral
 if(loadCases(currentLoadCase)%deformation%maskLogical(i,j)) then
   write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%deformation%values(i,j)
 else
   write(6,'(2x,12a)',advance='no') ' * '
 endif
 enddo; write(6,'(/)',advance='no')
 enddo
 if (any(loadCases(currentLoadCase)%P%maskLogical .eqv. &
     loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt ! exclusive or masking only
 if (any(loadCases(currentLoadCase)%P%maskLogical .and. &
     transpose(loadCases(currentLoadCase)%P%maskLogical) .and. &
     reshape([ .false.,.true.,.true.,.true.,.false.,.true.,.true.,.true.,.false.],[ 3,3]))) &
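The `errorID = 831` check above enforces complementary boundary conditions: every tensor component must be prescribed either by the deformation mask or by the stress mask, never both or neither, so the two logical masks may nowhere be equivalent. A sketch of that check in isolation (hypothetical masks):

    program mask_check_demo
      implicit none
      logical :: maskF(3,3), maskP(3,3)
      maskF = .false.
      maskF(1,1) = .true.                  ! component (1,1) driven by deformation
      maskP = .not. maskF                  ! remaining components driven by stress
      ! complementary masks: maskP .eqv. maskF is .false. everywhere -> valid
      if (any(maskP .eqv. maskF)) then
        print *, 'error 831: component with both (or neither) BC types'
      else
        print *, 'load case masks are consistent'
      end if
    end program mask_check_demo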
@@ -334,12 +329,12 @@ program DAMASK_spectral
 if(loadCases(currentLoadCase)%P%maskLogical(i,j)) then
   write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%P%values(i,j)*1e-9_pReal
 else
   write(6,'(2x,12a)',advance='no') ' * '
 endif
 enddo; write(6,'(/)',advance='no')
 enddo
 if (any(abs(math_mul33x33(loadCases(currentLoadCase)%rotation, &
-    math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) >&
+    math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) > &
     reshape(spread(tol_math_check,1,9),[ 3,3]))&
     .or. abs(math_det33(loadCases(currentLoadCase)%rotation)) > &
     1.0_pReal + tol_math_check) errorID = 846_pInt                   ! given rotation matrix contains strain
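Error 846 rejects rotation matrices that carry strain: R·Rᵀ must match the identity within `tol_math_check` and |det R| must stay below 1 + tol. A standalone sketch using intrinsic `matmul`/`transpose` in place of DAMASK's `math_mul33x33`/`math_transpose33`; the tolerance value is illustrative:

    program rotation_check_demo
      implicit none
      real, parameter :: tol = 1.0e-6       ! stand-in for tol_math_check
      real :: R(3,3), I3(3,3)
      integer :: i
      I3 = 0.0
      do i = 1, 3
        I3(i,i) = 1.0
      end do
      R = I3                                ! identity: trivially a pure rotation
      if (any(abs(matmul(R,transpose(R)) - I3) > tol) .or. &
          abs(det3(R)) > 1.0 + tol) then
        print *, 'error 846: rotation matrix contains strain'
      else
        print *, 'rotation matrix is orthonormal'
      end if
    contains
      pure real function det3(A)
        real, intent(in) :: A(3,3)
        det3 = A(1,1)*(A(2,2)*A(3,3)-A(2,3)*A(3,2)) &
             - A(1,2)*(A(2,1)*A(3,3)-A(2,3)*A(3,1)) &
             + A(1,3)*(A(2,1)*A(3,2)-A(2,2)*A(3,1))
      end function det3
    end program rotation_check_demo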
@@ -360,7 +355,7 @@ program DAMASK_spectral
 endif

 !--------------------------------------------------------------------------------------------------
 ! doing initialization depending on selected solver
 call Utilities_init()
 do field = 1, nActiveFields
   select case (loadCases(1)%ID(field))
@@ -372,26 +367,26 @@ program DAMASK_spectral
   if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0 .and. worldrank == 0_pInt) &
     call IO_warning(42_pInt, ext_msg='debug Divergence')
   call AL_init

 case (DAMASK_spectral_SolverPolarisation_label)
   if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0 .and. worldrank == 0_pInt) &
     call IO_warning(42_pInt, ext_msg='debug Divergence')
   call Polarisation_init

 case default
-  call IO_error(error_ID = 891, ext_msg = trim(spectral_solver))
+  call IO_error(error_ID = 891_pInt, ext_msg = trim(spectral_solver))

 end select

 case(FIELD_THERMAL_ID)
   call spectral_thermal_init

 case(FIELD_DAMAGE_ID)
   call spectral_damage_init()

 end select
 enddo

 !--------------------------------------------------------------------------------------------------
 ! write header of output file
 if (worldrank == 0) then
@@ -410,7 +405,7 @@ program DAMASK_spectral
 write(resUnit) 'logscales:', loadCases%logscale
 write(resUnit) 'increments:', loadCases%incs                         ! one entry per LoadCase
 write(resUnit) 'startingIncrement:', restartInc - 1_pInt             ! start with writing out the previous inc
 write(resUnit) 'eoh'
 close(resUnit)                                                       ! end of header
 open(newunit=statUnit,file=trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//&
   '.sta',form='FORMATTED',status='REPLACE')
@@ -429,29 +424,30 @@ program DAMASK_spectral
 allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
 outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
 call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_allreduce')
 call MPI_file_open(PETSC_COMM_WORLD, &
                    trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', &
                    MPI_MODE_WRONLY + MPI_MODE_APPEND, &
                    MPI_INFO_NULL, &
                    resUnit, &
                    ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_open')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_open')
 call MPI_file_get_position(resUnit,fileOffset,ierr)                  ! get offset from header
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_get_position')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_get_position')
 fileOffset = fileOffset + sum(outputSize(1:worldrank))               ! offset of my process in file (header + processes before me)
 call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_seek')

 if (.not. appendToOutFile) then                                      ! if not restarting, write 0th increment
-  do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
-    outputIndex=int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
-                    min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
-    call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
-                        [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
-                        (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
+  do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
+    outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
+                       min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
+    call MPI_file_write(resUnit, &
+                        reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), &
+                                [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
+                        (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults, &
                         MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
-    if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
+    if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write')
  enddo
  fileOffset = fileOffset + sum(outputSize)                           ! forward to current file position
  if (worldrank == 0) &
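The writing loop above works around the per-call element-count limit of MPI I/O by slicing each rank's result array into chunks of at most `maxRealOut` values before each `MPI_file_write`. A minimal sketch of the chunking arithmetic alone, with hypothetical sizes and no MPI dependency:

    program chunk_demo
      implicit none
      integer, parameter :: sizeResults = 7    ! reals per material point (hypothetical)
      integer, parameter :: nPoints     = 10   ! points owned by this rank (hypothetical)
      integer, parameter :: maxRealOut  = 20   ! per-write limit in reals (hypothetical)
      integer :: i, first, last
      do i = 1, nPoints/(maxRealOut/sizeResults) + 1
        first = (i-1)*(maxRealOut/sizeResults) + 1
        last  = min(i*(maxRealOut/sizeResults), nPoints)
        if (first > last) exit                 ! nothing left to write
        print '(a,i0,a,i0,a,i0,a)', 'chunk ', i, ': points ', first, '..', last, &
              ' -> one write of (last-first+1)*sizeResults reals'
      end do
    end program chunk_demo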
@@ -460,7 +456,7 @@ program DAMASK_spectral
 !--------------------------------------------------------------------------------------------------
 ! loopping over loadcases
 loadCaseLooping: do currentLoadCase = 1_pInt, size(loadCases)
   time0 = time                                                       ! currentLoadCase start time
   guess = loadCases(currentLoadCase)%followFormerTrajectory          ! change of load case? homogeneous guess for the first inc

 !--------------------------------------------------------------------------------------------------
@@ -474,9 +470,9 @@ program DAMASK_spectral
 if (loadCases(currentLoadCase)%logscale == 0_pInt) then              ! linear scale
   timeinc = loadCases(currentLoadCase)%time/real(loadCases(currentLoadCase)%incs,pReal) ! only valid for given linear time scale. will be overwritten later in case loglinear scale is used
 else
   if (currentLoadCase == 1_pInt) then                                ! 1st currentLoadCase of logarithmic scale
     if (inc == 1_pInt) then                                          ! 1st inc of 1st currentLoadCase of logarithmic scale
       timeinc = loadCases(1)%time*(2.0_pReal**real( 1_pInt-loadCases(1)%incs ,pReal)) ! assume 1st inc is equal to 2nd
     else                                                             ! not-1st inc of 1st currentLoadCase of logarithmic scale
       timeinc = loadCases(1)%time*(2.0_pReal**real(inc-1_pInt-loadCases(1)%incs ,pReal))
     endif
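On the logarithmic scale above, increment n of N within a load case of duration T gets timeinc = T·2^(n-1-N): each increment doubles the previous one, the first borrows the size of the second, and the increments sum exactly to T. A sketch of that progression (standalone real arithmetic instead of the pReal kinds; N and T are hypothetical):

    program logscale_demo
      implicit none
      integer, parameter :: incs = 5
      real, parameter :: T = 1.0                 ! load case duration
      real :: timeinc, total
      integer :: inc
      total = 0.0
      do inc = 1, incs
        if (inc == 1) then                       ! 1st inc borrows the size of the 2nd
          timeinc = T*(2.0**real(1 - incs))
        else                                     ! each further inc doubles the previous
          timeinc = T*(2.0**real(inc - 1 - incs))
        end if
        total = total + timeinc
        print '(a,i0,a,f8.5)', 'inc ', inc, ': timeinc = ', timeinc
      end do
      print '(a,f8.5)', 'sum of increments = ', total   ! adds up to T exactly
    end program logscale_demo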
@@ -490,16 +486,16 @@ program DAMASK_spectral
 endif
 timeinc = timeinc / 2.0_pReal**real(cutBackLevel,pReal)              ! depending on cut back level, decrease time step

-forwarding: if(totalIncsCounter >= restartInc) then
+forwarding: if (totalIncsCounter >= restartInc) then
   stepFraction = 0_pInt

 !--------------------------------------------------------------------------------------------------
 ! loop over sub incs
   subIncLooping: do while (stepFraction/subStepFactor**cutBackLevel <1_pInt)
     time = time + timeinc                                            ! forward time
     stepFraction = stepFraction + 1_pInt
     remainingLoadCaseTime = time0 - time + loadCases(currentLoadCase)%time + timeInc

 !--------------------------------------------------------------------------------------------------
 ! report begin of new increment
     if (worldrank == 0) then
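Each cut-back halves timeinc (division by 2^cutBackLevel), and subIncLooping then runs subStepFactor^cutBackLevel sub-steps so that the full increment is still covered; stepFraction counts how many have run. A sketch of the sub-step bookkeeping with hypothetical constants and no solver call:

    program cutback_demo
      implicit none
      integer, parameter :: subStepFactor = 2
      integer :: cutBackLevel, stepFraction
      real :: timeinc, time
      cutBackLevel = 2                     ! two cut-backs: four sub-steps at 1/4 size
      timeinc = 1.0 / 2.0**real(cutBackLevel)
      time = 0.0
      stepFraction = 0
      do while (stepFraction/subStepFactor**cutBackLevel < 1)   ! integer division: runs 4 times
        time = time + timeinc
        stepFraction = stepFraction + 1
        print '(a,i0,a,f6.3)', 'sub-step ', stepFraction, ' reaches time ', time
      end do
    end program cutback_demo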
@@ -517,7 +513,7 @@ program DAMASK_spectral
       ',a,'//IO_intOut(stepFraction)//',a,'//IO_intOut(subStepFactor**cutBackLevel)//')') &
       'Increment ',totalIncsCounter,'/',sum(loadCases%incs),&
       '-',stepFraction, '/', subStepFactor**cutBackLevel
     endif

 !--------------------------------------------------------------------------------------------------
 ! forward fields
@@ -543,18 +539,18 @@ program DAMASK_spectral
     F_BC = loadCases(currentLoadCase)%deformation, &
     P_BC = loadCases(currentLoadCase)%P, &
     rotation_BC = loadCases(currentLoadCase)%rotation)
 end select

 case(FIELD_THERMAL_ID)
   call spectral_thermal_forward (&
     guess,timeinc,timeIncOld,remainingLoadCaseTime)

 case(FIELD_DAMAGE_ID)
   call spectral_damage_forward (&
     guess,timeinc,timeIncOld,remainingLoadCaseTime)
 end select
 enddo

 !--------------------------------------------------------------------------------------------------
 ! solve fields
 stagIter = 0_pInt
@@ -570,43 +566,43 @@ program DAMASK_spectral
     P_BC = loadCases(currentLoadCase)%P, &
     F_BC = loadCases(currentLoadCase)%deformation, &
     rotation_BC = loadCases(currentLoadCase)%rotation)

 case (DAMASK_spectral_SolverAL_label)
   solres(field) = AL_solution (&
     incInfo,guess,timeinc,timeIncOld,remainingLoadCaseTime, &
     P_BC = loadCases(currentLoadCase)%P, &
     F_BC = loadCases(currentLoadCase)%deformation, &
     rotation_BC = loadCases(currentLoadCase)%rotation)

 case (DAMASK_spectral_SolverPolarisation_label)
   solres(field) = Polarisation_solution (&
     incInfo,guess,timeinc,timeIncOld,remainingLoadCaseTime, &
     P_BC = loadCases(currentLoadCase)%P, &
     F_BC = loadCases(currentLoadCase)%deformation, &
     rotation_BC = loadCases(currentLoadCase)%rotation)

 end select

 case(FIELD_THERMAL_ID)
   solres(field) = spectral_thermal_solution (&
     guess,timeinc,timeIncOld,remainingLoadCaseTime)

 case(FIELD_DAMAGE_ID)
   solres(field) = spectral_damage_solution (&
     guess,timeinc,timeIncOld,remainingLoadCaseTime)

 end select
-if(.not. solres(field)%converged) exit                               ! no solution found
+if (.not. solres(field)%converged) exit                              ! no solution found
 enddo
 stagIter = stagIter + 1_pInt
 stagIterate = stagIter < stagItMax .and. &
               all(solres(:)%converged) .and. &
               .not. all(solres(:)%stagConverged)
 enddo

 !--------------------------------------------------------------------------------------------------
 ! check solution
 cutBack = .False.
 if(solres(1)%termIll .or. .not. all(solres(:)%converged .and. solres(:)%stagConverged)) then ! no solution found
   if (cutBackLevel < maxCutBack) then                                ! do cut back
     if (worldrank == 0) write(6,'(/,a)') ' cut back detected'
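The staggered loop above alternates between the active field solvers until every field is both converged and stagnation-converged (unchanged between staggered passes), or stagItMax passes elapse. A sketch of that termination logic with two mock fields and a hypothetical tolerance:

    program staggered_demo
      implicit none
      integer, parameter :: stagItMax = 10
      integer :: stagIter
      logical :: converged(2), stagConverged(2), stagIterate
      real :: x(2), xOld(2)
      x = [0.0, 0.0]                             ! mock field states, far from fixed point
      stagIter = 0
      stagIterate = .true.
      do while (stagIterate)
        xOld = x
        x = 0.5*(x + [1.0, 2.0])                 ! mock per-field solves (fixed-point step)
        converged = .true.                       ! each mock solve always converges
        stagConverged = abs(x - xOld) < 1.0e-6   ! field unchanged between passes?
        stagIter = stagIter + 1
        stagIterate = stagIter < stagItMax .and. &
                      all(converged) .and. &
                      .not. all(stagConverged)
      end do
      print '(a,i0,a)', 'staggered loop finished after ', stagIter, ' passes'
    end program staggered_demo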
@@ -619,8 +615,8 @@ program DAMASK_spectral
   call IO_warning(850_pInt)
   call quit(-1_pInt*(lastRestartWritten+1_pInt))                     ! quit and provide information about last restart inc written
 elseif (continueCalculation == 1_pInt) then
   guess = .true.                                                     ! accept non converged BVP solution
 else                                                                 ! default behavior, exit if spectral solver does not converge
   call IO_warning(850_pInt)
   call quit(-1_pInt*(lastRestartWritten+1_pInt))                     ! quit and provide information about last restart inc written
 endif
@@ -632,8 +628,8 @@ program DAMASK_spectral
   write(statUnit,*) totalIncsCounter, time, cutBackLevel, &
     solres%converged, solres%iterationsNeeded                        ! write statistics about accepted solution
   flush(statUnit)
 endif
 endif
 enddo subIncLooping
 cutBackLevel = max(0_pInt, cutBackLevel - 1_pInt)                    ! try half number of subincs next inc
 if(all(solres(:)%converged)) then                                    ! report converged inc
@@ -664,11 +660,11 @@ program DAMASK_spectral
 enddo
 fileOffset = fileOffset + sum(outputSize)                            ! forward to current file position
 endif
 if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. &     ! at frequency of writing restart information set restart parameter for FEsolving
     mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! first call to CPFEM_general will write?
   restartWrite = .true.
   lastRestartWritten = inc
 endif
 endif
 else forwarding
   time = time + timeinc
   guess = .true.
@@ -700,7 +696,7 @@ program DAMASK_spectral
   call AL_destroy()
 case (DAMASK_spectral_SolverPolarisation_label)
   call Polarisation_destroy()
 end select
 case(FIELD_THERMAL_ID)
   call spectral_thermal_destroy()
 case(FIELD_DAMAGE_ID)
@@ -711,6 +707,13 @@ program DAMASK_spectral

 call PETScFinalize(ierr); CHKERRQ(ierr)

+#ifdef _OPENMP
+ call MPI_finalize(i)
+ if (i /= 0_pInt) then
+   call IO_error(error_ID=894, el=i, ext_msg="Finalize()")
+ endif
+#endif

 if (notConvergedCounter > 0_pInt) call quit(3_pInt)                  ! error if some are not converged
 call quit(0_pInt)                                                    ! no complains ;)
@@ -721,7 +724,7 @@ end program DAMASK_spectral
 !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
 !> @brief quit subroutine to mimic behavior of FEM solvers
 !> @details exits the Spectral solver and reports time and duration. Exit code 0 signals
 !> everything went fine. Exit code 1 signals an error, message according to IO_error. Exit code
 !> 2 signals no converged solution and increment of last saved restart information is written to
 !> stderr. Exit code 3 signals no severe problems, but some increments did not converge
 !--------------------------------------------------------------------------------------------------
@@ -741,7 +744,7 @@ subroutine quit(stop_id)
 write(6,'(a,2(i2.2,a),i2.2)') 'Time: ',dateAndTime(5),':',&
                                        dateAndTime(6),':',&
                                        dateAndTime(7)

 if (stop_id == 0_pInt) stop 0                                        ! normal termination
 if (stop_id < 0_pInt) then                                           ! terminally ill, restart might help
   write(0,'(a,i6)') 'restart information available at ', stop_id*(-1_pInt)
@@ -60,8 +60,6 @@ subroutine FE_init
   IO_warning, &
   IO_timeStamp
 use DAMASK_interface
-use numerics, only: &
-  worldrank

 implicit none
#if defined(Marc4DAMASK) || defined(Abaqus)
@@ -129,6 +129,7 @@ recursive function IO_read(fileUnit,reset) result(line)
 !--------------------------------------------------------------------------------------------------
 ! normal case
 if (input == '') return                                              ! regular line

 !--------------------------------------------------------------------------------------------------
 ! recursion case
 if (stack >= 10_pInt) call IO_error(104_pInt,ext_msg=input)          ! recursion limit reached
@@ -141,7 +142,7 @@ recursive function IO_read(fileUnit,reset) result(line)
   pathOn(stack) = path(1:scan(path,SEP,.true.))//input               ! glue include to current file's dir
 endif

-open(newunit=unitOn(stack),iostat=myStat,file=pathOn(stack))                ! open included file
+open(newunit=unitOn(stack),iostat=myStat,file=pathOn(stack),action='read') ! open included file
 if (myStat /= 0_pInt) call IO_error(100_pInt,el=myStat,ext_msg=pathOn(stack))

 line = IO_read(fileUnit)
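IO_read resolves include directives by opening the included file on a fresh unit, pushing it onto a stack, and recursing, refusing to nest more than 10 levels deep (error 104). A simplified standalone sketch of that stack-guarded recursion, with hypothetical file names and an `include <path>` line syntax in place of DAMASK's actual directive handling:

    program include_demo
      implicit none
      integer :: u
      open(newunit=u, file='inner.cfg')                  ! hypothetical test fixtures
      write(u,'(a)') 'value 42'
      close(u)
      open(newunit=u, file='outer.cfg')
      write(u,'(a)') 'include inner.cfg'
      write(u,'(a)') 'done'
      close(u)
      call readAll('outer.cfg', 1)
    contains
      recursive subroutine readAll(path, depth)
        character(len=*), intent(in) :: path
        integer, intent(in) :: depth
        character(len=512) :: line
        integer :: unit, stat
        if (depth > 10) then                             ! cf. IO_error(104): recursion limit
          print *, 'recursion limit reached'; error stop
        end if
        open(newunit=unit, file=path, action='read', iostat=stat)
        if (stat /= 0) then                              ! cf. IO_error(100): cannot open file
          print *, 'cannot open ', trim(path); error stop
        end if
        do
          read(unit,'(a)', iostat=stat) line
          if (stat /= 0) exit                            ! end of this file: pop back to caller
          if (index(line,'include ') == 1) then          ! descend into the included file
            call readAll(trim(line(9:)), depth+1)
          else
            print '(a)', trim(line)
          end if
        end do
        close(unit)
      end subroutine readAll
    end program include_demo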
code/Makefile
@@ -257,10 +257,10 @@ COMPILE_OPTIONS_gfortran :=-DDAMASKVERSION=\"${DAMASKVERSION}\"\
 #-Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions.
 #-Wstrict-overflow:

-DEBUG_OPTIONS_gfortran :=-g\
-                         -fbacktrace\
-                         -fdump-core\
-                         -fcheck=all\
+DEBUG_OPTIONS_gfortran :=-g \
+                         -fbacktrace \
+                         -fdump-core \
+                         -fcheck=all \
                          -ffpe-trap=invalid,zero,overflow

###################################################################################################
@@ -300,37 +300,60 @@ COMPILE =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$
 COMPILE_MAXOPTI =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) $(COMPILE_OPTIONS_$(F90)) $(INCLUDE_DIRS) $(PRECISION_$(F90))
###################################################################################################
 SOURCE_FILES = \
-   source_thermal_dissipation.o source_thermal_externalheat.o \
-   source_damage_isoBrittle.o source_damage_isoDuctile.o source_damage_anisoBrittle.o source_damage_anisoDuctile.o \
-   source_vacancy_phenoplasticity.o source_vacancy_irradiation.o source_vacancy_thermalfluc.o
+   source_thermal_dissipation.o \
+   source_thermal_externalheat.o \
+   source_damage_isoBrittle.o \
+   source_damage_isoDuctile.o \
+   source_damage_anisoBrittle.o \
+   source_damage_anisoDuctile.o \
+   source_vacancy_phenoplasticity.o \
+   source_vacancy_irradiation.o \
+   source_vacancy_thermalfluc.o

 KINEMATICS_FILES = \
-   kinematics_cleavage_opening.o kinematics_slipplane_opening.o \
+   kinematics_cleavage_opening.o \
+   kinematics_slipplane_opening.o \
    kinematics_thermal_expansion.o \
-   kinematics_vacancy_strain.o kinematics_hydrogen_strain.o
+   kinematics_vacancy_strain.o \
+   kinematics_hydrogen_strain.o

 PLASTIC_FILES = \
-   plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o \
-   plastic_phenopowerlaw.o plastic_titanmod.o plastic_nonlocal.o plastic_none.o \
+   plastic_dislotwin.o \
+   plastic_disloUCLA.o \
+   plastic_isotropic.o \
+   plastic_phenopowerlaw.o \
+   plastic_titanmod.o \
+   plastic_nonlocal.o \
+   plastic_none.o \
    plastic_phenoplus.o

 THERMAL_FILES = \
-   thermal_isothermal.o thermal_adiabatic.o thermal_conduction.o
+   thermal_isothermal.o \
+   thermal_adiabatic.o \
+   thermal_conduction.o

 DAMAGE_FILES = \
-   damage_none.o damage_local.o damage_nonlocal.o
+   damage_none.o \
+   damage_local.o \
+   damage_nonlocal.o

 VACANCYFLUX_FILES = \
-   vacancyflux_isoconc.o vacancyflux_isochempot.o vacancyflux_cahnhilliard.o
+   vacancyflux_isoconc.o \
+   vacancyflux_isochempot.o \
+   vacancyflux_cahnhilliard.o

 POROSITY_FILES = \
-   porosity_none.o porosity_phasefield.o
+   porosity_none.o \
+   porosity_phasefield.o

 HYDROGENFLUX_FILES = \
-   hydrogenflux_isoconc.o hydrogenflux_cahnhilliard.o
+   hydrogenflux_isoconc.o \
+   hydrogenflux_cahnhilliard.o

 HOMOGENIZATION_FILES = \
-   homogenization_RGC.o homogenization_isostrain.o homogenization_none.o
+   homogenization_RGC.o \
+   homogenization_isostrain.o \
+   homogenization_none.o

#####################
# Spectral Solver
@@ -351,11 +374,28 @@ DAMASK_spectral.o: INTERFACENAME := spectral_interface.f90
 SPECTRAL_SOLVER_FILES = spectral_mech_AL.o spectral_mech_Basic.o spectral_mech_Polarisation.o \
                         spectral_thermal.o spectral_damage.o

-SPECTRAL_FILES = C_routines.o system_routines.o prec.o DAMASK_interface.o IO.o numerics.o debug.o math.o \
-                 FEsolving.o mesh.o material.o lattice.o \
-                 $(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \
+SPECTRAL_FILES = C_routines.o \
+                 system_routines.o \
+                 prec.o \
+                 DAMASK_interface.o \
+                 IO.o \
+                 numerics.o \
+                 debug.o \
+                 math.o \
+                 FEsolving.o \
+                 mesh.o \
+                 material.o \
+                 lattice.o \
+                 $(SOURCE_FILES) \
+                 $(KINEMATICS_FILES) \
+                 $(PLASTIC_FILES) \
+                 constitutive.o \
                  crystallite.o \
-                 $(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \
+                 $(THERMAL_FILES) \
+                 $(DAMAGE_FILES) \
+                 $(VACANCYFLUX_FILES) \
+                 $(HYDROGENFLUX_FILES) \
+                 $(POROSITY_FILES) \
                  $(HOMOGENIZATION_FILES) homogenization.o \
                  CPFEM2.o \
                  spectral_utilities.o \
@@ -401,14 +441,31 @@ DAMASK_FEM.exe: INCLUDE_DIRS += -I./

 FEM_SOLVER_FILES = FEM_mech.o FEM_thermal.o FEM_damage.o FEM_vacancyflux.o FEM_porosity.o FEM_hydrogenflux.o

-FEM_FILES = prec.o DAMASK_interface.o FEZoo.o IO.o numerics.o debug.o math.o \
-            FEsolving.o mesh.o material.o lattice.o \
-            $(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \
+FEM_FILES = prec.o \
+            DAMASK_interface.o \
+            FEZoo.o \
+            IO.o \
+            numerics.o \
+            debug.o \
+            math.o \
+            FEsolving.o \
+            mesh.o \
+            material.o \
+            lattice.o \
+            $(SOURCE_FILES) \
+            $(KINEMATICS_FILES) \
+            $(PLASTIC_FILES) \
+            constitutive.o \
             crystallite.o \
-            $(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \
+            $(THERMAL_FILES) \
+            $(DAMAGE_FILES) \
+            $(VACANCYFLUX_FILES) \
+            $(HYDROGENFLUX_FILES) \
+            $(POROSITY_FILES) \
             $(HOMOGENIZATION_FILES) homogenization.o \
             CPFEM.o \
-            FEM_utilities.o $(FEM_SOLVER_FILES)
+            FEM_utilities.o \
+            $(FEM_SOLVER_FILES)

 DAMASK_FEM.exe: DAMASK_FEM_driver.o
	$(PREFIX) $(LINKERNAME) $(OPENMP_FLAG_$(F90)) $(LINK_OPTIONS_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) \
@@ -658,8 +715,8 @@ tidy:
	@rm -rf *.inst.f90   # for instrumentation
	@rm -rf *.pomp.f90   # for instrumentation
	@rm -rf *.pp.f90     # for instrumentation
-	@rm -rf *.pdb        # for instrumnentation
-	@rm -rf *.opari.inc  # for instrumnentation
+	@rm -rf *.pdb        # for instrumentation
+	@rm -rf *.opari.inc  # for instrumentation

.PHONY: cleanDAMASK
cleanDAMASK:
@@ -104,7 +104,6 @@ contains
subroutine debug_init
 use, intrinsic :: iso_fortran_env                                    ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
 use numerics, only: &
   worldrank, &
   nStress, &
   nState, &
   nCryst, &
@@ -130,47 +129,27 @@ subroutine debug_init
 integer(pInt), allocatable, dimension(:) :: chunkPos
 character(len=65536) :: tag, line

 mainProcess: if (worldrank == 0) then
   write(6,'(/,a)') ' <<<+- debug init -+>>>'
   write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
 endif mainProcess

-if (allocated(debug_StressLoopLpDistribution)) &
-  deallocate(debug_StressLoopLpDistribution)
-allocate(debug_StressLoopLpDistribution(nStress+1,2))
-debug_StressLoopLpDistribution = 0_pInt
-if (allocated(debug_StressLoopLiDistribution)) &
-  deallocate(debug_StressLoopLiDistribution)
-allocate(debug_StressLoopLiDistribution(nStress+1,2))
-debug_StressLoopLiDistribution = 0_pInt
-if (allocated(debug_StateLoopDistribution)) &
-  deallocate(debug_StateLoopDistribution)
-allocate(debug_StateLoopDistribution(nState+1,2))
-debug_StateLoopDistribution = 0_pInt
-if (allocated(debug_CrystalliteLoopDistribution)) &
-  deallocate(debug_CrystalliteLoopDistribution)
-allocate(debug_CrystalliteLoopDistribution(nCryst+1))
-debug_CrystalliteLoopDistribution = 0_pInt
-if (allocated(debug_MaterialpointStateLoopDistribution)) &
-  deallocate(debug_MaterialpointStateLoopDistribution)
-allocate(debug_MaterialpointStateLoopDistribution(nMPstate))
-debug_MaterialpointStateLoopDistribution = 0_pInt
-if (allocated(debug_MaterialpointLoopDistribution)) &
-  deallocate(debug_MaterialpointLoopDistribution)
-allocate(debug_MaterialpointLoopDistribution(nHomog+1))
-debug_MaterialpointLoopDistribution = 0_pInt
+allocate(debug_StressLoopLpDistribution(nStress+1,2), source=0_pInt)
+allocate(debug_StressLoopLiDistribution(nStress+1,2), source=0_pInt)
+allocate(debug_StateLoopDistribution(nState+1,2), source=0_pInt)
+allocate(debug_CrystalliteLoopDistribution(nCryst+1), source=0_pInt)
+allocate(debug_MaterialpointStateLoopDistribution(nMPstate), source=0_pInt)
+allocate(debug_MaterialpointLoopDistribution(nHomog+1), source=0_pInt)

 !--------------------------------------------------------------------------------------------------
 ! try to open the config file
 line = ''
 fileExists: if(IO_open_file_stat(FILEUNIT,debug_configFile)) then
   do while (trim(line) /= IO_EOF)                                    ! read thru sections of phase part
     line = IO_read(FILEUNIT)
     if (IO_isBlank(line)) cycle                                      ! skip empty lines
     chunkPos = IO_stringPos(line)
     tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt))                ! extract key
     select case(tag)
       case ('element','e','el')
         debug_e = IO_intValue(line,chunkPos,2_pInt)
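The rewrite above replaces the defensive deallocate/allocate/zero-assign triple with sourced allocation, which sizes and fills an array in one statement (a scalar `source=` expression is a Fortran 2008 feature). A minimal sketch:

    program sourced_alloc_demo
      implicit none
      integer, allocatable :: counters(:,:)
      integer, parameter :: nStress = 4
      ! one statement allocates (nStress+1) x 2 and zero-fills it,
      ! replacing allocate(...) followed by counters = 0
      allocate(counters(nStress+1,2), source=0)
      print '(a,i0,a,i0)', 'shape: ', size(counters,1), ' x ', size(counters,2)
      print '(a,i0)', 'sum (all zeros): ', sum(counters)
    end program sourced_alloc_demo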
@@ -158,7 +158,8 @@ module math
   math_areaTriangle, &
   math_rotate_forward33, &
   math_rotate_backward33, &
-  math_rotate_forward3333
+  math_rotate_forward3333, &
+  math_limit
 private :: &
   math_partition, &
   halton, &
@@ -178,7 +179,6 @@ subroutine math_init
 use, intrinsic :: iso_fortran_env                                    ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
 use prec, only: tol_math_check
 use numerics, only: &
-  worldrank, &
   fixedSeed
 use IO, only: IO_error, IO_timeStamp
@@ -481,6 +481,7 @@ subroutine mesh_init(ip,el)
#endif
#ifdef Spectral
   IO_open_file, &
+  IO_error, &
#else
   IO_open_InputFile, &
#endif
@@ -507,7 +508,8 @@ subroutine mesh_init(ip,el)

 implicit none
#ifdef Spectral
-integer(C_INTPTR_T) :: gridMPI(3), alloc_local, local_K, local_K_offset
+integer(C_INTPTR_T) :: devNull, local_K, local_K_offset
+integer :: ierr, worldsize
#endif
 integer(pInt), parameter :: FILEUNIT = 222_pInt
 integer(pInt), intent(in) :: el, ip
@@ -547,10 +549,13 @@ subroutine mesh_init(ip,el)
 call IO_open_file(FILEUNIT,geometryFile)                             ! parse info from geometry file...
 if (myDebug) write(6,'(a)') ' Opened geometry file'; flush(6)
 grid = mesh_spectral_getGrid(fileUnit)
+call MPI_comm_size(MPI_COMM_WORLD, worldsize, ierr)
+if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_comm_size')
+if(worldsize>grid(3)) call IO_error(894_pInt, ext_msg='number of processes exceeds grid(3)')

 geomSize = mesh_spectral_getSize(fileUnit)
-gridMPI = int(grid,C_INTPTR_T)
-alloc_local = fftw_mpi_local_size_3d(gridMPI(3), gridMPI(2), gridMPI(1)/2 +1, &
-                                     MPI_COMM_WORLD, local_K, local_K_offset)
+devNull = fftw_mpi_local_size_3d(int(grid(3),C_INTPTR_T),int(grid(2),C_INTPTR_T),&
+                                 int(grid(1),C_INTPTR_T)/2+1,MPI_COMM_WORLD,local_K,local_K_offset)
 grid3 = int(local_K,pInt)
 grid3Offset = int(local_K_offset,pInt)
 size3 = geomSize(3)*real(grid3,pReal) /real(grid(3),pReal)
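fftw_mpi_local_size_3d slab-decomposes the FFT grid along its slowest (third) dimension: each rank learns how many z-layers it owns (local_K) and where they start (local_K_offset), which mesh_init stores as grid3/grid3Offset; the new check rejects runs with more ranks than z-layers. A sketch of equivalent bookkeeping without FFTW (an assumed even split plus remainder; FFTW's actual partition may differ in detail):

    program slab_decomp_demo
      implicit none
      integer, parameter :: gridZ = 16, worldsize = 3    ! hypothetical grid(3) and rank count
      integer :: rank, local_K, local_K_offset
      if (worldsize > gridZ) stop 'number of processes exceeds grid(3)'
      do rank = 0, worldsize-1
        ! slab split: the first mod(gridZ,worldsize) ranks take one extra layer
        local_K = gridZ/worldsize + merge(1, 0, rank < mod(gridZ,worldsize))
        local_K_offset = rank*(gridZ/worldsize) + min(rank, mod(gridZ,worldsize))
        print '(a,i0,a,i0,a,i0)', 'rank ', rank, ': grid3 = ', local_K, &
              ', grid3Offset = ', local_K_offset
      end do
    end program slab_decomp_demo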
@@ -1115,7 +1115,7 @@ allocate(nonSchmidProjection(3,3,4,maxTotalNslip,maxNinstances),

 initializeInstances: do phase = 1_pInt, size(phase_plasticity)
   NofMyPhase=count(material_phase==phase)
-  myPhase2: if (phase_plasticity(phase) == PLASTICITY_NONLOCAL_ID .and. NofMyPhase/=0) then
+  myPhase2: if (phase_plasticity(phase) == PLASTICITY_NONLOCAL_ID) then
     instance = phase_plasticityInstance(phase)
 !*** Inverse lookup of my slip system family and the slip system in lattice
@@ -81,7 +81,7 @@ module prec
   nTwin = 0_pInt, &
   nTrans = 0_pInt
 logical :: &
-  nonlocal = .false.                                                 !< absolute tolerance for state integration
+  nonlocal = .false.
 real(pReal), pointer, dimension(:,:), contiguous :: &
   slipRate, &                                                        !< slip rate
   accumulatedSlip                                                    !< accumulated plastic slip
@@ -57,7 +57,8 @@ subroutine DAMASK_interface_init()
 integer :: &
   i, &
   threadLevel, &
-  worldrank = 0
+  worldrank = 0, &
+  worldsize = 0
 integer, allocatable, dimension(:) :: &
   chunkPos
 integer, dimension(8) :: &
@@ -66,6 +67,7 @@ subroutine DAMASK_interface_init()
 external :: &
   quit,&
   MPI_Comm_rank,&
+  MPI_Comm_size,&
   PETScInitialize, &
   MPI_Init_Thread, &
   MPI_abort
@@ -77,17 +79,17 @@ subroutine DAMASK_interface_init()
#ifdef _OPENMP
 call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,ierr);CHKERRQ(ierr) ! in case of OpenMP, don't rely on PETScInitialize doing MPI init
 if (threadLevel<MPI_THREAD_FUNNELED) then
-  write(6,'(a)') 'MPI library does not support OpenMP'
+  write(6,'(a)') ' MPI library does not support OpenMP'
   call quit(1_pInt)
 endif
#endif
 call PetscInitialize(PETSC_NULL_CHARACTER,ierr)                      ! according to PETSc manual, that should be the first line in the code
 CHKERRQ(ierr)                                                        ! this is a macro definition, it is case sensitive
 call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,ierr);CHKERRQ(ierr)
+call MPI_Comm_size(MPI_COMM_WORLD, worldsize, ierr);CHKERRQ(ierr)
 mainProcess: if (worldrank == 0) then
   if (output_unit /= 6) then
-    write(output_unit,'(a)') 'STDOUT != 6'
+    write(output_unit,'(a)') ' STDOUT != 6'
     call quit(1_pInt)
   endif
 else mainProcess
@@ -104,6 +106,7 @@ subroutine DAMASK_interface_init()
 write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':',&
                                         dateAndTime(6),':',&
                                         dateAndTime(7)
+write(6,'(/,a,i4.1)') ' MPI processes: ',worldsize
 write(6,'(/,a)') ' <<<+- DAMASK_interface init -+>>>'
#include "compilation_info.f90"
@@ -245,7 +245,7 @@ type(tSolutionState) function spectral_thermal_solution(guess,timeinc,timeinc_old
 if (worldrank == 0) then
   if (spectral_thermal_solution%converged) &
     write(6,'(/,a)') ' ... thermal conduction converged ..................................'
-  write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature = ',&
+  write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature / K = ',&
     minTemperature, maxTemperature, stagNorm
   write(6,'(/,a)') ' ==========================================================================='
   flush(6)
@@ -172,8 +172,7 @@ subroutine utilities_init()
   memory_efficient, &
   petsc_defaultOptions, &
   petsc_options, &
-  divergence_correction, &
-  worldrank
+  divergence_correction
 use debug, only: &
   debug_level, &
   debug_SPECTRAL, &
@@ -212,11 +211,9 @@ subroutine utilities_init()
   vecSize = 3_C_INTPTR_T, &
   tensorSize = 9_C_INTPTR_T

-mainProcess: if (worldrank == 0) then
-  write(6,'(/,a)') ' <<<+- spectral_utilities init -+>>>'
-  write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
+write(6,'(/,a)') ' <<<+- spectral_utilities init -+>>>'
+write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
-endif mainProcess

 !--------------------------------------------------------------------------------------------------
 ! set debugging parameters
@@ -224,11 +221,11 @@ subroutine utilities_init()
 debugRotation = iand(debug_level(debug_SPECTRAL),debug_SPECTRALROTATION) /= 0
 debugPETSc    = iand(debug_level(debug_SPECTRAL),debug_SPECTRALPETSC)    /= 0

-if(debugPETSc .and. worldrank == 0_pInt) write(6,'(3(/,a),/)') &
+if(debugPETSc) write(6,'(3(/,a),/)') &
   ' Initializing PETSc with debug options: ', &
   trim(PETScDebug), &
-  ' add more using the PETSc_Options keyword in numerics.config '
-flush(6)
+  ' add more using the PETSc_Options keyword in numerics.config '; flush(6)

 call PetscOptionsClear(ierr); CHKERRQ(ierr)
 if(debugPETSc) call PetscOptionsInsertString(trim(PETSCDEBUG),ierr); CHKERRQ(ierr)
 call PetscOptionsInsertString(trim(petsc_defaultOptions),ierr); CHKERRQ(ierr)
@@ -237,10 +234,8 @@ subroutine utilities_init()
 grid1Red = grid(1)/2_pInt + 1_pInt
 wgt = 1.0/real(product(grid),pReal)

-if (worldrank == 0) then
-  write(6,'(a,3(i12  ))') ' grid     a b c: ', grid
-  write(6,'(a,3(es12.5))') ' size     x y z: ', geomSize
-endif
+write(6,'(a,3(i12  ))') ' grid     a b c: ', grid
+write(6,'(a,3(es12.5))') ' size     x y z: ', geomSize

 select case (spectral_derivative)
   case ('continuous')                                                ! default, no weighting
@@ -342,8 +337,7 @@ subroutine utilities_init()
 if (pReal /= C_DOUBLE .or. pInt /= C_INT) call IO_error(0_pInt,ext_msg='Fortran to C') ! check for correct precision in C
 call fftw_set_timelimit(fftw_timelimit)                              ! set timelimit for plan creation

-if (debugGeneral .and. worldrank == 0_pInt) write(6,'(/,a)') ' FFTW initialized'
-flush(6)
+if (debugGeneral) write(6,'(/,a)') ' FFTW initialized'; flush(6)

 !--------------------------------------------------------------------------------------------------
 ! calculation of discrete angular frequencies, ordered as in FFTW (wrap around)
@@ -527,8 +521,6 @@ subroutine utilities_fourierGammaConvolution(fieldAim)
 use math, only: &
   math_det33, &
   math_invert
-use numerics, only: &
-  worldrank
 use mesh, only: &
   grid3, &
   grid, &
@@ -545,10 +537,8 @@ subroutine utilities_fourierGammaConvolution(fieldAim)
 logical :: err

-if (worldrank == 0_pInt) then
-  write(6,'(/,a)') ' ... doing gamma convolution ...............................................'
-  flush(6)
-endif
+write(6,'(/,a)') ' ... doing gamma convolution ...............................................'
+flush(6)

 !--------------------------------------------------------------------------------------------------
 ! do the actual spectral method calculation (mechanical equilibrium)
@@ -624,8 +614,6 @@ end subroutine utilities_fourierGreenConvolution
real(pReal) function utilities_divergenceRMS()
 use IO, only: &
   IO_error
-use numerics, only: &
-  worldrank
 use mesh, only: &
   geomSize, &
   grid, &
|
@ -638,10 +626,9 @@ real(pReal) function utilities_divergenceRMS()
|
|||
external :: &
|
||||
MPI_Allreduce
|
||||
|
||||
if (worldrank == 0_pInt) then
|
||||
write(6,'(/,a)') ' ... calculating divergence ................................................'
|
||||
flush(6)
|
||||
endif
|
||||
write(6,'(/,a)') ' ... calculating divergence ................................................'
|
||||
flush(6)
|
||||
|
||||
rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal)
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
|
@@ -680,8 +667,6 @@ end function utilities_divergenceRMS
real(pReal) function utilities_curlRMS()
 use IO, only: &
   IO_error
-use numerics, only: &
-  worldrank
 use mesh, only: &
   geomSize, &
   grid, &
|
@ -693,13 +678,11 @@ real(pReal) function utilities_curlRMS()
|
|||
complex(pReal), dimension(3) :: rescaledGeom
|
||||
|
||||
external :: &
|
||||
MPI_Reduce, &
|
||||
MPI_Allreduce
|
||||
|
||||
if (worldrank == 0_pInt) then
|
||||
write(6,'(/,a)') ' ... calculating curl ......................................................'
|
||||
flush(6)
|
||||
endif
|
||||
write(6,'(/,a)') ' ... calculating curl ......................................................'
|
||||
flush(6)
|
||||
|
||||
rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal)
|
||||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
|
@@ -757,8 +740,6 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
   prec_isNaN
 use IO, only: &
   IO_error
-use numerics, only: &
-  worldrank
 use math, only: &
   math_Plain3333to99, &
   math_plain99to3333, &
@@ -790,7 +771,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
 allocate (sTimesC(size_reduced,size_reduced), source =0.0_pReal)
 temp99_Real = math_Plain3333to99(math_rotate_forward3333(C,rot_BC))

-if(debugGeneral .and. worldrank == 0_pInt) then
+if(debugGeneral) then
   write(6,'(/,a)') ' ... updating masked compliance ............................................'
   write(6,'(/,a,/,9(9(2x,f12.7,1x)/))',advance='no') ' Stiffness C (load) / GPa =',&
     transpose(temp99_Real)/1.e9_pReal
@@ -831,7 +812,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
 if(m/=n .and. abs(sTimesC(m,n)) > (0.0_pReal + 10.0e-12_pReal)) errmatinv = .true. ! off diagonal elements of S*C should be 0
 enddo
 enddo
-if((debugGeneral .or. errmatinv) .and. (worldrank == 0_pInt)) then   ! report
+if(debugGeneral .or. errmatinv) then
 write(formatString, '(I16.16)') size_reduced
 formatString = '(/,a,/,'//trim(formatString)//'('//trim(formatString)//'(2x,es9.2,1x)/))'
 write(6,trim(formatString),advance='no') ' C * S (load) ', &
@@ -845,7 +826,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
 else
   temp99_real = 0.0_pReal
 endif
-if(debugGeneral .and. worldrank == 0_pInt) &                         ! report
+if(debugGeneral) &
   write(6,'(/,a,/,9(9(2x,f12.7,1x)/),/)',advance='no') ' Masked Compliance (load) * GPa =', &
     transpose(temp99_Real*1.e9_pReal)
 flush(6)
@@ -938,15 +919,11 @@ end subroutine utilities_fourierTensorDivergence
 !--------------------------------------------------------------------------------------------------
 subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
                                           P,C_volAvg,C_minmaxAvg,P_av,forwardData,rotation_BC)
 use prec, only: &
   dNeq
-use IO, only: &
-  IO_error
 use debug, only: &
   debug_reset, &
   debug_info
-use numerics, only: &
-  worldrank
 use math, only: &
   math_transpose33, &
   math_rotate_forward33, &
@@ -974,7 +951,7 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &

 real(pReal),intent(out), dimension(3,3,3,3) :: C_volAvg, C_minmaxAvg !< average stiffness
 real(pReal),intent(out), dimension(3,3) :: P_av                      !< average PK stress
 real(pReal),intent(out), dimension(3,3,grid(1),grid(2),grid3) :: P   !< PK stress

 logical :: &
   age
@@ -984,26 +961,18 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
 real(pReal), dimension(3,3,3,3) :: max_dPdF, min_dPdF
 real(pReal) :: max_dPdF_norm, min_dPdF_norm, defgradDetMin, defgradDetMax, defgradDet

 external :: &
   MPI_Reduce, &
   MPI_Allreduce

-if (worldrank == 0_pInt) then
-  write(6,'(/,a)') ' ... evaluating constitutive response ......................................'
-  flush(6)
-endif
+write(6,'(/,a)') ' ... evaluating constitutive response ......................................'
+flush(6)
 age = .False.

 if (forwardData) then                                                ! aging results
   age = .True.
   materialpoint_F0 = reshape(F_lastInc, [3,3,1,product(grid(1:2))*grid3])
 endif
-if (cutBack) then                                                    ! restore saved variables
-  age = .False.
-endif
+if (cutBack) age = .False.                                           ! restore saved variables

 materialpoint_F = reshape(F,[3,3,1,product(grid(1:2))*grid3])
-call debug_reset()
+call debug_reset()                                                   ! this has no effect on rank >0

 !--------------------------------------------------------------------------------------------------
 ! calculate bounds of det(F) and report
@@ -1015,15 +984,10 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
   defgradDetMax = max(defgradDetMax,defgradDet)
   defgradDetMin = min(defgradDetMin,defgradDet)
 end do
-call MPI_reduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max')
-call MPI_reduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min')
-if (worldrank == 0_pInt) then
-  write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax
-  write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin
-  flush(6)
-endif
+write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax
+write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin
+flush(6)

 call CPFEM_general(age,timeinc)
@@ -1053,7 +1017,7 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &

 call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)

-call debug_info()
+call debug_info()                                                    ! this has no effect on rank >0

 restartWrite = .false.                                               ! reset restartWrite status
 cutBack = .false.                                                    ! reset cutBack status
@@ -1061,15 +1025,13 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
 P = reshape(materialpoint_P, [3,3,grid(1),grid(2),grid3])
 P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt                      ! average of P
 call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
-if (debugRotation .and. worldrank == 0_pInt) &
+if (debugRotation) &
   write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress (lab) / MPa =',&
     math_transpose33(P_av)*1.e-6_pReal
 P_av = math_rotate_forward33(P_av,rotation_BC)
-if (worldrank == 0_pInt) then
-  write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',&
-    math_transpose33(P_av)*1.e-6_pReal
-  flush(6)
-endif
+write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',&
+  math_transpose33(P_av)*1.e-6_pReal
+flush(6)

 end subroutine utilities_constitutiveResponse
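P_av above is the volume average of the Piola-Kirchhoff field: each rank sums its own grid slab, weights by wgt = 1/(total number of grid points), and an in-place MPI_Allreduce adds the partial averages across ranks. A sketch of the same reduction pattern, with a serial loop standing in for the allreduce and hypothetical grid sizes:

    program average_demo
      implicit none
      integer, parameter :: nx = 2, ny = 2, nzLocal = 3, nRanks = 2
      real :: P(3,3,nx,ny,nzLocal), P_av(3,3), wgt
      integer :: rank
      wgt = 1.0/real(nx*ny*nzLocal*nRanks)        ! 1/total grid points over ALL ranks
      P_av = 0.0
      do rank = 1, nRanks                         ! stand-in for MPI_Allreduce(MPI_IN_PLACE,...)
        P = real(rank)                            ! mock per-rank stress field
        P_av = P_av + sum(sum(sum(P,dim=5),dim=4),dim=3)*wgt
      end do
      print '(a,f8.4)', 'P_av(1,1) = ', P_av(1,1) ! = mean of the rank values = 1.5
    end program average_demo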
@@ -1,35 +1,25 @@
 # Tasan et.al. 2015 Acta Materalia
 # Tasan et.al. 2015 International Journal of Plasticity
 # Diehl et.al. 2015 Meccanica
-[BCC_Ferrite]
+[BCC-Ferrite]

 elasticity          hooke
 plasticity          phenopowerlaw

 lattice_structure   bcc
 Nslip               12  12               # per family
 Ntwin                0                   # per family
 c11                 233.3e9
 c12                 135.5e9
 c44                 118.0e9
 gdot0_slip          0.001
 n_slip              20
-tau0_slip            95.e6  97.e6  0 0   # per family, optimization long simplex 109
-tausat_slip         222.e6 412.7e6 0 0   # per family, optimization long simplex 109
-gdot0_twin          0.001
-n_twin              20
-tau0_twin           31.0e6               # per family
-s_pr                0                    # push-up factor for slip saturation due to twinning
-twin_b              0
-twin_c              0
-twin_d              0
-twin_e              0
-h0_slipslip         1000.0e6             # opti
-h0_twinslip         0
-h0_twintwin         0
+tau0_slip            95.e6  97.e6        # per family, optimization long simplex 109
+tausat_slip         222.e6 412.7e6       # per family, optimization long simplex 109
+h0_slipslip         1000.0e6
 interaction_slipslip 1 1 1.4 1.4 1.4 1.4
 interaction_sliptwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 interaction_twinslip 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 interaction_twintwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
-w0_slip             2.0                  # opti
+w0_slip             2.0
 atol_resistance     1
 (output) totalshear
@@ -1,34 +1,25 @@
 # Tasan et.al. 2015 Acta Materalia
 # Tasan et.al. 2015 International Journal of Plasticity
 # Diehl et.al. 2015 Meccanica
-[BCC_Martensite]
-plasticity          phenopowerlaw
+[BCC-Martensite]

+elasticity          hooke
+plasticity          phenopowerlaw

 lattice_structure   bcc
 Nslip               12  12               # per family
 Ntwin                0                   # per family
 c11                 417.4e9
 c12                 242.4e9
 c44                 211.1e9
 gdot0_slip          0.001
 n_slip              20
-tau0_slip           405.8e6 456.7e6 0 0  # per family
-tausat_slip         872.9e6 971.2e6 0 0  # per family
-gdot0_twin          0.001
-n_twin              20
-tau0_twin           31.0e6               # per family
-s_pr                0                    # push-up factor for slip saturation due to twinning
-twin_b              0
-twin_c              0
-twin_d              0
-twin_e              0
+tau0_slip           405.8e6 456.7e6      # per family
+tausat_slip         872.9e6 971.2e6      # per family
 h0_slipslip         563.0e9
-h0_twinslip         0
-h0_twintwin         0
 interaction_slipslip 1 1 1.4 1.4 1.4 1.4
 interaction_sliptwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 interaction_twinslip 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 interaction_twintwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 w0_slip             2.0
 atol_resistance     1
 (output) totalshear
@@ -1,12 +0,0 @@
-include ${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/variables
-include ${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/rules
-
-
-run16x16x16:
-	-@${MPIEXEC} -n 2 DAMASK_spectral -l tensionX.load -g 20grains16x16x16.geom
-
-run32x32x32:
-	-@${MPIEXEC} -n 4 DAMASK_spectral -l tensionX.load -g 20grains32x32x32.geom
-
-run64x64x64:
-	-@${MPIEXEC} -n 8 DAMASK_spectral -l tensionX.load -g 20grains64x64x64.geom
@@ -11,7 +11,15 @@ bin_link = { \
          ],
        }

-MarcReleases =[2011,2012,2013,2013.1,2014,2014.2,2015]
+MarcReleases =[ \
+               '2011',
+               '2012',
+               '2013',
+               '2013.1',
+               '2014',
+               '2014.2',
+               '2015',
+              ]

damaskEnv = damask.Environment()
baseDir = damaskEnv.relPath('code/')
@@ -20,27 +28,41 @@ binDir = damaskEnv.options['DAMASK_BIN']
if not os.path.isdir(binDir):
  os.mkdir(binDir)

for dir in bin_link:
  for file in bin_link[dir]:
    src = os.path.abspath(os.path.join(baseDir,dir,file))
    if os.path.exists(src):
      sym_link = os.path.abspath(os.path.join(binDir,\
                                              {True: dir,
                                               False:os.path.splitext(file)[0]}[file == '']))
      if os.path.lexists(sym_link): os.remove(sym_link)
      os.symlink(src,sym_link)
      sys.stdout.write(sym_link+' -> '+src+'\n')
sys.stdout.write('\nsymbolic linking...\n')

for subDir in bin_link:
  theDir = os.path.abspath(os.path.join(baseDir,subDir))
  sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n')

  for theFile in bin_link[subDir]:
    theName,theExt = os.path.splitext(theFile)
    src = os.path.abspath(os.path.join(theDir,theFile))

    if os.path.exists(src):
      sym_link = os.path.abspath(os.path.join(binDir,subDir if theFile == '' else theName))

      if os.path.lexists(sym_link):
        os.remove(sym_link)
        output = theName+damask.util.deemph(theExt)
      else:
        output = damask.util.emph(theName)+damask.util.deemph(theExt)

      sys.stdout.write(damask.util.deemph('... ')+output+'\n')
      os.symlink(src,sym_link)


sys.stdout.write('\nMSC.Marc versioning...\n\n')
theMaster = 'DAMASK_marc.f90'

for version in MarcReleases:
  src = os.path.abspath(os.path.join(baseDir,'DAMASK_marc.f90'))
  src = os.path.abspath(os.path.join(baseDir,theMaster))
  if os.path.exists(src):
    sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc'+str(version)+'.f90'))
    sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc{}.f90'.format(version)))
    if os.path.lexists(sym_link):
      os.remove(sym_link)
      sys.stdout.write(sym_link)
      output = version
    else:
      sys.stdout.write(damask.util.emph(sym_link))
      output = damask.util.emph(version)

    os.symlink(src,sym_link)
    sys.stdout.write(' -> '+src+'\n')
    sys.stdout.write(' '+output+'\n')
    os.symlink(theMaster,sym_link)

@@ -13,23 +13,47 @@ if not os.path.isdir(binDir):
  os.mkdir(binDir)

#define ToDo list
processing_subDirs = ['pre','post','misc',]
processing_extensions = ['.py','.sh',]

processing_subDirs = ['pre',
                      'post',
                      'misc',
                     ]
processing_extensions = ['.py',
                         '.sh',
                        ]

sys.stdout.write('\nsymbolic linking...\n')

for subDir in processing_subDirs:
  theDir = os.path.abspath(os.path.join(baseDir,subDir))

  sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n')

  for theFile in os.listdir(theDir):
    if os.path.splitext(theFile)[1] in processing_extensions:             # only consider files with proper extensions
    theName,theExt = os.path.splitext(theFile)
    if theExt in processing_extensions:                                   # only consider files with proper extensions

      src = os.path.abspath(os.path.join(theDir,theFile))
      sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0]))
      sym_link = os.path.abspath(os.path.join(binDir,theName))

      if os.path.lexists(sym_link):
        os.remove(sym_link)
        sys.stdout.write(sym_link)
        output = theName+damask.util.deemph(theExt)
      else:
        sys.stdout.write(damask.util.emph(sym_link))
        output = damask.util.emph(theName)+damask.util.deemph(theExt)

      sys.stdout.write(damask.util.deemph('... ')+output+'\n')
      os.symlink(src,sym_link)
      sys.stdout.write(' -> '+src+'\n')


sys.stdout.write('\npruning broken links...\n')

brokenLinks = 0

for filename in os.listdir(binDir):
  path = os.path.join(binDir,filename)
  if os.path.islink(path) and not os.path.exists(path):
    sys.stdout.write(' '+damask.util.delete(path)+'\n')
    os.remove(path)
    brokenLinks += 1

sys.stdout.write(('none.' if brokenLinks == 0 else '')+'\n')

@@ -3,6 +3,14 @@
import os,sys
import numpy as np

# ------------------------------------------------------------------
# Python 3 has no unicode object; this ensures that the code works on Python 2 and 3
try:
  test = isinstance('test', unicode)
except NameError:
  unicode = str

# ------------------------------------------------------------------
class ASCIItable():
  """Read and write to ASCII tables"""

@@ -158,12 +166,12 @@ class ASCIItable():

      if self.__IO__['labeled']:                                          # table features labels

        self.info = [self.__IO__['in'].readline().strip() for i in xrange(1,int(m.group(1)))]
        self.info = [self.__IO__['in'].readline().strip() for i in range(1,int(m.group(1)))]
        self.tags = shlex.split(self.__IO__['in'].readline())             # store tags found in last line

      else:

        self.info = [self.__IO__['in'].readline().strip() for i in xrange(0,int(m.group(1)))]  # all header is info ...
        self.info = [self.__IO__['in'].readline().strip() for i in range(0,int(m.group(1)))]   # all header is info ...

    else:                                                                 # other table format
      try:

@@ -224,11 +232,11 @@ class ASCIItable():
    extra_header = []

    for header in self.info:
      headitems = map(str.lower,header.split())
      headitems = list(map(str.lower,header.split()))
      if len(headitems) == 0: continue                                    # skip blank lines
      if headitems[0] in mappings.keys():
        if headitems[0] in identifiers.keys():
          for i in xrange(len(identifiers[headitems[0]])):
      if headitems[0] in list(mappings.keys()):
        if headitems[0] in list(identifiers.keys()):
          for i in range(len(identifiers[headitems[0]])):
            info[headitems[0]][i] = \
              mappings[headitems[0]](headitems[headitems.index(identifiers[headitems[0]][i])+1])
      else:

@@ -415,9 +423,9 @@ class ASCIItable():
    start = self.label_index(labels)
    dim = self.label_dimension(labels)

    return np.hstack(map(lambda c: xrange(c[0],c[0]+c[1]), zip(start,dim))) \
    return np.hstack([range(c[0],c[0]+c[1]) for c in zip(start,dim)]) \
           if isinstance(labels, Iterable) and not isinstance(labels, str) \
           else xrange(start,start+dim)
           else range(start,start+dim)

# ------------------------------------------------------------------
  def info_append(self,

@@ -447,7 +455,7 @@ class ASCIItable():
  def data_skipLines(self,
                     count):
    """wind forward by count number of lines"""
    for i in xrange(count):
    for i in range(count):
      alive = self.data_read()

    return alive

@@ -501,10 +509,10 @@ class ASCIItable():
      columns = []
      for i,(c,d) in enumerate(zip(indices[present],dimensions[present])):  # for all valid labels ...
        # ... transparently add all components unless column referenced by number or with explicit dimension
        columns += range(c,c + \
        columns += list(range(c,c + \
                         (d if str(c) != str(labels[present[i]]) else \
                          1))
      use = np.array(columns)
                          1)))
      use = np.array(columns) if len(columns) > 0 else None

      self.tags = list(np.array(self.tags)[use])                          # update labels with valid subset

@@ -530,7 +538,7 @@ class ASCIItable():
    """write whole numpy array data"""
    for row in self.data:
      try:
        output = [fmt % value for value in row] if fmt else map(repr,row)
        output = [fmt % value for value in row] if fmt else list(map(repr,row))
      except:
        output = [fmt % row] if fmt else [repr(row)]

@@ -555,7 +563,7 @@ class ASCIItable():
    try:
      idx = self.label_index(where)
      if len(self.data) <= idx:
        self.data_append(['n/a' for i in xrange(idx+1-len(self.data))])   # grow data if too short
        self.data_append(['n/a' for i in range(idx+1-len(self.data))])    # grow data if too short
      self.data[idx] = str(what)
    except ValueError:
      pass

@@ -568,7 +576,7 @@ class ASCIItable():

# ------------------------------------------------------------------
  def data_asFloat(self):
    return map(self._transliterateToFloat,self.data)
    return list(map(self._transliterateToFloat,self.data))


@@ -590,8 +598,8 @@ class ASCIItable():
            if len(items) > 2:
              if   items[1].lower() == 'of': items = np.ones(datatype(items[0]))*datatype(items[2])
              elif items[1].lower() == 'to': items = np.arange(datatype(items[0]),1+datatype(items[2]))
              else:                          items = map(datatype,items)
            else:                            items = map(datatype,items)
              else:                          items = list(map(datatype,items))
            else:                            items = list(map(datatype,items))

            s = min(len(items), N-i)                                      # prevent overflow of microstructure array
            microstructure[i:i+s] = items[:s]

@@ -33,7 +33,7 @@ class Color():
                       }

    model = model.upper()
    if model not in self.__transforms__.keys(): model = 'RGB'
    if model not in list(self.__transforms__.keys()): model = 'RGB'
    if model == 'RGB' and max(color) > 1.0:                               # are we RGB255 ?
      for i in range(3):
        color[i] /= 255.0                                                 # rescale to RGB

@@ -62,7 +62,7 @@ class Color():
# ------------------------------------------------------------------
  def convertTo(self,toModel = 'RGB'):
    toModel = toModel.upper()
    if toModel not in self.__transforms__.keys(): return
    if toModel not in list(self.__transforms__.keys()): return

    sourcePos = self.__transforms__[self.model]['index']
    targetPos = self.__transforms__[toModel]['index']

@@ -139,7 +139,7 @@ class Color():
    HSL[0] = HSL[0]*60.0                                                  # scaling to 360 might be dangerous for small values
    if (HSL[0] < 0.0):
      HSL[0] = HSL[0] + 360.0
    for i in xrange(2):
    for i in range(2):
      HSL[i+1] = min(HSL[i+1],1.0)
      HSL[i+1] = max(HSL[i+1],0.0)

@@ -164,11 +164,11 @@ class Color():
                        [0.212671,0.715160,0.072169],
                        [0.019334,0.119193,0.950227]])

    for i in xrange(3):
    for i in range(3):
      if (self.color[i] > 0.04045): RGB_lin[i] = ((self.color[i]+0.0555)/1.0555)**2.4
      else:                         RGB_lin[i] =   self.color[i]        /12.92
    XYZ = np.dot(convert,RGB_lin)
    for i in xrange(3):
    for i in range(3):

      XYZ[i] = max(XYZ[i],0.0)

@@ -193,10 +193,10 @@ class Color():
    RGB_lin = np.dot(convert,self.color)
    RGB = np.zeros(3,'d')

    for i in xrange(3):
    for i in range(3):
      if (RGB_lin[i] > 0.0031308): RGB[i] = ((RGB_lin[i])**(1.0/2.4))*1.0555-0.0555
      else:                        RGB[i] =   RGB_lin[i]             *12.92
    for i in xrange(3):
    for i in range(3):
      RGB[i] = min(RGB[i],1.0)
      RGB[i] = max(RGB[i],0.0)

@@ -225,7 +225,7 @@ class Color():
    XYZ[0] = XYZ[1] + self.color[1]/ 500.0
    XYZ[2] = XYZ[1] - self.color[2]/ 200.0

    for i in xrange(len(XYZ)):
    for i in range(len(XYZ)):
      if (XYZ[i] > 6./29. ): XYZ[i] = XYZ[i]**3.
      else:                  XYZ[i] = 108./841. * (XYZ[i] - 4./29.)

@@ -245,7 +245,7 @@ class Color():
    ref_white = np.array([.95047, 1.00000, 1.08883],'d')                  # Observer = 2, Illuminant = D65
    XYZ = self.color/ref_white

    for i in xrange(len(XYZ)):
    for i in range(len(XYZ)):
      if (XYZ[i] > 216./24389 ): XYZ[i] = XYZ[i]**(1.0/3.0)
      else:                      XYZ[i] = (841./108. * XYZ[i]) + 16.0/116.0

@@ -451,7 +451,7 @@ class Colormap():
    """
    format = format.lower()                                               # consistent comparison basis
    frac = 0.5*(np.array(crop) + 1.0)                                     # rescale crop range to fractions
    colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in xrange(steps)]
    colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in range(steps)]
    if format == 'paraview':
      colormap = ['[\n {{\n "ColorSpace" : "RGB", "Name" : "{}",\n "RGBPoints" : ['.format(name)] \
                + ['    {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],)

@@ -461,7 +461,7 @@ class Colormap():

    elif format == 'gmsh':
      colormap = ['View.ColorTable = {'] \
                + [',\n'.join(['{%s}'%(','.join(map(lambda x:str(x*255.0),color))) for color in colors])] \
                + [',\n'.join(['{%s}'%(','.join([str(x*255.0) for x in color])) for color in colors])] \
                + ['}']

    elif format == 'gom':

@@ -469,7 +469,7 @@ class Colormap():
                + ' 9 ' + str(name) \
                + ' 0 1 0 3 0 0 -1 9 \ 0 0 0 255 255 255 0 0 255 ' \
                + '30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 ' + str(len(colors)) \
                + ' '.join([' 0 %s 255 1'%(' '.join(map(lambda x:str(int(x*255.0)),color))) for color in reversed(colors)])]
                + ' '.join([' 0 %s 255 1'%(' '.join([str(int(x*255.0)) for x in color])) for color in reversed(colors)])]

    elif format == 'raw':
      colormap = ['\t'.join(map(str,color)) for color in colors]

@@ -19,7 +19,7 @@ class Section():
      self.parameters[key] = data[key]

    if '__order__' not in self.parameters:
      self.parameters['__order__'] = self.parameters.keys()
      self.parameters['__order__'] = list(self.parameters.keys())
    if part.lower() in classes:
      self.__class__ = classes[part.lower()]
      self.__init__(data)

@@ -61,11 +61,11 @@ class Texture(Section):

  def add_component(self,theType,properties):

    if 'scatter' not in map(str.lower,properties.keys()):
    if 'scatter' not in list(map(str.lower,list(properties.keys()))):
      scatter = 0.0
    else:
      scatter = properties['scatter']
    if 'fraction' not in map(str.lower,properties.keys()):
    if 'fraction' not in list(map(str.lower,list(properties.keys()))):
      fraction = 1.0
    else:
      fraction = properties['fraction']

@@ -224,10 +224,10 @@ class Material():
  def add_microstructure(self, section='',
                         components={},                                   # dict of phase,texture, and fraction lists
                        ):
    """Experimental! Needs expansion to multi-constituent microstructures..."""
    microstructure = Microstructure()
    # make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python)
    components=dict((k.lower(), v) for k,v in components.iteritems())
    components=dict((k.lower(), v) for k,v in components.items())

    for key in ['phase','texture','fraction','crystallite']:
      if type(components[key]) is not list:

@@ -1,6 +1,6 @@
# -*- coding: UTF-8 no BOM -*-

import os,subprocess,shlex,re,string
import os,subprocess,shlex,re

class Environment():
  __slots__ = [ \

@@ -22,7 +22,7 @@ class Environment():
      for line in configFile:
        l = re.sub('^set ', '', line).strip()                             # remove "set" (tcsh) when setting variables
        if l and not l.startswith('#'):
          items = map(string.strip,l.split('='))
          items = re.split(r'\s*=\s*',l)
          if len(items) == 2:
            self.options[items[0].upper()] = \
              re.sub('\$\{*DAMASK_ROOT\}*',self.rootDir(),os.path.expandvars(items[1]))  # expand all shell variables and DAMASK_ROOT

@@ -35,7 +35,7 @@ class Environment():
    try:
      cmd = """ ssh mulicense2 "/Stat_Flexlm | grep 'Users of %s: ' | cut -d' ' -f7,13" """%software
      process = subprocess.Popen(shlex.split(cmd),stdout = subprocess.PIPE,stderr = subprocess.PIPE)
      licenses = map(int, process.stdout.readline().split())
      licenses = list(map(int, process.stdout.readline().split()))
      try:
        if licenses[0]-licenses[1] >= Nneeded:
          return 0

@@ -15,7 +15,7 @@ class Geometry():
                   'spectral': damask.geometry.Spectral,
                   'marc':     damask.geometry.Marc,
                  }
    if solver.lower() in solverClass.keys():
    if solver.lower() in list(solverClass.keys()):
      self.__class__=solverClass[solver.lower()]
      self.__init__()

@@ -154,7 +154,7 @@ class Quaternion:

  def __div__(self, other):
    """division"""
    if isinstance(other, (int,float,long)):
    if isinstance(other, (int,float)):
      w = self.w / other
      x = self.x / other
      y = self.y / other

@@ -165,7 +165,7 @@ class Quaternion:

  def __idiv__(self, other):
    """in place division"""
    if isinstance(other, (int,float,long)):
    if isinstance(other, (int,float)):
      self.w /= other
      self.x /= other
      self.y /= other

@@ -556,7 +556,7 @@ class Symmetry:

  def __init__(self, symmetry = None):
    """lattice with given symmetry, defaults to None"""
    if isinstance(symmetry, basestring) and symmetry.lower() in Symmetry.lattices:
    if isinstance(symmetry, str) and symmetry.lower() in Symmetry.lattices:
      self.lattice = symmetry.lower()
    else:
      self.lattice = None

@@ -653,8 +653,8 @@ class Symmetry:
                  [ 1.0,0.0,0.0,0.0 ],
                ]

    return map(Quaternion,
               np.array(symQuats)[np.atleast_1d(np.array(who)) if who != [] else xrange(len(symQuats))])
    return list(map(Quaternion,
               np.array(symQuats)[np.atleast_1d(np.array(who)) if who != [] else range(len(symQuats))]))


  def equivalentQuaternions(self,

@@ -891,8 +891,7 @@ class Orientation:

  def equivalentOrientations(self,
                             who = []):
    return map(lambda q: Orientation(quaternion = q, symmetry = self.symmetry.lattice),
               self.equivalentQuaternions(who))
    return [Orientation(quaternion = q, symmetry = self.symmetry.lattice) for q in self.equivalentQuaternions(who)]

  def reduced(self):
    """Transform orientation to fall into fundamental zone according to symmetry"""

@@ -921,7 +920,7 @@ class Orientation:
    for i,sA in enumerate(mySymQs):
      for j,sB in enumerate(otherSymQs):
        theQ = sA.conjugated()*misQ*sB
        for k in xrange(2):
        for k in range(2):
          theQ.conjugate()
          breaker = self.symmetry.inFZ(theQ) \
                    and (not SST or other.symmetry.inDisorientationSST(theQ))

@@ -996,11 +995,17 @@ class Orientation:
                              relationModel,
                              direction,
                              targetSymmetry = None):
    """
    orientation relationship

    positive number: fcc --> bcc
    negative number: bcc --> fcc
    """
    if relationModel not in ['KS','GT','GTdash','NW','Pitsch','Bain']: return None
    if int(direction) == 0: return None

    # KS from S. Morito et al./Journal of Alloys and Compounds 5775 (2013) S587-S592 DOES THIS PAPER EXISTS?
    # KS from S. Morito et al./Journal of Alloys and Compounds 5775 (2013) S587-S592
    # for KS rotation matrices also check K. Kitahara et al./Acta Materialia 54 (2006) 1279-1288
    # GT from Y. He et al./Journal of Applied Crystallography (2006). 39, 72-81
    # GT' from Y. He et al./Journal of Applied Crystallography (2006). 39, 72-81
    # NW from H. Kitahara et al./Materials Characterization 54 (2005) 378-386

@@ -1227,14 +1232,14 @@ class Orientation:
      myPlane /= np.linalg.norm(myPlane)
      myNormal = [float(i) for i in normals[relationModel][variant,me]]   # map(float, planes[...]) does not work in python 3
      myNormal /= np.linalg.norm(myNormal)
      myMatrix = np.array([myPlane,myNormal,np.cross(myPlane,myNormal)])
      myMatrix = np.array([myNormal,np.cross(myPlane,myNormal),myPlane]).T

      otherPlane = [float(i) for i in planes[relationModel][variant,other]]    # map(float, planes[...]) does not work in python 3
      otherPlane /= np.linalg.norm(otherPlane)
      otherNormal = [float(i) for i in normals[relationModel][variant,other]]  # map(float, planes[...]) does not work in python 3
      otherNormal /= np.linalg.norm(otherNormal)
      otherMatrix = np.array([otherPlane,otherNormal,np.cross(otherPlane,otherNormal)])
      otherMatrix = np.array([otherNormal,np.cross(otherPlane,otherNormal),otherPlane]).T

      rot=np.dot(otherMatrix.T,myMatrix)
      rot=np.dot(otherMatrix,myMatrix.T)

    return Orientation(matrix=np.dot(rot,self.asMatrix()))                # no symmetry information ??

@@ -1,8 +1,6 @@
# -*- coding: UTF-8 no BOM -*-


import numpy as np
#import sys

try:
  import h5py

@@ -19,19 +17,19 @@ class Result():
  def __init__(self,resultsFile):
    self.data=h5py.File(resultsFile,"r")
    self.Npoints=self.data.attrs['Number of Materialpoints']
    print("Opened "+resultsFile+" with %i points"%self.Npoints)
    print("Opened {} with {} points".format(resultsFile,self.Npoints))

  def getCrystallite(self,labels,inc,constituent=1,points=None):
    if points is None: points = np.array(np.array(xrange(self.Npoints)))
    if points is None: points = np.array(np.array(range(self.Npoints)))
    results = {}
    mapping=self.data['mapping/crystallite']
    for instance in self.data['increments/%s/crystallite/'%inc]:
      dsets = self.data['increments/%s/crystallite/%s'%(inc,instance)].keys()
      dsets = list(self.data['increments/%s/crystallite/%s'%(inc,instance)].keys())
      for label in labels:
        if label in dsets and label not in results:
          shape = np.shape(self.data['increments/%s/crystallite/%s/%s'%(inc,instance,label)])[1:]
          results[label] = np.nan*np.ones(np.array((self.Npoints,)+shape))
    for myPoint in xrange(len(points)):
    for myPoint in range(len(points)):
      matPoint = points[myPoint]
      pos = mapping[matPoint,constituent-1]
      if pos[0] != 0:

@@ -41,5 +39,3 @@ class Result():
    except:
      pass
    return results

@@ -25,7 +25,7 @@ class Marc(Solver):

    MSCpath = damask.environment.Environment().options['MSC_ROOT']

    for release,subdirs in sorted(self.releases.items(),reverse=True):
    for release,subdirs in sorted(list(self.releases.items()),reverse=True):
      for subdir in subdirs:
        path = '%s/mentat%s/shlib/%s'%(MSCpath,release,subdir)
        if os.path.exists(path): return release

@@ -40,7 +40,7 @@ class Marc(Solver):

    MSCpath = damask.environment.Environment().options['MSC_ROOT']

    if len(releases) == 0: releases = self.releases.keys()
    if len(releases) == 0: releases = list(self.releases.keys())
    if type(releases) is not list: releases = [releases]
    for release in sorted(releases,reverse=True):
      if release not in self.releases: continue

@@ -15,7 +15,7 @@ class Solver():
                   'spectral': damask.solver.Spectral,
                   'marc':     damask.solver.Marc,
                  }
    if solver.lower() in solverClass.keys():
    if solver.lower() in list(solverClass.keys()):
      self.__class__=solverClass[solver.lower()]
      self.__init__()

@@ -17,92 +17,96 @@ class Test():

  variants = []

  def __init__(self,test_description):
  def __init__(self, **kwargs):

    defaults = {'description': '',
                'keep':          False,
                'accept':        False,
                'updateRequest': False,
               }
    for arg in defaults.keys():
      setattr(self,arg,kwargs.get(arg) if kwargs.get(arg) else defaults[arg])

    fh = logging.FileHandler('test.log')                                  # create file handler which logs even debug messages
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s'))

    ch = logging.StreamHandler(stream=sys.stdout)                         # create console handler with a higher log level
    ch.setLevel(logging.INFO)
    ch.setFormatter(logging.Formatter('%(message)s'))

    logger = logging.getLogger()
    logger.setLevel(0)
    fh = logging.FileHandler('test.log')                                  # create file handler which logs even debug messages
    fh.setLevel(logging.DEBUG)
    full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')
    fh.setFormatter(full)
    ch = logging.StreamHandler(stream=sys.stdout)                         # create console handler with a higher log level
    ch.setLevel(logging.INFO)
    # create formatter and add it to the handlers
    plain = logging.Formatter('%(message)s')
    ch.setFormatter(plain)
    # add the handlers to the logger
    logger.addHandler(fh)
    logger.addHandler(ch)
    logger.setLevel(0)

    logging.info('\n'.join(['+'*40,
                            '-'*40,
                            '| '+self.description,
                            '-'*40,
                           ]))

    logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n' \
                 +'----------------------------------------------------------------\n' \
                 +'| '+test_description+'\n' \
                 +'----------------------------------------------------------------')
    self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
    self.parser = OptionParser(
                  description = test_description+' (using class: {})'.format(damask.version),
                  usage='./test.py [options]')
    self.updateRequested = False
    self.parser.add_option("-d", "--debug", action="store_true",\
                           dest="debug",\
                           help="debug run, don't calculate but use existing results")
    self.parser.add_option("-p", "--pass", action="store_true",\
                           dest="accept",\
                           help="calculate results but always consider test as successful")
    self.parser.set_defaults(debug=False,
                             accept=False)

    self.parser = OptionParser(description = '{} (Test class version: {})'.format(self.description,damask.version),
                               usage = './test.py [options]')
    self.parser.add_option("-k", "--keep",
                           action = "store_true",
                           dest = "keep",
                           help = "keep current results, just run postprocessing")
    self.parser.add_option("--ok", "--accept",
                           action = "store_true",
                           dest = "accept",
                           help = "calculate results but always consider test as successful")

    self.parser.set_defaults(keep = self.keep,
                             accept = self.accept,
                             update = self.updateRequest,
                            )


  def execute(self):
    """Run all variants and report first failure."""
    if self.options.debug:
      for variant in xrange(len(self.variants)):
        try:
          self.postprocess(variant)
          if not self.compare(variant):
            return variant+1                                              # return culprit
        except Exception as e:
          logging.critical('\nWARNING:\n {}\n'.format(e))
          return variant+1                                                # return culprit
      return 0
    else:
      if not self.testPossible(): return -1
      if not self.options.keep:
        if not self.feasible(): return -1
        self.clean()
        self.prepareAll()
      for variant in xrange(len(self.variants)):
        try:

      for variant,name in enumerate(self.variants):
        try:
          if not self.options.keep:
            self.prepare(variant)
            self.run(variant)
          self.postprocess(variant)
          if self.updateRequested:                                        # update requested
            self.update(variant)
          elif not (self.options.accept or self.compare(variant)):        # no update, do comparison
            return variant+1                                              # return culprit
        except Exception as e:
          logging.critical('\nWARNING:\n {}\n'.format(e))
          return variant+1                                                # return culprit
      return 0

          self.postprocess(variant)

          if self.options.update:
            if self.update(variant) != 0: logging.critical('update for "{}" failed.'.format(name))
          elif not (self.options.accept or self.compare(variant)):        # no update, do comparison
            return variant+1                                              # return culprit

        except Exception as e:
          logging.critical('exception during variant execution: {}'.format(e))
          return variant+1                                                # return culprit
      return 0

  def testPossible(self):
    """Check if test is possible or not (e.g. no license available)."""
  def feasible(self):
    """Check whether test is possible or not (e.g. no license available)."""
    return True


  def clean(self):
    """Delete directory tree containing current results."""
    status = True

    try:
      shutil.rmtree(self.dirCurrent())
    except:
      logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
      status = status and False

    try:
      os.mkdir(self.dirCurrent())
      return True
    except:
      logging.critical('creation of directory "{}" failed...'.format(self.dirCurrent()))
      status = status and False

    return status
      logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent()))
      return False

  def prepareAll(self):
    """Do all necessary preparations for the whole test"""

@@ -111,7 +115,7 @@ class Test():
  def prepare(self,variant):
    """Do all necessary preparations for the run of each test variant"""
    return True


  def run(self,variant):
    """Execute the requested test variant."""

@@ -130,8 +134,8 @@ class Test():

  def update(self,variant):
    """Update reference with current results."""
    logging.debug('Update not necessary')
    return True
    logging.critical('update not supported.')
    return 1


  def dirReference(self):

@@ -143,17 +147,17 @@ class Test():
    """Directory containing current results of the test."""
    return os.path.normpath(os.path.join(self.dirBase,'current/'))


  def dirProof(self):
    """Directory containing human readable proof of correctness for the test."""
    return os.path.normpath(os.path.join(self.dirBase,'proof/'))


  def fileInRoot(self,dir,file):
    """Path to a file in the root directory of DAMASK."""
    return os.path.join(damask.Environment().rootDir(),dir,file)


  def fileInReference(self,file):
    """Path to a file in the reference directory for the test."""
    return os.path.join(self.dirReference(),file)

|
|||
"""Path to a file in the current results directory for the test."""
|
||||
return os.path.join(self.dirCurrent(),file)
|
||||
|
||||
|
||||
|
||||
def fileInProof(self,file):
|
||||
"""Path to a file in the proof directory for the test."""
|
||||
return os.path.join(self.dirProof(),file)
|
||||
|
@ -178,60 +182,60 @@ class Test():
|
|||
"""
|
||||
if not B or len(B) == 0: B = A
|
||||
|
||||
for source,target in zip(map(mapA,A),map(mapB,B)):
|
||||
for source,target in zip(list(map(mapA,A)),list(map(mapB,B))):
|
||||
try:
|
||||
shutil.copy2(source,target)
|
||||
shutil.copy2(source,target)
|
||||
except:
|
||||
logging.critical('error copying {} to {}'.format(source,target))
|
||||
|
||||
|
||||
def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]):
|
||||
|
||||
|
||||
if len(targetfiles) == 0: targetfiles = sourcefiles
|
||||
for i,file in enumerate(sourcefiles):
|
||||
try:
|
||||
shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
|
||||
shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
|
||||
except:
|
||||
logging.critical('Reference2Current: Unable to copy file "{}"'.format(file))
|
||||
|
||||
|
||||
|
||||
def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):
|
||||
|
||||
|
||||
source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir))
|
||||
if len(targetfiles) == 0: targetfiles = sourcefiles
|
||||
for i,file in enumerate(sourcefiles):
|
||||
try:
|
||||
shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i]))
|
||||
shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i]))
|
||||
except:
|
||||
logging.error(os.path.join(source,file))
|
||||
logging.critical('Base2Current: Unable to copy file "{}"'.format(file))
|
||||
|
||||
|
||||
def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]):
|
||||
|
||||
|
||||
if len(targetfiles) == 0: targetfiles = sourcefiles
|
||||
for i,file in enumerate(sourcefiles):
|
||||
try:
|
||||
shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i]))
|
||||
shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i]))
|
||||
except:
|
||||
logging.critical('Current2Reference: Unable to copy file "{}"'.format(file))
|
||||
|
||||
|
||||
|
||||
def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]):
|
||||
|
||||
|
||||
if len(targetfiles) == 0: targetfiles = sourcefiles
|
||||
for i,file in enumerate(sourcefiles):
|
||||
try:
|
||||
shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i]))
|
||||
shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i]))
|
||||
except:
|
||||
logging.critical('Proof2Current: Unable to copy file "{}"'.format(file))
|
||||
|
||||
|
||||
|
||||
def copy_Current2Current(self,sourcefiles=[],targetfiles=[]):
|
||||
|
||||
|
||||
for i,file in enumerate(sourcefiles):
|
||||
try:
|
||||
shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
|
||||
shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
|
||||
except:
|
||||
logging.critical('Current2Current: Unable to copy file "{}"'.format(file))
|
||||
|
||||
|
@ -243,11 +247,11 @@ class Test():
|
|||
|
||||
logging.info(error)
|
||||
logging.debug(out)
|
||||
|
||||
return out,error
|
||||
|
||||
|
||||
|
||||
return out,error
|
||||
|
||||
|
||||
|
||||
def compare_Array(self,File1,File2):
|
||||
|
||||
import numpy as np
|
||||
|
@@ -278,49 +282,49 @@ class Test():


  def compare_ArrayRefCur(self,ref,cur=''):

    if cur =='': cur = ref
    refName = self.fileInReference(ref)
    curName = self.fileInCurrent(cur)
    return self.compare_Array(refName,curName)


  def compare_ArrayCurCur(self,cur0,cur1):

    cur0Name = self.fileInCurrent(cur0)
    cur1Name = self.fileInCurrent(cur1)
    return self.compare_Array(cur0Name,cur1Name)

  def compare_Table(self,headings0,file0,headings1,file1,normHeadings='',normType=None,
                    absoluteTolerance=False,perLine=False,skipLines=[]):

    import numpy as np
    logging.info('\n '.join(['comparing ASCII Tables',file0,file1]))
    if normHeadings == '': normHeadings = headings0

    # check if comparison is possible and determine length of columns
    if len(headings0) == len(headings1) == len(normHeadings):
      dataLength = len(headings0)
      length = [1 for i in xrange(dataLength)]
      shape = [[] for i in xrange(dataLength)]
      data = [[] for i in xrange(dataLength)]
      maxError = [0.0 for i in xrange(dataLength)]
      absTol = [absoluteTolerance for i in xrange(dataLength)]
      column = [[1 for i in xrange(dataLength)] for j in xrange(2)]

      norm = [[] for i in xrange(dataLength)]
      normLength = [1 for i in xrange(dataLength)]
      normShape = [[] for i in xrange(dataLength)]
      normColumn = [1 for i in xrange(dataLength)]
      length = [1 for i in range(dataLength)]
      shape = [[] for i in range(dataLength)]
      data = [[] for i in range(dataLength)]
      maxError = [0.0 for i in range(dataLength)]
      absTol = [absoluteTolerance for i in range(dataLength)]
      column = [[1 for i in range(dataLength)] for j in range(2)]

      for i in xrange(dataLength):
        if headings0[i]['shape'] != headings1[i]['shape']:
      norm = [[] for i in range(dataLength)]
      normLength = [1 for i in range(dataLength)]
      normShape = [[] for i in range(dataLength)]
      normColumn = [1 for i in range(dataLength)]

      for i in range(dataLength):
        if headings0[i]['shape'] != headings1[i]['shape']:
          raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label']))
        shape[i] = headings0[i]['shape']
        for j in xrange(np.shape(shape[i])[0]):
        for j in range(np.shape(shape[i])[0]):
          length[i] *= shape[i][j]
        normShape[i] = normHeadings[i]['shape']
        for j in xrange(np.shape(normShape[i])[0]):
        for j in range(np.shape(normShape[i])[0]):
          normLength[i] *= normShape[i][j]
    else:
      raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0),

|
|||
table0 = damask.ASCIItable(name=file0,readonly=True)
|
||||
table0.head_read()
|
||||
table1 = damask.ASCIItable(name=file1,readonly=True)
|
||||
table1.head_read()
|
||||
table1.head_read()
|
||||
|
||||
for i in xrange(dataLength):
|
||||
for i in range(dataLength):
|
||||
key0 = ('1_' if length[i]>1 else '') + headings0[i]['label']
|
||||
key1 = ('1_' if length[i]>1 else '') + headings1[i]['label']
|
||||
normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label']
|
||||
|
@ -346,11 +350,11 @@ class Test():
|
|||
column[0][i] = table0.label_index(key0)
|
||||
column[1][i] = table1.label_index(key1)
|
||||
normColumn[i] = table0.label_index(normKey)
|
||||
|
||||
|
||||
line0 = 0
|
||||
while table0.data_read(): # read next data line of ASCII table
|
||||
if line0 not in skipLines:
|
||||
for i in xrange(dataLength):
|
||||
for i in range(dataLength):
|
||||
myData = np.array(map(float,table0.data[column[0][i]:\
|
||||
column[0][i]+length[i]]),'d')
|
||||
normData = np.array(map(float,table0.data[normColumn[i]:\
|
||||
|
@@ -361,12 +365,12 @@ class Test():
          else:
            norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType))
        line0 += 1

      for i in xrange(dataLength):

      for i in range(dataLength):
        if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))]
        data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]])
        if any(norm[i]) == 0.0 or absTol[i]:
          norm[i] = [1.0 for j in xrange(line0-len(skipLines))]
          norm[i] = [1.0 for j in range(line0-len(skipLines))]
          absTol[i] = True
          if perLine:
            logging.warning('At least one norm of {} in 1. table is 0.0, using absolute tolerance'.format(headings0[i]['label']))

|
|||
line1 = 0
|
||||
while table1.data_read(): # read next data line of ASCII table
|
||||
if line1 not in skipLines:
|
||||
for i in xrange(dataLength):
|
||||
for i in range(dataLength):
|
||||
myData = np.array(map(float,table1.data[column[1][i]:\
|
||||
column[1][i]+length[i]]),'d')
|
||||
column[1][i]+length[i]]),'d')
|
||||
maxError[i] = max(maxError[i],np.linalg.norm(np.reshape(myData-data[i][line1-len(skipLines),:],shape[i]))/
|
||||
norm[i][line1-len(skipLines)])
|
||||
line1 +=1
|
||||
|
@ -386,7 +390,7 @@ class Test():
|
|||
if (line0 != line1): raise Exception('found {} lines in 1. table but {} in 2. table'.format(line0,line1))
|
||||
|
||||
logging.info(' ********')
|
||||
for i in xrange(dataLength):
|
||||
for i in range(dataLength):
|
||||
if absTol[i]:
|
||||
logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i],
|
||||
headings0[i]['label'],
|
||||
|
@@ -424,7 +428,7 @@ class Test():
      if column is None: columns[i] = tables[i].labels(raw = True)        # if no column is given, read all

    logging.info('comparing ASCIItables statistically')
    for i in xrange(len(columns)):
    for i in range(len(columns)):
      columns[i] = columns[0] if not columns[i] else \
                   ([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
                     columns[i]

@@ -432,15 +436,15 @@ class Test():
      logging.info(files[i]+':'+','.join(columns[i]))

    if len(files) < 2: return True                                        # single table is always close to itself...

    data = []
    for table,labels in zip(tables,columns):
      table.data_readArray(labels)
      data.append(table.data)
      table.close()


    for i in xrange(1,len(data)):


    for i in range(1,len(data)):
      delta = data[i]-data[i-1]
      normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5
      normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)

|
|||
std = np.amax(np.std(normedDelta,0))
|
||||
logging.info('mean: {:f}'.format(mean))
|
||||
logging.info('std: {:f}'.format(std))
|
||||
|
||||
|
||||
return (mean<meanTol) & (std < stdTol)
|
||||
|
||||
|
||||
|
||||
def compare_Tables(self,
|
||||
files = [None,None], # list of file names
|
||||
files = [None,None], # list of file names
|
||||
columns = [None], # list of list of column labels (per file)
|
||||
rtol = 1e-5,
|
||||
atol = 1e-8,
|
||||
preFilter = -1.0,
|
||||
postFilter = -1.0,
|
||||
debug = False):
|
||||
"""
|
||||
compare tables with np.allclose
|
||||
|
||||
threshold can be used to ignore small values (a negative number disables this feature)
|
||||
"""
|
||||
rtol = 1e-5,
|
||||
atol = 1e-8,
|
||||
debug = False):
|
||||
"""compare multiple tables with np.allclose"""
|
||||
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
|
||||
files = [str(files)]
|
||||
|
||||
if len(files) < 2: return True # single table is always close to itself...
|
||||
|
||||
tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
|
||||
for table in tables:
|
||||
table.head_read()
|
||||
|
@ -477,54 +477,57 @@ class Test():
|
|||
columns = columns[:len(files)] # truncate to same length as files
|
||||
|
||||
for i,column in enumerate(columns):
|
||||
if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all
|
||||
if column is None: columns[i] = tables[i].labels(raw = False) # if no column is given, use all
|
||||
|
||||
logging.info('comparing ASCIItables')
|
||||
for i in xrange(len(columns)):
|
||||
for i in range(len(columns)):
|
||||
columns[i] = columns[0] if not columns[i] else \
|
||||
([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
|
||||
columns[i]
|
||||
)
|
||||
logging.info(files[i]+':'+','.join(columns[i]))
|
||||
logging.info(files[i]+': '+','.join(columns[i]))
|
||||
|
||||
if len(files) < 2: return True # single table is always close to itself...
|
||||
|
||||
maximum = np.zeros(len(columns[0]),dtype='f')
|
||||
data = []
|
||||
for table,labels in zip(tables,columns):
|
||||
table.data_readArray(labels)
|
||||
data.append(np.where(np.abs(table.data)<preFilter,np.zeros_like(table.data),table.data))
|
||||
maximum += np.abs(table.data).max(axis=0)
|
||||
table.close()
|
||||
|
||||
maximum /= len(tables)
|
||||
maximum = np.where(maximum >0.0, maximum, 1) # avoid div by zero for empty columns
|
||||
for i in xrange(len(data)):
|
||||
data[i] /= maximum
|
||||
|
||||
mask = np.zeros_like(table.data,dtype='bool')
|
||||
dimensions = tables[0].label_dimension(columns[0]) # width of each requested column
|
||||
maximum = np.zeros_like(columns[0],dtype=float) # one magnitude per column entry
|
||||
data = [] # list of feature table extracted from each file (ASCII table)
|
||||
|
||||
for i,(table,labels) in enumerate(zip(tables,columns)):
|
||||
if np.any(dimensions != table.label_dimension(labels)): # check data object consistency
|
||||
logging.critical('Table {} differs in data layout.'.format(files[i]))
|
||||
return False
|
||||
table.data_readArray(labels) # read data, ...
|
||||
data.append(table.data) # ... store, ...
|
||||
table.close() # ... close
|
||||
|
||||
for j,label in enumerate(labels): # iterate over object labels
|
||||
maximum[j] = np.maximum(\
|
||||
maximum[j],
|
||||
np.amax(np.linalg.norm(table.data[:,table.label_indexrange(label)],
|
||||
axis=1))
|
||||
) # find maximum Euclidean norm across rows
|
||||
|
||||
maximum = np.where(maximum > 0.0, maximum, 1.0) # avoid div by zero for zero columns
|
||||
maximum = np.repeat(maximum,dimensions) # spread maximum over columns of each object
|
||||
|
||||
for i in range(len(data)):
|
||||
data[i] /= maximum # normalize each table
|
||||
|
||||
if debug:
|
||||
logging.debug(str(maximum))
|
||||
allclose = np.absolute(data[0]-data[1]) <= (atol + rtol*np.absolute(data[1]))
|
||||
for ok,valA,valB in zip(allclose,data[0],data[1]):
|
||||
logging.debug('{}:\n{}\n{}'.format(ok,valA,valB))
|
||||
|
||||
for table in data:
|
||||
mask |= np.where(np.abs(table)<postFilter,True,False) # mask out (all) tiny values
|
||||
|
||||
|
||||
allclose = True # start optimistic
|
||||
for i in xrange(1,len(data)):
|
||||
if debug:
|
||||
t0 = np.where(mask,0.0,data[i-1])
|
||||
t1 = np.where(mask,0.0,data[i ])
|
||||
j = np.argmin(np.abs(t1)*rtol+atol-np.abs(t0-t1))
|
||||
logging.info('{:f}'.format(np.amax(np.abs(t0-t1)/(np.abs(t1)*rtol+atol))))
|
||||
logging.info('{:f} {:f}'.format((t0*maximum).flatten()[j],(t1*maximum).flatten()[j]))
|
||||
allclose &= np.allclose(np.where(mask,0.0,data[i-1]),
|
||||
np.where(mask,0.0,data[i ]),rtol,atol) # accumulate "pessimism"
|
||||
for i in range(1,len(data)):
|
||||
allclose &= np.allclose(data[i-1],data[i],rtol,atol) # accumulate "pessimism"
|
||||
|
||||
return allclose
|
||||
|
||||
|
||||
|
||||
def compare_TableRefCur(self,headingsRef,ref,headingsCur='',cur='',normHeadings='',normType=None,\
|
||||
absoluteTolerance=False,perLine=False,skipLines=[]):
|
||||
|
||||
|
||||
if cur == '': cur = ref
|
||||
if headingsCur == '': headingsCur = headingsRef
|
||||
refName = self.fileInReference(ref)
|
||||
|
@ -532,10 +535,10 @@ class Test():
|
|||
return self.compare_Table(headingsRef,refName,headingsCur,curName,normHeadings,normType,
|
||||
absoluteTolerance,perLine,skipLines)
|
||||
|
||||
|
||||
|
||||
def compare_TableCurCur(self,headingsCur0,Cur0,Cur1,headingsCur1='',normHeadings='',normType=None,\
|
||||
absoluteTolerance=False,perLine=False,skipLines=[]):
|
||||
|
||||
|
||||
if headingsCur1 == '': headingsCur1 = headingsCur0
|
||||
cur0Name = self.fileInCurrent(Cur0)
|
||||
cur1Name = self.fileInCurrent(Cur1)
|
||||
|
@ -544,15 +547,17 @@ class Test():
|
|||
|
||||
|
||||
def report_Success(self,culprit):
|
||||
|
||||
|
||||
ret = culprit
|
||||
|
||||
if culprit == 0:
|
||||
logging.critical(('The test' if len(self.variants) == 1 else 'All {} tests'.format(len(self.variants))) + ' passed')
|
||||
logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
|
||||
return 0
|
||||
if culprit == -1:
|
||||
logging.warning('Warning: Could not start test')
|
||||
return 0
|
||||
msg = 'The test passed' if len(self.variants) == 1 \
|
||||
else 'All {} tests passed.'.format(len(self.variants))
|
||||
elif culprit == -1:
|
||||
msg = 'Warning: Could not start test...'
|
||||
ret = 0
|
||||
else:
|
||||
logging.critical(' ********\n * Test {} failed...\n ********'.format(culprit))
|
||||
logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
|
||||
return culprit
|
||||
msg = ' * Test "{}" failed.'.format(self.variants[culprit-1])
|
||||
|
||||
logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n')
|
||||
return ret
|
||||
|
|
|
@@ -20,6 +20,7 @@ class bcolors:
  FAIL      = '\033[91m'
  ENDC      = '\033[0m'
  BOLD      = '\033[1m'
  DIM       = '\033[2m'
  UNDERLINE = '\033[4m'

  def disable(self):

@@ -40,7 +41,7 @@ def srepr(arg,glue = '\n'):
      hasattr(arg, "__getitem__") or
      hasattr(arg, "__iter__")):
    return glue.join(srepr(x) for x in arg)
  return arg if isinstance(arg,basestring) else repr(arg)
  return arg if isinstance(arg,str) else repr(arg)

# -----------------------------
def croak(what, newline = True):

@@ -70,9 +71,19 @@ def report_geom(info,

# -----------------------------
def emph(what):
  """emphasizes string on screen"""
  """boldens string"""
  return bcolors.BOLD+srepr(what)+bcolors.ENDC

# -----------------------------
def deemph(what):
  """dims string"""
  return bcolors.DIM+srepr(what)+bcolors.ENDC

# -----------------------------
def delete(what):
  """dims string"""
  return bcolors.DIM+srepr(what)+bcolors.ENDC

# -----------------------------
def execute(cmd,
            streamIn = None,

@@ -137,29 +148,22 @@ class extendableOption(Option):
class backgroundMessage(threading.Thread):
  """reporting with animation to indicate progress"""

  choices = {'bounce':   ['_', 'o', 'O', u'\u00B0',
                          u'\u203e',u'\u203e',u'\u00B0','O','o','_'],
             'spin':     [u'\u25dc',u'\u25dd',u'\u25de',u'\u25df'],
             'circle':   [u'\u25f4',u'\u25f5',u'\u25f6',u'\u25f7'],
             'hexagon':  [u'\u2b22',u'\u2b23'],
             'square':   [u'\u2596',u'\u2598',u'\u259d',u'\u2597'],
             'triangle': [u'\u140a',u'\u140a',u'\u1403',u'\u1405',u'\u1405',u'\u1403'],
             'amoeba':   [u'\u2596',u'\u258f',u'\u2598',u'\u2594',u'\u259d',u'\u2595',
                          u'\u2597',u'\u2582'],
             'beat':     [u'\u2581',u'\u2582',u'\u2583',u'\u2585',u'\u2586',u'\u2587',
                          u'\u2587',u'\u2586',u'\u2585',u'\u2583',u'\u2582',],
             'prison':   [u'\u168b',u'\u168c',u'\u168d',u'\u168f',u'\u168e',u'\u168d',
                          u'\u168c',u'\u168b',],
             'breath':   [u'\u1690',u'\u1691',u'\u1692',u'\u1693',u'\u1694',u'\u1693',
                          u'\u1692',u'\u1691',u'\u1690',],
             'pulse':    [u'·',u'•',u'\u25cf',u'\u25cf',u'•',],
             'ant':      [u'\u2801',u'\u2802',u'\u2810',u'\u2820',u'\u2804',u'\u2840',
                          u'\u2880',u'\u2820',u'\u2804',u'\u2802',u'\u2810',u'\u2808'],
             'juggle':   [u'\ua708',u'\ua709',u'\ua70a',u'\ua70b',u'\ua70c',u'\ua711',
                          u'\ua710',u'\ua70f',u'\ua70d',],
#            'wobbler':  [u'\u2581',u'\u25e3',u'\u258f',u'\u25e4',u'\u2594',u'\u25e5',u'\u2595',u'\u25e2',],
             'grout':    [u'\u2581',u'\u258f',u'\u2594',u'\u2595',],
             'partner':  [u'\u26ac',u'\u26ad',u'\u26ae',u'\u26af',u'\u26ae',u'\u26ad',],
  choices = {'bounce':   ['_', 'o', 'O', '°', '‾', '‾', '°', 'O', 'o', '_'],
             'spin':     ['◜', '◝', '◞', '◟'],
             'circle':   ['◴', '◵', '◶', '◷'],
             'hexagon':  ['⬢', '⬣'],
             'square':   ['▖', '▘', '▝', '▗'],
             'triangle': ['ᐊ', 'ᐊ', 'ᐃ', 'ᐅ', 'ᐅ', 'ᐃ'],
             'amoeba':   ['▖', '▏', '▘', '▔', '▝', '▕', '▗', '▂'],
             'beat':     ['▁', '▂', '▃', '▅', '▆', '▇', '▇', '▆', '▅', '▃', '▂'],
             'prison':   ['ᚋ', 'ᚌ', 'ᚍ', 'ᚏ', 'ᚎ', 'ᚍ', 'ᚌ', 'ᚋ'],
             'breath':   ['ᚐ', 'ᚑ', 'ᚒ', 'ᚓ', 'ᚔ', 'ᚓ', 'ᚒ', 'ᚑ', 'ᚐ'],
             'pulse':    ['·', '•', '●', '●', '•'],
             'ant':      ['⠁', '⠂', '⠐', '⠠', '⠄', '⡀', '⢀', '⠠', '⠄', '⠂', '⠐', '⠈'],
             'juggle':   ['꜈', '꜉', '꜊', '꜋', '꜌', '꜑', '꜐', '꜏', '꜍'],
#            'wobbler':  ['▁', '◣', '▏', '◤', '▔', '◥', '▕', '◢'],
             'grout':    ['▁', '▏', '▔', '▕'],
             'partner':  ['⚬', '⚭', '⚮', '⚯', '⚮', '⚭'],
             'classic':  ['-', '\\', '|', '/',],
            }

@@ -171,7 +175,7 @@ class backgroundMessage(threading.Thread):
    self.new_message = ''
    self.counter = 0
    self.gap = ' '
    self.symbols = self.choices[symbol if symbol in self.choices else random.choice(self.choices.keys())]
    self.symbols = self.choices[symbol if symbol in self.choices else random.choice(list(self.choices.keys()))]
    self.waittime = wait

  def __quit__(self):

@@ -200,7 +204,7 @@ class backgroundMessage(threading.Thread):
  def print_message(self):
    length = len(self.symbols[self.counter] + self.gap + self.message)
    sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length + \
                     self.symbols[self.counter].encode('utf-8') + self.gap + self.new_message)  # delete former and print new message
                     self.symbols[self.counter] + self.gap + self.new_message)                  # delete former and print new message
    sys.stderr.flush()
    self.message = self.new_message

@@ -12,7 +12,7 @@ scriptID = ' '.join([scriptName,damask.version])
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """
Adds header to OIM grain file to make it accessible as ASCII table

""", version = scriptID)

@@ -13,7 +13,7 @@ scriptID = ' '.join([scriptName,damask.version])
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile[s]]', description = """
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [angfile[s]]', description = """
Convert TSL/EDAX *.ang file to ASCIItable

""", version = scriptID)

@@ -30,7 +30,7 @@ for name in filenames:
                              outname = os.path.splitext(name)[0]+'.txt' if name else name,
                              buffered = False, labeled = False)
  except: continue
  table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
  damask.util.report(scriptName,name)

# --- interpret header -----------------------------------------------------------------------------

@@ -2,6 +2,7 @@
# -*- coding: UTF-8 no BOM -*-

import os,sys
import numpy as np
from optparse import OptionParser
import damask

@@ -28,7 +29,7 @@ parser.add_option('-o','--offset',
parser.add_option('-l','--label',
                  dest = 'label',
                  action = 'extend', metavar = '<string LIST>',
                  help='heading of column(s) to be mapped')
                  help='column label(s) to be mapped')
parser.add_option('-a','--asciitable',
                  dest = 'asciitable',
                  type = 'string', metavar = 'string',

@@ -49,12 +50,13 @@ if options.map is None:
if options.asciitable is not None and os.path.isfile(options.asciitable):

  mappedTable = damask.ASCIItable(name = options.asciitable,
                                  buffered = False, readonly = True)
                                  buffered = False,
                                  readonly = True)
  mappedTable.head_read()                                                 # read ASCII header info of mapped table
  missing_labels = mappedTable.data_readArray(options.label)

  if len(missing_labels) > 0:
    mappedTable.croak('column{} {} not found...'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels)))
    damask.util.croak('column{} {} not found...'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels)))

else:
  parser.error('no mapped ASCIItable given.')

@@ -64,9 +66,8 @@ else:
if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              buffered = False)
  try: table = damask.ASCIItable(name = name,
                                 buffered = False)
  except: continue
  damask.util.report(scriptName,name)

@ -96,7 +97,10 @@ for name in filenames:
|
|||
|
||||
outputAlive = True
|
||||
while outputAlive and table.data_read(): # read next data line of ASCII table
|
||||
table.data_append(mappedTable.data[int(round(float(table.data[mappedColumn])))+options.offset-1]) # add all mapped data types
|
||||
try:
|
||||
table.data_append(mappedTable.data[int(round(float(table.data[mappedColumn])))+options.offset-1]) # add all mapped data types
|
||||
except IndexError:
|
||||
table.data_append(np.nan*np.ones_like(mappedTable.data[0]))
|
||||
outputAlive = table.data_write() # output processed line
|
||||
|
||||
# ------------------------------------------ output finalization -----------------------------------
|
||||
|
|
|
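The new try/except turns an out-of-range row reference into a row of NaNs instead of a crash. A minimal sketch of the pattern, with a made-up `mapped` array and `lookup` helper standing in for `mappedTable.data` and the loop body:

import numpy as np

mapped = np.array([[1.0, 10.0],
                   [2.0, 20.0],
                   [3.0, 30.0]])               # stand-in for mappedTable.data

def lookup(row_index, offset=0):
    """Return the mapped row, or a NaN row when the index exceeds the table."""
    try:
        return mapped[int(round(float(row_index))) + offset - 1]
    except IndexError:
        return np.nan*np.ones_like(mapped[0])  # keep the column count, flag missing data

print(lookup(2))    # [ 2. 20.]
print(lookup(99))   # [nan nan]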
@ -69,7 +69,8 @@ parser.add_option('-q', '--quaternion',

parser.set_defaults(output = [],
symmetry = damask.Symmetry.lattices[-1],
rotation = (0.,1.,1.,1.), # no rotation about 1,1,1
labrotation = (0.,1.,1.,1.), # no rotation about 1,1,1
crystalrotation = (0.,1.,1.,1.), # no rotation about 1,1,1
degrees = False,
)
@ -167,7 +167,6 @@ force = np.array(options.force)
force /= np.linalg.norm(force)

if options.normal:
damask.util.croak('got normal')
normal = np.array(options.normal)
normal /= np.linalg.norm(normal)
if abs(np.dot(force,normal)) > 1e-3:
@ -78,6 +78,7 @@ for name in filenames:
for type, data in items.iteritems():
for column in data['column']:
(u,v) = np.linalg.eigh(np.array(map(float,table.data[column:column+data['dim']])).reshape(data['shape']))
if np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0 : v[:, 2] *= -1.0 # ensure right-handed coordinate system
table.data_append(list(u))
table.data_append(list(v.transpose().reshape(data['dim'])))
outputAlive = table.data_write() # output processed line
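np.linalg.eigh returns an orthonormal eigenvector basis, but its determinant may be -1; flipping one column turns the reflection into a proper rotation, which is what the added line does. The check in isolation (any symmetric tensor will do):

import numpy as np

A = np.array([[2., 1., 0.],
              [1., 3., 0.],
              [0., 0., 1.]])                 # symmetric 3x3 tensor

u,v = np.linalg.eigh(A)                      # columns of v are orthonormal eigenvectors
if np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0:
    v[:,2] *= -1.0                           # flip third axis to restore right-handedness

assert np.isclose(np.linalg.det(v), 1.0)     # proper rotation, not a reflection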
@ -1,82 +0,0 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-

import os,sys
import numpy as np
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Replace all rows for which column 'label' has identical values by a single row containing their average.
Output table will contain as many rows as there are different (unique) values in the grouping column.

Examples:
For grain averaged values, replace all rows of particular 'texture' with a single row containing their average.
""", version = scriptID)

parser.add_option('-l','--label',
dest = 'label',
type = 'string', metavar = 'string',
help = 'column label for grouping rows')

(options,filenames) = parser.parse_args()

if options.label is None:
parser.error('no grouping column specified.')


# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try: table = damask.ASCIItable(name = name,
outname = os.path.join(
os.path.split(name)[0],
options.label+'_averaged_'+os.path.split(name)[1]
) if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)

# ------------------------------------------ sanity checks ---------------------------------------

table.head_read()
if table.label_dimension(options.label) != 1:
damask.util.croak('column {} is not of scalar dimension.'.format(options.label))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue

# ------------------------------------------ assemble info ---------------------------------------

table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()

# ------------------------------------------ process data --------------------------------

table.data_readArray()
rows,cols = table.data.shape

table.data = table.data[np.lexsort([table.data[:,table.label_index(options.label)]])]

values,index = np.unique(table.data[:,table.label_index(options.label)], return_index = True)
index = np.append(index,rows)
avgTable = np.empty((len(values), cols))

for j in xrange(cols) :
for i in xrange(len(values)) :
avgTable[i,j] = np.average(table.data[index[i]:index[i+1],j])

table.data = avgTable

# ------------------------------------------ output result -------------------------------

table.data_writeArray()
table.close() # close ASCII table
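The deleted script averages each group with a double Python loop. For reference, the same result follows in one vectorized step from the sort/unique bookkeeping the script already does; a sketch with made-up data:

import numpy as np

data = np.array([[1., 10.],
                 [2., 30.],
                 [1., 20.],
                 [2., 50.]])                           # column 0 is the grouping key

data = data[np.lexsort([data[:,0]])]                   # sort rows by key, as the script does
values,index = np.unique(data[:,0], return_index=True) # group start positions
counts = np.diff(np.append(index, len(data)))          # rows per group
avg = np.add.reduceat(data, index, axis=0)/counts[:,None]
print(avg)                                             # [[ 1. 15.], [ 2. 40.]]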
@ -20,7 +20,7 @@ Produces a binned grid of two columns from an ASCIItable, i.e. a two-dimensional

parser.add_option('-d','--data',
dest = 'data',
type='string', nargs = 2, metavar = 'string string',
type = 'string', nargs = 2, metavar = 'string string',
help = 'column labels containing x and y ')
parser.add_option('-w','--weight',
dest = 'weight',

@ -49,15 +49,15 @@ parser.add_option('-z','--zrange',
parser.add_option('-i','--invert',
dest = 'invert',
action = 'store_true',
help = 'invert probability density [%default]')
help = 'invert probability density')
parser.add_option('-r','--rownormalize',
dest = 'normRow',
action = 'store_true',
help = 'normalize probability density in each row [%default]')
help = 'normalize probability density in each row')
parser.add_option('-c','--colnormalize',
dest = 'normCol',
action = 'store_true',
help = 'normalize probability density in each column [%default]')
help = 'normalize probability density in each column')

parser.set_defaults(bins = (10,10),
type = ('linear','linear','linear'),

@ -79,7 +79,8 @@ result = np.zeros((options.bins[0],options.bins[1],3),'f')

if options.data is None: parser.error('no data columns specified.')

labels = options.data
labels = list(options.data)

if options.weight is not None: labels += [options.weight] # prevent character splitting of single string value

@ -106,7 +107,7 @@ for name in filenames:
# ------------------------------------------ sanity checks ----------------------------------------

missing_labels = table.data_readArray(labels)

if len(missing_labels) > 0:
damask.util.croak('column{} {} not found.'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels)))
table.close(dismiss = True)

@ -119,12 +120,11 @@ for name in filenames:
minmax[c] = np.log(minmax[c]) # change minmax to log, too

delta = minmax[:,1]-minmax[:,0]

for i in xrange(len(table.data)):
x = int(options.bins[0]*(table.data[i,0]-minmax[0,0])/delta[0])
y = int(options.bins[1]*(table.data[i,1]-minmax[1,0])/delta[1])
if x >= 0 and x < options.bins[0] and y >= 0 and y < options.bins[1]:
grid[x,y] += 1. if options.weight is None else table.data[i,2] # count (weighted) occurrences

(grid,xedges,yedges) = np.histogram2d(table.data[:,0],table.data[:,1],
bins=options.bins,
range=minmax,
weights=None if options.weight is None else table.data[:,2])

if options.normCol:
for x in xrange(options.bins[0]):

@ -136,7 +136,7 @@ for name in filenames:
sum = np.sum(grid[:,y])
if sum > 0.0:
grid[:,y] /= sum

if (minmax[2] == 0.0).all(): minmax[2] = [grid.min(),grid.max()] # auto scale from data
if minmax[2,0] == minmax[2,1]:
minmax[2,0] -= 1.

@ -147,7 +147,7 @@ for name in filenames:
if options.type[2].lower() == 'log':
grid = np.log(grid)
minmax[2] = np.log(minmax[2])

delta[2] = minmax[2,1]-minmax[2,0]

for x in xrange(options.bins[0]):
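The manual binning loop gives way to np.histogram2d, which bins, range-clips, and applies per-point weights in a single call; note that its range argument expects one (min,max) pair per axis. A self-contained sketch with random data:

import numpy as np

x = np.random.rand(1000)
y = np.random.rand(1000)
w = np.random.rand(1000)                                # optional per-point weights

grid,xedges,yedges = np.histogram2d(x, y,
                                    bins=(10,10),
                                    range=[[0.,1.],[0.,1.]],
                                    weights=w)          # omit weights to simply count occurrences
print(grid.shape)                                       # (10, 10)
print(np.isclose(grid.sum(), w.sum()))                  # True: every point lands in one bin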
@ -0,0 +1,131 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-

import os,sys
import math # noqa
import numpy as np
from optparse import OptionParser, OptionGroup
import damask

#"https://en.wikipedia.org/wiki/Center_of_mass#Systems_with_periodic_boundary_conditions"
def periodicAverage(Points, Box):
theta = (Points/Box[1]) * (2.0*np.pi)
xi = np.cos(theta)
zeta = np.sin(theta)
theta_avg = np.arctan2(-1.0*zeta.mean(), -1.0*xi.mean()) + np.pi
Pmean = Box[1] * theta_avg/(2.0*np.pi)
return Pmean

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Apply a user-specified function to condense all rows for which column 'label' has identical values into a single row.
Output table will contain as many rows as there are different (unique) values in the grouping column.

Examples:
For grain averaged values, replace all rows of particular 'texture' with a single row containing their average.
""", version = scriptID)

parser.add_option('-l','--label',
dest = 'label',
type = 'string', metavar = 'string',
help = 'column label for grouping rows')
parser.add_option('-f','--function',
dest = 'function',
type = 'string', metavar = 'string',
help = 'mapping function [%default]')
parser.add_option('-a','--all',
dest = 'all',
action = 'store_true',
help = 'apply mapping function also to grouping column')

group = OptionGroup(parser, "periodic averaging", "")

group.add_option('-p','--periodic',
dest = 'periodic',
action = 'store_true',
help = 'calculate average in periodic space defined by periodic length [%default]')
group.add_option('--boundary',
dest = 'boundary', metavar = 'MIN MAX',
type = 'float', nargs = 2,
help = 'define periodic box end points %default')

parser.add_option_group(group)

parser.set_defaults(function = 'np.average',
all = False,
periodic = False,
boundary = [0.0, 1.0])

(options,filenames) = parser.parse_args()

funcModule,funcName = options.function.split('.')

try:
mapFunction = getattr(locals().get(funcModule) or
globals().get(funcModule) or
__import__(funcModule),
funcName)
except:
mapFunction = None

if options.label is None:
parser.error('no grouping column specified.')
if not hasattr(mapFunction,'__call__'):
parser.error('function "{}" is not callable.'.format(options.function))


# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)

# ------------------------------------------ sanity checks ---------------------------------------

table.head_read()
if table.label_dimension(options.label) != 1:
damask.util.croak('column {} is not of scalar dimension.'.format(options.label))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
else:
grpColumn = table.label_index(options.label)

# ------------------------------------------ assemble info ---------------------------------------

table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()

# ------------------------------------------ process data --------------------------------

table.data_readArray()
rows,cols = table.data.shape

table.data = table.data[np.lexsort([table.data[:,grpColumn]])] # sort data by grpColumn

values,index = np.unique(table.data[:,grpColumn], return_index = True) # unique grpColumn values and their positions
index = np.append(index,rows) # add termination position
grpTable = np.empty((len(values), cols)) # initialize output

for i in xrange(len(values)): # iterate over groups (unique values in grpColumn)
if options.periodic :
grpTable[i] = periodicAverage(table.data[index[i]:index[i+1]],options.boundary) # apply periodicAverage mapping function
else :
grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply mapping function
if not options.all: grpTable[i,grpColumn] = table.data[index[i],grpColumn] # restore grouping column value

table.data = grpTable

# ------------------------------------------ output result -------------------------------

table.data_writeArray()
table.close() # close ASCII table
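periodicAverage implements the circular mean from the cited Wikipedia construction: positions are mapped to angles on a circle, averaged there, and mapped back, so clusters that straddle the periodic boundary average correctly. A small demonstration of why the plain mean fails there:

import numpy as np

def periodicAverage(points, box):
    """Circular mean of coordinates in a periodic box of length box[1]."""
    theta = (points/box[1])*(2.0*np.pi)                  # positions -> angles on the unit circle
    theta_avg = np.arctan2(-np.sin(theta).mean(), -np.cos(theta).mean()) + np.pi
    return box[1]*theta_avg/(2.0*np.pi)

cluster = np.array([0.95, 0.05])                         # straddles the boundary of [0,1)
print(np.average(cluster))                               # 0.5 -- arithmetic mean lands far away
print(periodicAverage(cluster, (0.0, 1.0)))              # ~0.0 (equivalently 1.0): the true midpoint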
@ -5,6 +5,7 @@ import os,vtk
import damask
from collections import defaultdict
from optparse import OptionParser
from vtk.util import numpy_support

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

@ -13,10 +14,10 @@ scriptID = ' '.join([scriptName,damask.version])
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp).

""", version = scriptID)
parser = OptionParser(option_class=damask.extendableOption,
usage='%prog options [file[s]]',
description = """Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp).""",
version = scriptID)

parser.add_option( '--vtk',
dest = 'vtk',

@ -30,19 +31,20 @@ parser.add_option('-r', '--render',
dest = 'render',
action = 'store_true',
help = 'open output in VTK render window')
parser.add_option('-s', '--scalar', dest='scalar', action='extend',
metavar ='<string LIST>',
help = 'scalar values')
parser.add_option('-v', '--vector',
dest = 'vector',
parser.add_option('-d', '--data',
dest = 'data',
action = 'extend', metavar = '<string LIST>',
help = 'vector value label(s)')
help = 'scalar/vector value(s) label(s)')
parser.add_option('-t', '--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'tensor (3x3) value label(s)')
parser.add_option('-c', '--color', dest='color', action='extend',
metavar ='<string LIST>',
help = 'RGB color tuples')

parser.set_defaults(scalar = [],
vector = [],
parser.set_defaults(data = [],
tensor = [],
color = [],
inplace = False,
render = False,

@ -94,59 +96,60 @@ for name in filenames:
errors = []
VTKarray = {}
active = defaultdict(list)

for datatype,dimension,label in [['scalar',1,options.scalar],
['vector',3,options.vector],
['color',3,options.color],

for datatype,dimension,label in [['data',99,options.vector],
['tensor',9,options.tensor],
['color' ,3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
me = label[i]
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
else:
remarks.append('adding {} "{}"...'.format(datatype,me))
active[datatype].append(me)

if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray()

VTKarray[me].SetNumberOfComponents(dimension)
VTKarray[me].SetName(label[i])

if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# ------------------------------------------ process data ---------------------------------------
# --------------------------------------- process and add data -----------------------------------

while table.data_read(): # read next data line of ASCII table

for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
theData = [table.data[i] for i in table.label_indexrange(me)] # read strings
if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
table.data_readArray([item for sublist in active.values() for item in sublist]) # read all requested data

table.input_close() # close input ASCII table
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
VTKtype = vtk.VTK_DOUBLE
VTKdata = table.data[:, table.label_indexrange(me)].copy() # copy to force contiguous layout

# ------------------------------------------ add data ---------------------------------------
if datatype == 'color':
VTKtype = vtk.VTK_UNSIGNED_CHAR
VTKdata = (VTKdata*255).astype(int) # translate to 0..255 UCHAR
elif datatype == 'tensor':
VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3])
VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6])
VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7])

for datatype,labels in active.items(): # loop over scalar,color
if datatype == 'color':
Polydata.GetPointData().SetScalars(VTKarray[active['color'][0]])
Polydata.GetCellData().SetScalars(VTKarray[active['color'][0]])
for me in labels: # loop over all requested items
Polydata.GetPointData().AddArray(VTKarray[me])
Polydata.GetCellData().AddArray(VTKarray[me])
VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype)
VTKarray[me].SetName(me)

if datatype == 'color':
Polydata.GetPointData().SetScalars(VTKarray[me])
Polydata.GetCellData().SetScalars(VTKarray[me])
else:
Polydata.GetPointData().AddArray(VTKarray[me])
Polydata.GetCellData().AddArray(VTKarray[me])

table.input_close() # close input ASCII table

# ------------------------------------------ output result ---------------------------------------

Polydata.Modified()
if vtk.VTK_MAJOR_VERSION <= 5: Polydata.Update()

# ------------------------------------------ output result ---------------------------------------

writer = vtk.vtkXMLPolyDataWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()

@ -155,7 +158,7 @@ for name in filenames:
else: writer.SetInputData(Polydata)
writer.Write()

# ------------------------------------------ render result ---------------------------------------
# ------------------------------------------ render result ---------------------------------------

if options.render:
mapper = vtk.vtkDataSetMapper()

@ -164,7 +167,7 @@ if options.render:
actor.SetMapper(mapper)

# Create the graphics structure. The renderer renders into the
# render window. The render window interactor captures mouse events
# render window. The render window interactively captures mouse events
# and will perform appropriate camera or actor manipulation
# depending on the nature of the events.

@ -179,7 +182,7 @@ if options.render:

iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)

iren.Initialize()
renWin.Render()
iren.Start()
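The rewrite replaces the per-line InsertNextTuple calls with one data_readArray pass and a bulk numpy_to_vtk conversion; deep=True makes VTK copy the buffer, so the numpy array can be reused or freed. A minimal sketch of the bulk conversion, assuming a working vtk Python module (the array contents are made up):

import numpy as np
import vtk
from vtk.util import numpy_support

data = np.random.rand(8,3).copy()                             # contiguous copy, as in the script
arr = numpy_support.numpy_to_vtk(num_array=data, deep=True,
                                 array_type=vtk.VTK_DOUBLE)   # deep copy detaches from the numpy buffer
arr.SetName('random')
print(arr.GetNumberOfComponents(), arr.GetNumberOfTuples())   # 3 8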
@ -3,6 +3,7 @@

import os,vtk
import damask
from vtk.util import numpy_support
from collections import defaultdict
from optparse import OptionParser

@ -30,21 +31,21 @@ parser.add_option('-r', '--render',
dest = 'render',
action = 'store_true',
help = 'open output in VTK render window')
parser.add_option('-s', '--scalar',
dest = 'scalar',
parser.add_option('-d', '--data',
dest = 'data',
action = 'extend', metavar = '<string LIST>',
help = 'scalar value label(s)')
parser.add_option('-v', '--vector',
dest = 'vector',
help = 'scalar/vector value(s) label(s)')
parser.add_option('-t', '--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'vector value label(s)')
help = 'tensor (3x3) value label(s)')
parser.add_option('-c', '--color',
dest = 'color',
action = 'extend', metavar = '<string LIST>',
help = 'RGB color tuple label')

parser.set_defaults(scalar = [],
vector = [],
parser.set_defaults(data = [],
tensor = [],
color = [],
inplace = False,
render = False,

@ -92,51 +93,51 @@ for name in filenames:
errors = []
VTKarray = {}
active = defaultdict(list)

for datatype,dimension,label in [['scalar',1,options.scalar],
['vector',3,options.vector],
['color',3,options.color],

for datatype,dimension,label in [['data',99,options.data],
['tensor',9,options.tensor],
['color' ,3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
me = label[i]
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
else:
remarks.append('adding {} "{}"...'.format(datatype,me))
active[datatype].append(me)

if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray()

VTKarray[me].SetNumberOfComponents(dimension)
VTKarray[me].SetName(label[i])

if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# ------------------------------------------ process data ---------------------------------------
# ------------------------------------------ process data ---------------------------------------

datacount = 0
table.data_readArray([item for sublist in active.values() for item in sublist]) # read all requested data

while table.data_read(): # read next data line of ASCII table
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
VTKtype = vtk.VTK_DOUBLE
VTKdata = table.data[:, table.label_indexrange(me)].copy() # copy to force contiguous layout

datacount += 1 # count data lines
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
theData = [table.data[i] for i in table.label_indexrange(me)] # read strings
if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
if datatype == 'color':
VTKtype = vtk.VTK_UNSIGNED_CHAR
VTKdata = (VTKdata*255).astype(int) # translate to 0..255 UCHAR
elif datatype == 'tensor':
VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3])
VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6])
VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7])

VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype)
VTKarray[me].SetName(me)

table.close() # close input ASCII table

# ------------------------------------------ add data ---------------------------------------
# ------------------------------------------ add data ---------------------------------------

if datacount == Npoints: mode = 'point'
elif datacount == Ncells: mode = 'cell'
if len(table.data) == Npoints: mode = 'point'
elif len(table.data) == Ncells: mode = 'cell'
else:
damask.util.croak('Data count is incompatible with grid...')
continue

@ -154,7 +155,7 @@ for name in filenames:
rGrid.Modified()
if vtk.VTK_MAJOR_VERSION <= 5: rGrid.Update()

# ------------------------------------------ output result ---------------------------------------
# ------------------------------------------ output result ---------------------------------------

writer = vtk.vtkXMLRectilinearGridWriter()
writer.SetDataModeToBinary()

@ -164,7 +165,7 @@ for name in filenames:
else: writer.SetInputData(rGrid)
writer.Write()

# ------------------------------------------ render result ---------------------------------------
# ------------------------------------------ render result ---------------------------------------

if options.render:
mapper = vtk.vtkDataSetMapper()

@ -188,7 +189,7 @@ if options.render:

iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)

iren.Initialize()
renWin.Render()
iren.Start()
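Both VTK scripts symmetrize incoming 3x3 tensors by averaging off-diagonal pairs of the flattened row-major 9-component layout before handing them to VTK. The index pairing in isolation, on made-up data:

import numpy as np

T = np.random.rand(5,9)                        # each row is a 3x3 tensor, flattened row-major
T[:,1] = T[:,3] = 0.5*(T[:,1]+T[:,3])          # average the xy/yx pair
T[:,2] = T[:,6] = 0.5*(T[:,2]+T[:,6])          # average the xz/zx pair
T[:,5] = T[:,7] = 0.5*(T[:,5]+T[:,7])          # average the yz/zy pair

full = T.reshape(-1,3,3)
assert np.allclose(full, np.transpose(full,(0,2,1)))  # now exactly symmetric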
@ -79,9 +79,9 @@ for name in filenames:
coords = [np.unique(table.data[:,i]) for i in xrange(3)]

if options.mode == 'cell':
coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + len(coords[i]) > 1]] + \
coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \
[coords[i][j-1] + coords[i][j] for j in xrange(1,len(coords[i]))] + \
[3.0 * coords[i][-1] - coords[i][-1 - (len(coords[i]) > 1)]]) for i in xrange(3)]
[3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in xrange(3)]

grid = np.array(map(len,coords),'i')
N = grid.prod() if options.mode == 'point' else (grid-1).prod()
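The two int(...) casts fix an operator-precedence trap: `0 + len(c) > 1` parses as `(0 + len(c)) > 1`, leaving a boolean, and numpy does not accept a plain boolean as a positional index (it is interpreted as a mask). In isolation:

import numpy as np

c = np.array([10., 20., 30.])

i = 0 + len(c) > 1                   # parses as (0+3) > 1 -> True: a bool, not the intended 0/1
j = 0 + int(len(c) > 1)              # explicit cast -> 1, as in the fixed line
print(i, j)                          # True 1
print(c[j])                          # 20.0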
@ -1,106 +0,0 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-

import os,sys
from optparse import OptionParser
import numpy as np
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

sampleSym = { 'Orthotropic' : (90,90,90),
'Triclinic' : (360,180,360)
}

# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Transform the binned texture data from "TSL OIM Analysis" into linear ODF data,

""", version = scriptID)

parser.add_option('-s', '--symmetry', dest='symmetry', choices=sampleSym.keys(),
metavar = 'string',
help='Sample symmetry {%s} [Triclinic]'%(' '.join(sampleSym.keys())))

parser.set_defaults(symmetry = 'Triclinic')

(options,filenames) = parser.parse_args()

#--- setup file handles ---------------------------------------------------------------------------
files = []
if filenames == []:
files.append({'name':'STDIN',
'input':sys.stdin,
'output':sys.stdout,
'croak':sys.stderr,
})
else:
for name in filenames:
if os.path.exists(name):
files.append({'name':name,
'input':open(name),
'output':open(name+'_tmp','w'),
'croak':sys.stdout,
})

#--- loop over input files ------------------------------------------------------------------------
for file in files:
file['croak'].write('\033[1m' + scriptName + '\033[0m: ' + (file['name'] if file['name'] != 'STDIN' else '') + '\n')

while True: # read header (forward and get bin Size)
line = file['input'].readline()
words = line.split()
if len(words)>=3:
if words[1]=='Bin' and words[2]=='Size:': binSize=float(words[3][:-1])
if not line.startswith('#'): break

delta = [sampleSym[options.symmetry][i]/binSize for i in xrange(3)]

nPhi1,nPHI,nPhi2 = map(int,delta)
dPhi1,dPHI,dPhi2 = [sampleSym[options.symmetry][i]/delta[i] for i in xrange(3)]

N = (nPhi1-1)*(nPHI-1)*(nPhi2-1)


ODF = [[[[None] for k in range(nPhi2)] for j in range(nPHI)] for i in range(nPhi1)]
linear = [None]*N

ODF = np.empty([nPhi1,nPHI,nPhi2],'d')

for iPhi1 in range(nPhi1):
for iPHI in range(nPHI):
for iPhi2 in range(nPhi2):
ODF[iPhi1,iPHI,iPhi2] = float(line.split()[3])*0.125 # extract intensity (in column 4) and weight by 1/8
line = file['input'].readline()

for iPhi1 in range(nPhi1-1):
for iPHI in range(nPHI-1):
for iPhi2 in range(nPhi2-1):
linear[iPhi1*(nPHI-1)*(nPhi2-1)+iPHI*(nPhi2-1)+iPhi2] =\
ODF[iPhi1 ,iPHI ,iPhi2 ] +\
ODF[iPhi1 ,iPHI ,iPhi2+1] +\
ODF[iPhi1 ,iPHI+1,iPhi2 ] +\
ODF[iPhi1 ,iPHI+1,iPhi2+1] +\
ODF[iPhi1+1,iPHI ,iPhi2 ] +\
ODF[iPhi1+1,iPHI ,iPhi2+1] +\
ODF[iPhi1+1,iPHI+1,iPhi2 ] +\
ODF[iPhi1+1,iPHI+1,iPhi2+1]


file['output'].write('4 header\n')
file['output'].write('limit phi1 %-6.2f Phi %-6.2f phi2 %-6.2f\n'%sampleSym[options.symmetry])
file['output'].write('delta phi1 %-6.2f Phi %-6.2f phi2 %-6.2f\n'%(dPhi1,dPHI,dPhi2))
file['output'].write('centration cell-centered\n')
file['output'].write('density\n')

for i in range(N):
file['output'].write('%g\n'%(linear[i]))

#--- output finalization --------------------------------------------------------------------------
if file['name'] != 'STDIN':
file['output'].close()
os.rename(file['name']+'_tmp',os.path.splitext(file['name'])[0] +'.linearODF')
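Should the removed accumulation ever be needed again, the eight-corner sum over all grid cells is a single expression with shifted slices rather than three nested loops; a sketch with a random stand-in for the binned ODF:

import numpy as np

ODF = np.random.rand(4,5,6)                  # binned intensities on the Euler grid

# sum of the eight corners of every grid cell, in the same C-order as the loop index:
linear = (ODF[:-1,:-1,:-1] + ODF[:-1,:-1,1:] + ODF[:-1,1:,:-1] + ODF[:-1,1:,1:]
        + ODF[1:, :-1,:-1] + ODF[1:, :-1,1:] + ODF[1:, 1:,:-1] + ODF[1:, 1:,1:]).flatten()
print(linear.shape)                          # ((4-1)*(5-1)*(6-1),) = (60,)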
@ -82,7 +82,7 @@ for name in filenames:

table.head_read()
info,extra_header = table.head_getGeom()

damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),

@ -126,9 +126,9 @@ for name in filenames:
primPos = invRotation*gridpos # rotate back to primitive coordinate system
if np.dot(mask*primPos/dim,mask*primPos/dim) <= 0.25 and \
np.all(abs((1.-mask)*primPos/dim) <= 0.5): # inside ellipsoid and inside box
microstructure[(gridpos[0]+options.center[0])%info['grid'][0],
(gridpos[1]+options.center[1])%info['grid'][1],
(gridpos[2]+options.center[2])%info['grid'][2]] = options.fill # assign microstructure index
microstructure[int((gridpos[0]+options.center[0])%info['grid'][0]),
int((gridpos[1]+options.center[1])%info['grid'][1]),
int((gridpos[2]+options.center[2])%info['grid'][2])] = options.fill # assign microstructure index

newInfo['microstructures'] = microstructure.max()

@ -153,7 +153,7 @@ for name in filenames:
table.labels_clear()
table.head_write()
table.output_flush()

# --- write microstructure information ------------------------------------------------------------

formatwidth = int(math.floor(math.log10(microstructure.max())+1))
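The ellipsoid fill wraps the shifted voxel position back into the box with a modulo and now casts the result to int, since numpy indexing rejects floats. The wrap-and-assign step in isolation (grid and fill value are made up):

import numpy as np

grid = np.array([4,4,4])
microstructure = np.zeros(grid, dtype=int)

gridpos = np.array([3.0, 0.0, 2.0])                    # float position after the inverse rotation
center  = np.array([2, 0, 0])
i,j,k = ((gridpos+center) % grid).astype(int)          # wrap into the box, cast to int for indexing
microstructure[i,j,k] = 5                              # float indices would raise an IndexError
print((i,j,k))                                         # (1, 0, 2)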
@ -81,17 +81,20 @@ for name in filenames:
periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]/2:-grid[0]/2,
grid[1]/2:-grid[1]/2,
grid[2]/2:-grid[2]/2] # periodically extend the microstructure

# store a copy of the initial microstructure to find locations of immutable indices
microstructure_original = np.copy(microstructure)

X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]

# Calculates gaussian weights for simulating 3d diffusion
gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d))/math.pow(2.0*np.pi*options.d*options.d,1.5)
gauss[:,:,grid[2]/2::] = gauss[:,:,round(grid[2]/2.)-1::-1] # trying to cope with uneven (odd) grid size
gauss[:,grid[1]/2::,:] = gauss[:,round(grid[1]/2.)-1::-1,:]
gauss[grid[0]/2::,:,:] = gauss[round(grid[0]/2.)-1::-1,:,:]
gauss[:,:,grid[2]/2::] = gauss[:,:,int(round(grid[2]/2.))-1::-1] # trying to cope with uneven (odd) grid size
gauss[:,grid[1]/2::,:] = gauss[:,int(round(grid[1]/2.))-1::-1,:]
gauss[grid[0]/2::,:,:] = gauss[int(round(grid[0]/2.))-1::-1,:,:]
gauss = np.fft.rfftn(gauss)

interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0
interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0 #1.0 if A & B are distinct & nonzero, 0.0 otherwise
struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood

@ -101,9 +104,11 @@ for name in filenames:
for j in (-1,0,1):
for k in (-1,0,1):
# assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
interfaceEnergy = np.maximum(boundary,
boundary = np.maximum(boundary,
interfacialEnergy(microstructure,np.roll(np.roll(np.roll(
microstructure,i,axis=0), j,axis=1), k,axis=2)))
interfaceEnergy = boundary

# periodically extend interfacial energy array by half a grid size in positive and negative directions
periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]/2:-grid[0]/2,
grid[1]/2:-grid[1]/2,
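The grain-growth smoothing convolves with a Gaussian in Fourier space; the kernel is built in the positive octant and mirrored about the half-grid planes so it is centered for the periodic FFT, and int(round(...)) keeps the slice bounds integral. A reduced sketch of the same construction (grid and kernel width are made up):

import numpy as np

grid = (32,32,32)
field = np.zeros(grid); field[16,16,16] = 1.0                     # delta spike to be diffused

X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]
d = 2.0                                                           # diffusion distance
gauss = np.exp(-(X*X+Y*Y+Z*Z)/(2.0*d*d))/np.power(2.0*np.pi*d*d,1.5)
gauss[:,:,grid[2]//2:] = gauss[:,:,int(round(grid[2]/2.))-1::-1]  # mirror kernel for periodicity
gauss[:,grid[1]//2:,:] = gauss[:,int(round(grid[1]/2.))-1::-1,:]
gauss[grid[0]//2:,:,:] = gauss[int(round(grid[0]/2.))-1::-1,:,:]

smoothed = np.fft.irfftn(np.fft.rfftn(field)*np.fft.rfftn(gauss), s=grid)
print(smoothed.sum())                                             # ~1.0: circular convolution conserves the total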
@ -0,0 +1,117 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-

import os,sys,math
import numpy as np
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

validDirections = ['x','y','z']
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Mirrors spectral geometry description along given directions.

""", version=scriptID)

parser.add_option('-d','--direction',
dest = 'directions',
action = 'extend', metavar = '<string LIST>',
help = "directions in which to mirror {'x','y','z'}")

(options, filenames) = parser.parse_args()

if options.directions is None:
parser.error('no direction given.')
if not set(options.directions).issubset(validDirections):
invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)]
parser.error('invalid directions {}. '.format(*invalidDirections))

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

table.head_read()
info,extra_header = table.head_getGeom()

damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])

errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# --- read data ------------------------------------------------------------------------------------

microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure

if 'z' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[:,:,::-1]],2)
if 'y' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[:,::-1,:]],1)
if 'x' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[::-1,:,:]],0)

# --- do work ------------------------------------------------------------------------------------

newInfo = {
'size': microstructure.shape*info['size']/info['grid'],
'grid': microstructure.shape,
}


# --- report ---------------------------------------------------------------------------------------

remarks = []
if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: %s'%(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: %s'%(' x '.join(map(str,newInfo['size']))))
if remarks != []: damask.util.croak(remarks)

# --- write header ---------------------------------------------------------------------------------

table.labels_clear()
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
extra_header
])
table.head_write()

# --- write microstructure information ------------------------------------------------------------

formatwidth = int(math.floor(math.log10(microstructure.max())+1))
table.data = microstructure.reshape((newInfo['grid'][0],np.prod(newInfo['grid'][1:])),order='F').transpose()
table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')

# --- output finalization --------------------------------------------------------------------------

table.close() # close ASCII table
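geom_mirror doubles the grid along each requested axis by appending a reversed copy, making the result mirror-symmetric about the original boundary. The core operation on a toy grid:

import numpy as np

m = np.arange(8).reshape(2,2,2)                 # toy microstructure grid

m = np.concatenate([m, m[:,:,::-1]], 2)         # mirror along z: shape grows to (2,2,4)
m = np.concatenate([m, m[:,::-1,:]], 1)         # then along y: (2,4,4)
print(m.shape)
print(m[0,0])                                   # [0 1 1 0] -- reflected copy appended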
@ -30,30 +30,37 @@ parser.add_option('-s', '--substitute',
dest = 'substitute',
action = 'extend', metavar = '<string LIST>',
help = 'substitutions of microstructure indices from,to,from,to,...')
parser.add_option('--float',
dest = 'real',
action = 'store_true',
help = 'use float input')

parser.set_defaults(origin = (0.0,0.0,0.0),
microstructure = 0,
substitute = [],
real = False,
)

(options, filenames) = parser.parse_args()

datatype = 'f' if options.real else 'i'

sub = {}
for i in xrange(len(options.substitute)/2): # split substitution list into "from" -> "to"
for i in xrange(len(options.substitute)/2): # split substitution list into "from" -> "to"
sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1])

# --- loop over input files -------------------------------------------------------------------------
# --- loop over input files ----------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------
# --- interpret header ---------------------------------------------------------------------------

table.head_read()
info,extra_header = table.head_getGeom()

@ -73,9 +80,9 @@ for name in filenames:
table.close(dismiss = True)
continue

# --- read data ------------------------------------------------------------------------------------
# --- read data ----------------------------------------------------------------------------------

microstructure = table.microstructure_read(info['grid']) # read microstructure
microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure

# --- do work ------------------------------------------------------------------------------------

@ -90,9 +97,9 @@ for name in filenames:
substituted += options.microstructure # shift microstructure indices

newInfo['origin'] = info['origin'] + options.origin
newInfo['microstructures'] = substituted.max()
newInfo['microstructures'] = len(np.unique(substituted))

# --- report ---------------------------------------------------------------------------------------
# --- report -------------------------------------------------------------------------------------

remarks = []
if (any(newInfo['origin'] != info['origin'])):

@ -101,7 +108,7 @@ for name in filenames:
remarks.append('--> microstructures: %i'%newInfo['microstructures'])
if remarks != []: damask.util.croak(remarks)

# --- write header ---------------------------------------------------------------------------------
# --- write header -------------------------------------------------------------------------------

table.labels_clear()
table.info_clear()

@ -116,12 +123,12 @@ for name in filenames:
])
table.head_write()

# --- write microstructure information ------------------------------------------------------------
# --- write microstructure information -----------------------------------------------------------

formatwidth = int(math.floor(math.log10(microstructure.max())+1))
format = '%g' if options.real else '%{}i'.format(int(math.floor(math.log10(microstructure.max())+1)))
table.data = substituted.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
table.data_writeArray(format,delimiter = ' ')

# --- output finalization --------------------------------------------------------------------------
# --- output finalization ------------------------------------------------------------------------

table.close() # close ASCII table
table.close() # close ASCII table
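With substitutions in play, distinct indices can disappear or collide, so the microstructure count is now the number of unique values rather than the maximum. A sketch of the from-to substitution itself, on toy indices (the masked loop mirrors what the script does, not its exact code):

import numpy as np

sub = {1: 7, 3: 9}                             # from,to pairs as parsed by the script
microstructure = np.array([1,2,3,3,1])

substituted = microstructure.copy()
for old,new in sub.items():
    substituted[microstructure == old] = new   # mask against the original to avoid chained replacements
print(substituted)                             # [7 2 9 9 7]
print(len(np.unique(substituted)))             # 3 distinct indices, as now reported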
@ -29,7 +29,7 @@ def kdtree_search(cloud, queryPoints):
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [options]', description = """
Distribute given number of points randomly within the three-dimensional cube [0.0,0.0,0.0]--[1.0,1.0,1.0].
Distribute given number of points randomly within (a fraction of) the three-dimensional cube [0.0,0.0,0.0]--[1.0,1.0,1.0].
Reports positions with random crystal orientations in seeds file format to STDOUT.

""", version = scriptID)

@ -38,6 +38,11 @@ parser.add_option('-N',
dest = 'N',
type = 'int', metavar = 'int',
help = 'number of seed points [%default]')
parser.add_option('-f',
'--fraction',
dest = 'fraction',
type = 'float', nargs = 3, metavar = 'float float float',
help='fractions along x,y,z of unit cube to fill %default')
parser.add_option('-g',
'--grid',
dest = 'grid',

@ -86,8 +91,7 @@ group.add_option( '-s',
action = 'store_true',
dest = 'selective',
help = 'selective picking of seed points from random seed points [%default]')
group.add_option( '-f',
'--force',
group.add_option( '--force',
action = 'store_true',
dest = 'force',
help = 'try selective picking despite large seed point number [%default]')

@ -103,6 +107,7 @@ parser.add_option_group(group)

parser.set_defaults(randomSeed = None,
grid = (16,16,16),
fraction = (1.0,1.0,1.0),
N = 20,
weights = False,
max = 0.0,

@ -118,6 +123,7 @@ parser.set_defaults(randomSeed = None,

(options,filenames) = parser.parse_args()

options.fraction = np.array(options.fraction)
options.grid = np.array(options.grid)
gridSize = options.grid.prod()

@ -160,16 +166,25 @@ for name in filenames:
grainEuler[2,:] *= 360.0 # phi_2 is uniformly distributed

if not options.selective:
seeds = np.array([])

while len(seeds) < options.N:

seeds = np.zeros((3,options.N),dtype='d') # seed positions array
gridpoints = random.sample(range(gridSize),options.N) # choose first N from random permutation of grid positions
theSeeds = np.zeros((options.N,3),dtype=float) # seed positions array
gridpoints = random.sample(range(gridSize),options.N) # choose first N from random permutation of grid positions

seeds[0,:] = (np.mod(gridpoints ,options.grid[0])\
+np.random.random(options.N)) /options.grid[0]
seeds[1,:] = (np.mod(gridpoints// options.grid[0] ,options.grid[1])\
+np.random.random(options.N)) /options.grid[1]
seeds[2,:] = (np.mod(gridpoints//(options.grid[1]*options.grid[0]),options.grid[2])\
+np.random.random(options.N)) /options.grid[2]
theSeeds[:,0] = (np.mod(gridpoints ,options.grid[0])\
+np.random.random(options.N)) /options.grid[0]
theSeeds[:,1] = (np.mod(gridpoints// options.grid[0] ,options.grid[1])\
+np.random.random(options.N)) /options.grid[1]
theSeeds[:,2] = (np.mod(gridpoints//(options.grid[1]*options.grid[0]),options.grid[2])\
+np.random.random(options.N)) /options.grid[2]

goodSeeds = theSeeds[np.all(theSeeds<=options.fraction,axis=1)] # pick seeds within threshold fraction
seeds = goodSeeds if len(seeds) == 0 else np.vstack((seeds,goodSeeds))
if len(seeds) > options.N: seeds = seeds[:min(options.N,len(seeds))]

seeds = seeds.T # switch layout to point index as last index

else:
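Seed candidates are still drawn cell-by-cell on the unit cube, but only those inside the requested fraction box are kept, and the loop repeats until N survive. A condensed sketch of that rejection loop (parameters are made up):

import random
import numpy as np

grid = np.array([16,16,16]); gridSize = grid.prod()
fraction = np.array([0.5,1.0,1.0])                      # fill only half the cube along x
N = 20

seeds = np.empty((0,3))
while len(seeds) < N:
    gridpoints = np.array(random.sample(range(gridSize),N))       # N distinct cells, one candidate each
    theSeeds = np.empty((N,3))
    theSeeds[:,0] = (np.mod(gridpoints,                  grid[0])+np.random.random(N))/grid[0]
    theSeeds[:,1] = (np.mod(gridpoints// grid[0],        grid[1])+np.random.random(N))/grid[1]
    theSeeds[:,2] = (np.mod(gridpoints//(grid[0]*grid[1]),grid[2])+np.random.random(N))/grid[2]
    good = theSeeds[np.all(theSeeds <= fraction, axis=1)]         # reject candidates outside the fraction box
    seeds = np.vstack((seeds,good))[:N]                           # accumulate until N seeds collected

print(seeds.shape)                                                # (20, 3)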