commit f8901220ab
Merge branch 'development' of magit1.mpie.de:damask/DAMASK into development
@@ -41,7 +41,7 @@ if [ ! -z "$PS1" ]; then
 echo Max-Planck-Institut für Eisenforschung GmbH, Düsseldorf
 echo https://damask.mpie.de
 echo
-echo Using environment with ...
+echo "Using environment with ..."
 echo "DAMASK $DAMASK_ROOT"
 echo "Spectral Solver $SOLVER"
 echo "Post Processing $PROCESSING"

@@ -150,17 +150,16 @@ program DAMASK_spectral
   MPI_file_get_position, &
   MPI_file_write, &
   MPI_abort, &
+  MPI_finalize, &
   MPI_allreduce, &
   PETScFinalize

 !--------------------------------------------------------------------------------------------------
 ! init DAMASK (all modules)
 call CPFEM_initAll(el = 1_pInt, ip = 1_pInt)
-mainProcess: if (worldrank == 0) then
 write(6,'(/,a)') ' <<<+- DAMASK_spectral init -+>>>'
 write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
 #include "compilation_info.f90"
-endif mainProcess

 !--------------------------------------------------------------------------------------------------
 ! initialize field solver information

@@ -199,14 +198,14 @@ program DAMASK_spectral
   allocate(loadCases(i)%ID(nActiveFields))
   field = 1
   loadCases(i)%ID(field) = FIELD_MECH_ID                 ! mechanical active by default
-  if (any(thermal_type == THERMAL_conduction_ID)) then   ! thermal field active
+  thermalActive: if (any(thermal_type == THERMAL_conduction_ID)) then
     field = field + 1
     loadCases(i)%ID(field) = FIELD_THERMAL_ID
-  endif
+  endif thermalActive
-  if (any(damage_type == DAMAGE_nonlocal_ID)) then       ! damage field active
+  damageActive: if (any(damage_type == DAMAGE_nonlocal_ID)) then
     field = field + 1
     loadCases(i)%ID(field) = FIELD_DAMAGE_ID
-  endif
+  endif damageActive
 enddo

 !--------------------------------------------------------------------------------------------------
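
For context: the rename above turns anonymous if-blocks into named constructs. In Fortran, a construct name must be repeated on the closing statement, so the compiler verifies that every endif closes the block it claims to close, which pays off in a long driver like this one. A minimal standalone sketch of the idiom (variable names hypothetical, not taken from this commit):

    program named_blocks
      implicit none
      integer :: field     = 1
      logical :: thermalOn = .true.

      thermalActive: if (thermalOn) then   ! construct name on the opening statement ...
        field = field + 1
      endif thermalActive                  ! ... must match here, checked at compile time
      write(6,'(a,i0)') ' active fields: ', field
    end program named_blocks
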
@@ -232,8 +231,6 @@ program DAMASK_spectral
   endif
   do j = 1_pInt, 9_pInt
     temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*'                  ! true if not a *
-  enddo
-  do j = 1_pInt,9_pInt
     if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable
   enddo
   loadCases(currentLoadCase)%deformation%maskLogical = &                           ! logical mask in 3x3 notation

@@ -245,8 +242,6 @@ program DAMASK_spectral
   temp_valueVector = 0.0_pReal
   do j = 1_pInt, 9_pInt
     temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*'                  ! true if not an asterisk
-  enddo
-  do j = 1_pInt,9_pInt
     if (temp_maskVector(j)) temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j) ! read value where applicable
   enddo
   loadCases(currentLoadCase)%P%maskLogical = transpose(reshape(temp_maskVector,[ 3,3]))

@@ -303,14 +298,14 @@ program DAMASK_spectral
   write(6,'(1x,a,i6)') 'load case: ', currentLoadCase
   if (.not. loadCases(currentLoadCase)%followFormerTrajectory) &
     write(6,'(2x,a)') 'drop guessing along trajectory'
-  if (loadCases(currentLoadCase)%deformation%myType=='l') then
+  if (loadCases(currentLoadCase)%deformation%myType == 'l') then
     do j = 1_pInt, 3_pInt
       if (any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .true.) .and. &
           any(loadCases(currentLoadCase)%deformation%maskLogical(j,1:3) .eqv. .false.)) &
         errorID = 832_pInt                                                         ! each row should be either fully or not at all defined
     enddo
     write(6,'(2x,a)') 'velocity gradient:'
-  else if (loadCases(currentLoadCase)%deformation%myType=='f') then
+  else if (loadCases(currentLoadCase)%deformation%myType == 'f') then
     write(6,'(2x,a)') 'deformation gradient at end of load case:'
   else
     write(6,'(2x,a)') 'deformation gradient rate:'

@@ -319,12 +314,12 @@ program DAMASK_spectral
   if(loadCases(currentLoadCase)%deformation%maskLogical(i,j)) then
     write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%deformation%values(i,j)
   else
     write(6,'(2x,12a)',advance='no') ' * '
   endif
   enddo; write(6,'(/)',advance='no')
 enddo
 if (any(loadCases(currentLoadCase)%P%maskLogical .eqv. &
         loadCases(currentLoadCase)%deformation%maskLogical)) errorID = 831_pInt    ! exclusive or masking only
 if (any(loadCases(currentLoadCase)%P%maskLogical .and. &
         transpose(loadCases(currentLoadCase)%P%maskLogical) .and. &
         reshape([ .false.,.true.,.true.,.true.,.false.,.true.,.true.,.true.,.false.],[ 3,3]))) &

@@ -334,12 +329,12 @@ program DAMASK_spectral
   if(loadCases(currentLoadCase)%P%maskLogical(i,j)) then
     write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%P%values(i,j)*1e-9_pReal
   else
     write(6,'(2x,12a)',advance='no') ' * '
   endif
   enddo; write(6,'(/)',advance='no')
 enddo
 if (any(abs(math_mul33x33(loadCases(currentLoadCase)%rotation, &
-    math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) >&
+    math_transpose33(loadCases(currentLoadCase)%rotation))-math_I3) > &
     reshape(spread(tol_math_check,1,9),[ 3,3]))&
     .or. abs(math_det33(loadCases(currentLoadCase)%rotation)) > &
     1.0_pReal + tol_math_check) errorID = 846_pInt                                 ! given rotation matrix contains strain

@@ -379,7 +374,7 @@ program DAMASK_spectral
     call Polarisation_init

   case default
-    call IO_error(error_ID = 891, ext_msg = trim(spectral_solver))
+    call IO_error(error_ID = 891_pInt, ext_msg = trim(spectral_solver))

 end select

@@ -429,29 +424,30 @@ program DAMASK_spectral
 allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
 outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
 call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_allreduce')
 call MPI_file_open(PETSC_COMM_WORLD, &
                    trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', &
                    MPI_MODE_WRONLY + MPI_MODE_APPEND, &
                    MPI_INFO_NULL, &
                    resUnit, &
                    ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_open')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_open')
 call MPI_file_get_position(resUnit,fileOffset,ierr)                                ! get offset from header
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_get_position')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_get_position')
 fileOffset = fileOffset + sum(outputSize(1:worldrank))                             ! offset of my process in file (header + processes before me)
 call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
-if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
+if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_seek')

 if (.not. appendToOutFile) then                                                    ! if not restarting, write 0th increment
-  do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1   ! slice the output of my process in chunks not exceeding the limit for one output
-    outputIndex=int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
+  do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
+    outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
                        min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
-    call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
-                        [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
-                        (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
+    call MPI_file_write(resUnit, &
+                        reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), &
+                                [(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
+                        (outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults, &
                         MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
-    if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
+    if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write')
   enddo
   fileOffset = fileOffset + sum(outputSize)                                        ! forward to current file position
   if (worldrank == 0) &
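
The block above opens one shared result file with MPI-IO, derives each rank's byte offset from the per-rank output sizes, and writes in bounded chunks, checking `ierr` after every call. A reduced sketch of that open/seek/write/check pattern (file name and payload hypothetical; for brevity every rank here writes at offset 0, whereas the code above offsets each rank past the data of the ranks before it):

    program mpiio_sketch
      use mpi
      implicit none
      integer :: fh, ierr
      integer(MPI_OFFSET_KIND) :: offset = 0
      real(8) :: payload(3) = [1.0d0, 2.0d0, 3.0d0]

      call MPI_Init(ierr)
      call MPI_File_open(MPI_COMM_WORLD, 'demo.out', &
                         MPI_MODE_WRONLY + MPI_MODE_CREATE, MPI_INFO_NULL, fh, ierr)
      if (ierr /= 0) error stop 'MPI_File_open'        ! same check-after-call pattern as above
      call MPI_File_seek(fh, offset, MPI_SEEK_SET, ierr)
      if (ierr /= 0) error stop 'MPI_File_seek'
      call MPI_File_write(fh, payload, size(payload), MPI_DOUBLE_PRECISION, &
                          MPI_STATUS_IGNORE, ierr)
      if (ierr /= 0) error stop 'MPI_File_write'
      call MPI_File_close(fh, ierr)
      call MPI_Finalize(ierr)
    end program mpiio_sketch
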
@@ -490,7 +486,7 @@ program DAMASK_spectral
 endif
 timeinc = timeinc / 2.0_pReal**real(cutBackLevel,pReal)                            ! depending on cut back level, decrease time step

-forwarding: if(totalIncsCounter >= restartInc) then
+forwarding: if (totalIncsCounter >= restartInc) then
   stepFraction = 0_pInt

 !--------------------------------------------------------------------------------------------------

@@ -596,7 +592,7 @@ program DAMASK_spectral
                                guess,timeinc,timeIncOld,remainingLoadCaseTime)

       end select
-      if(.not. solres(field)%converged) exit                                       ! no solution found
+      if (.not. solres(field)%converged) exit                                      ! no solution found
     enddo
     stagIter = stagIter + 1_pInt
     stagIterate = stagIter < stagItMax .and. &

@@ -711,6 +707,13 @@ program DAMASK_spectral

 call PETScFinalize(ierr); CHKERRQ(ierr)

+#ifdef _OPENMP
+call MPI_finalize(i)
+if (i /= 0_pInt) then
+  call IO_error(error_ID=894, el=i, ext_msg="Finalize()")
+endif
+#endif
+
 if (notConvergedCounter > 0_pInt) call quit(3_pInt)                                ! error if some are not converged
 call quit(0_pInt)                                                                  ! no complains ;)

@@ -60,8 +60,6 @@ subroutine FE_init
   IO_warning, &
   IO_timeStamp
 use DAMASK_interface
-use numerics, only: &
-  worldrank

 implicit none
 #if defined(Marc4DAMASK) || defined(Abaqus)

@@ -129,6 +129,7 @@ recursive function IO_read(fileUnit,reset) result(line)
 !--------------------------------------------------------------------------------------------------
 ! normal case
 if (input == '') return                                                            ! regular line

 !--------------------------------------------------------------------------------------------------
 ! recursion case
 if (stack >= 10_pInt) call IO_error(104_pInt,ext_msg=input)                        ! recursion limit reached

@@ -141,7 +142,7 @@ recursive function IO_read(fileUnit,reset) result(line)
   pathOn(stack) = path(1:scan(path,SEP,.true.))//input                             ! glue include to current file's dir
 endif

-open(newunit=unitOn(stack),iostat=myStat,file=pathOn(stack))                       ! open included file
+open(newunit=unitOn(stack),iostat=myStat,file=pathOn(stack),action='read')         ! open included file
 if (myStat /= 0_pInt) call IO_error(100_pInt,el=myStat,ext_msg=pathOn(stack))

 line = IO_read(fileUnit)

code/Makefile (113 lines changed)

@@ -257,10 +257,10 @@ COMPILE_OPTIONS_gfortran :=-DDAMASKVERSION=\"${DAMASKVERSION}\"\
 #-Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions.
 #-Wstrict-overflow:

-DEBUG_OPTIONS_gfortran :=-g\
-                         -fbacktrace\
-                         -fdump-core\
-                         -fcheck=all\
+DEBUG_OPTIONS_gfortran :=-g \
+                         -fbacktrace \
+                         -fdump-core \
+                         -fcheck=all \
                          -ffpe-trap=invalid,zero,overflow

 ###################################################################################################

@@ -300,37 +300,60 @@ COMPILE =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$
 COMPILE_MAXOPTI =$(OPENMP_FLAG_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) $(COMPILE_OPTIONS_$(F90)) $(INCLUDE_DIRS) $(PRECISION_$(F90))
 ###################################################################################################
 SOURCE_FILES = \
-	source_thermal_dissipation.o source_thermal_externalheat.o \
-	source_damage_isoBrittle.o source_damage_isoDuctile.o source_damage_anisoBrittle.o source_damage_anisoDuctile.o \
-	source_vacancy_phenoplasticity.o source_vacancy_irradiation.o source_vacancy_thermalfluc.o
+	source_thermal_dissipation.o \
+	source_thermal_externalheat.o \
+	source_damage_isoBrittle.o \
+	source_damage_isoDuctile.o \
+	source_damage_anisoBrittle.o \
+	source_damage_anisoDuctile.o \
+	source_vacancy_phenoplasticity.o \
+	source_vacancy_irradiation.o \
+	source_vacancy_thermalfluc.o

 KINEMATICS_FILES = \
-	kinematics_cleavage_opening.o kinematics_slipplane_opening.o \
+	kinematics_cleavage_opening.o \
+	kinematics_slipplane_opening.o \
 	kinematics_thermal_expansion.o \
-	kinematics_vacancy_strain.o kinematics_hydrogen_strain.o
+	kinematics_vacancy_strain.o \
+	kinematics_hydrogen_strain.o

 PLASTIC_FILES = \
-	plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o \
-	plastic_phenopowerlaw.o plastic_titanmod.o plastic_nonlocal.o plastic_none.o \
+	plastic_dislotwin.o \
+	plastic_disloUCLA.o \
+	plastic_isotropic.o \
+	plastic_phenopowerlaw.o \
+	plastic_titanmod.o \
+	plastic_nonlocal.o \
+	plastic_none.o \
 	plastic_phenoplus.o

 THERMAL_FILES = \
-	thermal_isothermal.o thermal_adiabatic.o thermal_conduction.o
+	thermal_isothermal.o \
+	thermal_adiabatic.o \
+	thermal_conduction.o

 DAMAGE_FILES = \
-	damage_none.o damage_local.o damage_nonlocal.o
+	damage_none.o \
+	damage_local.o \
+	damage_nonlocal.o

 VACANCYFLUX_FILES = \
-	vacancyflux_isoconc.o vacancyflux_isochempot.o vacancyflux_cahnhilliard.o
+	vacancyflux_isoconc.o \
+	vacancyflux_isochempot.o \
+	vacancyflux_cahnhilliard.o

 POROSITY_FILES = \
-	porosity_none.o porosity_phasefield.o
+	porosity_none.o \
+	porosity_phasefield.o

 HYDROGENFLUX_FILES = \
-	hydrogenflux_isoconc.o hydrogenflux_cahnhilliard.o
+	hydrogenflux_isoconc.o \
+	hydrogenflux_cahnhilliard.o

 HOMOGENIZATION_FILES = \
-	homogenization_RGC.o homogenization_isostrain.o homogenization_none.o
+	homogenization_RGC.o \
+	homogenization_isostrain.o \
+	homogenization_none.o

 #####################
 # Spectral Solver

@@ -351,11 +374,28 @@ DAMASK_spectral.o: INTERFACENAME := spectral_interface.f90
 SPECTRAL_SOLVER_FILES = spectral_mech_AL.o spectral_mech_Basic.o spectral_mech_Polarisation.o \
                         spectral_thermal.o spectral_damage.o

-SPECTRAL_FILES = C_routines.o system_routines.o prec.o DAMASK_interface.o IO.o numerics.o debug.o math.o \
-                 FEsolving.o mesh.o material.o lattice.o \
-                 $(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \
+SPECTRAL_FILES = C_routines.o \
+                 system_routines.o \
+                 prec.o \
+                 DAMASK_interface.o \
+                 IO.o \
+                 numerics.o \
+                 debug.o \
+                 math.o \
+                 FEsolving.o \
+                 mesh.o \
+                 material.o \
+                 lattice.o \
+                 $(SOURCE_FILES) \
+                 $(KINEMATICS_FILES) \
+                 $(PLASTIC_FILES) \
+                 constitutive.o \
                  crystallite.o \
-                 $(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \
+                 $(THERMAL_FILES) \
+                 $(DAMAGE_FILES) \
+                 $(VACANCYFLUX_FILES) \
+                 $(HYDROGENFLUX_FILES) \
+                 $(POROSITY_FILES) \
                  $(HOMOGENIZATION_FILES) homogenization.o \
                  CPFEM2.o \
                  spectral_utilities.o \

@@ -401,14 +441,31 @@ DAMASK_FEM.exe: INCLUDE_DIRS += -I./

 FEM_SOLVER_FILES = FEM_mech.o FEM_thermal.o FEM_damage.o FEM_vacancyflux.o FEM_porosity.o FEM_hydrogenflux.o

-FEM_FILES = prec.o DAMASK_interface.o FEZoo.o IO.o numerics.o debug.o math.o \
-            FEsolving.o mesh.o material.o lattice.o \
-            $(SOURCE_FILES) $(KINEMATICS_FILES) $(PLASTIC_FILES) constitutive.o \
+FEM_FILES = prec.o \
+            DAMASK_interface.o \
+            FEZoo.o \
+            IO.o \
+            numerics.o \
+            debug.o \
+            math.o \
+            FEsolving.o \
+            mesh.o \
+            material.o \
+            lattice.o \
+            $(SOURCE_FILES) \
+            $(KINEMATICS_FILES) \
+            $(PLASTIC_FILES) \
+            constitutive.o \
             crystallite.o \
-            $(THERMAL_FILES) $(DAMAGE_FILES) $(VACANCYFLUX_FILES) $(HYDROGENFLUX_FILES) $(POROSITY_FILES) \
+            $(THERMAL_FILES) \
+            $(DAMAGE_FILES) \
+            $(VACANCYFLUX_FILES) \
+            $(HYDROGENFLUX_FILES) \
+            $(POROSITY_FILES) \
             $(HOMOGENIZATION_FILES) homogenization.o \
             CPFEM.o \
-            FEM_utilities.o $(FEM_SOLVER_FILES)
+            FEM_utilities.o \
+            $(FEM_SOLVER_FILES)

 DAMASK_FEM.exe: DAMASK_FEM_driver.o
 	$(PREFIX) $(LINKERNAME) $(OPENMP_FLAG_$(F90)) $(LINK_OPTIONS_$(F90)) $(STANDARD_CHECK_$(F90)) $(OPTIMIZATION_$(MAXOPTI)_$(F90)) \

|
@ -658,8 +715,8 @@ tidy:
|
||||||
@rm -rf *.inst.f90 # for instrumentation
|
@rm -rf *.inst.f90 # for instrumentation
|
||||||
@rm -rf *.pomp.f90 # for instrumentation
|
@rm -rf *.pomp.f90 # for instrumentation
|
||||||
@rm -rf *.pp.f90 # for instrumentation
|
@rm -rf *.pp.f90 # for instrumentation
|
||||||
@rm -rf *.pdb # for instrumnentation
|
@rm -rf *.pdb # for instrumentation
|
||||||
@rm -rf *.opari.inc # for instrumnentation
|
@rm -rf *.opari.inc # for instrumentation
|
||||||
|
|
||||||
.PHONY: cleanDAMASK
|
.PHONY: cleanDAMASK
|
||||||
cleanDAMASK:
|
cleanDAMASK:
|
||||||
|
|
|
@@ -104,7 +104,6 @@ contains
 subroutine debug_init
 use, intrinsic :: iso_fortran_env                                                  ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
 use numerics, only: &
-  worldrank, &
   nStress, &
   nState, &
   nCryst, &

@@ -130,47 +129,27 @@ subroutine debug_init
 integer(pInt), allocatable, dimension(:) :: chunkPos
 character(len=65536) :: tag, line

-mainProcess: if (worldrank == 0) then
 write(6,'(/,a)') ' <<<+- debug init -+>>>'
 write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
 #include "compilation_info.f90"
-endif mainProcess

-if (allocated(debug_StressLoopLpDistribution)) &
-  deallocate(debug_StressLoopLpDistribution)
-allocate(debug_StressLoopLpDistribution(nStress+1,2))
-debug_StressLoopLpDistribution = 0_pInt
-if (allocated(debug_StressLoopLiDistribution)) &
-  deallocate(debug_StressLoopLiDistribution)
-allocate(debug_StressLoopLiDistribution(nStress+1,2))
-debug_StressLoopLiDistribution = 0_pInt
-if (allocated(debug_StateLoopDistribution)) &
-  deallocate(debug_StateLoopDistribution)
-allocate(debug_StateLoopDistribution(nState+1,2))
-debug_StateLoopDistribution = 0_pInt
-if (allocated(debug_CrystalliteLoopDistribution)) &
-  deallocate(debug_CrystalliteLoopDistribution)
-allocate(debug_CrystalliteLoopDistribution(nCryst+1))
-debug_CrystalliteLoopDistribution = 0_pInt
-if (allocated(debug_MaterialpointStateLoopDistribution)) &
-  deallocate(debug_MaterialpointStateLoopDistribution)
-allocate(debug_MaterialpointStateLoopDistribution(nMPstate))
-debug_MaterialpointStateLoopDistribution = 0_pInt
-if (allocated(debug_MaterialpointLoopDistribution)) &
-  deallocate(debug_MaterialpointLoopDistribution)
-allocate(debug_MaterialpointLoopDistribution(nHomog+1))
-debug_MaterialpointLoopDistribution = 0_pInt
+allocate(debug_StressLoopLpDistribution(nStress+1,2), source=0_pInt)
+allocate(debug_StressLoopLiDistribution(nStress+1,2), source=0_pInt)
+allocate(debug_StateLoopDistribution(nState+1,2), source=0_pInt)
+allocate(debug_CrystalliteLoopDistribution(nCryst+1), source=0_pInt)
+allocate(debug_MaterialpointStateLoopDistribution(nMPstate), source=0_pInt)
+allocate(debug_MaterialpointLoopDistribution(nHomog+1), source=0_pInt)

 !--------------------------------------------------------------------------------------------------
 ! try to open the config file

 line = ''
 fileExists: if(IO_open_file_stat(FILEUNIT,debug_configFile)) then
   do while (trim(line) /= IO_EOF)                                                  ! read thru sections of phase part
     line = IO_read(FILEUNIT)
     if (IO_isBlank(line)) cycle                                                    ! skip empty lines
     chunkPos = IO_stringPos(line)
     tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt))                              ! extract key
     select case(tag)
       case ('element','e','el')
         debug_e = IO_intValue(line,chunkPos,2_pInt)
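
The rewrite above relies on sourced allocation (Fortran 2003): `allocate(a(...), source=expr)` sizes the array and initializes it in one statement, so the four-step guard/deallocate/allocate/assign pattern collapses to one line per array. A minimal sketch of the idiom (array name hypothetical, not from this commit):

    program alloc_source
      implicit none
      integer, allocatable :: counters(:,:)

      ! one statement replaces: if (allocated(counters)) deallocate(counters)
      !                         allocate(counters(4,2)); counters = 0
      allocate(counters(4,2), source=0)
      write(6,'(a,i0)') ' sum after sourced allocation: ', sum(counters)   ! prints 0
    end program alloc_source
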
@@ -158,7 +158,8 @@ module math
   math_areaTriangle, &
   math_rotate_forward33, &
   math_rotate_backward33, &
-  math_rotate_forward3333
+  math_rotate_forward3333, &
+  math_limit
 private :: &
   math_partition, &
   halton, &

@@ -178,7 +179,6 @@ subroutine math_init
 use, intrinsic :: iso_fortran_env                                                  ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
 use prec, only: tol_math_check
 use numerics, only: &
-  worldrank, &
   fixedSeed
 use IO, only: IO_error, IO_timeStamp

@@ -481,6 +481,7 @@ subroutine mesh_init(ip,el)
 #endif
 #ifdef Spectral
   IO_open_file, &
+  IO_error, &
 #else
   IO_open_InputFile, &
 #endif

@@ -507,7 +508,8 @@ subroutine mesh_init(ip,el)

 implicit none
 #ifdef Spectral
-  integer(C_INTPTR_T) :: gridMPI(3), alloc_local, local_K, local_K_offset
+  integer(C_INTPTR_T) :: devNull, local_K, local_K_offset
+  integer :: ierr, worldsize
 #endif
 integer(pInt), parameter :: FILEUNIT = 222_pInt
 integer(pInt), intent(in) :: el, ip

|
@ -547,10 +549,13 @@ subroutine mesh_init(ip,el)
|
||||||
call IO_open_file(FILEUNIT,geometryFile) ! parse info from geometry file...
|
call IO_open_file(FILEUNIT,geometryFile) ! parse info from geometry file...
|
||||||
if (myDebug) write(6,'(a)') ' Opened geometry file'; flush(6)
|
if (myDebug) write(6,'(a)') ' Opened geometry file'; flush(6)
|
||||||
grid = mesh_spectral_getGrid(fileUnit)
|
grid = mesh_spectral_getGrid(fileUnit)
|
||||||
|
call MPI_comm_size(MPI_COMM_WORLD, worldsize, ierr)
|
||||||
|
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_comm_size')
|
||||||
|
if(worldsize>grid(3)) call IO_error(894_pInt, ext_msg='number of processes exceeds grid(3)')
|
||||||
|
|
||||||
geomSize = mesh_spectral_getSize(fileUnit)
|
geomSize = mesh_spectral_getSize(fileUnit)
|
||||||
gridMPI = int(grid,C_INTPTR_T)
|
devNull = fftw_mpi_local_size_3d(int(grid(3),C_INTPTR_T),int(grid(2),C_INTPTR_T),&
|
||||||
alloc_local = fftw_mpi_local_size_3d(gridMPI(3), gridMPI(2), gridMPI(1)/2 +1, &
|
int(grid(1),C_INTPTR_T)/2+1,MPI_COMM_WORLD,local_K,local_K_offset)
|
||||||
MPI_COMM_WORLD, local_K, local_K_offset)
|
|
||||||
grid3 = int(local_K,pInt)
|
grid3 = int(local_K,pInt)
|
||||||
grid3Offset = int(local_K_offset,pInt)
|
grid3Offset = int(local_K_offset,pInt)
|
||||||
size3 = geomSize(3)*real(grid3,pReal) /real(grid(3),pReal)
|
size3 = geomSize(3)*real(grid3,pReal) /real(grid(3),pReal)
|
||||||
|
|
|
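
The `fftw_mpi_local_size_3d` call above asks FFTW's MPI interface how the real-to-complex 3D grid is sliced across ranks: each process is assigned `local_K` planes along the slowest dimension, starting at `local_K_offset`, and the new guard errors out when there are more ranks than planes. A reduced sketch of that query, assuming FFTW built with MPI support and its Fortran 2003 interface (`fftw3-mpi.f03`); the 16x16x16 grid is hypothetical:

    program fftw_slab_sketch
      use, intrinsic :: iso_c_binding
      use mpi
      implicit none
      include 'fftw3-mpi.f03'
      integer(C_INTPTR_T) :: alloc_local, local_K, local_K_offset
      integer :: ierr

      call MPI_Init(ierr)
      call fftw_mpi_init()
      ! how many z-planes of a 16x16x16 r2c grid does this rank own?
      alloc_local = fftw_mpi_local_size_3d(16_C_INTPTR_T, 16_C_INTPTR_T, 16_C_INTPTR_T/2+1, &
                                           MPI_COMM_WORLD, local_K, local_K_offset)
      write(6,'(2(a,i0))') ' my planes: ', local_K, ', starting at plane ', local_K_offset
      call fftw_mpi_cleanup()
      call MPI_Finalize(ierr)
    end program fftw_slab_sketch
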
@@ -1115,7 +1115,7 @@ allocate(nonSchmidProjection(3,3,4,maxTotalNslip,maxNinstances),

 initializeInstances: do phase = 1_pInt, size(phase_plasticity)
   NofMyPhase=count(material_phase==phase)
-  myPhase2: if (phase_plasticity(phase) == PLASTICITY_NONLOCAL_ID .and. NofMyPhase/=0) then
+  myPhase2: if (phase_plasticity(phase) == PLASTICITY_NONLOCAL_ID) then
     instance = phase_plasticityInstance(phase)
 !*** Inverse lookup of my slip system family and the slip system in lattice

@@ -81,7 +81,7 @@ module prec
   nTwin = 0_pInt, &
   nTrans = 0_pInt
 logical :: &
-  nonlocal = .false.                                                               !< absolute tolerance for state integration
+  nonlocal = .false.
 real(pReal), pointer, dimension(:,:), contiguous :: &
   slipRate, &                                                                      !< slip rate
   accumulatedSlip                                                                  !< accumulated plastic slip

@@ -57,7 +57,8 @@ subroutine DAMASK_interface_init()
 integer :: &
   i, &
   threadLevel, &
-  worldrank = 0
+  worldrank = 0, &
+  worldsize = 0
 integer, allocatable, dimension(:) :: &
   chunkPos
 integer, dimension(8) :: &

|
@ -66,6 +67,7 @@ subroutine DAMASK_interface_init()
|
||||||
external :: &
|
external :: &
|
||||||
quit,&
|
quit,&
|
||||||
MPI_Comm_rank,&
|
MPI_Comm_rank,&
|
||||||
|
MPI_Comm_size,&
|
||||||
PETScInitialize, &
|
PETScInitialize, &
|
||||||
MPI_Init_Thread, &
|
MPI_Init_Thread, &
|
||||||
MPI_abort
|
MPI_abort
|
||||||
|
@@ -77,17 +79,17 @@ subroutine DAMASK_interface_init()
 #ifdef _OPENMP
 call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,ierr);CHKERRQ(ierr)           ! in case of OpenMP, don't rely on PETScInitialize doing MPI init
 if (threadLevel<MPI_THREAD_FUNNELED) then
-  write(6,'(a)') 'MPI library does not support OpenMP'
+  write(6,'(a)') ' MPI library does not support OpenMP'
   call quit(1_pInt)
 endif
 #endif
 call PetscInitialize(PETSC_NULL_CHARACTER,ierr)                                    ! according to PETSc manual, that should be the first line in the code
 CHKERRQ(ierr)                                                                      ! this is a macro definition, it is case sensitive
 call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,ierr);CHKERRQ(ierr)
+call MPI_Comm_size(MPI_COMM_WORLD, worldsize, ierr);CHKERRQ(ierr)
 mainProcess: if (worldrank == 0) then
   if (output_unit /= 6) then
-    write(output_unit,'(a)') 'STDOUT != 6'
+    write(output_unit,'(a)') ' STDOUT != 6'
     call quit(1_pInt)
   endif
 else mainProcess

@@ -104,6 +106,7 @@ subroutine DAMASK_interface_init()
 write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':',&
                                         dateAndTime(6),':',&
                                         dateAndTime(7)
+write(6,'(/,a,i4.1)') ' MPI processes: ',worldsize
 write(6,'(/,a)') ' <<<+- DAMASK_interface init -+>>>'
 #include "compilation_info.f90"

@@ -245,7 +245,7 @@ type(tSolutionState) function spectral_thermal_solution(guess,timeinc,timeinc_ol
 if (worldrank == 0) then
   if (spectral_thermal_solution%converged) &
     write(6,'(/,a)') ' ... thermal conduction converged ..................................'
-  write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature = ',&
+  write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature / K = ',&
     minTemperature, maxTemperature, stagNorm
   write(6,'(/,a)') ' ==========================================================================='
   flush(6)

@@ -172,8 +172,7 @@ subroutine utilities_init()
   memory_efficient, &
   petsc_defaultOptions, &
   petsc_options, &
-  divergence_correction, &
-  worldrank
+  divergence_correction
 use debug, only: &
   debug_level, &
   debug_SPECTRAL, &

@@ -212,11 +211,9 @@ subroutine utilities_init()
   vecSize = 3_C_INTPTR_T, &
   tensorSize = 9_C_INTPTR_T

-mainProcess: if (worldrank == 0) then
 write(6,'(/,a)') ' <<<+- spectral_utilities init -+>>>'
 write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
 #include "compilation_info.f90"
-endif mainProcess

 !--------------------------------------------------------------------------------------------------
 ! set debugging parameters

@@ -224,11 +221,11 @@ subroutine utilities_init()
 debugRotation = iand(debug_level(debug_SPECTRAL),debug_SPECTRALROTATION) /= 0
 debugPETSc = iand(debug_level(debug_SPECTRAL),debug_SPECTRALPETSC) /= 0

-if(debugPETSc .and. worldrank == 0_pInt) write(6,'(3(/,a),/)') &
+if(debugPETSc) write(6,'(3(/,a),/)') &
   ' Initializing PETSc with debug options: ', &
   trim(PETScDebug), &
-  ' add more using the PETSc_Options keyword in numerics.config '
-flush(6)
+  ' add more using the PETSc_Options keyword in numerics.config '; flush(6)
 call PetscOptionsClear(ierr); CHKERRQ(ierr)
 if(debugPETSc) call PetscOptionsInsertString(trim(PETSCDEBUG),ierr); CHKERRQ(ierr)
 call PetscOptionsInsertString(trim(petsc_defaultOptions),ierr); CHKERRQ(ierr)

@@ -237,10 +234,8 @@ subroutine utilities_init()
 grid1Red = grid(1)/2_pInt + 1_pInt
 wgt = 1.0/real(product(grid),pReal)

-if (worldrank == 0) then
-  write(6,'(a,3(i12 ))') ' grid a b c: ', grid
-  write(6,'(a,3(es12.5))') ' size x y z: ', geomSize
-endif
+write(6,'(a,3(i12 ))') ' grid a b c: ', grid
+write(6,'(a,3(es12.5))') ' size x y z: ', geomSize

 select case (spectral_derivative)
   case ('continuous')                                                              ! default, no weighting

|
@ -342,8 +337,7 @@ subroutine utilities_init()
|
||||||
if (pReal /= C_DOUBLE .or. pInt /= C_INT) call IO_error(0_pInt,ext_msg='Fortran to C') ! check for correct precision in C
|
if (pReal /= C_DOUBLE .or. pInt /= C_INT) call IO_error(0_pInt,ext_msg='Fortran to C') ! check for correct precision in C
|
||||||
call fftw_set_timelimit(fftw_timelimit) ! set timelimit for plan creation
|
call fftw_set_timelimit(fftw_timelimit) ! set timelimit for plan creation
|
||||||
|
|
||||||
if (debugGeneral .and. worldrank == 0_pInt) write(6,'(/,a)') ' FFTW initialized'
|
if (debugGeneral) write(6,'(/,a)') ' FFTW initialized'; flush(6)
|
||||||
flush(6)
|
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! calculation of discrete angular frequencies, ordered as in FFTW (wrap around)
|
! calculation of discrete angular frequencies, ordered as in FFTW (wrap around)
|
||||||
|
@ -527,8 +521,6 @@ subroutine utilities_fourierGammaConvolution(fieldAim)
|
||||||
use math, only: &
|
use math, only: &
|
||||||
math_det33, &
|
math_det33, &
|
||||||
math_invert
|
math_invert
|
||||||
use numerics, only: &
|
|
||||||
worldrank
|
|
||||||
use mesh, only: &
|
use mesh, only: &
|
||||||
grid3, &
|
grid3, &
|
||||||
grid, &
|
grid, &
|
||||||
|
@ -545,10 +537,8 @@ subroutine utilities_fourierGammaConvolution(fieldAim)
|
||||||
logical :: err
|
logical :: err
|
||||||
|
|
||||||
|
|
||||||
if (worldrank == 0_pInt) then
|
write(6,'(/,a)') ' ... doing gamma convolution ...............................................'
|
||||||
write(6,'(/,a)') ' ... doing gamma convolution ...............................................'
|
flush(6)
|
||||||
flush(6)
|
|
||||||
endif
|
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! do the actual spectral method calculation (mechanical equilibrium)
|
! do the actual spectral method calculation (mechanical equilibrium)
|
||||||
|
@ -624,8 +614,6 @@ end subroutine utilities_fourierGreenConvolution
|
||||||
real(pReal) function utilities_divergenceRMS()
|
real(pReal) function utilities_divergenceRMS()
|
||||||
use IO, only: &
|
use IO, only: &
|
||||||
IO_error
|
IO_error
|
||||||
use numerics, only: &
|
|
||||||
worldrank
|
|
||||||
use mesh, only: &
|
use mesh, only: &
|
||||||
geomSize, &
|
geomSize, &
|
||||||
grid, &
|
grid, &
|
||||||
|
@@ -638,10 +626,9 @@ real(pReal) function utilities_divergenceRMS()
 external :: &
   MPI_Allreduce

-if (worldrank == 0_pInt) then
-  write(6,'(/,a)') ' ... calculating divergence ................................................'
-  flush(6)
-endif
+write(6,'(/,a)') ' ... calculating divergence ................................................'
+flush(6)
 rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal)

 !--------------------------------------------------------------------------------------------------

@@ -680,8 +667,6 @@ end function utilities_divergenceRMS
 real(pReal) function utilities_curlRMS()
 use IO, only: &
   IO_error
-use numerics, only: &
-  worldrank
 use mesh, only: &
   geomSize, &
   grid, &

|
||||||
complex(pReal), dimension(3) :: rescaledGeom
|
complex(pReal), dimension(3) :: rescaledGeom
|
||||||
|
|
||||||
external :: &
|
external :: &
|
||||||
MPI_Reduce, &
|
|
||||||
MPI_Allreduce
|
MPI_Allreduce
|
||||||
|
|
||||||
if (worldrank == 0_pInt) then
|
write(6,'(/,a)') ' ... calculating curl ......................................................'
|
||||||
write(6,'(/,a)') ' ... calculating curl ......................................................'
|
flush(6)
|
||||||
flush(6)
|
|
||||||
endif
|
|
||||||
rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal)
|
rescaledGeom = cmplx(geomSize/scaledGeomSize,0.0_pReal)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
@@ -757,8 +740,6 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
   prec_isNaN
 use IO, only: &
   IO_error
-use numerics, only: &
-  worldrank
 use math, only: &
   math_Plain3333to99, &
   math_plain99to3333, &

@@ -790,7 +771,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
 allocate (sTimesC(size_reduced,size_reduced), source =0.0_pReal)
 temp99_Real = math_Plain3333to99(math_rotate_forward3333(C,rot_BC))

-if(debugGeneral .and. worldrank == 0_pInt) then
+if(debugGeneral) then
   write(6,'(/,a)') ' ... updating masked compliance ............................................'
   write(6,'(/,a,/,9(9(2x,f12.7,1x)/))',advance='no') ' Stiffness C (load) / GPa =',&
     transpose(temp99_Real)/1.e9_pReal

|
@ -831,7 +812,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
|
||||||
if(m/=n .and. abs(sTimesC(m,n)) > (0.0_pReal + 10.0e-12_pReal)) errmatinv = .true. ! off diagonal elements of S*C should be 0
|
if(m/=n .and. abs(sTimesC(m,n)) > (0.0_pReal + 10.0e-12_pReal)) errmatinv = .true. ! off diagonal elements of S*C should be 0
|
||||||
enddo
|
enddo
|
||||||
enddo
|
enddo
|
||||||
if((debugGeneral .or. errmatinv) .and. (worldrank == 0_pInt)) then ! report
|
if(debugGeneral .or. errmatinv) then
|
||||||
write(formatString, '(I16.16)') size_reduced
|
write(formatString, '(I16.16)') size_reduced
|
||||||
formatString = '(/,a,/,'//trim(formatString)//'('//trim(formatString)//'(2x,es9.2,1x)/))'
|
formatString = '(/,a,/,'//trim(formatString)//'('//trim(formatString)//'(2x,es9.2,1x)/))'
|
||||||
write(6,trim(formatString),advance='no') ' C * S (load) ', &
|
write(6,trim(formatString),advance='no') ' C * S (load) ', &
|
||||||
|
@ -845,7 +826,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
|
||||||
else
|
else
|
||||||
temp99_real = 0.0_pReal
|
temp99_real = 0.0_pReal
|
||||||
endif
|
endif
|
||||||
if(debugGeneral .and. worldrank == 0_pInt) & ! report
|
if(debugGeneral) &
|
||||||
write(6,'(/,a,/,9(9(2x,f12.7,1x)/),/)',advance='no') ' Masked Compliance (load) * GPa =', &
|
write(6,'(/,a,/,9(9(2x,f12.7,1x)/),/)',advance='no') ' Masked Compliance (load) * GPa =', &
|
||||||
transpose(temp99_Real*1.e9_pReal)
|
transpose(temp99_Real*1.e9_pReal)
|
||||||
flush(6)
|
flush(6)
|
||||||
|
@@ -938,15 +919,11 @@ end subroutine utilities_fourierTensorDivergence
 !--------------------------------------------------------------------------------------------------
 subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
                                           P,C_volAvg,C_minmaxAvg,P_av,forwardData,rotation_BC)
-use prec, only: &
-  dNeq
 use IO, only: &
   IO_error
 use debug, only: &
   debug_reset, &
   debug_info
-use numerics, only: &
-  worldrank
 use math, only: &
   math_transpose33, &
   math_rotate_forward33, &

|
@ -974,7 +951,7 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
|
||||||
|
|
||||||
real(pReal),intent(out), dimension(3,3,3,3) :: C_volAvg, C_minmaxAvg !< average stiffness
|
real(pReal),intent(out), dimension(3,3,3,3) :: C_volAvg, C_minmaxAvg !< average stiffness
|
||||||
real(pReal),intent(out), dimension(3,3) :: P_av !< average PK stress
|
real(pReal),intent(out), dimension(3,3) :: P_av !< average PK stress
|
||||||
real(pReal),intent(out), dimension(3,3,grid(1),grid(2),grid3) :: P !< PK stress
|
real(pReal),intent(out), dimension(3,3,grid(1),grid(2),grid3) :: P !< PK stress
|
||||||
|
|
||||||
logical :: &
|
logical :: &
|
||||||
age
|
age
|
||||||
|
@@ -984,26 +961,18 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
 real(pReal), dimension(3,3,3,3) :: max_dPdF, min_dPdF
 real(pReal) :: max_dPdF_norm, min_dPdF_norm, defgradDetMin, defgradDetMax, defgradDet

-external :: &
-  MPI_Reduce, &
-  MPI_Allreduce
-
-if (worldrank == 0_pInt) then
-  write(6,'(/,a)') ' ... evaluating constitutive response ......................................'
-  flush(6)
-endif
+write(6,'(/,a)') ' ... evaluating constitutive response ......................................'
+flush(6)

 age = .False.

 if (forwardData) then                                                              ! aging results
   age = .True.
   materialpoint_F0 = reshape(F_lastInc, [3,3,1,product(grid(1:2))*grid3])
 endif
-if (cutBack) then                                                                  ! restore saved variables
-  age = .False.
-endif
+if (cutBack) age = .False.                                                         ! restore saved variables

 materialpoint_F = reshape(F,[3,3,1,product(grid(1:2))*grid3])
-call debug_reset()
+call debug_reset()                                                                 ! this has no effect on rank >0

 !--------------------------------------------------------------------------------------------------
 ! calculate bounds of det(F) and report

@@ -1015,15 +984,10 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
     defgradDetMax = max(defgradDetMax,defgradDet)
     defgradDetMin = min(defgradDetMin,defgradDet)
   end do
-  call MPI_reduce(MPI_IN_PLACE,defgradDetMax,1,MPI_DOUBLE,MPI_MAX,0,PETSC_COMM_WORLD,ierr)
-  if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce max')
-  call MPI_reduce(MPI_IN_PLACE,defgradDetMin,1,MPI_DOUBLE,MPI_MIN,0,PETSC_COMM_WORLD,ierr)
-  if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_Allreduce min')
-  if (worldrank == 0_pInt) then
-    write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax
-    write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin
-    flush(6)
-  endif
+  write(6,'(a,1x,es11.4)') ' max determinant of deformation =', defgradDetMax
+  write(6,'(a,1x,es11.4)') ' min determinant of deformation =', defgradDetMin
+  flush(6)
 endif

 call CPFEM_general(age,timeinc)

|
||||||
|
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
|
||||||
|
|
||||||
call debug_info()
|
call debug_info() ! this has no effect on rank >0
|
||||||
|
|
||||||
restartWrite = .false. ! reset restartWrite status
|
restartWrite = .false. ! reset restartWrite status
|
||||||
cutBack = .false. ! reset cutBack status
|
cutBack = .false. ! reset cutBack status
|
||||||
|
@ -1061,15 +1025,13 @@ subroutine utilities_constitutiveResponse(F_lastInc,F,timeinc, &
|
||||||
P = reshape(materialpoint_P, [3,3,grid(1),grid(2),grid3])
|
P = reshape(materialpoint_P, [3,3,grid(1),grid(2),grid3])
|
||||||
P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt ! average of P
|
P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt ! average of P
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
|
||||||
if (debugRotation .and. worldrank == 0_pInt) &
|
if (debugRotation) &
|
||||||
write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress (lab) / MPa =',&
|
write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress (lab) / MPa =',&
|
||||||
math_transpose33(P_av)*1.e-6_pReal
|
math_transpose33(P_av)*1.e-6_pReal
|
||||||
P_av = math_rotate_forward33(P_av,rotation_BC)
|
P_av = math_rotate_forward33(P_av,rotation_BC)
|
||||||
if (worldrank == 0_pInt) then
|
write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',&
|
||||||
write(6,'(/,a,/,3(3(2x,f12.4,1x)/))',advance='no') ' Piola--Kirchhoff stress / MPa =',&
|
|
||||||
math_transpose33(P_av)*1.e-6_pReal
|
math_transpose33(P_av)*1.e-6_pReal
|
||||||
flush(6)
|
flush(6)
|
||||||
endif
|
|
||||||
|
|
||||||
end subroutine utilities_constitutiveResponse
|
end subroutine utilities_constitutiveResponse
|
||||||
|
|
||||||
|
|
|
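
Dropping the `worldrank == 0` guards throughout this file works because rank-dependent behavior is handled elsewhere (the added comments note that `debug_reset`/`debug_info` have no effect on rank > 0), while quantities such as `P_av` are reduced in place across all ranks first, so every process already holds the same global value. A minimal sketch of that `MPI_IN_PLACE` all-reduce pattern (array contents hypothetical):

    program allreduce_sketch
      use mpi
      implicit none
      real(8) :: P_av(3,3)
      integer :: ierr, rank

      call MPI_Init(ierr)
      call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
      P_av = real(rank+1, 8)                            ! per-rank partial result
      ! MPI_IN_PLACE: the same buffer acts as send and receive on every rank
      call MPI_Allreduce(MPI_IN_PLACE, P_av, 9, MPI_DOUBLE_PRECISION, MPI_SUM, &
                         MPI_COMM_WORLD, ierr)
      if (ierr /= 0) error stop 'MPI_Allreduce'
      write(6,'(a,i0,a,f8.2)') ' rank ', rank, ' sees global value ', P_av(1,1)
      call MPI_Finalize(ierr)
    end program allreduce_sketch
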
@@ -1,35 +1,25 @@
 # Tasan et.al. 2015 Acta Materalia
 # Tasan et.al. 2015 International Journal of Plasticity
 # Diehl et.al. 2015 Meccanica
-[BCC_Ferrite]
+[BCC-Ferrite]

 elasticity              hooke
 plasticity              phenopowerlaw

 lattice_structure       bcc
 Nslip                   12  12              # per family
 Ntwin                    0                  # per family
 c11                     233.3e9
 c12                     135.5e9
 c44                     118.0e9
 gdot0_slip              0.001
 n_slip                  20
-tau0_slip               95.e6  97.e6  0  0  # per family, optimization long simplex 109
-tausat_slip             222.e6 412.7e6 0 0  # per family, optimization long simplex 109
-gdot0_twin              0.001
-n_twin                  20
-tau0_twin               31.0e6              # per family
-s_pr                    0                   # push-up factor for slip saturation due to twinning
-twin_b                  0
-twin_c                  0
-twin_d                  0
-twin_e                  0
-h0_slipslip             1000.0e6            # opti
-h0_twinslip             0
-h0_twintwin             0
+tau0_slip               95.e6  97.e6        # per family, optimization long simplex 109
+tausat_slip             222.e6 412.7e6      # per family, optimization long simplex 109
+h0_slipslip             1000.0e6
 interaction_slipslip    1 1 1.4 1.4 1.4 1.4
 interaction_sliptwin    1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 interaction_twinslip    1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 interaction_twintwin    1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
-w0_slip                 2.0                 # opti
-atol_resistance         1
+w0_slip                 2.0
 (output) totalshear

|
@@ -1,34 +1,25 @@
 # Tasan et.al. 2015 Acta Materalia
 # Tasan et.al. 2015 International Journal of Plasticity
 # Diehl et.al. 2015 Meccanica
-[BCC_Martensite]
-plasticity              phenopowerlaw
+[BCC-Martensite]
 elasticity              hooke
+plasticity              phenopowerlaw

 lattice_structure       bcc
 Nslip                   12  12                # per family
 Ntwin                    0                    # per family
 c11                     417.4e9
 c12                     242.4e9
 c44                     211.1e9
 gdot0_slip              0.001
 n_slip                  20
-tau0_slip               405.8e6 456.7e6 0 0   # per family
-tausat_slip             872.9e6 971.2e6 0 0   # per family
-gdot0_twin              0.001
-n_twin                  20
-tau0_twin               31.0e6                # per family
-s_pr                    0                     # push-up factor for slip saturation due to twinning
-twin_b                  0
-twin_c                  0
-twin_d                  0
-twin_e                  0
+tau0_slip               405.8e6 456.7e6       # per family
+tausat_slip             872.9e6 971.2e6       # per family
 h0_slipslip             563.0e9
-h0_twinslip             0
-h0_twintwin             0
 interaction_slipslip    1 1 1.4 1.4 1.4 1.4
 interaction_sliptwin    1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 interaction_twinslip    1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 interaction_twintwin    1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
 w0_slip                 2.0
-atol_resistance         1
 (output) totalshear
@@ -1,12 +0,0 @@
-include ${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/variables
-include ${PETSC_DIR}/${PETSC_ARCH}/lib/petsc/conf/rules
-
-
-run16x16x16:
-	-@${MPIEXEC} -n 2 DAMASK_spectral -l tensionX.load -g 20grains16x16x16.geom
-
-run32x32x32:
-	-@${MPIEXEC} -n 4 DAMASK_spectral -l tensionX.load -g 20grains32x32x32.geom
-
-run64x64x64:
-	-@${MPIEXEC} -n 8 DAMASK_spectral -l tensionX.load -g 20grains64x64x64.geom
@@ -11,7 +11,15 @@ bin_link = { \
         ],
       }

-MarcReleases =[2011,2012,2013,2013.1,2014,2014.2,2015]
+MarcReleases =[ \
+               '2011',
+               '2012',
+               '2013',
+               '2013.1',
+               '2014',
+               '2014.2',
+               '2015',
+              ]

 damaskEnv = damask.Environment()
 baseDir = damaskEnv.relPath('code/')
@@ -20,27 +28,41 @@ binDir = damaskEnv.options['DAMASK_BIN']
 if not os.path.isdir(binDir):
   os.mkdir(binDir)

-for dir in bin_link:
-  for file in bin_link[dir]:
-    src = os.path.abspath(os.path.join(baseDir,dir,file))
-    if os.path.exists(src):
-      sym_link = os.path.abspath(os.path.join(binDir,\
-                                              {True: dir,
-                                               False:os.path.splitext(file)[0]}[file == '']))
-      if os.path.lexists(sym_link): os.remove(sym_link)
-      os.symlink(src,sym_link)
-      sys.stdout.write(sym_link+' -> '+src+'\n')
+sys.stdout.write('\nsymbolic linking...\n')

+for subDir in bin_link:
+  theDir = os.path.abspath(os.path.join(baseDir,subDir))
+  sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n')

+  for theFile in bin_link[subDir]:
+    theName,theExt = os.path.splitext(theFile)
+    src = os.path.abspath(os.path.join(theDir,theFile))

+    if os.path.exists(src):
+      sym_link = os.path.abspath(os.path.join(binDir,subDir if theFile == '' else theName))

+      if os.path.lexists(sym_link):
+        os.remove(sym_link)
+        output = theName+damask.util.deemph(theExt)
+      else:
+        output = damask.util.emph(theName)+damask.util.deemph(theExt)

+      sys.stdout.write(damask.util.deemph('... ')+output+'\n')
+      os.symlink(src,sym_link)


+sys.stdout.write('\nMSC.Marc versioning...\n\n')
+theMaster = 'DAMASK_marc.f90'

 for version in MarcReleases:
-  src = os.path.abspath(os.path.join(baseDir,'DAMASK_marc.f90'))
+  src = os.path.abspath(os.path.join(baseDir,theMaster))
   if os.path.exists(src):
-    sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc'+str(version)+'.f90'))
+    sym_link = os.path.abspath(os.path.join(baseDir,'DAMASK_marc{}.f90'.format(version)))
     if os.path.lexists(sym_link):
       os.remove(sym_link)
-      sys.stdout.write(sym_link)
+      output = version
     else:
-      sys.stdout.write(damask.util.emph(sym_link))
+      output = damask.util.emph(version)

-    os.symlink(src,sym_link)
-    sys.stdout.write(' -> '+src+'\n')
+    sys.stdout.write(' '+output+'\n')
+    os.symlink(theMaster,sym_link)
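Editor's note: both loops above refresh links with the same idiom, test with os.path.lexists (true even for a broken link, unlike os.path.exists, which follows the link), remove, then re-create. A minimal sketch; the paths are placeholders, not the actual DAMASK layout:

    import os

    src      = '/path/to/code/DAMASK_marc.f90'       # hypothetical link target
    sym_link = '/path/to/code/DAMASK_marc2015.f90'   # hypothetical link name

    if os.path.lexists(sym_link):                    # catches existing and broken links alike
        os.remove(sym_link)                          # replace instead of failing with EEXIST
    os.symlink(src, sym_link)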
@@ -13,23 +13,47 @@ if not os.path.isdir(binDir):
   os.mkdir(binDir)

 #define ToDo list
-processing_subDirs = ['pre','post','misc',]
-processing_extensions = ['.py','.sh',]
+processing_subDirs = ['pre',
+                      'post',
+                      'misc',
+                     ]
+processing_extensions = ['.py',
+                         '.sh',
+                        ]
+
+sys.stdout.write('\nsymbolic linking...\n')

 for subDir in processing_subDirs:
   theDir = os.path.abspath(os.path.join(baseDir,subDir))

+  sys.stdout.write('\n'+binDir+' ->\n'+theDir+damask.util.deemph(' ...')+'\n')
+
   for theFile in os.listdir(theDir):
-    if os.path.splitext(theFile)[1] in processing_extensions:        # only consider files with proper extensions
+    theName,theExt = os.path.splitext(theFile)
+    if theExt in processing_extensions:                              # only consider files with proper extensions

       src = os.path.abspath(os.path.join(theDir,theFile))
-      sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0]))
+      sym_link = os.path.abspath(os.path.join(binDir,theName))

       if os.path.lexists(sym_link):
         os.remove(sym_link)
-        sys.stdout.write(sym_link)
+        output = theName+damask.util.deemph(theExt)
       else:
-        sys.stdout.write(damask.util.emph(sym_link))
+        output = damask.util.emph(theName)+damask.util.deemph(theExt)

+      sys.stdout.write(damask.util.deemph('... ')+output+'\n')
       os.symlink(src,sym_link)
-      sys.stdout.write(' -> '+src+'\n')
+
+sys.stdout.write('\npruning broken links...\n')
+
+brokenLinks = 0
+
+for filename in os.listdir(binDir):
+  path = os.path.join(binDir,filename)
+  if os.path.islink(path) and not os.path.exists(path):
+    sys.stdout.write(' '+damask.util.delete(path)+'\n')
+    os.remove(path)
+    brokenLinks += 1
+
+sys.stdout.write(('none.' if brokenLinks == 0 else '')+'\n')
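Editor's note: the pruning pass added above relies on os.path.islink being true for any symlink while os.path.exists follows the link and turns false once the target is gone; their combination identifies dangling links. Stand-alone sketch with a placeholder directory:

    import os

    binDir = '/path/to/bin'                          # placeholder, not the DAMASK location
    for filename in os.listdir(binDir):
        path = os.path.join(binDir, filename)
        if os.path.islink(path) and not os.path.exists(path):   # link present, target gone
            os.remove(path)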
@@ -3,6 +3,14 @@
 import os,sys
 import numpy as np

+# ------------------------------------------------------------------
+# python 3 has no unicode object, this ensures that the code works on Python 2&3
+try:
+  test=isinstance('test', unicode)
+except(NameError):
+  unicode=str
+
+# ------------------------------------------------------------------
 class ASCIItable():
   """Read and write to ASCII tables"""

@@ -158,12 +166,12 @@ class ASCIItable():

       if self.__IO__['labeled']:                                     # table features labels

-        self.info = [self.__IO__['in'].readline().strip() for i in xrange(1,int(m.group(1)))]
+        self.info = [self.__IO__['in'].readline().strip() for i in range(1,int(m.group(1)))]
         self.tags = shlex.split(self.__IO__['in'].readline())        # store tags found in last line

       else:

-        self.info = [self.__IO__['in'].readline().strip() for i in xrange(0,int(m.group(1)))]    # all header is info ...
+        self.info = [self.__IO__['in'].readline().strip() for i in range(0,int(m.group(1)))]     # all header is info ...

     else:                                                            # other table format
       try:
@@ -224,11 +232,11 @@ class ASCIItable():
     extra_header = []

     for header in self.info:
-      headitems = map(str.lower,header.split())
+      headitems = list(map(str.lower,header.split()))
       if len(headitems) == 0: continue                               # skip blank lines
-      if headitems[0] in mappings.keys():
-        if headitems[0] in identifiers.keys():
-          for i in xrange(len(identifiers[headitems[0]])):
+      if headitems[0] in list(mappings.keys()):
+        if headitems[0] in list(identifiers.keys()):
+          for i in range(len(identifiers[headitems[0]])):
             info[headitems[0]][i] = \
               mappings[headitems[0]](headitems[headitems.index(identifiers[headitems[0]][i])+1])
         else:
@@ -415,9 +423,9 @@ class ASCIItable():
     start = self.label_index(labels)
     dim   = self.label_dimension(labels)

-    return np.hstack(map(lambda c: xrange(c[0],c[0]+c[1]), zip(start,dim))) \
+    return np.hstack([range(c[0],c[0]+c[1]) for c in zip(start,dim)]) \
         if isinstance(labels, Iterable) and not isinstance(labels, str) \
-      else xrange(start,start+dim)
+      else range(start,start+dim)

 # ------------------------------------------------------------------
   def info_append(self,
@@ -447,7 +455,7 @@ class ASCIItable():
   def data_skipLines(self,
                      count):
     """wind forward by count number of lines"""
-    for i in xrange(count):
+    for i in range(count):
       alive = self.data_read()

     return alive
@@ -501,10 +509,10 @@ class ASCIItable():
       columns = []
       for i,(c,d) in enumerate(zip(indices[present],dimensions[present])):   # for all valid labels ...
         # ... transparently add all components unless column referenced by number or with explicit dimension
-        columns += range(c,c + \
+        columns += list(range(c,c + \
                          (d if str(c) != str(labels[present[i]]) else \
-                          1))
+                          1)))
-      use = np.array(columns)
+      use = np.array(columns) if len(columns) > 0 else None

       self.tags = list(np.array(self.tags)[use])                     # update labels with valid subset

@@ -530,7 +538,7 @@ class ASCIItable():
     """write whole numpy array data"""
     for row in self.data:
       try:
-        output = [fmt % value for value in row] if fmt else map(repr,row)
+        output = [fmt % value for value in row] if fmt else list(map(repr,row))
       except:
         output = [fmt % row] if fmt else [repr(row)]

@@ -555,7 +563,7 @@ class ASCIItable():
     try:
       idx = self.label_index(where)
       if len(self.data) <= idx:
-        self.data_append(['n/a' for i in xrange(idx+1-len(self.data))])      # grow data if too short
+        self.data_append(['n/a' for i in range(idx+1-len(self.data))])       # grow data if too short
       self.data[idx] = str(what)
     except(ValueError):
       pass
@@ -568,7 +576,7 @@ class ASCIItable():

 # ------------------------------------------------------------------
   def data_asFloat(self):
-    return map(self._transliterateToFloat,self.data)
+    return list(map(self._transliterateToFloat,self.data))



@@ -590,8 +598,8 @@ class ASCIItable():
         if len(items) > 2:
           if   items[1].lower() == 'of': items = np.ones(datatype(items[0]))*datatype(items[2])
           elif items[1].lower() == 'to': items = np.arange(datatype(items[0]),1+datatype(items[2]))
-          else:                          items = map(datatype,items)
-        else:                            items = map(datatype,items)
+          else:                          items = list(map(datatype,items))
+        else:                            items = list(map(datatype,items))

         s = min(len(items), N-i)                                     # prevent overflow of microstructure array
         microstructure[i:i+s] = items[:s]
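Editor's note: the ASCIItable hunks are a mechanical Python 2/3 port. xrange becomes range, and map()/dict.keys() results get wrapped in list(...) wherever list behaviour (indexing, len, reuse) is required, because Python 3 returns one-shot iterators and views. The same idioms in isolation:

    try:                                             # Python 2: unicode exists
        test = isinstance('test', unicode)
    except NameError:                                # Python 3: alias it to str
        unicode = str

    values  = list(map(float, ['1', '2', '3']))      # map() is an iterator on Python 3
    indices = list(range(3))                         # likewise range(); xrange() is gone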
@@ -33,7 +33,7 @@ class Color():
                           }

     model = model.upper()
-    if model not in self.__transforms__.keys(): model = 'RGB'
+    if model not in list(self.__transforms__.keys()): model = 'RGB'
     if model == 'RGB' and max(color) > 1.0:                          # are we RGB255 ?
       for i in range(3):
         color[i] /= 255.0                                            # rescale to RGB
@@ -62,7 +62,7 @@ class Color():
 # ------------------------------------------------------------------
   def convertTo(self,toModel = 'RGB'):
     toModel = toModel.upper()
-    if toModel not in self.__transforms__.keys(): return
+    if toModel not in list(self.__transforms__.keys()): return

     sourcePos = self.__transforms__[self.model]['index']
     targetPos = self.__transforms__[toModel]['index']
@@ -139,7 +139,7 @@ class Color():
     HSL[0] = HSL[0]*60.0                                             # scaling to 360 might be dangerous for small values
     if (HSL[0] < 0.0):
       HSL[0] = HSL[0] + 360.0
-    for i in xrange(2):
+    for i in range(2):
       HSL[i+1] = min(HSL[i+1],1.0)
       HSL[i+1] = max(HSL[i+1],0.0)

@@ -164,11 +164,11 @@ class Color():
                         [0.212671,0.715160,0.072169],
                         [0.019334,0.119193,0.950227]])

-    for i in xrange(3):
+    for i in range(3):
       if (self.color[i] > 0.04045): RGB_lin[i] = ((self.color[i]+0.0555)/1.0555)**2.4
       else:                         RGB_lin[i] =   self.color[i]        /12.92
     XYZ = np.dot(convert,RGB_lin)
-    for i in xrange(3):
+    for i in range(3):

       XYZ[i] = max(XYZ[i],0.0)

@@ -193,10 +193,10 @@ class Color():
     RGB_lin = np.dot(convert,self.color)
     RGB = np.zeros(3,'d')

-    for i in xrange(3):
+    for i in range(3):
       if (RGB_lin[i] > 0.0031308): RGB[i] = ((RGB_lin[i])**(1.0/2.4))*1.0555-0.0555
       else:                        RGB[i] =   RGB_lin[i]             *12.92
-    for i in xrange(3):
+    for i in range(3):
       RGB[i] = min(RGB[i],1.0)
       RGB[i] = max(RGB[i],0.0)

@@ -225,7 +225,7 @@ class Color():
     XYZ[0] = XYZ[1] + self.color[1]/ 500.0
     XYZ[2] = XYZ[1] - self.color[2]/ 200.0

-    for i in xrange(len(XYZ)):
+    for i in range(len(XYZ)):
       if (XYZ[i] > 6./29. ): XYZ[i] = XYZ[i]**3.
       else:                  XYZ[i] = 108./841. * (XYZ[i] - 4./29.)

@@ -245,7 +245,7 @@ class Color():
     ref_white = np.array([.95047, 1.00000, 1.08883],'d')             # Observer = 2, Illuminant = D65
     XYZ = self.color/ref_white

-    for i in xrange(len(XYZ)):
+    for i in range(len(XYZ)):
       if (XYZ[i] > 216./24389 ): XYZ[i] = XYZ[i]**(1.0/3.0)
       else:                      XYZ[i] = (841./108. * XYZ[i]) + 16.0/116.0

@@ -451,7 +451,7 @@ class Colormap():
     """
     format = format.lower()                                          # consistent comparison basis
     frac = 0.5*(np.array(crop) + 1.0)                                # rescale crop range to fractions
-    colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in xrange(steps)]
+    colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in range(steps)]
     if format == 'paraview':
       colormap = ['[\n {{\n "ColorSpace" : "RGB", "Name" : "{}",\n "RGBPoints" : ['.format(name)] \
                + [' {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],)
@@ -461,7 +461,7 @@ class Colormap():

     elif format == 'gmsh':
       colormap = ['View.ColorTable = {'] \
-               + [',\n'.join(['{%s}'%(','.join(map(lambda x:str(x*255.0),color))) for color in colors])] \
+               + [',\n'.join(['{%s}'%(','.join([str(x*255.0) for x in color])) for color in colors])] \
               + ['}']

     elif format == 'gom':
@@ -469,7 +469,7 @@ class Colormap():
                + ' 9 ' + str(name) \
                + ' 0 1 0 3 0 0 -1 9 \ 0 0 0 255 255 255 0 0 255 ' \
                + '30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 ' + str(len(colors)) \
-               + ' '.join([' 0 %s 255 1'%(' '.join(map(lambda x:str(int(x*255.0)),color))) for color in reversed(colors)])]
+               + ' '.join([' 0 %s 255 1'%(' '.join([str(int(x*255.0)) for x in color])) for color in reversed(colors)])]

     elif format == 'raw':
       colormap = ['\t'.join(map(str,color)) for color in colors]
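Editor's note: the converters above apply the sRGB transfer function channel by channel. Written as plain functions for a single channel c in [0,1], with the constants exactly as they appear in the code (they deviate slightly from the 0.055/1.055 of the sRGB standard):

    def srgb_to_linear(c):
        # piecewise gamma expansion, constants as used in the Color class
        return ((c + 0.0555)/1.0555)**2.4 if c > 0.04045 else c/12.92

    def linear_to_srgb(c):
        # inverse mapping, gamma compression
        return c**(1.0/2.4)*1.0555 - 0.0555 if c > 0.0031308 else c*12.92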
@@ -19,7 +19,7 @@ class Section():
         self.parameters[key] = data[key]

     if '__order__' not in self.parameters:
-      self.parameters['__order__'] = self.parameters.keys()
+      self.parameters['__order__'] = list(self.parameters.keys())
     if part.lower() in classes:
       self.__class__ = classes[part.lower()]
       self.__init__(data)
@@ -61,11 +61,11 @@ class Texture(Section):

   def add_component(self,theType,properties):

-    if 'scatter' not in map(str.lower,properties.keys()):
+    if 'scatter' not in list(map(str.lower,list(properties.keys()))):
       scatter = 0.0
     else:
       scatter = properties['scatter']
-    if 'fraction' not in map(str.lower,properties.keys()):
+    if 'fraction' not in list(map(str.lower,list(properties.keys()))):
       fraction = 1.0
     else:
       fraction = properties['fraction']
@@ -227,7 +227,7 @@ class Material():
     """Experimental! Needs expansion to multi-constituent microstructures..."""
     microstructure = Microstructure()
     # make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python)
-    components=dict((k.lower(), v) for k,v in components.iteritems())
+    components=dict((k.lower(), v) for k,v in components.items())

     for key in ['phase','texture','fraction','crystallite']:
       if type(components[key]) is not list:
@@ -1,6 +1,6 @@
 # -*- coding: UTF-8 no BOM -*-

-import os,subprocess,shlex,re,string
+import os,subprocess,shlex,re

 class Environment():
   __slots__ = [ \
@@ -22,7 +22,7 @@ class Environment():
       for line in configFile:
         l = re.sub('^set ', '', line).strip()                        # remove "set" (tcsh) when setting variables
         if l and not l.startswith('#'):
-          items = map(string.strip,l.split('='))
+          items = re.split(r'\s*=\s*',l)
           if len(items) == 2:
             self.options[items[0].upper()] = \
               re.sub('\$\{*DAMASK_ROOT\}*',self.rootDir(),os.path.expandvars(items[1]))    # expand all shell variables and DAMASK_ROOT
@@ -35,7 +35,7 @@ class Environment():
     try:
       cmd = """ ssh mulicense2 "/Stat_Flexlm | grep 'Users of %s: ' | cut -d' ' -f7,13" """%software
       process = subprocess.Popen(shlex.split(cmd),stdout = subprocess.PIPE,stderr = subprocess.PIPE)
-      licenses = map(int, process.stdout.readline().split())
+      licenses = list(map(int, process.stdout.readline().split()))
      try:
        if licenses[0]-licenses[1] >= Nneeded:
          return 0
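Editor's note: the parser change above replaces the removed string module with a regular expression that splits on '=' and strips the surrounding whitespace in a single step. Its effect on an illustrative config line:

    import re

    line = 'DAMASK_BIN = ${DAMASK_ROOT}/bin'         # illustrative, not a real entry
    items = re.split(r'\s*=\s*', line)               # ['DAMASK_BIN', '${DAMASK_ROOT}/bin']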
@@ -15,7 +15,7 @@ class Geometry():
                    'spectral': damask.geometry.Spectral,
                    'marc':     damask.geometry.Marc,
                   }
-    if solver.lower() in solverClass.keys():
+    if solver.lower() in list(solverClass.keys()):
       self.__class__=solverClass[solver.lower()]
       self.__init__()

@@ -154,7 +154,7 @@ class Quaternion:

   def __div__(self, other):
     """division"""
-    if isinstance(other, (int,float,long)):
+    if isinstance(other, (int,float)):
       w = self.w / other
       x = self.x / other
       y = self.y / other
@@ -165,7 +165,7 @@ class Quaternion:

   def __idiv__(self, other):
     """in place division"""
-    if isinstance(other, (int,float,long)):
+    if isinstance(other, (int,float)):
       self.w /= other
       self.x /= other
       self.y /= other
@@ -556,7 +556,7 @@ class Symmetry:

   def __init__(self, symmetry = None):
     """lattice with given symmetry, defaults to None"""
-    if isinstance(symmetry, basestring) and symmetry.lower() in Symmetry.lattices:
+    if isinstance(symmetry, str) and symmetry.lower() in Symmetry.lattices:
       self.lattice = symmetry.lower()
     else:
       self.lattice = None
@@ -653,8 +653,8 @@ class Symmetry:
                   [ 1.0,0.0,0.0,0.0 ],
                 ]

-    return map(Quaternion,
-               np.array(symQuats)[np.atleast_1d(np.array(who)) if who != [] else xrange(len(symQuats))])
+    return list(map(Quaternion,
+               np.array(symQuats)[np.atleast_1d(np.array(who)) if who != [] else range(len(symQuats))]))


   def equivalentQuaternions(self,
@@ -891,8 +891,7 @@ class Orientation:

   def equivalentOrientations(self,
                              who = []):
-    return map(lambda q: Orientation(quaternion = q, symmetry = self.symmetry.lattice),
-               self.equivalentQuaternions(who))
+    return [Orientation(quaternion = q, symmetry = self.symmetry.lattice) for q in self.equivalentQuaternions(who)]

   def reduced(self):
     """Transform orientation to fall into fundamental zone according to symmetry"""
@@ -921,7 +920,7 @@ class Orientation:
     for i,sA in enumerate(mySymQs):
       for j,sB in enumerate(otherSymQs):
         theQ = sA.conjugated()*misQ*sB
-        for k in xrange(2):
+        for k in range(2):
           theQ.conjugate()
           breaker = self.symmetry.inFZ(theQ) \
                 and (not SST or other.symmetry.inDisorientationSST(theQ))
@@ -996,11 +995,17 @@ class Orientation:
                            relationModel,
                            direction,
                            targetSymmetry = None):
+    """
+    orientation relationship
+
+    positive number: fcc --> bcc
+    negative number: bcc --> fcc
+    """
     if relationModel not in ['KS','GT','GTdash','NW','Pitsch','Bain']: return None
     if int(direction) == 0: return None

-    # KS from S. Morito et al./Journal of Alloys and Compounds 5775 (2013) S587-S592 DOES THIS PAPER EXISTS?
+    # KS from S. Morito et al./Journal of Alloys and Compounds 5775 (2013) S587-S592
+    # for KS rotation matrices also check K. Kitahara et al./Acta Materialia 54 (2006) 1279-1288
     # GT from Y. He et al./Journal of Applied Crystallography (2006). 39, 72-81
     # GT' from Y. He et al./Journal of Applied Crystallography (2006). 39, 72-81
     # NW from H. Kitahara et al./Materials Characterization 54 (2005) 378-386
@@ -1227,14 +1232,14 @@ class Orientation:
     myPlane  /= np.linalg.norm(myPlane)
     myNormal  = [float(i) for i in normals[relationModel][variant,me]]       # map(float, planes[...]) does not work in python 3
     myNormal /= np.linalg.norm(myNormal)
-    myMatrix  = np.array([myPlane,myNormal,np.cross(myPlane,myNormal)])
+    myMatrix  = np.array([myNormal,np.cross(myPlane,myNormal),myPlane]).T

     otherPlane  = [float(i) for i in planes[relationModel][variant,other]]   # map(float, planes[...]) does not work in python 3
     otherPlane /= np.linalg.norm(otherPlane)
     otherNormal = [float(i) for i in normals[relationModel][variant,other]]  # map(float, planes[...]) does not work in python 3
     otherNormal /= np.linalg.norm(otherNormal)
-    otherMatrix = np.array([otherPlane,otherNormal,np.cross(otherPlane,otherNormal)])
+    otherMatrix = np.array([otherNormal,np.cross(otherPlane,otherNormal),otherPlane]).T

-    rot=np.dot(otherMatrix.T,myMatrix)
+    rot=np.dot(otherMatrix,myMatrix.T)

     return Orientation(matrix=np.dot(rot,self.asMatrix()))                   # no symmetry information ??
@@ -1,8 +1,6 @@
 # -*- coding: UTF-8 no BOM -*-

-
 import numpy as np
-#import sys

 try:
   import h5py
@@ -19,19 +17,19 @@ class Result():
   def __init__(self,resultsFile):
     self.data=h5py.File(resultsFile,"r")
     self.Npoints=self.data.attrs['Number of Materialpoints']
-    print("Opened "+resultsFile+" with %i points"%self.Npoints)
+    print("Opened {} with {} points".format(resultsFile,self.Npoints))

   def getCrystallite(self,labels,inc,constituent=1,points=None):
-    if points is None: points = np.array(np.array(xrange(self.Npoints)))
+    if points is None: points = np.array(np.array(range(self.Npoints)))
     results = {}
     mapping=self.data['mapping/crystallite']
     for instance in self.data['increments/%s/crystallite/'%inc]:
-      dsets = self.data['increments/%s/crystallite/%s'%(inc,instance)].keys()
+      dsets = list(self.data['increments/%s/crystallite/%s'%(inc,instance)].keys())
       for label in labels:
         if label in dsets and label not in results:
           shape = np.shape(self.data['increments/%s/crystallite/%s/%s'%(inc,instance,label)])[1:]
           results[label] = np.nan*np.ones(np.array((self.Npoints,)+shape))
-    for myPoint in xrange(len(points)):
+    for myPoint in range(len(points)):
       matPoint = points[myPoint]
       pos = mapping[matPoint,constituent-1]
       if pos[0] != 0:
@@ -41,5 +39,3 @@ class Result():
       except:
         pass
     return results
-
-
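Editor's note: a minimal h5py access pattern matching the reader above; the file name and the increment number are hypothetical, while the attribute and group paths follow the code:

    import h5py

    with h5py.File('job.hdf5', 'r') as data:                   # hypothetical results file
        Npoints = data.attrs['Number of Materialpoints']       # attribute used by Result
        group = 'increments/10/crystallite/'                   # '10' is a made-up increment
        for instance in data[group]:                           # iterating a group yields names
            dsets = list(data[group + instance].keys())        # list() as in the port above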
@@ -25,7 +25,7 @@ class Marc(Solver):

     MSCpath = damask.environment.Environment().options['MSC_ROOT']

-    for release,subdirs in sorted(self.releases.items(),reverse=True):
+    for release,subdirs in sorted(list(self.releases.items()),reverse=True):
       for subdir in subdirs:
         path = '%s/mentat%s/shlib/%s'%(MSCpath,release,subdir)
         if os.path.exists(path): return release
@@ -40,7 +40,7 @@ class Marc(Solver):

     MSCpath = damask.environment.Environment().options['MSC_ROOT']

-    if len(releases) == 0: releases = self.releases.keys()
+    if len(releases) == 0: releases = list(self.releases.keys())
     if type(releases) is not list: releases = [releases]
     for release in sorted(releases,reverse=True):
       if release not in self.releases: continue
@@ -15,7 +15,7 @@ class Solver():
                    'spectral': damask.solver.Spectral,
                    'marc':     damask.solver.Marc,
                   }
-    if solver.lower() in solverClass.keys():
+    if solver.lower() in list(solverClass.keys()):
       self.__class__=solverClass[solver.lower()]
       self.__init__()

@@ -17,92 +17,96 @@ class Test():

   variants = []

-  def __init__(self,test_description):
+  def __init__(self, **kwargs):

+    defaults = {'description': '',
+                'keep':          False,
+                'accept':        False,
+                'updateRequest': False,
+               }
+    for arg in defaults.keys():
+      setattr(self,arg,kwargs.get(arg) if kwargs.get(arg) else defaults[arg])
+
+    fh = logging.FileHandler('test.log')                             # create file handler which logs even debug messages
+    fh.setLevel(logging.DEBUG)
+    fh.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s'))
+
+    ch = logging.StreamHandler(stream=sys.stdout)                    # create console handler with a higher log level
+    ch.setLevel(logging.INFO)
+    ch.setFormatter(logging.Formatter('%(message)s'))
+
     logger = logging.getLogger()
-    logger.setLevel(0)
-    fh = logging.FileHandler('test.log')                             # create file handler which logs even debug messages
-    fh.setLevel(logging.DEBUG)
-    full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')
-    fh.setFormatter(full)
-    ch = logging.StreamHandler(stream=sys.stdout)                    # create console handler with a higher log level
-    ch.setLevel(logging.INFO)
-    # create formatter and add it to the handlers
-    plain = logging.Formatter('%(message)s')
-    ch.setFormatter(plain)
-    # add the handlers to the logger
     logger.addHandler(fh)
     logger.addHandler(ch)
+    logger.setLevel(0)

-    logging.info('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n' \
-                +'----------------------------------------------------------------\n' \
-                +'| '+test_description+'\n' \
-                +'----------------------------------------------------------------')
+    logging.info('\n'.join(['+'*40,
+                            '-'*40,
+                            '| '+self.description,
+                            '-'*40,
+                           ]))

     self.dirBase = os.path.dirname(os.path.realpath(sys.modules[self.__class__.__module__].__file__))
-    self.parser = OptionParser(
-    description = test_description+' (using class: {})'.format(damask.version),
-    usage='./test.py [options]')
-    self.updateRequested = False
-    self.parser.add_option("-d", "--debug", action="store_true",\
-                                            dest="debug",\
-                                            help="debug run, don't calculate but use existing results")
-    self.parser.add_option("-p", "--pass", action="store_true",\
-                                           dest="accept",\
-                                           help="calculate results but always consider test as successfull")
-    self.parser.set_defaults(debug=False,
-                             accept=False)
+    self.parser = OptionParser(description = '{} (Test class version: {})'.format(self.description,damask.version),
+                               usage = './test.py [options]')
+    self.parser.add_option("-k", "--keep",
+                           action = "store_true",
+                           dest = "keep",
+                           help = "keep current results, just run postprocessing")
+    self.parser.add_option("--ok", "--accept",
+                           action = "store_true",
+                           dest = "accept",
+                           help = "calculate results but always consider test as successfull")
+
+    self.parser.set_defaults(keep = self.keep,
+                             accept = self.accept,
+                             update = self.updateRequest,
+                            )


   def execute(self):
     """Run all variants and report first failure."""
-    if self.options.debug:
-      for variant in xrange(len(self.variants)):
-        try:
-          self.postprocess(variant)
-          if not self.compare(variant):
-            return variant+1                                         # return culprit
-        except Exception as e :
-          logging.critical('\nWARNING:\n {}\n'.format(e))
-          return variant+1                                           # return culprit
-      return 0
-    else:
-      if not self.testPossible(): return -1
+    if not self.options.keep:
+      if not self.feasible(): return -1
       self.clean()
       self.prepareAll()
-      for variant in xrange(len(self.variants)):
-        try:
+
+    for variant,name in enumerate(self.variants):
+      try:
+        if not self.options.keep:
           self.prepare(variant)
           self.run(variant)
-          self.postprocess(variant)
-          if self.updateRequested:                                   # update requested
-            self.update(variant)
-          elif not (self.options.accept or self.compare(variant)):   # no update, do comparison
-            return variant+1                                         # return culprit
-        except Exception as e :
-          logging.critical('\nWARNING:\n {}\n'.format(e))
-          return variant+1                                           # return culprit
-      return 0

-  def testPossible(self):
-    """Check if test is possible or not (e.g. no license available)."""
+        self.postprocess(variant)
+
+        if self.options.update:
+          if self.update(variant) != 0: logging.critical('update for "{}" failed.'.format(name))
+        elif not (self.options.accept or self.compare(variant)):     # no update, do comparison
+          return variant+1                                           # return culprit
+
+      except Exception as e :
+        logging.critical('exception during variant execution: {}'.format(e))
+        return variant+1                                             # return culprit
+    return 0
+
+  def feasible(self):
+    """Check whether test is possible or not (e.g. no license available)."""
     return True

   def clean(self):
     """Delete directory tree containing current results."""
-    status = True

     try:
       shutil.rmtree(self.dirCurrent())
     except:
       logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
-      status = status and False

     try:
       os.mkdir(self.dirCurrent())
+      return True
     except:
-      logging.critical('creation of directory "{}" failed...'.format(self.dirCurrent()))
-      status = status and False
+      logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent()))
+      return False

-    return status

   def prepareAll(self):
     """Do all necessary preparations for the whole test"""
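Editor's note: the new constructor's keyword handling, isolated below. One quirk worth noting: because the fallback tests kwargs.get(arg) for truth, explicitly passed falsy values (False, '', 0) are replaced by the defaults as well.

    class Example(object):
        def __init__(self, **kwargs):
            defaults = {'description': '', 'keep': False, 'accept': False}
            for arg in defaults.keys():
                # falls back to the default when the value is missing *or* falsy
                setattr(self, arg, kwargs.get(arg) if kwargs.get(arg) else defaults[arg])

    t = Example(description='my test')           # t.description == 'my test', t.keep == False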
@@ -130,8 +134,8 @@ class Test():

   def update(self,variant):
     """Update reference with current results."""
-    logging.debug('Update not necessary')
-    return True
+    logging.critical('update not supported.')
+    return 1


   def dirReference(self):
@@ -178,7 +182,7 @@ class Test():
     """
     if not B or len(B) == 0: B = A

-    for source,target in zip(map(mapA,A),map(mapB,B)):
+    for source,target in zip(list(map(mapA,A)),list(map(mapB,B))):
       try:
         shutil.copy2(source,target)
       except:
@@ -301,26 +305,26 @@ class Test():
 #  check if comparison is possible and determine lenght of columns
     if len(headings0) == len(headings1) == len(normHeadings):
       dataLength = len(headings0)
-      length     = [1   for i in xrange(dataLength)]
-      shape      = [[]  for i in xrange(dataLength)]
-      data       = [[]  for i in xrange(dataLength)]
-      maxError   = [0.0 for i in xrange(dataLength)]
-      absTol     = [absoluteTolerance for i in xrange(dataLength)]
-      column     = [[1 for i in xrange(dataLength)] for j in xrange(2)]
+      length     = [1   for i in range(dataLength)]
+      shape      = [[]  for i in range(dataLength)]
+      data       = [[]  for i in range(dataLength)]
+      maxError   = [0.0 for i in range(dataLength)]
+      absTol     = [absoluteTolerance for i in range(dataLength)]
+      column     = [[1 for i in range(dataLength)] for j in range(2)]

-      norm       = [[]  for i in xrange(dataLength)]
-      normLength = [1   for i in xrange(dataLength)]
-      normShape  = [[]  for i in xrange(dataLength)]
-      normColumn = [1   for i in xrange(dataLength)]
+      norm       = [[]  for i in range(dataLength)]
+      normLength = [1   for i in range(dataLength)]
+      normShape  = [[]  for i in range(dataLength)]
+      normColumn = [1   for i in range(dataLength)]

-      for i in xrange(dataLength):
+      for i in range(dataLength):
         if headings0[i]['shape'] != headings1[i]['shape']:
           raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label']))
         shape[i] = headings0[i]['shape']
-        for j in xrange(np.shape(shape[i])[0]):
+        for j in range(np.shape(shape[i])[0]):
           length[i] *= shape[i][j]
         normShape[i] = normHeadings[i]['shape']
-        for j in xrange(np.shape(normShape[i])[0]):
+        for j in range(np.shape(normShape[i])[0]):
           normLength[i] *= normShape[i][j]
     else:
       raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0),
@@ -332,7 +336,7 @@ class Test():
     table1 = damask.ASCIItable(name=file1,readonly=True)
     table1.head_read()

-    for i in xrange(dataLength):
+    for i in range(dataLength):
       key0    = ('1_' if      length[i]>1 else '') +     headings0[i]['label']
       key1    = ('1_' if      length[i]>1 else '') +     headings1[i]['label']
       normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label']
@@ -350,7 +354,7 @@ class Test():
     line0 = 0
     while table0.data_read():                                        # read next data line of ASCII table
       if line0 not in skipLines:
-        for i in xrange(dataLength):
+        for i in range(dataLength):
           myData = np.array(map(float,table0.data[column[0][i]:\
                                                   column[0][i]+length[i]]),'d')
           normData = np.array(map(float,table0.data[normColumn[i]:\
@@ -362,11 +366,11 @@ class Test():
           norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType))
       line0 += 1

-    for i in xrange(dataLength):
+    for i in range(dataLength):
       if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))]
       data[i] = np.reshape(data[i],[line0-len(skipLines),length[i]])
       if any(norm[i]) == 0.0 or absTol[i]:
-        norm[i] = [1.0 for j in xrange(line0-len(skipLines))]
+        norm[i] = [1.0 for j in range(line0-len(skipLines))]
         absTol[i] = True
         if perLine:
           logging.warning('At least one norm of {} in 1. table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
@@ -376,9 +380,9 @@ class Test():
     line1 = 0
     while table1.data_read():                                        # read next data line of ASCII table
       if line1 not in skipLines:
-        for i in xrange(dataLength):
+        for i in range(dataLength):
           myData = np.array(map(float,table1.data[column[1][i]:\
                                                   column[1][i]+length[i]]),'d')
           maxError[i] = max(maxError[i],np.linalg.norm(np.reshape(myData-data[i][line1-len(skipLines),:],shape[i]))/
                                                                   norm[i][line1-len(skipLines)])
       line1 +=1
@@ -386,7 +390,7 @@ class Test():
     if (line0 != line1): raise Exception('found {} lines in 1. table but {} in 2. table'.format(line0,line1))

     logging.info(' ********')
-    for i in xrange(dataLength):
+    for i in range(dataLength):
       if absTol[i]:
         logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i],
                                                                              headings0[i]['label'],
@@ -424,7 +428,7 @@ class Test():
       if column is None: columns[i] = tables[i].labels(raw = True)   # if no column is given, read all

     logging.info('comparing ASCIItables statistically')
-    for i in xrange(len(columns)):
+    for i in range(len(columns)):
       columns[i] = columns[0] if not columns[i] else \
                  ([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
                    columns[i]
@@ -440,7 +444,7 @@ class Test():
       table.close()


-    for i in xrange(1,len(data)):
+    for i in range(1,len(data)):
       delta = data[i]-data[i-1]
       normBy = (np.abs(data[i]) + np.abs(data[i-1]))*0.5
       normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
|
@ -454,21 +458,17 @@ class Test():
|
||||||
|
|
||||||
|
|
||||||
def compare_Tables(self,
|
def compare_Tables(self,
|
||||||
files = [None,None], # list of file names
|
files = [None,None], # list of file names
|
||||||
columns = [None], # list of list of column labels (per file)
|
columns = [None], # list of list of column labels (per file)
|
||||||
rtol = 1e-5,
|
rtol = 1e-5,
|
||||||
atol = 1e-8,
|
atol = 1e-8,
|
||||||
preFilter = -1.0,
|
debug = False):
|
||||||
postFilter = -1.0,
|
"""compare multiple tables with np.allclose"""
|
||||||
debug = False):
|
|
||||||
"""
|
|
||||||
compare tables with np.allclose
|
|
||||||
|
|
||||||
threshold can be used to ignore small values (a negative number disables this feature)
|
|
||||||
"""
|
|
||||||
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
|
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
|
||||||
files = [str(files)]
|
files = [str(files)]
|
||||||
|
|
||||||
|
if len(files) < 2: return True # single table is always close to itself...
|
||||||
|
|
||||||
tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
|
tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
|
||||||
for table in tables:
|
for table in tables:
|
||||||
table.head_read()
|
table.head_read()
|
||||||
|
@ -477,47 +477,50 @@ class Test():
|
||||||
columns = columns[:len(files)] # truncate to same length as files
|
columns = columns[:len(files)] # truncate to same length as files
|
||||||
|
|
||||||
for i,column in enumerate(columns):
|
for i,column in enumerate(columns):
|
||||||
if column is None: columns[i] = tables[i].labels(raw = True) # if no column is given, read all
|
if column is None: columns[i] = tables[i].labels(raw = False) # if no column is given, use all
|
||||||
|
|
||||||
logging.info('comparing ASCIItables')
|
logging.info('comparing ASCIItables')
|
||||||
for i in xrange(len(columns)):
|
for i in range(len(columns)):
|
||||||
columns[i] = columns[0] if not columns[i] else \
|
columns[i] = columns[0] if not columns[i] else \
|
||||||
([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
|
([columns[i]] if not (isinstance(columns[i], Iterable) and not isinstance(columns[i], str)) else \
|
||||||
columns[i]
|
columns[i]
|
||||||
)
|
)
|
||||||
logging.info(files[i]+':'+','.join(columns[i]))
|
logging.info(files[i]+': '+','.join(columns[i]))
|
||||||
|
|
||||||
if len(files) < 2: return True # single table is always close to itself...
|
dimensions = tables[0].label_dimension(columns[0]) # width of each requested column
|
||||||
|
maximum = np.zeros_like(columns[0],dtype=float) # one magnitude per column entry
|
||||||
|
data = [] # list of feature table extracted from each file (ASCII table)
|
||||||
|
|
||||||
maximum = np.zeros(len(columns[0]),dtype='f')
|
for i,(table,labels) in enumerate(zip(tables,columns)):
|
||||||
data = []
|
if np.any(dimensions != table.label_dimension(labels)): # check data object consistency
|
||||||
for table,labels in zip(tables,columns):
|
logging.critical('Table {} differs in data layout.'.format(files[i]))
|
||||||
table.data_readArray(labels)
|
return False
|
||||||
data.append(np.where(np.abs(table.data)<preFilter,np.zeros_like(table.data),table.data))
|
table.data_readArray(labels) # read data, ...
|
||||||
maximum += np.abs(table.data).max(axis=0)
|
data.append(table.data) # ... store, ...
|
||||||
table.close()
|
table.close() # ... close
|
||||||
|
|
||||||
maximum /= len(tables)
|
for j,label in enumerate(labels): # iterate over object labels
|
||||||
maximum = np.where(maximum >0.0, maximum, 1) # avoid div by zero for empty columns
|
maximum[j] = np.maximum(\
|
||||||
for i in xrange(len(data)):
|
maximum[j],
|
||||||
data[i] /= maximum
|
np.amax(np.linalg.norm(table.data[:,table.label_indexrange(label)],
|
||||||
|
axis=1))
|
||||||
|
) # find maximum Euclidean norm across rows
|
||||||
|
|
||||||
mask = np.zeros_like(table.data,dtype='bool')
|
maximum = np.where(maximum > 0.0, maximum, 1.0) # avoid div by zero for zero columns
|
||||||
|
maximum = np.repeat(maximum,dimensions) # spread maximum over columns of each object
|
||||||
|
|
||||||
for table in data:
|
for i in range(len(data)):
|
||||||
mask |= np.where(np.abs(table)<postFilter,True,False) # mask out (all) tiny values
|
data[i] /= maximum # normalize each table
|
||||||
|
|
||||||
|
if debug:
|
||||||
|
logging.debug(str(maximum))
|
||||||
|
allclose = np.absolute(data[0]-data[1]) <= (atol + rtol*np.absolute(data[1]))
|
||||||
|
for ok,valA,valB in zip(allclose,data[0],data[1]):
|
||||||
|
logging.debug('{}:\n{}\n{}'.format(ok,valA,valB))
|
||||||
|
|
||||||
allclose = True # start optimistic
|
allclose = True # start optimistic
|
||||||
for i in xrange(1,len(data)):
|
for i in range(1,len(data)):
|
||||||
if debug:
|
allclose &= np.allclose(data[i-1],data[i],rtol,atol) # accumulate "pessimism"
|
||||||
t0 = np.where(mask,0.0,data[i-1])
|
|
||||||
t1 = np.where(mask,0.0,data[i ])
|
|
||||||
j = np.argmin(np.abs(t1)*rtol+atol-np.abs(t0-t1))
|
|
||||||
logging.info('{:f}'.format(np.amax(np.abs(t0-t1)/(np.abs(t1)*rtol+atol))))
|
|
||||||
logging.info('{:f} {:f}'.format((t0*maximum).flatten()[j],(t1*maximum).flatten()[j]))
|
|
||||||
allclose &= np.allclose(np.where(mask,0.0,data[i-1]),
|
|
||||||
np.where(mask,0.0,data[i ]),rtol,atol) # accumulate "pessimism"
|
|
||||||
|
|
||||||
return allclose
|
return allclose
|
||||||
|
|
||||||
|
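Reviewer note on the hunk above: the rewrite drops the pre-/postFilter masking in favor of normalizing every column by its largest magnitude before chaining np.allclose. A minimal sketch of that comparison idea, simplified to per-column maxima instead of per-object Euclidean norms (made-up arrays, no ASCIItable I/O):

    import numpy as np

    a = np.array([[1.0, 100.0], [2.0, 200.0]])       # columns of very different magnitude
    b = a * (1.0 + 1e-6)                             # tiny relative perturbation
    data = [a, b]

    maximum = np.abs(a).max(axis=0)                  # one magnitude per column
    maximum = np.where(maximum > 0.0, maximum, 1.0)  # avoid division by zero for all-zero columns
    data = [d/maximum for d in data]                 # normalize each table

    allclose = True                                  # start optimistic
    for i in range(1, len(data)):
        allclose &= np.allclose(data[i-1], data[i], rtol=1e-5, atol=1e-8)  # accumulate "pessimism"
    print(allclose)                                  # True
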
@@ -545,14 +548,16 @@ class Test():

   def report_Success(self,culprit):

+    ret = culprit
+
     if culprit == 0:
-      logging.critical(('The test' if len(self.variants) == 1 else 'All {} tests'.format(len(self.variants))) + ' passed')
-      logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
-      return 0
-    if culprit == -1:
-      logging.warning('Warning: Could not start test')
-      return 0
+      msg = 'The test passed' if len(self.variants) == 1 \
+       else 'All {} tests passed.'.format(len(self.variants))
+    elif culprit == -1:
+      msg = 'Warning: Could not start test...'
+      ret = 0
     else:
-      logging.critical(' ********\n * Test {} failed...\n ********'.format(culprit))
-      logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
-      return culprit
+      msg = ' * Test "{}" failed.'.format(self.variants[culprit-1])
+
+    logging.critical('\n'.join(['*'*40,msg,'*'*40]) + '\n')
+    return ret

@@ -20,6 +20,7 @@ class bcolors:
   FAIL      = '\033[91m'
   ENDC      = '\033[0m'
   BOLD      = '\033[1m'
+  DIM       = '\033[2m'
   UNDERLINE = '\033[4m'

   def disable(self):
@@ -40,7 +41,7 @@ def srepr(arg,glue = '\n'):
       hasattr(arg, "__getitem__") or
       hasattr(arg, "__iter__")):
      return glue.join(srepr(x) for x in arg)
-  return arg if isinstance(arg,basestring) else repr(arg)
+  return arg if isinstance(arg,str) else repr(arg)

 # -----------------------------
 def croak(what, newline = True):
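Reviewer note: swapping basestring for str completes the Python 3 move of srepr. A standalone sketch of its flattening behavior (the hasattr('strip') guard is assumed from the function's usual form, not shown in this hunk):

    def srepr(arg, glue='\n'):
        if (not hasattr(arg, 'strip') and                  # not string-like
            (hasattr(arg, '__getitem__') or
             hasattr(arg, '__iter__'))):
            return glue.join(srepr(x) for x in arg)        # recurse into iterables
        return arg if isinstance(arg, str) else repr(arg)

    print(srepr([1, 'two', [3.0, 4]]))                     # 1, two, 3.0, 4 on separate lines
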
@@ -70,9 +71,19 @@ def report_geom(info,

 # -----------------------------
 def emph(what):
-  """emphasizes string on screen"""
+  """boldens string"""
   return bcolors.BOLD+srepr(what)+bcolors.ENDC

+# -----------------------------
+def deemph(what):
+  """dims string"""
+  return bcolors.DIM+srepr(what)+bcolors.ENDC
+
+# -----------------------------
+def delete(what):
+  """dims string"""
+  return bcolors.DIM+srepr(what)+bcolors.ENDC
+
 # -----------------------------
 def execute(cmd,
             streamIn = None,
@@ -137,29 +148,22 @@ class extendableOption(Option):
 class backgroundMessage(threading.Thread):
   """reporting with animation to indicate progress"""

-  choices = {'bounce':   ['_', 'o', 'O', u'\u00B0',
-                          u'\u203e',u'\u203e',u'\u00B0','O','o','_'],
-             'spin':     [u'\u25dc',u'\u25dd',u'\u25de',u'\u25df'],
-             'circle':   [u'\u25f4',u'\u25f5',u'\u25f6',u'\u25f7'],
-             'hexagon':  [u'\u2b22',u'\u2b23'],
-             'square':   [u'\u2596',u'\u2598',u'\u259d',u'\u2597'],
-             'triangle': [u'\u140a',u'\u140a',u'\u1403',u'\u1405',u'\u1405',u'\u1403'],
-             'amoeba':   [u'\u2596',u'\u258f',u'\u2598',u'\u2594',u'\u259d',u'\u2595',
-                          u'\u2597',u'\u2582'],
-             'beat':     [u'\u2581',u'\u2582',u'\u2583',u'\u2585',u'\u2586',u'\u2587',
-                          u'\u2587',u'\u2586',u'\u2585',u'\u2583',u'\u2582',],
-             'prison':   [u'\u168b',u'\u168c',u'\u168d',u'\u168f',u'\u168e',u'\u168d',
-                          u'\u168c',u'\u168b',],
-             'breath':   [u'\u1690',u'\u1691',u'\u1692',u'\u1693',u'\u1694',u'\u1693',
-                          u'\u1692',u'\u1691',u'\u1690',],
-             'pulse':    [u'·',u'•',u'\u25cf',u'\u25cf',u'•',],
-             'ant':      [u'\u2801',u'\u2802',u'\u2810',u'\u2820',u'\u2804',u'\u2840',
-                          u'\u2880',u'\u2820',u'\u2804',u'\u2802',u'\u2810',u'\u2808'],
-             'juggle':   [u'\ua708',u'\ua709',u'\ua70a',u'\ua70b',u'\ua70c',u'\ua711',
-                          u'\ua710',u'\ua70f',u'\ua70d',],
-#            'wobbler':  [u'\u2581',u'\u25e3',u'\u258f',u'\u25e4',u'\u2594',u'\u25e5',u'\u2595',u'\u25e2',],
-             'grout':    [u'\u2581',u'\u258f',u'\u2594',u'\u2595',],
-             'partner':  [u'\u26ac',u'\u26ad',u'\u26ae',u'\u26af',u'\u26ae',u'\u26ad',],
+  choices = {'bounce':   ['_', 'o', 'O', '°', '‾', '‾', '°', 'O', 'o', '_'],
+             'spin':     ['◜', '◝', '◞', '◟'],
+             'circle':   ['◴', '◵', '◶', '◷'],
+             'hexagon':  ['⬢', '⬣'],
+             'square':   ['▖', '▘', '▝', '▗'],
+             'triangle': ['ᐊ', 'ᐊ', 'ᐃ', 'ᐅ', 'ᐅ', 'ᐃ'],
+             'amoeba':   ['▖', '▏', '▘', '▔', '▝', '▕', '▗', '▂'],
+             'beat':     ['▁', '▂', '▃', '▅', '▆', '▇', '▇', '▆', '▅', '▃', '▂'],
+             'prison':   ['ᚋ', 'ᚌ', 'ᚍ', 'ᚏ', 'ᚎ', 'ᚍ', 'ᚌ', 'ᚋ'],
+             'breath':   ['ᚐ', 'ᚑ', 'ᚒ', 'ᚓ', 'ᚔ', 'ᚓ', 'ᚒ', 'ᚑ', 'ᚐ'],
+             'pulse':    ['·', '•', '●', '●', '•'],
+             'ant':      ['⠁', '⠂', '⠐', '⠠', '⠄', '⡀', '⢀', '⠠', '⠄', '⠂', '⠐', '⠈'],
+             'juggle':   ['꜈', '꜉', '꜊', '꜋', '꜌', '꜑', '꜐', '꜏', '꜍'],
+#            'wobbler':  ['▁', '◣', '▏', '◤', '▔', '◥', '▕', '◢'],
+             'grout':    ['▁', '▏', '▔', '▕'],
+             'partner':  ['⚬', '⚭', '⚮', '⚯', '⚮', '⚭'],
              'classic':  ['-', '\\', '|', '/',],
            }

@@ -171,7 +175,7 @@ class backgroundMessage(threading.Thread):
     self.new_message = ''
     self.counter = 0
     self.gap = ' '
-    self.symbols = self.choices[symbol if symbol in self.choices else random.choice(self.choices.keys())]
+    self.symbols = self.choices[symbol if symbol in self.choices else random.choice(list(self.choices.keys()))]
     self.waittime = wait

   def __quit__(self):
@@ -200,7 +204,7 @@ class backgroundMessage(threading.Thread):
   def print_message(self):
     length = len(self.symbols[self.counter] + self.gap + self.message)
     sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length + \
-                     self.symbols[self.counter].encode('utf-8') + self.gap + self.new_message)  # delete former and print new message
+                     self.symbols[self.counter] + self.gap + self.new_message)                  # delete former and print new message
     sys.stderr.flush()
     self.message = self.new_message

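Reviewer note: under Python 3, writing the symbol directly is correct; the old .encode('utf-8') would emit a bytes repr into the stream. The backspace-erase trick that print_message relies on, as a self-contained sketch (frame set assumed from the 'classic' animation):

    import sys, time

    symbols = ['-', '\\', '|', '/']                   # 'classic' frames
    message = 'working'
    for i in range(20):
        frame = symbols[i % len(symbols)] + ' ' + message
        sys.stderr.write(frame)
        sys.stderr.flush()
        time.sleep(0.1)
        # erase: backspace over the frame, blank it out, backspace again
        sys.stderr.write(chr(8)*len(frame) + ' '*len(frame) + chr(8)*len(frame))
    sys.stderr.write('\n')
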
@@ -12,7 +12,7 @@ scriptID = ' '.join([scriptName,damask.version])
 # MAIN
 #--------------------------------------------------------------------------------------------------

-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """
 Adds header to OIM grain file to make it accesible as ASCII table

 """, version = scriptID)
@@ -13,7 +13,7 @@ scriptID = ' '.join([scriptName,damask.version])
 # MAIN
 #--------------------------------------------------------------------------------------------------

-parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog [angfile[s]]', description = """
 Convert TSL/EDAX *.ang file to ASCIItable

 """, version = scriptID)
@@ -30,7 +30,7 @@ for name in filenames:
                               outname = os.path.splitext(name)[0]+'.txt' if name else name,
                               buffered = False, labeled = False)
   except: continue
-  table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
+  damask.util.report(scriptName,name)

 # --- interpret header -----------------------------------------------------------------------------

@@ -2,6 +2,7 @@
 # -*- coding: UTF-8 no BOM -*-

 import os,sys
+import numpy as np
 from optparse import OptionParser
 import damask

@@ -28,7 +29,7 @@ parser.add_option('-o','--offset',
 parser.add_option('-l','--label',
                   dest = 'label',
                   action = 'extend', metavar = '<string LIST>',
-                  help='heading of column(s) to be mapped')
+                  help='column label(s) to be mapped')
 parser.add_option('-a','--asciitable',
                   dest = 'asciitable',
                   type = 'string', metavar = 'string',
@@ -49,12 +50,13 @@ if options.map is None:
 if options.asciitable is not None and os.path.isfile(options.asciitable):

   mappedTable = damask.ASCIItable(name = options.asciitable,
-                                  buffered = False, readonly = True)
+                                  buffered = False,
+                                  readonly = True)
   mappedTable.head_read()                                                 # read ASCII header info of mapped table
   missing_labels = mappedTable.data_readArray(options.label)

   if len(missing_labels) > 0:
-    mappedTable.croak('column{} {} not found...'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels)))
+    damask.util.croak('column{} {} not found...'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels)))

 else:
   parser.error('no mapped ASCIItable given.')
@@ -64,9 +66,8 @@ else:
 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
+  try: table = damask.ASCIItable(name = name,
+                                 buffered = False)
   except: continue
   damask.util.report(scriptName,name)

@@ -96,7 +97,10 @@ for name in filenames:

   outputAlive = True
   while outputAlive and table.data_read():                                # read next data line of ASCII table
-    table.data_append(mappedTable.data[int(round(float(table.data[mappedColumn])))+options.offset-1])  # add all mapped data types
+    try:
+      table.data_append(mappedTable.data[int(round(float(table.data[mappedColumn])))+options.offset-1])  # add all mapped data types
+    except IndexError:
+      table.data_append(np.nan*np.ones_like(mappedTable.data[0]))
     outputAlive = table.data_write()                                      # output processed line

 # ------------------------------------------ output finalization -----------------------------------

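Reviewer note: the try/except keeps output row counts intact when a map index falls outside the mapped table, padding with NaN instead of crashing. The padding idea in isolation (made-up array, not the script's data flow):

    import numpy as np

    mapped = np.array([[1.0, 2.0],
                       [3.0, 4.0]])                  # table to map from
    for idx in (0, 1, 5):                            # 5 is out of range
        try:
            row = mapped[idx]
        except IndexError:
            row = np.nan*np.ones_like(mapped[0])     # keep the shape, flag as missing
        print(idx, row)
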
@@ -69,7 +69,8 @@ parser.add_option('-q', '--quaternion',

 parser.set_defaults(output = [],
                     symmetry = damask.Symmetry.lattices[-1],
-                    rotation = (0.,1.,1.,1.),                             # no rotation about 1,1,1
+                    labrotation = (0.,1.,1.,1.),                          # no rotation about 1,1,1
+                    crystalrotation = (0.,1.,1.,1.),                      # no rotation about 1,1,1
                     degrees = False,
                    )

@@ -167,7 +167,6 @@ force = np.array(options.force)
 force /= np.linalg.norm(force)

 if options.normal:
-  damask.util.croak('got normal')
   normal = np.array(options.normal)
   normal /= np.linalg.norm(normal)
   if abs(np.dot(force,normal)) > 1e-3:

@@ -78,6 +78,7 @@ for name in filenames:
     for type, data in items.iteritems():
       for column in data['column']:
         (u,v) = np.linalg.eigh(np.array(map(float,table.data[column:column+data['dim']])).reshape(data['shape']))
+        if np.dot(np.cross(v[:,0], v[:,1]), v[:,2]) < 0.0 : v[:, 2] *= -1.0  # ensure right-handed coordinate system
         table.data_append(list(u))
         table.data_append(list(v.transpose().reshape(data['dim'])))
     outputAlive = table.data_write()                                      # output processed line

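Reviewer note: np.linalg.eigh returns an orthonormal eigenvector basis of arbitrary handedness; flipping the third column when the scalar triple product is negative restores a right-handed frame. A quick standalone check (arbitrary symmetric matrix for illustration):

    import numpy as np

    A = np.array([[2., 1., 0.],
                  [1., 3., 0.],
                  [0., 0., 1.]])                     # symmetric test matrix
    u, v = np.linalg.eigh(A)                         # eigenvalues, eigenvectors (columns)
    if np.dot(np.cross(v[:, 0], v[:, 1]), v[:, 2]) < 0.0:
        v[:, 2] *= -1.0                              # flip third axis
    assert np.dot(np.cross(v[:, 0], v[:, 1]), v[:, 2]) > 0.0
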
@@ -1,82 +0,0 @@
-#!/usr/bin/env python2.7
-# -*- coding: UTF-8 no BOM -*-
-
-import os,sys
-import numpy as np
-from optparse import OptionParser
-import damask
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# --------------------------------------------------------------------
-# MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Replace all rows for which column 'label' has identical values by a single row containing their average.
-Output table will contain as many rows as there are different (unique) values in the grouping column.
-
-Examples:
-For grain averaged values, replace all rows of particular 'texture' with a single row containing their average.
-""", version = scriptID)
-
-parser.add_option('-l','--label',
-                  dest = 'label',
-                  type = 'string', metavar = 'string',
-                  help = 'column label for grouping rows')
-
-(options,filenames) = parser.parse_args()
-
-if options.label is None:
-  parser.error('no grouping column specified.')
-
-
-# --- loop over input files -------------------------------------------------------------------------
-
-if filenames == []: filenames = [None]
-
-for name in filenames:
-  try: table = damask.ASCIItable(name = name,
-                                 outname = os.path.join(
-                                                        os.path.split(name)[0],
-                                                        options.label+'_averaged_'+os.path.split(name)[1]
-                                                       ) if name else name,
-                                 buffered = False)
-  except: continue
-  damask.util.report(scriptName,name)
-
-# ------------------------------------------ sanity checks ---------------------------------------
-
-  table.head_read()
-  if table.label_dimension(options.label) != 1:
-    damask.util.croak('column {} is not of scalar dimension.'.format(options.label))
-    table.close(dismiss = True)                                           # close ASCIItable and remove empty file
-    continue
-
-# ------------------------------------------ assemble info ---------------------------------------
-
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.head_write()
-
-# ------------------------------------------ process data --------------------------------
-
-  table.data_readArray()
-  rows,cols = table.data.shape
-
-  table.data = table.data[np.lexsort([table.data[:,table.label_index(options.label)]])]
-
-  values,index = np.unique(table.data[:,table.label_index(options.label)], return_index = True)
-  index = np.append(index,rows)
-  avgTable = np.empty((len(values), cols))
-
-  for j in xrange(cols) :
-    for i in xrange(len(values)) :
-      avgTable[i,j] = np.average(table.data[index[i]:index[i+1],j])
-
-  table.data = avgTable
-
-# ------------------------------------------ output result -------------------------------
-
-  table.data_writeArray()
-  table.close()                                                           # close ASCII table
@@ -20,7 +20,7 @@ Produces a binned grid of two columns from an ASCIItable, i.e. a two-dimensional

 parser.add_option('-d','--data',
                   dest = 'data',
-                  type='string', nargs = 2, metavar = 'string string',
+                  type = 'string', nargs = 2, metavar = 'string string',
                   help = 'column labels containing x and y ')
 parser.add_option('-w','--weight',
                   dest = 'weight',
@@ -49,15 +49,15 @@ parser.add_option('-z','--zrange',
 parser.add_option('-i','--invert',
                   dest = 'invert',
                   action = 'store_true',
-                  help = 'invert probability density [%default]')
+                  help = 'invert probability density')
 parser.add_option('-r','--rownormalize',
                   dest = 'normRow',
                   action = 'store_true',
-                  help = 'normalize probability density in each row [%default]')
+                  help = 'normalize probability density in each row')
 parser.add_option('-c','--colnormalize',
                   dest = 'normCol',
                   action = 'store_true',
-                  help = 'normalize probability density in each column [%default]')
+                  help = 'normalize probability density in each column')

 parser.set_defaults(bins = (10,10),
                     type = ('linear','linear','linear'),
@@ -79,7 +79,8 @@ result = np.zeros((options.bins[0],options.bins[1],3),'f')

 if options.data is None: parser.error('no data columns specified.')

-labels = options.data
+labels = list(options.data)


 if options.weight is not None: labels += [options.weight]                # prevent character splitting of single string value

@@ -120,11 +121,10 @@ for name in filenames:

   delta = minmax[:,1]-minmax[:,0]

-  for i in xrange(len(table.data)):
-    x = int(options.bins[0]*(table.data[i,0]-minmax[0,0])/delta[0])
-    y = int(options.bins[1]*(table.data[i,1]-minmax[1,0])/delta[1])
-    if x >= 0 and x < options.bins[0] and y >= 0 and y < options.bins[1]:
-      grid[x,y] += 1. if options.weight is None else table.data[i,2]      # count (weighted) occurrences
+  (grid,xedges,yedges) = np.histogram2d(table.data[:,0],table.data[:,1],
+                                        bins=options.bins,
+                                        range=minmax,
+                                        weights=None if options.weight is None else table.data[:,2])

   if options.normCol:
     for x in xrange(options.bins[0]):
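Reviewer note: np.histogram2d folds the manual binning loop, the range clipping, and the optional weighting into one call. The same pattern in isolation (random sample data as a stand-in):

    import numpy as np

    np.random.seed(0)
    x, y = np.random.rand(1000), np.random.rand(1000)
    w = np.random.rand(1000)                          # optional per-point weights

    grid, xedges, yedges = np.histogram2d(x, y,
                                          bins=(10, 10),
                                          range=[[0., 1.], [0., 1.]],
                                          weights=w)  # weights=None gives plain counts
    print(grid.shape)                                 # (10, 10)
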
|
@ -0,0 +1,131 @@
|
||||||
|
#!/usr/bin/env python2.7
|
||||||
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
|
import os,sys
|
||||||
|
import math # noqa
|
||||||
|
import numpy as np
|
||||||
|
from optparse import OptionParser, OptionGroup
|
||||||
|
import damask
|
||||||
|
|
||||||
|
#"https://en.wikipedia.org/wiki/Center_of_mass#Systems_with_periodic_boundary_conditions"
|
||||||
|
def periodicAverage(Points, Box):
|
||||||
|
theta = (Points/Box[1]) * (2.0*np.pi)
|
||||||
|
xi = np.cos(theta)
|
||||||
|
zeta = np.sin(theta)
|
||||||
|
theta_avg = np.arctan2(-1.0*zeta.mean(), -1.0*xi.mean()) + np.pi
|
||||||
|
Pmean = Box[1] * theta_avg/(2.0*np.pi)
|
||||||
|
return Pmean
|
||||||
|
|
||||||
|
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||||
|
scriptID = ' '.join([scriptName,damask.version])
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------
|
||||||
|
# MAIN
|
||||||
|
# --------------------------------------------------------------------
|
||||||
|
|
||||||
|
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
||||||
|
Apply a user-specified function to condense all rows for which column 'label' has identical values into a single row.
|
||||||
|
Output table will contain as many rows as there are different (unique) values in the grouping column.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
For grain averaged values, replace all rows of particular 'texture' with a single row containing their average.
|
||||||
|
""", version = scriptID)
|
||||||
|
|
||||||
|
parser.add_option('-l','--label',
|
||||||
|
dest = 'label',
|
||||||
|
type = 'string', metavar = 'string',
|
||||||
|
help = 'column label for grouping rows')
|
||||||
|
parser.add_option('-f','--function',
|
||||||
|
dest = 'function',
|
||||||
|
type = 'string', metavar = 'string',
|
||||||
|
help = 'mapping function [%default]')
|
||||||
|
parser.add_option('-a','--all',
|
||||||
|
dest = 'all',
|
||||||
|
action = 'store_true',
|
||||||
|
help = 'apply mapping function also to grouping column')
|
||||||
|
|
||||||
|
group = OptionGroup(parser, "periodic averaging", "")
|
||||||
|
|
||||||
|
group.add_option('-p','--periodic',
|
||||||
|
dest = 'periodic',
|
||||||
|
action = 'store_true',
|
||||||
|
help = 'calculate average in periodic space defined by periodic length [%default]')
|
||||||
|
group.add_option('--boundary',
|
||||||
|
dest = 'boundary', metavar = 'MIN MAX',
|
||||||
|
type = 'float', nargs = 2,
|
||||||
|
help = 'define periodic box end points %default')
|
||||||
|
|
||||||
|
parser.add_option_group(group)
|
||||||
|
|
||||||
|
parser.set_defaults(function = 'np.average',
|
||||||
|
all = False,
|
||||||
|
periodic = False,
|
||||||
|
boundary = [0.0, 1.0])
|
||||||
|
|
||||||
|
(options,filenames) = parser.parse_args()
|
||||||
|
|
||||||
|
funcModule,funcName = options.function.split('.')
|
||||||
|
|
||||||
|
try:
|
||||||
|
mapFunction = getattr(locals().get(funcModule) or
|
||||||
|
globals().get(funcModule) or
|
||||||
|
__import__(funcModule),
|
||||||
|
funcName)
|
||||||
|
except:
|
||||||
|
mapFunction = None
|
||||||
|
|
||||||
|
if options.label is None:
|
||||||
|
parser.error('no grouping column specified.')
|
||||||
|
if not hasattr(mapFunction,'__call__'):
|
||||||
|
parser.error('function "{}" is not callable.'.format(options.function))
|
||||||
|
|
||||||
|
|
||||||
|
# --- loop over input files -------------------------------------------------------------------------
|
||||||
|
|
||||||
|
if filenames == []: filenames = [None]
|
||||||
|
|
||||||
|
for name in filenames:
|
||||||
|
try: table = damask.ASCIItable(name = name,
|
||||||
|
buffered = False)
|
||||||
|
except: continue
|
||||||
|
damask.util.report(scriptName,name)
|
||||||
|
|
||||||
|
# ------------------------------------------ sanity checks ---------------------------------------
|
||||||
|
|
||||||
|
table.head_read()
|
||||||
|
if table.label_dimension(options.label) != 1:
|
||||||
|
damask.util.croak('column {} is not of scalar dimension.'.format(options.label))
|
||||||
|
table.close(dismiss = True) # close ASCIItable and remove empty file
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
grpColumn = table.label_index(options.label)
|
||||||
|
|
||||||
|
# ------------------------------------------ assemble info ---------------------------------------
|
||||||
|
|
||||||
|
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
|
||||||
|
table.head_write()
|
||||||
|
|
||||||
|
# ------------------------------------------ process data --------------------------------
|
||||||
|
|
||||||
|
table.data_readArray()
|
||||||
|
rows,cols = table.data.shape
|
||||||
|
|
||||||
|
table.data = table.data[np.lexsort([table.data[:,grpColumn]])] # sort data by grpColumn
|
||||||
|
|
||||||
|
values,index = np.unique(table.data[:,grpColumn], return_index = True) # unique grpColumn values and their positions
|
||||||
|
index = np.append(index,rows) # add termination position
|
||||||
|
grpTable = np.empty((len(values), cols)) # initialize output
|
||||||
|
|
||||||
|
for i in xrange(len(values)): # iterate over groups (unique values in grpColumn)
|
||||||
|
if options.periodic :
|
||||||
|
grpTable[i] = periodicAverage(table.data[index[i]:index[i+1]],options.boundary) # apply periodicAverage mapping function
|
||||||
|
else :
|
||||||
|
grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply mapping function
|
||||||
|
if not options.all: grpTable[i,grpColumn] = table.data[index[i],grpColumn] # restore grouping column value
|
||||||
|
|
||||||
|
table.data = grpTable
|
||||||
|
|
||||||
|
# ------------------------------------------ output result -------------------------------
|
||||||
|
|
||||||
|
table.data_writeArray()
|
||||||
|
table.close() # close ASCII table
|
|
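Reviewer note: periodicAverage maps each position to an angle on a circle, averages the unit vectors, and maps the mean angle back, so values straddling the box boundary average correctly. A quick check under an assumed [0,1) box:

    import numpy as np

    def periodicAverage(Points, Box):
        theta = (Points/Box[1]) * (2.0*np.pi)        # position -> angle
        xi, zeta = np.cos(theta), np.sin(theta)
        theta_avg = np.arctan2(-1.0*zeta.mean(), -1.0*xi.mean()) + np.pi
        return Box[1] * theta_avg/(2.0*np.pi)        # mean angle -> position

    pts = np.array([0.95, 0.05])                     # straddling the boundary
    print(np.average(pts))                           # 0.5, the misleading arithmetic mean
    print(periodicAverage(pts, [0.0, 1.0]))          # ~0.0 (or equivalently ~1.0), the periodic mean
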
@@ -5,6 +5,7 @@ import os,vtk
 import damask
 from collections import defaultdict
 from optparse import OptionParser
+from vtk.util import numpy_support

 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])
@@ -13,10 +14,10 @@ scriptID = ' '.join([scriptName,damask.version])
 # MAIN
 # --------------------------------------------------------------------

-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp).
-
-""", version = scriptID)
+parser = OptionParser(option_class=damask.extendableOption,
+                      usage='%prog options [file[s]]',
+                      description = """Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp).""",
+                      version = scriptID)

 parser.add_option(      '--vtk',
                   dest = 'vtk',
@@ -30,19 +31,20 @@ parser.add_option('-r', '--render',
                   dest = 'render',
                   action = 'store_true',
                   help = 'open output in VTK render window')
-parser.add_option('-s', '--scalar', dest='scalar', action='extend',
-                  metavar ='<string LIST>',
-                  help = 'scalar values')
-parser.add_option('-v', '--vector',
-                  dest = 'vector',
+parser.add_option('-d', '--data',
+                  dest = 'data',
                   action = 'extend', metavar = '<string LIST>',
-                  help = 'vector value label(s)')
+                  help = 'scalar/vector value(s) label(s)')
+parser.add_option('-t', '--tensor',
+                  dest = 'tensor',
+                  action = 'extend', metavar = '<string LIST>',
+                  help = 'tensor (3x3) value label(s)')
 parser.add_option('-c', '--color', dest='color', action='extend',
                   metavar ='<string LIST>',
                   help = 'RGB color tuples')

-parser.set_defaults(scalar = [],
-                    vector = [],
+parser.set_defaults(data = [],
+                    tensor = [],
                     color = [],
                     inplace = False,
                     render = False,
@@ -95,58 +97,59 @@ for name in filenames:
   VTKarray = {}
   active = defaultdict(list)

-  for datatype,dimension,label in [['scalar',1,options.scalar],
-                                   ['vector',3,options.vector],
-                                   ['color',3,options.color],
+  for datatype,dimension,label in [['data',99,options.vector],
+                                   ['tensor',9,options.tensor],
+                                   ['color' ,3,options.color],
                                    ]:
     for i,dim in enumerate(table.label_dimension(label)):
       me = label[i]
       if dim == -1:         remarks.append('{} "{}" not found...'.format(datatype,me))
       elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
       else:
         remarks.append('adding {} "{}"...'.format(datatype,me))
         active[datatype].append(me)

-        if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
-        elif datatype == 'color':           VTKarray[me] = vtk.vtkUnsignedCharArray()
-
-        VTKarray[me].SetNumberOfComponents(dimension)
-        VTKarray[me].SetName(label[i])
-
   if remarks != []: damask.util.croak(remarks)
   if errors  != []:
     damask.util.croak(errors)
     table.close(dismiss = True)
     continue

-# ------------------------------------------ process data ---------------------------------------
+# --------------------------------------- process and add data -----------------------------------

-  while table.data_read():                                                # read next data line of ASCII table
+  table.data_readArray([item for sublist in active.values() for item in sublist])  # read all requested data

-    for datatype,labels in active.items():                                # loop over scalar,color
-      for me in labels:                                                   # loop over all requested items
-        theData = [table.data[i] for i in table.label_indexrange(me)]     # read strings
-        if datatype == 'color':    VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
-        elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
-        elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
-
-  table.input_close()                                                     # close input ASCII table
-
-# ------------------------------------------ add data ---------------------------------------
-
-  for datatype,labels in active.items():                                  # loop over scalar,color
-    if datatype == 'color':
-      Polydata.GetPointData().SetScalars(VTKarray[active['color'][0]])
-      Polydata.GetCellData().SetScalars(VTKarray[active['color'][0]])
-    else:
-      for me in labels:                                                   # loop over all requested items
-        Polydata.GetPointData().AddArray(VTKarray[me])
-        Polydata.GetCellData().AddArray(VTKarray[me])
+  for datatype,labels in active.items():                                  # loop over scalar,color
+    for me in labels:                                                     # loop over all requested items
+      VTKtype = vtk.VTK_DOUBLE
+      VTKdata = table.data[:, table.label_indexrange(me)].copy()          # copy to force contiguous layout
+
+      if datatype == 'color':
+        VTKtype = vtk.VTK_UNSIGNED_CHAR
+        VTKdata = (VTKdata*255).astype(int)                               # translate to 0..255 UCHAR
+      elif datatype == 'tensor':
+        VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3])
+        VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6])
+        VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7])
+
+      VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype)
+      VTKarray[me].SetName(me)
+
+      if datatype == 'color':
+        Polydata.GetPointData().SetScalars(VTKarray[me])
+        Polydata.GetCellData().SetScalars(VTKarray[me])
+      else:
+        Polydata.GetPointData().AddArray(VTKarray[me])
+        Polydata.GetCellData().AddArray(VTKarray[me])

+  table.input_close()                                                     # close input ASCII table
+
+# ------------------------------------------ output result ---------------------------------------

   Polydata.Modified()
   if vtk.VTK_MAJOR_VERSION <= 5: Polydata.Update()

-# ------------------------------------------ output result ---------------------------------------
-
   writer = vtk.vtkXMLPolyDataWriter()
   writer.SetDataModeToBinary()
   writer.SetCompressorTypeToZLib()
@@ -164,7 +167,7 @@ if options.render:
   actor.SetMapper(mapper)

 # Create the graphics structure. The renderer renders into the
-# render window. The render window interactor captures mouse events
+# render window. The render window interactively captures mouse events
 # and will perform appropriate camera or actor manipulation
 # depending on the nature of the events.

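Reviewer note: numpy_support.numpy_to_vtk converts the whole array at once instead of per-row InsertNextTuple calls; deep=True hands VTK its own copy, which is why the .copy() for a contiguous layout is safe. A minimal sketch (assumes a VTK build that ships vtk.util.numpy_support):

    import numpy as np
    import vtk
    from vtk.util import numpy_support

    data = np.random.rand(100, 3)                    # 100 three-component tuples
    arr = numpy_support.numpy_to_vtk(num_array=data.copy(),   # contiguous buffer
                                     deep=True,               # VTK owns a copy
                                     array_type=vtk.VTK_DOUBLE)
    arr.SetName('example')
    print(arr.GetNumberOfTuples(), arr.GetNumberOfComponents())  # 100 3
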
@@ -3,6 +3,7 @@

 import os,vtk
 import damask
+from vtk.util import numpy_support
 from collections import defaultdict
 from optparse import OptionParser

@@ -30,21 +31,21 @@ parser.add_option('-r', '--render',
                   dest = 'render',
                   action = 'store_true',
                   help = 'open output in VTK render window')
-parser.add_option('-s', '--scalar',
-                  dest = 'scalar',
+parser.add_option('-d', '--data',
+                  dest = 'data',
                   action = 'extend', metavar = '<string LIST>',
-                  help = 'scalar value label(s)')
-parser.add_option('-v', '--vector',
-                  dest = 'vector',
+                  help = 'scalar/vector value(s) label(s)')
+parser.add_option('-t', '--tensor',
+                  dest = 'tensor',
                   action = 'extend', metavar = '<string LIST>',
-                  help = 'vector value label(s)')
+                  help = 'tensor (3x3) value label(s)')
 parser.add_option('-c', '--color',
                   dest = 'color',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'RGB color tuple label')

-parser.set_defaults(scalar = [],
-                    vector = [],
+parser.set_defaults(data = [],
+                    tensor = [],
                     color = [],
                     inplace = False,
                     render = False,
@@ -93,24 +94,18 @@ for name in filenames:
   VTKarray = {}
   active = defaultdict(list)

-  for datatype,dimension,label in [['scalar',1,options.scalar],
-                                   ['vector',3,options.vector],
-                                   ['color',3,options.color],
+  for datatype,dimension,label in [['data',99,options.data],
+                                   ['tensor',9,options.tensor],
+                                   ['color' ,3,options.color],
                                    ]:
     for i,dim in enumerate(table.label_dimension(label)):
       me = label[i]
       if dim == -1:         remarks.append('{} "{}" not found...'.format(datatype,me))
       elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
       else:
         remarks.append('adding {} "{}"...'.format(datatype,me))
         active[datatype].append(me)

-        if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
-        elif datatype == 'color':           VTKarray[me] = vtk.vtkUnsignedCharArray()
-
-        VTKarray[me].SetNumberOfComponents(dimension)
-        VTKarray[me].SetName(label[i])
-
   if remarks != []: damask.util.croak(remarks)
   if errors  != []:
     damask.util.croak(errors)
@@ -119,24 +114,30 @@ for name in filenames:

 # ------------------------------------------ process data ---------------------------------------

-  datacount = 0
+  table.data_readArray([item for sublist in active.values() for item in sublist])  # read all requested data

-  while table.data_read():                                                # read next data line of ASCII table
+  for datatype,labels in active.items():                                  # loop over scalar,color
+    for me in labels:                                                     # loop over all requested items
+      VTKtype = vtk.VTK_DOUBLE
+      VTKdata = table.data[:, table.label_indexrange(me)].copy()          # copy to force contiguous layout

-    datacount += 1                                                        # count data lines
-    for datatype,labels in active.items():                                # loop over scalar,color
-      for me in labels:                                                   # loop over all requested items
-        theData = [table.data[i] for i in table.label_indexrange(me)]     # read strings
-        if datatype == 'color':    VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
-        elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
-        elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
+      if datatype == 'color':
+        VTKtype = vtk.VTK_UNSIGNED_CHAR
+        VTKdata = (VTKdata*255).astype(int)                               # translate to 0..255 UCHAR
+      elif datatype == 'tensor':
+        VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3])
+        VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6])
+        VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7])
+
+      VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype)
+      VTKarray[me].SetName(me)

   table.close()                                                           # close input ASCII table

 # ------------------------------------------ add data ---------------------------------------

-  if   datacount == Npoints:  mode = 'point'
-  elif datacount == Ncells:   mode = 'cell'
+  if   len(table.data) == Npoints: mode = 'point'
+  elif len(table.data) == Ncells:  mode = 'cell'
   else:
     damask.util.croak('Data count is incompatible with grid...')
     continue

@@ -79,9 +79,9 @@ for name in filenames:
   coords = [np.unique(table.data[:,i]) for i in xrange(3)]

   if options.mode == 'cell':
-    coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + len(coords[i]) > 1]] + \
+    coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \
                              [coords[i][j-1] + coords[i][j] for j in xrange(1,len(coords[i]))] + \
-                             [3.0 * coords[i][-1] - coords[i][-1 - (len(coords[i]) > 1)]]) for i in xrange(3)]
+                             [3.0 * coords[i][-1] - coords[i][-1 - int(len(coords[i]) > 1)]]) for i in xrange(3)]

   grid = np.array(map(len,coords),'i')
   N = grid.prod() if options.mode == 'point' else (grid-1).prod()

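Reviewer note: newer numpy rejects implicit bool arithmetic inside index expressions, hence the explicit int(...) casts. The cell-boundary reconstruction for a single axis, in isolation (assumed coordinates):

    import numpy as np

    nodes = np.array([0.0, 1.0, 2.0, 4.0])           # unique coordinates on one axis
    cells = 0.5 * np.array([3.0*nodes[0]  - nodes[0 + int(len(nodes) > 1)]] +
                           [nodes[j-1] + nodes[j] for j in range(1, len(nodes))] +
                           [3.0*nodes[-1] - nodes[-1 - int(len(nodes) > 1)]])
    print(cells)                                      # [-0.5  0.5  1.5  3.   5. ]
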
@@ -1,106 +0,0 @@
-#!/usr/bin/env python2.7
-# -*- coding: UTF-8 no BOM -*-
-
-import os,sys
-from optparse import OptionParser
-import numpy as np
-import damask
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-sampleSym = { 'Orthotropic'  : (90,90,90),
-              'Triclinic'    : (360,180,360)
-            }
-
-# --------------------------------------------------------------------
-# MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Transform the binned texture data from "TSL OIM Analysis" into linear ODF data,
-
-""", version = scriptID)
-
-parser.add_option('-s', '--symmetry', dest='symmetry', choices=sampleSym.keys(),
-                  metavar = 'string',
-                  help='Sample symmetry {%s} [Triclinic]'%(' '.join(sampleSym.keys())))
-
-parser.set_defaults(symmetry = 'Triclinic')
-
-(options,filenames) = parser.parse_args()
-
-#--- setup file handles ---------------------------------------------------------------------------
-files = []
-if filenames == []:
-  files.append({'name':'STDIN',
-                'input':sys.stdin,
-                'output':sys.stdout,
-                'croak':sys.stderr,
-               })
-else:
-  for name in filenames:
-    if os.path.exists(name):
-      files.append({'name':name,
-                    'input':open(name),
-                    'output':open(name+'_tmp','w'),
-                    'croak':sys.stdout,
-                    })
-
-#--- loop over input files ------------------------------------------------------------------------
-for file in files:
-  file['croak'].write('\033[1m' + scriptName + '\033[0m: ' + (file['name'] if file['name'] != 'STDIN' else '') + '\n')
-
-  while True:                                                             # read header (forward and get bin Size)
-    line = file['input'].readline()
-    words = line.split()
-    if len(words)>=3:
-      if words[1]=='Bin' and words[2]=='Size:': binSize=float(words[3][:-1])
-    if not line.startswith('#'): break
-
-  delta = [sampleSym[options.symmetry][i]/binSize for i in xrange(3)]
-
-  nPhi1,nPHI,nPhi2 = map(int,delta)
-  dPhi1,dPHI,dPhi2 = [sampleSym[options.symmetry][i]/delta[i] for i in xrange(3)]
-
-  N = (nPhi1-1)*(nPHI-1)*(nPhi2-1)
-
-
-  ODF = [[[[None] for k in range(nPhi2)] for j in range(nPHI)] for i in range(nPhi1)]
-  linear = [None]*N
-
-  ODF = np.empty([nPhi1,nPHI,nPhi2],'d')
-
-  for iPhi1 in range(nPhi1):
-    for iPHI in range(nPHI):
-      for iPhi2 in range(nPhi2):
-        ODF[iPhi1,iPHI,iPhi2] = float(line.split()[3])*0.125              # extract intensity (in column 4) and weight by 1/8
-        line = file['input'].readline()
-
-  for iPhi1 in range(nPhi1-1):
-    for iPHI in range(nPHI-1):
-      for iPhi2 in range(nPhi2-1):
-        linear[iPhi1*(nPHI-1)*(nPhi2-1)+iPHI*(nPhi2-1)+iPhi2] =\
-          ODF[iPhi1  ,iPHI  ,iPhi2  ] +\
-          ODF[iPhi1  ,iPHI  ,iPhi2+1] +\
-          ODF[iPhi1  ,iPHI+1,iPhi2  ] +\
-          ODF[iPhi1  ,iPHI+1,iPhi2+1] +\
-          ODF[iPhi1+1,iPHI  ,iPhi2  ] +\
-          ODF[iPhi1+1,iPHI  ,iPhi2+1] +\
-          ODF[iPhi1+1,iPHI+1,iPhi2  ] +\
-          ODF[iPhi1+1,iPHI+1,iPhi2+1]
-
-
-  file['output'].write('4 header\n')
-  file['output'].write('limit phi1 %-6.2f Phi %-6.2f phi2 %-6.2f\n'%sampleSym[options.symmetry])
-  file['output'].write('delta phi1 %-6.2f Phi %-6.2f phi2 %-6.2f\n'%(dPhi1,dPHI,dPhi2))
-  file['output'].write('centration cell-centered\n')
-  file['output'].write('density\n')
-
-  for i in range(N):
-    file['output'].write('%g\n'%(linear[i]))
-
-#--- output finalization --------------------------------------------------------------------------
-  if file['name'] != 'STDIN':
-    file['output'].close()
-    os.rename(file['name']+'_tmp',os.path.splitext(file['name'])[0] +'.linearODF')
@@ -126,9 +126,9 @@ for name in filenames:
           primPos = invRotation*gridpos                                   # rotate back to primitive coordinate system
           if np.dot(mask*primPos/dim,mask*primPos/dim) <= 0.25 and \
              np.all(abs((1.-mask)*primPos/dim) <= 0.5):                   # inside ellipsoid and inside box
-            microstructure[(gridpos[0]+options.center[0])%info['grid'][0],
-                           (gridpos[1]+options.center[1])%info['grid'][1],
-                           (gridpos[2]+options.center[2])%info['grid'][2]] = options.fill  # assign microstructure index
+            microstructure[int((gridpos[0]+options.center[0])%info['grid'][0]),
+                           int((gridpos[1]+options.center[1])%info['grid'][1]),
+                           int((gridpos[2]+options.center[2])%info['grid'][2])] = options.fill  # assign microstructure index

   newInfo['microstructures'] = microstructure.max()

@@ -81,17 +81,20 @@ for name in filenames:
   periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]/2:-grid[0]/2,
                                                             grid[1]/2:-grid[1]/2,
                                                             grid[2]/2:-grid[2]/2]  # periodically extend the microstructure

   # store a copy the initial microstructure to find locations of immutable indices
   microstructure_original = np.copy(microstructure)

   X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]

+  # Calculates gaussian weights for simulating 3d diffusion
   gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d))/math.pow(2.0*np.pi*options.d*options.d,1.5)
-  gauss[:,:,grid[2]/2::] = gauss[:,:,round(grid[2]/2.)-1::-1]             # trying to cope with uneven (odd) grid size
-  gauss[:,grid[1]/2::,:] = gauss[:,round(grid[1]/2.)-1::-1,:]
-  gauss[grid[0]/2::,:,:] = gauss[round(grid[0]/2.)-1::-1,:,:]
+  gauss[:,:,grid[2]/2::] = gauss[:,:,int(round(grid[2]/2.))-1::-1]        # trying to cope with uneven (odd) grid size
+  gauss[:,grid[1]/2::,:] = gauss[:,int(round(grid[1]/2.))-1::-1,:]
+  gauss[grid[0]/2::,:,:] = gauss[int(round(grid[0]/2.))-1::-1,:,:]
   gauss = np.fft.rfftn(gauss)

-  interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0
+  interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0                 # 1.0 if A & B are distinct & nonzero, 0.0 otherwise
   struc = ndimage.generate_binary_structure(3,1)                          # 3D von Neumann neighborhood


@@ -101,9 +104,11 @@ for name in filenames:
       for j in (-1,0,1):
         for k in (-1,0,1):
           # assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
-          interfaceEnergy = np.maximum(boundary,
-                                       interfacialEnergy(microstructure,np.roll(np.roll(np.roll(
-                                                         microstructure,i,axis=0), j,axis=1), k,axis=2)))
+          boundary = np.maximum(boundary,
+                                interfacialEnergy(microstructure,np.roll(np.roll(np.roll(
+                                                  microstructure,i,axis=0), j,axis=1), k,axis=2)))
+
+  interfaceEnergy = boundary

   # periodically extend interfacial energy array by half a grid size in positive and negative directions
   periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]/2:-grid[0]/2,
                                                               grid[1]/2:-grid[1]/2,
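Reviewer note: transforming the Gaussian kernel once with np.fft.rfftn and multiplying in Fourier space gives a periodic convolution, i.e. the diffusion-like smoothing step. The pattern in isolation, with the odd-grid kernel mirroring omitted for brevity (assumed grid and width):

    import numpy as np

    grid = (32, 32, 32)
    d = 2.0                                           # smoothing width
    X, Y, Z = np.mgrid[0:grid[0], 0:grid[1], 0:grid[2]]

    gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*d*d)) / (2.0*np.pi*d*d)**1.5
    gauss_hat = np.fft.rfftn(gauss)                   # kernel spectrum, computed once

    field = np.random.rand(*grid)                     # stand-in for the microstructure field
    smoothed = np.fft.irfftn(np.fft.rfftn(field)*gauss_hat, s=field.shape)  # periodic convolution
    print(smoothed.shape)                             # (32, 32, 32)
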
@@ -0,0 +1,117 @@
+#!/usr/bin/env python2.7
+# -*- coding: UTF-8 no BOM -*-
+
+import os,sys,math
+import numpy as np
+import damask
+from optparse import OptionParser
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID   = ' '.join([scriptName,damask.version])
+
+#--------------------------------------------------------------------------------------------------
+#                                                MAIN
+#--------------------------------------------------------------------------------------------------
+
+validDirections = ['x','y','z']
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
+Mirrors spectral geometry description along given directions.
+
+""", version=scriptID)
+
+parser.add_option('-d','--direction',
+                  dest = 'directions',
+                  action = 'extend', metavar = '<string LIST>',
+                  help = "directions in which to mirror {'x','y','z'}")
+
+(options, filenames) = parser.parse_args()
+
+if options.directions is None:
+  parser.error('no direction given.')
+if not set(options.directions).issubset(validDirections):
+  invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)]
+  parser.error('invalid directions {}. '.format(*invalidDirections))
+
+# --- loop over input files -------------------------------------------------------------------------
+
+if filenames == []: filenames = [None]
+
+for name in filenames:
+  try:
+    table = damask.ASCIItable(name = name,
+                              buffered = False, labeled = False)
+  except: continue
+  damask.util.report(scriptName,name)
+
+# --- interpret header ----------------------------------------------------------------------------
+
+  table.head_read()
+  info,extra_header = table.head_getGeom()
+
+  damask.util.croak(['grid     a b c:  %s'%(' x '.join(map(str,info['grid']))),
+                     'size     x y z:  %s'%(' x '.join(map(str,info['size']))),
+                     'origin   x y z:  %s'%(' : '.join(map(str,info['origin']))),
+                     'homogenization:  %i'%info['homogenization'],
+                     'microstructures: %i'%info['microstructures'],
+                    ])
+
+  errors = []
+  if np.any(info['grid'] < 1):    errors.append('invalid grid a b c.')
+  if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
+  if errors != []:
+    damask.util.croak(errors)
+    table.close(dismiss = True)
+    continue
+
+# --- read data ------------------------------------------------------------------------------------
+
+  microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F')           # read microstructure
+
+  if 'z' in options.directions:
+    microstructure = np.concatenate([microstructure,microstructure[:,:,::-1]],2)
+  if 'y' in options.directions:
+    microstructure = np.concatenate([microstructure,microstructure[:,::-1,:]],1)
+  if 'x' in options.directions:
+    microstructure = np.concatenate([microstructure,microstructure[::-1,:,:]],0)
+
+# --- do work ------------------------------------------------------------------------------------
+
+  newInfo = {
+             'size':  microstructure.shape*info['size']/info['grid'],
+             'grid':  microstructure.shape,
+            }
+
+# --- report ---------------------------------------------------------------------------------------
+
+  remarks = []
+  if (any(newInfo['grid'] != info['grid'])):
+    remarks.append('--> grid     a b c:  %s'%(' x '.join(map(str,newInfo['grid']))))
+  if (any(newInfo['size'] != info['size'])):
+    remarks.append('--> size     x y z:  %s'%(' x '.join(map(str,newInfo['size']))))
+  if remarks != []: damask.util.croak(remarks)
+
+# --- write header ---------------------------------------------------------------------------------
+
+  table.labels_clear()
+  table.info_clear()
+  table.info_append([
+    scriptID + ' ' + ' '.join(sys.argv[1:]),
+    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
+    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
+    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
+    "homogenization\t{homog}".format(homog=info['homogenization']),
+    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
+    extra_header
+    ])
+  table.head_write()
+
+# --- write microstructure information ------------------------------------------------------------
+
+  formatwidth = int(math.floor(math.log10(microstructure.max())+1))
+  table.data = microstructure.reshape((newInfo['grid'][0],np.prod(newInfo['grid'][1:])),order='F').transpose()
+  table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
+
+# --- output finalization --------------------------------------------------------------------------
+
+  table.close()                                                                                      # close ASCII table
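The three `np.concatenate` branches above are the mirroring itself: each appends a reversed copy of the array along the chosen axis, doubling the grid in that direction. A quick sketch of the 'x' branch on a toy 1-D microstructure (values illustrative only):

import numpy as np

microstructure = np.array([1, 1, 2, 3])                                # toy 4-voxel geometry
mirrored = np.concatenate([microstructure, microstructure[::-1]], 0)   # append reversed copy along x
print(mirrored)                                                        # [1 1 2 3 3 2 1 1]

Since the doubled array ends with the same voxels it starts with, the mirrored geometry tiles periodically, which matches the periodic boundary conditions the spectral solver assumes.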
@@ -30,30 +30,37 @@ parser.add_option('-s', '--substitute',
                   dest = 'substitute',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'substitutions of microstructure indices from,to,from,to,...')
+parser.add_option('--float',
+                  dest = 'real',
+                  action = 'store_true',
+                  help = 'use float input')
+
 parser.set_defaults(origin = (0.0,0.0,0.0),
                     microstructure = 0,
                     substitute = [],
+                    real = False,
                    )

 (options, filenames) = parser.parse_args()

+datatype = 'f' if options.real else 'i'
+
 sub = {}
 for i in xrange(len(options.substitute)/2):                                                          # split substitution list into "from" -> "to"
   sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1])

-# --- loop over input files -------------------------------------------------------------------------
+# --- loop over input files ----------------------------------------------------------------------

 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False, labeled = False)
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False,
+                                    labeled = False)
   except: continue
   damask.util.report(scriptName,name)

-# --- interpret header ----------------------------------------------------------------------------
+# --- interpret header ---------------------------------------------------------------------------

   table.head_read()
   info,extra_header = table.head_getGeom()

@@ -73,9 +80,9 @@ for name in filenames:
     table.close(dismiss = True)
     continue

-# --- read data ------------------------------------------------------------------------------------
+# --- read data ----------------------------------------------------------------------------------

-  microstructure = table.microstructure_read(info['grid'])                                           # read microstructure
+  microstructure = table.microstructure_read(info['grid'],datatype)                                  # read microstructure

 # --- do work ------------------------------------------------------------------------------------
@@ -90,9 +97,9 @@ for name in filenames:
   substituted += options.microstructure                                                              # shift microstructure indices

   newInfo['origin'] = info['origin'] + options.origin
-  newInfo['microstructures'] = substituted.max()
+  newInfo['microstructures'] = len(np.unique(substituted))

-# --- report ---------------------------------------------------------------------------------------
+# --- report -------------------------------------------------------------------------------------

   remarks = []
   if (any(newInfo['origin'] != info['origin'])):
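The switch from `substituted.max()` to `len(np.unique(substituted))` is a genuine fix, not cosmetics: after substitution the index set can have gaps, and the largest index then overstates the microstructure count. A small illustration with toy values (not from the commit):

import numpy as np

substituted = np.array([1, 2, 5, 5, 2])     # indices after substitution; 3 and 4 no longer occur
print(substituted.max())                    # 5  -- overstates the number of microstructures
print(len(np.unique(substituted)))          # 3  -- actual count of distinct indices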
@@ -101,7 +108,7 @@ for name in filenames:
     remarks.append('--> microstructures: %i'%newInfo['microstructures'])
   if remarks != []: damask.util.croak(remarks)

-# --- write header ---------------------------------------------------------------------------------
+# --- write header -------------------------------------------------------------------------------

   table.labels_clear()
   table.info_clear()

@@ -116,12 +123,12 @@ for name in filenames:
                      ])
   table.head_write()

-# --- write microstructure information ------------------------------------------------------------
+# --- write microstructure information -----------------------------------------------------------

-  formatwidth = int(math.floor(math.log10(microstructure.max())+1))
+  format = '%g' if options.real else '%{}i'.format(int(math.floor(math.log10(microstructure.max())+1)))
   table.data = substituted.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
-  table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
+  table.data_writeArray(format,delimiter = ' ')

-# --- output finalization --------------------------------------------------------------------------
+# --- output finalization ------------------------------------------------------------------------

   table.close()                                                                                      # close ASCII table
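The old `formatwidth` expression counts the decimal digits of the largest index so the integer columns stay aligned; `--float` input has no fixed width, hence the new `'%g'` branch. The digit arithmetic, spelled out on a hypothetical value:

import math

largest = 4711                                           # hypothetical largest microstructure index
width = int(math.floor(math.log10(largest)+1))           # log10(4711) ~ 3.67 -> floor + 1 = 4 digits
print('%{}i'.format(width) % largest)                    # '4711', padded to a 4-character field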
@@ -29,7 +29,7 @@ def kdtree_search(cloud, queryPoints):
 # --------------------------------------------------------------------

 parser = OptionParser(option_class=damask.extendableOption, usage='%prog [options]', description = """
-Distribute given number of points randomly within the three-dimensional cube [0.0,0.0,0.0]--[1.0,1.0,1.0].
+Distribute given number of points randomly within (a fraction of) the three-dimensional cube [0.0,0.0,0.0]--[1.0,1.0,1.0].
 Reports positions with random crystal orientations in seeds file format to STDOUT.

 """, version = scriptID)

@@ -38,6 +38,11 @@ parser.add_option('-N',
                   dest = 'N',
                   type = 'int', metavar = 'int',
                   help = 'number of seed points [%default]')
+parser.add_option('-f',
+                  '--fraction',
+                  dest = 'fraction',
+                  type = 'float', nargs = 3, metavar = 'float float float',
+                  help='fractions along x,y,z of unit cube to fill %default')
 parser.add_option('-g',
                   '--grid',
                   dest = 'grid',
@@ -86,8 +91,7 @@ group.add_option( '-s',
                   action = 'store_true',
                   dest = 'selective',
                   help = 'selective picking of seed points from random seed points [%default]')
-group.add_option( '-f',
-                  '--force',
+group.add_option( '--force',
                   action = 'store_true',
                   dest = 'force',
                   help = 'try selective picking despite large seed point number [%default]')
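Dropping the short `-f` from `--force` is required by the new `-f/--fraction` option above: optparse's default conflict handler raises an error when two options of the same parser claim one option string. A standalone reproduction (not part of the commit):

from optparse import OptionParser, OptionConflictError

parser = OptionParser()
parser.add_option('-f', '--fraction', dest='fraction')
try:
    parser.add_option('-f', '--force', dest='force')     # second claim on '-f'
except OptionConflictError as e:
    print(e)                                             # e.g. option -f/--force: conflicting option string(s): -f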
@@ -103,6 +107,7 @@ parser.add_option_group(group)

 parser.set_defaults(randomSeed = None,
                     grid = (16,16,16),
+                    fraction = (1.0,1.0,1.0),
                     N = 20,
                     weights = False,
                     max = 0.0,

@@ -118,6 +123,7 @@ parser.set_defaults(randomSeed = None,

 (options,filenames) = parser.parse_args()

+options.fraction = np.array(options.fraction)
 options.grid = np.array(options.grid)
 gridSize = options.grid.prod()
@@ -160,16 +166,25 @@ for name in filenames:
   grainEuler[2,:] *= 360.0                                                                           # phi_2 is uniformly distributed

   if not options.selective:
-    seeds = np.zeros((3,options.N),dtype='d')                                                        # seed positions array
-    gridpoints = random.sample(range(gridSize),options.N)                                            # choose first N from random permutation of grid positions
-
-    seeds[0,:] = (np.mod(gridpoints                                   ,options.grid[0])\
-                 +np.random.random(options.N))                        /options.grid[0]
-    seeds[1,:] = (np.mod(gridpoints//          options.grid[0]        ,options.grid[1])\
-                 +np.random.random(options.N))                        /options.grid[1]
-    seeds[2,:] = (np.mod(gridpoints//(options.grid[1]*options.grid[0]),options.grid[2])\
-                 +np.random.random(options.N))                        /options.grid[2]
+    seeds = np.array([])
+    while len(seeds) < options.N:
+
+      theSeeds = np.zeros((options.N,3),dtype=float)                                                 # seed positions array
+      gridpoints = random.sample(range(gridSize),options.N)                                          # choose first N from random permutation of grid positions
+
+      theSeeds[:,0] = (np.mod(gridpoints                                   ,options.grid[0])\
+                      +np.random.random(options.N))                        /options.grid[0]
+      theSeeds[:,1] = (np.mod(gridpoints//          options.grid[0]        ,options.grid[1])\
+                      +np.random.random(options.N))                        /options.grid[1]
+      theSeeds[:,2] = (np.mod(gridpoints//(options.grid[1]*options.grid[0]),options.grid[2])\
+                      +np.random.random(options.N))                        /options.grid[2]
+
+      goodSeeds = theSeeds[np.all(theSeeds<=options.fraction,axis=1)]                                # pick seeds within threshold fraction
+      seeds = goodSeeds if len(seeds) == 0 else np.vstack((seeds,goodSeeds))
+      if len(seeds) > options.N: seeds = seeds[:min(options.N,len(seeds))]
+
+    seeds = seeds.T                                                                                  # switch layout to point index as last index

   else:
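The rewritten block is rejection sampling: each pass jitters `options.N` candidate positions across the whole unit cube, keeps only those inside the fraction box, and loops until enough survivors accumulate. The acceptance mask in isolation, with toy numbers (not from the commit):

import numpy as np

np.random.seed(0)
fraction = np.array([0.5, 1.0, 1.0])                        # fill only half of the x extent
candidates = np.random.random((20, 3))                      # 20 candidate seeds in the unit cube
kept = candidates[np.all(candidates <= fraction, axis=1)]   # same mask as the goodSeeds line above
assert kept[:, 0].max() <= 0.5                              # every survivor respects the x fraction
print(len(kept))                                            # roughly half of the 20 candidates survive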