Merge branch 'development' into CCodeUse

Martin Diehl 2016-05-05 12:57:07 +02:00
commit 6f8f2da2c0
82 changed files with 188708 additions and 192519 deletions

.gitattributes (new file)

@@ -0,0 +1,10 @@
# from https://help.github.com/articles/dealing-with-line-endings/
#
# always use LF, even if the files are edited on windows, they need to be compiled/used on unix
* text eol=lf
installation/mods_Abaqus/abaqus_v6_windows.env eol=crlf
# Denote all files that are truly binary and should not be modified.
*.png binary
*.jpg binary
*.cae binary

DAMASK_env.bat (deleted file)

@@ -1,19 +0,0 @@
:: sets up an environment for DAMASK on Windows
:: usage: call DAMASK_env.bat
@echo off
set LOCATION=%~dp0
set DAMASK_ROOT=%LOCATION%\DAMASK
set DAMASK_NUM_THREADS=2
chcp 1252
Title Düsseldorf Advanced Materials Simulation Kit - DAMASK, MPIE Düsseldorf
echo.
echo Düsseldorf Advanced Materials Simulation Kit - DAMASK
echo Max-Planck-Institut für Eisenforschung, Düsseldorf
echo http://damask.mpie.de
echo.
echo Preparing environment ...
echo DAMASK_ROOT=%DAMASK_ROOT%
echo DAMASK_NUM_THREADS=%DAMASK_NUM_THREADS%
set DAMASK_BIN=%DAMASK_ROOT%\bin
set PATH=%PATH%;%DAMASK_BIN%
set PYTHONPATH=%PYTHONPATH%;%DAMASK_ROOT%\lib

DAMASK_env.sh

@@ -2,7 +2,7 @@
# usage: source DAMASK_env.sh
if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == 'linux' ]; then
DAMASK_ROOT=$(readlink -f "`dirname $BASH_SOURCE`")
DAMASK_ROOT=$(python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "`dirname $BASH_SOURCE`")
else
[[ "${BASH_SOURCE::1}" == "/" ]] && BASE="" || BASE="`pwd`/"
STAT=$(stat "`dirname $BASE$BASH_SOURCE`")
@@ -18,11 +18,11 @@ fi
SOLVER=`which DAMASK_spectral 2>/dev/null`
if [ "x$SOLVER" == "x" ]; then
export SOLVER='Not found!'
SOLVER='Not found!'
fi
PROCESSING=`which postResults 2>/dev/null`
if [ "x$PROCESSING" == "x" ]; then
export PROCESSING='Not found!'
PROCESSING='Not found!'
fi
# according to http://software.intel.com/en-us/forums/topic/501500
@@ -55,7 +55,8 @@ if [ ! -z "$PS1" ]; then
echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
if [ "x$PETSC_DIR" != "x" ]; then
echo "PETSc location $PETSC_DIR"
[[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
[[ `python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "$PETSC_DIR"` == $PETSC_DIR ]] \
|| echo " ~~> "`python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "$PETSC_DIR"`
fi
[[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
echo "MSC.Marc/Mentat $MSC_ROOT"

VERSION

@@ -1 +1 @@
v2.0.0-43-ge39441f
v2.0.0-219-g297d5ca

code/.gitattributes (new file)

@@ -0,0 +1,8 @@
# from https://help.github.com/articles/dealing-with-line-endings/
#
# always use LF, even if the files are edited on windows, they need to be compiled/used on unix
* text eol=lf
# Denote all files that are truly binary and should not be modified.
*.png binary
*.jpg binary

code/.gitignore

@@ -1,2 +1,3 @@
DAMASK_marc*.f90
quit__genmod.f90
*.marc

code/DAMASK_spectral.f90

@@ -13,7 +13,8 @@ program DAMASK_spectral
pInt, &
pLongInt, &
pReal, &
tol_math_check
tol_math_check, &
dNeq
use DAMASK_interface, only: &
DAMASK_interface_init, &
loadCaseFile, &
@@ -59,8 +60,6 @@ program DAMASK_spectral
materialpoint_sizeResults, &
materialpoint_results, &
materialpoint_postResults
use material, only: &
thermal_type, &
damage_type, &
@@ -149,7 +148,9 @@ program DAMASK_spectral
MPI_file_seek, &
MPI_file_get_position, &
MPI_file_write, &
MPI_allreduce
MPI_abort, &
MPI_allreduce, &
PETScFinalize
!--------------------------------------------------------------------------------------------------
! init DAMASK (all modules)
@@ -341,7 +342,7 @@ program DAMASK_spectral
reshape(spread(tol_math_check,1,9),[ 3,3]))&
.or. abs(math_det33(loadCases(currentLoadCase)%rotation)) > &
1.0_pReal + tol_math_check) errorID = 846_pInt ! given rotation matrix contains strain
if (any(loadCases(currentLoadCase)%rotation /= math_I3)) &
if (any(dNeq(loadCases(currentLoadCase)%rotation, math_I3))) &
write(6,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'rotation of loadframe:',&
math_transpose33(loadCases(currentLoadCase)%rotation)
if (loadCases(currentLoadCase)%time < 0.0_pReal) errorID = 834_pInt ! negative time increment
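The dNeq change above swaps an exact /= comparison of the rotation matrix against math_I3 for a tolerance-based test, which is the robust way to compare floating-point values. A minimal self-contained sketch of such a comparison (assumed semantics only; the actual dNeq comes from DAMASK's prec module and its tolerance handling may differ):

program dneq_demo
  implicit none
  integer, parameter :: pReal = selected_real_kind(15)       ! double precision, as in prec
  real(pReal), dimension(3,3) :: I3 = reshape([1.0_pReal,0.0_pReal,0.0_pReal, &
                                               0.0_pReal,1.0_pReal,0.0_pReal, &
                                               0.0_pReal,0.0_pReal,1.0_pReal],[3,3])
  print *, any(dNeq_sketch(I3,I3))                           ! .false.: identical matrices
  print *, any(dNeq_sketch(I3*(1.0_pReal+1.0e-10_pReal),I3)) ! .false.: equal within tolerance
contains
  logical elemental function dNeq_sketch(a,b)
    real(pReal), intent(in) :: a, b
    real(pReal), parameter  :: tol = 1.0e-8_pReal            ! assumed tolerance
    dNeq_sketch = abs(a-b) > tol*max(abs(a),abs(b),1.0_pReal)
  end function dNeq_sketch
end program dneq_demo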
@@ -425,28 +426,33 @@ program DAMASK_spectral
!--------------------------------------------------------------------------------------------------
! prepare MPI parallel out (including opening of file)
allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
outputSize(worldrank+1) = int(size(materialpoint_results)*pReal,MPI_OFFSET_KIND)
call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce')
call MPI_file_open(PETSC_COMM_WORLD, &
trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', &
MPI_MODE_WRONLY + MPI_MODE_APPEND, &
MPI_INFO_NULL, &
resUnit, &
ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_open')
call MPI_file_get_position(resUnit,fileOffset,ierr) ! get offset from header
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_get_position')
fileOffset = fileOffset + sum(outputSize(1:worldrank)) ! offset of my process in file (header + processes before me)
call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
if (.not. appendToOutFile) then ! if not restarting, write 0th increment
do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
outputIndex=[(i-1)*((maxByteOut/pReal)/materialpoint_sizeResults)+1, &
min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))]
outputIndex=int([(i-1_pInt)*((maxByteOut/pReal)/materialpoint_sizeResults)+1_pInt, &
min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
[(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
enddo
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
if (worldrank == 0) &
write(6,'(1/,a)') ' ... writing initial configuration to file ........................'
endif
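The rewritten size computation above widens to the 64-bit offset kind before multiplying (size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND) instead of int(size(...)*pReal,MPI_OFFSET_KIND)), so the per-rank byte count can no longer overflow a default integer for large result arrays; the matching MPI_allreduce datatype changes from MPI_INT to MPI_LONG for the same reason. A small sketch of the overflow being guarded against (assuming a 4-byte default integer and 8-byte reals):

program bytecount_demo
  use, intrinsic :: iso_fortran_env, only: int32, int64
  implicit none
  integer(int32) :: n
  n = 300000000                                   ! number of reals written by this rank
  ! n*8 in 32-bit arithmetic would exceed huge(n) = 2147483647;
  ! widening to the wide kind first, as in the change above, is safe:
  print *, int(n,int64) * 8_int64                 ! 2400000000 bytes
end program bytecount_demo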
@@ -645,15 +651,17 @@ program DAMASK_spectral
write(6,'(1/,a)') ' ... writing results to file ......................................'
call materialpoint_postResults()
call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
outputIndex=[(i-1)*maxByteOut/pReal/materialpoint_sizeResults+1, &
min(i*maxByteOut/pReal/materialpoint_sizeResults,size(materialpoint_results,3))]
outputIndex=int([(i-1_pInt)*((maxByteOut/pReal)/materialpoint_sizeResults)+1_pInt, &
min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
[(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
enddo
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
endif
if( loadCases(currentLoadCase)%restartFrequency > 0_pInt .and. & ! at frequency of writing restart information set restart parameter for FEsolving
mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! first call to CPFEM_general will write?
@@ -700,7 +708,7 @@ program DAMASK_spectral
enddo
call utilities_destroy()
call PetscFinalize(ierr); CHKERRQ(ierr)
call PETScFinalize(ierr); CHKERRQ(ierr)
if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged
call quit(0_pInt) ! no complains ;)

code/IO.f90

@@ -6,10 +6,6 @@
!> @brief input/output functions, partly depending on chosen solver
!--------------------------------------------------------------------------------------------------
module IO
#ifdef HDF
use hdf5, only: &
HID_T
#endif
use prec, only: &
pInt, &
pReal
@@ -18,22 +14,8 @@ module IO
private
character(len=5), parameter, public :: &
IO_EOF = '#EOF#' !< end of file string
#ifdef HDF
integer(HID_T), public, protected :: tempCoordinates, tempResults
integer(HID_T), private :: resultsFile, tempFile
integer(pInt), private :: currentInc
#endif
public :: &
#ifdef HDF
HDF5_mappingConstitutive, &
HDF5_mappingHomogenization, &
HDF5_mappingCells, &
HDF5_addGroup ,&
HDF5_forwardResults, &
HDF5_addScalarDataset, &
IO_formatIntToString ,&
#endif
IO_init, &
IO_read, &
IO_checkAndRewind, &
@@ -117,9 +99,6 @@ subroutine IO_init
#include "compilation_info.f90"
endif mainProcess
#ifdef HDF
call HDF5_createJobFile
#endif
end subroutine IO_init
@@ -1669,7 +1648,9 @@ subroutine IO_error(error_ID,el,ip,g,ext_msg)
msg = 'unknown filter type selected'
case (893_pInt)
msg = 'PETSc: SNES_DIVERGED_FNORM_NAN'
case (894_pInt)
msg = 'MPI error'
!-------------------------------------------------------------------------------------------------
! error messages related to parsing of Abaqus input file
case (900_pInt)
@@ -1942,526 +1923,4 @@ recursive function abaqus_assembleInputFile(unit1,unit2) result(createSuccess)
end function abaqus_assembleInputFile
#endif
#ifdef HDF
!--------------------------------------------------------------------------------------------------
!> @brief creates and initializes HDF5 output files
!--------------------------------------------------------------------------------------------------
subroutine HDF5_createJobFile
use hdf5
use DAMASK_interface, only: &
getSolverWorkingDirectoryName, &
getSolverJobName
implicit none
integer :: hdferr
integer(SIZE_T) :: typeSize
character(len=1024) :: path
integer(HID_T) :: prp_id
integer(SIZE_T), parameter :: increment = 104857600 ! increase temp file in memory in 100MB steps
!--------------------------------------------------------------------------------------------------
! initialize HDF5 library and check if integer and float type size match
call h5open_f(hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5open_f')
call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5tget_size_f (int)')
if (int(pInt,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pInt does not match H5T_NATIVE_INTEGER')
call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5tget_size_f (double)')
if (int(pReal,SIZE_T)/=typeSize) call IO_error(0_pInt,ext_msg='pReal does not match H5T_NATIVE_DOUBLE')
!--------------------------------------------------------------------------------------------------
! open file
path = trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.'//'DAMASKout'
call h5fcreate_f(path,H5F_ACC_TRUNC_F,resultsFile,hdferr)
if (hdferr < 0) call IO_error(100_pInt,ext_msg=path)
call HDF5_addStringAttribute(resultsFile,'createdBy','$Id$')
!--------------------------------------------------------------------------------------------------
! open temp file
path = trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.'//'DAMASKoutTemp'
call h5pcreate_f(H5P_FILE_ACCESS_F, prp_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5pcreate_f')
call h5pset_fapl_core_f(prp_id, increment, .false., hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_createJobFile: h5pset_fapl_core_f')
call h5fcreate_f(path,H5F_ACC_TRUNC_F,tempFile,hdferr)
if (hdferr < 0) call IO_error(100_pInt,ext_msg=path)
!--------------------------------------------------------------------------------------------------
! create mapping groups in out file
call HDF5_closeGroup(HDF5_addGroup("mapping"))
call HDF5_closeGroup(HDF5_addGroup("results"))
call HDF5_closeGroup(HDF5_addGroup("coordinates"))
!--------------------------------------------------------------------------------------------------
! create results group in temp file
tempResults = HDF5_addGroup("results",tempFile)
tempCoordinates = HDF5_addGroup("coordinates",tempFile)
end subroutine HDF5_createJobFile
!--------------------------------------------------------------------------------------------------
!> @brief creates and initializes HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine HDF5_closeJobFile()
use hdf5
implicit none
integer :: hdferr
call h5fclose_f(resultsFile,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeJobFile: h5fclose_f')
end subroutine HDF5_closeJobFile
!--------------------------------------------------------------------------------------------------
!> @brief adds a new group to the results file, or if loc is present at the given location
!--------------------------------------------------------------------------------------------------
integer(HID_T) function HDF5_addGroup(path,loc)
use hdf5
implicit none
character(len=*), intent(in) :: path
integer(HID_T), intent(in),optional :: loc
integer :: hdferr
if (present(loc)) then
call h5gcreate_f(loc, trim(path), HDF5_addGroup, hdferr)
else
call h5gcreate_f(resultsFile, trim(path), HDF5_addGroup, hdferr)
endif
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(path)//' )')
end function HDF5_addGroup
!--------------------------------------------------------------------------------------------------
!> @brief adds a new group to the results file
!--------------------------------------------------------------------------------------------------
integer(HID_T) function HDF5_openGroup(path)
use hdf5
implicit none
character(len=*), intent(in) :: path
integer :: hdferr
call h5gopen_f(resultsFile, trim(path), HDF5_openGroup, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(path)//' )')
end function HDF5_openGroup
!--------------------------------------------------------------------------------------------------
!> @brief closes a group
!--------------------------------------------------------------------------------------------------
subroutine HDF5_closeGroup(ID)
use hdf5
implicit none
integer(HID_T), intent(in) :: ID
integer :: hdferr
call h5gclose_f(ID, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_closeGroup: h5gclose_f')
end subroutine HDF5_closeGroup
!--------------------------------------------------------------------------------------------------
!> @brief adds a new group to the results file
!--------------------------------------------------------------------------------------------------
subroutine HDF5_addStringAttribute(entity,attrLabel,attrValue)
use hdf5
implicit none
integer(HID_T), intent(in) :: entity
character(len=*), intent(in) :: attrLabel, attrValue
integer :: hdferr
integer(HID_T) :: attr_id, space_id, type_id
call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5screate_f')
call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tcopy_f')
call h5tset_size_f(type_id, int(len(trim(attrValue)),HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5tset_size_f')
call h5acreate_f(entity, trim(attrLabel),type_id,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5acreate_f')
call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5awrite_f')
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5aclose_f')
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_addStringAttribute: h5sclose_f')
end subroutine HDF5_addStringAttribute
!--------------------------------------------------------------------------------------------------
!> @brief adds the unique mapping from spatial position and constituent ID to results
!--------------------------------------------------------------------------------------------------
subroutine HDF5_mappingConstitutive(mapping)
use hdf5
implicit none
integer(pInt), intent(in), dimension(:,:,:) :: mapping
integer :: hdferr, NmatPoints,Nconstituents
integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id,instance_id,position_id
Nconstituents=size(mapping,1)
NmatPoints=size(mapping,2)
mapping_ID = HDF5_openGroup("mapping")
!--------------------------------------------------------------------------------------------------
! create dataspace
call h5screate_simple_f(2, int([Nconstituents,NmatPoints],HSIZE_T), space_id, hdferr, &
int([Nconstituents,NmatPoints],HSIZE_T))
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive')
!--------------------------------------------------------------------------------------------------
! compound type
call h5tcreate_f(H5T_COMPOUND_F, 6_SIZE_T, dtype_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tcreate_f dtype_id')
call h5tinsert_f(dtype_id, "Constitutive Instance", 0_SIZE_T, H5T_STD_U16LE, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tinsert_f 0')
call h5tinsert_f(dtype_id, "Position in Instance Results", 2_SIZE_T, H5T_STD_U32LE, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tinsert_f 2')
!--------------------------------------------------------------------------------------------------
! create Dataset
call h5dcreate_f(mapping_id, "Constitutive", dtype_id, space_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive')
!--------------------------------------------------------------------------------------------------
! Create memory types (one compound datatype for each member)
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), instance_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tcreate_f instance_id')
call h5tinsert_f(instance_id, "Constitutive Instance", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tinsert_f instance_id')
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tcreate_f position_id')
call h5tinsert_f(position_id, "Position in Instance Results", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tinsert_f position_id')
!--------------------------------------------------------------------------------------------------
! write data by fields in the datatype. Fields order is not important.
call h5dwrite_f(dset_id, position_id, mapping(1:Nconstituents,1:NmatPoints,1), &
int([Nconstituents, NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5dwrite_f position_id')
call h5dwrite_f(dset_id, instance_id, mapping(1:Nconstituents,1:NmatPoints,2), &
int([Nconstituents, NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5dwrite_f instance_id')
!--------------------------------------------------------------------------------------------------
!close types, dataspaces
call h5tclose_f(dtype_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tclose_f dtype_id')
call h5tclose_f(position_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tclose_f position_id')
call h5tclose_f(instance_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5tclose_f instance_id')
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5dclose_f')
call h5sclose_f(space_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5sclose_f')
call HDF5_closeGroup(mapping_ID)
end subroutine HDF5_mappingConstitutive
!--------------------------------------------------------------------------------------------------
!> @brief adds the unique mapping from spatial position and constituent ID to results
!--------------------------------------------------------------------------------------------------
subroutine HDF5_mappingCrystallite(mapping)
use hdf5
implicit none
integer(pInt), intent(in), dimension(:,:,:) :: mapping
integer :: hdferr, NmatPoints,Nconstituents
integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id,instance_id,position_id
Nconstituents=size(mapping,1)
NmatPoints=size(mapping,2)
mapping_ID = HDF5_openGroup("mapping")
!--------------------------------------------------------------------------------------------------
! create dataspace
call h5screate_simple_f(2, int([Nconstituents,NmatPoints],HSIZE_T), space_id, hdferr, &
int([Nconstituents,NmatPoints],HSIZE_T))
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite')
!--------------------------------------------------------------------------------------------------
! compound type
call h5tcreate_f(H5T_COMPOUND_F, 6_SIZE_T, dtype_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f dtype_id')
call h5tinsert_f(dtype_id, "Crystallite Instance", 0_SIZE_T, H5T_STD_U16LE, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f 0')
call h5tinsert_f(dtype_id, "Position in Instance Results", 2_SIZE_T, H5T_STD_U32LE, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f 2')
!--------------------------------------------------------------------------------------------------
! create Dataset
call h5dcreate_f(mapping_id, "Crystallite", dtype_id, space_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite')
!--------------------------------------------------------------------------------------------------
! Create memory types (one compound datatype for each member)
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), instance_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f instance_id')
call h5tinsert_f(instance_id, "Crystallite Instance", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f instance_id')
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tcreate_f position_id')
call h5tinsert_f(position_id, "Position in Instance Results", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tinsert_f position_id')
!--------------------------------------------------------------------------------------------------
! write data by fields in the datatype. Fields order is not important.
call h5dwrite_f(dset_id, position_id, mapping(1:Nconstituents,1:NmatPoints,1), &
int([Nconstituents, NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dwrite_f position_id')
call h5dwrite_f(dset_id, instance_id, mapping(1:Nconstituents,1:NmatPoints,2), &
int([Nconstituents, NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dwrite_f instance_id')
!--------------------------------------------------------------------------------------------------
!close types, dataspaces
call h5tclose_f(dtype_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f dtype_id')
call h5tclose_f(position_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f position_id')
call h5tclose_f(instance_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5tclose_f instance_id')
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5dclose_f')
call h5sclose_f(space_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCrystallite: h5sclose_f')
call HDF5_closeGroup(mapping_ID)
end subroutine HDF5_mappingCrystallite
!--------------------------------------------------------------------------------------------------
!> @brief adds the unique mapping from spatial position to results
!--------------------------------------------------------------------------------------------------
subroutine HDF5_mappingHomogenization(mapping)
use hdf5
implicit none
integer(pInt), intent(in), dimension(:,:) :: mapping
integer :: hdferr, NmatPoints
integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id,instance_id,position_id,elem_id,ip_id
NmatPoints=size(mapping,1)
mapping_ID = HDF5_openGroup("mapping")
!--------------------------------------------------------------------------------------------------
! create dataspace
call h5screate_simple_f(1, int([NmatPoints],HSIZE_T), space_id, hdferr, &
int([NmatPoints],HSIZE_T))
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization')
!--------------------------------------------------------------------------------------------------
! compound type
call h5tcreate_f(H5T_COMPOUND_F, 11_SIZE_T, dtype_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f dtype_id')
call h5tinsert_f(dtype_id, "Homogenization Instance", 0_SIZE_T, H5T_STD_U16LE, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f 0')
call h5tinsert_f(dtype_id, "Position in Instance Results", 2_SIZE_T, H5T_STD_U32LE, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f 2')
call h5tinsert_f(dtype_id, "Element Number", 6_SIZE_T, H5T_STD_U32LE, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f 6')
call h5tinsert_f(dtype_id, "Material Point Number", 10_SIZE_T, H5T_STD_U8LE, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f 10')
!--------------------------------------------------------------------------------------------------
! create Dataset
call h5dcreate_f(mapping_id, "Homogenization", dtype_id, space_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization')
!--------------------------------------------------------------------------------------------------
! Create memory types (one compound datatype for each member)
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), instance_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f instance_id')
call h5tinsert_f(instance_id, "Homogenization Instance", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f instance_id')
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f position_id')
call h5tinsert_f(position_id, "Position in Instance Results", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f position_id')
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), elem_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f elem_id')
call h5tinsert_f(elem_id, "Element Number", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f elem_id')
call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), ip_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tcreate_f ip_id')
call h5tinsert_f(ip_id, "Material Point Number", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tinsert_f ip_id')
!--------------------------------------------------------------------------------------------------
! write data by fields in the datatype. Fields order is not important.
call h5dwrite_f(dset_id, position_id, mapping(1:NmatPoints,1), &
int([NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dwrite_f position_id')
call h5dwrite_f(dset_id, instance_id, mapping(1:NmatPoints,2), &
int([NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dwrite_f position_id')
call h5dwrite_f(dset_id, elem_id, mapping(1:NmatPoints,3), &
int([NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dwrite_f elem_id')
call h5dwrite_f(dset_id, ip_id, mapping(1:NmatPoints,4), &
int([NmatPoints],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dwrite_f ip_id')
!--------------------------------------------------------------------------------------------------
!close types, dataspaces
call h5tclose_f(dtype_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f dtype_id')
call h5tclose_f(position_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f position_id')
call h5tclose_f(instance_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f instance_id')
call h5tclose_f(ip_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f ip_id')
call h5tclose_f(elem_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5tclose_f elem_id')
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5dclose_f')
call h5sclose_f(space_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingHomogenization: h5sclose_f')
call HDF5_closeGroup(mapping_ID)
end subroutine HDF5_mappingHomogenization
!--------------------------------------------------------------------------------------------------
!> @brief adds the unique cell to node mapping
!--------------------------------------------------------------------------------------------------
subroutine HDF5_mappingCells(mapping)
use hdf5
implicit none
integer(pInt), intent(in), dimension(:) :: mapping
integer :: hdferr, Nnodes
integer(HID_T) :: mapping_id, dset_id, space_id
Nnodes=size(mapping)
mapping_ID = HDF5_openGroup("mapping")
!--------------------------------------------------------------------------------------------------
! create dataspace
call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, &
int([Nnodes],HSIZE_T))
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5screate_simple_f')
!--------------------------------------------------------------------------------------------------
! create Dataset
call h5dcreate_f(mapping_id, "Cell",H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells')
!--------------------------------------------------------------------------------------------------
! write data by fields in the datatype. Fields order is not important.
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, mapping, int([Nnodes],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingCells: h5dwrite_f instance_id')
!--------------------------------------------------------------------------------------------------
!close types, dataspaces
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5dclose_f')
call h5sclose_f(space_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='IO_mappingConstitutive: h5sclose_f')
call HDF5_closeGroup(mapping_ID)
end subroutine HDF5_mappingCells
!--------------------------------------------------------------------------------------------------
!> @brief creates a new scalar dataset in the given group location
!--------------------------------------------------------------------------------------------------
subroutine HDF5_addScalarDataset(group,nnodes,label,SIunit)
use hdf5
implicit none
integer(HID_T), intent(in) :: group
integer(pInt), intent(in) :: nnodes
character(len=*), intent(in) :: SIunit,label
integer :: hdferr
integer(HID_T) :: dset_id, space_id
!--------------------------------------------------------------------------------------------------
! create dataspace
call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, &
int([Nnodes],HSIZE_T))
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5screate_simple_f')
!--------------------------------------------------------------------------------------------------
! create Dataset
call h5dcreate_f(group, trim(label),H5T_NATIVE_DOUBLE, space_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dcreate_f')
call HDF5_addStringAttribute(dset_id,'unit',trim(SIunit))
!--------------------------------------------------------------------------------------------------
!close types, dataspaces
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addScalarDataset: h5dclose_f')
call h5sclose_f(space_id, hdferr)
end subroutine HDF5_addScalarDataset
!--------------------------------------------------------------------------------------------------
!> @brief returns nicely formatted string of integer value
!--------------------------------------------------------------------------------------------------
function IO_formatIntToString(myInt)
implicit none
integer(pInt), intent(in) :: myInt
character(len=1_pInt + int(log10(real(myInt)),pInt)) :: IO_formatIntToString
write(IO_formatIntToString,'('//IO_intOut(myInt)//')') myInt
end function
!--------------------------------------------------------------------------------------------------
!> @brief copies the current temp results to the actual results file
!--------------------------------------------------------------------------------------------------
subroutine HDF5_forwardResults
use hdf5
implicit none
integer :: hdferr
integer(HID_T) :: new_loc_id
new_loc_id = HDF5_openGroup("results")
currentInc = currentInc + 1_pInt
call h5ocopy_f(tempFile, 'results', new_loc_id,dst_name=IO_formatIntToString(currentInc), hdferr=hdferr)
if (hdferr < 0_pInt) call IO_error(1_pInt,ext_msg='HDF5_forwardResults: h5ocopy_f')
call HDF5_closeGroup(new_loc_id)
end subroutine HDF5_forwardResults
#endif
end module IO

code/Makefile

@@ -310,7 +310,7 @@ KINEMATICS_FILES = \
kinematics_vacancy_strain.o kinematics_hydrogen_strain.o
PLASTIC_FILES = \
plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o plastic_j2.o \
plastic_dislotwin.o plastic_disloUCLA.o plastic_isotropic.o \
plastic_phenopowerlaw.o plastic_titanmod.o plastic_nonlocal.o plastic_none.o \
plastic_phenoplus.o
@@ -580,9 +580,6 @@ plastic_phenoplus.o: plastic_phenoplus.f90 \
plastic_isotropic.o: plastic_isotropic.f90 \
lattice.o
plastic_j2.o: plastic_j2.f90 \
lattice.o
plastic_none.o: plastic_none.f90 \
lattice.o
ifeq "$(F90)" "gfortran"

@@ -28,7 +28,6 @@
#include "kinematics_hydrogen_strain.f90"
#include "plastic_none.f90"
#include "plastic_isotropic.f90"
#include "plastic_j2.f90"
#include "plastic_phenopowerlaw.f90"
#include "plastic_phenoplus.f90"
#include "plastic_titanmod.f90"

code/constitutive.f90

@@ -69,7 +69,6 @@ subroutine constitutive_init()
ELASTICITY_hooke_ID, &
PLASTICITY_none_ID, &
PLASTICITY_isotropic_ID, &
PLASTICITY_j2_ID, &
PLASTICITY_phenopowerlaw_ID, &
PLASTICITY_phenoplus_ID, &
PLASTICITY_dislotwin_ID, &
@@ -93,7 +92,6 @@ subroutine constitutive_init()
ELASTICITY_HOOKE_label, &
PLASTICITY_NONE_label, &
PLASTICITY_ISOTROPIC_label, &
PLASTICITY_J2_label, &
PLASTICITY_PHENOPOWERLAW_label, &
PLASTICITY_PHENOPLUS_label, &
PLASTICITY_DISLOTWIN_label, &
@@ -114,7 +112,6 @@ subroutine constitutive_init()
use plastic_none
use plastic_isotropic
use plastic_j2
use plastic_phenopowerlaw
use plastic_phenoplus
use plastic_dislotwin
@@ -160,7 +157,6 @@ subroutine constitutive_init()
! parse plasticities from config file
if (any(phase_plasticity == PLASTICITY_NONE_ID)) call plastic_none_init
if (any(phase_plasticity == PLASTICITY_ISOTROPIC_ID)) call plastic_isotropic_init(FILEUNIT)
if (any(phase_plasticity == PLASTICITY_J2_ID)) call plastic_j2_init(FILEUNIT)
if (any(phase_plasticity == PLASTICITY_PHENOPOWERLAW_ID)) call plastic_phenopowerlaw_init(FILEUNIT)
if (any(phase_plasticity == PLASTICITY_PHENOPLUS_ID)) call plastic_phenoplus_init(FILEUNIT)
if (any(phase_plasticity == PLASTICITY_DISLOTWIN_ID)) call plastic_dislotwin_init(FILEUNIT)
@@ -217,11 +213,6 @@ subroutine constitutive_init()
thisNoutput => plastic_isotropic_Noutput
thisOutput => plastic_isotropic_output
thisSize => plastic_isotropic_sizePostResult
case (PLASTICITY_J2_ID) plasticityType
outputName = PLASTICITY_J2_label
thisNoutput => plastic_j2_Noutput
thisOutput => plastic_j2_output
thisSize => plastic_j2_sizePostResult
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
outputName = PLASTICITY_PHENOPOWERLAW_label
thisNoutput => plastic_phenopowerlaw_Noutput
@@ -408,8 +399,6 @@ function constitutive_homogenizedC(ipc,ip,el)
plastic_titanmod_homogenizedC
use plastic_dislotwin, only: &
plastic_dislotwin_homogenizedC
use plastic_disloucla, only: &
plastic_disloucla_homogenizedC
use lattice, only: &
lattice_C66
@@ -423,8 +412,6 @@ function constitutive_homogenizedC(ipc,ip,el)
plasticityType: select case (phase_plasticity(material_phase(ipc,ip,el)))
case (PLASTICITY_DISLOTWIN_ID) plasticityType
constitutive_homogenizedC = plastic_dislotwin_homogenizedC(ipc,ip,el)
case (PLASTICITY_DISLOUCLA_ID) plasticityType
constitutive_homogenizedC = plastic_disloucla_homogenizedC(ipc,ip,el)
case (PLASTICITY_TITANMOD_ID) plasticityType
constitutive_homogenizedC = plastic_titanmod_homogenizedC (ipc,ip,el)
case default plasticityType
@@ -513,7 +500,6 @@ subroutine constitutive_LpAndItsTangent(Lp, dLp_dTstar3333, dLp_dFi3333, Tstar_v
thermalMapping, &
PLASTICITY_NONE_ID, &
PLASTICITY_ISOTROPIC_ID, &
PLASTICITY_J2_ID, &
PLASTICITY_PHENOPOWERLAW_ID, &
PLASTICITY_PHENOPLUS_ID, &
PLASTICITY_DISLOTWIN_ID, &
@@ -522,8 +508,6 @@ subroutine constitutive_LpAndItsTangent(Lp, dLp_dTstar3333, dLp_dFi3333, Tstar_v
PLASTICITY_NONLOCAL_ID
use plastic_isotropic, only: &
plastic_isotropic_LpAndItsTangent
use plastic_j2, only: &
plastic_j2_LpAndItsTangent
use plastic_phenopowerlaw, only: &
plastic_phenopowerlaw_LpAndItsTangent
use plastic_phenoplus, only: &
@@ -574,8 +558,6 @@ subroutine constitutive_LpAndItsTangent(Lp, dLp_dTstar3333, dLp_dFi3333, Tstar_v
dLp_dMstar = 0.0_pReal
case (PLASTICITY_ISOTROPIC_ID) plasticityType
call plastic_isotropic_LpAndItsTangent(Lp,dLp_dMstar,Mstar_v,ipc,ip,el)
case (PLASTICITY_J2_ID) plasticityType
call plastic_j2_LpAndItsTangent(Lp,dLp_dMstar,Mstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
call plastic_phenopowerlaw_LpAndItsTangent(Lp,dLp_dMstar,Mstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPLUS_ID) plasticityType
@@ -903,7 +885,6 @@ subroutine constitutive_collectDotState(Tstar_v, FeArray, FpArray, subdt, subfra
homogenization_maxNgrains, &
PLASTICITY_none_ID, &
PLASTICITY_isotropic_ID, &
PLASTICITY_j2_ID, &
PLASTICITY_phenopowerlaw_ID, &
PLASTICITY_phenoplus_ID, &
PLASTICITY_dislotwin_ID, &
@@ -916,8 +897,6 @@ subroutine constitutive_collectDotState(Tstar_v, FeArray, FpArray, subdt, subfra
SOURCE_thermal_externalheat_ID
use plastic_isotropic, only: &
plastic_isotropic_dotState
use plastic_j2, only: &
plastic_j2_dotState
use plastic_phenopowerlaw, only: &
plastic_phenopowerlaw_dotState
use plastic_phenoplus, only: &
@@ -971,8 +950,6 @@ subroutine constitutive_collectDotState(Tstar_v, FeArray, FpArray, subdt, subfra
plasticityType: select case (phase_plasticity(material_phase(ipc,ip,el)))
case (PLASTICITY_ISOTROPIC_ID) plasticityType
call plastic_isotropic_dotState (Tstar_v,ipc,ip,el)
case (PLASTICITY_J2_ID) plasticityType
call plastic_j2_dotState (Tstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
call plastic_phenopowerlaw_dotState(Tstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPLUS_ID) plasticityType
@@ -1117,7 +1094,6 @@ function constitutive_postResults(Tstar_v, FeArray, ipc, ip, el)
homogenization_maxNgrains, &
PLASTICITY_NONE_ID, &
PLASTICITY_ISOTROPIC_ID, &
PLASTICITY_J2_ID, &
PLASTICITY_PHENOPOWERLAW_ID, &
PLASTICITY_PHENOPLUS_ID, &
PLASTICITY_DISLOTWIN_ID, &
@@ -1130,8 +1106,6 @@ function constitutive_postResults(Tstar_v, FeArray, ipc, ip, el)
SOURCE_damage_anisoDuctile_ID
use plastic_isotropic, only: &
plastic_isotropic_postResults
use plastic_j2, only: &
plastic_j2_postResults
use plastic_phenopowerlaw, only: &
plastic_phenopowerlaw_postResults
use plastic_phenoplus, only: &
@@ -1185,8 +1159,6 @@ function constitutive_postResults(Tstar_v, FeArray, ipc, ip, el)
constitutive_postResults(startPos:endPos) = plastic_titanmod_postResults(ipc,ip,el)
case (PLASTICITY_ISOTROPIC_ID) plasticityType
constitutive_postResults(startPos:endPos) = plastic_isotropic_postResults(Tstar_v,ipc,ip,el)
case (PLASTICITY_J2_ID) plasticityType
constitutive_postResults(startPos:endPos) = plastic_j2_postResults(Tstar_v,ipc,ip,el)
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_phenopowerlaw_postResults(Tstar_v,ipc,ip,el)

code/crystallite.f90

@@ -258,7 +258,8 @@ subroutine crystallite_init
allocate(crystallite_orientation(4,cMax,iMax,eMax), source=0.0_pReal)
allocate(crystallite_orientation0(4,cMax,iMax,eMax), source=0.0_pReal)
allocate(crystallite_rotation(4,cMax,iMax,eMax), source=0.0_pReal)
allocate(crystallite_disorientation(4,nMax,cMax,iMax,eMax), source=0.0_pReal)
if (any(plasticState%nonLocal)) &
allocate(crystallite_disorientation(4,nMax,cMax,iMax,eMax),source=0.0_pReal)
allocate(crystallite_localPlasticity(cMax,iMax,eMax), source=.true.)
allocate(crystallite_requested(cMax,iMax,eMax), source=.false.)
allocate(crystallite_todo(cMax,iMax,eMax), source=.false.)
@@ -3961,7 +3962,6 @@ subroutine crystallite_orientations
use plastic_nonlocal, only: &
plastic_nonlocal_updateCompatibility
implicit none
integer(pInt) &
c, & !< counter in integration point component loop
@@ -3977,50 +3977,51 @@ subroutine crystallite_orientations
! --- CALCULATE ORIENTATION AND LATTICE ROTATION ---
!$OMP PARALLEL DO PRIVATE(orientation)
do e = FEsolving_execElem(1),FEsolving_execElem(2)
do i = FEsolving_execIP(1,e),FEsolving_execIP(2,e)
do c = 1_pInt,homogenization_Ngrains(mesh_element(3,e))
!$OMP PARALLEL DO PRIVATE(orientation)
do e = FEsolving_execElem(1),FEsolving_execElem(2)
do i = FEsolving_execIP(1,e),FEsolving_execIP(2,e)
do c = 1_pInt,homogenization_Ngrains(mesh_element(3,e))
! somehow this subroutine is not threadsafe, so need critical statement here; not clear, what exactly the problem is
!$OMP CRITICAL (polarDecomp)
orientation = math_RtoQ(transpose(math_rotationalPart33(crystallite_Fe(1:3,1:3,c,i,e)))) ! rotational part from polar decomposition as quaternion
!$OMP END CRITICAL (polarDecomp)
crystallite_rotation(1:4,c,i,e) = lattice_qDisorientation(crystallite_orientation0(1:4,c,i,e), & ! active rotation from ori0
orientation) ! to current orientation (with no symmetry)
crystallite_orientation(1:4,c,i,e) = orientation
enddo; enddo; enddo
!$OMP END PARALLEL DO
!$OMP CRITICAL (polarDecomp)
orientation = math_RtoQ(transpose(math_rotationalPart33(crystallite_Fe(1:3,1:3,c,i,e))))
!$OMP END CRITICAL (polarDecomp)
crystallite_rotation(1:4,c,i,e) = lattice_qDisorientation(crystallite_orientation0(1:4,c,i,e), &! active rotation from initial
orientation) ! to current orientation (with no symmetry)
crystallite_orientation(1:4,c,i,e) = orientation
enddo; enddo; enddo
!$OMP END PARALLEL DO
! --- UPDATE SOME ADDITIONAL VARIABLES THAT ARE NEEDED FOR NONLOCAL MATERIAL ---
! --- we use crystallite_orientation from above, so need a separate loop
!$OMP PARALLEL DO PRIVATE(myPhase,neighboring_e,neighboring_i,neighboringPhase)
nonlocalPresent: if (any(plasticState%nonLocal)) then
!$OMP PARALLEL DO PRIVATE(myPhase,neighboring_e,neighboring_i,neighboringPhase)
do e = FEsolving_execElem(1),FEsolving_execElem(2)
do i = FEsolving_execIP(1,e),FEsolving_execIP(2,e)
myPhase = material_phase(1,i,e) ! get my phase (non-local models make no sense with more than one grain per material point)
if (plasticState(myPhase)%nonLocal) then ! if nonlocal model
myPhase = material_phase(1,i,e) ! get my phase (non-local models make no sense with more than one grain per material point)
if (plasticState(myPhase)%nonLocal) then ! if nonlocal model
! --- calculate disorientation between me and my neighbor ---
do n = 1_pInt,FE_NipNeighbors(FE_celltype(FE_geomtype(mesh_element(2,e)))) ! loop through my neighbors
do n = 1_pInt,FE_NipNeighbors(FE_celltype(FE_geomtype(mesh_element(2,e)))) ! loop through my neighbors
neighboring_e = mesh_ipNeighborhood(1,n,i,e)
neighboring_i = mesh_ipNeighborhood(2,n,i,e)
if (neighboring_e > 0 .and. neighboring_i > 0) then ! if neighbor exists
neighboringPhase = material_phase(1,neighboring_i,neighboring_e) ! get my neighbor's phase
if (plasticState(neighboringPhase)%nonLocal) then ! neighbor got also nonlocal plasticity
if (lattice_structure(myPhase) == lattice_structure(neighboringPhase)) then ! if my neighbor has same crystal structure like me
if (neighboring_e > 0 .and. neighboring_i > 0) then ! if neighbor exists
neighboringPhase = material_phase(1,neighboring_i,neighboring_e) ! get my neighbor's phase
if (plasticState(neighboringPhase)%nonLocal) then ! neighbor got also nonlocal plasticity
if (lattice_structure(myPhase) == lattice_structure(neighboringPhase)) then ! if my neighbor has same crystal structure like me
crystallite_disorientation(:,n,1,i,e) = &
lattice_qDisorientation( crystallite_orientation(1:4,1,i,e), &
crystallite_orientation(1:4,1,neighboring_i,neighboring_e), &
lattice_structure(myPhase)) ! calculate disorientation for given symmetry
else ! for neighbor with different phase
crystallite_disorientation(:,n,1,i,e) = [0.0_pReal, 1.0_pReal, 0.0_pReal, 0.0_pReal] ! 180 degree rotation about 100 axis
lattice_structure(myPhase)) ! calculate disorientation for given symmetry
else ! for neighbor with different phase
crystallite_disorientation(:,n,1,i,e) = [0.0_pReal, 1.0_pReal, 0.0_pReal, 0.0_pReal]! 180 degree rotation about 100 axis
endif
else ! for neighbor with local plasticity
crystallite_disorientation(:,n,1,i,e) = [-1.0_pReal, 0.0_pReal, 0.0_pReal, 0.0_pReal] ! homomorphic identity
else ! for neighbor with local plasticity
crystallite_disorientation(:,n,1,i,e) = [-1.0_pReal, 0.0_pReal, 0.0_pReal, 0.0_pReal]! homomorphic identity
endif
else ! no existing neighbor
crystallite_disorientation(:,n,1,i,e) = [-1.0_pReal, 0.0_pReal, 0.0_pReal, 0.0_pReal] ! homomorphic identity
else ! no existing neighbor
crystallite_disorientation(:,n,1,i,e) = [-1.0_pReal, 0.0_pReal, 0.0_pReal, 0.0_pReal] ! homomorphic identity
endif
enddo
@@ -4031,7 +4032,8 @@ subroutine crystallite_orientations
endif
enddo; enddo
!$OMP END PARALLEL DO
!$OMP END PARALLEL DO
endif nonlocalPresent
end subroutine crystallite_orientations
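The reindented orientation loop above keeps the named critical section because, as the in-code comment notes, the polar-decomposition call path is not thread-safe. A minimal sketch of that pattern, serializing one hypothetical non-thread-safe call inside an otherwise parallel loop (all names here are illustrative, not DAMASK API):

program critical_demo
  implicit none
  integer :: i
  real    :: q, results(100)
  !$omp parallel do private(q)
  do i = 1, 100
    !$omp critical (serializedKernel)
    q = unsafeKernel(real(i))          ! hypothetical stand-in for the non-thread-safe call
    !$omp end critical (serializedKernel)
    results(i) = q                     ! the rest of the iteration runs concurrently
  end do
  !$omp end parallel do
  print *, results(1), results(100)
contains
  real function unsafeKernel(x)        ! imagine this touches shared internal state
    real, intent(in) :: x
    unsafeKernel = 2.0*x
  end function unsafeKernel
end program critical_demo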

code/homogenization.f90

@@ -71,12 +71,6 @@ contains
!> @brief module initialization
!--------------------------------------------------------------------------------------------------
subroutine homogenization_init
#ifdef HDF
use hdf5, only: &
HID_T
use IO, only : &
HDF5_mappingHomogenization
#endif
use, intrinsic :: iso_fortran_env ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
use math, only: &
math_I3
@@ -131,12 +125,6 @@ subroutine homogenization_init
character(len=64), dimension(:,:), pointer :: thisOutput
character(len=32) :: outputName !< name of output, intermediate fix until HDF5 output is ready
logical :: knownHomogenization, knownThermal, knownDamage, knownVacancyflux, knownPorosity, knownHydrogenflux
#ifdef HDF
integer(pInt), dimension(:,:), allocatable :: mapping
integer(pInt), dimension(:), allocatable :: InstancePosition
allocate(mapping(mesh_ncpelems,4),source=0_pInt)
allocate(InstancePosition(material_Nhomogenization),source=0_pInt)
#endif
!--------------------------------------------------------------------------------------------------
@@ -396,17 +384,6 @@ subroutine homogenization_init
!--------------------------------------------------------------------------------------------------
! allocate and initialize global state and postresutls variables
#ifdef HDF
elementLooping: do e = 1,mesh_NcpElems
myInstance = homogenization_typeInstance(mesh_element(3,e))
IpLooping: do i = 1,FE_Nips(FE_geomtype(mesh_element(2,e)))
InstancePosition(myInstance) = InstancePosition(myInstance)+1_pInt
mapping(e,1:4) = [instancePosition(myinstance),myinstance,e,i]
enddo IpLooping
enddo elementLooping
call HDF5_mappingHomogenization(mapping)
#endif
homogenization_maxSizePostResults = 0_pInt
thermal_maxSizePostResults = 0_pInt
damage_maxSizePostResults = 0_pInt

code/lattice.f90

@@ -17,13 +17,7 @@ module lattice
LATTICE_maxNslipFamily = 13_pInt, & !< max # of slip system families over lattice structures
LATTICE_maxNtwinFamily = 4_pInt, & !< max # of twin system families over lattice structures
LATTICE_maxNtransFamily = 2_pInt, & !< max # of transformation system families over lattice structures
LATTICE_maxNcleavageFamily = 3_pInt, & !< max # of transformation system families over lattice structures
LATTICE_maxNslip = 52_pInt, & !< max # of slip systems over lattice structures
LATTICE_maxNtwin = 24_pInt, & !< max # of twin systems over lattice structures
LATTICE_maxNinteraction = 182_pInt, & !< max # of interaction types (in hardening matrix part)
LATTICE_maxNnonSchmid = 6_pInt, & !< max # of non schmid contributions over lattice structures
LATTICE_maxNtrans = 12_pInt, & !< max # of transformations over lattice structures
LATTICE_maxNcleavage = 9_pInt !< max # of cleavage over lattice structures
LATTICE_maxNcleavageFamily = 3_pInt !< max # of transformation system families over lattice structures
integer(pInt), allocatable, dimension(:,:), protected, public :: &
lattice_NslipSystem, & !< total # of slip systems in each family
@@ -80,25 +74,25 @@ module lattice
lattice_NnonSchmid !< total # of non-Schmid contributions for each structure
!--------------------------------------------------------------------------------------------------
! fcc
! face centered cubic
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
LATTICE_fcc_NslipSystem = int([12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],pInt) !< total # of slip systems per family for fcc
LATTICE_fcc_NslipSystem = int([12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],pInt) !< # of slip systems per family for fcc
integer(pInt), dimension(LATTICE_maxNtwinFamily), parameter, public :: &
LATTICE_fcc_NtwinSystem = int([12, 0, 0, 0],pInt) !< total # of twin systems per family for fcc
LATTICE_fcc_NtwinSystem = int([12, 0, 0, 0],pInt) !< # of twin systems per family for fcc
integer(pInt), dimension(LATTICE_maxNtransFamily), parameter, public :: &
LATTICE_fcc_NtransSystem = int([12, 0],pInt) !< total # of transformation systems per family for fcc
LATTICE_fcc_NtransSystem = int([12, 0],pInt) !< # of transformation systems per family for fcc
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_fcc_NcleavageSystem = int([3, 4, 0],pInt) !< total # of cleavage systems per family for fcc
LATTICE_fcc_NcleavageSystem = int([3, 4, 0],pInt) !< # of cleavage systems per family for fcc
integer(pInt), parameter, private :: &
LATTICE_fcc_Nslip = 12_pInt, & ! sum(lattice_fcc_NslipSystem), & !< total # of slip systems for fcc
LATTICE_fcc_Ntwin = 12_pInt, & ! sum(lattice_fcc_NtwinSystem) !< total # of twin systems for fcc
LATTICE_fcc_Nslip = 12_pInt, & !sum(lattice_fcc_NslipSystem), & !< total # of slip systems for fcc
LATTICE_fcc_Ntwin = 12_pInt, & !sum(lattice_fcc_NtwinSystem), & !< total # of twin systems for fcc
LATTICE_fcc_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for fcc
LATTICE_fcc_Ntrans = 12_pInt, & !< total # of transformations for fcc
LATTICE_fcc_Ncleavage = 7_pInt !< total # of cleavage systems for fcc
LATTICE_fcc_Ntrans = 12_pInt, & !sum(lattice_fcc_NtransSystem), & !< total # of transformation systems for fcc
LATTICE_fcc_Ncleavage = 7_pInt !sum(lattice_fcc_NcleavageSystem) !< total # of cleavage systems for fcc
real(pReal), dimension(3+3,LATTICE_fcc_Nslip), parameter, private :: &
LATTICE_fcc_systemSlip = reshape(real([&
@@ -312,8 +306,8 @@ module lattice
0.0, 0.0, 1.0, 45.0 &
],[ 4_pInt,LATTICE_fcc_Ntrans])
real(pReal), dimension(LATTICE_fcc_Ntrans,LATTICE_fcc_Ntrans), parameter, private :: & ! Matrix for projection of shear from slip system to fault-band (twin) systems
LATTICE_fccTobcc_projectionTrans = reshape(real([& ! For ns = nt = nr
real(pReal), dimension(LATTICE_fcc_Ntrans,LATTICE_fcc_Ntrans), parameter, private :: & ! Matrix for projection of shear from slip system to fault-band (twin) systems
LATTICE_fccTobcc_projectionTrans = reshape(real([& ! For ns = nt = nr
0, 1,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, &
-1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, &
1,-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, &
@@ -363,27 +357,26 @@ module lattice
],pReal),[ 3_pInt + 3_pInt,LATTICE_fcc_Ncleavage])
!--------------------------------------------------------------------------------------------------
! bcc
! body centered cubic
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
LATTICE_bcc_NslipSystem = int([ 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], pInt) !< total # of slip systems per family for bcc
LATTICE_bcc_NslipSystem = int([ 12, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], pInt) !< # of slip systems per family for bcc
integer(pInt), dimension(LATTICE_maxNtwinFamily), parameter, public :: &
LATTICE_bcc_NtwinSystem = int([ 12, 0, 0, 0], pInt) !< total # of twin systems per family for bcc
LATTICE_bcc_NtwinSystem = int([ 12, 0, 0, 0], pInt) !< # of twin systems per family for bcc
integer(pInt), dimension(LATTICE_maxNtransFamily), parameter, public :: &
LATTICE_bcc_NtransSystem = int([0,0],pInt) !< total # of transformation systems per family for bcc
LATTICE_bcc_NtransSystem = int([0,0],pInt) !< # of transformation systems per family for bcc
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_bcc_NcleavageSystem = int([3,6,0],pInt) !< total # of cleavage systems per family for bcc
LATTICE_bcc_NcleavageSystem = int([3,6,0],pInt) !< # of cleavage systems per family for bcc
integer(pInt), parameter, private :: &
LATTICE_bcc_Nslip = 24_pInt, & ! sum(lattice_bcc_NslipSystem), & !< total # of slip systems for bcc
LATTICE_bcc_Ntwin = 12_pInt, & ! sum(lattice_bcc_NtwinSystem) !< total # of twin systems for bcc
LATTICE_bcc_NnonSchmid = 6_pInt, & !< # of non-Schmid contributions for bcc. 6 known non schmid contributions for BCC (A. Koester, A. Ma, A. Hartmaier 2012)
LATTICE_bcc_Ntrans = 0_pInt, & !< total # of transformations for bcc
LATTICE_bcc_Ncleavage = 9_pInt !< total # of cleavage systems for bcc
LATTICE_bcc_Nslip = 24_pInt, & !sum(lattice_bcc_NslipSystem), & !< total # of slip systems for bcc
LATTICE_bcc_Ntwin = 12_pInt, & !sum(lattice_bcc_NtwinSystem), & !< total # of twin systems for bcc
LATTICE_bcc_NnonSchmid = 6_pInt, & !< total # of non-Schmid contributions for bcc (A. Koester, A. Ma, A. Hartmaier 2012)
LATTICE_bcc_Ntrans = 0_pInt, & !sum(lattice_bcc_NtransSystem), & !< total # of transformation systems for bcc
LATTICE_bcc_Ncleavage = 9_pInt !sum(lattice_bcc_NcleavageSystem) !< total # of cleavage systems for bcc
real(pReal), dimension(3+3,LATTICE_bcc_Nslip), parameter, private :: &
LATTICE_bcc_systemSlip = reshape(real([&
! Slip direction Plane normal
@@ -561,25 +554,25 @@ module lattice
],pReal),[ 3_pInt + 3_pInt,LATTICE_bcc_Ncleavage])
!--------------------------------------------------------------------------------------------------
! hex
! hexagonal
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
lattice_hex_NslipSystem = int([ 3, 3, 3, 6, 12, 6, 0, 0, 0, 0, 0, 0, 0],pInt) !< # of slip systems per family for hex
integer(pInt), dimension(LATTICE_maxNtwinFamily), parameter, public :: &
lattice_hex_NtwinSystem = int([ 6, 6, 6, 6],pInt) !< # of twin systems per family for hex
integer(pInt), dimension(LATTICE_maxNtransFamily), parameter, public :: &
LATTICE_hex_NtransSystem = int([0,0],pInt) !< total # of transformation systems per family for hex
LATTICE_hex_NtransSystem = int([0,0],pInt) !< # of transformation systems per family for hex
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_hex_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for hex
LATTICE_hex_NcleavageSystem = int([3,0,0],pInt) !< # of cleavage systems per family for hex
integer(pInt), parameter , private :: &
LATTICE_hex_Nslip = 33_pInt, & ! sum(lattice_hex_NslipSystem), !< total # of slip systems for hex
LATTICE_hex_Ntwin = 24_pInt, & ! sum(lattice_hex_NtwinSystem) !< total # of twin systems for hex
LATTICE_hex_NnonSchmid = 0_pInt, & !< # of non-Schmid contributions for hex
LATTICE_hex_Ntrans = 0_pInt, & !< total # of transformations for hex
LATTICE_hex_Ncleavage = 3_pInt !< total # of transformations for hex
integer(pInt), parameter, private :: &
LATTICE_hex_Nslip = 33_pInt, & !sum(lattice_hex_NslipSystem), & !< total # of slip systems for hex
LATTICE_hex_Ntwin = 24_pInt, & !sum(lattice_hex_NtwinSystem), & !< total # of twin systems for hex
LATTICE_hex_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for hex
LATTICE_hex_Ntrans = 0_pInt, & !sum(lattice_hex_NtransSystem), & !< total # of transformation systems for hex
LATTICE_hex_Ncleavage = 3_pInt !sum(lattice_hex_NcleavageSystem) !< total # of cleavage systems for hex
real(pReal), dimension(4+4,LATTICE_hex_Nslip), parameter, private :: &
LATTICE_hex_systemSlip = reshape(real([&
@ -842,28 +835,26 @@ module lattice
],pReal),[ 4_pInt + 4_pInt,LATTICE_hex_Ncleavage])
!--------------------------------------------------------------------------------------------------
! bct
! body centered tetragonal
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
LATTICE_bct_NslipSystem = int([2, 2, 2, 4, 2, 4, 2, 2, 4, 8, 4, 8, 8 ],pInt) !< # of slip systems per family for bct (Sn) Bieler J. Electr Mater 2009
integer(pInt), dimension(LATTICE_maxNtwinFamily), parameter, public :: &
LATTICE_bct_NtwinSystem = int([0, 0, 0, 0], pInt) !< total # of twin systems per family for bct-example
LATTICE_bct_NtwinSystem = int([0, 0, 0, 0], pInt) !< # of twin systems per family for bct
integer(pInt), dimension(LATTICE_maxNtransFamily), parameter, public :: &
LATTICE_bct_NtransSystem = int([0,0],pInt) !< total # of transformation systems per family for bct
LATTICE_bct_NtransSystem = int([0,0],pInt) !< # of transformation systems per family for bct
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_bct_NcleavageSystem = int([0,0,0],pInt) !< total # of cleavage systems per family for bct
LATTICE_bct_NcleavageSystem = int([0,0,0],pInt) !< # of cleavage systems per family for bct
integer(pInt), parameter , private :: &
LATTICE_bct_Nslip = 52_pInt, & ! sum(lattice_bct_NslipSystem), !< total # of slip systems for bct
LATTICE_bct_Ntwin = 0_pInt, & ! sum(lattice_bcc_NtwinSystem) !< total # of twin systems for bct
LATTICE_bct_NnonSchmid = 0_pInt, & !< # of non-Schmid contributions for bct
LATTICE_bct_Ntrans = 0_pInt, & !< total # of transformations for bct
LATTICE_bct_Ncleavage = 0_pInt !< total # of transformations for bct
integer(pInt), parameter, private :: &
LATTICE_bct_Nslip = 52_pInt, & !sum(lattice_bct_NslipSystem), & !< total # of slip systems for bct
LATTICE_bct_Ntwin = 0_pInt, & !sum(lattice_bct_NtwinSystem), & !< total # of twin systems for bct
LATTICE_bct_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for bct
LATTICE_bct_Ntrans = 0_pInt, & !sum(lattice_bct_NtransSystem), & !< total # of transformation systems for bct
LATTICE_bct_Ncleavage = 0_pInt !sum(lattice_bct_NcleavageSystem) !< total # of cleavage systems for bct
real(pReal), dimension(3+3,LATTICE_bct_Nslip), parameter, private :: &
LATTICE_bct_systemSlip = reshape(real([&
@ -1006,12 +997,25 @@ module lattice
!--------------------------------------------------------------------------------------------------
! isotropic
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
LATTICE_iso_NslipSystem = int([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],pInt) !< # of slip systems per family for iso
integer(pInt), dimension(LATTICE_maxNtwinFamily), parameter, public :: &
LATTICE_iso_NtwinSystem = int([0, 0, 0, 0], pInt) !< # of twin systems per family for iso
integer(pInt), dimension(LATTICE_maxNtransFamily), parameter, public :: &
LATTICE_iso_NtransSystem = int([0, 0],pInt) !< # of transformation systems per family for iso
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_iso_NcleavageSystem = int([3,0,0],pInt) !< total # of cleavage systems per family for isotropic
LATTICE_iso_NcleavageSystem = int([3,0,0],pInt) !< # of cleavage systems per family for iso
integer(pInt), parameter, private :: &
LATTICE_iso_Ncleavage = 3_pInt !< total # of cleavage systems for bcc
LATTICE_iso_Nslip = 0_pInt, & !sum(lattice_iso_NslipSystem), & !< total # of slip systems for iso
LATTICE_iso_Ntwin = 0_pInt, & !sum(lattice_iso_NtwinSystem), & !< total # of twin systems for iso
LATTICE_iso_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for iso
LATTICE_iso_Ntrans = 0_pInt, & !sum(lattice_iso_NtransSystem), & !< total # of transformation systems for iso
LATTICE_iso_Ncleavage = 3_pInt !sum(lattice_iso_NcleavageSystem) !< total # of cleavage systems for iso
real(pReal), dimension(3+3,LATTICE_iso_Ncleavage), parameter, private :: &
LATTICE_iso_systemCleavage = reshape(real([&
! Cleavage direction Plane normal
@ -1022,12 +1026,25 @@ module lattice
!--------------------------------------------------------------------------------------------------
! orthorhombic
integer(pInt), dimension(LATTICE_maxNslipFamily), parameter, public :: &
LATTICE_ortho_NslipSystem = int([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],pInt) !< # of slip systems per family for ortho
integer(pInt), dimension(LATTICE_maxNtwinFamily), parameter, public :: &
LATTICE_ortho_NtwinSystem = int([0, 0, 0, 0], pInt) !< # of twin systems per family for ortho
integer(pInt), dimension(LATTICE_maxNtransFamily), parameter, public :: &
LATTICE_ortho_NtransSystem = int([0, 0],pInt) !< # of transformation systems per family for ortho
integer(pInt), dimension(LATTICE_maxNcleavageFamily), parameter, public :: &
LATTICE_ortho_NcleavageSystem = int([1,1,1],pInt) !< total # of cleavage systems per family for orthotropic
LATTICE_ortho_NcleavageSystem = int([1,1,1],pInt) !< # of cleavage systems per family for ortho
integer(pInt), parameter, private :: &
LATTICE_ortho_Ncleavage = 3_pInt !< total # of cleavage systems for bcc
LATTICE_ortho_Nslip = 0_pInt, & !sum(lattice_ortho_NslipSystem), & !< total # of slip systems for ortho
LATTICE_ortho_Ntwin = 0_pInt, & !sum(lattice_ortho_NtwinSystem), & !< total # of twin systems for ortho
LATTICE_ortho_NnonSchmid = 0_pInt, & !< total # of non-Schmid contributions for ortho
LATTICE_ortho_Ntrans = 0_pInt, & !sum(lattice_ortho_NtransSystem), & !< total # of transformation systems for ortho
LATTICE_ortho_Ncleavage = 3_pInt !sum(lattice_ortho_NcleavageSystem) !< total # of cleavage systems for ortho
real(pReal), dimension(3+3,LATTICE_ortho_Ncleavage), parameter, private :: &
LATTICE_ortho_systemCleavage = reshape(real([&
! Cleavage direction Plane normal
@ -1036,16 +1053,36 @@ module lattice
1, 0, 0, 0, 0, 1 &
],pReal),[ 3_pInt + 3_pInt,LATTICE_ortho_Ncleavage])
real(pReal), dimension(:,:,:), allocatable, public, protected :: &
integer(pInt), parameter, public :: &
LATTICE_maxNslip = 52_pInt, &
!LATTICE_maxNslip = maxval([LATTICE_fcc_Nslip,LATTICE_bcc_Nslip,LATTICE_hex_Nslip,\
! LATTICE_bct_Nslip,LATTICE_iso_Nslip,LATTICE_ortho_Nslip]), & !< max # of slip systems over lattice structures
LATTICE_maxNtwin = 24_pInt, &
!LATTICE_maxNtwin = maxval([LATTICE_fcc_Ntwin,LATTICE_bcc_Ntwin,LATTICE_hex_Ntwin,\
! LATTICE_bct_Ntwin,LATTICE_iso_Ntwin,LATTICE_ortho_Ntwin]), & !< max # of twin systems over lattice structures
LATTICE_maxNnonSchmid = 6_pInt, &
!LATTICE_maxNnonSchmid = maxval([LATTICE_fcc_NnonSchmid,LATTICE_bcc_NnonSchmid,\
! LATTICE_hex_NnonSchmid,LATTICE_bct_NnonSchmid,\
! LATTICE_iso_NnonSchmid,LATTICE_ortho_NnonSchmid]), & !< max # of non-Schmid contributions over lattice structures
LATTICE_maxNtrans = 12_pInt, &
!LATTICE_maxNtrans = maxval([LATTICE_fcc_Ntrans,LATTICE_bcc_Ntrans,LATTICE_hex_Ntrans,\
! LATTICE_bct_Ntrans,LATTICE_iso_Ntrans,LATTICE_ortho_Ntrans]),&!< max # of transformation systems over lattice structures
LATTICE_maxNcleavage = 9_pInt, &
!LATTICE_maxNcleavage = maxval([LATTICE_fcc_Ncleavage,LATTICE_bcc_Ncleavage,\
! LATTICE_hex_Ncleavage,LATTICE_bct_Ncleavage,\
! LATTICE_iso_Ncleavage,LATTICE_ortho_Ncleavage]) !< max # of cleavage systems over lattice structures
LATTICE_maxNinteraction = 182_pInt !< max # of interaction types (in hardening matrix part)
real(pReal), dimension(:,:,:), allocatable, public, protected :: &
lattice_C66, lattice_trans_C66
real(pReal), dimension(:,:,:,:,:), allocatable, public, protected :: &
lattice_C3333, lattice_trans_C3333
real(pReal), dimension(:), allocatable, public, protected :: &
lattice_mu, &
lattice_nu, &
lattice_trans_mu, &
lattice_trans_nu
real(pReal), dimension(:,:,:), allocatable, public, protected :: &
lattice_thermalConductivity33, &
lattice_thermalExpansion33, &
lattice_damageDiffusion33, &
@ -1054,7 +1091,7 @@ module lattice
lattice_porosityDiffusion33, &
lattice_hydrogenfluxDiffusion33, &
lattice_hydrogenfluxMobility33
real(pReal), dimension(:), allocatable, public, protected :: &
lattice_damageMobility, &
lattice_porosityMobility, &
lattice_massDensity, &
@ -1253,7 +1290,7 @@ subroutine lattice_init
endif mainProcess
!--------------------------------------------------------------------------------------------------
! consistency checks
! consistency checks (required since ifort 15.0 does not support sum/maxval in parameter definition)
if (LATTICE_maxNslip /= maxval([LATTICE_fcc_Nslip,LATTICE_bcc_Nslip,LATTICE_hex_Nslip,LATTICE_bct_Nslip])) &
call IO_error(0_pInt,ext_msg = 'LATTICE_maxNslip')
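The pattern here: hard-code the aggregate as a literal parameter, then verify it at runtime against the arrays it summarizes. A minimal sketch of the same workaround (names hypothetical):

! sketch: the targeted ifort rejects maxval() in a parameter initializer
integer(pInt), dimension(4), parameter :: counts   = int([12,12,0,0],pInt)
integer(pInt),               parameter :: maxCount = 12_pInt                 ! = maxval(counts), hard-coded

! ... later, inside an init routine, where intrinsics are unrestricted:
if (maxCount /= maxval(counts)) call IO_error(0_pInt, ext_msg='maxCount')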

View File

@ -24,7 +24,6 @@ module material
ELASTICITY_hooke_label = 'hooke', &
PLASTICITY_none_label = 'none', &
PLASTICITY_isotropic_label = 'isotropic', &
PLASTICITY_j2_label = 'j2', &
PLASTICITY_phenopowerlaw_label = 'phenopowerlaw', &
PLASTICITY_phenoplus_label = 'phenoplus', &
PLASTICITY_dislotwin_label = 'dislotwin', &
@ -74,7 +73,6 @@ module material
enumerator :: PLASTICITY_undefined_ID, &
PLASTICITY_none_ID, &
PLASTICITY_isotropic_ID, &
PLASTICITY_j2_ID, &
PLASTICITY_phenopowerlaw_ID, &
PLASTICITY_phenoplus_ID, &
PLASTICITY_dislotwin_ID, &
@ -313,7 +311,6 @@ module material
ELASTICITY_hooke_ID ,&
PLASTICITY_none_ID, &
PLASTICITY_isotropic_ID, &
PLASTICITY_J2_ID, &
PLASTICITY_phenopowerlaw_ID, &
PLASTICITY_phenoplus_ID, &
PLASTICITY_dislotwin_ID, &
@ -351,9 +348,6 @@ module material
HYDROGENFLUX_cahnhilliard_ID, &
HOMOGENIZATION_none_ID, &
HOMOGENIZATION_isostrain_ID, &
#ifdef HDF
material_NconstituentsPhase, &
#endif
HOMOGENIZATION_RGC_ID
private :: &
@ -982,8 +976,6 @@ subroutine material_parsePhase(fileUnit,myPart)
phase_plasticity(section) = PLASTICITY_NONE_ID
case (PLASTICITY_ISOTROPIC_label)
phase_plasticity(section) = PLASTICITY_ISOTROPIC_ID
case (PLASTICITY_J2_label)
phase_plasticity(section) = PLASTICITY_J2_ID
case (PLASTICITY_PHENOPOWERLAW_label)
phase_plasticity(section) = PLASTICITY_PHENOPOWERLAW_ID
case (PLASTICITY_PHENOPLUS_label)
@ -1280,7 +1272,7 @@ subroutine material_populateGrains
integer(pInt) :: t,e,i,g,j,m,c,r,homog,micro,sgn,hme, myDebug, &
phaseID,textureID,dGrains,myNgrains,myNorientations,myNconstituents, &
grain,constituentGrain,ipGrain,symExtension, ip
real(pReal) :: extreme,rnd
real(pReal) :: deviation,extreme,rnd
integer(pInt), dimension (:,:), allocatable :: Nelems ! counts number of elements in homog, micro array
type(p_intvec), dimension (:,:), allocatable :: elemsOfHomogMicro ! lists element number in homog, micro array
@ -1407,8 +1399,11 @@ subroutine material_populateGrains
extreme = 0.0_pReal
t = 0_pInt
do i = 1_pInt,myNconstituents ! find largest deviator
if (real(sgn,pReal)*log(NgrainsOfConstituent(i)/myNgrains/microstructure_fraction(i,micro)) > extreme) then
extreme = real(sgn,pReal)*log(NgrainsOfConstituent(i)/myNgrains/microstructure_fraction(i,micro))
deviation = real(sgn,pReal)*log( microstructure_fraction(i,micro) / &
!-------------------------------- &
(real(NgrainsOfConstituent(i),pReal)/real(myNgrains,pReal) ) )
if (deviation > extreme) then
extreme = deviation
t = i
endif
enddo
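The rewrite is more than cosmetic: in the removed expression, NgrainsOfConstituent(i)/myNgrains is an integer division and truncates before the logarithm is taken, which is presumably the bug being fixed. A small illustration with hypothetical values:

! sketch: integer division silently truncates the realized grain fraction
integer(pInt) :: Ngrain = 3_pInt, Ntotal = 10_pInt
real(pReal)   :: frac   = 0.3_pReal
! old: Ngrain/Ntotal                         -> 0 (integer division), so log(0/frac) diverges
! new: real(Ngrain,pReal)/real(Ntotal,pReal) -> 0.3, so log(frac/0.3) = 0.0 as intended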
@ -1600,14 +1595,4 @@ subroutine material_populateGrains
end subroutine material_populateGrains
#ifdef HDF
integer(pInt) pure function material_NconstituentsPhase(matID)
implicit none
integer(pInt), intent(in) :: matID
material_NconstituentsPhase = count(microstructure_phase == matID)
end function
#endif
end module material

View File

@ -839,6 +839,9 @@ subroutine math_invert(myDim,A, InvA, error)
real(pReal), dimension(myDim,myDim), intent(out) :: invA
logical, intent(out) :: error
external :: &
dgetrf, &
dgetri
invA = A
call dgetrf(myDim,myDim,invA,myDim,ipiv,ierr)
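Declaring dgetrf and dgetri as external documents that these are LAPACK routines resolved at link time rather than module procedures. They act as a pair; a minimal sketch of the standard call sequence:

! sketch: LU-based matrix inversion, the pattern math_invert builds on
call dgetrf(n, n, A, n, ipiv, info)                      ! in-place LU factorization, pivot indices in ipiv
if (info == 0) &
  call dgetri(n, A, n, ipiv, work, size(work), info)     ! overwrite A with its inverse, computed from the LU factors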
@ -1645,14 +1648,14 @@ pure function math_qToAxisAngle(Q)
real(pReal) :: halfAngle, sinHalfAngle
real(pReal), dimension(4) :: math_qToAxisAngle
halfAngle = acos(max(-1.0_pReal, min(1.0_pReal, Q(1)))) ! limit to [-1,1] --> 0 to 180 deg
halfAngle = acos(math_limit(Q(1),-1.0_pReal,1.0_pReal))
sinHalfAngle = sin(halfAngle)
if (sinHalfAngle <= 1.0e-4_pReal) then ! very small rotation angle?
smallRotation: if (sinHalfAngle <= 1.0e-4_pReal) then
math_qToAxisAngle = 0.0_pReal
else
else smallRotation
math_qToAxisAngle= [ Q(2:4)/sinHalfAngle, halfAngle*2.0_pReal]
endif
endif smallRotation
end function math_qToAxisAngle
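math_limit replaces the inlined max/min clamp. Assuming the obvious semantics (a hedged sketch, not the actual DAMASK signature), it amounts to:

! sketch: clamp a into [lo,hi], keeping the acos() argument inside [-1,1]
real(pReal) pure function clamped(a, lo, hi)
  real(pReal), intent(in) :: a, lo, hi
  clamped = min(hi, max(lo, a))
end function clamped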

View File

@ -4525,17 +4525,9 @@ subroutine mesh_write_cellGeom
VTK_geo, &
VTK_con, &
VTK_end
#ifdef HDF
use IO, only: &
HDF5_mappingCells
#endif
implicit none
integer(I4P), dimension(1:mesh_Ncells) :: celltype
integer(I4P), dimension(mesh_Ncells*(1_pInt+FE_maxNcellnodesPerCell)) :: cellconnection
#ifdef HDF
integer(pInt), dimension(mesh_Ncells*FE_maxNcellnodesPerCell) :: cellconnectionHDF5
integer(pInt) :: j2=0_pInt
#endif
integer(I4P):: error
integer(I4P):: g, c, e, CellID, i, j
@ -4550,16 +4542,8 @@ subroutine mesh_write_cellGeom
cellconnection(j+1_pInt:j+FE_NcellnodesPerCell(c)+1_pInt) &
= [FE_NcellnodesPerCell(c),mesh_cell(1:FE_NcellnodesPerCell(c),i,e)-1_pInt] ! number of cellnodes per cell & list of global cellnode IDs belonging to this cell (cellnode counting starts at 0)
j = j + FE_NcellnodesPerCell(c) + 1_pInt
#ifdef HDF
cellconnectionHDF5(j2+1_pInt:j2+FE_NcellnodesPerCell(c)) &
= mesh_cell(1:FE_NcellnodesPerCell(c),i,e)-1_pInt
j2=j2 + FE_ncellnodesPerCell(c)
#endif
enddo
enddo
#ifdef HDF
call HDF5_mappingCells(cellconnectionHDF5(1:j2))
#endif
error=VTK_ini(output_format = 'ASCII', &
title=trim(getSolverJobName())//' cell mesh', &

File diff suppressed because it is too large

View File

@ -7,14 +7,10 @@
!! untextured polycrystal
!--------------------------------------------------------------------------------------------------
module plastic_isotropic
#ifdef HDF
use hdf5, only: &
HID_T
#endif
use prec, only: &
pReal,&
pInt
pInt, &
DAMASK_NaN
implicit none
private
@ -40,22 +36,22 @@ module plastic_isotropic
integer(kind(undefined_ID)), allocatable, dimension(:) :: &
outputID
real(pReal) :: &
fTaylor, &
tau0, &
gdot0, &
n, &
h0, &
h0_slopeLnRate, &
tausat, &
a, &
aTolFlowstress, &
aTolShear , &
tausat_SinhFitA, &
tausat_SinhFitB, &
tausat_SinhFitC, &
tausat_SinhFitD
fTaylor = DAMASK_NaN, &
tau0 = DAMASK_NaN, &
gdot0 = DAMASK_NaN, &
n = DAMASK_NaN, &
h0 = DAMASK_NaN, &
h0_slopeLnRate = 0.0_pReal, &
tausat = DAMASK_NaN, &
a = DAMASK_NaN, &
aTolFlowstress = 1.0_pReal, &
aTolShear = 1.0e-6_pReal, &
tausat_SinhFitA= 0.0_pReal, &
tausat_SinhFitB= 0.0_pReal, &
tausat_SinhFitC= 0.0_pReal, &
tausat_SinhFitD= 0.0_pReal
logical :: &
dilatation
dilatation = .false.
end type
type(tParameters), dimension(:), allocatable, private :: param !< containers of constitutive parameters (len Ninstance)
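Seeding the mandatory parameters with DAMASK_NaN, while the optional ones get usable defaults, makes a missing config entry detectable after parsing. A hedged sketch of the check this enables (using prec_isNaN exported by prec; the exact error message is illustrative):

! sketch: flag a mandatory parameter the material config never set
if (prec_isNaN(param(instance)%tau0)) &
  call IO_error(211_pInt, ext_msg='tau0 ('//PLASTICITY_ISOTROPIC_label//')')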
@ -143,9 +139,10 @@ subroutine plastic_isotropic_init(fileUnit)
sizeDeltaState
character(len=65536) :: &
tag = '', &
outputtag = '', &
line = '', &
extmsg = ''
character(len=64) :: &
outputtag = ''
integer(pInt) :: NipcMyPhase
mainProcess: if (worldrank == 0) then
@ -385,8 +382,7 @@ subroutine plastic_isotropic_LpAndItsTangent(Lp,dLp_dTstar99,Tstar_v,ipc,ip,el)
math_mul33xx33, &
math_transpose33
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
phasememberAt, &
material_phase, &
phase_plasticityInstance
@ -416,7 +412,7 @@ subroutine plastic_isotropic_LpAndItsTangent(Lp,dLp_dTstar99,Tstar_v,ipc,ip,el)
k, l, m, n
of = phasememberAt(ipc,ip,el) ! phasememberAt should be tackled by material and be renamed to material_phasemember
instance = phase_plasticityInstance(phaseAt(ipc,ip,el)) ! "phaseAt" equivalent to "material_phase" !!
instance = phase_plasticityInstance(material_phase(ipc,ip,el))
Tstar_dev_33 = math_deviatoric33(math_Mandel6to33(Tstar_v)) ! deviatoric part of 2nd Piola-Kirchhoff stress
squarenorm_Tstar_dev = math_mul33xx33(Tstar_dev_33,Tstar_dev_33)
@ -466,15 +462,15 @@ subroutine plastic_isotropic_LiAndItsTangent(Li,dLi_dTstar_3333,Tstar_v,ipc,ip,e
math_spherical33, &
math_mul33xx33
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
phasememberAt, &
material_phase, &
phase_plasticityInstance
implicit none
real(pReal), dimension(3,3), intent(out) :: &
Li !< plastic velocity gradient
real(pReal), dimension(3,3,3,3), intent(out) :: &
dLi_dTstar_3333 !< derivative of Li with respect to Tstar as 4th order tensor
real(pReal), dimension(6), intent(in) :: &
Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation
integer(pInt), intent(in) :: &
@ -484,9 +480,7 @@ subroutine plastic_isotropic_LiAndItsTangent(Li,dLi_dTstar_3333,Tstar_v,ipc,ip,e
real(pReal), dimension(3,3) :: &
Tstar_sph_33 !< spherical part of the 2nd Piola Kirchhoff stress tensor as 2nd order tensor
real(pReal), dimension(3,3,3,3), intent(out) :: &
dLi_dTstar_3333 !< derivative of Li with respect to Tstar as 4th order tensor
real(pReal) :: &
gamma_dot, & !< strainrate
norm_Tstar_sph, & !< euclidean norm of Tstar_sph
squarenorm_Tstar_sph !< square of the euclidean norm of Tstar_sph
@ -495,34 +489,32 @@ subroutine plastic_isotropic_LiAndItsTangent(Li,dLi_dTstar_3333,Tstar_v,ipc,ip,e
k, l, m, n
of = phasememberAt(ipc,ip,el) ! phasememberAt should be tackled by material and be renamed to material_phasemember
instance = phase_plasticityInstance(phaseAt(ipc,ip,el)) ! "phaseAt" equivalent to "material_phase" !!
instance = phase_plasticityInstance(material_phase(ipc,ip,el))
Tstar_sph_33 = math_spherical33(math_Mandel6to33(Tstar_v)) ! spherical part of 2nd Piola-Kirchhoff stress
squarenorm_Tstar_sph = math_mul33xx33(Tstar_sph_33,Tstar_sph_33)
norm_Tstar_sph = sqrt(squarenorm_Tstar_sph)
if (param(instance)%dilatation) then
if (norm_Tstar_sph <= 0.0_pReal) then ! Tstar == 0 --> both Li and dLi_dTstar are zero
Li = 0.0_pReal
dLi_dTstar_3333 = 0.0_pReal
else
gamma_dot = param(instance)%gdot0 &
* (sqrt(1.5_pReal) * norm_Tstar_sph / param(instance)%fTaylor / state(instance)%flowstress(of) ) &
**param(instance)%n
if (param(instance)%dilatation .and. norm_Tstar_sph > 0.0_pReal) then ! Tstar == 0 or J2 plasticity (no dilatation) --> both Li and dLi_dTstar are zero
gamma_dot = param(instance)%gdot0 &
* (sqrt(1.5_pReal) * norm_Tstar_sph / param(instance)%fTaylor / state(instance)%flowstress(of) ) &
**param(instance)%n
Li = Tstar_sph_33/norm_Tstar_sph * gamma_dot/param(instance)%fTaylor
!--------------------------------------------------------------------------------------------------
! Calculation of the tangent of Li
forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) &
dLi_dTstar_3333(k,l,m,n) = (param(instance)%n-1.0_pReal) * &
Tstar_sph_33(k,l)*Tstar_sph_33(m,n) / squarenorm_Tstar_sph
forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt) &
dLi_dTstar_3333(k,l,k,l) = dLi_dTstar_3333(k,l,k,l) + 1.0_pReal
dLi_dTstar_3333 = gamma_dot / param(instance)%fTaylor * &
dLi_dTstar_3333 / norm_Tstar_sph
else
Li = 0.0_pReal
dLi_dTstar_3333 = 0.0_pReal
endif
end subroutine plastic_isotropic_LiAndItsTangent
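For reference, the flow rule this branch implements, written out with M the Taylor factor, sigma_f the flowstress state, and T_sph the spherical stress part:

  gamma_dot = gdot0 * ( sqrt(3/2) * ||T_sph|| / (M * sigma_f) )**n
  Li        = T_sph/||T_sph|| * gamma_dot/M

Differentiating through the power law gives the tangent the two forall loops assemble:

  dLi/dT_sph = gamma_dot/(M*||T_sph||) * [ (n-1) * T_sph⊗T_sph/||T_sph||**2 + I ]

with I the fourth-order identity delta_km*delta_ln and ⊗ the dyadic product.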
@ -535,8 +527,7 @@ subroutine plastic_isotropic_dotState(Tstar_v,ipc,ip,el)
use math, only: &
math_mul6x6
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
phasememberAt, &
material_phase, &
phase_plasticityInstance
@ -559,7 +550,7 @@ subroutine plastic_isotropic_dotState(Tstar_v,ipc,ip,el)
of !< shortcut notation for offset position in state array
of = phasememberAt(ipc,ip,el) ! phasememberAt should be tackled by material and be renamed to material_phasemember
instance = phase_plasticityInstance(phaseAt(ipc,ip,el)) ! "phaseAt" equivalent to "material_phase" !!
instance = phase_plasticityInstance(material_phase(ipc,ip,el))
!--------------------------------------------------------------------------------------------------
! norm of (deviatoric) 2nd Piola-Kirchhoff stress
@ -615,8 +606,7 @@ function plastic_isotropic_postResults(Tstar_v,ipc,ip,el)
math_mul6x6
use material, only: &
material_phase, &
plasticState, &
phaseAt, phasememberAt, &
phasememberAt, &
phase_plasticityInstance
implicit none
@ -640,7 +630,7 @@ function plastic_isotropic_postResults(Tstar_v,ipc,ip,el)
o
of = phasememberAt(ipc,ip,el) ! phasememberAt should be tackled by material and be renamed to material_phasemember
instance = phase_plasticityInstance(phaseAt(ipc,ip,el)) ! "phaseAt" equivalent to "material_phase" !!
instance = phase_plasticityInstance(material_phase(ipc,ip,el))
!--------------------------------------------------------------------------------------------------
! norm of (deviatoric) 2nd Piola-Kirchhoff stress

View File

@ -1,577 +0,0 @@
!--------------------------------------------------------------------------------------------------
!> @author Franz Roters, Max-Planck-Institut für Eisenforschung GmbH
!> @author Philip Eisenlohr, Max-Planck-Institut für Eisenforschung GmbH
!> @brief material subroutine for isotropic (J2) plasticity
!> @details Isotropic (J2) Plasticity which resembles the phenopowerlaw plasticity without
!! resolving the stress on the slip systems. Will give the response of phenopowerlaw for an
!! untextured polycrystal
!--------------------------------------------------------------------------------------------------
module plastic_j2
#ifdef HDF
use hdf5, only: &
HID_T
#endif
use prec, only: &
pReal,&
pInt
implicit none
private
integer(pInt), dimension(:), allocatable, public, protected :: &
plastic_j2_sizePostResults !< cumulative size of post results
integer(pInt), dimension(:,:), allocatable, target, public :: &
plastic_j2_sizePostResult !< size of each post result output
character(len=64), dimension(:,:), allocatable, target, public :: &
plastic_j2_output !< name of each post result output
integer(pInt), dimension(:), allocatable, target, public :: &
plastic_j2_Noutput !< number of outputs per instance
real(pReal), dimension(:), allocatable, private :: &
plastic_j2_fTaylor, & !< Taylor factor
plastic_j2_tau0, & !< initial plastic stress
plastic_j2_gdot0, & !< reference velocity
plastic_j2_n, & !< Visco-plastic parameter
!--------------------------------------------------------------------------------------------------
! h0 as a function of strain rate: h0 = A + B log(gammadot)
plastic_j2_h0, &
plastic_j2_h0_slopeLnRate, &
plastic_j2_tausat, & !< final plastic stress
plastic_j2_a, &
plastic_j2_aTolResistance, &
plastic_j2_aTolShear, &
!--------------------------------------------------------------------------------------------------
! tausat += (asinh((gammadot / SinhFitA)**(1 / SinhFitD)))**(1 / SinhFitC) / (SinhFitB * (gammadot / gammadot0)**(1/n))
plastic_j2_tausat_SinhFitA, & !< fitting parameter for normalized strain rate vs. stress function
plastic_j2_tausat_SinhFitB, & !< fitting parameter for normalized strain rate vs. stress function
plastic_j2_tausat_SinhFitC, & !< fitting parameter for normalized strain rate vs. stress function
plastic_j2_tausat_SinhFitD !< fitting parameter for normalized strain rate vs. stress function
enum, bind(c)
enumerator :: undefined_ID, &
flowstress_ID, &
strainrate_ID
end enum
integer(kind(undefined_ID)), dimension(:,:), allocatable, private :: &
plastic_j2_outputID !< ID of each post result output
#ifdef HDF
type plastic_j2_tOutput
real(pReal), dimension(:), allocatable, private :: &
flowstress, &
strainrate
logical :: flowstressActive = .false., strainrateActive = .false. ! if we can write the output block wise, this is not needed anymore because we can do an if(allocated(xxx))
end type plastic_j2_tOutput
type(plastic_j2_tOutput), allocatable, dimension(:) :: plastic_j2_Output2
integer(HID_T), allocatable, dimension(:) :: outID
#endif
public :: &
plastic_j2_init, &
plastic_j2_LpAndItsTangent, &
plastic_j2_dotState, &
plastic_j2_postResults
contains
!--------------------------------------------------------------------------------------------------
!> @brief module initialization
!> @details reads in material parameters, allocates arrays, and does sanity checks
!--------------------------------------------------------------------------------------------------
subroutine plastic_j2_init(fileUnit)
use, intrinsic :: iso_fortran_env ! to get compiler_version and compiler_options (at least for gfortran 4.6 at the moment)
#ifdef HDF
use hdf5
#endif
use debug, only: &
debug_level, &
debug_constitutive, &
debug_levelBasic
use numerics, only: &
analyticJaco, &
worldrank, &
numerics_integrator
use math, only: &
math_Mandel3333to66, &
math_Voigt66to3333
use IO, only: &
IO_read, &
IO_lc, &
IO_getTag, &
IO_isBlank, &
IO_stringPos, &
IO_stringValue, &
IO_floatValue, &
IO_error, &
IO_timeStamp, &
#ifdef HDF
tempResults, &
HDF5_addGroup, &
HDF5_addScalarDataset,&
#endif
IO_EOF
use material, only: &
phase_plasticity, &
phase_plasticityInstance, &
phase_Noutput, &
PLASTICITY_J2_label, &
PLASTICITY_J2_ID, &
material_phase, &
plasticState, &
MATERIAL_partPhase
use lattice
implicit none
integer(pInt), intent(in) :: fileUnit
integer(pInt), allocatable, dimension(:) :: chunkPos
integer(pInt) :: &
o, &
phase, &
maxNinstance, &
instance, &
mySize, &
sizeDotState, &
sizeState, &
sizeDeltaState
character(len=65536) :: &
tag = '', &
line = ''
integer(pInt) :: NofMyPhase
#ifdef HDF
character(len=5) :: &
str1
integer(HID_T) :: ID,ID2,ID4
#endif
mainProcess: if (worldrank == 0) then
write(6,'(/,a)') ' <<<+- constitutive_'//PLASTICITY_J2_label//' init -+>>>'
write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
endif mainProcess
maxNinstance = int(count(phase_plasticity == PLASTICITY_J2_ID),pInt)
if (maxNinstance == 0_pInt) return
if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0_pInt) &
write(6,'(a16,1x,i5,/)') '# instances:',maxNinstance
#ifdef HDF
allocate(plastic_j2_Output2(maxNinstance))
allocate(outID(maxNinstance))
#endif
allocate(plastic_j2_sizePostResults(maxNinstance), source=0_pInt)
allocate(plastic_j2_sizePostResult(maxval(phase_Noutput), maxNinstance),source=0_pInt)
allocate(plastic_j2_output(maxval(phase_Noutput), maxNinstance))
plastic_j2_output = ''
allocate(plastic_j2_outputID(maxval(phase_Noutput),maxNinstance), source=undefined_ID)
allocate(plastic_j2_Noutput(maxNinstance), source=0_pInt)
allocate(plastic_j2_fTaylor(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tau0(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_gdot0(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_n(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_h0(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_h0_slopeLnRate(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_a(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_aTolResistance(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_aTolShear (maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat_SinhFitA(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat_SinhFitB(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat_SinhFitC(maxNinstance), source=0.0_pReal)
allocate(plastic_j2_tausat_SinhFitD(maxNinstance), source=0.0_pReal)
rewind(fileUnit)
phase = 0_pInt
do while (trim(line) /= IO_EOF .and. IO_lc(IO_getTag(line,'<','>')) /= material_partPhase) ! wind forward to <phase>
line = IO_read(fileUnit)
enddo
parsingFile: do while (trim(line) /= IO_EOF) ! read through sections of phase part
line = IO_read(fileUnit)
if (IO_isBlank(line)) cycle ! skip empty lines
if (IO_getTag(line,'<','>') /= '') then ! stop at next part
line = IO_read(fileUnit, .true.) ! reset IO_read
exit
endif
if (IO_getTag(line,'[',']') /= '') then ! next section
phase = phase + 1_pInt ! advance section counter
if (phase_plasticity(phase) == PLASTICITY_J2_ID) then
instance = phase_plasticityInstance(phase)
#ifdef HDF
outID(instance)=HDF5_addGroup(str1,tempResults)
#endif
endif
cycle ! skip to next line
endif
if (phase > 0_pInt ) then; if (phase_plasticity(phase) == PLASTICITY_J2_ID) then ! one of my phases. Do not short-circuit here (.and. between if-statements), it's not safe in Fortran
instance = phase_plasticityInstance(phase) ! which instance of my plasticity is present phase
chunkPos = IO_stringPos(line)
tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt)) ! extract key
select case(tag)
case ('(output)')
select case(IO_lc(IO_stringValue(line,chunkPos,2_pInt)))
case ('flowstress')
plastic_j2_Noutput(instance) = plastic_j2_Noutput(instance) + 1_pInt
plastic_j2_outputID(plastic_j2_Noutput(instance),instance) = flowstress_ID
plastic_j2_output(plastic_j2_Noutput(instance),instance) = &
IO_lc(IO_stringValue(line,chunkPos,2_pInt))
#ifdef HDF
call HDF5_addScalarDataset(outID(instance),myConstituents,'flowstress','MPa')
allocate(plastic_j2_Output2(instance)%flowstress(myConstituents))
plastic_j2_Output2(instance)%flowstressActive = .true.
#endif
case ('strainrate')
plastic_j2_Noutput(instance) = plastic_j2_Noutput(instance) + 1_pInt
plastic_j2_outputID(plastic_j2_Noutput(instance),instance) = strainrate_ID
plastic_j2_output(plastic_j2_Noutput(instance),instance) = &
IO_lc(IO_stringValue(line,chunkPos,2_pInt))
#ifdef HDF
call HDF5_addScalarDataset(outID(instance),myConstituents,'strainrate','1/s')
allocate(plastic_j2_Output2(instance)%strainrate(myConstituents))
plastic_j2_Output2(instance)%strainrateActive = .true.
#endif
case default
end select
case ('tau0')
plastic_j2_tau0(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_tau0(instance) < 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('gdot0')
plastic_j2_gdot0(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_gdot0(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('n')
plastic_j2_n(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_n(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('h0')
plastic_j2_h0(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('h0_slope','slopelnrate')
plastic_j2_h0_slopeLnRate(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('tausat')
plastic_j2_tausat(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_tausat(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('tausat_sinhfita')
plastic_j2_tausat_SinhFitA(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('tausat_sinhfitb')
plastic_j2_tausat_SinhFitB(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('tausat_sinhfitc')
plastic_j2_tausat_SinhFitC(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('tausat_sinhfitd')
plastic_j2_tausat_SinhFitD(instance) = IO_floatValue(line,chunkPos,2_pInt)
case ('a', 'w0')
plastic_j2_a(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_a(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('taylorfactor')
plastic_j2_fTaylor(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_fTaylor(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('atol_resistance')
plastic_j2_aTolResistance(instance) = IO_floatValue(line,chunkPos,2_pInt)
if (plastic_j2_aTolResistance(instance) <= 0.0_pReal) &
call IO_error(211_pInt,ext_msg=trim(tag)//' ('//PLASTICITY_J2_label//')')
case ('atol_shear')
plastic_j2_aTolShear(instance) = IO_floatValue(line,chunkPos,2_pInt)
case default
end select
endif; endif
enddo parsingFile
initializeInstances: do phase = 1_pInt, size(phase_plasticity)
myPhase: if (phase_plasticity(phase) == PLASTICITY_j2_ID) then
NofMyPhase=count(material_phase==phase)
instance = phase_plasticityInstance(phase)
!--------------------------------------------------------------------------------------------------
! sanity checks
if (plastic_j2_aTolShear(instance) <= 0.0_pReal) &
plastic_j2_aTolShear(instance) = 1.0e-6_pReal ! default absolute tolerance 1e-6
!--------------------------------------------------------------------------------------------------
! Determine size of postResults array
outputsLoop: do o = 1_pInt,plastic_j2_Noutput(instance)
select case(plastic_j2_outputID(o,instance))
case(flowstress_ID,strainrate_ID)
mySize = 1_pInt
case default
end select
outputFound: if (mySize > 0_pInt) then
plastic_j2_sizePostResult(o,instance) = mySize
plastic_j2_sizePostResults(instance) = &
plastic_j2_sizePostResults(instance) + mySize
endif outputFound
enddo outputsLoop
!--------------------------------------------------------------------------------------------------
! allocate state arrays
sizeState = 2_pInt
sizeDotState = sizeState
sizeDeltaState = 0_pInt
plasticState(phase)%sizeState = sizeState
plasticState(phase)%sizeDotState = sizeDotState
plasticState(phase)%sizeDeltaState = sizeDeltaState
plasticState(phase)%sizePostResults = plastic_j2_sizePostResults(instance)
plasticState(phase)%nSlip = 1
plasticState(phase)%nTwin = 0
plasticState(phase)%nTrans= 0
allocate(plasticState(phase)%aTolState ( sizeState))
plasticState(phase)%aTolState(1) = plastic_j2_aTolResistance(instance)
plasticState(phase)%aTolState(2) = plastic_j2_aTolShear(instance)
allocate(plasticState(phase)%state0 ( sizeState,NofMyPhase))
plasticState(phase)%state0(1,1:NofMyPhase) = plastic_j2_tau0(instance)
plasticState(phase)%state0(2,1:NofMyPhase) = 0.0_pReal
allocate(plasticState(phase)%partionedState0 ( sizeState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%subState0 ( sizeState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%state ( sizeState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%dotState (sizeDotState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%deltaState (sizeDeltaState,NofMyPhase),source=0.0_pReal)
if (.not. analyticJaco) then
allocate(plasticState(phase)%state_backup ( sizeState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%dotState_backup (sizeDotState,NofMyPhase),source=0.0_pReal)
endif
if (any(numerics_integrator == 1_pInt)) then
allocate(plasticState(phase)%previousDotState (sizeDotState,NofMyPhase),source=0.0_pReal)
allocate(plasticState(phase)%previousDotState2(sizeDotState,NofMyPhase),source=0.0_pReal)
endif
if (any(numerics_integrator == 4_pInt)) &
allocate(plasticState(phase)%RK4dotState (sizeDotState,NofMyPhase),source=0.0_pReal)
if (any(numerics_integrator == 5_pInt)) &
allocate(plasticState(phase)%RKCK45dotState (6,sizeDotState,NofMyPhase),source=0.0_pReal)
plasticState(phase)%slipRate => plasticState(phase)%dotState(2:2,1:NofMyPhase)
plasticState(phase)%accumulatedSlip => plasticState(phase)%state (2:2,1:NofMyPhase)
endif myPhase
enddo initializeInstances
end subroutine plastic_j2_init
!--------------------------------------------------------------------------------------------------
!> @brief calculates plastic velocity gradient and its tangent
!--------------------------------------------------------------------------------------------------
subroutine plastic_j2_LpAndItsTangent(Lp,dLp_dTstar99,Tstar_v,ipc,ip,el)
use math, only: &
math_mul6x6, &
math_Mandel6to33, &
math_Plain3333to99, &
math_deviatoric33, &
math_mul33xx33
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
material_phase, &
phase_plasticityInstance
implicit none
real(pReal), dimension(3,3), intent(out) :: &
Lp !< plastic velocity gradient
real(pReal), dimension(9,9), intent(out) :: &
dLp_dTstar99 !< derivative of Lp with respect to 2nd Piola Kirchhoff stress
real(pReal), dimension(6), intent(in) :: &
Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation
integer(pInt), intent(in) :: &
ipc, & !< component-ID of integration point
ip, & !< integration point
el !< element
real(pReal), dimension(3,3) :: &
Tstar_dev_33 !< deviatoric part of the 2nd Piola Kirchhoff stress tensor as 2nd order tensor
real(pReal), dimension(3,3,3,3) :: &
dLp_dTstar_3333 !< derivative of Lp with respect to Tstar as 4th order tensor
real(pReal) :: &
gamma_dot, & !< strainrate
norm_Tstar_dev, & !< euclidean norm of Tstar_dev
squarenorm_Tstar_dev !< square of the euclidean norm of Tstar_dev
integer(pInt) :: &
instance, &
k, l, m, n
instance = phase_plasticityInstance(material_phase(ipc,ip,el))
Tstar_dev_33 = math_deviatoric33(math_Mandel6to33(Tstar_v)) ! deviatoric part of 2nd Piola-Kirchhoff stress
squarenorm_Tstar_dev = math_mul33xx33(Tstar_dev_33,Tstar_dev_33)
norm_Tstar_dev = sqrt(squarenorm_Tstar_dev)
if (norm_Tstar_dev <= 0.0_pReal) then ! Tstar == 0 --> both Lp and dLp_dTstar are zero
Lp = 0.0_pReal
dLp_dTstar99 = 0.0_pReal
else
gamma_dot = plastic_j2_gdot0(instance) &
* (sqrt(1.5_pReal) * norm_Tstar_dev / (plastic_j2_fTaylor(instance) * &
plasticState(phaseAt(ipc,ip,el))%state(1,phasememberAt(ipc,ip,el)))) &
**plastic_j2_n(instance)
Lp = Tstar_dev_33/norm_Tstar_dev * gamma_dot/plastic_j2_fTaylor(instance)
!--------------------------------------------------------------------------------------------------
! Calculation of the tangent of Lp
forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt,m=1_pInt:3_pInt,n=1_pInt:3_pInt) &
dLp_dTstar_3333(k,l,m,n) = (plastic_j2_n(instance)-1.0_pReal) * &
Tstar_dev_33(k,l)*Tstar_dev_33(m,n) / squarenorm_Tstar_dev
forall (k=1_pInt:3_pInt,l=1_pInt:3_pInt) &
dLp_dTstar_3333(k,l,k,l) = dLp_dTstar_3333(k,l,k,l) + 1.0_pReal
forall (k=1_pInt:3_pInt,m=1_pInt:3_pInt) &
dLp_dTstar_3333(k,k,m,m) = dLp_dTstar_3333(k,k,m,m) - 1.0_pReal/3.0_pReal
dLp_dTstar99 = math_Plain3333to99(gamma_dot / plastic_j2_fTaylor(instance) * &
dLp_dTstar_3333 / norm_Tstar_dev)
end if
end subroutine plastic_j2_LpAndItsTangent
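One detail worth noting: this tangent carries a term the spherical variant in plastic_isotropic does not. Lp depends on Tstar only through its deviator, and the chain rule through that projection,

  d(T'_kl)/dT_mn = delta_km*delta_ln - (1/3)*delta_kl*delta_mn,

is exactly where the third forall, subtracting 1/3 on the trace components, comes from.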
!--------------------------------------------------------------------------------------------------
!> @brief calculates the rate of change of microstructure
!--------------------------------------------------------------------------------------------------
subroutine plastic_j2_dotState(Tstar_v,ipc,ip,el)
use math, only: &
math_mul6x6
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
material_phase, &
phase_plasticityInstance
implicit none
real(pReal), dimension(6), intent(in):: &
Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation
integer(pInt), intent(in) :: &
ipc, & !< component-ID of integration point
ip, & !< integration point
el !< element
real(pReal), dimension(6) :: &
Tstar_dev_v !< deviatoric part of the 2nd Piola Kirchhoff stress tensor in Mandel notation
real(pReal) :: &
gamma_dot, & !< strainrate
hardening, & !< hardening coefficient
saturation, & !< saturation resistance
norm_Tstar_dev !< euclidean norm of Tstar_dev
integer(pInt) :: &
instance, & !< instance of my instance (unique number of my constitutive model)
of, & !< shortcut notation for offset position in state array
ph !< shortcut notation for phase ID (unique number of all phases, regardless of constitutive model)
of = phasememberAt(ipc,ip,el)
ph = phaseAt(ipc,ip,el)
instance = phase_plasticityInstance(material_phase(ipc,ip,el))
!--------------------------------------------------------------------------------------------------
! norm of deviatoric part of 2nd Piola-Kirchhoff stress
Tstar_dev_v(1:3) = Tstar_v(1:3) - sum(Tstar_v(1:3))/3.0_pReal
Tstar_dev_v(4:6) = Tstar_v(4:6)
norm_Tstar_dev = sqrt(math_mul6x6(Tstar_dev_v,Tstar_dev_v))
!--------------------------------------------------------------------------------------------------
! strain rate
gamma_dot = plastic_j2_gdot0(instance) * ( sqrt(1.5_pReal) * norm_Tstar_dev &
/ &!-----------------------------------------------------------------------------------
(plastic_j2_fTaylor(instance)*plasticState(ph)%state(1,of)) )**plastic_j2_n(instance)
!--------------------------------------------------------------------------------------------------
! hardening coefficient
if (abs(gamma_dot) > 1e-12_pReal) then
if (abs(plastic_j2_tausat_SinhFitA(instance)) <= tiny(0.0_pReal)) then
saturation = plastic_j2_tausat(instance)
else
saturation = ( plastic_j2_tausat(instance) &
+ ( log( ( gamma_dot / plastic_j2_tausat_SinhFitA(instance)&
)**(1.0_pReal / plastic_j2_tausat_SinhFitD(instance))&
+ sqrt( ( gamma_dot / plastic_j2_tausat_SinhFitA(instance) &
)**(2.0_pReal / plastic_j2_tausat_SinhFitD(instance)) &
+ 1.0_pReal ) &
) & ! asinh(K) = ln(K + sqrt(K^2 +1))
)**(1.0_pReal / plastic_j2_tausat_SinhFitC(instance)) &
/ ( plastic_j2_tausat_SinhFitB(instance) &
* (gamma_dot / plastic_j2_gdot0(instance))**(1.0_pReal / plastic_j2_n(instance)) &
) &
)
endif
hardening = ( plastic_j2_h0(instance) + plastic_j2_h0_slopeLnRate(instance) * log(gamma_dot) ) &
* abs( 1.0_pReal - plasticState(ph)%state(1,of)/saturation )**plastic_j2_a(instance) &
* sign(1.0_pReal, 1.0_pReal - plasticState(ph)%state(1,of)/saturation)
else
hardening = 0.0_pReal
endif
plasticState(ph)%dotState(1,of) = hardening * gamma_dot
plasticState(ph)%dotState(2,of) = gamma_dot
end subroutine plastic_j2_dotState
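The ln(K + sqrt(K**2 + 1)) construction above is the asinh identity noted in the inline comment. With a Fortran 2008 compiler the same saturation law reads (a sketch; A, B, C, D and n are shorthand for the tausat_SinhFit parameters and the rate exponent):

! sketch: the sinh-fit saturation stress with the intrinsic asinh
x          = (gamma_dot / A)**(1.0_pReal / D)
saturation = tausat + asinh(x)**(1.0_pReal / C) &
           / ( B * (gamma_dot / gdot0)**(1.0_pReal / n) )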
!--------------------------------------------------------------------------------------------------
!> @brief return array of constitutive results
!--------------------------------------------------------------------------------------------------
function plastic_j2_postResults(Tstar_v,ipc,ip,el)
use math, only: &
math_mul6x6
use material, only: &
material_phase, &
plasticState, &
phaseAt, phasememberAt, &
phase_plasticityInstance
implicit none
real(pReal), dimension(6), intent(in) :: &
Tstar_v !< 2nd Piola Kirchhoff stress tensor in Mandel notation
integer(pInt), intent(in) :: &
ipc, & !< component-ID of integration point
ip, & !< integration point
el !< element
real(pReal), dimension(plastic_j2_sizePostResults(phase_plasticityInstance(material_phase(ipc,ip,el)))) :: &
plastic_j2_postResults
real(pReal), dimension(6) :: &
Tstar_dev_v ! deviatoric part of the 2nd Piola Kirchhoff stress tensor in Mandel notation
real(pReal) :: &
norm_Tstar_dev ! euclidean norm of Tstar_dev
integer(pInt) :: &
instance, & !< instance of my instance (unique number of my constitutive model)
of, & !< shortcut notation for offset position in state array
ph, & !< shortcut notation for phase ID (unique number of all phases, regardless of constitutive model)
c, &
o
of = phasememberAt(ipc,ip,el)
ph = phaseAt(ipc,ip,el)
instance = phase_plasticityInstance(material_phase(ipc,ip,el))
!--------------------------------------------------------------------------------------------------
! calculate deviatoric part of 2nd Piola-Kirchhoff stress and its norm
Tstar_dev_v(1:3) = Tstar_v(1:3) - sum(Tstar_v(1:3))/3.0_pReal
Tstar_dev_v(4:6) = Tstar_v(4:6)
norm_Tstar_dev = sqrt(math_mul6x6(Tstar_dev_v,Tstar_dev_v))
c = 0_pInt
plastic_j2_postResults = 0.0_pReal
outputsLoop: do o = 1_pInt,plastic_j2_Noutput(instance)
select case(plastic_j2_outputID(o,instance))
case (flowstress_ID)
plastic_j2_postResults(c+1_pInt) = plasticState(ph)%state(1,of)
c = c + 1_pInt
case (strainrate_ID)
plastic_j2_postResults(c+1_pInt) = &
plastic_j2_gdot0(instance) * ( sqrt(1.5_pReal) * norm_Tstar_dev &
/ &!----------------------------------------------------------------------------------
(plastic_j2_fTaylor(instance) * plasticState(ph)%state(1,of)) ) ** plastic_j2_n(instance)
c = c + 1_pInt
end select
enddo outputsLoop
end function plastic_j2_postResults
end module plastic_j2

View File

@ -113,7 +113,9 @@ module prec
public :: &
prec_init, &
prec_isNaN
prec_isNaN, &
dEq, &
dNeq
contains
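dEq and dNeq join prec_isNaN in the public interface; they are DAMASK's tolerance-aware floating-point comparisons. A minimal sketch of the assumed semantics (the actual tolerance handling may differ):

! sketch: equality up to a scaled machine epsilon
logical elemental function dEq(a, b)
  real(pReal), intent(in) :: a, b
  dEq = abs(a-b) <= 10.0_pReal * epsilon(a) * max(abs(a), abs(b))
end function dEq

logical elemental function dNeq(a, b)
  real(pReal), intent(in) :: a, b
  dNeq = .not. dEq(a, b)
end function dNeq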

View File

@ -1,37 +0,0 @@
### $Id$ ###
[Tungsten]
elasticity hooke
plasticity dislokmc
### Material parameters ###
lattice_structure bcc
C11 523.0e9 # From Marinica et al. Journal of Physics: Condensed Matter(2013)
C12 202.0e9
C44 161.0e9
grainsize 2.0e-5 # Average grain size [m] 2.0e-5
SolidSolutionStrength 0.0 # Strength due to elements in solid solution
### Dislocation glide parameters ###
#per family
Nslip 12 0
slipburgers 2.72e-10 # Burgers vector of slip system [m]
rhoedge0 1.0e12 # Initial edge dislocation density [m/m**3]
rhoedgedip0 1.0 # Initial edged dipole dislocation density [m/m**3]
Qedge 2.725e-19 # Activation energy for dislocation glide [J]
v0 3560.3 # Initial glide velocity [m/s](kmC)
p_slip 0.16 # p-exponent in glide velocity
q_slip 1.00 # q-exponent in glide velocity
u_slip 2.47 # u-exponent of stress pre-factor (kmC)
s_slip 0.97 # self hardening in glide velocity (kmC)
tau_peierls 2.03e9 # peierls stress [Pa]
#hardening
dipoleformationfactor 0 # to have hardening due to dipole formation off
CLambdaSlip 10.0 # Adj. parameter controlling dislocation mean free path
D0 4.0e-5 # Vacancy diffusion prefactor [m**2/s]
Qsd 4.5e-19 # Activation energy for climb [J]
Catomicvolume 1.0 # Adj. parameter controlling the atomic volume [in b]
Cedgedipmindistance 1.0 # Adj. parameter controlling the minimum dipole distance [in b]
interaction_slipslip 0.2 0.11 0.19 0.15 0.11 0.17

View File

@ -3,7 +3,7 @@
# Kuo, J. C., Mikrostrukturmechanik von Bikristallen mit Kippkorngrenzen. Shaker-Verlag 2004. http://edoc.mpg.de/204079
elasticity hooke
plasticity j2
plasticity isotropic
(output) flowstress
(output) strainrate

File diff suppressed because it is too large

1
examples/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
postProc

View File

@ -1,33 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os
import glob
from subprocess import call
geom_name = '20grains16x16x16_tensionX'
postResults = 'postResults --cr f,p --split --separation x,y,z '+geom_name+'.spectralOut'
sts = call(postResults, shell=True)
os.chdir('./postProc/')
ascii_files = glob.glob(geom_name+'_inc*.txt')
print ascii_files
showTable = "showTable -a "
addCauchy = 'addCauchy '
addMises = 'addMises -s Cauchy '
addStrainTensors = "addStrainTensors -0 -v "
visualize3D = "3Dvisualize -s 'Mises(Cauchy)',1_p Cauchy "
postProc = [addCauchy, addMises, addStrainTensors, visualize3D]
for f in ascii_files:
print f
for p in postProc:
p = p+f
print p
sts = call(p,shell=True)

View File

@ -1,17 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import sys
resolutions = [16,32,64]
resolution = resolutions[0]
try:
resolution = int(sys.argv[1])
except:
pass
if resolution not in resolutions:
resolution = resolutions[0]
from subprocess import call
call('make run%s'%('x'.join([str(resolution)]*3)), shell=True)

View File

@ -1,177 +1,177 @@
#
# System-Wide Abaqus Environment File
# -------------------------------------
standard_parallel = ALL
mp_mode = MPI
mp_file_system = (DETECT,DETECT)
mp_num_parallel_ftps = (4, 4)
mp_environment_export = ('MPI_PROPAGATE_TSTP',
'ABA_CM_BUFFERING',
'ABA_CM_BUFFERING_LIMIT',
'ABA_ITERATIVE_SOLVER_VERBOSE',
'ABA_DMPSOLVER_BWDPARALLELOFF',
'ABA_ELP_SURFACE_SPLIT',
'ABA_ELP_SUSPEND',
'ABA_HOME',
'ABA_MEMORY_MODE',
'ABA_MPI_MESSAGE_TRACKING',
'ABA_MPI_VERBOSE_LEVEL',
'ABA_PATH',
'ABAQUS_CSE_RELTIMETOLERANCE',
'ABA_RESOURCE_MONITOR',
'ABA_RESOURCE_USEMALLINFO',
'ABAQUS_LANG',
'ABAQUS_CSE_CURRCONFIGMAPPING',
'ABAQUS_MPF_DIAGNOSTIC_LEVEL',
'ABAQUSLM_LICENSE_FILE',
'ABQ_CRTMALLOC',
'ABQ_DATACHECK',
'ABQ_RECOVER',
'ABQ_RESTART',
'ABQ_SPLITFILE',
'ABQ_XPL_WINDOWDUMP',
'ABQ_XPL_PARTITIONSIZE',
'ABQLMHANGLIMIT',
'ABQLMQUEUE',
'ABQLMUSER',
'CCI_RENDEZVOUS',
'DOMAIN',
'DOMAIN_CPUS',
'DOUBLE_PRECISION',
'FLEXLM_DIAGNOSTICS',
'FOR0006',
'FOR0064',
'FOR_IGNORE_EXCEPTIONS',
'FOR_DISABLE_DIAGNOSTIC_DISPLAY',
'LD_PRELOAD',
'MP_NUMBER_OF_THREADS',
'MPC_GANG',
'MPI_FLAGS',
'MPI_FLUSH_FCACHE',
'MPI_RDMA_NENVELOPE',
'MPI_SOCKBUFSIZE',
'MPI_USE_MALLOPT_MMAP_MAX',
'MPI_USE_MALLOPT_MMAP_THRESHOLD',
'MPI_USE_MALLOPT_SBRK_PROTECTION',
'MPI_WORKDIR',
'MPCCI_DEBUG',
'MPCCI_CODEID',
'MPCCI_JOBID',
'MPCCI_NETDEVICE',
'MPCCI_TINFO',
'MPCCI_SERVER',
'ABAQUS_CCI_DEBUG',
'NCPUS',
'OMP_DYNAMIC',
'OMP_NUM_THREADS',
'OUTDIR',
'PAIDUP',
'PARALLEL_METHOD',
'RAIDEV_NDREG_LAZYMEM',
'ABA_SYMBOLIC_GENERALCOLLAPSE',
'ABA_SYMBOLIC_GENERAL_MAXCLIQUERANK',
'ABA_ADM_MINIMUMINCREASE',
'ABA_ADM_MINIMUMDECREASE',
'IPATH_NO_CPUAFFINITY',
'MALLOC_MMAP_THRESHOLD_',
'ABA_EXT_SIMOUTPUT',
'SMA_WS',
'SMA_PARENT',
'SMA_PLATFORM',
'ABA_PRE_DECOMPOSITION',
'ACML_FAST_MALLOC',
'ACML_FAST_MALLOC_CHUNK_SIZE',
'ACML_FAST_MALLOC_MAX_CHUNKS',
'ACML_FAST_MALLOC_DEBUG')
import driverUtils, os
#-*- mode: python -*-
# #
# Compile and Link command settings for the Windows 64 Platform #
# ( AMD Opteron / Intel EM64T ) #
# #
compile_fortran=['ifort',
'/c','/DABQ_WIN86_64', '/u',
'/iface:cref', '/recursive', '/Qauto-scalar',
'/QxSSE3', '/QaxAVX',
'/heap-arrays:1',
# '/Od', '/Ob0' # <-- Optimization
# '/Zi', # <-- Debugging
'/include:%I', '/free', '/O1', '/fpp', '/openmp', '/Qmkl']
link_sl=['LINK',
'/nologo', '/NOENTRY', '/INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64',
'/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB',
'/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:LIBIFCOREMD.LIB', '/DEFAULTLIB:LIBIFPORTMD', '/DEFAULTLIB:LIBMMD.LIB',
'/DEFAULTLIB:kernel32.lib', '/DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
'/FIXED:NO', '/dll',
'/def:%E', '/out:%U', '%F', '%A', '%L', '%B',
'oldnames.lib', 'user32.lib', 'ws2_32.lib', 'netapi32.lib', 'advapi32.lib']
link_exe=['LINK',
'/nologo', '/INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64', '/STACK:20000000',
'/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB', '/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:LIBIFCOREMD.LIB',
'/DEFAULTLIB:LIBIFPORTMD', '/DEFAULTLIB:LIBMMD.LIB', '/DEFAULTLIB:kernel32.lib',
'/DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
'/FIXED:NO', '/LARGEADDRESSAWARE',
'/out:%J', '%F', '%M', '%L', '%B', '%O',
'oldnames.lib', 'user32.lib', 'ws2_32.lib', 'netapi32.lib', 'advapi32.lib']
# Link command to be used for MAKE w/o fortran compiler.
# remove the pound signs in order to remove the comments and have the file take effect.
#
#link_exe=['LINK', '/nologo', 'INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64', '/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB',
# '/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:MSVCRT.LIB', '/DEFAULTLIB:kernel32.lib', 'DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
# '/FIXED:NO', '/LARGEADDRESSAWARE', '/DEBUG', '/out:%J', '%F', '%M', '%L', '%B', '%O', 'oldnames.lib', 'user32.lib', 'ws2_32.lib',
# 'netapi32.lib', 'advapi32.lib]
# MPI Configuration
mp_mode = THREADS
mp_mpi_implementation = NATIVE
mp_rsh_command = 'dummy %H -l %U -n %C'
mp_mpirun_path = {}
mpirun = ''
progDir = os.environ.get('ProgramFiles','C:\\Program Files')
for mpiDir in ('Microsoft HPC Pack', 'Microsoft HPC Pack 2008 R2', 'Microsoft HPC Pack 2008', 'Microsoft HPC Pack 2008 SDK'):
mpirun = progDir + os.sep + mpiDir + os.sep + 'bin' + os.sep + 'mpiexec.exe'
if os.path.exists(mpirun):
mp_mpirun_path[NATIVE] = mpirun
mp_mpirun_path[MSSDK] = os.path.join(progDir, mpiDir)
break
if os.environ.has_key('CCP_HOME'):
from queueCCS import QueueCCS
queues['default'] = QueueCCS(queueName='share')
queues['share'] = QueueCCS(queueName='share')
queues['local'] = QueueCCS(queueName='local')
queues['genxmlshare'] = QueueCCS(queueName='genxmlshare')
queues['genxmllocal'] = QueueCCS(queueName='genxmllocal')
del QueueCCS
mpirun = os.path.join(os.environ['CCP_HOME'], 'bin', 'mpiexec.exe')
if os.path.exists(mpirun):
mp_mpirun_path[NATIVE] = mpirun
run_mode=BATCH
if mp_mpirun_path:
mp_mode=MPI
del progDir, mpiDir, mpirun
graphicsEnv = driverUtils.locateFile(os.environ['ABA_PATH'],'site','graphicsConfig','env')
if graphicsEnv:
execfile(graphicsEnv)
else:
raise 'Cannot find the graphics configuration environment file (graphicsConfig.env)'
del driverUtils, os, graphicsEnv
license_server_type=FLEXNET
abaquslm_license_file=""
doc_root="
doc_root_type="html"
academic=RESEARCH
#
# System-Wide Abaqus Environment File
# -------------------------------------
standard_parallel = ALL
mp_mode = MPI
mp_file_system = (DETECT,DETECT)
mp_num_parallel_ftps = (4, 4)
mp_environment_export = ('MPI_PROPAGATE_TSTP',
'ABA_CM_BUFFERING',
'ABA_CM_BUFFERING_LIMIT',
'ABA_ITERATIVE_SOLVER_VERBOSE',
'ABA_DMPSOLVER_BWDPARALLELOFF',
'ABA_ELP_SURFACE_SPLIT',
'ABA_ELP_SUSPEND',
'ABA_HOME',
'ABA_MEMORY_MODE',
'ABA_MPI_MESSAGE_TRACKING',
'ABA_MPI_VERBOSE_LEVEL',
'ABA_PATH',
'ABAQUS_CSE_RELTIMETOLERANCE',
'ABA_RESOURCE_MONITOR',
'ABA_RESOURCE_USEMALLINFO',
'ABAQUS_LANG',
'ABAQUS_CSE_CURRCONFIGMAPPING',
'ABAQUS_MPF_DIAGNOSTIC_LEVEL',
'ABAQUSLM_LICENSE_FILE',
'ABQ_CRTMALLOC',
'ABQ_DATACHECK',
'ABQ_RECOVER',
'ABQ_RESTART',
'ABQ_SPLITFILE',
'ABQ_XPL_WINDOWDUMP',
'ABQ_XPL_PARTITIONSIZE',
'ABQLMHANGLIMIT',
'ABQLMQUEUE',
'ABQLMUSER',
'CCI_RENDEZVOUS',
'DOMAIN',
'DOMAIN_CPUS',
'DOUBLE_PRECISION',
'FLEXLM_DIAGNOSTICS',
'FOR0006',
'FOR0064',
'FOR_IGNORE_EXCEPTIONS',
'FOR_DISABLE_DIAGNOSTIC_DISPLAY',
'LD_PRELOAD',
'MP_NUMBER_OF_THREADS',
'MPC_GANG',
'MPI_FLAGS',
'MPI_FLUSH_FCACHE',
'MPI_RDMA_NENVELOPE',
'MPI_SOCKBUFSIZE',
'MPI_USE_MALLOPT_MMAP_MAX',
'MPI_USE_MALLOPT_MMAP_THRESHOLD',
'MPI_USE_MALLOPT_SBRK_PROTECTION',
'MPI_WORKDIR',
'MPCCI_DEBUG',
'MPCCI_CODEID',
'MPCCI_JOBID',
'MPCCI_NETDEVICE',
'MPCCI_TINFO',
'MPCCI_SERVER',
'ABAQUS_CCI_DEBUG',
'NCPUS',
'OMP_DYNAMIC',
'OMP_NUM_THREADS',
'OUTDIR',
'PAIDUP',
'PARALLEL_METHOD',
'RAIDEV_NDREG_LAZYMEM',
'ABA_SYMBOLIC_GENERALCOLLAPSE',
'ABA_SYMBOLIC_GENERAL_MAXCLIQUERANK',
'ABA_ADM_MINIMUMINCREASE',
'ABA_ADM_MINIMUMDECREASE',
'IPATH_NO_CPUAFFINITY',
'MALLOC_MMAP_THRESHOLD_',
'ABA_EXT_SIMOUTPUT',
'SMA_WS',
'SMA_PARENT',
'SMA_PLATFORM',
'ABA_PRE_DECOMPOSITION',
'ACML_FAST_MALLOC',
'ACML_FAST_MALLOC_CHUNK_SIZE',
'ACML_FAST_MALLOC_MAX_CHUNKS',
'ACML_FAST_MALLOC_DEBUG')
import driverUtils, os
#-*- mode: python -*-
# #
# Compile and Link command settings for the Windows 64 Platform #
# ( AMD Opteron / Intel EM64T ) #
# #
compile_fortran=['ifort',
'/c','/DABQ_WIN86_64', '/u',
'/iface:cref', '/recursive', '/Qauto-scalar',
'/QxSSE3', '/QaxAVX',
'/heap-arrays:1',
# '/Od', '/Ob0' # <-- Optimization
# '/Zi', # <-- Debugging
'/include:%I', '/free', '/O1', '/fpp', '/openmp', '/Qmkl']
link_sl=['LINK',
'/nologo', '/NOENTRY', '/INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64',
'/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB',
'/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:LIBIFCOREMD.LIB', '/DEFAULTLIB:LIBIFPORTMD', '/DEFAULTLIB:LIBMMD.LIB',
'/DEFAULTLIB:kernel32.lib', '/DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
'/FIXED:NO', '/dll',
'/def:%E', '/out:%U', '%F', '%A', '%L', '%B',
'oldnames.lib', 'user32.lib', 'ws2_32.lib', 'netapi32.lib', 'advapi32.lib']
link_exe=['LINK',
'/nologo', '/INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64', '/STACK:20000000',
'/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB', '/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:LIBIFCOREMD.LIB',
'/DEFAULTLIB:LIBIFPORTMD', '/DEFAULTLIB:LIBMMD.LIB', '/DEFAULTLIB:kernel32.lib',
'/DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
'/FIXED:NO', '/LARGEADDRESSAWARE',
'/out:%J', '%F', '%M', '%L', '%B', '%O',
'oldnames.lib', 'user32.lib', 'ws2_32.lib', 'netapi32.lib', 'advapi32.lib']
# Link command to be used for MAKE w/o fortran compiler.
# remove the pound signs in order to remove the comments and have the file take effect.
#
#link_exe=['LINK', '/nologo', 'INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64', '/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB',
# '/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:MSVCRT.LIB', '/DEFAULTLIB:kernel32.lib', 'DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
# '/FIXED:NO', '/LARGEADDRESSAWARE', '/DEBUG', '/out:%J', '%F', '%M', '%L', '%B', '%O', 'oldnames.lib', 'user32.lib', 'ws2_32.lib',
# 'netapi32.lib', 'advapi32.lib']
# MPI Configuration
mp_mode = THREADS
mp_mpi_implementation = NATIVE
mp_rsh_command = 'dummy %H -l %U -n %C'
mp_mpirun_path = {}
mpirun = ''
progDir = os.environ.get('ProgramFiles','C:\\Program Files')
for mpiDir in ('Microsoft HPC Pack', 'Microsoft HPC Pack 2008 R2', 'Microsoft HPC Pack 2008', 'Microsoft HPC Pack 2008 SDK'):
mpirun = progDir + os.sep + mpiDir + os.sep + 'bin' + os.sep + 'mpiexec.exe'
if os.path.exists(mpirun):
mp_mpirun_path[NATIVE] = mpirun
mp_mpirun_path[MSSDK] = os.path.join(progDir, mpiDir)
break
if os.environ.has_key('CCP_HOME'):
from queueCCS import QueueCCS
queues['default'] = QueueCCS(queueName='share')
queues['share'] = QueueCCS(queueName='share')
queues['local'] = QueueCCS(queueName='local')
queues['genxmlshare'] = QueueCCS(queueName='genxmlshare')
queues['genxmllocal'] = QueueCCS(queueName='genxmllocal')
del QueueCCS
mpirun = os.path.join(os.environ['CCP_HOME'], 'bin', 'mpiexec.exe')
if os.path.exists(mpirun):
mp_mpirun_path[NATIVE] = mpirun
run_mode=BATCH
if mp_mpirun_path:
mp_mode=MPI
del progDir, mpiDir, mpirun
graphicsEnv = driverUtils.locateFile(os.environ['ABA_PATH'],'site','graphicsConfig','env')
if graphicsEnv:
execfile(graphicsEnv)
else:
raise 'Cannot find the graphics configuration environment file (graphicsConfig.env)'
del driverUtils, os, graphicsEnv
license_server_type=FLEXNET
abaquslm_license_file=""
doc_root="
doc_root_type="html"
academic=RESEARCH

View File

@ -21,13 +21,13 @@ if not os.path.isdir(binDir):
#define ToDo list
processing_subDirs = ['pre','post','misc',]
processing_extensions = ['.py',]
processing_extensions = ['.py','.sh',]
for subDir in processing_subDirs:
theDir = os.path.abspath(os.path.join(baseDir,subDir))
for theFile in os.listdir(theDir):
if os.path.splitext(theFile)[1] in processing_extensions: # omit anything not fitting our script extensions (skip .py.bak, .py~, and the like)
if os.path.splitext(theFile)[1] in processing_extensions: # only consider files with proper extensions
src = os.path.abspath(os.path.join(theDir,theFile))
sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0]))

2
lib/damask/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
core.so
corientation.so

View File

@ -25,7 +25,7 @@ class ASCIItable():
readonly = False, # no reading from file
):
self.__IO__ = {'output': [],
'buffered': buffered,
'buffered': buffered,
'labeled': labeled, # header contains labels
'labels': [], # labels according to file info
'readBuffer': [], # buffer to hold non-advancing reads
@ -35,18 +35,18 @@ class ASCIItable():
self.__IO__['inPlace'] = not outname and name and not readonly
if self.__IO__['inPlace']: outname = name + self.tmpext # transparently create tmp file
try:
self.__IO__['in'] = (open( name,'r') if os.access( name, os.R_OK) else None) if name else sys.stdin
self.__IO__['in'] = (open( name,'r') if os.access( name, os.R_OK) else None) if name else sys.stdin
except TypeError:
self.__IO__['in'] = name
try:
self.__IO__['out'] = (open(outname,'w') if (not os.path.isfile(outname) \
or os.access( outname, os.W_OK) \
) \
and (not self.__IO__['inPlace'] \
or not os.path.isfile(name) \
or os.access( name, os.W_OK) \
) else None) if outname else sys.stdout
self.__IO__['out'] = (open(outname,'w') if (not os.path.isfile(outname) or
os.access( outname, os.W_OK)
) and
(not self.__IO__['inPlace'] or
not os.path.isfile(name) or
os.access( name, os.W_OK)
) else None) if outname else sys.stdout
except TypeError:
self.__IO__['out'] = outname
@ -66,6 +66,24 @@ class ASCIItable():
except:
return 0.0
# ------------------------------------------------------------------
def _removeCRLF(self,
string):
try:
return string.replace('\n','').replace('\r','')
except:
return string
# ------------------------------------------------------------------
def _quote(self,
what):
"""quote empty or white space-containing output"""
import re
return '{quote}{content}{quote}'.format(
quote = ('"' if str(what)=='' or re.search(r"\s",str(what)) else ''),
content = what)
# ------------------------------------------------------------------
def close(self,
dismiss = False):
@ -128,7 +146,7 @@ class ASCIItable():
the first row or, if keyword "head[*]" is present,
the last line of the header
"""
import re
import re,shlex
try:
self.__IO__['in'].seek(0)
@ -143,7 +161,7 @@ class ASCIItable():
if self.__IO__['labeled']: # table features labels
self.info = [self.__IO__['in'].readline().strip() for i in xrange(1,int(m.group(1)))]
self.labels = self.__IO__['in'].readline().split() # store labels found in last line
self.labels = shlex.split(self.__IO__['in'].readline()) # store labels found in last line
else:
@ -179,7 +197,7 @@ class ASCIItable():
"""write current header information (info + labels)"""
head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
head.append(self.info)
if self.__IO__['labeled']: head.append('\t'.join(self.labels))
if self.__IO__['labeled']: head.append('\t'.join(map(self._quote,self.labels)))
return self.output_write(head)
@ -243,9 +261,9 @@ class ASCIItable():
try:
for item in what: self.labels_append(item)
except:
self.labels += [str(what)]
self.labels += [self._removeCRLF(str(what))]
else:
self.labels += [what]
self.labels += [self._removeCRLF(what)]
self.__IO__['labeled'] = True # switch on processing (in particular writing) of labels
if reset: self.__IO__['labels'] = list(self.labels) # subsequent data_read uses current labels as data size
@ -272,7 +290,7 @@ class ASCIItable():
for label in labels:
if label is not None:
try:
idx.append(int(label)) # column given as integer number?
idx.append(int(label)-1) # column given as integer number?
except ValueError:
try:
idx.append(self.labels.index(label)) # locate string in label list
@ -283,7 +301,7 @@ class ASCIItable():
idx.append(-1) # not found...
else:
try:
idx = int(labels)
idx = int(labels)-1 # offset for python array indexing
except ValueError:
try:
idx = self.labels.index(labels)
@ -293,7 +311,7 @@ class ASCIItable():
except ValueError:
idx = None if labels is None else -1
return np.array(idx) if isinstance(idx,list) else idx
return np.array(idx) if isinstance(idx,Iterable) else idx
# ------------------------------------------------------------------
def label_dimension(self,
@ -312,7 +330,7 @@ class ASCIItable():
if label is not None:
myDim = -1
try: # column given as number?
idx = int(label)
idx = int(label)-1
myDim = 1 # if found has at least dimension 1
if self.labels[idx].startswith('1_'): # column has multidim indicator?
while idx+myDim < len(self.labels) and self.labels[idx+myDim].startswith("%i_"%(myDim+1)):
@ -331,7 +349,7 @@ class ASCIItable():
dim = -1 # assume invalid label
idx = -1
try: # column given as number?
idx = int(labels)
idx = int(labels)-1
dim = 1 # if found has at least dimension 1
if self.labels[idx].startswith('1_'): # column has multidim indicator?
while idx+dim < len(self.labels) and self.labels[idx+dim].startswith("%i_"%(dim+1)):
@ -345,7 +363,7 @@ class ASCIItable():
while idx+dim < len(self.labels) and self.labels[idx+dim].startswith("%i_"%(dim+1)):
dim += 1 # keep adding while going through object
return np.array(dim) if isinstance(dim,list) else dim
return np.array(dim) if isinstance(dim,Iterable) else dim
# ------------------------------------------------------------------
def label_indexrange(self,
@ -361,9 +379,10 @@ class ASCIItable():
start = self.label_index(labels)
dim = self.label_dimension(labels)
return map(lambda a,b: xrange(a,a+b), zip(start,dim)) if isinstance(labels, Iterable) and not isinstance(labels, str) \
else xrange(start,start+dim)
return np.hstack(map(lambda c: xrange(c[0],c[0]+c[1]), zip(start,dim))) \
if isinstance(labels, Iterable) and not isinstance(labels, str) \
else xrange(start,start+dim)
# ------------------------------------------------------------------
def info_append(self,
what):
@ -372,9 +391,9 @@ class ASCIItable():
try:
for item in what: self.info_append(item)
except:
self.info += [str(what)]
self.info += [self._removeCRLF(str(what))]
else:
self.info += [what]
self.info += [self._removeCRLF(what)]
# ------------------------------------------------------------------
def info_clear(self):
@ -402,6 +421,8 @@ class ASCIItable():
advance = True,
respectLabels = True):
"""read next line (possibly buffered) and parse it into data array"""
import shlex
self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file
@ -411,10 +432,10 @@ class ASCIItable():
self.line = self.line.rstrip('\n')
if self.__IO__['labeled'] and respectLabels: # if table has labels
items = self.line.split()[:len(self.__IO__['labels'])] # use up to label count (from original file info)
items = shlex.split(self.line)[:len(self.__IO__['labels'])] # use up to label count (from original file info)
self.data = items if len(items) == len(self.__IO__['labels']) else [] # take entries if label count matches
else:
self.data = self.line.split() # otherwise take all
self.data = shlex.split(self.line) # otherwise take all
return self.data != []
@ -462,9 +483,9 @@ class ASCIItable():
if len(self.data) == 0: return True
if isinstance(self.data[0],list):
return self.output_write([delimiter.join(map(str,items)) for items in self.data])
return self.output_write([delimiter.join(map(self._quote,items)) for items in self.data])
else:
return self.output_write(delimiter.join(map(str,self.data)))
return self.output_write( delimiter.join(map(self._quote,self.data)))
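Note on the hunks above: labels containing whitespace now survive a write/read cycle, because labels_write quotes them (_quote) and head_read/data_read split with shlex instead of plain str.split; integer column labels also switch from 0-based to 1-based user numbering (int(label)-1). A minimal stdlib-only sketch of the round trip (the quote helper below is a hypothetical stand-in for ASCIItable._quote):

import re, shlex

def quote(what):                                   # stand-in for ASCIItable._quote
    s = str(what)
    return '"{}"'.format(s) if s == '' or re.search(r"\s", s) else s

labels = ['pos', 'Euler angles', 'f']
line = '\t'.join(map(quote, labels))               # what labels_write now emits
print(shlex.split(line) == labels)                 # True -- spaces survive the round trip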
# ------------------------------------------------------------------
def data_writeArray(self,

View File

@ -303,36 +303,45 @@ class Colormap():
'interpolate',
]
__predefined__ = {
'gray': {'left': Color('HSL',[0,1,1]),
'gray': {'left': Color('HSL',[0,1,1]),
'right': Color('HSL',[0,0,0.15]),
'interpolate': 'perceptualuniform'},
'grey': {'left': Color('HSL',[0,1,1]),
'grey': {'left': Color('HSL',[0,1,1]),
'right': Color('HSL',[0,0,0.15]),
'interpolate': 'perceptualuniform'},
'red': {'left': Color('HSL',[0,1,0.14]),
'red': {'left': Color('HSL',[0,1,0.14]),
'right': Color('HSL',[0,0.35,0.91]),
'interpolate': 'perceptualuniform'},
'green': {'left': Color('HSL',[0.33333,1,0.14]),
'green': {'left': Color('HSL',[0.33333,1,0.14]),
'right': Color('HSL',[0.33333,0.35,0.91]),
'interpolate': 'perceptualuniform'},
'blue': {'left': Color('HSL',[0.66,1,0.14]),
'blue': {'left': Color('HSL',[0.66,1,0.14]),
'right': Color('HSL',[0.66,0.35,0.91]),
'interpolate': 'perceptualuniform'},
'seaweed': {'left': Color('HSL',[0.78,1.0,0.1]),
'seaweed': {'left': Color('HSL',[0.78,1.0,0.1]),
'right': Color('HSL',[0.40000,0.1,0.9]),
'interpolate': 'perceptualuniform'},
'bluebrown': {'left': Color('HSL',[0.65,0.53,0.49]),
'bluebrown': {'left': Color('HSL',[0.65,0.53,0.49]),
'right': Color('HSL',[0.11,0.75,0.38]),
'interpolate': 'perceptualuniform'},
'redgreen': {'left': Color('HSL',[0.97,0.96,0.36]),
'redgreen': {'left': Color('HSL',[0.97,0.96,0.36]),
'right': Color('HSL',[0.33333,1.0,0.14]),
'interpolate': 'perceptualuniform'},
'bluered': {'left': Color('HSL',[0.65,0.53,0.49]),
'bluered': {'left': Color('HSL',[0.65,0.53,0.49]),
'right': Color('HSL',[0.97,0.96,0.36]),
'interpolate': 'perceptualuniform'},
'blueredrainbow':{'left': Color('HSL',[2.0/3.0,1,0.5]),
'blueredrainbow':{'left': Color('HSL',[2.0/3.0,1,0.5]),
'right': Color('HSL',[0,1,0.5]),
'interpolate': 'linear' },
'orientation': {'left': Color('RGB',[0.933334,0.878432,0.878431]),
'right': Color('RGB',[0.250980,0.007843,0.000000]),
'interpolate': 'perceptualuniform'},
'strain': {'left': Color('RGB',[0.941177,0.941177,0.870588]),
'right': Color('RGB',[0.266667,0.266667,0.000000]),
'interpolate': 'perceptualuniform'},
'stress': {'left': Color('RGB',[0.878432,0.874511,0.949019]),
'right': Color('RGB',[0.000002,0.000000,0.286275]),
'interpolate': 'perceptualuniform'},
}
@ -344,7 +353,7 @@ class Colormap():
predefined = None
):
if str(predefined).lower() in self.__predefined__:
if predefined is not None:
left = self.__predefined__[predefined.lower()]['left']
right= self.__predefined__[predefined.lower()]['right']
interpolate = self.__predefined__[predefined.lower()]['interpolate']
@ -442,11 +451,12 @@ class Colormap():
format = format.lower() # consistent comparison basis
frac = 0.5*(np.array(crop) + 1.0) # rescale crop range to fractions
colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in xrange(steps)]
if format == 'paraview':
colormap = ['<ColorMap name="'+str(name)+'" space="Diverging">'] \
+ ['<Point x="%i"'%i + ' o="1" r="%g" g="%g" b="%g"/>'%(color[0],color[1],color[2],) for i,color in colors] \
+ ['</ColorMap>']
colormap = ['[\n {{\n "ColorSpace" : "RGB", "Name" : "{}",\n "RGBPoints" : ['.format(name)] \
+ [' {:4d},{:8.6f},{:8.6f},{:8.6f},'.format(i,color[0],color[1],color[2],)
for i,color in enumerate(colors[:-1])]\
+ [' {:4d},{:8.6f},{:8.6f},{:8.6f} '.format(i+1,colors[-1][0],colors[-1][1],colors[-1][2],)]\
+ [' ]\n }\n]']
elif format == 'gmsh':
colormap = ['View.ColorTable = {'] \
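For steps = 2 and the new 'stress' preset, the rewritten ParaView branch writes roughly the following JSON (endpoint colors taken from the predefined table above; whitespace approximated):

[
 {
   "ColorSpace" : "RGB", "Name" : "stress",
   "RGBPoints" : [
      0,0.878432,0.874511,0.949019,
      1,0.000002,0.000000,0.286275
   ]
 }
]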

View File

@ -53,6 +53,20 @@ def report(who,what):
"""reports script and file name"""
croak( (emph(who) if who else '') + (': '+what if what else '') )
# -----------------------------
def report_geom(info,
what = ['grid','size','origin','homogenization','microstructures']):
"""reports (selected) geometry information"""
output = {
'grid' : 'grid a b c: {}'.format(' x '.join(map(str,info['grid' ]))),
'size' : 'size x y z: {}'.format(' x '.join(map(str,info['size' ]))),
'origin' : 'origin x y z: {}'.format(' : '.join(map(str,info['origin']))),
'homogenization' : 'homogenization: {}'.format(info['homogenization']),
'microstructures' : 'microstructures: {}'.format(info['microstructures']),
}
for item in what: croak(output[item.lower()])
# -----------------------------
def emph(what):
"""emphasizes string on screen"""

Binary file not shown (image changed: 80 KiB before, 34 KiB after)

View File

@ -1,61 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,string,h5py
import numpy as np
from optparse import OptionParser
import damask
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Add column(s) containing Cauchy stress based on given column(s) of
deformation gradient and first Piola--Kirchhoff stress.
""" + string.replace('$Id$','\n','\\n')
)
parser.add_option('-f','--defgrad', dest='defgrad', \
help='heading of columns containing deformation gradient [%default]')
parser.add_option('-p','--stress', dest='stress', \
help='heading of columns containing first Piola--Kirchhoff stress [%default]')
parser.add_option('-o','--output', dest='output', \
help='group containing requested data [%default]')
parser.set_defaults(defgrad = 'f')
parser.set_defaults(stress = 'p')
parser.set_defaults(output = 'crystallite')
(options,filenames) = parser.parse_args()
if options.defgrad is None or options.stress is None or options.output is None:
parser.error('missing data column...')
# ------------------------------------------ setup file handles ---------------------------------------
files = []
for name in filenames:
if os.path.exists(name):
files.append({'name':name, 'file':h5py.File(name,"a")})
# ------------------------------------------ loop over input files ------------------------------------
for myFile in files:
print(myFile['name'])
# ------------------------------------------ loop over increments -------------------------------------
for inc in myFile['file']['increments'].keys():
print("Current Increment: "+inc)
for instance in myFile['file']['increments/'+inc+'/'+options.output].keys():
dsets = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance].keys()
if (options.defgrad in dsets and options.stress in dsets):
defgrad = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance+'/'+options.defgrad]
stress = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance+'/'+options.stress]
cauchy=np.zeros(np.shape(stress),'f')
for p in range(stress.shape[0]):
cauchy[p,...] = 1.0/np.linalg.det(defgrad[p,...])*np.dot(stress[p,...],defgrad[p,...].T) # [Cauchy] = (1/det(F)) * [P].[F_transpose]
cauchyFile = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance].create_dataset('cauchy', data=cauchy)
cauchyFile.attrs['units'] = 'Pa'
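The conversion at the heart of the removed script, as a minimal numpy sketch (hypothetical standalone function):

import numpy as np

def cauchy(F, P):
    """Cauchy stress from deformation gradient F and first Piola--Kirchhoff stress P (both 3x3)"""
    return np.dot(P, F.T) / np.linalg.det(F)       # sigma = (1/det(F)) * P . F^T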

View File

@ -21,7 +21,7 @@ Operates on periodic three-dimensional x,y,z-ordered data sets.
parser.add_option('-c','--coordinates',
dest = 'coords',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'column heading of coordinates [%default]')
parser.add_option('-f','--defgrad',
@ -36,10 +36,10 @@ parser.add_option('--no-volume','-v',
dest = 'volume',
action = 'store_false',
help = 'omit volume mismatch')
parser.set_defaults(coords = 'ipinitialcoord',
defgrad = 'f',
shape = True,
volume = True,
parser.set_defaults(pos = 'pos',
defgrad = 'f',
shape = True,
volume = True,
)
(options,filenames) = parser.parse_args()
@ -64,8 +64,8 @@ for name in filenames:
errors = []
remarks = []
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
else: colCoord = table.label_index(options.coords)
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
else: colCoord = table.label_index(options.pos)
if table.label_dimension(options.defgrad) != 9: errors.append('deformation gradient {} is not a tensor.'.format(options.defgrad))
else: colF = table.label_index(options.defgrad)

View File

@ -10,6 +10,7 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def curlFFT(geomdim,field):
shapeFFT = np.array(np.shape(field))[0:3]
grid = np.array(np.shape(field)[2::-1])
N = grid.prod() # field size
n = np.array(np.shape(field)[3:]).prod() # data size
@ -17,8 +18,8 @@ def curlFFT(geomdim,field):
if n == 3: dataType = 'vector'
elif n == 9: dataType = 'tensor'
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
curl_fourier = np.zeros(field_fourier.shape,'c16')
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
curl_fourier = np.empty(field_fourier.shape,'c16')
# differentiation in Fourier space
k_s = np.zeros([3],'i')
@ -55,34 +56,34 @@ def curlFFT(geomdim,field):
curl_fourier[i,j,k,2] = ( field_fourier[i,j,k,1]*xi[0]\
-field_fourier[i,j,k,0]*xi[1]) *TWOPIIMG
return np.fft.fftpack.irfftn(curl_fourier,axes=(0,1,2)).reshape([N,n])
return np.fft.fftpack.irfftn(curl_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n])
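The added s=shapeFFT arguments pin the length of the halved axis so that grids with an odd point count invert to their original shape; without s, irfftn must guess an even length. A quick numpy illustration (toy array, assumed shapes):

import numpy as np

a = np.random.rand(4, 4, 5)                                  # odd length along the halved axis
A = np.fft.rfftn(a, axes = (0, 1, 2))
print(np.fft.irfftn(A, axes = (0, 1, 2)).shape)              # (4, 4, 4) -- even length guessed
print(np.fft.irfftn(A, s = a.shape, axes = (0, 1, 2)).shape) # (4, 4, 5) -- original shape restored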
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing curl of requested column(s).
Operates on periodic ordered three-dimensional data sets.
Deals with both vector- and tensor-valued fields.
Deals with both vector- and tensor fields.
""", version = scriptID)
parser.add_option('-c','--coordinates',
dest = 'coords',
type = 'string', metavar='string',
help = 'column heading for coordinates [%default]')
parser.add_option('-p','--pos','--periodiccellcenter',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'heading of columns containing vector field values')
help = 'label(s) of vector field values')
parser.add_option('-t','--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'heading of columns containing tensor field values')
help = 'label(s) of tensor field values')
parser.set_defaults(coords = 'ipinitialcoord',
parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
@ -90,7 +91,7 @@ parser.set_defaults(coords = 'ipinitialcoord',
if options.vector is None and options.tensor is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
@ -113,8 +114,8 @@ for name in filenames:
remarks = []
column = {}
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
else: colCoord = table.label_index(options.coords)
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
else: colCoord = table.label_index(options.pos)
for type, data in items.iteritems():
for what in (data['labels'] if data['labels'] is not None else []):
@ -147,7 +148,7 @@ for name in filenames:
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones
# ------------------------------------------ process value field -----------------------------------

View File

@ -1,164 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,math
import numpy as np
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
def deformedCoordsFFT(F,undeformed=False):
wgt = 1.0/grid.prod()
integrator = np.array([0.+1.j,0.+1.j,0.+1.j],'c16') * size/ 2.0 / math.pi
step = size/grid
F_fourier = np.fft.rfftn(F,axes=(0,1,2))
coords_fourier = np.zeros(F_fourier.shape[0:4],'c16')
if undeformed:
Favg=np.eye(3)
else:
Favg=np.real(F_fourier[0,0,0,:,:])*wgt
#--------------------------------------------------------------------------------------------------
# integration in Fourier space
k_s = np.zeros([3],'i')
for i in xrange(grid[2]):
k_s[2] = i
if(i > grid[2]//2 ): k_s[2] = k_s[2] - grid[2]
for j in xrange(grid[1]):
k_s[1] = j
if(j > grid[1]//2 ): k_s[1] = k_s[1] - grid[1]
for k in xrange(grid[0]//2+1):
k_s[0] = k
for m in xrange(3):
coords_fourier[i,j,k,m] = sum(F_fourier[i,j,k,m,0:3]*k_s*integrator)
if (any(k_s != 0)):
coords_fourier[i,j,k,0:3] /= -sum(k_s*k_s)
#--------------------------------------------------------------------------------------------------
# add average to scaled fluctuation and put (0,0,0) on (0,0,0)
coords = np.fft.irfftn(coords_fourier,F.shape[0:3],axes=(0,1,2))
offset_coords = np.dot(F[0,0,0,:,:],step/2.0) - scaling*coords[0,0,0,0:3]
for z in xrange(grid[2]):
for y in xrange(grid[1]):
for x in xrange(grid[0]):
coords[z,y,x,0:3] = scaling*coords[z,y,x,0:3] \
+ offset_coords \
+ np.dot(Favg,step*np.array([x,y,z]))
return coords
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options file[s]', description = """
Add deformed configuration of given initial coordinates.
Operates on periodic three-dimensional x,y,z-ordered data sets.
""", version = scriptID)
parser.add_option('-f', '--defgrad',dest='defgrad', metavar = 'string',
help='heading of deformation gradient columns [%default]')
parser.add_option('--reference', dest='undeformed', action='store_true',
help='map results to reference (undeformed) average configuration [%default]')
parser.add_option('--scaling', dest='scaling', action='extend', metavar = '<float LIST>',
help='scaling of fluctuation')
parser.add_option('-u', '--unitlength', dest='unitlength', type='float', metavar = 'float',
help='set unit length for 2D model [%default]')
parser.add_option('--coordinates', dest='coords', metavar='string',
help='column heading for coordinates [%default]')
parser.set_defaults(defgrad = 'f')
parser.set_defaults(coords = 'ipinitialcoord')
parser.set_defaults(scaling = [])
parser.set_defaults(undeformed = False)
parser.set_defaults(unitlength = 0.0)
(options,filenames) = parser.parse_args()
options.scaling += [1.0 for i in xrange(max(0,3-len(options.scaling)))]
scaling = map(float, options.scaling)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
else: colCoord = table.label_index(options.coords)
if table.label_dimension(options.defgrad) != 9: errors.append('deformation gradient {} is not a tensor.'.format(options.defgrad))
else: colF = table.label_index(options.defgrad)
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
mincorner = np.array(map(min,coords))
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for coord in xrange(3):
label = '{}_{}_{}'.format(coord+1,options.defgrad,options.coords)
if np.any(scaling) != 1.0: label+='_{}_{}_{}'.format(scaling)
if options.undeformed: label+='_undeformed'
table.labels_append([label]) # extend ASCII header with new labels
table.head_write()
# ------------------------------------------ read deformation gradient field -----------------------
centroids = deformedCoordsFFT(table.data[:,colF:colF+9].reshape(grid[2],grid[1],grid[0],3,3),
options.undeformed)
# ------------------------------------------ process data ------------------------------------------
table.data_rewind()
for z in xrange(grid[2]):
for y in xrange(grid[1]):
for x in xrange(grid[0]):
table.data_read()
table.data_append(list(centroids[z,y,x,:]))
table.data_write()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables

View File

@ -0,0 +1,222 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,math
import numpy as np
import scipy.ndimage
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
def cell2node(cellData,grid):
nodeData = 0.0
datalen = np.array(cellData.shape[3:]).prod()
for i in xrange(datalen):
node = scipy.ndimage.convolve(cellData.reshape(tuple(grid)+(datalen,))[...,i],
np.ones((2,2,2))/8., # 2x2x2 neighborhood of cells
mode = 'wrap',
origin = -1, # offset to have cell origin as center
) # now averaged at cell origins
node = np.append(node,node[np.newaxis,0,:,:,...],axis=0) # wrap along z
node = np.append(node,node[:,0,np.newaxis,:,...],axis=1) # wrap along y
node = np.append(node,node[:,:,0,np.newaxis,...],axis=2) # wrap along x
nodeData = node[...,np.newaxis] if i==0 else np.concatenate((nodeData,node[...,np.newaxis]),axis=-1)
return nodeData
#--------------------------------------------------------------------------------------------------
def displacementAvgFFT(F,grid,size,nodal=False,transformed=False):
"""calculate average cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
if nodal:
x, y, z = np.meshgrid(np.linspace(0,size[0],1+grid[0]),
np.linspace(0,size[1],1+grid[1]),
np.linspace(0,size[2],1+grid[2]),
indexing = 'ij')
else:
x, y, z = np.meshgrid(np.linspace(0,size[0],grid[0],endpoint=False),
np.linspace(0,size[1],grid[1],endpoint=False),
np.linspace(0,size[2],grid[2],endpoint=False),
indexing = 'ij')
origCoords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)
F_fourier = F if transformed else np.fft.rfftn(F,axes=(0,1,2)) # transform or use provided data
Favg = np.real(F_fourier[0,0,0,:,:])/grid.prod() # take zero freq for average
avgDisplacement = np.einsum('ml,ijkl->ijkm',Favg-np.eye(3),origCoords) # dX = Favg.X
return avgDisplacement
#--------------------------------------------------------------------------------------------------
def displacementFluctFFT(F,grid,size,nodal=False,transformed=False):
"""calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
integrator = 0.5j * size / math.pi
kk, kj, ki = np.meshgrid(np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2])),
np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1])),
np.arange(grid[0]//2+1),
indexing = 'ij')
k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3)
k_sSquared = np.einsum('...l,...l',k_s,k_s)
k_sSquared[0,0,0] = 1.0 # ignore global average frequency
#--------------------------------------------------------------------------------------------------
# integration in Fourier space
displacement_fourier = -np.einsum('ijkml,ijkl,l->ijkm',
F if transformed else np.fft.rfftn(F,axes=(0,1,2)),
k_s,
integrator,
) / k_sSquared[...,np.newaxis]
#--------------------------------------------------------------------------------------------------
# backtransformation to real space
displacement = np.fft.irfftn(displacement_fourier,grid,axes=(0,1,2))
return cell2node(displacement,grid) if nodal else displacement
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options file[s]', description = """
Add displacements resulting from deformation gradient field.
Operates on periodic three-dimensional x,y,z-ordered data sets.
Outputs at cell centers or cell nodes (into separate file).
""", version = scriptID)
parser.add_option('-f',
'--defgrad',
dest = 'defgrad',
metavar = 'string',
help = 'column label of deformation gradient [%default]')
parser.add_option('-p',
'--pos', '--position',
dest = 'pos',
metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('--nodal',
dest = 'nodal',
action = 'store_true',
help = 'output nodal (instead of cell-centered) displacements')
parser.set_defaults(defgrad = 'f',
pos = 'pos',
nodal = False,
)
(options,filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
outname = (os.path.splitext(name)[0] +
'_nodal' +
os.path.splitext(name)[1]) if (options.nodal and name) else None
try: table = damask.ASCIItable(name = name,
outname = outname,
buffered = False)
except: continue
damask.util.report(scriptName,'{}{}'.format(name,' --> {}'.format(outname) if outname else ''))
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.defgrad) != 9:
errors.append('deformation gradient "{}" is not a 3x3 tensor.'.format(options.defgrad))
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1:
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3:
remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray([options.defgrad,options.pos])
table.data_rewind()
if len(table.data.shape) < 2: table.data.shape += (1,) # expand to 2D shape
if table.data[:,9:].shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data[:,9:].shape[1]),dtype='f'))) # fill coords up to 3D with zeros
coords = [np.unique(table.data[:,9+i]) for i in xrange(3)]
mincorner = np.array(map(min,coords))
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ------------------------------------------
F_fourier = np.fft.rfftn(table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),axes=(0,1,2)) # perform transform only once...
displacement = displacementFluctFFT(F_fourier,grid,size,options.nodal,transformed=True)
avgDisplacement = displacementAvgFFT (F_fourier,grid,size,options.nodal,transformed=True)
# ------------------------------------------ assemble header ---------------------------------------
if options.nodal:
table.info_clear()
table.labels_clear()
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append((['{}_pos' .format(i+1) for i in xrange(3)] if options.nodal else []) +
['{}_avg({}).{}' .format(i+1,options.defgrad,options.pos) for i in xrange(3)] +
['{}_fluct({}).{}'.format(i+1,options.defgrad,options.pos) for i in xrange(3)] )
table.head_write()
# ------------------------------------------ output data -------------------------------------------
zrange = np.linspace(0,size[2],1+grid[2]) if options.nodal else xrange(grid[2])
yrange = np.linspace(0,size[1],1+grid[1]) if options.nodal else xrange(grid[1])
xrange = np.linspace(0,size[0],1+grid[0]) if options.nodal else xrange(grid[0])
for i,z in enumerate(zrange):
for j,y in enumerate(yrange):
for k,x in enumerate(xrange):
if options.nodal: table.data_clear()
else: table.data_read()
table.data_append([x,y,z] if options.nodal else [])
table.data_append(list(avgDisplacement[i,j,k,:]))
table.data_append(list( displacement[i,j,k,:]))
table.data_write()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
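What the two FFT helpers above compute, read off the code (with ℓ the box size per direction, k the integer frequency triple, N the grid point count; integrator = 0.5j*size/pi supplies the i ℓ_l / 2π factor):

\bar F = \tfrac{1}{N}\,\mathrm{Re}\,\hat F(\mathbf 0), \qquad
u_\mathrm{avg}(\mathbf X) = (\bar F - I)\,\mathbf X, \qquad
\hat u_m(\mathbf k) = -\frac{\mathrm i}{2\pi}\,
\frac{\sum_l \hat F_{ml}(\mathbf k)\, k_l\, \ell_l}{\mathbf k \cdot \mathbf k}
\quad (\mathbf k \neq \mathbf 0).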

View File

@ -10,15 +10,16 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def divFFT(geomdim,field):
shapeFFT = np.array(np.shape(field))[0:3]
grid = np.array(np.shape(field)[2::-1])
N = grid.prod() # field size
n = np.array(np.shape(field)[3:]).prod() # data size
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
div_fourier = np.zeros(field_fourier.shape[0:len(np.shape(field))-1],'c16') # size depends on whether tensor or vector
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
div_fourier = np.empty(field_fourier.shape[0:len(np.shape(field))-1],'c16') # size depends on whether tensor or vector
# differentiation in Fourier space
k_s=np.zeros([3],'i')
k_s = np.zeros([3],'i')
TWOPIIMG = 2.0j*math.pi
for i in xrange(grid[2]):
k_s[0] = i
@ -41,34 +42,34 @@ def divFFT(geomdim,field):
elif n == 3: # vector, 3 -> 1
div_fourier[i,j,k] = sum(field_fourier[i,j,k,0:3]*xi) *TWOPIIMG
return np.fft.fftpack.irfftn(div_fourier,axes=(0,1,2)).reshape([N,n/3])
return np.fft.fftpack.irfftn(div_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n/3])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing divergence of requested column(s).
Operates on periodic ordered three-dimensional data sets.
Deals with both vector- and tensor-valued fields.
""", version = scriptID)
parser.add_option('-c','--coordinates',
dest = 'coords',
parser.add_option('-p','--pos','--periodiccellcenter',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'column heading for coordinates [%default]')
help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'heading of columns containing vector field values')
help = 'label(s) of vector field values')
parser.add_option('-t','--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'heading of columns containing tensor field values')
help = 'label(s) of tensor field values')
parser.set_defaults(coords = 'ipinitialcoord',
parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
@ -99,8 +100,8 @@ for name in filenames:
remarks = []
column = {}
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
else: colCoord = table.label_index(options.coords)
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
else: colCoord = table.label_index(options.pos)
for type, data in items.iteritems():
for what in (data['labels'] if data['labels'] is not None else []):

View File

@ -88,20 +88,32 @@ Add column(s) containing Euclidean distance to grain structural features: bounda
""", version = scriptID)
parser.add_option('-c','--coordinates', dest='coords', metavar='string',
help='column heading for coordinates [%default]')
parser.add_option('-i','--identifier', dest='id', metavar = 'string',
help='heading of column containing grain identifier [%default]')
parser.add_option('-t','--type', dest = 'type', action = 'extend', metavar = '<string LIST>',
help = 'feature type {%s} '%(', '.join(map(lambda x:'/'.join(x['names']),features))) )
parser.add_option('-n','--neighborhood',dest='neighborhood', choices = neighborhoods.keys(), metavar = 'string',
help = 'type of neighborhood [neumann] {%s}'%(', '.join(neighborhoods.keys())))
parser.add_option('-s', '--scale', dest = 'scale', type = 'float', metavar='float',
parser.add_option('-p',
'--pos', '--position',
dest = 'pos', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-i',
'--id', '--identifier',
dest = 'id', metavar = 'string',
help='label of grain identifier [%default]')
parser.add_option('-t',
'--type',
dest = 'type', action = 'extend', metavar = '<string LIST>',
help = 'feature type {{{}}} '.format(', '.join(map(lambda x:'/'.join(x['names']),features))) )
parser.add_option('-n',
'--neighborhood',
dest = 'neighborhood', choices = neighborhoods.keys(), metavar = 'string',
help = 'neighborhood type [neumann] {{{}}}'.format(', '.join(neighborhoods.keys())))
parser.add_option('-s',
'--scale',
dest = 'scale', type = 'float', metavar = 'float',
help = 'voxel size [%default]')
parser.set_defaults(coords = 'ipinitialcoord')
parser.set_defaults(id = 'texture')
parser.set_defaults(neighborhood = 'neumann')
parser.set_defaults(scale = 1.0)
parser.set_defaults(pos = 'pos',
id = 'texture',
neighborhood = 'neumann',
scale = 1.0,
)
(options,filenames) = parser.parse_args()
@ -110,7 +122,7 @@ if options.type is None:
if not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['names'],features))))):
parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
if 'biplane' in options.type and 'boundary' in options.type:
parser.error("only one from aliases 'biplane' and 'boundary' possible.")
parser.error('only one from aliases "biplane" and "boundary" possible.')
feature_list = []
for i,feature in enumerate(features):
@ -125,10 +137,8 @@ for i,feature in enumerate(features):
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name, buffered = False)
except:
continue
try: table = damask.ASCIItable(name = name, buffered = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
@ -141,13 +151,17 @@ for name in filenames:
remarks = []
column = {}
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
else: coordCol = table.label_index(options.coords)
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1:
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
else: coordCol = table.label_index(options.pos)
if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
else: idCol = table.label_index(options.id)
if remarks != []: damask.util.croak(remarks)
if remarks != []:
damask.util.croak(remarks)
remarks = []
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
@ -164,51 +178,57 @@ for name in filenames:
table.data_readArray()
coords = [{},{},{}]
for i in xrange(len(table.data)):
for j in xrange(3):
coords[j][str(table.data[i,coordCol+j])] = True
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d') # size from bounding box, corrected for cell-centeredness
coords = [np.unique(table.data[:,coordCol+i]) for i in xrange(coordDim)]
mincorner = np.array(map(min,coords))
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords)+[1]*(3-len(coords)),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid))))
else: remarks.append('grid: {}x{}x{}'.format(*grid))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
neighborhood = neighborhoods[options.neighborhood]
convoluted = np.empty([len(neighborhood)]+list(grid+2),'i')
microstructure = periodic_3Dpad(np.array(table.data[:,idCol].reshape(grid),'i'))
diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')
microstructure = periodic_3Dpad(table.data[:,idCol].astype('i').reshape(grid,order='F'))
for i,p in enumerate(neighborhood):
stencil = np.zeros((3,3,3),'i')
stencil[1,1,1] = -1
stencil[p[0]+1,
p[1]+1,
p[2]+1] = 1
convoluted[i,:,:,:] = ndimage.convolve(microstructure,stencil)
diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil) # compare ID at each point...
# ...to every one in the specified neighborhood
# for same IDs at both locations ==> 0
distance = np.ones((len(feature_list),grid[0],grid[1],grid[2]),'d')
convoluted = np.sort(convoluted,axis = 0)
uniques = np.where(convoluted[0,1:-1,1:-1,1:-1] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
diffToNeighbor = np.sort(diffToNeighbor) # sort diff such that number of changes in diff (steps)...
# ...reflects number of unique neighbors
uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
for i in xrange(1,len(neighborhood)): # check remaining points in neighborhood
uniques += np.where(np.logical_and(
convoluted[i,1:-1,1:-1,1:-1] != convoluted[i-1,1:-1,1:-1,1:-1], # flip of ID difference detected?
convoluted[i,1:-1,1:-1,1:-1] != 0), # not myself?
1,0) # count flip
diffToNeighbor[1:-1,1:-1,1:-1,i] != 0, # not myself?
diffToNeighbor[1:-1,1:-1,1:-1,i] != diffToNeighbor[1:-1,1:-1,1:-1,i-1],
), # flip of ID difference detected?
1,0) # count that flip
distance = np.ones((len(feature_list),grid[0],grid[1],grid[2]),'d')
for i,feature_id in enumerate(feature_list):
distance[i,:,:,:] = np.where(uniques >= features[feature_id]['aliens'],0.0,1.0) # seed with 0.0 when enough unique neighbor IDs are present
distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3
distance.shape = ([len(feature_list),grid.prod(),1])
distance = distance.reshape([len(feature_list),grid.prod(),1],order='F')
for i in xrange(len(feature_list)):
stack.append(distance[i,:])
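The convolve-and-sort logic above counts distinct foreign grain IDs per voxel: each stencil stores the ID difference to one neighbor, sorting groups equal differences, and every change to a new nonzero value marks one more unique alien. A toy sketch of the per-voxel counting (hypothetical differences):

import numpy as np

d = np.sort(np.array([3, 0, -1, 3]))        # ID difference to each of 4 neighbors -> [-1, 0, 3, 3]
uniques = int(d[0] != 0)                    # first entry counts if foreign
for i in range(1, len(d)):
    uniques += int(d[i] != 0 and d[i] != d[i-1])  # count each flip to a new nonzero difference
print(uniques)                              # 2 -> this voxel touches two other grains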

View File

@ -10,14 +10,16 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def gradFFT(geomdim,field):
shapeFFT = np.array(np.shape(field))[0:3]
grid = np.array(np.shape(field)[2::-1])
N = grid.prod() # field size
n = np.array(np.shape(field)[3:]).prod() # data size
if n == 3: dataType = 'vector'
elif n == 1: dataType = 'scalar'
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
grad_fourier = np.zeros(field_fourier.shape+(3,),'c16')
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2),s=shapeFFT)
grad_fourier = np.empty(field_fourier.shape+(3,),'c16')
# differentiation in Fourier space
k_s = np.zeros([3],'i')
@ -44,34 +46,34 @@ def gradFFT(geomdim,field):
grad_fourier[i,j,k,1,:] = field_fourier[i,j,k,1]*xi *TWOPIIMG # tensor field from vector data
grad_fourier[i,j,k,2,:] = field_fourier[i,j,k,2]*xi *TWOPIIMG
return np.fft.fftpack.irfftn(grad_fourier,axes=(0,1,2)).reshape([N,3*n])
return np.fft.fftpack.irfftn(grad_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,3*n])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing gradient of requested column(s).
Operates on periodic ordered three-dimensional data sets.
Deals with both vector- and scalar fields.
""", version = scriptID)
parser.add_option('-c','--coordinates',
dest = 'coords',
type = 'string', metavar='string',
help = 'column heading for coordinates [%default]')
parser.add_option('-p','--pos','--periodiccellcenter',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-v','--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'heading of columns containing vector field values')
help = 'label(s) of vector field values')
parser.add_option('-s','--scalar',
dest = 'scalar',
action = 'extend', metavar = '<string LIST>',
help = 'heading of columns containing scalar field values')
help = 'label(s) of scalar field values')
parser.set_defaults(coords = 'ipinitialcoord',
parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
@ -96,14 +98,14 @@ for name in filenames:
items = {
'scalar': {'dim': 1, 'shape': [1], 'labels':options.scalar, 'active':[], 'column': []},
'vector': {'dim': 3, 'shape': [3], 'labels':options.vector, 'active':[], 'column': []},
'vector': {'dim': 3, 'shape': [3], 'labels':options.vector, 'active':[], 'column': []},
}
errors = []
remarks = []
column = {}
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
else: colCoord = table.label_index(options.coords)
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
else: colCoord = table.label_index(options.pos)
for type, data in items.iteritems():
for what in (data['labels'] if data['labels'] is not None else []):
@ -136,7 +138,7 @@ for name in filenames:
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones
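addCurl, addDivergence, and addGradient (above) share one differentiation-in-Fourier-space pattern; with ξ the box-size-scaled frequency vector and the TWOPIIMG factor from the code, the identities are

\widehat{\nabla \times f} = 2\pi\mathrm i\,\boldsymbol\xi \times \hat f, \qquad
\widehat{\nabla \cdot f} = 2\pi\mathrm i\,\boldsymbol\xi \cdot \hat f, \qquad
\widehat{\nabla f} = 2\pi\mathrm i\,\hat f \otimes \boldsymbol\xi .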
# ------------------------------------------ process value field -----------------------------------

View File

@ -5,7 +5,6 @@ import numpy as np
import damask
from optparse import OptionParser
from scipy import spatial
from collections import defaultdict
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
Add grain index based on similarity of crystal lattice orientation.
""", version = scriptID)
parser.add_option('-r', '--radius',
parser.add_option('-r',
'--radius',
dest = 'radius',
type = 'float', metavar = 'float',
help = 'search radius')
parser.add_option('-d', '--disorientation',
parser.add_option('-d',
'--disorientation',
dest = 'disorientation',
type = 'float', metavar = 'float',
help = 'disorientation threshold per grain [%default] (degrees)')
parser.add_option('-s', '--symmetry',
help = 'disorientation threshold in degrees [%default]')
parser.add_option('-s',
'--symmetry',
dest = 'symmetry',
type = 'string', metavar = 'string',
help = 'crystal symmetry [%default]')
parser.add_option('-e', '--eulers',
parser.add_option('-e',
'--eulers',
dest = 'eulers',
type = 'string', metavar = 'string',
help = 'Euler angles')
parser.add_option( '--degrees',
help = 'label of Euler angles')
parser.add_option('--degrees',
dest = 'degrees',
action = 'store_true',
help = 'Euler angles are given in degrees [%default]')
parser.add_option('-m', '--matrix',
parser.add_option('-m',
'--matrix',
dest = 'matrix',
type = 'string', metavar = 'string',
help = 'orientation matrix')
help = 'label of orientation matrix')
parser.add_option('-a',
dest = 'a',
type = 'string', metavar = 'string',
help = 'crystal frame a vector')
help = 'label of crystal frame a vector')
parser.add_option('-b',
dest = 'b',
type = 'string', metavar = 'string',
help = 'crystal frame b vector')
help = 'label of crystal frame b vector')
parser.add_option('-c',
dest = 'c',
type = 'string', metavar = 'string',
help = 'crystal frame c vector')
parser.add_option('-q', '--quaternion',
help = 'label of crystal frame c vector')
parser.add_option('-q',
'--quaternion',
dest = 'quaternion',
type = 'string', metavar = 'string',
help = 'quaternion')
parser.add_option('-p', '--position',
dest = 'coords',
help = 'label of quaternion')
parser.add_option('-p',
'--pos', '--position',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'spatial position of voxel [%default]')
help = 'label of coordinates [%default]')
parser.set_defaults(symmetry = 'cubic',
coords = 'pos',
parser.set_defaults(disorientation = 5,
symmetry = 'cubic',
pos = 'pos',
degrees = False,
)
@ -86,17 +93,16 @@ if np.sum(input) != 1: parser.error('needs exactly one input format.')
(options.matrix,9,'matrix'),
(options.quaternion,4,'quaternion'),
][np.where(input)[0][0]] # select input label that was requested
toRadians = np.pi/180.0 if options.degrees else 1.0 # rescale degrees to radians
cos_disorientation = np.cos(options.disorientation/2.*toRadians)
toRadians = np.pi/180.0 if options.degrees else 1.0 # rescale degrees to radians
cos_disorientation = np.cos(np.radians(options.disorientation/2.)) # cos of half the disorientation angle
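This works because the scalar part of a unit quaternion is the cosine of half its rotation angle, so the threshold test never needs the angle itself:

q_\Delta = q_1^{-1} q_2, \qquad w_\Delta = \cos(\theta/2)
\quad\Longrightarrow\quad
\theta < \theta_\mathrm{max} \iff w_\Delta > \cos(\theta_\mathrm{max}/2).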
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
@ -109,8 +115,10 @@ for name in filenames:
errors = []
remarks = []
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
if not np.all(table.label_dimension(label) == dim): errors.append('input {} does not have dimension {}.'.format(label,dim))
if not 3 >= table.label_dimension(options.pos) >= 1:
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
if not np.all(table.label_dimension(label) == dim):
errors.append('input "{}" does not have dimension {}.'.format(label,dim))
else: column = table.label_index(label)
if remarks != []: damask.util.croak(remarks)
@ -122,8 +130,10 @@ for name in filenames:
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append('grainID_{}@{}'.format(label,
options.disorientation if options.degrees else np.degrees(options.disorientation))) # report orientation source and disorientation in degrees
table.labels_append('grainID_{}@{:g}'.format('+'.join(label)
if isinstance(label, (list,tuple))
else label,
options.disorientation)) # report orientation source and disorientation
table.head_write()
# ------------------------------------------ process data ------------------------------------------
@ -137,7 +147,7 @@ for name in filenames:
bg.set_message('reading positions...')
table.data_readArray(options.coords) # read position vectors
table.data_readArray(options.pos) # read position vectors
grainID = -np.ones(len(table.data),dtype=int)
start = tick = time.clock()
@ -162,7 +172,7 @@ for name in filenames:
time_delta = (time.clock()-tick) * (len(grainID) - p) / p
bg.set_message('(%02i:%02i:%02i) processing point %i of %i (grain count %i)...'\
%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),np.count_nonzero(memberCounts)))
if inputtype == 'eulers':
o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians,
@ -179,84 +189,51 @@ for name in filenames:
o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4])),
symmetry = options.symmetry).reduced()
matched = False
matched = False
alreadyChecked = {}
candidates = []
bestDisorientation = damask.Quaternion([0,0,0,1]) # initialize to 180 deg rotation as worst case
# check against last matched needs to be really picky. best would be to exclude jumps across the poke (checking distance between last and me?)
# when walking through neighborhood first check whether grainID of that point has already been tested, if yes, skip!
if matchedID != -1: # has matched before?
matched = (o.quaternion.conjugated() * orientations[matchedID].quaternion).w > cos_disorientation
if not matched:
alreadyChecked = {}
bestDisorientation = damask.Quaternion([0,0,0,1]) # initialize to 180 deg rotation as worst case
for i in kdtree.query_ball_point(kdtree.data[p],options.radius): # check all neighboring points
gID = grainID[i]
if gID != -1 and gID not in alreadyChecked: # indexed point belonging to a grain not yet tested?
alreadyChecked[gID] = True # remember not to check again
disorientation = o.disorientation(orientations[gID],SST = False)[0] # compare against other orientation
if disorientation.quaternion.w > cos_disorientation and \
disorientation.quaternion.w >= bestDisorientation.w: # within threshold and better than current best?
for i in kdtree.query_ball_point(kdtree.data[p],options.radius): # check all neighboring points
gID = grainID[i]
if gID != -1 and gID not in alreadyChecked: # indexed point belonging to a grain not yet tested?
alreadyChecked[gID] = True # remember not to check again
disorientation = o.disorientation(orientations[gID],SST = False)[0] # compare against other orientation
if disorientation.quaternion.w > cos_disorientation: # within threshold ...
candidates.append(gID) # remember as potential candidate
if disorientation.quaternion.w >= bestDisorientation.w: # ... and better than current best?
matched = True
matchedID = gID # remember that grain
bestDisorientation = disorientation.quaternion
if not matched: # no match -> new grain found
memberCounts += [1] # start new membership counter
if matched: # did match existing grain
memberCounts[matchedID] += 1
if len(candidates) > 1: # ambiguity in grain identification?
largestGrain = sorted(candidates,key=lambda x:memberCounts[x])[-1] # find largest among potential candidate grains
matchedID = largestGrain
for c in [c for c in candidates if c != largestGrain]: # loop over smaller candidates
memberCounts[largestGrain] += memberCounts[c] # reassign member count of smaller to largest
memberCounts[c] = 0
grainID = np.where(np.in1d(grainID,candidates), largestGrain, grainID) # relabel grid points of smaller candidates as largest one
else: # no match -> new grain found
orientations += [o] # initialize with current orientation
memberCounts += [1] # start new membership counter
matchedID = g
g += 1 # increment grain counter
else: # did match existing grain
memberCounts[matchedID] += 1
grainID[p] = matchedID # remember grain index assigned to point
p += 1 # increment point
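
The per-point loop above only compares orientations of spatial neighbors, found with a KD-tree ball query. The search pattern in isolation, a sketch assuming scipy is available (data and radius are placeholders):

import numpy as np
from scipy import spatial

points = np.random.rand(1000, 3)                       # one row per map point
kdtree = spatial.cKDTree(points)
radius = 0.05                                          # plays the role of options.radius

for p in range(len(points)):
    neighbors = kdtree.query_ball_point(points[p], radius)  # indices, includes p itself
    # only these few indices need the expensive disorientation comparison
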
bg.set_message('identifying similar orientations among {} grains...'.format(len(orientations)))
memberCounts = np.array(memberCounts)
similarOrientations = [[] for i in xrange(len(orientations))]
for i,orientation in enumerate(orientations[:-1]): # compare each identified orientation...
for j in xrange(i+1,len(orientations)): # ...against all others that were defined afterwards
if orientation.disorientation(orientations[j],SST = False)[0].quaternion.w > cos_disorientation: # similar orientations in both grainIDs?
similarOrientations[i].append(j) # remember in upper triangle...
similarOrientations[j].append(i) # ...and lower triangle of matrix
if similarOrientations[i] != []:
bg.set_message('grainID {} is similar to: {}'.format(i,' '.join(map(str,similarOrientations[i]))))
stillShifting = True
while stillShifting:
stillShifting = False
tick = time.clock()
for p,gID in enumerate(grainID): # walk through all points
if p > 0 and p % 1000 == 0:
time_delta = (time.clock()-tick) * (len(grainID) - p) / p
bg.set_message('(%02i:%02i:%02i) shifting ID of point %i out of %i (grain count %i)...'
%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
if similarOrientations[gID] != []: # orientation of my grainID is similar to someone else?
similarNeighbors = defaultdict(int) # frequency of neighboring grainIDs sharing my orientation
for i in kdtree.query_ball_point(kdtree.data[p],options.radius): # check all neighboring points
if grainID[i] in similarOrientations[gID]: # neighboring point shares my orientation?
similarNeighbors[grainID[i]] += 1 # remember its grainID
if similarNeighbors != {}: # found similar orientation(s) in neighborhood
candidates = np.array([gID]+similarNeighbors.keys()) # possible replacement grainIDs for me
grainID[p] = candidates[np.argsort(memberCounts[candidates])[-1]] # adopt ID that is most frequent in overall dataset
memberCounts[gID] -= 1 # my former ID loses one fellow
memberCounts[grainID[p]] += 1 # my new ID gains one fellow
bg.set_message('{}:{} --> {}'.format(p,gID,grainID[p])) # report switch of grainID
stillShifting = True
grainIDs = np.where(np.array(memberCounts) > 0)[0] # identify "live" grain identifiers
packingMap = dict(zip(list(grainIDs),range(len(grainIDs)))) # map to condense into consecutive IDs
table.data_rewind()
outputAlive = True
p = 0
while outputAlive and table.data_read(): # read next data line of ASCII table
table.data_append(1+grainID[p]) # add grain ID
table.data_append(1+packingMap[grainID[p]]) # add (condensed) grain ID
outputAlive = table.data_write() # output processed line
p += 1
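
The condensation step at the end keeps only grains that still own points and renumbers them consecutively. The same logic standalone, with made-up member counts:

import numpy as np

memberCounts = np.array([3, 0, 5, 0, 2])                          # grains 1 and 3 were merged away
grainIDs = np.where(memberCounts > 0)[0]                          # -> [0, 2, 4]
packingMap = dict(zip(grainIDs.tolist(), range(len(grainIDs))))   # {0:0, 2:1, 4:2}

grainID = np.array([0, 2, 2, 4, 0])                               # per-point assignment
condensed = np.array([1 + packingMap[g] for g in grainID])        # 1-based output IDs
print(condensed)                                                  # [1 2 2 3 1]
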

56
processing/post/addInfo.py Executable file
View File

@ -0,0 +1,56 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options file[s]', description = """
Add info lines to ASCIItable header.
""", version = scriptID)
parser.add_option('-i',
'--info',
dest = 'info', action = 'extend', metavar = '<string LIST>',
help = 'items to add')
parser.set_defaults(info = [],
)
(options,filenames) = parser.parse_args()
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ assemble header ---------------------------------------
table.head_read()
table.info_append(options.info)
table.head_write()
# ------------------------------------------ pass through data -------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables

View File

@ -9,225 +9,95 @@ import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
slipnormal_temp = [
[0,0,0,1],
[0,0,0,1],
[0,0,0,1],
[0,1,-1,0],
[-1,0,1,0],
[1,-1,0,0],
[0,1,-1,1],
[-1,1,0,1],
[-1,0,1,1],
[0,-1,1,1],
[1,-1,0,1],
[1,0,-1,1],
[0,1,-1,1],
[0,1,-1,1],
[-1,1,0,1],
[-1,1,0,1],
[-1,0,1,1],
[-1,0,1,1],
[0,-1,1,1],
[0,-1,1,1],
[1,-1,0,1],
[1,-1,0,1],
[1,0,-1,1],
[1,0,-1,1],
]
slipdirection_temp = [
[2,-1,-1,0],
[-1,2,-1,0],
[-1,-1,2,0],
[2,-1,-1,0],
[-1,2,-1,0],
[-1,-1,2,0],
[2,-1,-1,0],
[1,1,-2,0],
[-1,2,-1,0],
[-2,1,1,0],
[-1,-1,2,0],
[1,-2,1,0],
[-1,2,-1,3],
[1,1,-2,3],
[-2,1,1,3],
[-1,2,-1,3],
[-1,-1,2,3],
[-2,1,1,3],
[1,-2,1,3],
[-1,-1,2,3],
[2,-1,-1,3],
[1,-2,1,3],
[1,1,-2,3],
[2,-1,-1,3],
]
# slip normals and directions according to cpfem implementation
Nslipsystems = {'fcc': 12, 'bcc': 24, 'hex': 24}
slipnormal = { \
'fcc': [
[1,1,1],
[1,1,1],
[1,1,1],
[-1,-1,1],
[-1,-1,1],
[-1,-1,1],
[1,-1,-1],
[1,-1,-1],
[1,-1,-1],
[-1,1,-1],
[-1,1,-1],
[-1,1,-1],
],
'bcc': [
[0,1,1],
[0,1,1],
[0,-1,1],
[0,-1,1],
[1,0,1],
[1,0,1],
[-1,0,1],
[-1,0,1],
[1,1,0],
[1,1,0],
[-1,1,0],
[-1,1,0],
[2,1,1],
[-2,1,1],
[2,-1,1],
[2,1,-1],
[1,2,1],
[-1,2,1],
[1,-2,1],
[1,2,-1],
[1,1,2],
[-1,1,2],
[1,-1,2],
[1,1,-2],
],
'hex': [ # these are dummy numbers and are recalculated based on the above hex real slip systems.
[1,1,0],
[1,1,0],
[1,0,1],
[1,0,1],
[0,1,1],
[0,1,1],
[1,-1,0],
[1,-1,0],
[-1,0,1],
[-1,0,1],
[0,-1,1],
[0,-1,1],
[2,-1,1],
[1,-2,-1],
[1,1,2],
[2,1,1],
[1,2,-1],
[1,-1,2],
[2,1,-1],
[1,2,1],
[1,-1,-2],
[2,-1,-1],
[1,-2,1],
[1,1,-2],
],
}
slipdirection = { \
'fcc': [
[0,1,-1],
[-1,0,1],
[1,-1,0],
[0,-1,-1],
[1,0,1],
[-1,1,0],
[0,-1,1],
[-1,0,-1],
[1,1,0],
[0,1,1],
[1,0,-1],
[-1,-1,0],
],
'bcc': [
[1,-1,1],
[-1,-1,1],
[1,1,1],
[-1,1,1],
[-1,1,1],
[-1,-1,1],
[1,1,1],
[1,-1,1],
[-1,1,1],
[-1,1,-1],
[1,1,1],
[1,1,-1],
[-1,1,1],
[1,1,1],
[1,1,-1],
[1,-1,1],
[1,-1,1],
[1,1,-1],
[1,1,1],
[-1,1,1],
[1,1,-1],
[1,-1,1],
[-1,1,1],
[1,1,1],
],
'hex': [ # these are dummy numbers and are recalculated based on the above hex real slip systems.
[-1,1,1],
[1,-1,1],
[-1,-1,1],
[-1,1,1],
[-1,-1,1],
[1,-1,1],
[1,1,1],
[-1,-1,1],
[1,-1,1],
[1,1,1],
[1,1,1],
[-1,1,1],
[1,1,-1],
[1,1,-1],
[1,1,-1],
[1,-1,-1],
[1,-1,-1],
[1,-1,-1],
[1,-1,1],
[1,-1,1],
[1,-1,1],
[1,1,1],
[1,1,1],
[1,1,1],
],
}
def applyEulers(phi1,Phi,phi2,x):
"""transform x given in crystal coordinates to xbar returned in lab coordinates for Euler angles phi1,Phi,phi2"""
eulerRot = [[ math.cos(phi1)*math.cos(phi2) - math.cos(Phi)*math.sin(phi1)*math.sin(phi2),
-math.cos(phi1)*math.sin(phi2) - math.cos(Phi)*math.cos(phi2)*math.sin(phi1),
math.sin(Phi)*math.sin(phi1)
],
[ math.cos(phi2)*math.sin(phi1) + math.cos(Phi)*math.cos(phi1)*math.sin(phi2),
math.cos(Phi)*math.cos(phi1)*math.cos(phi2) - math.sin(phi1)*math.sin(phi2),
-math.sin(Phi)*math.cos(phi1)
],
[ math.sin(Phi)*math.sin(phi2),
math.sin(Phi)*math.cos(phi2),
math.cos(Phi)
]]
xbar = [0,0,0]
if len(x) == 3:
for i in range(3):
xbar[i] = sum([eulerRot[i][j]*x[j] for j in range(3)])
return xbar
def normalize(x):
norm = math.sqrt(sum([x[i]*x[i] for i in range(len(x))]))
return [x[i]/norm for i in range(len(x))]
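
The deleted applyEulers writes out the Bunge (z-x-z) rotation matrix element by element; the replacement code delegates this to damask.Orientation. For reference, the same matrix in compact numpy form (a sketch, Python 3 syntax):

import numpy as np

def bunge_matrix(phi1, Phi, phi2):
    # rotation matrix for Bunge Euler angles (radians); crystal -> lab,
    # as the deleted docstring states
    c1, s1 = np.cos(phi1), np.sin(phi1)
    C,  S  = np.cos(Phi),  np.sin(Phi)
    c2, s2 = np.cos(phi2), np.sin(phi2)
    return np.array([[ c1*c2 - C*s1*s2, -c1*s2 - C*c2*s1,  S*s1],
                     [ c2*s1 + C*c1*s2,  C*c1*c2 - s1*s2, -S*c1],
                     [ S*s2,             S*c2,             C   ]])

xbar = bunge_matrix(0.1, 0.2, 0.3) @ np.array([1., 0., 0.])       # lab-frame image of [100]
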
slipSystems = {
'fcc':
np.array([
# Slip direction Plane normal
[ 0, 1,-1, 1, 1, 1, ],
[-1, 0, 1, 1, 1, 1, ],
[ 1,-1, 0, 1, 1, 1, ],
[ 0,-1,-1, -1,-1, 1, ],
[ 1, 0, 1, -1,-1, 1, ],
[-1, 1, 0, -1,-1, 1, ],
[ 0,-1, 1, 1,-1,-1, ],
[-1, 0,-1, 1,-1,-1, ],
[ 1, 1, 0, 1,-1,-1, ],
[ 0, 1, 1, -1, 1,-1, ],
[ 1, 0,-1, -1, 1,-1, ],
[-1,-1, 0, -1, 1,-1, ],
],'f'),
'bcc':
np.array([
# Slip system <111>{110}
[ 1,-1, 1, 0, 1, 1, ],
[-1,-1, 1, 0, 1, 1, ],
[ 1, 1, 1, 0,-1, 1, ],
[-1, 1, 1, 0,-1, 1, ],
[-1, 1, 1, 1, 0, 1, ],
[-1,-1, 1, 1, 0, 1, ],
[ 1, 1, 1, -1, 0, 1, ],
[ 1,-1, 1, -1, 0, 1, ],
[-1, 1, 1, 1, 1, 0, ],
[-1, 1,-1, 1, 1, 0, ],
[ 1, 1, 1, -1, 1, 0, ],
[ 1, 1,-1, -1, 1, 0, ],
# Slip system <111>{112}
[-1, 1, 1, 2, 1, 1, ],
[ 1, 1, 1, -2, 1, 1, ],
[ 1, 1,-1, 2,-1, 1, ],
[ 1,-1, 1, 2, 1,-1, ],
[ 1,-1, 1, 1, 2, 1, ],
[ 1, 1,-1, -1, 2, 1, ],
[ 1, 1, 1, 1,-2, 1, ],
[-1, 1, 1, 1, 2,-1, ],
[ 1, 1,-1, 1, 1, 2, ],
[ 1,-1, 1, -1, 1, 2, ],
[-1, 1, 1, 1,-1, 2, ],
[ 1, 1, 1, 1, 1,-2, ],
],'f'),
'hex':
np.array([
# Basal systems <11.0>{00.1} (independent of c/a-ratio, Bravais notation (4 coordinate base))
[ 2, -1, -1, 0, 0, 0, 0, 1, ],
[-1, 2, -1, 0, 0, 0, 0, 1, ],
[-1, -1, 2, 0, 0, 0, 0, 1, ],
# 1st type prismatic systems <11.0>{10.0} (independent of c/a-ratio)
[ 2, -1, -1, 0, 0, 1, -1, 0, ],
[-1, 2, -1, 0, -1, 0, 1, 0, ],
[-1, -1, 2, 0, 1, -1, 0, 0, ],
# 2nd type prismatic systems <10.0>{11.0} -- a slip; plane normals independent of c/a-ratio
[ 0, 1, -1, 0, 2, -1, -1, 0, ],
[-1, 0, 1, 0, -1, 2, -1, 0, ],
[ 1, -1, 0, 0, -1, -1, 2, 0, ],
# 1st type 1st order pyramidal systems <11.0>{-11.1} -- plane normals depend on the c/a-ratio
[ 2, -1, -1, 0, 0, 1, -1, 1, ],
[-1, 2, -1, 0, -1, 0, 1, 1, ],
[-1, -1, 2, 0, 1, -1, 0, 1, ],
[ 1, 1, -2, 0, -1, 1, 0, 1, ],
[-2, 1, 1, 0, 0, -1, 1, 1, ],
[ 1, -2, 1, 0, 1, 0, -1, 1, ],
# pyramidal system: c+a slip <11.3>{-10.1} -- plane normals depend on the c/a-ratio
[ 2, -1, -1, 3, -1, 1, 0, 1, ],
[ 1, -2, 1, 3, -1, 1, 0, 1, ],
[-1, -1, 2, 3, 1, 0, -1, 1, ],
[-2, 1, 1, 3, 1, 0, -1, 1, ],
[-1, 2, -1, 3, 0, -1, 1, 1, ],
[ 1, 1, -2, 3, 0, -1, 1, 1, ],
[-2, 1, 1, 3, 1, -1, 0, 1, ],
[-1, 2, -1, 3, 1, -1, 0, 1, ],
[ 1, 1, -2, 3, -1, 0, 1, 1, ],
[ 2, -1, -1, 3, -1, 0, 1, 1, ],
[ 1, -2, 1, 3, 0, 1, -1, 1, ],
[-1, -1, 2, 3, 0, 1, -1, 1, ],
# pyramidal system: c+a slip <11.3>{-1-1.2} -- as for hexagonal ice (Castelnau et al. 1996, similar to twin system found below)
[ 2, -1, -1, 3, -2, 1, 1, 2, ], # sorted according to similar twin system
[-1, 2, -1, 3, 1, -2, 1, 2, ], # <11.3>{-1-1.2} shear = 2((c/a)^2-2)/(3 c/a)
[-1, -1, 2, 3, 1, 1, -2, 2, ],
[-2, 1, 1, 3, 2, -1, -1, 2, ],
[ 1, -2, 1, 3, -1, 2, -1, 2, ],
[ 1, 1, -2, 3, -1, -1, 2, 2, ],
],'f'),
}
# --------------------------------------------------------------------
# MAIN
@ -238,134 +108,166 @@ Add columns listing Schmid factors (and optional trace vector of selected system
""", version = scriptID)
latticeChoices = ('fcc','bcc','hex')
parser.add_option('-l','--lattice',
dest='lattice', type='choice', choices=('fcc','bcc','hex'), metavar='string',
help="type of lattice structure [%default] {fcc,bcc',hex}")
parser.add_option('--direction',
dest='forcedirection', type='int', nargs=3, metavar='int int int',
help='force direction in lab coordinates %default')
parser.add_option('-n','--normal',
dest='stressnormal', type='int', nargs=3, metavar='int int int',
help='stress plane normal in lab coordinates ')
parser.add_option('--trace',
dest='traceplane', type='int', nargs=3, metavar='int int int',
help='normal (in lab coordinates) of plane on which the plane trace of the Schmid factor(s) is reported')
dest = 'lattice', type = 'choice', choices = latticeChoices, metavar='string',
help = 'type of lattice structure [%default] {}'.format(latticeChoices))
parser.add_option('--covera',
dest='CoverA', type='float', metavar='float',
help='C over A ratio for hexagonal systems')
parser.add_option('-r','--rank',
dest='rank', type='int', nargs=3, metavar='int int int',
help="report trace of r'th highest Schmid factor [%default]")
dest = 'CoverA', type = 'float', metavar = 'float',
help = 'C over A ratio for hexagonal systems')
parser.add_option('-f', '--force',
dest = 'force',
type = 'float', nargs = 3, metavar = 'float float float',
help = 'force direction in lab frame [%default]')
parser.add_option('-n', '--normal',
dest = 'normal',
type = 'float', nargs = 3, metavar = 'float float float',
help = 'stress plane normal in lab frame [%default]')
parser.add_option('-e', '--eulers',
dest='eulers', metavar='string',
help='Euler angles label')
dest = 'eulers',
type = 'string', metavar = 'string',
help = 'Euler angles label')
parser.add_option('-d', '--degrees',
dest='degrees', action='store_true',
help='Euler angles are given in degrees [%default]')
parser.set_defaults(lattice = 'fcc')
parser.set_defaults(forcedirection = [0, 0, 1])
parser.set_defaults(stressnormal = None)
parser.set_defaults(traceplane = None)
parser.set_defaults(rank = 0)
parser.set_defaults(CoverA = 1.587)
parser.set_defaults(eulers = 'eulerangles')
dest = 'degrees',
action = 'store_true',
help = 'Euler angles are given in degrees [%default]')
parser.add_option('-m', '--matrix',
dest = 'matrix',
type = 'string', metavar = 'string',
help = 'orientation matrix label')
parser.add_option('-a',
dest = 'a',
type = 'string', metavar = 'string',
help = 'crystal frame a vector label')
parser.add_option('-b',
dest = 'b',
type = 'string', metavar = 'string',
help = 'crystal frame b vector label')
parser.add_option('-c',
dest = 'c',
type = 'string', metavar = 'string',
help = 'crystal frame c vector label')
parser.add_option('-q', '--quaternion',
dest = 'quaternion',
type = 'string', metavar = 'string',
help = 'quaternion label')
(options,filenames) = parser.parse_args()
parser.set_defaults(force = (0.0,0.0,1.0),
normal = None,
lattice = latticeChoices[0],
CoverA = math.sqrt(8./3.),
degrees = False,
)
options.forcedirection = normalize(options.forcedirection)
if options.stressnormal:
if abs(sum([options.forcedirection[i] * options.stressnormal[i] for i in range(3)])) < 1e-3:
options.stressnormal = normalize(options.stressnormal)
else:
parser.error('stress plane normal not orthogonal to force direction')
else:
options.stressnormal = options.forcedirection
if options.traceplane:
options.traceplane = normalize(options.traceplane)
options.rank = min(options.rank,Nslipsystems[options.lattice])
datainfo = { # list of requested labels per datatype
'vector': {'len':3,
'label':[]},
}
datainfo['vector']['label'] += [options.eulers]
(options, filenames) = parser.parse_args()
toRadians = math.pi/180.0 if options.degrees else 1.0 # rescale degrees to radians
# Convert 4 Miller indices notation of hex to orthogonal 3 Miller indices notation
if options.lattice=='hex':
for i in range(Nslipsystems[options.lattice]):
slipnormal[options.lattice][i][0]=slipnormal_temp[i][0]
slipnormal[options.lattice][i][1]=(slipnormal_temp[i][0]+2.0*slipnormal_temp[i][1])/math.sqrt(3.0)
slipnormal[options.lattice][i][2]=slipnormal_temp[i][3]/options.CoverA
slipdirection[options.lattice][i][0]=slipdirection_temp[i][0]*1.5 # direction [uvtw]->[3u/2 (u+2v)*sqrt(3)/2 w*(c/a)] ,
slipdirection[options.lattice][i][1]=(slipdirection_temp[i][0]+2.0*slipdirection_temp[i][1])*(0.5*math.sqrt(3.0))
slipdirection[options.lattice][i][2]=slipdirection_temp[i][3]*options.CoverA
for i in range(Nslipsystems[options.lattice]):
slipnormal[options.lattice][i]=normalize(slipnormal[options.lattice][i])
slipdirection[options.lattice][i]=normalize(slipdirection[options.lattice][i])
force = np.array(options.force)
force /= np.linalg.norm(force)
# --- loop over input files -------------------------------------------------------------------------
if options.normal:
damask.util.croak('got normal')
normal = np.array(options.normal)
normal /= np.linalg.norm(normal)
if abs(np.dot(force,normal)) > 1e-3:
parser.error('stress plane normal not orthogonal to force direction')
else:
normal = force
input = [options.eulers is not None,
options.a is not None and \
options.b is not None and \
options.c is not None,
options.matrix is not None,
options.quaternion is not None,
]
if np.sum(input) != 1: parser.error('needs exactly one input format.')
(label,dim,inputtype) = [(options.eulers,3,'eulers'),
([options.a,options.b,options.c],[3,3,3],'frame'),
(options.matrix,9,'matrix'),
(options.quaternion,4,'quaternion'),
][np.where(input)[0][0]] # select input label that was requested
c_direction = np.zeros((len(slipSystems[options.lattice]),3),'f')
c_normal = np.zeros_like(c_direction)
if options.lattice in latticeChoices[:2]:
c_direction = slipSystems[options.lattice][:,:3]
c_normal = slipSystems[options.lattice][:,3:]
elif options.lattice == latticeChoices[2]:
# convert 4 Miller index notation of hex to orthogonal 3 Miller index notation
for i in xrange(len(c_direction)):
c_direction[i] = np.array([slipSystems['hex'][i,0]*1.5,
(slipSystems['hex'][i,0] + 2.*slipSystems['hex'][i,1])*0.5*np.sqrt(3),
slipSystems['hex'][i,3]*options.CoverA,
])
c_normal[i] = np.array([slipSystems['hex'][i,4],
(slipSystems['hex'][i,4] + 2.*slipSystems['hex'][i,5])/np.sqrt(3),
slipSystems['hex'][i,7]/options.CoverA,
])
c_direction /= np.tile(np.linalg.norm(c_direction,axis=1),(3,1)).T
c_normal /= np.tile(np.linalg.norm(c_normal ,axis=1),(3,1)).T
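
The hex branch converts 4-index Miller-Bravais vectors to a 3-index orthogonal frame: [uvtw] -> [3u/2, (u+2v)*sqrt(3)/2, w*c/a] for directions and (hkil) -> [h, (h+2k)/sqrt(3), l/(c/a)] for plane normals, normalizing afterwards. A self-contained check on one basal system (ideal c/a assumed):

import numpy as np

covera = np.sqrt(8./3.)                               # ideal c/a ratio
d4 = np.array([2., -1., -1., 0.])                     # direction [2 -1 -1 0]
n4 = np.array([0.,  0.,  0., 1.])                     # plane (0 0 0 1)

d3 = np.array([d4[0]*1.5, (d4[0] + 2.*d4[1])*0.5*np.sqrt(3.), d4[3]*covera])
n3 = np.array([n4[0],     (n4[0] + 2.*n4[1])/np.sqrt(3.),     n4[3]/covera])

d3 /= np.linalg.norm(d3)
n3 /= np.linalg.norm(n3)
print(np.dot(d3, n3))                                 # ~0: slip direction lies in the plane
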
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,buffered = False)
except:
continue
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
table.head_read() # read ASCII header info
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
# ------------------------------------------ read header ------------------------------------------
key = '1_%s'%datainfo['vector']['label'][0]
if key not in table.labels:
file['croak'].write('column %s not found...\n'%key)
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
if not np.all(table.label_dimension(label) == dim):
damask.util.croak('input {} does not have dimension {}.'.format(label,dim))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
else:
column = table.labels.index(key) # remember columns of requested data
column = table.label_index(label)
# ------------------------------------------ assemble header ---------------------------------------
table.labels_append(['%i_S(%i_%i_%i)[%i_%i_%i]'%(i+1,
slipnormal[options.lattice][i][0],
slipnormal[options.lattice][i][1],
slipnormal[options.lattice][i][2],
slipdirection[options.lattice][i][0],
slipdirection[options.lattice][i][1],
slipdirection[options.lattice][i][2],
) for i in range(Nslipsystems[options.lattice])])
if options.traceplane:
if options.rank > 0:
table.labels_append('trace_x trace_y trace_z system')
else:
table.labels_append(['(%i)tx\tty\ttz'%(i+1) for i in range(Nslipsystems[options.lattice])])
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(['{id}_'
'S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
'({normal[0]:.1g}_{normal[1]:.1g}_{normal[2]:.1g})'\
.format( id = i+1,
normal = theNormal,
direction = theDirection,
) for i,(theNormal,theDirection) in enumerate(zip(c_normal,c_direction))])
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
[phi1,Phi,phi2] = Eulers=toRadians*np.array(map(\
float,table.data[column:column+datainfo['vector']['len']]))
S = [ sum( [applyEulers(phi1,Phi,phi2,normalize( \
slipnormal[options.lattice][slipsystem]))[i]*options.stressnormal[i] for i in range(3)] ) * \
sum( [applyEulers(phi1,Phi,phi2,normalize( \
slipdirection[options.lattice][slipsystem]))[i]*options.forcedirection[i] for i in range(3)] ) \
for slipsystem in range(Nslipsystems[options.lattice]) ]
table.data_append(S)
if options.traceplane:
trace = [np.cross(options.traceplane,applyEulers(phi1,Phi,phi2,normalize(slipnormal[options.lattice][slipsystem]))) \
for slipsystem in range(Nslipsystems[options.lattice]) ]
if options.rank == 0:
table.data_append('\t'.join(map(lambda x:'%f\t%f\t%f'%(x[0],x[1],x[2]),trace)))
elif options.rank > 0:
SabsSorted = sorted([(abs(S[i]),i) for i in range(len(S))])
table.data_append('\t'.join(map(str,trace[SabsSorted[-options.rank][1]])) + '\t%i'%(1+SabsSorted[-options.rank][1]))
if inputtype == 'eulers':
o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians,)
elif inputtype == 'matrix':
o = damask.Orientation(matrix = np.array(map(float,table.data[column:column+9])).reshape(3,3).transpose(),)
elif inputtype == 'frame':
o = damask.Orientation(matrix = np.array(map(float,table.data[column[0]:column[0]+3] + \
table.data[column[1]:column[1]+3] + \
table.data[column[2]:column[2]+3])).reshape(3,3),)
elif inputtype == 'quaternion':
o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4])),)
rotForce = o.quaternion.conjugated() * force
rotNormal = o.quaternion.conjugated() * normal
table.data_append(np.abs(np.sum(c_direction*rotForce,axis=1) * np.sum(c_normal*rotNormal,axis=1)))
outputAlive = table.data_write() # output processed line
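
Per data line the new code computes all Schmid factors at once: m = |(d.f)(n.N)|, with the lab-frame force direction f and stress-plane normal N rotated into the crystal frame via the conjugated orientation quaternion. The same arithmetic with an explicit orientation matrix R (crystal-to-lab; identity and a single fcc system here, values illustrative):

import numpy as np

R = np.eye(3)                                         # orientation matrix, crystal -> lab
force  = np.array([0., 0., 1.])                       # loading direction, lab frame
normal = force                                        # uniaxial: plane normal == force

c_direction = np.array([[0., 1., -1.]]) / np.sqrt(2.) # one fcc slip direction
c_normal    = np.array([[1., 1.,  1.]]) / np.sqrt(3.) # its slip plane normal

rotForce  = R.T @ force                               # rotate lab vectors into crystal frame
rotNormal = R.T @ normal
S = np.abs(np.sum(c_direction*rotForce, axis=1) * np.sum(c_normal*rotNormal, axis=1))
print(S)                                              # [0.40824829] = 1/sqrt(6)
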
# ------------------------------------------ output finalization -----------------------------------
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
table.close() # close ASCII tables

View File

@ -20,9 +20,9 @@ Average each data block of size 'packing' into single values thus reducing the f
""", version = scriptID)
parser.add_option('-c','--coordinates',
dest = 'coords',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'column heading for coordinates [%default]')
help = 'column label of coordinates [%default]')
parser.add_option('-p','--packing',
dest = 'packing',
type = 'int', nargs = 3, metavar = 'int int int',
@ -39,7 +39,7 @@ parser.add_option('-s', '--size',
dest = 'size',
type = 'float', nargs = 3, metavar = 'float float float',
help = 'size in x,y,z [autodetect]')
parser.set_defaults(coords = 'ipinitialcoord',
parser.set_defaults(pos = 'pos',
packing = (2,2,2),
shift = (0,0,0),
grid = (0,0,0),
@ -59,11 +59,10 @@ if any(shift != 0): prefix += 'shift{:+}{:+}{:+}_'.format(*shift)
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.join(os.path.dirname(name),
prefix+os.path.basename(name)) if name else name,
buffered = False)
try: table = damask.ASCIItable(name = name,
outname = os.path.join(os.path.dirname(name),
prefix+os.path.basename(name)) if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
@ -75,10 +74,9 @@ for name in filenames:
errors = []
remarks = []
colCoord = None
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
else: colCoord = table.label_index(options.coords)
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
else: colCoord = table.label_index(options.pos)
if remarks != []: damask.util.croak(remarks)
if errors != []:
@ -86,7 +84,6 @@ for name in filenames:
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))

View File

@ -37,10 +37,14 @@ if options.label is None:
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = options.label+'_averaged_'+name if name else name,
buffered = False)
damask.util.croak(name)
try: table = damask.ASCIItable(name = name,
outname = os.path.join(
os.path.split(name)[0],
options.label+'_averaged_'+os.path.split(name)[1]
) if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)

View File

@ -19,84 +19,83 @@ to resolution*packing.
""", version = scriptID)
parser.add_option('-c','--coordinates', dest='coords', metavar='string',
help='column heading for coordinates [%default]')
parser.add_option('-p','--packing', dest='packing', type='int', nargs=3, metavar='int int int',
help='dimension of packed group [%default]')
parser.add_option('-g','--grid', dest='resolution', type='int', nargs=3, metavar='int int int',
help='resolution in x,y,z [autodetect]')
parser.add_option('-s','--size', dest='dimension', type='float', nargs=3, metavar='int int int',
help='dimension in x,y,z [autodetect]')
parser.set_defaults(coords = 'ipinitialcoord')
parser.set_defaults(packing = (2,2,2))
parser.set_defaults(grid = (0,0,0))
parser.set_defaults(size = (0.0,0.0,0.0))
parser.add_option('-c','--coordinates',
dest = 'pos', metavar = 'string',
help = 'column label of coordinates [%default]')
parser.add_option('-p','--packing',
dest = 'packing', type = 'int', nargs = 3, metavar = 'int int int',
help = 'dimension of packed group [%default]')
parser.add_option('-g','--grid',
dest = 'resolution', type = 'int', nargs = 3, metavar = 'int int int',
help = 'resolution in x,y,z [autodetect]')
parser.add_option('-s','--size',
dest = 'dimension', type = 'float', nargs = 3, metavar = 'int int int',
help = 'dimension in x,y,z [autodetect]')
parser.set_defaults(pos = 'pos',
packing = (2,2,2),
grid = (0,0,0),
size = (0.0,0.0,0.0),
)
(options,filenames) = parser.parse_args()
options.packing = np.array(options.packing)
prefix = 'blowUp%ix%ix%i_'%(options.packing[0],options.packing[1],options.packing[2])
prefix = 'blowUp{}x{}x{}_'.format(*options.packing)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.join(os.path.dirname(name),
prefix+ \
os.path.basename(name)) if name else name,
buffered = False)
try: table = damask.ASCIItable(name = name,
outname = os.path.join(os.path.dirname(name),
prefix+os.path.basename(name)) if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
errors = []
# ------------------------------------------ sanity checks ----------------------------------------
if table.label_dimension(options.coords) != 3:
damask.util.croak('coordinates {} are not a vector.'.format(options.coords))
errors = []
remarks = []
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
else: colCoord = table.label_index(options.pos)
colElem = table.label_index('elem')
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
else:
coordCol = table.label_index(options.coords)
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
coords = [{},{},{}]
for i in xrange(len(table.data)):
for j in xrange(3):
coords[j][str(table.data[i,coordCol+j])] = True
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d') # size from bounding box, corrected for cell-centeredness
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
table.data_readArray(options.pos)
table.data_rewind()
coords = [np.unique(table.data[:,i]) for i in xrange(3)]
mincorner = np.array(map(min,coords))
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
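
Grid and size autodetection now reduces each coordinate column to its unique values: the per-axis count gives the grid, and the bounding box is rescaled by n/(n-1) because positions are cell-centered. In isolation, on a synthetic 4x2x1 grid:

import numpy as np

pos = np.array([[x, y, 0.5] for y in (0.25, 0.75) for x in (0.125, 0.375, 0.625, 0.875)])

coords = [np.unique(pos[:, i]) for i in range(3)]
mincorner = np.array(list(map(min, coords)))
maxcorner = np.array(list(map(max, coords)))
grid = np.array(list(map(len, coords)), 'i')                         # [4 2 1]
size = grid/np.maximum(np.ones(3, 'd'), grid - 1.0) * (maxcorner - mincorner)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))  # fix degenerate axis
print(grid, size)                                                    # [4 2 1] [1. 1. 0.25]
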
packing = np.array(options.packing,'i')
outSize = grid*packing
# ------------------------------------------ assemble header ---------------------------------------
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data -------------------------------------------
table.data_rewind()
data = np.zeros(outSize.tolist()+[len(table.labels)])
p = np.zeros(3,'i')
@ -107,15 +106,15 @@ for name in filenames:
table.data_read()
data[d[0]:d[0]+packing[0],
d[1]:d[1]+packing[1],
d[2]:d[2]+packing[2],
d[2]:d[2]+packing[2],
: ] = np.tile(np.array(table.data_asFloat(),'d'),packing.tolist()+[1]) # tile to match blowUp voxel size
elementSize = size/grid/packing
elem = 1
for c in xrange(outSize[2]):
for b in xrange(outSize[1]):
for a in xrange(outSize[0]):
data[a,b,c,coordCol:coordCol+3] = [a+0.5,b+0.5,c+0.5]*elementSize
data[a,b,c,table.label_index('elem')] = elem
data[a,b,c,colCoord:colCoord+3] = [a+0.5,b+0.5,c+0.5]*elementSize
if colElem != -1: data[a,b,c,colElem] = elem
table.data = data[a,b,c,:].tolist()
outputAlive = table.data_write() # output processed line
elem += 1

142
processing/post/histogram.py Executable file
View File

@ -0,0 +1,142 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys
import numpy as np
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Generate histogram of N bins in given data range.
""", version = scriptID)
parser.add_option('-d','--data',
dest = 'data',
type = 'string', metavar = 'string',
help = 'column heading for data')
parser.add_option('-w','--weights',
dest = 'weights',
type = 'string', metavar = 'string',
help = 'column heading for weights')
parser.add_option('--range',
dest = 'range',
type = 'float', nargs = 2, metavar = 'float float',
help = 'data range of histogram [min - max]')
parser.add_option('-N',
dest = 'N',
type = 'int', metavar = 'int',
help = 'number of bins')
parser.add_option('--density',
dest = 'density',
action = 'store_true',
help = 'report probability density')
parser.add_option('--logarithmic',
dest = 'log',
action = 'store_true',
help = 'logarithmically spaced bins')
parser.set_defaults(data = None,
weights = None,
range = None,
N = None,
density = False,
log = False,
)
(options,filenames) = parser.parse_args()
if not options.data: parser.error('no data specified.')
if not options.N: parser.error('no bin number specified.')
if options.log:
def forward(x):
return np.log(x)
def reverse(x):
return np.exp(x)
else:
def forward(x):
return x
def reverse(x):
return x
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.data) != 1: errors.append('data {} are not scalar.'.format(options.data))
if options.weights and \
table.label_dimension(options.data) != 1: errors.append('weights {} are not scalar.'.format(options.weights))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --------------- read data ----------------------------------------------------------------
table.data_readArray([options.data,options.weights])
# --------------- auto range ---------------------------------------------------------------
if options.range is None:
rangeMin,rangeMax = min(table.data[:,0]),max(table.data[:,0])
else:
rangeMin,rangeMax = min(options.range),max(options.range)
# --------------- bin data ----------------------------------------------------------------
count,edges = np.histogram(table.data[:,0],
bins = reverse(forward(rangeMin) + np.arange(options.N+1) *
(forward(rangeMax)-forward(rangeMin))/options.N),
range = (rangeMin,rangeMax),
weights = None if options.weights is None else table.data[:,1],
density = options.density,
)
bincenter = reverse(forward(rangeMin) + (0.5+np.arange(options.N)) *
(forward(rangeMax)-forward(rangeMin))/options.N) # determine center of bins
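
The forward/reverse pair makes one binning code path serve both linear and logarithmic histograms: edges are spaced uniformly in transformed space and mapped back. A compact check with assumed range 1-1000 and 3 bins:

import numpy as np

N, rangeMin, rangeMax = 3, 1.0, 1000.0
forward, reverse = np.log, np.exp                      # the --logarithmic case

edges = reverse(forward(rangeMin)
                + np.arange(N+1) * (forward(rangeMax) - forward(rangeMin))/N)
centers = reverse(forward(rangeMin)
                  + (0.5 + np.arange(N)) * (forward(rangeMax) - forward(rangeMin))/N)
print(edges)    # [   1.   10.  100. 1000.]: decade-spaced edges
print(centers)  # geometric bin centers: ~[3.162 31.623 316.228]
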
# ------------------------------------------ assemble header ---------------------------------------
table.info_clear()
table.info_append([scriptID + '\t' + ' '.join(sys.argv[1:]),
scriptID + ':\t' +
'data range {} -- {}'.format(rangeMin,rangeMax) +
(' (log)' if options.log else ''),
])
table.labels_clear()
table.labels_append(['bincenter','count'])
table.head_write()
# ------------------------------------------ output result -----------------------------------------
table.data = np.squeeze(np.dstack((bincenter,count)))
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables

View File

@ -1,144 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,shutil
import numpy as np
import damask
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# -----------------------------
# MAIN FUNCTION STARTS HERE
# -----------------------------
# --- input parsing
parser = OptionParser(usage='%prog [options] resultfile', description = """
Create vtk files for the (deformed) geometry that belongs to a .t16 (MSC.Marc) results file.
""", version = scriptID)
parser.add_option('-d','--dir', dest='dir', \
help='name of subdirectory to hold output [%default]')
parser.add_option('-r','--range', dest='range', type='int', nargs=3, \
help='range of positions (or increments) to output (start, end, step) [all]')
parser.add_option('--increments', action='store_true', dest='getIncrements', \
help='switch to increment range [%default]')
parser.add_option('-t','--type', dest='type', type='choice', choices=['ipbased','nodebased'], \
help='processed geometry type [ipbased and nodebased]')
parser.set_defaults(dir = 'vtk')
parser.set_defaults(getIncrements= False)
(options, files) = parser.parse_args()
# --- basic sanity checks
if files == []:
parser.print_help()
parser.error('no file specified...')
filename = os.path.splitext(files[0])[0]
if not os.path.exists(filename+'.t16'):
parser.print_help()
parser.error('invalid file "%s" specified...'%filename+'.t16')
if not options.type :
options.type = ['nodebased', 'ipbased']
else:
options.type = [options.type]
# --- more sanity checks
sys.path.append(damask.solver.Marc().libraryPath('../../'))
try:
import py_post
except:
print('error: no valid Mentat release found')
sys.exit(-1)
# --------------------------- open results file and initialize mesh ----------
p = py_post.post_open(filename+'.t16')
p.moveto(0)
Nnodes = p.nodes()
Nincrements = p.increments() - 1 # t16 contains one "virtual" increment (at 0)
if damask.core.mesh.mesh_init_postprocessing(filename+'.mesh') > 0:
print('error: init not successful')
sys.exit(-1)
Ncellnodes = damask.core.mesh.mesh_get_Ncellnodes()
unitlength = damask.core.mesh.mesh_get_unitlength()
# --------------------------- create output dir --------------------------------
dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
if not os.path.isdir(dirname):
os.mkdir(dirname,0755)
# --------------------------- get positions --------------------------------
incAtPosition = {}
positionOfInc = {}
for position in range(Nincrements):
p.moveto(position+1)
incAtPosition[position] = p.increment # remember "real" increment at this position
positionOfInc[p.increment] = position # remember position of "real" increment
if not options.range:
options.getIncrements = False
locations = range(Nincrements) # process all positions
else:
options.range = list(options.range) # convert to list
if options.getIncrements:
locations = [positionOfInc[x] for x in range(options.range[0],options.range[1]+1,options.range[2])
if x in positionOfInc]
else:
locations = range( max(0,options.range[0]),
min(Nincrements,options.range[1]+1),
options.range[2] )
increments = [incAtPosition[x] for x in locations] # build list of increments to process
# --------------------------- loop over positions --------------------------------
for incCount,position in enumerate(locations): # walk through locations
p.moveto(position+1) # wind to correct position
# --- get displacements
node_displacement = [[0,0,0] for i in range(Nnodes)]
for n in range(Nnodes):
if p.node_displacements():
node_displacement[n] = map(lambda x:x*unitlength,list(p.node_displacement(n)))
c = damask.core.mesh.mesh_build_cellnodes(np.array(node_displacement).T,Ncellnodes)
cellnode_displacement = [[c[i][n] for i in range(3)] for n in range(Ncellnodes)]
# --- append displacements to corresponding files
for geomtype in options.type:
outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")\
+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
print outFilename
shutil.copyfile('%s_%s.vtk'%(filename,geomtype),outFilename)
with open(outFilename,'a') as myfile:
myfile.write("POINT_DATA %i\n"%{'nodebased':Nnodes,'ipbased':Ncellnodes}[geomtype])
myfile.write("VECTORS displacement double\n")
coordinates = {'nodebased':node_displacement,'ipbased':cellnode_displacement}[geomtype]
for n in range({'nodebased':Nnodes,'ipbased':Ncellnodes}[geomtype]):
myfile.write("%.8e %.8e %.8e\n"%(coordinates[n][0],coordinates[n][1],coordinates[n][2]))
# --------------------------- DONE --------------------------------

View File

@ -1,421 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,string,re,time
from optparse import OptionParser, OptionGroup
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# -----------------------------
def ParseOutputFormat(filename,homogID,crystID,phaseID):
"""parse .output* files in order to get a list of outputs"""
myID = {'Homogenization': homogID,
'Crystallite': crystID,
'Constitutive': phaseID,
}
format = {}
for what in ['Homogenization','Crystallite','Constitutive']:
content = []
format[what] = {'outputs':{},'specials':{'brothers':[]}}
for prefix in ['']+map(str,range(1,17)):
if os.path.exists(prefix+filename+'.output'+what):
try:
file = open(prefix+filename+'.output'+what)
content = file.readlines()
file.close()
break
except:
pass
if content == []: continue # nothing found...
tag = ''
tagID = 0
for line in content:
if re.match("\s*$",line) or re.match("#",line): # skip blank lines and comments
continue
m = re.match("\[(.+)\]",line) # look for block indicator
if m: # next section
tag = m.group(1)
tagID += 1
format[what]['specials']['brothers'].append(tag)
if tag == myID[what] or (myID[what].isdigit() and tagID == int(myID[what])):
format[what]['specials']['_id'] = tagID
format[what]['outputs'] = []
tag = myID[what]
else: # data from section
if tag == myID[what]:
(output,length) = line.split()
output.lower()
if length.isdigit():
length = int(length)
if re.match("\((.+)\)",output): # special data, e.g. (Ngrains)
format[what]['specials'][output] = length
elif length > 0:
format[what]['outputs'].append([output,length])
if '_id' not in format[what]['specials']:
print "\nsection '%s' not found in <%s>"%(myID[what], what)
print '\n'.join(map(lambda x:' [%s]'%x, format[what]['specials']['brothers']))
return format
# -----------------------------
def ParsePostfile(p,filename, outputFormat, legacyFormat):
"""
parse postfile in order to get position and labels of outputs
needs "outputFormat" for mapping of output names to postfile output indices
"""
startVar = {True: 'GrainCount',
False:'HomogenizationCount'}
# --- build statistics
stat = { \
'IndexOfLabel': {}, \
'Title': p.title(), \
'Extrapolation': p.extrapolate, \
'NumberOfIncrements': p.increments() - 1, \
'NumberOfNodes': p.nodes(), \
'NumberOfNodalScalars': p.node_scalars(), \
'LabelOfNodalScalar': [None]*p.node_scalars() , \
'NumberOfElements': p.elements(), \
'NumberOfElementalScalars': p.element_scalars(), \
'LabelOfElementalScalar': [None]*p.element_scalars() , \
'NumberOfElementalTensors': p.element_tensors(), \
'LabelOfElementalTensor': [None]*p.element_tensors(), \
}
# --- find labels
for labelIndex in range(stat['NumberOfNodalScalars']):
label = p.node_scalar_label(labelIndex)
stat['IndexOfLabel'][label] = labelIndex
stat['LabelOfNodalScalar'][labelIndex] = label
for labelIndex in range(stat['NumberOfElementalScalars']):
label = p.element_scalar_label(labelIndex)
stat['IndexOfLabel'][label] = labelIndex
stat['LabelOfElementalScalar'][labelIndex] = label
for labelIndex in range(stat['NumberOfElementalTensors']):
label = p.element_tensor_label(labelIndex)
stat['IndexOfLabel'][label] = labelIndex
stat['LabelOfElementalTensor'][labelIndex] = label
if 'User Defined Variable 1' in stat['IndexOfLabel']: # output format without dedicated names?
stat['IndexOfLabel'][startVar[legacyFormat]] = stat['IndexOfLabel']['User Defined Variable 1'] # adjust first named entry
if startVar[legacyFormat] in stat['IndexOfLabel']: # does the result file contain relevant user defined output at all?
startIndex = stat['IndexOfLabel'][startVar[legacyFormat]]
stat['LabelOfElementalScalar'][startIndex] = startVar[legacyFormat]
# We now have to find a mapping for each output label as defined in the .output* files to the output position in the post file
# Since we know where the user defined outputs start ("startIndex"), we can simply assign increasing indices to the labels
# given in the .output* file
offset = 1
if legacyFormat:
stat['LabelOfElementalScalar'][startIndex + offset] = startVar[not legacyFormat] # add HomogenizationCount as second
offset += 1
for (name,N) in outputFormat['Homogenization']['outputs']:
for i in range(N):
label = {False: '%s'%( name),
True:'%i_%s'%(i+1,name)}[N > 1]
stat['IndexOfLabel'][label] = startIndex + offset
stat['LabelOfElementalScalar'][startIndex + offset] = label
offset += 1
if not legacyFormat:
stat['IndexOfLabel'][startVar[not legacyFormat]] = startIndex + offset
stat['LabelOfElementalScalar'][startIndex + offset] = startVar[not legacyFormat] # add GrainCount
offset += 1
if '(ngrains)' in outputFormat['Homogenization']['specials']:
for grain in range(outputFormat['Homogenization']['specials']['(ngrains)']):
stat['IndexOfLabel']['%i_CrystalliteCount'%(grain+1)] = startIndex + offset # report crystallite count
stat['LabelOfElementalScalar'][startIndex + offset] = '%i_CrystalliteCount'%(grain+1) # add GrainCount
offset += 1
for (name,N) in outputFormat['Crystallite']['outputs']: # add crystallite outputs
for i in range(N):
label = {False: '%i_%s'%(grain+1, name),
True:'%i_%i_%s'%(grain+1,i+1,name)}[N > 1]
stat['IndexOfLabel'][label] = startIndex + offset
stat['LabelOfElementalScalar'][startIndex + offset] = label
offset += 1
stat['IndexOfLabel']['%i_ConstitutiveCount'%(grain+1)] = startIndex + offset # report constitutive count
stat['LabelOfElementalScalar'][startIndex + offset] = '%i_ConstitutiveCount'%(grain+1) # add GrainCount
offset += 1
for (name,N) in outputFormat['Constitutive']['outputs']: # add constitutive outputs
for i in range(N):
label = {False: '%i_%s'%(grain+1, name),
True:'%i_%i_%s'%(grain+1,i+1,name)}[N > 1]
stat['IndexOfLabel'][label] = startIndex + offset
try:
stat['LabelOfElementalScalar'][startIndex + offset] = label
except IndexError:
print 'trying to assign %s at position %i+%i'%(label,startIndex,offset)
sys.exit(1)
offset += 1
return stat
# -----------------------------
def GetIncrementLocations(p,Nincrements,options):
"""get mapping between positions in postfile and increment number"""
incAtPosition = {}
positionOfInc = {}
for position in range(Nincrements):
p.moveto(position+1)
incAtPosition[position] = p.increment # remember "real" increment at this position
positionOfInc[p.increment] = position # remember position of "real" increment
if not options.range:
options.getIncrements = False
locations = range(Nincrements) # process all positions
else:
options.range = list(options.range) # convert to list
if options.getIncrements:
locations = [positionOfInc[x] for x in range(options.range[0],options.range[1]+1,options.range[2])
if x in positionOfInc]
else:
locations = range( max(0,options.range[0]),
min(Nincrements,options.range[1]+1),
options.range[2] )
increments = [incAtPosition[x] for x in locations] # build list of increments to process
return [increments,locations]
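
GetIncrementLocations winds through the file once to build both lookup directions, so a user-supplied increment range can be resolved to file positions while silently skipping increments that were never written. The resolution step alone (increment numbers invented):

incAtPosition = {0: 0, 1: 5, 2: 10, 3: 20}             # position -> "real" increment
positionOfInc = {inc: pos for pos, inc in incAtPosition.items()}

# increment range 5..20, step 5: keep only increments present in the file
locations = [positionOfInc[x] for x in range(5, 21, 5) if x in positionOfInc]
print(locations)                                       # [1, 2, 3]: increments 5, 10, 20
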
# -----------------------------
def SummarizePostfile(stat,where=sys.stdout):
where.write('\n\n')
where.write('title:\t%s'%stat['Title'] + '\n\n')
where.write('extrapolation:\t%s'%stat['Extrapolation'] + '\n\n')
where.write('increments:\t%i'%(stat['NumberOfIncrements']) + '\n\n')
where.write('nodes:\t%i'%stat['NumberOfNodes'] + '\n\n')
where.write('elements:\t%i'%stat['NumberOfElements'] + '\n\n')
where.write('nodal scalars:\t%i'%stat['NumberOfNodalScalars'] + '\n\n '\
+'\n '.join(stat['LabelOfNodalScalar']) + '\n\n')
where.write('elemental scalars:\t%i'%stat['NumberOfElementalScalars'] + '\n\n '\
+ '\n '.join(stat['LabelOfElementalScalar']) + '\n\n')
where.write('elemental tensors:\t%i'%stat['NumberOfElementalTensors'] + '\n\n '\
+ '\n '.join(stat['LabelOfElementalTensor']) + '\n\n')
return True
# -----------------------------
def SummarizeOutputfile(format,where=sys.stdout):
where.write('\nUser Defined Outputs')
for what in format.keys():
where.write('\n\n %s:'%what)
for output in format[what]['outputs']:
where.write('\n %s'%output)
return True
# -----------------------------
def writeHeader(myfile,stat,geomtype):
myfile.write('2\theader\n')
myfile.write(string.replace('$Id$','\n','\\n')+
'\t' + ' '.join(sys.argv[1:]) + '\n')
if geomtype == 'nodebased':
myfile.write('node')
for i in range(stat['NumberOfNodalScalars']):
myfile.write('\t%s'%''.join(stat['LabelOfNodalScalar'][i].split()))
elif geomtype == 'ipbased':
myfile.write('elem\tip')
for i in range(stat['NumberOfElementalScalars']):
myfile.write('\t%s'%''.join(stat['LabelOfElementalScalar'][i].split()))
myfile.write('\n')
return True
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Extract data from a .t16 (MSC.Marc) results file.
""", version = scriptID)
parser.add_option('-i','--info', action='store_true', dest='info', \
help='list contents of resultfile [%default]')
parser.add_option('-l','--legacy', action='store_true', dest='legacy', \
help='legacy user result block (starts with GrainCount) [%default]')
parser.add_option('-d','--dir', dest='dir', \
help='name of subdirectory to hold output [%default]')
parser.add_option('-r','--range', dest='range', type='int', nargs=3, \
help='range of positions (or increments) to output (start, end, step) [all]')
parser.add_option('--increments', action='store_true', dest='getIncrements', \
help='switch to increment range [%default]')
parser.add_option('-t','--type', dest='type', type='choice', choices=['ipbased','nodebased'], \
help='processed geometry type [ipbased and nodebased]')
group_material = OptionGroup(parser,'Material identifier')
group_material.add_option('--homogenization', dest='homog', \
help='homogenization identifier (as string or integer [%default])', metavar='<ID>')
group_material.add_option('--crystallite', dest='cryst', \
help='crystallite identifier (as string or integer [%default])', metavar='<ID>')
group_material.add_option('--phase', dest='phase', \
help='phase identifier (as string or integer [%default])', metavar='<ID>')
parser.add_option_group(group_material)
parser.set_defaults(info = False)
parser.set_defaults(legacy = False)
parser.set_defaults(dir = 'vtk')
parser.set_defaults(getIncrements= False)
parser.set_defaults(homog = '1')
parser.set_defaults(cryst = '1')
parser.set_defaults(phase = '1')
(options, files) = parser.parse_args()
# --- sanity checks
if files == []:
parser.print_help()
parser.error('no file specified...')
filename = os.path.splitext(files[0])[0]
if not os.path.exists(filename+'.t16'):
parser.print_help()
parser.error('invalid file "%s" specified...'%filename+'.t16')
sys.path.append(damask.solver.Marc().libraryPath('../../'))
try:
import py_post
except:
print('error: no valid Mentat release found')
sys.exit(-1)
if not options.type :
options.type = ['nodebased', 'ipbased']
else:
options.type = [options.type]
# --- initialize mesh data
if damask.core.mesh.mesh_init_postprocessing(filename+'.mesh'):
print('error: init not successful')
sys.exit(-1)
# --- check if ip data available for all elements; if not, then .t19 file is required
p = py_post.post_open(filename+'.t16')
asciiFile = False
p.moveto(1)
for e in range(p.elements()):
if not damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),1):
if os.path.exists(filename+'.t19'):
p.close()
p = py_post.post_open(filename+'.t19')
asciiFile = True
break
# --- parse *.output and *.t16 file
outputFormat = ParseOutputFormat(filename,options.homog,options.cryst,options.phase)
p.moveto(1)
p.extrapolation('translate')
stat = ParsePostfile(p,filename,outputFormat,options.legacy)
# --- output info
if options.info:
print '\n\nMentat release %s'%damask.solver.Marc().version('../../')
SummarizePostfile(stat)
SummarizeOutputfile(outputFormat)
sys.exit(0)
# --- create output dir
dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
if not os.path.isdir(dirname):
os.mkdir(dirname,0755)
# --- get positions
[increments,locations] = GetIncrementLocations(p,stat['NumberOfIncrements'],options)
# --- loop over positions
time_start = time.time()
for incCount,position in enumerate(locations): # walk through locations
p.moveto(position+1) # wind to correct position
time_delta = (float(len(locations)) / float(incCount+1) - 1.0) * (time.time() - time_start)
sys.stdout.write("\r(%02i:%02i:%02i) processing increment %i of %i..."\
%(time_delta//3600,time_delta%3600//60,time_delta%60,incCount+1,len(locations)))
sys.stdout.flush()
# --- write header
outFilename = {}
for geomtype in options.type:
outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")\
+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
with open(outFilename[geomtype],'w') as myfile:
writeHeader(myfile,stat,geomtype)
# --- write node based data
if geomtype == 'nodebased':
for n in range(stat['NumberOfNodes']):
myfile.write(str(n))
for l in range(stat['NumberOfNodalScalars']):
myfile.write('\t'+str(p.node_scalar(n,l)))
myfile.write('\n')
# --- write ip based data
elif geomtype == 'ipbased':
for e in range(stat['NumberOfElements']):
if asciiFile:
print 'ascii postfile not yet supported'
sys.exit(-1)
else:
ipData = [[]]
for l in range(stat['NumberOfElementalScalars']):
data = p.element_scalar(e,l)
for i in range(len(data)): # at least as many nodes as ips
node = damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),i+1) # fortran indexing starts at 1
if not node: break # no more ips
while i >= len(ipData): ipData.append([])
ipData[i].extend([data[node-1].value]) # python indexing starts at 0
for i in range(len(ipData)):
myfile.write('\t'.join(map(str,[e,i]+ipData[i]))+'\n')
p.close()
sys.stdout.write("\n")

View File

@ -14,7 +14,7 @@ scriptID = ' '.join([scriptName,damask.version])
#Borland, D., & Taylor, R. M. (2007). Rainbow Color Map (Still) Considered Harmful. Computer Graphics and Applications, IEEE, 27(2), 14--17.
#Moreland, K. (2009). Diverging Color Maps for Scientific Visualization. In Proc. 5th Int. Symp. Visual Computing (pp. 92--103).
outtypes = ['paraview','gmsh','raw','GOM']
extensions = ['.xml','.msh','.txt','.legend']
extensions = ['.json','.msh','.txt','.legend']
colormodels = ['RGB','HSL','XYZ','CIELAB','MSH']
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
@ -34,11 +34,9 @@ parser.add_option('-r','--right', dest='right', type='float', nargs=3, metavar='
parser.add_option('-c','--colormodel', dest='colormodel', metavar='string',
help='colormodel: '+', '.join(colormodels)+' [%default]')
parser.add_option('-p','--predefined', dest='predefined', metavar='string',
help='predefined colormap [%default]')
help='predefined colormap')
parser.add_option('-f','--format', dest='format', metavar='string',
help='output format: '+', '.join(outtypes)+' [%default]')
parser.add_option('-b','--basename', dest='basename', metavar='string',
help='basename of output file [%default]')
parser.set_defaults(colormodel = 'RGB')
parser.set_defaults(predefined = None)
parser.set_defaults(basename = None)
@ -48,7 +46,7 @@ parser.set_defaults(trim = (-1.0,1.0))
parser.set_defaults(left = (1.0,1.0,1.0))
parser.set_defaults(right = (0.0,0.0,0.0))
(options,filenames) = parser.parse_args()
(options,filename) = parser.parse_args()
if options.format not in outtypes:
parser.error('invalid format: "%s" (can be %s).'%(options.format,', '.join(outtypes)))
@ -62,10 +60,10 @@ if options.trim[0] < -1.0 or \
parser.error('invalid trim range (-1 +1).')
name = options.format if options.basename is None\
else options.basename
output = sys.stdout if options.basename is None\
else open(os.path.basename(options.basename)+extensions[outtypes.index(options.format)],'w')
name = options.format if filename[0] is None\
else filename[0]
output = sys.stdout if filename[0] is None\
else open(os.path.basename(filename[0])+extensions[outtypes.index(options.format)],'w')
colorLeft = damask.Color(options.colormodel.upper(), list(options.left))
colorRight = damask.Color(options.colormodel.upper(), list(options.right))

View File

@ -1003,11 +1003,8 @@ fileOpen = False
assembleHeader = True
header = []
standard = ['inc'] + \
{True: ['time'],
False:[]}[options.time] + \
['elem','node','ip','grain'] + \
{True: ['1_nodeinitialcoord','2_nodeinitialcoord','3_nodeinitialcoord'],
False:['1_ipinitialcoord','2_ipinitialcoord','3_ipinitialcoord']}[options.nodalScalar != []]
(['time'] if options.time else []) + \
['elem','node','ip','grain','1_pos','2_pos','3_pos']
# --------------------------- loop over positions --------------------------------


@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys
import os,sys,re
import damask
from optparse import OptionParser
@ -32,14 +32,17 @@ parser.set_defaults(label = [],
(options,filenames) = parser.parse_args()
pattern = [re.compile('^()(.+)$'), # label pattern for scalar
re.compile('^(\d+_)?(.+)$'), # label pattern for multidimension
]
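# the second pattern strips an optional dimension prefix from a label, e.g. (sketch):
#   re.compile('^(\d+_)?(.+)$').match('2_stress').group(2) --> 'stress'
#   re.compile('^(\d+_)?(.+)$').match('texture').group(2)  --> 'texture'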
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
@ -63,8 +66,9 @@ for name in filenames:
for i,index in enumerate(indices):
if index == -1: remarks.append('label {} not present...'.format(options.label[i]))
else:
m = pattern[dimensions[i]>1].match(table.labels[index]) # isolate label name
for j in xrange(dimensions[i]):
table.labels[index+j] = table.labels[index+j].replace(options.label[i],options.substitute[i])
table.labels[index+j] = table.labels[index+j].replace(m.group(2),options.substitute[i]) # replace name with substitute
if remarks != []: damask.util.croak(remarks)
if errors != []:


@ -14,7 +14,7 @@ scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Sort rows by given column label(s).
Sort rows by given (or all) column label(s).
Examples:
With coordinates in columns "x", "y", and "z"; sorting with x slowest and z fastest varying index: --label x,y,z.
@ -30,25 +30,19 @@ parser.add_option('-r','--reverse',
action = 'store_true',
help = 'sort in reverse')
parser.set_defaults(key = [],
reverse = False,
parser.set_defaults(reverse = False,
)
(options,filenames) = parser.parse_args()
if options.keys is None:
parser.error('No sorting column(s) specified.')
options.keys.reverse() # numpy sorts with most significant column as last
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
@ -61,15 +55,16 @@ for name in filenames:
# ------------------------------------------ process data ---------------------------------------
table.data_readArray()
keys = table.labels[::-1] if options.keys is None else options.keys[::-1] # numpy sorts with most significant column as last
cols = []
remarks = []
for i,column in enumerate(table.label_index(options.keys)):
if column < 0:
remarks.append("label {0} not present.".format(options.keys[i]))
else:
cols += [table.data[:,column]]
for i,column in enumerate(table.label_index(keys)):
if column < 0: remarks.append('label "{}" not present...'.format(keys[i]))
else: cols += [table.data[:,column]]
if remarks != []: damask.util.croak(remarks)
ind = np.lexsort(cols) if cols != [] else np.arange(table.data.shape[0])
if options.reverse: ind = ind[::-1]
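np.lexsort treats its last key as the most significant one, hence the reversal of the requested labels above; a quick illustration (assuming numpy):

import numpy as np
x = np.array([1, 0, 1, 0])
z = np.array([3, 4, 1, 2])
print(np.lexsort((z, x)))   # [3 1 2 0]: rows ordered by x first, then z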


@ -19,7 +19,7 @@ to resolution/packing.
""", version = scriptID)
parser.add_option('-c','--coordinates', dest='coords', type='string',\
parser.add_option('-c','--coordinates', dest='pos', type='string',\
help='column heading for coordinates [%default]')
parser.add_option('-p','--packing', dest='packing', type='int', nargs=3, \
help='dimension of packed group %default')
@ -29,7 +29,7 @@ parser.add_option('-r','--resolution', dest='resolution', type='int', nargs=3,
help='resolution in x,y,z [autodetect]')
parser.add_option('-d','--dimension', dest='dimension', type='float', nargs=3, \
help='dimension in x,y,z [autodetect]')
parser.set_defaults(coords = 'ipinitialcoord')
parser.set_defaults(coords = 'pos')
parser.set_defaults(packing = [2,2,2])
parser.set_defaults(shift = [0,0,0])
parser.set_defaults(resolution = [0,0,0])
@ -75,12 +75,12 @@ for file in files:
# --------------- figure out size and grid ---------------------------------------------------------
try:
locationCol = table.labels.index('1_%s'%options.coords) # columns containing location data
locationCol = table.labels.index('1_%s'%options.pos) # columns containing location data
except ValueError:
try:
locationCol = table.labels.index('%s.x'%options.coords) # columns containing location data (legacy naming scheme)
locationCol = table.labels.index('%s.x'%options.pos) # columns containing location data (legacy naming scheme)
except ValueError:
file['croak'].write('no coordinate data (1_%s/%s.x) found...\n'%(options.coords,options.coords))
file['croak'].write('no coordinate data (1_%s/%s.x) found...\n'%(options.pos,options.pos))
continue
if (any(options.resolution)==0 or any(options.dimension)==0.0):


@ -1,82 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,glob,re
import damask
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# -----------------------------
def findTag(filename,tag):
with open(filename,'r') as myfile:
mypattern = re.compile(str(tag))
for line in myfile:
if mypattern.search(line): return True
return False
# -----------------------------
# MAIN FUNCTION STARTS HERE
# -----------------------------
# --- input parsing
parser = OptionParser(usage='%prog [options] directory', description = """
Add data from an ASCII table to a VTK geometry file.
""", version = scriptID)
parser.add_option('-s','--sub', action='store_true', dest='subdir', \
help='include files in subdirectories [%default]')
parser.set_defaults(subdir = False)
(options, dirname) = parser.parse_args()
# --- sanity checks
if dirname == []:
parser.print_help()
parser.error('no directory specified...')
else:
dirname = os.path.abspath(dirname[0]) # only use first argument
if not os.path.isdir(dirname):
parser.print_help()
parser.error('invalid directory "%s" specified...'%dirname)
# --- loop over "nodebased" and "ipbased" data files and
# copy data to corresponding geometry files
dataSetTag = {'nodebased':'POINT_DATA', 'ipbased':'CELL_DATA'}
for geomtype in ['nodebased','ipbased']:
for vtkfilename in glob.iglob(dirname+os.sep+'*'+geomtype+'*.vtk'):
if not os.path.dirname(vtkfilename) == dirname and not options.subdir: continue # include files in subdir?
datafilename = os.path.splitext(vtkfilename)[0] + '.txt'
if not os.path.exists(datafilename): continue # no corresponding datafile found
# --- read data from datafile
with open(datafilename,'r') as datafile: # open datafile in read mode
table = damask.ASCIItable(fileIn=datafile) # use ASCIItable class to read data file
table.head_read() # read ASCII header info
myData = []
while table.data_read(): # read line in datafile
myData.append(table.data)
myData = zip(*myData) # reorder data: first index now label, not node
# --- append data to vtkfile
with open(vtkfilename,'a') as vtkfile: # open vtkfile in append mode
print vtkfilename
if not findTag(vtkfilename,dataSetTag[geomtype]): # check if data set is already present...
vtkfile.write(dataSetTag[geomtype] + ' %i'%len(myData[0])) # ... if not, write keyword
for idx,label in enumerate(table.labels): # write data
vtkfile.write('\nSCALARS '+label+' double 1\nLOOKUP_TABLE default\n') # all scalar data
vtkfile.write('\n'.join(map(str,myData[idx])))


@ -1,8 +1,9 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,vtk
import os,vtk
import damask
from collections import defaultdict
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
@ -17,125 +18,166 @@ Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp).
""", version = scriptID)
parser.add_option('-v', '--vtk', dest='vtk', \
parser.add_option( '--vtk',
dest = 'vtk',
type = 'string', metavar = 'string',
help = 'VTK file name')
parser.add_option( '--inplace',
dest = 'inplace',
action = 'store_true',
help = 'modify VTK file in-place')
parser.add_option('-r', '--render',
dest = 'render',
action = 'store_true',
help = 'open output in VTK render window')
parser.add_option('-s', '--scalar', dest='scalar', action='extend', \
help = 'scalar values')
parser.add_option('-v', '--vector',
dest = 'vector',
action = 'extend', metavar = '<string LIST>',
help = 'vector value label(s)')
parser.add_option('-c', '--color', dest='color', action='extend', \
help = 'RGB color tuples')
parser.set_defaults(scalar = [])
parser.set_defaults(color = [])
parser.set_defaults(scalar = [],
vector = [],
color = [],
inplace = False,
render = False,
)
(options, filenames) = parser.parse_args()
datainfo = { # list of requested labels per datatype
'scalar': {'len':1,
'label':[]},
'color': {'len':3,
'label':[]},
}
if not options.vtk: parser.error('No VTK file specified.')
if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')
if not os.path.exists(options.vtk):
parser.error('VTK file does not exist'); sys.exit()
if os.path.splitext(options.vtk)[1] == '.vtp':
reader = vtk.vtkXMLPolyDataReader()
reader.SetFileName(options.vtk)
reader.Update()
Polydata = reader.GetOutput()
elif os.path.splitext(options.vtk)[1] == '.vtk':
reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(options.vtk)
reader.Update()
Polydata = reader.GetPolyDataOutput()
else:
parser.error('Unsupported VTK file type extension.')
reader = vtk.vtkXMLPolyDataReader()
reader.SetFileName(options.vtk)
reader.Update()
Npoints = reader.GetNumberOfPoints()
Ncells = reader.GetNumberOfCells()
Nvertices = reader.GetNumberOfVerts()
Polydata = reader.GetOutput()
Npoints = Polydata.GetNumberOfPoints()
Ncells = Polydata.GetNumberOfCells()
Nvertices = Polydata.GetNumberOfVerts()
if Npoints != Ncells or Npoints != Nvertices:
parser.error('Number of points, cells, and vertices in VTK differ from each other'); sys.exit()
if options.scalar is not None: datainfo['scalar']['label'] += options.scalar
if options.color is not None: datainfo['color']['label'] += options.color
parser.error('Number of points, cells, and vertices in VTK differ from each other.')
# ------------------------------------------ setup file handles ---------------------------------------
damask.util.croak('{}: {} points, {} vertices, and {} cells...'.format(options.vtk,Npoints,Nvertices,Ncells))
files = []
if filenames == []:
files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr})
else:
for name in filenames:
if os.path.exists(name):
files.append({'name':name, 'input':open(name), 'output':sys.stderr, 'croak':sys.stderr})
# --- loop over input files -------------------------------------------------------------------------
#--- loop over input files ------------------------------------------------------------------------
for file in files:
if file['name'] != 'STDIN': file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
else: file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
if filenames == []: filenames = [None]
table = damask.ASCIItable(file['input'],file['output'],False) # make unbuffered ASCII_table
table.head_read() # read ASCII header info
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName, name)
# --------------- figure out columns to process
active = {}
column = {}
# --- interpret header ----------------------------------------------------------------------------
array = {}
table.head_read()
remarks = []
errors = []
VTKarray = {}
active = defaultdict(list)
for datatype,info in datainfo.items():
for label in info['label']:
foundIt = False
for key in ['1_'+label,label]:
if key in table.labels:
foundIt = True
if datatype not in active: active[datatype] = []
if datatype not in column: column[datatype] = {}
if datatype not in array: array[datatype] = {}
active[datatype].append(label)
column[datatype][label] = table.labels.index(key) # remember columns of requested data
if datatype == 'scalar':
array[datatype][label] = vtk.vtkDoubleArray()
array[datatype][label].SetNumberOfComponents(1)
array[datatype][label].SetName(label)
elif datatype == 'color':
array[datatype][label] = vtk.vtkUnsignedCharArray()
array[datatype][label].SetNumberOfComponents(3)
array[datatype][label].SetName(label)
if not foundIt:
file['croak'].write('column %s not found...\n'%label)
for datatype,dimension,label in [['scalar',1,options.scalar],
['vector',3,options.vector],
['color',3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
me = label[i]
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
else:
remarks.append('adding {} "{}"...'.format(datatype,me))
active[datatype].append(me)
if datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
elif datatype == 'color': VTKarray[me] = vtk.vtkUnsignedCharArray()
VTKarray[me].SetNumberOfComponents(dimension)
VTKarray[me].SetName(label[i])
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ---------------------------------------
while table.data_read(): # read next data line of ASCII table
while table.data_read(): # read next data line of ASCII table
for datatype,labels in active.items(): # loop over scalar,color
for label in labels: # loop over all requested items
theData = table.data[column[datatype][label]:\
column[datatype][label]+datainfo[datatype]['len']] # read strings
if datatype == 'color':
theData = map(lambda x: int(255.*float(x)),theData)
array[datatype][label].InsertNextTuple3(theData[0],theData[1],theData[2],)
elif datatype == 'scalar':
array[datatype][label].InsertNextValue(float(theData[0]))
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
theData = [table.data[i] for i in table.label_indexrange(me)] # read strings
if datatype == 'color': VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
table.input_close() # close input ASCII table
# ------------------------------------------ add data ---------------------------------------
for datatype,labels in active.items(): # loop over scalar,color
for datatype,labels in active.items(): # loop over scalar,color
if datatype == 'color':
Polydata.GetPointData().SetScalars(array[datatype][labels[0]])
Polydata.GetCellData().SetScalars(array[datatype][labels[0]])
for label in labels: # loop over all requested items
Polydata.GetPointData().AddArray(array[datatype][label])
Polydata.GetCellData().AddArray(array[datatype][label])
Polydata.GetPointData().SetScalars(VTKarray[active['color'][0]])
Polydata.GetCellData().SetScalars(VTKarray[active['color'][0]])
for me in labels: # loop over all requested items
Polydata.GetPointData().AddArray(VTKarray[me])
Polydata.GetCellData().AddArray(VTKarray[me])
Polydata.Modified()
if vtk.VTK_MAJOR_VERSION <= 5:
Polydata.Update()
if vtk.VTK_MAJOR_VERSION <= 5: Polydata.Update()
# ------------------------------------------ output result ---------------------------------------
writer = vtk.vtkXMLPolyDataWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.splitext(options.vtk)[0]+'_added.vtp')
if vtk.VTK_MAJOR_VERSION <= 5:
writer.SetInput(Polydata)
else:
writer.SetInputData(Polydata)
writer.Write()
writer = vtk.vtkXMLPolyDataWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.splitext(options.vtk)[0]+('.vtp' if options.inplace else '_added.vtp'))
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(Polydata)
else: writer.SetInputData(Polydata)
writer.Write()
# ------------------------------------------ render result ---------------------------------------
if options.render:
mapper = vtk.vtkDataSetMapper()
mapper.SetInputData(Polydata)
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# Create the graphics structure. The renderer renders into the
# render window. The render window interactor captures mouse events
# and will perform appropriate camera or actor manipulation
# depending on the nature of the events.
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
ren.AddActor(actor)
ren.SetBackground(1, 1, 1)
renWin.SetSize(200, 200)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
iren.Initialize()
renWin.Render()
iren.Start()


@ -30,10 +30,6 @@ parser.add_option('-r', '--render',
dest = 'render',
action = 'store_true',
help = 'open output in VTK render window')
parser.add_option('-m', '--mode',
dest = 'mode',
type = 'choice', metavar = 'string', choices = ['cell', 'point'],
help = 'cell-centered or point-centered data')
parser.add_option('-s', '--scalar',
dest = 'scalar',
action = 'extend', metavar = '<string LIST>',
@ -56,7 +52,6 @@ parser.set_defaults(scalar = [],
(options, filenames) = parser.parse_args()
if not options.mode: parser.error('No data mode specified.')
if not options.vtk: parser.error('No VTK file specified.')
if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')
@ -83,9 +78,9 @@ damask.util.croak('{}: {} points and {} cells...'.format(options.vtk,Npoints,Nce
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName, name)
@ -124,8 +119,11 @@ for name in filenames:
# ------------------------------------------ process data ---------------------------------------
datacount = 0
while table.data_read(): # read next data line of ASCII table
datacount += 1 # count data lines
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
theData = [table.data[i] for i in table.label_indexrange(me)] # read strings
@ -133,15 +131,25 @@ for name in filenames:
elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))
table.close() # close input ASCII table
# ------------------------------------------ add data ---------------------------------------
if datacount == Npoints: mode = 'point'
elif datacount == Ncells: mode = 'cell'
else:
damask.util.croak('Data count is incompatible with grid...')
continue
damask.util.croak('{} mode...'.format(mode))
for datatype,labels in active.items(): # loop over scalar,color
if datatype == 'color':
if options.mode == 'cell': rGrid.GetCellData().SetScalars(VTKarray[active['color'][0]])
elif options.mode == 'point': rGrid.GetPointData().SetScalars(VTKarray[active['color'][0]])
if mode == 'cell': rGrid.GetCellData().SetScalars(VTKarray[active['color'][0]])
elif mode == 'point': rGrid.GetPointData().SetScalars(VTKarray[active['color'][0]])
for me in labels: # loop over all requested items
if options.mode == 'cell': rGrid.GetCellData().AddArray(VTKarray[me])
elif options.mode == 'point': rGrid.GetPointData().AddArray(VTKarray[me])
if mode == 'cell': rGrid.GetCellData().AddArray(VTKarray[me])
elif mode == 'point': rGrid.GetPointData().AddArray(VTKarray[me])
rGrid.Modified()
if vtk.VTK_MAJOR_VERSION <= 5: rGrid.Update()


@ -1,135 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,vtk
import damask
from collections import defaultdict
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Add scalar and RGB tuples from ASCIItable to existing VTK voxel cloud (.vtu/.vtk).
""", version = scriptID)
parser.add_option('-v', '--vtk', dest='vtk', \
help = 'VTK file name')
parser.add_option('-s', '--scalar', dest='scalar', action='extend', \
help = 'scalar values')
parser.add_option('-c', '--color', dest='color', action='extend', \
help = 'RGB color tuples')
parser.set_defaults(scalar = [],
color = [],
render = False,
)
(options, filenames) = parser.parse_args()
if options.vtk is None or not os.path.exists(options.vtk):
parser.error('VTK file does not exist')
if os.path.splitext(options.vtk)[1] == '.vtu':
reader = vtk.vtkXMLUnstructuredGridReader()
reader.SetFileName(options.vtk)
reader.Update()
uGrid = reader.GetOutput()
elif os.path.splitext(options.vtk)[1] == '.vtk':
reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(options.vtk)
reader.Update()
uGrid = reader.GetUnstructuredGridOutput()
else:
parser.error('unsupported VTK file type extension')
Npoints = uGrid.GetNumberOfPoints()
Ncells = uGrid.GetNumberOfCells()
sys.stderr.write('{}: {} points and {} cells...\n'.format(damask.util.emph(options.vtk),Npoints,Ncells))
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
except: continue
damask.util.croak(damask.util.emph(scriptName)+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
remarks = []
errors = []
VTKarray = {}
active = defaultdict(list)
for datatype,dimension,label in [['scalar',1,options.scalar],
['color',3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
me = label[i]
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
else:
damask.util.croak('adding {} {}'.format(datatype,me))
active[datatype].append(me)
if datatype in ['scalar']:
VTKarray[me] = vtk.vtkDoubleArray()
elif datatype == 'color':
VTKarray[me] = vtk.vtkUnsignedCharArray()
VTKarray[me].SetNumberOfComponents(dimension)
VTKarray[me].SetName(label[i])
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
# ------------------------------------------ process data ---------------------------------------
while table.data_read(): # read next data line of ASCII table
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
theData = [table.data[i] for i in table.label_indexrange(me)] # read strings
if datatype == 'color':
VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
elif datatype == 'scalar':
VTKarray[me].InsertNextValue(float(theData[0]))
# ------------------------------------------ add data ---------------------------------------
for datatype,labels in active.items(): # loop over scalar,color
if datatype == 'color':
uGrid.GetCellData().SetScalars(VTKarray[active['color'][0]])
for label in labels: # loop over all requested items
uGrid.GetCellData().AddArray(VTKarray[me])
uGrid.Modified()
if vtk.VTK_MAJOR_VERSION <= 5:
uGrid.Update()
# ------------------------------------------ output result ---------------------------------------
writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.splitext(options.vtk)[0]+'_added.vtu')
if vtk.VTK_MAJOR_VERSION <= 5:
writer.SetInput(uGrid)
else:
writer.SetInputData(uGrid)
writer.Write()


@ -18,12 +18,13 @@ Produce a VTK point cloud dataset based on coordinates given in an ASCIItable.
""", version = scriptID)
parser.add_option('-d', '--deformed',
dest = 'deformed',
parser.add_option('-p',
'--pos', '--position',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'deformed coordinate label [%default]')
help = 'label of coordinates [%default]')
parser.set_defaults(deformed = 'ipdeformedcoord'
parser.set_defaults(pos = 'pos',
)
(options, filenames) = parser.parse_args()
@ -46,9 +47,11 @@ for name in filenames:
errors = []
remarks = []
coordDim = table.label_dimension(options.deformed)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.deformed))
elif coordDim < 3: remarks.append('appending {} dimensions to coordinates "{}"...'.format(3-coordDim,options.deformed))
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
@ -58,7 +61,7 @@ for name in filenames:
# ------------------------------------------ process data ---------------------------------------
table.data_readArray(options.deformed)
table.data_readArray(options.pos)
if len(table.data.shape) < 2: table.data.shape += (1,) # expand to 2D shape
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,
@ -83,19 +86,21 @@ for name in filenames:
if name:
writer = vtk.vtkXMLPolyDataWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]\
+'.'+writer.GetDefaultFileExtension()))
writer.SetDataModeToBinary()
writer.SetFileName(os.path.join(os.path.split(name)[0],
os.path.splitext(os.path.split(name)[1])[0] +
'.' + writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)
writer.WriteToOutputStringOn()
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(Polydata)
else: writer.SetInputData(Polydata)
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
if name is None: sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()]) # limiting of outputString is fix for vtk <7.0
table.close()


@ -6,7 +6,6 @@ import numpy as np
import damask
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
@ -19,16 +18,19 @@ Create regular voxel grid from points in an ASCIItable.
""", version = scriptID)
parser.add_option('-m', '--mode',
parser.add_option('-m',
'--mode',
dest = 'mode',
type = 'choice', choices = ['cell','point'],
help = 'cell-centered or point-centered coordinates ')
parser.add_option('-c', '--coordinates',
dest = 'position',
help = 'cell-centered or point-centered coordinates')
parser.add_option('-p',
'--pos', '--position',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'coordinate label [%default]')
parser.set_defaults(position ='ipinitialcoord',
mode ='cell'
help = 'label of coordinates [%default]')
parser.set_defaults(mode = 'cell',
pos = 'pos',
)
(options, filenames) = parser.parse_args()
@ -38,9 +40,11 @@ parser.set_defaults(position ='ipinitialcoord',
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = True,
readonly = True,
)
except: continue
damask.util.report(scriptName,name)
@ -48,10 +52,15 @@ for name in filenames:
table.head_read()
errors = []
if table.label_dimension(options.position) != 3:
errors.append('coordinates {} are not a vector.'.format(options.position))
remarks = []
errors = []
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
@ -59,17 +68,25 @@ for name in filenames:
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray(options.position)
table.data_readArray(options.pos)
if len(table.data.shape) < 2: table.data.shape += (1,) # expand to 2D shape
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data.shape[1]),dtype='f'))) # fill coords up to 3D with zeros
coords = [np.unique(table.data[:,i]) for i in xrange(3)]
if options.mode == 'cell':
coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + len(coords[i]) > 1]] + \
[coords[i][j-1] + coords[i][j] for j in xrange(1,len(coords[i]))] + \
[3.0 * coords[i][-1] - coords[i][-1 - (len(coords[i]) > 1)]]) for i in xrange(3)]
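# sketch of the cell-mode boundary reconstruction above (1-D): centers [1.0, 3.0] become
# boundaries [0.0, 2.0, 4.0], i.e. 0.5*(3*c[0]-c[1]), the midpoints, and 0.5*(3*c[-1]-c[-2]);
# a single center degenerates to a zero-width cell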
grid = np.array(map(len,coords),'i')
grid = np.array(map(len,coords),'i')
N = grid.prod() if options.mode == 'point' else (grid-1).prod()
if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*(grid - options.mode == 'cell') ))
if N != len(table.data):
errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*(grid - (options.mode == 'cell')) ))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
@ -93,24 +110,26 @@ for name in filenames:
rGrid.SetZCoordinates(coordArray[2])
# ------------------------------------------ output result ---------------------------------------
# ------------------------------------------ output result ---------------------------------------
if name:
writer = vtk.vtkXMLRectilinearGridWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0] \
+'_{}({})'.format(options.position, options.mode) \
+'.'+writer.GetDefaultFileExtension()))
writer.SetDataModeToBinary()
writer.SetFileName(os.path.join(os.path.split(name)[0],
os.path.splitext(os.path.split(name)[1])[0] +
'_{}({})'.format(options.pos, options.mode) +
'.' + writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)
writer.WriteToOutputStringOn()
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(rGrid)
else: writer.SetInputData(rGrid)
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
if name is None: sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()]) # limiting of outputString is fix for vtk <7.0
table.close()


@ -1,122 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,shutil
import damask
from optparse import OptionParser
import vtk
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
""", version = scriptID)
parser.add_option('-v','--vector', nargs=3, dest='vector', \
help='suffices indicating vector components [%default]')
parser.add_option('-s','--separator', dest='separator', \
help='separator between label and suffix [%default]')
parser.set_defaults(vector = ['x','y','z'])
parser.set_defaults(separator = '.')
(options, filenames) = parser.parse_args()
# --- sanity checks
if filenames == []:
parser.print_help()
parser.error('no file specified...')
for filename in filenames:
if not os.path.isfile(filename):
parser.print_help()
parser.error('invalid file "%s" specified...'%filename)
# --- ITERATE OVER FILES AND PROCESS THEM
for filename in filenames:
sys.stdout.write('read file "%s" ...'%filename)
sys.stdout.flush()
suffix = os.path.splitext(filename)[1]
if suffix == '.vtk':
reader = vtk.vtkUnstructuredGridReader()
reader.ReadAllScalarsOn()
reader.ReadAllVectorsOn()
reader.ReadAllTensorsOn()
elif suffix == '.vtu':
reader = vtk.vtkXMLUnstructuredGridReader()
else:
parser.error('filetype "%s" not supported'%suffix)
reader.SetFileName(filename)
reader.Update()
uGrid = reader.GetOutput()
sys.stdout.write(' done\n')
sys.stdout.flush()
# Read the scalar data
scalarData = {}
scalarsToBeRemoved = []
Nscalars = uGrid.GetCellData().GetNumberOfArrays()
for i in range(Nscalars):
sys.stdout.write("\rread scalar data %d%%" %(100*i/Nscalars))
sys.stdout.flush()
scalarName = uGrid.GetCellData().GetArrayName(i)
if scalarName.split(options.separator)[-1] in options.vector:
label,suffix = scalarName.split(options.separator)
if label not in scalarData:
scalarData[label] = [[],[],[]]
uGrid.GetCellData().SetActiveScalars(scalarName)
scalarData[label][options.vector.index(suffix)] = uGrid.GetCellData().GetScalars(scalarName)
scalarsToBeRemoved.append(scalarName)
for scalarName in scalarsToBeRemoved:
uGrid.GetCellData().RemoveArray(scalarName)
sys.stdout.write('\rread scalar data done\n')
sys.stdout.flush()
# Convert the scalar data to vector data
NscalarData = len(scalarData)
for n,label in enumerate(scalarData):
sys.stdout.write("\rconvert to vector data %d%%" %(100*n/NscalarData))
sys.stdout.flush()
Nvalues = scalarData[label][0].GetNumberOfTuples()
vectorData = vtk.vtkDoubleArray()
vectorData.SetName(label)
vectorData.SetNumberOfComponents(3) # set this before NumberOfTuples !!!
vectorData.SetNumberOfTuples(Nvalues)
for i in range(Nvalues):
for j in range(3):
vectorData.SetComponent(i,j,scalarData[label][j].GetValue(i))
uGrid.GetCellData().AddArray(vectorData)
sys.stdout.write('\rconvert to vector data done\n')
# Write to new vtk file
outfilename = os.path.splitext(filename)[0]+'.vtu'
sys.stdout.write('write to file "%s" ...'%outfilename)
sys.stdout.flush()
writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetFileName(outfilename+'_tmp')
writer.SetDataModeToAscii()
writer.SetInput(uGrid)
writer.Write()
sys.stdout.write(' done\n')
sys.stdout.flush()
shutil.move(outfilename+'_tmp',outfilename)
# --------------------------- DONE --------------------------------


@ -1,118 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,vtk
import numpy as np
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Create hexahedral voxels around points in an ASCIItable.
""", version = scriptID)
parser.add_option('-d', '--deformed',
dest = 'deformed',
type = 'string', metavar = 'string',
help = 'deformed coordinate label [%default]')
parser.add_option('-c','--coordinates',
dest = 'coords',
type = 'string', metavar='string',
help = 'undeformed coordinates label [%default]')
parser.set_defaults(deformed = 'ipdeformedcoord',
coords = 'ipinitialcoord',
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
except: continue
damask.util.report(scriptName,name)
# --------------- interpret header -----------------------------------------------------------------
table.head_read()
errors=[]
if table.label_dimension(options.deformed) != 3:
errors.append('columns "{}" have dimension {}'.format(options.deformed,table.label_dimension(options.deformed)))
if table.label_dimension(options.coords) != 3:
errors.append('coordinates {} are not a vector.'.format(options.coords))
table.data_readArray([options.coords,options.deformed])
# --------------- figure out size and grid ---------------------------------------------------------
coords = [{},{},{}]
for i in xrange(len(table.data)):
for j in xrange(3):
coords[j][str(table.data[i,table.label_index(options.coords)+j])] = True
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d') # size from bounding box, corrected for cell-centeredness
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
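# sketch: two cell centers at x = 0.25 and 0.75 give grid = 2 and size = 2/(2-1)*(0.75-0.25) = 1.0,
# i.e. the bounding box grown by half a cell on each side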
# ------------------------------------------ process data ---------------------------------------
hexPoints = np.array([[-1,-1,-1],
[ 1,-1,-1],
[ 1, 1,-1],
[-1, 1,-1],
[-1,-1, 1],
[ 1,-1, 1],
[ 1, 1, 1],
[-1, 1, 1],
])
halfDelta = 0.5*size/grid
Points = vtk.vtkPoints()
Hex = vtk.vtkHexahedron()
uGrid = vtk.vtkUnstructuredGrid()
for p in table.data:
for i,h in enumerate(hexPoints):
pointID = Points.InsertNextPoint(p[table.label_index(options.deformed):table.label_index(options.deformed)+3]+h*halfDelta)
Hex.GetPointIds().SetId(i,pointID)
uGrid.InsertNextCell(Hex.GetCellType(), Hex.GetPointIds())
uGrid.SetPoints(Points)
# ------------------------------------------ output result ---------------------------------------
if name:
writer = vtk.vtkXMLUnstructuredGridWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]\
+'.'+writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(uGrid)
else: writer.SetInputData(uGrid)
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
table.close() # close input ASCII table


@ -15,25 +15,29 @@ scriptID = ' '.join([scriptName,damask.version])
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Changes the (three-dimensional) canvas of a spectral geometry description.
Grid can be given as absolute or relative values, e.g. 16 16 16 or 2x 0.5x 32.
""", version = scriptID)
parser.add_option('-g', '--grid',
parser.add_option('-g',
'--grid',
dest = 'grid',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'a,b,c grid of hexahedral box [auto]')
parser.add_option('-o', '--offset',
help = 'a,b,c grid of hexahedral box. [auto]')
parser.add_option('-o',
'--offset',
dest = 'offset',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c offset from old to new origin of grid [%default]')
parser.add_option('-f', '--fill',
parser.add_option('-f',
'--fill',
dest = 'fill',
type = 'float', metavar = 'float',
help = '(background) canvas grain index. "0" selects maximum microstructure index + 1 [%default]')
parser.add_option('--float',
dest = 'real',
action = 'store_true',
help = 'input data is float [%default]')
help = 'use float input')
parser.set_defaults(grid = ['0','0','0'],
offset = (0,0,0),
@ -61,13 +65,7 @@ for name in filenames:
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
damask.util.report_geom(info)
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
@ -105,7 +103,7 @@ for name in filenames:
translate_y = [i - options.offset[1] for i in yindex]
translate_z = [i - options.offset[2] for i in zindex]
if 0 in map(len,[xindex,yindex,zindex,translate_x,translate_y,translate_z]):
damask.util.croak('Invaldid grid-offset comination')
damask.util.croak('invalid grid-offset combination.')
table.close(dismiss = True)
continue
microstructure_cropped[min(translate_x):(max(translate_x)+1),\
@ -125,13 +123,13 @@ for name in filenames:
errors = []
if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: %s'%(' x '.join(map(str,newInfo['grid']))))
remarks.append('--> grid a b c: {}'.format(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: %s'%(' x '.join(map(str,newInfo['size']))))
remarks.append('--> size x y z: {}'.format(' x '.join(map(str,newInfo['size']))))
if (any(newInfo['origin'] != info['origin'])):
remarks.append('--> origin x y z: %s'%(' : '.join(map(str,newInfo['origin']))))
remarks.append('--> origin x y z: {}'.format(' : '.join(map(str,newInfo['origin']))))
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: %i'%newInfo['microstructures'])
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
if np.any(newInfo['grid'] < 1): errors.append('invalid new grid a b c.')
if np.any(newInfo['size'] <= 0.0): errors.append('invalid new size x y z.')
@ -147,11 +145,11 @@ for name in filenames:
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=newInfo['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
"grid\ta {}\tb {}\tc {}".format(*newInfo['grid']),
"size\tx {}\ty {}\tz {}".format(*newInfo['size']),
"origin\tx {}\ty {}\tz {}".format(*newInfo['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(newInfo['microstructures']),
extra_header
])
table.labels_clear()


@ -1,108 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,vtk
import numpy as np
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """
Produce VTK rectilinear mesh of structure data from geom description
""", version = scriptID)
parser.add_option('-m','--nodata',
dest = 'data',
action = 'store_false',
help = 'generate mesh without microstructure index data')
parser.set_defaults(data = True,
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False, readonly = True)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
#--- read microstructure information --------------------------------------------------------------
if options.data:
microstructure,ok = table.microstructure_read(info['grid'],strict = True) # read microstructure
if ok:
structure = vtk.vtkIntArray()
structure.SetName('Microstructures')
for idx in microstructure: structure.InsertNextValue(idx)
else: errors.append('mismatch between data and grid dimension.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- generate VTK rectilinear grid ---------------------------------------------------------------
grid = vtk.vtkRectilinearGrid()
grid.SetDimensions([x+1 for x in info['grid']])
for i in xrange(3):
temp = vtk.vtkDoubleArray()
temp.SetNumberOfTuples(info['grid'][i]+1)
for j in xrange(info['grid'][i]+1):
temp.InsertTuple1(j,j*info['size'][i]/info['grid'][i]+info['origin'][i])
if i == 0: grid.SetXCoordinates(temp)
elif i == 1: grid.SetYCoordinates(temp)
elif i == 2: grid.SetZCoordinates(temp)
if options.data: grid.GetCellData().AddArray(structure)
# --- write data -----------------------------------------------------------------------------------
if name:
writer = vtk.vtkXMLRectilinearGridWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]+'.'+writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
else: writer.SetInputData(grid)
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
table.close()

processing/pre/geom_check.sh Executable file

@ -0,0 +1,18 @@
#!/bin/bash
for geom in "$@"
do
geom_toTable \
< $geom \
| \
vtk_rectilinearGrid > ${geom%.*}.vtk
geom_toTable \
< $geom \
| \
vtk_addRectilinearGridData \
--scalar microstructure \
--inplace \
--vtk ${geom%.*}.vtk
rm ${geom%.*}.vtk
done


@ -6,15 +6,12 @@ import numpy as np
import damask
from scipy import ndimage
from optparse import OptionParser
from collections import defaultdict
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def mostFrequent(arr):
d = defaultdict(int)
for i in arr: d[i] += 1
return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)[0][0] # return value of most frequent microstructure
return np.argmax(np.bincount(arr))
#--------------------------------------------------------------------------------------------------
@ -43,10 +40,9 @@ parser.set_defaults(stencil = 3,
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)
@ -72,7 +68,7 @@ for name in filenames:
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure
# --- do work ------------------------------------------------------------------------------------


@ -1,206 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,math
import numpy as np
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Generate geometry description and material configuration from EBSD data in given square-gridded 'ang' file.
Two phases can be discriminated based on threshold value in a given data column.
""", version = scriptID)
parser.add_option('--column', dest='column', type='int', metavar = 'int',
help='data column to discriminate between both phases [%default]')
parser.add_option('-t','--threshold', dest='threshold', type='float', metavar = 'float',
help='threshold value for phase discrimination [%default]')
parser.add_option('--homogenization', dest='homogenization', type='int', metavar = 'int',
help='homogenization index for <microstructure> configuration [%default]')
parser.add_option('--phase', dest='phase', type='int', nargs = 2, metavar = 'int int',
help='phase indices for <microstructure> configuration %default')
parser.add_option('--crystallite', dest='crystallite', type='int', metavar = 'int',
help='crystallite index for <microstructure> configuration [%default]')
parser.add_option('-c','--compress', dest='compress', action='store_true',
help='lump identical microstructure and texture information [%default]')
parser.add_option('-a', '--axes', dest='axes', nargs = 3, metavar = 'string string string',
help='Euler angle coordinate system for <texture> configuration x,y,z = %default')
parser.add_option('-p', '--precision', dest='precision', choices=['0','1','2','3'], metavar = 'int',
help = 'euler angles decimal places for output format and compressing (0,1,2,3) [2]')
parser.set_defaults(column = 11,
threshold = 0.5,
homogenization = 1,
phase = [1,2],
crystallite = 1,
compress = False,
axes = ['y','x','-z'],
precision = '2',
)
(options,filenames) = parser.parse_args()
for i in options.axes:
if i.lower() not in ['x','+x','-x','y','+y','-y','z','+z','-z']:
parser.error('invalid axes %s %s %s' %(options.axes[0],options.axes[1],options.axes[2]))
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[-2]+'.geom' if name else name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)
info = {
'grid': np.ones(3,'i'),
'size': np.zeros(3,'d'),
'origin': np.zeros(3,'d'),
'microstructures': 0,
'homogenization': options.homogenization
}
coords = [{},{},{1:True}]
pos = {'min':[ float("inf"), float("inf")],
'max':[-float("inf"),-float("inf")]}
phase = []
eulerangles = []
# --------------- read data -----------------------------------------------------------------------
errors = []
while table.data_read():
words = table.data
if words[0] == '#': # process initial comments/header block
if len(words) > 2:
if words[2].lower() == 'hexgrid':
errors.append('The file has HexGrid format. Please first convert to SquareGrid...')
break
else:
currPos = words[3:5]
for i in xrange(2):
coords[i][currPos[i]] = True
currPos = map(float,currPos)
for i in xrange(2):
pos['min'][i] = min(pos['min'][i],currPos[i])
pos['max'][i] = max(pos['max'][i],currPos[i])
eulerangles.append(map(math.degrees,map(float,words[:3])))
phase.append(options.phase[int(float(words[options.column-1]) > options.threshold)])
if errors != []:
damask.util.croak(errors)
continue
# --------------- determine size and grid ---------------------------------------------------------
info['grid'] = np.array(map(len,coords),'i')
info['size'][0:2] = info['grid'][0:2]/(info['grid'][0:2]-1.0)* \
np.array([pos['max'][0]-pos['min'][0],
pos['max'][1]-pos['min'][1]],'d')
info['size'][2]=info['size'][0]/info['grid'][0]
eulerangles = np.array(eulerangles,dtype='f').reshape(info['grid'].prod(),3)
phase = np.array(phase,dtype='i').reshape(info['grid'].prod())
limits = [360,180,360]
if any([np.any(eulerangles[:,i]>=limits[i]) for i in [0,1,2]]):
errors.append('Error: Euler angles out of bounds. Ang file might contain unindexed points.')
for i,angle in enumerate(['phi1','PHI','phi2']):
for n in np.nditer(np.where(eulerangles[:,i]>=limits[i]),['zerosize_ok']):
errors.append('%s in line %i (%4.2f %4.2f %4.2f)\n'
%(angle,n,eulerangles[n,0],eulerangles[n,1],eulerangles[n,2]))
if errors != []:
damask.util.croak(errors)
continue
eulerangles=np.around(eulerangles,int(options.precision)) # round to desired precision
# ensure that rounded Euler angles are not out of bounds (modulo by limits)
for i,angle in enumerate(['phi1','PHI','phi2']):
eulerangles[:,i]%=limits[i]
# scale angles by desired precision and convert to int; create a unique integer key from the three
# Euler angles by concatenating their zero-padded string representations, then find the unique keys.
# Texture IDs are the indices of each key's first occurrence; the inverse mapping is used to
# construct the microstructure: one (texture/phase) pair per point, built from unique texture IDs.
# Use longInt (64bit, i8) because the keys might be long.
if options.compress:
formatString='{0:0>'+str(int(options.precision)+3)+'}'
euleranglesRadInt = (eulerangles*10**int(options.precision)).astype('int')
eulerKeys = np.array([int(''.join(map(formatString.format,euleranglesRadInt[i,:]))) \
for i in xrange(info['grid'].prod())])
devNull, texture, eulerKeys_idx = np.unique(eulerKeys, return_index = True, return_inverse=True)
msFull = np.array([[eulerKeys_idx[i],phase[i]] for i in xrange(info['grid'].prod())],'i8')
devNull,msUnique,matPoints = np.unique(msFull.view('c16'),True,True)
matPoints+=1
microstructure = np.array([msFull[i] for i in msUnique]) # pick only unique microstructures
else:
texture = np.arange(info['grid'].prod())
microstructure = np.hstack( zip(texture,phase) ).reshape(info['grid'].prod(),2) # create texture/phase pairs
formatOut = 1+int(math.log10(len(texture)))
textureOut =['<texture>']
eulerFormatOut='%%%i.%if'%(int(options.precision)+4,int(options.precision))
outStringAngles='(gauss) phi1 '+eulerFormatOut+' Phi '+eulerFormatOut+' phi2 '+eulerFormatOut+' scatter 0.0 fraction 1.0'
for i in xrange(len(texture)):
textureOut += ['[Texture%s]'%str(i+1).zfill(formatOut),
'axes %s %s %s'%(options.axes[0],options.axes[1],options.axes[2]),
outStringAngles%tuple(eulerangles[texture[i],...])
]
formatOut = 1+int(math.log10(len(microstructure)))
microstructureOut =['<microstructure>']
for i in xrange(len(microstructure)):
microstructureOut += ['[Grain%s]'%str(i+1).zfill(formatOut),
'crystallite\t%i'%options.crystallite,
'(constituent)\tphase %i\ttexture %i\tfraction 1.0'%(microstructure[i,1],microstructure[i,0]+1)
]
info['microstructures'] = len(microstructure)
#--- report ---------------------------------------------------------------------------------------
damask.util.croak('grid a b c: %s\n'%(' x '.join(map(str,info['grid']))) +
'size x y z: %s\n'%(' x '.join(map(str,info['size']))) +
'origin x y z: %s\n'%(' : '.join(map(str,info['origin']))) +
'homogenization: %i\n'%info['homogenization'] +
'microstructures: %i\n\n'%info['microstructures'])
if np.any(info['grid'] < 1):
damask.util.croak('invalid grid a b c.\n')
continue
if np.any(info['size'] <= 0.0):
damask.util.croak('invalid size x y z.\n')
continue
#--- write data/header --------------------------------------------------------------------------------
table.info_clear()
table.info_append([' '.join([scriptID] + sys.argv[1:]),
"grid\ta %i\tb %i\tc %i"%(info['grid'][0],info['grid'][1],info['grid'][2],),
"size\tx %f\ty %f\tz %f"%(info['size'][0],info['size'][1],info['size'][2],),
"origin\tx %f\ty %f\tz %f"%(info['origin'][0],info['origin'][1],info['origin'][2],),
"microstructures\t%i"%info['microstructures'],
"homogenization\t%i"%info['homogenization'],
] +
[line for line in microstructureOut + textureOut]
)
table.head_write()
if options.compress:
matPoints = matPoints.reshape((info['grid'][1],info['grid'][0]))
table.data = matPoints
table.data_writeArray('%%%ii'%(1+int(math.log10(np.amax(matPoints)))),delimiter=' ')
else:
table.output_write("1 to %i\n"%(info['microstructures']))
table.output_flush()
# --- output finalization --------------------------------------------------------------------------
table.close()


@ -1,297 +0,0 @@
#!/usr/bin/env python
##
# This script reads in all the seeds and partitions the space
# using scipy.spatial.Delaunay triangulation.
# Unknown locations are then interpolated through the Barycentric
# interpolation method, which relies on the triangulation.
# A rim is automatically added to the patch, which helps
# improve compatibility with the spectral solver as well as
# maintain a meaningful microstructure (reducing artifacts).
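# a minimal sketch of the barycentric lookup this builds on (scipy API; 'q' is a query
# point such as a voxel center, other names are illustrative):
#   tri = Delaunay(seedPoints)            # seedPoints: (N, 3) array of seed positions
#   s = tri.find_simplex(q)               # containing simplex, -1 if outside the hull
#   T = tri.transform[s]                  # affine map of simplex s
#   b = T[:3].dot(q - T[3])               # first three barycentric coordinates
#   bary = np.append(b, 1.0 - b.sum())    # all four weights
#   seedIDs = tri.simplices[s]            # the seeds spanning this tetrahedron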
import os
import numpy as np
import argparse
from scipy.spatial import Delaunay
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
OFFSET = 0.1 #resize the seeded volume to give space for rim/pan
PHANTOM_ID = -1 #grain ID for phantom seeds
def d_print(info, data, separator=False):
"""quickly print debug information"""
if(separator): print "*"*80
print info
print data
def meshgrid2(*arrs):
"""code inspired by http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d"""
arrs = tuple(reversed(arrs))
arrs = tuple(arrs)
lens = np.array(map(len, arrs))
dim = len(arrs)
ans = []
for i, arr in enumerate(arrs):
slc = np.ones(dim,'i')
slc[i] = lens[i]
arr2 = np.asarray(arr).reshape(slc)
for j, sz in enumerate(lens):
if j != i:
arr2 = arr2.repeat(sz, axis=j)
ans.insert(0,arr2)
return tuple(ans)
#prepare command line interface
parser = argparse.ArgumentParser(prog="geoFromBarycentic",
description='''Generate geom file through \
Barycentric interpolation of a seeds file.''',
epilog="requires numpy, and scipy.")
parser.add_argument("seeds",
help="seeds file in DAMASK format:\
http://damask.mpie.de/Documentation/AsciiTableFormat",
default="test.seeds")
parser.add_argument("-v", "--version",
action="version",
version="%(prog)s 0.1")
parser.add_argument("-g", "--grid",
nargs=3,
help="grid size(mesh resolution, recommend using 2^x)",
default=[32,32,32],
type=int)
parser.add_argument("-s", "--size",
help="physical size of the target volume.",
nargs=3,
default=[1.0,1.0,1.0],
type=float)
parser.add_argument("-o", "--origin",
help="lower left corner of the patch.",
nargs=3,
default=[0.0,0.0,0.0],
type=float)
parser.add_argument('-m', '--homogenization',
help='homogenization index to be used',
default=1,
type=int)
parser.add_argument('-c', '--crystallite',
help='crystallite index to be used',
default=1,
type=int)
parser.add_argument('-p', '--phase',
help='phase index to be used',
default=1,
type=int)
parser.add_argument('-F', '--Favg',
help='reshape the periodicity, not useful for RIM method',
nargs=9,
default=[1.0,0.0,0.0,
0.0,1.0,0.0,
0.0,0.0,1.0],
type=float)
parser.add_argument("-G", "--geomFile",
help='the name of the output geom file',
default='seeds.geom',
type=str)
parser.add_argument("-C", "--configFile",
help='output dummy material.config file',
action='store_true',
default=False)
parser.add_argument("-d", "--debug",
help="start debugging script",
action='store_true',
default=False)
parser.add_argument("-S", "--seedsFile",
help="write out resized seeds file",
action='store_true',
default=False)
parser.add_argument("-r", '--addRim',
help="add rim and provide control of face lifting point",
action='store_true',
default=False)
args = parser.parse_args() # get all the arguments right after
#quick help to user
print "*"*80
parser.print_help()
print """Sample usage:
./geoFromBarycentic.py 20grains.seeds -g 128 128 128 -S -r; geom_check seeds.geom; seeds_check new_seed.seeds.
"""
print "*"*80
if (args.debug):
d_print("args are:", parser.parse_args(),separator=True)
#/\/\/\/\/#
# m a i n #
#\/\/\/\/\#
print "only work for 3D case now, 2D support coming soon..."
print "reading seeds file: {}".format(args.seeds)
with open(args.seeds, 'r') as f:
rawtext = f.readlines()
n_header = int(rawtext.pop(0).split()[0])
#record all the seeds position
if (args.addRim):
grid_shift = np.array(args.size) * np.array([OFFSET,OFFSET,OFFSET*2])
s_coords = np.array([[np.array(float(item))*(1 - OFFSET*2)
for item in line.split()[:3]] + grid_shift
for line in rawtext[n_header:]])
else:
#no need for shifting with periodicity
s_coords = np.array([[np.array(float(item))
for item in line.split()[:3]]
for line in rawtext[n_header:]])
#record ID of the seeds: int/EulerAngles
if 'microstructure' in rawtext[n_header-1]:
s_id = [int(line.split()[-1]) for line in rawtext[n_header:]]
else:
print "WARNING:"
print "THIS SCRIPT DOES NOT UNDERSTAND HOW TO GROUP CRYSTALLITES."
print "ALL CRYSTAL ORIENTATIONS ARE CONSIDERED TO BE UNIQUE."
print "FOR MORE ACCURATE CONTROL OF SEEDS GROUPING, USE MICROSTRUCTURE ID."
s_id = range(len(s_coords))
#s_eulers here is just a quick book keeping
s_eulers = np.array([[float(item) for item in line.split()[3:]]
for line in rawtext[n_header:]])
if(args.debug):
print d_print("resize point cloud to make space for rim/pan:",
s_coords)
if(args.addRim):
#add binding box to create rim/pan for the volume where the ID of the seeds is
#unknown
print "Shrining the seeds to {}x in each direction".format(1 - OFFSET*2)
x,y,z = args.size[0],args.size[1],args.size[2]
print "Use circumscribed sphere to place phantom seeds."
r = np.sqrt(x**2+y**2+z**2)/2.0
BINDBOX = [[0,0,0],[x,0,0],[0,y,0],[x,y,0],
[0,0,z],[x,0,z],[0,y,z],[x,y,z],
[x/2.0+r,y/2, z/2], [x/2.0-r, y/2, z/2],
[x/2, y/2.0+r, z/2], [x/2, y/2.0-r, z/2],
[x/2, y/2, z/2.0-r]] #8 corners + 5 face centers (no top)
print "Adding phantom seeds for RIM generation:"
for point in BINDBOX:
print point
s_coords = np.vstack([s_coords,point])
s_id.append(PHANTOM_ID)
else:
#The idea here is that we read in each seed point, then duplicate it in 3D (make a few copies),
#move on to the next seed point, and repeat the same procedure. As for the ID list, we can just use the
#same one. The trick here is to use floor division to find the correct id since we pretty much duplicate
#the same point several times.
Favg = np.array(args.Favg).reshape((3,3))
x,y,z = args.size[0],args.size[1],args.size[2]
tmp = []
for seed in s_coords:
tmp += [np.dot(Favg, np.array(seed) + np.array([dx,dy,dz]))
for dz in [-z, 0, z]
for dy in [-y, 0, y]
for dx in [-x, 0, x]]
s_coords = tmp
if (args.seedsFile):
with open("new_seed.seeds", "w") as f:
outstr = "4\theader\n"
outstr += "grid\ta {}\tb {}\tc {}\n".format(args.grid[0],
args.grid[1],
args.grid[2])
outstr += "microstructures {}\n".format(len(set(s_id)))
outstr += "x\ty\tz\tmicrostructure"
if (args.addRim):
for i in range(len(s_id)):
outstr += "{}\t{}\t{}\t{}\n".format(s_coords[i][0],
s_coords[i][1],
s_coords[i][2],
s_id[i])
else:
for i in range(len(s_coords)):
outstr += "{}\t{}\t{}\t{}\n".format(s_coords[i][0],
s_coords[i][1],
s_coords[i][2],
s_id[i//3**3])
f.write(outstr)
#triangulate space with given point-clouds
tri = Delaunay(s_coords)
if(args.debug):
d_print("simplices:", tri.simplices, separator=True)
d_print("vertices:", s_coords[tri.simplices])
#populate grid points (only 3D for now)
'''
#populating grid using meshgrid2
x = (np.arange(args.grid[0])+0.5)*args.size[0]/args.grid[0]
y = (np.arange(args.grid[1])+0.5)*args.size[1]/args.grid[1]
z = (np.arange(args.grid[2])+0.5)*args.size[2]/args.grid[2]
mesh_pts = np.transpose(np.vstack(map(np.ravel, meshgrid2(x, y, z))))
print mesh_pts
'''
#this is actually faster than using meshgrid2
mesh_pts = [[(i+0.5)*args.size[0]/args.grid[0],
(j+0.5)*args.size[1]/args.grid[1],
(k+0.5)*args.size[2]/args.grid[2]]
for k in range(args.grid[2])
for j in range(args.grid[1])
for i in range(args.grid[0])]
mesh_ids = [PHANTOM_ID*2]*len(mesh_pts) #initialize grid
#search ID for each grid point
s_id = np.array(s_id) #allow multi-indexing
mesh_idx = tri.find_simplex(mesh_pts)
for i, pt in enumerate(mesh_pts):
if mesh_idx[i] < 0:
continue #didn't find any enveloping tetrahedron --> something wrong!
#calculate Barycentric coordinates
bary_c = tri.transform[mesh_idx[i],:3,:3].dot(pt-tri.transform[mesh_idx[i],3,:])
bary_c = np.append(bary_c, 1 - bary_c.sum())
if (args.addRim):
tmp_ids = s_id[tri.simplices[mesh_idx[i]]] #rim method
else:
tmp_ids = np.array(s_id[tri.simplices[mesh_idx[i]]//(3**3)]) #kill periodicity through floor division
#print tmp_ids
#print tri.simplices[mesh_idx[i]]//(3**3)
max_weight = -1960
for this_id in tmp_ids:
msk = [item==this_id for item in tmp_ids] #find vertex with the same id
tmp_weight = sum([bary_c[j] for j in range(len(bary_c)) if msk[j]])
if tmp_weight > max_weight:
max_weight = tmp_weight
mesh_ids[i] = this_id
if (args.debug):
d_print("bary_c:",bary_c,separator=True)
d_print("vertex ID:", tmp_ids)
d_print("final ID:", mesh_ids[i])
mesh_ids = np.reshape(mesh_ids, (-1, args.grid[0]))
#write to file
with open(args.geomFile, "w") as f:
outstr = "5\theader\n"
outstr += "grid\ta {}\tb {}\tc {}\n".format(args.grid[0],
args.grid[1],
args.grid[2])
outstr += "size\tx {}\ty {}\tz {}\n".format(args.size[0],
args.size[1],
args.size[2])
outstr += "origin\tx {}\ty {}\tz {}\n".format(args.origin[0],
args.origin[1],
args.origin[2])
outstr += "homogenization\t{}\nmicrostructure\t{}\n".format(args.homogenization,
len(set(s_id)))
for row in mesh_ids:
row = [str(item) for item in list(row)]
outstr += "\t".join(row) + "\n"
f.write(outstr)
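Note: the lookup above leans on two scipy.spatial.Delaunay features: find_simplex locates the tetrahedron enclosing a query point, and tri.transform holds the affine map to barycentric coordinates that the script applies by hand. A minimal sketch on a single hypothetical tetrahedron:

import numpy as np
from scipy.spatial import Delaunay

pts = np.array([[0.,0.,0.],[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]])   # one tetrahedron
tri = Delaunay(pts)

q = np.array([[0.25, 0.25, 0.25]])                 # query location
s = tri.find_simplex(q)[0]                         # enclosing simplex (-1 if outside hull)
b = tri.transform[s,:3,:3].dot(q[0] - tri.transform[s,3,:])
bary = np.append(b, 1.0 - b.sum())                 # one weight per vertex, summing to 1

print(bary)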


@ -1,219 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,math,itertools
import numpy as np
from scipy import ndimage
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def periodic_3Dpad(array, rimdim=(1,1,1)):
rimdim = np.array(rimdim,'i')
size = np.array(array.shape,'i')
padded = np.empty(size+2*rimdim,array.dtype)
padded[rimdim[0]:rimdim[0]+size[0],
rimdim[1]:rimdim[1]+size[1],
rimdim[2]:rimdim[2]+size[2]] = array
p = np.zeros(3,'i')
for side in xrange(3):
for p[(side+2)%3] in xrange(padded.shape[(side+2)%3]):
for p[(side+1)%3] in xrange(padded.shape[(side+1)%3]):
for p[side%3] in xrange(rimdim[side%3]):
spot = (p-rimdim)%size
padded[p[0],p[1],p[2]] = array[spot[0],spot[1],spot[2]]
for p[side%3] in xrange(rimdim[side%3]+size[side%3],size[side%3]+2*rimdim[side%3]):
spot = (p-rimdim)%size
padded[p[0],p[1],p[2]] = array[spot[0],spot[1],spot[2]]
return padded
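Note: numpy can build the same periodic rim without the explicit loops; np.pad with mode='wrap' should be equivalent for matching pad widths (a sketch, not a drop-in replacement):

import numpy as np

a = np.arange(8).reshape(2,2,2)
padded = np.pad(a, 1, mode='wrap')            # rim of width 1 on every face, wrapped periodically

assert padded.shape == (4,4,4)
assert padded[0,1,1] == a[-1,0,0]             # rim cell equals the opposite face, as in periodic_3Dpad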
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
features = [
{'aliens': 1, 'alias': ['boundary','biplane'],},
{'aliens': 2, 'alias': ['tripleline',],},
{'aliens': 3, 'alias': ['quadruplepoint',],}
]
neighborhoods = {
'neumann':np.array([
[-1, 0, 0],
[ 1, 0, 0],
[ 0,-1, 0],
[ 0, 1, 0],
[ 0, 0,-1],
[ 0, 0, 1],
]),
'moore':np.array([
[-1,-1,-1],
[ 0,-1,-1],
[ 1,-1,-1],
[-1, 0,-1],
[ 0, 0,-1],
[ 1, 0,-1],
[-1, 1,-1],
[ 0, 1,-1],
[ 1, 1,-1],
#
[-1,-1, 0],
[ 0,-1, 0],
[ 1,-1, 0],
[-1, 0, 0],
#
[ 1, 0, 0],
[-1, 1, 0],
[ 0, 1, 0],
[ 1, 1, 0],
#
[-1,-1, 1],
[ 0,-1, 1],
[ 1,-1, 1],
[-1, 0, 1],
[ 0, 0, 1],
[ 1, 0, 1],
[-1, 1, 1],
[ 0, 1, 1],
[ 1, 1, 1],
])
}
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Produce geom files containing Euclidean distance to grain structural features:
boundaries, triple lines, and quadruple points.
""", version = scriptID)
parser.add_option('-t','--type',
dest = 'type',
action = 'extend', metavar = '<string LIST>',
help = 'feature type {%s} '%(', '.join(map(lambda x:'|'.join(x['alias']),features))) )
parser.add_option('-n','--neighborhood',
dest = 'neighborhood',
choices = neighborhoods.keys(), metavar = 'string',
help = 'type of neighborhood {%s} [neumann]'%(', '.join(neighborhoods.keys())))
parser.add_option('-s', '--scale',
dest = 'scale',
type = 'float', metavar = 'float',
help = 'voxel size [%default]')
parser.set_defaults(type = [],
neighborhood = 'neumann',
scale = 1.0,
)
(options,filenames) = parser.parse_args()
if len(options.type) == 0 or \
not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['alias'],features))))):
parser.error('select feature type from (%s).'%(', '.join(map(lambda x:'|'.join(x['alias']),features))) )
if 'biplane' in options.type and 'boundary' in options.type:
parser.error("only one alias out 'biplane' and 'boundary' required")
feature_list = []
for i,feature in enumerate(features):
for name in feature['alias']:
for myType in options.type:
if name.startswith(myType):
feature_list.append(i) # remember selected features
break
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False, readonly = True)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure
table.close()
neighborhood = neighborhoods[options.neighborhood]
convoluted = np.empty([len(neighborhood)]+list(info['grid']+2),'i')
structure = periodic_3Dpad(microstructure)
for i,p in enumerate(neighborhood):
stencil = np.zeros((3,3,3),'i')
stencil[1,1,1] = -1
stencil[p[0]+1,
p[1]+1,
p[2]+1] = 1
convoluted[i,:,:,:] = ndimage.convolve(structure,stencil)
convoluted = np.sort(convoluted,axis = 0)
uniques = np.where(convoluted[0,1:-1,1:-1,1:-1] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
for i in xrange(1,len(neighborhood)): # check remaining points in neighborhood
uniques += np.where(np.logical_and(
convoluted[i,1:-1,1:-1,1:-1] != convoluted[i-1,1:-1,1:-1,1:-1], # flip of ID difference detected?
convoluted[i,1:-1,1:-1,1:-1] != 0), # not myself?
1,0) # count flip
for feature in feature_list:
try:
table = damask.ASCIItable(outname = features[feature]['alias'][0]+'_'+name if name else name,
buffered = False, labeled = False)
except: continue
damask.util.croak(features[feature]['alias'][0])
distance = np.where(uniques >= features[feature]['aliens'],0.0,1.0) # seed with 0.0 when enough unique neighbor IDs are present
distance = ndimage.morphology.distance_transform_edt(distance, sampling = [options.scale]*3)
info['microstructures'] = int(math.ceil(distance.max()))
#--- write header ---------------------------------------------------------------------------------
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.labels_clear()
table.head_write()
# --- write microstructure information ------------------------------------------------------------
formatwidth = int(math.floor(math.log10(distance.max())+1))
table.data = distance.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray('%%%ii'%(formatwidth),delimiter=' ')
#--- output finalization --------------------------------------------------------------------------
table.close()
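Note: the script above counts, per voxel, how many distinct foreign IDs show up in the chosen neighborhood (1 flags a boundary, 2 a triple line, 3 a quadruple point) and then seeds scipy's Euclidean distance transform with zeros on those voxels. A toy 2-D sketch of the distance step:

import numpy as np
from scipy import ndimage

micro = np.array([[1, 1, 2, 2],
                  [1, 1, 2, 2]])              # two grains side by side

boundary = np.zeros(micro.shape, bool)
boundary[:,:-1] = micro[:,:-1] != micro[:,1:] # voxel whose right neighbor differs sits on a boundary

distance = ndimage.distance_transform_edt(np.where(boundary, 0.0, 1.0))
print(distance)                               # grows with distance from the boundary column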


@ -1,110 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,math
import numpy as np
from optparse import OptionParser
from PIL import Image
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Generate geometry description from (multilayer) images.
Microstructure index is based on gray scale value (1..256).
""", version = scriptID)
parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index [%default]')
parser.set_defaults(homogenization = 1,
)
(options,filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.geom' if name else name,
buffered = False, labeled = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- read image ------------------------------------------------------------------------------------
img = Image.open(name).convert(mode = 'L') # open and convert to grayscale 8bit
slice = 0
while True:
try:
img.seek(slice) # advance to slice
layer = np.expand_dims(1+np.array(img,dtype = 'uint16'),axis = 0) # read image layer
microstructure = layer if slice == 0 else np.vstack((microstructure,layer)) # noqa
slice += 1 # advance to next slice
except EOFError:
break
# http://docs.scipy.org/doc/scipy/reference/ndimage.html
# http://scipy-lectures.github.io/advanced/image_processing/
info = {
'grid': np.array(microstructure.shape,'i')[::-1],
'size': np.array(microstructure.shape,'d')[::-1],
'origin': np.zeros(3,'d'),
'microstructures': len(np.unique(microstructure)),
'homogenization': options.homogenization,
}
# --- report ---------------------------------------------------------------------------------------
table.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
table.croak(errors)
table.close(dismiss = True)
continue
# --- write header ---------------------------------------------------------------------------------
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.labels_clear()
table.head_write()
table.output_flush()
# --- write microstructure information ------------------------------------------------------------
formatwidth = int(math.floor(math.log10(microstructure.max())+1))
table.data = microstructure.reshape((info['grid'][1]*info['grid'][2],info['grid'][0]),order='C')
table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
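Note: the while/seek/EOFError loop above is the classic PIL idiom for walking the layers of a multi-frame image; Pillow's ImageSequence iterator expresses the same stacking more compactly (a sketch, assuming a hypothetical multilayer file stack.tif):

import numpy as np
from PIL import Image, ImageSequence

img = Image.open('stack.tif')                               # hypothetical multilayer image
microstructure = np.vstack([np.expand_dims(1 + np.array(frame.convert('L'), dtype='uint16'), axis=0)
                            for frame in ImageSequence.Iterator(img)])
print(microstructure.shape)                                 # (slices, height, width)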


@ -21,7 +21,7 @@ Generate geometry description and material configuration from position, phase, a
""", version = scriptID)
parser.add_option('--coordinates',
dest = 'coordinates',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'coordinates label')
parser.add_option('--phase',
@ -135,11 +135,11 @@ for name in filenames:
# ------------------------------------------ sanity checks ---------------------------------------
coordDim = table.label_dimension(options.coordinates)
coordDim = table.label_dimension(options.pos)
errors = []
if not 3 >= coordDim >= 2:
errors.append('coordinates "{}" need to have two or three dimensions.'.format(options.coordinates))
errors.append('coordinates "{}" need to have two or three dimensions.'.format(options.pos))
if not np.all(table.label_dimension(label) == dim):
errors.append('input "{}" needs to have dimension {}.'.format(label,dim))
if options.phase and table.label_dimension(options.phase) != 1:
@ -150,7 +150,7 @@ for name in filenames:
table.close(dismiss = True)
continue
table.data_readArray([options.coordinates] \
table.data_readArray([options.pos] \
+ ([label] if isinstance(label, types.StringTypes) else label) \
+ ([options.phase] if options.phase else []))


@ -4,7 +4,7 @@
import os,sys,math
import numpy as np
import multiprocessing
from optparse import OptionParser
from optparse import OptionParser,OptionGroup
from scipy import spatial
import damask
@ -109,98 +109,112 @@ Generate geometry description and material configuration by standard Voronoi tes
""", version = scriptID)
parser.add_option('-g', '--grid',
dest = 'grid',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c grid of hexahedral box [auto]')
parser.add_option('-s', '--size',
dest = 'size',
type = 'float', nargs = 3, metavar=' '.join(['float']*3),
help = 'x,y,z size of hexahedral box [auto]')
parser.add_option('-o', '--origin',
dest = 'origin',
type = 'float', nargs = 3, metavar=' '.join(['float']*3),
help = 'offset from old to new origin of grid')
parser.add_option('-p', '--position',
dest = 'position',
type = 'string', metavar = 'string',
help = 'column label for seed positions [%default]')
parser.add_option('-w', '--weight',
dest = 'weight',
type = 'string', metavar = 'string',
help = 'column label for seed weights [%default]')
parser.add_option('-m', '--microstructure',
dest = 'microstructure',
type = 'string', metavar = 'string',
help = 'column label for seed microstructures [%default]')
parser.add_option('-e', '--eulers',
dest = 'eulers',
type = 'string', metavar = 'string',
help = 'column label for seed Euler angles [%default]')
parser.add_option('--axes',
dest = 'axes',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'orientation coordinate frame in terms of position coordinate frame')
parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]')
parser.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')
parser.add_option('--phase',
dest = 'phase',
type = 'int', metavar = 'int',
help = 'phase index to be used [%default]')
parser.add_option('-r', '--rnd',
dest = 'randomSeed',
type = 'int', metavar='int',
help = 'seed of random number generator for second phase distribution [%default]')
parser.add_option('--secondphase',
dest = 'secondphase',
type = 'float', metavar= 'float',
help = 'volume fraction of randomly distribute second phase [%default]')
parser.add_option('-l', '--laguerre',
dest = 'laguerre',
action = 'store_true',
help = 'use Laguerre (weighted Voronoi) tessellation')
parser.add_option('--cpus',
dest = 'cpus',
type = 'int', metavar = 'int',
help = 'number of parallel processes to use for Laguerre tessellation [%default]')
parser.add_option('--nonperiodic',
dest = 'nonperiodic',
action = 'store_true',
help = 'use nonperiodic tessellation')
parser.set_defaults(position = 'pos',
group = OptionGroup(parser, "Tessellation","")
group.add_option('-l',
'--laguerre',
dest = 'laguerre',
action = 'store_true',
help = 'use Laguerre (weighted Voronoi) tessellation')
group.add_option('--cpus',
dest = 'cpus',
type = 'int', metavar = 'int',
help = 'number of parallel processes to use for Laguerre tessellation [%default]')
group.add_option('--nonperiodic',
dest = 'nonperiodic',
action = 'store_true',
help = 'nonperiodic tessellation')
parser.add_option_group(group)
group = OptionGroup(parser, "Geometry","")
group.add_option('-g',
'--grid',
dest = 'grid',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c grid of hexahedral box [auto]')
group.add_option('-s',
'--size',
dest = 'size',
type = 'float', nargs = 3, metavar=' '.join(['float']*3),
help = 'x,y,z size of hexahedral box [auto]')
group.add_option('-o',
'--origin',
dest = 'origin',
type = 'float', nargs = 3, metavar=' '.join(['float']*3),
help = 'origin of grid')
parser.add_option_group(group)
group = OptionGroup(parser, "Seeds","")
group.add_option('-p',
'--pos', '--seedposition',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
group.add_option('-w',
'--weight',
dest = 'weight',
type = 'string', metavar = 'string',
help = 'label of weights [%default]')
group.add_option('-m',
'--microstructure',
dest = 'microstructure',
type = 'string', metavar = 'string',
help = 'label of microstructures [%default]')
group.add_option('-e',
'--eulers',
dest = 'eulers',
type = 'string', metavar = 'string',
help = 'label of Euler angles [%default]')
group.add_option('--axes',
dest = 'axes',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'orientation coordinate frame in terms of position coordinate frame')
parser.add_option_group(group)
group = OptionGroup(parser, "Configuration","")
group.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]')
group.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')
group.add_option('--phase',
dest = 'phase',
type = 'int', metavar = 'int',
help = 'phase index to be used [%default]')
parser.add_option_group(group)
parser.set_defaults(pos = 'pos',
weight = 'weight',
microstructure = 'microstructure',
eulers = 'eulerangles',
eulers = 'euler',
homogenization = 1,
crystallite = 1,
phase = 1,
secondphase = 0.0,
cpus = 2,
laguerre = False,
nonperiodic = False,
randomSeed = None,
)
(options,filenames) = parser.parse_args()
if options.secondphase > 1.0 or options.secondphase < 0.0:
parser.error('volume fraction of second phase ({}) out of bounds.'.format(options.secondphase))
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[-2]+'.geom' if name else name,
buffered = False)
try: table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.geom' if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
@ -232,11 +246,11 @@ for name in filenames:
info['size'][i] = float(info['grid'][i])/max(info['grid']) # normalize to grid
remarks.append('rescaling size {} to {}...'.format({0:'x',1:'y',2:'z'}[i],info['size'][i]))
if table.label_dimension(options.position) != 3:
errors.append('position columns "{}" have dimension {}.'.format(options.position,
table.label_dimension(options.position)))
if table.label_dimension(options.pos) != 3:
errors.append('seed positions "{}" have dimension {}.'.format(options.pos,
table.label_dimension(options.pos)))
else:
labels += [options.position]
labels += [options.pos]
if not hasEulers: remarks.append('missing seed orientations...')
else: labels += [options.eulers]
@ -254,15 +268,14 @@ for name in filenames:
# ------------------------------------------ read seeds ---------------------------------------
table.data_readArray(labels)
coords = table.data[:,table.label_index(options.position):table.label_index(options.position)+3]\
* info['size']
eulers = table.data[:,table.label_index(options.eulers ):table.label_index(options.eulers )+3]\
if hasEulers else np.zeros(3*len(coords))
grains = table.data[:,table.label_index(options.microstructure)].astype('i')\
if hasGrains else 1+np.arange(len(coords))
weights = table.data[:,table.label_index(options.weight)]\
if hasWeights else np.zeros(len(coords))
grainIDs = np.unique(grains).astype('i')
coords = table.data[:,table.label_indexrange(options.pos)] * info['size']
eulers = table.data[:,table.label_indexrange(options.eulers)] if hasEulers \
else np.zeros(3*len(coords))
grains = table.data[:,table.label_indexrange(options.microstructure)].astype('i') if hasGrains \
else 1+np.arange(len(coords))
weights = table.data[:,table.label_indexrange(options.weight)] if hasWeights \
else np.zeros(len(coords))
grainIDs = np.unique(grains).astype('i')
NgrainIDs = len(grainIDs)
# --- tessellate microstructure ------------------------------------------------------------
@ -283,50 +296,38 @@ for name in filenames:
if info['homogenization'] == 0: info['homogenization'] = options.homogenization
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i%s'%(info['microstructures'],
(' out of %i'%NgrainIDs if NgrainIDs != info['microstructures'] else '')),
])
damask.util.report_geom(info,['grid','size','origin','homogenization',])
damask.util.croak(['microstructures: {}{}'.format(info['microstructures'],
(' out of {}'.format(NgrainIDs) if NgrainIDs != info['microstructures'] else '')),
])
config_header = []
formatwidth = 1+int(math.log10(NgrainIDs))
phase = options.phase * np.ones(NgrainIDs,'i')
if int(options.secondphase*info['microstructures']) > 0:
phase[0:int(options.secondphase*info['microstructures'])] += 1 # alter fraction 'options.secondphase' of used grainIDs
randomSeed = options.randomSeed if options.randomSeed \
else int(os.urandom(4).encode('hex'), 16) # random seed for second phase
np.random.seed(randomSeed)
np.random.shuffle(phase)
config_header += ['# random seed (phase shuffling): {}'.format(randomSeed)]
config_header += ['<microstructure>']
for i,ID in enumerate(grainIDs):
config_header += ['[Grain%s]'%(str(ID).zfill(formatwidth)),
'crystallite %i'%options.crystallite,
'(constituent)\tphase %i\ttexture %s\tfraction 1.0'%(phase[i],str(ID).rjust(formatwidth)),
config_header += ['[Grain{}]'.format(str(ID).zfill(formatwidth)),
'crystallite {}'.format(options.crystallite),
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(options.phase,str(ID).rjust(formatwidth)),
]
if hasEulers:
config_header += ['<texture>']
for ID in grainIDs:
eulerID = np.nonzero(grains == ID)[0][0] # find first occurrence of this grain id
config_header += ['[Grain%s]'%(str(ID).zfill(formatwidth)),
'(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0'%tuple(eulers[eulerID])
config_header += ['[Grain{}]'.format(str(ID).zfill(formatwidth)),
'(gauss)\tphi1 {:g}\tPhi {:g}\tphi2 {:g}\tscatter 0.0\tfraction 1.0'.format(*eulers[eulerID])
]
if options.axes is not None: config_header.append('axes\t%s %s %s'%tuple(options.axes))
if options.axes is not None: config_header.append('axes\t{} {} {}'.format(*options.axes))
table.labels_clear()
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(info['microstructures']),
config_header,
])
table.head_write()


@ -19,16 +19,18 @@ Translate geom description into ASCIItable containing 1/2/3_pos and microstructu
""", version = scriptID)
parser.add_option('-p','--position',
dest = 'position',
parser.add_option('-p',
'--pos', '--position',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'column label for position [%default]')
parser.add_option('-m','--microstructure',
help = 'label of coordinates [%default]')
parser.add_option('-m',
'--microstructure',
dest = 'microstructure',
type = 'string', metavar = 'string',
help = 'column label for microstructure index [%default]')
help = 'label of microstructure index [%default]')
parser.set_defaults(position = 'pos',
parser.set_defaults(pos = 'pos',
microstructure = 'microstructure',
)
@ -39,10 +41,11 @@ parser.set_defaults(position = 'pos',
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.txt' if name else name,
buffered = False, labeled = False)
try: table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.txt' if name else name,
buffered = False,
labeled = False,
)
except: continue
damask.util.report(scriptName,name)
@ -75,7 +78,7 @@ for name in filenames:
table.info_clear()
table.info_append(extra_header + [scriptID + '\t' + ' '.join(sys.argv[1:])])
table.labels_clear()
table.labels_append(['{}_{}'.format(1+i,options.position) for i in xrange(3)]+[options.microstructure])
table.labels_append(['{}_{}'.format(1+i,options.pos) for i in xrange(3)]+[options.microstructure])
table.head_write()
table.output_flush()


@ -1,125 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,vtk
import numpy as np
import damask
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [seedsfile[s]]', description = """
Produce VTK point mesh from seeds file
""", version = scriptID)
parser.add_option('-s', '--size',
dest = 'size',
type = 'float', nargs = 3, metavar = 'float float float',
help = 'x,y,z size of hexahedral box [1.0 along largest grid point number]')
parser.add_option('-p','--position',
dest = 'position',
type = 'string', metavar = 'string',
help = 'column label for coordinates [%default]')
parser.set_defaults(size = [0.0,0.0,0.0],
position = 'pos',
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
remarks = []
errors = []
if np.any(info['grid'] < 1): remarks.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0) \
and np.all(info['grid'] < 1): errors.append('invalid size x y z.')
else:
for i in xrange(3):
if info['size'][i] <= 0.0: # any invalid size?
info['size'][i] = float(info['grid'][i])/max(info['grid']) # normalize to grid
remarks.append('rescaling size {} to {}...'.format({0:'x',1:'y',2:'z'}[i],info['size'][i]))
if table.label_dimension(options.position) != 3:
errors.append('columns "{}" have dimension {}'.format(options.position,table.label_dimension(options.position)))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
labels = ['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in xrange(3)]
hasGrains = table.label_index('microstructure') != -1
labels += ['microstructure'] if hasGrains else []
table.data_readArray(labels) # read ASCIItable columns
coords = table.data[:,:3]*info['size'] # assign coordinates (rescaled to box size)
grain = table.data[:,3].astype('i') if hasGrains else 1+np.arange(len(coords),dtype='i') # assign grains
# --- generate grid --------------------------------------------------------------------------------
grid = vtk.vtkUnstructuredGrid()
pts = vtk.vtkPoints()
# --- process microstructure information -----------------------------------------------------------
IDs = vtk.vtkIntArray()
IDs.SetNumberOfComponents(1)
IDs.SetName("GrainID")
for i,item in enumerate(coords):
IDs.InsertNextValue(grain[i])
pid = pts.InsertNextPoint(item[0:3])
pointIds = vtk.vtkIdList()
pointIds.InsertId(0, pid)
grid.InsertNextCell(1, pointIds)
grid.SetPoints(pts)
grid.GetCellData().AddArray(IDs)
#--- write data -----------------------------------------------------------------------------------
if name:
writer = vtk.vtkXMLUnstructuredGridWriter()
(directory,filename) = os.path.split(name)
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]+'.'+writer.GetDefaultFileExtension()))
else:
writer = vtk.vtkDataSetWriter()
writer.WriteToOutputStringOn()
writer.SetHeader('# powered by '+scriptID)
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
else: writer.SetInputData(grid)
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
table.close()

processing/pre/seeds_check.sh Executable file

@ -0,0 +1,12 @@
#!/bin/bash
for seeds in "$@"
do
vtk_pointcloud $seeds
vtk_addPointCloudData $seeds \
--scalar microstructure,weight \
--inplace \
--vtk ${seeds%.*}.vtp
done


@ -133,10 +133,10 @@ class myThread (threading.Thread):
break
elif currentError[i] < target[i]['error']: # better fit
bestSeedsUpdate = time.time() # save time of better fit
damask.util.croak('Thread %i: Better match (%i bins, %6.4f --> %6.4f)'
%(self.threadID,i+1,target[i]['error'],currentError[i]))
damask.util.croak(' target: ',target[i]['histogram'])
damask.util.croak(' best: ',currentHist[i])
damask.util.croak('Thread {:d}: Better match ({:d} bins, {:6.4f} --> {:6.4f})'\
.format(self.threadID,i+1,target[i]['error'],currentError[i]))
damask.util.croak(' target: '+np.array_str(target[i]['histogram']))
damask.util.croak(' best: '+np.array_str(currentHist[i]))
currentSeedsName = baseFile+'_'+str(bestSeedsUpdate).replace('.','-') # name of new seed file (use time as unique identifier)
perturbedSeedsVFile.reset()
bestSeedsVFile.close()
@ -149,7 +149,7 @@ class myThread (threading.Thread):
for j in xrange(nMicrostructures): # save new errors for all bins
target[j]['error'] = currentError[j]
if myMatch > match: # one or more new bins have no deviation
damask.util.croak( 'Stage %i cleared'%(myMatch))
damask.util.croak( 'Stage {:d} cleared'.format(myMatch))
match=myMatch
sys.stdout.flush()
break
@ -164,8 +164,8 @@ class myThread (threading.Thread):
target[j]['error'] = currentError[j]
randReset = True
else: #--- not all grains are tessellated
damask.util.croak('Thread %i: Microstructure mismatch (%i microstructures mapped)'
%(self.threadID,myNmicrostructures))
damask.util.croak('Thread {:d}: Microstructure mismatch ({:d} microstructures mapped)'\
.format(self.threadID,myNmicrostructures))
randReset = True
@ -243,7 +243,7 @@ if os.path.isfile(os.path.splitext(options.seedFile)[0]+'.seeds'):
else:
bestSeedsVFile.write(damask.util.execute('seeds_fromRandom'+\
' -g '+' '.join(map(str, options.grid))+\
' -r %i'%options.randomSeed+\
' -r {:d}'.format(options.randomSeed)+\
' -N '+str(nMicrostructures))[0])
bestSeedsUpdate = time.time()
@ -279,7 +279,7 @@ if options.maxseeds < 1:
else:
maxSeeds = options.maxseeds
if match >0: damask.util.croak('Stage %i cleared'%match)
if match >0: damask.util.croak('Stage {:d} cleared'.format(match))
sys.stdout.flush()
initialGeomVFile.close()


@ -14,26 +14,30 @@ scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Create seed file taking microstructure indices from given geom file but excluding black-listed grains.
Create seed file taking microstructure indices from given geom file.
Indices can be black-listed or white-listed.
""", version = scriptID)
parser.add_option('-w','--white',
action = 'extend', metavar='<int LIST>',
parser.add_option('-w',
'--white',
action = 'extend', metavar = '<int LIST>',
dest = 'whitelist',
help = 'whitelist of grain IDs')
parser.add_option('-b','--black',
action = 'extend', metavar='<int LIST>',
parser.add_option('-b',
'--black',
action = 'extend', metavar = '<int LIST>',
dest = 'blacklist',
help = 'blacklist of grain IDs')
parser.add_option('-p','--position',
parser.add_option('-p',
'--pos', '--seedposition',
dest = 'position',
type = 'string', metavar = 'string',
help = 'column label for coordinates [%default]')
help = 'label of coordinates [%default]')
parser.set_defaults(whitelist = [],
blacklist = [],
position = 'pos',
pos = 'pos',
)
(options,filenames) = parser.parse_args()
@ -46,25 +50,18 @@ options.blacklist = map(int,options.blacklist)
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.seeds' if name else name,
buffered = False, labeled = False)
except:
continue
try: table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.seeds' if name else name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
damask.util.report_geom(info)
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
@ -98,14 +95,14 @@ for name in filenames:
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(info['microstructures']),
])
table.labels_clear()
table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in range(3)]+['microstructure'])
table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.pos) for i in range(3)]+['microstructure'])
table.head_write()
table.output_flush()


@ -34,61 +34,71 @@ Reports positions with random crystal orientations in seeds file format to STDOU
""", version = scriptID)
parser.add_option('-N', dest='N',
parser.add_option('-N',
dest = 'N',
type = 'int', metavar = 'int',
help = 'number of seed points to distribute [%default]')
parser.add_option('-g','--grid',
help = 'number of seed points [%default]')
parser.add_option('-g',
'--grid',
dest = 'grid',
type = 'int', nargs = 3, metavar = 'int int int',
help='min a,b,c grid of hexahedral box %default')
parser.add_option('-m', '--microstructure',
parser.add_option('-m',
'--microstructure',
dest = 'microstructure',
type = 'int', metavar='int',
type = 'int', metavar = 'int',
help = 'first microstructure index [%default]')
parser.add_option('-r', '--rnd',
parser.add_option('-r',
'--rnd',
dest = 'randomSeed', type = 'int', metavar = 'int',
help = 'seed of random number generator [%default]')
parser.add_option('--format',
dest = 'format', type = 'string', metavar = 'string',
help = 'output number format [auto]')
group = OptionGroup(parser, "Laguerre Tessellation Options",
group = OptionGroup(parser, "Laguerre Tessellation",
"Parameters determining shape of weight distribution of seed points"
)
group.add_option('-w', '--weights',
group.add_option( '-w',
'--weights',
action = 'store_true',
dest = 'weights',
help = 'assign random weigts to seed points for Laguerre tessellation [%default]')
group.add_option('--max',
help = 'assign random weights to seed points for Laguerre tessellation [%default]')
group.add_option( '--max',
dest = 'max',
type = 'float', metavar = 'float',
help = 'max of uniform distribution for weights [%default]')
group.add_option('--mean',
group.add_option( '--mean',
dest = 'mean',
type = 'float', metavar = 'float',
help = 'mean of normal distribution for weights [%default]')
group.add_option('--sigma',
dest = 'sigma',
type = 'float', metavar = 'float',
help='standard deviation of normal distribution for weights [%default]')
group.add_option( '--sigma',
dest = 'sigma',
type = 'float', metavar = 'float',
help='standard deviation of normal distribution for weights [%default]')
parser.add_option_group(group)
group = OptionGroup(parser, "Selective Seeding Options",
"More uniform distribution of seed points using Mitchell\'s Best Candidate Algorithm"
group = OptionGroup(parser, "Selective Seeding",
"More uniform distribution of seed points using Mitchell's Best Candidate Algorithm"
)
group.add_option('-s','--selective',
group.add_option( '-s',
'--selective',
action = 'store_true',
dest = 'selective',
help = 'selective picking of seed points from random seed points [%default]')
group.add_option('-f','--force',
group.add_option( '-f',
'--force',
action = 'store_true',
dest = 'force',
help = 'try selective picking despite large seed point number [%default]')
group.add_option('--distance',
dest = 'distance',
type = 'float', metavar = 'float',
help = 'minimum distance to the next neighbor [%default]')
group.add_option('--numCandidates',
dest = 'numCandidates',
type = 'int', metavar = 'int',
help = 'size of point group to select best distance from [%default]')
group.add_option( '--distance',
dest = 'distance',
type = 'float', metavar = 'float',
help = 'minimum distance to next neighbor [%default]')
group.add_option( '--numCandidates',
dest = 'numCandidates',
type = 'int', metavar = 'int',
help = 'size of point group to select best distance from [%default]')
parser.add_option_group(group)
parser.set_defaults(randomSeed = None,
@ -103,6 +113,7 @@ parser.set_defaults(randomSeed = None,
force = False,
distance = 0.2,
numCandidates = 10,
format = None,
)
(options,filenames) = parser.parse_args()
@ -120,11 +131,9 @@ random.seed(options.randomSeed)
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(outname = name,
buffered = False)
except:
continue
try: table = damask.ASCIItable(outname = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
# --- sanity checks -------------------------------------------------------------------------
@ -132,7 +141,7 @@ for name in filenames:
remarks = []
errors = []
if gridSize == 0:
errors.append('zero grid dimension for %s.'%(', '.join([['a','b','c'][x] for x in np.where(options.grid == 0)[0]])))
errors.append('zero grid dimension for {}.'.format(', '.join([['a','b','c'][x] for x in np.where(options.grid == 0)[0]])))
if options.N > gridSize/10.: errors.append('seed count exceeds 0.1 of grid points.')
if options.selective and 4./3.*math.pi*(options.distance/2.)**3*options.N > 0.5:
(remarks if options.force else errors).append('maximum recommended seed point count for given distance is {}.{}'.
@ -182,10 +191,8 @@ for name in filenames:
seeds = seeds.T # prepare shape for stacking
if options.weights:
if options.max > 0.0:
weights = [np.random.uniform(low = 0, high = options.max, size = options.N)]
else:
weights = [np.random.normal(loc = options.mean, scale = options.sigma, size = options.N)]
weights = [np.random.uniform(low = 0, high = options.max, size = options.N)] if options.max > 0.0 \
else [np.random.normal(loc = options.mean, scale = options.sigma, size = options.N)]
else:
weights = []
seeds = np.transpose(np.vstack(tuple([seeds,
@ -200,13 +207,13 @@ for name in filenames:
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=options.grid),
"grid\ta {}\tb {}\tc {}".format(*options.grid),
"microstructures\t{}".format(options.N),
"randomSeed\t{}".format(options.randomSeed),
])
table.labels_clear()
table.labels_append( ['{dim}_{label}'.format(dim = 1+k,label = 'pos') for k in xrange(3)] +
['{dim}_{label}'.format(dim = 1+k,label = 'eulerangles') for k in xrange(3)] +
['{dim}_{label}'.format(dim = 1+k,label = 'euler') for k in xrange(3)] +
['microstructure'] +
(['weight'] if options.weights else []))
table.head_write()
@ -215,7 +222,7 @@ for name in filenames:
# --- write seeds information ------------------------------------------------------------
table.data = seeds
table.data_writeArray()
table.data_writeArray(fmt = options.format)
# --- output finalization --------------------------------------------------------------------------
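Note: the "Selective Seeding" group above refers to Mitchell's best-candidate algorithm: each new seed is picked from numCandidates random trials as the one farthest from all previously accepted seeds. A minimal nonperiodic sketch (the script additionally enforces the --distance threshold and periodic images):

import numpy as np
from scipy import spatial

def best_candidate_seeds(N, numCandidates=10):
    """greedily maximize the distance of every new seed to the accepted ones"""
    seeds = [np.random.random(3)]
    while len(seeds) < N:
        candidates = np.random.random((numCandidates,3))
        dist = spatial.cKDTree(np.array(seeds)).query(candidates)[0]
        seeds.append(candidates[np.argmax(dist)])           # keep the loneliest candidate
    return np.array(seeds)

print(best_candidate_seeds(5))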


@ -1,126 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,itertools
import numpy as np
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Create seed file by taking microstructure indices from given ASCIItable column.
White and black-listing of microstructure indices is possible.
Examples:
--white 1,2,5 --index grainID isolates grainID entries of value 1, 2, and 5;
--black 1 --index grainID takes all grainID entries except for value 1.
""", version = scriptID)
parser.add_option('-p', '--positions',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'coordinate label [%default]')
parser.add_option('--boundingbox',
dest = 'box',
type = 'float', nargs = 6, metavar = ' '.join(['float']*6),
help = 'min (x,y,z) and max (x,y,z) coordinates of bounding box [tight]')
parser.add_option('-i', '--index',
dest = 'index',
type = 'string', metavar = 'string',
help = 'microstructure index label [%default]')
parser.add_option('-w','--white',
dest = 'whitelist',
action = 'extend', metavar = '<int LIST>',
help = 'whitelist of microstructure indices')
parser.add_option('-b','--black',
dest = 'blacklist',
action = 'extend', metavar = '<int LIST>',
help = 'blacklist of microstructure indices')
parser.set_defaults(pos = 'pos',
index ='microstructure',
)
(options,filenames) = parser.parse_args()
if options.whitelist is not None: options.whitelist = map(int,options.whitelist)
if options.blacklist is not None: options.blacklist = map(int,options.blacklist)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.seeds' if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
table.head_read() # read ASCII header info
# ------------------------------------------ sanity checks ---------------------------------------
missing_labels = table.data_readArray([options.pos,options.index])
errors = []
if len(missing_labels) > 0:
errors.append('column{} {} not found'.format('s' if len(missing_labels) > 1 else '',
', '.join(missing_labels)))
for label, dim in {options.pos: 3,
options.index: 1}.iteritems():
if table.label_dimension(label) != dim:
errors.append('column {} has wrong dimension'.format(label))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True) # close ASCII table file handles and delete output file
continue
# ------------------------------------------ process data ------------------------------------------
# --- finding bounding box -------------------------------------------------------------------------
boundingBox = np.array((np.amin(table.data[:,0:3],axis = 0),np.amax(table.data[:,0:3],axis = 0)))
if options.box:
boundingBox[0,:] = np.minimum(options.box[0:3],boundingBox[0,:])
boundingBox[1,:] = np.maximum(options.box[3:6],boundingBox[1,:])
# --- rescaling coordinates ------------------------------------------------------------------------
table.data[:,0:3] -= boundingBox[0,:]
table.data[:,0:3] /= boundingBox[1,:]-boundingBox[0,:]
# --- filtering of grain voxels --------------------------------------------------------------------
mask = np.logical_and(
np.ones_like(table.data[:,3],bool) if options.whitelist is None \
else np.in1d(table.data[:,3].ravel(), options.whitelist).reshape(table.data[:,3].shape),
np.ones_like(table.data[:,3],bool) if options.blacklist is None \
else np.invert(np.in1d(table.data[:,3].ravel(), options.blacklist).reshape(table.data[:,3].shape))
)
table.data = table.data[mask]
# ------------------------------------------ assemble header ---------------------------------------
table.info = [
scriptID,
'size %s'%(' '.join(list(itertools.chain.from_iterable(zip(['x','y','z'],
map(str,boundingBox[1,:]-boundingBox[0,:])))))),
]
table.labels_clear()
table.labels_append(['1_pos','2_pos','3_pos','microstructure']) # implicitly switching label processing/writing on
table.head_write()
# ------------------------------------------ output result ---------------------------------------
table.data_writeArray()
table.close() # close ASCII tables
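Note: the white/black filtering above boils down to two np.in1d membership tests combined with logical_and; a stripped-down sketch with made-up indices:

import numpy as np

index = np.array([1, 2, 5, 1, 3, 5])                        # hypothetical microstructure column
whitelist, blacklist = [1, 5], [5]

keep = np.logical_and(np.in1d(index, whitelist),            # keep listed indices ...
                      np.invert(np.in1d(index, blacklist))) # ... minus blacklisted ones

print(index[keep])                                          # [1 1]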


@ -0,0 +1,55 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Adds header to OIM grain file to make it accessible as ASCII table
""", version = scriptID)
parser.add_option('-l', '--labels',
dest = 'labels',
help = 'labels of requested columns')
parser.set_defaults(labels = ['1_euler','2_euler','3_euler',
'1_pos','2_pos', 'IQ', 'CI', 'Fit', 'GrainID',],
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)
table.head_read()
data = []
while table.data_read():
data.append(table.data[0:len(options.labels)])
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(options.labels)
table.head_write()
for i in data:
table.data = i
table.data_write()
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table