Merge branch 'development' into vectorized-orientation-conversion

This commit is contained in:
Martin Diehl 2020-01-14 09:00:10 +01:00
commit 3fc616bc7d
169 changed files with 3275 additions and 9097 deletions

4
.gitattributes vendored
View File

@ -3,8 +3,8 @@
# always use LF, even if the files are edited on windows, they need to be compiled/used on unix
* text eol=lf
# Denote all files that are truly binary and should not be modified.
# Denote all files that are binary and should not be modified.
*.png binary
*.jpg binary
*.cae binary
*.hdf5 binary
*.pdf binary

View File

@ -115,13 +115,6 @@ Pytest:
- release
###################################################################################################
OrientationRelationship:
stage: preprocessing
script: OrientationRelationship/test.py
except:
- master
- release
Pre_SeedGeneration:
stage: preprocessing
script: PreProcessing_SeedGeneration/test.py
@ -398,7 +391,6 @@ Marc_compileIfort:
stage: compileMarc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- Marc_compileIfort/test.py
except:
- master
@ -409,7 +401,6 @@ Hex_elastic:
stage: marc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- Hex_elastic/test.py
except:
- master
@ -419,7 +410,6 @@ CubicFCC_elastic:
stage: marc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- CubicFCC_elastic/test.py
except:
- master
@ -429,7 +419,6 @@ CubicBCC_elastic:
stage: marc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- CubicBCC_elastic/test.py
except:
- master
@ -439,7 +428,6 @@ J2_plasticBehavior:
stage: marc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- J2_plasticBehavior/test.py
except:
- master
@ -506,18 +494,6 @@ GridSolver:
- master
- release
Processing:
stage: createDocumentation
script:
- cd $DAMASKROOT/processing/pre
- $DAMASKROOT/PRIVATE/documenting/scriptHelpToWiki.py --debug *.py
- cd $DAMASKROOT/processing/post
- rm vtk2ang.py DAD*.py
- $DAMASKROOT/PRIVATE/documenting/scriptHelpToWiki.py --debug *.py
except:
- master
- release
##################################################################################################
backupData:
stage: saveDocumentation
@ -528,7 +504,6 @@ backupData:
- mv $TESTROOT/performance/time.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $TESTROOT/performance/memory.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $DAMASKROOT/PRIVATE/documenting/DAMASK_* $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $DAMASKROOT/processing $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
only:
- development

View File

@ -113,13 +113,11 @@ if (DAMASK_SOLVER STREQUAL "grid")
elseif (DAMASK_SOLVER STREQUAL "fem" OR DAMASK_SOLVER STREQUAL "mesh")
project (damask-mesh Fortran C)
add_definitions (-DFEM)
message ("Building FEM Solver\n")
message ("Building Mesh Solver\n")
else ()
message (FATAL_ERROR "Build target (DAMASK_SOLVER) is not defined")
endif ()
# set linker commands (needs to be done after defining the project)
set (CMAKE_LINKER "${PETSC_LINKER}")
list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake)
if (CMAKE_BUILD_TYPE STREQUAL "")
set (CMAKE_BUILD_TYPE "RELEASE")
@ -168,9 +166,6 @@ add_definitions (-DDAMASKVERSION="${DAMASK_V}")
# definition of other macros
add_definitions (-DPETSc)
set (DAMASK_INCLUDE_FLAGS "${DAMASK_INCLUDE_FLAGS} ${PETSC_INCLUDES}")
list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake)
if (CMAKE_Fortran_COMPILER_ID STREQUAL "Intel")
include(Compiler-Intel)
elseif(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
@ -183,14 +178,14 @@ endif ()
set (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${BUILDCMD_PRE} ${OPENMP_FLAGS} ${STANDARD_CHECK} ${OPTIMIZATION_FLAGS} ${COMPILE_FLAGS} ${PRECISION_FLAGS}")
set (CMAKE_Fortran_LINK_EXECUTABLE "${BUILDCMD_PRE} ${CMAKE_LINKER} ${OPENMP_FLAGS} ${OPTIMIZATION_FLAGS} ${LINKER_FLAGS}")
set (CMAKE_Fortran_LINK_EXECUTABLE "${BUILDCMD_PRE} ${PETSC_LINKER} ${OPENMP_FLAGS} ${OPTIMIZATION_FLAGS} ${LINKER_FLAGS}")
if (CMAKE_BUILD_TYPE STREQUAL "DEBUG")
set (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE}} ${DEBUG_FLAGS}")
set (CMAKE_Fortran_LINK_EXECUTABLE "${CMAKE_Fortran_LINK_EXECUTABLE} ${DEBUG_FLAGS}")
endif ()
set (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE}} ${DAMASK_INCLUDE_FLAGS} ${BUILDCMD_POST}")
set (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE}} ${PETSC_INCLUDES} ${BUILDCMD_POST}")
set (CMAKE_Fortran_LINK_EXECUTABLE "${CMAKE_Fortran_LINK_EXECUTABLE} <OBJECTS> -o <TARGET> <LINK_LIBRARIES> ${PETSC_EXTERNAL_LIB} ${BUILDCMD_POST}")
message ("Fortran Compiler Flags:\n${CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE}}\n")

4
CONFIG
View File

@ -1,11 +1,7 @@
# "set"-syntax needed only for tcsh (but works with bash and zsh)
# DAMASK_ROOT will be expanded
set DAMASK_NUM_THREADS = 4
set MSC_ROOT = /opt/msc
set MARC_VERSION = 2019
set ABAQUS_VERSION = 2019
set DAMASK_HDF5 = ON

View File

@ -1,4 +1,4 @@
Copyright 2011-19 Max-Planck-Institut für Eisenforschung GmbH
Copyright 2011-20 Max-Planck-Institut für Eisenforschung GmbH
DAMASK is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by

View File

@ -26,7 +26,7 @@ build/grid:
.PHONY: build/mesh
build/mesh:
@mkdir -p build/mesh
@(cd build/mesh; cmake -Wno-dev -DDAMASK_SOLVER=FEM -DCMAKE_INSTALL_PREFIX=${DAMASK_ROOT} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
@(cd build/mesh; cmake -Wno-dev -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${DAMASK_ROOT} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
.PHONY: clean
clean:

@ -1 +1 @@
Subproject commit 524e86c117d816e3bd873eed7663e258a6f2e139
Subproject commit 99b076706a186ec7deb051ae181c2d9697c799fc

View File

@ -1 +1 @@
v2.0.3-1237-g5a2053cd
v2.0.3-1484-g93fca511

View File

@ -2,129 +2,129 @@
# GNU Compiler
###################################################################################################
if (OPENMP)
set (OPENMP_FLAGS "-fopenmp")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-fopenmp")
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0" )
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3 -ffast-math -funroll-loops -ftree-vectorize")
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0" )
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3 -ffast-math -funroll-loops -ftree-vectorize")
endif ()
set (STANDARD_CHECK "-std=f2008ts -pedantic-errors" )
set (LINKER_FLAGS "${LINKER_FLAGS} -Wl")
# options parsed directly to the linker
set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
# ensure to link against dynamic libraries
set (STANDARD_CHECK "-std=f2008ts -pedantic-errors" )
set (LINKER_FLAGS "${LINKER_FLAGS} -Wl")
# options parsed directly to the linker
set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
# ensure to link against dynamic libraries
#------------------------------------------------------------------------------------------------
# Fine tuning compilation options
set (COMPILE_FLAGS "${COMPILE_FLAGS} -xf95-cpp-input")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -xf95-cpp-input")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffree-line-length-132")
# restrict line length to the standard 132 characters (lattice.f90 require more characters)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffree-line-length-132")
# restrict line length to the standard 132 characters (lattice.f90 require more characters)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fimplicit-none")
# assume "implicit none" even if not present in source
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fimplicit-none")
# assume "implicit none" even if not present in source
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wall")
# sets the following Fortran options:
# -Waliasing: warn about possible aliasing of dummy arguments. Specifically, it warns if the same actual argument is associated with a dummy argument with "INTENT(IN)" and a dummy argument with "INTENT(OUT)" in a call with an explicit interface.
# -Wampersand: checks if a character expression is continued properly by an ampersand at the end of the line and at the beginning of the new line
# -Warray-bounds: checks if array reference is out of bounds at compile time. use -fcheck-bounds to also check during runtime
# -Wconversion: warn about implicit conversions between different type
# -Wsurprising: warn when "suspicious" code constructs are encountered. While technically legal these usually indicate that an error has been made.
# -Wc-binding-type:
# -Wintrinsics-std: only standard intrinsics are available, e.g. "call flush(6)" will cause an error
# -Wno-tabs: do not allow tabs in source
# -Wintrinsic-shadow: warn if a user-defined procedure or module procedure has the same name as an intrinsic
# -Wline-truncation:
# -Wtarget-lifetime:
# -Wreal-q-constant: warn about real-literal-constants with 'q' exponent-letter
# -Wunused: a number of unused-xxx warnings
# and sets the general (non-Fortran options) options:
# -Waddress
# -Warray-bounds (only with -O2)
# -Wc++11-compat
# -Wchar-subscripts
# -Wcomment
# -Wformat
# -Wmaybe-uninitialized
# -Wnonnull
# -Wparentheses
# -Wpointer-sign
# -Wreorder
# -Wreturn-type
# -Wsequence-point
# -Wstrict-aliasing
# -Wstrict-overflow=1
# -Wswitch
# -Wtrigraphs
# -Wuninitialized
# -Wunknown-pragmas
# -Wunused-function
# -Wunused-label
# -Wunused-value
# -Wunused-variable
# -Wvolatile-register-var
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wall")
# sets the following Fortran options:
# -Waliasing: warn about possible aliasing of dummy arguments. Specifically, it warns if the same actual argument is associated with a dummy argument with "INTENT(IN)" and a dummy argument with "INTENT(OUT)" in a call with an explicit interface.
# -Wampersand: checks if a character expression is continued properly by an ampersand at the end of the line and at the beginning of the new line
# -Warray-bounds: checks if array reference is out of bounds at compile time. use -fcheck-bounds to also check during runtime
# -Wconversion: warn about implicit conversions between different type
# -Wsurprising: warn when "suspicious" code constructs are encountered. While technically legal these usually indicate that an error has been made.
# -Wc-binding-type:
# -Wintrinsics-std: only standard intrinsics are available, e.g. "call flush(6)" will cause an error
# -Wno-tabs: do not allow tabs in source
# -Wintrinsic-shadow: warn if a user-defined procedure or module procedure has the same name as an intrinsic
# -Wline-truncation:
# -Wtarget-lifetime:
# -Wreal-q-constant: warn about real-literal-constants with 'q' exponent-letter
# -Wunused: a number of unused-xxx warnings
# and sets the general (non-Fortran options) options:
# -Waddress
# -Warray-bounds (only with -O2)
# -Wc++11-compat
# -Wchar-subscripts
# -Wcomment
# -Wformat
# -Wmaybe-uninitialized
# -Wnonnull
# -Wparentheses
# -Wpointer-sign
# -Wreorder
# -Wreturn-type
# -Wsequence-point
# -Wstrict-aliasing
# -Wstrict-overflow=1
# -Wswitch
# -Wtrigraphs
# -Wuninitialized
# -Wunknown-pragmas
# -Wunused-function
# -Wunused-label
# -Wunused-value
# -Wunused-variable
# -Wvolatile-register-var
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wextra")
# sets the following Fortran options:
# -Wunuses-parameter:
# -Wcompare-reals:
# and sets the general (non-Fortran options) options:
# -Wclobbered
# -Wempty-body
# -Wignored-qualifiers
# -Wmissing-field-initializers
# -Woverride-init
# -Wsign-compare
# -Wtype-limits
# -Wuninitialized
# -Wunused-but-set-parameter (only with -Wunused or -Wall)
# -Wno-globals
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wextra")
# sets the following Fortran options:
# -Wunuses-parameter:
# -Wcompare-reals:
# and sets the general (non-Fortran options) options:
# -Wclobbered
# -Wempty-body
# -Wignored-qualifiers
# -Wmissing-field-initializers
# -Woverride-init
# -Wsign-compare
# -Wtype-limits
# -Wuninitialized
# -Wunused-but-set-parameter (only with -Wunused or -Wall)
# -Wno-globals
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wcharacter-truncation")
# warn if character expressions (strings) are truncated
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wcharacter-truncation")
# warn if character expressions (strings) are truncated
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wunderflow")
# produce a warning when numerical constant expressions are encountered, which yield an UNDERFLOW during compilation
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wunderflow")
# produce a warning when numerical constant expressions are encountered, which yield an UNDERFLOW during compilation
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=pure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=noreturn")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wconversion-extra")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wimplicit-procedure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wno-unused-parameter")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffpe-summary=all")
# print summary of floating point exceptions (invalid,zero,overflow,underflow,inexact,denormal)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=pure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=noreturn")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wconversion-extra")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wimplicit-procedure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wno-unused-parameter")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffpe-summary=all")
# print summary of floating point exceptions (invalid,zero,overflow,underflow,inexact,denormal)
# Additional options
# -Warray-temporaries: warnings because we have many temporary arrays (performance issue?)
# -Wimplicit-interface: no interfaces for lapack/MPI routines
# -Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions.
# Additional options
# -Warray-temporaries: warnings because we have many temporary arrays (performance issue?)
# -Wimplicit-interface: no interfaces for lapack/MPI routines
# -Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions.
#------------------------------------------------------------------------------------------------
# Runtime debugging
set (DEBUG_FLAGS "${DEBUG_FLAGS} -ffpe-trap=invalid,zero,overflow")
# stop execution if floating point exception is detected (NaN is silent)
set (DEBUG_FLAGS "${DEBUG_FLAGS} -ffpe-trap=invalid,zero,overflow")
# stop execution if floating point exception is detected (NaN is silent)
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Generate symbolic debugging information in the object file
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Generate symbolic debugging information in the object file
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fbacktrace")
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fdump-core")
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fcheck=all")
# checks for (array-temps,bounds,do,mem,pointer,recursion)
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fbacktrace")
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fdump-core")
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fcheck=all")
# checks for (array-temps,bounds,do,mem,pointer,recursion)
# Additional options
# -ffpe-trap=precision,denormal,underflow
# Additional options
# -ffpe-trap=precision,denormal,underflow
#------------------------------------------------------------------------------------------------
# precision settings
set (PRECISION_FLAGS "${PRECISION_FLAGS} -fdefault-real-8")
# set precision to 8 bytes for standard real (=8 for pReal). Will set size of double to 16 bytes as long as -fdefault-double-8 is not set
set (PRECISION_FLAGS "${PRECISION_FLAGS} -fdefault-double-8")
# set precision to 8 bytes for double real, would be 16 bytes if -fdefault-real-8 is used
set (PRECISION_FLAGS "${PRECISION_FLAGS} -fdefault-real-8")
# set precision to 8 bytes for standard real (=8 for pReal). Will set size of double to 16 bytes as long as -fdefault-double-8 is not set
set (PRECISION_FLAGS "${PRECISION_FLAGS} -fdefault-double-8")
# set precision to 8 bytes for double real, would be 16 bytes if -fdefault-real-8 is used

View File

@ -1,116 +1,116 @@
###################################################################################################
# Intel Compiler
###################################################################################################
if (OPENMP)
set (OPENMP_FLAGS "-qopenmp -parallel")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-qopenmp -parallel")
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0 -no-ip")
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-ipo -O3 -no-prec-div -fp-model fast=2 -xHost")
# -fast = -ipo, -O3, -no-prec-div, -static, -fp-model fast=2, and -xHost"
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0 -no-ip")
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-ipo -O3 -no-prec-div -fp-model fast=2 -xHost")
# -fast = -ipo, -O3, -no-prec-div, -static, -fp-model fast=2, and -xHost"
endif ()
# -assume std_mod_proc_name (included in -standard-semantics) causes problems if other modules
# (PETSc, HDF5) are not compiled with this option (https://software.intel.com/en-us/forums/intel-fortran-compiler-for-linux-and-mac-os-x/topic/62172)
set (STANDARD_CHECK "-stand f15 -standard-semantics -assume nostd_mod_proc_name")
set (LINKER_FLAGS "${LINKER_FLAGS} -shared-intel")
# Link against shared Intel libraries instead of static ones
# -assume std_mod_proc_name (included in -standard-semantics) causes problems if other modules
# (PETSc, HDF5) are not compiled with this option (https://software.intel.com/en-us/forums/intel-fortran-compiler-for-linux-and-mac-os-x/topic/62172)
set (STANDARD_CHECK "-stand f15 -standard-semantics -assume nostd_mod_proc_name")
set (LINKER_FLAGS "${LINKER_FLAGS} -shared-intel")
# Link against shared Intel libraries instead of static ones
#------------------------------------------------------------------------------------------------
# Fine tuning compilation options
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fpp")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fpp")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ftz")
# flush underflow to zero, automatically set if -O[1,2,3]
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ftz")
# flush underflow to zero, automatically set if -O[1,2,3]
set (COMPILE_FLAGS "${COMPILE_FLAGS} -diag-disable")
# disables warnings ...
set (COMPILE_FLAGS "${COMPILE_FLAGS} 5268")
# ... the text exceeds right hand column allowed on the line (we have only comments there)
set (COMPILE_FLAGS "${COMPILE_FLAGS},7624")
# ... about deprecated forall (has nice syntax and most likely a performance advantage)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -diag-disable")
# disables warnings ...
set (COMPILE_FLAGS "${COMPILE_FLAGS} 5268")
# ... the text exceeds right hand column allowed on the line (we have only comments there)
set (COMPILE_FLAGS "${COMPILE_FLAGS},7624")
# ... about deprecated forall (has nice syntax and most likely a performance advantage)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -warn")
# enables warnings ...
set (COMPILE_FLAGS "${COMPILE_FLAGS} declarations")
# ... any undeclared names (alternative name: -implicitnone)
set (COMPILE_FLAGS "${COMPILE_FLAGS},general")
# ... warning messages and informational messages are issued by the compiler
set (COMPILE_FLAGS "${COMPILE_FLAGS},usage")
# ... questionable programming practices
set (COMPILE_FLAGS "${COMPILE_FLAGS},interfaces")
# ... checks the interfaces of all SUBROUTINEs called and FUNCTIONs invoked in your compilation against an external set of interface blocks
set (COMPILE_FLAGS "${COMPILE_FLAGS},ignore_loc")
# ... %LOC is stripped from an actual argument
set (COMPILE_FLAGS "${COMPILE_FLAGS},alignments")
# ... data that is not naturally aligned
set (COMPILE_FLAGS "${COMPILE_FLAGS},unused")
# ... declared variables that are never used
set (COMPILE_FLAGS "${COMPILE_FLAGS} -warn")
# enables warnings ...
set (COMPILE_FLAGS "${COMPILE_FLAGS} declarations")
# ... any undeclared names (alternative name: -implicitnone)
set (COMPILE_FLAGS "${COMPILE_FLAGS},general")
# ... warning messages and informational messages are issued by the compiler
set (COMPILE_FLAGS "${COMPILE_FLAGS},usage")
# ... questionable programming practices
set (COMPILE_FLAGS "${COMPILE_FLAGS},interfaces")
# ... checks the interfaces of all SUBROUTINEs called and FUNCTIONs invoked in your compilation against an external set of interface blocks
set (COMPILE_FLAGS "${COMPILE_FLAGS},ignore_loc")
# ... %LOC is stripped from an actual argument
set (COMPILE_FLAGS "${COMPILE_FLAGS},alignments")
# ... data that is not naturally aligned
set (COMPILE_FLAGS "${COMPILE_FLAGS},unused")
# ... declared variables that are never used
# Additional options
# -warn: enables warnings, where
# truncated_source: Determines whether warnings occur when source exceeds the maximum column width in fixed-format files.
# (too many warnings because we have comments beyond character 132)
# uncalled: Determines whether warnings occur when a statement function is never called
# all:
# -name as_is: case sensitive Fortran!
# Additional options
# -warn: enables warnings, where
# truncated_source: Determines whether warnings occur when source exceeds the maximum column width in fixed-format files.
# (too many warnings because we have comments beyond character 132)
# uncalled: Determines whether warnings occur when a statement function is never called
# all:
# -name as_is: case sensitive Fortran!
#------------------------------------------------------------------------------------------------
# Runtime debugging
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Generate symbolic debugging information in the object file
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Generate symbolic debugging information in the object file
set (DEBUG_FLAGS "${DEBUG_FLAGS} -traceback")
# Generate extra information in the object file to provide source file traceback information when a severe error occurs at run time
set (DEBUG_FLAGS "${DEBUG_FLAGS} -traceback")
# Generate extra information in the object file to provide source file traceback information when a severe error occurs at run time
set (DEBUG_FLAGS "${DEBUG_FLAGS} -gen-interfaces")
# Generate an interface block for each routine. http://software.intel.com/en-us/blogs/2012/01/05/doctor-fortran-gets-explicit-again/
set (DEBUG_FLAGS "${DEBUG_FLAGS} -gen-interfaces")
# Generate an interface block for each routine. http://software.intel.com/en-us/blogs/2012/01/05/doctor-fortran-gets-explicit-again/
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fp-stack-check")
# Generate extra code after every function call to ensure that the floating-point (FP) stack is in the expected state
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fp-stack-check")
# Generate extra code after every function call to ensure that the floating-point (FP) stack is in the expected state
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fp-model strict")
# Trap uninitialized variables
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fp-model strict")
# Trap uninitialized variables
set (DEBUG_FLAGS "${DEBUG_FLAGS} -check" )
# Checks at runtime ...
set (DEBUG_FLAGS "${DEBUG_FLAGS} bounds")
# ... if an array index is too small (<1) or too large!
set (DEBUG_FLAGS "${DEBUG_FLAGS},format")
# ... for the data type of an item being formatted for output.
set (DEBUG_FLAGS "${DEBUG_FLAGS},output_conversion")
# ... for the fit of data items within a designated format descriptor field.
set (DEBUG_FLAGS "${DEBUG_FLAGS},pointers")
# ... for certain disassociated or uninitialized pointers or unallocated allocatable objects.
set (DEBUG_FLAGS "${DEBUG_FLAGS},uninit")
# ... for uninitialized variables.
set (DEBUG_FLAGS "${DEBUG_FLAGS} -ftrapuv")
# ... initializes stack local variables to an unusual value to aid error detection
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fpe-all=0")
# ... capture all floating-point exceptions, sets -ftz automatically
set (DEBUG_FLAGS "${DEBUG_FLAGS} -check" )
# Checks at runtime ...
set (DEBUG_FLAGS "${DEBUG_FLAGS} bounds")
# ... if an array index is too small (<1) or too large!
set (DEBUG_FLAGS "${DEBUG_FLAGS},format")
# ... for the data type of an item being formatted for output.
set (DEBUG_FLAGS "${DEBUG_FLAGS},output_conversion")
# ... for the fit of data items within a designated format descriptor field.
set (DEBUG_FLAGS "${DEBUG_FLAGS},pointers")
# ... for certain disassociated or uninitialized pointers or unallocated allocatable objects.
set (DEBUG_FLAGS "${DEBUG_FLAGS},uninit")
# ... for uninitialized variables.
set (DEBUG_FLAGS "${DEBUG_FLAGS} -ftrapuv")
# ... initializes stack local variables to an unusual value to aid error detection
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fpe-all=0")
# ... capture all floating-point exceptions, sets -ftz automatically
set (DEBUG_FLAGS "${DEBUG_FLAGS} -warn")
# enables warnings ...
set (DEBUG_FLAGS "${DEBUG_FLAGS} errors")
# ... warnings are changed to errors
set (DEBUG_FLAGS "${DEBUG_FLAGS},stderrors")
# ... warnings about Fortran standard violations are changed to errors
set (DEBUG_FLAGS "${DEBUG_FLAGS} -warn")
# enables warnings ...
set (DEBUG_FLAGS "${DEBUG_FLAGS} errors")
# ... warnings are changed to errors
set (DEBUG_FLAGS "${DEBUG_FLAGS},stderrors")
# ... warnings about Fortran standard violations are changed to errors
set (DEBUG_FLAGS "${DEBUG_FLAGS} -debug-parameters all")
# generate debug information for parameters
set (DEBUG_FLAGS "${DEBUG_FLAGS} -debug-parameters all")
# generate debug information for parameters
# Additional options
# -heap-arrays: Should not be done for OpenMP, but set "ulimit -s unlimited" on shell. Probably it helps also to unlimit other limits
# -check: Checks at runtime, where
# arg_temp_created: will cause a lot of warnings because we create a bunch of temporary arrays (performance?)
# stack:
# Additional options
# -heap-arrays: Should not be done for OpenMP, but set "ulimit -s unlimited" on shell. Probably it helps also to unlimit other limits
# -check: Checks at runtime, where
# arg_temp_created: will cause a lot of warnings because we create a bunch of temporary arrays (performance?)
# stack:
#------------------------------------------------------------------------------------------------
# precision settings
set (PRECISION_FLAGS "${PRECISION_FLAGS} -real-size 64")
# set precision for standard real to 32 | 64 | 128 (= 4 | 8 | 16 bytes, type pReal is always 8 bytes)
set (PRECISION_FLAGS "${PRECISION_FLAGS} -real-size 64")
# set precision for standard real to 32 | 64 | 128 (= 4 | 8 | 16 bytes, type pReal is always 8 bytes)

View File

@ -1,25 +1,24 @@
###################################################################################################
# PGI Compiler
###################################################################################################
elseif(CMAKE_Fortran_COMPILER_ID STREQUAL "PGI")
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0" )
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3")
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0" )
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3")
endif ()
#------------------------------------------------------------------------------------------------
# Fine tuning compilation options
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Mpreprocess")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Mpreprocess")
# preprocessor
set (STANDARD_CHECK "-Mallocatable=03")
set (STANDARD_CHECK "-Mallocatable=03")
#------------------------------------------------------------------------------------------------
# Runtime debugging
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Includes debugging information in the object module; sets the optimization level to zero unless a -O option is present on the command line
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Includes debugging information in the object module; sets the optimization level to zero unless a -O option is present on the command line

22
env/DAMASK.csh vendored
View File

@ -7,12 +7,6 @@ set DAMASK_ROOT=`python -c "import os,sys; print(os.path.realpath(os.path.expand
source $DAMASK_ROOT/CONFIG
# add BRANCH if DAMASK_ROOT is a git repository
cd $DAMASK_ROOT >/dev/null
set BRANCH = `git branch 2>/dev/null| grep -E '^\* ')`
cd - >/dev/null
# if DAMASK_BIN is present
set path = ($DAMASK_ROOT/bin $path)
set SOLVER=`which DAMASK_spectral`
@ -21,20 +15,12 @@ if ( "x$DAMASK_NUM_THREADS" == "x" ) then
set DAMASK_NUM_THREADS=1
endif
# currently, there is no information that unlimited causes problems
# currently, there is no information that unlimited stack size causes problems
# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
# more info https://jblevins.org/log/segfault
# https://stackoverflow.com/questions/79923/what-and-where-are-the-stack-and-heap
# http://superuser.com/questions/220059/what-parameters-has-ulimit
limit datasize unlimited # maximum heap size (kB)
# http://superuser.com/questions/220059/what-parameters-has-ulimit
limit stacksize unlimited # maximum stack size (kB)
endif
if ( `limit | grep memoryuse` != "" ) then
limit memoryuse unlimited # maximum physical memory size
endif
if ( `limit | grep vmemoryuse` != "" ) then
limit vmemoryuse unlimited # maximum virtual memory size
endif
# disable output in case of scp
if ( $?prompt ) then
@ -44,8 +30,8 @@ if ( $?prompt ) then
echo https://damask.mpie.de
echo
echo Using environment with ...
echo "DAMASK $DAMASK_ROOT $BRANCH"
echo "Spectral Solver $SOLVER"
echo "DAMASK $DAMASK_ROOT"
echo "Grid Solver $SOLVER"
echo "Post Processing $PROCESSING"
if ( $?PETSC_DIR) then
echo "PETSc location $PETSC_DIR"

11
env/DAMASK.sh vendored
View File

@ -43,15 +43,12 @@ PROCESSING=$(type -p postResults || true 2>/dev/null)
[ "x$DAMASK_NUM_THREADS" == "x" ] && DAMASK_NUM_THREADS=1
# currently, there is no information that unlimited causes problems
# currently, there is no information that unlimited stack size causes problems
# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
# more info https://jblevins.org/log/segfault
# https://stackoverflow.com/questions/79923/what-and-where-are-the-stack-and-heap
# http://superuser.com/questions/220059/what-parameters-has-ulimit
ulimit -d unlimited 2>/dev/null # maximum heap size (kB)
# http://superuser.com/questions/220059/what-parameters-has-ulimit
ulimit -s unlimited 2>/dev/null # maximum stack size (kB)
ulimit -v unlimited 2>/dev/null # maximum virtual memory size
ulimit -m unlimited 2>/dev/null # maximum physical memory size
# disable output in case of scp
if [ ! -z "$PS1" ]; then
@ -62,7 +59,7 @@ if [ ! -z "$PS1" ]; then
echo
echo Using environment with ...
echo "DAMASK $DAMASK_ROOT $BRANCH"
echo "Spectral Solver $SOLVER"
echo "Grid Solver $SOLVER"
echo "Post Processing $PROCESSING"
if [ "x$PETSC_DIR" != "x" ]; then
echo -n "PETSc location "
@ -96,7 +93,7 @@ fi
export DAMASK_NUM_THREADS
export PYTHONPATH=$DAMASK_ROOT/python:$PYTHONPATH
for var in BASE STAT SOLVER PROCESSING FREE DAMASK_BIN BRANCH; do
for var in BASE STAT SOLVER PROCESSING BRANCH; do
unset "${var}"
done
for var in DAMASK MSC; do

12
env/DAMASK.zsh vendored
View File

@ -24,7 +24,6 @@ unset -f set
# add BRANCH if DAMASK_ROOT is a git repository
cd $DAMASK_ROOT >/dev/null; BRANCH=$(git branch 2>/dev/null| grep -E '^\* '); cd - >/dev/null
# add DAMASK_BIN if present
PATH=${DAMASK_ROOT}/bin:$PATH
SOLVER=$(which DAMASK_spectral || true 2>/dev/null)
@ -35,15 +34,12 @@ PROCESSING=$(which postResults || true 2>/dev/null)
[[ "x$DAMASK_NUM_THREADS" == "x" ]] && DAMASK_NUM_THREADS=1
# currently, there is no information that unlimited causes problems
# currently, there is no information that unlimited stack size causes problems
# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
# more info https://jblevins.org/log/segfault
# https://stackoverflow.com/questions/79923/what-and-where-are-the-stack-and-heap
# http://superuser.com/questions/220059/what-parameters-has-ulimit
ulimit -d unlimited 2>/dev/null # maximum heap size (kB)
# http://superuser.com/questions/220059/what-parameters-has-ulimit
ulimit -s unlimited 2>/dev/null # maximum stack size (kB)
ulimit -v unlimited 2>/dev/null # maximum virtual memory size
ulimit -m unlimited 2>/dev/null # maximum physical memory size
# disable output in case of scp
if [ ! -z "$PS1" ]; then
@ -54,7 +50,7 @@ if [ ! -z "$PS1" ]; then
echo
echo "Using environment with ..."
echo "DAMASK $DAMASK_ROOT $BRANCH"
echo "Spectral Solver $SOLVER"
echo "Grid Solver $SOLVER"
echo "Post Processing $PROCESSING"
if [ "x$PETSC_DIR" != "x" ]; then
echo -n "PETSc location "
@ -90,7 +86,7 @@ fi
export DAMASK_NUM_THREADS
export PYTHONPATH=$DAMASK_ROOT/python:$PYTHONPATH
for var in BASE STAT SOLVER PROCESSING FREE DAMASK_BIN BRANCH; do
for var in SOLVER PROCESSING BRANCH; do
unset "${var}"
done
for var in DAMASK MSC; do

View File

@ -1,3 +1,3 @@
thermal conduction
initialT 300.0
t0 270.0
(output) temperature

View File

@ -1,2 +1,2 @@
[001]
(gauss) phi1 0.000 Phi 0.000 phi2 0.000 scatter 0.000 fraction 1.000
(gauss) phi1 0.000 Phi 0.000 phi2 0.000

View File

@ -1,2 +1,2 @@
[101]
(gauss) phi1 0.000 Phi 45.000 phi2 90.000 scatter 0.000 fraction 1.000
(gauss) phi1 0.000 Phi 45.000 phi2 90.000

View File

@ -1,2 +1,2 @@
[111]
(gauss) phi1 0.000 Phi 54.7356 phi2 45.000 scatter 0.000 fraction 1.000
(gauss) phi1 0.000 Phi 54.7356 phi2 45.000

View File

@ -1,2 +1,2 @@
[123]
(gauss) phi1 209.805 Phi 29.206 phi2 63.435 scatter 0.000 fraction 1.000
(gauss) phi1 209.805 Phi 29.206 phi2 63.435

View File

@ -4,14 +4,6 @@
[SX]
mech none
#-------------------#
<crystallite>
#-------------------#
[aLittleSomething]
(output) f
(output) p
#-------------------#
<phase>
#-------------------#
@ -50,408 +42,212 @@ interaction_twinslip 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
interaction_twintwin 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
atol_resistance 1
(output) f
(output) p
#-------------------#
<microstructure>
#-------------------#
[Grain001]
crystallite 1
(constituent) phase 1 texture 1 fraction 1.0
[Grain002]
crystallite 1
(constituent) phase 1 texture 2 fraction 1.0
[Grain003]
crystallite 1
(constituent) phase 1 texture 3 fraction 1.0
[Grain004]
crystallite 1
(constituent) phase 1 texture 4 fraction 1.0
[Grain005]
crystallite 1
(constituent) phase 1 texture 5 fraction 1.0
[Grain006]
crystallite 1
(constituent) phase 1 texture 6 fraction 1.0
[Grain007]
crystallite 1
(constituent) phase 1 texture 7 fraction 1.0
[Grain008]
crystallite 1
(constituent) phase 1 texture 8 fraction 1.0
[Grain009]
crystallite 1
(constituent) phase 1 texture 9 fraction 1.0
[Grain010]
crystallite 1
(constituent) phase 1 texture 10 fraction 1.0
[Grain011]
crystallite 1
(constituent) phase 1 texture 11 fraction 1.0
[Grain012]
crystallite 1
(constituent) phase 1 texture 12 fraction 1.0
[Grain013]
crystallite 1
(constituent) phase 1 texture 13 fraction 1.0
[Grain014]
crystallite 1
(constituent) phase 1 texture 14 fraction 1.0
[Grain015]
crystallite 1
(constituent) phase 1 texture 15 fraction 1.0
[Grain016]
crystallite 1
(constituent) phase 1 texture 16 fraction 1.0
[Grain017]
crystallite 1
(constituent) phase 1 texture 17 fraction 1.0
[Grain018]
crystallite 1
(constituent) phase 1 texture 18 fraction 1.0
[Grain019]
crystallite 1
(constituent) phase 1 texture 19 fraction 1.0
[Grain020]
crystallite 1
(constituent) phase 1 texture 20 fraction 1.0
[Grain021]
crystallite 1
(constituent) phase 1 texture 21 fraction 1.0
[Grain022]
crystallite 1
(constituent) phase 1 texture 22 fraction 1.0
[Grain023]
crystallite 1
(constituent) phase 1 texture 23 fraction 1.0
[Grain024]
crystallite 1
(constituent) phase 1 texture 24 fraction 1.0
[Grain025]
crystallite 1
(constituent) phase 1 texture 25 fraction 1.0
[Grain026]
crystallite 1
(constituent) phase 1 texture 26 fraction 1.0
[Grain027]
crystallite 1
(constituent) phase 1 texture 27 fraction 1.0
[Grain028]
crystallite 1
(constituent) phase 1 texture 28 fraction 1.0
[Grain029]
crystallite 1
(constituent) phase 1 texture 29 fraction 1.0
[Grain030]
crystallite 1
(constituent) phase 1 texture 30 fraction 1.0
[Grain031]
crystallite 1
(constituent) phase 1 texture 31 fraction 1.0
[Grain032]
crystallite 1
(constituent) phase 1 texture 32 fraction 1.0
[Grain033]
crystallite 1
(constituent) phase 1 texture 33 fraction 1.0
[Grain034]
crystallite 1
(constituent) phase 1 texture 34 fraction 1.0
[Grain035]
crystallite 1
(constituent) phase 1 texture 35 fraction 1.0
[Grain036]
crystallite 1
(constituent) phase 1 texture 36 fraction 1.0
[Grain037]
crystallite 1
(constituent) phase 1 texture 37 fraction 1.0
[Grain038]
crystallite 1
(constituent) phase 1 texture 38 fraction 1.0
[Grain039]
crystallite 1
(constituent) phase 1 texture 39 fraction 1.0
[Grain040]
crystallite 1
(constituent) phase 1 texture 40 fraction 1.0
[Grain041]
crystallite 1
(constituent) phase 1 texture 41 fraction 1.0
[Grain042]
crystallite 1
(constituent) phase 1 texture 42 fraction 1.0
[Grain043]
crystallite 1
(constituent) phase 1 texture 43 fraction 1.0
[Grain044]
crystallite 1
(constituent) phase 1 texture 44 fraction 1.0
[Grain045]
crystallite 1
(constituent) phase 1 texture 45 fraction 1.0
[Grain046]
crystallite 1
(constituent) phase 1 texture 46 fraction 1.0
[Grain047]
crystallite 1
(constituent) phase 1 texture 47 fraction 1.0
[Grain048]
crystallite 1
(constituent) phase 1 texture 48 fraction 1.0
[Grain049]
crystallite 1
(constituent) phase 1 texture 49 fraction 1.0
[Grain050]
crystallite 1
(constituent) phase 1 texture 50 fraction 1.0
[Grain051]
crystallite 1
(constituent) phase 1 texture 51 fraction 1.0
[Grain052]
crystallite 1
(constituent) phase 1 texture 52 fraction 1.0
[Grain053]
crystallite 1
(constituent) phase 1 texture 53 fraction 1.0
[Grain054]
crystallite 1
(constituent) phase 1 texture 54 fraction 1.0
[Grain055]
crystallite 1
(constituent) phase 1 texture 55 fraction 1.0
[Grain056]
crystallite 1
(constituent) phase 1 texture 56 fraction 1.0
[Grain057]
crystallite 1
(constituent) phase 1 texture 57 fraction 1.0
[Grain058]
crystallite 1
(constituent) phase 1 texture 58 fraction 1.0
[Grain059]
crystallite 1
(constituent) phase 1 texture 59 fraction 1.0
[Grain060]
crystallite 1
(constituent) phase 1 texture 60 fraction 1.0
[Grain061]
crystallite 1
(constituent) phase 1 texture 61 fraction 1.0
[Grain062]
crystallite 1
(constituent) phase 1 texture 62 fraction 1.0
[Grain063]
crystallite 1
(constituent) phase 1 texture 63 fraction 1.0
[Grain064]
crystallite 1
(constituent) phase 1 texture 64 fraction 1.0
[Grain065]
crystallite 1
(constituent) phase 1 texture 65 fraction 1.0
[Grain066]
crystallite 1
(constituent) phase 1 texture 66 fraction 1.0
[Grain067]
crystallite 1
(constituent) phase 1 texture 67 fraction 1.0
[Grain068]
crystallite 1
(constituent) phase 1 texture 68 fraction 1.0
[Grain069]
crystallite 1
(constituent) phase 1 texture 69 fraction 1.0
[Grain070]
crystallite 1
(constituent) phase 1 texture 70 fraction 1.0
[Grain071]
crystallite 1
(constituent) phase 1 texture 71 fraction 1.0
[Grain072]
crystallite 1
(constituent) phase 1 texture 72 fraction 1.0
[Grain073]
crystallite 1
(constituent) phase 1 texture 73 fraction 1.0
[Grain074]
crystallite 1
(constituent) phase 1 texture 74 fraction 1.0
[Grain075]
crystallite 1
(constituent) phase 1 texture 75 fraction 1.0
[Grain076]
crystallite 1
(constituent) phase 1 texture 76 fraction 1.0
[Grain077]
crystallite 1
(constituent) phase 1 texture 77 fraction 1.0
[Grain078]
crystallite 1
(constituent) phase 1 texture 78 fraction 1.0
[Grain079]
crystallite 1
(constituent) phase 1 texture 79 fraction 1.0
[Grain080]
crystallite 1
(constituent) phase 1 texture 80 fraction 1.0
[Grain081]
crystallite 1
(constituent) phase 1 texture 81 fraction 1.0
[Grain082]
crystallite 1
(constituent) phase 1 texture 82 fraction 1.0
[Grain083]
crystallite 1
(constituent) phase 1 texture 83 fraction 1.0
[Grain084]
crystallite 1
(constituent) phase 1 texture 84 fraction 1.0
[Grain085]
crystallite 1
(constituent) phase 1 texture 85 fraction 1.0
[Grain086]
crystallite 1
(constituent) phase 1 texture 86 fraction 1.0
[Grain087]
crystallite 1
(constituent) phase 1 texture 87 fraction 1.0
[Grain088]
crystallite 1
(constituent) phase 1 texture 88 fraction 1.0
[Grain089]
crystallite 1
(constituent) phase 1 texture 89 fraction 1.0
[Grain090]
crystallite 1
(constituent) phase 1 texture 90 fraction 1.0
[Grain091]
crystallite 1
(constituent) phase 1 texture 91 fraction 1.0
[Grain092]
crystallite 1
(constituent) phase 1 texture 92 fraction 1.0
[Grain093]
crystallite 1
(constituent) phase 1 texture 93 fraction 1.0
[Grain094]
crystallite 1
(constituent) phase 1 texture 94 fraction 1.0
[Grain095]
crystallite 1
(constituent) phase 1 texture 95 fraction 1.0
[Grain096]
crystallite 1
(constituent) phase 1 texture 96 fraction 1.0
[Grain097]
crystallite 1
(constituent) phase 1 texture 97 fraction 1.0
[Grain098]
crystallite 1
(constituent) phase 1 texture 98 fraction 1.0
[Grain099]
crystallite 1
(constituent) phase 1 texture 99 fraction 1.0
[Grain100]
crystallite 1
(constituent) phase 1 texture 100 fraction 1.0
#-------------------#
@ -459,301 +255,202 @@ crystallite 1
#-------------------#
[Grain001]
(gauss) phi1 172.344 Phi 114.046 phi2 294.669 scatter 0.0 fraction 1.0
(gauss) phi1 172.344 Phi 114.046 phi2 294.669
[Grain002]
(gauss) phi1 186.013 Phi 94.7338 phi2 329.683 scatter 0.0 fraction 1.0
(gauss) phi1 186.013 Phi 94.7338 phi2 329.683
[Grain003]
(gauss) phi1 162.41 Phi 98.9455 phi2 130.322 scatter 0.0 fraction 1.0
(gauss) phi1 162.41 Phi 98.9455 phi2 130.322
[Grain004]
(gauss) phi1 355.272 Phi 140.621 phi2 125.567 scatter 0.0 fraction 1.0
(gauss) phi1 355.272 Phi 140.621 phi2 125.567
[Grain005]
(gauss) phi1 21.7641 Phi 143.388 phi2 240.373 scatter 0.0 fraction 1.0
(gauss) phi1 21.7641 Phi 143.388 phi2 240.373
[Grain006]
(gauss) phi1 88.1966 Phi 92.3358 phi2 194.78 scatter 0.0 fraction 1.0
(gauss) phi1 88.1966 Phi 92.3358 phi2 194.78
[Grain007]
(gauss) phi1 161.137 Phi 78.0062 phi2 111.948 scatter 0.0 fraction 1.0
(gauss) phi1 161.137 Phi 78.0062 phi2 111.948
[Grain008]
(gauss) phi1 169.792 Phi 89.5333 phi2 159.265 scatter 0.0 fraction 1.0
(gauss) phi1 169.792 Phi 89.5333 phi2 159.265
[Grain009]
(gauss) phi1 264.847 Phi 130.291 phi2 180.604 scatter 0.0 fraction 1.0
(gauss) phi1 264.847 Phi 130.291 phi2 180.604
[Grain010]
(gauss) phi1 70.6323 Phi 84.1754 phi2 341.162 scatter 0.0 fraction 1.0
(gauss) phi1 70.6323 Phi 84.1754 phi2 341.162
[Grain011]
(gauss) phi1 67.7751 Phi 36.1662 phi2 139.898 scatter 0.0 fraction 1.0
(gauss) phi1 67.7751 Phi 36.1662 phi2 139.898
[Grain012]
(gauss) phi1 111.621 Phi 19.1089 phi2 228.338 scatter 0.0 fraction 1.0
(gauss) phi1 111.621 Phi 19.1089 phi2 228.338
[Grain013]
(gauss) phi1 129.9 Phi 139.011 phi2 238.735 scatter 0.0 fraction 1.0
(gauss) phi1 129.9 Phi 139.011 phi2 238.735
[Grain014]
(gauss) phi1 221.405 Phi 129.743 phi2 99.6471 scatter 0.0 fraction 1.0
(gauss) phi1 221.405 Phi 129.743 phi2 99.6471
[Grain015]
(gauss) phi1 241.783 Phi 98.3729 phi2 260.615 scatter 0.0 fraction 1.0
(gauss) phi1 241.783 Phi 98.3729 phi2 260.615
[Grain016]
(gauss) phi1 72.5592 Phi 122.403 phi2 165.046 scatter 0.0 fraction 1.0
(gauss) phi1 72.5592 Phi 122.403 phi2 165.046
[Grain017]
(gauss) phi1 64.8818 Phi 82.6384 phi2 236.305 scatter 0.0 fraction 1.0
(gauss) phi1 64.8818 Phi 82.6384 phi2 236.305
[Grain018]
(gauss) phi1 201.096 Phi 65.9312 phi2 330.745 scatter 0.0 fraction 1.0
(gauss) phi1 201.096 Phi 65.9312 phi2 330.745
[Grain019]
(gauss) phi1 192.994 Phi 81.9371 phi2 239.326 scatter 0.0 fraction 1.0
(gauss) phi1 192.994 Phi 81.9371 phi2 239.326
[Grain020]
(gauss) phi1 125.335 Phi 90.4527 phi2 207.982 scatter 0.0 fraction 1.0
(gauss) phi1 125.335 Phi 90.4527 phi2 207.982
[Grain021]
(gauss) phi1 55.8848 Phi 26.4455 phi2 100.921 scatter 0.0 fraction 1.0
(gauss) phi1 55.8848 Phi 26.4455 phi2 100.921
[Grain022]
(gauss) phi1 40.722 Phi 95.6415 phi2 269.174 scatter 0.0 fraction 1.0
(gauss) phi1 40.722 Phi 95.6415 phi2 269.174
[Grain023]
(gauss) phi1 250.487 Phi 69.6035 phi2 201.732 scatter 0.0 fraction 1.0
(gauss) phi1 250.487 Phi 69.6035 phi2 201.732
[Grain024]
(gauss) phi1 204.199 Phi 84.983 phi2 20.3469 scatter 0.0 fraction 1.0
(gauss) phi1 204.199 Phi 84.983 phi2 20.3469
[Grain025]
(gauss) phi1 12.7416 Phi 128.589 phi2 271.553 scatter 0.0 fraction 1.0
(gauss) phi1 12.7416 Phi 128.589 phi2 271.553
[Grain026]
(gauss) phi1 299.704 Phi 85.3961 phi2 217.359 scatter 0.0 fraction 1.0
(gauss) phi1 299.704 Phi 85.3961 phi2 217.359
[Grain027]
(gauss) phi1 48.8232 Phi 83.6209 phi2 200.361 scatter 0.0 fraction 1.0
(gauss) phi1 48.8232 Phi 83.6209 phi2 200.361
[Grain028]
(gauss) phi1 336.395 Phi 97.3059 phi2 187.071 scatter 0.0 fraction 1.0
(gauss) phi1 336.395 Phi 97.3059 phi2 187.071
[Grain029]
(gauss) phi1 274.354 Phi 78.2424 phi2 320.308 scatter 0.0 fraction 1.0
(gauss) phi1 274.354 Phi 78.2424 phi2 320.308
[Grain030]
(gauss) phi1 320.776 Phi 149.72 phi2 163.862 scatter 0.0 fraction 1.0
(gauss) phi1 320.776 Phi 149.72 phi2 163.862
[Grain031]
(gauss) phi1 179.549 Phi 106.548 phi2 345.498 scatter 0.0 fraction 1.0
(gauss) phi1 179.549 Phi 106.548 phi2 345.498
[Grain032]
(gauss) phi1 163.508 Phi 24.4238 phi2 127.809 scatter 0.0 fraction 1.0
(gauss) phi1 163.508 Phi 24.4238 phi2 127.809
[Grain033]
(gauss) phi1 193.405 Phi 157.012 phi2 321.342 scatter 0.0 fraction 1.0
(gauss) phi1 193.405 Phi 157.012 phi2 321.342
[Grain034]
(gauss) phi1 9.09886 Phi 95.9453 phi2 102.32 scatter 0.0 fraction 1.0
(gauss) phi1 9.09886 Phi 95.9453 phi2 102.32
[Grain035]
(gauss) phi1 353.876 Phi 150.824 phi2 174.886 scatter 0.0 fraction 1.0
(gauss) phi1 353.876 Phi 150.824 phi2 174.886
[Grain036]
(gauss) phi1 138.914 Phi 76.5811 phi2 167.927 scatter 0.0 fraction 1.0
(gauss) phi1 138.914 Phi 76.5811 phi2 167.927
[Grain037]
(gauss) phi1 262.655 Phi 76.2738 phi2 12.4459 scatter 0.0 fraction 1.0
(gauss) phi1 262.655 Phi 76.2738 phi2 12.4459
[Grain038]
(gauss) phi1 121.849 Phi 65.5254 phi2 192.601 scatter 0.0 fraction 1.0
(gauss) phi1 121.849 Phi 65.5254 phi2 192.601
[Grain039]
(gauss) phi1 275.824 Phi 81.6788 phi2 164.228 scatter 0.0 fraction 1.0
(gauss) phi1 275.824 Phi 81.6788 phi2 164.228
[Grain040]
(gauss) phi1 68.9202 Phi 160.5 phi2 210.862 scatter 0.0 fraction 1.0
(gauss) phi1 68.9202 Phi 160.5 phi2 210.862
[Grain041]
(gauss) phi1 51.0398 Phi 82.7291 phi2 74.016 scatter 0.0 fraction 1.0
(gauss) phi1 51.0398 Phi 82.7291 phi2 74.016
[Grain042]
(gauss) phi1 338.746 Phi 62.7854 phi2 129.362 scatter 0.0 fraction 1.0
(gauss) phi1 338.746 Phi 62.7854 phi2 129.362
[Grain043]
(gauss) phi1 204.51 Phi 151.256 phi2 178.89 scatter 0.0 fraction 1.0
(gauss) phi1 204.51 Phi 151.256 phi2 178.89
[Grain044]
(gauss) phi1 122.098 Phi 104.003 phi2 323.04 scatter 0.0 fraction 1.0
(gauss) phi1 122.098 Phi 104.003 phi2 323.04
[Grain045]
(gauss) phi1 106.693 Phi 108.61 phi2 336.935 scatter 0.0 fraction 1.0
(gauss) phi1 106.693 Phi 108.61 phi2 336.935
[Grain046]
(gauss) phi1 118.856 Phi 160.992 phi2 316.152 scatter 0.0 fraction 1.0
(gauss) phi1 118.856 Phi 160.992 phi2 316.152
[Grain047]
(gauss) phi1 177.962 Phi 114.868 phi2 13.6918 scatter 0.0 fraction 1.0
(gauss) phi1 177.962 Phi 114.868 phi2 13.6918
[Grain048]
(gauss) phi1 330.273 Phi 174.495 phi2 231.249 scatter 0.0 fraction 1.0
(gauss) phi1 330.273 Phi 174.495 phi2 231.249
[Grain049]
(gauss) phi1 7.31937 Phi 94.7313 phi2 17.8461 scatter 0.0 fraction 1.0
(gauss) phi1 7.31937 Phi 94.7313 phi2 17.8461
[Grain050]
(gauss) phi1 74.3385 Phi 49.9546 phi2 286.482 scatter 0.0 fraction 1.0
(gauss) phi1 74.3385 Phi 49.9546 phi2 286.482
[Grain051]
(gauss) phi1 326.388 Phi 76.9547 phi2 214.758 scatter 0.0 fraction 1.0
(gauss) phi1 326.388 Phi 76.9547 phi2 214.758
[Grain052]
(gauss) phi1 276.024 Phi 72.1242 phi2 275.884 scatter 0.0 fraction 1.0
(gauss) phi1 276.024 Phi 72.1242 phi2 275.884
[Grain053]
(gauss) phi1 137.681 Phi 116.99 phi2 6.87047 scatter 0.0 fraction 1.0
(gauss) phi1 137.681 Phi 116.99 phi2 6.87047
[Grain054]
(gauss) phi1 200.213 Phi 123.618 phi2 268.84 scatter 0.0 fraction 1.0
(gauss) phi1 200.213 Phi 123.618 phi2 268.84
[Grain055]
(gauss) phi1 7.13702 Phi 56.2015 phi2 119.65 scatter 0.0 fraction 1.0
(gauss) phi1 7.13702 Phi 56.2015 phi2 119.65
[Grain056]
(gauss) phi1 72.1783 Phi 81.0906 phi2 6.06213 scatter 0.0 fraction 1.0
(gauss) phi1 72.1783 Phi 81.0906 phi2 6.06213
[Grain057]
(gauss) phi1 184.565 Phi 110.01 phi2 239.546 scatter 0.0 fraction 1.0
(gauss) phi1 184.565 Phi 110.01 phi2 239.546
[Grain058]
(gauss) phi1 210.124 Phi 128.631 phi2 8.61611 scatter 0.0 fraction 1.0
(gauss) phi1 210.124 Phi 128.631 phi2 8.61611
[Grain059]
(gauss) phi1 290.326 Phi 170.412 phi2 144.269 scatter 0.0 fraction 1.0
(gauss) phi1 290.326 Phi 170.412 phi2 144.269
[Grain060]
(gauss) phi1 204.748 Phi 76.7343 phi2 200.385 scatter 0.0 fraction 1.0
(gauss) phi1 204.748 Phi 76.7343 phi2 200.385
[Grain061]
(gauss) phi1 54.3015 Phi 65.9143 phi2 117.373 scatter 0.0 fraction 1.0
(gauss) phi1 54.3015 Phi 65.9143 phi2 117.373
[Grain062]
(gauss) phi1 261.263 Phi 52.255 phi2 95.9146 scatter 0.0 fraction 1.0
(gauss) phi1 261.263 Phi 52.255 phi2 95.9146
[Grain063]
(gauss) phi1 328.054 Phi 51.0778 phi2 24.2782 scatter 0.0 fraction 1.0
(gauss) phi1 328.054 Phi 51.0778 phi2 24.2782
[Grain064]
(gauss) phi1 163.03 Phi 154.894 phi2 64.126 scatter 0.0 fraction 1.0
(gauss) phi1 163.03 Phi 154.894 phi2 64.126
[Grain065]
(gauss) phi1 183.87 Phi 80.1848 phi2 18.7438 scatter 0.0 fraction 1.0
(gauss) phi1 183.87 Phi 80.1848 phi2 18.7438
[Grain066]
(gauss) phi1 219.91 Phi 113.727 phi2 126.67 scatter 0.0 fraction 1.0
(gauss) phi1 219.91 Phi 113.727 phi2 126.67
[Grain067]
(gauss) phi1 1.43844 Phi 87.6365 phi2 217.342 scatter 0.0 fraction 1.0
(gauss) phi1 1.43844 Phi 87.6365 phi2 217.342
[Grain068]
(gauss) phi1 16.6245 Phi 162.07 phi2 43.7899 scatter 0.0 fraction 1.0
(gauss) phi1 16.6245 Phi 162.07 phi2 43.7899
[Grain069]
(gauss) phi1 16.86 Phi 53.8682 phi2 256.917 scatter 0.0 fraction 1.0
(gauss) phi1 16.86 Phi 53.8682 phi2 256.917
[Grain070]
(gauss) phi1 1.01921 Phi 118.449 phi2 307.223 scatter 0.0 fraction 1.0
(gauss) phi1 1.01921 Phi 118.449 phi2 307.223
[Grain071]
(gauss) phi1 19.0397 Phi 83.8885 phi2 262.687 scatter 0.0 fraction 1.0
(gauss) phi1 19.0397 Phi 83.8885 phi2 262.687
[Grain072]
(gauss) phi1 99.799 Phi 77.2307 phi2 84.9727 scatter 0.0 fraction 1.0
(gauss) phi1 99.799 Phi 77.2307 phi2 84.9727
[Grain073]
(gauss) phi1 234.292 Phi 63.5029 phi2 250.315 scatter 0.0 fraction 1.0
(gauss) phi1 234.292 Phi 63.5029 phi2 250.315
[Grain074]
(gauss) phi1 315.529 Phi 106.015 phi2 103.711 scatter 0.0 fraction 1.0
(gauss) phi1 315.529 Phi 106.015 phi2 103.711
[Grain075]
(gauss) phi1 235.595 Phi 110.152 phi2 210.277 scatter 0.0 fraction 1.0
(gauss) phi1 235.595 Phi 110.152 phi2 210.277
[Grain076]
(gauss) phi1 341.907 Phi 17.1839 phi2 332.75 scatter 0.0 fraction 1.0
(gauss) phi1 341.907 Phi 17.1839 phi2 332.75
[Grain077]
(gauss) phi1 352.166 Phi 88.6049 phi2 114.964 scatter 0.0 fraction 1.0
(gauss) phi1 352.166 Phi 88.6049 phi2 114.964
[Grain078]
(gauss) phi1 342.33 Phi 117.777 phi2 180.346 scatter 0.0 fraction 1.0
(gauss) phi1 342.33 Phi 117.777 phi2 180.346
[Grain079]
(gauss) phi1 224.952 Phi 70.5702 phi2 148.486 scatter 0.0 fraction 1.0
(gauss) phi1 224.952 Phi 70.5702 phi2 148.486
[Grain080]
(gauss) phi1 7.71702 Phi 23.6124 phi2 131.591 scatter 0.0 fraction 1.0
(gauss) phi1 7.71702 Phi 23.6124 phi2 131.591
[Grain081]
(gauss) phi1 65.1024 Phi 138.774 phi2 247.344 scatter 0.0 fraction 1.0
(gauss) phi1 65.1024 Phi 138.774 phi2 247.344
[Grain082]
(gauss) phi1 37.6181 Phi 51.5209 phi2 8.4169 scatter 0.0 fraction 1.0
(gauss) phi1 37.6181 Phi 51.5209 phi2 8.4169
[Grain083]
(gauss) phi1 245.335 Phi 53.4543 phi2 52.5205 scatter 0.0 fraction 1.0
(gauss) phi1 245.335 Phi 53.4543 phi2 52.5205
[Grain084]
(gauss) phi1 259.572 Phi 87.7026 phi2 272.065 scatter 0.0 fraction 1.0
(gauss) phi1 259.572 Phi 87.7026 phi2 272.065
[Grain085]
(gauss) phi1 269.39 Phi 103.379 phi2 132.506 scatter 0.0 fraction 1.0
(gauss) phi1 269.39 Phi 103.379 phi2 132.506
[Grain086]
(gauss) phi1 175.156 Phi 119.338 phi2 355.51 scatter 0.0 fraction 1.0
(gauss) phi1 175.156 Phi 119.338 phi2 355.51
[Grain087]
(gauss) phi1 248.11 Phi 39.4772 phi2 310.371 scatter 0.0 fraction 1.0
(gauss) phi1 248.11 Phi 39.4772 phi2 310.371
[Grain088]
(gauss) phi1 121.809 Phi 141.465 phi2 10.0736 scatter 0.0 fraction 1.0
(gauss) phi1 121.809 Phi 141.465 phi2 10.0736
[Grain089]
(gauss) phi1 2.4357 Phi 47.118 phi2 274.654 scatter 0.0 fraction 1.0
(gauss) phi1 2.4357 Phi 47.118 phi2 274.654
[Grain090]
(gauss) phi1 314.188 Phi 134.146 phi2 250.673 scatter 0.0 fraction 1.0
(gauss) phi1 314.188 Phi 134.146 phi2 250.673
[Grain091]
(gauss) phi1 114.815 Phi 121.132 phi2 275.124 scatter 0.0 fraction 1.0
(gauss) phi1 114.815 Phi 121.132 phi2 275.124
[Grain092]
(gauss) phi1 126.699 Phi 99.0325 phi2 320.537 scatter 0.0 fraction 1.0
(gauss) phi1 126.699 Phi 99.0325 phi2 320.537
[Grain093]
(gauss) phi1 184.138 Phi 20.1663 phi2 159.314 scatter 0.0 fraction 1.0
(gauss) phi1 184.138 Phi 20.1663 phi2 159.314
[Grain094]
(gauss) phi1 296.502 Phi 15.2389 phi2 39.382 scatter 0.0 fraction 1.0
(gauss) phi1 296.502 Phi 15.2389 phi2 39.382
[Grain095]
(gauss) phi1 167.8 Phi 151.764 phi2 192.568 scatter 0.0 fraction 1.0
(gauss) phi1 167.8 Phi 151.764 phi2 192.568
[Grain096]
(gauss) phi1 257.822 Phi 133.446 phi2 257.108 scatter 0.0 fraction 1.0
(gauss) phi1 257.822 Phi 133.446 phi2 257.108
[Grain097]
(gauss) phi1 71.6923 Phi 74.5726 phi2 342.575 scatter 0.0 fraction 1.0
(gauss) phi1 71.6923 Phi 74.5726 phi2 342.575
[Grain098]
(gauss) phi1 176.748 Phi 28.39 phi2 327.375 scatter 0.0 fraction 1.0
(gauss) phi1 176.748 Phi 28.39 phi2 327.375
[Grain099]
(gauss) phi1 121.822 Phi 141.836 phi2 22.6349 scatter 0.0 fraction 1.0
(gauss) phi1 121.822 Phi 141.836 phi2 22.6349
[Grain100]
(gauss) phi1 180.151 Phi 109.246 phi2 146.177 scatter 0.0 fraction 1.0
(gauss) phi1 180.151 Phi 109.246 phi2 146.177

View File

@ -9,307 +9,206 @@
#-------------------#
[Grain001]
crystallite 1
(constituent) phase 1 texture 1 fraction 1.0
[Grain002]
crystallite 1
(constituent) phase 1 texture 2 fraction 1.0
[Grain003]
crystallite 1
(constituent) phase 1 texture 3 fraction 1.0
[Grain004]
crystallite 1
(constituent) phase 1 texture 4 fraction 1.0
[Grain005]
crystallite 1
(constituent) phase 1 texture 5 fraction 1.0
[Grain006]
crystallite 1
(constituent) phase 1 texture 6 fraction 1.0
[Grain007]
crystallite 1
(constituent) phase 1 texture 7 fraction 1.0
[Grain008]
crystallite 1
(constituent) phase 1 texture 8 fraction 1.0
[Grain009]
crystallite 1
(constituent) phase 1 texture 9 fraction 1.0
[Grain010]
crystallite 1
(constituent) phase 1 texture 10 fraction 1.0
[Grain011]
crystallite 1
(constituent) phase 1 texture 11 fraction 1.0
[Grain012]
crystallite 1
(constituent) phase 1 texture 12 fraction 1.0
[Grain013]
crystallite 1
(constituent) phase 1 texture 13 fraction 1.0
[Grain014]
crystallite 1
(constituent) phase 1 texture 14 fraction 1.0
[Grain015]
crystallite 1
(constituent) phase 1 texture 15 fraction 1.0
[Grain016]
crystallite 1
(constituent) phase 1 texture 16 fraction 1.0
[Grain017]
crystallite 1
(constituent) phase 1 texture 17 fraction 1.0
[Grain018]
crystallite 1
(constituent) phase 1 texture 18 fraction 1.0
[Grain019]
crystallite 1
(constituent) phase 1 texture 19 fraction 1.0
[Grain020]
crystallite 1
(constituent) phase 1 texture 20 fraction 1.0
[Grain021]
crystallite 1
(constituent) phase 1 texture 21 fraction 1.0
[Grain022]
crystallite 1
(constituent) phase 1 texture 22 fraction 1.0
[Grain023]
crystallite 1
(constituent) phase 1 texture 23 fraction 1.0
[Grain024]
crystallite 1
(constituent) phase 1 texture 24 fraction 1.0
[Grain025]
crystallite 1
(constituent) phase 1 texture 25 fraction 1.0
[Grain026]
crystallite 1
(constituent) phase 1 texture 26 fraction 1.0
[Grain027]
crystallite 1
(constituent) phase 1 texture 27 fraction 1.0
[Grain028]
crystallite 1
(constituent) phase 1 texture 28 fraction 1.0
[Grain029]
crystallite 1
(constituent) phase 1 texture 29 fraction 1.0
[Grain030]
crystallite 1
(constituent) phase 1 texture 30 fraction 1.0
[Grain031]
crystallite 1
(constituent) phase 1 texture 31 fraction 1.0
[Grain032]
crystallite 1
(constituent) phase 1 texture 32 fraction 1.0
[Grain033]
crystallite 1
(constituent) phase 1 texture 33 fraction 1.0
[Grain034]
crystallite 1
(constituent) phase 1 texture 34 fraction 1.0
[Grain035]
crystallite 1
(constituent) phase 1 texture 35 fraction 1.0
[Grain036]
crystallite 1
(constituent) phase 1 texture 36 fraction 1.0
[Grain037]
crystallite 1
(constituent) phase 1 texture 37 fraction 1.0
[Grain038]
crystallite 1
(constituent) phase 1 texture 38 fraction 1.0
[Grain039]
crystallite 1
(constituent) phase 1 texture 39 fraction 1.0
[Grain040]
crystallite 1
(constituent) phase 1 texture 40 fraction 1.0
[Grain041]
crystallite 1
(constituent) phase 1 texture 41 fraction 1.0
[Grain042]
crystallite 1
(constituent) phase 1 texture 42 fraction 1.0
[Grain043]
crystallite 1
(constituent) phase 1 texture 43 fraction 1.0
[Grain044]
crystallite 1
(constituent) phase 1 texture 44 fraction 1.0
[Grain045]
crystallite 1
(constituent) phase 1 texture 45 fraction 1.0
[Grain046]
crystallite 1
(constituent) phase 1 texture 46 fraction 1.0
[Grain047]
crystallite 1
(constituent) phase 1 texture 47 fraction 1.0
[Grain048]
crystallite 1
(constituent) phase 1 texture 48 fraction 1.0
[Grain049]
crystallite 1
(constituent) phase 1 texture 49 fraction 1.0
[Grain050]
crystallite 1
(constituent) phase 1 texture 50 fraction 1.0
[Grain051]
crystallite 1
(constituent) phase 1 texture 51 fraction 1.0
[Grain052]
crystallite 1
(constituent) phase 1 texture 52 fraction 1.0
[Grain053]
crystallite 1
(constituent) phase 1 texture 53 fraction 1.0
[Grain054]
crystallite 1
(constituent) phase 1 texture 54 fraction 1.0
[Grain055]
crystallite 1
(constituent) phase 1 texture 55 fraction 1.0
[Grain056]
crystallite 1
(constituent) phase 1 texture 56 fraction 1.0
[Grain057]
crystallite 1
(constituent) phase 1 texture 57 fraction 1.0
[Grain058]
crystallite 1
(constituent) phase 1 texture 58 fraction 1.0
[Grain059]
crystallite 1
(constituent) phase 1 texture 59 fraction 1.0
[Grain060]
crystallite 1
(constituent) phase 1 texture 60 fraction 1.0
[Grain061]
crystallite 1
(constituent) phase 1 texture 61 fraction 1.0
[Grain062]
crystallite 1
(constituent) phase 1 texture 62 fraction 1.0
[Grain063]
crystallite 1
(constituent) phase 1 texture 63 fraction 1.0
[Grain064]
crystallite 1
(constituent) phase 1 texture 64 fraction 1.0
[Grain065]
crystallite 1
(constituent) phase 1 texture 65 fraction 1.0
[Grain066]
crystallite 1
(constituent) phase 1 texture 66 fraction 1.0
[Grain067]
crystallite 1
(constituent) phase 1 texture 67 fraction 1.0
[Grain068]
crystallite 1
(constituent) phase 1 texture 68 fraction 1.0
[Grain069]
crystallite 1
(constituent) phase 1 texture 69 fraction 1.0
[Grain070]
crystallite 1
(constituent) phase 1 texture 70 fraction 1.0
[Grain071]
crystallite 1
(constituent) phase 1 texture 71 fraction 1.0
[Grain072]
crystallite 1
(constituent) phase 1 texture 72 fraction 1.0
[Grain073]
crystallite 1
(constituent) phase 1 texture 73 fraction 1.0
[Grain074]
crystallite 1
(constituent) phase 1 texture 74 fraction 1.0
[Grain075]
crystallite 1
(constituent) phase 1 texture 75 fraction 1.0
[Grain076]
crystallite 1
(constituent) phase 1 texture 76 fraction 1.0
[Grain077]
crystallite 1
(constituent) phase 1 texture 77 fraction 1.0
[Grain078]
crystallite 1
(constituent) phase 1 texture 78 fraction 1.0
[Grain079]
crystallite 1
(constituent) phase 1 texture 79 fraction 1.0
[Grain080]
crystallite 1
(constituent) phase 1 texture 80 fraction 1.0
[Grain081]
crystallite 1
(constituent) phase 1 texture 81 fraction 1.0
[Grain082]
crystallite 1
(constituent) phase 1 texture 82 fraction 1.0
[Grain083]
crystallite 1
(constituent) phase 1 texture 83 fraction 1.0
[Grain084]
crystallite 1
(constituent) phase 1 texture 84 fraction 1.0
[Grain085]
crystallite 1
(constituent) phase 1 texture 85 fraction 1.0
[Grain086]
crystallite 1
(constituent) phase 1 texture 86 fraction 1.0
[Grain087]
crystallite 1
(constituent) phase 1 texture 87 fraction 1.0
[Grain088]
crystallite 1
(constituent) phase 1 texture 88 fraction 1.0
[Grain089]
crystallite 1
(constituent) phase 1 texture 89 fraction 1.0
[Grain090]
crystallite 1
(constituent) phase 1 texture 90 fraction 1.0
[Grain091]
crystallite 1
(constituent) phase 1 texture 91 fraction 1.0
[Grain092]
crystallite 1
(constituent) phase 1 texture 92 fraction 1.0
[Grain093]
crystallite 1
(constituent) phase 1 texture 93 fraction 1.0
[Grain094]
crystallite 1
(constituent) phase 1 texture 94 fraction 1.0
[Grain095]
crystallite 1
(constituent) phase 1 texture 95 fraction 1.0
[Grain096]
crystallite 1
(constituent) phase 1 texture 96 fraction 1.0
[Grain097]
crystallite 1
(constituent) phase 1 texture 97 fraction 1.0
[Grain098]
crystallite 1
(constituent) phase 1 texture 98 fraction 1.0
[Grain099]
crystallite 1
(constituent) phase 1 texture 99 fraction 1.0
[Grain100]
crystallite 1
(constituent) phase 1 texture 100 fraction 1.0
[cubeGrain]
crystallite 1
(constituent) phase 1 texture 101 fraction 1.0
#-------------------#
@ -317,214 +216,209 @@ crystallite 1
#-------------------#
[Grain001]
(gauss) phi1 359.121452 Phi 82.319471 Phi2 347.729535 scatter 0 fraction 1
(gauss) phi1 359.121452 Phi 82.319471 Phi2 347.729535
[Grain002]
(gauss) phi1 269.253967 Phi 105.379919 Phi2 173.029284 scatter 0 fraction 1
(gauss) phi1 269.253967 Phi 105.379919 Phi2 173.029284
[Grain003]
(gauss) phi1 26.551535 Phi 171.606752 Phi2 124.949264 scatter 0 fraction 1
(gauss) phi1 26.551535 Phi 171.606752 Phi2 124.949264
[Grain004]
(gauss) phi1 123.207774 Phi 124.339577 Phi2 47.937748 scatter 0 fraction 1
(gauss) phi1 123.207774 Phi 124.339577 Phi2 47.937748
[Grain005]
(gauss) phi1 324.188825 Phi 103.089216 Phi2 160.373624 scatter 0 fraction 1
(gauss) phi1 324.188825 Phi 103.089216 Phi2 160.373624
[Grain006]
(gauss) phi1 238.295585 Phi 165.416882 Phi2 234.307741 scatter 0 fraction 1
(gauss) phi1 238.295585 Phi 165.416882 Phi2 234.307741
[Grain007]
(gauss) phi1 232.707177 Phi 110.733726 Phi2 308.049265 scatter 0 fraction 1
(gauss) phi1 232.707177 Phi 110.733726 Phi2 308.049265
[Grain008]
(gauss) phi1 144.463291 Phi 125.891441 Phi2 348.674207 scatter 0 fraction 1
(gauss) phi1 144.463291 Phi 125.891441 Phi2 348.674207
[Grain009]
(gauss) phi1 215.423832 Phi 69.759502 Phi2 164.477632 scatter 0 fraction 1
(gauss) phi1 215.423832 Phi 69.759502 Phi2 164.477632
[Grain010]
(gauss) phi1 118.805444 Phi 143.057031 Phi2 271.963190 scatter 0 fraction 1
(gauss) phi1 118.805444 Phi 143.057031 Phi2 271.963190
[Grain011]
(gauss) phi1 218.049576 Phi 64.017550 Phi2 323.040457 scatter 0 fraction 1
(gauss) phi1 218.049576 Phi 64.017550 Phi2 323.040457
[Grain012]
(gauss) phi1 236.962483 Phi 134.312093 Phi2 220.433366 scatter 0 fraction 1
(gauss) phi1 236.962483 Phi 134.312093 Phi2 220.433366
[Grain013]
(gauss) phi1 352.317686 Phi 3.356527 Phi2 92.447275 scatter 0 fraction 1
(gauss) phi1 352.317686 Phi 3.356527 Phi2 92.447275
[Grain014]
(gauss) phi1 198.311545 Phi 71.452240 Phi2 199.441849 scatter 0 fraction 1
(gauss) phi1 198.311545 Phi 71.452240 Phi2 199.441849
[Grain015]
(gauss) phi1 351.993635 Phi 36.500987 Phi2 236.852886 scatter 0 fraction 1
(gauss) phi1 351.993635 Phi 36.500987 Phi2 236.852886
[Grain016]
(gauss) phi1 262.389063 Phi 101.249950 Phi2 334.305959 scatter 0 fraction 1
(gauss) phi1 262.389063 Phi 101.249950 Phi2 334.305959
[Grain017]
(gauss) phi1 53.220668 Phi 69.570254 Phi2 277.061151 scatter 0 fraction 1
(gauss) phi1 53.220668 Phi 69.570254 Phi2 277.061151
[Grain018]
(gauss) phi1 122.156119 Phi 140.207051 Phi2 221.172906 scatter 0 fraction 1
(gauss) phi1 122.156119 Phi 140.207051 Phi2 221.172906
[Grain019]
(gauss) phi1 295.422170 Phi 26.595511 Phi2 263.206315 scatter 0 fraction 1
(gauss) phi1 295.422170 Phi 26.595511 Phi2 263.206315
[Grain020]
(gauss) phi1 179.137406 Phi 104.500977 Phi2 151.742108 scatter 0 fraction 1
(gauss) phi1 179.137406 Phi 104.500977 Phi2 151.742108
[Grain021]
(gauss) phi1 199.045094 Phi 5.228899 Phi2 356.542109 scatter 0 fraction 1
(gauss) phi1 199.045094 Phi 5.228899 Phi2 356.542109
[Grain022]
(gauss) phi1 268.671476 Phi 24.835403 Phi2 33.578889 scatter 0 fraction 1
(gauss) phi1 268.671476 Phi 24.835403 Phi2 33.578889
[Grain023]
(gauss) phi1 264.248527 Phi 59.766630 Phi2 340.865462 scatter 0 fraction 1
(gauss) phi1 264.248527 Phi 59.766630 Phi2 340.865462
[Grain024]
(gauss) phi1 254.223491 Phi 51.125301 Phi2 201.094027 scatter 0 fraction 1
(gauss) phi1 254.223491 Phi 51.125301 Phi2 201.094027
[Grain025]
(gauss) phi1 22.214008 Phi 92.248774 Phi2 215.168318 scatter 0 fraction 1
(gauss) phi1 22.214008 Phi 92.248774 Phi2 215.168318
[Grain026]
(gauss) phi1 49.511491 Phi 79.933539 Phi2 187.188575 scatter 0 fraction 1
(gauss) phi1 49.511491 Phi 79.933539 Phi2 187.188575
[Grain027]
(gauss) phi1 318.916204 Phi 113.102650 Phi2 241.076629 scatter 0 fraction 1
(gauss) phi1 318.916204 Phi 113.102650 Phi2 241.076629
[Grain028]
(gauss) phi1 239.378433 Phi 89.578655 Phi2 94.167043 scatter 0 fraction 1
(gauss) phi1 239.378433 Phi 89.578655 Phi2 94.167043
[Grain029]
(gauss) phi1 27.561421 Phi 142.892093 Phi2 197.735666 scatter 0 fraction 1
(gauss) phi1 27.561421 Phi 142.892093 Phi2 197.735666
[Grain030]
(gauss) phi1 135.210581 Phi 165.859834 Phi2 285.449561 scatter 0 fraction 1
(gauss) phi1 135.210581 Phi 165.859834 Phi2 285.449561
[Grain031]
(gauss) phi1 223.515916 Phi 56.824378 Phi2 343.289074 scatter 0 fraction 1
(gauss) phi1 223.515916 Phi 56.824378 Phi2 343.289074
[Grain032]
(gauss) phi1 41.127974 Phi 111.289145 Phi2 214.855145 scatter 0 fraction 1
(gauss) phi1 41.127974 Phi 111.289145 Phi2 214.855145
[Grain033]
(gauss) phi1 17.335045 Phi 140.496745 Phi2 77.747371 scatter 0 fraction 1
(gauss) phi1 17.335045 Phi 140.496745 Phi2 77.747371
[Grain034]
(gauss) phi1 36.206421 Phi 148.574232 Phi2 88.870226 scatter 0 fraction 1
(gauss) phi1 36.206421 Phi 148.574232 Phi2 88.870226
[Grain035]
(gauss) phi1 159.618336 Phi 125.680504 Phi2 204.119403 scatter 0 fraction 1
(gauss) phi1 159.618336 Phi 125.680504 Phi2 204.119403
[Grain036]
(gauss) phi1 8.752464 Phi 99.173166 Phi2 143.227089 scatter 0 fraction 1
(gauss) phi1 8.752464 Phi 99.173166 Phi2 143.227089
[Grain037]
(gauss) phi1 351.570753 Phi 67.343218 Phi2 1.779612 scatter 0 fraction 1
(gauss) phi1 351.570753 Phi 67.343218 Phi2 1.779612
[Grain038]
(gauss) phi1 46.771572 Phi 155.018674 Phi2 302.319987 scatter 0 fraction 1
(gauss) phi1 46.771572 Phi 155.018674 Phi2 302.319987
[Grain039]
(gauss) phi1 244.255976 Phi 80.566566 Phi2 264.069331 scatter 0 fraction 1
(gauss) phi1 244.255976 Phi 80.566566 Phi2 264.069331
[Grain040]
(gauss) phi1 41.775388 Phi 47.109507 Phi2 300.598550 scatter 0 fraction 1
(gauss) phi1 41.775388 Phi 47.109507 Phi2 300.598550
[Grain041]
(gauss) phi1 268.753103 Phi 46.654050 Phi2 190.382041 scatter 0 fraction 1
(gauss) phi1 268.753103 Phi 46.654050 Phi2 190.382041
[Grain042]
(gauss) phi1 239.574480 Phi 62.517793 Phi2 147.817535 scatter 0 fraction 1
(gauss) phi1 239.574480 Phi 62.517793 Phi2 147.817535
[Grain043]
(gauss) phi1 128.059775 Phi 61.916743 Phi2 169.674359 scatter 0 fraction 1
(gauss) phi1 128.059775 Phi 61.916743 Phi2 169.674359
[Grain044]
(gauss) phi1 166.545156 Phi 58.709099 Phi2 252.885391 scatter 0 fraction 1
(gauss) phi1 166.545156 Phi 58.709099 Phi2 252.885391
[Grain045]
(gauss) phi1 92.867691 Phi 28.906456 Phi2 164.197290 scatter 0 fraction 1
(gauss) phi1 92.867691 Phi 28.906456 Phi2 164.197290
[Grain046]
(gauss) phi1 291.056147 Phi 35.145174 Phi2 250.155599 scatter 0 fraction 1
(gauss) phi1 291.056147 Phi 35.145174 Phi2 250.155599
[Grain047]
(gauss) phi1 79.015862 Phi 44.772479 Phi2 267.982808 scatter 0 fraction 1
(gauss) phi1 79.015862 Phi 44.772479 Phi2 267.982808
[Grain048]
(gauss) phi1 108.400702 Phi 69.883075 Phi2 222.737053 scatter 0 fraction 1
(gauss) phi1 108.400702 Phi 69.883075 Phi2 222.737053
[Grain049]
(gauss) phi1 348.326500 Phi 11.339714 Phi2 121.682346 scatter 0 fraction 1
(gauss) phi1 348.326500 Phi 11.339714 Phi2 121.682346
[Grain050]
(gauss) phi1 331.476209 Phi 108.775043 Phi2 335.139671 scatter 0 fraction 1
(gauss) phi1 331.476209 Phi 108.775043 Phi2 335.139671
[Grain051]
(gauss) phi1 196.750278 Phi 93.955106 Phi2 63.689075 scatter 0 fraction 1
(gauss) phi1 196.750278 Phi 93.955106 Phi2 63.689075
[Grain052]
(gauss) phi1 136.077875 Phi 130.508342 Phi2 128.468976 scatter 0 fraction 1
(gauss) phi1 136.077875 Phi 130.508342 Phi2 128.468976
[Grain053]
(gauss) phi1 239.643513 Phi 76.284643 Phi2 168.821008 scatter 0 fraction 1
(gauss) phi1 239.643513 Phi 76.284643 Phi2 168.821008
[Grain054]
(gauss) phi1 113.850670 Phi 117.531757 Phi2 71.971648 scatter 0 fraction 1
(gauss) phi1 113.850670 Phi 117.531757 Phi2 71.971648
[Grain055]
(gauss) phi1 149.554071 Phi 16.543098 Phi2 195.556172 scatter 0 fraction 1
(gauss) phi1 149.554071 Phi 16.543098 Phi2 195.556172
[Grain056]
(gauss) phi1 46.626579 Phi 52.447846 Phi2 304.495569 scatter 0 fraction 1
(gauss) phi1 46.626579 Phi 52.447846 Phi2 304.495569
[Grain057]
(gauss) phi1 255.251821 Phi 86.678048 Phi2 238.982712 scatter 0 fraction 1
(gauss) phi1 255.251821 Phi 86.678048 Phi2 238.982712
[Grain058]
(gauss) phi1 324.266133 Phi 28.075458 Phi2 41.191295 scatter 0 fraction 1
(gauss) phi1 324.266133 Phi 28.075458 Phi2 41.191295
[Grain059]
(gauss) phi1 312.000332 Phi 74.648725 Phi2 87.403581 scatter 0 fraction 1
(gauss) phi1 312.000332 Phi 74.648725 Phi2 87.403581
[Grain060]
(gauss) phi1 57.742481 Phi 163.241519 Phi2 68.491438 scatter 0 fraction 1
(gauss) phi1 57.742481 Phi 163.241519 Phi2 68.491438
[Grain061]
(gauss) phi1 112.442447 Phi 51.735320 Phi2 206.538656 scatter 0 fraction 1
(gauss) phi1 112.442447 Phi 51.735320 Phi2 206.538656
[Grain062]
(gauss) phi1 297.453842 Phi 115.283041 Phi2 57.785319 scatter 0 fraction 1
(gauss) phi1 297.453842 Phi 115.283041 Phi2 57.785319
[Grain063]
(gauss) phi1 119.132681 Phi 117.923565 Phi2 196.121206 scatter 0 fraction 1
(gauss) phi1 119.132681 Phi 117.923565 Phi2 196.121206
[Grain064]
(gauss) phi1 199.267314 Phi 163.091476 Phi2 53.549301 scatter 0 fraction 1
(gauss) phi1 199.267314 Phi 163.091476 Phi2 53.549301
[Grain065]
(gauss) phi1 37.765215 Phi 76.795488 Phi2 146.264753 scatter 0 fraction 1
(gauss) phi1 37.765215 Phi 76.795488 Phi2 146.264753
[Grain066]
(gauss) phi1 324.550183 Phi 27.665150 Phi2 56.383148 scatter 0 fraction 1
(gauss) phi1 324.550183 Phi 27.665150 Phi2 56.383148
[Grain067]
(gauss) phi1 337.305377 Phi 136.807151 Phi2 133.661586 scatter 0 fraction 1
(gauss) phi1 337.305377 Phi 136.807151 Phi2 133.661586
[Grain068]
(gauss) phi1 115.744041 Phi 64.536978 Phi2 262.694800 scatter 0 fraction 1
(gauss) phi1 115.744041 Phi 64.536978 Phi2 262.694800
[Grain069]
(gauss) phi1 136.293403 Phi 48.862462 Phi2 343.319175 scatter 0 fraction 1
(gauss) phi1 136.293403 Phi 48.862462 Phi2 343.319175
[Grain070]
(gauss) phi1 111.030931 Phi 80.823213 Phi2 84.041594 scatter 0 fraction 1
(gauss) phi1 111.030931 Phi 80.823213 Phi2 84.041594
[Grain071]
(gauss) phi1 303.985249 Phi 118.929631 Phi2 302.307709 scatter 0 fraction 1
(gauss) phi1 303.985249 Phi 118.929631 Phi2 302.307709
[Grain072]
(gauss) phi1 193.556259 Phi 75.928015 Phi2 176.696899 scatter 0 fraction 1
(gauss) phi1 193.556259 Phi 75.928015 Phi2 176.696899
[Grain073]
(gauss) phi1 102.543259 Phi 121.929923 Phi2 234.496773 scatter 0 fraction 1
(gauss) phi1 102.543259 Phi 121.929923 Phi2 234.496773
[Grain074]
(gauss) phi1 218.581323 Phi 101.753894 Phi2 305.566089 scatter 0 fraction 1
(gauss) phi1 218.581323 Phi 101.753894 Phi2 305.566089
[Grain075]
(gauss) phi1 229.542114 Phi 118.839215 Phi2 129.179156 scatter 0 fraction 1
(gauss) phi1 229.542114 Phi 118.839215 Phi2 129.179156
[Grain076]
(gauss) phi1 202.258840 Phi 139.205956 Phi2 352.248979 scatter 0 fraction 1
(gauss) phi1 202.258840 Phi 139.205956 Phi2 352.248979
[Grain077]
(gauss) phi1 137.954289 Phi 63.806918 Phi2 128.975049 scatter 0 fraction 1
(gauss) phi1 137.954289 Phi 63.806918 Phi2 128.975049
[Grain078]
(gauss) phi1 327.557366 Phi 84.987420 Phi2 345.483143 scatter 0 fraction 1
(gauss) phi1 327.557366 Phi 84.987420 Phi2 345.483143
[Grain079]
(gauss) phi1 334.610243 Phi 74.535474 Phi2 106.419231 scatter 0 fraction 1
(gauss) phi1 334.610243 Phi 74.535474 Phi2 106.419231
[Grain080]
(gauss) phi1 62.906243 Phi 46.752029 Phi2 222.692276 scatter 0 fraction 1
(gauss) phi1 62.906243 Phi 46.752029 Phi2 222.692276
[Grain081]
(gauss) phi1 254.121439 Phi 121.005485 Phi2 287.265977 scatter 0 fraction 1
(gauss) phi1 254.121439 Phi 121.005485 Phi2 287.265977
[Grain082]
(gauss) phi1 140.765045 Phi 141.268031 Phi2 271.327656 scatter 0 fraction 1
(gauss) phi1 140.765045 Phi 141.268031 Phi2 271.327656
[Grain083]
(gauss) phi1 10.726984 Phi 66.339177 Phi2 189.073212 scatter 0 fraction 1
(gauss) phi1 10.726984 Phi 66.339177 Phi2 189.073212
[Grain084]
(gauss) phi1 270.921536 Phi 72.821127 Phi2 313.590515 scatter 0 fraction 1
(gauss) phi1 270.921536 Phi 72.821127 Phi2 313.590515
[Grain085]
(gauss) phi1 299.059668 Phi 23.884874 Phi2 80.016277 scatter 0 fraction 1
(gauss) phi1 299.059668 Phi 23.884874 Phi2 80.016277
[Grain086]
(gauss) phi1 208.617406 Phi 11.031834 Phi2 302.388247 scatter 0 fraction 1
(gauss) phi1 208.617406 Phi 11.031834 Phi2 302.388247
[Grain087]
(gauss) phi1 62.929967 Phi 65.223261 Phi2 108.558265 scatter 0 fraction 1
(gauss) phi1 62.929967 Phi 65.223261 Phi2 108.558265
[Grain088]
(gauss) phi1 9.014959 Phi 33.542169 Phi2 247.970366 scatter 0 fraction 1
(gauss) phi1 9.014959 Phi 33.542169 Phi2 247.970366
[Grain089]
(gauss) phi1 272.432808 Phi 30.065174 Phi2 19.803570 scatter 0 fraction 1
(gauss) phi1 272.432808 Phi 30.065174 Phi2 19.803570
[Grain090]
(gauss) phi1 179.621980 Phi 151.763475 Phi2 61.871794 scatter 0 fraction 1
(gauss) phi1 179.621980 Phi 151.763475 Phi2 61.871794
[Grain091]
(gauss) phi1 247.810321 Phi 112.752980 Phi2 264.668469 scatter 0 fraction 1
(gauss) phi1 247.810321 Phi 112.752980 Phi2 264.668469
[Grain092]
(gauss) phi1 270.780630 Phi 102.037858 Phi2 31.602610 scatter 0 fraction 1
(gauss) phi1 270.780630 Phi 102.037858 Phi2 31.602610
[Grain093]
(gauss) phi1 17.626672 Phi 56.032415 Phi2 245.079600 scatter 0 fraction 1
(gauss) phi1 17.626672 Phi 56.032415 Phi2 245.079600
[Grain094]
(gauss) phi1 112.165186 Phi 87.390459 Phi2 182.086729 scatter 0 fraction 1
(gauss) phi1 112.165186 Phi 87.390459 Phi2 182.086729
[Grain095]
(gauss) phi1 157.869381 Phi 79.905131 Phi2 107.037081 scatter 0 fraction 1
(gauss) phi1 157.869381 Phi 79.905131 Phi2 107.037081
[Grain096]
(gauss) phi1 106.163846 Phi 148.477084 Phi2 350.980466 scatter 0 fraction 1
(gauss) phi1 106.163846 Phi 148.477084 Phi2 350.980466
[Grain097]
(gauss) phi1 262.138550 Phi 58.923588 Phi2 111.303439 scatter 0 fraction 1
(gauss) phi1 262.138550 Phi 58.923588 Phi2 111.303439
[Grain098]
(gauss) phi1 88.739397 Phi 119.092789 Phi2 222.502594 scatter 0 fraction 1
(gauss) phi1 88.739397 Phi 119.092789 Phi2 222.502594
[Grain099]
(gauss) phi1 337.603765 Phi 10.145102 Phi2 80.934916 scatter 0 fraction 1
(gauss) phi1 337.603765 Phi 10.145102 Phi2 80.934916
[Grain100]
(gauss) phi1 341.022242 Phi 45.927285 Phi2 252.045476 scatter 0 fraction 1
(gauss) phi1 341.022242 Phi 45.927285 Phi2 252.045476
[cube]
(gauss) phi1 0 Phi 0 phi2 0 scatter 0 fraction 1
(gauss) phi1 0 Phi 0 phi2 0
#-------------------#
<crystallite>
#-------------------#
{../ConfigFiles/Crystallite_All.config}
#-------------------#
<phase>

View File

@ -1,94 +0,0 @@
2 header
$Id: postResults 861 2011-05-06 10:00:27Z MPIE\c.kords $
inc time elem node ip grain ip.x ip.y ip.z CauchyStress.intensity CauchyStress.t11 CauchyStress.t22 CauchyStress.t33 CauchyStress.t12 CauchyStress.t23 CauchyStress.t13 1_1_f 1_2_f 1_3_f 1_4_f 1_5_f 1_6_f 1_7_f 1_8_f 1_9_f 1_1_grainrotation 1_2_grainrotation 1_3_grainrotation 1_4_grainrotation 1_1_resistance_slip 1_2_resistance_slip 1_3_resistance_slip 1_4_resistance_slip 1_5_resistance_slip 1_6_resistance_slip 1_7_resistance_slip 1_8_resistance_slip 1_9_resistance_slip 1_10_resistance_slip 1_11_resistance_slip 1_12_resistance_slip
0 0.0 1 5 1 1 0.5 0.5 0.5 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
1 1.0 1 5 1 1 0.5 0.5 0.5 0.024172998067 0.056046936661 0.0577092021704 0.0580734722316 0.0075496127829 0.00882737897336 0.00766104180366 1.0 1.1259596093e-13 1.12595994811e-13 1.55780499177e-13 0.999847710133 -0.0174524057657 1.55782193243e-13 0.0174524057657 0.999847710133 1.0 -1.23725617425e-12 1.23720879461e-12 1.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
2 2.0 1 5 1 1 0.5 0.5 0.5 0.0241432830571 0.0377263836563 0.0324090756476 0.033376660198 0.00727691268548 0.00865175202489 0.00764666078612 1.0 1.10501743118e-13 1.10501777e-13 1.50193822223e-13 0.99939084053 -0.034899495542 1.55527744546e-13 0.034899495542 0.99939084053 1.0 -6.45080505939e-13 5.68662738426e-13 2.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
3 3.0 1 5 1 1 0.5 0.5 0.5 0.0257848323757 0.0201567672193 0.00817993376404 0.00972554087639 0.00701188668609 0.00847246591002 0.00763081293553 1.0 1.08486407791e-13 1.08486434896e-13 1.447601533e-13 0.998629510403 -0.0523359552026 1.55243941075e-13 0.0523359552026 0.998629510403 1.0 -4.46705416797e-13 3.46547009838e-13 3.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
4 4.0 1 5 1 1 0.5 0.5 0.5 0.0286209738301 0.00342932180502 -0.0149000706151 -0.0128218811005 0.00675505120307 0.00828998535872 0.0076136472635 1.0 1.06547922069e-13 1.06547949174e-13 1.39501515266e-13 0.997564077377 -0.0697564706206 1.54932937682e-13 0.0697564706206 0.997564077377 1.0 -3.46813588048e-13 2.36204529243e-13 4.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
5 5.0 1 5 1 1 0.5 0.5 0.5 0.032148640163 -0.0124530605972 -0.0368128865957 -0.0341981202364 0.00650494545698 0.00810338370502 0.00759505899623 1.0 1.04684232745e-13 1.04684253074e-13 1.34370257434e-13 0.996194720268 -0.0871557444334 1.54588120733e-13 0.0871557444334 0.996194720268 1.0 -2.8629133569e-13 1.70304512138e-13 5.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
6 6.0 1 5 1 1 0.5 0.5 0.5 0.0360145416329 -0.027449907735 -0.0575108379126 -0.0543776340783 0.00626290449873 0.00791467912495 0.0075753852725 1.0 1.02893266281e-13 1.0289328661e-13 1.29417364412e-13 0.994521915913 -0.104528464377 1.54218746606e-13 0.104528464377 0.994521915913 1.0 -2.45509477548e-13 1.26874964528e-13 6.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
7 7.0 1 5 1 1 0.5 0.5 0.5 0.0399740663918 -0.0415176264942 -0.0769297853112 -0.0733039304614 0.00602708896622 0.00772315822542 0.00755509966984 1.0 1.01173003352e-13 1.01173016904e-13 1.24581768512e-13 0.992546141148 -0.121869340539 1.5383908611e-13 0.121869340539 0.992546141148 1.0 -2.1607593644e-13 9.60403970436e-14 7.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
8 8.0 1 5 1 1 0.5 0.5 0.5 0.0438656010832 -0.0547002702951 -0.0950862541795 -0.0910048484802 0.00579795939848 0.0075320713222 0.00753418169916 1.0 9.95214110774e-14 9.952142463e-14 1.19883541028e-13 0.990268051624 -0.139173105359 1.5344615769e-13 0.139173105359 0.990268051624 1.0 -1.93732616754e-13 7.31539937134e-14 8.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
9 9.0 1 5 1 1 0.5 0.5 0.5 0.0476265064906 -0.06693007797 -0.111963532865 -0.107425913215 0.00557443965226 0.00733647309244 0.00751292472705 1.0 9.79365107892e-14 9.79365175654e-14 1.15288543144e-13 0.987688362598 -0.156434461474 1.53048269045e-13 0.156434461474 0.987688362598 1.0 -1.7614966289e-13 5.54610577028e-14 9.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
10 10.0 1 5 1 1 0.5 0.5 0.5 0.0511619284052 -0.0781974568963 -0.127493560314 -0.122531078756 0.00535882124677 0.00714056473225 0.00749061629176 1.0 9.64163509231e-14 9.64163509231e-14 1.10881335851e-13 0.984807729721 -0.173648178577 1.52618409989e-13 0.173648178577 0.984807729721 1.0 -1.61827379296e-13 4.16502735892e-14 10.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
11 11.0 1 5 1 1 0.5 0.5 0.5 0.0544590124719 -0.088471762836 -0.141687169671 -0.136302277446 0.0051483400166 0.00694213900715 0.00746928341687 1.0 9.49590273492e-14 9.49590341255e-14 1.06568040796e-13 0.981627166271 -0.190808996558 1.52226335378e-13 0.190808996558 0.981627166271 1.0 -1.50064490456e-13 3.04205106743e-14 11.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
12 12.0 1 5 1 1 0.5 0.5 0.5 0.0574517639017 -0.0978478044271 -0.154580652714 -0.148810505867 0.00494291307405 0.00674560666084 0.00744635425508 1.0 9.35626969238e-14 9.35626969238e-14 1.02349369485e-13 0.978147625923 -0.207911685109 1.51779779608e-13 0.207911685109 0.978147625923 1.0 -1.40004354681e-13 2.11307767752e-14 12.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
13 13.0 1 5 1 1 0.5 0.5 0.5 0.0601600304963 -0.10622742027 -0.166131272912 -0.159981891513 0.0047429674305 0.00654464075342 0.00742361694574 1.0 9.22255571608e-14 9.22255503845e-14 9.82438888813e-14 0.974370062351 -0.224951043725 1.5134031181e-13 0.224951043725 0.974370062351 1.0 -1.31394718782e-13 1.33769905892e-14 13.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
14 14.0 1 5 1 1 0.5 0.5 0.5 0.0625560237246 -0.113614186645 -0.176321923733 -0.169804736972 0.00454916572198 0.00634495634586 0.00740135088563 1.0 9.09458597841e-14 9.09458530078e-14 9.42784397643e-14 0.970295727253 -0.241921886802 1.50916524286e-13 0.241921886802 0.970295727253 1.0 -1.23946358277e-13 6.88772167895e-15 14.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
15 15.0 1 5 1 1 0.5 0.5 0.5 0.064623152588 -0.120048590004 -0.185179919004 -0.17832493782 0.00435922853649 0.00614238297567 0.00737902149558 1.0 8.97219175041e-14 8.97219107278e-14 9.03759421358e-14 0.965925812721 -0.258819043636 1.50488345743e-13 0.258819043636 0.965925812721 1.0 -1.17391732728e-13 1.26347923687e-15 15.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
16 16.0 1 5 1 1 0.5 0.5 0.5 0.0663746195376 -0.125523671508 -0.192728817463 -0.185538485646 0.00417540827766 0.00594279170036 0.00735600618646 1.0 8.85521107938e-14 8.85521040175e-14 8.66191951607e-14 0.961261689663 -0.275637358427 1.50032669123e-13 0.275637358427 0.961261689663 1.0 -1.11524369384e-13 -3.50626472131e-15 16.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
17 17.0 1 5 1 1 0.5 0.5 0.5 0.0677872073143 -0.130049750209 -0.198957800865 -0.191445931792 0.00399514567107 0.00573630817235 0.00733454944566 1.0 8.74348675601e-14 8.74348607838e-14 8.29210628655e-14 0.956304728985 -0.292371690273 1.49627706059e-13 0.292371690273 0.956304728985 1.0 -1.06359209905e-13 -7.7192983925e-15 17.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
18 18.0 1 5 1 1 0.5 0.5 0.5 0.0688726440667 -0.13366368413 -0.20391356945 -0.196102648973 0.00382082024589 0.00553295295686 0.00731354439631 1.0 8.63686902487e-14 8.63686766962e-14 7.93686337898e-14 0.951056540012 -0.309017002583 1.49233476596e-13 0.309017002583 0.951056540012 1.0 -1.01717394815e-13 -1.1326327212e-14 18.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
19 19.0 1 5 1 1 0.5 0.5 0.5 0.0696297180331 -0.136321663857 -0.207558274269 -0.199463963509 0.00364940264262 0.00533262779936 0.00729088904336 1.0 8.53521355156e-14 8.53521151868e-14 7.58580481416e-14 0.945518553257 -0.325568139553 1.48775008155e-13 0.325568139553 0.945518553257 1.0 -9.7403408581e-14 -1.45807929668e-14 19.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
20 20.0 1 5 1 1 0.5 0.5 0.5 0.0700644066665 -0.138097688556 -0.209973961115 -0.201613843441 0.00348059250973 0.00512766698375 0.00727100577205 1.0 8.43838074503e-14 8.43837938977e-14 7.23816555195e-14 0.939692616463 -0.342020124197 1.48407572039e-13 0.342020124197 0.939692616463 1.0 -9.35965172554e-14 -1.75459757942e-14 20.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
21 21.0 1 5 1 1 0.5 0.5 0.5 0.0701877711751 -0.138971403241 -0.211157605052 -0.20253777504 0.00331580708735 0.00492311827838 0.00725055858493 1.0 8.34624050102e-14 8.34623846814e-14 6.89923311081e-14 0.933580458164 -0.358367949724 1.4801277337e-13 0.358367949724 0.933580458164 1.0 -9.0061604821e-14 -2.01888507268e-14 21.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
22 22.0 1 5 1 1 0.5 0.5 0.5 0.0699866553715 -0.138959825039 -0.211107447743 -0.202245801687 0.00315649039112 0.00472341617569 0.00723067810759 1.0 8.25866610342e-14 8.25866339291e-14 6.57438174539e-14 0.927183866501 -0.374606579542 1.4762987382e-13 0.374606579542 0.927183866501 1.0 -8.68153950963e-14 -2.24807118179e-14 22.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
23 23.0 1 5 1 1 0.5 0.5 0.5 0.0694881714825 -0.138143435121 -0.209938883781 -0.200845211744 0.00299837184139 0.00451778201386 0.00721126794815 1.0 8.17553761235e-14 8.17553490184e-14 6.24881345127e-14 0.920504868031 -0.39073112607 1.47254902499e-13 0.39073112607 0.920504868031 1.0 -8.38166477413e-14 -2.46553706542e-14 23.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
24 24.0 1 5 1 1 0.5 0.5 0.5 0.0687132541533 -0.136471450329 -0.20762142539 -0.198303565383 0.0028445108328 0.00431606685743 0.00719203986228 1.0 8.09674050896e-14 8.09673779845e-14 5.93358776826e-14 0.913545489311 -0.406736642122 1.46877193567e-13 0.406736642122 0.913545489311 1.0 -8.10227265054e-14 -2.65915626107e-14 24.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
25 25.0 1 5 1 1 0.5 0.5 0.5 0.0676169983635 -0.13403198123 -0.204196736217 -0.194667950273 0.00269203982316 0.00410861568525 0.00717331608757 1.0 8.02216705066e-14 8.02216434015e-14 5.61884252234e-14 0.906307816505 -0.422618240118 1.46507656809e-13 0.422618240118 0.906307816505 1.0 -7.84230400888e-14 -2.84337508508e-14 25.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
26 26.0 1 5 1 1 0.5 0.5 0.5 0.0662631331079 -0.130787238479 -0.199683398008 -0.189970225096 0.00254649785347 0.00390863511711 0.00715752178803 1.0 7.95171491586e-14 7.95171220536e-14 5.32437219585e-14 0.898794054985 -0.438371151686 1.46231442753e-13 0.438371151686 0.898794054985 1.0 -7.6093421657e-14 -2.99671074901e-14 26.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
27 27.0 1 5 1 1 0.5 0.5 0.5 0.0646436064666 -0.126818150282 -0.194168791175 -0.184269443154 0.00239934097044 0.00370743637905 0.00713936518878 1.0 7.88528788163e-14 7.8852844935e-14 5.02027178238e-14 0.891006529331 -0.453990489244 1.45861472314e-13 0.453990489244 0.891006529331 1.0 -7.37995886732e-14 -3.15536984562e-14 27.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
28 28.0 1 5 1 1 0.5 0.5 0.5 0.0627565607148 -0.122118026018 -0.18763422966 -0.177565723658 0.00225654477254 0.00350114423782 0.00712363282219 1.0 7.82279311316e-14 7.82279040265e-14 4.72786279527e-14 0.88294762373 -0.469471544027 1.45566271168e-13 0.469471544027 0.88294762373 1.0 -7.17171954418e-14 -3.29618534615e-14 28.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
29 29.0 1 5 1 1 0.5 0.5 0.5 0.0606526623375 -0.11671321094 -0.180158898234 -0.169930398464 0.00211710762233 0.003298870055 0.00710933981463 1.0 7.76414658477e-14 7.76414319664e-14 4.44384214485e-14 0.874619722366 -0.484809607267 1.45310806031e-13 0.484809607267 0.874619722366 1.0 -6.9789633057e-14 -3.42433838175e-14 29.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
30 30.0 1 5 1 1 0.5 0.5 0.5 0.0582944850272 -0.1107018888 -0.171804904938 -0.161417961121 0.00197660620324 0.00309796072543 0.00709319859743 1.0 7.7092669813e-14 7.70926291555e-14 4.15282669635e-14 0.866025388241 -0.5 1.44978782668e-13 0.5 0.866025388241 1.0 -6.78861535128e-14 -3.55643994614e-14 30.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
31 31.0 1 5 1 1 0.5 0.5 0.5 0.0557503201598 -0.104059666395 -0.162606656551 -0.152081504464 0.00184033811092 0.00289721833542 0.00708150491118 1.0 7.65807840861e-14 7.65807502048e-14 3.87372832379e-14 0.857167303562 -0.515038073063 1.44792137264e-13 0.515038073063 0.857167303562 1.0 -6.62197551025e-14 -3.67385463685e-14 31.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
32 32.0 1 5 1 1 0.5 0.5 0.5 0.0529884126408 -0.0968004092574 -0.152546048164 -0.14190004766 0.00170668761712 0.0026978047099 0.00706525752321 1.0 7.6105117488e-14 7.61050768304e-14 3.60106096418e-14 0.848048090935 -0.529919266701 1.44433035952e-13 0.529919266701 0.848048090935 1.0 -6.44701645043e-14 -3.78307716371e-14 32.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
33 33.0 1 5 1 1 0.5 0.5 0.5 0.0500567272075 -0.0890314355493 -0.141778171062 -0.131015405059 0.00157438474707 0.00249168649316 0.00705360202119 1.0 7.56650127211e-14 7.56649788398e-14 3.33055252214e-14 0.838670551777 -0.544639050961 1.44223988221e-13 0.544639050961 0.838670551777 1.0 -6.29398605716e-14 -3.88876756317e-14 33.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
34 34.0 1 5 1 1 0.5 0.5 0.5 0.0469640629868 -0.0807560905814 -0.130312100053 -0.119427792728 0.00144245161209 0.00229033012874 0.00704180356115 1.0 7.52598734743e-14 7.52598328167e-14 3.05895784506e-14 0.829037606716 -0.559192895889 1.43997755885e-13 0.559192895889 0.829037606716 1.0 -6.14617270923e-14 -3.99417607007e-14 34.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
35 35.0 1 5 1 1 0.5 0.5 0.5 0.0437172172209 -0.071987785399 -0.118148125708 -0.107174038887 0.00131240475457 0.00208335206844 0.00703246705234 1.0 7.48891440939e-14 7.48891034364e-14 2.79183008092e-14 0.819152057171 -0.573576450348 1.43846116659e-13 0.573576450348 0.819152057171 1.0 -6.01114481542e-14 -4.09455862228e-14 35.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
36 36.0 1 5 1 1 0.5 0.5 0.5 0.0403788372113 -0.0627495497465 -0.105374902487 -0.0943066850305 0.00118379341438 0.00188469397835 0.00702259968966 1.0 7.45523163603e-14 7.45522757027e-14 2.5277750135e-14 0.809017002583 -0.587785243988 1.43663347277e-13 0.587785243988 0.809017002583 1.0 -5.87893923538e-14 -4.19154524999e-14 36.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
37 37.0 1 5 1 1 0.5 0.5 0.5 0.0369407713034 -0.053146019578 -0.0920672789216 -0.0809241756797 0.00105845439248 0.00168191338889 0.0070105525665 1.0 7.42489362636e-14 7.42488888298e-14 2.27346861768e-14 0.798635542393 -0.601814985275 1.43391131217e-13 0.601814985275 0.798635542393 1.0 -5.74447715958e-14 -4.27990704942e-14 37.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
38 38.0 1 5 1 1 0.5 0.5 0.5 0.0334600072977 -0.0431970842183 -0.0782782137394 -0.0670673400164 0.000933687435463 0.00148375425488 0.00700649619102 1.0 7.39785836756e-14 7.39785362418e-14 2.01957371047e-14 0.788010776043 -0.61566144228 1.43388285186e-13 0.61566144228 0.788010776043 1.0 -5.63700595804e-14 -4.36789107216e-14 38.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
39 39.0 1 5 1 1 0.5 0.5 0.5 0.0299732687575 -0.032885748893 -0.0640020221472 -0.0527336075902 0.000808531127404 0.00128437299281 0.00699805375189 1.0 7.37408926781e-14 7.37408452443e-14 1.76288596622e-14 0.777145981789 -0.629320383072 1.4321798413e-13 0.629320383072 0.777145981789 1.0 -5.52001275093e-14 -4.45814548201e-14 39.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
40 40.0 1 5 1 1 0.5 0.5 0.5 0.0265501119175 -0.0222893729806 -0.0493332147598 -0.0380037464201 0.000683732156176 0.00107720762026 0.00699263811111 1.0 7.35355447866e-14 7.35354973528e-14 1.50622549643e-14 0.766044437885 -0.642787575722 1.43141751165e-13 0.642787575722 0.766044437885 1.0 -5.41440536069e-14 -4.54841479964e-14 40.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
41 41.0 1 5 1 1 0.5 0.5 0.5 0.0232912298862 -0.0114479679614 -0.0343343839049 -0.0229646526277 0.000558626372367 0.000879483588506 0.00698518194258 1.0 7.33622553982e-14 7.33622079643e-14 1.24743245179e-14 0.754709601402 -0.656059026718 1.42980692932e-13 0.656059026718 0.754709601402 1.0 -5.30580997703e-14 -4.64043103802e-14 41.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
42 42.0 1 5 1 1 0.5 0.5 0.5 0.0203406476883 -0.000415417132899 -0.0190748237073 -0.00765737332404 0.000437716866145 0.000681487843394 0.00698295794427 1.0 7.32207941197e-14 7.32207466859e-14 1.00257809663e-14 0.7431448102 -0.669130623341 1.42991467191e-13 0.669130623341 0.7431448102 1.0 -5.21353590182e-14 -4.72217344437e-14 42.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
43 43.0 1 5 1 1 0.5 0.5 0.5 0.017873190849 0.0107409814373 -0.00360602373257 0.00782212708145 0.000314239208819 0.000479505921248 0.00697580305859 1.0 7.31109712159e-14 7.31109170058e-14 7.47973206403e-15 0.731353700161 -0.681998372078 1.42815406311e-13 0.681998372078 0.731353700161 1.0 -5.1103120623e-14 -4.81168585336e-14 43.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
44 44.0 1 5 1 1 0.5 0.5 0.5 0.0162081584588 0.0220755711198 0.0120714716613 0.0235084760934 0.000193880769075 0.000282061198959 0.00697334948927 1.0 7.30326240564e-14 7.30325698463e-14 5.03865299026e-15 0.71933978796 -0.694658339024 1.42792353462e-13 0.694658339024 0.71933978796 1.0 -5.02115777884e-14 -4.89405845223e-14 44.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
45 45.0 1 5 1 1 0.5 0.5 0.5 0.0156273554774 0.0334772281349 0.0278379991651 0.0392913781106 7.09175001248e-05 8.74613033375e-05 0.00697097880766 1.0 7.29856645498e-14 7.29856103397e-14 2.50234157871e-15 0.707106769085 -0.707106769085 1.42759393716e-13 0.707106769085 0.707106769085 1.0 -4.93375177139e-14 -4.98392356573e-14 45.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
46 46.0 1 5 1 1 0.5 0.5 0.5 0.0162480691786 0.0449191257358 0.0436743237078 0.0551158338785 -4.99888519698e-05 -0.000117406474601 0.00696983095258 1.0 7.29700181572e-14 7.29699571708e-14 3.70651989336e-17 0.694658398628 -0.71933978796 1.42756737421e-13 0.71933978796 0.694658398628 1.0 -4.85075202933e-14 -5.06943662396e-14 46.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
47 47.0 1 5 1 1 0.5 0.5 0.5 0.017954318699 0.0563476122916 0.0594997182488 0.0709141045809 -0.000170402156073 -0.000321725587128 0.00697033200413 1.0 7.29856645498e-14 7.29856035634e-14 -2.41059012283e-15 0.681998372078 -0.731353700161 1.42799441434e-13 0.731353700161 0.681998372078 1.0 -4.77291918824e-14 -5.15456851735e-14 47.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
48 48.0 1 5 1 1 0.5 0.5 0.5 0.0204742447658 0.0677090287209 0.075221426785 0.0866304710507 -0.00029246028862 -0.000511626130901 0.00697235297412 1.0 7.30326240564e-14 7.303256307e-14 -4.91429161294e-15 0.669130623341 -0.7431448102 1.42883006316e-13 0.7431448102 0.669130623341 1.0 -4.69965219356e-14 -5.2443960226e-14 48.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
49 49.0 1 5 1 1 0.5 0.5 0.5 0.023524710328 0.0790234953165 0.0908864960074 0.102273575962 -0.000416969822254 -0.000719646865036 0.0069726947695 1.0 7.31109644396e-14 7.31109034533e-14 -7.50072916376e-15 0.656059026718 -0.754709541798 1.42894593727e-13 0.754709541798 0.656059026718 1.0 -4.62321492396e-14 -5.34057729138e-14 49.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
50 50.0 1 5 1 1 0.5 0.5 0.5 0.0268598238159 0.0902177020907 0.106357127428 0.1177008003 -0.000538720283657 -0.0009086750797 0.00697285402566 1.0 7.32207941197e-14 7.32207331333e-14 -9.98427042269e-15 0.642787635326 -0.766044437885 1.42887085627e-13 0.766044437885 0.642787635326 1.0 -4.54714797716e-14 -5.43082492497e-14 50.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
51 51.0 1 5 1 1 0.5 0.5 0.5 0.0303826202004 0.101213820279 0.121579430997 0.132867023349 -0.000660901481751 -0.00111284002196 0.00697690108791 1.0 7.33622553982e-14 7.33621876355e-14 -1.24756806177e-14 0.629320383072 -0.777145922184 1.43004138804e-13 0.777145922184 0.629320383072 1.0 -4.48062200948e-14 -5.52263923069e-14 51.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
52 52.0 1 5 1 1 0.5 0.5 0.5 0.0339949033569 0.11201544106 0.136535584927 0.147761180997 -0.000784714065958 -0.00131459208205 0.00697894394398 1.0 7.35355380104e-14 7.3535477024e-14 -1.50155309329e-14 0.615661501884 -0.788010716438 1.43037965912e-13 0.788010716438 0.615661501884 1.0 -4.40999640234e-14 -5.61864601069e-14 52.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
53 53.0 1 5 1 1 0.5 0.5 0.5 0.0376308280896 0.122612737119 0.15118843317 0.1623544842 -0.00091004971182 -0.00151794939302 0.00698458682746 1.0 7.37408859018e-14 7.37408249155e-14 -1.75981801289e-14 0.60181504488 -0.798635482788 1.43186406742e-13 0.798635482788 0.60181504488 1.0 -4.34776455289e-14 -5.71844546542e-14 53.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
54 54.0 1 5 1 1 0.5 0.5 0.5 0.0412246747871 0.132881388068 0.165393546224 0.176486164331 -0.00103311298881 -0.00171284656972 0.00699377711862 1.0 7.39785768994e-14 7.39785091367e-14 -2.00878776232e-14 0.587785243988 -0.809017002583 1.4344772657e-13 0.809017002583 0.587785243988 1.0 -4.29343382678e-14 -5.81362689651e-14 54.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
55 55.0 1 5 1 1 0.5 0.5 0.5 0.0447415113323 0.142851829529 0.179176211357 0.190176308155 -0.00116179022007 -0.00190604047384 0.00699571380392 1.0 7.42489294873e-14 7.42488617247e-14 -2.27610458421e-14 0.573576450348 -0.819152057171 1.43440909649e-13 0.819152057171 0.573576450348 1.0 -4.22339470525e-14 -5.92136542165e-14 55.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
56 56.0 1 5 1 1 0.5 0.5 0.5 0.0481736416751 0.152466788888 0.192468911409 0.203372821212 -0.00128960516304 -0.00210840022191 0.00700309127569 1.0 7.4552309584e-14 7.45522418214e-14 -2.53872833536e-14 0.559192895889 -0.829037547112 1.43614029631e-13 0.829037547112 0.559192895889 1.0 -4.1651784696e-14 -6.02744647271e-14 56.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
57 57.0 1 5 1 1 0.5 0.5 0.5 0.0514957040887 0.16164894402 0.205184012651 0.215988025069 -0.00141807645559 -0.00230375886895 0.00701119331643 1.0 7.48891373177e-14 7.48890627788e-14 -2.80182744139e-14 0.544639050961 -0.838670551777 1.43800837665e-13 0.838670551777 0.544639050961 1.0 -4.10839371963e-14 -6.13515111652e-14 57.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
58 58.0 1 5 1 1 0.5 0.5 0.5 0.0546568840541 0.170429125428 0.217311233282 0.228001371026 -0.00154578115325 -0.00251251878217 0.00702094798908 1.0 7.5259866698e-14 7.52597921591e-14 -3.06018773689e-14 0.529919266701 -0.848048090935 1.44034266393e-13 0.848048090935 0.529919266701 1.0 -4.05486903006e-14 -6.24149440898e-14 58.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
59 59.0 1 5 1 1 0.5 0.5 0.5 0.057665178562 0.178753256798 0.228810757399 0.239380404353 -0.00167655316181 -0.0027051072102 0.00703151477501 1.0 7.56650059448e-14 7.56649314059e-14 -3.3271399958e-14 0.515038073063 -0.857167303562 1.44284839068e-13 0.857167303562 0.515038073063 1.0 -4.00271416218e-14 -6.35444252555e-14 59.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
60 60.0 1 5 1 1 0.5 0.5 0.5 0.0604909854062 0.186547547579 0.239586278796 0.250017344952 -0.00180981971789 -0.00291019980796 0.00704179238528 1.0 7.61051039354e-14 7.61050361728e-14 -3.6004626201e-14 0.5 -0.866025388241 1.44513903882e-13 0.866025388241 0.5 1.0 -3.94958825573e-14 -6.47265850943e-14 60.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
61 61.0 1 5 1 1 0.5 0.5 0.5 0.0631217688617 0.193791866302 0.249607756734 0.259886533022 -0.00194548477884 -0.00310831447132 0.00705294730142 1.0 7.65807705336e-14 7.65806959947e-14 -3.87961283108e-14 0.48480963707 -0.874619722366 1.44762877358e-13 0.874619722366 0.48480963707 1.0 -3.89781861843e-14 -6.59583268537e-14 61.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
62 62.0 1 5 1 1 0.5 0.5 0.5 0.0655460742438 0.20045940578 0.258834958076 0.268953949213 -0.00208330713212 -0.00330717721954 0.00706291478127 1.0 7.70926562605e-14 7.70925749453e-14 -4.16352980467e-14 0.46947157383 -0.882947564125 1.44959158609e-13 0.882947564125 0.46947157383 1.0 -3.84318262164e-14 -6.72338636046e-14 62.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
63 63.0 1 5 1 1 0.5 0.5 0.5 0.0677412166974 0.206590846181 0.267280697823 0.277241885662 -0.00222166883759 -0.00350732635707 0.00707424012944 1.0 7.76414522952e-14 7.764137098e-14 -4.44629108651e-14 0.453990519047 -0.891006529331 1.45192844835e-13 0.891006529331 0.453990519047 1.0 -3.79073942375e-14 -6.85204659939e-14 63.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
64 64.0 1 5 1 1 0.5 0.5 0.5 0.0697129410434 0.212010905147 0.274793446064 0.284571915865 -0.00236255140044 -0.00370562658645 0.00708921952173 1.0 7.82279175791e-14 7.82278430402e-14 -4.73467022966e-14 0.438371151686 -0.898794054985 1.45545264751e-13 0.898794054985 0.438371151686 1.0 -3.74487496135e-14 -6.98572872725e-14 64.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
65 65.0 1 5 1 1 0.5 0.5 0.5 0.0714181561332 0.216807678342 0.281412482262 0.290993452072 -0.00250276038423 -0.00390887679532 0.00710331602022 1.0 7.88528652638e-14 7.88527839486e-14 -5.01717130298e-14 0.42261826992 -0.906307756901 1.45856701825e-13 0.906307756901 0.42261826992 1.0 -3.69653241935e-14 -7.11814369383e-14 65.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
66 66.0 1 5 1 1 0.5 0.5 0.5 0.0728654392243 0.220906943083 0.287071973085 0.296444416046 -0.00265323766507 -0.00410525733605 0.00711599038914 1.0 7.95171356061e-14 7.95170542909e-14 -5.33217404692e-14 0.406736671925 -0.913545429707 1.46108494227e-13 0.913545429707 0.406736671925 1.0 -3.64467062487e-14 -7.27051272377e-14 66.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
67 67.0 1 5 1 1 0.5 0.5 0.5 0.0740421446261 0.224261745811 0.291689932346 0.300876945257 -0.00280131306499 -0.00430715922266 0.00713248644024 1.0 8.0221656954e-14 8.02215688626e-14 -5.63473692618e-14 0.390731155872 -0.920504868031 1.46486108291e-13 0.920504868031 0.390731155872 1.0 -3.59935876677e-14 -7.41815937562e-14 67.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
68 68.0 1 5 1 1 0.5 0.5 0.5 0.0749262839347 0.22694632411 0.295370638371 0.30431458354 -0.00294701498933 -0.00449755322188 0.00714947842062 1.0 8.09673915371e-14 8.09673034456e-14 -5.92471696161e-14 0.374606609344 -0.927183866501 1.46872639918e-13 0.927183866501 0.374606609344 1.0 -3.55405978356e-14 -7.56131336471e-14 68.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
69 69.0 1 5 1 1 0.5 0.5 0.5 0.0755447188712 0.228792086244 0.297914654016 0.306645393372 -0.00310259847902 -0.00470214849338 0.00716820033267 1.0 8.17553557947e-14 8.17552677033e-14 -6.24511022323e-14 0.358367979527 -0.93358039856 1.47312107716e-13 0.93358039856 0.358367979527 1.0 -3.51104440118e-14 -7.7233013008e-14 69.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
70 70.0 1 5 1 1 0.5 0.5 0.5 0.0758345472717 0.229867011309 0.299378037453 0.307846367359 -0.00326239760034 -0.00490199634805 0.00718151498586 1.0 8.25866474817e-14 8.2586552614e-14 -6.57566788021e-14 0.342020153999 -0.939692616463 1.47553288489e-13 0.939692616463 0.342020153999 1.0 -3.45680752631e-14 -7.89318358395e-14 70.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
71 71.0 1 5 1 1 0.5 0.5 0.5 0.0758360864293 0.230157241225 0.299764603376 0.307952821255 -0.00342002091929 -0.00510233594105 0.00720483297482 1.0 8.34623914577e-14 8.34623033663e-14 -6.89357628598e-14 0.325568169355 -0.945518553257 1.48140492386e-13 0.945518553257 0.325568169355 1.0 -3.42024992312e-14 -8.05897232628e-14 71.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
72 72.0 1 5 1 1 0.5 0.5 0.5 0.075525141316 0.229602411389 0.298972666264 0.306899368763 -0.00358790112659 -0.0052975253202 0.00721790129319 1.0 8.43837938977e-14 8.438369903e-14 -7.24243798614e-14 0.309017002583 -0.951056480408 1.48359772276e-13 0.951056480408 0.309017002583 1.0 -3.36342078863e-14 -8.24389384882e-14 72.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
73 73.0 1 5 1 1 0.5 0.5 0.5 0.0749000000883 0.228239625692 0.29704400897 0.304708212614 -0.00375670660287 -0.00549271516502 0.00723527790979 1.0 8.5352121963e-14 8.53520203191e-14 -7.58904522341e-14 0.292371720076 -0.956304728985 1.48725527878e-13 0.956304728985 0.292371720076 1.0 -3.3134577032e-14 -8.43050130574e-14 73.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
74 74.0 1 5 1 1 0.5 0.5 0.5 0.0739484623657 0.225991547108 0.293922573328 0.301260143518 -0.00392613513395 -0.00569652067497 0.00725413672626 1.0 8.63686766962e-14 8.63685818285e-14 -7.93205019896e-14 0.275637388229 -0.961261689663 1.49138825747e-13 0.961261689663 0.275637388229 1.0 -3.26499251084e-14 -8.6183183257e-14 74.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
75 75.0 1 5 1 1 0.5 0.5 0.5 0.072654064954 0.22285503149 0.289548963308 0.296567112207 -0.00410118186846 -0.00588755588979 0.00727464165539 1.0 8.74348607838e-14 8.74347591399e-14 -8.2887859174e-14 0.258819073439 -0.965925812721 1.49606293066e-13 0.965925812721 0.258819073439 1.0 -3.21823527571e-14 -8.81655249567e-14 75.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
76 76.0 1 5 1 1 0.5 0.5 0.5 0.0710590306271 0.218880146742 0.284022808075 0.290706813335 -0.00428106123582 -0.00608264096081 0.00729729793966 1.0 8.85520972413e-14 8.85519955973e-14 -8.65618002082e-14 0.241921916604 -0.970295727253 1.50146700086e-13 0.970295727253 0.241921916604 1.0 -3.17401744536e-14 -9.02373878744e-14 76.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
77 77.0 1 5 1 1 0.5 0.5 0.5 0.0691430442849 0.214006558061 0.277257710695 0.283598601818 -0.00446529500186 -0.00628326507285 0.00731486734003 1.0 8.97219039515e-14 8.97218023076e-14 -9.03223757722e-14 0.224951073527 -0.974370062351 1.5050570653e-13 0.974370062351 0.224951073527 1.0 -3.11913851952e-14 -9.23900916167e-14 77.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
78 78.0 1 5 1 1 0.5 0.5 0.5 0.0668996797354 0.208257958293 0.269263744354 0.275259256363 -0.00465713022277 -0.00647377641872 0.00733389845118 1.0 9.09458462315e-14 9.09457378113e-14 -9.42809808631e-14 0.207911714911 -0.978147566319 1.50915440084e-13 0.978147566319 0.207911714911 1.0 -3.06547254486e-14 -9.46824541513e-14 78.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
79 79.0 1 5 1 1 0.5 0.5 0.5 0.0643428410009 0.201603919268 0.260032474995 0.265651851892 -0.00485096639022 -0.00666582910344 0.00735527323559 1.0 9.22255503845e-14 9.2225435188e-14 -9.82367805808e-14 0.190809011459 -0.981627166271 1.51408264181e-13 0.981627166271 0.190809011459 1.0 -3.0145265624e-14 -9.70135769136e-14 79.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
80 80.0 1 5 1 1 0.5 0.5 0.5 0.0614645414137 0.194048047066 0.249545291066 0.254766076803 -0.00505235884339 -0.00686362525448 0.00737208453938 1.0 9.35626901476e-14 9.35625817274e-14 -1.02382444428e-13 0.17364820838 -0.984807729721 1.51741778322e-13 0.984807729721 0.17364820838 1.0 -2.95383529648e-14 -9.94839518924e-14 80.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
81 81.0 1 5 1 1 0.5 0.5 0.5 0.0582718432416 0.185640856624 0.237853914499 0.242651328444 -0.00525587564334 -0.00704948464409 0.00739240786061 1.0 9.49590273492e-14 9.49589053765e-14 -1.06522917657e-13 0.156434491277 -0.987688362598 1.52202360958e-13 0.987688362598 0.156434491277 1.0 -2.89785014561e-14 -1.01996719176e-13 81.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
82 82.0 1 5 1 1 0.5 0.5 0.5 0.0548411210712 0.176334485412 0.224956199527 0.229332342744 -0.00546766957268 -0.00723874382675 0.00740870647132 1.0 9.64163441469e-14 9.64162289504e-14 -1.10871842306e-13 0.13917312026 -0.990268051624 1.52525670045e-13 0.990268051624 0.13917312026 1.0 -2.833043316e-14 -1.04662663898e-13 82.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
83 83.0 1 5 1 1 0.5 0.5 0.5 0.0511313149972 0.166156679392 0.210826560855 0.214756399393 -0.00568466819823 -0.00742362486199 0.00743296789005 1.0 9.79365040129e-14 9.79363888164e-14 -1.15317233844e-13 0.121869370341 -0.992546141148 1.53136387577e-13 0.992546141148 0.121869370341 1.0 -2.78072717305e-14 -1.07427616607e-13 83.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
84 84.0 1 5 1 1 0.5 0.5 0.5 0.0472034591648 0.155100286007 0.195493191481 0.198953047395 -0.00591072347015 -0.00760667771101 0.00744895776734 1.0 9.95214110774e-14 9.95212891047e-14 -1.19991419145e-13 0.104528486729 -0.994521915913 1.53463518477e-13 0.994521915913 0.104528486729 1.0 -2.7119679183e-14 -1.10360986571e-13 84.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
85 85.0 1 5 1 1 0.5 0.5 0.5 0.0431223359335 0.143299892545 0.179094478488 0.182078793645 -0.0061384961009 -0.00779581116512 0.00746519910172 1.0 1.01173003352e-13 1.01172881379e-13 -1.24631763785e-13 0.0871557667851 -0.996194720268 1.53809826204e-13 0.996194720268 0.0871557667851 1.0 -2.64190067528e-14 -1.13333658864e-13 85.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
86 86.0 1 5 1 1 0.5 0.5 0.5 0.0389558933784 0.130603894591 0.161492869258 0.163967981935 -0.00637407042086 -0.00797392893583 0.00748812500387 1.0 1.02893273058e-13 1.02893144309e-13 -1.29449253508e-13 0.069756500423 -0.997564077377 1.54404497543e-13 0.997564077377 0.069756500423 1.0 -2.58185688134e-14 -1.16454941393e-13 86.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
87 87.0 1 5 1 1 0.5 0.5 0.5 0.0348200893229 0.117140188813 0.142814710736 0.144770666957 -0.00661494443193 -0.00814818497747 0.0075002736412 1.0 1.04684239521e-13 1.04684103996e-13 -1.34351825997e-13 0.0523359812796 -0.998629510403 1.5463373854e-13 0.998629510403 0.0523359812796 1.0 -2.50090916129e-14 -1.19682055607e-13 87.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
88 88.0 1 5 1 1 0.5 0.5 0.5 0.0309165569603 0.102922193706 0.123093500733 0.124490439892 -0.00686429999769 -0.0083291567862 0.00751314265653 1.0 1.06547935622e-13 1.06547800097e-13 -1.39447996336e-13 0.0348995216191 -0.99939084053 1.54905873285e-13 0.99939084053 0.0348995216191 1.0 -2.41937732733e-14 -1.23072938559e-13 88.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
89 89.0 1 5 1 1 0.5 0.5 0.5 0.0275482477894 0.0879731550813 0.102374792099 0.103184834123 -0.00712391687557 -0.00850248057395 0.00752882473171 1.0 1.08486421344e-13 1.08486279042e-13 -1.44796799334e-13 0.0174524337053 -0.999847710133 1.55297460004e-13 0.999847710133 0.0174524337053 1.0 -2.34091547958e-14 -1.26660902361e-13 89.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0
90 90.0 1 5 1 1 0.5 0.5 0.5 0.0251206236511 0.0723270624876 0.0806358680129 0.0808931067586 -0.00738708348945 -0.00866825506091 0.00754283368587 1.0 1.10501756671e-13 1.10501614369e-13 -1.50157664081e-13 2.67917759089e-08 -1.0 1.55653268052e-13 1.0 2.67920814423e-08 1.0 -2.25758302298e-14 -1.30329706988e-13 90.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0 31000000.0

View File

@ -1,3 +0,0 @@
1 header
inc elem node ip grain ip.x ip.y ip.z 1_1_p 1_2_p 1_3_p 1_4_p 1_5_p 1_6_p 1_7_p 1_8_p 1_9_p 1_1_div(p) 1_2_div(p) 1_3_div(p) 1_norm(div(p))
90 1 5 1 1 0.5 0.5 0.5 0.0723270624876 0.00754283322021 0.00738708395511 -0.00738708348945 -0.00866825319827 -0.0806358680129 0.00754283368587 0.0808931067586 0.00866825692356 0.000000 0.000000 0.000000 0.000000

View File

@ -3,25 +3,12 @@
#-------------------#
[direct]
mech none # isostrain 1 grain
mech none
thermal adiabatic # thermal strain (stress) induced mass transport
initialT 300.0
thermal adiabatic
t0 330.0
(output) temperature
#-------------------#
<crystallite>
#-------------------#
[aLittleSomething]
(output) texture
(output) f
(output) p
(output) fe
(output) fi
(output) fp
#-------------------#
<phase>
#-------------------#
@ -34,6 +21,12 @@ plasticity none
{config/elastic_isotropic.config}
{config/thermal.config}
(output) f
(output) p
(output) fe
(output) fi
(output) fp
#.................
[Ti matrix]
@ -43,6 +36,12 @@ plasticity none
{config/elastic_Ti.config}
{config/thermal.config}
(output) f
(output) p
(output) fe
(output) fi
(output) fp
#.................
[isotropic inclusion]
@ -52,6 +51,12 @@ plasticity none
{config/thermal.config}
{config/thermalExpansion_isotropic.config}
(output) f
(output) p
(output) fe
(output) fi
(output) fp
#.................
[anisotropic inclusion]
@ -61,6 +66,12 @@ plasticity none
{config/thermal.config}
{config/thermalExpansion_fullyAnisotropic.config}
(output) f
(output) p
(output) fe
(output) fi
(output) fp
#.................
[Ti inclusion]
@ -71,32 +82,32 @@ plasticity none
{config/thermal.config}
{config/thermalExpansion_Ti.config}
(output) f
(output) p
(output) fe
(output) fi
(output) fp
#--------------------------#
<microstructure>
#--------------------------#
[isotropic matrix]
crystallite 1
(constituent) phase 1 texture 1 fraction 1.0
[Ti matrix]
crystallite 1
(constituent) phase 2 texture 1 fraction 1.0
[isotropic inclusion]
crystallite 1
(constituent) phase 3 texture 1 fraction 1.0
[anisotropic inclusion]
crystallite 1
(constituent) phase 4 texture 1 fraction 1.0
[rotated inclusion]
crystallite 1
(constituent) phase 4 texture 2 fraction 1.0
[Ti inclusion]
crystallite 1
(constituent) phase 5 texture 1 fraction 1.0
#--------------------------#
@ -104,8 +115,8 @@ crystallite 1
#--------------------------#
[cube]
(gauss) phi1 0.0 Phi 0.0 phi2 0.0 scatter 0.0 fraction 1.0
(gauss) phi1 0.0 Phi 0.0 phi2 0.0
[rotated]
(gauss) phi1 0.0 Phi 45.0 phi2 0.0 scatter 0.0 fraction 1.0
(gauss) phi1 0.0 Phi 45.0 phi2 0.0

View File

@ -1,54 +0,0 @@
#!/usr/bin/env bash
for geom in $(ls geom/*.geom)
do
base=${geom%.geom}
base=${base#geom/}
name=${base}_thermal
vtr=${base}.vtr
[[ -f ${name}.spectralOut ]] || \
DAMASK_spectral \
--workingdir ./ \
--load thermal.load \
--geom $geom \
> ${name}.out
if [ ! -f postProc/${name}_inc10.txt ]
then
postResults ${name}.spectralOut \
--ho temperature \
--cr f,fe,fi,fp,p \
--split \
--separation x,y,z \
addCauchy postProc/${name}_inc*.txt \
addDeviator postProc/${name}_inc*.txt \
--spherical \
--tensor p,Cauchy \
addDisplacement postProc/${name}_inc*.txt \
--nodal \
fi
geom_check ${geom}
for inc in {00..10}
do
echo "generating postProc/${name}_inc${inc}.vtr"
cp geom/${vtr} postProc/${name}_inc${inc}.vtr
vtk_addRectilinearGridData \
postProc/${name}_inc${inc}.txt \
--vtk postProc/${name}_inc${inc}.vtr \
--data 'sph(p)','sph(Cauchy)',temperature \
--tensor f,fe,fi,fp,p,Cauchy \
vtk_addRectilinearGridData \
postProc/${name}_inc${inc}_nodal.txt \
--vtk postProc/${name}_inc${inc}.vtr \
--data 'avg(f).pos','fluct(f).pos' \
done
done

View File

@ -51,64 +51,44 @@ atol_resistance 1
<microstructure>
#-------------------#
[Grain01]
crystallite 1
(constituent) phase 1 texture 01 fraction 1.0
[Grain02]
crystallite 1
(constituent) phase 1 texture 02 fraction 1.0
[Grain03]
crystallite 1
(constituent) phase 1 texture 03 fraction 1.0
[Grain04]
crystallite 1
(constituent) phase 1 texture 04 fraction 1.0
[Grain05]
crystallite 1
(constituent) phase 1 texture 05 fraction 1.0
[Grain06]
crystallite 1
(constituent) phase 1 texture 06 fraction 1.0
[Grain07]
crystallite 1
(constituent) phase 1 texture 07 fraction 1.0
[Grain08]
crystallite 1
(constituent) phase 1 texture 08 fraction 1.0
[Grain09]
crystallite 1
(constituent) phase 1 texture 09 fraction 1.0
[Grain10]
crystallite 1
(constituent) phase 1 texture 10 fraction 1.0
[Grain11]
crystallite 1
(constituent) phase 1 texture 11 fraction 1.0
[Grain12]
crystallite 1
(constituent) phase 1 texture 12 fraction 1.0
[Grain13]
crystallite 1
(constituent) phase 1 texture 13 fraction 1.0
[Grain14]
crystallite 1
(constituent) phase 1 texture 14 fraction 1.0
[Grain15]
crystallite 1
(constituent) phase 1 texture 15 fraction 1.0
[Grain16]
crystallite 1
(constituent) phase 1 texture 16 fraction 1.0
[Grain17]
crystallite 1
(constituent) phase 1 texture 17 fraction 1.0
[Grain18]
crystallite 1
(constituent) phase 1 texture 18 fraction 1.0
[Grain19]
crystallite 1
(constituent) phase 1 texture 19 fraction 1.0
[Grain20]
crystallite 1
(constituent) phase 1 texture 20 fraction 1.0
@ -116,42 +96,42 @@ crystallite 1
<texture>
#-------------------#
[Grain01]
(gauss) phi1 0.0 Phi 0.0 phi2 0.0 scatter 0.0 fraction 1.0
(gauss) phi1 0.0 Phi 0.0 phi2 0.0
[Grain02]
(gauss) phi1 257.468172 Phi 53.250534 phi2 157.331503 scatter 0.0 fraction 1.0
(gauss) phi1 257.468172 Phi 53.250534 phi2 157.331503
[Grain03]
(gauss) phi1 216.994815 Phi 94.418518 phi2 251.147231 scatter 0.0 fraction 1.0
(gauss) phi1 216.994815 Phi 94.418518 phi2 251.147231
[Grain04]
(gauss) phi1 196.157946 Phi 55.870978 phi2 21.68117 scatter 0.0 fraction 1.0
(gauss) phi1 196.157946 Phi 55.870978 phi2 21.68117
[Grain05]
(gauss) phi1 152.515728 Phi 139.769395 phi2 240.036018 scatter 0.0 fraction 1.0
(gauss) phi1 152.515728 Phi 139.769395 phi2 240.036018
[Grain06]
(gauss) phi1 232.521881 Phi 73.749222 phi2 241.429633 scatter 0.0 fraction 1.0
(gauss) phi1 232.521881 Phi 73.749222 phi2 241.429633
[Grain07]
(gauss) phi1 157.531396 Phi 135.503513 phi2 75.737722 scatter 0.0 fraction 1.0
(gauss) phi1 157.531396 Phi 135.503513 phi2 75.737722
[Grain08]
(gauss) phi1 321.03828 Phi 27.209843 phi2 46.413467 scatter 0.0 fraction 1.0
(gauss) phi1 321.03828 Phi 27.209843 phi2 46.413467
[Grain09]
(gauss) phi1 346.918594 Phi 87.495569 phi2 113.554206 scatter 0.0 fraction 1.0
(gauss) phi1 346.918594 Phi 87.495569 phi2 113.554206
[Grain10]
(gauss) phi1 138.038947 Phi 99.827132 phi2 130.935878 scatter 0.0 fraction 1.0
(gauss) phi1 138.038947 Phi 99.827132 phi2 130.935878
[Grain11]
(gauss) phi1 285.021014 Phi 118.092004 phi2 205.270837 scatter 0.0 fraction 1.0
(gauss) phi1 285.021014 Phi 118.092004 phi2 205.270837
[Grain12]
(gauss) phi1 190.402171 Phi 56.738068 phi2 157.896545 scatter 0.0 fraction 1.0
(gauss) phi1 190.402171 Phi 56.738068 phi2 157.896545
[Grain13]
(gauss) phi1 204.496042 Phi 95.031265 phi2 355.814582 scatter 0.0 fraction 1.0
(gauss) phi1 204.496042 Phi 95.031265 phi2 355.814582
[Grain14]
(gauss) phi1 333.21479 Phi 82.133355 phi2 36.736132 scatter 0.0 fraction 1.0
(gauss) phi1 333.21479 Phi 82.133355 phi2 36.736132
[Grain15]
(gauss) phi1 25.572981 Phi 164.242648 phi2 75.195632 scatter 0.0 fraction 1.0
(gauss) phi1 25.572981 Phi 164.242648 phi2 75.195632
[Grain16]
(gauss) phi1 31.366548 Phi 76.392403 phi2 58.071426 scatter 0.0 fraction 1.0
(gauss) phi1 31.366548 Phi 76.392403 phi2 58.071426
[Grain17]
(gauss) phi1 7.278623 Phi 77.044663 phi2 235.118997 scatter 0.0 fraction 1.0
(gauss) phi1 7.278623 Phi 77.044663 phi2 235.118997
[Grain18]
(gauss) phi1 299.743144 Phi 76.475096 phi2 91.184977 scatter 0.0 fraction 1.0
(gauss) phi1 299.743144 Phi 76.475096 phi2 91.184977
[Grain19]
(gauss) phi1 280.13643 Phi 27.439718 phi2 167.871878 scatter 0.0 fraction 1.0
(gauss) phi1 280.13643 Phi 27.439718 phi2 167.871878
[Grain20]
(gauss) phi1 313.204373 Phi 68.676053 phi2 87.993213 scatter 0.0 fraction 1.0
(gauss) phi1 313.204373 Phi 68.676053 phi2 87.993213

View File

@ -99,14 +99,9 @@ else
fi
# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
if test "$DAMASK_HDF5" = "ON";then
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
echo $FCOMP
else
FCOMP=ifort
fi
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
# AEM
if test "$MARCDLLOUTDIR" = ""; then

View File

@ -99,14 +99,9 @@ else
fi
# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
if test "$DAMASK_HDF5" = "ON";then
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
echo $FCOMP
else
FCOMP=ifort
fi
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
# AEM
if test "$MARCDLLOUTDIR" = ""; then

View File

@ -100,11 +100,9 @@ else
fi
# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
if test "$DAMASK_HDF5" = "ON";then
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
fi
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
# AEM
if test "$MARCDLLOUTDIR" = ""; then

View File

@ -9,14 +9,6 @@ cd DAMASK_ROOT
patch -p1 < installation/patch/nameOfPatch
```
## Available patches
* **disable_HDF5** disables all HDF5 output.
HDF5 output is an experimental feature. Also, some routines not present in HDF5 1.8.x are removed to allow compilation of DAMASK with HDF5 < 1.10.x
* **disable_old_output** disables all non-HDF5 output.
Saves some memory when using only HDF5 output
## Create patch
commit your changes

View File

@ -1,57 +0,0 @@
diff --git a/src/DAMASK_grid.f90 b/src/DAMASK_grid.f90
index 496bfd0d..7b0f499c 100644
--- a/src/DAMASK_grid.f90
+++ b/src/DAMASK_grid.f90
@@ -75,7 +75,6 @@ program DAMASK_spectral
use grid_mech_spectral_polarisation
use grid_damage_spectral
use grid_thermal_spectral
- use results
implicit none
@@ -153,8 +152,6 @@ program DAMASK_spectral
write(6,'(/,a)') ' Shanthraj et al., Handbook of Mechanics of Materials, 2019'
write(6,'(a)') ' https://doi.org/10.1007/978-981-10-6855-3_80'
- call results_openJobFile()
- call results_closeJobFile()
!--------------------------------------------------------------------------------------------------
! initialize field solver information
nActiveFields = 1
@@ -595,7 +592,6 @@ program DAMASK_spectral
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
enddo
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
- call CPFEM_results(totalIncsCounter,time)
endif
if ( loadCases(currentLoadCase)%restartFrequency > 0_pInt & ! writing of restart info requested ...
.and. mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! ... and at frequency of writing restart information
diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index a81aaee0..3d3cdee3 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -197,7 +197,6 @@ integer(HID_T) function HDF5_addGroup(fileHandle,groupName)
!-------------------------------------------------------------------------------------------------
! setting I/O mode to collective
#ifdef PETSc
- call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
#endif
@@ -232,7 +231,6 @@ integer(HID_T) function HDF5_openGroup(fileHandle,groupName)
!-------------------------------------------------------------------------------------------------
! setting I/O mode to collective
#ifdef PETSc
- call h5pget_all_coll_metadata_ops_f(aplist_id, is_collective, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
#endif
@@ -1646,7 +1644,6 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pcreate_f')
#ifdef PETSc
- call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pset_all_coll_metadata_ops_f')
#endif

View File

@ -1,178 +0,0 @@
From 6dbd904a4cfc28add3c39bb2a4ec9e2dbb2442b6 Mon Sep 17 00:00:00 2001
From: Martin Diehl <m.diehl@mpie.de>
Date: Thu, 18 Apr 2019 18:25:32 +0200
Subject: [PATCH] to create patch
---
src/DAMASK_grid.f90 | 81 +-----------------------------------------
src/homogenization.f90 | 2 ++
2 files changed, 3 insertions(+), 80 deletions(-)
diff --git a/src/DAMASK_grid.f90 b/src/DAMASK_grid.f90
index f2f52bb2..a7543f4d 100644
--- a/src/DAMASK_grid.f90
+++ b/src/DAMASK_grid.f90
@@ -18,7 +18,6 @@ program DAMASK_spectral
use DAMASK_interface, only: &
DAMASK_interface_init, &
loadCaseFile, &
- geometryFile, &
getSolverJobName, &
interface_restartInc
use IO, only: &
@@ -49,14 +48,9 @@ program DAMASK_spectral
restartInc
use numerics, only: &
worldrank, &
- worldsize, &
stagItMax, &
maxCutBack, &
continueCalculation
- use homogenization, only: &
- materialpoint_sizeResults, &
- materialpoint_results, &
- materialpoint_postResults
use material, only: &
thermal_type, &
damage_type, &
@@ -131,12 +125,6 @@ program DAMASK_spectral
type(tLoadCase), allocatable, dimension(:) :: loadCases !< array of all load cases
type(tLoadCase) :: newLoadCase
type(tSolutionState), allocatable, dimension(:) :: solres
- integer(MPI_OFFSET_KIND) :: fileOffset
- integer(MPI_OFFSET_KIND), dimension(:), allocatable :: outputSize
- integer(pInt), parameter :: maxByteOut = 2147483647-4096 !< limit of one file output write https://trac.mpich.org/projects/mpich/ticket/1742
- integer(pInt), parameter :: maxRealOut = maxByteOut/pReal
- integer(pLongInt), dimension(2) :: outputIndex
- PetscErrorCode :: ierr
procedure(grid_mech_spectral_basic_init), pointer :: &
mech_init
procedure(grid_mech_spectral_basic_forward), pointer :: &
@@ -384,22 +372,6 @@ program DAMASK_spectral
! write header of output file
if (worldrank == 0) then
writeHeader: if (interface_restartInc < 1_pInt) then
- open(newunit=fileUnit,file=trim(getSolverJobName())//&
- '.spectralOut',form='UNFORMATTED',status='REPLACE')
- write(fileUnit) 'load:', trim(loadCaseFile) ! ... and write header
- write(fileUnit) 'workingdir:', 'n/a'
- write(fileUnit) 'geometry:', trim(geometryFile)
- write(fileUnit) 'grid:', grid
- write(fileUnit) 'size:', geomSize
- write(fileUnit) 'materialpoint_sizeResults:', materialpoint_sizeResults
- write(fileUnit) 'loadcases:', size(loadCases)
- write(fileUnit) 'frequencies:', loadCases%outputfrequency ! one entry per LoadCase
- write(fileUnit) 'times:', loadCases%time ! one entry per LoadCase
- write(fileUnit) 'logscales:', loadCases%logscale
- write(fileUnit) 'increments:', loadCases%incs ! one entry per LoadCase
- write(fileUnit) 'startingIncrement:', restartInc ! start with writing out the previous inc
- write(fileUnit) 'eoh'
- close(fileUnit) ! end of header
open(newunit=statUnit,file=trim(getSolverJobName())//&
'.sta',form='FORMATTED',status='REPLACE')
write(statUnit,'(a)') 'Increment Time CutbackLevel Converged IterationsNeeded' ! statistics file
@@ -412,39 +384,6 @@ program DAMASK_spectral
endif writeHeader
endif
-!--------------------------------------------------------------------------------------------------
-! prepare MPI parallel out (including opening of file)
- allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
- outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_allreduce')
- call MPI_file_open(PETSC_COMM_WORLD, trim(getSolverJobName())//'.spectralOut', &
- MPI_MODE_WRONLY + MPI_MODE_APPEND, &
- MPI_INFO_NULL, &
- fileUnit, &
- ierr)
- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_open')
- call MPI_file_get_position(fileUnit,fileOffset,ierr) ! get offset from header
- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_get_position')
- fileOffset = fileOffset + sum(outputSize(1:worldrank)) ! offset of my process in file (header + processes before me)
- call MPI_file_seek (fileUnit,fileOffset,MPI_SEEK_SET,ierr)
- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_seek')
-
- writeUndeformed: if (interface_restartInc < 1_pInt) then
- write(6,'(1/,a)') ' ... writing initial configuration to file ........................'
- call CPFEM_results(0_pInt,0.0_pReal)
- do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
- outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, & ! QUESTION: why not starting i at 0 instead of murky 1?
- min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
- call MPI_file_write(fileUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), &
- [(outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)]), &
- int((outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)), &
- MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write')
- enddo
- fileOffset = fileOffset + sum(outputSize) ! forward to current file position
- endif writeUndeformed
-
loadCaseLooping: do currentLoadCase = 1_pInt, size(loadCases)
time0 = time ! load case start time
@@ -574,7 +513,6 @@ program DAMASK_spectral
write(6,'(/,a)') ' cutting back '
else ! no more options to continue
call IO_warning(850_pInt)
- call MPI_file_close(fileUnit,ierr)
close(statUnit)
call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written
endif
@@ -593,24 +531,8 @@ program DAMASK_spectral
' increment ', totalIncsCounter, ' NOT converged'
endif; flush(6)
- if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0_pInt) then ! at output frequency
- write(6,'(1/,a)') ' ... writing results to file ......................................'
- flush(6)
- call materialpoint_postResults()
- call MPI_file_seek (fileUnit,fileOffset,MPI_SEEK_SET,ierr)
- if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
- do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
- outputIndex=int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
- min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
- call MPI_file_write(fileUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
- [(outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)]), &
- int((outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)),&
- MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
- if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
- enddo
- fileOffset = fileOffset + sum(outputSize) ! forward to current file position
+ if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0_pInt) & ! at output frequency
call CPFEM_results(totalIncsCounter,time)
- endif
if ( loadCases(currentLoadCase)%restartFrequency > 0_pInt & ! writing of restart info requested ...
.and. mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! ... and at frequency of writing restart information
restartWrite = .true. ! set restart parameter for FEsolving
@@ -633,7 +555,6 @@ program DAMASK_spectral
real(convergedCounter, pReal)/&
real(notConvergedCounter + convergedCounter,pReal)*100.0_pReal, ' %) increments converged!'
flush(6)
- call MPI_file_close(fileUnit,ierr)
close(statUnit)
if (notConvergedCounter > 0_pInt) call quit(2_pInt) ! error if some are not converged
diff --git a/src/homogenization.f90 b/src/homogenization.f90
index 06da6ab2..0743d545 100644
--- a/src/homogenization.f90
+++ b/src/homogenization.f90
@@ -269,6 +269,7 @@ subroutine homogenization_init
+ homogenization_maxNgrains * (1 + crystallite_maxSizePostResults & ! crystallite size & crystallite results
+ 1 + constitutive_plasticity_maxSizePostResults & ! constitutive size & constitutive results
+ constitutive_source_maxSizePostResults)
+ materialpoint_sizeResults = 0
allocate(materialpoint_results(materialpoint_sizeResults,theMesh%elem%nIPs,theMesh%nElems))
write(6,'(/,a)') ' <<<+- homogenization init -+>>>'
@@ -682,6 +683,7 @@ subroutine materialpoint_postResults
i, & !< integration point number
e !< element number
+ return
!$OMP PARALLEL DO PRIVATE(myNgrains,myCrystallite,thePos,theSize)
elementLooping: do e = FEsolving_execElem(1),FEsolving_execElem(2)
myNgrains = homogenization_Ngrains(mesh_element(3,e))
--
2.21.0

View File

@ -1,8 +0,0 @@
#! /usr/bin/env bash
if [ $1x != 3to2x ]; then
echo 'python2.7 to python3'
find . -name '*.py' -type f | xargs sed -i 's/usr\/bin\/env python2.7/usr\/bin\/env python3/g'
else
echo 'python3 to python2.7'
find . -name '*.py' -type f | xargs sed -i 's/usr\/bin\/env python3/usr\/bin\/env python2.7/g'
fi

View File

@ -1,8 +1,10 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
@ -19,47 +21,10 @@ Convert TSL/EDAX *.ang file to ASCIItable
""", version = scriptID)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.txt' if name else name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# --- interpret header -----------------------------------------------------------------------------
table.head_read()
# --- read comments --------------------------------------------------------------------------------
table.info_clear()
while table.data_read(advance = False) and table.line.startswith('#'): # cautiously (non-progressing) read header
table.info_append(table.line) # add comment to info part
table.data_read() # wind forward
table.labels_clear()
table.labels_append(['1_Euler','2_Euler','3_Euler',
'1_pos','2_pos',
'IQ','CI','PhaseID','Intensity','Fit',
], # OIM Analysis 7.2 Manual, p 403 (of 517)
reset = True)
# ------------------------------------------ assemble header ---------------------------------------
table.head_write()
#--- write remainder of data file ------------------------------------------------------------------
outputAlive = True
while outputAlive and table.data_read():
outputAlive = table.data_write()
# ------------------------------------------ finalize output ---------------------------------------
table.close()
table = damask.Table.from_ang(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.txt')

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -39,61 +39,36 @@ for filename in options.filenames:
results = damask.DADF5(filename)
if not results.structured: continue
delta = results.size/results.grid*0.5
x, y, z = np.meshgrid(np.linspace(delta[2],results.size[2]-delta[2],results.grid[2]),
np.linspace(delta[1],results.size[1]-delta[1],results.grid[1]),
np.linspace(delta[0],results.size[0]-delta[0],results.grid[0]),
indexing = 'ij')
coords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)
if results.version_major == 0 and results.version_minor >= 5:
coords = damask.grid_filters.cell_coord0(results.grid,results.size,results.origin)
else:
coords = damask.grid_filters.cell_coord0(results.grid,results.size)
N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
N_digits = 5 # hack to keep test intact
for i,inc in enumerate(results.iter_visible('increments')):
print('Output step {}/{}'.format(i+1,len(results.increments)))
header = '1 header\n'
data = np.array([int(inc[3:]) for j in range(np.product(results.grid))]).reshape([np.product(results.grid),1])
header+= 'inc'
coords = coords.reshape([np.product(results.grid),3])
data = np.concatenate((data,coords),1)
header+=' 1_pos 2_pos 3_pos'
table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})
table.add('pos',coords.reshape((-1,3)))
results.set_visible('materialpoints',False)
results.set_visible('constituents', True)
for label in options.con:
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0,plain=True)
d = np.product(np.shape(array)[1:])
data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
if d>1:
header+= ''.join([' {}_{}'.format(j+1,label) for j in range(d)])
else:
header+=' '+label
if len(x) != 0:
table.add(label,results.read_dataset(x,0,plain=True).reshape((results.grid.prod(),-1)))
results.set_visible('constituents', False)
results.set_visible('materialpoints',True)
for label in options.mat:
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0,plain=True)
d = np.product(np.shape(array)[1:])
data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
if d>1:
header+= ''.join([' {}_{}'.format(j+1,label) for j in range(d)])
else:
header+=' '+label
if len(x) != 0:
table.add(label,results.read_dataset(x,0,plain=True).reshape((results.grid.prod(),-1)))
dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
if not os.path.isdir(dirname):
os.mkdir(dirname,0o755)
file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],
inc[3:].zfill(N_digits))
np.savetxt(os.path.join(dirname,file_out),data,header=header,comments='')
table.to_ASCII(os.path.join(dirname,file_out))

View File

@ -1,144 +0,0 @@
#!/usr/bin/env python3
import os
import argparse
import h5py
import numpy as np
import vtk
from vtk.util import numpy_support
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = argparse.ArgumentParser()
#ToDo: We need to decide on a way of handling arguments of variable lentght
#https://stackoverflow.com/questions/15459997/passing-integer-lists-to-python
#parser.add_argument('--version', action='version', version='%(prog)s {}'.format(scriptID))
parser.add_argument('filenames', nargs='+',
help='DADF5 files')
parser.add_argument('-d','--dir', dest='dir',default='postProc',metavar='string',
help='name of subdirectory relative to the location of the DADF5 file to hold output')
parser.add_argument('--mat', nargs='+',
help='labels for materialpoint',dest='mat')
parser.add_argument('--con', nargs='+',
help='labels for constituent',dest='con')
options = parser.parse_args()
if options.mat is None: options.mat=[]
if options.con is None: options.con=[]
# --- loop over input files ------------------------------------------------------------------------
for filename in options.filenames:
results = damask.DADF5(filename)
if results.structured: # for grid solvers use rectilinear grid
grid = vtk.vtkRectilinearGrid()
coordArray = [vtk.vtkDoubleArray(),
vtk.vtkDoubleArray(),
vtk.vtkDoubleArray(),
]
grid.SetDimensions(*(results.grid+1))
for dim in [0,1,2]:
for c in np.linspace(0,results.size[dim],1+results.grid[dim]):
coordArray[dim].InsertNextValue(c)
grid.SetXCoordinates(coordArray[0])
grid.SetYCoordinates(coordArray[1])
grid.SetZCoordinates(coordArray[2])
else:
nodes = vtk.vtkPoints()
with h5py.File(filename) as f:
nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True))
grid = vtk.vtkUnstructuredGrid()
grid.SetPoints(nodes)
grid.Allocate(f['/geometry/T_c'].shape[0])
for i in f['/geometry/T_c']:
grid.InsertNextCell(vtk.VTK_HEXAHEDRON,8,i-1)
N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
for i,inc in enumerate(results.iter_visible('increments')):
print('Output step {}/{}'.format(i+1,len(results.increments)))
vtk_data = []
results.set_visible('materialpoints',False)
results.set_visible('constituents', True)
for label in options.con:
for p in results.iter_visible('con_physics'):
if p != 'generic':
for c in results.iter_visible('constituents'):
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
grid.GetCellData().AddArray(vtk_data[-1])
else:
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
grid.GetCellData().AddArray(vtk_data[-1])
results.set_visible('constituents', False)
results.set_visible('materialpoints',True)
for label in options.mat:
for p in results.iter_visible('mat_physics'):
if p != 'generic':
for m in results.iter_visible('materialpoints'):
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
grid.GetCellData().AddArray(vtk_data[-1])
else:
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
grid.GetCellData().AddArray(vtk_data[-1])
writer = vtk.vtkXMLRectilinearGridWriter() if results.structured else \
vtk.vtkXMLUnstructuredGridWriter()
results.set_visible('constituents', False)
results.set_visible('materialpoints',False)
x = results.get_dataset_location('u_n')
vtk_data.append(numpy_support.numpy_to_vtk(num_array=results.read_dataset(x,0),deep=True,array_type=vtk.VTK_DOUBLE))
vtk_data[-1].SetName('u')
grid.GetPointData().AddArray(vtk_data[-1])
dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
if not os.path.isdir(dirname):
os.mkdir(dirname,0o755)
file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],
inc[3:].zfill(N_digits),
writer.GetDefaultFileExtension())
writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
writer.SetFileName(os.path.join(dirname,file_out))
writer.SetInputData(grid)
writer.Write()

View File

@ -1,124 +0,0 @@
#!/usr/bin/env python3
import os
import argparse
import numpy as np
import vtk
from vtk.util import numpy_support
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = argparse.ArgumentParser()
#ToDo: We need to decide on a way of handling arguments of variable lentght
#https://stackoverflow.com/questions/15459997/passing-integer-lists-to-python
#parser.add_argument('--version', action='version', version='%(prog)s {}'.format(scriptID))
parser.add_argument('filenames', nargs='+',
help='DADF5 files')
parser.add_argument('-d','--dir', dest='dir',default='postProc',metavar='string',
help='name of subdirectory relative to the location of the DADF5 file to hold output')
parser.add_argument('--mat', nargs='+',
help='labels for materialpoint',dest='mat')
parser.add_argument('--con', nargs='+',
help='labels for constituent',dest='con')
options = parser.parse_args()
if options.mat is None: options.mat=[]
if options.con is None: options.con=[]
# --- loop over input files ------------------------------------------------------------------------
for filename in options.filenames:
results = damask.DADF5(filename)
Points = vtk.vtkPoints()
Vertices = vtk.vtkCellArray()
for c in results.cell_coordinates():
pointID = Points.InsertNextPoint(c)
Vertices.InsertNextCell(1)
Vertices.InsertCellPoint(pointID)
Polydata = vtk.vtkPolyData()
Polydata.SetPoints(Points)
Polydata.SetVerts(Vertices)
Polydata.Modified()
N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
for i,inc in enumerate(results.iter_visible('increments')):
print('Output step {}/{}'.format(i+1,len(results.increments)))
vtk_data = []
results.set_visible('materialpoints',False)
results.set_visible('constituents', True)
for label in options.con:
for p in results.iter_visible('con_physics'):
if p != 'generic':
for c in results.iter_visible('constituents'):
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
Polydata.GetCellData().AddArray(vtk_data[-1])
else:
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
Polydata.GetCellData().AddArray(vtk_data[-1])
results.set_visible('constituents', False)
results.set_visible('materialpoints',True)
for label in options.mat:
for p in results.iter_visible('mat_physics'):
if p != 'generic':
for m in results.iter_visible('materialpoints'):
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
Polydata.GetCellData().AddArray(vtk_data[-1])
else:
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
Polydata.GetCellData().AddArray(vtk_data[-1])
writer = vtk.vtkXMLPolyDataWriter()
dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
if not os.path.isdir(dirname):
os.mkdir(dirname,0o755)
file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],
inc[3:].zfill(N_digits),
writer.GetDefaultFileExtension())
writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
writer.SetFileName(os.path.join(dirname,file_out))
writer.SetInputData(Polydata)
writer.Write()

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -24,61 +25,33 @@ Transform X,Y,Z,F APS BeamLine 34 coordinates to x,y,z APS strain coordinates.
parser.add_option('-f','--frame',dest='frame', metavar='string',
help='label of APS X,Y,Z coords')
parser.add_option('--depth', dest='depth', metavar='string',
parser.add_option('--depth', dest='depth', metavar='string',
help='depth')
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if options.frame is None:
parser.error('frame not specified')
if options.depth is None:
parser.error('depth not specified')
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
theta=-0.75*np.pi
RotMat2TSL=np.array([[1., 0., 0.],
[0., np.cos(theta), np.sin(theta)], # Orientation to account for -135 deg
[0., -np.sin(theta), np.cos(theta)]]) # rotation for TSL convention
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
coord = - table.get(options.frame)
coord[:,2] += table.get(options.depth)[:,0]
table.head_read()
table.add('coord',
np.einsum('ijk,ik->ij',np.broadcast_to(RotMat2TSL,(coord.shape[0],3,3)),coord),
scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ sanity checks -----------------------------------------
errors = []
if table.label_dimension(options.frame) != 3:
errors.append('input {} does not have dimension 3.'.format(options.frame))
if table.label_dimension(options.depth) != 1:
errors.append('input {} does not have dimension 1.'.format(options.depth))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
# ------------------------------------------ assemble header ---------------------------------------
table.labels_append(['%i_coord'%(i+1) for i in range(3)]) # extend ASCII header with new labels
table.head_write()
# ------------------------------------------ process data ------------------------------------------
theta=-0.75*np.pi
RotMat2TSL=np.array([[1., 0., 0.],
[0., np.cos(theta), np.sin(theta)], # Orientation to account for -135 deg
[0., -np.sin(theta), np.cos(theta)]]) # rotation for TSL convention
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
coord = list(map(float,table.data[table.label_index(options.frame):table.label_index(options.frame)+3]))
depth = float(table.data[table.label_index(options.depth)])
table.data_append(np.dot(RotMat2TSL,np.array([-coord[0],-coord[1],-coord[2]+depth])))
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -4,7 +4,7 @@ import os
import sys
from optparse import OptionParser
import re
import collections
from collections.abc import Iterable
import math # noqa
import scipy # noqa
@ -18,7 +18,7 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def listify(x):
return x if isinstance(x, collections.Iterable) else [x]
return x if isinstance(x, Iterable) else [x]
# --------------------------------------------------------------------
@ -65,9 +65,10 @@ for i in range(len(options.formulas)):
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
try:
table = damask.ASCIItable(name = name, buffered = False)
except IOError:
continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header -------------------------------------------

View File

@ -1,11 +1,11 @@
#!/usr/bin/env python3
import os
import math
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import scipy.ndimage
import damask
@ -13,78 +13,6 @@ import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
def cell2node(cellData,grid):
nodeData = 0.0
datalen = np.array(cellData.shape[3:]).prod()
for i in range(datalen):
node = scipy.ndimage.convolve(cellData.reshape(tuple(grid[::-1])+(datalen,))[...,i],
np.ones((2,2,2))/8., # 2x2x2 neighborhood of cells
mode = 'wrap',
origin = -1, # offset to have cell origin as center
) # now averaged at cell origins
node = np.append(node,node[np.newaxis,0,:,:,...],axis=0) # wrap along z
node = np.append(node,node[:,0,np.newaxis,:,...],axis=1) # wrap along y
node = np.append(node,node[:,:,0,np.newaxis,...],axis=2) # wrap along x
nodeData = node[...,np.newaxis] if i==0 else np.concatenate((nodeData,node[...,np.newaxis]),axis=-1)
return nodeData
#--------------------------------------------------------------------------------------------------
def deformationAvgFFT(F,grid,size,nodal=False,transformed=False):
"""Calculate average cell center (or nodal) deformation for deformation gradient field specified in each grid cell"""
if nodal:
x, y, z = np.meshgrid(np.linspace(0,size[2],1+grid[2]),
np.linspace(0,size[1],1+grid[1]),
np.linspace(0,size[0],1+grid[0]),
indexing = 'ij')
else:
x, y, z = np.meshgrid(np.linspace(size[2]/grid[2]/2.,size[2]-size[2]/grid[2]/2.,grid[2]),
np.linspace(size[1]/grid[1]/2.,size[1]-size[1]/grid[1]/2.,grid[1]),
np.linspace(size[0]/grid[0]/2.,size[0]-size[0]/grid[0]/2.,grid[0]),
indexing = 'ij')
origCoords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)
F_fourier = F if transformed else np.fft.rfftn(F,axes=(0,1,2)) # transform or use provided data
Favg = np.real(F_fourier[0,0,0,:,:])/grid.prod() # take zero freq for average
avgDeformation = np.einsum('ml,ijkl->ijkm',Favg,origCoords) # dX = Favg.X
return avgDeformation
#--------------------------------------------------------------------------------------------------
def displacementFluctFFT(F,grid,size,nodal=False,transformed=False):
"""Calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
integrator = 0.5j * size / math.pi
kk, kj, ki = np.meshgrid(np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2])),
np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1])),
np.arange(grid[0]//2+1),
indexing = 'ij')
k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3)
k_sSquared = np.einsum('...l,...l',k_s,k_s)
k_sSquared[0,0,0] = 1.0 # ignore global average frequency
#--------------------------------------------------------------------------------------------------
# integration in Fourier space
displacement_fourier = -np.einsum('ijkml,ijkl,l->ijkm',
F if transformed else np.fft.rfftn(F,axes=(0,1,2)),
k_s,
integrator,
) / k_sSquared[...,np.newaxis]
#--------------------------------------------------------------------------------------------------
# backtransformation to real space
displacement = np.fft.irfftn(displacement_fourier,grid[::-1],axes=(0,1,2))
return cell2node(displacement,grid) if nodal else displacement
def volTetrahedron(coords):
"""
Return the volume of the tetrahedron with given vertices or sides.
@ -133,10 +61,10 @@ def volTetrahedron(coords):
def volumeMismatch(size,F,nodes):
"""
Calculates the volume mismatch
Calculates the volume mismatch.
volume mismatch is defined as the difference between volume of reconstructed
(compatible) cube and determinant of defgrad at the FP
(compatible) cube and determinant of deformation gradient at Fourier point.
"""
coords = np.empty([8,3])
vMismatch = np.empty(grid[::-1])
@ -169,11 +97,11 @@ def volumeMismatch(size,F,nodes):
def shapeMismatch(size,F,nodes,centres):
"""
Routine to calculate the shape mismatch
Routine to calculate the shape mismatch.
shape mismatch is defined as difference between the vectors from the central point to
the corners of reconstructed (combatible) volume element and the vectors calculated by deforming
the initial volume element with the current deformation gradient
the initial volume element with the current deformation gradient.
"""
coordsInitial = np.empty([8,3])
sMismatch = np.empty(grid[::-1])
@ -241,92 +169,29 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.defgrad) != 9:
errors.append('deformation gradient "{}" is not a 3x3 tensor.'.format(options.defgrad))
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1:
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3:
remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray([options.defgrad,options.pos])
table.data_rewind()
if table.data[:,9:].shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data[:,9:].shape[1]),dtype='f'))) # fill coords up to 3D with zeros
grid,size = damask.util.coordGridAndSize(table.data[:,9:12])
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
F = table.get(options.defgrad).reshape(grid[2],grid[1],grid[0],3,3)
nodes = damask.grid_filters.node_coord(size,F)
# -----------------------------process data and assemble header -------------------------------------
F_fourier = np.fft.rfftn(table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),axes=(0,1,2)) # perform transform only once...
nodes = displacementFluctFFT(F_fourier,grid,size,True,transformed=True)\
+ deformationAvgFFT (F_fourier,grid,size,True,transformed=True)
if options.shape:
table.labels_append(['shapeMismatch({})'.format(options.defgrad)])
centres = displacementFluctFFT(F_fourier,grid,size,False,transformed=True)\
+ deformationAvgFFT (F_fourier,grid,size,False,transformed=True)
centers = damask.grid_filters.cell_coord(size,F)
shapeMismatch = shapeMismatch( size,table.get(options.defgrad).reshape(grid[2],grid[1],grid[0],3,3),nodes,centers)
table.add('shapeMismatch(({}))'.format(options.defgrad),
shapeMismatch.reshape((-1,1)),
scriptID+' '+' '.join(sys.argv[1:]))
if options.volume:
table.labels_append(['volMismatch({})'.format(options.defgrad)])
volumeMismatch = volumeMismatch(size,table.get(options.defgrad).reshape(grid[2],grid[1],grid[0],3,3),nodes)
table.add('volMismatch(({}))'.format(options.defgrad),
volumeMismatch.reshape((-1,1)),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_write()
if options.shape:
shapeMismatch = shapeMismatch( size,table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),nodes,centres)
if options.volume:
volumeMismatch = volumeMismatch(size,table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),nodes)
# ------------------------------------------ output data -------------------------------------------
for i in range(grid[2]):
for j in range(grid[1]):
for k in range(grid[0]):
table.data_read()
if options.shape: table.data_append(shapeMismatch[i,j,k])
if options.volume: table.data_append(volumeMismatch[i,j,k])
table.data_write()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -22,79 +23,26 @@ Add cumulative (sum of first to current row) values for given label(s).
""", version = scriptID)
parser.add_option('-l','--label',
dest='label',
dest='labels',
action = 'extend', metavar = '<string LIST>',
help = 'columns to cumulate')
parser.add_option('-p','--product',
dest='product', action = 'store_true',
help = 'product of values instead of sum')
(options,filenames) = parser.parse_args()
if options.label is None:
parser.error('no data column(s) specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.labels is None:
parser.error('no data column(s) specified.')
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except IOError: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
table.add('cum_{}({})'.format('prod' if options.product else 'sum',label),
np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
columns = []
dims = []
how = 'prod' if options.product else 'sum'
for what in options.label:
dim = table.label_dimension(what)
if dim < 0: remarks.append('column {} not found...'.format(what))
else:
dims.append(dim)
columns.append(table.label_index(what))
table.labels_append('cum_{}({})'.format(how,what) if dim == 1 else
['{}_cum_{}({})'.format(i+1,how,what) for i in range(dim)] ) # extend ASCII header with new labels
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
mask = []
for col,dim in zip(columns,dims): mask += range(col,col+dim) # isolate data columns to cumulate
cumulated = np.ones(len(mask)) if options.product else np.zeros(len(mask)) # prepare output field
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
if options.product:
for i,col in enumerate(mask):
cumulated[i] *= float(table.data[col]) # cumulate values (multiplication)
else:
for i,col in enumerate(mask):
cumulated[i] += float(table.data[col]) # cumulate values (addition)
table.data_append(cumulated)
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -12,48 +13,6 @@ import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def merge_dicts(*dict_args):
"""Given any number of dicts, shallow copy and merge into a new dict, with precedence going to key value pairs in latter dicts."""
result = {}
for dictionary in dict_args:
result.update(dictionary)
return result
def curlFFT(geomdim,field):
"""Calculate curl of a vector or tensor field by transforming into Fourier space."""
shapeFFT = np.array(np.shape(field))[0:3]
grid = np.array(np.shape(field)[2::-1])
N = grid.prod() # field size
n = np.array(np.shape(field)[3:]).prod() # data size
field_fourier = np.fft.rfftn(field,axes=(0,1,2),s=shapeFFT)
curl_fourier = np.empty(field_fourier.shape,'c16')
# differentiation in Fourier space
TWOPIIMG = 2.0j*np.pi
einsums = {
3:'slm,ijkl,ijkm->ijks', # vector, 3 -> 3
9:'slm,ijkl,ijknm->ijksn', # tensor, 3x3 -> 3x3
}
k_sk = np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2]))/geomdim[0]
if grid[2]%2 == 0: k_sk[grid[2]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011)
k_sj = np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1]))/geomdim[1]
if grid[1]%2 == 0: k_sj[grid[1]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011)
k_si = np.arange(grid[0]//2+1)/geomdim[2]
kk, kj, ki = np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij')
k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3).astype('c16')
e = np.zeros((3, 3, 3))
e[0, 1, 2] = e[1, 2, 0] = e[2, 0, 1] = 1.0 # Levi-Civita symbols
e[0, 2, 1] = e[2, 1, 0] = e[1, 0, 2] = -1.0
curl_fourier = np.einsum(einsums[n],e,k_s,field_fourier)*TWOPIIMG
return np.fft.irfftn(curl_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n])
# --------------------------------------------------------------------
# MAIN
@ -61,8 +20,7 @@ def curlFFT(geomdim,field):
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add column(s) containing curl of requested column(s).
Operates on periodic ordered three-dimensional data sets
of vector and tensor fields.
Operates on periodic ordered three-dimensional data sets of vector and tensor fields.
""", version = scriptID)
parser.add_option('-p','--pos','--periodiccellcenter',
@ -70,93 +28,30 @@ parser.add_option('-p','--pos','--periodiccellcenter',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-l','--label',
dest = 'data',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'label(s) of field values')
parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if options.data is None: parser.error('no data column specified.')
# --- define possible data types -------------------------------------------------------------------
datatypes = {
3: {'name': 'vector',
'shape': [3],
},
9: {'name': 'tensor',
'shape': [3,3],
},
}
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.labels is None: parser.error('no data column specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
table.head_read()
remarks = []
errors = []
active = []
coordDim = table.label_dimension(options.pos)
if coordDim != 3:
errors.append('coordinates "{}" must be three-dimensional.'.format(options.pos))
else: coordCol = table.label_index(options.pos)
for me in options.data:
dim = table.label_dimension(me)
if dim in datatypes:
active.append(merge_dicts({'label':me},datatypes[dim]))
remarks.append('differentiating {} "{}"...'.format(datatypes[dim]['name'],me))
else:
remarks.append('skipping "{}" of dimension {}...'.format(me,dim) if dim != -1 else \
'"{}" not found...'.format(me) )
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for data in active:
table.labels_append(['{}_curlFFT({})'.format(i+1,data['label'])
for i in range(np.prod(np.array(data['shape'])))]) # extend ASCII header with new labels
table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
for data in active:
# we need to reverse order here, because x is fastest, i.e. rightmost, but leftmost in our x,y,z notation
stack.append(curlFFT(size[::-1],
table.data[:,table.label_indexrange(data['label'])].
reshape(grid[::-1].tolist()+data['shape'])))
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
for label in options.labels:
field = table.get(label)
shape = (3,) if np.prod(field.shape)//np.prod(grid) == 3 else (3,3) # vector or tensor
field = field.reshape(np.append(grid[::-1],shape))
table.add('curlFFT({})'.format(label),
damask.grid_filters.curl(size[::-1],field).reshape((-1,np.prod(shape))),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -30,7 +31,7 @@ def derivative(coordinates,what):
(coordinates[0] - coordinates[1])
result[-1,:] = (what[-1,:] - what[-2,:]) / \
(coordinates[-1] - coordinates[-2])
return result
@ -48,78 +49,26 @@ parser.add_option('-c','--coordinates',
type = 'string', metavar='string',
help = 'heading of coordinate column')
parser.add_option('-l','--label',
dest = 'label',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'heading of column(s) to differentiate')
(options,filenames) = parser.parse_args()
if options.coordinates is None:
parser.error('no coordinate column specified.')
if options.label is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.coordinates is None:
parser.error('no coordinate column specified.')
if options.labels is None:
parser.error('no data column specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
table.add('d({})/d({})'.format(label,options.coordinates),
derivative(table.get(options.coordinates),table.get(label)),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
columns = []
dims = []
if table.label_dimension(options.coordinates) != 1:
errors.append('coordinate column {} is not scalar.'.format(options.coordinates))
for what in options.label:
dim = table.label_dimension(what)
if dim < 0: remarks.append('column {} not found...'.format(what))
else:
dims.append(dim)
columns.append(table.label_index(what))
table.labels_append('d({})/d({})'.format(what,options.coordinates) if dim == 1 else
['{}_d({})/d({})'.format(i+1,what,options.coordinates) for i in range(dim)] ) # extend ASCII header with new labels
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
table.data_readArray()
mask = []
for col,dim in zip(columns,dims): mask += range(col,col+dim) # isolate data columns to differentiate
differentiated = derivative(table.data[:,table.label_index(options.coordinates)].reshape((len(table.data),1)),
table.data[:,mask]) # calculate numerical derivative
table.data = np.hstack((table.data,differentiated))
# ------------------------------------------ output result -----------------------------------------
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,10 +2,10 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import scipy.ndimage
import damask
@ -14,79 +14,6 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
def cell2node(cellData,grid):
    """
    Interpolate cell-centered data to the nodes of a periodic grid.

    Each node value is the average of the surrounding 2x2x2 cells
    (periodic wrap via mode='wrap'); the first node layer is then repeated
    at the far face of every axis, so each direction gains one point:
    output shape is (grid+1)[::-1] + data components.

    Parameters
    ----------
    cellData : numpy.ndarray
        Cell-centered data; the first three axes hold the grid
        (z,y,x order, matching the reshape with grid[::-1]).
    grid : numpy.ndarray
        Number of cells per direction (x,y,z order).

    Notes
    -----
    Indentation restored — lost in extraction.
    """
    nodeData = 0.0
    datalen = np.array(cellData.shape[3:]).prod()

    for i in range(datalen):
        node = scipy.ndimage.convolve(cellData.reshape(tuple(grid[::-1])+(datalen,))[...,i],
                                      np.ones((2,2,2))/8.,                                        # 2x2x2 neighborhood of cells
                                      mode = 'wrap',
                                      origin = -1,                                                # offset to have cell origin as center
                                     )                                                            # now averaged at cell origins
        node = np.append(node,node[np.newaxis,0,:,:,...],axis=0)                                  # wrap along z
        node = np.append(node,node[:,0,np.newaxis,:,...],axis=1)                                  # wrap along y
        node = np.append(node,node[:,:,0,np.newaxis,...],axis=2)                                  # wrap along x

        nodeData = node[...,np.newaxis] if i==0 else np.concatenate((nodeData,node[...,np.newaxis]),axis=-1)

    return nodeData
#--------------------------------------------------------------------------------------------------
def displacementAvgFFT(F,grid,size,nodal=False,transformed=False):
    """
    Calculate average cell center (or nodal) displacement for a deformation
    gradient field specified in each grid cell.

    Parameters
    ----------
    F : numpy.ndarray
        Deformation gradient field (Nz,Ny,Nx,3,3), or its rfftn transform
        when transformed=True.
    grid, size : numpy.ndarray
        Cells per direction and physical extent (x,y,z order).
    nodal : bool
        Evaluate at nodal positions instead of cell centers.
    transformed : bool
        F is already in Fourier space (avoids a second transform).

    Returns
    -------
    numpy.ndarray
        Average displacement dX = (Favg - I).X at each evaluation point.

    Notes
    -----
    Indentation restored — lost in extraction.
    """
    if nodal:
        x, y, z = np.meshgrid(np.linspace(0,size[2],1+grid[2]),
                              np.linspace(0,size[1],1+grid[1]),
                              np.linspace(0,size[0],1+grid[0]),
                              indexing = 'ij')
    else:
        delta = size/grid*0.5
        x, y, z = np.meshgrid(np.linspace(delta[2],size[2]-delta[2],grid[2]),
                              np.linspace(delta[1],size[1]-delta[1],grid[1]),
                              np.linspace(delta[0],size[0]-delta[0],grid[0]),
                              indexing = 'ij')

    origCoords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)

    F_fourier = F if transformed else np.fft.rfftn(F,axes=(0,1,2))                                # transform or use provided data
    Favg = np.real(F_fourier[0,0,0,:,:])/grid.prod()                                              # take zero freq for average
    avgDisplacement = np.einsum('ml,ijkl->ijkm',Favg-np.eye(3),origCoords)                        # dX = Favg.X

    return avgDisplacement
#--------------------------------------------------------------------------------------------------
def displacementFluctFFT(F,grid,size,nodal=False,transformed=False):
    """
    Calculate cell center (or nodal) displacement fluctuation for a
    deformation gradient field specified in each grid cell.

    Integrates F in Fourier space (zero-frequency/average term excluded),
    then transforms back to real space; with nodal=True the cell-centered
    result is interpolated to nodes via cell2node.

    Parameters
    ----------
    F : numpy.ndarray
        Deformation gradient field (Nz,Ny,Nx,3,3), or its rfftn transform
        when transformed=True.
    grid, size : numpy.ndarray
        Cells per direction and physical extent (x,y,z order).

    Notes
    -----
    Indentation restored — lost in extraction.
    """
    integrator = 0.5j * size / np.pi

    kk, kj, ki = np.meshgrid(np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2])),
                             np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1])),
                             np.arange(grid[0]//2+1),
                             indexing = 'ij')
    k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3)
    k_sSquared = np.einsum('...l,...l',k_s,k_s)
    k_sSquared[0,0,0] = 1.0                                                                       # ignore global average frequency

    # integration in Fourier space
    displacement_fourier = -np.einsum('ijkml,ijkl,l->ijkm',
                                      F if transformed else np.fft.rfftn(F,axes=(0,1,2)),
                                      k_s,
                                      integrator,
                                     ) / k_sSquared[...,np.newaxis]

    # backtransformation to real space
    displacement = np.fft.irfftn(displacement_fourier,grid[::-1],axes=(0,1,2))

    return cell2node(displacement,grid) if nodal else displacement
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
@ -100,7 +27,7 @@ Outputs at cell centers or cell nodes (into separate file).
parser.add_option('-f',
'--defgrad',
dest = 'defgrad',
dest = 'f',
metavar = 'string',
help = 'label of deformation gradient [%default]')
parser.add_option('-p',
@ -113,108 +40,34 @@ parser.add_option('--nodal',
action = 'store_true',
help = 'output nodal (instead of cell-centered) displacements')
parser.set_defaults(defgrad = 'f',
pos = 'pos',
parser.set_defaults(f = 'f',
pos = 'pos',
)
(options,filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
outname = (os.path.splitext(name)[0] +
'_nodal' +
os.path.splitext(name)[1]) if (options.nodal and name) else None
try: table = damask.ASCIItable(name = name,
outname = outname,
buffered = False)
except: continue
damask.util.report(scriptName,'{}{}'.format(name if name else '',
' --> {}'.format(outname) if outname else ''))
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.defgrad) != 9:
errors.append('deformation gradient "{}" is not a 3x3 tensor.'.format(options.defgrad))
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1:
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3:
remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray([options.defgrad,options.pos])
table.data_rewind()
if len(table.data.shape) < 2: table.data.shape += (1,) # expand to 2D shape
if table.data[:,9:].shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data[:,9:].shape[1]),dtype='f'))) # fill coords up to 3D with zeros
grid,size = damask.util.coordGridAndSize(table.data[:,9:12])
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ------------------------------------------
F_fourier = np.fft.rfftn(table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),axes=(0,1,2)) # perform transform only once...
fluctDisplacement = displacementFluctFFT(F_fourier,grid,size,options.nodal,transformed=True)
avgDisplacement = displacementAvgFFT (F_fourier,grid,size,options.nodal,transformed=True)
# ------------------------------------------ assemble header ---------------------------------------
if options.nodal:
table.info_clear()
table.labels_clear()
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append((['{}_pos' .format(i+1) for i in range(3)] if options.nodal else []) +
['{}_avg({}).{}' .format(i+1,options.defgrad,options.pos) for i in range(3)] +
['{}_fluct({}).{}'.format(i+1,options.defgrad,options.pos) for i in range(3)] )
table.head_write()
# ------------------------------------------ output data -------------------------------------------
Zrange = np.linspace(0,size[2],1+grid[2]) if options.nodal else range(grid[2])
Yrange = np.linspace(0,size[1],1+grid[1]) if options.nodal else range(grid[1])
Xrange = np.linspace(0,size[0],1+grid[0]) if options.nodal else range(grid[0])
for i,z in enumerate(Zrange):
for j,y in enumerate(Yrange):
for k,x in enumerate(Xrange):
if options.nodal: table.data_clear()
else: table.data_read()
table.data_append([x,y,z] if options.nodal else [])
table.data_append(list( avgDisplacement[i,j,k,:]))
table.data_append(list(fluctDisplacement[i,j,k,:]))
table.data_write()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
F = table.get(options.f).reshape(np.append(grid[::-1],(3,3)))
if options.nodal:
table = damask.Table(damask.grid_filters.node_coord0(grid[::-1],size[::-1]).reshape((-1,3)),
{'pos':(3,)})
table.add('avg({}).{}'.format(options.f,options.pos),
damask.grid_filters.node_displacement_avg(size[::-1],F).reshape((-1,3)),
scriptID+' '+' '.join(sys.argv[1:]))
table.add('fluct({}).{}'.format(options.f,options.pos),
damask.grid_filters.node_displacement_fluct(size[::-1],F).reshape((-1,3)),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt')
else:
table.add('avg({}).{}'.format(options.f,options.pos),
damask.grid_filters.cell_displacement_avg(size[::-1],F).reshape((-1,3)),
scriptID+' '+' '.join(sys.argv[1:]))
table.add('fluct({}).{}'.format(options.f,options.pos),
damask.grid_filters.cell_displacement_fluct(size[::-1],F).reshape((-1,3)),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -12,53 +13,14 @@ import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def merge_dicts(*dict_args):
    """
    Shallow-merge any number of dicts into a new dict.

    Later dicts win on duplicate keys; inputs are left untouched.
    Indentation restored — lost in extraction, leaving the function
    syntactically invalid.
    """
    result = {}
    for dictionary in dict_args:
        result.update(dictionary)
    return result
def divFFT(geomdim,field):
    """
    Calculate divergence of a periodic vector or tensor field in Fourier space.

    Parameters
    ----------
    geomdim : sequence of float
        Physical extent of the periodic domain, slowest to fastest axis
        (z,y,x) — callers pass size[::-1].
    field : numpy.ndarray
        Field of shape (Nz,Ny,Nx,3) for a vector or (Nz,Ny,Nx,3,3) for a
        tensor field.

    Returns
    -------
    numpy.ndarray
        Divergence flattened to (Nx*Ny*Nz, n//3): scalar column for a
        vector field, 3 columns for a tensor field.

    Notes
    -----
    Indentation restored (lost in extraction); the dead pre-allocation of
    div_fourier via np.empty (immediately overwritten) was removed.
    """
    shapeFFT = np.array(np.shape(field))[0:3]
    grid = np.array(np.shape(field)[2::-1])
    N = grid.prod()                                                                               # field size
    n = np.array(np.shape(field)[3:]).prod()                                                      # data size

    field_fourier = np.fft.rfftn(field,axes=(0,1,2),s=shapeFFT)

    # differentiation in Fourier space
    TWOPIIMG = 2.0j*np.pi
    einsums = {
                3:'ijkl,ijkl->ijk',                                                               # vector, 3   -> 1
                9:'ijkm,ijklm->ijkl',                                                             # tensor, 3x3 -> 3
              }
    k_sk = np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2]))/geomdim[0]
    if grid[2]%2 == 0: k_sk[grid[2]//2] = 0                                                       # Nyquist freq=0 for even grid (Johnson, MIT, 2011)

    k_sj = np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1]))/geomdim[1]
    if grid[1]%2 == 0: k_sj[grid[1]//2] = 0                                                       # Nyquist freq=0 for even grid (Johnson, MIT, 2011)

    k_si = np.arange(grid[0]//2+1)/geomdim[2]

    kk, kj, ki = np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij')
    k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3).astype('c16')

    div_fourier = np.einsum(einsums[n],k_s,field_fourier)*TWOPIIMG

    return np.fft.irfftn(div_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,n//3])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Add column(s) containing curl of requested column(s).
Operates on periodic ordered three-dimensional data sets
of vector and tensor fields.
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add column(s) containing divergence of requested column(s).
Operates on periodic ordered three-dimensional data sets of vector and tensor fields.
""", version = scriptID)
parser.add_option('-p','--pos','--periodiccellcenter',
@ -66,95 +28,30 @@ parser.add_option('-p','--pos','--periodiccellcenter',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-l','--label',
dest = 'data',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'label(s) of field values')
parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if options.data is None: parser.error('no data column specified.')
# --- define possible data types -------------------------------------------------------------------
datatypes = {
3: {'name': 'vector',
'shape': [3],
},
9: {'name': 'tensor',
'shape': [3,3],
},
}
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.labels is None: parser.error('no data column specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
table.head_read()
remarks = []
errors = []
active = []
coordDim = table.label_dimension(options.pos)
if coordDim != 3:
errors.append('coordinates "{}" must be three-dimensional.'.format(options.pos))
else: coordCol = table.label_index(options.pos)
for me in options.data:
dim = table.label_dimension(me)
if dim in datatypes:
active.append(merge_dicts({'label':me},datatypes[dim]))
remarks.append('differentiating {} "{}"...'.format(datatypes[dim]['name'],me))
else:
remarks.append('skipping "{}" of dimension {}...'.format(me,dim) if dim != -1 else \
'"{}" not found...'.format(me) )
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for data in active:
table.labels_append(['divFFT({})'.format(data['label']) if data['shape'] == [3] \
else '{}_divFFT({})'.format(i+1,data['label'])
for i in range(np.prod(np.array(data['shape']))//3)]) # extend ASCII header with new labels
table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
for data in active:
# we need to reverse order here, because x is fastest, i.e. rightmost, but leftmost in our x,y,z notation
stack.append(divFFT(size[::-1],
table.data[:,table.label_indexrange(data['label'])].
reshape(grid[::-1].tolist()+data['shape'])))
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
for label in options.labels:
field = table.get(label)
shape = (3,) if np.prod(field.shape)//np.prod(grid) == 3 else (3,3) # vector or tensor
field = field.reshape(np.append(grid[::-1],shape))
table.add('divFFT({})'.format(label),
damask.grid_filters.divergence(size[::-1],field).reshape((-1,np.prod(shape)//3)),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import itertools
@ -121,13 +122,14 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if options.type is None:
parser.error('no feature type selected.')
parser.error('no feature type selected.')
if not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['names'],features))))):
parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
if 'biplane' in options.type and 'boundary' in options.type:
parser.error('only one from aliases "biplane" and "boundary" possible.')
parser.error('only one from aliases "biplane" and "boundary" possible.')
feature_list = []
for i,feature in enumerate(features):
@ -137,104 +139,49 @@ for i,feature in enumerate(features):
feature_list.append(i) # remember valid features
break
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name, buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
table.head_read()
neighborhood = neighborhoods[options.neighborhood]
diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')
microstructure = periodic_3Dpad(table.get(options.id).astype('i').reshape(grid,order='F'))
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if not 3 >= table.label_dimension(options.pos) >= 1:
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
else: idCol = table.label_index(options.id)
if remarks != []:
damask.util.croak(remarks)
remarks = []
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for feature in feature_list:
table.labels_append('ED_{}({})'.format(features[feature]['names'][0],options.id)) # extend ASCII header with new labels
table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid))))
else: remarks.append('grid: {}x{}x{}'.format(*grid))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
neighborhood = neighborhoods[options.neighborhood]
diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')
microstructure = periodic_3Dpad(table.data[:,idCol].astype('i').reshape(grid,order='F'))
for i,p in enumerate(neighborhood):
stencil = np.zeros((3,3,3),'i')
stencil[1,1,1] = -1
stencil[p[0]+1,
p[1]+1,
p[2]+1] = 1
diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil) # compare ID at each point...
for i,p in enumerate(neighborhood):
stencil = np.zeros((3,3,3),'i')
stencil[1,1,1] = -1
stencil[p[0]+1,
p[1]+1,
p[2]+1] = 1
diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil) # compare ID at each point...
# ...to every one in the specified neighborhood
# for same IDs at both locations ==> 0
diffToNeighbor = np.sort(diffToNeighbor) # sort diff such that number of changes in diff (steps)...
diffToNeighbor = np.sort(diffToNeighbor) # sort diff such that number of changes in diff (steps)...
# ...reflects number of unique neighbors
uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
for i in range(1,len(neighborhood)): # check remaining points in neighborhood
uniques += np.where(np.logical_and(
diffToNeighbor[1:-1,1:-1,1:-1,i] != 0, # not myself?
diffToNeighbor[1:-1,1:-1,1:-1,i] != diffToNeighbor[1:-1,1:-1,1:-1,i-1],
), # flip of ID difference detected?
1,0) # count that flip
for i in range(1,len(neighborhood)): # check remaining points in neighborhood
uniques += np.where(np.logical_and(
diffToNeighbor[1:-1,1:-1,1:-1,i] != 0, # not myself?
diffToNeighbor[1:-1,1:-1,1:-1,i] != diffToNeighbor[1:-1,1:-1,1:-1,i-1],
), # flip of ID difference detected?
1,0) # count that flip
distance = np.ones((len(feature_list),grid[0],grid[1],grid[2]),'d')
distance = np.ones((len(feature_list),grid[0],grid[1],grid[2]),'d')
for i,feature_id in enumerate(feature_list):
distance[i,:,:,:] = np.where(uniques >= features[feature_id]['aliens'],0.0,1.0) # seed with 0.0 when enough unique neighbor IDs are present
distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3
for i,feature_id in enumerate(feature_list):
distance[i,:,:,:] = np.where(uniques >= features[feature_id]['aliens'],0.0,1.0) # seed with 0.0 when enough unique neighbor IDs are present
distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3
distance = distance.reshape([len(feature_list),grid.prod(),1],order='F')
for i in range(len(feature_list)):
stack.append(distance[i,:])
distance = distance.reshape([len(feature_list),grid.prod(),1],order='F')
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
for i,feature in enumerate(feature_list):
table.add('ED_{}({})'.format(features[feature]['names'][0],options.id),
distance[i,:],
scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,9 +2,9 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
from scipy import ndimage
import damask
@ -30,7 +30,7 @@ parser.add_option('-p','--pos','--periodiccellcenter',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-s','--scalar',
dest = 'scalar',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'label(s) of scalar field values')
parser.add_option('-o','--order',
@ -56,78 +56,21 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if options.scalar is None:
parser.error('no data column specified.')
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.labels is None: parser.error('no data column specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.grid_filters.coord0_check(table.get(options.pos))
table.head_read()
for label in options.labels:
table.add('Gauss{}({})'.format(options.sigma,label),
ndimage.filters.gaussian_filter(table.get(label).reshape((-1)),
options.sigma,options.order,
mode = 'wrap' if options.periodic else 'nearest'),
scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ sanity checks ----------------------------------------
items = {
'scalar': {'dim': 1, 'shape': [1], 'labels':options.scalar, 'active':[], 'column': []},
}
errors = []
remarks = []
column = {}
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
else: colCoord = table.label_index(options.pos)
for type, data in items.items():
for what in (data['labels'] if data['labels'] is not None else []):
dim = table.label_dimension(what)
if dim != data['dim']: remarks.append('column {} is not a {}.'.format(what,type))
else:
items[type]['active'].append(what)
items[type]['column'].append(table.label_index(what))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for type, data in items.items():
for label in data['active']:
table.labels_append(['Gauss{}({})'.format(options.sigma,label)]) # extend ASCII header with new labels
table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
for type, data in items.items():
for i,label in enumerate(data['active']):
stack.append(ndimage.filters.gaussian_filter(table.data[:,data['column'][i]],
options.sigma,options.order,
mode = 'wrap' if options.periodic else 'nearest'
).reshape([table.data.shape[0],1])
)
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -12,44 +13,6 @@ import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def merge_dicts(*dict_args):
"""Given any number of dicts, shallow copy and merge into a new dict, with precedence going to key value pairs in latter dicts."""
result = {}
for dictionary in dict_args:
result.update(dictionary)
return result
def gradFFT(geomdim,field):
"""Calculate gradient of a vector or scalar field by transforming into Fourier space."""
shapeFFT = np.array(np.shape(field))[0:3]
grid = np.array(np.shape(field)[2::-1])
N = grid.prod() # field size
n = np.array(np.shape(field)[3:]).prod() # data size
field_fourier = np.fft.rfftn(field,axes=(0,1,2),s=shapeFFT)
grad_fourier = np.empty(field_fourier.shape+(3,),'c16')
# differentiation in Fourier space
TWOPIIMG = 2.0j*np.pi
einsums = {
1:'ijkl,ijkm->ijkm', # scalar, 1 -> 3
3:'ijkl,ijkm->ijklm', # vector, 3 -> 3x3
}
k_sk = np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2]))/geomdim[0]
if grid[2]%2 == 0: k_sk[grid[2]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011)
k_sj = np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1]))/geomdim[1]
if grid[1]%2 == 0: k_sj[grid[1]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011)
k_si = np.arange(grid[0]//2+1)/geomdim[2]
kk, kj, ki = np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij')
k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3).astype('c16')
grad_fourier = np.einsum(einsums[n],field_fourier,k_s)*TWOPIIMG
return np.fft.irfftn(grad_fourier,axes=(0,1,2),s=shapeFFT).reshape([N,3*n])
# --------------------------------------------------------------------
# MAIN
@ -57,9 +20,7 @@ def gradFFT(geomdim,field):
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add column(s) containing gradient of requested column(s).
Operates on periodic ordered three-dimensional data sets
of vector and scalar fields.
Operates on periodic ordered three-dimensional data sets of scalar and vector fields.
""", version = scriptID)
parser.add_option('-p','--pos','--periodiccellcenter',
@ -67,7 +28,7 @@ parser.add_option('-p','--pos','--periodiccellcenter',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-l','--label',
dest = 'data',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'label(s) of field values')
@ -75,85 +36,22 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if options.data is None: parser.error('no data column specified.')
# --- define possible data types -------------------------------------------------------------------
datatypes = {
1: {'name': 'scalar',
'shape': [1],
},
3: {'name': 'vector',
'shape': [3],
},
}
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.labels is None: parser.error('no data column specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
table.head_read()
remarks = []
errors = []
active = []
coordDim = table.label_dimension(options.pos)
if coordDim != 3:
errors.append('coordinates "{}" must be three-dimensional.'.format(options.pos))
else: coordCol = table.label_index(options.pos)
for me in options.data:
dim = table.label_dimension(me)
if dim in datatypes:
active.append(merge_dicts({'label':me},datatypes[dim]))
remarks.append('differentiating {} "{}"...'.format(datatypes[dim]['name'],me))
else:
remarks.append('skipping "{}" of dimension {}...'.format(me,dim) if dim != -1 else \
'"{}" not found...'.format(me) )
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for data in active:
table.labels_append(['{}_gradFFT({})'.format(i+1,data['label'])
for i in range(coordDim*np.prod(np.array(data['shape'])))]) # extend ASCII header with new labels
table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
for data in active:
# we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
stack.append(gradFFT(size[::-1],
table.data[:,table.label_indexrange(data['label'])].
reshape(grid[::-1].tolist()+data['shape'])))
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
for label in options.labels:
field = table.get(label)
shape = (1,) if np.prod(field.shape)//np.prod(grid) == 1 else (3,) # scalar or vector
field = field.reshape(np.append(grid[::-1],shape))
table.add('gradFFT({})'.format(label),
damask.grid_filters.gradient(size[::-1],field).reshape((-1,np.prod(shape)*3)),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -43,54 +44,25 @@ parser.set_defaults(pole = (0.0,0.0,1.0),
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
# damask.Orientation requires Bravais lattice, but we are only interested in symmetry
symmetry2lattice={'cubic':'bcc','hexagonal':'hex','tetragonal':'bct'}
symmetry2lattice={'cubic':'fcc','hexagonal':'hex','tetragonal':'bct'}
lattice = symmetry2lattice[options.symmetry]
pole = np.array(options.pole)
pole /= np.linalg.norm(pole)
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
if not table.label_dimension(options.quaternion) == 4:
damask.util.croak('input {} does not have dimension 4.'.format(options.quaternion))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
column = table.label_index(options.quaternion)
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(['{}_IPF_{:g}{:g}{:g}_{sym}'.format(i+1,*options.pole,sym = options.symmetry.lower()) for i in range(3)])
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
o = damask.Orientation(np.array(list(map(float,table.data[column:column+4]))),
lattice = lattice).reduced()
table.data_append(o.IPFcolor(pole))
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
orientation = table.get(options.quaternion)
color = np.empty((orientation.shape[0],3))
for i,o in enumerate(orientation):
color[i] = damask.Orientation(o,lattice = lattice).IPFcolor(pole)
table.add('IPF_{:g}{:g}{:g}_{sym}'.format(*options.pole,sym = options.symmetry.lower()),
color,
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -42,7 +43,7 @@ parser.add_option('-n','--norm',
type = 'choice', choices = normChoices, metavar='string',
help = 'type of element-wise p-norm [frobenius] {%s}'%(','.join(map(str,normChoices))))
parser.add_option('-l','--label',
dest = 'label',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'heading of column(s) to calculate norm of')
@ -50,62 +51,25 @@ parser.set_defaults(norm = 'frobenius',
)
(options,filenames) = parser.parse_args()
if options.norm.lower() not in normChoices:
parser.error('invalid norm ({}) specified.'.format(options.norm))
if options.label is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.norm.lower() not in normChoices:
parser.error('invalid norm ({}) specified.'.format(options.norm))
if options.labels is None:
parser.error('no data column specified.')
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
data = table.get(label)
data_norm = np.empty((data.shape[0],1))
for i,d in enumerate(data):
data_norm[i] = norm(options.norm.capitalize(),d)
table.head_read()
table.add('norm{}({})'.format(options.norm.capitalize(),label),
data_norm,
scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
columns = []
dims = []
for what in options.label:
dim = table.label_dimension(what)
if dim < 0: remarks.append('column {} not found...'.format(what))
else:
dims.append(dim)
columns.append(table.label_index(what))
table.labels_append('norm{}({})'.format(options.norm.capitalize(),what)) # extend ASCII header with new labels
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
for column,dim in zip(columns,dims):
table.data_append(norm(options.norm.capitalize(),
map(float,table.data[column:column+dim])))
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -125,9 +125,10 @@ R = damask.Rotation.fromAxisAngle(np.array(options.labrotation),options.degrees,
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except Exception: continue
try:
table = damask.ASCIItable(name = name, buffered = False)
except IOError:
continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -42,52 +43,23 @@ parser.set_defaults(pole = (1.0,0.0,0.0),
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
pole = np.array(options.pole)
pole /= np.linalg.norm(pole)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
if not table.label_dimension(options.quaternion) == 4:
damask.util.croak('input {} does not have dimension 4.'.format(options.quaternion))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
column = table.label_index(options.quaternion)
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(['{}_pole_{}{}{}'.format(i+1,*options.pole) for i in range(2)])
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
o = damask.Rotation(np.array(list(map(float,table.data[column:column+4]))))
rotatedPole = o*pole # rotate pole according to crystal orientation
(x,y) = rotatedPole[0:2]/(1.+abs(pole[2])) # stereographic projection
table.data_append([np.sqrt(x*x+y*y),np.arctan2(y,x)] if options.polar else [x,y]) # cartesian coordinates
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
orientation = table.get(options.quaternion)
poles = np.empty((orientation.shape[0],2))
for i,o in enumerate(orientation):
rotatedPole = damask.Rotation(o)*pole # rotate pole according to crystal orientation
(x,y) = rotatedPole[0:2]/(1.+abs(pole[2])) # stereographic projection
poles[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if options.polar else [x,y] # cartesian coordinates
table.add('pole_{}{}{}'.format(*options.pole),
poles,
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,25 +2,24 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def operator(stretch,strain,eigenvalues):
def parameters(stretch,strain):
"""Albrecht Bertram: Elasticity and Plasticity of Large Deformations An Introduction (3rd Edition, 2012), p. 102."""
return {
'V#ln': np.log(eigenvalues) ,
'U#ln': np.log(eigenvalues) ,
'V#Biot': ( np.ones(3,'d') - 1.0/eigenvalues ) ,
'U#Biot': ( eigenvalues - np.ones(3,'d') ) ,
'V#Green': ( np.ones(3,'d') - 1.0/eigenvalues/eigenvalues) *0.5,
'U#Green': ( eigenvalues*eigenvalues - np.ones(3,'d')) *0.5,
'V#ln': ('V',0.0),
'U#ln': ('U',0.0),
'V#Biot': ('V',-.5),
'U#Biot': ('U',+.5),
'V#Green': ('V',-1.),
'U#Green': ('U',+1.),
}[stretch+'#'+strain]
@ -64,9 +63,10 @@ parser.set_defaults(
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if len(options.defgrad) > 1:
options.defgrad = options.defgrad[1:]
options.defgrad = options.defgrad[1:]
stretches = []
strains = []
@ -78,84 +78,21 @@ if options.biot: strains.append('Biot')
if options.green: strains.append('Green')
if options.defgrad is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
parser.error('no data column specified.')
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except IOError: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
items = {
'tensor': {'dim': 9, 'shape': [3,3], 'labels':options.defgrad, 'column': []},
}
errors = []
remarks = []
for type, data in items.items():
for what in data['labels']:
dim = table.label_dimension(what)
if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type))
else:
items[type]['column'].append(table.label_index(what))
for defgrad in options.defgrad:
F = table.get(defgrad).reshape((-1,3,3))
for theStretch in stretches:
for theStrain in strains:
table.labels_append(['{}_{}({}){}'.format(i+1, # extend ASCII header with new labels
theStrain,
theStretch,
what if what != 'f' else '') for i in range(9)])
for theStrain in strains:
(t,m) = parameters(theStretch,theStrain)
label = '{}({}){}'.format(theStrain,theStretch,defgrad if defgrad != 'f' else '')
table.add(label,
damask.mechanics.strain_tensor(F,t,m).reshape((-1,9)),
scriptID+' '+' '.join(sys.argv[1:]))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
stretch = {}
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
for column in items['tensor']['column']: # loop over all requested defgrads
F = np.array(list(map(float,table.data[column:column+items['tensor']['dim']])),'d').reshape(items['tensor']['shape'])
(U,S,Vh) = np.linalg.svd(F) # singular value decomposition
R_inv = np.dot(U,Vh).T # rotation of polar decomposition
stretch['U'] = np.dot(R_inv,F) # F = RU
stretch['V'] = np.dot(F,R_inv) # F = VR
for theStretch in stretches:
stretch[theStretch] = np.where(abs(stretch[theStretch]) < 1e-12, 0, stretch[theStretch]) # kill nasty noisy data
(D,V) = np.linalg.eigh((stretch[theStretch]+stretch[theStretch].T)*0.5) # eigen decomposition (of symmetric(ed) matrix)
neg = np.where(D < 0.0) # find negative eigenvalues ...
D[neg] *= -1. # ... flip value ...
V[:,neg] *= -1. # ... and vector
for theStrain in strains:
d = operator(theStretch,theStrain,D) # operate on eigenvalues of U or V
eps = np.dot(V,np.dot(np.diag(d),V.T)).reshape(9) # build tensor back from eigenvalue/vector basis
table.data_append(list(eps))
# ------------------------------------------ output result -----------------------------------------
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
@ -25,56 +26,19 @@ parser.add_option('-a', '--add','--table',
help = 'tables to add')
(options,filenames) = parser.parse_args()
if options.table is None:
parser.error('no table specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.table is None:
parser.error('no table specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
tables = []
for addTable in options.table:
try: tables.append(damask.ASCIItable(name = addTable,
buffered = False,
readonly = True)
)
except: continue
for addTable in options.table:
table2 = damask.Table.from_ASCII(addTable)
table2.data = table2.data[:table.data.shape[0]]
table.join(table2)
# ------------------------------------------ read headers ------------------------------------------
table.head_read()
for addTable in tables: addTable.head_read()
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for addTable in tables: table.labels_append(addTable.labels(raw = True)) # extend ASCII header with new labels
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read():
for addTable in tables:
outputAlive = addTable.data_read() # read next table's data
if not outputAlive: break
table.data_append(addTable.data) # append to master table
if outputAlive:
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
for addTable in tables:
addTable.close()
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -49,6 +50,7 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
packing = np.array(options.packing,dtype = int)
shift = np.array(options.shift, dtype = int)
@ -56,46 +58,14 @@ shift = np.array(options.shift, dtype = int)
prefix = 'averagedDown{}x{}x{}_'.format(*packing)
if any(shift != 0): prefix += 'shift{:+}{:+}{:+}_'.format(*shift)
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
outname = os.path.join(os.path.dirname(name),
prefix+os.path.basename(name)) if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
if (options.grid is None or options.size is None):
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
else:
grid = np.array(options.grid,'i')
size = np.array(options.size,'d')
@ -104,37 +74,25 @@ for name in filenames:
shift = np.where(grid == 1,0,shift) # reset shift to 0 where grid==1
packedGrid = np.maximum(np.ones(3,'i'),grid//packing)
data = table.data.values.reshape(tuple(grid)+(-1,),order = 'F')
averagedDown = scipy.ndimage.filters.uniform_filter( \
np.roll(
np.roll(
np.roll(table.data.reshape(list(grid)+[table.data.shape[1]],order = 'F'),
np.roll(data,
-shift[0],axis = 0),
-shift[1],axis = 1),
-shift[2],axis = 2),
size = list(packing) + [1],
mode = 'wrap',
origin = list(-(packing//2)) + [0])\
[::packing[0],::packing[1],::packing[2],:].reshape((packedGrid.prod(),table.data.shape[1]),order = 'F')
[::packing[0],::packing[1],::packing[2],:].reshape((packedGrid.prod(),-1),order = 'F')
table.data = averagedDown
table = damask.Table(averagedDown,table.shapes,table.comments)
#--- generate grid --------------------------------------------------------------------------------
coords = damask.grid_filters.cell_coord0(packedGrid,size,shift/packedGrid*size+origin)
table.set(options.pos, coords.reshape((-1,3)))
x = (0.5 + shift[0] + np.arange(packedGrid[0],dtype=float))/packedGrid[0]*size[0]
y = (0.5 + shift[1] + np.arange(packedGrid[1],dtype=float))/packedGrid[1]*size[1]
z = (0.5 + shift[2] + np.arange(packedGrid[2],dtype=float))/packedGrid[2]*size[2]
xx = np.tile( x, packedGrid[1]* packedGrid[2])
yy = np.tile(np.repeat(y,packedGrid[0] ),packedGrid[2])
zz = np.repeat(z,packedGrid[0]*packedGrid[1])
table.data[:,table.label_indexrange(options.pos)] = np.squeeze(np.dstack((xx,yy,zz)))
# ------------------------------------------ output result -----------------------------------------
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
outname = os.path.join(os.path.dirname(name),prefix+os.path.basename(name))
table.to_ASCII(sys.stdout if name is None else outname)

View File

@ -2,8 +2,10 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
from scipy import ndimage
import numpy as np
import damask
@ -42,81 +44,29 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
options.packing = np.array(options.packing)
prefix = 'blowUp{}x{}x{}_'.format(*options.packing)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
outname = os.path.join(os.path.dirname(name),
prefix+os.path.basename(name)) if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.pos) != 3: errors.append('coordinates "{}" are not a vector.'.format(options.pos))
colElem = table.label_index('elem')
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray(options.pos)
table.data_rewind()
grid,size = damask.util.coordGridAndSize(table.data)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
packing = np.array(options.packing,'i')
outSize = grid*packing
# ------------------------------------------ assemble header --------------------------------------
data = table.data.values.reshape(tuple(grid)+(-1,))
blownUp = ndimage.interpolation.zoom(data,tuple(packing)+(1,),order=0,mode='nearest').reshape((outSize.prod(),-1))
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
table = damask.Table(blownUp,table.shapes,table.comments)
# ------------------------------------------ process data -------------------------------------------
data = np.zeros(outSize.tolist()+[len(table.labels(raw = True))])
p = np.zeros(3,'i')
coords = damask.grid_filters.cell_coord0(outSize,size,origin)
table.set(options.pos,coords.reshape((-1,3)))
table.set('elem',np.arange(1,outSize.prod()+1))
for p[2] in range(grid[2]):
for p[1] in range(grid[1]):
for p[0] in range(grid[0]):
d = p*packing
table.data_read()
data[d[0]:d[0]+packing[0],
d[1]:d[1]+packing[1],
d[2]:d[2]+packing[2],
: ] = np.tile(np.array(table.data_asFloat(),'d'),packing.tolist()+[1]) # tile to match blowUp voxel size
elementSize = size/grid/packing
elem = 1
for c in range(outSize[2]):
for b in range(outSize[1]):
for a in range(outSize[0]):
data[a,b,c,table.label_indexrange(options.pos)] = [a+0.5,b+0.5,c+0.5]*elementSize
if colElem != -1: data[a,b,c,colElem] = elem
table.data = data[a,b,c,:].tolist()
outputAlive = table.data_write() # output processed line
elem += 1
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
outname = os.path.join(os.path.dirname(name),prefix+os.path.basename(name))
table.to_ASCII(sys.stdout if name is None else outname)

View File

@ -2,10 +2,9 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
@ -27,53 +26,18 @@ parser.add_option('-a', '--add','--table',
help = 'tables to add')
(options,filenames) = parser.parse_args()
if options.table is None:
parser.error('no table specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.table is None:
parser.error('no table specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
tables = []
for addTable in options.table:
try: tables.append(damask.ASCIItable(name = addTable,
buffered = False,
readonly = True)
)
except: continue
for growTable in options.table:
table2 = damask.Table.from_ASCII(growTable)
table.append(table2)
# ------------------------------------------ read headers ------------------------------------------
table.head_read()
for addTable in tables: addTable.head_read()
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
table.data_readArray()
data = table.data
for addTable in tables:
addTable.data_readArray(table.labels(raw = True))
data = np.vstack((data,addTable.data))
table.data = data
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
for addTable in tables:
addTable.close()
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -1,8 +1,9 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
from collections import defaultdict
import vtk
from vtk.util import numpy_support
@ -18,11 +19,10 @@ scriptID = ' '.join([scriptName,damask.version])
# MAIN
# --------------------------------------------------------------------
msg = "Add scalars, vectors, and/or an RGB tuple from"
msg += "an ASCIItable to existing VTK grid (.vtr/.vtk/.vtu)."
parser = OptionParser(option_class=damask.extendableOption,
usage='%prog options [ASCIItable(s)]',
description = msg,
description = "Add scalars, vectors, tensors, and/or an RGB tuple from ASCIItable "
+ "to existing VTK grid (.vtr/.vtk/.vtu).",
version = scriptID)
parser.add_option( '--vtk',
@ -49,10 +49,10 @@ parser.add_option('-c', '--color',
parser.set_defaults(data = [],
tensor = [],
color = [],
render = False,
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if not options.vtk: parser.error('No VTK file specified.')
if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')
@ -87,65 +87,28 @@ Ncells = rGrid.GetNumberOfCells()
damask.util.croak('{}: {} points and {} cells...'.format(options.vtk,Npoints,Ncells))
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName, name)
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
remarks = []
errors = []
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
VTKarray = {}
active = defaultdict(list)
for data in options.data:
VTKarray[data] = numpy_support.numpy_to_vtk(table.get(data).copy(),
deep=True,array_type=vtk.VTK_DOUBLE)
VTKarray[data].SetName(data)
for color in options.color:
VTKarray[color] = numpy_support.numpy_to_vtk((table.get(color)*255).astype(int).copy(),
deep=True,array_type=vtk.VTK_UNSIGNED_CHAR)
VTKarray[color].SetName(color)
for datatype,dimension,label in [['data',99,options.data],
['tensor',9,options.tensor],
['color' ,3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
me = label[i]
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
else:
remarks.append('adding {} "{}"...'.format(datatype,me))
active[datatype].append(me)
for tensor in options.tensor:
data = damask.mechanics.symmetric(table.get(tensor).reshape((-1,3,3))).reshape((-1,9))
VTKarray[tensor] = numpy_support.numpy_to_vtk(data.copy(),
deep=True,array_type=vtk.VTK_DOUBLE)
VTKarray[tensor].SetName(tensor)
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ---------------------------------------
table.data_readArray([item for sublist in active.values() for item in sublist]) # read all requested data
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
VTKtype = vtk.VTK_DOUBLE
VTKdata = table.data[:, table.label_indexrange(me)].copy() # copy to force contiguous layout
if datatype == 'color':
VTKtype = vtk.VTK_UNSIGNED_CHAR
VTKdata = (VTKdata*255).astype(int) # translate to 0..255 UCHAR
elif datatype == 'tensor':
VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3])
VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6])
VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7])
VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype)
VTKarray[me].SetName(me)
table.close() # close input ASCII table
# ------------------------------------------ add data ---------------------------------------
@ -157,16 +120,10 @@ for name in filenames:
damask.util.croak('{} mode...'.format(mode))
for datatype,labels in active.items(): # loop over scalar,color
if datatype == 'color':
if mode == 'cell': rGrid.GetCellData().SetScalars(VTKarray[active['color'][0]])
elif mode == 'point': rGrid.GetPointData().SetScalars(VTKarray[active['color'][0]])
for me in labels: # loop over all requested items
if mode == 'cell': rGrid.GetCellData().AddArray(VTKarray[me])
elif mode == 'point': rGrid.GetPointData().AddArray(VTKarray[me])
for data in VTKarray:
if mode == 'cell': rGrid.GetCellData().AddArray(VTKarray[data])
elif mode == 'point': rGrid.GetPointData().AddArray(VTKarray[data])
rGrid.Modified()
if vtk.VTK_MAJOR_VERSION <= 5: rGrid.Update()
# ------------------------------------------ output result ---------------------------------------
@ -184,7 +141,7 @@ if options.render:
actor.SetMapper(mapper)
# Create the graphics structure. The renderer renders into the
# render window. The render window interactor captures mouse events
# render window. The render window interactor captures mouse events
# and will perform appropriate camera or actor manipulation
# depending on the nature of the events.

View File

@ -1,8 +1,9 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
from collections import defaultdict
import vtk
from vtk.util import numpy_support
@ -20,7 +21,8 @@ scriptID = ' '.join([scriptName,damask.version])
parser = OptionParser(option_class=damask.extendableOption,
usage='%prog options [ASCIItable(s)]',
description = """Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp).""",
description = "Add scalars, vectors, tensors, and/or an RGB tuple from ASCIItable "
+ "VTK point cloud (.vtp).",
version = scriptID)
parser.add_option( '--vtk',
@ -39,9 +41,10 @@ parser.add_option('-t', '--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'tensor (3x3) value label(s)')
parser.add_option('-c', '--color', dest='color', action='extend',
metavar ='<string LIST>',
help = 'RGB color tuples')
parser.add_option('-c', '--color',
dest = 'color',
action = 'extend', metavar = '<string LIST>',
help = 'RGB color tuple label')
parser.set_defaults(data = [],
tensor = [],
@ -49,8 +52,9 @@ parser.set_defaults(data = [],
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if not options.vtk: parser.error('no VTK file specified.')
if not options.vtk: parser.error('No VTK file specified.')
if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')
vtk_file,vtk_ext = os.path.splitext(options.vtk)
@ -77,81 +81,35 @@ if Npoints != Ncells or Npoints != Nvertices:
damask.util.croak('{}: {} points/vertices/cells...'.format(options.vtk,Npoints))
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName, name)
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
remarks = []
errors = []
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
VTKarray = {}
active = defaultdict(list)
for data in options.data:
VTKarray[data] = numpy_support.numpy_to_vtk(table.get(data).copy(),
deep=True,array_type=vtk.VTK_DOUBLE)
VTKarray[data].SetName(data)
for color in options.color:
VTKarray[color] = numpy_support.numpy_to_vtk((table.get(color)*255).astype(int).copy(),
deep=True,array_type=vtk.VTK_UNSIGNED_CHAR)
VTKarray[color].SetName(color)
for datatype,dimension,label in [['data',0,options.data],
['tensor',9,options.tensor],
['color' ,3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
me = label[i]
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dimension > 0 \
and dim != dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
else:
remarks.append('adding {}{} "{}"...'.format(datatype if dim > 1 else 'scalar',
'' if dimension > 0 or dim == 1 else '[{}]'.format(dim),
me))
active[datatype].append(me)
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --------------------------------------- process and add data -----------------------------------
table.data_readArray([item for sublist in active.values() for item in sublist]) # read all requested data
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
VTKtype = vtk.VTK_DOUBLE
VTKdata = table.data[:, table.label_indexrange(me)].copy() # copy to force contiguous layout
if datatype == 'color':
VTKtype = vtk.VTK_UNSIGNED_CHAR
VTKdata = (VTKdata*255).astype(int) # translate to 0..255 UCHAR
elif datatype == 'tensor':
VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3])
VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6])
VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7])
VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype)
VTKarray[me].SetName(me)
if datatype == 'color':
Polydata.GetPointData().SetScalars(VTKarray[me])
Polydata.GetCellData().SetScalars(VTKarray[me])
else:
Polydata.GetPointData().AddArray(VTKarray[me])
Polydata.GetCellData().AddArray(VTKarray[me])
for tensor in options.tensor:
data = damask.mechanics.symmetric(table.get(tensor).reshape((-1,3,3))).reshape((-1,9))
VTKarray[tensor] = numpy_support.numpy_to_vtk(data.copy(),
deep=True,array_type=vtk.VTK_DOUBLE)
VTKarray[tensor].SetName(tensor)
table.input_close() # close input ASCII table
for data in VTKarray:
Polydata.GetPointData().AddArray(VTKarray[data])
Polydata.Modified()
# ------------------------------------------ output result ---------------------------------------
Polydata.Modified()
writer = vtk.vtkXMLPolyDataWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()

View File

@ -1,199 +0,0 @@
#!/usr/bin/env python3
import os
from optparse import OptionParser
from collections import defaultdict
import vtk
from vtk.util import numpy_support
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
msg = "Add scalars, vectors, and/or an RGB tuple from"
msg += "an ASCIItable to existing VTK rectilinear grid (.vtr/.vtk)."
parser = OptionParser(option_class=damask.extendableOption,
usage='%prog options [file[s]]',
description = msg,
version = scriptID)
parser.add_option( '--vtk',
dest = 'vtk',
type = 'string', metavar = 'string',
help = 'VTK file name')
parser.add_option('-r', '--render',
dest = 'render',
action = 'store_true',
help = 'open output in VTK render window')
parser.add_option('-d', '--data',
dest = 'data',
action = 'extend', metavar = '<string LIST>',
help = 'scalar/vector value(s) label(s)')
parser.add_option('-t', '--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'tensor (3x3) value label(s)')
parser.add_option('-c', '--color',
dest = 'color',
action = 'extend', metavar = '<string LIST>',
help = 'RGB color tuple label')
parser.set_defaults(data = [],
tensor = [],
color = [],
render = False,
)
(options, filenames) = parser.parse_args()
if not options.vtk: parser.error('no VTK file specified.')
if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')
vtk_file,vtk_ext = os.path.splitext(options.vtk)
if vtk_ext == '.vtr':
reader = vtk.vtkXMLRectilinearGridReader()
reader.SetFileName(options.vtk)
reader.Update()
rGrid = reader.GetOutput()
elif vtk_ext == '.vtk':
reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(options.vtk)
reader.Update()
rGrid = reader.GetRectilinearGridOutput()
else:
parser.error('unsupported VTK file type extension.')
Npoints = rGrid.GetNumberOfPoints()
Ncells = rGrid.GetNumberOfCells()
damask.util.croak('{}: {} points and {} cells...'.format(options.vtk,Npoints,Ncells))
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName, name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
remarks = []
errors = []
VTKarray = {}
active = defaultdict(list)
for datatype,dimension,label in [['data',0,options.data],
['tensor',9,options.tensor],
['color' ,3,options.color],
]:
for i,dim in enumerate(table.label_dimension(label)):
me = label[i]
if dim == -1: remarks.append('{} "{}" not found...'.format(datatype,me))
elif dimension > 0 \
and dim != dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
else:
remarks.append('adding {}{} "{}"...'.format(datatype if dim > 1 else 'scalar',
'' if dimension > 0 or dim == 1 else '[{}]'.format(dim),
me))
active[datatype].append(me)
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ---------------------------------------
table.data_readArray([item for sublist in active.values() for item in sublist]) # read all requested data
for datatype,labels in active.items(): # loop over scalar,color
for me in labels: # loop over all requested items
VTKtype = vtk.VTK_DOUBLE
VTKdata = table.data[:, table.label_indexrange(me)].copy() # copy to force contiguous layout
if datatype == 'color':
VTKtype = vtk.VTK_UNSIGNED_CHAR
VTKdata = (VTKdata*255).astype(int) # translate to 0..255 UCHAR
elif datatype == 'tensor':
VTKdata[:,1] = VTKdata[:,3] = 0.5*(VTKdata[:,1]+VTKdata[:,3])
VTKdata[:,2] = VTKdata[:,6] = 0.5*(VTKdata[:,2]+VTKdata[:,6])
VTKdata[:,5] = VTKdata[:,7] = 0.5*(VTKdata[:,5]+VTKdata[:,7])
VTKarray[me] = numpy_support.numpy_to_vtk(num_array=VTKdata,deep=True,array_type=VTKtype)
VTKarray[me].SetName(me)
table.close() # close input ASCII table
# ------------------------------------------ add data ---------------------------------------
if len(table.data) == Npoints: mode = 'point'
elif len(table.data) == Ncells: mode = 'cell'
else:
damask.util.croak('data count is incompatible with grid...')
continue
damask.util.croak('{} mode...'.format(mode))
for datatype,labels in active.items(): # loop over scalar,color
if datatype == 'color':
if mode == 'cell': rGrid.GetCellData().SetScalars(VTKarray[active['color'][0]])
elif mode == 'point': rGrid.GetPointData().SetScalars(VTKarray[active['color'][0]])
for me in labels: # loop over all requested items
if mode == 'cell': rGrid.GetCellData().AddArray(VTKarray[me])
elif mode == 'point': rGrid.GetPointData().AddArray(VTKarray[me])
rGrid.Modified()
# ------------------------------------------ output result ---------------------------------------
writer = vtk.vtkXMLRectilinearGridWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(vtk_file+'.'+writer.GetDefaultFileExtension())
writer.SetInputData(rGrid)
writer.Write()
# ------------------------------------------ render result ---------------------------------------
if options.render:
mapper = vtk.vtkDataSetMapper()
mapper.SetInputData(rGrid)
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# Create the graphics structure. The renderer renders into the
# render window. The render window interactor captures mouse events
# and will perform appropriate camera or actor manipulation
# depending on the nature of the events.
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
ren.AddActor(actor)
ren.SetBackground(1, 1, 1)
renWin.SetSize(200, 200)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
iren.Initialize()
renWin.Render()
iren.Start()

View File

@ -2,10 +2,10 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import vtk
import numpy as np
import damask
@ -33,49 +33,20 @@ parser.set_defaults(pos = 'pos',
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
errors = []
remarks = []
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
# ------------------------------------------ process data ---------------------------------------
table.data_readArray(options.pos)
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data.shape[1]),dtype='f'))) # fill coords up to 3D with zeros
Polydata = vtk.vtkPolyData()
Points = vtk.vtkPoints()
Vertices = vtk.vtkCellArray()
for p in table.data:
for p in table.get(options.pos):
pointID = Points.InsertNextPoint(p)
Vertices.InsertNextCell(1)
Vertices.InsertCellPoint(pointID)
@ -104,5 +75,3 @@ for name in filenames:
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString())
table.close()

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import vtk
@ -40,48 +41,14 @@ parser.set_defaults(mode = 'cell',
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = True,
readonly = True,
)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
remarks = []
errors = []
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray(options.pos)
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data.shape[1]),dtype='f'))) # fill coords up to 3D with zeros
coords = [np.unique(table.data[:,i]) for i in range(3)]
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
coords = [np.unique(table.get(options.pos)[:,i]) for i in range(3)]
if options.mode == 'cell':
coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \
[coords[i][j-1] + coords[i][j] for j in range(1,len(coords[i]))] + \
@ -90,13 +57,6 @@ for name in filenames:
grid = np.array(list(map(len,coords)),'i')
N = grid.prod() if options.mode == 'point' else (grid-1).prod()
if N != len(table.data):
errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*(grid - (options.mode == 'cell')) ))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ---------------------------------------
rGrid = vtk.vtkRectilinearGrid()
@ -135,5 +95,3 @@ for name in filenames:
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString())
table.close()

View File

@ -145,7 +145,6 @@ for name in filenames:
config_header += ['<microstructure>']
for i in range(np.nanmax(microstructure)):
config_header += ['[{}{}]'.format(label,i+1),
'crystallite 1',
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(phase[i],i+1),
]

View File

@ -126,15 +126,12 @@ for i in range(3,np.max(microstructure)):
config_header = ['<microstructure>',
'[canal]',
'crystallite 1',
'(constituent)\tphase 1\ttexture 1\tfraction 1.0',
'[interstitial]',
'crystallite 1',
'(constituent)\tphase 2\ttexture 2\tfraction 1.0'
]
for i in range(3,np.max(microstructure)):
config_header += ['[Point{}]'.format(i-2),
'crystallite 1',
'(constituent)\tphase 3\ttexture {}\tfraction 1.0'.format(i)
]

View File

@ -78,36 +78,15 @@ for name in filenames:
table = damask.ASCIItable(name = name,readonly=True)
table.head_read() # read ASCII header info
# ------------------------------------------ sanity checks ---------------------------------------
coordDim = table.label_dimension(options.pos)
errors = []
if not 3 >= coordDim >= 2:
errors.append('coordinates "{}" need to have two or three dimensions.'.format(options.pos))
if not np.all(table.label_dimension(label) == dim):
errors.append('input "{}" needs to have dimension {}.'.format(label,dim))
if options.phase and table.label_dimension(options.phase) != 1:
errors.append('phase column "{}" is not scalar.'.format(options.phase))
if errors != []:
damask.util.croak(errors)
continue
table.data_readArray([options.pos] \
+ (label if isinstance(label, list) else [label]) \
+ ([options.phase] if options.phase else []))
if coordDim == 2:
table.data = np.insert(table.data,2,np.zeros(len(table.data)),axis=1) # add zero z coordinate for two-dimensional input
if options.phase is None:
table.data = np.column_stack((table.data,np.ones(len(table.data)))) # add single phase if no phase column given
grid,size = damask.util.coordGridAndSize(table.data[:,0:3])
coords = [np.unique(table.data[:,i]) for i in range(3)]
mincorner = np.array(list(map(min,coords)))
origin = mincorner - 0.5*size/grid # shift from cell center to corner
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.data[:,0:3])
indices = np.lexsort((table.data[:,0],table.data[:,1],table.data[:,2])) # indices of position when sorting x fast, z slow
microstructure = np.empty(grid,dtype = int) # initialize empty microstructure
@ -142,7 +121,6 @@ for name in filenames:
config_header += ['<microstructure>']
for i,data in enumerate(unique):
config_header += ['[Grain{}]'.format(i+1),
'crystallite 1',
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(int(data[4]),i+1),
]

View File

@ -290,7 +290,6 @@ for name in filenames:
config_header += ['<microstructure>']
for ID in grainIDs:
config_header += ['[Grain{}]'.format(ID),
'crystallite 1',
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(options.phase,ID)
]

View File

@ -2,10 +2,8 @@
import os
import sys
from optparse import OptionParser
from io import StringIO
import numpy as np
from optparse import OptionParser
import damask
@ -24,38 +22,25 @@ Translate geom description into ASCIItable containing position and microstructur
""", version = scriptID)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom)
damask.util.croak(geom)
coord0 = damask.grid_filters.cell_coord0(geom.grid,geom.size,geom.origin).reshape((-1,3))
# --- generate grid --------------------------------------------------------------------------------
comments = geom.comments \
+ [scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {}\tb {}\tc {}".format(*geom.grid),
"size\tx {}\ty {}\tz {}".format(*geom.size),
"origin\tx {}\ty {}\tz {}".format(*geom.origin),
"homogenization\t{}".format(geom.homogenization)]
grid = geom.get_grid()
size = geom.get_size()
origin = geom.get_origin()
table = damask.Table(coord0,{'pos':(3,)},comments)
table.add('microstructure',geom.microstructure.reshape((-1,1),order='F'))
x = (0.5 + np.arange(grid[0],dtype=float))/grid[0]*size[0]+origin[0]
y = (0.5 + np.arange(grid[1],dtype=float))/grid[1]*size[1]+origin[1]
z = (0.5 + np.arange(grid[2],dtype=float))/grid[2]*size[2]+origin[2]
xx = np.tile( x, grid[1]* grid[2])
yy = np.tile(np.repeat(y,grid[0] ),grid[2])
zz = np.repeat(z,grid[0]*grid[1])
# --- create ASCII table --------------------------------------------------------------------------
table = damask.ASCIItable(outname = os.path.splitext(name)[0]+'.txt' if name else name)
table.info_append(geom.get_comments() + [scriptID + '\t' + ' '.join(sys.argv[1:])])
table.labels_append(['{}_{}'.format(1+i,'pos') for i in range(3)]+['microstructure'])
table.head_write()
table.output_flush()
table.data = np.squeeze(np.dstack((xx,yy,zz,geom.microstructure.flatten('F'))),axis=0)
table.data_writeArray()
table.close()
table.to_ASCII(sys.stdout if name is None else \
os.path.splitext(name)[0]+'.txt')

View File

@ -19,7 +19,7 @@ def integerFactorization(i):
return j
def binAsBins(bin,intervals):
"""Explode compound bin into 3D bins list"""
"""Explode compound bin into 3D bins list."""
bins = [0]*3
bins[0] = (bin//(intervals[1] * intervals[2])) % intervals[0]
bins[1] = (bin//intervals[2]) % intervals[1]
@ -27,17 +27,17 @@ def binAsBins(bin,intervals):
return bins
def binsAsBin(bins,intervals):
"""Implode 3D bins into compound bin"""
"""Implode 3D bins into compound bin."""
return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]
def EulersAsBins(Eulers,intervals,deltas,center):
"""Return list of Eulers translated into 3D bins list"""
"""Return list of Eulers translated into 3D bins list."""
return [int((euler+(0.5-center)*delta)//delta)%interval \
for euler,delta,interval in zip(Eulers,deltas,intervals) \
]
def binAsEulers(bin,intervals,deltas,center):
"""Compound bin number translated into list of Eulers"""
"""Compound bin number translated into list of Eulers."""
Eulers = [0.0]*3
Eulers[2] = (bin%intervals[2] + center)*deltas[2]
Eulers[1] = (bin//intervals[2]%intervals[1] + center)*deltas[1]
@ -45,7 +45,7 @@ def binAsEulers(bin,intervals,deltas,center):
return Eulers
def directInvRepetitions(probability,scale):
"""Calculate number of samples drawn by direct inversion"""
"""Calculate number of samples drawn by direct inversion."""
nDirectInv = 0
for bin in range(len(probability)): # loop over bins
nDirectInv += int(round(probability[bin]*scale)) # calc repetition
@ -56,7 +56,7 @@ def directInvRepetitions(probability,scale):
# ----- efficient algorithm ---------
def directInversion (ODF,nSamples):
"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)."""
nOptSamples = max(ODF['nNonZero'],nSamples) # random subsampling if too little samples requested
nInvSamples = 0
@ -118,7 +118,7 @@ def directInversion (ODF,nSamples):
# ----- trial and error algorithms ---------
def MonteCarloEulers (ODF,nSamples):
"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)."""
countMC = 0
maxdV_V = max(ODF['dV_V'])
orientations = np.zeros((nSamples,3),'f')
@ -141,7 +141,7 @@ def MonteCarloEulers (ODF,nSamples):
def MonteCarloBins (ODF,nSamples):
"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)."""
countMC = 0
maxdV_V = max(ODF['dV_V'])
orientations = np.zeros((nSamples,3),'f')
@ -163,7 +163,7 @@ def MonteCarloBins (ODF,nSamples):
def TothVanHoutteSTAT (ODF,nSamples):
"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)."""
orientations = np.zeros((nSamples,3),'f')
reconstructedODF = np.zeros(ODF['nBins'],'f')
unitInc = 1.0/nSamples
@ -211,10 +211,6 @@ parser.add_option('-p','--phase',
dest = 'phase',
type = 'int', metavar = 'int',
help = 'phase index to be used [%default]')
parser.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')
parser.add_option('-r', '--rnd',
dest = 'randomSeed',
type = 'int', metavar = 'int', \
@ -223,7 +219,6 @@ parser.set_defaults(randomSeed = None,
number = 500,
algorithm = 'IA',
phase = 1,
crystallite = 1,
ang = True,
)
@ -240,7 +235,7 @@ if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name, buffered = False, readonly=True)
except:
except IOError:
continue
damask.util.report(scriptName,name)
@ -351,7 +346,6 @@ for name in filenames:
for i,ID in enumerate(range(nSamples)):
materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)),
'crystallite %i'%options.crystallite,
'(constituent) phase %i texture %s fraction 1.0'%(options.phase,str(ID+1).rjust(formatwidth)),
]

View File

@ -1,9 +1,10 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys
import numpy as np
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
@ -191,78 +192,45 @@ parser.add_option('-p', '--port',
dest = 'port',
type = 'int', metavar = 'int',
help = 'Mentat connection port [%default]')
parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [auto]')
parser.set_defaults(port = None,
homogenization = None,
)
parser.set_defaults(port = None,
)
(options, filenames) = parser.parse_args()
if options.port:
try:
import py_mentat
except:
parser.error('no valid Mentat release found.')
if options.port is not None:
try:
import py_mentat
except ImportError:
parser.error('no valid Mentat release found.')
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.proc' if name else name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
if options.homogenization: info['homogenization'] = options.homogenization
damask.util.report(scriptName,name)
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
microstructure = geom.get_microstructure().flatten(order='F')
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'].prod(),order='F') # read microstructure
cmds = [\
init(),
mesh(info['grid'],info['size']),
material(),
geometry(),
initial_conditions(info['homogenization'],microstructure),
'*identify_sets',
'*show_model',
'*redraw',
'*draw_automatic',
]
outputLocals = {}
if options.port:
py_mentat.py_connect('',options.port)
output(cmds,outputLocals,'Mentat')
py_mentat.py_disconnect()
else:
output(cmds,outputLocals,table.__IO__['out']) # bad hack into internals of table class...
table.close()
cmds = [\
init(),
mesh(geom.grid,geom.size),
material(),
geometry(),
initial_conditions(geom.homogenization,microstructure),
'*identify_sets',
'*show_model',
'*redraw',
'*draw_automatic',
]
outputLocals = {}
if options.port:
py_mentat.py_connect('',options.port)
output(cmds,outputLocals,'Mentat')
py_mentat.py_disconnect()
else:
with sys.stdout if name is None else open(os.path.splitext(name)[0]+'.proc','w') as f:
output(cmds,outputLocals,f)

View File

@ -78,13 +78,11 @@ def rcbOrientationParser(content,idcolumn):
damask.util.croak('You might not have chosen the correct column for the grain IDs! '+
'Please check the "--id" option.')
raise
except:
raise
return grains
def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
"""Parser for TSL-OIM reconstructed boundary files"""
"""Parser for TSL-OIM reconstructed boundary files."""
# find bounding box
boxX = [1.*sys.maxint,-1.*sys.maxint]
boxY = [1.*sys.maxint,-1.*sys.maxint]
@ -99,8 +97,6 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
damask.util.croak('You might not have chosen the correct column for the segment end points! '+
'Please check the "--segment" option.')
raise
except:
raise
(x[0],y[0]) = (M[0]*x[0]+M[1]*y[0],M[2]*x[0]+M[3]*y[0]) # apply transformation to coordinates
(x[1],y[1]) = (M[0]*x[1]+M[1]*y[1],M[2]*x[1]+M[3]*y[1]) # to get rcb --> Euler system
boxX[0] = min(boxX[0],x[0],x[1])
@ -728,7 +724,7 @@ def image(name,imgsize,marginX,marginY,rcData):
# -------------------------
def inside(x,y,points):
"""Tests whether point(x,y) is within polygon described by points"""
"""Tests whether point(x,y) is within polygon described by points."""
inside = False
npoints=len(points)
(x1,y1) = points[npoints-1] # start with last point of points
@ -750,7 +746,7 @@ def inside(x,y,points):
# -------------------------
def fftbuild(rcData,height,xframe,yframe,grid,extrusion):
"""Build array of grain numbers"""
"""Build array of grain numbers."""
maxX = -1.*sys.maxint
maxY = -1.*sys.maxint
for line in rcData['point']: # find data range
@ -883,7 +879,7 @@ try:
boundaryFile = open(args[0])
boundarySegments = boundaryFile.readlines()
boundaryFile.close()
except:
except IOError:
damask.util.croak('unable to read boundary file "{}".'.format(args[0]))
raise
@ -941,19 +937,15 @@ if any(output in options.output for output in ['spectral','mentat']):
for i,grain in enumerate(rcData['grainMapping']):
config+=['[grain{}]'.format(grain),
'crystallite\t1',
'(constituent)\tphase 1\ttexture {}\tfraction 1.0'.format(i+1)]
if (options.xmargin > 0.0):
config+=['[x-margin]',
'crystallite\t1',
'(constituent)\tphase 2\ttexture {}\tfraction 1.0\n'.format(len(rcData['grainMapping'])+1)]
if (options.ymargin > 0.0):
config+=['[y-margin]',
'crystallite\t1',
'(constituent)\tphase 2\ttexture {}\tfraction 1.0\n'.format(len(rcData['grainMapping'])+1)]
if (options.xmargin > 0.0 and options.ymargin > 0.0):
config+=['[xy-margin]',
'crystallite\t1',
'(constituent)\tphase 2\ttexture {}\tfraction 1.0\n'.format(len(rcData['grainMapping'])+1)]
if (options.xmargin > 0.0 or options.ymargin > 0.0):

View File

@ -6,7 +6,6 @@ do
vtk_addPointCloudData $seeds \
--data microstructure,weight \
--inplace \
--vtk ${seeds%.*}.vtp \
done

View File

@ -1,9 +1,12 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys
import numpy as np
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
@ -29,88 +32,39 @@ parser.add_option('-b',
action = 'extend', metavar = '<int LIST>',
dest = 'blacklist',
help = 'blacklist of grain IDs')
parser.add_option('-p',
'--pos', '--seedposition',
dest = 'pos',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.set_defaults(whitelist = [],
blacklist = [],
pos = 'pos',
)
(options,filenames) = parser.parse_args()
options.whitelist = list(map(int,options.whitelist))
options.blacklist = list(map(int,options.blacklist))
# --- loop over output files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
options.whitelist = [int(i) for i in options.whitelist]
options.blacklist = [int(i) for i in options.blacklist]
for name in filenames:
try: table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.seeds' if name else name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
microstructure = geom.get_microstructure().reshape((-1,1),order='F')
# --- interpret header ----------------------------------------------------------------------------
mask = np.logical_and(np.in1d(microstructure,options.whitelist,invert=False) if options.whitelist else \
np.full(geom.grid.prod(),True,dtype=bool),
np.in1d(microstructure,options.blacklist,invert=True) if options.blacklist else \
np.full(geom.grid.prod(),True,dtype=bool))
seeds = np.concatenate((damask.grid_filters.cell_coord0(geom.grid,geom.size).reshape((-1,3)),
microstructure),
axis=1)[mask]
comments = geom.comments \
+ [scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {}\tb {}\tc {}".format(*geom.grid),
"size\tx {}\ty {}\tz {}".format(*geom.size),
"origin\tx {}\ty {}\tz {}".format(*geom.origin),
"homogenization\t{}".format(geom.homogenization)]
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']) # read (linear) microstructure
# --- generate grid --------------------------------------------------------------------------------
x = (0.5 + np.arange(info['grid'][0],dtype=float))/info['grid'][0]*info['size'][0]+info['origin'][0]
y = (0.5 + np.arange(info['grid'][1],dtype=float))/info['grid'][1]*info['size'][1]+info['origin'][1]
z = (0.5 + np.arange(info['grid'][2],dtype=float))/info['grid'][2]*info['size'][2]+info['origin'][2]
xx = np.tile( x, info['grid'][1]* info['grid'][2])
yy = np.tile(np.repeat(y,info['grid'][0] ),info['grid'][2])
zz = np.repeat(z,info['grid'][0]*info['grid'][1])
mask = np.logical_and(np.in1d(microstructure,options.whitelist,invert=False) if options.whitelist != []
else np.full_like(microstructure,True,dtype=bool),
np.in1d(microstructure,options.blacklist,invert=True ) if options.blacklist != []
else np.full_like(microstructure,True,dtype=bool))
# ------------------------------------------ assemble header ---------------------------------------
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(info['microstructures']),
])
table.labels_clear()
table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.pos) for i in range(3)]+['microstructure'])
table.head_write()
table.output_flush()
# --- write seeds information ------------------------------------------------------------
table.data = np.squeeze(np.dstack((xx,yy,zz,microstructure)))[mask]
table.data_writeArray()
# ------------------------------------------ finalize output ---------------------------------------
table.close()
table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments)
table.to_ASCII(sys.stdout if name is None else \
os.path.splitext(name)[0]+'.seeds')

View File

@ -1,11 +1,14 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,math,sys
import numpy as np
import damask
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
@ -35,117 +38,58 @@ parser.add_option('-y',
action = 'store_true',
dest = 'y',
help = 'poke 45 deg along y')
parser.add_option('-p','--position',
dest = 'position',
type = 'string', metavar = 'string',
help = 'column label for coordinates [%default]')
parser.set_defaults(x = False,
y = False,
box = [0.0,1.0,0.0,1.0,0.0,1.0],
N = 16,
position = 'pos',
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
options.box = np.array(options.box).reshape(3,2)
# --- loop over output files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[-2]+'_poked_{}.seeds'.format(options.N) if name else name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure
# --- do work ------------------------------------------------------------------------------------
newInfo = {
'microstructures': 0,
}
offset = (np.amin(options.box, axis=1)*info['grid']/info['size']).astype(int)
box = np.amax(options.box, axis=1) - np.amin(options.box, axis=1)
Nx = int(options.N/math.sqrt(options.N*info['size'][1]*box[1]/info['size'][0]/box[0]))
Ny = int(options.N/math.sqrt(options.N*info['size'][0]*box[0]/info['size'][1]/box[1]))
Nz = int(box[2]*info['grid'][2])
damask.util.croak('poking {} x {} x {} in box {} {} {}...'.format(Nx,Ny,Nz,*box))
seeds = np.zeros((Nx*Ny*Nz,4),'d')
grid = np.zeros(3,'i')
n = 0
for i in range(Nx):
for j in range(Ny):
grid[0] = round((i+0.5)*box[0]*info['grid'][0]/Nx-0.5)+offset[0]
grid[1] = round((j+0.5)*box[1]*info['grid'][1]/Ny-0.5)+offset[1]
for k in range(Nz):
grid[2] = k + offset[2]
grid %= info['grid']
seeds[n,0:3] = (0.5+grid)/info['grid'] # normalize coordinates to box
seeds[n, 3] = microstructure[grid[0],grid[1],grid[2]]
if options.x: grid[0] += 1
if options.y: grid[1] += 1
n += 1
newInfo['microstructures'] = len(np.unique(seeds[:,3]))
# --- report ---------------------------------------------------------------------------------------
if (newInfo['microstructures'] != info['microstructures']):
damask.util.croak('--> microstructures: %i'%newInfo['microstructures'])
# ------------------------------------------ assemble header ---------------------------------------
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"poking\ta {}\tb {}\tc {}".format(Nx,Ny,Nz),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(newInfo['microstructures']),
])
table.labels_clear()
table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in range(3)]+['microstructure'])
table.head_write()
table.output_flush()
# --- write seeds information ------------------------------------------------------------
table.data = seeds
table.data_writeArray()
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
offset =(np.amin(options.box, axis=1)*geom.grid/geom.size).astype(int)
box = np.amax(options.box, axis=1) \
- np.amin(options.box, axis=1)
Nx = int(options.N/np.sqrt(options.N*geom.size[1]*box[1]/geom.size[0]/box[0]))
Ny = int(options.N/np.sqrt(options.N*geom.size[0]*box[0]/geom.size[1]/box[1]))
Nz = int(box[2]*geom.grid[2])
damask.util.croak('poking {} x {} x {} in box {} {} {}...'.format(Nx,Ny,Nz,*box))
seeds = np.zeros((Nx*Ny*Nz,4),'d')
g = np.zeros(3,'i')
n = 0
for i in range(Nx):
for j in range(Ny):
g[0] = round((i+0.5)*box[0]*geom.grid[0]/Nx-0.5)+offset[0]
g[1] = round((j+0.5)*box[1]*geom.grid[1]/Ny-0.5)+offset[1]
for k in range(Nz):
g[2] = k + offset[2]
g %= geom.grid
seeds[n,0:3] = (g+0.5)/geom.grid # normalize coordinates to box
seeds[n, 3] = geom.microstructure[g[0],g[1],g[2]]
if options.x: g[0] += 1
if options.y: g[1] += 1
n += 1
comments = geom.comments \
+ [scriptID + ' ' + ' '.join(sys.argv[1:]),
"poking\ta {}\tb {}\tc {}".format(Nx,Ny,Nz),
"grid\ta {}\tb {}\tc {}".format(*geom.grid),
"size\tx {}\ty {}\tz {}".format(*geom.size),
"origin\tx {}\ty {}\tz {}".format(*geom.origin),
"homogenization\t{}".format(geom.homogenization)]
table = damask.Table(seeds,{'pos':(3,),'microstructure':(1,)},comments)
table.to_ASCII(sys.stdout if name is None else \
os.path.splitext(name)[0]+'_poked_{}.seeds'.format(options.N))

View File

@ -23,4 +23,5 @@ from .util import extendableOption # noqa
# functions in modules
from . import mechanics # noqa
from . import grid_filters # noqa

View File

@ -1,5 +1,3 @@
import math
import numpy as np
class Color():
@ -328,11 +326,11 @@ class Color():
if self.model != 'CIELAB': return
Msh = np.zeros(3,'d')
Msh[0] = math.sqrt(np.dot(self.color,self.color))
Msh[0] = np.sqrt(np.dot(self.color,self.color))
if (Msh[0] > 0.001):
Msh[1] = math.acos(self.color[0]/Msh[0])
Msh[1] = np.arccos(self.color[0]/Msh[0])
if (self.color[1] != 0.0):
Msh[2] = math.atan2(self.color[2],self.color[1])
Msh[2] = np.arctan2(self.color[2],self.color[1])
converted = Color('MSH', Msh)
self.model = converted.model
@ -349,9 +347,9 @@ class Color():
if self.model != 'MSH': return
Lab = np.zeros(3,'d')
Lab[0] = self.color[0] * math.cos(self.color[1])
Lab[1] = self.color[0] * math.sin(self.color[1]) * math.cos(self.color[2])
Lab[2] = self.color[0] * math.sin(self.color[1]) * math.sin(self.color[2])
Lab[0] = self.color[0] * np.cos(self.color[1])
Lab[1] = self.color[0] * np.sin(self.color[1]) * np.cos(self.color[2])
Lab[2] = self.color[0] * np.sin(self.color[1]) * np.sin(self.color[2])
converted = Color('CIELAB', Lab)
self.model = converted.model
@ -476,14 +474,14 @@ class Colormap():
if Msh_sat[0] >= Msh_unsat[0]:
return Msh_sat[2]
else:
hSpin = Msh_sat[1]/math.sin(Msh_sat[1])*math.sqrt(Msh_unsat[0]**2.0-Msh_sat[0]**2)/Msh_sat[0]
if Msh_sat[2] < - math.pi/3.0: hSpin *= -1.0
hSpin = Msh_sat[1]/np.sin(Msh_sat[1])*np.sqrt(Msh_unsat[0]**2.0-Msh_sat[0]**2)/Msh_sat[0]
if Msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
return Msh_sat[2] + hSpin
Msh1 = np.array(lo[:])
Msh2 = np.array(hi[:])
if (Msh1[1] > 0.05 and Msh2[1] > 0.05 and rad_diff(Msh1,Msh2) > math.pi/3.0):
if (Msh1[1] > 0.05 and Msh2[1] > 0.05 and rad_diff(Msh1,Msh2) > np.pi/3.0):
M_mid = max(Msh1[0],Msh2[0],88.0)
if frac < 0.5:
Msh2 = np.array([M_mid,0.0,0.0],'d')

View File

@ -1,7 +1,10 @@
from queue import Queue
import re
import glob
import os
import vtk
from vtk.util import numpy_support
import h5py
import numpy as np
@ -37,7 +40,7 @@ class DADF5():
self.version_major = f.attrs['DADF5-major']
self.version_minor = f.attrs['DADF5-minor']
if self.version_major != 0 or not 2 <= self.version_minor <= 4:
if self.version_major != 0 or not 2 <= self.version_minor <= 5:
raise TypeError('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version']))
self.structured = 'grid' in f['geometry'].attrs.keys()
@ -45,6 +48,9 @@ class DADF5():
if self.structured:
self.grid = f['geometry'].attrs['grid']
self.size = f['geometry'].attrs['size']
if self.version_major == 0 and self.version_minor >= 5:
self.origin = f['geometry'].attrs['origin']
r=re.compile('inc[0-9]+')
increments_unsorted = {int(i[3:]):i for i in f.keys() if r.match(i)}
@ -96,7 +102,7 @@ class DADF5():
elif datasets is False:
datasets = []
choice = [datasets] if isinstance(datasets,str) else datasets
valid = [e for e_ in [glob.fnmatch.filter(getattr(self,what),s) for s in choice] for e in e_]
existing = set(self.visible[what])
@ -333,8 +339,8 @@ class DADF5():
"""Return information on all active datasets in the file."""
message = ''
with h5py.File(self.fname,'r') as f:
for s,i in enumerate(self.iter_visible('increments')):
message+='\n{} ({}s)\n'.format(i,self.times[s])
for i in self.iter_visible('increments'):
message+='\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o):
message+=' {}\n'.format(oo)
@ -360,7 +366,7 @@ class DADF5():
f[k]
path.append(k)
except KeyError as e:
print('unable to locate geometry dataset: {}'.format(str(e)))
pass
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o):
for pp in self.iter_visible(p):
@ -369,7 +375,7 @@ class DADF5():
f[k]
path.append(k)
except KeyError as e:
print('unable to locate {} dataset: {}'.format(o,str(e)))
pass
return path
@ -433,7 +439,7 @@ class DADF5():
np.linspace(delta[1],self.size[1]-delta[1],self.grid[1]),
np.linspace(delta[0],self.size[0]-delta[0],self.grid[0]),
)
return np.concatenate((x[:,:,:,None],y[:,:,:,None],y[:,:,:,None]),axis = 3).reshape([np.product(self.grid),3])
return np.concatenate((x[:,:,:,None],y[:,:,:,None],z[:,:,:,None]),axis = 3).reshape([np.product(self.grid),3])
else:
with h5py.File(self.fname,'r') as f:
return f['geometry/x_c'][()]
@ -830,7 +836,7 @@ class DADF5():
N_not_calculated = len(todo)
while N_not_calculated > 0:
result = results.get()
with h5py.File(self.fname,'a') as f: # write to file
with h5py.File(self.fname,'a') as f: # write to file
dataset_out = f[result['group']].create_dataset(result['label'],data=result['data'])
for k in result['meta'].keys():
dataset_out.attrs[k] = result['meta'][k].encode()
@ -841,3 +847,142 @@ class DADF5():
N_added +=1
pool.wait_completion()
def to_vtk(self,labels,mode='Cell'):
"""
Export to vtk cell/point data.
Parameters
----------
labels : str or list of
Labels of the datasets to be exported.
mode : str, either 'Cell' or 'Point'
Export in cell format or point format.
Default value is 'Cell'.
"""
if mode=='Cell':
if self.structured:
coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
for dim in [0,1,2]:
for c in np.linspace(0,self.size[dim],1+self.grid[dim]):
coordArray[dim].InsertNextValue(c)
vtk_geom = vtk.vtkRectilinearGrid()
vtk_geom.SetDimensions(*(self.grid+1))
vtk_geom.SetXCoordinates(coordArray[0])
vtk_geom.SetYCoordinates(coordArray[1])
vtk_geom.SetZCoordinates(coordArray[2])
else:
nodes = vtk.vtkPoints()
with h5py.File(self.fname,'r') as f:
nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True))
vtk_geom = vtk.vtkUnstructuredGrid()
vtk_geom.SetPoints(nodes)
vtk_geom.Allocate(f['/geometry/T_c'].shape[0])
for i in f['/geometry/T_c']:
vtk_geom.InsertNextCell(vtk.VTK_HEXAHEDRON,8,i-1) # not for all elements!
elif mode == 'Point':
Points = vtk.vtkPoints()
Vertices = vtk.vtkCellArray()
for c in self.cell_coordinates():
pointID = Points.InsertNextPoint(c)
Vertices.InsertNextCell(1)
Vertices.InsertCellPoint(pointID)
vtk_geom = vtk.vtkPolyData()
vtk_geom.SetPoints(Points)
vtk_geom.SetVerts(Vertices)
vtk_geom.Modified()
N_digits = int(np.floor(np.log10(int(self.increments[-1][3:]))))+1
for i,inc in enumerate(self.iter_visible('increments')):
vtk_data = []
materialpoints_backup = self.visible['materialpoints'].copy()
self.set_visible('materialpoints',False)
for label in (labels if isinstance(labels,list) else [labels]):
for p in self.iter_visible('con_physics'):
if p != 'generic':
for c in self.iter_visible('constituents'):
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: hard coded 1!
vtk_geom.GetCellData().AddArray(vtk_data[-1])
else:
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
deep=True,array_type= vtk.VTK_DOUBLE))
ph_name = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))') # identify phase name
dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1]) # removing phase name
vtk_data[-1].SetName(dset_name)
vtk_geom.GetCellData().AddArray(vtk_data[-1])
self.set_visible('materialpoints',materialpoints_backup)
constituents_backup = self.visible['constituents'].copy()
self.set_visible('constituents',False)
for label in (labels if isinstance(labels,list) else [labels]):
for p in self.iter_visible('mat_physics'):
if p != 'generic':
for m in self.iter_visible('materialpoints'):
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: why 1_?
vtk_geom.GetCellData().AddArray(vtk_data[-1])
else:
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
deep=True,array_type= vtk.VTK_DOUBLE))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
vtk_geom.GetCellData().AddArray(vtk_data[-1])
self.set_visible('constituents',constituents_backup)
if mode=='Cell':
writer = vtk.vtkXMLRectilinearGridWriter() if self.structured else \
vtk.vtkXMLUnstructuredGridWriter()
x = self.get_dataset_location('u_n')
vtk_data.append(numpy_support.numpy_to_vtk(num_array=self.read_dataset(x,0),
deep=True,array_type=vtk.VTK_DOUBLE))
vtk_data[-1].SetName('u')
vtk_geom.GetPointData().AddArray(vtk_data[-1])
elif mode == 'Point':
writer = vtk.vtkXMLPolyDataWriter()
file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
inc[3:].zfill(N_digits),
writer.GetDefaultFileExtension())
writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
writer.SetFileName(file_out)
writer.SetInputData(vtk_geom)
writer.Write()

View File

@ -205,6 +205,9 @@ class Geom():
else:
self.homogenization = homogenization
@property
def grid(self):
return self.get_grid()
def get_microstructure(self):
"""Return the microstructure representation."""
@ -419,7 +422,7 @@ class Geom():
ext = os.path.splitext(fname)[1]
if ext == '':
name = fname + '.' + writer.GetDefaultFileExtension()
elif ext == writer.GetDefaultFileExtension():
elif ext[1:] == writer.GetDefaultFileExtension():
name = fname
else:
raise ValueError("unknown extension {}".format(ext))

View File

@ -0,0 +1,379 @@
from scipy import spatial
import numpy as np
def __ks(size,grid,first_order=False):
    """
    Get wave numbers operator.

    Frequencies follow the numpy.fft.rfftn layout: the last (i) axis is
    halved to grid[2]//2+1 entries (real-valued FFT), the other two run
    over positive then negative frequencies.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    grid : iterable of int
        number of grid points in each of the three directions.
    first_order : bool, optional
        zero out the Nyquist frequency on even grids, as required for
        first-order derivatives (Johnson, MIT, 2011). Defaults to False.

    """
    k_sk = np.where(np.arange(grid[0])>grid[0]//2,np.arange(grid[0])-grid[0],np.arange(grid[0]))/size[0]
    if grid[0]%2 == 0 and first_order: k_sk[grid[0]//2] = 0                                         # Nyquist freq=0 for even grid (Johnson, MIT, 2011)

    k_sj = np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1]))/size[1]
    if grid[1]%2 == 0 and first_order: k_sj[grid[1]//2] = 0                                         # Nyquist freq=0 for even grid (Johnson, MIT, 2011)

    k_si = np.arange(grid[2]//2+1)/size[2]                                                          # half range only: conjugate symmetry of rfftn

    kk, kj, ki = np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij')
    return np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3)
def curl(size,field):
    """
    Calculate curl of a vector or tensor field in Fourier space.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    field : numpy.ndarray
        periodic field with three leading grid dimensions and trailing
        shape (3,) for a vector field or (3,3) for a tensor field.

    """
    n = np.prod(field.shape[3:])                                                                    # 3: vector field, 9: tensor field
    k_s = __ks(size,field.shape[:3],True)                                                           # first_order=True: zero Nyquist for odd derivative

    e = np.zeros((3, 3, 3))
    e[0, 1, 2] = e[1, 2, 0] = e[2, 0, 1] = +1.0                                                     # Levi-Civita symbol
    e[0, 2, 1] = e[2, 1, 0] = e[1, 0, 2] = -1.0

    field_fourier = np.fft.rfftn(field,axes=(0,1,2))
    curl = (np.einsum('slm,ijkl,ijkm ->ijks', e,k_s,field_fourier)*2.0j*np.pi if n == 3 else        # vector, 3   -> 3
            np.einsum('slm,ijkl,ijknm->ijksn',e,k_s,field_fourier)*2.0j*np.pi)                      # tensor, 3x3 -> 3x3

    return np.fft.irfftn(curl,axes=(0,1,2),s=field.shape[:3])
def divergence(size,field):
    """
    Calculate divergence of a vector or tensor field in Fourier space.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    field : numpy.ndarray
        periodic field with three leading grid dimensions and trailing
        shape (3,) for a vector field or (3,3) for a tensor field.

    """
    n = np.prod(field.shape[3:])                                                                    # 3: vector field, 9: tensor field
    k_s = __ks(size,field.shape[:3],True)                                                           # first_order=True: zero Nyquist for odd derivative

    field_fourier = np.fft.rfftn(field,axes=(0,1,2))
    divergence = (np.einsum('ijkl,ijkl ->ijk', k_s,field_fourier)*2.0j*np.pi if n == 3 else         # vector, 3   -> 1
                  np.einsum('ijkm,ijklm->ijkl',k_s,field_fourier)*2.0j*np.pi)                       # tensor, 3x3 -> 3

    return np.fft.irfftn(divergence,axes=(0,1,2),s=field.shape[:3])
def gradient(size,field):
    """
    Calculate gradient of a vector or scalar field in Fourier space.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    field : numpy.ndarray
        periodic field with three leading grid dimensions and trailing
        shape (1,) for a scalar field or (3,) for a vector field.

    """
    n = np.prod(field.shape[3:])                                                                    # 1: scalar field, 3: vector field
    k_s = __ks(size,field.shape[:3],True)                                                           # first_order=True: zero Nyquist for odd derivative

    field_fourier = np.fft.rfftn(field,axes=(0,1,2))
    gradient = (np.einsum('ijkl,ijkm->ijkm', field_fourier,k_s)*2.0j*np.pi if n == 1 else           # scalar, 1 -> 3
                np.einsum('ijkl,ijkm->ijklm',field_fourier,k_s)*2.0j*np.pi)                         # vector, 3 -> 3x3

    return np.fft.irfftn(gradient,axes=(0,1,2),s=field.shape[:3])
def cell_coord0(grid,size,origin=np.zeros(3)):
    """
    Cell center positions (undeformed).

    The returned array has shape (grid[2],grid[1],grid[0],3); the last
    axis holds the (x,y,z) coordinate of each cell center.

    Parameters
    ----------
    grid : numpy.ndarray
        number of grid points.
    size : numpy.ndarray
        physical size of the periodic field.
    origin : numpy.ndarray, optional
        physical origin of the periodic field. Default is [0.0,0.0,0.0].

    """
    start = origin + size/grid*.5                                       # first cell center, half a cell in from the origin
    end   = origin - size/grid*.5 + size                                # last cell center

    # one coordinate axis per direction, assembled z-slow / x-fast
    axes = [np.linspace(start[d],end[d],grid[d]) for d in (2,1,0)]
    pos_z, pos_y, pos_x = np.meshgrid(*axes,indexing='ij')

    return np.stack((pos_x,pos_y,pos_z),axis=-1)
def cell_displacement_fluct(size,F):
    """
    Cell center displacement field from fluctuation part of the deformation gradient field.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.

    """
    integrator = 0.5j*size/np.pi                                        # literally i*size/(2*pi): per-direction integration factor

    k_s = __ks(size,F.shape[:3],False)
    k_s_squared = np.einsum('...l,...l',k_s,k_s)
    k_s_squared[0,0,0] = 1.0                                            # avoid 0/0: numerator vanishes there because k_s[0,0,0] == 0

    displacement = -np.einsum('ijkml,ijkl,l->ijkm',
                              np.fft.rfftn(F,axes=(0,1,2)),
                              k_s,
                              integrator,
                              ) / k_s_squared[...,np.newaxis]

    return np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])
def cell_displacement_avg(size,F):
    """
    Cell center displacement field from average part of the deformation gradient field.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.

    """
    grid  = F.shape[:3][::-1]                                           # array is stored z-slow / x-fast
    F_avg = np.average(F,axis=(0,1,2))                                  # volume average of F
    return np.einsum('ml,ijkl->ijkm',F_avg - np.eye(3),cell_coord0(grid,size))
def cell_displacement(size,F):
    """
    Cell center displacement field from deformation gradient field.

    Sum of the average and the fluctuation contributions.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.

    """
    u_avg   = cell_displacement_avg(size,F)
    u_fluct = cell_displacement_fluct(size,F)
    return u_avg + u_fluct
def cell_coord(size,F,origin=np.zeros(3)):
    """
    Cell center positions.

    Undeformed cell centers shifted by the displacement derived from F.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.
    origin : numpy.ndarray, optional
        physical origin of the periodic field. Default is [0.0,0.0,0.0].

    """
    undeformed = cell_coord0(F.shape[:3][::-1],size,origin)             # grid is F's array shape, reversed to (x,y,z)
    return undeformed + cell_displacement(size,F)
def cell_coord0_gridSizeOrigin(coord0,ordered=True):
    """
    Return grid 'DNA', i.e. grid, size, and origin from array of cell positions.

    Parameters
    ----------
    coord0 : numpy.ndarray
        array of undeformed cell coordinates.
    ordered : bool, optional
        expect coord0 data to be ordered (x fast, z slow).

    Raises
    ------
    ValueError
        if point count, spacing, or ordering is inconsistent with a regular grid.

    """
    coords    = [np.unique(coord0[:,i]) for i in range(3)]
    mincorner = np.array(list(map(min,coords)))
    maxcorner = np.array(list(map(max,coords)))
    grid      = np.array(list(map(len,coords)),'i')
    size      = grid/np.maximum(grid-1,1) * (maxcorner-mincorner)
    delta     = size/grid
    origin    = mincorner - delta*.5

    # 1D/2D: size/origin combination undefined, set origin to 0.0
    size  [np.where(grid==1)] = origin[np.where(grid==1)]*2.
    origin[np.where(grid==1)] = 0.0

    if grid.prod() != len(coord0):
        raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid))

    delta = size/grid                                                   # recompute: size of collapsed (grid==1) dimensions changed above
    start = origin + delta*.5
    end   = origin - delta*.5 + size

    # bug fix: 'not' must negate the conjunction of all three axis checks;
    # previously it applied only to the first np.allclose
    if not (np.allclose(coords[0],np.linspace(start[0],end[0],grid[0])) and \
            np.allclose(coords[1],np.linspace(start[1],end[1],grid[1])) and \
            np.allclose(coords[2],np.linspace(start[2],end[2],grid[2]))):
        raise ValueError('Regular grid spacing violated.')

    if ordered and not np.allclose(coord0.reshape(tuple(grid[::-1])+(3,)),cell_coord0(grid,size,origin)):
        raise ValueError('Input data is not a regular grid.')

    return (grid,size,origin)
def coord0_check(coord0):
    """
    Check whether coordinates lie on a regular grid.

    Delegates to cell_coord0_gridSizeOrigin, which raises a ValueError on
    invalid input; the computed grid/size/origin result is discarded.

    Parameters
    ----------
    coord0 : numpy.ndarray
        array of undeformed cell coordinates.

    """
    cell_coord0_gridSizeOrigin(coord0,ordered=True)
def node_coord0(grid,size,origin=np.zeros(3)):
    """
    Nodal positions (undeformed).

    The returned array has shape (grid[2]+1,grid[1]+1,grid[0]+1,3); the
    last axis holds the (x,y,z) coordinate of each node.

    Parameters
    ----------
    grid : numpy.ndarray
        number of grid points.
    size : numpy.ndarray
        physical size of the periodic field.
    origin : numpy.ndarray, optional
        physical origin of the periodic field. Default is [0.0,0.0,0.0].

    """
    # grid[d] cells give 1+grid[d] node planes per direction, z-slow / x-fast
    axes = [np.linspace(origin[d],size[d]+origin[d],1+grid[d]) for d in (2,1,0)]
    pos_z, pos_y, pos_x = np.meshgrid(*axes,indexing='ij')

    return np.stack((pos_x,pos_y,pos_z),axis=-1)
def node_displacement_fluct(size,F):
    """
    Nodal displacement field from fluctuation part of the deformation gradient field.

    Cell-centered fluctuations are interpolated to the nodes.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.

    """
    u_fluct_cell = cell_displacement_fluct(size,F)
    return cell_2_node(u_fluct_cell)
def node_displacement_avg(size,F):
    """
    Nodal displacement field from average part of the deformation gradient field.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.

    """
    grid  = F.shape[:3][::-1]                                           # array is stored z-slow / x-fast
    F_avg = np.average(F,axis=(0,1,2))                                  # volume average of F
    return np.einsum('ml,ijkl->ijkm',F_avg - np.eye(3),node_coord0(grid,size))
def node_displacement(size,F):
    """
    Nodal displacement field from deformation gradient field.

    Sum of the average and the fluctuation contributions.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.

    """
    u_avg   = node_displacement_avg(size,F)
    u_fluct = node_displacement_fluct(size,F)
    return u_avg + u_fluct
def node_coord(size,F,origin=np.zeros(3)):
    """
    Nodal positions.

    Undeformed nodes shifted by the displacement derived from F.

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.
    origin : numpy.ndarray, optional
        physical origin of the periodic field. Default is [0.0,0.0,0.0].

    """
    undeformed = node_coord0(F.shape[:3][::-1],size,origin)             # grid is F's array shape, reversed to (x,y,z)
    return undeformed + node_displacement(size,F)
def cell_2_node(cell_data):
    """Interpolate periodic cell data to nodal data."""
    # each node averages its eight adjacent cells (periodic wrap-around)
    n = cell_data.copy()
    for ax in [(0,1,2),(0,),(1,),(2,),(0,1),(1,2),(2,0)]:
        n = n + np.roll(cell_data,1,ax)
    n = n*0.125

    pad_width = ((0,1),(0,1),(0,1)) + ((0,0),)*(cell_data.ndim-3)       # replicate the first node plane at the far end
    return np.pad(n,pad_width,mode='wrap')
def node_2_cell(node_data):
    """Interpolate periodic nodal data to cell data."""
    # each cell averages eight surrounding nodes (periodic wrap-around)
    c = node_data.copy()
    for ax in [(0,1,2),(0,),(1,),(2,),(0,1),(1,2),(2,0)]:
        c = c + np.roll(node_data,1,ax)
    c = c*0.125

    return c[:-1,:-1,:-1]                                               # drop the duplicated far-end node plane
def node_coord0_gridSizeOrigin(coord0,ordered=False):
    """
    Return grid 'DNA', i.e. grid, size, and origin from array of nodal positions.

    Parameters
    ----------
    coord0 : numpy.ndarray
        array of undeformed nodal coordinates.
    ordered : bool, optional
        expect coord0 data to be ordered (x fast, z slow).

    Raises
    ------
    ValueError
        if the point count does not match the deduced grid, if the spacing
        along any axis is irregular, or (for ordered data) if the points are
        not laid out as a regular x-fast grid.

    """
    coords    = [np.unique(coord0[:,i]) for i in range(3)]
    mincorner = np.array(list(map(min,coords)))
    maxcorner = np.array(list(map(max,coords)))
    grid      = np.array(list(map(len,coords)),'i') - 1                # nodes per axis minus one
    size      = maxcorner-mincorner
    origin    = mincorner

    if (grid+1).prod() != len(coord0):
        raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid))

    # BUG FIX: previously `not` bound only to the first allclose, so irregular
    # spacing along y or z went undetected. All three axes must be regular.
    if not (np.allclose(coords[0],np.linspace(mincorner[0],maxcorner[0],grid[0]+1)) and
            np.allclose(coords[1],np.linspace(mincorner[1],maxcorner[1],grid[1]+1)) and
            np.allclose(coords[2],np.linspace(mincorner[2],maxcorner[2],grid[2]+1))):
        raise ValueError('Regular grid spacing violated.')

    if ordered and not np.allclose(coord0.reshape(tuple((grid+1)[::-1])+(3,)),node_coord0(grid,size,origin)):
        raise ValueError('Input data is not a regular grid.')

    return (grid,size,origin)
def regrid(size,F,new_grid):
    """
    Map a new regular grid onto the deformed configuration.

    Cell centers are deformed according to F, wrapped back into the
    periodic box spanned by the average deformation, and for every cell
    center of the new grid the index of the closest deformed point is
    returned (nearest-neighbor query via a periodic k-d tree).

    Parameters
    ----------
    size : numpy.ndarray
        physical size of the periodic field.
    F : numpy.ndarray
        deformation gradient field.
    new_grid : numpy.ndarray
        grid (cells per direction) of the new, regular sampling.

    """
    c = cell_coord0(F.shape[:3][::-1],size) \
      + cell_displacement_avg(size,F) \
      + cell_displacement_fluct(size,F)

    outer = np.dot(np.average(F,axis=(0,1,2)),size)                    # edge vectors of the average-deformed box

    # BUG FIX: previously `c[np.where(c[:,:,:,d]<0)] += outer[d]` selected whole
    # points (shape (N,3)) and shifted ALL three components by outer[d];
    # only component d must be wrapped back into [0,outer[d]].
    for d in range(3):
        c[c[:,:,:,d] < 0       ,d] += outer[d]
        c[c[:,:,:,d] > outer[d],d] -= outer[d]

    tree = spatial.cKDTree(c.reshape((-1,3)),boxsize=outer)            # periodic nearest-neighbor search
    return tree.query(cell_coord0(new_grid,outer))[1]

View File

@ -58,10 +58,10 @@ def strain_tensor(F,t,m):
"""
F_ = F.reshape((1,3,3)) if F.shape == (3,3) else F
if t == 'U':
if t == 'V':
B = np.matmul(F_,transpose(F_))
w,n = np.linalg.eigh(B)
elif t == 'V':
elif t == 'U':
C = np.matmul(transpose(F_),F_)
w,n = np.linalg.eigh(C)
@ -92,21 +92,27 @@ def deviatoric_part(x):
x - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[x.shape[0],3,3]),spherical_part(x))
def spherical_part(x):
def spherical_part(x,tensor=False):
"""
Return spherical (hydrostatic) part of a tensor.
A single scalar is returned, i.e. the hydrostatic part is not mapped on the 3rd order identity
matrix.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the hydrostatic part is computed.
tensor : bool, optional
Map spherical part onto identity tensor. Default is false
"""
return np.trace(x)/3.0 if np.shape(x) == (3,3) else \
np.trace(x,axis1=1,axis2=2)/3.0
if x.shape == (3,3):
sph = np.trace(x)/3.0
return sph if not tensor else np.eye(3)*sph
else:
sph = np.trace(x,axis1=1,axis2=2)/3.0
if not tensor:
return sph
else:
return np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),(x.shape[0],3,3)),sph)
def Mises_stress(sigma):

View File

@ -170,9 +170,18 @@ class Rotation:
################################################################################################
# convert to different orientation representations (numpy arrays)
def asQuaternion(self):
"""Unit quaternion: (q, p_1, p_2, p_3)."""
return self.quaternion.asArray()
def asQuaternion(self,
quaternion = False):
"""
Unit quaternion [q, p_1, p_2, p_3] unless quaternion == True: damask.quaternion object.
Parameters
----------
quaternion : bool, optional
return quaternion as DAMASK object.
"""
return self.quaternion if quaternion else self.quaternion.asArray()
def asEulers(self,
degrees = False):
@ -190,33 +199,36 @@ class Rotation:
return eu
def asAxisAngle(self,
degrees = False):
degrees = False,
pair = False):
"""
Axis angle pair: ([n_1, n_2, n_3], ω).
Axis angle representation [n_1, n_2, n_3, ω] unless pair == True: ([n_1, n_2, n_3], ω).
Parameters
----------
degrees : bool, optional
return rotation angle in degrees.
pair : bool, optional
return tuple of axis and angle.
"""
ax = qu2ax(self.quaternion.asArray())
if degrees: ax[3] = np.degrees(ax[3])
return ax
return (ax[:3],np.degrees(ax[3])) if pair else ax
def asMatrix(self):
"""Rotation matrix."""
return qu2om(self.quaternion.asArray())
def asRodrigues(self,
vector=False):
vector = False):
"""
Rodrigues-Frank vector: ([n_1, n_2, n_3], tan(ω/2)).
Rodrigues-Frank vector representation [n_1, n_2, n_3, tan(ω/2)] unless vector == True: [n_1, n_2, n_3] * tan(ω/2).
Parameters
----------
vector : bool, optional
return as array of length 3, i.e. scale the unit vector giving the rotation axis.
return as actual Rodrigues--Frank vector, i.e. rotation axis scaled by tan(ω/2).
"""
ro = qu2ro(self.quaternion.asArray())
@ -251,8 +263,8 @@ class Rotation:
acceptHomomorph = False,
P = -1):
qu = quaternion if isinstance(quaternion, np.ndarray) and quaternion.dtype == np.dtype(float) \
else np.array(quaternion,dtype=float)
qu = quaternion if isinstance(quaternion, np.ndarray) and quaternion.dtype == np.dtype(float) \
else np.array(quaternion,dtype=float)
if P > 0: qu[1:4] *= -1 # convert from P=1 to P=-1
if qu[0] < 0.0:
if acceptHomomorph:
@ -692,14 +704,14 @@ class Symmetry:
v = np.array(vector,dtype=float)
if proper: # check both improper ...
theComponents = np.dot(basis['improper'],v)
theComponents = np.around(np.dot(basis['improper'],v),12)
inSST = np.all(theComponents >= 0.0)
if not inSST: # ... and proper SST
theComponents = np.dot(basis['proper'],v)
theComponents = np.around(np.dot(basis['proper'],v),12)
inSST = np.all(theComponents >= 0.0)
else:
v[2] = abs(v[2]) # z component projects identical
theComponents = np.dot(basis['improper'],v) # for positive and negative values
theComponents = np.around(np.dot(basis['improper'],v),12) # for positive and negative values
inSST = np.all(theComponents >= 0.0)
if color: # have to return color array
@ -866,7 +878,7 @@ class Lattice:
[[ 17, 12, 5],[ 17, 7, 17]],
[[ 5, 17, 12],[ 17, 17, 7]],
[[ 12, -5,-17],[ 7,-17,-17]],
[[-17,-12, 5],[-17, 7, 17]]],dtype='float')}
[[-17,-12, 5],[-17,-7, 17]]],dtype='float')}
# Greninger--Troiano' orientation relationship for fcc <-> bcc transformation
# from Y. He et al., Journal of Applied Crystallography 39:72-81, 2006
@ -892,7 +904,7 @@ class Lattice:
[[-17,-17, 7],[-17, -5, 12]],
[[ 7,-17,-17],[ 12,-17, -5]],
[[ 17, -7,-17],[ 5, -12,-17]],
[[ 17,-17, 7],[ 17, -5,-12]],
[[ 17,-17, -7],[ 17, -5,-12]],
[[ -7, 17,-17],[-12, 17, -5]],
[[-17, 7,-17],[ -5, 12,-17]],
[[-17, 17, -7],[-17, 5,-12]]],dtype='float'),
@ -948,7 +960,7 @@ class Lattice:
[[ 2, 1, -1],[ 0, -1, 1]],
[[ -1, -2, -1],[ 0, -1, 1]],
[[ -1, 1, 2],[ 0, -1, 1]],
[[ -1, 2, 1],[ 0, -1, 1]],
[[ 2, -1, 1],[ 0, -1, 1]], #It is wrong in the paper, but matrix is correct
[[ -1, 2, 1],[ 0, -1, 1]],
[[ -1, -1, -2],[ 0, -1, 1]]],dtype='float')}
@ -1016,7 +1028,7 @@ class Lattice:
https://doi.org/10.1016/j.actamat.2004.11.021
"""
models={'KS':self.KS, 'GT':self.GT, "GT'":self.GTprime,
models={'KS':self.KS, 'GT':self.GT, 'GT_prime':self.GTprime,
'NW':self.NW, 'Pitsch': self.Pitsch, 'Bain':self.Bain}
try:
relationship = models[model]
@ -1037,13 +1049,13 @@ class Lattice:
for miller in np.hstack((relationship['planes'],relationship['directions'])):
myPlane = miller[myPlane_id]/ np.linalg.norm(miller[myPlane_id])
myDir = miller[myDir_id]/ np.linalg.norm(miller[myDir_id])
myMatrix = np.array([myDir,np.cross(myPlane,myDir),myPlane]).T
myMatrix = np.array([myDir,np.cross(myPlane,myDir),myPlane])
otherPlane = miller[otherPlane_id]/ np.linalg.norm(miller[otherPlane_id])
otherDir = miller[otherDir_id]/ np.linalg.norm(miller[otherDir_id])
otherMatrix = np.array([otherDir,np.cross(otherPlane,otherDir),otherPlane]).T
otherMatrix = np.array([otherDir,np.cross(otherPlane,otherDir),otherPlane])
r['rotations'].append(Rotation.fromMatrix(np.dot(otherMatrix,myMatrix.T)))
r['rotations'].append(Rotation.fromMatrix(np.dot(otherMatrix.T,myMatrix)))
return r
@ -1117,10 +1129,9 @@ class Orientation:
return (Orientation(r,self.lattice), i,j, k == 1) if symmetries else r # disorientation ...
# ... own sym, other sym,
# self-->other: True, self<--other: False
def inFZ(self):
return self.lattice.symmetry.inFZ(self.rotation.asRodrigues(vector=True))
def equivalentOrientations(self,members=[]):
"""List of orientations which are symmetrically equivalent."""
@ -1135,7 +1146,8 @@ class Orientation:
def relatedOrientations(self,model):
"""List of orientations related by the given orientation relationship."""
r = self.lattice.relationOperations(model)
return [self.__class__(self.rotation*o,r['lattice']) for o in r['rotations']]
return [self.__class__(o*self.rotation,r['lattice']) for o in r['rotations']]
def reduced(self):
"""Transform orientation to fall into fundamental zone according to symmetry."""
@ -1143,7 +1155,8 @@ class Orientation:
if self.lattice.symmetry.inFZ(me.rotation.asRodrigues(vector=True)): break
return self.__class__(me.rotation,self.lattice)
def inversePole(self,
axis,
proper = False,
@ -1182,9 +1195,9 @@ class Orientation:
ref = orientations[0]
for o in orientations:
closest.append(o.equivalentOrientations(
ref.disorientation(o,
SST = False, # select (o[ther]'s) sym orientation
symmetries = True)[2]).rotation) # with lowest misorientation
ref.disorientation(o,
SST = False, # select (o[ther]'s) sym orientation
symmetries = True)[2]).rotation) # with lowest misorientation
return Orientation(Rotation.fromAverage(closest,weights),ref.lattice)

View File

@ -3,6 +3,8 @@ import re
import pandas as pd
import numpy as np
from . import version
class Table():
"""Store spreadsheet-like data."""
@ -69,12 +71,14 @@ class Table():
f = open(fname)
except TypeError:
f = fname
f.seek(0)
header,keyword = f.readline().split()
if keyword == 'header':
header = int(header)
else:
raise Exception
raise TypeError
comments = [f.readline()[:-1] for i in range(1,header)]
labels = f.readline().split()
@ -95,9 +99,52 @@ class Table():
return Table(data,shapes,comments)
@staticmethod
def from_ang(fname):
"""
Create table from TSL ang file.
A valid TSL ang file needs to contains the following columns:
* Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
* Spatial position in meters, 2 floats, label 'pos'.
* Image quality, 1 float, label 'IQ'.
* Confidence index, 1 float, label 'CI'.
* Phase ID, 1 int, label 'ID'.
* SEM signal, 1 float, label 'intensity'.
* Fit, 1 float, label 'fit'.
Parameters
----------
fname : file, str, or pathlib.Path
Filename or file for reading.
"""
shapes = {'eu':(3,), 'pos':(2,),
'IQ':(1,), 'CI':(1,), 'ID':(1,), 'intensity':(1,), 'fit':(1,)}
try:
f = open(fname)
except TypeError:
f = fname
f.seek(0)
content = f.readlines()
comments = ['table.py:from_ang v {}'.format(version)]
for line in content:
if line.startswith('#'):
comments.append(line.strip())
else:
break
data = np.loadtxt(content)
for c in range(data.shape[1]-10):
shapes['n/a_{}'.format(c+1)] = (1,)
return Table(data,shapes,comments)
@property
def labels(self):
"""Return the labels of all columns."""
return list(self.shapes.keys())
@ -203,17 +250,17 @@ class Table():
'' if info is None else ': {}'.format(info),
))
self.shapes[label_new] = self.shapes.pop(label_old)
self.shapes = {(label if label != label_old else label_new):self.shapes[label] for label in self.shapes}
def sort_by(self,labels,ascending=True):
"""
Get column data.
Sort table by values of given labels.
Parameters
----------
label : str or list
Column labels.
Column labels for sorting.
ascending : bool or list, optional
Set sort order.
@ -224,6 +271,44 @@ class Table():
self.comments.append('sorted by [{}]'.format(', '.join(labels)))
def append(self,other):
"""
Append other table vertically (similar to numpy.vstack).
Requires matching labels/shapes and order.
Parameters
----------
other : Table
Table to append
"""
if self.shapes != other.shapes or not self.data.columns.equals(other.data.columns):
raise KeyError('Labels or shapes or order do not match')
else:
self.data = self.data.append(other.data,ignore_index=True)
def join(self,other):
"""
Append other table horizontally (similar to numpy.hstack).
Requires matching number of rows and no common labels.
Parameters
----------
other : Table
Table to join
"""
if set(self.shapes) & set(other.shapes) or self.data.shape[0] != other.data.shape[0]:
raise KeyError('Dublicated keys or row count mismatch')
else:
self.data = self.data.join(other.data)
for key in other.shapes:
self.shapes[key] = other.shapes[key]
def to_ASCII(self,fname):
"""
Store as plain text file.
@ -234,8 +319,9 @@ class Table():
Filename or file for reading.
"""
seen = set()
labels = []
for l in self.shapes:
for l in [x for x in self.data.columns if not (x in seen or seen.add(x))]:
if(self.shapes[l] == (1,)):
labels.append('{}'.format(l))
elif(len(self.shapes[l]) == 1):

View File

@ -7,9 +7,6 @@ from optparse import Option
from queue import Queue
from threading import Thread
import numpy as np
class bcolors:
"""
ASCII Colors (Blender code).
@ -64,19 +61,6 @@ def report(who = None,
croak( (emph(who)+': ' if who is not None else '') + (what if what is not None else '') + '\n' )
# -----------------------------
def report_geom(info,
what = ['grid','size','origin','homogenization','microstructures']):
"""Reports (selected) geometry information."""
output = {
'grid' : 'grid a b c: {}'.format(' x '.join(list(map(str,info['grid' ])))),
'size' : 'size x y z: {}'.format(' x '.join(list(map(str,info['size' ])))),
'origin' : 'origin x y z: {}'.format(' : '.join(list(map(str,info['origin'])))),
'homogenization' : 'homogenization: {}'.format(info['homogenization']),
'microstructures' : 'microstructures: {}'.format(info['microstructures']),
}
for item in what: croak(output[item.lower()])
# -----------------------------
def emph(what):
"""Formats string with emphasis."""
@ -119,30 +103,6 @@ def execute(cmd,
if process.returncode != 0: raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode))
return out,error
def coordGridAndSize(coordinates):
"""Determines grid count and overall physical size along each dimension of an ordered array of coordinates."""
dim = coordinates.shape[1]
coords = [np.unique(coordinates[:,i]) for i in range(dim)]
mincorner = np.array(list(map(min,coords)))
maxcorner = np.array(list(map(max,coords)))
grid = np.array(list(map(len,coords)),'i')
size = grid/np.maximum(np.ones(dim,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones
delta = size/grid
N = grid.prod()
if N != len(coordinates):
raise ValueError('Data count {} does not match grid {}.'.format(len(coordinates),' x '.join(map(repr,grid))))
if np.any(np.abs(np.log10((coords[0][1:]-coords[0][:-1])/delta[0])) > 0.01) \
or np.any(np.abs(np.log10((coords[1][1:]-coords[1][:-1])/delta[1])) > 0.01):
raise ValueError('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
if dim==3 and np.any(np.abs(np.log10((coords[2][1:]-coords[2][:-1])/delta[2])) > 0.01):
raise ValueError('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
return grid,size
# -----------------------------
class extendableOption(Option):
"""
@ -221,263 +181,6 @@ class return_message():
return srepr(self.message)
def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,
col_deriv=0, ftol=1.49012e-8, xtol=1.49012e-8,
gtol=0.0, maxfev=0, epsfcn=None, factor=100, diag=None):
from scipy.optimize import _minpack
"""
Non-linear least square fitting (Levenberg-Marquardt method) with
bounded parameters.
the codes of transformation between int <-> ext refers to the work of
Jonathan J. Helmus: https://github.com/jjhelmus/leastsqbound-scipy
other codes refer to the source code of minpack.py:
An internal parameter list is used to enforce contraints on the fitting
parameters. The transfomation is based on that of MINUIT package.
please see: F. James and M. Winkler. MINUIT User's Guide, 2004.
bounds : list
(min, max) pairs for each parameter, use None for 'min' or 'max'
when there is no bound in that direction.
For example: if there are two parameters needed to be fitting, then
bounds is [(min1,max1), (min2,max2)]
This function is based on 'leastsq' of minpack.py, the annotation of
other parameters can be found in 'least_squares.py'.
"""
def _check_func(checker, argname, thefunc, x0, args, numinputs,
output_shape=None):
from numpy import shape
"""The same as that of minpack.py"""
res = np.atleast_1d(thefunc(*((x0[:numinputs],) + args)))
if (output_shape is not None) and (shape(res) != output_shape):
if (output_shape[0] != 1):
if len(output_shape) > 1:
if output_shape[1] == 1:
return shape(res)
msg = "%s: there is a mismatch between the input and output " \
"shape of the '%s' argument" % (checker, argname)
func_name = getattr(thefunc, '__name__', None)
if func_name:
msg += " '%s'." % func_name
else:
msg += "."
raise TypeError(msg)
if np.issubdtype(res.dtype, np.inexact):
dt = res.dtype
else:
dt = dtype(float)
return shape(res), dt
def _int2extGrad(p_int, bounds):
"""Calculate the gradients of transforming the internal (unconstrained) to external (constrained) parameter."""
grad = np.empty_like(p_int)
for i, (x, bound) in enumerate(zip(p_int, bounds)):
lower, upper = bound
if lower is None and upper is None: # No constraints
grad[i] = 1.0
elif upper is None: # only lower bound
grad[i] = x/np.sqrt(x*x + 1.0)
elif lower is None: # only upper bound
grad[i] = -x/np.sqrt(x*x + 1.0)
else: # lower and upper bounds
grad[i] = (upper - lower)*np.cos(x)/2.0
return grad
def _int2extFunc(bounds):
"""Transform internal parameters into external parameters."""
local = [_int2extLocal(b) for b in bounds]
def _transform_i2e(p_int):
p_ext = np.empty_like(p_int)
p_ext[:] = [i(j) for i, j in zip(local, p_int)]
return p_ext
return _transform_i2e
def _ext2intFunc(bounds):
"""Transform external parameters into internal parameters."""
local = [_ext2intLocal(b) for b in bounds]
def _transform_e2i(p_ext):
p_int = np.empty_like(p_ext)
p_int[:] = [i(j) for i, j in zip(local, p_ext)]
return p_int
return _transform_e2i
def _int2extLocal(bound):
"""Transform a single internal parameter to an external parameter."""
lower, upper = bound
if lower is None and upper is None: # no constraints
return lambda x: x
elif upper is None: # only lower bound
return lambda x: lower - 1.0 + np.sqrt(x*x + 1.0)
elif lower is None: # only upper bound
return lambda x: upper + 1.0 - np.sqrt(x*x + 1.0)
else:
return lambda x: lower + ((upper - lower)/2.0)*(np.sin(x) + 1.0)
def _ext2intLocal(bound):
"""Transform a single external parameter to an internal parameter."""
lower, upper = bound
if lower is None and upper is None: # no constraints
return lambda x: x
elif upper is None: # only lower bound
return lambda x: np.sqrt((x - lower + 1.0)**2 - 1.0)
elif lower is None: # only upper bound
return lambda x: np.sqrt((x - upper - 1.0)**2 - 1.0)
else:
return lambda x: np.arcsin((2.0*(x - lower)/(upper - lower)) - 1.0)
i2e = _int2extFunc(bounds)
e2i = _ext2intFunc(bounds)
x0 = np.asarray(x0).flatten()
n = len(x0)
if len(bounds) != n:
raise ValueError('the length of bounds is inconsistent with the number of parameters ')
if not isinstance(args, tuple):
args = (args,)
shape, dtype = _check_func('leastsq', 'func', func, x0, args, n)
m = shape[0]
if n > m:
raise TypeError('Improper input: N=%s must not exceed M=%s' % (n, m))
if epsfcn is None:
epsfcn = np.finfo(dtype).eps
def funcWarp(x, *args):
return func(i2e(x), *args)
xi0 = e2i(x0)
if Dfun is None:
if maxfev == 0:
maxfev = 200*(n + 1)
retval = _minpack._lmdif(funcWarp, xi0, args, full_output, ftol, xtol,
gtol, maxfev, epsfcn, factor, diag)
else:
if col_deriv:
_check_func('leastsq', 'Dfun', Dfun, x0, args, n, (n, m))
else:
_check_func('leastsq', 'Dfun', Dfun, x0, args, n, (m, n))
if maxfev == 0:
maxfev = 100*(n + 1)
def DfunWarp(x, *args):
return Dfun(i2e(x), *args)
retval = _minpack._lmder(funcWarp, DfunWarp, xi0, args, full_output, col_deriv,
ftol, xtol, gtol, maxfev, factor, diag)
errors = {0: ["Improper input parameters.", TypeError],
1: ["Both actual and predicted relative reductions "
"in the sum of squares\n are at most %f" % ftol, None],
2: ["The relative error between two consecutive "
"iterates is at most %f" % xtol, None],
3: ["Both actual and predicted relative reductions in "
"the sum of squares\n are at most %f and the "
"relative error between two consecutive "
"iterates is at \n most %f" % (ftol, xtol), None],
4: ["The cosine of the angle between func(x) and any "
"column of the\n Jacobian is at most %f in "
"absolute value" % gtol, None],
5: ["Number of calls to function has reached "
"maxfev = %d." % maxfev, ValueError],
6: ["ftol=%f is too small, no further reduction "
"in the sum of squares\n is possible.""" % ftol,
ValueError],
7: ["xtol=%f is too small, no further improvement in "
"the approximate\n solution is possible." % xtol,
ValueError],
8: ["gtol=%f is too small, func(x) is orthogonal to the "
"columns of\n the Jacobian to machine "
"precision." % gtol, ValueError],
'unknown': ["Unknown error.", TypeError]}
info = retval[-1] # The FORTRAN return value
if info not in [1, 2, 3, 4] and not full_output:
if info in [5, 6, 7, 8]:
np.warnings.warn(errors[info][0], RuntimeWarning)
else:
try:
raise errors[info][1](errors[info][0])
except KeyError:
raise errors['unknown'][1](errors['unknown'][0])
mesg = errors[info][0]
x = i2e(retval[0])
if full_output:
grad = _int2extGrad(retval[0], bounds)
retval[1]['fjac'] = (retval[1]['fjac'].T / np.take(grad,
retval[1]['ipvt'] - 1)).T
cov_x = None
if info in [1, 2, 3, 4]:
from numpy.dual import inv
from numpy.linalg import LinAlgError
perm = np.take(np.eye(n), retval[1]['ipvt'] - 1, 0)
r = np.triu(np.transpose(retval[1]['fjac'])[:n, :])
R = np.dot(r, perm)
try:
cov_x = inv(np.dot(np.transpose(R), R))
except LinAlgError as inverror:
print(inverror)
pass
return (x, cov_x) + retval[1:-1] + (mesg, info)
else:
return (x, info)
def _general_function(params, ydata, xdata, function):
return function(xdata, *params) - ydata
def _weighted_general_function(params, ydata, xdata, function, weights):
return (function(xdata, *params) - ydata)*weights
def curve_fit_bound(f, xdata, ydata, p0=None, sigma=None, bounds=None, **kw):
"""Similar as 'curve_fit' in minpack.py."""
if p0 is None:
# determine number of parameters by inspecting the function
import inspect
args, varargs, varkw, defaults = inspect.getargspec(f)
if len(args) < 2:
msg = "Unable to determine number of fit parameters."
raise ValueError(msg)
if 'self' in args:
p0 = [1.0] * (len(args)-2)
else:
p0 = [1.0] * (len(args)-1)
if np.isscalar(p0):
p0 = np.array([p0])
args = (ydata, xdata, f)
if sigma is None:
func = _general_function
else:
func = _weighted_general_function
args += (1.0/np.asarray(sigma),)
return_full = kw.pop('full_output', False)
res = leastsqBound(func, p0, args=args, bounds = bounds, full_output=True, **kw)
(popt, pcov, infodict, errmsg, ier) = res
if ier not in [1, 2, 3, 4]:
msg = "Optimal parameters not found: " + errmsg
raise RuntimeError(msg)
if (len(ydata) > len(p0)) and pcov is not None:
s_sq = (func(popt, *args)**2).sum()/(len(ydata)-len(p0))
pcov = pcov * s_sq
else:
pcov = np.inf
return (popt, pcov, infodict, errmsg, ier) if return_full else (popt, pcov)
class ThreadPool:
"""Pool of threads consuming tasks from a queue."""

View File

@ -15,6 +15,7 @@ setuptools.setup(
packages=setuptools.find_packages(),
include_package_data=True,
install_requires = [
"pandas",
"scipy",
"h5py",
"vtk"

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,38 @@
% Plot pole figures for all orientation-relationship models.
% Reads the exported Euler-angle tables ('<lattice>_<model>.txt') and writes
% one PDF per group of three models. Start MTEX first in Matlab.
tmp = matlab.desktop.editor.getActive;
cd(fileparts(tmp.Filename));
% cubic (m-3m) crystal symmetry, labeled 'Iron'
symmetry = {crystalSymmetry('m-3m', [1 1 1], 'mineral', 'Iron', 'color', 'light blue')}
% plotting convention
setMTEXpref('xAxisDirection','north');
setMTEXpref('zAxisDirection','outOfPlane');
lattice_types = {'BCC','FCC'};
models = {'Bain','GT','GT_prime','KS','NW','Pitsch'};
% rotation interpretation differs per lattice data set
% (NOTE(review): presumably matches the convention used when exporting — confirm)
rotation = containers.Map;
rotation('BCC') = 'Passive Rotation';
rotation('FCC') = 'Active Rotation';
% three models share one figure: p selects models{3p+1..3p+3}
for lattice = lattice_types
for p = 0:length(models)/3-1
EBSD_data = {loadEBSD(strcat(lattice,'_',models{p*3+1},'.txt'),symmetry,'interface','generic',...
'ColumnNames', { 'phi1' 'Phi' 'phi2' 'x' 'y'}, 'Bunge', rotation(char(lattice))),
loadEBSD(strcat(lattice,'_',models{p*3+2},'.txt'),symmetry,'interface','generic',...
'ColumnNames', { 'phi1' 'Phi' 'phi2' 'x' 'y'}, 'Bunge', rotation(char(lattice))),
loadEBSD(strcat(lattice,'_',models{p*3+3},'.txt'),symmetry,'interface','generic',...
'ColumnNames', { 'phi1' 'Phi' 'phi2' 'x' 'y'}, 'Bunge', rotation(char(lattice)))}
h = [Miller(1,0,0,symmetry{1}),Miller(1,1,0,symmetry{1}),Miller(1,1,1,symmetry{1})]; % 3 pole figures
% overlay the three models in red/blue/green with decreasing marker size
plotPDF(EBSD_data{1}.orientations,h,'MarkerSize',5,'MarkerColor','r','DisplayName',models{p*3+1})
hold on
plotPDF(EBSD_data{2}.orientations,h,'MarkerSize',4,'MarkerColor','b','DisplayName',models{p*3+2})
plotPDF(EBSD_data{3}.orientations,h,'MarkerSize',3,'MarkerColor','g','DisplayName',models{p*3+3})
legend('show','location','southoutside','Interpreter', 'none')
orient('landscape')
% one PDF per group: '<group>_<lattice>.pdf'
print('-bestfit',strcat(int2str(p+1),'_',char(lattice),'.pdf'),'-dpdf')
close
end
end

View File

@ -0,0 +1,5 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
0.0 45.00000000000001 0.0 1 1
90.0 45.00000000000001 270.0 1 2
45.00000000000001 0.0 0.0 1 3

View File

@ -0,0 +1,26 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
283.60440567265294 9.976439066337804 33.24637065555936 1 1
167.8261034151001 43.397849654402556 183.40022280897963 1 2
262.1156357053931 43.82007387041961 104.07478363123654 1 3
103.604405672653 9.976439066337804 213.24637065555936 1 4
347.8261034151001 43.39784965440255 3.400222808979685 1 5
82.11563570539313 43.82007387041961 284.0747836312365 1 6
76.39559432734703 9.976439066337806 326.75362934444064 1 7
192.17389658489986 43.397849654402556 176.59977719102034 1 8
97.88436429460687 43.82007387041961 255.92521636876344 1 9
256.395594327347 9.976439066337804 146.75362934444064 1 10
12.173896584899929 43.39784965440254 356.59977719102034 1 11
277.8843642946069 43.82007387041961 75.92521636876346 1 12
102.17389658489992 43.39784965440254 266.59977719102034 1 13
346.395594327347 9.976439066337804 56.75362934444064 1 14
7.884364294606862 43.82007387041961 345.9252163687635 1 15
282.17389658489986 43.39784965440254 86.59977719102032 1 16
166.39559432734703 9.976439066337804 236.75362934444058 1 17
187.88436429460683 43.82007387041961 165.92521636876344 1 18
257.8261034151001 43.39784965440255 93.40022280897969 1 19
13.604405672652977 9.976439066337804 303.24637065555936 1 20
352.1156357053931 43.82007387041961 14.074783631236542 1 21
77.82610341510008 43.397849654402556 273.4002228089796 1 22
193.60440567265297 9.976439066337806 123.24637065555939 1 23
172.11563570539317 43.82007387041961 194.07478363123653 1 24

View File

@ -0,0 +1,26 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
303.24637065555936 9.976439066337804 13.604405672652977 1 1
165.92521636876344 43.82007387041961 187.88436429460683 1 2
266.59977719102034 43.39784965440254 102.17389658489992 1 3
123.24637065555939 9.976439066337804 193.604405672653 1 4
345.9252163687635 43.82007387041961 7.884364294606862 1 5
86.59977719102032 43.39784965440254 282.17389658489986 1 6
56.75362934444064 9.976439066337804 346.395594327347 1 7
194.07478363123653 43.82007387041961 172.11563570539317 1 8
93.40022280897969 43.39784965440255 257.8261034151001 1 9
236.75362934444058 9.976439066337804 166.39559432734697 1 10
14.074783631236542 43.82007387041961 352.1156357053931 1 11
273.4002228089796 43.397849654402556 77.82610341510008 1 12
104.07478363123654 43.82007387041961 262.1156357053931 1 13
326.75362934444064 9.976439066337806 76.39559432734703 1 14
3.400222808979685 43.39784965440255 347.8261034151001 1 15
284.0747836312365 43.82007387041961 82.11563570539313 1 16
146.75362934444064 9.976439066337804 256.395594327347 1 17
183.40022280897963 43.397849654402556 167.8261034151001 1 18
255.92521636876344 43.82007387041961 97.88436429460687 1 19
33.24637065555936 9.976439066337804 283.60440567265294 1 20
356.59977719102034 43.39784965440254 12.173896584899929 1 21
75.92521636876346 43.82007387041961 277.8843642946069 1 22
213.24637065555936 9.976439066337804 103.604405672653 1 23
176.59977719102034 43.397849654402556 192.17389658489986 1 24

View File

@ -0,0 +1,26 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
335.7965716606702 10.528779365509317 65.79657166067024 1 1
228.77270547567446 80.40593177313953 85.64260312151849 1 2
131.22729452432552 80.40593177313954 4.357396878481506 1 3
24.20342833932977 10.52877936550932 24.20342833932976 1 4
221.95489158457983 85.70366403943002 80.37863910890589 1 5
138.04510841542015 85.70366403943004 9.621360891094124 1 6
131.22729452432552 80.40593177313953 94.35739687848151 1 7
24.203428339329765 10.52877936550932 114.20342833932976 1 8
221.95489158457983 85.70366403943004 170.37863910890587 1 9
138.04510841542015 85.70366403943004 99.62136089109411 1 10
335.7965716606702 10.52877936550932 155.79657166067025 1 11
228.77270547567448 80.40593177313954 175.6426031215185 1 12
335.7965716606702 10.52877936550932 335.7965716606702 1 13
228.77270547567448 80.40593177313954 355.6426031215185 1 14
131.2272945243255 80.40593177313954 274.35739687848144 1 15
24.203428339329747 10.52877936550932 294.2034283393298 1 16
221.95489158457985 85.70366403943004 350.3786391089059 1 17
138.04510841542015 85.70366403943004 279.6213608910941 1 18
41.95489158457986 94.29633596056998 9.621360891094133 1 19
318.04510841542015 94.29633596056996 80.37863910890589 1 20
155.79657166067025 169.4712206344907 24.203428339329754 1 21
48.77270547567448 99.59406822686046 4.357396878481504 1 22
311.2272945243255 99.59406822686046 85.64260312151852 1 23
204.20342833932975 169.4712206344907 65.79657166067024 1 24

View File

@ -0,0 +1,14 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
225.41555594321144 83.13253115922213 83.08266205989301 1 1
134.58444405678856 83.13253115922211 6.917337940107012 1 2
4.702125169424418e-15 9.735610317245317 45.0 1 3
134.58444405678856 83.13253115922213 276.91733794010696 1 4
225.4155559432114 83.13253115922213 353.082662059893 1 5
0.0 9.735610317245317 315.0 1 6
134.58444405678858 83.13253115922213 96.91733794010702 1 7
225.41555594321142 83.13253115922213 173.082662059893 1 8
0.0 9.735610317245317 135.0 1 9
99.59803029876785 45.81931182053557 166.36129272052355 1 10
260.40196970123213 45.81931182053556 283.6387072794765 1 11
180.0 99.73561031724535 225.0 1 12

View File

@ -0,0 +1,14 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
6.9173379401070045 83.13253115922213 44.58444405678856 1 1
45.0 89.99999999999999 279.7356103172453 1 2
166.36129272052352 45.819311820535574 279.59803029876787 1 3
83.08266205989301 83.13253115922213 225.41555594321144 1 4
256.3612927205235 45.819311820535574 189.59803029876787 1 5
315.0 90.0 9.735610317245369 1 6
186.917337940107 83.13253115922213 224.58444405678856 1 7
315.0 90.0 80.26438968275463 1 8
13.638707279476478 45.81931182053557 260.40196970123213 1 9
263.082662059893 83.13253115922213 45.415555943211444 1 10
103.63870727947646 45.819311820535574 170.40196970123213 1 11
224.99999999999997 90.0 170.26438968275465 1 12

View File

@ -0,0 +1,5 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
180.0 45.00000000000001 180.0 1 1
270.0 45.00000000000001 90.0 1 2
315.0 0.0 0.0 1 3

View File

@ -0,0 +1,26 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
146.75362934444064 9.976439066337804 256.395594327347 1 1
356.59977719102034 43.39784965440254 12.173896584899929 1 2
75.92521636876346 43.82007387041961 277.8843642946069 1 3
326.75362934444064 9.976439066337806 76.39559432734703 1 4
176.59977719102034 43.397849654402556 192.17389658489986 1 5
255.92521636876344 43.82007387041961 97.88436429460687 1 6
213.24637065555936 9.976439066337804 103.604405672653 1 7
3.400222808979685 43.39784965440255 347.8261034151001 1 8
284.0747836312365 43.82007387041961 82.11563570539313 1 9
33.24637065555936 9.976439066337804 283.60440567265294 1 10
183.40022280897963 43.397849654402556 167.8261034151001 1 11
104.07478363123654 43.82007387041961 262.1156357053931 1 12
273.4002228089796 43.397849654402556 77.82610341510008 1 13
123.24637065555939 9.976439066337806 193.60440567265297 1 14
194.07478363123653 43.82007387041961 172.11563570539317 1 15
93.40022280897969 43.39784965440255 257.8261034151001 1 16
303.24637065555936 9.976439066337804 13.604405672652977 1 17
14.074783631236542 43.82007387041961 352.1156357053931 1 18
86.59977719102032 43.39784965440254 282.17389658489986 1 19
236.75362934444058 9.976439066337804 166.39559432734703 1 20
165.92521636876344 43.82007387041961 187.88436429460683 1 21
266.59977719102034 43.39784965440254 102.17389658489992 1 22
56.75362934444064 9.976439066337804 346.395594327347 1 23
345.9252163687635 43.82007387041961 7.884364294606862 1 24

View File

@ -0,0 +1,26 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
166.39559432734697 9.976439066337804 236.75362934444058 1 1
352.1156357053931 43.82007387041961 14.074783631236542 1 2
77.82610341510008 43.397849654402556 273.4002228089796 1 3
346.395594327347 9.976439066337804 56.75362934444064 1 4
172.11563570539317 43.82007387041961 194.07478363123653 1 5
257.8261034151001 43.39784965440255 93.40022280897969 1 6
193.604405672653 9.976439066337804 123.24637065555939 1 7
7.884364294606862 43.82007387041961 345.9252163687635 1 8
282.17389658489986 43.39784965440254 86.59977719102032 1 9
13.604405672652977 9.976439066337804 303.24637065555936 1 10
187.88436429460683 43.82007387041961 165.92521636876344 1 11
102.17389658489992 43.39784965440254 266.59977719102034 1 12
277.8843642946069 43.82007387041961 75.92521636876346 1 13
103.604405672653 9.976439066337804 213.24637065555936 1 14
192.17389658489986 43.397849654402556 176.59977719102034 1 15
97.88436429460687 43.82007387041961 255.92521636876344 1 16
283.60440567265294 9.976439066337804 33.24637065555936 1 17
12.173896584899929 43.39784965440254 356.59977719102034 1 18
82.11563570539313 43.82007387041961 284.0747836312365 1 19
256.395594327347 9.976439066337804 146.75362934444064 1 20
167.8261034151001 43.397849654402556 183.40022280897963 1 21
262.1156357053931 43.82007387041961 104.07478363123654 1 22
76.39559432734703 9.976439066337806 326.75362934444064 1 23
347.8261034151001 43.39784965440255 3.400222808979685 1 24

View File

@ -0,0 +1,26 @@
1 header
1_Eulers 2_Eulers 3_Eulers 1_pos 2_pos
114.20342833932975 10.52877936550932 204.20342833932972 1 1
94.3573968784815 80.40593177313954 311.22729452432543 1 2
175.6426031215185 80.40593177313954 48.77270547567447 1 3
155.79657166067025 10.52877936550932 155.79657166067025 1 4
99.62136089109411 85.70366403943004 318.04510841542015 1 5
170.37863910890587 85.70366403943002 41.954891584579855 1 6
85.64260312151852 80.40593177313954 48.77270547567448 1 7
65.79657166067024 10.52877936550932 155.79657166067025 1 8
9.621360891094124 85.70366403943004 318.04510841542015 1 9
80.37863910890587 85.70366403943004 41.95489158457987 1 10
24.203428339329758 10.52877936550932 204.20342833932975 1 11
4.357396878481486 80.40593177313954 311.2272945243255 1 12
204.20342833932972 10.52877936550932 204.20342833932972 1 13
184.35739687848147 80.40593177313954 311.2272945243255 1 14
265.64260312151845 80.40593177313953 48.77270547567449 1 15
245.79657166067025 10.528779365509317 155.79657166067025 1 16
189.62136089109413 85.70366403943004 318.04510841542015 1 17
260.3786391089059 85.70366403943002 41.954891584579855 1 18
170.37863910890587 94.29633596056996 138.04510841542015 1 19
99.62136089109411 94.29633596056998 221.95489158457983 1 20
155.79657166067025 169.4712206344907 24.203428339329754 1 21
175.64260312151848 99.59406822686046 131.22729452432552 1 22
94.35739687848151 99.59406822686046 228.77270547567446 1 23
114.20342833932975 169.4712206344907 335.7965716606702 1 24

Some files were not shown because too many files have changed in this diff Show More