Merge branch 'MiscImprovements' into development

This commit is contained in:
Martin Diehl 2020-01-12 19:37:56 +01:00
commit 11678357ad
76 changed files with 1035 additions and 1881 deletions

View File

@ -391,7 +391,6 @@ Marc_compileIfort:
stage: compileMarc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- Marc_compileIfort/test.py
except:
- master
@ -402,7 +401,6 @@ Hex_elastic:
stage: marc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- Hex_elastic/test.py
except:
- master
@ -412,7 +410,6 @@ CubicFCC_elastic:
stage: marc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- CubicFCC_elastic/test.py
except:
- master
@ -422,7 +419,6 @@ CubicBCC_elastic:
stage: marc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- CubicBCC_elastic/test.py
except:
- master
@ -432,7 +428,6 @@ J2_plasticBehavior:
stage: marc
script:
- module load $IntelMarc $HDF5Marc $MSC
- export DAMASK_HDF5=ON
- J2_plasticBehavior/test.py
except:
- master

4
CONFIG
View File

@ -1,11 +1,7 @@
# "set"-syntax needed only for tcsh (but works with bash and zsh)
# DAMASK_ROOT will be expanded
set DAMASK_NUM_THREADS = 4
set MSC_ROOT = /opt/msc
set MARC_VERSION = 2019
set ABAQUS_VERSION = 2019
set DAMASK_HDF5 = ON

View File

@ -2,129 +2,129 @@
# GNU Compiler
###################################################################################################
if (OPENMP)
set (OPENMP_FLAGS "-fopenmp")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-fopenmp")
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0" )
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3 -ffast-math -funroll-loops -ftree-vectorize")
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0" )
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3 -ffast-math -funroll-loops -ftree-vectorize")
endif ()
set (STANDARD_CHECK "-std=f2008ts -pedantic-errors" )
set (LINKER_FLAGS "${LINKER_FLAGS} -Wl")
# options parsed directly to the linker
set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
# ensure to link against dynamic libraries
set (STANDARD_CHECK "-std=f2008ts -pedantic-errors" )
set (LINKER_FLAGS "${LINKER_FLAGS} -Wl")
# options parsed directly to the linker
set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
# ensure to link against dynamic libraries
#------------------------------------------------------------------------------------------------
# Fine tuning compilation options
set (COMPILE_FLAGS "${COMPILE_FLAGS} -xf95-cpp-input")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -xf95-cpp-input")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffree-line-length-132")
# restrict line length to the standard 132 characters (lattice.f90 require more characters)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffree-line-length-132")
# restrict line length to the standard 132 characters (lattice.f90 require more characters)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fimplicit-none")
# assume "implicit none" even if not present in source
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fimplicit-none")
# assume "implicit none" even if not present in source
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wall")
# sets the following Fortran options:
# -Waliasing: warn about possible aliasing of dummy arguments. Specifically, it warns if the same actual argument is associated with a dummy argument with "INTENT(IN)" and a dummy argument with "INTENT(OUT)" in a call with an explicit interface.
# -Wampersand: checks if a character expression is continued proberly by an ampersand at the end of the line and at the beginning of the new line
# -Warray-bounds: checks if array reference is out of bounds at compile time. use -fcheck-bounds to also check during runtime
# -Wconversion: warn about implicit conversions between different type
# -Wsurprising: warn when "suspicious" code constructs are encountered. While technically legal these usually indicate that an error has been made.
# -Wc-binding-type:
# -Wintrinsics-std: only standard intrisics are available, e.g. "call flush(6)" will cause an error
# -Wno-tabs: do not allow tabs in source
# -Wintrinsic-shadow: warn if a user-defined procedure or module procedure has the same name as an intrinsic
# -Wline-truncation:
# -Wtarget-lifetime:
# -Wreal-q-constant: warn about real-literal-constants with 'q' exponent-letter
# -Wunused: a number of unused-xxx warnings
# and sets the general (non-Fortran options) options:
# -Waddress
# -Warray-bounds (only with -O2)
# -Wc++11-compat
# -Wchar-subscripts
# -Wcomment
# -Wformat
# -Wmaybe-uninitialized
# -Wnonnull
# -Wparentheses
# -Wpointer-sign
# -Wreorder
# -Wreturn-type
# -Wsequence-point
# -Wstrict-aliasing
# -Wstrict-overflow=1
# -Wswitch
# -Wtrigraphs
# -Wuninitialized
# -Wunknown-pragmas
# -Wunused-function
# -Wunused-label
# -Wunused-value
# -Wunused-variable
# -Wvolatile-register-var
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wall")
# sets the following Fortran options:
# -Waliasing: warn about possible aliasing of dummy arguments. Specifically, it warns if the same actual argument is associated with a dummy argument with "INTENT(IN)" and a dummy argument with "INTENT(OUT)" in a call with an explicit interface.
# -Wampersand: checks if a character expression is continued proberly by an ampersand at the end of the line and at the beginning of the new line
# -Warray-bounds: checks if array reference is out of bounds at compile time. use -fcheck-bounds to also check during runtime
# -Wconversion: warn about implicit conversions between different type
# -Wsurprising: warn when "suspicious" code constructs are encountered. While technically legal these usually indicate that an error has been made.
# -Wc-binding-type:
# -Wintrinsics-std: only standard intrisics are available, e.g. "call flush(6)" will cause an error
# -Wno-tabs: do not allow tabs in source
# -Wintrinsic-shadow: warn if a user-defined procedure or module procedure has the same name as an intrinsic
# -Wline-truncation:
# -Wtarget-lifetime:
# -Wreal-q-constant: warn about real-literal-constants with 'q' exponent-letter
# -Wunused: a number of unused-xxx warnings
# and sets the general (non-Fortran options) options:
# -Waddress
# -Warray-bounds (only with -O2)
# -Wc++11-compat
# -Wchar-subscripts
# -Wcomment
# -Wformat
# -Wmaybe-uninitialized
# -Wnonnull
# -Wparentheses
# -Wpointer-sign
# -Wreorder
# -Wreturn-type
# -Wsequence-point
# -Wstrict-aliasing
# -Wstrict-overflow=1
# -Wswitch
# -Wtrigraphs
# -Wuninitialized
# -Wunknown-pragmas
# -Wunused-function
# -Wunused-label
# -Wunused-value
# -Wunused-variable
# -Wvolatile-register-var
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wextra")
# sets the following Fortran options:
# -Wunuses-parameter:
# -Wcompare-reals:
# and sets the general (non-Fortran options) options:
# -Wclobbered
# -Wempty-body
# -Wignored-qualifiers
# -Wmissing-field-initializers
# -Woverride-init
# -Wsign-compare
# -Wtype-limits
# -Wuninitialized
# -Wunused-but-set-parameter (only with -Wunused or -Wall)
# -Wno-globals
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wextra")
# sets the following Fortran options:
# -Wunuses-parameter:
# -Wcompare-reals:
# and sets the general (non-Fortran options) options:
# -Wclobbered
# -Wempty-body
# -Wignored-qualifiers
# -Wmissing-field-initializers
# -Woverride-init
# -Wsign-compare
# -Wtype-limits
# -Wuninitialized
# -Wunused-but-set-parameter (only with -Wunused or -Wall)
# -Wno-globals
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wcharacter-truncation")
# warn if character expressions (strings) are truncated
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wcharacter-truncation")
# warn if character expressions (strings) are truncated
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wunderflow")
# produce a warning when numerical constant expressions are encountered, which yield an UNDERFLOW during compilation
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wunderflow")
# produce a warning when numerical constant expressions are encountered, which yield an UNDERFLOW during compilation
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=pure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=noreturn")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wconversion-extra")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wimplicit-procedure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wno-unused-parameter")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffpe-summary=all")
# print summary of floating point exeptions (invalid,zero,overflow,underflow,inexact,denormal)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=pure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=noreturn")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wconversion-extra")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wimplicit-procedure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wno-unused-parameter")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffpe-summary=all")
# print summary of floating point exeptions (invalid,zero,overflow,underflow,inexact,denormal)
# Additional options
# -Warray-temporarieswarnings: because we have many temporary arrays (performance issue?):
# -Wimplicit-interface: no interfaces for lapack/MPI routines
# -Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions.
# Additional options
# -Warray-temporarieswarnings: because we have many temporary arrays (performance issue?):
# -Wimplicit-interface: no interfaces for lapack/MPI routines
# -Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions.
#------------------------------------------------------------------------------------------------
# Runtime debugging
set (DEBUG_FLAGS "${DEBUG_FLAGS} -ffpe-trap=invalid,zero,overflow")
# stop execution if floating point exception is detected (NaN is silent)
set (DEBUG_FLAGS "${DEBUG_FLAGS} -ffpe-trap=invalid,zero,overflow")
# stop execution if floating point exception is detected (NaN is silent)
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Generate symbolic debugging information in the object file
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Generate symbolic debugging information in the object file
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fbacktrace")
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fdump-core")
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fcheck=all")
# checks for (array-temps,bounds,do,mem,pointer,recursion)
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fbacktrace")
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fdump-core")
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fcheck=all")
# checks for (array-temps,bounds,do,mem,pointer,recursion)
# Additional options
# -ffpe-trap=precision,denormal,underflow
# Additional options
# -ffpe-trap=precision,denormal,underflow
#------------------------------------------------------------------------------------------------
# precision settings
set (PRECISION_FLAGS "${PRECISION_FLAGS} -fdefault-real-8")
# set precision to 8 bytes for standard real (=8 for pReal). Will set size of double to 16 bytes as long as -fdefault-double-8 is not set
set (PRECISION_FLAGS "${PRECISION_FLAGS} -fdefault-double-8")
# set precision to 8 bytes for double real, would be 16 bytes if -fdefault-real-8 is used
set (PRECISION_FLAGS "${PRECISION_FLAGS} -fdefault-real-8")
# set precision to 8 bytes for standard real (=8 for pReal). Will set size of double to 16 bytes as long as -fdefault-double-8 is not set
set (PRECISION_FLAGS "${PRECISION_FLAGS} -fdefault-double-8")
# set precision to 8 bytes for double real, would be 16 bytes if -fdefault-real-8 is used

View File

@ -1,116 +1,116 @@
###################################################################################################
# Intel Compiler
###################################################################################################
if (OPENMP)
set (OPENMP_FLAGS "-qopenmp -parallel")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-qopenmp -parallel")
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0 -no-ip")
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-ipo -O3 -no-prec-div -fp-model fast=2 -xHost")
# -fast = -ipo, -O3, -no-prec-div, -static, -fp-model fast=2, and -xHost"
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0 -no-ip")
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-ipo -O3 -no-prec-div -fp-model fast=2 -xHost")
# -fast = -ipo, -O3, -no-prec-div, -static, -fp-model fast=2, and -xHost"
endif ()
# -assume std_mod_proc_name (included in -standard-semantics) causes problems if other modules
# (PETSc, HDF5) are not compiled with this option (https://software.intel.com/en-us/forums/intel-fortran-compiler-for-linux-and-mac-os-x/topic/62172)
set (STANDARD_CHECK "-stand f15 -standard-semantics -assume nostd_mod_proc_name")
set (LINKER_FLAGS "${LINKER_FLAGS} -shared-intel")
# Link against shared Intel libraries instead of static ones
# -assume std_mod_proc_name (included in -standard-semantics) causes problems if other modules
# (PETSc, HDF5) are not compiled with this option (https://software.intel.com/en-us/forums/intel-fortran-compiler-for-linux-and-mac-os-x/topic/62172)
set (STANDARD_CHECK "-stand f15 -standard-semantics -assume nostd_mod_proc_name")
set (LINKER_FLAGS "${LINKER_FLAGS} -shared-intel")
# Link against shared Intel libraries instead of static ones
#------------------------------------------------------------------------------------------------
# Fine tuning compilation options
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fpp")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fpp")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ftz")
# flush underflow to zero, automatically set if -O[1,2,3]
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ftz")
# flush underflow to zero, automatically set if -O[1,2,3]
set (COMPILE_FLAGS "${COMPILE_FLAGS} -diag-disable")
# disables warnings ...
set (COMPILE_FLAGS "${COMPILE_FLAGS} 5268")
# ... the text exceeds right hand column allowed on the line (we have only comments there)
set (COMPILE_FLAGS "${COMPILE_FLAGS},7624")
# ... about deprecated forall (has nice syntax and most likely a performance advantage)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -diag-disable")
# disables warnings ...
set (COMPILE_FLAGS "${COMPILE_FLAGS} 5268")
# ... the text exceeds right hand column allowed on the line (we have only comments there)
set (COMPILE_FLAGS "${COMPILE_FLAGS},7624")
# ... about deprecated forall (has nice syntax and most likely a performance advantage)
set (COMPILE_FLAGS "${COMPILE_FLAGS} -warn")
# enables warnings ...
set (COMPILE_FLAGS "${COMPILE_FLAGS} declarations")
# ... any undeclared names (alternative name: -implicitnone)
set (COMPILE_FLAGS "${COMPILE_FLAGS},general")
# ... warning messages and informational messages are issued by the compiler
set (COMPILE_FLAGS "${COMPILE_FLAGS},usage")
# ... questionable programming practices
set (COMPILE_FLAGS "${COMPILE_FLAGS},interfaces")
# ... checks the interfaces of all SUBROUTINEs called and FUNCTIONs invoked in your compilation against an external set of interface blocks
set (COMPILE_FLAGS "${COMPILE_FLAGS},ignore_loc")
# ... %LOC is stripped from an actual argument
set (COMPILE_FLAGS "${COMPILE_FLAGS},alignments")
# ... data that is not naturally aligned
set (COMPILE_FLAGS "${COMPILE_FLAGS},unused")
# ... declared variables that are never used
set (COMPILE_FLAGS "${COMPILE_FLAGS} -warn")
# enables warnings ...
set (COMPILE_FLAGS "${COMPILE_FLAGS} declarations")
# ... any undeclared names (alternative name: -implicitnone)
set (COMPILE_FLAGS "${COMPILE_FLAGS},general")
# ... warning messages and informational messages are issued by the compiler
set (COMPILE_FLAGS "${COMPILE_FLAGS},usage")
# ... questionable programming practices
set (COMPILE_FLAGS "${COMPILE_FLAGS},interfaces")
# ... checks the interfaces of all SUBROUTINEs called and FUNCTIONs invoked in your compilation against an external set of interface blocks
set (COMPILE_FLAGS "${COMPILE_FLAGS},ignore_loc")
# ... %LOC is stripped from an actual argument
set (COMPILE_FLAGS "${COMPILE_FLAGS},alignments")
# ... data that is not naturally aligned
set (COMPILE_FLAGS "${COMPILE_FLAGS},unused")
# ... declared variables that are never used
# Additional options
# -warn: enables warnings, where
# truncated_source: Determines whether warnings occur when source exceeds the maximum column width in fixed-format files.
# (too many warnings because we have comments beyond character 132)
# uncalled: Determines whether warnings occur when a statement function is never called
# all:
# -name as_is: case sensitive Fortran!
# Additional options
# -warn: enables warnings, where
# truncated_source: Determines whether warnings occur when source exceeds the maximum column width in fixed-format files.
# (too many warnings because we have comments beyond character 132)
# uncalled: Determines whether warnings occur when a statement function is never called
# all:
# -name as_is: case sensitive Fortran!
#------------------------------------------------------------------------------------------------
# Runtime debugging
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Generate symbolic debugging information in the object file
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Generate symbolic debugging information in the object file
set (DEBUG_FLAGS "${DEBUG_FLAGS} -traceback")
# Generate extra information in the object file to provide source file traceback information when a severe error occurs at run time
set (DEBUG_FLAGS "${DEBUG_FLAGS} -traceback")
# Generate extra information in the object file to provide source file traceback information when a severe error occurs at run time
set (DEBUG_FLAGS "${DEBUG_FLAGS} -gen-interfaces")
# Generate an interface block for each routine. http://software.intel.com/en-us/blogs/2012/01/05/doctor-fortran-gets-explicit-again/
set (DEBUG_FLAGS "${DEBUG_FLAGS} -gen-interfaces")
# Generate an interface block for each routine. http://software.intel.com/en-us/blogs/2012/01/05/doctor-fortran-gets-explicit-again/
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fp-stack-check")
# Generate extra code after every function call to ensure that the floating-point (FP) stack is in the expected state
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fp-stack-check")
# Generate extra code after every function call to ensure that the floating-point (FP) stack is in the expected state
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fp-model strict")
# Trap uninitalized variables
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fp-model strict")
# Trap uninitalized variables
set (DEBUG_FLAGS "${DEBUG_FLAGS} -check" )
# Checks at runtime ...
set (DEBUG_FLAGS "${DEBUG_FLAGS} bounds")
# ... if an array index is too small (<1) or too large!
set (DEBUG_FLAGS "${DEBUG_FLAGS},format")
# ... for the data type of an item being formatted for output.
set (DEBUG_FLAGS "${DEBUG_FLAGS},output_conversion")
# ... for the fit of data items within a designated format descriptor field.
set (DEBUG_FLAGS "${DEBUG_FLAGS},pointers")
# ... for certain disassociated or uninitialized pointers or unallocated allocatable objects.
set (DEBUG_FLAGS "${DEBUG_FLAGS},uninit")
# ... for uninitialized variables.
set (DEBUG_FLAGS "${DEBUG_FLAGS} -ftrapuv")
# ... initializes stack local variables to an unusual value to aid error detection
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fpe-all=0")
# ... capture all floating-point exceptions, sets -ftz automatically
set (DEBUG_FLAGS "${DEBUG_FLAGS} -check" )
# Checks at runtime ...
set (DEBUG_FLAGS "${DEBUG_FLAGS} bounds")
# ... if an array index is too small (<1) or too large!
set (DEBUG_FLAGS "${DEBUG_FLAGS},format")
# ... for the data type of an item being formatted for output.
set (DEBUG_FLAGS "${DEBUG_FLAGS},output_conversion")
# ... for the fit of data items within a designated format descriptor field.
set (DEBUG_FLAGS "${DEBUG_FLAGS},pointers")
# ... for certain disassociated or uninitialized pointers or unallocated allocatable objects.
set (DEBUG_FLAGS "${DEBUG_FLAGS},uninit")
# ... for uninitialized variables.
set (DEBUG_FLAGS "${DEBUG_FLAGS} -ftrapuv")
# ... initializes stack local variables to an unusual value to aid error detection
set (DEBUG_FLAGS "${DEBUG_FLAGS} -fpe-all=0")
# ... capture all floating-point exceptions, sets -ftz automatically
set (DEBUG_FLAGS "${DEBUG_FLAGS} -warn")
# enables warnings ...
set (DEBUG_FLAGS "${DEBUG_FLAGS} errors")
# ... warnings are changed to errors
set (DEBUG_FLAGS "${DEBUG_FLAGS},stderrors")
# ... warnings about Fortran standard violations are changed to errors
set (DEBUG_FLAGS "${DEBUG_FLAGS} -warn")
# enables warnings ...
set (DEBUG_FLAGS "${DEBUG_FLAGS} errors")
# ... warnings are changed to errors
set (DEBUG_FLAGS "${DEBUG_FLAGS},stderrors")
# ... warnings about Fortran standard violations are changed to errors
set (DEBUG_FLAGS "${DEBUG_FLAGS} -debug-parameters all")
# generate debug information for parameters
set (DEBUG_FLAGS "${DEBUG_FLAGS} -debug-parameters all")
# generate debug information for parameters
# Additional options
# -heap-arrays: Should not be done for OpenMP, but set "ulimit -s unlimited" on shell. Probably it helps also to unlimit other limits
# -check: Checks at runtime, where
# arg_temp_created: will cause a lot of warnings because we create a bunch of temporary arrays (performance?)
# stack:
# Additional options
# -heap-arrays: Should not be done for OpenMP, but set "ulimit -s unlimited" on shell. Probably it helps also to unlimit other limits
# -check: Checks at runtime, where
# arg_temp_created: will cause a lot of warnings because we create a bunch of temporary arrays (performance?)
# stack:
#------------------------------------------------------------------------------------------------
# precision settings
set (PRECISION_FLAGS "${PRECISION_FLAGS} -real-size 64")
# set precision for standard real to 32 | 64 | 128 (= 4 | 8 | 16 bytes, type pReal is always 8 bytes)
set (PRECISION_FLAGS "${PRECISION_FLAGS} -real-size 64")
# set precision for standard real to 32 | 64 | 128 (= 4 | 8 | 16 bytes, type pReal is always 8 bytes)

View File

@ -1,25 +1,24 @@
###################################################################################################
# PGI Compiler
###################################################################################################
elseif(CMAKE_Fortran_COMPILER_ID STREQUAL "PGI")
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0" )
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3")
endif ()
if (OPTIMIZATION STREQUAL "OFF")
set (OPTIMIZATION_FLAGS "-O0" )
elseif (OPTIMIZATION STREQUAL "DEFENSIVE")
set (OPTIMIZATION_FLAGS "-O2")
elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3")
endif ()
#------------------------------------------------------------------------------------------------
# Fine tuning compilation options
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Mpreprocess")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Mpreprocess")
# preprocessor
set (STANDARD_CHECK "-Mallocatable=03")
set (STANDARD_CHECK "-Mallocatable=03")
#------------------------------------------------------------------------------------------------
# Runtime debugging
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Includes debugging information in the object module; sets the optimization level to zero unless a -O option is present on the command line
set (DEBUG_FLAGS "${DEBUG_FLAGS} -g")
# Includes debugging information in the object module; sets the optimization level to zero unless a -O option is present on the command line

22
env/DAMASK.csh vendored
View File

@ -7,12 +7,6 @@ set DAMASK_ROOT=`python -c "import os,sys; print(os.path.realpath(os.path.expand
source $DAMASK_ROOT/CONFIG
# add BRANCH if DAMASK_ROOT is a git repository
cd $DAMASK_ROOT >/dev/null
set BRANCH = `git branch 2>/dev/null| grep -E '^\* ')`
cd - >/dev/null
# if DAMASK_BIN is present
set path = ($DAMASK_ROOT/bin $path)
set SOLVER=`which DAMASK_spectral`
@ -21,20 +15,12 @@ if ( "x$DAMASK_NUM_THREADS" == "x" ) then
set DAMASK_NUM_THREADS=1
endif
# currently, there is no information that unlimited causes problems
# currently, there is no information that unlimited stack size causes problems
# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
# more info https://jblevins.org/log/segfault
# https://stackoverflow.com/questions/79923/what-and-where-are-the-stack-and-heap
# http://superuser.com/questions/220059/what-parameters-has-ulimit
limit datasize unlimited # maximum heap size (kB)
# http://superuser.com/questions/220059/what-parameters-has-ulimit
limit stacksize unlimited # maximum stack size (kB)
endif
if ( `limit | grep memoryuse` != "" ) then
limit memoryuse unlimited # maximum physical memory size
endif
if ( `limit | grep vmemoryuse` != "" ) then
limit vmemoryuse unlimited # maximum virtual memory size
endif
# disable output in case of scp
if ( $?prompt ) then
@ -44,8 +30,8 @@ if ( $?prompt ) then
echo https://damask.mpie.de
echo
echo Using environment with ...
echo "DAMASK $DAMASK_ROOT $BRANCH"
echo "Spectral Solver $SOLVER"
echo "DAMASK $DAMASK_ROOT"
echo "Grid Solver $SOLVER"
echo "Post Processing $PROCESSING"
if ( $?PETSC_DIR) then
echo "PETSc location $PETSC_DIR"

11
env/DAMASK.sh vendored
View File

@ -43,15 +43,12 @@ PROCESSING=$(type -p postResults || true 2>/dev/null)
[ "x$DAMASK_NUM_THREADS" == "x" ] && DAMASK_NUM_THREADS=1
# currently, there is no information that unlimited causes problems
# currently, there is no information that unlimited stack size causes problems
# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
# more info https://jblevins.org/log/segfault
# https://stackoverflow.com/questions/79923/what-and-where-are-the-stack-and-heap
# http://superuser.com/questions/220059/what-parameters-has-ulimit
ulimit -d unlimited 2>/dev/null # maximum heap size (kB)
# http://superuser.com/questions/220059/what-parameters-has-ulimit
ulimit -s unlimited 2>/dev/null # maximum stack size (kB)
ulimit -v unlimited 2>/dev/null # maximum virtual memory size
ulimit -m unlimited 2>/dev/null # maximum physical memory size
# disable output in case of scp
if [ ! -z "$PS1" ]; then
@ -62,7 +59,7 @@ if [ ! -z "$PS1" ]; then
echo
echo Using environment with ...
echo "DAMASK $DAMASK_ROOT $BRANCH"
echo "Spectral Solver $SOLVER"
echo "Grid Solver $SOLVER"
echo "Post Processing $PROCESSING"
if [ "x$PETSC_DIR" != "x" ]; then
echo -n "PETSc location "
@ -96,7 +93,7 @@ fi
export DAMASK_NUM_THREADS
export PYTHONPATH=$DAMASK_ROOT/python:$PYTHONPATH
for var in BASE STAT SOLVER PROCESSING FREE DAMASK_BIN BRANCH; do
for var in BASE STAT SOLVER PROCESSING BRANCH; do
unset "${var}"
done
for var in DAMASK MSC; do

12
env/DAMASK.zsh vendored
View File

@ -24,7 +24,6 @@ unset -f set
# add BRANCH if DAMASK_ROOT is a git repository
cd $DAMASK_ROOT >/dev/null; BRANCH=$(git branch 2>/dev/null| grep -E '^\* '); cd - >/dev/null
# add DAMASK_BIN if present
PATH=${DAMASK_ROOT}/bin:$PATH
SOLVER=$(which DAMASK_spectral || true 2>/dev/null)
@ -35,15 +34,12 @@ PROCESSING=$(which postResults || true 2>/dev/null)
[[ "x$DAMASK_NUM_THREADS" == "x" ]] && DAMASK_NUM_THREADS=1
# currently, there is no information that unlimited causes problems
# currently, there is no information that unlimited stack size causes problems
# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
# more info https://jblevins.org/log/segfault
# https://stackoverflow.com/questions/79923/what-and-where-are-the-stack-and-heap
# http://superuser.com/questions/220059/what-parameters-has-ulimit
ulimit -d unlimited 2>/dev/null # maximum heap size (kB)
# http://superuser.com/questions/220059/what-parameters-has-ulimit
ulimit -s unlimited 2>/dev/null # maximum stack size (kB)
ulimit -v unlimited 2>/dev/null # maximum virtual memory size
ulimit -m unlimited 2>/dev/null # maximum physical memory size
# disable output in case of scp
if [ ! -z "$PS1" ]; then
@ -54,7 +50,7 @@ if [ ! -z "$PS1" ]; then
echo
echo "Using environment with ..."
echo "DAMASK $DAMASK_ROOT $BRANCH"
echo "Spectral Solver $SOLVER"
echo "Grid Solver $SOLVER"
echo "Post Processing $PROCESSING"
if [ "x$PETSC_DIR" != "x" ]; then
echo -n "PETSc location "
@ -90,7 +86,7 @@ fi
export DAMASK_NUM_THREADS
export PYTHONPATH=$DAMASK_ROOT/python:$PYTHONPATH
for var in BASE STAT SOLVER PROCESSING FREE DAMASK_BIN BRANCH; do
for var in SOLVER PROCESSING BRANCH; do
unset "${var}"
done
for var in DAMASK MSC; do

View File

@ -99,14 +99,9 @@ else
fi
# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
if test "$DAMASK_HDF5" = "ON";then
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
echo $FCOMP
else
FCOMP=ifort
fi
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
# AEM
if test "$MARCDLLOUTDIR" = ""; then

View File

@ -99,14 +99,9 @@ else
fi
# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
if test "$DAMASK_HDF5" = "ON";then
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
echo $FCOMP
else
FCOMP=ifort
fi
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
# AEM
if test "$MARCDLLOUTDIR" = ""; then

View File

@ -100,11 +100,9 @@ else
fi
# DAMASK uses the HDF5 compiler wrapper around the Intel compiler
if test "$DAMASK_HDF5" = "ON";then
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
fi
H5FC="$(h5fc -shlib -show)"
HDF5_LIB=${H5FC//ifort/}
FCOMP="$H5FC -DDAMASK_HDF5"
# AEM
if test "$MARCDLLOUTDIR" = ""; then

View File

@ -1,8 +1,10 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
@ -19,47 +21,10 @@ Convert TSL/EDAX *.ang file to ASCIItable
""", version = scriptID)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.txt' if name else name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# --- interpret header -----------------------------------------------------------------------------
table.head_read()
# --- read comments --------------------------------------------------------------------------------
table.info_clear()
while table.data_read(advance = False) and table.line.startswith('#'): # cautiously (non-progressing) read header
table.info_append(table.line) # add comment to info part
table.data_read() # wind forward
table.labels_clear()
table.labels_append(['1_Euler','2_Euler','3_Euler',
'1_pos','2_pos',
'IQ','CI','PhaseID','Intensity','Fit',
], # OIM Analysis 7.2 Manual, p 403 (of 517)
reset = True)
# ------------------------------------------ assemble header ---------------------------------------
table.head_write()
#--- write remainder of data file ------------------------------------------------------------------
outputAlive = True
while outputAlive and table.data_read():
outputAlive = table.data_write()
# ------------------------------------------ finalize output ---------------------------------------
table.close()
table = damask.Table.from_ang(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.to_ASCII(sys.stdout if name is None else os.path.splitext(name)[0]+'.txt')

View File

@ -39,61 +39,36 @@ for filename in options.filenames:
results = damask.DADF5(filename)
if not results.structured: continue
delta = results.size/results.grid*0.5
x, y, z = np.meshgrid(np.linspace(delta[2],results.size[2]-delta[2],results.grid[2]),
np.linspace(delta[1],results.size[1]-delta[1],results.grid[1]),
np.linspace(delta[0],results.size[0]-delta[0],results.grid[0]),
indexing = 'ij')
coords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)
if results.version_major == 0 and results.version_minor >= 5:
coords = damask.grid_filters.cell_coord0(results.grid,results.size,results.origin)
else:
coords = damask.grid_filters.cell_coord0(results.grid,results.size)
N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
N_digits = 5 # hack to keep test intact
for i,inc in enumerate(results.iter_visible('increments')):
print('Output step {}/{}'.format(i+1,len(results.increments)))
header = '1 header\n'
data = np.array([int(inc[3:]) for j in range(np.product(results.grid))]).reshape([np.product(results.grid),1])
header+= 'inc'
coords = coords.reshape([np.product(results.grid),3])
data = np.concatenate((data,coords),1)
header+=' 1_pos 2_pos 3_pos'
table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})
table.add('pos',coords.reshape((-1,3)))
results.set_visible('materialpoints',False)
results.set_visible('constituents', True)
for label in options.con:
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0,plain=True)
d = np.product(np.shape(array)[1:])
data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
if d>1:
header+= ''.join([' {}_{}'.format(j+1,label) for j in range(d)])
else:
header+=' '+label
if len(x) != 0:
table.add(label,results.read_dataset(x,0,plain=True).reshape((results.grid.prod(),-1)))
results.set_visible('constituents', False)
results.set_visible('materialpoints',True)
for label in options.mat:
x = results.get_dataset_location(label)
if len(x) == 0:
continue
array = results.read_dataset(x,0,plain=True)
d = np.product(np.shape(array)[1:])
data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
if d>1:
header+= ''.join([' {}_{}'.format(j+1,label) for j in range(d)])
else:
header+=' '+label
if len(x) != 0:
table.add(label,results.read_dataset(x,0,plain=True).reshape((results.grid.prod(),-1)))
dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
if not os.path.isdir(dirname):
os.mkdir(dirname,0o755)
file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],
inc[3:].zfill(N_digits))
np.savetxt(os.path.join(dirname,file_out),data,header=header,comments='')
table.to_ASCII(os.path.join(dirname,file_out))

View File

@ -1,147 +0,0 @@
#!/usr/bin/env python3
"""Export cell/point datasets from DADF5 (HDF5) result files to VTK grid files, one file per increment."""
import os
import argparse
import re

import h5py
import numpy as np
import vtk
from vtk.util import numpy_support

import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------
parser = argparse.ArgumentParser()

# ToDo: We need to decide on a way of handling arguments of variable length
# https://stackoverflow.com/questions/15459997/passing-integer-lists-to-python

#parser.add_argument('--version', action='version', version='%(prog)s {}'.format(scriptID))
parser.add_argument('filenames', nargs='+',
                    help='DADF5 files')
parser.add_argument('-d','--dir', dest='dir',default='postProc',metavar='string',
                    help='name of subdirectory relative to the location of the DADF5 file to hold output')
parser.add_argument('--mat', nargs='+',
                    help='labels for materialpoint',dest='mat')
parser.add_argument('--con', nargs='+',
                    help='labels for constituent',dest='con')

options = parser.parse_args()

# default to empty label lists so the export loops below can run unconditionally
if options.mat is None: options.mat=[]
if options.con is None: options.con=[]

# --- loop over input files ------------------------------------------------------------------------

for filename in options.filenames:
  results = damask.DADF5(filename)

  if results.structured:                                                    # for grid solvers use rectilinear grid
    grid = vtk.vtkRectilinearGrid()
    coordArray = [vtk.vtkDoubleArray(),
                  vtk.vtkDoubleArray(),
                  vtk.vtkDoubleArray(),
                 ]

    grid.SetDimensions(*(results.grid+1))                                   # nodes per direction = cells + 1
    for dim in [0,1,2]:
      for c in np.linspace(0,results.size[dim],1+results.grid[dim]):
        coordArray[dim].InsertNextValue(c)

    grid.SetXCoordinates(coordArray[0])
    grid.SetYCoordinates(coordArray[1])
    grid.SetZCoordinates(coordArray[2])
  else:                                                                     # unstructured mesh: geometry stored in HDF5 file
    nodes = vtk.vtkPoints()
    with h5py.File(filename) as f:
      nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True))
      grid = vtk.vtkUnstructuredGrid()
      grid.SetPoints(nodes)
      grid.Allocate(f['/geometry/T_c'].shape[0])
      for i in f['/geometry/T_c']:
        grid.InsertNextCell(vtk.VTK_HEXAHEDRON,8,i-1)                       # i-1: T_c apparently 1-based, VTK ids 0-based — confirm

  # zero-padding width for increment numbers in output file names
  N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1

  for i,inc in enumerate(results.iter_visible('increments')):
    print('Output step {}/{}'.format(i+1,len(results.increments)))
    vtk_data = []

    # constituent (per-phase) datasets
    results.set_visible('materialpoints',False)
    results.set_visible('constituents', True)
    for label in options.con:

      for p in results.iter_visible('con_physics'):
        if p != 'generic':
          for c in results.iter_visible('constituents'):
            x = results.get_dataset_location(label)
            if len(x) == 0:
              continue
            array = results.read_dataset(x,0)
            shape = [array.shape[0],np.product(array.shape[1:])]            # flatten tensor components per cell
            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
            grid.GetCellData().AddArray(vtk_data[-1])
        else:
          x = results.get_dataset_location(label)
          if len(x) == 0:
            continue
          ph_name = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))')   # looking for phase name in dataset name
          array = results.read_dataset(x,0)
          shape = [array.shape[0],np.product(array.shape[1:])]
          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
          dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1])       # removing phase name from generic dataset
          vtk_data[-1].SetName(dset_name)
          grid.GetCellData().AddArray(vtk_data[-1])

    # materialpoint (homogenized) datasets
    results.set_visible('constituents', False)
    results.set_visible('materialpoints',True)
    for label in options.mat:
      for p in results.iter_visible('mat_physics'):
        if p != 'generic':
          for m in results.iter_visible('materialpoints'):
            x = results.get_dataset_location(label)
            if len(x) == 0:
              continue
            array = results.read_dataset(x,0)
            shape = [array.shape[0],np.product(array.shape[1:])]
            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
            grid.GetCellData().AddArray(vtk_data[-1])
        else:
          x = results.get_dataset_location(label)
          if len(x) == 0:
            continue
          array = results.read_dataset(x,0)
          shape = [array.shape[0],np.product(array.shape[1:])]
          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
          vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
          grid.GetCellData().AddArray(vtk_data[-1])

    writer = vtk.vtkXMLRectilinearGridWriter() if results.structured else \
             vtk.vtkXMLUnstructuredGridWriter()

    # 'u_n' presumably nodal displacement — verify against DADF5 layout
    results.set_visible('constituents', False)
    results.set_visible('materialpoints',False)
    x = results.get_dataset_location('u_n')
    vtk_data.append(numpy_support.numpy_to_vtk(num_array=results.read_dataset(x,0),deep=True,array_type=vtk.VTK_DOUBLE))
    vtk_data[-1].SetName('u')
    grid.GetPointData().AddArray(vtk_data[-1])

    dirname  = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
    if not os.path.isdir(dirname):
      os.mkdir(dirname,0o755)
    file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],
                                    inc[3:].zfill(N_digits),
                                    writer.GetDefaultFileExtension())

    writer.SetCompressorTypeToZLib()
    writer.SetDataModeToBinary()
    writer.SetFileName(os.path.join(dirname,file_out))
    writer.SetInputData(grid)
    writer.Write()

View File

@ -1,127 +0,0 @@
#!/usr/bin/env python3
"""Export datasets from DADF5 (HDF5) result files as VTK poly data (one vertex per material point), one file per increment."""
import os
import argparse
import re

import numpy as np
import vtk
from vtk.util import numpy_support

import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------
parser = argparse.ArgumentParser()

# ToDo: We need to decide on a way of handling arguments of variable length
# https://stackoverflow.com/questions/15459997/passing-integer-lists-to-python

#parser.add_argument('--version', action='version', version='%(prog)s {}'.format(scriptID))
parser.add_argument('filenames', nargs='+',
                    help='DADF5 files')
parser.add_argument('-d','--dir', dest='dir',default='postProc',metavar='string',
                    help='name of subdirectory relative to the location of the DADF5 file to hold output')
parser.add_argument('--mat', nargs='+',
                    help='labels for materialpoint',dest='mat')
parser.add_argument('--con', nargs='+',
                    help='labels for constituent',dest='con')

options = parser.parse_args()

# default to empty label lists so the export loops below can run unconditionally
if options.mat is None: options.mat=[]
if options.con is None: options.con=[]

# --- loop over input files ------------------------------------------------------------------------

for filename in options.filenames:
  results = damask.DADF5(filename)

  # one VTK vertex cell per material point position
  Points   = vtk.vtkPoints()
  Vertices = vtk.vtkCellArray()
  for c in results.cell_coordinates():
    pointID = Points.InsertNextPoint(c)
    Vertices.InsertNextCell(1)
    Vertices.InsertCellPoint(pointID)

  Polydata = vtk.vtkPolyData()
  Polydata.SetPoints(Points)
  Polydata.SetVerts(Vertices)
  Polydata.Modified()

  # zero-padding width for increment numbers in output file names
  N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1

  for i,inc in enumerate(results.iter_visible('increments')):
    print('Output step {}/{}'.format(i+1,len(results.increments)))
    vtk_data = []

    # constituent (per-phase) datasets
    results.set_visible('materialpoints',False)
    results.set_visible('constituents', True)
    for label in options.con:
      for p in results.iter_visible('con_physics'):
        if p != 'generic':
          for c in results.iter_visible('constituents'):
            x = results.get_dataset_location(label)
            if len(x) == 0:
              continue
            array = results.read_dataset(x,0)
            shape = [array.shape[0],np.product(array.shape[1:])]            # flatten tensor components per point
            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
            Polydata.GetCellData().AddArray(vtk_data[-1])
        else:
          x = results.get_dataset_location(label)
          if len(x) == 0:
            continue
          ph_name = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))')   # looking for phase name in dataset name
          array = results.read_dataset(x,0)
          shape = [array.shape[0],np.product(array.shape[1:])]
          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
          dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1])       # removing phase name from generic dataset
          vtk_data[-1].SetName(dset_name)
          Polydata.GetCellData().AddArray(vtk_data[-1])

    # materialpoint (homogenized) datasets
    results.set_visible('constituents', False)
    results.set_visible('materialpoints',True)
    for label in options.mat:
      for p in results.iter_visible('mat_physics'):
        if p != 'generic':
          for m in results.iter_visible('materialpoints'):
            x = results.get_dataset_location(label)
            if len(x) == 0:
              continue
            array = results.read_dataset(x,0)
            shape = [array.shape[0],np.product(array.shape[1:])]
            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
            Polydata.GetCellData().AddArray(vtk_data[-1])
        else:
          x = results.get_dataset_location(label)
          if len(x) == 0:
            continue
          array = results.read_dataset(x,0)
          shape = [array.shape[0],np.product(array.shape[1:])]
          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
          vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
          Polydata.GetCellData().AddArray(vtk_data[-1])

    writer = vtk.vtkXMLPolyDataWriter()

    dirname  = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
    if not os.path.isdir(dirname):
      os.mkdir(dirname,0o755)
    file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],
                                    inc[3:].zfill(N_digits),
                                    writer.GetDefaultFileExtension())

    writer.SetCompressorTypeToZLib()
    writer.SetDataModeToBinary()
    writer.SetFileName(os.path.join(dirname,file_out))
    writer.SetInputData(Polydata)
    writer.Write()

View File

@ -1,11 +1,11 @@
#!/usr/bin/env python3
import os
import math
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import scipy.ndimage
import damask
@ -13,78 +13,6 @@ import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
def cell2node(cellData,grid):
    """
    Average cell-centered data onto cell corners (nodes) assuming periodicity.

    cellData is flattened over the first axes and reshaped to grid[::-1] plus a
    trailing component axis; each component is averaged over its 2x2x2 cell
    neighborhood (periodic wrap) and the first layer is re-appended along every
    spatial axis, yielding grid+1 nodes per direction.
    """
    n_components = np.array(cellData.shape[3:]).prod()
    per_cell = cellData.reshape(tuple(grid[::-1])+(n_components,))
    result = 0.0
    for comp in range(n_components):
        # 2x2x2 box average with periodic wrap; origin=-1 centers the result on cell origins
        corner = scipy.ndimage.convolve(per_cell[...,comp],
                                        np.ones((2,2,2))/8.,
                                        mode = 'wrap',
                                        origin = -1,
                                        )
        # close the periodic box: duplicate the first layer along z, y, x
        for axis in range(3):
            first_layer = [slice(None)]*corner.ndim
            first_layer[axis] = slice(0,1)
            corner = np.append(corner,corner[tuple(first_layer)],axis=axis)
        result = corner[...,np.newaxis] if comp == 0 else \
                 np.concatenate((result,corner[...,np.newaxis]),axis=-1)
    return result
#--------------------------------------------------------------------------------------------------
def deformationAvgFFT(F,grid,size,nodal=False,transformed=False):
    """
    Calculate average cell center (or nodal) deformation for deformation gradient field specified in each grid cell.

    The average deformation is x = Favg . X, where Favg is the zero-frequency
    Fourier component of F divided by the number of cells, and X are undeformed
    cell-center (or nodal) coordinates.
    """
    if nodal:
        axes_1d = [np.linspace(0,size[d],1+grid[d]) for d in (2,1,0)]
    else:
        # cell centers: half a cell away from the box faces
        axes_1d = [np.linspace(0.5*size[d]/grid[d],size[d]-0.5*size[d]/grid[d],grid[d]) for d in (2,1,0)]
    mesh = np.meshgrid(*axes_1d,indexing='ij')
    origCoords = np.stack(mesh[::-1],axis=3)                # component order matches original (x,y,z) stacking

    F_hat = F if transformed else np.fft.rfftn(F,axes=(0,1,2))
    Favg  = np.real(F_hat[0,0,0,:,:])/grid.prod()           # zero frequency carries the volume average
    return np.einsum('ml,ijkl->ijkm',Favg,origCoords)       # dX = Favg.X
#--------------------------------------------------------------------------------------------------
def displacementFluctFFT(F,grid,size,nodal=False,transformed=False):
    """
    Calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell.

    Integrates the fluctuating part of F in Fourier space; the zero-frequency
    (average) component is excluded by construction. With nodal=True the cell
    result is interpolated to nodes via cell2node.
    """
    integrator = 0.5j * size / math.pi

    def signed_freq(n):
        # frequencies 0..n//2 positive, the remainder shifted to negative
        idx = np.arange(n)
        return np.where(idx > n//2, idx-n, idx)

    kk, kj, ki = np.meshgrid(signed_freq(grid[2]),
                             signed_freq(grid[1]),
                             np.arange(grid[0]//2+1),       # last axis is the half-spectrum of rfftn
                             indexing = 'ij')
    k_s = np.stack((ki,kj,kk),axis=3)
    k_norm2 = np.einsum('...l,...l',k_s,k_s)
    k_norm2[0,0,0] = 1.0                                    # avoid division by zero; numerator is zero there anyway

#--------------------------------------------------------------------------------------------------
# integration in Fourier space
    F_hat = F if transformed else np.fft.rfftn(F,axes=(0,1,2))
    u_hat = -np.einsum('ijkml,ijkl,l->ijkm',
                       F_hat,
                       k_s,
                       integrator,
                       ) / k_norm2[...,np.newaxis]

#--------------------------------------------------------------------------------------------------
# backtransformation to real space
    u = np.fft.irfftn(u_hat,grid[::-1],axes=(0,1,2))
    return cell2node(u,grid) if nodal else u
def volTetrahedron(coords):
"""
Return the volume of the tetrahedron with given vertices or sides.
@ -133,10 +61,10 @@ def volTetrahedron(coords):
def volumeMismatch(size,F,nodes):
"""
Calculates the volume mismatch
Calculates the volume mismatch.
volume mismatch is defined as the difference between volume of reconstructed
(compatible) cube and determinant of defgrad at the FP
(compatible) cube and determinant of deformation gradient at Fourier point.
"""
coords = np.empty([8,3])
vMismatch = np.empty(grid[::-1])
@ -169,11 +97,11 @@ def volumeMismatch(size,F,nodes):
def shapeMismatch(size,F,nodes,centres):
"""
Routine to calculate the shape mismatch
Routine to calculate the shape mismatch.
shape mismatch is defined as difference between the vectors from the central point to
the corners of reconstructed (compatible) volume element and the vectors calculated by deforming
the initial volume element with the current deformation gradient
the initial volume element with the current deformation gradient.
"""
coordsInitial = np.empty([8,3])
sMismatch = np.empty(grid[::-1])
@ -241,92 +169,29 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.defgrad) != 9:
errors.append('deformation gradient "{}" is not a 3x3 tensor.'.format(options.defgrad))
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos))
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1:
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3:
remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray([options.defgrad,options.pos])
table.data_rewind()
if table.data[:,9:].shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data[:,9:].shape[1]),dtype='f'))) # fill coords up to 3D with zeros
grid,size = damask.util.coordGridAndSize(table.data[:,9:12])
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
F = table.get(options.defgrad).reshape(grid[2],grid[1],grid[0],3,3)
nodes = damask.grid_filters.node_coord(size,F)
# -----------------------------process data and assemble header -------------------------------------
F_fourier = np.fft.rfftn(table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),axes=(0,1,2)) # perform transform only once...
nodes = displacementFluctFFT(F_fourier,grid,size,True,transformed=True)\
+ deformationAvgFFT (F_fourier,grid,size,True,transformed=True)
if options.shape:
table.labels_append(['shapeMismatch({})'.format(options.defgrad)])
centres = displacementFluctFFT(F_fourier,grid,size,False,transformed=True)\
+ deformationAvgFFT (F_fourier,grid,size,False,transformed=True)
centers = damask.grid_filters.cell_coord(size,F)
shapeMismatch = shapeMismatch( size,table.get(options.defgrad).reshape(grid[2],grid[1],grid[0],3,3),nodes,centers)
table.add('shapeMismatch(({}))'.format(options.defgrad),
shapeMismatch.reshape((-1,1)),
scriptID+' '+' '.join(sys.argv[1:]))
if options.volume:
table.labels_append(['volMismatch({})'.format(options.defgrad)])
volumeMismatch = volumeMismatch(size,table.get(options.defgrad).reshape(grid[2],grid[1],grid[0],3,3),nodes)
table.add('volMismatch(({}))'.format(options.defgrad),
volumeMismatch.reshape((-1,1)),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_write()
if options.shape:
shapeMismatch = shapeMismatch( size,table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),nodes,centres)
if options.volume:
volumeMismatch = volumeMismatch(size,table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),nodes)
# ------------------------------------------ output data -------------------------------------------
for i in range(grid[2]):
for j in range(grid[1]):
for k in range(grid[0]):
table.data_read()
if options.shape: table.data_append(shapeMismatch[i,j,k])
if options.volume: table.data_append(volumeMismatch[i,j,k])
table.data_write()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -22,79 +23,26 @@ Add cumulative (sum of first to current row) values for given label(s).
""", version = scriptID)
parser.add_option('-l','--label',
dest='label',
dest='labels',
action = 'extend', metavar = '<string LIST>',
help = 'columns to cumulate')
parser.add_option('-p','--product',
dest='product', action = 'store_true',
help = 'product of values instead of sum')
(options,filenames) = parser.parse_args()
if options.label is None:
parser.error('no data column(s) specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.labels is None:
parser.error('no data column(s) specified.')
for name in filenames:
try:
table = damask.ASCIItable(name = name, buffered = False)
except IOError:
continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
table.add('cum_{}({})'.format('prod' if options.product else 'sum',label),
np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
columns = []
dims = []
how = 'prod' if options.product else 'sum'
for what in options.label:
dim = table.label_dimension(what)
if dim < 0: remarks.append('column {} not found...'.format(what))
else:
dims.append(dim)
columns.append(table.label_index(what))
table.labels_append('cum_{}({})'.format(how,what) if dim == 1 else
['{}_cum_{}({})'.format(i+1,how,what) for i in range(dim)] ) # extend ASCII header with new labels
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
mask = []
for col,dim in zip(columns,dims): mask += range(col,col+dim) # isolate data columns to cumulate
cumulated = np.ones(len(mask)) if options.product else np.zeros(len(mask)) # prepare output field
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
if options.product:
for i,col in enumerate(mask):
cumulated[i] *= float(table.data[col]) # cumulate values (multiplication)
else:
for i,col in enumerate(mask):
cumulated[i] += float(table.data[col]) # cumulate values (addition)
table.data_append(cumulated)
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -30,7 +31,7 @@ def derivative(coordinates,what):
(coordinates[0] - coordinates[1])
result[-1,:] = (what[-1,:] - what[-2,:]) / \
(coordinates[-1] - coordinates[-2])
return result
@ -48,78 +49,26 @@ parser.add_option('-c','--coordinates',
type = 'string', metavar='string',
help = 'heading of coordinate column')
parser.add_option('-l','--label',
dest = 'label',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'heading of column(s) to differentiate')
(options,filenames) = parser.parse_args()
if options.coordinates is None:
parser.error('no coordinate column specified.')
if options.label is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.coordinates is None:
parser.error('no coordinate column specified.')
if options.labels is None:
parser.error('no data column specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
table.add('d({})/d({})'.format(label,options.coordinates),
derivative(table.get(options.coordinates),table.get(label)),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
columns = []
dims = []
if table.label_dimension(options.coordinates) != 1:
errors.append('coordinate column {} is not scalar.'.format(options.coordinates))
for what in options.label:
dim = table.label_dimension(what)
if dim < 0: remarks.append('column {} not found...'.format(what))
else:
dims.append(dim)
columns.append(table.label_index(what))
table.labels_append('d({})/d({})'.format(what,options.coordinates) if dim == 1 else
['{}_d({})/d({})'.format(i+1,what,options.coordinates) for i in range(dim)] ) # extend ASCII header with new labels
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
table.data_readArray()
mask = []
for col,dim in zip(columns,dims): mask += range(col,col+dim) # isolate data columns to differentiate
differentiated = derivative(table.data[:,table.label_index(options.coordinates)].reshape((len(table.data),1)),
table.data[:,mask]) # calculate numerical derivative
table.data = np.hstack((table.data,differentiated))
# ------------------------------------------ output result -----------------------------------------
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import itertools
@ -121,13 +122,14 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if options.type is None:
parser.error('no feature type selected.')
parser.error('no feature type selected.')
if not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['names'],features))))):
parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
if 'biplane' in options.type and 'boundary' in options.type:
parser.error('only one from aliases "biplane" and "boundary" possible.')
parser.error('only one from aliases "biplane" and "boundary" possible.')
feature_list = []
for i,feature in enumerate(features):
@ -137,104 +139,49 @@ for i,feature in enumerate(features):
feature_list.append(i) # remember valid features
break
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name, buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos))
table.head_read()
neighborhood = neighborhoods[options.neighborhood]
diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')
microstructure = periodic_3Dpad(table.get(options.id).astype('i').reshape(grid,order='F'))
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if not 3 >= table.label_dimension(options.pos) >= 1:
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
else: idCol = table.label_index(options.id)
if remarks != []:
damask.util.croak(remarks)
remarks = []
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for feature in feature_list:
table.labels_append('ED_{}({})'.format(features[feature]['names'][0],options.id)) # extend ASCII header with new labels
table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid))))
else: remarks.append('grid: {}x{}x{}'.format(*grid))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
neighborhood = neighborhoods[options.neighborhood]
diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')
microstructure = periodic_3Dpad(table.data[:,idCol].astype('i').reshape(grid,order='F'))
for i,p in enumerate(neighborhood):
stencil = np.zeros((3,3,3),'i')
stencil[1,1,1] = -1
stencil[p[0]+1,
p[1]+1,
p[2]+1] = 1
diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil) # compare ID at each point...
for i,p in enumerate(neighborhood):
stencil = np.zeros((3,3,3),'i')
stencil[1,1,1] = -1
stencil[p[0]+1,
p[1]+1,
p[2]+1] = 1
diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil) # compare ID at each point...
# ...to every one in the specified neighborhood
# for same IDs at both locations ==> 0
diffToNeighbor = np.sort(diffToNeighbor) # sort diff such that number of changes in diff (steps)...
diffToNeighbor = np.sort(diffToNeighbor) # sort diff such that number of changes in diff (steps)...
# ...reflects number of unique neighbors
uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
for i in range(1,len(neighborhood)): # check remaining points in neighborhood
uniques += np.where(np.logical_and(
diffToNeighbor[1:-1,1:-1,1:-1,i] != 0, # not myself?
diffToNeighbor[1:-1,1:-1,1:-1,i] != diffToNeighbor[1:-1,1:-1,1:-1,i-1],
), # flip of ID difference detected?
1,0) # count that flip
for i in range(1,len(neighborhood)): # check remaining points in neighborhood
uniques += np.where(np.logical_and(
diffToNeighbor[1:-1,1:-1,1:-1,i] != 0, # not myself?
diffToNeighbor[1:-1,1:-1,1:-1,i] != diffToNeighbor[1:-1,1:-1,1:-1,i-1],
), # flip of ID difference detected?
1,0) # count that flip
distance = np.ones((len(feature_list),grid[0],grid[1],grid[2]),'d')
distance = np.ones((len(feature_list),grid[0],grid[1],grid[2]),'d')
for i,feature_id in enumerate(feature_list):
distance[i,:,:,:] = np.where(uniques >= features[feature_id]['aliens'],0.0,1.0) # seed with 0.0 when enough unique neighbor IDs are present
distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3
for i,feature_id in enumerate(feature_list):
distance[i,:,:,:] = np.where(uniques >= features[feature_id]['aliens'],0.0,1.0) # seed with 0.0 when enough unique neighbor IDs are present
distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3
distance = distance.reshape([len(feature_list),grid.prod(),1],order='F')
for i in range(len(feature_list)):
stack.append(distance[i,:])
distance = distance.reshape([len(feature_list),grid.prod(),1],order='F')
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
for i,feature in enumerate(feature_list):
table.add('ED_{}({})'.format(features[feature]['names'][0],options.id),
distance[i,:],
scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,9 +2,9 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
from scipy import ndimage
import damask
@ -30,7 +30,7 @@ parser.add_option('-p','--pos','--periodiccellcenter',
type = 'string', metavar = 'string',
help = 'label of coordinates [%default]')
parser.add_option('-s','--scalar',
dest = 'scalar',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'label(s) of scalar field values')
parser.add_option('-o','--order',
@ -56,78 +56,21 @@ parser.set_defaults(pos = 'pos',
)
(options,filenames) = parser.parse_args()
if options.scalar is None:
parser.error('no data column specified.')
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.labels is None: parser.error('no data column specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.grid_filters.coord0_check(table.get(options.pos))
table.head_read()
for label in options.labels:
table.add('Gauss{}({})'.format(options.sigma,label),
ndimage.filters.gaussian_filter(table.get(label).reshape((-1)),
options.sigma,options.order,
mode = 'wrap' if options.periodic else 'nearest'),
scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ sanity checks ----------------------------------------
items = {
'scalar': {'dim': 1, 'shape': [1], 'labels':options.scalar, 'active':[], 'column': []},
}
errors = []
remarks = []
column = {}
if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
else: colCoord = table.label_index(options.pos)
for type, data in items.items():
for what in (data['labels'] if data['labels'] is not None else []):
dim = table.label_dimension(what)
if dim != data['dim']: remarks.append('column {} is not a {}.'.format(what,type))
else:
items[type]['active'].append(what)
items[type]['column'].append(table.label_index(what))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for type, data in items.items():
for label in data['active']:
table.labels_append(['Gauss{}({})'.format(options.sigma,label)]) # extend ASCII header with new labels
table.head_write()
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
# ------------------------------------------ process value field -----------------------------------
stack = [table.data]
for type, data in items.items():
for i,label in enumerate(data['active']):
stack.append(ndimage.filters.gaussian_filter(table.data[:,data['column'][i]],
options.sigma,options.order,
mode = 'wrap' if options.periodic else 'nearest'
).reshape([table.data.shape[0],1])
)
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -43,54 +44,25 @@ parser.set_defaults(pole = (0.0,0.0,1.0),
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
# damask.Orientation requires Bravais lattice, but we are only interested in symmetry
symmetry2lattice={'cubic':'bcc','hexagonal':'hex','tetragonal':'bct'}
symmetry2lattice={'cubic':'fcc','hexagonal':'hex','tetragonal':'bct'}
lattice = symmetry2lattice[options.symmetry]
pole = np.array(options.pole)
pole /= np.linalg.norm(pole)
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
if not table.label_dimension(options.quaternion) == 4:
damask.util.croak('input {} does not have dimension 4.'.format(options.quaternion))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
column = table.label_index(options.quaternion)
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(['{}_IPF_{:g}{:g}{:g}_{sym}'.format(i+1,*options.pole,sym = options.symmetry.lower()) for i in range(3)])
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
o = damask.Orientation(np.array(list(map(float,table.data[column:column+4]))),
lattice = lattice).reduced()
table.data_append(o.IPFcolor(pole))
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
orientation = table.get(options.quaternion)
color = np.empty((orientation.shape[0],3))
for i,o in enumerate(orientation):
color[i] = damask.Orientation(o,lattice = lattice).IPFcolor(pole)
table.add('IPF_{:g}{:g}{:g}_{sym}'.format(*options.pole,sym = options.symmetry.lower()),
color,
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -42,7 +43,7 @@ parser.add_option('-n','--norm',
type = 'choice', choices = normChoices, metavar='string',
help = 'type of element-wise p-norm [frobenius] {%s}'%(','.join(map(str,normChoices))))
parser.add_option('-l','--label',
dest = 'label',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'heading of column(s) to calculate norm of')
@ -50,62 +51,25 @@ parser.set_defaults(norm = 'frobenius',
)
(options,filenames) = parser.parse_args()
if options.norm.lower() not in normChoices:
parser.error('invalid norm ({}) specified.'.format(options.norm))
if options.label is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.norm.lower() not in normChoices:
parser.error('invalid norm ({}) specified.'.format(options.norm))
if options.labels is None:
parser.error('no data column specified.')
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
data = table.get(label)
data_norm = np.empty((data.shape[0],1))
for i,d in enumerate(data):
data_norm[i] = norm(options.norm.capitalize(),d)
table.head_read()
table.add('norm{}({})'.format(options.norm.capitalize(),label),
data_norm,
scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
columns = []
dims = []
for what in options.label:
dim = table.label_dimension(what)
if dim < 0: remarks.append('column {} not found...'.format(what))
else:
dims.append(dim)
columns.append(table.label_index(what))
table.labels_append('norm{}({})'.format(options.norm.capitalize(),what)) # extend ASCII header with new labels
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
for column,dim in zip(columns,dims):
table.data_append(norm(options.norm.capitalize(),
map(float,table.data[column:column+dim])))
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@ -42,52 +43,23 @@ parser.set_defaults(pole = (1.0,0.0,0.0),
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
pole = np.array(options.pole)
pole /= np.linalg.norm(pole)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
if not table.label_dimension(options.quaternion) == 4:
damask.util.croak('input {} does not have dimension 4.'.format(options.quaternion))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
column = table.label_index(options.quaternion)
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(['{}_pole_{}{}{}'.format(i+1,*options.pole) for i in range(2)])
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
o = damask.Rotation(np.array(list(map(float,table.data[column:column+4]))))
rotatedPole = o*pole # rotate pole according to crystal orientation
(x,y) = rotatedPole[0:2]/(1.+abs(pole[2])) # stereographic projection
table.data_append([np.sqrt(x*x+y*y),np.arctan2(y,x)] if options.polar else [x,y]) # cartesian coordinates
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
orientation = table.get(options.quaternion)
poles = np.empty((orientation.shape[0],2))
for i,o in enumerate(orientation):
rotatedPole = damask.Rotation(o)*pole # rotate pole according to crystal orientation
(x,y) = rotatedPole[0:2]/(1.+abs(pole[2])) # stereographic projection
poles[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if options.polar else [x,y] # cartesian coordinates
table.add('pole_{}{}{}'.format(*options.pole),
poles,
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@ -65,7 +65,8 @@ for name in filenames:
outname = os.path.join(os.path.dirname(name),
prefix+os.path.basename(name)) if name else name,
buffered = False)
except: continue
except IOError:
continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
@ -95,7 +96,7 @@ for name in filenames:
table.data_readArray()
if (options.grid is None or options.size is None):
grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.data[:,table.label_indexrange(options.pos)])
else:
grid = np.array(options.grid,'i')
size = np.array(options.size,'d')

View File

@ -55,7 +55,8 @@ for name in filenames:
outname = os.path.join(os.path.dirname(name),
prefix+os.path.basename(name)) if name else name,
buffered = False)
except: continue
except IOError:
continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
@ -82,7 +83,7 @@ for name in filenames:
table.data_readArray(options.pos)
table.data_rewind()
grid,size = damask.util.coordGridAndSize(table.data)
grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.data)
packing = np.array(options.packing,'i')
outSize = grid*packing

View File

@ -2,10 +2,10 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import vtk
import numpy as np
import damask
@ -33,49 +33,20 @@ parser.set_defaults(pos = 'pos',
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
errors = []
remarks = []
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
# ------------------------------------------ process data ---------------------------------------
table.data_readArray(options.pos)
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data.shape[1]),dtype='f'))) # fill coords up to 3D with zeros
Polydata = vtk.vtkPolyData()
Points = vtk.vtkPoints()
Vertices = vtk.vtkCellArray()
for p in table.data:
for p in table.get(options.pos):
pointID = Points.InsertNextPoint(p)
Vertices.InsertNextCell(1)
Vertices.InsertCellPoint(pointID)
@ -104,5 +75,3 @@ for name in filenames:
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString())
table.close()

View File

@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import vtk
@ -40,48 +41,14 @@ parser.set_defaults(mode = 'cell',
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = True,
readonly = True,
)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
remarks = []
errors = []
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray(options.pos)
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data.shape[1]),dtype='f'))) # fill coords up to 3D with zeros
coords = [np.unique(table.data[:,i]) for i in range(3)]
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
coords = [np.unique(table.get(options.pos)[:,i]) for i in range(3)]
if options.mode == 'cell':
coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \
[coords[i][j-1] + coords[i][j] for j in range(1,len(coords[i]))] + \
@ -90,13 +57,6 @@ for name in filenames:
grid = np.array(list(map(len,coords)),'i')
N = grid.prod() if options.mode == 'point' else (grid-1).prod()
if N != len(table.data):
errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*(grid - (options.mode == 'cell')) ))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ---------------------------------------
rGrid = vtk.vtkRectilinearGrid()
@ -135,5 +95,3 @@ for name in filenames:
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString())
table.close()

View File

@ -6,7 +6,6 @@ do
vtk_addPointCloudData $seeds \
--data microstructure,weight \
--inplace \
--vtk ${seeds%.*}.vtp \
done

View File

@ -40,7 +40,7 @@ class DADF5():
self.version_major = f.attrs['DADF5-major']
self.version_minor = f.attrs['DADF5-minor']
if self.version_major != 0 or not 2 <= self.version_minor <= 4:
if self.version_major != 0 or not 2 <= self.version_minor <= 5:
raise TypeError('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version']))
self.structured = 'grid' in f['geometry'].attrs.keys()
@ -48,6 +48,9 @@ class DADF5():
if self.structured:
self.grid = f['geometry'].attrs['grid']
self.size = f['geometry'].attrs['size']
if self.version_major == 0 and self.version_minor >= 5:
self.origin = f['geometry'].attrs['origin']
r=re.compile('inc[0-9]+')
increments_unsorted = {int(i[3:]):i for i in f.keys() if r.match(i)}

View File

@ -97,6 +97,8 @@ def cell_coord0(grid,size,origin=np.zeros(3)):
number of grid points.
size : numpy.ndarray
physical size of the periodic field.
origin : numpy.ndarray, optional
physical origin of the periodic field. Default is [0.0,0.0,0.0].
"""
start = origin + size/grid*.5
@ -149,6 +151,36 @@ def cell_displacement_avg(size,F):
F_avg = np.average(F,axis=(0,1,2))
return np.einsum('ml,ijkl->ijkm',F_avg-np.eye(3),cell_coord0(F.shape[:3][::-1],size))
def cell_displacement(size,F):
"""
Cell center displacement field from deformation gradient field.
Parameters
----------
size : numpy.ndarray
physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
"""
return cell_displacement_avg(size,F) + cell_displacement_fluct(size,F)
def cell_coord(size,F,origin=np.zeros(3)):
"""
Cell center positions.
Parameters
----------
size : numpy.ndarray
physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
origin : numpy.ndarray, optional
physical origin of the periodic field. Default is [0.0,0.0,0.0].
"""
return cell_coord0(F.shape[:3][::-1],size,origin) + cell_displacement(size,F)
def cell_coord0_2_DNA(coord0,ordered=True):
"""
Return grid 'DNA', i.e. grid, size, and origin from array of cell positions.
@ -185,6 +217,19 @@ def cell_coord0_2_DNA(coord0,ordered=True):
return (grid,size,origin)
def coord0_check(coord0):
"""
Check whether coordinates lie on a regular grid.
Parameters
----------
coord0 : numpy.ndarray
array of undeformed cell coordinates.
"""
cell_coord0_2_DNA(coord0,ordered=True)
def node_coord0(grid,size,origin=np.zeros(3)):
"""
@ -196,6 +241,8 @@ def node_coord0(grid,size,origin=np.zeros(3)):
number of grid points.
size : numpy.ndarray
physical size of the periodic field.
origin : numpy.ndarray, optional
physical origin of the periodic field. Default is [0.0,0.0,0.0].
"""
x, y, z = np.meshgrid(np.linspace(origin[2],size[2]+origin[2],1+grid[2]),
@ -234,9 +281,38 @@ def node_displacement_avg(size,F):
F_avg = np.average(F,axis=(0,1,2))
return np.einsum('ml,ijkl->ijkm',F_avg-np.eye(3),node_coord0(F.shape[:3][::-1],size))
def node_displacement(size,F):
"""
Nodal displacement field from deformation gradient field.
Parameters
----------
size : numpy.ndarray
physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
"""
return node_displacement_avg(size,F) + node_displacement_fluct(size,F)
def node_coord(size,F,origin=np.zeros(3)):
"""
Nodal positions.
Parameters
----------
size : numpy.ndarray
physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
origin : numpy.ndarray, optional
physical origin of the periodic field. Default is [0.0,0.0,0.0].
"""
return node_coord0(F.shape[:3][::-1],size,origin) + node_displacement(size,F)
def cell_2_node(cell_data):
"""Interpolate cell data to nodal data."""
"""Interpolate periodic cell data to nodal data."""
n = ( cell_data + np.roll(cell_data,1,(0,1,2))
+ np.roll(cell_data,1,(0,)) + np.roll(cell_data,1,(1,)) + np.roll(cell_data,1,(2,))
+ np.roll(cell_data,1,(0,1)) + np.roll(cell_data,1,(1,2)) + np.roll(cell_data,1,(2,0)))*0.125
@ -244,7 +320,7 @@ def cell_2_node(cell_data):
return np.pad(n,((0,1),(0,1),(0,1))+((0,0),)*len(cell_data.shape[3:]),mode='wrap')
def node_2_cell(node_data):
"""Interpolate nodal data to cell data."""
"""Interpolate periodic nodal data to cell data."""
c = ( node_data + np.roll(node_data,1,(0,1,2))
+ np.roll(node_data,1,(0,)) + np.roll(node_data,1,(1,)) + np.roll(node_data,1,(2,))
+ np.roll(node_data,1,(0,1)) + np.roll(node_data,1,(1,2)) + np.roll(node_data,1,(2,0)))*0.125

View File

@ -3,6 +3,8 @@ import re
import pandas as pd
import numpy as np
from . import version
class Table():
"""Store spreadsheet-like data."""
@ -20,7 +22,7 @@ class Table():
Additional, human-readable information.
"""
self.comments = [] if comments is None else [c for c in comments]
self.comments = ['table.py v {}'.format(version)] if not comments else [c for c in comments]
self.data = pd.DataFrame(data=data)
self.shapes = shapes
self.__label_condensed()
@ -69,13 +71,16 @@ class Table():
f = open(fname)
except TypeError:
f = fname
f.seek(0)
header,keyword = f.readline().split()
if keyword == 'header':
header = int(header)
else:
raise Exception
comments = [f.readline()[:-1] for i in range(1,header)]
comments = ['table.py:from_ASCII v {}'.format(version)]
comments+= [f.readline()[:-1] for i in range(1,header)]
labels = f.readline().split()
shapes = {}
@ -95,6 +100,47 @@ class Table():
return Table(data,shapes,comments)
@staticmethod
def from_ang(fname):
"""
Create table from TSL ang file.
A valid TSL ang file needs to contains the following columns:
* Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
* Spatial position in meters, 2 floats, label 'pos'.
* Image quality, 1 float, label 'IQ'.
* Confidence index, 1 float, label 'CI'.
* Phase ID, 1 int, label 'ID'.
* SEM signal, 1 float, label 'intensity'.
* Fit, 1 float, label 'fit'.
Parameters
----------
fname : file, str, or pathlib.Path
Filename or file for reading.
"""
shapes = {'eu':(3,), 'pos':(2,),
'IQ':(1,), 'CI':(1,), 'ID':(1,), 'intensity':(1,), 'fit':(1,)}
try:
f = open(fname)
except TypeError:
f = fname
f.seek(0)
content = f.readlines()
comments = ['table.py:from_ang v {}'.format(version)]
for line in content:
if line.startswith('#'):
comments.append(line.strip())
else:
break
data = np.loadtxt(content)
return Table(data,shapes,comments)
@property
def labels(self):
return list(self.shapes.keys())

View File

@ -7,9 +7,6 @@ from optparse import Option
from queue import Queue
from threading import Thread
import numpy as np
class bcolors:
"""
ASCII Colors (Blender code).
@ -64,19 +61,6 @@ def report(who = None,
croak( (emph(who)+': ' if who is not None else '') + (what if what is not None else '') + '\n' )
# -----------------------------
def report_geom(info,
what = ['grid','size','origin','homogenization','microstructures']):
"""Reports (selected) geometry information."""
output = {
'grid' : 'grid a b c: {}'.format(' x '.join(list(map(str,info['grid' ])))),
'size' : 'size x y z: {}'.format(' x '.join(list(map(str,info['size' ])))),
'origin' : 'origin x y z: {}'.format(' : '.join(list(map(str,info['origin'])))),
'homogenization' : 'homogenization: {}'.format(info['homogenization']),
'microstructures' : 'microstructures: {}'.format(info['microstructures']),
}
for item in what: croak(output[item.lower()])
# -----------------------------
def emph(what):
"""Formats string with emphasis."""
@ -119,30 +103,6 @@ def execute(cmd,
if process.returncode != 0: raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode))
return out,error
def coordGridAndSize(coordinates):
"""Determines grid count and overall physical size along each dimension of an ordered array of coordinates."""
dim = coordinates.shape[1]
coords = [np.unique(coordinates[:,i]) for i in range(dim)]
mincorner = np.array(list(map(min,coords)))
maxcorner = np.array(list(map(max,coords)))
grid = np.array(list(map(len,coords)),'i')
size = grid/np.maximum(np.ones(dim,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones
delta = size/grid
N = grid.prod()
if N != len(coordinates):
raise ValueError('Data count {} does not match grid {}.'.format(len(coordinates),' x '.join(map(repr,grid))))
if np.any(np.abs(np.log10((coords[0][1:]-coords[0][:-1])/delta[0])) > 0.01) \
or np.any(np.abs(np.log10((coords[1][1:]-coords[1][:-1])/delta[1])) > 0.01):
raise ValueError('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
if dim==3 and np.any(np.abs(np.log10((coords[2][1:]-coords[2][:-1])/delta[2])) > 0.01):
raise ValueError('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
return grid,size
# -----------------------------
class extendableOption(Option):
"""

View File

@ -0,0 +1,138 @@
# TEM_PIXperUM 1.000000
# x-star 240.000000
# y-star 240.000000
# z-star 240.000000
# WorkingDistance 20.000000
#
# Phase 1
# MaterialName Iron(Alpha)
# Formula
# Info
# Symmetry 43
# LatticeConstants 2.870 2.870 2.870 90.000 90.000 90.000
# NumberFamilies 100
# hklFamilies 9223440 0 2 32763 0.000000 32763
# hklFamilies 0 0 0 9218712 0.000000 9218712
# hklFamilies 0 0 3801155 0 0.000000 0
# hklFamilies 5570652 6619251 7536754 -1203738484 0.000000 -1203738484
# hklFamilies 7143516 5111900 7864421 32763 0.000000 32763
# hklFamilies 6488180 7274604 6553717 9220480 0.000000 9220480
# hklFamilies 3145820 2949169 3145777 0 0.000000 0
# hklFamilies 3014704 7209057 103 9220488 0.000000 9220488
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9220032 0.000000 9220032
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 -1203728363 0.000000 -1203728363
# hklFamilies 0 0 0 32763 0.000000 32763
# hklFamilies 0 0 0 9218628 0.000000 9218628
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9218504 0.000000 9218504
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9219904 0.000000 9219904
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 0 -0.000046 0
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 256 0.000000 256
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 -1203753636 0.000000 -1203753636
# hklFamilies 0 0 0 32763 0.000000 32763
# hklFamilies 0 0 0 9220576 0.000000 9220576
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9218736 0.000000 9218736
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 103219574 0.000000 103219574
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9220576 0.000000 9220576
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9220692 0.000000 9220692
# hklFamilies 1434293657 0 0 0 0.000000 0
# hklFamilies 0 0 0 9218584 0.000000 9218584
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 9219976 0.000000 9219976
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 0 0 256 0.000000 256
# hklFamilies 0 0 69473872 0 0.000000 0
# hklFamilies 0 1889785611 -1546188227 -1203753636 -0.000046 -1203753636
# hklFamilies 9224144 0 1434294456 32763 0.000000 32763
# hklFamilies 0 9224160 0 9220672 0.000000 9220672
# hklFamilies -1168390977 32763 851982 0 0.000000 0
# hklFamilies 0 304 0 9218816 0.000000 9218816
# hklFamilies 27030208 0 1434297593 0 0.000000 0
# hklFamilies 0 9224160 0 101654020 0.000000 101654020
# hklFamilies 9224064 0 0 0 0.000000 0
# hklFamilies 0 25563456 0 9220672 0.000000 9220672
# hklFamilies 9224544 0 25559040 0 0.000000 0
# hklFamilies 0 25559788 0 9220788 0.000000 9220788
# hklFamilies 176 0 304 24 0.000000 24
# hklFamilies 0 25562304 0 4 0.000000 4
# hklFamilies 9224208 0 0 0 0.000000 0
# hklFamilies 0 281 0 9220032 0.000000 9220032
# hklFamilies 0 0 0 0 0.000000 0
# hklFamilies 0 -1168390977 32763 9220660 0.000000 9220660
# hklFamilies 21 0 -1168390977 8 0.000000 8
# hklFamilies 32763 2490388 0 24 0.000000 24
# hklFamilies 48 0 69650048 25 0.000000 25
# hklFamilies 0 -1216995621 32763 65535 -0.000046 65535
# hklFamilies 0 0 25562688 1 0.000000 1
# hklFamilies 0 0 21776 0 -0.000058 0
# hklFamilies 25562688 0 25559724 0 0.000000 0
# hklFamilies 0 25559040 0 1179652 0.000000 1179652
# hklFamilies 25559724 0 25562304 32763 0.000000 32763
# hklFamilies 0 48 0 9219904 0.000000 9219904
# hklFamilies 25562304 0 28 0 0.000000 0
# hklFamilies 0 0 0 8781958 0.000000 8781958
# hklFamilies 31 0 0 0 0.000000 0
# hklFamilies 0 0 0 103304392 0.000000 103304392
# hklFamilies 3 0 48 0 0.000000 0
# hklFamilies 0 9224505 0 103219694 -0.000046 103219694
# hklFamilies 27000832 0 -1168393705 0 0.000000 0
# hklFamilies 32763 25559040 0 9220192 0.000000 9220192
# hklFamilies 0 32763 31 0 0.000000 0
# hklFamilies 0 0 0 9219872 0.000000 9219872
# hklFamilies 69729712 0 9224640 0 0.000000 0
# hklFamilies 0 69729904 0 1397706823 0.000000 1397706823
# hklFamilies 69911504 0 0 59 0.000000 59
# hklFamilies 0 27007968 0 103219200 0.000000 103219200
# hklFamilies 0 0 -1216843775 0 0.000000 0
# hklFamilies 32763 69911504 0 0 0.000000 0
# hklFamilies -1168296496 32763 9225328 0 0.000000 0
# hklFamilies 0 1434343267 0 9632160 0.000000 9632160
# hklFamilies 69908840 0 -1216995621 0 0.000000 0
# hklFamilies 32763 256 0 9632112 0.000000 9632112
# hklFamilies 0 0 399376220 0 0.000000 0
# hklFamilies 21776 1966087 4456474 262148 0.000000 262148
# hklFamilies 9224704 0 1434198234 0 0.000000 0
# hklFamilies 0 0 0 9704044 0.000000 9704044
# hklFamilies -1168373699 32763 1 0 0.000000 0
# hklFamilies 0 69911504 0 94961568 -0.000046 94961568
# hklFamilies 1 0 69911504 0 0.000000 0
# hklFamilies 0 10 0 9220016 0.000000 9220016
# hklFamilies -1 0 27030208 0 0.000000 0
# hklFamilies 0 1434488087 18 9219992 -0.000046 9219992
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# ElasticConstants 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
# Categories1 1 1 1 1
#
# GRID: SqrGrid
# XSTEP: 0.050000
# YSTEP: 0.050000
# NCOLS_ODD: 2
# NCOLS_EVEN: 2
# NROWS: 2
#
# OPERATOR:
#
# SAMPLEID:
#
# SCANID:
#
0.0 0.0 0.0 0.00 0.00 60.0 20.0 1 2.0 1.5
0.0 2.0 0.0 0.05 0.00 60.0 20.0 1 2.0 1.5
0.0 2.0 0.0 0.00 0.05 60.0 20.0 1 2.0 1.5
0.0 0.0 1.0 0.05 0.05 60.0 20.0 1 2.0 1.5

View File

@ -47,6 +47,17 @@ class TestTable:
new = Table.from_ASCII(f)
assert all(default.data==new.data)
def test_read_ang_str(self,reference_dir):
new = Table.from_ang(os.path.join(reference_dir,'simple.ang'))
assert new.data.shape == (4,10) and \
new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']
def test_read_ang_file(self,reference_dir):
f = open(os.path.join(reference_dir,'simple.ang'))
new = Table.from_ang(f)
assert new.data.shape == (4,10) and \
new.labels == ['eu', 'pos', 'IQ', 'CI', 'ID', 'intensity', 'fit']
@pytest.mark.parametrize('fname',['datatype-mix.txt','whitespace-mix.txt'])
def test_read_strange(self,reference_dir,fname):
with open(os.path.join(reference_dir,fname)) as f:

View File

@ -30,11 +30,36 @@ class TestGridFilters:
grid = np.random.randint(8,32,(3))
size = np.random.random(3)
origin = np.random.random(3)
coord0 = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode)) # noqa
_grid,_size,_origin = eval('grid_filters.{}_coord0_2_DNA(coord0.reshape((-1,3)))'.format(mode))
assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin)
def test_displacement_fluct_equivalence(self):
"""Ensure that fluctuations are periodic."""
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3))
assert np.allclose(grid_filters.node_displacement_fluct(size,F),
grid_filters.cell_2_node(grid_filters.cell_displacement_fluct(size,F)))
def test_interpolation_nonperiodic(self):
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3))
assert np.allclose(grid_filters.node_coord(size,F) [1:-1,1:-1,1:-1],grid_filters.cell_2_node(
grid_filters.cell_coord(size,F))[1:-1,1:-1,1:-1])
@pytest.mark.parametrize('mode',[('cell'),('node')])
def test_coord0_origin(self,mode):
origin= np.random.random(3)
size = np.random.random(3) # noqa
grid = np.random.randint(8,32,(3))
shifted = eval('grid_filters.{}_coord0(grid,size,origin)'.format(mode))
unshifted = eval('grid_filters.{}_coord0(grid,size)'.format(mode))
if mode == 'cell':
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid[::-1]) +(3,)))
elif mode == 'node':
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid[::-1]+1)+(3,)))
@pytest.mark.parametrize('mode',[('cell'),('node')])
def test_displacement_avg_vanishes(self,mode):

View File

@ -87,10 +87,8 @@ subroutine CPFEM_initAll(el,ip)
call math_init
call rotations_init
call FE_init
#ifdef DAMASK_HDF5
call HDF5_utilities_init
call results_init
#endif
call mesh_init(ip, el)
call lattice_init
call material_init
@ -374,7 +372,6 @@ subroutine CPFEM_results(inc,time)
integer(pInt), intent(in) :: inc
real(pReal), intent(in) :: time
#ifdef DAMASK_HDF5
call results_openJobFile
call results_addIncrement(inc,time)
call constitutive_results
@ -382,7 +379,6 @@ subroutine CPFEM_results(inc,time)
call homogenization_results
call results_removeLink('current') ! ToDo: put this into closeJobFile
call results_closeJobFile
#endif
end subroutine CPFEM_results

View File

@ -14,10 +14,10 @@ module CPFEM2
use material
use lattice
use IO
use HDF5
use DAMASK_interface
use results
use discretization
use HDF5
use HDF5_utilities
use homogenization
use constitutive

View File

@ -5,9 +5,7 @@
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!--------------------------------------------------------------------------------------------------
module HDF5_utilities
#if defined(PETSc) || defined(DAMASK_HDF5)
use HDF5
#endif
#ifdef PETSc
use PETSC
#endif
@ -20,7 +18,6 @@ module HDF5_utilities
implicit none
public
#if defined(PETSc) || defined(DAMASK_HDF5)
!--------------------------------------------------------------------------------------------------
!> @brief reads integer or float data of defined shape from file ! ToDo: order of arguments wrong
!> @details for parallel IO, all dimension except for the last need to match
@ -279,8 +276,8 @@ logical function HDF5_objectExists(loc_id,path)
integer(HID_T), intent(in) :: loc_id
character(len=*), intent(in), optional :: path
integer :: hdferr
character(len=256) :: p
integer :: hdferr
character(len=pStringLen) :: p
if (present(path)) then
p = trim(path)
@ -308,10 +305,10 @@ subroutine HDF5_addAttribute_str(loc_id,attrLabel,attrValue,path)
character(len=*), intent(in) :: attrLabel, attrValue
character(len=*), intent(in), optional :: path
integer :: hdferr
integer(HID_T) :: attr_id, space_id, type_id
logical :: attrExists
character(len=256) :: p
integer :: hdferr
integer(HID_T) :: attr_id, space_id, type_id
logical :: attrExists
character(len=pStringLen) :: p
if (present(path)) then
p = trim(path)
@ -355,10 +352,10 @@ subroutine HDF5_addAttribute_int(loc_id,attrLabel,attrValue,path)
integer, intent(in) :: attrValue
character(len=*), intent(in), optional :: path
integer :: hdferr
integer(HID_T) :: attr_id, space_id
logical :: attrExists
character(len=256) :: p
integer :: hdferr
integer(HID_T) :: attr_id, space_id
logical :: attrExists
character(len=pStringLen) :: p
if (present(path)) then
p = trim(path)
@ -396,10 +393,10 @@ subroutine HDF5_addAttribute_real(loc_id,attrLabel,attrValue,path)
real(pReal), intent(in) :: attrValue
character(len=*), intent(in), optional :: path
integer :: hdferr
integer(HID_T) :: attr_id, space_id
logical :: attrExists
character(len=256) :: p
integer :: hdferr
integer(HID_T) :: attr_id, space_id
logical :: attrExists
character(len=pStringLen) :: p
if (present(path)) then
p = trim(path)
@ -441,7 +438,7 @@ subroutine HDF5_addAttribute_int_array(loc_id,attrLabel,attrValue,path)
integer(HID_T) :: attr_id, space_id
integer(HSIZE_T),dimension(1) :: array_size
logical :: attrExists
character(len=256) :: p
character(len=pStringLen) :: p
if (present(path)) then
p = trim(path)
@ -485,7 +482,7 @@ subroutine HDF5_addAttribute_real_array(loc_id,attrLabel,attrValue,path)
integer(HID_T) :: attr_id, space_id
integer(HSIZE_T),dimension(1) :: array_size
logical :: attrExists
character(len=256) :: p
character(len=pStringLen) :: p
if (present(path)) then
p = trim(path)
@ -1928,6 +1925,5 @@ subroutine finalize_write(plist_id, dset_id, filespace_id, memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: h5sclose_f/memspace_id')
end subroutine finalize_write
#endif
end module HDF5_Utilities

View File

@ -11,9 +11,9 @@ module IO
implicit none
private
character(len=5), parameter, public :: &
character(len=*), parameter, public :: &
IO_EOF = '#EOF#' !< end of file string
character(len=207), parameter, private :: &
character(len=*), parameter, private :: &
IO_DIVIDER = '───────────────────'//&
'───────────────────'//&
'───────────────────'//&
@ -243,12 +243,12 @@ subroutine IO_open_inputFile(fileUnit)
integer, allocatable, dimension(:) :: chunkPos
character(len=65536) :: line,fname
character(len=pStringLen :: line,fname
logical :: createSuccess,fexist
do
read(unit2,'(A65536)',END=220) line
read(unit2,'(A256)',END=220) line
chunkPos = IO_stringPos(line)
if (IO_lc(IO_StringValue(line,chunkPos,1))=='*include') then
@ -401,7 +401,7 @@ function IO_stringValue(string,chunkPos,myChunk,silent)
character(len=:), allocatable :: IO_stringValue
logical, optional,intent(in) :: silent !< switch to trigger verbosity
character(len=16), parameter :: MYNAME = 'IO_stringValue: '
character(len=*), parameter :: MYNAME = 'IO_stringValue: '
logical :: warn
@ -429,8 +429,8 @@ real(pReal) function IO_floatValue (string,chunkPos,myChunk)
integer, dimension(:), intent(in) :: chunkPos !< positions of start and end of each tag/chunk in given string
integer, intent(in) :: myChunk !< position number of desired chunk
character(len=*), intent(in) :: string !< raw input with known start and end of each chunk
character(len=15), parameter :: MYNAME = 'IO_floatValue: '
character(len=17), parameter :: VALIDCHARACTERS = '0123456789eEdD.+-'
character(len=*), parameter :: MYNAME = 'IO_floatValue: '
character(len=*), parameter :: VALIDCHARACTERS = '0123456789eEdD.+-'
IO_floatValue = 0.0_pReal
@ -453,8 +453,8 @@ integer function IO_intValue(string,chunkPos,myChunk)
character(len=*), intent(in) :: string !< raw input with known start and end of each chunk
integer, intent(in) :: myChunk !< position number of desired chunk
integer, dimension(:), intent(in) :: chunkPos !< positions of start and end of each tag/chunk in given string
character(len=13), parameter :: MYNAME = 'IO_intValue: '
character(len=12), parameter :: VALIDCHARACTERS = '0123456789+-'
character(len=*), parameter :: MYNAME = 'IO_intValue: '
character(len=*), parameter :: VALIDCHARACTERS = '0123456789+-'
IO_intValue = 0
@ -477,9 +477,9 @@ real(pReal) function IO_fixedNoEFloatValue (string,ends,myChunk)
character(len=*), intent(in) :: string !< raw input with known ends of each chunk
integer, intent(in) :: myChunk !< position number of desired chunk
integer, dimension(:), intent(in) :: ends !< positions of end of each tag/chunk in given string
character(len=22), parameter :: MYNAME = 'IO_fixedNoEFloatValue '
character(len=13), parameter :: VALIDBASE = '0123456789.+-'
character(len=12), parameter :: VALIDEXP = '0123456789+-'
character(len=*), parameter :: MYNAME = 'IO_fixedNoEFloatValue '
character(len=*), parameter :: VALIDBASE = '0123456789.+-'
character(len=*), parameter :: VALIDEXP = '0123456789+-'
real(pReal) :: base
integer :: expon
@ -509,8 +509,8 @@ integer function IO_fixedIntValue(string,ends,myChunk)
character(len=*), intent(in) :: string !< raw input with known ends of each chunk
integer, intent(in) :: myChunk !< position number of desired chunk
integer, dimension(:), intent(in) :: ends !< positions of end of each tag/chunk in given string
character(len=20), parameter :: MYNAME = 'IO_fixedIntValue: '
character(len=12), parameter :: VALIDCHARACTERS = '0123456789+-'
character(len=*), parameter :: MYNAME = 'IO_fixedIntValue: '
character(len=*), parameter :: VALIDCHARACTERS = '0123456789+-'
IO_fixedIntValue = IO_verifyIntValue(trim(adjustl(string(ends(myChunk)+1:ends(myChunk+1)))),&
VALIDCHARACTERS,MYNAME)
@ -884,7 +884,7 @@ end subroutine IO_warning
!--------------------------------------------------------------------------------------------------
function IO_read(fileUnit) result(line)
integer, intent(in) :: fileUnit !< file unit
integer, intent(in) :: fileUnit !< file unit
character(len=pStringLen) :: line
@ -924,7 +924,7 @@ integer function IO_countDataLines(fileUnit)
integer, allocatable, dimension(:) :: chunkPos
character(len=65536) :: line, &
character(len=pStringLen) :: line, &
tmp
IO_countDataLines = 0
@ -956,7 +956,7 @@ integer function IO_countNumericalDataLines(fileUnit)
integer, allocatable, dimension(:) :: chunkPos
character(len=65536) :: line, &
character(len=pStringLen) :: line, &
tmp
IO_countNumericalDataLines = 0
@ -991,7 +991,7 @@ integer function IO_countContinuousIntValues(fileUnit)
integer :: l,c
#endif
integer, allocatable, dimension(:) :: chunkPos
character(len=65536) :: line
character(len=pStringLen) :: line
IO_countContinuousIntValues = 0
line = ''
@ -1048,21 +1048,21 @@ function IO_continuousIntValues(fileUnit,maxN,lookupName,lookupMap,lookupMaxN)
integer, intent(in) :: fileUnit, &
lookupMaxN
integer, dimension(:,:), intent(in) :: lookupMap
character(len=64), dimension(:), intent(in) :: lookupName
character(len=*), dimension(:), intent(in) :: lookupName
integer :: i,first,last
#ifdef Abaqus
integer :: j,l,c
#endif
integer, allocatable, dimension(:) :: chunkPos
character(len=65536) line
logical rangeGeneration
character(len=pStringLen) :: line
logical :: rangeGeneration
IO_continuousIntValues = 0
rangeGeneration = .false.
#if defined(Marc4DAMASK)
do
read(fileUnit,'(A65536)',end=100) line
read(fileUnit,'(A256)',end=100) line
chunkPos = IO_stringPos(line)
if (chunkPos(1) < 1) then ! empty line
exit
@ -1103,14 +1103,14 @@ function IO_continuousIntValues(fileUnit,maxN,lookupName,lookupMap,lookupMaxN)
!--------------------------------------------------------------------------------------------------
! check if the element values in the elset are auto generated
backspace(fileUnit)
read(fileUnit,'(A65536)',end=100) line
read(fileUnit,'(A256)',end=100) line
chunkPos = IO_stringPos(line)
do i = 1,chunkPos(1)
if (IO_lc(IO_stringValue(line,chunkPos,i)) == 'generate') rangeGeneration = .true.
enddo
do l = 1,c
read(fileUnit,'(A65536)',end=100) line
read(fileUnit,'(A256)',end=100) line
chunkPos = IO_stringPos(line)
if (verify(IO_stringValue(line,chunkPos,1),'0123456789') > 0) then ! a non-int, i.e. set names follow on this line
do i = 1,chunkPos(1) ! loop over set names in line

View File

@ -582,9 +582,9 @@ end subroutine constitutive_collectDeltaState
!> @brief writes constitutive results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine constitutive_results
#if defined(PETSc) || defined(DAMASK_HDF5)
integer :: p
character(len=256) :: group
character(len=pStringLen) :: group
do p=1,size(config_name_phase)
group = trim('current/constituent')//'/'//trim(config_name_phase(p))
call HDF5_closeGroup(results_addGroup(group))
@ -613,8 +613,8 @@ subroutine constitutive_results
call plastic_nonlocal_results(phase_plasticityInstance(p),group)
end select
enddo
#endif
enddo
end subroutine constitutive_results
end module constitutive

View File

@ -77,7 +77,7 @@ module crystallite
crystallite_localPlasticity !< indicates this grain to have purely local constitutive law
type :: tOutput !< new requested output (per phase)
character(len=65536), allocatable, dimension(:) :: &
character(len=pStringLen), allocatable, dimension(:) :: &
label
end type tOutput
type(tOutput), allocatable, dimension(:) :: output_constituent
@ -727,7 +727,7 @@ end function crystallite_push33ToRef
!> @brief writes crystallite results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine crystallite_results
#if defined(PETSc) || defined(DAMASK_HDF5)
integer :: p,o
real(pReal), allocatable, dimension(:,:,:) :: selected_tensors
type(rotation), allocatable, dimension(:) :: selected_rotations
@ -848,7 +848,7 @@ subroutine crystallite_results
enddo
end function select_rotations
#endif
end subroutine crystallite_results

View File

@ -5,8 +5,8 @@
module damage_local
use prec
use material
use numerics
use config
use numerics
use source_damage_isoBrittle
use source_damage_isoDuctile
use source_damage_anisoBrittle
@ -15,11 +15,6 @@ module damage_local
implicit none
private
character(len=64), dimension(:,:), allocatable, target, public :: &
damage_local_output
integer, dimension(:), allocatable, target, public :: &
damage_local_Noutput
enum, bind(c)
enumerator :: &
@ -27,9 +22,6 @@ module damage_local
damage_ID
end enum
integer(kind(undefined_ID)), dimension(:,:), allocatable :: &
damage_local_outputID !< ID of each post result output
type :: tParameters
integer(kind(undefined_ID)), dimension(:), allocatable :: &
outputID
@ -51,67 +43,41 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine damage_local_init
integer :: maxNinstance,homog,instance,i
integer :: sizeState
integer :: NofMyHomog, h
integer(kind(undefined_ID)) :: &
outputID
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
character(len=65536), dimension(:), allocatable :: &
outputs
integer :: maxNinstance,o,NofMyHomog,h
character(len=pStringLen), dimension(:), allocatable :: outputs
write(6,'(/,a)') ' <<<+- damage_'//DAMAGE_local_label//' init -+>>>'
write(6,'(/,a)') ' <<<+- damage_'//DAMAGE_local_label//' init -+>>>'; flush(6)
maxNinstance = count(damage_type == DAMAGE_local_ID)
if (maxNinstance == 0) return
allocate(damage_local_output (maxval(homogenization_Noutput),maxNinstance))
damage_local_output = ''
allocate(damage_local_outputID (maxval(homogenization_Noutput),maxNinstance),source=undefined_ID)
allocate(damage_local_Noutput (maxNinstance), source=0)
allocate(param(maxNinstance))
do h = 1, size(damage_type)
if (damage_type(h) /= DAMAGE_LOCAL_ID) cycle
associate(prm => param(damage_typeInstance(h)), &
config => config_homogenization(h))
associate(prm => param(damage_typeInstance(h)),config => config_homogenization(h))
outputs = config%getStrings('(output)',defaultVal=emptyStringArray)
allocate(prm%outputID(0))
do i=1, size(outputs)
outputID = undefined_ID
select case(outputs(i))
case ('damage')
damage_local_output(i,damage_typeInstance(h)) = outputs(i)
damage_local_Noutput(instance) = damage_local_Noutput(instance) + 1
prm%outputID = [prm%outputID , damage_ID]
end select
do o=1, size(outputs)
select case(outputs(o))
case ('damage')
prm%outputID = [prm%outputID , damage_ID]
end select
enddo
homog = h
NofMyHomog = count(material_homogenizationAt == homog)
instance = damage_typeInstance(homog)
! allocate state arrays
sizeState = 1
damageState(homog)%sizeState = sizeState
allocate(damageState(homog)%state0 (sizeState,NofMyHomog), source=damage_initialPhi(homog))
allocate(damageState(homog)%subState0(sizeState,NofMyHomog), source=damage_initialPhi(homog))
allocate(damageState(homog)%state (sizeState,NofMyHomog), source=damage_initialPhi(homog))
NofMyHomog = count(material_homogenizationAt == h)
damageState(h)%sizeState = 1
allocate(damageState(h)%state0 (1,NofMyHomog), source=damage_initialPhi(h))
allocate(damageState(h)%subState0(1,NofMyHomog), source=damage_initialPhi(h))
allocate(damageState(h)%state (1,NofMyHomog), source=damage_initialPhi(h))
nullify(damageMapping(homog)%p)
damageMapping(homog)%p => mappingHomogenization(1,:,:)
deallocate(damage(homog)%p)
damage(homog)%p => damageState(homog)%state(1,:)
nullify(damageMapping(h)%p)
damageMapping(h)%p => mappingHomogenization(1,:,:)
deallocate(damage(h)%p)
damage(h)%p => damageState(h)%state(1,:)
end associate
enddo
@ -213,11 +179,9 @@ subroutine damage_local_results(homog,group)
integer, intent(in) :: homog
character(len=*), intent(in) :: group
#if defined(PETSc) || defined(DAMASK_HDF5)
integer :: o, instance
integer :: o
instance = damage_typeInstance(homog)
associate(prm => param(instance))
associate(prm => param(damage_typeInstance(homog)))
outputsLoop: do o = 1,size(prm%outputID)
select case(prm%outputID(o))
@ -228,7 +192,6 @@ subroutine damage_local_results(homog,group)
end select
enddo outputsLoop
end associate
#endif
end subroutine damage_local_results

View File

@ -19,26 +19,23 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine damage_none_init
integer :: &
homog, &
NofMyHomog
integer :: h,NofMyHomog
write(6,'(/,a)') ' <<<+- damage_'//DAMAGE_NONE_LABEL//' init -+>>>'
write(6,'(/,a)') ' <<<+- damage_'//DAMAGE_NONE_LABEL//' init -+>>>'; flush(6)
initializeInstances: do homog = 1, size(config_homogenization)
do h = 1, size(config_homogenization)
if (damage_type(h) /= DAMAGE_NONE_ID) cycle
NofMyHomog = count(material_homogenizationAt == h)
damageState(h)%sizeState = 0
allocate(damageState(h)%state0 (0,NofMyHomog))
allocate(damageState(h)%subState0(0,NofMyHomog))
allocate(damageState(h)%state (0,NofMyHomog))
myhomog: if (damage_type(homog) == DAMAGE_NONE_ID) then
NofMyHomog = count(material_homogenizationAt == homog)
damageState(homog)%sizeState = 0
allocate(damageState(homog)%state0 (0,NofMyHomog))
allocate(damageState(homog)%subState0(0,NofMyHomog))
allocate(damageState(homog)%state (0,NofMyHomog))
deallocate(damage(h)%p)
allocate (damage(h)%p(1), source=damage_initialPhi(h))
deallocate(damage(homog)%p)
allocate (damage(homog)%p(1), source=damage_initialPhi(homog))
endif myhomog
enddo initializeInstances
enddo
end subroutine damage_none_init

View File

@ -1,13 +1,12 @@
!--------------------------------------------------------------------------------------------------
!> @author Pratheek Shanthraj, Max-Planck-Institut für Eisenforschung GmbH
!> @brief material subroutine for non-locally evolving damage field
!> @details to be done
!--------------------------------------------------------------------------------------------------
module damage_nonlocal
use prec
use material
use numerics
use config
use numerics
use crystallite
use lattice
use source_damage_isoBrittle
@ -18,11 +17,6 @@ module damage_nonlocal
implicit none
private
character(len=64), dimension(:,:), allocatable, target, public :: &
damage_nonlocal_output
integer, dimension(:), allocatable, target, public :: &
damage_nonlocal_Noutput
enum, bind(c)
enumerator :: &
@ -54,67 +48,44 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine damage_nonlocal_init
integer :: maxNinstance,homog,instance,i
integer :: sizeState
integer :: NofMyHomog, h
integer(kind(undefined_ID)) :: &
outputID
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
character(len=65536), dimension(:), allocatable :: &
outputs
integer :: maxNinstance,o,NofMyHomog,h
character(len=pStringLen), dimension(:), allocatable :: outputs
write(6,'(/,a)') ' <<<+- damage_'//DAMAGE_nonlocal_label//' init -+>>>'
write(6,'(/,a)') ' <<<+- damage_'//DAMAGE_nonlocal_label//' init -+>>>'; flush(6)
maxNinstance = count(damage_type == DAMAGE_nonlocal_ID)
if (maxNinstance == 0) return
allocate(damage_nonlocal_output (maxval(homogenization_Noutput),maxNinstance))
damage_nonlocal_output = ''
allocate(damage_nonlocal_Noutput (maxNinstance), source=0)
allocate(param(maxNinstance))
do h = 1, size(damage_type)
if (damage_type(h) /= DAMAGE_NONLOCAL_ID) cycle
associate(prm => param(damage_typeInstance(h)), &
config => config_homogenization(h))
associate(prm => param(damage_typeInstance(h)),config => config_homogenization(h))
instance = damage_typeInstance(h)
outputs = config%getStrings('(output)',defaultVal=emptyStringArray)
allocate(prm%outputID(0))
do i=1, size(outputs)
outputID = undefined_ID
select case(outputs(i))
case ('damage')
damage_nonlocal_output(i,damage_typeInstance(h)) = outputs(i)
damage_nonlocal_Noutput(instance) = damage_nonlocal_Noutput(instance) + 1
prm%outputID = [prm%outputID , damage_ID]
end select
do o=1, size(outputs)
select case(outputs(o))
case ('damage')
prm%outputID = [prm%outputID, damage_ID]
end select
enddo
homog = h
NofMyHomog = count(material_homogenizationAt == h)
damageState(h)%sizeState = 1
allocate(damageState(h)%state0 (1,NofMyHomog), source=damage_initialPhi(h))
allocate(damageState(h)%subState0(1,NofMyHomog), source=damage_initialPhi(h))
allocate(damageState(h)%state (1,NofMyHomog), source=damage_initialPhi(h))
NofMyHomog = count(material_homogenizationAt == homog)
instance = damage_typeInstance(homog)
! allocate state arrays
sizeState = 1
damageState(homog)%sizeState = sizeState
allocate(damageState(homog)%state0 (sizeState,NofMyHomog), source=damage_initialPhi(homog))
allocate(damageState(homog)%subState0(sizeState,NofMyHomog), source=damage_initialPhi(homog))
allocate(damageState(homog)%state (sizeState,NofMyHomog), source=damage_initialPhi(homog))
nullify(damageMapping(homog)%p)
damageMapping(homog)%p => mappingHomogenization(1,:,:)
deallocate(damage(homog)%p)
damage(homog)%p => damageState(homog)%state(1,:)
nullify(damageMapping(h)%p)
damageMapping(h)%p => mappingHomogenization(1,:,:)
deallocate(damage(h)%p)
damage(h)%p => damageState(h)%state(1,:)
end associate
enddo
end subroutine damage_nonlocal_init
@ -249,11 +220,9 @@ subroutine damage_nonlocal_results(homog,group)
integer, intent(in) :: homog
character(len=*), intent(in) :: group
#if defined(PETSc) || defined(DAMASK_HDF5)
integer :: o, instance
integer :: o
instance = damage_typeInstance(homog)
associate(prm => param(instance))
associate(prm => param(damage_typeInstance(homog)))
outputsLoop: do o = 1,size(prm%outputID)
select case(prm%outputID(o))
@ -264,7 +233,6 @@ subroutine damage_nonlocal_results(homog,group)
end select
enddo outputsLoop
end associate
#endif
end subroutine damage_nonlocal_results

View File

@ -78,7 +78,7 @@ end subroutine discretization_init
!> @brief write the displacements
!--------------------------------------------------------------------------------------------------
subroutine discretization_results
#if defined(PETSc) || defined(DAMASK_HDF5)
real(pReal), dimension(:,:), allocatable :: u
call results_closeGroup(results_addGroup(trim('current/geometry')))
@ -90,7 +90,7 @@ subroutine discretization_results
u = discretization_IPcoords &
- discretization_IPcoords0
call results_writeDataset('current/geometry',u,'u_c','cell center displacements','m')
#endif
end subroutine discretization_results

View File

@ -10,7 +10,7 @@ module future
contains
#if defined(__GFORTRAN__) && __GNUC__<9 || __INTEL_COMPILER<1800
#if defined(__GFORTRAN__) && __GNUC__<9 || defined(__INTEL_COMPILER) && INTEL_COMPILER<1800
!--------------------------------------------------------------------------------------------------
!> @brief substitute for the findloc intrinsic (only for integer, dimension(:) at the moment)
!--------------------------------------------------------------------------------------------------

View File

@ -122,7 +122,6 @@ subroutine geometry_plastic_nonlocal_results
integer, dimension(:), allocatable :: shp
#if defined(PETSc) || defined(DAMASK_HDF5)
call results_openJobFile
writeVolume: block
@ -151,7 +150,6 @@ subroutine geometry_plastic_nonlocal_results
call results_closeJobFile
#endif
end subroutine geometry_plastic_nonlocal_results

View File

@ -36,7 +36,7 @@ program DAMASK_spectral
N_t = 0, & !< # of time indicators found in load case file
N_n = 0, & !< # of increment specifiers found in load case file
N_def = 0 !< # of rate of deformation specifiers found in load case file
character(len=65536) :: &
character(len=pStringLen) :: &
line
!--------------------------------------------------------------------------------------------------
@ -247,7 +247,7 @@ program DAMASK_spectral
reportAndCheck: if (worldrank == 0) then
write (loadcase_string, '(i6)' ) currentLoadCase
write(6,'(/,1x,a,i6)') 'load case: ', currentLoadCase
write(6,'(/,1x,a,i0)') 'load case: ', currentLoadCase
if (.not. newLoadCase%followFormerTrajectory) write(6,'(2x,a)') 'drop guessing along trajectory'
if (newLoadCase%deformation%myType == 'l') then
do j = 1, 3
@ -288,14 +288,14 @@ program DAMASK_spectral
write(6,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'rotation of loadframe:',&
transpose(newLoadCase%rot%asMatrix())
if (newLoadCase%time < 0.0_pReal) errorID = 834 ! negative time increment
write(6,'(2x,a,f12.6)') 'time: ', newLoadCase%time
write(6,'(2x,a,f0.3)') 'time: ', newLoadCase%time
if (newLoadCase%incs < 1) errorID = 835 ! non-positive incs count
write(6,'(2x,a,i5)') 'increments: ', newLoadCase%incs
write(6,'(2x,a,i0)') 'increments: ', newLoadCase%incs
if (newLoadCase%outputfrequency < 1) errorID = 836 ! non-positive result frequency
write(6,'(2x,a,i5)') 'output frequency: ', newLoadCase%outputfrequency
write(6,'(2x,a,i0)') 'output frequency: ', newLoadCase%outputfrequency
if (newLoadCase%restartfrequency < 1) errorID = 839 ! non-positive restart frequency
if (newLoadCase%restartfrequency < huge(0)) &
write(6,'(2x,a,i5)') 'restart frequency: ', newLoadCase%restartfrequency
write(6,'(2x,a,i0)') 'restart frequency: ', newLoadCase%restartfrequency
if (errorID > 0) call IO_error(error_ID = errorID, ext_msg = loadcase_string) ! exit with error message
endif reportAndCheck
loadCases = [loadCases,newLoadCase] ! load case is ok, append it

View File

@ -591,7 +591,6 @@ end subroutine averageStressAndItsTangent
!> @brief writes homogenization results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine homogenization_results
#if defined(PETSc) || defined(DAMASK_HDF5)
use material, only: &
material_homogenization_type => homogenization_type
@ -638,10 +637,8 @@ subroutine homogenization_results
call thermal_conduction_results(p,group)
end select
enddo
#endif
end subroutine homogenization_results
end module homogenization

View File

@ -74,12 +74,10 @@ module subroutine mech_RGC_init
NofMyHomog, &
sizeState, nIntFaceTot
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- homogenization_'//HOMOGENIZATION_RGC_label//' init -+>>>'
@ -928,7 +926,6 @@ end subroutine mech_RGC_averageStressAndItsTangent
!> @brief writes results to HDF5 output file
!--------------------------------------------------------------------------------------------------
module subroutine mech_RGC_results(instance,group)
#if defined(PETSc) || defined(DAMASK_HDF5)
integer, intent(in) :: instance
character(len=*), intent(in) :: group
@ -962,11 +959,6 @@ module subroutine mech_RGC_results(instance,group)
enddo outputsLoop
end associate
#else
integer, intent(in) :: instance
character(len=*), intent(in) :: group
#endif
end subroutine mech_RGC_results

View File

@ -33,7 +33,7 @@ module subroutine mech_isostrain_init
Ninstance, &
h, &
NofMyHomog
character(len=65536) :: &
character(len=pStringLen) :: &
tag = ''
write(6,'(/,a)') ' <<<+- homogenization_'//HOMOGENIZATION_ISOSTRAIN_label//' init -+>>>'

View File

@ -492,7 +492,7 @@ contains
subroutine lattice_init
integer :: Nphases
character(len=65536) :: &
character(len=pStringLen) :: &
tag = ''
integer :: i,p
real(pReal), dimension(:), allocatable :: &

View File

@ -261,7 +261,7 @@ end function getInt
!! error unless default is given. If raw is true, the the complete string is returned, otherwise
!! the individual chunks are returned
!--------------------------------------------------------------------------------------------------
character(len=65536) function getString(this,key,defaultVal,raw)
character(len=pStringLen) function getString(this,key,defaultVal,raw)
class(tPartitionedStringList), target, intent(in) :: this
character(len=*), intent(in) :: key
@ -400,13 +400,13 @@ end function getInts
!--------------------------------------------------------------------------------------------------
function getStrings(this,key,defaultVal,raw)
character(len=65536),dimension(:), allocatable :: getStrings
character(len=pStringLen),dimension(:), allocatable :: getStrings
class(tPartitionedStringList),target, intent(in) :: this
character(len=*), intent(in) :: key
character(len=*), dimension(:), intent(in), optional :: defaultVal
logical, intent(in), optional :: raw
type(tPartitionedStringList), pointer :: item
character(len=65536) :: str
character(len=pStringLen) :: str
integer :: i
logical :: found, &
whole, &

View File

@ -354,12 +354,10 @@ subroutine material_init
call config_deallocate('material.config/microstructure')
call config_deallocate('material.config/texture')
#if defined(PETSc) || defined(DAMASK_HDF5)
call results_openJobFile
call results_mapping_constituent(material_phaseAt,material_phaseMemberAt,config_name_phase)
call results_mapping_materialpoint(material_homogenizationAt,material_homogenizationMemberAt,config_name_homogenization)
call results_closeJobFile
#endif
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@ -393,8 +391,8 @@ end subroutine material_init
!--------------------------------------------------------------------------------------------------
subroutine material_parseHomogenization
integer :: h
character(len=65536) :: tag
integer :: h
character(len=pStringLen) :: tag
allocate(homogenization_type(size(config_homogenization)), source=HOMOGENIZATION_undefined_ID)
allocate(thermal_type(size(config_homogenization)), source=THERMAL_isothermal_ID)
@ -484,11 +482,11 @@ end subroutine material_parseHomogenization
!--------------------------------------------------------------------------------------------------
subroutine material_parseMicrostructure
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
strings
integer, allocatable, dimension(:) :: chunkPos
integer :: e, m, c, i
character(len=65536) :: &
character(len=pStringLen) :: &
tag
allocate(microstructure_Nconstituents(size(config_microstructure)), source=0)
@ -542,7 +540,7 @@ end subroutine material_parseMicrostructure
subroutine material_parsePhase
integer :: sourceCtr, kinematicsCtr, stiffDegradationCtr, p
character(len=65536), dimension(:), allocatable :: str
character(len=pStringLen), dimension(:), allocatable :: str
allocate(phase_elasticity(size(config_phase)),source=ELASTICITY_undefined_ID)
@ -596,9 +594,9 @@ subroutine material_parsePhase
#if defined(__GFORTRAN__) || defined(__PGI)
str = ['GfortranBug86277']
str = config_phase(p)%getStrings('(source)',defaultVal=str)
if (str(1) == 'GfortranBug86277') str = [character(len=65536)::]
if (str(1) == 'GfortranBug86277') str = [character(len=pStringLen)::]
#else
str = config_phase(p)%getStrings('(source)',defaultVal=[character(len=65536)::])
str = config_phase(p)%getStrings('(source)',defaultVal=[character(len=pStringLen)::])
#endif
do sourceCtr = 1, size(str)
select case (trim(str(sourceCtr)))
@ -620,9 +618,9 @@ subroutine material_parsePhase
#if defined(__GFORTRAN__) || defined(__PGI)
str = ['GfortranBug86277']
str = config_phase(p)%getStrings('(kinematics)',defaultVal=str)
if (str(1) == 'GfortranBug86277') str = [character(len=65536)::]
if (str(1) == 'GfortranBug86277') str = [character(len=pStringLen)::]
#else
str = config_phase(p)%getStrings('(kinematics)',defaultVal=[character(len=65536)::])
str = config_phase(p)%getStrings('(kinematics)',defaultVal=[character(len=pStringLen)::])
#endif
do kinematicsCtr = 1, size(str)
select case (trim(str(kinematicsCtr)))
@ -637,9 +635,9 @@ subroutine material_parsePhase
#if defined(__GFORTRAN__) || defined(__PGI)
str = ['GfortranBug86277']
str = config_phase(p)%getStrings('(stiffness_degradation)',defaultVal=str)
if (str(1) == 'GfortranBug86277') str = [character(len=65536)::]
if (str(1) == 'GfortranBug86277') str = [character(len=pStringLen)::]
#else
str = config_phase(p)%getStrings('(stiffness_degradation)',defaultVal=[character(len=65536)::])
str = config_phase(p)%getStrings('(stiffness_degradation)',defaultVal=[character(len=pStringLen)::])
#endif
do stiffDegradationCtr = 1, size(str)
select case (trim(str(stiffDegradationCtr)))
@ -665,8 +663,8 @@ end subroutine material_parsePhase
!--------------------------------------------------------------------------------------------------
subroutine material_parseTexture
integer :: j, t
character(len=65536), dimension(:), allocatable :: strings ! Values for given key in material config
integer :: j,t
character(len=pStringLen), dimension(:), allocatable :: strings ! Values for given key in material config
integer, dimension(:), allocatable :: chunkPos
real(pReal), dimension(3,3) :: transformation ! maps texture to microstructure coordinate system
real(pReal), dimension(3) :: Eulers ! Euler angles in degrees from file
@ -702,29 +700,27 @@ subroutine material_parseTexture
do j = 1, 3 ! look for "x", "y", and "z" entries
select case (strings(j))
case('x', '+x')
transformation(j,1:3) = [ 1.0_pReal, 0.0_pReal, 0.0_pReal] ! original axis is now +x-axis
transformation(j,1:3) = [ 1.0_pReal, 0.0_pReal, 0.0_pReal] ! original axis is now +x-axis
case('-x')
transformation(j,1:3) = [-1.0_pReal, 0.0_pReal, 0.0_pReal] ! original axis is now -x-axis
transformation(j,1:3) = [-1.0_pReal, 0.0_pReal, 0.0_pReal] ! original axis is now -x-axis
case('y', '+y')
transformation(j,1:3) = [ 0.0_pReal, 1.0_pReal, 0.0_pReal] ! original axis is now +y-axis
transformation(j,1:3) = [ 0.0_pReal, 1.0_pReal, 0.0_pReal] ! original axis is now +y-axis
case('-y')
transformation(j,1:3) = [ 0.0_pReal,-1.0_pReal, 0.0_pReal] ! original axis is now -y-axis
transformation(j,1:3) = [ 0.0_pReal,-1.0_pReal, 0.0_pReal] ! original axis is now -y-axis
case('z', '+z')
transformation(j,1:3) = [ 0.0_pReal, 0.0_pReal, 1.0_pReal] ! original axis is now +z-axis
transformation(j,1:3) = [ 0.0_pReal, 0.0_pReal, 1.0_pReal] ! original axis is now +z-axis
case('-z')
transformation(j,1:3) = [ 0.0_pReal, 0.0_pReal,-1.0_pReal] ! original axis is now -z-axis
transformation(j,1:3) = [ 0.0_pReal, 0.0_pReal,-1.0_pReal] ! original axis is now -z-axis
case default
call IO_error(157,t)
end select
enddo
if(dNeq(math_det33(transformation),1.0_pReal)) call IO_error(157,t)
call transformation_%fromMatrix(transformation)
texture_orientation(t) = texture_orientation(t) * transformation_
endif
enddo
end subroutine material_parseTexture
@ -732,18 +728,14 @@ end subroutine material_parseTexture
!> @brief allocates the plastic state of a phase
!--------------------------------------------------------------------------------------------------
subroutine material_allocatePlasticState(phase,NofMyPhase,&
sizeState,sizeDotState,sizeDeltaState,&
Nslip,Ntwin,Ntrans)
sizeState,sizeDotState,sizeDeltaState)
integer, intent(in) :: &
phase, &
NofMyPhase, &
sizeState, &
sizeDotState, &
sizeDeltaState, &
Nslip, &
Ntwin, &
Ntrans
sizeDeltaState
plasticState(phase)%sizeState = sizeState
plasticState(phase)%sizeDotState = sizeDotState

View File

@ -27,7 +27,7 @@ program DAMASK_FEM
integer, allocatable, dimension(:) :: chunkPos ! this is longer than needed for geometry parsing
integer :: &
N_def = 0 !< # of rate of deformation specifiers found in load case file
character(len=65536) :: &
character(len=pStringLen) :: &
line
!--------------------------------------------------------------------------------------------------

View File

@ -27,9 +27,8 @@ module mesh_grid
integer, public, protected :: &
grid3, & !< (local) grid in 3rd direction
grid3Offset !< (local) grid offset in 3rd direction
real(pReal), dimension(3), public, protected :: &
geomSize
geomSize !< (global) physical size
real(pReal), public, protected :: &
size3, & !< (local) size in 3rd direction
size3offset !< (local) size offset in 3rd direction
@ -49,7 +48,8 @@ subroutine mesh_init(ip,el)
include 'fftw3-mpi.f03'
real(pReal), dimension(3) :: &
mySize !< domain size of this process
mySize, & !< domain size of this process
origin !< (global) distance to origin
integer, dimension(3) :: &
myGrid !< domain grid of this process
@ -61,9 +61,9 @@ subroutine mesh_init(ip,el)
integer(C_INTPTR_T) :: &
devNull, z, z_offset
write(6,'(/,a)') ' <<<+- mesh_grid init -+>>>'
write(6,'(/,a)') ' <<<+- mesh_grid init -+>>>'; flush(6)
call readGeom(grid,geomSize,microstructureAt,homogenizationAt)
call readGeom(grid,geomSize,origin,microstructureAt,homogenizationAt)
!--------------------------------------------------------------------------------------------------
! grid solver specific quantities
@ -104,8 +104,9 @@ subroutine mesh_init(ip,el)
! store geometry information for post processing
call results_openJobFile
call results_closeGroup(results_addGroup('geometry'))
call results_addAttribute('grid',grid,'geometry')
call results_addAttribute('size',geomSize,'geometry')
call results_addAttribute('grid', grid, 'geometry')
call results_addAttribute('size', geomSize,'geometry')
call results_addAttribute('origin',origin, 'geometry')
call results_closeJobFile
!--------------------------------------------------------------------------------------------------
@ -129,10 +130,13 @@ end subroutine mesh_init
!> @details important variables have an implicit "save" attribute. Therefore, this function is
! supposed to be called only once!
!--------------------------------------------------------------------------------------------------
subroutine readGeom(grid,geomSize,microstructure,homogenization)
subroutine readGeom(grid,geomSize,origin,microstructure,homogenization)
integer, dimension(3), intent(out) :: grid ! grid (for all processes!)
real(pReal), dimension(3), intent(out) :: geomSize ! size (for all processes!)
integer, dimension(3), intent(out) :: &
grid ! grid (for all processes!)
real(pReal), dimension(3), intent(out) :: &
geomSize, & ! size (for all processes!)
origin ! origin (for all processes!)
integer, dimension(:), intent(out), allocatable :: &
microstructure, &
homogenization
@ -181,6 +185,7 @@ subroutine readGeom(grid,geomSize,microstructure,homogenization)
!--------------------------------------------------------------------------------------------------
! read and interprete header
origin = 0.0_pReal
l = 0
do while (l < headerLength .and. startPos < len(rawData))
endPos = startPos + index(rawData(startPos:),new_line('')) - 1
@ -221,8 +226,23 @@ subroutine readGeom(grid,geomSize,microstructure,homogenization)
enddo
endif
case ('origin')
if (chunkPos(1) > 6) then
do j = 2,6,2
select case (IO_lc(IO_stringValue(line,chunkPos,j)))
case('x')
origin(1) = IO_floatValue(line,chunkPos,j+1)
case('y')
origin(2) = IO_floatValue(line,chunkPos,j+1)
case('z')
origin(3) = IO_floatValue(line,chunkPos,j+1)
end select
enddo
endif
case ('homogenization')
if (chunkPos(1) > 1) h = IO_intValue(line,chunkPos,2)
end select
enddo
@ -276,7 +296,7 @@ end subroutine readGeom
!---------------------------------------------------------------------------------------------------
!> @brief Calculate undeformed position of IPs/cell centres (pretend to be an element)
!> @brief Calculate undeformed position of IPs/cell centers (pretend to be an element)
!---------------------------------------------------------------------------------------------------
function IPcoordinates0(grid,geomSize,grid3Offset)
@ -347,7 +367,7 @@ pure function cellEdgeNormal(nElems)
integer, intent(in) :: nElems
real, dimension(3,6,1,nElems) :: cellEdgeNormal
real(pReal), dimension(3,6,1,nElems) :: cellEdgeNormal
cellEdgeNormal(1:3,1,1,:) = spread([+1.0_pReal, 0.0_pReal, 0.0_pReal],2,nElems)
cellEdgeNormal(1:3,2,1,:) = spread([-1.0_pReal, 0.0_pReal, 0.0_pReal],2,nElems)

View File

@ -18,7 +18,6 @@ module mesh
use element
use discretization
use geometry_plastic_nonlocal
use HDF5_utilities
use results
implicit none
@ -150,9 +149,8 @@ subroutine writeGeometry(elemType, &
real(pReal), dimension(:,:), allocatable :: &
coordinates_temp
#if defined(DAMASK_HDF5)
call results_openJobFile
call HDF5_closeGroup(results_addGroup('geometry'))
call results_closeGroup(results_addGroup('geometry'))
connectivity_temp = connectivity_elem
call results_writeDataset('geometry',connectivity_temp,'T_e',&
@ -171,7 +169,6 @@ subroutine writeGeometry(elemType, &
'coordinates of the material points','m')
call results_closeJobFile
#endif
end subroutine writeGeometry

View File

@ -121,16 +121,12 @@ subroutine plastic_disloUCLA_init()
sizeState, sizeDotState, &
startIndex, endIndex
integer, dimension(0), parameter :: emptyIntArray = [integer::]
real(pReal), dimension(0), parameter :: emptyRealArray = [real(pReal)::]
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- plastic_'//PLASTICITY_DISLOUCLA_label//' init -+>>>'
@ -290,8 +286,7 @@ subroutine plastic_disloUCLA_init()
sizeDotState = size(['rho_mob ','rho_dip ','gamma_sl']) * prm%sum_N_sl
sizeState = sizeDotState
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, &
prm%sum_N_sl,0,0)
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0)
!--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState
@ -463,7 +458,6 @@ end subroutine plastic_disloUCLA_dependentState
!> @brief writes results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine plastic_disloUCLA_results(instance,group)
#if defined(PETSc) || defined(DAMASK_HDF5)
integer, intent(in) :: instance
character(len=*), intent(in) :: group
@ -491,11 +485,6 @@ subroutine plastic_disloUCLA_results(instance,group)
end select
enddo outputsLoop
end associate
#else
integer, intent(in) :: instance
character(len=*), intent(in) :: group
#endif
end subroutine plastic_disloUCLA_results

View File

@ -180,16 +180,12 @@ subroutine plastic_dislotwin_init
sizeState, sizeDotState, &
startIndex, endIndex
integer, dimension(0), parameter :: emptyIntArray = [integer::]
real(pReal), dimension(0), parameter :: emptyRealArray = [real(pReal)::]
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- constitutive_'//PLASTICITY_DISLOTWIN_label//' init -+>>>'
@ -510,8 +506,7 @@ subroutine plastic_dislotwin_init
+ size(['f_tr']) * prm%sum_N_tr
sizeState = sizeDotState
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, &
prm%sum_N_sl,prm%sum_N_tw,prm%sum_N_tr)
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0)
!--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState
@ -926,7 +921,6 @@ end subroutine plastic_dislotwin_dependentState
!> @brief writes results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine plastic_dislotwin_results(instance,group)
#if defined(PETSc) || defined(DAMASK_HDF5)
integer, intent(in) :: instance
character(len=*) :: group
@ -969,11 +963,6 @@ subroutine plastic_dislotwin_results(instance,group)
end select
enddo outputsLoop
end associate
#else
integer, intent(in) :: instance
character(len=*) :: group
#endif
end subroutine plastic_dislotwin_results

View File

@ -85,14 +85,12 @@ subroutine plastic_isotropic_init
NipcMyPhase, &
sizeState, sizeDotState
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- plastic_'//PLASTICITY_ISOTROPIC_label//' init -+>>>'
@ -181,8 +179,7 @@ subroutine plastic_isotropic_init
sizeDotState = size(['xi ','accumulated_shear'])
sizeState = sizeDotState
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, &
1,0,0)
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0)
!--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState
@ -373,7 +370,6 @@ end subroutine plastic_isotropic_dotState
!> @brief writes results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine plastic_isotropic_results(instance,group)
#if defined(PETSc) || defined(DAMASK_HDF5)
integer, intent(in) :: instance
character(len=*), intent(in) :: group
@ -388,10 +384,6 @@ subroutine plastic_isotropic_results(instance,group)
end select
enddo outputsLoop
end associate
#else
integer, intent(in) :: instance
character(len=*) :: group
#endif
end subroutine plastic_isotropic_results

View File

@ -103,16 +103,12 @@ subroutine plastic_kinehardening_init
sizeState, sizeDeltaState, sizeDotState, &
startIndex, endIndex
integer, dimension(0), parameter :: emptyIntArray = [integer::]
real(pReal), dimension(0), parameter :: emptyRealArray = [real(pReal)::]
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- plastic_'//PLASTICITY_KINEHARDENING_label//' init -+>>>'
@ -249,8 +245,7 @@ subroutine plastic_kinehardening_init
sizeDeltaState = size(['sense ', 'chi0 ', 'gamma0' ]) * prm%totalNslip
sizeState = sizeDotState + sizeDeltaState
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState, &
prm%totalNslip,0,0)
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,sizeDeltaState)
!--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState
@ -437,7 +432,6 @@ end subroutine plastic_kinehardening_deltaState
!> @brief writes results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine plastic_kinehardening_results(instance,group)
#if defined(PETSc) || defined(DAMASK_HDF5)
integer, intent(in) :: instance
character(len=*) :: group
@ -470,10 +464,6 @@ subroutine plastic_kinehardening_results(instance,group)
end select
enddo outputsLoop
end associate
#else
integer, intent(in) :: instance
character(len=*) :: group
#endif
end subroutine plastic_kinehardening_results

View File

@ -38,8 +38,8 @@ subroutine plastic_none_init
if (phase_plasticity(p) /= PLASTICITY_NONE_ID) cycle
NipcMyPhase = count(material_phaseAt == p) * discretization_nIP
call material_allocatePlasticState(p,NipcMyPhase,0,0,0, &
0,0,0)
call material_allocatePlasticState(p,NipcMyPhase,0,0,0)
enddo
end subroutine plastic_none_init

View File

@ -12,6 +12,7 @@ module plastic_nonlocal
use material
use lattice
use rotations
use results
use config
use lattice
use discretization
@ -27,9 +28,6 @@ module plastic_nonlocal
real(pReal), parameter :: &
KB = 1.38e-23_pReal !< Physical parameter, Boltzmann constant in J/Kelvin
character(len=64), dimension(:,:), allocatable :: &
plastic_nonlocal_output !< name of each post result output
! storage order of dislocation types
integer, dimension(8), parameter :: &
sgl = [1,2,3,4,5,6,7,8] !< signed (single)
@ -201,9 +199,6 @@ module plastic_nonlocal
type(tNonlocalMicrostructure), dimension(:), allocatable :: microstructure
integer(kind(undefined_ID)), dimension(:,:), allocatable :: &
plastic_nonlocal_outputID !< ID of each post result output
public :: &
plastic_nonlocal_init, &
plastic_nonlocal_dependentState, &
@ -224,10 +219,6 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine plastic_nonlocal_init
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer, dimension(0), parameter :: emptyIntArray = [integer::]
real(pReal), dimension(0), parameter :: emptyRealArray = [real(pReal)::]
integer :: &
sizeState, sizeDotState,sizeDependentState, sizeDeltaState, &
maxNinstances, &
@ -240,10 +231,10 @@ subroutine plastic_nonlocal_init
integer(kind(undefined_ID)) :: &
outputID
character(len=512) :: &
character(len=pStringLen) :: &
extmsg = '', &
structure
character(len=65536), dimension(:), allocatable :: outputs
character(len=pStringLen), dimension(:), allocatable :: outputs
integer :: NofMyPhase
write(6,'(/,a)') ' <<<+- constitutive_'//PLASTICITY_NONLOCAL_label//' init -+>>>'
@ -264,9 +255,6 @@ subroutine plastic_nonlocal_init
allocate(deltaState(maxNinstances))
allocate(microstructure(maxNinstances))
allocate(plastic_nonlocal_output(maxval(phase_Noutput), maxNinstances))
plastic_nonlocal_output = ''
allocate(plastic_nonlocal_outputID(maxval(phase_Noutput), maxNinstances), source=undefined_ID)
allocate(totalNslip(maxNinstances), source=0)
@ -492,7 +480,6 @@ subroutine plastic_nonlocal_init
end select
if (outputID /= undefined_ID) then
plastic_nonlocal_output(i,phase_plasticityInstance(p)) = outputs(i)
prm%outputID = [prm%outputID , outputID]
endif
@ -514,8 +501,8 @@ subroutine plastic_nonlocal_init
'maxDipoleHeightEdge ','maxDipoleHeightScrew' ]) * prm%totalNslip !< other dependent state variables that are not updated by microstructure
sizeDeltaState = sizeDotState
call material_allocatePlasticState(p,NofMyPhase,sizeState,sizeDotState,sizeDeltaState, &
prm%totalNslip,0,0)
call material_allocatePlasticState(p,NofMyPhase,sizeState,sizeDotState,sizeDeltaState)
plasticState(p)%nonlocal = .true.
plasticState(p)%offsetDeltaState = 0 ! ToDo: state structure does not follow convention
@ -1974,9 +1961,6 @@ end function getRho
!> @brief writes results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine plastic_nonlocal_results(instance,group)
#if defined(PETSc) || defined(DAMASK_HDF5)
use results, only: &
results_writeDataset
integer, intent(in) :: instance
character(len=*) :: group
@ -2039,10 +2023,6 @@ subroutine plastic_nonlocal_results(instance,group)
end select
enddo outputsLoop
end associate
#else
integer, intent(in) :: instance
character(len=*) :: group
#endif
end subroutine plastic_nonlocal_results

View File

@ -113,16 +113,12 @@ subroutine plastic_phenopowerlaw_init
sizeState, sizeDotState, &
startIndex, endIndex
integer, dimension(0), parameter :: emptyIntArray = [integer::]
real(pReal), dimension(0), parameter :: emptyRealArray = [real(pReal)::]
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- plastic_'//PLASTICITY_PHENOPOWERLAW_label//' init -+>>>'
@ -308,8 +304,7 @@ subroutine plastic_phenopowerlaw_init
+ size(['tau_twin ','gamma_twin']) * prm%totalNtwin
sizeState = sizeDotState
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0, &
prm%totalNslip,prm%totalNtwin,0)
call material_allocatePlasticState(p,NipcMyPhase,sizeState,sizeDotState,0)
!--------------------------------------------------------------------------------------------------
! locally defined state aliases and initialization of state0 and aTolState
@ -464,7 +459,6 @@ end subroutine plastic_phenopowerlaw_dotState
!> @brief writes results to HDF5 output file
!--------------------------------------------------------------------------------------------------
subroutine plastic_phenopowerlaw_results(instance,group)
#if defined(PETSc) || defined(DAMASK_HDF5)
integer, intent(in) :: instance
character(len=*), intent(in) :: group
@ -492,11 +486,6 @@ subroutine plastic_phenopowerlaw_results(instance,group)
end select
enddo outputsLoop
end associate
#else
integer, intent(in) :: instance
character(len=*), intent(in) :: group
#endif
end subroutine plastic_phenopowerlaw_results

View File

@ -79,6 +79,10 @@ module prec
real(pReal), private, parameter :: PREAL_EPSILON = epsilon(0.0_pReal) !< minimum positive number such that 1.0 + EPSILON /= 1.0.
real(pReal), private, parameter :: PREAL_MIN = tiny(0.0_pReal) !< smallest normalized floating point number
integer, dimension(0), parameter, public :: emptyIntArray = [integer::]
real(pReal), dimension(0), parameter, public :: emptyRealArray = [real(pReal)::]
character(len=pStringLen), dimension(0), parameter, public :: emptyStringArray = [character(len=pStringLen)::]
private :: &
unitTest

View File

@ -16,7 +16,6 @@ module results
implicit none
private
#if defined(PETSc) || defined(DAMASK_HDF5)
integer(HID_T) :: resultsFile
interface results_writeDataset
@ -70,7 +69,7 @@ subroutine results_init
resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
call HDF5_addAttribute(resultsFile,'DADF5_version_major',0)
call HDF5_addAttribute(resultsFile,'DADF5_version_minor',4)
call HDF5_addAttribute(resultsFile,'DADF5_version_minor',5)
call HDF5_addAttribute(resultsFile,'DAMASK_version',DAMASKVERSION)
call get_command(commandLine)
call HDF5_addAttribute(resultsFile,'call',trim(commandLine))
@ -978,5 +977,4 @@ end subroutine results_mapping_materialpoint
!end subroutine HDF5_mappingCells
#endif
end module results

View File

@ -596,7 +596,7 @@ function om2ax(om) result(ax)
else
call dgeev('N','V',3,om_,3,Wr,Wi,devNull,3,VR,3,work,size(work,1),ierr)
if (ierr /= 0) call IO_error(0,ext_msg='Error in om2ax: DGEEV return not zero')
#if defined(__GFORTRAN__) && __GNUC__ < 9 || __INTEL_COMPILER < 1800
#if defined(__GFORTRAN__) && __GNUC__<9 || defined(__INTEL_COMPILER) && INTEL_COMPILER<1800
i = maxloc(merge(1,0,cEq(cmplx(Wr,Wi,pReal),cmplx(1.0_pReal,0.0_pReal,pReal),tol=1.0e-14_pReal)),dim=1)
#else
i = findloc(cEq(cmplx(Wr,Wi,pReal),cmplx(1.0_pReal,0.0_pReal,pReal),tol=1.0e-14_pReal),.true.,dim=1) !find eigenvalue (1,0)

View File

@ -18,12 +18,9 @@ module source_damage_anisoBrittle
implicit none
private
integer, dimension(:), allocatable, public, protected :: &
integer, dimension(:), allocatable :: &
source_damage_anisoBrittle_offset, & !< which source is my current source mechanism?
source_damage_anisoBrittle_instance !< instance of source mechanism
character(len=64), dimension(:,:), allocatable :: &
source_damage_anisoBrittle_output !< name of each post result output
integer, dimension(:,:), allocatable :: &
source_damage_anisoBrittle_Ncleavage !< number of cleavage systems per family
@ -72,17 +69,15 @@ subroutine source_damage_anisoBrittle_init
integer :: Ninstance,phase,instance,source,sourceOffset
integer :: NofMyPhase,p ,i
integer, dimension(0), parameter :: emptyIntArray = [integer::]
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- source_'//SOURCE_DAMAGE_ANISOBRITTLE_LABEL//' init -+>>>'
write(6,'(/,a)') ' <<<+- source_'//SOURCE_DAMAGE_ANISOBRITTLE_LABEL//' init -+>>>'; flush(6)
Ninstance = count(phase_source == SOURCE_damage_anisoBrittle_ID)
if (Ninstance == 0) return
@ -100,9 +95,6 @@ subroutine source_damage_anisoBrittle_init
enddo
enddo
allocate(source_damage_anisoBrittle_output(maxval(phase_Noutput),Ninstance))
source_damage_anisoBrittle_output = ''
allocate(source_damage_anisoBrittle_Ncleavage(lattice_maxNcleavageFamily,Ninstance), source=0)
allocate(param(Ninstance))
@ -151,7 +143,6 @@ subroutine source_damage_anisoBrittle_init
select case(outputs(i))
case ('anisobrittle_drivingforce')
source_damage_anisoBrittle_output(i,source_damage_anisoBrittle_instance(p)) = outputs(i)
prm%outputID = [prm%outputID, damage_drivingforce_ID]
end select
@ -266,8 +257,7 @@ end subroutine source_damage_anisoBrittle_getRateAndItsTangent
subroutine source_damage_anisoBrittle_results(phase,group)
integer, intent(in) :: phase
character(len=*), intent(in) :: group
#if defined(PETSc) || defined(DAMASK_HDF5)
character(len=*), intent(in) :: group
integer :: sourceOffset, o, instance
instance = source_damage_anisoBrittle_instance(phase)
@ -281,7 +271,6 @@ subroutine source_damage_anisoBrittle_results(phase,group)
end select
enddo outputsLoop
end associate
#endif
end subroutine source_damage_anisoBrittle_results

View File

@ -17,14 +17,10 @@ module source_damage_anisoDuctile
implicit none
private
integer, dimension(:), allocatable, public, protected :: &
integer, dimension(:), allocatable :: &
source_damage_anisoDuctile_offset, & !< which source is my current damage mechanism?
source_damage_anisoDuctile_instance !< instance of damage source mechanism
character(len=64), dimension(:,:), allocatable, target, public :: &
source_damage_anisoDuctile_output !< name of each post result output
enum, bind(c)
enumerator :: undefined_ID, &
damage_drivingforce_ID
@ -66,17 +62,15 @@ subroutine source_damage_anisoDuctile_init
integer :: Ninstance,phase,instance,source,sourceOffset
integer :: NofMyPhase,p ,i
integer, dimension(0), parameter :: emptyIntArray = [integer::]
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- source_'//SOURCE_DAMAGE_ANISODUCTILE_LABEL//' init -+>>>'
write(6,'(/,a)') ' <<<+- source_'//SOURCE_DAMAGE_ANISODUCTILE_LABEL//' init -+>>>'; flush(6)
Ninstance = count(phase_source == SOURCE_damage_anisoDuctile_ID)
if (Ninstance == 0) return
@ -93,10 +87,6 @@ subroutine source_damage_anisoDuctile_init
source_damage_anisoDuctile_offset(phase) = source
enddo
enddo
allocate(source_damage_anisoDuctile_output(maxval(phase_Noutput),Ninstance))
source_damage_anisoDuctile_output = ''
allocate(param(Ninstance))
@ -136,7 +126,6 @@ subroutine source_damage_anisoDuctile_init
select case(outputs(i))
case ('anisoductile_drivingforce')
source_damage_anisoDuctile_output(i,source_damage_anisoDuctile_instance(p)) = outputs(i)
prm%outputID = [prm%outputID, damage_drivingforce_ID]
end select
@ -227,7 +216,6 @@ subroutine source_damage_anisoDuctile_results(phase,group)
integer, intent(in) :: phase
character(len=*), intent(in) :: group
#if defined(PETSc) || defined(DAMASK_HDF5)
integer :: sourceOffset, o, instance
instance = source_damage_anisoDuctile_instance(phase)
@ -241,7 +229,6 @@ subroutine source_damage_anisoDuctile_results(phase,group)
end select
enddo outputsLoop
end associate
#endif
end subroutine source_damage_anisoDuctile_results

View File

@ -16,11 +16,9 @@ module source_damage_isoBrittle
implicit none
private
integer, dimension(:), allocatable, public, protected :: &
integer, dimension(:), allocatable :: &
source_damage_isoBrittle_offset, &
source_damage_isoBrittle_instance
character(len=64), dimension(:,:), allocatable :: &
source_damage_isoBrittle_output
enum, bind(c)
enumerator :: &
@ -58,16 +56,15 @@ subroutine source_damage_isoBrittle_init
integer :: Ninstance,phase,instance,source,sourceOffset
integer :: NofMyPhase,p,i
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- source_'//SOURCE_DAMAGE_ISOBRITTLE_LABEL//' init -+>>>'
write(6,'(/,a)') ' <<<+- source_'//SOURCE_DAMAGE_ISOBRITTLE_LABEL//' init -+>>>'; flush(6)
Ninstance = count(phase_source == SOURCE_damage_isoBrittle_ID)
if (Ninstance == 0) return
@ -84,9 +81,6 @@ subroutine source_damage_isoBrittle_init
source_damage_isoBrittle_offset(phase) = source
enddo
enddo
allocate(source_damage_isoBrittle_output(maxval(phase_Noutput),Ninstance))
source_damage_isoBrittle_output = ''
allocate(param(Ninstance))
@ -120,7 +114,6 @@ subroutine source_damage_isoBrittle_init
select case(outputs(i))
case ('isobrittle_drivingforce')
source_damage_isoBrittle_output(i,source_damage_isoBrittle_instance(p)) = outputs(i)
prm%outputID = [prm%outputID, damage_drivingforce_ID]
end select
@ -218,8 +211,7 @@ end subroutine source_damage_isoBrittle_getRateAndItsTangent
subroutine source_damage_isoBrittle_results(phase,group)
integer, intent(in) :: phase
character(len=*), intent(in) :: group
#if defined(PETSc) || defined(DAMASK_HDF5)
character(len=*), intent(in) :: group
integer :: sourceOffset, o, instance
instance = source_damage_isoBrittle_instance(phase)
@ -233,7 +225,6 @@ subroutine source_damage_isoBrittle_results(phase,group)
end select
enddo outputsLoop
end associate
#endif
end subroutine source_damage_isoBrittle_results

View File

@ -15,18 +15,14 @@ module source_damage_isoDuctile
implicit none
private
integer, dimension(:), allocatable, public, protected :: &
integer, dimension(:), allocatable :: &
source_damage_isoDuctile_offset, & !< which source is my current damage mechanism?
source_damage_isoDuctile_instance !< instance of damage source mechanism
character(len=64), dimension(:,:), allocatable, target, public :: &
source_damage_isoDuctile_output !< name of each post result output
enum, bind(c)
enumerator :: undefined_ID, &
damage_drivingforce_ID
end enum !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!11 ToDo
end enum !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ToDo
type, private :: tParameters !< container type for internal constitutive parameters
real(pReal) :: &
@ -57,13 +53,12 @@ subroutine source_damage_isoDuctile_init
integer :: Ninstance,phase,instance,source,sourceOffset
integer :: NofMyPhase,p,i
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
integer(kind(undefined_ID)) :: &
outputID
character(len=pStringLen) :: &
extmsg = ''
character(len=65536), dimension(:), allocatable :: &
character(len=pStringLen), dimension(:), allocatable :: &
outputs
write(6,'(/,a)') ' <<<+- source_'//SOURCE_DAMAGE_ISODUCTILE_LABEL//' init -+>>>'
@ -83,9 +78,6 @@ subroutine source_damage_isoDuctile_init
source_damage_isoDuctile_offset(phase) = source
enddo
enddo
allocate(source_damage_isoDuctile_output(maxval(phase_Noutput),Ninstance))
source_damage_isoDuctile_output = ''
allocate(param(Ninstance))
@ -119,7 +111,6 @@ subroutine source_damage_isoDuctile_init
select case(outputs(i))
case ('isoductile_drivingforce')
source_damage_isoDuctile_output(i,source_damage_isoDuctile_instance(p)) = outputs(i)
prm%outputID = [prm%outputID, damage_drivingforce_ID]
end select
@ -198,8 +189,7 @@ end subroutine source_damage_isoDuctile_getRateAndItsTangent
subroutine source_damage_isoDuctile_results(phase,group)
integer, intent(in) :: phase
character(len=*), intent(in) :: group
#if defined(PETSc) || defined(DAMASK_HDF5)
character(len=*), intent(in) :: group
integer :: sourceOffset, o, instance
instance = source_damage_isoDuctile_instance(phase)
@ -213,7 +203,6 @@ subroutine source_damage_isoDuctile_results(phase,group)
end select
enddo outputsLoop
end associate
#endif
end subroutine source_damage_isoDuctile_results

View File

@ -14,7 +14,7 @@ module source_thermal_dissipation
implicit none
private
integer, dimension(:), allocatable, public, protected :: &
integer, dimension(:), allocatable :: &
source_thermal_dissipation_offset, & !< which source is my current thermal dissipation mechanism?
source_thermal_dissipation_instance !< instance of thermal dissipation source mechanism
@ -39,10 +39,9 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine source_thermal_dissipation_init
integer :: Ninstance,instance,source,sourceOffset
integer :: NofMyPhase,p
integer :: Ninstance,instance,source,sourceOffset,NofMyPhase,p
write(6,'(/,a)') ' <<<+- source_'//SOURCE_thermal_dissipation_label//' init -+>>>'
write(6,'(/,a)') ' <<<+- source_'//SOURCE_thermal_dissipation_label//' init -+>>>'; flush(6)
Ninstance = count(phase_source == SOURCE_thermal_dissipation_ID)

View File

@ -14,7 +14,7 @@ module source_thermal_externalheat
implicit none
private
integer, dimension(:), allocatable, public, protected :: &
integer, dimension(:), allocatable :: &
source_thermal_externalheat_offset, & !< which source is my current thermal dissipation mechanism?
source_thermal_externalheat_instance !< instance of thermal dissipation source mechanism
@ -43,9 +43,9 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine source_thermal_externalheat_init
integer :: maxNinstance,instance,source,sourceOffset,NofMyPhase,p
integer :: maxNinstance,instance,source,sourceOffset,NofMyPhase,p
write(6,'(/,a)') ' <<<+- source_'//SOURCE_thermal_externalheat_label//' init -+>>>'
write(6,'(/,a)') ' <<<+- source_'//SOURCE_thermal_externalheat_label//' init -+>>>'; flush(6)
maxNinstance = count(phase_source == SOURCE_thermal_externalheat_ID)

View File

@ -15,20 +15,19 @@ module thermal_adiabatic
implicit none
private
character(len=64), dimension(:,:), allocatable, target, public :: &
thermal_adiabatic_output !< name of each post result output
integer, dimension(:), allocatable, target, public :: &
thermal_adiabatic_Noutput !< number of outputs per instance of this thermal model
enum, bind(c)
enumerator :: undefined_ID, &
temperature_ID
end enum
integer(kind(undefined_ID)), dimension(:,:), allocatable :: &
thermal_adiabatic_outputID !< ID of each post result output
type :: tParameters
integer(kind(undefined_ID)), dimension(:), allocatable :: &
outputID
end type tParameters
type(tparameters), dimension(:), allocatable :: &
param
public :: &
thermal_adiabatic_init, &
@ -47,50 +46,45 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine thermal_adiabatic_init
integer :: maxNinstance,section,instance,i,sizeState,NofMyHomog
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
character(len=65536), dimension(:), allocatable :: outputs
integer :: maxNinstance,o,h,NofMyHomog
character(len=pStringLen), dimension(:), allocatable :: outputs
write(6,'(/,a)') ' <<<+- thermal_'//THERMAL_ADIABATIC_label//' init -+>>>'
write(6,'(/,a)') ' <<<+- thermal_'//THERMAL_ADIABATIC_label//' init -+>>>'; flush(6)
maxNinstance = count(thermal_type == THERMAL_adiabatic_ID)
if (maxNinstance == 0) return
allocate(thermal_adiabatic_output (maxval(homogenization_Noutput),maxNinstance))
thermal_adiabatic_output = ''
allocate(thermal_adiabatic_outputID (maxval(homogenization_Noutput),maxNinstance),source=undefined_ID)
allocate(thermal_adiabatic_Noutput (maxNinstance), source=0)
allocate(param(maxNinstance))
initializeInstances: do section = 1, size(thermal_type)
if (thermal_type(section) /= THERMAL_adiabatic_ID) cycle
NofMyHomog=count(material_homogenizationAt==section)
instance = thermal_typeInstance(section)
outputs = config_homogenization(section)%getStrings('(output)',defaultVal=emptyStringArray)
do i=1, size(outputs)
select case(outputs(i))
do h = 1, size(thermal_type)
if (thermal_type(h) /= THERMAL_adiabatic_ID) cycle
associate(prm => param(thermal_typeInstance(h)),config => config_homogenization(h))
outputs = config%getStrings('(output)',defaultVal=emptyStringArray)
allocate(prm%outputID(0))
do o=1, size(outputs)
select case(outputs(o))
case('temperature')
thermal_adiabatic_Noutput(instance) = thermal_adiabatic_Noutput(instance) + 1
thermal_adiabatic_outputID(thermal_adiabatic_Noutput(instance),instance) = temperature_ID
thermal_adiabatic_output(thermal_adiabatic_Noutput(instance),instance) = outputs(i)
prm%outputID = [prm%outputID, temperature_ID]
end select
enddo
NofMyHomog=count(material_homogenizationAt==h)
thermalState(h)%sizeState = 1
allocate(thermalState(h)%state0 (1,NofMyHomog), source=thermal_initialT(h))
allocate(thermalState(h)%subState0(1,NofMyHomog), source=thermal_initialT(h))
allocate(thermalState(h)%state (1,NofMyHomog), source=thermal_initialT(h))
! allocate state arrays
sizeState = 1
thermalState(section)%sizeState = sizeState
allocate(thermalState(section)%state0 (sizeState,NofMyHomog), source=thermal_initialT(section))
allocate(thermalState(section)%subState0(sizeState,NofMyHomog), source=thermal_initialT(section))
allocate(thermalState(section)%state (sizeState,NofMyHomog), source=thermal_initialT(section))
nullify(thermalMapping(section)%p)
thermalMapping(section)%p => mappingHomogenization(1,:,:)
deallocate(temperature(section)%p)
temperature(section)%p => thermalState(section)%state(1,:)
deallocate(temperatureRate(section)%p)
allocate (temperatureRate(section)%p(NofMyHomog), source=0.0_pReal)
enddo initializeInstances
nullify(thermalMapping(h)%p)
thermalMapping(h)%p => mappingHomogenization(1,:,:)
deallocate(temperature(h)%p)
temperature(h)%p => thermalState(h)%state(1,:)
deallocate(temperatureRate(h)%p)
allocate (temperatureRate(h)%p(NofMyHomog), source=0.0_pReal)
end associate
enddo
end subroutine thermal_adiabatic_init
@ -254,20 +248,19 @@ subroutine thermal_adiabatic_results(homog,group)
integer, intent(in) :: homog
character(len=*), intent(in) :: group
#if defined(PETSc) || defined(DAMASK_HDF5)
integer :: o, instance
integer :: o
instance = thermal_typeInstance(homog)
associate(prm => param(damage_typeInstance(homog)))
outputsLoop: do o = 1,thermal_adiabatic_Noutput(instance)
select case(thermal_adiabatic_outputID(o,instance))
outputsLoop: do o = 1,size(prm%outputID)
select case(prm%outputID(o))
case (temperature_ID)
call results_writeDataset(group,temperature(homog)%p,'T',&
'temperature','K')
end select
enddo outputsLoop
#endif
end associate
end subroutine thermal_adiabatic_results

View File

@ -14,20 +14,20 @@ module thermal_conduction
implicit none
private
character(len=64), dimension(:,:), allocatable, target, public :: &
thermal_conduction_output !< name of each post result output
integer, dimension(:), allocatable, target, public :: &
thermal_conduction_Noutput !< number of outputs per instance of this damage
enum, bind(c)
enumerator :: undefined_ID, &
temperature_ID
enumerator :: &
undefined_ID, &
temperature_ID
end enum
integer(kind(undefined_ID)), dimension(:,:), allocatable, private :: &
thermal_conduction_outputID !< ID of each post result output
type :: tParameters
integer(kind(undefined_ID)), dimension(:), allocatable :: &
outputID
end type tParameters
type(tparameters), dimension(:), allocatable :: &
param
public :: &
thermal_conduction_init, &
@ -48,53 +48,45 @@ contains
subroutine thermal_conduction_init
integer :: maxNinstance,section,instance,i
integer :: sizeState
integer :: NofMyHomog
character(len=65536), dimension(0), parameter :: emptyStringArray = [character(len=65536)::]
character(len=65536), dimension(:), allocatable :: outputs
integer :: maxNinstance,o,NofMyHomog,h
character(len=pStringLen), dimension(:), allocatable :: outputs
write(6,'(/,a)') ' <<<+- thermal_'//THERMAL_CONDUCTION_label//' init -+>>>'
write(6,'(/,a)') ' <<<+- thermal_'//THERMAL_CONDUCTION_label//' init -+>>>'; flush(6)
maxNinstance = count(thermal_type == THERMAL_conduction_ID)
if (maxNinstance == 0) return
allocate(thermal_conduction_output (maxval(homogenization_Noutput),maxNinstance))
thermal_conduction_output = ''
allocate(thermal_conduction_outputID (maxval(homogenization_Noutput),maxNinstance),source=undefined_ID)
allocate(thermal_conduction_Noutput (maxNinstance), source=0)
allocate(param(maxNinstance))
initializeInstances: do section = 1, size(thermal_type)
if (thermal_type(section) /= THERMAL_conduction_ID) cycle
NofMyHomog=count(material_homogenizationAt==section)
instance = thermal_typeInstance(section)
outputs = config_homogenization(section)%getStrings('(output)',defaultVal=emptyStringArray)
do i=1, size(outputs)
select case(outputs(i))
do h = 1, size(thermal_type)
if (thermal_type(h) /= THERMAL_conduction_ID) cycle
associate(prm => param(thermal_typeInstance(h)),config => config_homogenization(h))
outputs = config%getStrings('(output)',defaultVal=emptyStringArray)
allocate(prm%outputID(0))
do o=1, size(outputs)
select case(outputs(o))
case('temperature')
thermal_conduction_Noutput(instance) = thermal_conduction_Noutput(instance) + 1
thermal_conduction_outputID(thermal_conduction_Noutput(instance),instance) = temperature_ID
thermal_conduction_output(thermal_conduction_Noutput(instance),instance) = outputs(i)
prm%outputID = [prm%outputID, temperature_ID]
end select
enddo
NofMyHomog=count(material_homogenizationAt==h)
thermalState(h)%sizeState = 0
allocate(thermalState(h)%state0 (0,NofMyHomog))
allocate(thermalState(h)%subState0(0,NofMyHomog))
allocate(thermalState(h)%state (0,NofMyHomog))
! allocate state arrays
sizeState = 0
thermalState(section)%sizeState = sizeState
allocate(thermalState(section)%state0 (sizeState,NofMyHomog))
allocate(thermalState(section)%subState0(sizeState,NofMyHomog))
allocate(thermalState(section)%state (sizeState,NofMyHomog))
nullify(thermalMapping(section)%p)
thermalMapping(section)%p => mappingHomogenization(1,:,:)
deallocate(temperature (section)%p)
allocate (temperature (section)%p(NofMyHomog), source=thermal_initialT(section))
deallocate(temperatureRate(section)%p)
allocate (temperatureRate(section)%p(NofMyHomog), source=0.0_pReal)
enddo initializeInstances
nullify(thermalMapping(h)%p)
thermalMapping(h)%p => mappingHomogenization(1,:,:)
deallocate(temperature (h)%p)
allocate (temperature (h)%p(NofMyHomog), source=thermal_initialT(h))
deallocate(temperatureRate(h)%p)
allocate (temperatureRate(h)%p(NofMyHomog), source=0.0_pReal)
end associate
enddo
end subroutine thermal_conduction_init
@ -209,7 +201,8 @@ function thermal_conduction_getSpecificHeat(ip,el)
thermal_conduction_getSpecificHeat/real(homogenization_Ngrains(material_homogenizationAt(el)),pReal)
end function thermal_conduction_getSpecificHeat
!--------------------------------------------------------------------------------------------------
!> @brief returns homogenized mass density
!--------------------------------------------------------------------------------------------------
@ -267,20 +260,19 @@ subroutine thermal_conduction_results(homog,group)
integer, intent(in) :: homog
character(len=*), intent(in) :: group
#if defined(PETSc) || defined(DAMASK_HDF5)
integer :: o, instance
integer :: o
instance = thermal_typeInstance(homog)
associate(prm => param(damage_typeInstance(homog)))
outputsLoop: do o = 1,thermal_conduction_Noutput(instance)
select case(thermal_conduction_outputID(o,instance))
outputsLoop: do o = 1,size(prm%outputID)
select case(prm%outputID(o))
case (temperature_ID)
call results_writeDataset(group,temperature(homog)%p,'T',&
'temperature','K')
end select
enddo outputsLoop
#endif
end associate
end subroutine thermal_conduction_results

View File

@ -3,7 +3,6 @@
!> @brief material subroutine for isothermal temperature field
!--------------------------------------------------------------------------------------------------
module thermal_isothermal
use prec
use config
use material
@ -20,27 +19,25 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine thermal_isothermal_init
integer :: &
homog, &
NofMyHomog
integer :: h,NofMyHomog
write(6,'(/,a)') ' <<<+- thermal_'//THERMAL_isothermal_label//' init -+>>>'
write(6,'(/,a)') ' <<<+- thermal_'//THERMAL_isothermal_label//' init -+>>>'; flush(6)
initializeInstances: do homog = 1, material_Nhomogenization
if (thermal_type(homog) /= THERMAL_isothermal_ID) cycle
NofMyHomog = count(material_homogenizationAt == homog)
thermalState(homog)%sizeState = 0
allocate(thermalState(homog)%state0 (0,NofMyHomog), source=0.0_pReal)
allocate(thermalState(homog)%subState0(0,NofMyHomog), source=0.0_pReal)
allocate(thermalState(homog)%state (0,NofMyHomog), source=0.0_pReal)
do h = 1, size(config_homogenization)
if (thermal_type(h) /= THERMAL_isothermal_ID) cycle
NofMyHomog = count(material_homogenizationAt == h)
thermalState(h)%sizeState = 0
allocate(thermalState(h)%state0 (0,NofMyHomog))
allocate(thermalState(h)%subState0(0,NofMyHomog))
allocate(thermalState(h)%state (0,NofMyHomog))
deallocate(temperature (homog)%p)
allocate (temperature (homog)%p(1), source=thermal_initialT(homog))
deallocate(temperatureRate(homog)%p)
allocate (temperatureRate(homog)%p(1), source=0.0_pReal)
deallocate(temperature (h)%p)
allocate (temperature (h)%p(1), source=thermal_initialT(h))
deallocate(temperatureRate(h)%p)
allocate (temperatureRate(h)%p(1))
enddo initializeInstances
enddo
end subroutine thermal_isothermal_init