commit 6dfe24290c
Author: Martin Diehl
Date:   2019-12-08 21:10:50 +01:00

    Merge remote-tracking branch 'origin/development' into fix-orientation-relationship-2

132 changed files with 4472 additions and 4294 deletions

.gitattributes
@@ -3,8 +3,8 @@
 # always use LF, even if the files are edited on windows, they need to be compiled/used on unix
 * text eol=lf
-installation/mods_Abaqus/abaqus_v6_windows.env eol=crlf
 # Denote all files that are truly binary and should not be modified.
 *.png binary
 *.jpg binary
 *.cae binary
+*.hdf5 binary

.gitlab-ci.yml
@@ -1,6 +1,7 @@
 ---
 stages:
   - prepareAll
+  - python
   - preprocessing
   - postprocessing
   - compilePETSc
@@ -103,6 +104,16 @@ checkout:
     - master
     - release
+###################################################################################################
+Pytest:
+  stage: python
+  script:
+    - cd $DAMASKROOT/python
+    - pytest
+  except:
+    - master
+    - release
 ###################################################################################################
 OrientationRelationship:
   stage: preprocessing
@@ -308,13 +319,6 @@ nonlocal_densityConservation:
     - master
     - release
-Spectral_ipNeighborhood:
-  stage: grid
-  script: Spectral_ipNeighborhood/test.py
-  except:
-    - master
-    - release
 RGC_DetectChanges:
   stage: grid
   script: RGC_DetectChanges/test.py
@@ -506,7 +510,6 @@ Processing:
   stage: createDocumentation
   script:
     - cd $DAMASKROOT/processing/pre
-    - rm abq_addUserOutput.py marc_addUserOutput.py
     - $DAMASKROOT/PRIVATE/documenting/scriptHelpToWiki.py --debug *.py
     - cd $DAMASKROOT/processing/post
     - rm vtk2ang.py DAD*.py

CMakeLists.txt
@@ -1,6 +1,6 @@
 ########################################################################################
 # Compiler options for building DAMASK
-cmake_minimum_required (VERSION 3.6.0 FATAL_ERROR)
+cmake_minimum_required (VERSION 3.10.0 FATAL_ERROR)
 #---------------------------------------------------------------------------------------
 # Find PETSc from system environment

PRIVATE (submodule)
@@ -1 +1 @@
-Subproject commit 214c69be8b51adb39eb7ad25b139727c8b98afce
+Subproject commit 524e86c117d816e3bd873eed7663e258a6f2e139

VERSION
@@ -1 +1 @@
-v2.0.3-957-gccbcc0d0
+v2.0.3-1228-g3e269f04

@@ -1,9 +0,0 @@
-[all]
-(output) orientation # quaternion
-(output) grainrotation # deviation from initial orientation as axis (1-3) and angle in degree (4) in crystal reference coordinates
-(output) F # deformation gradient tensor
-(output) Fe # elastic deformation gradient tensor
-(output) Fp # plastic deformation gradient tensor
-(output) P # first Piola-Kirchhoff stress tensor
-(output) S # second Piola-Kirchhoff stress tensor
-(output) Lp # plastic velocity gradient tensor

@@ -14,7 +14,7 @@ SolidSolutionStrength 1.5e8 # Strength due to elements in solid solution
 ### Dislocation glide parameters ###
 #per family
-Nslip 12 0
+Nslip 12
 slipburgers 2.72e-10 # Burgers vector of slip system [m]
 rhoedge0 1.0e12 # Initial edge dislocation density [m/m**3]
 rhoedgedip0 1.0 # Initial edge dipole dislocation density [m/m**3]

@@ -4,58 +4,23 @@ elasticity hooke
 plasticity nonlocal
 /nonlocal/
-(output) rho
-(output) rho_edge
-(output) rho_screw
-(output) rho_sgl
-(output) rho_sgl_edge_pos
-(output) rho_sgl_edge_neg
-(output) rho_sgl_screw_pos
-(output) rho_sgl_screw_neg
-(output) rho_sgl_edge_pos_mobile
-(output) rho_sgl_edge_neg_mobile
-(output) rho_sgl_screw_pos_mobile
-(output) rho_sgl_screw_neg_mobile
-(output) rho_sgl_edge_pos_immobile
-(output) rho_sgl_edge_neg_immobile
-(output) rho_sgl_screw_pos_immobile
-(output) rho_sgl_screw_neg_immobile
-(output) rho_dip_edge
-(output) rho_dip_screw
+(output) rho_sgl_mob_edg_pos
+(output) rho_sgl_imm_edg_pos
+(output) rho_sgl_mob_edg_neg
+(output) rho_sgl_imm_edg_neg
+(output) rho_sgl_mob_scr_pos
+(output) rho_sgl_imm_scr_pos
+(output) rho_sgl_mob_scr_neg
+(output) rho_sgl_imm_scr_neg
+(output) rho_dip_edg
+(output) rho_dip_scr
 (output) rho_forest
-(output) delta
-(output) delta_sgl
-(output) delta_dip
-(output) shearrate
-(output) resolvedstress
-(output) resolvedstress_back
-(output) resolvedstress_external
-(output) resistance
-(output) rho_dot
-(output) rho_dot_sgl
-(output) rho_dot_sgl_mobile
-(output) rho_dot_dip
-(output) rho_dot_gen_edge
-(output) rho_dot_gen_screw
-(output) rho_dot_sgl2dip_edge
-(output) rho_dot_sgl2dip_screw
-(output) rho_dot_ann_ath
-(output) rho_dot_ann_the
-(output) rho_dot_ann_the_edge
-(output) rho_dot_ann_the_screw
-(output) rho_dot_edgejogs
-(output) rho_dot_flux
-(output) rho_dot_flux_mobile
-(output) rho_dot_flux_edge
-(output) rho_dot_flux_screw
-(output) velocity_edge_pos
-(output) velocity_edge_neg
-(output) velocity_screw_pos
-(output) velocity_screw_neg
-(output) maximumDipoleHeight_edge
-(output) maximumDipoleHeight_screw
-(output) accumulated_shear
-(output) dislocationstress
+(output) gamma
+(output) tau_pass
+(output) v_edg_pos
+(output) v_edg_neg
+(output) v_scr_pos
+(output) v_scr_neg
 lattice_structure fcc
 Nslip 12 # number of slip systems

@@ -3,31 +3,24 @@
 elasticity hooke
 plasticity nonlocal
 /nonlocal/
-(output) rho
-(output) rho_sgl_mobile
-(output) rho_sgl_immobile
-(output) rho_sgl_edge_pos
-(output) rho_sgl_edge_neg
-(output) rho_sgl_screw_pos
-(output) rho_sgl_screw_neg
-(output) rho_dip_edge
-(output) rho_dip_screw
+(output) rho_sgl_mob_edg_pos
+(output) rho_sgl_imm_edg_pos
+(output) rho_sgl_mob_edg_neg
+(output) rho_sgl_imm_edg_neg
+(output) rho_sgl_mob_scr_pos
+(output) rho_sgl_imm_scr_pos
+(output) rho_sgl_mob_scr_neg
+(output) rho_sgl_imm_scr_neg
+(output) rho_dip_edg
+(output) rho_dip_scr
 (output) rho_forest
-(output) accumulatedshear
-(output) shearrate
-(output) resolvedstress
-(output) resistance
-(output) velocity_edge_pos
-(output) rho_dot_gen
-(output) rho_dot_sgl2dip_edge
-(output) rho_dot_sgl2dip_screw
-(output) rho_dot_ann_ath
-(output) rho_dot_ann_the_edge
-(output) rho_dot_ann_the_screw
-(output) rho_dot_edgejogs
-(output) rho_dot_flux_edge
-(output) rho_dot_flux_screw
+(output) gamma
+(output) tau_pass
+(output) v_edg_pos
+(output) v_edg_neg
+(output) v_scr_pos
+(output) v_scr_neg
 lattice_structure fcc
 Nslip 12 # number of slip systems per family

@@ -5,18 +5,6 @@
 [SX]
 mech none
-#-------------------#
-<crystallite>
-#-------------------#
-[almostAll]
-(output) orientation # quaternion
-(output) grainrotation # deviation from initial orientation as axis (1-3) and angle in degree (4)
-(output) F # deformation gradient tensor
-(output) Fe # elastic deformation gradient tensor
-(output) Fp # plastic deformation gradient tensor
-(output) P # first Piola-Kirchhoff stress tensor
-(output) Lp # plastic velocity gradient tensor
 #-------------------#
 <phase>
 #-------------------#

@@ -546,15 +546,15 @@ fi
 # DAMASK compiler calls: additional flags are in line 2 OpenMP flags in line 3
-DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O0 $I8FFLAGS -I$MARC_SOURCE/common \
+DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
 -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O1 $I8FFLAGS -I$MARC_SOURCE/common \
+DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
 -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias -O2 $I8FFLAGS -I$MARC_SOURCE/common \
+DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
 -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
@@ -572,20 +572,6 @@ then
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM"
 fi
-# DAMASK compiler calls: additional flags are in line 2 OpenMP flags in line 3
-DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
 FORTLOWT="$FORTLOW"
 FORTRANT="$FORTRAN"

@@ -546,15 +546,15 @@ fi
 # DAMASK compiler calls: additional flags are in line 2 OpenMP flags in line 3
-DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O0 $I8FFLAGS -I$MARC_SOURCE/common \
+DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
 -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O1 $I8FFLAGS -I$MARC_SOURCE/common \
+DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
 -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias -O2 $I8FFLAGS -I$MARC_SOURCE/common \
+DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
 -fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
@@ -572,20 +572,6 @@ then
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM"
 fi
-# DAMASK compiler calls: additional flags are in line 2 OpenMP flags in line 3
-DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
 FORTLOWT="$FORTLOW"
 FORTRANT="$FORTRAN"

@@ -554,16 +554,16 @@ fi
 # DAMASK compiler calls: additional flags are in line 2 OpenMP flags in line 3
-DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O0 $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
+DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
+-fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2019 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -O1 $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
+DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
+-fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2019 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias -O2 $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
+DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
+-fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2019 -DDAMASKVERSION=$DAMASKVERSION \
 -qopenmp -qopenmp-threadprivate=compat\
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
@@ -579,19 +579,6 @@ then
 $MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM"
 fi
-# DAMASK compiler calls: additional flags are in line 2 OpenMP flags in line 3
-DFORTLOWMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTRANMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
-DFORTHIGHMP="$FCOMP -c -implicitnone -stand f08 -standard-semantics -assume nostd_mod_proc_name -safe_cray_ptr $PROFILE -zero -mp1 -WB -fno-alias $I8FFLAGS -I$MARC_SOURCE/common \
--fpp -ftz -diag-disable 5268 -warn declarations -warn general -warn usage -warn interfaces -warn ignore_loc -warn alignments -DMarc4DAMASK=2018.1 -DDAMASKVERSION=$DAMASKVERSION \
--qopenmp -qopenmp-threadprivate=compat\
-$MUMPS_INCLUDE $I8DEFINES -DLinux -DLINUX -DLinux_intel $FDEFINES $DDM $SOLVERFLAGS -I$KDTREE2_MOD"
 FORTLOWT="$FORTLOW"
 FORTRANT="$FORTRAN"

@@ -1,66 +0,0 @@
-#!/usr/bin/env python2.7
-# -*- coding: UTF-8 no BOM -*-
-import os,string,scipy
-import numpy as np
-import damask
-from optparse import OptionParser
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Apply filter(s) to Gwyddion data.
-""" + string.replace(scriptID,'\n','\\n')
-)
-for option in ['opening',
-               'closing',
-               'erosion',
-               'dilation',
-               'average',
-               'median',
-              ]:
-  parser.add_option('-%s'%option[0], '--%s'%option, dest=option, type='int',
-                    help = 'stencil size for %s filter'%option)
-  parser.set_default(option, 0)
-(options, filenames) = parser.parse_args()
-# ------------------------------------------ read Gwyddion data ---------------------------------------
-for file in filenames:
-  filters = ''
-  header = []
-  with open(file,'r') as f:
-    for line in f:
-      pieces = line.split()
-      if pieces[0] != '#': break
-      if pieces[1] == 'Width:':  width  = float(pieces[2])
-      if pieces[1] == 'Height:': height = float(pieces[2])
-      header.append(line.lstrip('#').strip())
-  elevation = np.loadtxt(file)#*1e6
-  if options.opening > 0:
-    elevation = scipy.ndimage.morphology.grey_opening(elevation,options.opening)
-    filters += '_opening%i'%options.opening
-  if options.closing > 0:
-    elevation = scipy.ndimage.morphology.grey_closing(elevation,options.closing)
-    filters += '_closing%i'%options.closing
-  if options.erosion > 0:
-    elevation = scipy.ndimage.morphology.grey_erosion(elevation,options.erosion)
-    filters += '_erosion%i'%options.erosion
-  if options.dilation > 0:
-    elevation = scipy.ndimage.morphology.grey_dilation(elevation,options.dilation)
-    filters += '_dilation%i'%options.dilation
-  if options.average > 0:
-    elevation = scipy.ndimage.filters.uniform_filter(elevation,options.average)
-    filters += '_avg%i'%options.average
-  if options.median > 0:
-    elevation = scipy.ndimage.filters.median_filter(elevation,options.median)
-    filters += '_median%i'%options.median
-  np.savetxt(os.path.splitext(file)[0]+filters+os.path.splitext(file)[1],elevation,header='\n'.join(header))

@@ -1,98 +0,0 @@
-#!/usr/bin/env python2.7
-# -*- coding: UTF-8 no BOM -*-
-import os,string,vtk
-import numpy as np
-import damask
-from optparse import OptionParser
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-scalingFactor = { \
-  'm':  {
-         'm':  1e0,
-         'mm': 1e-3,
-         'µm': 1e-6,
-        },
-  'mm': {
-         'm':  1e+3,
-         'mm': 1e0,
-         'µm': 1e-3,
-        },
-  'µm': {
-         'm':  1e+6,
-         'mm': 1e+3,
-         'µm': 1e0,
-        },
- }
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Produce VTK rectilinear grid from Gwyddion dataset exported as text.
-""" + string.replace(scriptID,'\n','\\n')
-)
-parser.add_option('-s', '--scaling', dest='scaling', type='float',
-                  help = 'scaling factor for elevation data [auto]')
-parser.set_defaults(scaling = 0.0)
-(options, filenames) = parser.parse_args()
-# ------------------------------------------ read Gwyddion data ---------------------------------------
-for file in filenames:
-  with open(file,'r') as f:
-    for line in f:
-      pieces = line.split()
-      if pieces[0] != '#': break
-      if len(pieces) < 2: continue
-      if pieces[1] == 'Width:':
-        width = float(pieces[2])
-        lateralunit = pieces[3]
-      if pieces[1] == 'Height:':
-        height = float(pieces[2])
-        lateralunit = pieces[3]
-      if pieces[1] == 'Value' and pieces[2] == 'units:':
-        elevationunit = pieces[3]
-  if options.scaling == 0.0:
-    options.scaling = scalingFactor[lateralunit][elevationunit]
-  elevation = np.loadtxt(file)*options.scaling
-  grid = vtk.vtkRectilinearGrid()
-  grid.SetDimensions(elevation.shape[1],elevation.shape[0],1)
-  xCoords = vtk.vtkDoubleArray()
-  for x in np.arange(0.0,width,width/elevation.shape[1],'d'):
-    xCoords.InsertNextValue(x)
-  yCoords = vtk.vtkDoubleArray()
-  for y in np.arange(0.0,height,height/elevation.shape[0],'d'):
-    yCoords.InsertNextValue(y)
-  zCoords = vtk.vtkDoubleArray()
-  zCoords.InsertNextValue(0.0)
-  grid.SetXCoordinates(xCoords)
-  grid.SetYCoordinates(yCoords)
-  grid.SetZCoordinates(zCoords)
-  vector = vtk.vtkFloatArray()
-  vector.SetName("elevation");
-  vector.SetNumberOfComponents(3);
-  vector.SetNumberOfTuples(np.prod(elevation.shape));
-  for i,z in enumerate(np.ravel(elevation)):
-    vector.SetTuple3(i,0,0,z)
-  grid.GetPointData().AddArray(vector)
-  writer = vtk.vtkXMLRectilinearGridWriter()
-  writer.SetDataModeToBinary()
-  writer.SetCompressorTypeToZLib()
-  writer.SetFileName(os.path.splitext(file)[0]+'.vtr')
-  if vtk.VTK_MAJOR_VERSION <= 5:
-    writer.SetInput(grid)
-  else:
-    writer.SetInputData(grid)
-  writer.Write()

@@ -47,6 +47,8 @@ for filename in options.filenames:
   coords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)
+  N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
+  N_digits = 5 # hack to keep test intact
   for i,inc in enumerate(results.iter_visible('increments')):
     print('Output step {}/{}'.format(i+1,len(results.increments)))
@@ -59,14 +61,14 @@ for filename in options.filenames:
     data = np.concatenate((data,coords),1)
     header+=' 1_pos 2_pos 3_pos'
-    results.set_visible('materialpoints',False)
-    results.set_visible('constituents', True)
     for label in options.con:
+      for p in results.iter_visible('con_physics'):
+        for c in results.iter_visible('constituents'):
          x = results.get_dataset_location(label)
          if len(x) == 0:
            continue
-         array = results.read_dataset(x,0)
-         d = int(np.product(np.shape(array)[1:]))
+         array = results.read_dataset(x,0,plain=True)
+         d = np.product(np.shape(array)[1:])
          data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
          if d>1:
@@ -74,14 +76,14 @@ for filename in options.filenames:
       else:
        header+=' '+label
-    results.set_visible('constituents', False)
-    results.set_visible('materialpoints',True)
     for label in options.mat:
+      for p in results.iter_visible('mat_physics'):
+        for m in results.iter_visible('materialpoints'):
          x = results.get_dataset_location(label)
          if len(x) == 0:
            continue
-         array = results.read_dataset(x,0)
-         d = int(np.product(np.shape(array)[1:]))
+         array = results.read_dataset(x,0,plain=True)
+         d = np.product(np.shape(array)[1:])
          data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
          if d>1:
@@ -92,5 +94,6 @@ for filename in options.filenames:
     dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
     if not os.path.isdir(dirname):
      os.mkdir(dirname,0o755)
-    file_out = '{}_{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],inc)
+    file_out = '{}_inc{}.txt'.format(os.path.splitext(os.path.split(filename)[-1])[0],
+                                     inc[3:].zfill(N_digits))
     np.savetxt(os.path.join(dirname,file_out),data,header=header,comments='')
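For reference, the zero padding introduced above derives the field width from the numeric part of the last increment label (the leading 'inc' stripped). A minimal standalone sketch of the same logic, with hypothetical labels:

    import numpy as np

    increments = ['inc0','inc10','inc250']                # hypothetical 'inc'-prefixed labels
    N_digits = int(np.floor(np.log10(int(increments[-1][3:]))))+1
    print(['inc'+i[3:].zfill(N_digits) for i in increments])
    # ['inc000', 'inc010', 'inc250'] -- fixed width keeps the output files lexically sorted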

@@ -42,7 +42,7 @@ for filename in options.filenames:
   results = damask.DADF5(filename)
   if results.structured: # for grid solvers use rectilinear grid
-    grid = vtk.vtkRectilineagrid()
+    grid = vtk.vtkRectilinearGrid()
     coordArray = [vtk.vtkDoubleArray(),
                   vtk.vtkDoubleArray(),
                   vtk.vtkDoubleArray(),
@@ -66,7 +66,7 @@ for filename in options.filenames:
     for i in f['/geometry/T_c']:
       grid.InsertNextCell(vtk.VTK_HEXAHEDRON,8,i-1)
+  N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
   for i,inc in enumerate(results.iter_visible('increments')):
     print('Output step {}/{}'.format(i+1,len(results.increments)))
     vtk_data = []
@@ -74,7 +74,6 @@ for filename in options.filenames:
     results.set_visible('materialpoints',False)
     results.set_visible('constituents', True)
     for label in options.con:
      for p in results.iter_visible('con_physics'):
        if p != 'generic':
          for c in results.iter_visible('constituents'):
@@ -120,14 +119,22 @@ for filename in options.filenames:
          vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
          grid.GetCellData().AddArray(vtk_data[-1])
-    writer = vtk.vtkXMLRectilineagridWriter() if results.structured else \
+    writer = vtk.vtkXMLRectilinearGridWriter() if results.structured else \
              vtk.vtkXMLUnstructuredGridWriter()
+    results.set_visible('constituents', False)
+    results.set_visible('materialpoints',False)
+    x = results.get_dataset_location('u_n')
+    vtk_data.append(numpy_support.numpy_to_vtk(num_array=results.read_dataset(x,0),deep=True,array_type=vtk.VTK_DOUBLE))
+    vtk_data[-1].SetName('u')
+    grid.GetPointData().AddArray(vtk_data[-1])
     dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
     if not os.path.isdir(dirname):
      os.mkdir(dirname,0o755)
-    file_out = '{}_{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],inc,writer.GetDefaultFileExtension())
+    file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],
+                                    inc[3:].zfill(N_digits),
+                                    writer.GetDefaultFileExtension())
     writer.SetCompressorTypeToZLib()
     writer.SetDataModeToBinary()
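The new block above also attaches the nodal displacement dataset 'u_n' as point data named 'u', converting through numpy_support (presumably imported by this script from vtk.util, since the added lines call it unqualified). A self-contained sketch of just that conversion, with a dummy array:

    import numpy as np
    import vtk
    from vtk.util import numpy_support

    u = np.zeros((8,3))                                   # hypothetical per-node displacement vectors
    vtk_u = numpy_support.numpy_to_vtk(num_array=u, deep=True, array_type=vtk.VTK_DOUBLE)
    vtk_u.SetName('u')                                    # label under which ParaView shows the array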

@@ -52,6 +52,7 @@ for filename in options.filenames:
   Polydata.SetVerts(Vertices)
   Polydata.Modified()
+  N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
   for i,inc in enumerate(results.iter_visible('increments')):
     print('Output step {}/{}'.format(i+1,len(results.increments)))
     vtk_data = []
@@ -111,7 +112,9 @@ for filename in options.filenames:
     dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
     if not os.path.isdir(dirname):
      os.mkdir(dirname,0o755)
-    file_out = '{}_{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],inc,writer.GetDefaultFileExtension())
+    file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],
+                                    inc[3:].zfill(N_digits),
+                                    writer.GetDefaultFileExtension())
     writer.SetCompressorTypeToZLib()
     writer.SetDataModeToBinary()

@@ -49,7 +49,7 @@ Phase_types = {'Primary': 0} #further additions to these can be done by looking
 # --------------------------------------------------------------------
 parser = argparse.ArgumentParser(description='Creating a file for DREAM3D from DAMASK data')
 parser.add_argument('filenames',nargs='+',help='HDF5 based output file')
-parser.add_argument('--inc',nargs='+',help='Increment for which DREAM3D to be used, eg. 00025',type=int)
+parser.add_argument('--inc',nargs='+',help='Increment for which DREAM3D to be used, eg. 25',type=int)
 parser.add_argument('-d','--dir', dest='dir',default='postProc',metavar='string',
                     help='name of subdirectory to hold output')
@@ -59,15 +59,13 @@ options = parser.parse_args()
 # loop over input files
 for filename in options.filenames:
   f = damask.DADF5(filename) #DAMASK output file
-  count = 0
-  for increment in f.increments:
-    if int(increment[3:]) not in options.inc:
-      count = count + 1
+  for increment in options.inc:
+    f.set_by_increment(increment,increment)
+    if len(f.visible['increments']) == 0:
       continue
 #-------output file creation-------------------------------------
     dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
-    print(dirname)
     try:
       os.mkdir(dirname)
     except FileExistsError:
@@ -90,11 +88,10 @@ for filename in options.filenames:
     # Phase information of DREAM.3D is constituent ID in DAMASK
     o[cell_data_label + '/Phases'] = f.get_constituent_ID().reshape(tuple(f.grid)+(1,))
     # Data quaternions
-    DAMASK_quaternion = f.read_dataset(f.get_dataset_location('orientation'),0)
-    DREAM_3D_quaternion = np.empty((np.prod(f.grid),4),dtype=np.float32)
+    DAMASK_quaternion = f.read_dataset(f.get_dataset_location('orientation'))
     # Convert: DAMASK uses P = -1, DREAM.3D uses P = +1. Also change position of imaginary part
-    DREAM_3D_quaternion = np.hstack((-DAMASK_quaternion['x'],-DAMASK_quaternion['y'],-DAMASK_quaternion['z'],
-                                     DAMASK_quaternion['w']))
+    DREAM_3D_quaternion = np.hstack((-DAMASK_quaternion['x'],-DAMASK_quaternion['y'],-DAMASK_quaternion['z'],
+                                     DAMASK_quaternion['w'])).astype(np.float32)
     o[cell_data_label + '/Quats'] = DREAM_3D_quaternion.reshape(tuple(f.grid)+(4,))
@@ -109,10 +106,12 @@ for filename in options.filenames:
     # phase attributes
     o[cell_data_label + '/Phases'].attrs['ComponentDimensions'] = np.array([1],np.uint64)
     o[cell_data_label + '/Phases'].attrs['ObjectType'] = 'DataArray<int32_t>'
+    o[cell_data_label + '/Phases'].attrs['TupleDimensions'] = f.grid.astype(np.uint64)
     # Quats attributes
     o[cell_data_label + '/Quats'].attrs['ComponentDimensions'] = np.array([4],np.uint64)
     o[cell_data_label + '/Quats'].attrs['ObjectType'] = 'DataArray<float>'
+    o[cell_data_label + '/Quats'].attrs['TupleDimensions'] = f.grid.astype(np.uint64)
     # Create EnsembleAttributeMatrix
     ensemble_label = data_container_label + '/EnsembleAttributeMatrix'
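The quaternion conversion above only flips the sign convention (DAMASK stores P = -1, DREAM.3D expects P = +1) and moves the scalar component from first to last position. A standalone sketch with a single hypothetical record:

    import numpy as np

    # one orientation as read from a DADF5 file: named components w,x,y,z (P = -1)
    q = np.array([(0.7071, 0.7071, 0.0, 0.0)],
                 dtype=[('w','f8'),('x','f8'),('y','f8'),('z','f8')])

    # DREAM.3D layout: vector part negated, scalar part last, float32
    q_d3d = np.hstack((-q['x'],-q['y'],-q['z'],q['w'])).astype(np.float32)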

processing/post/addCauchy.py
@@ -2,10 +2,9 @@
 import os
 import sys
+from io import StringIO
 from optparse import OptionParser
-import numpy as np
 import damask
@@ -36,54 +35,15 @@ parser.set_defaults(defgrad = 'f',
                    )
 (options,filenames) = parser.parse_args()
-# --- loop over input files -------------------------------------------------------------------------
 if filenames == []: filenames = [None]
 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name, buffered = False)
-  except:
-    continue
   damask.util.report(scriptName,name)
-# ------------------------------------------ read header ------------------------------------------
-  table.head_read()
-# ------------------------------------------ sanity checks ----------------------------------------
-  errors  = []
-  column = {}
-  for tensor in [options.defgrad,options.stress]:
-    dim = table.label_dimension(tensor)
-    if   dim <  0: errors.append('column {} not found.'.format(tensor))
-    elif dim != 9: errors.append('column {} is not a tensor.'.format(tensor))
-    else:
-      column[tensor] = table.label_index(tensor)
-  if errors != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-# ------------------------------------------ assemble header --------------------------------------
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.labels_append(['{}_Cauchy'.format(i+1) for i in range(9)]) # extend ASCII header with new labels
-  table.head_write()
-# ------------------------------------------ process data ------------------------------------------
-  outputAlive = True
-  while outputAlive and table.data_read(): # read next data line of ASCII table
-    F = np.array(list(map(float,table.data[column[options.defgrad]:column[options.defgrad]+9])),'d').reshape(3,3)
-    P = np.array(list(map(float,table.data[column[options.stress ]:column[options.stress ]+9])),'d').reshape(3,3)
-    table.data_append(list(1.0/np.linalg.det(F)*np.dot(P,F.T).reshape(9))) # [Cauchy] = (1/det(F)) * [P].[F_transpose]
-    outputAlive = table.data_write() # output processed line
-# ------------------------------------------ output finalization -----------------------------------
-  table.close() # close input ASCII table (works for stdin)
+  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+  table.add('Cauchy',
+            damask.mechanics.Cauchy(table.get(options.defgrad).reshape(-1,3,3),
+                                    table.get(options.stress ).reshape(-1,3,3)).reshape(-1,9),
+            scriptID+' '+' '.join(sys.argv[1:]))
+  table.to_ASCII(sys.stdout if name is None else name)
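Both the removed per-row loop and the new damask.mechanics.Cauchy call implement the push-forward noted in the deleted comment, sigma = (1/det(F)) * P . F^T (the library call, as far as I can tell, additionally symmetrizes the result). The core formula as a plain numpy sketch with made-up data:

    import numpy as np

    F = np.eye(3) + 1e-3*np.random.rand(3,3)   # hypothetical deformation gradient
    P = np.random.rand(3,3)                    # hypothetical first Piola-Kirchhoff stress
    sigma = np.dot(P,F.T)/np.linalg.det(F)     # [Cauchy] = (1/det(F)) * [P].[F_transpose]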

processing/post/addDeterminant.py
@@ -2,22 +2,16 @@
 import os
 import sys
+from io import StringIO
 from optparse import OptionParser
 import numpy as np
 import damask
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])
-def determinant(m):
-  return +m[0]*m[4]*m[8] \
-         +m[1]*m[5]*m[6] \
-         +m[2]*m[3]*m[7] \
-         -m[2]*m[4]*m[6] \
-         -m[1]*m[3]*m[8] \
-         -m[0]*m[5]*m[7]
 # --------------------------------------------------------------------
 #                                MAIN
 # --------------------------------------------------------------------
@@ -34,61 +28,18 @@ parser.add_option('-t','--tensor',
                   help = 'heading of columns containing tensor field values')
 (options,filenames) = parser.parse_args()
+if filenames == []: filenames = [None]
 if options.tensor is None:
   parser.error('no data column specified.')
-# --- loop over input files -------------------------------------------------------------------------
-if filenames == []: filenames = [None]
 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
-  except: continue
   damask.util.report(scriptName,name)
-# ------------------------------------------ read header ------------------------------------------
-  table.head_read()
-# ------------------------------------------ sanity checks ----------------------------------------
-  items = {
-    'tensor': {'dim': 9, 'shape': [3,3], 'labels':options.tensor, 'column': []},
-  }
-  errors  = []
-  remarks = []
-  for type, data in items.items():
-    for what in data['labels']:
-      dim = table.label_dimension(what)
-      if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type))
-      else:
-        items[type]['column'].append(table.label_index(what))
-        table.labels_append('det({})'.format(what)) # extend ASCII header with new labels
-  if remarks != []: damask.util.croak(remarks)
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-# ------------------------------------------ assemble header --------------------------------------
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.head_write()
-# ------------------------------------------ process data ------------------------------------------
-  outputAlive = True
-  while outputAlive and table.data_read(): # read next data line of ASCII table
-    for type, data in items.items():
-      for column in data['column']:
-        table.data_append(determinant(list(map(float,table.data[column: column+data['dim']]))))
-    outputAlive = table.data_write() # output processed line
-# ------------------------------------------ output finalization -----------------------------------
-  table.close() # close input ASCII table (works for stdin)
+  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+  for tensor in options.tensor:
+    table.add('det({})'.format(tensor),
+              np.linalg.det(table.get(tensor).reshape(-1,3,3)),
+              scriptID+' '+' '.join(sys.argv[1:]))
+  table.to_ASCII(sys.stdout if name is None else name)
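The deleted determinant() helper is the rule of Sarrus applied to a row-major flat 9-vector; np.linalg.det, used by the replacement, gives the same value. A quick equivalence check:

    import numpy as np

    m = np.random.rand(9)                      # tensor as a flat 9-vector, row-major
    det_sarrus = ( m[0]*m[4]*m[8] + m[1]*m[5]*m[6] + m[2]*m[3]*m[7]
                 - m[2]*m[4]*m[6] - m[1]*m[3]*m[8] - m[0]*m[5]*m[7])
    assert np.isclose(det_sarrus, np.linalg.det(m.reshape(3,3)))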

processing/post/addDeviator.py
@@ -2,6 +2,7 @@
 import os
 import sys
+from io import StringIO
 from optparse import OptionParser
 import damask
@@ -9,17 +10,6 @@ import damask
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])
-oneThird = 1.0/3.0
-def deviator(m,spherical = False): # Careful, do not change the value of m, its intent(inout)!
-  sph = oneThird*(m[0]+m[4]+m[8])
-  dev = [
-    m[0]-sph, m[1],     m[2],
-    m[3],     m[4]-sph, m[5],
-    m[6],     m[7],     m[8]-sph,
-  ]
-  return dev,sph if spherical else dev
 # --------------------------------------------------------------------
 #                                MAIN
 # --------------------------------------------------------------------
@@ -40,67 +30,22 @@ parser.add_option('-s','--spherical',
                   help = 'report spherical part of tensor (hydrostatic component, pressure)')
 (options,filenames) = parser.parse_args()
+if filenames == []: filenames = [None]
 if options.tensor is None:
   parser.error('no data column specified...')
-# --- loop over input files -------------------------------------------------------------------------
-if filenames == []: filenames = [None]
 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name, buffered = False)
-  except:
-    continue
   damask.util.report(scriptName,name)
-# ------------------------------------------ read header ------------------------------------------
-  table.head_read()
-# ------------------------------------------ sanity checks ----------------------------------------
-  items = {
-    'tensor': {'dim': 9, 'shape': [3,3], 'labels':options.tensor, 'active':[], 'column': []},
-  }
-  errors  = []
-  remarks = []
-  column = {}
-  for type, data in items.items():
-    for what in data['labels']:
-      dim = table.label_dimension(what)
-      if dim != data['dim']: remarks.append('column {} is not a {}.'.format(what,type))
-      else:
-        items[type]['active'].append(what)
-        items[type]['column'].append(table.label_index(what))
-  if remarks != []: damask.util.croak(remarks)
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-# ------------------------------------------ assemble header --------------------------------------
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  for type, data in items.items():
-    for label in data['active']:
-      table.labels_append(['{}_dev({})'.format(i+1,label) for i in range(data['dim'])] + \
-                          (['sph({})'.format(label)] if options.spherical else [])) # extend ASCII header with new labels
-  table.head_write()
-# ------------------------------------------ process data ------------------------------------------
-  outputAlive = True
-  while outputAlive and table.data_read(): # read next data line of ASCII table
-    for type, data in items.items():
-      for column in data['column']:
-        table.data_append(deviator(list(map(float,table.data[column:
-                                                             column+data['dim']])),options.spherical))
-    outputAlive = table.data_write() # output processed line
-# ------------------------------------------ output finalization -----------------------------------
-  table.close() # close input ASCII table (works for stdin)
+  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+  for tensor in options.tensor:
+    table.add('dev({})'.format(tensor),
+              damask.mechanics.deviatoric_part(table.get(tensor).reshape(-1,3,3)).reshape((-1,9)),
+              scriptID+' '+' '.join(sys.argv[1:]))
+    if options.spherical:
+      table.add('sph({})'.format(tensor),
+                damask.mechanics.spherical_part(table.get(tensor).reshape(-1,3,3)),
+                scriptID+' '+' '.join(sys.argv[1:]))
+  table.to_ASCII(sys.stdout if name is None else name)
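The deleted deviator() helper and its damask.mechanics replacements (deviatoric_part/spherical_part) split a tensor as m = dev(m) + tr(m)/3 * I. A minimal numpy sketch:

    import numpy as np

    m = np.random.rand(3,3)                    # hypothetical tensor
    sph = np.trace(m)/3.0                      # spherical (hydrostatic) part
    dev = m - sph*np.eye(3)                    # deviatoric part
    assert np.isclose(np.trace(dev), 0.0)      # deviator is trace-free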

processing/post/addInfo.py
@@ -1,6 +1,8 @@
 #!/usr/bin/env python3
 import os
+import sys
+from io import StringIO
 from optparse import OptionParser
 import damask
@@ -24,35 +26,16 @@ parser.add_option('-i',
                   dest = 'info', action = 'extend', metavar = '<string LIST>',
                   help = 'items to add')
 (options,filenames) = parser.parse_args()
+if filenames == []: filenames = [None]
 if options.info is None:
   parser.error('no info specified.')
-# --- loop over input files ------------------------------------------------------------------------
-if filenames == []: filenames = [None]
 for name in filenames:
-  try:    table = damask.ASCIItable(name = name,
-                                    buffered = False)
-  except: continue
   damask.util.report(scriptName,name)
-# ------------------------------------------ assemble header ---------------------------------------
-  table.head_read()
-  table.info_append(options.info)
-  table.head_write()
-# ------------------------------------------ pass through data -------------------------------------
-  outputAlive = True
-  while outputAlive and table.data_read(): # read next data line of ASCII table
-    outputAlive = table.data_write() # output processed line
-# ------------------------------------------ output finalization -----------------------------------
-  table.close() # close ASCII tables
+  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+  table.comments += options.info
+  table.to_ASCII(sys.stdout if name is None else name)

processing/post/addMises.py
@@ -2,10 +2,8 @@
 import os
 import sys
+from io import StringIO
 from optparse import OptionParser
-from collections import OrderedDict
-import numpy as np
 import damask
@@ -13,15 +11,6 @@ import damask
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])
-def Mises(what,tensor):
-  dev = tensor - np.trace(tensor)/3.0*np.eye(3)
-  symdev = 0.5*(dev+dev.T)
-  return np.sqrt(np.sum(symdev*symdev.T)*
-                 {
-                   'stress': 3.0/2.0,
-                   'strain': 2.0/3.0,
-                 }[what.lower()])
 # --------------------------------------------------------------------
 #                                MAIN
 # --------------------------------------------------------------------
@@ -49,60 +38,19 @@ parser.set_defaults(strain = [],
 if options.stress is [] and options.strain is []:
   parser.error('no data column specified...')
-# --- loop over input files -------------------------------------------------------------------------
 if filenames == []: filenames = [None]
 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
-  except: continue
   damask.util.report(scriptName,name)
-# ------------------------------------------ read header ------------------------------------------
-  table.head_read()
-# ------------------------------------------ sanity checks ----------------------------------------
-  items = OrderedDict([
-    ('strain', {'dim': 9, 'shape': [3,3], 'labels':options.strain, 'active':[], 'column': []}),
-    ('stress', {'dim': 9, 'shape': [3,3], 'labels':options.stress, 'active':[], 'column': []})
-  ])
-  errors  = []
-  remarks = []
-  for type, data in items.items():
-    for what in data['labels']:
-      dim = table.label_dimension(what)
-      if dim != data['dim']: remarks.append('column {} is not a {}...'.format(what,type))
-      else:
-        items[type]['active'].append(what)
-        items[type]['column'].append(table.label_index(what))
-        table.labels_append('Mises({})'.format(what)) # extend ASCII header with new labels
-  if remarks != []: damask.util.croak(remarks)
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-# ------------------------------------------ assemble header --------------------------------------
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.head_write()
-# ------------------------------------------ process data ------------------------------------------
-  outputAlive = True
-  while outputAlive and table.data_read(): # read next data line of ASCII table
-    for type, data in items.items():
-      for column in data['column']:
-        table.data_append(Mises(type,
-                                np.array(table.data[column:column+data['dim']],'d').reshape(data['shape'])))
-    outputAlive = table.data_write() # output processed line
-# ------------------------------------------ output finalization -----------------------------------
-  table.close() # close input ASCII table (works for stdin)
+  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+  for strain in options.strain:
+    table.add('Mises({})'.format(strain),
+              damask.mechanics.Mises_strain(damask.mechanics.symmetric(table.get(strain).reshape(-1,3,3))),
+              scriptID+' '+' '.join(sys.argv[1:]))
+  for stress in options.stress:
+    table.add('Mises({})'.format(stress),
+              damask.mechanics.Mises_stress(damask.mechanics.symmetric(table.get(stress).reshape(-1,3,3))),
+              scriptID+' '+' '.join(sys.argv[1:]))
+  table.to_ASCII(sys.stdout if name is None else name)
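For reference, the deleted Mises() helper (now covered by damask.mechanics.Mises_stress/Mises_strain) computes sqrt(3/2 s:s) for stress and sqrt(2/3 e:e) for strain on the symmetrized deviator; restated as a self-contained function:

    import numpy as np

    def mises(what, tensor):
        dev = tensor - np.trace(tensor)/3.0*np.eye(3)     # deviatoric part
        symdev = 0.5*(dev + dev.T)                        # symmetrize
        return np.sqrt(np.sum(symdev*symdev.T)*
                       {'stress': 3.0/2.0,
                        'strain': 2.0/3.0}[what.lower()])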

processing/post/addPK2.py
@@ -2,10 +2,9 @@
 import os
 import sys
+from io import StringIO
 from optparse import OptionParser
-import numpy as np
 import damask
@@ -36,53 +35,16 @@ parser.set_defaults(defgrad = 'f',
                    )
 (options,filenames) = parser.parse_args()
-# --- loop over input files -------------------------------------------------------------------------
 if filenames == []: filenames = [None]
 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
-  except: continue
   damask.util.report(scriptName,name)
-# ------------------------------------------ read header ------------------------------------------
-  table.head_read()
-# ------------------------------------------ sanity checks ----------------------------------------
-  errors  = []
-  column = {}
-  for tensor in [options.defgrad,options.stress]:
-    dim = table.label_dimension(tensor)
-    if   dim <  0: errors.append('column {} not found.'.format(tensor))
-    elif dim != 9: errors.append('column {} is not a tensor.'.format(tensor))
-    else:
-      column[tensor] = table.label_index(tensor)
-  if errors != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-# ------------------------------------------ assemble header --------------------------------------
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.labels_append(['{}_S'.format(i+1) for i in range(9)]) # extend ASCII header with new labels
-  table.head_write()
-# ------------------------------------------ process data ------------------------------------------
-  outputAlive = True
-  while outputAlive and table.data_read(): # read next data line of ASCII table
-    F = np.array(list(map(float,table.data[column[options.defgrad]:column[options.defgrad]+9])),'d').reshape(3,3)
-    P = np.array(list(map(float,table.data[column[options.stress ]:column[options.stress ]+9])),'d').reshape(3,3)
-    table.data_append(list(np.dot(np.linalg.inv(F),P).reshape(9))) # [S] =[P].[F-1]
-    outputAlive = table.data_write() # output processed line
-# ------------------------------------------ output finalization -----------------------------------
-  table.close() # close input ASCII table (works for stdin)
+  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+  table.add('S',
+            damask.mechanics.PK2(table.get(options.defgrad).reshape(-1,3,3),
+                                 table.get(options.stress ).reshape(-1,3,3)).reshape(-1,9),
+            scriptID+' '+' '.join(sys.argv[1:]))
+  table.to_ASCII(sys.stdout if name is None else name)
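Note that the removed loop actually computed S = F^-1 . P (its trailing comment stated the factors in the opposite order); damask.mechanics.PK2 performs the same pull-back. As a plain numpy sketch with made-up data:

    import numpy as np

    F = np.eye(3) + 1e-3*np.random.rand(3,3)   # hypothetical deformation gradient
    P = np.random.rand(3,3)                    # hypothetical first Piola-Kirchhoff stress
    S = np.dot(np.linalg.inv(F),P)             # second Piola-Kirchhoff stress, S = F^-1 . P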

@@ -1,150 +0,0 @@
-#!/usr/bin/env python2.7
-# -*- coding: UTF-8 no BOM -*-
-import os
-import sys
-from optparse import OptionParser
-import damask
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-# -----------------------------
-def outMentat(cmd,locals):
-  if cmd[0:3] == '(!)':
-    exec(cmd[3:])
-  elif cmd[0:3] == '(?)':
-    cmd = eval(cmd[3:])
-    py_mentat.py_send(cmd)
-  else:
-    py_mentat.py_send(cmd)
-  return
-# -----------------------------
-def outStdout(cmd,locals):
-  if cmd[0:3] == '(!)':
-    exec(cmd[3:])
-  elif cmd[0:3] == '(?)':
-    cmd = eval(cmd[3:])
-    print(cmd)
-  else:
-    print(cmd)
-  return
-# -----------------------------
-def output(cmds,locals,dest):
-  for cmd in cmds:
-    if isinstance(cmd,list):
-      output(cmd,locals,dest)
-    else:
-      {\
-        'Mentat': outMentat,\
-        'Stdout': outStdout,\
-      }[dest](cmd,locals)
-  return
-# -----------------------------
-def colorMap(colors,baseIdx=32):
-  cmds = [ "*color %i %f %f %f"%(idx+baseIdx,color[0],color[1],color[2])
-           for idx,color in enumerate(colors) ]
-  return cmds
-# -----------------------------
-# MAIN FUNCTION STARTS HERE
-# -----------------------------
-parser = OptionParser(option_class=damask.extendableOption,
-                      usage="%prog [options] predefinedScheme | (lower_h,s,l upper_h,s,l)", description = """
-Changes the color map in MSC.Mentat.
-Interpolates colors between "lower_hsl" and "upper_hsl".
-""", version = scriptID)
-parser.add_option("-i","--inverse", action = "store_true",
-                  dest = "inverse",
-                  help = "invert legend")
-parser.add_option(     "--palette", action = "store_true",
-                  dest = "palette",
-                  help = "output plain rgb palette integer values (0-255)")
-parser.add_option(     "--palettef", action = "store_true",
-                  dest = "palettef",
-                  help = "output plain rgb palette float values (0.0-1.0)")
-parser.add_option("-p", "--port", type = "int",
-                  dest = "port",
-                  metavar ='int',
-                  help = "Mentat connection port [%default]")
-parser.add_option("-b", "--baseindex", type = "int",
-                  metavar ='int',
-                  dest = "baseIdx",
-                  help = "base index of colormap [%default]")
-parser.add_option("-n", "--colorcount", type = "int",
-                  metavar ='int',
-                  dest = "colorcount",
-                  help = "number of colors [%default]")
-parser.add_option("-v", "--verbose", action="store_true",
-                  dest = "verbose",
-                  help = "write Mentat command stream also to STDOUT")
-parser.set_defaults(port = 40007)
-parser.set_defaults(baseIdx = 32)
-parser.set_defaults(colorcount = 32)
-parser.set_defaults(inverse = False)
-parser.set_defaults(palette = False)
-parser.set_defaults(palettef = False)
-parser.set_defaults(verbose = False)
-msg = []
-(options, colors) = parser.parse_args()
-if len(colors) == 0:
-  parser.error('missing color information')
-elif len(colors) == 1:
-  theMap = damask.Colormap(predefined = colors[0])
-elif len(colors) == 2:
-  theMap = damask.Colormap(damask.Color('HSL',map(float, colors[0].split(','))),
-                           damask.Color('HSL',map(float, colors[1].split(','))) )
-else:
-  theMap = damask.Colormap()
-if options.inverse:
-  theMap = theMap.invert()
-if options.palettef:
-  print(theMap.export(format='raw',steps=options.colorcount))
-elif options.palette:
-  for theColor in theMap.export(format='list',steps=options.colorcount):
-    print('\t'.join(map(lambda x: str(int(255*x)),theColor)))
-else: # connect to Mentat and change colorMap
-  sys.path.append(damask.solver.Marc().libraryPath())
-  try:
-    import py_mentat
-    print('waiting to connect...')
-    py_mentat.py_connect('',options.port)
-    print('connected...')
-    mentat = True
-  except:
-    sys.stderr.write('warning: no valid Mentat release found\n')
-    mentat = False
-  outputLocals = {}
-  cmds = colorMap(theMap.export(format='list',steps=options.colorcount),options.baseIdx)
if mentat:
output(['*show_table']+cmds+['*show_model *redraw'],outputLocals,'Mentat')
py_mentat.py_disconnect()
if options.verbose:
output(cmds,outputLocals,'Stdout')

View File

@ -2,8 +2,8 @@
import os import os
import sys import sys
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import re
import damask import damask
@ -35,62 +35,18 @@ parser.set_defaults(label = [],
) )
(options,filenames) = parser.parse_args() (options,filenames) = parser.parse_args()
pattern = [re.compile('^()(.+)$'), # label pattern for scalar
re.compile('^(\d+_)?(.+)$'), # label pattern for multidimension
]
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
if len(options.label) != len(options.substitute):
parser.error('number of column labels and substitutes do not match.')
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------ table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for i,label in enumerate(options.label):
table.rename(label,
options.substitute[i],
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read() table.to_ASCII(sys.stdout if name is None else name)
# ------------------------------------------ process labels ---------------------------------------
errors = []
remarks = []
if len(options.label) == 0:
errors.append('no labels specified.')
elif len(options.label) != len(options.substitute):
errors.append('mismatch between number of labels ({}) and substitutes ({}).'.format(len(options.label),
len(options.substitute)))
else:
indices = table.label_index (options.label)
dimensions = table.label_dimension(options.label)
for i,index in enumerate(indices):
if index == -1: remarks.append('label "{}" not present...'.format(options.label[i]))
else:
m = pattern[int(dimensions[i]>1)].match(table.tags[index]) # isolate label name
for j in range(dimensions[i]):
table.tags[index+j] = table.tags[index+j].replace(m.group(2),options.substitute[i]) # replace name with substitute
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
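The new damask.Table class absorbs the header bookkeeping done by hand above; a hypothetical session (file name and labels are made up):

import damask

table = damask.Table.from_ASCII('data.txt')               # or a StringIO wrapping stdin
table.rename('sigma','stress','relabeled for clarity')    # old label, new label, log comment
table.to_ASCII('data.txt')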

View File

@ -2,10 +2,9 @@
import os import os
import sys import sys
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
@ -23,7 +22,7 @@ Uniformly scale column values by given factor.
""", version = scriptID) """, version = scriptID)
parser.add_option('-l','--label', parser.add_option('-l','--label',
dest = 'label', dest = 'labels',
action = 'extend', metavar = '<string LIST>', action = 'extend', metavar = '<string LIST>',
help ='column(s) to scale') help ='column(s) to scale')
parser.add_option('-f','--factor', parser.add_option('-f','--factor',
@ -32,61 +31,21 @@ parser.add_option('-f','--factor',
help = 'factor(s) per column') help = 'factor(s) per column')
parser.set_defaults(label = [], parser.set_defaults(label = [],
) factor = [])
(options,filenames) = parser.parse_args() (options,filenames) = parser.parse_args()
if len(options.label) != len(options.factor):
parser.error('number of column labels and factors do not match.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
if len(options.labels) != len(options.factor):
parser.error('number of column labels and factors do not match.')
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------ table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for i,label in enumerate(options.labels):
table.set(label,
table.get(label)*float(options.factor[i]),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read() table.to_ASCII(sys.stdout if name is None else name)
errors = []
remarks = []
columns = []
dims = []
factors = []
for what,factor in zip(options.label,options.factor):
col = table.label_index(what)
if col < 0: remarks.append('column {} not found...'.format(what,type))
else:
columns.append(col)
factors.append(float(factor))
dims.append(table.label_dimension(what))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
for col,dim,factor in zip(columns,dims,factors): # loop over items
table.data[col:col+dim] = factor * np.array(table.data[col:col+dim],'d')
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
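Scaling, and in the next file shifting, now goes through Table.get/Table.set; a hypothetical example equivalent to '-l v -f 2.0':

import damask

table = damask.Table.from_ASCII('data.txt')               # hypothetical input
table.set('v',table.get('v')*2.0,'scaled v by 2.0')       # label, new data, log comment
table.to_ASCII('data.txt')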

View File

@ -2,10 +2,9 @@
import os import os
import sys import sys
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
@ -23,7 +22,7 @@ Uniformly shift column values by given offset.
""", version = scriptID) """, version = scriptID)
parser.add_option('-l','--label', parser.add_option('-l','--label',
dest = 'label', dest = 'labels',
action = 'extend', metavar = '<string LIST>', action = 'extend', metavar = '<string LIST>',
help ='column(s) to shift') help ='column(s) to shift')
parser.add_option('-o','--offset', parser.add_option('-o','--offset',
@ -32,61 +31,21 @@ parser.add_option('-o','--offset',
help = 'offset(s) per column') help = 'offset(s) per column')
parser.set_defaults(label = [], parser.set_defaults(label = [],
) offset = [])
(options,filenames) = parser.parse_args() (options,filenames) = parser.parse_args()
if len(options.label) != len(options.offset):
parser.error('number of column labels and offsets do not match.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
if len(options.labels) != len(options.offset):
parser.error('number of column labels and offsets do not match.')
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------ table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for i,label in enumerate(options.labels):
table.set(label,
table.get(label)+float(options.offset[i]),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read() table.to_ASCII(sys.stdout if name is None else name)
errors = []
remarks = []
columns = []
dims = []
offsets = []
for what,offset in zip(options.label,options.offset):
col = table.label_index(what)
if col < 0: remarks.append('column {} not found...'.format(what,type))
else:
columns.append(col)
offsets.append(float(offset))
dims.append(table.label_dimension(what))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
for col,dim,offset in zip(columns,dims,offsets): # loop over items
table.data[col:col+dim] = offset + np.array(table.data[col:col+dim],'d')
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables

View File

@ -2,10 +2,9 @@
import os import os
import sys import sys
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
@ -26,7 +25,7 @@ With coordinates in columns "x", "y", and "z"; sorting with x slowest and z fast
parser.add_option('-l','--label', parser.add_option('-l','--label',
dest = 'keys', dest = 'labels',
action = 'extend', metavar = '<string LIST>', action = 'extend', metavar = '<string LIST>',
help = 'list of column labels (a,b,c,...)') help = 'list of column labels (a,b,c,...)')
parser.add_option('-r','--reverse', parser.add_option('-r','--reverse',
@ -38,42 +37,14 @@ parser.set_defaults(reverse = False,
) )
(options,filenames) = parser.parse_args() (options,filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
if options.labels is None:
parser.error('no labels specified.')
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# ------------------------------------------ assemble header --------------------------------------- table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.sort_by(options.labels,not options.reverse)
table.head_read() table.to_ASCII(sys.stdout if name is None else name)
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ---------------------------------------
table.data_readArray()
keys = table.labels(raw = True)[::-1] if options.keys is None else options.keys[::-1] # numpy sorts with most significant column as last
cols = []
remarks = []
for i,column in enumerate(table.label_index(keys)):
if column < 0: remarks.append('label "{}" not present...'.format(keys[i]))
else: cols += [table.data[:,column]]
if remarks != []: damask.util.croak(remarks)
ind = np.lexsort(cols) if cols != [] else np.arange(table.data.shape[0])
if options.reverse: ind = ind[::-1]
# ------------------------------------------ output result ---------------------------------------
table.data = table.data[ind]
table.data_writeArray()
table.close() # close ASCII table
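Table.sort_by wraps the np.lexsort logic deleted above; a small sketch of the underlying behaviour (values are made up), keeping in mind that lexsort treats its last key as most significant:

import numpy as np

data = np.array([[1.,0.],[0.,1.],[1.,1.],[0.,0.]])
ind  = np.lexsort((data[:,1],data[:,0]))       # column 0 sorts slowest (most significant)
data = data[ind]                               # ordered by column 0, then column 1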

View File

@ -1,167 +0,0 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
import sys,os,re
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# -----------------------------
def ParseOutputFormat(filename,what,me):
format = {'outputs':{},'specials':{'brothers':[]}}
outputmetafile = filename+'.output'+what
try:
myFile = open(outputmetafile)
except:
print('Could not open file %s'%outputmetafile)
raise
else:
content = myFile.readlines()
myFile.close()
tag = ''
tagID = 0
for line in content:
if re.match("\s*$",line) or re.match("#",line): # skip blank lines and comments
continue
m = re.match("\[(.+)\]",line) # look for block indicator
if m: # next section
tag = m.group(1)
tagID += 1
format['specials']['brothers'].append(tag)
if tag == me or (me.isdigit() and tagID == int(me)):
format['specials']['_id'] = tagID
format['outputs'] = []
tag = me
else: # data from section
if tag == me:
(output,length) = line.split()
output.lower()
if length.isdigit():
length = int(length)
if re.match("\((.+)\)",output): # special data, (e.g. (Ngrains)
format['specials'][output] = length
elif length > 0:
format['outputs'].append([output,length])
return format
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] Abaqus.Inputfile(s)', description = """
Transfer the output variables requested in the material.config to
properly labelled user-defined variables within the Abaqus input file (*.inp).
Requires the files
<modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
that are written during the first run of the model.
Specify which user block format you want to apply by stating the homogenization, crystallite, and phase identifiers.
Or have an existing set of user variables copied over from another *.inp file.
""", version = scriptID)
parser.add_option('-m', dest='number', type='int', metavar = 'int',
help='maximum requested User Defined Variable [%default]')
parser.add_option('--homogenization', dest='homog', metavar = 'string',
help='homogenization name or index [%default]')
parser.add_option('--crystallite', dest='cryst', metavar = 'string',
help='crystallite identifier name or index [%default]')
parser.add_option('--phase', dest='phase', metavar = 'string',
help='phase identifier name or index [%default]')
parser.add_option('--use', dest='useFile', metavar = 'string',
help='optionally parse output descriptors from '+
'outputXXX files of given name')
parser.add_option('--option', dest='damaskOption', metavar = 'string',
help='Add DAMASK option to input file, e.g. "periodic x z"')
parser.set_defaults(number = 0,
homog = '1',
cryst = '1',
phase = '1')
(options, files) = parser.parse_args()
if not files:
parser.error('no file(s) specified.')
me = { 'Homogenization': options.homog,
'Crystallite': options.cryst,
'Constitutive': options.phase,
}
for myFile in files:
damask.util.report(scriptName,myFile)
if options.useFile is not None:
formatFile = os.path.splitext(options.useFile)[0]
else:
formatFile = os.path.splitext(myFile)[0]
myFile = os.path.splitext(myFile)[0]+'.inp'
if not os.path.lexists(myFile):
print('{} not found'.format(myFile))
continue
print('Scanning format files of: {}'.format(formatFile))
if options.number < 1:
outputFormat = {}
for what in me:
outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
if '_id' not in outputFormat[what]['specials']:
print("'{}' not found in <{}>".format(me[what],what))
print('\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers'])))
sys.exit(1)
UserVars = ['HomogenizationCount']
for var in outputFormat['Homogenization']['outputs']:
if var[1] > 1:
UserVars += ['%i_%s'%(i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%s'%(var[0]) for i in range(var[1])]
UserVars += ['GrainCount']
for grain in range(outputFormat['Homogenization']['specials']['(ngrains)']):
UserVars += ['%i_CrystalliteCount'%(grain+1)]
for var in outputFormat['Crystallite']['outputs']:
if var[1] > 1:
UserVars += ['%i_%i_%s'%(grain+1,i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]
UserVars += ['%i_ConstitutiveCount'%(grain+1)]
for var in outputFormat['Constitutive']['outputs']:
if var[1] > 1:
UserVars += ['%i_%i_%s'%(grain+1,i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]
# Now change *.inp file(s)
print('Adding labels to: {}'.format(myFile))
inFile = open(myFile)
input = inFile.readlines()
inFile.close()
output = open(myFile,'w')
thisSection = ''
if options.damaskOption is not None:
output.write('$damask {0}\n'.format(options.damaskOption))
for line in input:
#Abaqus keyword line begins with: *keyword, argument1, ...
m = re.match('([*]\w+)\s',line)
if m:
lastSection = thisSection
thisSection = m.group(1)
if (lastSection.upper() == '*DEPVAR' and thisSection.upper() == '*USER'): # Abaqus keyword can be upper or lower case
if options.number > 0:
output.write('{}\n'.format(options.number)) # Abaqus needs total number of SDVs in the line after *Depvar keyword
else:
output.write('{}\n'.format(len(UserVars)))
for i in range(len(UserVars)):
output.write('%i,"%i%s","%i%s"\n'%(i+1,0,UserVars[i],0,UserVars[i])) #index,output variable key,output variable description
if (thisSection.upper() != '*DEPVAR' or not re.match('\s*\d',line)):
output.write(line)
output.close()

View File

@ -132,7 +132,4 @@ for name in filenames:
damask.util.croak(geom.update(np.where(mask,geom.microstructure,fill))) damask.util.croak(geom.update(np.where(mask,geom.microstructure,fill)))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
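The if/else on name collapses because Geom.to_file accepts a file object and a file name alike; hypothetical usage:

import sys
import damask

geom = damask.Geom.from_file('example.geom')   # hypothetical input
geom.to_file(sys.stdout,pack=False)            # plain text to stdout
geom.to_file('example.geom',pack=False)        # or back to a file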

View File

@ -61,7 +61,7 @@ for name in filenames:
canvas = np.full(new,options.fill if options.fill is not None canvas = np.full(new,options.fill if options.fill is not None
else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype) else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype)
l = np.clip( offset, 0,np.minimum(old +offset,new)) l = np.clip( offset, 0,np.minimum(old +offset,new)) # noqa
r = np.clip( offset+old,0,np.minimum(old*2+offset,new)) r = np.clip( offset+old,0,np.minimum(old*2+offset,new))
L = np.clip(-offset, 0,np.minimum(new -offset,old)) L = np.clip(-offset, 0,np.minimum(new -offset,old))
R = np.clip(-offset+new,0,np.minimum(new*2-offset,old)) R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))
@ -71,7 +71,4 @@ for name in filenames:
damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True)) damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -5,9 +5,6 @@ import sys
from io import StringIO from io import StringIO
from optparse import OptionParser from optparse import OptionParser
from scipy import ndimage
import numpy as np
import damask import damask
@ -15,11 +12,6 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
def mostFrequent(arr):
unique, inverse = np.unique(arr, return_inverse=True)
return unique[np.argmax(np.bincount(inverse))]
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
@ -45,13 +37,6 @@ for name in filenames:
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom.clean(options.stencil))
damask.util.croak(geom.update(ndimage.filters.generic_filter(
geom.microstructure,mostFrequent,
size=(options.stencil,)*3).astype(geom.microstructure.dtype)))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
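Geom.clean presumably keeps the majority filter shown in the deleted lines; a self-contained sketch of that filter on random data:

import numpy as np
from scipy import ndimage

def mostFrequent(arr):
    unique,inverse = np.unique(arr,return_inverse=True)
    return unique[np.argmax(np.bincount(inverse))]            # majority vote within the stencil

microstructure = np.random.randint(1,4,(8,8,8))
cleaned = ndimage.filters.generic_filter(microstructure,mostFrequent,
                                         size=(3,)*3).astype(microstructure.dtype)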

View File

@ -86,7 +86,7 @@ for name in filenames:
* inFile[os.path.join(group_geom,'SPACING')][...] * inFile[os.path.join(group_geom,'SPACING')][...]
grid = inFile[os.path.join(group_geom,'DIMENSIONS')][...] grid = inFile[os.path.join(group_geom,'DIMENSIONS')][...]
origin = inFile[os.path.join(group_geom,'ORIGIN')][...] origin = inFile[os.path.join(group_geom,'ORIGIN')][...]
except: except KeyError:
errors.append('Geometry data ({}) not found'.format(group_geom)) errors.append('Geometry data ({}) not found'.format(group_geom))
@ -98,13 +98,13 @@ for name in filenames:
try: try:
quats = np.reshape(inFile[dataset][...],(np.product(grid),4)) quats = np.reshape(inFile[dataset][...],(np.product(grid),4))
rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats] rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats]
except: except KeyError:
errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset)) errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset))
dataset = os.path.join(group_pointwise,options.phase) dataset = os.path.join(group_pointwise,options.phase)
try: try:
phase = np.reshape(inFile[dataset][...],(np.product(grid))) phase = np.reshape(inFile[dataset][...],(np.product(grid)))
except: except KeyError:
errors.append('Pointwise phase data ({}) not readable'.format(dataset)) errors.append('Pointwise phase data ({}) not readable'.format(dataset))
microstructure = np.arange(1,np.product(grid)+1,dtype=int).reshape(grid,order='F') microstructure = np.arange(1,np.product(grid)+1,dtype=int).reshape(grid,order='F')
@ -116,7 +116,7 @@ for name in filenames:
dataset = os.path.join(group_pointwise,options.microstructure) dataset = os.path.join(group_pointwise,options.microstructure)
try: try:
microstructure = np.transpose(inFile[dataset][...].reshape(grid[::-1]),(2,1,0)) # convert from C ordering microstructure = np.transpose(inFile[dataset][...].reshape(grid[::-1]),(2,1,0)) # convert from C ordering
except: except KeyError:
errors.append('Link between pointwise and grain average data ({}) not readable'.format(dataset)) errors.append('Link between pointwise and grain average data ({}) not readable'.format(dataset))
group_average = os.path.join(rootDir,options.basegroup,options.average) group_average = os.path.join(rootDir,options.basegroup,options.average)
@ -124,13 +124,13 @@ for name in filenames:
dataset = os.path.join(group_average,options.quaternion) dataset = os.path.join(group_average,options.quaternion)
try: try:
rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed) rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed)
except: except KeyError:
errors.append('Average orientation data ({}) not readable'.format(dataset)) errors.append('Average orientation data ({}) not readable'.format(dataset))
dataset = os.path.join(group_average,options.phase) dataset = os.path.join(group_average,options.phase)
try: try:
phase = [i[0] for i in inFile[dataset][...]][1:] # skip first entry (unindexed) phase = [i[0] for i in inFile[dataset][...]][1:] # skip first entry (unindexed)
except: except KeyError:
errors.append('Average phase data ({}) not readable'.format(dataset)) errors.append('Average phase data ({}) not readable'.format(dataset))
if errors != []: if errors != []:
@ -155,4 +155,4 @@ for name in filenames:
homogenization=options.homogenization,comments=header) homogenization=options.homogenization,comments=header)
damask.util.croak(geom) damask.util.croak(geom)
geom.to_file(os.path.splitext(name)[0]+'.geom') geom.to_file(os.path.splitext(name)[0]+'.geom',pack=False)

View File

@ -89,7 +89,4 @@ geom=damask.Geom(microstructure,options.size,
comments=[scriptID + ' ' + ' '.join(sys.argv[1:])]) comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
damask.util.croak(geom) damask.util.croak(geom)
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -145,7 +145,4 @@ geom = damask.Geom(microstructure.reshape(grid),
homogenization=options.homogenization,comments=header) homogenization=options.homogenization,comments=header)
damask.util.croak(geom) damask.util.croak(geom)
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -63,7 +63,4 @@ geom = damask.Geom(microstructure=np.full(options.grid,options.fill,dtype=dtype)
comments=scriptID + ' ' + ' '.join(sys.argv[1:])) comments=scriptID + ' ' + ' '.join(sys.argv[1:]))
damask.util.croak(geom) damask.util.croak(geom)
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -152,7 +152,4 @@ for name in filenames:
homogenization=options.homogenization,comments=header) homogenization=options.homogenization,comments=header)
damask.util.croak(geom) damask.util.croak(geom)
if name is None: geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(os.path.splitext(name)[0]+'.geom')

View File

@ -302,7 +302,4 @@ for name in filenames:
homogenization=options.homogenization,comments=header) homogenization=options.homogenization,comments=header)
damask.util.croak(geom) damask.util.croak(geom)
if name is None: geom.to_file(sys.stdout if name is None else os.path.splitext(name)[0]+'.geom',pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(os.path.splitext(name)[0]+'.geom')

View File

@ -172,7 +172,4 @@ for name in filenames:
damask.util.croak(geom.update(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]])) damask.util.croak(geom.update(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -5,8 +5,6 @@ import sys
from io import StringIO from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
@ -38,35 +36,12 @@ parser.set_defaults(reflect = False)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
if options.directions is None:
parser.error('no direction given.')
if not set(options.directions).issubset(validDirections):
invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)]
parser.error('invalid directions {}. '.format(*invalidDirections))
limits = [None,None] if options.reflect else [-2,0]
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom.mirror(options.directions,options.reflect))
microstructure = geom.get_microstructure()
if 'z' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[:,:,limits[0]:limits[1]:-1]],2)
if 'y' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[:,limits[0]:limits[1]:-1,:]],1)
if 'x' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[limits[0]:limits[1]:-1,:,:]],0)
damask.util.croak(geom.update(microstructure,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
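Geom.mirror replaces the manual concatenation; the core of the removed logic, sketched for one direction (reflect toggles whether the two boundary layers are duplicated):

import numpy as np

ms = np.arange(16).reshape(2,2,4)              # toy microstructure
reflect = False
limits = [None,None] if reflect else [-2,0]
ms = np.concatenate([ms,ms[:,:,limits[0]:limits[1]:-1]],2)   # mirror along z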

View File

@ -34,41 +34,4 @@ for name in filenames:
damask.util.croak(geom) damask.util.croak(geom)
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
compressType = None geom.to_file(sys.stdout if name is None else name,pack=True)
former = start = -1
reps = 0
if name is None:
f = sys.stdout
else:
f= open(name,'w')
for current in geom.microstructure.flatten('F'):
if abs(current - former) == 1 and (start - current) == reps*(former - current):
compressType = 'to'
reps += 1
elif current == former and start == former:
compressType = 'of'
reps += 1
else:
if compressType is None:
f.write('\n'.join(geom.get_header())+'\n')
elif compressType == '.':
f.write('{}\n'.format(former))
elif compressType == 'to':
f.write('{} to {}\n'.format(start,former))
elif compressType == 'of':
f.write('{} of {}\n'.format(reps,former))
compressType = '.'
start = current
reps = 1
former = current
if compressType == '.':
f.write('{}\n'.format(former))
elif compressType == 'to':
f.write('{} to {}\n'.format(start,former))
elif compressType == 'of':
f.write('{} of {}\n'.format(reps,former))

View File

@ -5,8 +5,6 @@ import sys
from io import StringIO from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
@ -32,15 +30,6 @@ for name in filenames:
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name) geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom.renumber())
renumbered = np.empty(geom.get_grid(),dtype=geom.microstructure.dtype)
for i, oldID in enumerate(np.unique(geom.microstructure)):
renumbered = np.where(geom.microstructure == oldID, i+1, renumbered)
damask.util.croak(geom.update(renumbered))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
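Geom.renumber keeps the compaction removed here: microstructure indices become contiguous, starting at 1, in ascending order of the old IDs. A sketch:

import numpy as np

ms = np.array([[4,4],[9,2]])                   # toy microstructure with gaps in the IDs
renumbered = np.empty_like(ms)
for i,oldID in enumerate(np.unique(ms)):
    renumbered[ms == oldID] = i+1              # 2 -> 1, 4 -> 2, 9 -> 3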

View File

@ -2,11 +2,10 @@
import os import os
import sys import sys
import numpy as np
from io import StringIO from io import StringIO
from optparse import OptionParser from optparse import OptionParser
from scipy import ndimage
import numpy as np
import damask import damask
@ -55,20 +54,7 @@ for name in filenames:
np.array([o*float(n.lower().replace('x','')) if n.lower().endswith('x') \ np.array([o*float(n.lower().replace('x','')) if n.lower().endswith('x') \
else float(n) for o,n in zip(size,options.size)],dtype=float) else float(n) for o,n in zip(size,options.size)],dtype=float)
damask.util.croak(geom.update(microstructure = geom.scale(new_grid)
ndimage.interpolation.zoom( damask.util.croak(geom.update(microstructure = None,size = new_size))
geom.microstructure,
new_grid/grid,
output=geom.microstructure.dtype,
order=0,
mode='nearest',
prefilter=False,
) if np.any(new_grid != grid) \
else None,
size = new_size))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
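Geom.scale presumably retains the nearest-neighbour resampling that was inlined before; a sketch with a doubled grid:

import numpy as np
from scipy import ndimage

ms = np.random.randint(1,3,(4,4,4))
new_grid = np.array((8,8,8))
scaled = ndimage.interpolation.zoom(ms,new_grid/np.array(ms.shape),
                                    output=ms.dtype,order=0,         # nearest neighbour
                                    mode='nearest',prefilter=False)  # IDs are never interpolated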

View File

@ -95,7 +95,4 @@ for name in filenames:
damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True)) damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -58,7 +58,4 @@ for name in filenames:
damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin)) damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -34,7 +34,4 @@ for name in filenames:
damask.util.croak(geom) damask.util.croak(geom)
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -82,7 +82,4 @@ for name in filenames:
geom.microstructure + offset,geom.microstructure))) geom.microstructure + offset,geom.microstructure)))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:])) geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if name is None: geom.to_file(sys.stdout if name is None else name,pack=False)
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)

View File

@ -1,164 +0,0 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import sys,os,re
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# -----------------------------
def ParseOutputFormat(filename,what,me):
format = {'outputs':{},'specials':{'brothers':[]}}
outputmetafile = filename+'.output'+what
try:
myFile = open(outputmetafile)
except:
print('Could not open file %s'%outputmetafile)
raise
else:
content = myFile.readlines()
myFile.close()
tag = ''
tagID = 0
for line in content:
if re.match("\s*$",line) or re.match("#",line): # skip blank lines and comments
continue
m = re.match("\[(.+)\]",line) # look for block indicator
if m: # next section
tag = m.group(1)
tagID += 1
format['specials']['brothers'].append(tag)
if tag == me or (me.isdigit() and tagID == int(me)):
format['specials']['_id'] = tagID
format['outputs'] = []
tag = me
else: # data from section
if tag == me:
(output,length) = line.split()
output.lower()
if length.isdigit():
length = int(length)
if re.match("\((.+)\)",output): # special data, (e.g. (Ngrains)
format['specials'][output] = length
elif length > 0:
format['outputs'].append([output,length])
return format
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] Marc.Inputfile(s)', description = """
Transfer the output variables requested in the material.config to
properly labelled user-defined variables within the Marc input file (*.dat).
Requires the files
<modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
that are written during the first run of the model.
Specify which user block format you want to apply by stating the homogenization, crystallite, and phase identifiers.
Or have an existing set of user variables copied over from another *.dat file.
""", version = scriptID)
parser.add_option('-m', dest='number', type='int', metavar = 'int',
help='maximum requested User Defined Variable [%default]')
parser.add_option('--homogenization', dest='homog', metavar = 'string',
help='homogenization name or index [%default]')
parser.add_option('--crystallite', dest='cryst', metavar = 'string',
help='crystallite identifier name or index [%default]')
parser.add_option('--phase', dest='phase', metavar = 'string',
help='phase identifier name or index [%default]')
parser.add_option('--use', dest='useFile', metavar = 'string',
help='optionally parse output descriptors from '+
'outputXXX files of given name')
parser.add_option('--option', dest='damaskOption', metavar = 'string',
help='Add DAMASK option to input file, e.g. "periodic x z"')
parser.set_defaults(number = 0,
homog = '1',
cryst = '1',
phase = '1')
(options, files) = parser.parse_args()
if not files:
parser.error('no file(s) specified.')
me = { 'Homogenization': options.homog,
'Crystallite': options.cryst,
'Constitutive': options.phase,
}
for myFile in files:
damask.util.report(scriptName,myFile)
if options.useFile is not None:
formatFile = os.path.splitext(options.useFile)[0]
else:
formatFile = os.path.splitext(myFile)[0]
myFile = os.path.splitext(myFile)[0]+'.dat'
if not os.path.lexists(myFile):
print('{} not found'.format(myFile))
continue
print('Scanning format files of: {}'.format(formatFile))
if options.number < 1:
outputFormat = {}
for what in me:
outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
if '_id' not in outputFormat[what]['specials']:
print("'{}' not found in <{}>".format(me[what],what))
print('\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers'])))
sys.exit(1)
UserVars = ['HomogenizationCount']
for var in outputFormat['Homogenization']['outputs']:
if var[1] > 1:
UserVars += ['%i_%s'%(i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%s'%(var[0]) for i in range(var[1])]
UserVars += ['GrainCount']
for grain in range(outputFormat['Homogenization']['specials']['(ngrains)']):
UserVars += ['%i_CrystalliteCount'%(grain+1)]
for var in outputFormat['Crystallite']['outputs']:
if var[1] > 1:
UserVars += ['%i_%i_%s'%(grain+1,i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]
UserVars += ['%i_ConstitutiveCount'%(grain+1)]
for var in outputFormat['Constitutive']['outputs']:
if var[1] > 1:
UserVars += ['%i_%i_%s'%(grain+1,i+1,var[0]) for i in range(var[1])]
else:
UserVars += ['%i_%s'%(grain+1,var[0]) for i in range(var[1])]
# Now change *.dat file(s)
print('Adding labels to: {}'.format(myFile))
inFile = open(myFile)
input = inFile.readlines()
inFile.close()
output = open(myFile,'w')
thisSection = ''
if options.damaskOption is not None:
output.write('$damask {0}\n'.format(options.damaskOption))
for line in input:
m = re.match('(\w+)\s',line)
if m:
lastSection = thisSection
thisSection = m.group(1)
if (lastSection == 'post' and thisSection == 'parameters'):
if options.number > 0:
for i in range(options.number):
output.write('%10i%10i\n'%(-i-1,0))
else:
for i in range(len(UserVars)):
output.write('%10i%10i%s\n'%(-i-1,0,UserVars[i]))
if (thisSection != 'post' or not re.match('\s*\-',line)):
output.write(line)
output.close()

View File

@ -6,16 +6,21 @@ with open(os.path.join(os.path.dirname(__file__),'VERSION')) as f:
name = 'damask' name = 'damask'
# classes
from .environment import Environment # noqa from .environment import Environment # noqa
from .asciitable import ASCIItable # noqa from .asciitable import ASCIItable # noqa
from .table import Table # noqa
from .config import Material # noqa from .config import Material # noqa
from .colormaps import Colormap, Color # noqa from .colormaps import Colormap, Color # noqa
from .orientation import Symmetry, Lattice, Rotation, Orientation # noqa from .orientation import Symmetry, Lattice, Rotation, Orientation # noqa
from .dadf5 import DADF5 # noqa from .dadf5 import DADF5 # noqa
#from .block import Block # only one class
from .geom import Geom # noqa from .geom import Geom # noqa
from .solver import Solver # noqa from .solver import Solver # noqa
from .test import Test # noqa from .test import Test # noqa
from .util import extendableOption # noqa from .util import extendableOption # noqa
# functions in modules
from . import mechanics # noqa

View File

@ -2,7 +2,7 @@ import os
import sys import sys
import re import re
import shlex import shlex
from collections import Iterable from collections.abc import Iterable
import numpy as np import numpy as np
@ -15,7 +15,7 @@ except NameError:
# ------------------------------------------------------------------ # ------------------------------------------------------------------
class ASCIItable(): class ASCIItable():
"""Read and write to ASCII tables""" """Read and write to ASCII tables."""
tmpext = '_tmp' # filename extension for in-place access tmpext = '_tmp' # filename extension for in-place access
@ -27,6 +27,7 @@ class ASCIItable():
labeled = True, # assume table has labels labeled = True, # assume table has labels
readonly = False, # no reading from file readonly = False, # no reading from file
): ):
"""Read and write to ASCII tables."""
self.__IO__ = {'output': [], self.__IO__ = {'output': [],
'buffered': buffered, 'buffered': buffered,
'labeled': labeled, # header contains labels 'labeled': labeled, # header contains labels
@ -72,7 +73,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def _removeCRLF(self, def _removeCRLF(self,
string): string):
"""Delete any carriage return and line feed from string""" """Delete any carriage return and line feed from string."""
try: try:
return string.replace('\n','').replace('\r','') return string.replace('\n','').replace('\r','')
except AttributeError: except AttributeError:
@ -82,7 +83,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def _quote(self, def _quote(self,
what): what):
"""Quote empty or white space-containing output""" """Quote empty or white space-containing output."""
return '{quote}{content}{quote}'.format( return '{quote}{content}{quote}'.format(
quote = ('"' if str(what)=='' or re.search(r"\s",str(what)) else ''), quote = ('"' if str(what)=='' or re.search(r"\s",str(what)) else ''),
content = what) content = what)
@ -103,7 +104,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def output_write(self, def output_write(self,
what): what):
"""Aggregate a single row (string) or list of (possibly containing further lists of) rows into output""" """Aggregate a single row (string) or list of (possibly containing further lists of) rows into output."""
if isinstance(what, (str, unicode)): if isinstance(what, (str, unicode)):
self.__IO__['output'] += [what] self.__IO__['output'] += [what]
else: else:
@ -143,7 +144,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def head_read(self): def head_read(self):
""" """
Get column labels Get column labels.
by either reading the first row or, by either reading the first row or,
if keyword "head[*]" is present, the last line of the header if keyword "head[*]" is present, the last line of the header
@ -154,7 +155,7 @@ class ASCIItable():
pass pass
firstline = self.__IO__['in'].readline().strip() firstline = self.__IO__['in'].readline().strip()
m = re.search('(\d+)\s+head', firstline.lower()) # search for "head" keyword m = re.search(r'(\d+)\s+head', firstline.lower()) # search for "head" keyword
if m: # proper ASCIItable format if m: # proper ASCIItable format
@ -194,7 +195,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def head_write(self, def head_write(self,
header = True): header = True):
"""Write current header information (info + labels)""" """Write current header information (info + labels)."""
head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else [] head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
head.append(self.info) head.append(self.info)
if self.__IO__['labeled']: if self.__IO__['labeled']:
@ -205,7 +206,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def head_getGeom(self): def head_getGeom(self):
"""Interpret geom header""" """Interpret geom header."""
identifiers = { identifiers = {
'grid': ['a','b','c'], 'grid': ['a','b','c'],
'size': ['x','y','z'], 'size': ['x','y','z'],
@ -247,7 +248,7 @@ class ASCIItable():
def labels_append(self, def labels_append(self,
what, what,
reset = False): reset = False):
"""Add item or list to existing set of labels (and switch on labeling)""" """Add item or list to existing set of labels (and switch on labeling)."""
if isinstance(what, (str, unicode)): if isinstance(what, (str, unicode)):
self.tags += [self._removeCRLF(what)] self.tags += [self._removeCRLF(what)]
else: else:
@ -261,7 +262,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def labels_clear(self): def labels_clear(self):
"""Delete existing labels and switch to no labeling""" """Delete existing labels and switch to no labeling."""
self.tags = [] self.tags = []
self.__IO__['labeled'] = False self.__IO__['labeled'] = False
@ -392,7 +393,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def info_append(self, def info_append(self,
what): what):
"""Add item or list to existing set of infos""" """Add item or list to existing set of infos."""
if isinstance(what, (str, unicode)): if isinstance(what, (str, unicode)):
self.info += [self._removeCRLF(what)] self.info += [self._removeCRLF(what)]
else: else:
@ -403,7 +404,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def info_clear(self): def info_clear(self):
"""Delete any info block""" """Delete any info block."""
self.info = [] self.info = []
# ------------------------------------------------------------------ # ------------------------------------------------------------------
@ -416,7 +417,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def data_skipLines(self, def data_skipLines(self,
count): count):
"""Wind forward by count number of lines""" """Wind forward by count number of lines."""
for i in range(count): for i in range(count):
alive = self.data_read() alive = self.data_read()
@ -426,7 +427,7 @@ class ASCIItable():
def data_read(self, def data_read(self,
advance = True, advance = True,
respectLabels = True): respectLabels = True):
"""Read next line (possibly buffered) and parse it into data array""" """Read next line (possibly buffered) and parse it into data array."""
self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \ self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file
@ -446,9 +447,11 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def data_readArray(self, def data_readArray(self,
labels = []): labels = []):
"""Read whole data of all (given) labels as numpy array""" """Read whole data of all (given) labels as numpy array."""
try: self.data_rewind() # try to wind back to start of data try:
except: pass # assume/hope we are at data start already... self.data_rewind() # try to wind back to start of data
except IOError:
pass # assume/hope we are at data start already...
if labels is None or labels == []: if labels is None or labels == []:
use = None # use all columns (and keep labels intact) use = None # use all columns (and keep labels intact)
@ -480,7 +483,7 @@ class ASCIItable():
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def data_write(self, def data_write(self,
delimiter = '\t'): delimiter = '\t'):
"""Write current data array and report alive output back""" """Write current data array and report alive output back."""
if len(self.data) == 0: return True if len(self.data) == 0: return True
if isinstance(self.data[0],list): if isinstance(self.data[0],list):
@ -492,16 +495,16 @@ class ASCIItable():
def data_writeArray(self, def data_writeArray(self,
fmt = None, fmt = None,
delimiter = '\t'): delimiter = '\t'):
"""Write whole numpy array data""" """Write whole numpy array data."""
for row in self.data: for row in self.data:
try: try:
output = [fmt % value for value in row] if fmt else list(map(repr,row)) output = [fmt % value for value in row] if fmt else list(map(repr,row))
except: except Exception:
output = [fmt % row] if fmt else [repr(row)] output = [fmt % row] if fmt else [repr(row)]
try: try:
self.__IO__['out'].write(delimiter.join(output) + '\n') self.__IO__['out'].write(delimiter.join(output) + '\n')
except: except Exception:
pass pass
# ------------------------------------------------------------------ # ------------------------------------------------------------------
@ -545,7 +548,7 @@ class ASCIItable():
grid, grid,
type = 'i', type = 'i',
strict = False): strict = False):
"""Read microstructure data (from .geom format)""" """Read microstructure data (from .geom format)."""
def datatype(item): def datatype(item):
return int(item) if type.lower() == 'i' else float(item) return int(item) if type.lower() == 'i' else float(item)

View File

@ -7,6 +7,7 @@ import numpy as np
from . import util from . import util
from . import version from . import version
from . import mechanics
# ------------------------------------------------------------------ # ------------------------------------------------------------------
class DADF5(): class DADF5():
@ -17,19 +18,26 @@ class DADF5():
""" """
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def __init__(self,filename): def __init__(self,fname):
""" """
Opens an existing DADF5 file. Opens an existing DADF5 file.
Parameters Parameters
---------- ----------
filename : str fname : str
name of the DADF5 file to be opened. name of the DADF5 file to be opened.
""" """
with h5py.File(filename,'r') as f: with h5py.File(fname,'r') as f:
if f.attrs['DADF5-major'] != 0 or f.attrs['DADF5-minor'] != 2: try:
self.version_major = f.attrs['DADF5_version_major']
self.version_minor = f.attrs['DADF5_version_minor']
except KeyError:
self.version_major = f.attrs['DADF5-major']
self.version_minor = f.attrs['DADF5-minor']
if self.version_major != 0 or not 2 <= self.version_minor <= 4:
raise TypeError('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version'])) raise TypeError('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version']))
self.structured = 'grid' in f['geometry'].attrs.keys() self.structured = 'grid' in f['geometry'].attrs.keys()
@ -39,7 +47,8 @@ class DADF5():
self.size = f['geometry'].attrs['size'] self.size = f['geometry'].attrs['size']
r=re.compile('inc[0-9]+') r=re.compile('inc[0-9]+')
self.increments = [i for i in f.keys() if r.match(i)] increments_unsorted = {int(i[3:]):i for i in f.keys() if r.match(i)}
self.increments = [increments_unsorted[i] for i in sorted(increments_unsorted)]
self.times = [round(f[i].attrs['time/s'],12) for i in self.increments] self.times = [round(f[i].attrs['time/s'],12) for i in self.increments]
self.Nmaterialpoints, self.Nconstituents = np.shape(f['mapping/cellResults/constituent']) self.Nmaterialpoints, self.Nconstituents = np.shape(f['mapping/cellResults/constituent'])
@ -63,7 +72,7 @@ class DADF5():
'con_physics': self.con_physics, 'con_physics': self.con_physics,
'mat_physics': self.mat_physics} 'mat_physics': self.mat_physics}
self.filename = filename self.fname = fname
def __manage_visible(self,datasets,what,action): def __manage_visible(self,datasets,what,action):
@ -102,21 +111,21 @@ class DADF5():
def __time_to_inc(self,start,end): def __time_to_inc(self,start,end):
selected = [] selected = []
for i,time in enumerate(self.times): for i,time in enumerate(self.times):
if start <= time < end: if start <= time <= end:
selected.append(self.increments[i]) selected.append(self.increments[i])
return selected return selected
def set_by_time(self,start,end): def set_by_time(self,start,end):
""" """
Sets active time increments based on start and end time. Set active increments based on start and end time.
Parameters Parameters
---------- ----------
start : float start : float
start time (included) start time (included)
end : float end : float
end time (exclcuded) end time (included)
""" """
self.__manage_visible(self.__time_to_inc(start,end),'increments','set') self.__manage_visible(self.__time_to_inc(start,end),'increments','set')
@ -124,14 +133,14 @@ class DADF5():
def add_by_time(self,start,end): def add_by_time(self,start,end):
""" """
Adds to active time increments based on start and end time. Add to active increments based on start and end time.
Parameters Parameters
---------- ----------
start : float start : float
start time (included) start time (included)
end : float end : float
end time (exclcuded) end time (included)
""" """
self.__manage_visible(self.__time_to_inc(start,end),'increments','add') self.__manage_visible(self.__time_to_inc(start,end),'increments','add')
@ -139,22 +148,76 @@ class DADF5():
def del_by_time(self,start,end): def del_by_time(self,start,end):
""" """
Delets from active time increments based on start and end time. Delete from active increments based on start and end time.
Parameters Parameters
---------- ----------
start : float start : float
start time (included) start time (included)
end : float end : float
end time (exclcuded) end time (included)
""" """
self.__manage_visible(self.__time_to_inc(start,end),'increments','del') self.__manage_visible(self.__time_to_inc(start,end),'increments','del')
def set_by_increment(self,start,end):
"""
Set active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
if self.version_minor >= 4:
self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','set')
else:
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','set')
def add_by_increment(self,start,end):
"""
Add to active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
if self.version_minor >= 4:
self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','add')
else:
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','add')
def del_by_increment(self,start,end):
"""
Delete from active time increments based on start and end increment.
Parameters
----------
start : int
start increment (included)
end : int
end increment (included)
"""
if self.version_minor >= 4:
self.__manage_visible([ 'inc{}'.format(i) for i in range(start,end+1)],'increments','del')
else:
self.__manage_visible(['inc{:05d}'.format(i) for i in range(start,end+1)],'increments','del')
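A minimal usage sketch of the selection methods above; the result file name and increment numbers are made up, and the class is assumed to be exported at package level as damask.DADF5:

import damask

d = damask.DADF5('polycrystal.hdf5')  # hypothetical result file
d.set_by_time(0.0,5.0)                # activate all increments with 0.0 <= t <= 5.0
d.add_by_increment(10,12)             # additionally activate inc10 .. inc12
d.del_by_increment(11,11)             # deactivate inc11 again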
def iter_visible(self,what): def iter_visible(self,what):
""" """
Iterates over visible items by setting each one visible. Iterate over visible items by setting each one visible.
Parameters Parameters
---------- ----------
@ -176,7 +239,7 @@ class DADF5():
def set_visible(self,what,datasets): def set_visible(self,what,datasets):
""" """
Sets active groups. Set active groups.
Parameters Parameters
---------- ----------
@ -192,7 +255,7 @@ class DADF5():
def add_visible(self,what,datasets): def add_visible(self,what,datasets):
""" """
Adds to active groups. Add to active groups.
Parameters Parameters
---------- ----------
@ -208,7 +271,7 @@ class DADF5():
def del_visible(self,what,datasets): def del_visible(self,what,datasets):
""" """
Removes from active groups. Delete from active groups.
Parameters Parameters
---------- ----------
@ -252,7 +315,7 @@ class DADF5():
groups = [] groups = []
with h5py.File(self.filename,'r') as f: with h5py.File(self.fname,'r') as f:
for i in self.iter_visible('increments'): for i in self.iter_visible('increments'):
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']): for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o): for oo in self.iter_visible(o):
@ -267,11 +330,11 @@ class DADF5():
def list_data(self): def list_data(self):
"""Gives information on all active datasets in the file.""" """Return information on all active datasets in the file."""
message = '' message = ''
with h5py.File(self.filename,'r') as f: with h5py.File(self.fname,'r') as f:
for i in self.iter_visible('increments'): for s,i in enumerate(self.iter_visible('increments')):
message+='\n{}\n'.format(i) message+='\n{} ({}s)\n'.format(i,self.times[s])
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']): for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o): for oo in self.iter_visible(o):
message+=' {}\n'.format(oo) message+=' {}\n'.format(oo)
@ -280,17 +343,24 @@ class DADF5():
group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue
for d in f[group].keys(): for d in f[group].keys():
try: try:
message+=' {} ({})\n'.format(d,f['/'.join([group,d])].attrs['Description'].decode()) dataset = f['/'.join([group,d])]
message+=' {} / ({}): {}\n'.format(d,dataset.attrs['Unit'].decode(),dataset.attrs['Description'].decode())
except KeyError: except KeyError:
pass pass
return message return message
def get_dataset_location(self,label): def get_dataset_location(self,label):
"""Returns the location of all active datasets with given label.""" """Return the location of all active datasets with given label."""
path = [] path = []
with h5py.File(self.filename,'r') as f: with h5py.File(self.fname,'r') as f:
for i in self.iter_visible('increments'): for i in self.iter_visible('increments'):
k = '/'.join([i,'geometry',label])
try:
f[k]
path.append(k)
except KeyError as e:
print('unable to locate geometry dataset: {}'.format(str(e)))
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']): for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_visible(o): for oo in self.iter_visible(o):
for pp in self.iter_visible(p): for pp in self.iter_visible(p):
@ -299,39 +369,43 @@ class DADF5():
f[k] f[k]
path.append(k) path.append(k)
except KeyError as e: except KeyError as e:
print('unable to locate constituents dataset: '+ str(e)) print('unable to locate {} dataset: {}'.format(o,str(e)))
return path return path
def get_constituent_ID(self,c=0): def get_constituent_ID(self,c=0):
"""Pointwise constituent ID.""" """Pointwise constituent ID."""
with h5py.File(self.filename,'r') as f: with h5py.File(self.fname,'r') as f:
names = f['/mapping/cellResults/constituent']['Name'][:,c].astype('str') names = f['/mapping/cellResults/constituent']['Name'][:,c].astype('str')
return np.array([int(n.split('_')[0]) for n in names.tolist()],dtype=np.int32) return np.array([int(n.split('_')[0]) for n in names.tolist()],dtype=np.int32)
def get_crystal_structure(self): # ToDo: extension to multi constituents/phase def get_crystal_structure(self): # ToDo: extension to multi constituents/phase
"""Info about the crystal structure.""" """Info about the crystal structure."""
with h5py.File(self.filename,'r') as f: with h5py.File(self.fname,'r') as f:
return f[self.get_dataset_location('orientation')[0]].attrs['Lattice'].astype('str') # np.bytes_ to string return f[self.get_dataset_location('orientation')[0]].attrs['Lattice'].astype('str') # np.bytes_ to string
def read_dataset(self,path,c): def read_dataset(self,path,c=0,plain=False):
""" """
Dataset for all points/cells. Dataset for all points/cells.
If more than one path is given, the dataset is composed of the individual contributions If more than one path is given, the dataset is composed of the individual contributions.
""" """
with h5py.File(self.filename,'r') as f: with h5py.File(self.fname,'r') as f:
shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:] shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:]
if len(shape) == 1: shape = shape +(1,) if len(shape) == 1: shape = shape +(1,)
dataset = np.full(shape,np.nan,dtype=np.dtype(f[path[0]])) dataset = np.full(shape,np.nan,dtype=np.dtype(f[path[0]]))
for pa in path: for pa in path:
label = pa.split('/')[2] label = pa.split('/')[2]
if (pa.split('/')[1] == 'geometry'):
dataset = np.array(f[pa])
continue
p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0] p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0]
if len(p)>0: if len(p)>0:
u = (f['mapping/cellResults/constituent'][p,c]['Position']) u = (f['mapping/cellResults/constituent']['Position'][p,c])
a = np.array(f[pa]) a = np.array(f[pa])
if len(a.shape) == 1: if len(a.shape) == 1:
a=a.reshape([a.shape[0],1]) a=a.reshape([a.shape[0],1])
@ -339,17 +413,20 @@ class DADF5():
p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0] p = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(label))[0]
if len(p)>0: if len(p)>0:
u = (f['mapping/cellResults/materialpoint'][p.tolist()]['Position']) u = (f['mapping/cellResults/materialpoint']['Position'][p.tolist()])
a = np.array(f[pa]) a = np.array(f[pa])
if len(a.shape) == 1: if len(a.shape) == 1:
a=a.reshape([a.shape[0],1]) a=a.reshape([a.shape[0],1])
dataset[p,:] = a[u,:] dataset[p,:] = a[u,:]
if plain and dataset.dtype.names is not None:
return dataset.view(('float64',len(dataset.dtype.names)))
else:
return dataset return dataset
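Typical read access chains get_dataset_location and read_dataset; a sketch, assuming a dataset labeled 'F' is present and d is an open DADF5 object as above:

path = d.get_dataset_location('F')  # paths to all visible datasets labeled 'F'
F = d.read_dataset(path,c=0)        # per-cell data of constituent 0, shape (Nmaterialpoints,3,3)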
def cell_coordinates(self): def cell_coordinates(self):
"""Initial coordinates of the cell centers.""" """Return initial coordinates of the cell centers."""
if self.structured: if self.structured:
delta = self.size/self.grid*0.5 delta = self.size/self.grid*0.5
z, y, x = np.meshgrid(np.linspace(delta[2],self.size[2]-delta[2],self.grid[2]), z, y, x = np.meshgrid(np.linspace(delta[2],self.size[2]-delta[2],self.grid[2]),
@ -358,76 +435,237 @@ class DADF5():
) )
return np.concatenate((x[:,:,:,None],y[:,:,:,None],z[:,:,:,None]),axis = 3).reshape([np.product(self.grid),3]) return np.concatenate((x[:,:,:,None],y[:,:,:,None],z[:,:,:,None]),axis = 3).reshape([np.product(self.grid),3])
else: else:
with h5py.File(self.filename,'r') as f: with h5py.File(self.fname,'r') as f:
return f['geometry/x_c'][()] return f['geometry/x_c'][()]
def add_absolute(self,x):
"""
Add absolute value.
Parameters
----------
x : str
Label of the dataset containing a scalar, vector, or tensor.
"""
def __add_absolute(x):
return {
'data': np.abs(x['data']),
'label': '|{}|'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_abs v{}'.format(version)
}
}
requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(__add_absolute,requested)
def add_calculation(self,formula,label,unit='n/a',description=None,vectorized=True):
"""
Add result of a general formula.
Parameters
----------
formula : str
Formula, refer to datasets by #Label#.
label : str
Label of the dataset containing the result of the calculation.
unit : str, optional
Physical unit of the result.
description : str, optional
Human readable description of the result.
vectorized : bool, optional
Indicate whether the formula is written in vectorized form. Default is True.
"""
if vectorized is not True:
raise NotImplementedError
def __add_calculation(**kwargs):
formula = kwargs['formula']
for d in re.findall(r'#(.*?)#',formula):
formula = formula.replace('#{}#'.format(d),"kwargs['{}']['data']".format(d))
return {
'data': eval(formula),
'label': kwargs['label'],
'meta': {
'Unit': kwargs['unit'],
'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
'Creator': 'dadf5.py:add_calculation v{}'.format(version)
}
}
requested = [{'label':d,'arg':d} for d in set(re.findall(r'#(.*?)#',formula))] # datasets used in the formula
pass_through = {'formula':formula,'label':label,'unit':unit,'description':description}
self.__add_generic_pointwise(__add_calculation,requested,pass_through)
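For example, scaling an existing dataset via the #Label# placeholder syntax (the label 'P' is assumed to exist in the file):

d.add_calculation('#P#/1e6','P_MPa','MPa','first Piola-Kirchhoff stress in MPa')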
def add_Cauchy(self,P='P',F='F'): def add_Cauchy(self,P='P',F='F'):
""" """
Adds Cauchy stress calculated from 1st Piola-Kirchhoff stress and deformation gradient. Add Cauchy stress calculated from 1. Piola-Kirchhoff stress and deformation gradient.
Parameters
----------
P : str, optional
Label of the dataset containing the 1. Piola-Kirchhoff stress. Default value is P.
F : str, optional
Label of the dataset containing the deformation gradient. Default value is F.
Resulting tensor is symmetrized as the Cauchy stress should be symmetric.
""" """
def Cauchy(F,P): def __add_Cauchy(F,P):
sigma = np.einsum('i,ijk,ilk->ijl',1.0/np.linalg.det(F['data']),P['data'],F['data'])
sigma = (sigma + np.transpose(sigma,(0,2,1)))*0.5 # enforce symmetry
return { return {
'data' : sigma, 'data': mechanics.Cauchy(F['data'],P['data']),
'label' : 'sigma', 'label': 'sigma',
'meta' : { 'meta': {
'Unit' : P['meta']['Unit'], 'Unit': P['meta']['Unit'],
'Description' : 'Cauchy stress calculated from {} ({}) '.format(P['label'],P['meta']['Description'])+\ 'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],P['meta']['Description'])+\
'and deformation gradient {} ({})'.format(F['label'],F['meta']['Description']), 'and deformation gradient {} ({})'.format(F['label'],F['meta']['Description']),
'Creator' : 'dadf5.py:add_Cauchy v{}'.format(version) 'Creator': 'dadf5.py:add_Cauchy v{}'.format(version)
} }
} }
requested = [{'label':F,'arg':'F'}, requested = [{'label':F,'arg':'F'},
{'label':P,'arg':'P'} ] {'label':P,'arg':'P'} ]
self.__add_generic_pointwise(Cauchy,requested) self.__add_generic_pointwise(__add_Cauchy,requested)
def add_Mises(self,x): def add_determinant(self,x):
"""Adds the equivalent Mises stress or strain of a tensor.""" """
def Mises(x): Add the determinant of a tensor.
if x['meta']['Unit'] == b'Pa': #ToDo: Should we use this? Then add_Cauchy and add_strain_tensors also should perform sanity checks Parameters
factor = 3.0/2.0 ----------
t = 'stress' x : str
elif x['meta']['Unit'] == b'1': Label of the dataset containing a tensor.
factor = 2.0/3.0
t = 'strain'
else:
print(x['meta']['Unit'])
raise ValueError
d = x['data'] """
dev = d - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[d.shape[0],3,3]),np.trace(d,axis1=1,axis2=2)/3.0) def __add_determinant(x):
#dev_sym = (dev + np.einsum('ikj',dev))*0.5 # ToDo: this is not needed (only if the input is not symmetric, but then the whole concept breaks down)
return { return {
'data' : np.sqrt(np.einsum('ijk->i',dev**2)*factor), 'data': np.linalg.det(x['data']),
'label' : '{}_vM'.format(x['label']), 'label': 'det({})'.format(x['label']),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': x['meta']['Unit'],
'Description' : 'Mises equivalent {} of {} ({})'.format(t,x['label'],x['meta']['Description']), 'Description': 'Determinant of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_Mises_stress v{}'.format(version) 'Creator': 'dadf5.py:add_determinant v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(Mises,requested) self.__add_generic_pointwise(__add_determinant,requested)
def add_deviator(self,x):
"""
Add the deviatoric part of a tensor.
Parameters
----------
x : str
Label of the dataset containing a tensor.
"""
def __add_deviator(x):
if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])):
raise ValueError
return {
'data': mechanics.deviatoric_part(x['data']),
'label': 's_{}'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Deviator of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_deviator v{}'.format(version)
}
}
requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(__add_deviator,requested)
def add_maximum_shear(self,x):
"""
Add maximum shear components of symmetric tensor.
Parameters
----------
x : str
Label of the dataset containing a symmetric tensor.
"""
def __add_maximum_shear(x):
return {
'data': mechanics.maximum_shear(x['data']),
'label': 'max_shear({})'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Maximum shear component of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_maximum_shear v{}'.format(version)
}
}
requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(__add_maximum_shear,requested)
def add_Mises(self,x):
"""
Add the equivalent Mises stress or strain of a symmetric tensor.
Parameters
----------
x : str
Label of the dataset containing a symmetric stress or strain tensor.
"""
def __add_Mises(x):
t = 'strain' if x['meta']['Unit'] == '1' else \
'stress'
return {
'data': mechanics.Mises_strain(x['data']) if t=='strain' else mechanics.Mises_stress(x['data']),
'label': '{}_vM'.format(x['label']),
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Mises equivalent {} of {} ({})'.format(t,x['label'],x['meta']['Description']),
'Creator': 'dadf5.py:add_Mises v{}'.format(version)
}
}
requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(__add_Mises,requested)
def add_norm(self,x,ord=None): def add_norm(self,x,ord=None):
""" """
Adds norm of vector or tensor. Add the norm of vector or tensor.
Parameters
----------
x : str
Label of the dataset containing a vector or tensor.
ord : {non-zero int, inf, -inf, fro, nuc}, optional
Order of the norm. inf means numpy's inf object. For details refer to numpy.linalg.norm.
See numpy.linalg.norm manual for details.
""" """
def norm(x,ord): def __add_norm(x,ord):
o = ord o = ord
if len(x['data'].shape) == 2: if len(x['data'].shape) == 2:
@ -442,177 +680,111 @@ class DADF5():
raise ValueError raise ValueError
return { return {
'data' : np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True), 'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
'label' : '|{}|_{}'.format(x['label'],o), 'label': '|{}|_{}'.format(x['label'],o),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': x['meta']['Unit'],
'Description' : '{}-Norm of {} {} ({})'.format(ord,t,x['label'],x['meta']['Description']), 'Description': '{}-Norm of {} {} ({})'.format(ord,t,x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_norm v{}'.format(version) 'Creator': 'dadf5.py:add_norm v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(norm,requested,{'ord':ord}) self.__add_generic_pointwise(__add_norm,requested,{'ord':ord})
def add_absolute(self,x): def add_principal_components(self,x):
"""Adds absolute value.""" """
def absolute(x): Add principal components of symmetric tensor.
The principal components are sorted in descending order, each repeated according to its multiplicity.
Parameters
----------
x : str
Label of the dataset containing a symmetric tensor.
"""
def __add_principal_components(x):
return { return {
'data' : np.abs(x['data']), 'data': mechanics.principal_components(x['data']),
'label' : '|{}|'.format(x['label']), 'label': 'lambda_{}'.format(x['label']),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': x['meta']['Unit'],
'Description' : 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']), 'Description': 'Principal components of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_abs v{}'.format(version) 'Creator': 'dadf5.py:add_principal_components v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(absolute,requested) self.__add_generic_pointwise(__add_principal_components,requested)
def add_determinant(self,x):
"""Adds the determinant component of a tensor."""
def determinant(x):
return {
'data' : np.linalg.det(x['data']),
'label' : 'det({})'.format(x['label']),
'meta' : {
'Unit' : x['meta']['Unit'],
'Description' : 'Determinant of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_determinant v{}'.format(version)
}
}
requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(determinant,requested)
def add_spherical(self,x): def add_spherical(self,x):
"""Adds the spherical component of a tensor.""" """
def spherical(x): Add the spherical (hydrostatic) part of a tensor.
Parameters
----------
x : str
Label of the dataset containing a tensor.
"""
def __add_spherical(x):
if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])): if not np.all(np.array(x['data'].shape[1:]) == np.array([3,3])):
raise ValueError raise ValueError
return { return {
'data' : np.trace(x['data'],axis1=1,axis2=2)/3.0, 'data': mechanics.spherical_part(x['data']),
'label' : 'sph({})'.format(x['label']), 'label': 'p_{}'.format(x['label']),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': x['meta']['Unit'],
'Description' : 'Spherical component of tensor {} ({})'.format(x['label'],x['meta']['Description']), 'Description': 'Spherical component of tensor {} ({})'.format(x['label'],x['meta']['Description']),
'Creator' : 'dadf5.py:add_spherical v{}'.format(version) 'Creator': 'dadf5.py:add_spherical v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':x,'arg':'x'}]
self.__add_generic_pointwise(spherical,requested) self.__add_generic_pointwise(__add_spherical,requested)
def add_deviator(self,x): def add_strain_tensor(self,F='F',t='U',m=0):
"""Adds the deviator of a tensor.""" """
def deviator(x): Add strain tensor calculated from a deformation gradient.
d = x['data']
if not np.all(np.array(d.shape[1:]) == np.array([3,3])): For details refer to damask.mechanics.strain_tensor
raise ValueError
Parameters
----------
F : str, optional
Label of the dataset containing the deformation gradient. Default value is F.
t : {V, U}, optional
Type of the polar decomposition, V for left stretch tensor and U for right stretch tensor.
Default value is U.
m : float, optional
Order of the strain calculation. Default value is 0.0.
"""
def __add_strain_tensor(F,t,m):
return { return {
'data' : d - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[d.shape[0],3,3]),np.trace(d,axis1=1,axis2=2)/3.0), 'data': mechanics.strain_tensor(F['data'],t,m),
'label' : 'dev({})'.format(x['label']), 'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
'meta' : { 'meta': {
'Unit' : x['meta']['Unit'], 'Unit': F['meta']['Unit'],
'Description' : 'Deviator of tensor {} ({})'.format(x['label'],x['meta']['Description']), 'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator' : 'dadf5.py:add_deviator v{}'.format(version) 'Creator': 'dadf5.py:add_strain_tensor v{}'.format(version)
} }
} }
requested = [{'label':x,'arg':'x'}] requested = [{'label':F,'arg':'F'}]
self.__add_generic_pointwise(deviator,requested) self.__add_generic_pointwise(__add_strain_tensor,requested,{'t':t,'m':m})
def add_calculation(self,formula,label,unit='n/a',description=None,vectorized=True):
"""
General formula.
Works currently only for vectorized expressions
"""
if vectorized is not True:
raise NotImplementedError
def calculation(**kwargs):
formula = kwargs['formula']
for d in re.findall(r'#(.*?)#',formula):
formula = re.sub('#{}#'.format(d),"kwargs['{}']['data']".format(d),formula)
return {
'data' : eval(formula),
'label' : kwargs['label'],
'meta' : {
'Unit' : kwargs['unit'],
'Description' : '{}'.format(kwargs['description']),
'Creator' : 'dadf5.py:add_calculation v{}'.format(version)
}
}
requested = [{'label':d,'arg':d} for d in re.findall(r'#(.*?)#',formula)] # datasets used in the formula
pass_through = {'formula':formula,'label':label,'unit':unit,'description':description}
self.__add_generic_pointwise(calculation,requested,pass_through)
def add_strain_tensor(self,t,ord,defgrad='F'): #ToDo: Use t and ord
"""
Adds the a strain tensor.
Albrecht Bertram: Elasticity and Plasticity of Large Deformations An Introduction (3rd Edition, 2012), p. 102.
"""
def strain_tensor(defgrad,t,ord):
operator = {
'V#ln': lambda V: np.log(V),
'U#ln': lambda U: np.log(U),
'V#Biot': lambda V: np.broadcast_to(np.ones(3),[V.shape[0],3]) - 1.0/V,
'U#Biot': lambda U: U - np.broadcast_to(np.ones(3),[U.shape[0],3]),
'V#Green':lambda V: np.broadcast_to(np.ones(3),[V.shape[0],3]) - 1.0/V**2,
'U#Green':lambda U: U**2 - np.broadcast_to(np.ones(3),[U.shape[0],3]),
}
(U,S,Vh) = np.linalg.svd(defgrad['data']) # singular value decomposition
R_inv = np.transpose(np.matmul(U,Vh),(0,2,1)) # transposed rotation of polar decomposition
U = np.matmul(R_inv,defgrad['data']) # F = RU
(D,V) = np.linalg.eigh((U+np.transpose(U,(0,2,1)))*.5) # eigen decomposition (of symmetric(ed) matrix)
neg = np.where(D < 0.0) # find negative eigenvalues ...
D[neg[0],neg[1]] = D[neg[0],neg[1]]* -1 # ... flip value ...
V[neg[0],:,neg[1]] = V[neg[0],:,neg[1]]* -1 # ... and vector
d = operator['V#ln'](D)
a = np.matmul(V,np.einsum('ij,ikj->ijk',d,V))
return {
'data' : a,
'label' : 'ln(V)({})'.format(defgrad['label']),
'meta' : {
'Unit' : defgrad['meta']['Unit'],
'Description' : 'Strain tensor ln(V){} ({})'.format(defgrad['label'],defgrad['meta']['Description']),
'Creator' : 'dadf5.py:add_deviator v{}'.format(version)
}
}
requested = [{'label':defgrad,'arg':'defgrad'}]
self.__add_generic_pointwise(strain_tensor,requested,{'t':t,'ord':ord})
def __add_generic_pointwise(self,func,datasets_requested,extra_args={}): def __add_generic_pointwise(self,func,datasets_requested,extra_args={}):
@ -643,12 +815,12 @@ class DADF5():
todo = [] todo = []
# ToDo: It would be more memory efficient to read only from file when required, i.e. do to it in pool.add_task # ToDo: It would be more memory efficient to read only from file when required, i.e. do to it in pool.add_task
for group in self.groups_with_datasets([d['label'] for d in datasets_requested]): for group in self.groups_with_datasets([d['label'] for d in datasets_requested]):
with h5py.File(self.filename,'r') as f: with h5py.File(self.fname,'r') as f:
datasets_in = {} datasets_in = {}
for d in datasets_requested: for d in datasets_requested:
loc = f[group+'/'+d['label']] loc = f[group+'/'+d['label']]
data = loc[()] data = loc[()]
meta = {k:loc.attrs[k] for k in loc.attrs.keys()} meta = {k:loc.attrs[k].decode() for k in loc.attrs.keys()}
datasets_in[d['arg']] = {'data': data, 'meta' : meta, 'label' : d['label']} datasets_in[d['arg']] = {'data': data, 'meta' : meta, 'label' : d['label']}
todo.append({'in':{**datasets_in,**extra_args},'func':func,'group':group,'results':results}) todo.append({'in':{**datasets_in,**extra_args},'func':func,'group':group,'results':results})
@ -658,10 +830,10 @@ class DADF5():
N_not_calculated = len(todo) N_not_calculated = len(todo)
while N_not_calculated > 0: while N_not_calculated > 0:
result = results.get() result = results.get()
with h5py.File(self.filename,'a') as f: # write to file with h5py.File(self.fname,'a') as f: # write to file
dataset_out = f[result['group']].create_dataset(result['label'],data=result['data']) dataset_out = f[result['group']].create_dataset(result['label'],data=result['data'])
for k in result['meta'].keys(): for k in result['meta'].keys():
dataset_out.attrs[k] = result['meta'][k] dataset_out.attrs[k] = result['meta'][k].encode()
N_not_calculated-=1 N_not_calculated-=1
if N_added < len(todo): # add more jobs if N_added < len(todo): # add more jobs


@ -2,6 +2,7 @@ import os
from io import StringIO from io import StringIO
import numpy as np import numpy as np
from scipy import ndimage
import vtk import vtk
from vtk.util import numpy_support from vtk.util import numpy_support
@ -30,7 +31,6 @@ class Geom():
comment lines. comment lines.
""" """
self.__transforms__ = \
self.set_microstructure(microstructure) self.set_microstructure(microstructure)
self.set_size(size) self.set_size(size)
self.set_origin(origin) self.set_origin(origin)
@ -239,8 +239,8 @@ class Geom():
header.append('homogenization {}'.format(self.get_homogenization())) header.append('homogenization {}'.format(self.get_homogenization()))
return header return header
@classmethod @staticmethod
def from_file(cls,fname): def from_file(fname):
""" """
Reads a geom file. Reads a geom file.
@ -250,7 +250,11 @@ class Geom():
geometry file to read. geometry file to read.
""" """
with (open(fname) if isinstance(fname,str) else fname) as f: try:
f = open(fname)
except TypeError:
f = fname
f.seek(0) f.seek(0)
header_length,keyword = f.readline().split()[:2] header_length,keyword = f.readline().split()[:2]
header_length = int(header_length) header_length = int(header_length)
@ -286,7 +290,6 @@ class Geom():
abs(int(items[2])-int(items[0]))+1,dtype=float) abs(int(items[2])-int(items[0]))+1,dtype=float)
else: items = list(map(float,items)) else: items = list(map(float,items))
else: items = list(map(float,items)) else: items = list(map(float,items))
microstructure[i:i+len(items)] = items microstructure[i:i+len(items)] = items
i += len(items) i += len(items)
@ -297,9 +300,10 @@ class Geom():
if not np.any(np.mod(microstructure.flatten(),1) != 0.0): # no float present if not np.any(np.mod(microstructure.flatten(),1) != 0.0): # no float present
microstructure = microstructure.astype('int') microstructure = microstructure.astype('int')
return cls(microstructure.reshape(grid),size,origin,homogenization,comments) return Geom(microstructure.reshape(grid),size,origin,homogenization,comments)
def to_file(self,fname):
def to_file(self,fname,pack=None):
""" """
Writes a geom file. Writes a geom file.
@ -307,16 +311,62 @@ class Geom():
---------- ----------
fname : str or file handle fname : str or file handle
geometry file to write. geometry file to write.
pack : bool, optional
compress geometry with 'x of y' and 'a to b'.
""" """
header = self.get_header() header = self.get_header()
grid = self.get_grid() grid = self.get_grid()
format_string = '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure))))) if self.microstructure.dtype == int \
else '%g' if pack is None:
plain = grid.prod()/np.unique(self.microstructure).size < 250
else:
plain = not pack
if plain:
format_string = '%g' if self.microstructure.dtype in np.sctypes['float'] else \
'%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure)))))
np.savetxt(fname, np.savetxt(fname,
self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T, self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
header='\n'.join(header), fmt=format_string, comments='') header='\n'.join(header), fmt=format_string, comments='')
else:
try:
f = open(fname,'w')
except TypeError:
f = fname
compressType = None
former = start = -1
reps = 0
for current in self.microstructure.flatten('F'):
if abs(current - former) == 1 and (start - current) == reps*(former - current):
compressType = 'to'
reps += 1
elif current == former and start == former:
compressType = 'of'
reps += 1
else:
if compressType is None:
f.write('\n'.join(self.get_header())+'\n')
elif compressType == '.':
f.write('{}\n'.format(former))
elif compressType == 'to':
f.write('{} to {}\n'.format(start,former))
elif compressType == 'of':
f.write('{} of {}\n'.format(reps,former))
compressType = '.'
start = current
reps = 1
former = current
if compressType == '.':
f.write('{}\n'.format(former))
elif compressType == 'to':
f.write('{} to {}\n'.format(start,former))
elif compressType == 'of':
f.write('{} of {}\n'.format(reps,former))
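For illustration, this run-length encoding maps the flattened microstructure sequence 1 1 1 2 3 4 5 to

3 of 1
2 to 5

Descending runs compress the same way, e.g. 5 4 3 2 becomes '5 to 2'.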
def to_vtk(self,fname=None): def to_vtk(self,fname=None):
@ -387,3 +437,91 @@ class Geom():
self.to_file(f) self.to_file(f)
f.seek(0) f.seek(0)
return ''.join(f.readlines()) return ''.join(f.readlines())
def mirror(self,directions,reflect=False):
"""
Mirror microstructure along given directions.
Parameters
----------
directions : iterable containing str
direction(s) along which the microstructure is mirrored. Valid entries are 'x', 'y', 'z'.
reflect : bool, optional
reflect (include) outermost layers.
"""
valid = {'x','y','z'}
if not all(isinstance(d, str) for d in directions):
raise TypeError('Directions are not of type str.')
elif not set(directions).issubset(valid):
raise ValueError('Invalid direction specified {}'.format(*set(directions).difference(valid)))
limits = [None,None] if reflect else [-2,0]
ms = self.get_microstructure()
if 'z' in directions:
ms = np.concatenate([ms,ms[:,:,limits[0]:limits[1]:-1]],2)
if 'y' in directions:
ms = np.concatenate([ms,ms[:,limits[0]:limits[1]:-1,:]],1)
if 'x' in directions:
ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)
return self.update(ms,rescale=True)
#self.add_comments('tbd')
def scale(self,grid):
"""
Scale microstructure to new grid.
Parameters
----------
grid : iterable of int
new grid dimension
"""
return self.update(
ndimage.interpolation.zoom(
self.microstructure,
grid/self.get_grid(),
output=self.microstructure.dtype,
order=0,
mode='nearest',
prefilter=False
)
)
#self.add_comments('tbd')
def clean(self,stencil=3):
"""
Smooth microstructure by selecting most frequent index within given stencil at each location.
Parameters
----------
stencil : int, optional
size of smoothing stencil.
"""
def mostFrequent(arr):
unique, inverse = np.unique(arr, return_inverse=True)
return unique[np.argmax(np.bincount(inverse))]
return self.update(ndimage.filters.generic_filter(
self.microstructure,
mostFrequent,
size=(stencil,)*3
).astype(self.microstructure.dtype)
)
#self.add_comments('tbd')
def renumber(self):
"""Renumber sorted microstructure indices to 1,...,N."""
renumbered = np.empty(self.get_grid(),dtype=self.microstructure.dtype)
for i, oldID in enumerate(np.unique(self.microstructure)):
renumbered = np.where(self.microstructure == oldID, i+1, renumbered)
return self.update(renumbered)
#self.add_comments('tbd')
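Taken together, these methods allow simple geometry processing pipelines; a sketch with hypothetical file names, assuming Geom is exported at package level:

from damask import Geom

geom = Geom.from_file('initial.geom')  # hypothetical input
geom.mirror(['x','z'],reflect=False)   # double the domain along x and z
geom.scale(geom.get_grid()*2)          # refine the grid by a factor of 2
geom.clean(stencil=3)                  # majority filter against stray voxels
geom.renumber()                        # microstructure indices 1,...,N
geom.to_file('processed.geom',pack=True)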

python/damask/mechanics.py Normal file

@ -0,0 +1,266 @@
import numpy as np
def Cauchy(F,P):
"""
Return Cauchy stress calculated from 1. Piola-Kirchhoff stress and deformation gradient.
Resulting tensor is symmetrized as the Cauchy stress needs to be symmetric.
Parameters
----------
F : numpy.array of shape (:,3,3) or (3,3)
Deformation gradient.
P : numpy.array of shape (:,3,3) or (3,3)
1. Piola-Kirchhoff stress.
"""
if np.shape(F) == np.shape(P) == (3,3):
sigma = 1.0/np.linalg.det(F) * np.dot(P,F.T)
else:
sigma = np.einsum('i,ijk,ilk->ijl',1.0/np.linalg.det(F),P,F)
return symmetric(sigma)
def PK2(F,P):
"""
Return 2. Piola-Kirchhoff stress calculated from 1. Piola-Kirchhoff stress and deformation gradient.
Parameters
----------
F : numpy.array of shape (:,3,3) or (3,3)
Deformation gradient.
P : numpy.array of shape (:,3,3) or (3,3)
1. Piola-Kirchhoff stress.
"""
if np.shape(F) == np.shape(P) == (3,3):
S = np.dot(np.linalg.inv(F),P)
else:
S = np.einsum('ijk,ikl->ijl',np.linalg.inv(F),P)
return S
def strain_tensor(F,t,m):
"""
Return strain tensor calculated from deformation gradient.
For details refer to https://en.wikipedia.org/wiki/Finite_strain_theory and
https://de.wikipedia.org/wiki/Verzerrungstensor
Parameters
----------
F : numpy.array of shape (:,3,3) or (3,3)
Deformation gradient.
t : {V, U}
Type of the polar decomposition, V for left stretch tensor and U for right stretch tensor.
m : float
Order of the strain.
"""
F_ = F.reshape((1,3,3)) if F.shape == (3,3) else F
if t == 'U':
C = np.matmul(transpose(F_),F_) # right Cauchy-Green deformation tensor, C = U^2
w,n = np.linalg.eigh(C)
elif t == 'V':
B = np.matmul(F_,transpose(F_)) # left Cauchy-Green deformation tensor, B = V^2
w,n = np.linalg.eigh(B)
if m > 0.0:
eps = 1.0/(2.0*abs(m)) * (+ np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
- np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
elif m < 0.0:
eps = 1.0/(2.0*abs(m)) * (- np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
+ np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
else:
eps = np.matmul(n,np.einsum('ij,ikj->ijk',0.5*np.log(w),n))
return eps.reshape((3,3)) if np.shape(F) == (3,3) else \
eps
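In formulas, strain_tensor implements the Seth-Hill strain family; written here for t='U' (replace U by V for t='V'):

E_m = \frac{1}{2m}\left(\mathbf{U}^{2m}-\mathbf{I}\right) \quad (m \neq 0), \qquad E_0 = \ln\mathbf{U}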
def deviatoric_part(x):
"""
Return deviatoric part of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the deviatoric part is computed.
"""
return x - np.eye(3)*spherical_part(x) if np.shape(x) == (3,3) else \
x - np.einsum('ijk,i->ijk',np.broadcast_to(np.eye(3),[x.shape[0],3,3]),spherical_part(x))
def spherical_part(x):
"""
Return spherical (hydrostatic) part of a tensor.
A single scalar is returned, i.e. the hydrostatic part is not mapped onto the second-order identity matrix.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the hydrostatic part is computed.
"""
return np.trace(x)/3.0 if np.shape(x) == (3,3) else \
np.trace(x,axis1=1,axis2=2)/3.0
def Mises_stress(sigma):
"""
Return the Mises equivalent of a stress tensor.
Parameters
----------
sigma : numpy.array of shape (:,3,3) or (3,3)
Symmetric stress tensor of which the von Mises equivalent is computed.
"""
s = deviatoric_part(sigma)
return np.sqrt(3.0/2.0*(np.sum(s**2.0))) if np.shape(sigma) == (3,3) else \
np.sqrt(3.0/2.0*np.einsum('ijk->i',s**2.0))
def Mises_strain(epsilon):
"""
Return the Mises equivalent of a strain tensor.
Parameters
----------
epsilon : numpy.array of shape (:,3,3) or (3,3)
Symmetric strain tensor of which the von Mises equivalent is computed.
"""
s = deviatoric_part(epsilon)
return np.sqrt(2.0/3.0*(np.sum(s**2.0))) if np.shape(epsilon) == (3,3) else \
np.sqrt(2.0/3.0*np.einsum('ijk->i',s**2.0))
def symmetric(x):
"""
Return the symmetrized tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the symmetrized values are computed.
"""
return (x+transpose(x))*0.5
def maximum_shear(x):
"""
Return the maximum shear component of a symmetric tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Symmetric tensor of which the maximum shear is computed.
"""
w = np.linalg.eigvalsh(symmetric(x)) # eigenvalues in ascending order
return (w[2] - w[0])*0.5 if np.shape(x) == (3,3) else \
(w[:,2] - w[:,0])*0.5
def principal_components(x):
"""
Return the principal components of a symmetric tensor.
The principal components (eigenvalues) are sorted in descending order, each repeated according to
its multiplicity.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Symmetric tensor of which the principal components are computed.
"""
w = np.linalg.eigvalsh(symmetric(x)) # eigenvalues in ascending order
return w[::-1] if np.shape(x) == (3,3) else \
w[:,::-1]
def transpose(x):
"""
Return the transpose of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the transpose is computed.
"""
return x.T if np.shape(x) == (3,3) else \
np.transpose(x,(0,2,1))
def rotational_part(x):
"""
Return the rotational part of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the rotational part is computed.
"""
return __polar_decomposition(x,'R')[0]
def left_stretch(x):
"""
Return the left stretch of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the left stretch is computed.
"""
return __polar_decomposition(x,'V')[0]
def right_stretch(x):
"""
Return the right stretch of a tensor.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the right stretch is computed.
"""
return __polar_decomposition(x,'U')[0]
def __polar_decomposition(x,requested):
"""
Polar decomposition via singular value decomposition.
Parameters
----------
x : numpy.array of shape (:,3,3) or (3,3)
Tensor of which the singular values are computed.
requested : iterable of str
Requested outputs: R for the rotation tensor,
V for left stretch tensor and U for right stretch tensor.
"""
u, s, vh = np.linalg.svd(x)
R = np.dot(u,vh) if np.shape(x) == (3,3) else \
np.einsum('ijk,ikl->ijl',u,vh)
output = []
if 'R' in requested:
output.append(R)
if 'V' in requested:
output.append(np.dot(x,R.T) if np.shape(x) == (3,3) else np.einsum('ijk,ilk->ijl',x,R))
if 'U' in requested:
output.append(np.dot(R.T,x) if np.shape(x) == (3,3) else np.einsum('ikj,ikl->ijl',R,x))
return tuple(output)
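A short usage sketch of this module; input values are random and the subpackage import path is assumed:

import numpy as np
from damask import mechanics

F = np.broadcast_to(np.eye(3),(5,3,3)) + np.random.rand(5,3,3)*1e-3  # nearly undeformed
P = np.random.rand(5,3,3)*1e6

sigma = mechanics.Cauchy(F,P)            # symmetric Cauchy stress
S = mechanics.PK2(F,P)                   # 2. Piola-Kirchhoff stress
eps = mechanics.strain_tensor(F,'U',0)   # logarithmic strain ln(U)

R = mechanics.rotational_part(F)
U = mechanics.right_stretch(F)
print(np.allclose(np.matmul(R,U),F))     # polar decomposition identity F = RU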


@ -79,9 +79,9 @@ class Marc(Solver):
exitnumber = -1 exitnumber = -1
fid_out = open(outFile,'r') fid_out = open(outFile,'r')
for line in fid_out: for line in fid_out:
if (string.find(line,'tress iteration') is not -1): if (string.find(line,'tress iteration') != -1):
print(line) print(line)
elif (string.find(line,'Exit number') is not -1): elif (string.find(line,'Exit number') != -1):
substr = line[string.find(line,'Exit number'):len(line)] substr = line[string.find(line,'Exit number'):len(line)]
exitnumber = int(substr[12:16]) exitnumber = int(substr[12:16])

python/damask/table.py Normal file

@ -0,0 +1,257 @@
import re
import pandas as pd
import numpy as np
class Table():
"""Store spreadsheet-like data."""
def __init__(self,data,shapes,comments=None):
"""
New spreadsheet.
Parameters
----------
data : numpy.ndarray
Data.
shapes : dict with str:tuple pairs
Shapes of the columns. Example 'F':(3,3) for a deformation gradient.
comments : iterable of str, optional
Additional, human-readable information.
"""
self.comments = [] if comments is None else [c for c in comments]
self.data = pd.DataFrame(data=data)
self.shapes = shapes
self.__label_condensed()
def __label_flat(self):
"""Label data individually, e.g. v v v ==> 1_v 2_v 3_v."""
labels = []
for label,shape in self.shapes.items():
size = np.prod(shape)
labels += ['{}{}'.format('' if size == 1 else '{}_'.format(i+1),label) for i in range(size)]
self.data.columns = labels
def __label_condensed(self):
"""Label data condensed, e.g. 1_v 2_v 3_v ==> v v v."""
labels = []
for label,shape in self.shapes.items():
labels += [label] * np.prod(shape)
self.data.columns = labels
def __add_comment(self,label,shape,info):
if info is not None:
self.comments.append('{}{}: {}'.format(label,
' '+str(shape) if np.prod(shape,dtype=int) > 1 else '',
info))
@staticmethod
def from_ASCII(fname):
"""
Create table from ASCII file.
The first line needs to indicate the number of subsequent header lines as 'n header'.
Vector data column labels are indicated by '1_v, 2_v, ..., n_v'.
Tensor data column labels are indicated by '3x3:1_T, 3x3:2_T, ..., 3x3:9_T'.
Parameters
----------
fname : file, str, or pathlib.Path
Filename or file for reading.
"""
try:
f = open(fname)
except TypeError:
f = fname
header,keyword = f.readline().split()
if keyword == 'header':
header = int(header)
else:
raise Exception
comments = [f.readline()[:-1] for i in range(1,header)]
labels = f.readline().split()
shapes = {}
for label in labels:
tensor_column = re.search(r'[0-9,x]*?:[0-9]*?_',label)
if tensor_column:
my_shape = tensor_column.group().split(':',1)[0].split('x')
shapes[label.split('_',1)[1]] = tuple([int(d) for d in my_shape])
else:
vector_column = re.match(r'[0-9]*?_',label)
if vector_column:
shapes[label.split('_',1)[1]] = (int(label.split('_',1)[0]),)
else:
shapes[label] = (1,)
data = pd.read_csv(f,names=list(range(len(labels))),sep=r'\s+').to_numpy()
return Table(data,shapes,comments)
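A small input accepted by this parser looks like the following (content is hypothetical); it yields a column 'v' of shape (3,) and a scalar column 'x':

2 header
generated by some_script v0.1
1_v 2_v 3_v x
0.1 0.2 0.3 1.0
0.4 0.5 0.6 2.0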
@property
def labels(self):
"""Return the labels of all columns."""
return list(self.shapes.keys())
def get(self,label):
"""
Get column data.
Parameters
----------
label : str
Column label.
"""
if re.match(r'[0-9]*?_',label):
idx,key = label.split('_',1)
data = self.data[key].to_numpy()[:,int(idx)-1].reshape((-1,1))
else:
data = self.data[label].to_numpy().reshape((-1,)+self.shapes[label])
return data.astype(type(data.flatten()[0]))
def set(self,label,data,info=None):
"""
Set column data.
Parameters
----------
label : str
Column label.
data : np.ndarray
New data.
info : str, optional
Human-readable information about the new data.
"""
self.__add_comment(label,data.shape[1:],info)
if re.match(r'[0-9]*?_',label):
idx,key = label.split('_',1)
iloc = self.data.columns.get_loc(key).tolist().index(True) + int(idx) -1
self.data.iloc[:,iloc] = data
else:
self.data[label] = data.reshape(self.data[label].shape)
def add(self,label,data,info=None):
"""
Add column data.
Parameters
----------
label : str
Column label.
data : np.ndarray
Modified data.
info : str, optional
Human-readable information about the modified data.
"""
self.__add_comment(label,data.shape[1:],info)
self.shapes[label] = data.shape[1:] if len(data.shape) > 1 else (1,)
size = np.prod(data.shape[1:],dtype=int)
new = pd.DataFrame(data=data.reshape(-1,size),
columns=[label]*size,
)
new.index = self.data.index
self.data = pd.concat([self.data,new],axis=1)
def delete(self,label):
"""
Delete column data.
Parameters
----------
label : str
Column label.
"""
self.data.drop(columns=label,inplace=True)
del self.shapes[label]
def rename(self,label_old,label_new,info=None):
"""
Rename column data.
Parameters
----------
label_old : str
Old column label.
label_new : str
New column label.
"""
self.data.rename(columns={label_old:label_new},inplace=True)
self.comments.append('{} => {}{}'.format(label_old,
label_new,
'' if info is None else ': {}'.format(info),
))
self.shapes[label_new] = self.shapes.pop(label_old)
def sort_by(self,labels,ascending=True):
"""
Sort table by values of given columns.
Parameters
----------
labels : str or list
Column labels.
ascending : bool or list, optional
Set sort order.
"""
self.__label_flat()
self.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
self.__label_condensed()
self.comments.append('sorted by [{}]'.format(', '.join(labels)))
def to_ASCII(self,fname):
"""
Store as plain text file.
Parameters
----------
fname : file, str, or pathlib.Path
Filename or file for writing.
"""
labels = []
for l in self.shapes:
if(self.shapes[l] == (1,)):
labels.append('{}'.format(l))
elif(len(self.shapes[l]) == 1):
labels += ['{}_{}'.format(i+1,l) \
for i in range(self.shapes[l][0])]
else:
labels += ['{}:{}_{}'.format('x'.join([str(d) for d in self.shapes[l]]),i+1,l) \
for i in range(np.prod(self.shapes[l],dtype=int))]
header = ['{} header'.format(len(self.comments)+1)] \
+ self.comments \
+ [' '.join(labels)]
try:
f = open(fname,'w')
except TypeError:
f = fname
for line in header: f.write(line+'\n')
self.data.to_csv(f,sep=' ',index=False,header=False)
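A usage sketch of the class, with random data and an assumed top-level export:

import numpy as np
from damask import Table

t = Table(np.random.rand(5,4),{'v':(3,),'x':(1,)},comments=['random test data'])
t.add('|v|',np.linalg.norm(t.get('v'),axis=1,keepdims=True),'magnitude of v')
t.sort_by(['x'])
t.to_ASCII('sorted.txt')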


@ -1,10 +1,8 @@
# -*- coding: UTF-8 no BOM -*-
import os,sys,shutil import os,sys,shutil
import logging,logging.config import logging,logging.config
import damask import damask
import numpy as np import numpy as np
from collections import Iterable from collections.abc import Iterable
from optparse import OptionParser from optparse import OptionParser
class Test(): class Test():
@ -17,7 +15,7 @@ class Test():
variants = [] variants = []
def __init__(self, **kwargs): def __init__(self, **kwargs):
"""New test."""
defaults = {'description': '', defaults = {'description': '',
'keep': False, 'keep': False,
'accept': False, 'accept': False,
@ -120,22 +118,22 @@ class Test():
"""Delete directory tree containing current results.""" """Delete directory tree containing current results."""
try: try:
shutil.rmtree(self.dirCurrent()) shutil.rmtree(self.dirCurrent())
except: except FileNotFoundError:
logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent())) logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
try: try:
os.mkdir(self.dirCurrent()) os.mkdir(self.dirCurrent())
return True return True
except: except FileExistsError:
logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent())) logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent()))
return False return False
def prepareAll(self): def prepareAll(self):
"""Do all necessary preparations for the whole test""" """Do all necessary preparations for the whole test."""
return True return True
def prepare(self,variant): def prepare(self,variant):
"""Do all necessary preparations for the run of each test variant""" """Do all necessary preparations for the run of each test variant."""
return True return True
@ -207,9 +205,9 @@ class Test():
for source,target in zip(list(map(mapA,A)),list(map(mapB,B))): for source,target in zip(list(map(mapA,A)),list(map(mapB,B))):
try: try:
shutil.copy2(source,target) shutil.copy2(source,target)
except: except FileNotFoundError:
logging.critical('error copying {} to {}'.format(source,target)) logging.critical('error copying {} to {}'.format(source,target))
raise raise FileNotFoundError
def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]): def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]):
@ -218,9 +216,9 @@ class Test():
for i,f in enumerate(sourcefiles): for i,f in enumerate(sourcefiles):
try: try:
shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i])) shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
except: except FileNotFoundError:
logging.critical('Reference2Current: Unable to copy file "{}"'.format(f)) logging.critical('Reference2Current: Unable to copy file "{}"'.format(f))
raise raise FileNotFoundError
def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]): def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):
@ -230,10 +228,10 @@ class Test():
for i,f in enumerate(sourcefiles): for i,f in enumerate(sourcefiles):
try: try:
shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i])) shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i]))
except: except FileNotFoundError:
logging.error(os.path.join(source,f)) logging.error(os.path.join(source,f))
logging.critical('Base2Current: Unable to copy file "{}"'.format(f)) logging.critical('Base2Current: Unable to copy file "{}"'.format(f))
raise raise FileNotFoundError
def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]): def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]):
@ -242,9 +240,9 @@ class Test():
for i,f in enumerate(sourcefiles): for i,f in enumerate(sourcefiles):
try: try:
shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i])) shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i]))
except: except FileNotFoundError:
logging.critical('Current2Reference: Unable to copy file "{}"'.format(f)) logging.critical('Current2Reference: Unable to copy file "{}"'.format(f))
raise raise FileNotFoundError
def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]): def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]):
@ -253,9 +251,9 @@ class Test():
for i,f in enumerate(sourcefiles): for i,f in enumerate(sourcefiles):
try: try:
shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i])) shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i]))
except: except FileNotFoundError:
logging.critical('Proof2Current: Unable to copy file "{}"'.format(f)) logging.critical('Proof2Current: Unable to copy file "{}"'.format(f))
raise raise FileNotFoundError
def copy_Current2Current(self,sourcefiles=[],targetfiles=[]): def copy_Current2Current(self,sourcefiles=[],targetfiles=[]):
@ -263,9 +261,10 @@ class Test():
for i,f in enumerate(sourcefiles): for i,f in enumerate(sourcefiles):
try: try:
shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i])) shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
except: except FileNotFoundError:
logging.critical('Current2Current: Unable to copy file "{}"'.format(f)) logging.critical('Current2Current: Unable to copy file "{}"'.format(f))
raise raise FileNotFoundError
def execute_inCurrentDir(self,cmd,streamIn=None,env=None): def execute_inCurrentDir(self,cmd,streamIn=None,env=None):
@ -439,7 +438,7 @@ class Test():
stdTol = 1.0e-6, stdTol = 1.0e-6,
preFilter = 1.0e-9): preFilter = 1.0e-9):
""" """
Calculate statistics of tables Calculate statistics of tables.
threshold can be used to ignore small values (a negative number disables this feature) threshold can be used to ignore small values (a negative number disables this feature)
""" """
@ -492,7 +491,7 @@ class Test():
rtol = 1e-5, rtol = 1e-5,
atol = 1e-8, atol = 1e-8,
debug = False): debug = False):
"""Compare multiple tables with np.allclose""" """Compare multiple tables with np.allclose."""
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
files = [str(files)] files = [str(files)]


@ -1,3 +0,0 @@
"""Test functionality."""
from .test import Test # noqa

python/tests/conftest.py Normal file

@ -0,0 +1,21 @@
import os
import pytest
import damask
def pytest_addoption(parser):
parser.addoption("--update",
action="store_true",
default=False)
@pytest.fixture
def update(request):
"""Store current results as new reference results."""
return request.config.getoption("--update")
@pytest.fixture
def reference_dir_base():
"""Directory containing reference results."""
env = damask.Environment()
return os.path.join(env.rootDir(),'python','tests','reference')
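A test module would consume these fixtures roughly as follows; module, function, and directory names are hypothetical:

import os
import pytest
import damask

@pytest.fixture
def reference_dir(reference_dir_base):
    """Directory containing reference results for this module."""
    return os.path.join(reference_dir_base,'Geom')

def test_geom_roundtrip(reference_dir,update):
    geom = damask.Geom.from_file(os.path.join(reference_dir,'simple.geom'))  # hypothetical file
    if update:
        geom.to_file(os.path.join(reference_dir,'simple.geom'))              # refresh reference
    assert geom.microstructure.size == geom.get_grid().prod()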


@ -0,0 +1,125 @@
68 header
geom_fromVoronoiTessellation 2.0.3-1073-g6f3cb071
<texture>
[Grain1]
(gauss) phi1 358.98 Phi 65.62 phi2 24.48
[Grain2]
(gauss) phi1 121.05 Phi 176.11 phi2 295.73
[Grain3]
(gauss) phi1 43.79 Phi 113.76 phi2 345.90
[Grain4]
(gauss) phi1 265.15 Phi 62.52 phi2 299.71
[Grain5]
(gauss) phi1 221.23 Phi 26.54 phi2 207.05
[Grain6]
(gauss) phi1 249.81 Phi 61.47 phi2 152.14
[Grain7]
(gauss) phi1 332.45 Phi 99.16 phi2 345.34
[Grain8]
(gauss) phi1 312.27 Phi 118.27 phi2 181.59
[Grain9]
(gauss) phi1 303.10 Phi 48.21 phi2 358.03
[Grain10]
(gauss) phi1 338.26 Phi 48.11 phi2 176.78
[Grain11]
(gauss) phi1 115.17 Phi 56.54 phi2 223.84
[Grain12]
(gauss) phi1 281.04 Phi 97.48 phi2 27.94
<microstructure>
[Grain1]
crystallite 1
(constituent) phase 1 texture 1 fraction 1.0
[Grain2]
crystallite 1
(constituent) phase 1 texture 2 fraction 1.0
[Grain3]
crystallite 1
(constituent) phase 1 texture 3 fraction 1.0
[Grain4]
crystallite 1
(constituent) phase 1 texture 4 fraction 1.0
[Grain5]
crystallite 1
(constituent) phase 1 texture 5 fraction 1.0
[Grain6]
crystallite 1
(constituent) phase 1 texture 6 fraction 1.0
[Grain7]
crystallite 1
(constituent) phase 1 texture 7 fraction 1.0
[Grain8]
crystallite 1
(constituent) phase 1 texture 8 fraction 1.0
[Grain9]
crystallite 1
(constituent) phase 1 texture 9 fraction 1.0
[Grain10]
crystallite 1
(constituent) phase 1 texture 10 fraction 1.0
[Grain11]
crystallite 1
(constituent) phase 1 texture 11 fraction 1.0
[Grain12]
crystallite 1
(constituent) phase 1 texture 12 fraction 1.0
<!skip>
grid a 6 b 7 c 8
size x 0.75 y 0.875 z 1.0
origin x 0.0 y 0.0 z 0.0
homogenization 1
9 3 3 10 9 9
9 1 1 1 9 9
9 11 1 1 7 9
7 11 11 7 7 7
7 11 11 7 7 7
12 3 3 10 7 12
12 3 3 10 10 12
12 3 3 1 9 9
9 1 1 1 9 9
9 1 1 1 7 7
7 1 1 7 7 7
12 12 3 7 7 7
12 3 3 3 12 12
12 3 3 3 12 12
12 3 3 1 1 12
9 1 1 1 1 9
6 1 1 1 8 8
7 6 8 8 8 8
12 12 8 8 8 12
12 3 3 3 12 12
12 3 3 3 12 12
5 6 6 6 1 12
6 6 6 6 8 8
6 6 6 8 8 8
8 6 8 8 8 8
12 5 8 8 8 8
12 5 5 8 8 12
5 5 5 3 12 12
5 5 6 6 6 5
6 6 6 6 6 6
6 6 6 6 8 8
4 4 6 8 8 8
4 4 2 2 2 8
5 5 5 2 2 2
5 5 5 5 2 5
5 5 5 10 10 5
6 6 6 6 10 4
4 4 11 11 2 4
4 4 11 2 2 4
4 4 2 2 2 2
5 5 5 2 2 2
5 5 5 10 10 5
5 5 10 10 10 9
4 11 11 11 10 9
4 4 11 11 11 4
4 4 11 11 2 4
4 4 2 2 2 2
5 5 2 2 2 2
5 5 10 10 10 10
9 10 10 10 10 9
9 11 11 10 9 9
4 11 11 11 9 9
4 11 11 11 7 7
4 4 11 2 7 7
12 10 10 10 10 7
9 10 10 10 10 9


@ -0,0 +1,129 @@
<homogenization>
[none]
mech none
ngrains 1
<texture>
[Grain1]
(gauss) phi1 358.98 Phi 65.62 phi2 24.48
[Grain2]
(gauss) phi1 121.05 Phi 176.11 phi2 295.73
[Grain3]
(gauss) phi1 43.79 Phi 113.76 phi2 345.90
[Grain4]
(gauss) phi1 265.15 Phi 62.52 phi2 299.71
[Grain5]
(gauss) phi1 221.23 Phi 26.54 phi2 207.05
[Grain6]
(gauss) phi1 249.81 Phi 61.47 phi2 152.14
[Grain7]
(gauss) phi1 332.45 Phi 99.16 phi2 345.34
[Grain8]
(gauss) phi1 312.27 Phi 118.27 phi2 181.59
[Grain9]
(gauss) phi1 303.10 Phi 48.21 phi2 358.03
[Grain10]
(gauss) phi1 338.26 Phi 48.11 phi2 176.78
[Grain11]
(gauss) phi1 115.17 Phi 56.54 phi2 223.84
[Grain12]
(gauss) phi1 281.04 Phi 97.48 phi2 27.94
<microstructure>
[Grain1]
crystallite 1
(constituent) phase 1 texture 1 fraction 1.0
[Grain2]
crystallite 1
(constituent) phase 1 texture 2 fraction 1.0
[Grain3]
crystallite 1
(constituent) phase 1 texture 3 fraction 1.0
[Grain4]
crystallite 1
(constituent) phase 1 texture 4 fraction 1.0
[Grain5]
crystallite 1
(constituent) phase 1 texture 5 fraction 1.0
[Grain6]
crystallite 1
(constituent) phase 1 texture 6 fraction 1.0
[Grain7]
crystallite 1
(constituent) phase 2 texture 7 fraction 1.0
[Grain8]
crystallite 1
(constituent) phase 2 texture 8 fraction 1.0
[Grain9]
crystallite 1
(constituent) phase 2 texture 9 fraction 1.0
[Grain10]
crystallite 1
(constituent) phase 2 texture 10 fraction 1.0
[Grain11]
crystallite 1
(constituent) phase 2 texture 11 fraction 1.0
[Grain12]
crystallite 1
(constituent) phase 2 texture 12 fraction 1.0
<phase>
[pheno_fcc]
elasticity hooke
plasticity phenopowerlaw
(output) orientation # quaternion
(output) F # deformation gradient tensor
(output) Fe # elastic deformation gradient tensor
(output) Fp # plastic deformation gradient tensor
(output) P # first Piola-Kichhoff stress tensor
(output) Lp # plastic velocity gradient tensor
lattice_structure fcc
Nslip 12 # per family
Ntwin 0 # per family
c11 106.75e9
c12 60.41e9
c44 28.34e9
gdot0_slip 0.001
n_slip 20
tau0_slip 31e6 # per family
tausat_slip 63e6 # per family
a_slip 2.25
h0_slipslip 75e6
interaction_slipslip 1 1 1.4 1.4 1.4 1.4
atol_resistance 1
[pheno_bcc]
elasticity hooke
plasticity phenopowerlaw
(output) orientation # quaternion
(output) F # deformation gradient tensor
(output) Fe # elastic deformation gradient tensor
(output) Fp # plastic deformation gradient tensor
(output) P # first Piola-Kirchhoff stress tensor
(output) Lp # plastic velocity gradient tensor
lattice_structure bcc
Nslip 12 # per family
c11 106.75e9
c12 60.41e9
c44 28.34e9
gdot0_slip 0.001
n_slip 20
tau0_slip 31e6 # per family
tausat_slip 63e6 # per family
a_slip 2.25
h0_slipslip 75e6
interaction_slipslip 1 1 1.4 1.4 1.4 1.4
atol_resistance 1
<crystallite>
[dummy]
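The configuration above is organized into <part> blocks (homogenization, texture, microstructure, phase, crystallite), each holding named [sections] whose (keyword) entries can repeat. The (constituent) lines link microstructures to phases and textures by their 1-based section index, which is how [Grain7] through [Grain12] pair textures 7-12 with the second phase, [pheno_bcc]. A rough sketch of that hierarchy in Python follows; it is an illustration only, not the DAMASK parser:

import re

def read_config(fname):
    """Illustrative only: split a material.config into its <part>/[section] hierarchy."""
    parts, part, section = {}, None, None
    with open(fname) as f:
        for line in f:
            line = line.split('#')[0].strip()                       # drop comments and whitespace
            if not line:
                continue
            m_part    = re.match(r'<(.+)>',line)
            m_section = re.match(r'\[(.+)\]',line)
            if m_part:
                part, section = parts.setdefault(m_part.group(1),{}), None
            elif m_section and part is not None:
                section = part.setdefault(m_section.group(1),[])
            elif section is not None:
                section.append(line)
    return parts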

@@ -0,0 +1 @@
fdot * 0 0 0 1.0e-3 0 0 0 * stress 0 * * * * * * * 0 time 20 incs 40 freq 4
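Read as a spectral-solver load case, the nine values after fdot fill the deformation-gradient rate row by row; a * leaves that component free, to be controlled by the corresponding stress entry instead. This one is uniaxial tension along y,

\dot{F} = \begin{pmatrix} \ast & 0 & 0 \\ 0 & 10^{-3} & 0 \\ 0 & 0 & \ast \end{pmatrix}\,\mathrm{s}^{-1},
\qquad
P = \begin{pmatrix} 0 & \ast & \ast \\ \ast & \ast & \ast \\ \ast & \ast & 0 \end{pmatrix},

run for 20 s in 40 increments with output kept every 4th increment — consistent with the 12grains6x7x8_tensionY.hdf5 reference that test_DADF5.py further below copies and inspects.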

@@ -0,0 +1,25 @@
4 header
grid a 8 b 5 c 4
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 22 2 2 1 21
1 1 6 26 2 2 5 25
1 1 10 30 2 2 9 29
1 1 14 34 2 2 13 33
1 1 18 38 2 2 17 37
1 1 3 23 2 2 2 22
1 1 7 27 2 2 6 26
1 1 11 31 2 2 10 30
1 1 15 35 2 2 14 34
1 1 19 39 2 2 18 38
1 1 4 24 2 2 3 23
1 1 8 28 2 2 7 27
1 1 12 32 2 2 11 31
1 1 16 36 2 2 15 35
1 1 20 40 2 2 19 39
1 1 5 25 2 2 4 24
1 1 9 29 2 2 8 28
1 1 13 33 2 2 12 32
1 1 17 37 2 2 16 36
1 1 21 41 2 2 20 40
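This and the following geom files share one plain-text layout: a first line stating how many header lines follow, keyword lines for grid (number of points along x, y, z), size, origin and homogenization, then one microstructure index per grid point, listed x-fastest, then y, then z. A minimal reading sketch (illustrative; the damask.Geom class exercised in the tests further below is the supported reader, and this ignores the packed 'a to b' notation Geom can also write):

import numpy as np

def read_geom(fname):
    """Illustrative only: parse the geom layout shown above."""
    with open(fname) as f:
        lines = f.read().splitlines()
    n_header = int(lines[0].split()[0])                             # '4 header' -> 4 keyword lines follow
    header   = {l.split()[0]: l.split()[1:] for l in lines[1:1+n_header]}
    grid     = np.array(header['grid'][1::2],dtype=int)             # ['a','8','b','5','c','4'] -> [8 5 4]
    size     = np.array(header['size'][1::2],dtype=float)
    flat     = np.array(' '.join(lines[1+n_header:]).split(),dtype=int)
    return flat.reshape(grid,order='F'), size                       # Fortran order: x runs fastest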

@@ -0,0 +1,25 @@
4 header
grid a 8 b 5 c 4
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 1 2 2 2 1 1
1 1 1 2 2 2 2 1
1 1 1 6 2 2 2 5
1 1 1 10 2 2 2 9
1 1 1 14 2 2 2 13
1 1 1 2 2 2 2 1
1 1 1 2 2 2 2 1
1 1 1 6 2 2 2 5
1 1 1 10 2 2 2 9
1 1 1 14 2 2 2 13
1 1 1 3 2 2 2 2
1 1 1 3 2 2 2 2
1 1 1 7 2 2 2 6
1 1 1 11 2 2 2 10
1 1 1 15 2 2 2 14
1 1 1 4 2 2 2 3
1 1 1 4 2 2 2 3
1 1 1 8 2 2 2 7
1 1 1 12 2 2 2 11
1 1 1 16 2 2 2 15

@@ -0,0 +1,25 @@
4 header
grid a 8 b 5 c 4
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 1 2 2 2 2 21
1 1 1 2 2 2 2 21
1 1 1 2 2 2 2 25
1 1 1 2 2 2 2 29
1 1 1 2 2 2 2 37
1 1 1 2 2 2 2 21
1 1 1 2 2 2 2 21
1 1 1 2 2 2 2 25
1 1 1 2 2 2 2 29
1 1 1 2 2 2 2 37
1 1 1 2 2 2 2 22
1 1 1 2 2 2 2 22
1 1 1 2 2 2 2 26
1 1 1 2 2 2 2 30
1 1 1 2 2 2 2 38
1 1 1 2 2 2 2 24
1 1 1 2 2 2 2 24
1 1 1 2 2 2 2 28
1 1 1 2 2 2 2 32
1 1 1 2 2 2 2 40

@@ -0,0 +1,25 @@
4 header
grid a 8 b 5 c 4
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 1 2 2 2 2 2
1 1 1 2 2 2 2 2
1 1 1 2 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 2 2 2 2 2
1 1 1 2 2 2 2 2
1 1 1 2 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 2 2 2 2 2
1 1 1 2 2 2 2 2
1 1 1 2 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 1 2 2 2 2
1 1 1 1 2 2 2 2

@@ -0,0 +1,85 @@
4 header
grid a 16 b 10 c 8
size x 1.6e-05 y 1e-05 z 8e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 22 2 2 1 21 21 1 2 2 22 2 1 1
1 1 6 26 2 2 5 25 25 5 2 2 26 6 1 1
1 1 10 30 2 2 9 29 29 9 2 2 30 10 1 1
1 1 14 34 2 2 13 33 33 13 2 2 34 14 1 1
1 1 18 38 2 2 17 37 37 17 2 2 38 18 1 1
1 1 18 38 2 2 17 37 37 17 2 2 38 18 1 1
1 1 14 34 2 2 13 33 33 13 2 2 34 14 1 1
1 1 10 30 2 2 9 29 29 9 2 2 30 10 1 1
1 1 6 26 2 2 5 25 25 5 2 2 26 6 1 1
1 1 2 22 2 2 1 21 21 1 2 2 22 2 1 1
1 1 3 23 2 2 2 22 22 2 2 2 23 3 1 1
1 1 7 27 2 2 6 26 26 6 2 2 27 7 1 1
1 1 11 31 2 2 10 30 30 10 2 2 31 11 1 1
1 1 15 35 2 2 14 34 34 14 2 2 35 15 1 1
1 1 19 39 2 2 18 38 38 18 2 2 39 19 1 1
1 1 19 39 2 2 18 38 38 18 2 2 39 19 1 1
1 1 15 35 2 2 14 34 34 14 2 2 35 15 1 1
1 1 11 31 2 2 10 30 30 10 2 2 31 11 1 1
1 1 7 27 2 2 6 26 26 6 2 2 27 7 1 1
1 1 3 23 2 2 2 22 22 2 2 2 23 3 1 1
1 1 4 24 2 2 3 23 23 3 2 2 24 4 1 1
1 1 8 28 2 2 7 27 27 7 2 2 28 8 1 1
1 1 12 32 2 2 11 31 31 11 2 2 32 12 1 1
1 1 16 36 2 2 15 35 35 15 2 2 36 16 1 1
1 1 20 40 2 2 19 39 39 19 2 2 40 20 1 1
1 1 20 40 2 2 19 39 39 19 2 2 40 20 1 1
1 1 16 36 2 2 15 35 35 15 2 2 36 16 1 1
1 1 12 32 2 2 11 31 31 11 2 2 32 12 1 1
1 1 8 28 2 2 7 27 27 7 2 2 28 8 1 1
1 1 4 24 2 2 3 23 23 3 2 2 24 4 1 1
1 1 5 25 2 2 4 24 24 4 2 2 25 5 1 1
1 1 9 29 2 2 8 28 28 8 2 2 29 9 1 1
1 1 13 33 2 2 12 32 32 12 2 2 33 13 1 1
1 1 17 37 2 2 16 36 36 16 2 2 37 17 1 1
1 1 21 41 2 2 20 40 40 20 2 2 41 21 1 1
1 1 21 41 2 2 20 40 40 20 2 2 41 21 1 1
1 1 17 37 2 2 16 36 36 16 2 2 37 17 1 1
1 1 13 33 2 2 12 32 32 12 2 2 33 13 1 1
1 1 9 29 2 2 8 28 28 8 2 2 29 9 1 1
1 1 5 25 2 2 4 24 24 4 2 2 25 5 1 1
1 1 5 25 2 2 4 24 24 4 2 2 25 5 1 1
1 1 9 29 2 2 8 28 28 8 2 2 29 9 1 1
1 1 13 33 2 2 12 32 32 12 2 2 33 13 1 1
1 1 17 37 2 2 16 36 36 16 2 2 37 17 1 1
1 1 21 41 2 2 20 40 40 20 2 2 41 21 1 1
1 1 21 41 2 2 20 40 40 20 2 2 41 21 1 1
1 1 17 37 2 2 16 36 36 16 2 2 37 17 1 1
1 1 13 33 2 2 12 32 32 12 2 2 33 13 1 1
1 1 9 29 2 2 8 28 28 8 2 2 29 9 1 1
1 1 5 25 2 2 4 24 24 4 2 2 25 5 1 1
1 1 4 24 2 2 3 23 23 3 2 2 24 4 1 1
1 1 8 28 2 2 7 27 27 7 2 2 28 8 1 1
1 1 12 32 2 2 11 31 31 11 2 2 32 12 1 1
1 1 16 36 2 2 15 35 35 15 2 2 36 16 1 1
1 1 20 40 2 2 19 39 39 19 2 2 40 20 1 1
1 1 20 40 2 2 19 39 39 19 2 2 40 20 1 1
1 1 16 36 2 2 15 35 35 15 2 2 36 16 1 1
1 1 12 32 2 2 11 31 31 11 2 2 32 12 1 1
1 1 8 28 2 2 7 27 27 7 2 2 28 8 1 1
1 1 4 24 2 2 3 23 23 3 2 2 24 4 1 1
1 1 3 23 2 2 2 22 22 2 2 2 23 3 1 1
1 1 7 27 2 2 6 26 26 6 2 2 27 7 1 1
1 1 11 31 2 2 10 30 30 10 2 2 31 11 1 1
1 1 15 35 2 2 14 34 34 14 2 2 35 15 1 1
1 1 19 39 2 2 18 38 38 18 2 2 39 19 1 1
1 1 19 39 2 2 18 38 38 18 2 2 39 19 1 1
1 1 15 35 2 2 14 34 34 14 2 2 35 15 1 1
1 1 11 31 2 2 10 30 30 10 2 2 31 11 1 1
1 1 7 27 2 2 6 26 26 6 2 2 27 7 1 1
1 1 3 23 2 2 2 22 22 2 2 2 23 3 1 1
1 1 2 22 2 2 1 21 21 1 2 2 22 2 1 1
1 1 6 26 2 2 5 25 25 5 2 2 26 6 1 1
1 1 10 30 2 2 9 29 29 9 2 2 30 10 1 1
1 1 14 34 2 2 13 33 33 13 2 2 34 14 1 1
1 1 18 38 2 2 17 37 37 17 2 2 38 18 1 1
1 1 18 38 2 2 17 37 37 17 2 2 38 18 1 1
1 1 14 34 2 2 13 33 33 13 2 2 34 14 1 1
1 1 10 30 2 2 9 29 29 9 2 2 30 10 1 1
1 1 6 26 2 2 5 25 25 5 2 2 26 6 1 1
1 1 2 22 2 2 1 21 21 1 2 2 22 2 1 1

@@ -0,0 +1,25 @@
4 header
grid a 14 b 5 c 4
size x 1.4e-05 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 22 2 2 1 21 1 2 2 22 2 1
1 1 6 26 2 2 5 25 5 2 2 26 6 1
1 1 10 30 2 2 9 29 9 2 2 30 10 1
1 1 14 34 2 2 13 33 13 2 2 34 14 1
1 1 18 38 2 2 17 37 17 2 2 38 18 1
1 1 3 23 2 2 2 22 2 2 2 23 3 1
1 1 7 27 2 2 6 26 6 2 2 27 7 1
1 1 11 31 2 2 10 30 10 2 2 31 11 1
1 1 15 35 2 2 14 34 14 2 2 35 15 1
1 1 19 39 2 2 18 38 18 2 2 39 19 1
1 1 4 24 2 2 3 23 3 2 2 24 4 1
1 1 8 28 2 2 7 27 7 2 2 28 8 1
1 1 12 32 2 2 11 31 11 2 2 32 12 1
1 1 16 36 2 2 15 35 15 2 2 36 16 1
1 1 20 40 2 2 19 39 19 2 2 40 20 1
1 1 5 25 2 2 4 24 4 2 2 25 5 1
1 1 9 29 2 2 8 28 8 2 2 29 9 1
1 1 13 33 2 2 12 32 12 2 2 33 13 1
1 1 17 37 2 2 16 36 16 2 2 37 17 1
1 1 21 41 2 2 20 40 20 2 2 41 21 1

@@ -0,0 +1,53 @@
4 header
grid a 8 b 8 c 6
size x 8e-06 y 8.000000000000001e-06 z 6e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 22 2 2 1 21
1 1 6 26 2 2 5 25
1 1 10 30 2 2 9 29
1 1 14 34 2 2 13 33
1 1 18 38 2 2 17 37
1 1 14 34 2 2 13 33
1 1 10 30 2 2 9 29
1 1 6 26 2 2 5 25
1 1 3 23 2 2 2 22
1 1 7 27 2 2 6 26
1 1 11 31 2 2 10 30
1 1 15 35 2 2 14 34
1 1 19 39 2 2 18 38
1 1 15 35 2 2 14 34
1 1 11 31 2 2 10 30
1 1 7 27 2 2 6 26
1 1 4 24 2 2 3 23
1 1 8 28 2 2 7 27
1 1 12 32 2 2 11 31
1 1 16 36 2 2 15 35
1 1 20 40 2 2 19 39
1 1 16 36 2 2 15 35
1 1 12 32 2 2 11 31
1 1 8 28 2 2 7 27
1 1 5 25 2 2 4 24
1 1 9 29 2 2 8 28
1 1 13 33 2 2 12 32
1 1 17 37 2 2 16 36
1 1 21 41 2 2 20 40
1 1 17 37 2 2 16 36
1 1 13 33 2 2 12 32
1 1 9 29 2 2 8 28
1 1 4 24 2 2 3 23
1 1 8 28 2 2 7 27
1 1 12 32 2 2 11 31
1 1 16 36 2 2 15 35
1 1 20 40 2 2 19 39
1 1 16 36 2 2 15 35
1 1 12 32 2 2 11 31
1 1 8 28 2 2 7 27
1 1 3 23 2 2 2 22
1 1 7 27 2 2 6 26
1 1 11 31 2 2 10 30
1 1 15 35 2 2 14 34
1 1 19 39 2 2 18 38
1 1 15 35 2 2 14 34
1 1 11 31 2 2 10 30
1 1 7 27 2 2 6 26

@@ -0,0 +1,53 @@
4 header
grid a 14 b 8 c 6
size x 1.4e-05 y 8.000000000000001e-06 z 6e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 22 2 2 1 21 1 2 2 22 2 1
1 1 6 26 2 2 5 25 5 2 2 26 6 1
1 1 10 30 2 2 9 29 9 2 2 30 10 1
1 1 14 34 2 2 13 33 13 2 2 34 14 1
1 1 18 38 2 2 17 37 17 2 2 38 18 1
1 1 14 34 2 2 13 33 13 2 2 34 14 1
1 1 10 30 2 2 9 29 9 2 2 30 10 1
1 1 6 26 2 2 5 25 5 2 2 26 6 1
1 1 3 23 2 2 2 22 2 2 2 23 3 1
1 1 7 27 2 2 6 26 6 2 2 27 7 1
1 1 11 31 2 2 10 30 10 2 2 31 11 1
1 1 15 35 2 2 14 34 14 2 2 35 15 1
1 1 19 39 2 2 18 38 18 2 2 39 19 1
1 1 15 35 2 2 14 34 14 2 2 35 15 1
1 1 11 31 2 2 10 30 10 2 2 31 11 1
1 1 7 27 2 2 6 26 6 2 2 27 7 1
1 1 4 24 2 2 3 23 3 2 2 24 4 1
1 1 8 28 2 2 7 27 7 2 2 28 8 1
1 1 12 32 2 2 11 31 11 2 2 32 12 1
1 1 16 36 2 2 15 35 15 2 2 36 16 1
1 1 20 40 2 2 19 39 19 2 2 40 20 1
1 1 16 36 2 2 15 35 15 2 2 36 16 1
1 1 12 32 2 2 11 31 11 2 2 32 12 1
1 1 8 28 2 2 7 27 7 2 2 28 8 1
1 1 5 25 2 2 4 24 4 2 2 25 5 1
1 1 9 29 2 2 8 28 8 2 2 29 9 1
1 1 13 33 2 2 12 32 12 2 2 33 13 1
1 1 17 37 2 2 16 36 16 2 2 37 17 1
1 1 21 41 2 2 20 40 20 2 2 41 21 1
1 1 17 37 2 2 16 36 16 2 2 37 17 1
1 1 13 33 2 2 12 32 12 2 2 33 13 1
1 1 9 29 2 2 8 28 8 2 2 29 9 1
1 1 4 24 2 2 3 23 3 2 2 24 4 1
1 1 8 28 2 2 7 27 7 2 2 28 8 1
1 1 12 32 2 2 11 31 11 2 2 32 12 1
1 1 16 36 2 2 15 35 15 2 2 36 16 1
1 1 20 40 2 2 19 39 19 2 2 40 20 1
1 1 16 36 2 2 15 35 15 2 2 36 16 1
1 1 12 32 2 2 11 31 11 2 2 32 12 1
1 1 8 28 2 2 7 27 7 2 2 28 8 1
1 1 3 23 2 2 2 22 2 2 2 23 3 1
1 1 7 27 2 2 6 26 6 2 2 27 7 1
1 1 11 31 2 2 10 30 10 2 2 31 11 1
1 1 15 35 2 2 14 34 14 2 2 35 15 1
1 1 19 39 2 2 18 38 18 2 2 39 19 1
1 1 15 35 2 2 14 34 14 2 2 35 15 1
1 1 11 31 2 2 10 30 10 2 2 31 11 1
1 1 7 27 2 2 6 26 6 2 2 27 7 1

@@ -0,0 +1,105 @@
4 header
grid a 10 b 10 c 10
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 2 22 2 2 2 1 21
1 1 2 2 22 2 2 2 1 21
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 18 18 38 2 2 2 17 37
1 1 18 18 38 2 2 2 17 37
1 1 2 2 22 2 2 2 1 21
1 1 2 2 22 2 2 2 1 21
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 18 18 38 2 2 2 17 37
1 1 18 18 38 2 2 2 17 37
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 5 5 25 2 2 2 4 24
1 1 5 5 25 2 2 2 4 24
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 21 21 41 2 2 2 20 40
1 1 21 21 41 2 2 2 20 40
1 1 5 5 25 2 2 2 4 24
1 1 5 5 25 2 2 2 4 24
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 21 21 41 2 2 2 20 40
1 1 21 21 41 2 2 2 20 40

@@ -0,0 +1,115 @@
4 header
grid a 10 b 11 c 10
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 2 22 2 2 2 1 21
1 1 2 2 22 2 2 2 1 21
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 18 18 38 2 2 2 17 37
1 1 18 18 38 2 2 2 17 37
1 1 2 2 22 2 2 2 1 21
1 1 2 2 22 2 2 2 1 21
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 18 18 38 2 2 2 17 37
1 1 18 18 38 2 2 2 17 37
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 5 5 25 2 2 2 4 24
1 1 5 5 25 2 2 2 4 24
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 21 21 41 2 2 2 20 40
1 1 21 21 41 2 2 2 20 40
1 1 5 5 25 2 2 2 4 24
1 1 5 5 25 2 2 2 4 24
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 21 21 41 2 2 2 20 40
1 1 21 21 41 2 2 2 20 40

@@ -0,0 +1,135 @@
4 header
grid a 10 b 13 c 10
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 2 22 2 2 2 1 21
1 1 2 2 22 2 2 2 1 21
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 18 18 38 2 2 2 17 37
1 1 18 18 38 2 2 2 17 37
1 1 2 2 22 2 2 2 1 21
1 1 2 2 22 2 2 2 1 21
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 18 18 38 2 2 2 17 37
1 1 18 18 38 2 2 2 17 37
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 3 3 23 2 2 2 2 22
1 1 3 3 23 2 2 2 2 22
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 7 7 27 2 2 2 6 26
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 11 11 31 2 2 2 10 30
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 15 15 35 2 2 2 14 34
1 1 19 19 39 2 2 2 18 38
1 1 19 19 39 2 2 2 18 38
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 4 4 24 2 2 2 3 23
1 1 4 4 24 2 2 2 3 23
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 8 8 28 2 2 2 7 27
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 12 12 32 2 2 2 11 31
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 16 16 36 2 2 2 15 35
1 1 20 20 40 2 2 2 19 39
1 1 20 20 40 2 2 2 19 39
1 1 5 5 25 2 2 2 4 24
1 1 5 5 25 2 2 2 4 24
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 21 21 41 2 2 2 20 40
1 1 21 21 41 2 2 2 20 40
1 1 5 5 25 2 2 2 4 24
1 1 5 5 25 2 2 2 4 24
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 21 21 41 2 2 2 20 40
1 1 21 21 41 2 2 2 20 40

@@ -0,0 +1,45 @@
4 header
grid a 10 b 20 c 2
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 2 22 2 2 2 1 21
1 1 2 2 22 2 2 2 1 21
1 1 2 2 22 2 2 2 1 21
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 6 6 26 2 2 2 5 25
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 10 10 30 2 2 2 9 29
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 14 14 34 2 2 2 13 33
1 1 18 18 38 2 2 2 17 37
1 1 18 18 38 2 2 2 17 37
1 1 18 18 38 2 2 2 17 37
1 1 5 5 25 2 2 2 4 24
1 1 5 5 25 2 2 2 4 24
1 1 5 5 25 2 2 2 4 24
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 9 9 29 2 2 2 8 28
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 13 13 33 2 2 2 12 32
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 17 17 37 2 2 2 16 36
1 1 21 21 41 2 2 2 20 40
1 1 21 21 41 2 2 2 20 40
1 1 21 21 41 2 2 2 20 40

@@ -0,0 +1,85 @@
4 header
grid a 5 b 4 c 20
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 2 2 2 21
1 6 2 2 25
1 14 2 2 33
1 18 2 2 37
1 2 2 2 21
1 6 2 2 25
1 14 2 2 33
1 18 2 2 37
1 2 2 2 21
1 6 2 2 25
1 14 2 2 33
1 18 2 2 37
1 2 2 2 21
1 6 2 2 25
1 14 2 2 33
1 18 2 2 37
1 3 2 2 22
1 7 2 2 26
1 15 2 2 34
1 19 2 2 38
1 3 2 2 22
1 7 2 2 26
1 15 2 2 34
1 19 2 2 38
1 3 2 2 22
1 7 2 2 26
1 15 2 2 34
1 19 2 2 38
1 3 2 2 22
1 7 2 2 26
1 15 2 2 34
1 19 2 2 38
1 3 2 2 22
1 7 2 2 26
1 15 2 2 34
1 19 2 2 38
1 3 2 2 22
1 7 2 2 26
1 15 2 2 34
1 19 2 2 38
1 4 2 2 23
1 8 2 2 27
1 16 2 2 35
1 20 2 2 39
1 4 2 2 23
1 8 2 2 27
1 16 2 2 35
1 20 2 2 39
1 4 2 2 23
1 8 2 2 27
1 16 2 2 35
1 20 2 2 39
1 4 2 2 23
1 8 2 2 27
1 16 2 2 35
1 20 2 2 39
1 4 2 2 23
1 8 2 2 27
1 16 2 2 35
1 20 2 2 39
1 4 2 2 23
1 8 2 2 27
1 16 2 2 35
1 20 2 2 39
1 5 2 2 24
1 9 2 2 28
1 17 2 2 36
1 21 2 2 40
1 5 2 2 24
1 9 2 2 28
1 17 2 2 36
1 21 2 2 40
1 5 2 2 24
1 9 2 2 28
1 17 2 2 36
1 21 2 2 40
1 5 2 2 24
1 9 2 2 28
1 17 2 2 36
1 21 2 2 40

@@ -0,0 +1,125 @@
4 header
grid a 8 b 10 c 12
size x 8e-06 y 5e-06 z 4e-06
origin x 0.0 y 0.0 z 0.0
homogenization 1
1 1 2 22 2 2 1 21
1 1 2 22 2 2 1 21
1 1 6 26 2 2 5 25
1 1 6 26 2 2 5 25
1 1 10 30 2 2 9 29
1 1 10 30 2 2 9 29
1 1 14 34 2 2 13 33
1 1 14 34 2 2 13 33
1 1 18 38 2 2 17 37
1 1 18 38 2 2 17 37
1 1 2 22 2 2 1 21
1 1 2 22 2 2 1 21
1 1 6 26 2 2 5 25
1 1 6 26 2 2 5 25
1 1 10 30 2 2 9 29
1 1 10 30 2 2 9 29
1 1 14 34 2 2 13 33
1 1 14 34 2 2 13 33
1 1 18 38 2 2 17 37
1 1 18 38 2 2 17 37
1 1 3 23 2 2 2 22
1 1 3 23 2 2 2 22
1 1 7 27 2 2 6 26
1 1 7 27 2 2 6 26
1 1 11 31 2 2 10 30
1 1 11 31 2 2 10 30
1 1 15 35 2 2 14 34
1 1 15 35 2 2 14 34
1 1 19 39 2 2 18 38
1 1 19 39 2 2 18 38
1 1 3 23 2 2 2 22
1 1 3 23 2 2 2 22
1 1 7 27 2 2 6 26
1 1 7 27 2 2 6 26
1 1 11 31 2 2 10 30
1 1 11 31 2 2 10 30
1 1 15 35 2 2 14 34
1 1 15 35 2 2 14 34
1 1 19 39 2 2 18 38
1 1 19 39 2 2 18 38
1 1 3 23 2 2 2 22
1 1 3 23 2 2 2 22
1 1 7 27 2 2 6 26
1 1 7 27 2 2 6 26
1 1 11 31 2 2 10 30
1 1 11 31 2 2 10 30
1 1 15 35 2 2 14 34
1 1 15 35 2 2 14 34
1 1 19 39 2 2 18 38
1 1 19 39 2 2 18 38
1 1 3 23 2 2 2 22
1 1 3 23 2 2 2 22
1 1 7 27 2 2 6 26
1 1 7 27 2 2 6 26
1 1 11 31 2 2 10 30
1 1 11 31 2 2 10 30
1 1 15 35 2 2 14 34
1 1 15 35 2 2 14 34
1 1 19 39 2 2 18 38
1 1 19 39 2 2 18 38
1 1 4 24 2 2 3 23
1 1 4 24 2 2 3 23
1 1 8 28 2 2 7 27
1 1 8 28 2 2 7 27
1 1 12 32 2 2 11 31
1 1 12 32 2 2 11 31
1 1 16 36 2 2 15 35
1 1 16 36 2 2 15 35
1 1 20 40 2 2 19 39
1 1 20 40 2 2 19 39
1 1 4 24 2 2 3 23
1 1 4 24 2 2 3 23
1 1 8 28 2 2 7 27
1 1 8 28 2 2 7 27
1 1 12 32 2 2 11 31
1 1 12 32 2 2 11 31
1 1 16 36 2 2 15 35
1 1 16 36 2 2 15 35
1 1 20 40 2 2 19 39
1 1 20 40 2 2 19 39
1 1 4 24 2 2 3 23
1 1 4 24 2 2 3 23
1 1 8 28 2 2 7 27
1 1 8 28 2 2 7 27
1 1 12 32 2 2 11 31
1 1 12 32 2 2 11 31
1 1 16 36 2 2 15 35
1 1 16 36 2 2 15 35
1 1 20 40 2 2 19 39
1 1 20 40 2 2 19 39
1 1 4 24 2 2 3 23
1 1 4 24 2 2 3 23
1 1 8 28 2 2 7 27
1 1 8 28 2 2 7 27
1 1 12 32 2 2 11 31
1 1 12 32 2 2 11 31
1 1 16 36 2 2 15 35
1 1 16 36 2 2 15 35
1 1 20 40 2 2 19 39
1 1 20 40 2 2 19 39
1 1 5 25 2 2 4 24
1 1 5 25 2 2 4 24
1 1 9 29 2 2 8 28
1 1 9 29 2 2 8 28
1 1 13 33 2 2 12 32
1 1 13 33 2 2 12 32
1 1 17 37 2 2 16 36
1 1 17 37 2 2 16 36
1 1 21 41 2 2 20 40
1 1 21 41 2 2 20 40
1 1 5 25 2 2 4 24
1 1 5 25 2 2 4 24
1 1 9 29 2 2 8 28
1 1 9 29 2 2 8 28
1 1 13 33 2 2 12 32
1 1 13 33 2 2 12 32
1 1 17 37 2 2 16 36
1 1 17 37 2 2 16 36
1 1 21 41 2 2 20 40
1 1 21 41 2 2 20 40

@@ -0,0 +1,4 @@
1 header
a b
1.0 hallo
0.1 "hallo test"

@@ -0,0 +1,6 @@
1 header
a b 1_c 2_c
1 2 3 4
5 6 7 8
9 10. 12. 12
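These two small tables are presumably the mixed-whitespace and mixed-datatype references read by test_read_strange in test_Table.py below. Their headers also show the column-labelling convention the Table class relies on: a multi-component dataset 'c' is flattened into 1-based columns '1_c' and '2_c' — the same scheme behind the '5_F' and '4_F' lookups in the tests. Illustration with plain numpy:

import numpy as np

labels = ['a','b','1_c','2_c']
data   = np.array([[1.0,  2.0,  3.0,  4.0],
                   [5.0,  6.0,  7.0,  8.0],
                   [9.0, 10.0, 12.0, 12.0]])
c = data[:,[labels.index('1_c'),labels.index('2_c')]]               # reassemble 'c' -> shape (3,2)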

@@ -0,0 +1,82 @@
import shutil
import os

import pytest
import numpy as np

from damask import DADF5
from damask import mechanics


@pytest.fixture
def default(tmp_path,reference_dir):
    """Small DADF5 file in temp location for modification."""
    fname = '12grains6x7x8_tensionY.hdf5'
    shutil.copy(os.path.join(reference_dir,fname),tmp_path)
    f = DADF5(os.path.join(tmp_path,fname))
    f.set_by_time(20.0,20.0)
    return f

@pytest.fixture
def reference_dir(reference_dir_base):
    """Directory containing reference results."""
    return os.path.join(reference_dir_base,'DADF5')


class TestDADF5:

    def test_time_increments(self,default):
        shape = default.read_dataset(default.get_dataset_location('F'),0).shape
        default.set_by_time(0.0,20.0)
        for i in default.iter_visible('increments'):
            assert shape == default.read_dataset(default.get_dataset_location('F'),0).shape

    def test_add_absolute(self,default):
        default.add_absolute('Fe')
        loc = {'Fe':   default.get_dataset_location('Fe'),
               '|Fe|': default.get_dataset_location('|Fe|')}
        in_memory = np.abs(default.read_dataset(loc['Fe'],0))
        in_file   = default.read_dataset(loc['|Fe|'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_Cauchy(self,default):
        default.add_Cauchy('P','F')
        loc = {'F':    default.get_dataset_location('F'),
               'P':    default.get_dataset_location('P'),
               'sigma':default.get_dataset_location('sigma')}
        in_memory = mechanics.Cauchy(default.read_dataset(loc['F'],0),
                                     default.read_dataset(loc['P'],0))
        in_file   = default.read_dataset(loc['sigma'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_determinant(self,default):
        default.add_determinant('P')
        loc = {'P':     default.get_dataset_location('P'),
               'det(P)':default.get_dataset_location('det(P)')}
        in_memory = np.linalg.det(default.read_dataset(loc['P'],0)).reshape((-1,1))
        in_file   = default.read_dataset(loc['det(P)'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_deviator(self,default):
        default.add_deviator('P')
        loc = {'P'  :default.get_dataset_location('P'),
               's_P':default.get_dataset_location('s_P')}
        in_memory = mechanics.deviatoric_part(default.read_dataset(loc['P'],0))
        in_file   = default.read_dataset(loc['s_P'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_norm(self,default):
        default.add_norm('F',1)
        loc = {'F':    default.get_dataset_location('F'),
               '|F|_1':default.get_dataset_location('|F|_1')}
        in_memory = np.linalg.norm(default.read_dataset(loc['F'],0),ord=1,axis=(1,2),keepdims=True)
        in_file   = default.read_dataset(loc['|F|_1'],0)
        assert np.allclose(in_memory,in_file)

    def test_add_spherical(self,default):
        default.add_spherical('P')
        loc = {'P':   default.get_dataset_location('P'),
               'p_P': default.get_dataset_location('p_P')}
        in_memory = mechanics.spherical_part(default.read_dataset(loc['P'],0)).reshape(-1,1)
        in_file   = default.read_dataset(loc['p_P'],0)
        assert np.allclose(in_memory,in_file)
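The fixtures reference_dir_base and update used throughout these test modules come from a conftest.py that is not part of this excerpt. A plausible sketch of what it has to provide (the names match the tests; the bodies are an assumption):

import os
import pytest

def pytest_addoption(parser):
    parser.addoption('--update', action='store_true', default=False,
                     help='overwrite reference results instead of comparing against them')

@pytest.fixture
def update(request):
    """True when reference files should be regenerated (assumed wiring)."""
    return request.config.getoption('--update')

@pytest.fixture
def reference_dir_base():
    """Root of the stored reference results (assumed location)."""
    return os.path.join(os.path.dirname(__file__),'reference')

With such wiring, pytest --update would rewrite the mirror_*/clean_*/scale_*.geom references listed above instead of asserting against them.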

python/tests/test_Geom.py
@@ -0,0 +1,99 @@
import copy
import os

import pytest
import numpy as np

from damask import Geom


def geom_equal(a,b):
    return np.all(a.get_microstructure() == b.get_microstructure()) and \
           np.all(a.get_size()           == b.get_size())           and \
           np.all(a.get_grid()           == b.get_grid())

@pytest.fixture
def default():
    """Simple geometry."""
    x=np.concatenate((np.ones(40,dtype=int),
                      np.arange(2,42),
                      np.ones(40,dtype=int)*2,
                      np.arange(1,41))).reshape((8,5,4))
    return Geom(x,[8e-6,5e-6,4e-6])

@pytest.fixture
def reference_dir(reference_dir_base):
    """Directory containing reference results."""
    return os.path.join(reference_dir_base,'Geom')


class TestGeom:

    def test_update(self,default):
        modified = copy.deepcopy(default)
        modified.update(
                        default.get_microstructure(),
                        default.get_size(),
                        default.get_origin()
                       )
        assert geom_equal(modified,default)

    def test_write_read_str(self,default,tmpdir):
        default.to_file(str(tmpdir.join('default.geom')))
        new = Geom.from_file(str(tmpdir.join('default.geom')))
        assert geom_equal(new,default)

    def test_write_read_file(self,default,tmpdir):
        with open(tmpdir.join('default.geom'),'w') as f:
            default.to_file(f)
        with open(tmpdir.join('default.geom')) as f:
            new = Geom.from_file(f)
        assert geom_equal(new,default)

    @pytest.mark.parametrize('pack',[True,False])
    def test_pack(self,default,tmpdir,pack):
        default.to_file(tmpdir.join('default.geom'),pack=pack)
        new = Geom.from_file(tmpdir.join('default.geom'))
        assert geom_equal(new,default)

    @pytest.mark.parametrize('directions,reflect',[
                                                   (['x'],        False),
                                                   (['x','y','z'],True),
                                                   (['z','x','y'],False),
                                                   (['y','z'],    False)
                                                  ]
                            )
    def test_mirror(self,default,update,reference_dir,directions,reflect):
        modified = copy.deepcopy(default)
        modified.mirror(directions,reflect)
        tag = 'directions={}_reflect={}'.format('-'.join(directions),reflect)
        reference = os.path.join(reference_dir,'mirror_{}.geom'.format(tag))
        if update: modified.to_file(reference)
        assert geom_equal(modified,Geom.from_file(reference))

    @pytest.mark.parametrize('stencil',[1,2,3,4])
    def test_clean(self,default,update,reference_dir,stencil):
        modified = copy.deepcopy(default)
        modified.clean(stencil)
        tag = 'stencil={}'.format(stencil)
        reference = os.path.join(reference_dir,'clean_{}.geom'.format(tag))
        if update: modified.to_file(reference)
        assert geom_equal(modified,Geom.from_file(reference))

    @pytest.mark.parametrize('grid',[
                                     (10,11,10),
                                     [10,13,10],
                                     np.array((10,10,10)),
                                     np.array((8, 10,12)),
                                     np.array((5, 4, 20)),
                                     np.array((10,20,2))
                                    ]
                            )
    def test_scale(self,default,update,reference_dir,grid):
        modified = copy.deepcopy(default)
        modified.scale(grid)
        tag = 'grid={}'.format('-'.join([str(x) for x in grid]))
        reference = os.path.join(reference_dir,'scale_{}.geom'.format(tag))
        if update: modified.to_file(reference)
        assert geom_equal(modified,Geom.from_file(reference))

@@ -0,0 +1,55 @@
import pytest
import numpy as np

from damask import Rotation

n = 1000


@pytest.fixture
def default():
    """A set of n random rotations."""
    return [Rotation.fromRandom() for r in range(n)]


class TestRotation:

    def test_Eulers(self,default):
        for rot in default:
            assert np.allclose(rot.asQuaternion(),
                               Rotation.fromEulers(rot.asEulers()).asQuaternion())

    def test_AxisAngle(self,default):
        for rot in default:
            assert np.allclose(rot.asEulers(),
                               Rotation.fromAxisAngle(rot.asAxisAngle()).asEulers())

    def test_Matrix(self,default):
        for rot in default:
            assert np.allclose(rot.asAxisAngle(),
                               Rotation.fromMatrix(rot.asMatrix()).asAxisAngle())

    def test_Rodrigues(self,default):
        for rot in default:
            assert np.allclose(rot.asMatrix(),
                               Rotation.fromRodrigues(rot.asRodrigues()).asMatrix())

    def test_Homochoric(self,default):
        for rot in default:
            assert np.allclose(rot.asRodrigues(),
                               Rotation.fromHomochoric(rot.asHomochoric()).asRodrigues())

    def test_Cubochoric(self,default):
        for rot in default:
            assert np.allclose(rot.asHomochoric(),
                               Rotation.fromCubochoric(rot.asCubochoric()).asHomochoric())

    def test_Quaternion(self,default):
        for rot in default:
            assert np.allclose(rot.asCubochoric(),
                               Rotation.fromQuaternion(rot.asQuaternion()).asCubochoric())

python/tests/test_Table.py
@@ -0,0 +1,128 @@
import os

import pytest
import numpy as np

from damask import Table


@pytest.fixture
def default():
    """Simple Table."""
    x = np.ones((5,13),dtype=float)
    return Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['test data','contains only ones'])

@pytest.fixture
def reference_dir(reference_dir_base):
    """Directory containing reference results."""
    return os.path.join(reference_dir_base,'Table')


class TestTable:

    def test_get_scalar(self,default):
        d = default.get('s')
        assert np.allclose(d,1.0) and d.shape[1:] == (1,)

    def test_get_vector(self,default):
        d = default.get('v')
        assert np.allclose(d,1.0) and d.shape[1:] == (3,)

    def test_get_tensor(self,default):
        d = default.get('F')
        assert np.allclose(d,1.0) and d.shape[1:] == (3,3)

    def test_get_component(self,default):
        d = default.get('5_F')
        assert np.allclose(d,1.0) and d.shape[1:] == (1,)

    def test_write_read_str(self,default,tmpdir):
        default.to_ASCII(str(tmpdir.join('default.txt')))
        new = Table.from_ASCII(str(tmpdir.join('default.txt')))
        assert all(default.data==new.data)

    def test_write_read_file(self,default,tmpdir):
        with open(tmpdir.join('default.txt'),'w') as f:
            default.to_ASCII(f)
        with open(tmpdir.join('default.txt')) as f:
            new = Table.from_ASCII(f)
        assert all(default.data==new.data)

    @pytest.mark.parametrize('fname',['datatype-mix.txt','whitespace-mix.txt'])
    def test_read_strange(self,reference_dir,fname):
        with open(os.path.join(reference_dir,fname)) as f:
            Table.from_ASCII(f)

    def test_set(self,default):
        default.set('F',np.zeros((5,3,3)),'set to zero')
        d = default.get('F')
        assert np.allclose(d,0.0) and d.shape[1:] == (3,3)

    def test_labels(self,default):
        assert default.labels == ['F','v','s']

    def test_add(self,default):
        d = np.random.random((5,9))
        default.add('nine',d,'random data')
        assert np.allclose(d,default.get('nine'))

    def test_rename_equivalent(self,default):
        v = default.get('v')
        default.rename('v','u')
        u = default.get('u')
        assert np.all(v == u)

    def test_rename_gone(self,default):
        default.rename('v','V')
        with pytest.raises(KeyError):
            default.get('v')

    def test_delete(self,default):
        default.delete('v')
        with pytest.raises(KeyError):
            default.get('v')

    def test_invalid_initialization(self):
        x = np.random.random((5,10))
        with pytest.raises(ValueError):
            Table(x,{'F':(3,3)})

    def test_invalid_set(self,default):
        x = default.get('v')
        with pytest.raises(ValueError):
            default.set('F',x,'does not work')

    def test_invalid_get(self,default):
        with pytest.raises(KeyError):
            default.get('n')

    def test_sort_scalar(self):
        x = np.random.random((5,13))
        t = Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['random test data'])
        unsort = t.get('s')
        t.sort_by('s')
        sort = t.get('s')
        assert np.all(np.sort(unsort,0)==sort)

    def test_sort_component(self):
        x = np.random.random((5,12))
        t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
        unsort = t.get('4_F')
        t.sort_by('4_F')
        sort = t.get('4_F')
        assert np.all(np.sort(unsort,0)==sort)

    def test_sort_revert(self):
        x = np.random.random((5,12))
        t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
        t.sort_by('4_F',ascending=False)
        sort = t.get('4_F')
        assert np.all(np.sort(sort,0)==sort[::-1,:])

    def test_sort(self):
        t = Table(np.array([[0,1,],[2,1,]]),
                  {'v':(2,)},
                  ['test data'])
        t.add('s',np.array(['b','a']))
        t.sort_by('s')
        assert np.all(t.get('1_v') == np.array([2,0]).reshape((2,1)))

@@ -0,0 +1,142 @@
import numpy as np
from damask import mechanics


class TestMechanics:

    n = 1000
    c = np.random.randint(n)

    def test_vectorize_Cauchy(self):
        P = np.random.random((self.n,3,3))
        F = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.Cauchy(F,P)[self.c],
                           mechanics.Cauchy(F[self.c],P[self.c]))

    def test_vectorize_strain_tensor(self):
        F = np.random.random((self.n,3,3))
        t = ['V','U'][np.random.randint(0,2)]
        m = np.random.random()*10. -5.0
        assert np.allclose(mechanics.strain_tensor(F,t,m)[self.c],
                           mechanics.strain_tensor(F[self.c],t,m))

    def test_vectorize_deviatoric_part(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.deviatoric_part(x)[self.c],
                           mechanics.deviatoric_part(x[self.c]))

    def test_vectorize_spherical_part(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.spherical_part(x)[self.c],
                           mechanics.spherical_part(x[self.c]))

    def test_vectorize_Mises_stress(self):
        sigma = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.Mises_stress(sigma)[self.c],
                           mechanics.Mises_stress(sigma[self.c]))

    def test_vectorize_Mises_strain(self):
        epsilon = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.Mises_strain(epsilon)[self.c],
                           mechanics.Mises_strain(epsilon[self.c]))

    def test_vectorize_symmetric(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.symmetric(x)[self.c],
                           mechanics.symmetric(x[self.c]))

    def test_vectorize_maximum_shear(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.maximum_shear(x)[self.c],
                           mechanics.maximum_shear(x[self.c]))

    def test_vectorize_principal_components(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.principal_components(x)[self.c],
                           mechanics.principal_components(x[self.c]))

    def test_vectorize_transpose(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.transpose(x)[self.c],
                           mechanics.transpose(x[self.c]))

    def test_vectorize_rotational_part(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.rotational_part(x)[self.c],
                           mechanics.rotational_part(x[self.c]))

    def test_vectorize_left_stretch(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.left_stretch(x)[self.c],
                           mechanics.left_stretch(x[self.c]))

    def test_vectorize_right_stretch(self):
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.right_stretch(x)[self.c],
                           mechanics.right_stretch(x[self.c]))

    def test_Cauchy(self):
        """Ensure Cauchy stress is symmetrized 1. Piola-Kirchhoff stress for no deformation."""
        P = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.Cauchy(np.broadcast_to(np.eye(3),(self.n,3,3)),P),
                           mechanics.symmetric(P))

    def test_strain_tensor_no_rotation(self):
        """Ensure that left and right stretch give same results for no rotation."""
        F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.random((self.n,3,3))
        m = np.random.random()*20.0-10.0
        assert np.allclose(mechanics.strain_tensor(F,'U',m),
                           mechanics.strain_tensor(F,'V',m))

    def test_strain_tensor_rotation(self):
        """Ensure that pure rotation results in no strain."""
        F = mechanics.rotational_part(np.random.random((self.n,3,3)))
        t = ['V','U'][np.random.randint(0,2)]
        m = np.random.random()*2.0 - 1.0
        assert np.allclose(mechanics.strain_tensor(F,t,m),
                           0.0)

    def test_spherical_deviatoric_part(self):
        """Ensure that full tensor is sum of spherical and deviatoric part."""
        x = np.random.random((self.n,3,3))
        sph = np.broadcast_to(np.eye(3),(self.n,3,3))\
            * np.repeat(mechanics.spherical_part(x),9).reshape(self.n,3,3)
        assert np.allclose(sph + mechanics.deviatoric_part(x),
                           x)

    def test_symmetric(self):
        """Ensure that a symmetric tensor is half of the sum of a tensor and its transpose."""
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.symmetric(x)*2.0,
                           mechanics.transpose(x)+x)

    def test_transpose(self):
        """Ensure that a symmetric tensor equals its transpose."""
        x = mechanics.symmetric(np.random.random((self.n,3,3)))
        assert np.allclose(mechanics.transpose(x),
                           x)

    def test_Mises(self):
        """Ensure that equivalent stress is 3/2 of equivalent strain."""
        x = np.random.random((self.n,3,3))
        assert np.allclose(mechanics.Mises_stress(x)/mechanics.Mises_strain(x),
                           1.5)
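The factor 1.5 in test_Mises follows directly from the definitions: both measures act on the deviator of the same input tensor x, so with s = e,

\sigma_\mathrm{vM} = \sqrt{\tfrac{3}{2}\,\mathbf{s}:\mathbf{s}}, \qquad
\varepsilon_\mathrm{vM} = \sqrt{\tfrac{2}{3}\,\mathbf{e}:\mathbf{e}}, \qquad
\frac{\sigma_\mathrm{vM}(x)}{\varepsilon_\mathrm{vM}(x)} = \sqrt{\frac{3/2}{2/3}} = \frac{3}{2}.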

@@ -178,19 +178,19 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
   if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) &
     CPFEM_dcsde = CPFEM_dcsde_knownGood

-!*** age results and write restart data if requested
+!*** age results
   if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) then
-    crystallite_F0  = crystallite_partionedF                          ! crystallite deformation (_subF is perturbed...)
+    crystallite_F0  = crystallite_partionedF                          ! crystallite deformation
     crystallite_Fp0 = crystallite_Fp                                  ! crystallite plastic deformation
     crystallite_Lp0 = crystallite_Lp                                  ! crystallite plastic velocity
     crystallite_Fi0 = crystallite_Fi                                  ! crystallite intermediate deformation
     crystallite_Li0 = crystallite_Li                                  ! crystallite intermediate velocity
     crystallite_S0  = crystallite_S                                   ! crystallite 2nd Piola Kirchhoff stress
-    forall ( i = 1:size(plasticState )) plasticState(i)%state0 = plasticState(i)%state ! copy state in this lenghty way because: A component cannot be an array if the encompassing structure is an array
+    forall (i = 1:size(plasticState)) plasticState(i)%state0 = plasticState(i)%state
     do i = 1, size(sourceState)
       do mySource = 1,phase_Nsources(i)
-        sourceState(i)%p(mySource)%state0 = sourceState(i)%p(mySource)%state ! copy state in this lenghty way because: A component cannot be an array if the encompassing structure is an array
+        sourceState(i)%p(mySource)%state0 = sourceState(i)%p(mySource)%state
     enddo; enddo
     if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt) then
       write(6,'(a)') '<< CPFEM >> aging states'
@@ -275,7 +275,6 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
       if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
         write(6,'(a,i8,1x,i2)') '<< CPFEM >> calculation for elFE ip ',elFE,ip
       call materialpoint_stressAndItsTangent(updateJaco, dt)           ! calculate stress and its tangent
-      call materialpoint_postResults()

 !* parallel computation and calulation not yet done
@@ -284,7 +283,6 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
         write(6,'(a,i8,a,i8)') '<< CPFEM >> calculation for elements ',FEsolving_execElem(1),&
                                ' to ',FEsolving_execElem(2)
       call materialpoint_stressAndItsTangent(updateJaco, dt)           ! calculate stress and its tangent (parallel execution inside)
-      call materialpoint_postResults()
       CPFEM_calc_done = .true.
     endif

@@ -11,7 +11,6 @@ module CPFEM2
   use FEsolving
   use math
   use rotations
-  use mesh
   use material
   use lattice
   use IO
@@ -25,15 +24,19 @@ module CPFEM2
   use crystallite
 #ifdef FEM
   use FEM_Zoo
+  use mesh
+#else
+  use mesh_grid
 #endif

   implicit none
   private

   public :: &
-    CPFEM_age, &
+    CPFEM_forward, &
     CPFEM_initAll, &
-    CPFEM_results
+    CPFEM_results, &
+    CPFEM_restartWrite

 contains
@@ -54,10 +57,10 @@ subroutine CPFEM_initAll
   call config_init
   call math_init
   call rotations_init
-  call mesh_init
   call lattice_init
   call HDF5_utilities_init
   call results_init
+  call mesh_init
   call material_init
   call constitutive_init
   call crystallite_init
@@ -90,24 +93,24 @@ subroutine CPFEM_init
   fileHandle = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5')

-  call HDF5_read(fileHandle,crystallite_F0, 'convergedF')
-  call HDF5_read(fileHandle,crystallite_Fp0,'convergedFp')
-  call HDF5_read(fileHandle,crystallite_Fi0,'convergedFi')
-  call HDF5_read(fileHandle,crystallite_Lp0,'convergedLp')
-  call HDF5_read(fileHandle,crystallite_Li0,'convergedLi')
-  call HDF5_read(fileHandle,crystallite_S0, 'convergedS')
+  call HDF5_read(fileHandle,crystallite_F0, 'F')
+  call HDF5_read(fileHandle,crystallite_Fp0,'Fp')
+  call HDF5_read(fileHandle,crystallite_Fi0,'Fi')
+  call HDF5_read(fileHandle,crystallite_Lp0,'Lp')
+  call HDF5_read(fileHandle,crystallite_Li0,'Li')
+  call HDF5_read(fileHandle,crystallite_S0, 'S')

-  groupPlasticID = HDF5_openGroup(fileHandle,'PlasticPhases')
+  groupPlasticID = HDF5_openGroup(fileHandle,'constituent')
   do ph = 1,size(phase_plasticity)
     write(PlasticItem,*) ph,'_'
-    call HDF5_read(groupPlasticID,plasticState(ph)%state0,trim(PlasticItem)//'convergedStateConst')
+    call HDF5_read(groupPlasticID,plasticState(ph)%state0,trim(PlasticItem)//'omega_plastic')
   enddo
   call HDF5_closeGroup(groupPlasticID)

-  groupHomogID = HDF5_openGroup(fileHandle,'HomogStates')
+  groupHomogID = HDF5_openGroup(fileHandle,'materialpoint')
   do homog = 1, material_Nhomogenization
     write(HomogItem,*) homog,'_'
-    call HDF5_read(groupHomogID,homogState(homog)%state0, trim(HomogItem)//'convergedStateHomog')
+    call HDF5_read(groupHomogID,homogState(homog)%state0, trim(HomogItem)//'omega_homogenization')
   enddo
   call HDF5_closeGroup(groupHomogID)
@@ -118,13 +121,12 @@ end subroutine CPFEM_init

 !--------------------------------------------------------------------------------------------------
-!> @brief forwards data after successful increment
-! ToDo: Any guessing for the current states possible?
+!> @brief Forward data after successful increment.
 !--------------------------------------------------------------------------------------------------
-subroutine CPFEM_age
+subroutine CPFEM_forward

-  integer :: i, ph, homog, mySource
-  character(len=32) :: rankStr, PlasticItem, HomogItem
-  integer(HID_T) :: fileHandle, groupPlastic, groupHomog
+  integer :: i, homog, mySource

   if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0) &
     write(6,'(a)') '<< CPFEM >> aging states'
@@ -149,46 +151,52 @@ subroutine CPFEM_age
     damageState (homog)%state0 = damageState (homog)%state
   enddo

-  if (restartWrite) then
-    if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0) &
-      write(6,'(a)') '<< CPFEM >> writing restart variables of last converged step to hdf5 file'
+end subroutine CPFEM_forward

+!--------------------------------------------------------------------------------------------------
+!> @brief Write current constitutive variables for restart to file.
+!--------------------------------------------------------------------------------------------------
+subroutine CPFEM_restartWrite

+  integer :: ph, homog
+  character(len=32) :: rankStr, PlasticItem, HomogItem
+  integer(HID_T) :: fileHandle, groupPlastic, groupHomog

+  write(6,'(a)') ' writing constitutive data required for restart to file';flush(6)

   write(rankStr,'(a1,i0)')'_',worldrank
   fileHandle = HDF5_openFile(trim(getSolverJobName())//trim(rankStr)//'.hdf5','a')

-  call HDF5_write(fileHandle,crystallite_F0, 'convergedF')
-  call HDF5_write(fileHandle,crystallite_Fp0, 'convergedFp')
-  call HDF5_write(fileHandle,crystallite_Fi0, 'convergedFi')
-  call HDF5_write(fileHandle,crystallite_Lp0, 'convergedLp')
-  call HDF5_write(fileHandle,crystallite_Li0, 'convergedLi')
-  call HDF5_write(fileHandle,crystallite_S0, 'convergedS')
+  call HDF5_write(fileHandle,crystallite_partionedF,'F')
+  call HDF5_write(fileHandle,crystallite_Fp, 'Fp')
+  call HDF5_write(fileHandle,crystallite_Fi, 'Fi')
+  call HDF5_write(fileHandle,crystallite_Lp, 'Lp')
+  call HDF5_write(fileHandle,crystallite_Li, 'Li')
+  call HDF5_write(fileHandle,crystallite_S, 'S')

-  groupPlastic = HDF5_addGroup(fileHandle,'PlasticPhases')
+  groupPlastic = HDF5_addGroup(fileHandle,'constituent')
   do ph = 1,size(phase_plasticity)
     write(PlasticItem,*) ph,'_'
-    call HDF5_write(groupPlastic,plasticState(ph)%state0,trim(PlasticItem)//'convergedStateConst')
+    call HDF5_write(groupPlastic,plasticState(ph)%state,trim(PlasticItem)//'omega_plastic')
   enddo
   call HDF5_closeGroup(groupPlastic)

-  groupHomog = HDF5_addGroup(fileHandle,'HomogStates')
+  groupHomog = HDF5_addGroup(fileHandle,'materialpoint')
   do homog = 1, material_Nhomogenization
     write(HomogItem,*) homog,'_'
-    call HDF5_write(groupHomog,homogState(homog)%state0,trim(HomogItem)//'convergedStateHomog')
+    call HDF5_write(groupHomog,homogState(homog)%state,trim(HomogItem)//'omega_homogenization')
   enddo
   call HDF5_closeGroup(groupHomog)

   call HDF5_closeFile(fileHandle)
-  restartWrite = .false.
-  endif

-  if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0) &
-    write(6,'(a)') '<< CPFEM >> done aging states'
-
-end subroutine CPFEM_age
+end subroutine CPFEM_restartWrite

 !--------------------------------------------------------------------------------------------------
-!> @brief triggers writing of the results
+!> @brief Trigger writing of results.
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_results(inc,time)
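The CPFEM2 changes above also rename the datasets in the per-rank restart file: the converged* names become plain field names (F, Fp, Fi, Lp, Li, S) and the state groups become constituent and materialpoint, holding <N>_omega_plastic and <N>_omega_homogenization entries. A hedged h5py sketch for inspecting such a file (the file name is illustrative; the code derives it from the solver job name and MPI rank):

import h5py

with h5py.File('job_0.hdf5','r') as f:                              # '<solverJobName>_<rank>.hdf5'
    F = f['F'][()]                                                  # last converged deformation gradients
    for name,dset in f['constituent'].items():                      # per-phase plastic state
        print(name,dset.shape)
    for name,dset in f['materialpoint'].items():                    # per-homogenization state
        print(name,dset.shape)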

@@ -21,7 +21,6 @@ module DAMASK_interface
   implicit none
   private
   character(len=4), dimension(2), parameter, public :: INPUTFILEEXTENSION = ['.pes','.inp']
-  character(len=4), parameter, public :: LOGFILEEXTENSION = '.log'

   public :: &
     DAMASK_interface_init, &

@@ -41,8 +41,7 @@ module DAMASK_interface
   implicit none
   private

-  character(len=4), parameter, public :: InputFileExtension = '.dat'
-  character(len=4), parameter, public :: LogFileExtension   = '.log'
+  character(len=4), parameter, public :: INPUTFILEEXTENSION = '.dat'

   public :: &
     DAMASK_interface_init, &
@@ -363,37 +362,3 @@ subroutine uedinc(inc,incsub)
   call CPFEM_results(inc,cptim)

 end subroutine uedinc

-!--------------------------------------------------------------------------------------------------
-!> @brief sets user defined output variables for Marc
-!> @details select a variable contour plotting (user subroutine).
-!--------------------------------------------------------------------------------------------------
-subroutine plotv(v,s,sp,etot,eplas,ecreep,t,m,nn,layer,ndi,nshear,jpltcd)
-  use prec
-  use mesh
-  use IO
-  use homogenization
-
-  implicit none
-  integer, intent(in) :: &
-    m, &                                                            !< element number
-    nn, &                                                           !< integration point number
-    layer, &                                                        !< layer number
-    ndi, &                                                          !< number of direct stress components
-    nshear, &                                                       !< number of shear stress components
-    jpltcd                                                          !< user variable index
-  real(pReal), dimension(*), intent(in) :: &
-    s, &                                                            !< stress array
-    sp, &                                                           !< stresses in preferred direction
-    etot, &                                                         !< total strain (generalized)
-    eplas, &                                                        !< total plastic strain
-    ecreep, &                                                       !< total creep strain
-    t                                                               !< current temperature
-  real(pReal), intent(out) :: &
-    v                                                               !< variable
-
-  if (jpltcd > materialpoint_sizeResults) call IO_error(700,jpltcd) ! complain about out of bounds error
-  v = materialpoint_results(jpltcd,nn,mesh_FEasCP('elem', m))
-
-end subroutine plotv

@@ -5,7 +5,6 @@
 !--------------------------------------------------------------------------------------------------
 module FEsolving
   use prec
-  use debug
   use IO
   use DAMASK_interface
@@ -13,10 +12,6 @@ module FEsolving
   private
   logical, public :: &
-#if defined(Marc4DAMASK) || defined(Abaqus)
-    restartRead   = .false., &                                      !< restart information to continue calculation from saved state
-#endif
-    restartWrite  = .false., &                                      !< write current state to enable restart
     terminallyIll = .false.                                         !< at least one material point is terminally ill

   integer, dimension(:,:), allocatable, public :: &
@@ -26,9 +21,7 @@ module FEsolving
 #if defined(Marc4DAMASK) || defined(Abaqus)
   logical, public, protected :: &
-    symmetricSolver = .false.                                       !< use a symmetric FEM solver (only Abaqus)
+    symmetricSolver = .false.                                       !< use a symmetric FEM solver
-  character(len=1024), public :: &
-    modelName                                                       !< needs description
   logical, dimension(:,:), allocatable, public :: &
     calcMode                                                        !< do calculation or simply collect when using ping pong scheme
@@ -37,90 +30,36 @@ module FEsolving
 contains
 #if defined(Marc4DAMASK) || defined(Abaqus)
 !--------------------------------------------------------------------------------------------------
-!> @brief determine whether a symmetric solver is used and whether restart is requested
-!> @details restart information is found in input file in case of FEM solvers, in case of spectal
-!> solver the information is provided by the interface module
+!> @brief determine whether a symmetric solver is used
 !--------------------------------------------------------------------------------------------------
 subroutine FE_init

-  integer, parameter :: &
-    FILEUNIT = 222
-  integer :: j
-  character(len=65536) :: tag, line
-  integer, allocatable, dimension(:) :: chunkPos

   write(6,'(/,a)') ' <<<+- FEsolving init -+>>>'

-  modelName = getSolverJobName()
-  call IO_open_inputFile(FILEUNIT,modelName)
-  rewind(FILEUNIT)
-  do
-    read (FILEUNIT,'(a1024)',END=100) line
-    chunkPos = IO_stringPos(line)
-    tag = IO_lc(IO_stringValue(line,chunkPos,1))                    ! extract key
-    select case(tag)
-      case ('solver')
-        read (FILEUNIT,'(a1024)',END=100) line                      ! next line
-        chunkPos = IO_stringPos(line)
-        symmetricSolver = (IO_intValue(line,chunkPos,2) /= 1)
-      case ('restart')
-        read (FILEUNIT,'(a1024)',END=100) line                      ! next line
-        chunkPos = IO_stringPos(line)
-        restartWrite = iand(IO_intValue(line,chunkPos,1),1) > 0
-        restartRead  = iand(IO_intValue(line,chunkPos,1),2) > 0
-      case ('*restart')
-        do j=2,chunkPos(1)
-          restartWrite = (IO_lc(IO_StringValue(line,chunkPos,j)) == 'write') .or. restartWrite
-          restartRead  = (IO_lc(IO_StringValue(line,chunkPos,j)) == 'read')  .or. restartRead
-        enddo
-        if(restartWrite) then
-          do j=2,chunkPos(1)
-            restartWrite = (IO_lc(IO_StringValue(line,chunkPos,j)) /= 'frequency=0') .and. restartWrite
-          enddo
-        endif
-    end select
-  enddo
-100 close(FILEUNIT)
-
-  if (restartRead) then
-#ifdef Marc4DAMASK
-    call IO_open_logFile(FILEUNIT)
-    rewind(FILEUNIT)
-    do
-      read (FILEUNIT,'(a1024)',END=200) line
-      chunkPos = IO_stringPos(line)
-      if ( IO_lc(IO_stringValue(line,chunkPos,1)) == 'restart' &
-           .and. IO_lc(IO_stringValue(line,chunkPos,2)) == 'file' &
-           .and. IO_lc(IO_stringValue(line,chunkPos,3)) == 'job' &
-           .and. IO_lc(IO_stringValue(line,chunkPos,4)) == 'id' ) &
-        modelName = IO_StringValue(line,chunkPos,6)
-    enddo
-#else
-    call IO_open_inputFile(FILEUNIT,modelName)
-    rewind(FILEUNIT)
-    do
-      read (FILEUNIT,'(a1024)',END=200) line
-      chunkPos = IO_stringPos(line)
-      if (IO_lc(IO_stringValue(line,chunkPos,1))=='*heading') then
-        read (FILEUNIT,'(a1024)',END=200) line
-        chunkPos = IO_stringPos(line)
-        modelName = IO_StringValue(line,chunkPos,1)
-      endif
-    enddo
-#endif
-200 close(FILEUNIT)
-  endif
-
-  if (iand(debug_level(debug_FEsolving),debug_levelBasic) /= 0) then
-    write(6,'(a21,l1)') ' restart writing: ', restartWrite
-    write(6,'(a21,l1)') ' restart reading: ', restartRead
-    if (restartRead) write(6,'(a,/)') ' restart Job: '//trim(modelName)
-  endif
+#if defined(Marc4DAMASK)
+  block
+    integer, parameter :: FILEUNIT = 222
+    character(len=pStringLen) :: line
+    integer, allocatable, dimension(:) :: chunkPos
+
+    call IO_open_inputFile(FILEUNIT)
+    rewind(FILEUNIT)
+    do
+      read (FILEUNIT,'(a256)',END=100) line
+      chunkPos = IO_stringPos(line)
+      if(IO_lc(IO_stringValue(line,chunkPos,1)) == 'solver') then
+        read (FILEUNIT,'(a256)',END=100) line                       ! next line
+        chunkPos = IO_stringPos(line)
+        symmetricSolver = (IO_intValue(line,chunkPos,2) /= 1)
+      endif
+    enddo
+100 close(FILEUNIT)
+  end block
+#endif

 end subroutine FE_init
 #endif

 end module FEsolving

View File

@ -37,7 +37,6 @@ module IO
#if defined(Marc4DAMASK) || defined(Abaqus) #if defined(Marc4DAMASK) || defined(Abaqus)
public :: & public :: &
IO_open_inputFile, & IO_open_inputFile, &
IO_open_logFile, &
IO_countContinuousIntValues, & IO_countContinuousIntValues, &
IO_continuousIntValues, & IO_continuousIntValues, &
#if defined(Abaqus) #if defined(Abaqus)
@ -207,10 +206,9 @@ end function IO_open_binary
!--------------------------------------------------------------------------------------------------
!> @brief opens FEM input file for reading located in current working directory to given unit
!--------------------------------------------------------------------------------------------------
subroutine IO_open_inputFile(fileUnit,modelName)
subroutine IO_open_inputFile(fileUnit)
integer, intent(in) :: fileUnit !< file unit
character(len=*), intent(in) :: modelName !< model name, in case of restart not solver job name
integer :: myStat
character(len=1024) :: path
@ -218,16 +216,16 @@ subroutine IO_open_inputFile(fileUnit,modelName)
integer :: fileType
fileType = 1 ! assume .pes
path = trim(modelName)//inputFileExtension(fileType) ! attempt .pes, if it exists: it should be used
path = trim(getSolverJobName())//inputFileExtension(fileType) ! attempt .pes, if it exists: it should be used
open(fileUnit+1,status='old',iostat=myStat,file=path,action='read',position='rewind')
if(myStat /= 0) then ! if .pes does not work / exist; use conventional extension, i.e.".inp"
fileType = 2
path = trim(modelName)//inputFileExtension(fileType)
path = trim(getSolverJobName())//inputFileExtension(fileType)
open(fileUnit+1,status='old',iostat=myStat,file=path,action='read',position='rewind')
endif
if (myStat /= 0) call IO_error(100,el=myStat,ext_msg=path)
path = trim(modelName)//inputFileExtension(fileType)//'_assembly'
path = trim(getSolverJobName())//inputFileExtension(fileType)//'_assembly'
open(fileUnit,iostat=myStat,file=path)
if (myStat /= 0) call IO_error(100,el=myStat,ext_msg=path)
if (.not.abaqus_assembleInputFile(fileUnit,fileUnit+1)) call IO_error(103) ! strip comments and concatenate any "include"s
@ -258,10 +256,8 @@ subroutine IO_open_inputFile(fileUnit,modelName)
fname = trim(line(9+scan(line(9:),'='):))
inquire(file=fname, exist=fexist)
if (.not.(fexist)) then
!$OMP CRITICAL (write2out)
write(6,*)'ERROR: file does not exist error in abaqus_assembleInputFile'
write(6,*)'filename: ', trim(fname)
!$OMP END CRITICAL (write2out)
createSuccess = .false.
return
endif
@ -285,30 +281,12 @@ subroutine IO_open_inputFile(fileUnit,modelName)
end function abaqus_assembleInputFile
#elif defined(Marc4DAMASK)
path = trim(modelName)//inputFileExtension
path = trim(getSolverJobName())//inputFileExtension
open(fileUnit,status='old',iostat=myStat,file=path)
if (myStat /= 0) call IO_error(100,el=myStat,ext_msg=path)
#endif
end subroutine IO_open_inputFile
!--------------------------------------------------------------------------------------------------
!> @brief opens existing FEM log file for reading to given unit. File is named after solver job
!! name and located in current working directory
!--------------------------------------------------------------------------------------------------
subroutine IO_open_logFile(fileUnit)
integer, intent(in) :: fileUnit !< file unit
integer :: myStat
character(len=1024) :: path
path = trim(getSolverJobName())//LogFileExtension
open(fileUnit,status='old',iostat=myStat,file=path,action='read',position='rewind')
if (myStat /= 0) call IO_error(100,el=myStat,ext_msg=path)
end subroutine IO_open_logFile
#endif
@ -734,11 +712,6 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
case (602)
msg = 'invalid selection for debug'
!-------------------------------------------------------------------------------------------------
! DAMASK_marc errors
case (700)
msg = 'invalid materialpoint result requested'
!-------------------------------------------------------------------------------------------------
! errors related to the grid solver
case (809)

View File

@ -85,6 +85,7 @@ subroutine config_init
case (trim('crystallite'))
call parse_materialConfig(config_name_crystallite,config_crystallite,line,fileContent(i+1:))
if (verbose) write(6,'(a)') ' Crystallite parsed'; flush(6)
deallocate(config_crystallite)
case (trim('homogenization'))
call parse_materialConfig(config_name_homogenization,config_homogenization,line,fileContent(i+1:))
@ -102,8 +103,6 @@ subroutine config_init
call IO_error(160,ext_msg='<homogenization>')
if (.not. allocated(config_microstructure) .or. size(config_microstructure) < 1) &
call IO_error(160,ext_msg='<microstructure>')
if (.not. allocated(config_crystallite) .or. size(config_crystallite) < 1) &
call IO_error(160,ext_msg='<crystallite>')
if (.not. allocated(config_phase) .or. size(config_phase) < 1) &
call IO_error(160,ext_msg='<phase>')
if (.not. allocated(config_texture) .or. size(config_texture) < 1) &
@ -295,9 +294,6 @@ subroutine config_deallocate(what)
case('material.config/microstructure')
deallocate(config_microstructure)
case('material.config/crystallite')
deallocate(config_crystallite)
case('material.config/homogenization')
deallocate(config_homogenization)

View File

@ -13,7 +13,6 @@ module constitutive
use results
use HDF5_utilities
use lattice
use mesh
use discretization
use plastic_none
use plastic_isotropic
@ -37,7 +36,6 @@ module constitutive
private
integer, public, protected :: &
constitutive_plasticity_maxSizePostResults, &
constitutive_plasticity_maxSizeDotState, &
constitutive_source_maxSizePostResults, &
constitutive_source_maxSizeDotState
@ -73,8 +71,7 @@ subroutine constitutive_init
integer, dimension(:,:), pointer :: thisSize
character(len=64), dimension(:,:), pointer :: thisOutput
character(len=32) :: outputName !< name of output, intermediate fix until HDF5 output is ready
logical :: knownPlasticity, knownSource, nonlocalConstitutionPresent
logical :: knownSource
nonlocalConstitutionPresent = .false.
!--------------------------------------------------------------------------------------------------
! initialized plasticity
@ -112,64 +109,11 @@ subroutine constitutive_init
call IO_write_jobFile(FILEUNIT,'outputConstitutive')
PhaseLoop: do ph = 1,material_Nphase
activePhase: if (any(material_phaseAt == ph)) then
ins = phase_plasticityInstance(ph)
knownPlasticity = .true. ! assume valid
plasticityType: select case(phase_plasticity(ph))
case (PLASTICITY_NONE_ID) plasticityType
outputName = PLASTICITY_NONE_label
thisOutput => null()
thisSize => null()
case (PLASTICITY_ISOTROPIC_ID) plasticityType
outputName = PLASTICITY_ISOTROPIC_label
thisOutput => plastic_isotropic_output
thisSize => plastic_isotropic_sizePostResult
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
outputName = PLASTICITY_PHENOPOWERLAW_label
thisOutput => plastic_phenopowerlaw_output
thisSize => plastic_phenopowerlaw_sizePostResult
case (PLASTICITY_KINEHARDENING_ID) plasticityType
outputName = PLASTICITY_KINEHARDENING_label
thisOutput => plastic_kinehardening_output
thisSize => plastic_kinehardening_sizePostResult
case (PLASTICITY_DISLOTWIN_ID) plasticityType
outputName = PLASTICITY_DISLOTWIN_label
thisOutput => plastic_dislotwin_output
thisSize => plastic_dislotwin_sizePostResult
case (PLASTICITY_DISLOUCLA_ID) plasticityType
outputName = PLASTICITY_DISLOUCLA_label
thisOutput => plastic_disloucla_output
thisSize => plastic_disloucla_sizePostResult
case (PLASTICITY_NONLOCAL_ID) plasticityType
outputName = PLASTICITY_NONLOCAL_label
thisOutput => plastic_nonlocal_output
thisSize => plastic_nonlocal_sizePostResult
case default plasticityType
knownPlasticity = .false.
end select plasticityType
write(FILEUNIT,'(/,a,/)') '['//trim(config_name_phase(ph))//']'
if (knownPlasticity) then
write(FILEUNIT,'(a)') '(plasticity)'//char(9)//trim(outputName)
if (phase_plasticity(ph) /= PLASTICITY_NONE_ID) then
OutputPlasticityLoop: do o = 1,size(thisOutput(:,ins))
if(len_trim(thisOutput(o,ins)) > 0) &
write(FILEUNIT,'(a,i4)') trim(thisOutput(o,ins))//char(9),thisSize(o,ins)
enddo OutputPlasticityLoop
endif
endif
SourceLoop: do s = 1, phase_Nsources(ph)
knownSource = .true. ! assume valid
sourceType: select case (phase_source(s,ph))
case (SOURCE_thermal_dissipation_ID) sourceType
ins = source_thermal_dissipation_instance(ph)
outputName = SOURCE_thermal_dissipation_label
thisOutput => source_thermal_dissipation_output
thisSize => source_thermal_dissipation_sizePostResult
case (SOURCE_thermal_externalheat_ID) sourceType
ins = source_thermal_externalheat_instance(ph)
outputName = SOURCE_thermal_externalheat_label
thisOutput => source_thermal_externalheat_output
thisSize => source_thermal_externalheat_sizePostResult
case (SOURCE_damage_isoBrittle_ID) sourceType
ins = source_damage_isoBrittle_instance(ph)
outputName = SOURCE_damage_isoBrittle_label
@ -207,7 +151,6 @@ subroutine constitutive_init
endif mainProcess
constitutive_plasticity_maxSizeDotState = 0
constitutive_plasticity_maxSizePostResults = 0
constitutive_source_maxSizeDotState = 0
constitutive_source_maxSizePostResults = 0
@ -224,8 +167,6 @@ subroutine constitutive_init
! determine max size of state and output
constitutive_plasticity_maxSizeDotState = max(constitutive_plasticity_maxSizeDotState, &
plasticState(ph)%sizeDotState)
constitutive_plasticity_maxSizePostResults = max(constitutive_plasticity_maxSizePostResults, &
plasticState(ph)%sizePostResults)
constitutive_source_maxSizeDotState = max(constitutive_source_maxSizeDotState, &
maxval(sourceState(ph)%p(:)%sizeDotState))
constitutive_source_maxSizePostResults = max(constitutive_source_maxSizePostResults, &
@ -707,61 +648,21 @@ function constitutive_postResults(S, Fi, ipc, ip, el)
ipc, & !< component-ID of integration point
ip, & !< integration point
el !< element
real(pReal), dimension(plasticState(material_phaseAt(ipc,el))%sizePostResults + &
sum(sourceState(material_phaseAt(ipc,el))%p(:)%sizePostResults)) :: &
real(pReal), dimension(sum(sourceState(material_phaseAt(ipc,el))%p(:)%sizePostResults)) :: &
constitutive_postResults
real(pReal), intent(in), dimension(3,3) :: &
Fi !< intermediate deformation gradient
real(pReal), intent(in), dimension(3,3) :: &
S !< 2nd Piola Kirchhoff stress
real(pReal), dimension(3,3) :: &
Mp !< Mandel stress
integer :: &
startPos, endPos
integer :: &
ho, & !< homogenization
tme, & !< thermal member position
i, of, instance !< counter in source loop
constitutive_postResults = 0.0_pReal
Mp = matmul(matmul(transpose(Fi),Fi),S)
ho = material_homogenizationAt(el)
endPos = 0
tme = thermalMapping(ho)%p(ip,el)
startPos = 1
endPos = plasticState(material_phaseAt(ipc,el))%sizePostResults
of = material_phasememberAt(ipc,ip,el)
instance = phase_plasticityInstance(material_phaseAt(ipc,el))
plasticityType: select case (phase_plasticity(material_phaseAt(ipc,el)))
case (PLASTICITY_ISOTROPIC_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_isotropic_postResults(Mp,instance,of)
case (PLASTICITY_PHENOPOWERLAW_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_phenopowerlaw_postResults(Mp,instance,of)
case (PLASTICITY_KINEHARDENING_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_kinehardening_postResults(Mp,instance,of)
case (PLASTICITY_DISLOTWIN_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_dislotwin_postResults(Mp,temperature(ho)%p(tme),instance,of)
case (PLASTICITY_DISLOUCLA_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_disloucla_postResults(Mp,temperature(ho)%p(tme),instance,of)
case (PLASTICITY_NONLOCAL_ID) plasticityType
constitutive_postResults(startPos:endPos) = &
plastic_nonlocal_postResults (material_phaseAt(ipc,el),instance,of)
end select plasticityType
SourceLoop: do i = 1, phase_Nsources(material_phaseAt(ipc,el))
startPos = endPos + 1

View File

@ -16,29 +16,16 @@ module crystallite
use numerics
use rotations
use math
use mesh
use FEsolving
use material
use constitutive
use discretization
use lattice
use plastic_nonlocal
use geometry_plastic_nonlocal, only: &
nIPneighbors => geometry_plastic_nonlocal_nIPneighbors, &
IPneighborhood => geometry_plastic_nonlocal_IPneighborhood
use HDF5_utilities
use results
implicit none
private
character(len=64), dimension(:,:), allocatable :: &
crystallite_output !< name of each post result output
integer, public, protected :: &
crystallite_maxSizePostResults !< description not available
integer, dimension(:), allocatable, public, protected :: &
crystallite_sizePostResults !< description not available
integer, dimension(:,:), allocatable :: &
crystallite_sizePostResult !< description not available
real(pReal), dimension(:,:,:), allocatable, public :: &
crystallite_dt !< requested time increment of each grain
@ -89,27 +76,6 @@ module crystallite
crystallite_todo, & !< flag to indicate need for further computation
crystallite_localPlasticity !< indicates this grain to have purely local constitutive law
enum, bind(c)
enumerator :: undefined_ID, &
phase_ID, &
texture_ID, &
orientation_ID, &
grainrotation_ID, &
defgrad_ID, &
fe_ID, &
fp_ID, &
fi_ID, &
lp_ID, &
li_ID, &
p_ID, &
s_ID, &
elasmatrix_ID, &
neighboringip_ID, &
neighboringelement_ID
end enum
integer(kind(undefined_ID)),dimension(:,:), allocatable :: &
crystallite_outputID !< ID of each post result output
type :: tOutput !< new requested output (per phase)
character(len=65536), allocatable, dimension(:) :: &
label
@ -159,15 +125,10 @@ subroutine crystallite_init
c, & !< counter in integration point component loop
i, & !< counter in integration point loop
e, & !< counter in element loop
o = 0, & !< counter in output loop
r, &
cMax, & !< maximum number of integration point components
iMax, & !< maximum number of integration points
eMax, & !< maximum number of elements
myNcomponents, & !< number of components at current IP
mySize
myNcomponents !< number of components at current IP
character(len=65536), dimension(:), allocatable :: str
write(6,'(/,a)') ' <<<+- crystallite init -+>>>'
@ -214,13 +175,6 @@ subroutine crystallite_init
allocate(crystallite_requested(cMax,iMax,eMax), source=.false.)
allocate(crystallite_todo(cMax,iMax,eMax), source=.false.)
allocate(crystallite_converged(cMax,iMax,eMax), source=.true.)
allocate(crystallite_output(maxval(crystallite_Noutput), &
size(config_crystallite))) ; crystallite_output = ''
allocate(crystallite_outputID(maxval(crystallite_Noutput), &
size(config_crystallite)), source=undefined_ID)
allocate(crystallite_sizePostResults(size(config_crystallite)),source=0)
allocate(crystallite_sizePostResult(maxval(crystallite_Noutput), &
size(config_crystallite)), source=0)
num%subStepMinCryst = config_numerics%getFloat('substepmincryst', defaultVal=1.0e-3_pReal)
num%subStepSizeCryst = config_numerics%getFloat('substepsizecryst', defaultVal=0.25_pReal)
@ -267,55 +221,6 @@ subroutine crystallite_init
integrateState => integrateStateRKCK45
end select
do c = 1, size(config_crystallite)
#if defined(__GFORTRAN__)
str = ['GfortranBug86277']
str = config_crystallite(c)%getStrings('(output)',defaultVal=str)
if (str(1) == 'GfortranBug86277') str = [character(len=65536)::]
#else
str = config_crystallite(c)%getStrings('(output)',defaultVal=[character(len=65536)::])
#endif
do o = 1, size(str)
crystallite_output(o,c) = str(o)
outputName: select case(str(o))
case ('phase') outputName
crystallite_outputID(o,c) = phase_ID
case ('texture') outputName
crystallite_outputID(o,c) = texture_ID
case ('orientation') outputName
crystallite_outputID(o,c) = orientation_ID
case ('grainrotation') outputName
crystallite_outputID(o,c) = grainrotation_ID
case ('defgrad','f') outputName ! ToDo: no alias (f only)
crystallite_outputID(o,c) = defgrad_ID
case ('fe') outputName
crystallite_outputID(o,c) = fe_ID
case ('fp') outputName
crystallite_outputID(o,c) = fp_ID
case ('fi') outputName
crystallite_outputID(o,c) = fi_ID
case ('lp') outputName
crystallite_outputID(o,c) = lp_ID
case ('li') outputName
crystallite_outputID(o,c) = li_ID
case ('p','firstpiola','1stpiola') outputName ! ToDo: no alias (p only)
crystallite_outputID(o,c) = p_ID
case ('s','tstar','secondpiola','2ndpiola') outputName ! ToDo: no alias (s only)
crystallite_outputID(o,c) = s_ID
case ('elasmatrix') outputName
crystallite_outputID(o,c) = elasmatrix_ID
case ('neighboringip') outputName ! ToDo: this is not a result, it is static. Should be written out by mesh
crystallite_outputID(o,c) = neighboringip_ID
case ('neighboringelement') outputName ! ToDo: this is not a result, it is static. Should be written out by mesh
crystallite_outputID(o,c) = neighboringelement_ID
case default outputName
call IO_error(105,ext_msg=trim(str(o))//' (Crystallite)')
end select outputName
enddo
enddo
allocate(output_constituent(size(config_phase)))
do c = 1, size(config_phase)
#if defined(__GFORTRAN__)
@ -328,51 +233,14 @@ subroutine crystallite_init
#endif
enddo
do r = 1,size(config_crystallite)
do o = 1,crystallite_Noutput(r)
select case(crystallite_outputID(o,r))
case(phase_ID,texture_ID)
mySize = 1
case(orientation_ID,grainrotation_ID)
mySize = 4
case(defgrad_ID,fe_ID,fp_ID,fi_ID,lp_ID,li_ID,p_ID,s_ID)
mySize = 9
case(elasmatrix_ID)
mySize = 36
case(neighboringip_ID,neighboringelement_ID)
mySize = nIPneighbors
case default
mySize = 0
end select
crystallite_sizePostResult(o,r) = mySize
crystallite_sizePostResults(r) = crystallite_sizePostResults(r) + mySize
enddo
enddo
crystallite_maxSizePostResults = &
maxval(crystallite_sizePostResults(microstructure_crystallite),microstructure_active)
!--------------------------------------------------------------------------------------------------
! write description file for crystallite output
if (worldrank == 0) then
call IO_write_jobFile(FILEUNIT,'outputCrystallite')
write(FILEUNIT,'(/,a,/)') '[not supported anymore]'
do r = 1,size(config_crystallite)
if (any(microstructure_crystallite(discretization_microstructureAt) == r)) then
write(FILEUNIT,'(/,a,/)') '['//trim(config_name_crystallite(r))//']'
do o = 1,crystallite_Noutput(r)
write(FILEUNIT,'(a,i4)') trim(crystallite_output(o,r))//char(9),crystallite_sizePostResult(o,r)
enddo
endif
enddo
close(FILEUNIT)
endif
call config_deallocate('material.config/phase')
call config_deallocate('material.config/crystallite')
!--------------------------------------------------------------------------------------------------
! initialize
@ -874,96 +742,18 @@ function crystallite_postResults(ipc, ip, el)
ip, & !< integration point index
ipc !< grain index
real(pReal), dimension(1+crystallite_sizePostResults(microstructure_crystallite(discretization_microstructureAt(el))) + &
1+plasticState(material_phaseAt(ipc,el))%sizePostResults + &
sum(sourceState(material_phaseAt(ipc,el))%p(:)%sizePostResults)) :: &
real(pReal), dimension(1+ &
1+sum(sourceState(material_phaseAt(ipc,el))%p(:)%sizePostResults)) :: &
crystallite_postResults
integer :: &
o, &
c, &
crystID, &
mySize, &
n
c
type(rotation) :: rot
crystID = microstructure_crystallite(discretization_microstructureAt(el))
crystallite_postResults = 0.0_pReal
crystallite_postResults(1) = real(crystallite_sizePostResults(crystID),pReal) ! header-like information (length)
crystallite_postResults(1) = 0.0_pReal ! header-like information (length)
c = 1
do o = 1,crystallite_Noutput(crystID)
crystallite_postResults(c+1) = real(sum(sourceState(material_phaseAt(ipc,el))%p(:)%sizePostResults),pReal) ! size of constitutive results
mySize = 0
select case(crystallite_outputID(o,crystID))
case (phase_ID)
mySize = 1
crystallite_postResults(c+1) = real(material_phaseAt(ipc,el),pReal) ! phaseID of grain
case (texture_ID)
mySize = 1
crystallite_postResults(c+1) = real(material_texture(ipc,ip,el),pReal) ! textureID of grain
case (orientation_ID)
mySize = 4
crystallite_postResults(c+1:c+mySize) = crystallite_orientation(ipc,ip,el)%asQuaternion()
case (grainrotation_ID)
rot = material_orientation0(ipc,ip,el)%misorientation(crystallite_orientation(ipc,ip,el))
mySize = 4
crystallite_postResults(c+1:c+mySize) = rot%asAxisAngle()
crystallite_postResults(c+4) = inDeg * crystallite_postResults(c+4) ! angle in degree
! remark: tensor output is of the form 11,12,13, 21,22,23, 31,32,33
! thus row index i is slow, while column index j is fast. reminder: "row is slow"
case (defgrad_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_partionedF(1:3,1:3,ipc,ip,el)),[mySize])
case (fe_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Fe(1:3,1:3,ipc,ip,el)),[mySize])
case (fp_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Fp(1:3,1:3,ipc,ip,el)),[mySize])
case (fi_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Fi(1:3,1:3,ipc,ip,el)),[mySize])
case (lp_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Lp(1:3,1:3,ipc,ip,el)),[mySize])
case (li_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_Li(1:3,1:3,ipc,ip,el)),[mySize])
case (p_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(transpose(crystallite_P(1:3,1:3,ipc,ip,el)),[mySize])
case (s_ID)
mySize = 9
crystallite_postResults(c+1:c+mySize) = &
reshape(crystallite_S(1:3,1:3,ipc,ip,el),[mySize])
case (elasmatrix_ID)
mySize = 36
crystallite_postResults(c+1:c+mySize) = reshape(constitutive_homogenizedC(ipc,ip,el),[mySize])
case(neighboringelement_ID)
mySize = nIPneighbors
crystallite_postResults(c+1:c+mySize) = 0.0_pReal
forall (n = 1:mySize) &
crystallite_postResults(c+n) = real(IPneighborhood(1,n,ip,el),pReal)
case(neighboringip_ID)
mySize = nIPneighbors
crystallite_postResults(c+1:c+mySize) = 0.0_pReal
forall (n = 1:mySize) &
crystallite_postResults(c+n) = real(IPneighborhood(2,n,ip,el),pReal)
end select
c = c + mySize
enddo
crystallite_postResults(c+1) = real(plasticState(material_phaseAt(ipc,el))%sizePostResults,pReal) ! size of constitutive results
c = c + 1
if (size(crystallite_postResults)-c > 0) &
crystallite_postResults(c+1:size(crystallite_postResults)) = &
@ -986,7 +776,7 @@ subroutine crystallite_results
do p=1,size(config_name_phase)
group = trim('current/constituent')//'/'//trim(config_name_phase(p))//'/generic'
call HDF5_closeGroup(results_addGroup(group))
call results_closeGroup(results_addGroup(group))
do o = 1, size(output_constituent(p)%label)
select case (output_constituent(p)%label(o))
@ -1056,12 +846,12 @@ subroutine crystallite_results
real(pReal), allocatable, dimension(:,:,:) :: select_tensors
integer :: e,i,c,j
allocate(select_tensors(3,3,count(material_phaseAt==instance)*homogenization_maxNgrains))
allocate(select_tensors(3,3,count(material_phaseAt==instance)*homogenization_maxNgrains*discretization_nIP))
j=0
do e = 1, size(material_phaseAt,2)
do i = 1, homogenization_maxNgrains !ToDo: this needs to be changed for varying Ngrains
do i = 1, discretization_nIP
do c = 1, size(material_phaseAt,1)
do c = 1, size(material_phaseAt,1) !ToDo: this needs to be changed for varying Ngrains
if (material_phaseAt(c,e) == instance) then
j = j + 1
select_tensors(1:3,1:3,j) = dataset(1:3,1:3,c,i,e)
@ -1083,12 +873,12 @@ subroutine crystallite_results
type(rotation), allocatable, dimension(:) :: select_rotations
integer :: e,i,c,j
allocate(select_rotations(count(material_phaseAt==instance)*homogenization_maxNgrains))
allocate(select_rotations(count(material_phaseAt==instance)*homogenization_maxNgrains*discretization_nIP))
j=0
do e = 1, size(material_phaseAt,2)
do i = 1, homogenization_maxNgrains !ToDo: this needs to be changed for varying Ngrains
do i = 1, discretization_nIP
do c = 1, size(material_phaseAt,1)
do c = 1, size(material_phaseAt,1) !ToDo: this needs to be changed for varying Ngrains
if (material_phaseAt(c,e) == instance) then
j = j + 1
select_rotations(j) = dataset(c,i,e)
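The enlarged allocations above fix an undersized buffer: one entry is needed per matching (constituent, IP, element) triple, hence the additional factor discretization_nIP. A self-contained sketch of the size computation (array contents and parameters are illustrative, names only mirror the diff):

program count_matches
  implicit none
  integer, parameter :: nConstituents = 2, nElems = 3, nIP = 4, instance = 1
  integer :: phaseAt(nConstituents,nElems), bufferLen
  phaseAt = reshape([1,2, 1,1, 2,2], [nConstituents,nElems])  ! illustrative phase map
  bufferLen = count(phaseAt == instance) * nIP                ! matches times IPs per element
  print *, 'buffer entries:', bufferLen                       ! prints 12
end program count_matches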

View File

@ -30,7 +30,6 @@ subroutine damage_none_init
myhomog: if (damage_type(homog) == DAMAGE_NONE_ID) then
NofMyHomog = count(material_homogenizationAt == homog)
damageState(homog)%sizeState = 0
damageState(homog)%sizePostResults = 0
allocate(damageState(homog)%state0 (0,NofMyHomog))
allocate(damageState(homog)%subState0(0,NofMyHomog))
allocate(damageState(homog)%state (0,NofMyHomog))

View File

@ -10,7 +10,6 @@ module damage_nonlocal
use config
use crystallite
use lattice
use mesh
use source_damage_isoBrittle
use source_damage_isoDuctile
use source_damage_anisoBrittle

View File

@ -6,9 +6,6 @@ module discretization
use prec
use results
#if defined(PETSc) || defined(DAMASK_HDF5)
use HDF5_utilities
#endif
implicit none
private
@ -27,17 +24,23 @@ module discretization
discretization_IPcoords, &
discretization_NodeCoords
integer :: &
discretization_sharedNodesBeginn
public :: &
discretization_init, &
discretization_results, &
discretization_setIPcoords
discretization_setIPcoords, &
discretization_setNodeCoords
contains
!--------------------------------------------------------------------------------------------------
!> @brief stores the relevant information in globally accessible variables
!--------------------------------------------------------------------------------------------------
subroutine discretization_init(homogenizationAt,microstructureAt,IPcoords0,NodeCoords0)
subroutine discretization_init(homogenizationAt,microstructureAt,&
IPcoords0,NodeCoords0,&
sharedNodesBeginn)
integer, dimension(:), intent(in) :: &
homogenizationAt, &
@ -45,6 +48,8 @@ subroutine discretization_init(homogenizationAt,microstructureAt,IPcoords0,NodeC
real(pReal), dimension(:,:), intent(in) :: &
IPcoords0, &
NodeCoords0
integer, optional, intent(in) :: &
sharedNodesBeginn
write(6,'(/,a)') ' <<<+- discretization init -+>>>'
@ -60,6 +65,12 @@ subroutine discretization_init(homogenizationAt,microstructureAt,IPcoords0,NodeC
discretization_NodeCoords0 = NodeCoords0
discretization_NodeCoords = NodeCoords0
if(present(sharedNodesBeginn)) then
discretization_sharedNodesBeginn = sharedNodesBeginn
else
discretization_sharedNodesBeginn = size(discretization_NodeCoords0,2)
endif
end subroutine discretization_init
@ -70,12 +81,14 @@ subroutine discretization_results
#if defined(PETSc) || defined(DAMASK_HDF5)
real(pReal), dimension(:,:), allocatable :: u
call HDF5_closeGroup(results_addGroup(trim('current/geometry')))
call results_closeGroup(results_addGroup(trim('current/geometry')))
u = discretization_NodeCoords - discretization_NodeCoords0
u = discretization_NodeCoords (1:3,:discretization_sharedNodesBeginn) &
- discretization_NodeCoords0(1:3,:discretization_sharedNodesBeginn)
call results_writeDataset('current/geometry',u,'u_n','nodal displacements','m')
u = discretization_IPcoords - discretization_IPcoords0
u = discretization_IPcoords &
- discretization_IPcoords0
call results_writeDataset('current/geometry',u,'u_c','cell center displacements','m')
#endif
end subroutine discretization_results
@ -92,4 +105,17 @@ subroutine discretization_setIPcoords(IPcoords)
end subroutine discretization_setIPcoords
!--------------------------------------------------------------------------------------------------
!> @brief stores current node coordinates
!--------------------------------------------------------------------------------------------------
subroutine discretization_setNodeCoords(NodeCoords)
real(pReal), dimension(:,:), intent(in) :: NodeCoords
discretization_NodeCoords = NodeCoords
end subroutine discretization_setNodeCoords
end module discretization
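The optional sharedNodesBeginn argument introduced above defaults to the total node count, so existing callers keep writing displacements for all nodes, while solvers with duplicated (shared) nodes can restrict the u_n output to the unique range. A schematic pair of calls (nNodesUnique is an illustrative name, not from the diff):

call discretization_init(homogenizationAt,microstructureAt,IPcoords0,NodeCoords0)              ! default: all nodes reported
call discretization_init(homogenizationAt,microstructureAt,IPcoords0,NodeCoords0,nNodesUnique) ! only nodes 1..nNodesUnique reported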

View File

@ -122,7 +122,7 @@ subroutine geometry_plastic_nonlocal_results
integer, dimension(:), allocatable :: shp
#if defined(DAMASK_HDF5)
#if defined(PETSc) || defined(DAMASK_HDF5)
call results_openJobFile
writeVolume: block

View File

@ -15,7 +15,7 @@ program DAMASK_spectral
use config
use debug
use math
use mesh
use mesh_grid
use CPFEM2
use FEsolving
use numerics
@ -27,9 +27,7 @@ program DAMASK_spectral
use grid_mech_FEM
use grid_damage_spectral
use grid_thermal_spectral
use HDF5_utilities
use results
use rotations
implicit none
@ -60,7 +58,8 @@ program DAMASK_spectral
remainingLoadCaseTime = 0.0_pReal !< remaining time of current load case
logical :: &
guess, & !< guess along former trajectory
stagIterate
stagIterate, &
cutBack = .false.
integer :: &
i, j, k, l, field, &
errorID = 0, &
@ -70,17 +69,14 @@ program DAMASK_spectral
currentLoadcase = 0, & !< current load case
inc, & !< current increment in current load case
totalIncsCounter = 0, & !< total # of increments
convergedCounter = 0, & !< # of converged increments
notConvergedCounter = 0, & !< # of non-converged increments
fileUnit = 0, & !< file unit for reading load case and writing results
myStat, &
statUnit = 0, & !< file unit for statistics output
lastRestartWritten = 0, & !< total increment # at which last restart information was written
stagIter
stagIter, &
nActiveFields = 0
character(len=6) :: loadcase_string
character(len=1024) :: &
incInfo
type(rotation) :: R
type(tLoadCase), allocatable, dimension(:) :: loadCases !< array of all load cases
type(tLoadCase) :: newLoadCase
type(tSolutionState), allocatable, dimension(:) :: solres
@ -96,6 +92,10 @@ program DAMASK_spectral
mech_forward
procedure(grid_mech_spectral_basic_solution), pointer :: &
mech_solution
procedure(grid_mech_spectral_basic_updateCoords), pointer :: &
mech_updateCoords
procedure(grid_mech_spectral_basic_restartWrite), pointer :: &
mech_restartWrite
external :: &
quit
@ -123,6 +123,8 @@ program DAMASK_spectral
mech_init => grid_mech_spectral_basic_init
mech_forward => grid_mech_spectral_basic_forward
mech_solution => grid_mech_spectral_basic_solution
mech_updateCoords => grid_mech_spectral_basic_updateCoords
mech_restartWrite => grid_mech_spectral_basic_restartWrite
case ('polarisation')
if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0) &
@ -130,6 +132,8 @@ program DAMASK_spectral
mech_init => grid_mech_spectral_polarisation_init
mech_forward => grid_mech_spectral_polarisation_forward
mech_solution => grid_mech_spectral_polarisation_solution
mech_updateCoords => grid_mech_spectral_polarisation_updateCoords
mech_restartWrite => grid_mech_spectral_polarisation_restartWrite
case ('fem')
if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0) &
@ -137,6 +141,8 @@ program DAMASK_spectral
mech_init => grid_mech_FEM_init
mech_forward => grid_mech_FEM_forward
mech_solution => grid_mech_FEM_solution
mech_updateCoords => grid_mech_FEM_updateCoords
mech_restartWrite => grid_mech_FEM_restartWrite
case default
call IO_error(error_ID = 891, ext_msg = config_numerics%getString('spectral_solver'))
@ -158,11 +164,11 @@ program DAMASK_spectral
chunkPos = IO_stringPos(line)
do i = 1, chunkPos(1) ! reading compulsory parameters for loadcase
select case (IO_lc(IO_stringValue(line,chunkPos,i)))
case('l','velocitygrad','velgrad','velocitygradient','fdot','dotf','f')
case('l','fdot','dotf','f')
N_def = N_def + 1
case('t','time','delta')
N_t = N_t + 1
case('n','incs','increments','steps','logincs','logincrements','logsteps')
case('n','incs','increments','logincs','logincrements')
N_n = N_n + 1
end select
enddo
@ -181,9 +187,10 @@ program DAMASK_spectral
newLoadCase%ID(field) = FIELD_DAMAGE_ID
endif damageActive
call newLoadCase%rot%fromEulers(real([0.0,0.0,0.0],pReal))
readIn: do i = 1, chunkPos(1)
select case (IO_lc(IO_stringValue(line,chunkPos,i)))
case('fdot','dotf','l','velocitygrad','velgrad','velocitygradient','f') ! assign values for the deformation BC matrix
case('fdot','dotf','l','f') ! assign values for the deformation BC matrix
temp_valueVector = 0.0_pReal
if (IO_lc(IO_stringValue(line,chunkPos,i)) == 'fdot'.or. & ! in case of Fdot, set type to fdot
IO_lc(IO_stringValue(line,chunkPos,i)) == 'dotf') then
@ -200,7 +207,7 @@ program DAMASK_spectral
newLoadCase%deformation%maskLogical = transpose(reshape(temp_maskVector,[ 3,3])) ! logical mask in 3x3 notation
newLoadCase%deformation%maskFloat = merge(ones,zeros,newLoadCase%deformation%maskLogical)! float (1.0/0.0) mask in 3x3 notation
newLoadCase%deformation%values = math_9to33(temp_valueVector) ! values in 3x3 notation
case('p','pk1','piolakirchhoff','stress', 's')
case('p','stress', 's')
temp_valueVector = 0.0_pReal
do j = 1, 9
temp_maskVector(j) = IO_stringValue(line,chunkPos,i+j) /= '*' ! true if not an asterisk
@ -211,9 +218,9 @@ program DAMASK_spectral
newLoadCase%stress%values = math_9to33(temp_valueVector)
case('t','time','delta') ! increment time
newLoadCase%time = IO_floatValue(line,chunkPos,i+1)
case('n','incs','increments','steps') ! number of increments
case('n','incs','increments') ! number of increments
newLoadCase%incs = IO_intValue(line,chunkPos,i+1)
case('logincs','logincrements','logsteps') ! number of increments (switch to log time scaling)
case('logincs','logincrements') ! number of increments (switch to log time scaling)
newLoadCase%incs = IO_intValue(line,chunkPos,i+1)
newLoadCase%logscale = 1
case('freq','frequency','outputfreq') ! frequency of result writings
@ -236,14 +243,13 @@ program DAMASK_spectral
do j = 1, 3
temp_valueVector(j) = IO_floatValue(line,chunkPos,i+k+j)
enddo
call R%fromEulers(temp_valueVector(1:3),degrees=(l==1))
newLoadCase%rotation = R%asMatrix()
call newLoadCase%rot%fromEulers(temp_valueVector(1:3),degrees=(l==1))
case('rotation','rot') ! assign values for the rotation matrix
temp_valueVector = 0.0_pReal
do j = 1, 9
temp_valueVector(j) = IO_floatValue(line,chunkPos,i+j)
enddo
newLoadCase%rotation = math_9to33(temp_valueVector)
call newLoadCase%rot%fromMatrix(math_9to33(temp_valueVector))
end select
enddo readIn
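With the keyword lists pruned as above, a load case line combines the remaining short deformation, stress, time, and increment keywords; a minimal illustrative load definition under these assumptions (values are made up, '*' marks components driven by the complementary condition):

fdot 1.0e-3 0 0  0 * 0  0 0 *  stress * * *  * 0 *  * * 0  time 10  incs 40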
@ -287,14 +293,12 @@ program DAMASK_spectral
endif
enddo; write(6,'(/)',advance='no')
enddo
if (any(abs(matmul(newLoadCase%rotation, &
transpose(newLoadCase%rotation))-math_I3) > &
reshape(spread(tol_math_check,1,9),[ 3,3]))&
.or. abs(math_det33(newLoadCase%rotation)) > &
1.0_pReal + tol_math_check) errorID = 846 ! given rotation matrix contains strain
if (any(dNeq(newLoadCase%rotation, math_I3))) &
if (any(abs(matmul(newLoadCase%rot%asMatrix(), &
transpose(newLoadCase%rot%asMatrix()))-math_I3) > &
reshape(spread(tol_math_check,1,9),[ 3,3]))) errorID = 846 ! given rotation matrix contains strain
if (any(dNeq(newLoadCase%rot%asMatrix(), math_I3))) &
write(6,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'rotation of loadframe:',&
transpose(newLoadCase%rotation)
transpose(newLoadCase%rot%asMatrix())
if (newLoadCase%time < 0.0_pReal) errorID = 834 ! negative time increment
write(6,'(2x,a,f12.6)') 'time: ', newLoadCase%time
if (newLoadCase%incs < 1) errorID = 835 ! non-positive incs count
@ -310,15 +314,9 @@ program DAMASK_spectral
enddo
close(fileUnit)
call results_openJobFile
call HDF5_closeGroup(results_addGroup('geometry'))
call results_addAttribute('grid',grid,'geometry')
call results_addAttribute('size',geomSize,'geometry')
call results_closeJobFile
!--------------------------------------------------------------------------------------------------
! doing initialization depending on active solvers
call Utilities_init()
call Utilities_init
do field = 1, nActiveFields
select case (loadCases(1)%ID(field))
case(FIELD_MECH_ID)
@ -464,15 +462,16 @@ program DAMASK_spectral
select case(loadCases(currentLoadCase)%ID(field))
case(FIELD_MECH_ID)
call mech_forward (&
guess,timeinc,timeIncOld,remainingLoadCaseTime, &
cutBack,guess,timeinc,timeIncOld,remainingLoadCaseTime, &
deformation_BC = loadCases(currentLoadCase)%deformation, &
stress_BC = loadCases(currentLoadCase)%stress, &
rotation_BC = loadCases(currentLoadCase)%rotation)
rotation_BC = loadCases(currentLoadCase)%rot)
case(FIELD_THERMAL_ID); call grid_thermal_spectral_forward
case(FIELD_THERMAL_ID); call grid_thermal_spectral_forward(cutBack)
case(FIELD_DAMAGE_ID); call grid_damage_spectral_forward
case(FIELD_DAMAGE_ID); call grid_damage_spectral_forward(cutBack)
end select
enddo
if(.not. cutBack) call CPFEM_forward
!--------------------------------------------------------------------------------------------------
! solve fields
@ -485,7 +484,7 @@ program DAMASK_spectral
solres(field) = mech_solution (&
incInfo,timeinc,timeIncOld, &
stress_BC = loadCases(currentLoadCase)%stress, &
rotation_BC = loadCases(currentLoadCase)%rotation)
rotation_BC = loadCases(currentLoadCase)%rot)
case(FIELD_THERMAL_ID)
solres(field) = grid_thermal_spectral_solution(timeinc,timeIncOld)
@ -507,8 +506,9 @@ program DAMASK_spectral
!--------------------------------------------------------------------------------------------------
! check solution for either advance or retry
if ( (continueCalculation .or. all(solres(:)%converged .and. solres(:)%stagConverged)) & ! don't care or did converge
if ( (all(solres(:)%converged .and. solres(:)%stagConverged)) & ! converged
.and. .not. solres(1)%termIll) then ! and acceptable solution found
call mech_updateCoords
timeIncOld = timeinc
cutBack = .false.
guess = .true. ! start guessing after first converged (sub)inc
@ -528,7 +528,7 @@ program DAMASK_spectral
call IO_warning(850)
call MPI_File_close(fileUnit,ierr)
close(statUnit)
call quit(-1*(lastRestartWritten+1)) ! quit and provide information about last restart inc written
call quit(0) ! quit
endif
enddo subStepLooping
@ -536,11 +536,9 @@ program DAMASK_spectral
cutBackLevel = max(0, cutBackLevel - 1) ! try half number of subincs next inc
if (all(solres(:)%converged)) then
convergedCounter = convergedCounter + 1
write(6,'(/,a,'//IO_intOut(totalIncsCounter)//',a)') & ! report converged inc
' increment ', totalIncsCounter, ' converged'
else
notConvergedCounter = notConvergedCounter + 1
write(6,'(/,a,'//IO_intOut(totalIncsCounter)//',a)') & ! report non-converged inc
' increment ', totalIncsCounter, ' NOT converged'
endif; flush(6)
@ -563,11 +561,10 @@ program DAMASK_spectral
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
call CPFEM_results(totalIncsCounter,time)
endif
if (mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0) then ! at frequency of writing restart information
if (mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0) then
restartWrite = .true. ! set restart parameter for FEsolving
call mech_restartWrite
lastRestartWritten = inc ! QUESTION: first call to CPFEM_general will write?
call CPFEM_restartWrite
endif
endif skipping
enddo incLooping
@ -578,16 +575,9 @@ program DAMASK_spectral
!--------------------------------------------------------------------------------------------------
! report summary of whole calculation
write(6,'(/,a)') ' ###########################################################################'
write(6,'(1x,'//IO_intOut(convergedCounter)//',a,'//IO_intOut(notConvergedCounter + convergedCounter)//',a,f5.1,a)') &
convergedCounter, ' out of ', &
notConvergedCounter + convergedCounter, ' (', &
real(convergedCounter, pReal)/&
real(notConvergedCounter + convergedCounter,pReal)*100.0_pReal, ' %) increments converged!'
flush(6)
call MPI_file_close(fileUnit,ierr)
close(statUnit)
if (notConvergedCounter > 0) call quit(2) ! error if some are not converged
call quit(0) ! no complains ;)
end program DAMASK_spectral

View File

@ -12,7 +12,7 @@ module grid_damage_spectral
use prec
use spectral_utilities
use mesh
use mesh_grid
use damage_nonlocal
use numerics
use damage_nonlocal
@ -203,8 +203,9 @@ end function grid_damage_spectral_solution
!--------------------------------------------------------------------------------------------------
!> @brief spectral damage forwarding routine
!--------------------------------------------------------------------------------------------------
subroutine grid_damage_spectral_forward
subroutine grid_damage_spectral_forward(cutBack)
logical, intent(in) :: cutBack
integer :: i, j, k, cell
DM :: dm_local
PetscScalar, dimension(:,:,:), pointer :: x_scal

Some files were not shown because too many files have changed in this diff