Merge branch 'polishing' into 'development'

Various improvements. See merge request damask/DAMASK!538

Commit 51d220dce0
@@ -5,7 +5,6 @@ stages:
  - compile
  - fortran
  - performance
  - deploy
  - finalize

@@ -212,14 +211,6 @@ grid_runtime:
    - if [ ${CI_COMMIT_BRANCH} == development ]; then git commit -am ${CI_PIPELINE_ID}_${CI_COMMIT_SHA}; git push; fi

###################################################################################################
source_distribution:
  stage: deploy
  script:
    - cd $(mktemp -d)
    - ${CI_PROJECT_DIR}/PRIVATE/releasing/tar.xz/create.sh ${CI_PROJECT_DIR} ${CI_COMMIT_SHA}

###################################################################################################
update_revision:
  stage: finalize
PRIVATE
@@ -1 +1 @@
Subproject commit 68111b8fc3fb45f77d7471ae0f57961b4c77641d
Subproject commit 80152236ef4259e97e10838fdd3bce1dc8b50105
@@ -1,7 +1,7 @@
###################################################################################################
# GNU Compiler
###################################################################################################
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 8.0)
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 9.0)
  message (FATAL_ERROR "GCC Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
endif ()
@@ -102,12 +102,11 @@ set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=pure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wsuggest-attribute=noreturn")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wconversion-extra")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wimplicit-procedure")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wno-unused-parameter")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -Wunused-parameter")
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffpe-summary=all")
# print summary of floating point exceptions (invalid,zero,overflow,underflow,inexact,denormal)

# Additional options
# -Warray-temporaries: warnings because we have many temporary arrays (performance issue?)
# -Wimplicit-interface: no interfaces for lapack/MPI routines
# -Wunsafe-loop-optimizations: warn if the loop cannot be optimized due to nontrivial assumptions
@ -27,6 +27,276 @@ lattice_symmetries: Dict[CrystalLattice, CrystalFamily] = {
|
|||
'cF': 'cubic',
|
||||
}
|
||||
|
||||
orientation_relationships: Dict[str, Dict[CrystalLattice,np.ndarray]] = {
|
||||
'KS': {
|
||||
'cF': np.array([
|
||||
[[-1, 0, 1],[ 1, 1, 1]],
|
||||
[[-1, 0, 1],[ 1, 1, 1]],
|
||||
[[ 0, 1,-1],[ 1, 1, 1]],
|
||||
[[ 0, 1,-1],[ 1, 1, 1]],
|
||||
[[ 1,-1, 0],[ 1, 1, 1]],
|
||||
[[ 1,-1, 0],[ 1, 1, 1]],
|
||||
[[ 1, 0,-1],[ 1,-1, 1]],
|
||||
[[ 1, 0,-1],[ 1,-1, 1]],
|
||||
[[-1,-1, 0],[ 1,-1, 1]],
|
||||
[[-1,-1, 0],[ 1,-1, 1]],
|
||||
[[ 0, 1, 1],[ 1,-1, 1]],
|
||||
[[ 0, 1, 1],[ 1,-1, 1]],
|
||||
[[ 0,-1, 1],[-1, 1, 1]],
|
||||
[[ 0,-1, 1],[-1, 1, 1]],
|
||||
[[-1, 0,-1],[-1, 1, 1]],
|
||||
[[-1, 0,-1],[-1, 1, 1]],
|
||||
[[ 1, 1, 0],[-1, 1, 1]],
|
||||
[[ 1, 1, 0],[-1, 1, 1]],
|
||||
[[-1, 1, 0],[ 1, 1,-1]],
|
||||
[[-1, 1, 0],[ 1, 1,-1]],
|
||||
[[ 0,-1,-1],[ 1, 1,-1]],
|
||||
[[ 0,-1,-1],[ 1, 1,-1]],
|
||||
[[ 1, 0, 1],[ 1, 1,-1]],
|
||||
[[ 1, 0, 1],[ 1, 1,-1]],
|
||||
],dtype=float),
|
||||
'cI': np.array([
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
[[-1,-1, 1],[ 0, 1, 1]],
|
||||
[[-1, 1,-1],[ 0, 1, 1]],
|
||||
],dtype=float),
|
||||
},
|
||||
'GT': {
|
||||
'cF': np.array([
|
||||
[[ -5,-12, 17],[ 1, 1, 1]],
|
||||
[[ 17, -5,-12],[ 1, 1, 1]],
|
||||
[[-12, 17, -5],[ 1, 1, 1]],
|
||||
[[ 5, 12, 17],[ -1, -1, 1]],
|
||||
[[-17, 5,-12],[ -1, -1, 1]],
|
||||
[[ 12,-17, -5],[ -1, -1, 1]],
|
||||
[[ -5, 12,-17],[ -1, 1, 1]],
|
||||
[[ 17, 5, 12],[ -1, 1, 1]],
|
||||
[[-12,-17, 5],[ -1, 1, 1]],
|
||||
[[ 5,-12,-17],[ 1, -1, 1]],
|
||||
[[-17, -5, 12],[ 1, -1, 1]],
|
||||
[[ 12, 17, 5],[ 1, -1, 1]],
|
||||
[[ -5, 17,-12],[ 1, 1, 1]],
|
||||
[[-12, -5, 17],[ 1, 1, 1]],
|
||||
[[ 17,-12, -5],[ 1, 1, 1]],
|
||||
[[ 5,-17,-12],[ -1, -1, 1]],
|
||||
[[ 12, 5, 17],[ -1, -1, 1]],
|
||||
[[-17, 12, -5],[ -1, -1, 1]],
|
||||
[[ -5,-17, 12],[ -1, 1, 1]],
|
||||
[[-12, 5,-17],[ -1, 1, 1]],
|
||||
[[ 17, 12, 5],[ -1, 1, 1]],
|
||||
[[ 5, 17, 12],[ 1, -1, 1]],
|
||||
[[ 12, -5,-17],[ 1, -1, 1]],
|
||||
[[-17,-12, 5],[ 1, -1, 1]],
|
||||
],dtype=float),
|
||||
'cI': np.array([
|
||||
[[-17, -7, 17],[ 1, 0, 1]],
|
||||
[[ 17,-17, -7],[ 1, 1, 0]],
|
||||
[[ -7, 17,-17],[ 0, 1, 1]],
|
||||
[[ 17, 7, 17],[ -1, 0, 1]],
|
||||
[[-17, 17, -7],[ -1, -1, 0]],
|
||||
[[ 7,-17,-17],[ 0, -1, 1]],
|
||||
[[-17, 7,-17],[ -1, 0, 1]],
|
||||
[[ 17, 17, 7],[ -1, 1, 0]],
|
||||
[[ -7,-17, 17],[ 0, 1, 1]],
|
||||
[[ 17, -7,-17],[ 1, 0, 1]],
|
||||
[[-17,-17, 7],[ 1, -1, 0]],
|
||||
[[ 7, 17, 17],[ 0, -1, 1]],
|
||||
[[-17, 17, -7],[ 1, 1, 0]],
|
||||
[[ -7,-17, 17],[ 0, 1, 1]],
|
||||
[[ 17, -7,-17],[ 1, 0, 1]],
|
||||
[[ 17,-17, -7],[ -1, -1, 0]],
|
||||
[[ 7, 17, 17],[ 0, -1, 1]],
|
||||
[[-17, 7,-17],[ -1, 0, 1]],
|
||||
[[-17,-17, 7],[ -1, 1, 0]],
|
||||
[[ -7, 17,-17],[ 0, 1, 1]],
|
||||
[[ 17, 7, 17],[ -1, 0, 1]],
|
||||
[[ 17, 17, 7],[ 1, -1, 0]],
|
||||
[[ 7,-17,-17],[ 0, -1, 1]],
|
||||
[[-17, -7, 17],[ 1, 0, 1]],
|
||||
],dtype=float),
|
||||
},
|
||||
'GT_prime': {
|
||||
'cF' : np.array([
|
||||
[[ 0, 1, -1],[ 7, 17, 17]],
|
||||
[[ -1, 0, 1],[ 17, 7, 17]],
|
||||
[[ 1, -1, 0],[ 17, 17, 7]],
|
||||
[[ 0, -1, -1],[ -7,-17, 17]],
|
||||
[[ 1, 0, 1],[-17, -7, 17]],
|
||||
[[ 1, -1, 0],[-17,-17, 7]],
|
||||
[[ 0, 1, -1],[ 7,-17,-17]],
|
||||
[[ 1, 0, 1],[ 17, -7,-17]],
|
||||
[[ -1, -1, 0],[ 17,-17, -7]],
|
||||
[[ 0, -1, -1],[ -7, 17,-17]],
|
||||
[[ -1, 0, 1],[-17, 7,-17]],
|
||||
[[ -1, -1, 0],[-17, 17, -7]],
|
||||
[[ 0, -1, 1],[ 7, 17, 17]],
|
||||
[[ 1, 0, -1],[ 17, 7, 17]],
|
||||
[[ -1, 1, 0],[ 17, 17, 7]],
|
||||
[[ 0, 1, 1],[ -7,-17, 17]],
|
||||
[[ -1, 0, -1],[-17, -7, 17]],
|
||||
[[ -1, 1, 0],[-17,-17, 7]],
|
||||
[[ 0, -1, 1],[ 7,-17,-17]],
|
||||
[[ -1, 0, -1],[ 17, -7,-17]],
|
||||
[[ 1, 1, 0],[ 17,-17, -7]],
|
||||
[[ 0, 1, 1],[ -7, 17,-17]],
|
||||
[[ 1, 0, -1],[-17, 7,-17]],
|
||||
[[ 1, 1, 0],[-17, 17, -7]],
|
||||
],dtype=float),
|
||||
'cI' : np.array([
|
||||
[[ 1, 1, -1],[ 12, 5, 17]],
|
||||
[[ -1, 1, 1],[ 17, 12, 5]],
|
||||
[[ 1, -1, 1],[ 5, 17, 12]],
|
||||
[[ -1, -1, -1],[-12, -5, 17]],
|
||||
[[ 1, -1, 1],[-17,-12, 5]],
|
||||
[[ 1, -1, -1],[ -5,-17, 12]],
|
||||
[[ -1, 1, -1],[ 12, -5,-17]],
|
||||
[[ 1, 1, 1],[ 17,-12, -5]],
|
||||
[[ -1, -1, 1],[ 5,-17,-12]],
|
||||
[[ 1, -1, -1],[-12, 5,-17]],
|
||||
[[ -1, -1, 1],[-17, 12, -5]],
|
||||
[[ -1, -1, -1],[ -5, 17,-12]],
|
||||
[[ 1, -1, 1],[ 12, 17, 5]],
|
||||
[[ 1, 1, -1],[ 5, 12, 17]],
|
||||
[[ -1, 1, 1],[ 17, 5, 12]],
|
||||
[[ -1, 1, 1],[-12,-17, 5]],
|
||||
[[ -1, -1, -1],[ -5,-12, 17]],
|
||||
[[ -1, 1, -1],[-17, -5, 12]],
|
||||
[[ -1, -1, 1],[ 12,-17, -5]],
|
||||
[[ -1, 1, -1],[ 5,-12,-17]],
|
||||
[[ 1, 1, 1],[ 17, -5,-12]],
|
||||
[[ 1, 1, 1],[-12, 17, -5]],
|
||||
[[ 1, -1, -1],[ -5, 12,-17]],
|
||||
[[ 1, 1, -1],[-17, 5,-12]],
|
||||
],dtype=float),
|
||||
},
|
||||
'NW': {
|
||||
'cF' : np.array([
|
||||
[[ 2, -1, -1],[ 1, 1, 1]],
|
||||
[[ -1, 2, -1],[ 1, 1, 1]],
|
||||
[[ -1, -1, 2],[ 1, 1, 1]],
|
||||
[[ -2, -1, -1],[ -1, 1, 1]],
|
||||
[[ 1, 2, -1],[ -1, 1, 1]],
|
||||
[[ 1, -1, 2],[ -1, 1, 1]],
|
||||
[[ 2, 1, -1],[ 1, -1, 1]],
|
||||
[[ -1, -2, -1],[ 1, -1, 1]],
|
||||
[[ -1, 1, 2],[ 1, -1, 1]],
|
||||
[[ 2, -1, 1],[ -1, -1, 1]],
|
||||
[[ -1, 2, 1],[ -1, -1, 1]],
|
||||
[[ -1, -1, -2],[ -1, -1, 1]],
|
||||
],dtype=float),
|
||||
'cI' : np.array([
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
[[ 0, -1, 1],[ 0, 1, 1]],
|
||||
],dtype=float),
|
||||
},
|
||||
'Pitsch': {
|
||||
'cF' : np.array([
|
||||
[[ 1, 0, 1],[ 0, 1, 0]],
|
||||
[[ 1, 1, 0],[ 0, 0, 1]],
|
||||
[[ 0, 1, 1],[ 1, 0, 0]],
|
||||
[[ 0, 1, -1],[ 1, 0, 0]],
|
||||
[[ -1, 0, 1],[ 0, 1, 0]],
|
||||
[[ 1, -1, 0],[ 0, 0, 1]],
|
||||
[[ 1, 0, -1],[ 0, 1, 0]],
|
||||
[[ -1, 1, 0],[ 0, 0, 1]],
|
||||
[[ 0, -1, 1],[ 1, 0, 0]],
|
||||
[[ 0, 1, 1],[ 1, 0, 0]],
|
||||
[[ 1, 0, 1],[ 0, 1, 0]],
|
||||
[[ 1, 1, 0],[ 0, 0, 1]],
|
||||
],dtype=float),
|
||||
'cI' : np.array([
|
||||
[[ 1, -1, 1],[ -1, 0, 1]],
|
||||
[[ 1, 1, -1],[ 1, -1, 0]],
|
||||
[[ -1, 1, 1],[ 0, 1, -1]],
|
||||
[[ -1, 1, -1],[ 0, -1, -1]],
|
||||
[[ -1, -1, 1],[ -1, 0, -1]],
|
||||
[[ 1, -1, -1],[ -1, -1, 0]],
|
||||
[[ 1, -1, -1],[ -1, 0, -1]],
|
||||
[[ -1, 1, -1],[ -1, -1, 0]],
|
||||
[[ -1, -1, 1],[ 0, -1, -1]],
|
||||
[[ -1, 1, 1],[ 0, -1, 1]],
|
||||
[[ 1, -1, 1],[ 1, 0, -1]],
|
||||
[[ 1, 1, -1],[ -1, 1, 0]],
|
||||
],dtype=float),
|
||||
},
|
||||
'Bain': {
|
||||
'cF' : np.array([
|
||||
[[ 0, 1, 0],[ 1, 0, 0]],
|
||||
[[ 0, 0, 1],[ 0, 1, 0]],
|
||||
[[ 1, 0, 0],[ 0, 0, 1]],
|
||||
],dtype=float),
|
||||
'cI' : np.array([
|
||||
[[ 0, 1, 1],[ 1, 0, 0]],
|
||||
[[ 1, 0, 1],[ 0, 1, 0]],
|
||||
[[ 1, 1, 0],[ 0, 0, 1]],
|
||||
],dtype=float),
|
||||
},
|
||||
'Burgers' : {
|
||||
'cI' : np.array([
|
||||
[[ -1, 1, 1],[ 1, 1, 0]],
|
||||
[[ -1, 1, -1],[ 1, 1, 0]],
|
||||
[[ 1, 1, 1],[ 1, -1, 0]],
|
||||
[[ 1, 1, -1],[ 1, -1, 0]],
|
||||
|
||||
[[ 1, 1, -1],[ 1, 0, 1]],
|
||||
[[ -1, 1, 1],[ 1, 0, 1]],
|
||||
[[ 1, 1, 1],[ -1, 0, 1]],
|
||||
[[ 1, -1, 1],[ -1, 0, 1]],
|
||||
|
||||
[[ -1, 1, -1],[ 0, 1, 1]],
|
||||
[[ 1, 1, -1],[ 0, 1, 1]],
|
||||
[[ -1, 1, 1],[ 0, -1, 1]],
|
||||
[[ 1, 1, 1],[ 0, -1, 1]],
|
||||
],dtype=float),
|
||||
'hP' : np.array([
|
||||
[[ -1, 2, -1, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, -1, 2, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, 2, -1, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, -1, 2, 0],[ 0, 0, 0, 1]],
|
||||
|
||||
[[ -1, 2, -1, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, -1, 2, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, 2, -1, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, -1, 2, 0],[ 0, 0, 0, 1]],
|
||||
|
||||
[[ -1, 2, -1, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, -1, 2, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, 2, -1, 0],[ 0, 0, 0, 1]],
|
||||
[[ -1, -1, 2, 0],[ 0, 0, 0, 1]],
|
||||
],dtype=float),
|
||||
},
|
||||
}
|
||||
|
||||
class Crystal():
|
||||
"""
|
||||
|
@@ -200,6 +470,12 @@ class Crystal():
        return _immutable[self.family]


    @property
    def orientation_relationships(self):
        """Return labels of orientation relationships."""
        return [k for k,v in orientation_relationships.items() if self.lattice in v]


    @property
    def standard_triangle(self) -> Union[Dict[str, np.ndarray], None]:
        """
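A minimal usage sketch of the new property (assuming the damask Python package; the exact labels depend on the lattice):

    import damask
    c = damask.Crystal(lattice='cF')
    print(c.orientation_relationships)   # e.g. ['KS', 'GT', 'GT_prime', 'NW', 'Pitsch', 'Bain']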
@@ -727,12 +1003,10 @@ class Crystal():
            }
        }
        master = _kinematics[self.lattice][mode]
        if self.lattice == 'hP':
            return {'direction':[util.Bravais_to_Miller(uvtw=m[:,0:4]) for m in master],
                    'plane':    [util.Bravais_to_Miller(hkil=m[:,4:8]) for m in master]}
        else:
            return {'direction':[m[:,0:3] for m in master],
                    'plane':    [m[:,3:6] for m in master]}
        return {'direction':[util.Bravais_to_Miller(uvtw=m[:,0:4]) if self.lattice == 'hP'
                             else m[:,0:3] for m in master],
                'plane':    [util.Bravais_to_Miller(hkil=m[:,4:8]) if self.lattice == 'hP'
                             else m[:,3:6] for m in master]}


    def relation_operations(self,
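A brief usage sketch of the refactored kinematics lookup (the 'slip' mode name is an assumption not shown in this hunk; arrays depend on the lattice):

    c = damask.Crystal(lattice='cF')
    k = c.kinematics('slip')
    k['direction'][0], k['plane'][0]   # slip directions and planes of the first family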
@@ -768,280 +1042,10 @@
        https://doi.org/10.1016/j.actamat.2004.11.021

        """
        [removed here: a local _orientation_relationships dictionary identical to the module-level orientation_relationships dictionary shown above]
        orientation_relationships = {k:v for k,v in _orientation_relationships.items() if self.lattice in v}
        if model not in orientation_relationships:
        my_relationships = {k:v for k,v in orientation_relationships.items() if self.lattice in v}
        if model not in my_relationships:
            raise KeyError(f'unknown orientation relationship "{model}"')
        r = orientation_relationships[model]
        r = my_relationships[model]

        sl = self.lattice
        ol = (set(r)-{sl}).pop()
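A short usage sketch of relation_operations (lattice and model chosen for illustration):

    import damask
    lattice, rotations = damask.Crystal(lattice='cF').relation_operations('KS')
    lattice      # the partner lattice of the relationship, here 'cI' for a cF parent
    rotations    # rotation array with one entry per variant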
@@ -18,7 +18,7 @@ from . import grid_filters
from . import Rotation
from . import Table
from . import Colormap
from ._typehints import FloatSequence, IntSequence
from ._typehints import FloatSequence, IntSequence, IntCollection, NumpyRngSeed

class Grid:
    """
@@ -92,10 +92,10 @@ class Grid:
        """
        if not isinstance(other, Grid):
            return NotImplemented
        return bool(np.allclose(other.size,self.size)
                    and np.allclose(other.origin,self.origin)
                    and np.all(other.cells == self.cells)
                    and np.all(other.material == self.material))
        return bool( np.allclose(other.size,self.size)
                 and np.allclose(other.origin,self.origin)
                 and np.all(other.cells == self.cells)
                 and np.all(other.material == self.material))


    @property
@@ -191,8 +191,8 @@ class Grid:
        ic = {label:v.get(label).reshape(cells,order='F') for label in set(v.labels['Cell Data']) - {'material'}}

        return Grid(material = v.get('material').reshape(cells,order='F'),
                    size = bbox[1] - bbox[0],
                    origin = bbox[0],
                    size   = bbox[1] - bbox[0],
                    origin = bbox[0],
                    comments = comments,
                    initial_conditions = ic)
@@ -247,7 +247,7 @@ class Grid:
            else:
                comments.append(line.strip())

        material = np.empty(int(cells.prod()))            # initialize as flat array
        material = np.empty(cells.prod())                 # initialize as flat array
        i = 0
        for line in content[header_length:]:
            if len(items := line.split('#')[0].split()) == 3:
@@ -265,7 +265,7 @@ class Grid:
            raise TypeError(f'mismatch between {cells.prod()} expected entries and {i} found')

        if not np.any(np.mod(material,1) != 0.0):         # no float present
            material = material.astype('int') - (1 if material.min() > 0 else 0)
            material = material.astype(int) - (1 if material.min() > 0 else 0)

        return Grid(material.reshape(cells,order='F'),size,origin,comments)
@@ -285,6 +285,20 @@ class Grid:
        loaded : damask.Grid
            Grid-based geometry from file.

        Examples
        --------
        Read a periodic polycrystal generated with Neper.

        >>> import damask
        >>> N_grains = 20
        >>> cells = (32,32,32)
        >>> damask.util.run(f'neper -T -n {N_grains} -tesrsize {cells[0]}:{cells[1]}:{cells[2]} -periodicity "all" -format "vtk"')
        >>> damask.Grid.load_Neper(f'n{N_grains}-id1.vtk')
        cells:  32 × 32 × 32
        size:   1.0 × 1.0 × 1.0 m³
        origin: 0.0 0.0 0.0 m
        # materials: 20

        """
        v = VTK.load(fname,'ImageData')
        cells = np.array(v.vtk_data.GetDimensions())-1
@@ -913,7 +927,7 @@ class Grid:
                                                           cells/self.cells,
                                                           output=self.material.dtype,
                                                           order=0,
                                                           mode=('wrap' if periodic else 'nearest'),
                                                           mode='wrap' if periodic else 'nearest',
                                                           prefilter=False
                                                          ),
                    size = self.size,
@@ -923,42 +937,64 @@ class Grid:


    def clean(self,
              stencil: int = 3,
              selection: IntSequence = None,
              periodic: bool = True) -> 'Grid':
              distance: float = np.sqrt(3),
              selection: IntCollection = None,
              invert_selection: bool = False,
              periodic: bool = True,
              rng_seed: NumpyRngSeed = None) -> 'Grid':
        """
        Smooth grid by selecting most frequent material index within given stencil at each location.
        Smooth grid by selecting most frequent material ID within given stencil at each location.

        Parameters
        ----------
        stencil : int, optional
            Size of smoothing stencil.
        selection : sequence of int, optional
            Field values that can be altered. Defaults to all.
        distance : float, optional
            Voxel distance checked for presence of other materials.
            Defaults to sqrt(3).
        selection : int or collection of int, optional
            Material IDs to consider.
        invert_selection : bool, optional
            Consider all material IDs except those in selection. Defaults to False.
        periodic : bool, optional
            Assume grid to be periodic. Defaults to True.
        rng_seed : {None, int, array_like[ints], SeedSequence, BitGenerator, Generator}, optional
            A seed to initialize the BitGenerator. Defaults to None.
            If None, then fresh, unpredictable entropy will be pulled from the OS.

        Returns
        -------
        updated : damask.Grid
            Updated grid-based geometry.

        Notes
        -----
        If multiple material IDs are most frequent within a stencil, a random choice is taken.

        """
        def mostFrequent(arr: np.ndarray, selection = None):
            me = arr[arr.size//2]
            if selection is None or me in selection:
                unique, inverse = np.unique(arr, return_inverse=True)
                return unique[np.argmax(np.bincount(inverse))]
        def most_frequent(stencil: np.ndarray,
                          selection: set,
                          rng):
            me = stencil[stencil.size//2]
            if not selection or me in selection:
                unique, counts = np.unique(stencil,return_counts=True)
                return rng.choice(unique[counts==np.max(counts)])
            else:
                return me

        return Grid(material = ndimage.filters.generic_filter(
                                                              self.material,
                                                              mostFrequent,
                                                              size=(stencil if selection is None else stencil//2*2+1,)*3,
                                                              mode=('wrap' if periodic else 'nearest'),
                                                              extra_keywords=dict(selection=selection),
                                                             ).astype(self.material.dtype),
        rng = np.random.default_rng(rng_seed)
        d = np.floor(distance).astype(int)
        ext = np.linspace(-d,d,1+2*d,dtype=float),
        xx,yy,zz = np.meshgrid(ext,ext,ext)
        footprint = xx**2+yy**2+zz**2 <= distance**2+distance*1e-8
        selection_ = set(self.material.flatten()) - set(util.aslist(selection)) if invert_selection else \
                     set(util.aslist(selection))
        material = ndimage.filters.generic_filter(
                                                  self.material,
                                                  most_frequent,
                                                  footprint=footprint,
                                                  mode='wrap' if periodic else 'nearest',
                                                  extra_keywords=dict(selection=selection_,rng=rng),
                                                 ).astype(self.material.dtype)
        return Grid(material = material,
                    size = self.size,
                    origin = self.origin,
                    comments = self.comments+[util.execution_stamp('Grid','clean')],
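A minimal usage sketch of the new clean() signature (grid contents and IDs are illustrative):

    import numpy as np
    import damask
    g = damask.Grid(np.random.default_rng(0).integers(0,5,(32,32,32)), np.ones(3)*1e-4)
    g_smooth = g.clean(distance=np.sqrt(3), rng_seed=20230101)   # reproducible tie-breaking
    g_only_0 = g.clean(selection=0)                              # only voxels with material ID 0 are altered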
@@ -988,14 +1024,15 @@ class Grid:
               R: Rotation,
               fill: int = None) -> 'Grid':
        """
        Rotate grid (pad if required).
        Rotate grid (and pad if required).

        Parameters
        ----------
        R : damask.Rotation
            Rotation to apply to the grid.
        fill : int, optional
            Material index to fill the corners. Defaults to material.max() + 1.
            Material ID to fill enlarged bounding box.
            Defaults to material.max() + 1.

        Returns
        -------
@@ -1035,9 +1072,11 @@ class Grid:
        cells : sequence of int, len (3), optional
            Number of cells x,y,z direction.
        offset : sequence of int, len (3), optional
            Offset (measured in cells) from old to new grid [0,0,0].
            Offset (measured in cells) from old to new grid.
            Defaults to [0,0,0].
        fill : int, optional
            Material index to fill the background. Defaults to material.max() + 1.
            Material ID to fill the background.
            Defaults to material.max() + 1.

        Returns
        -------
@@ -1046,15 +1085,15 @@ class Grid:

        Examples
        --------
        Remove 1/2 of the microstructure in z-direction.
        Remove lower 1/2 of the microstructure in z-direction.

        >>> import numpy as np
        >>> import damask
        >>> g = damask.Grid(np.zeros([32]*3,int),np.ones(3)*1e-4)
        >>> g.canvas([32,32,16])
        >>> g.canvas([32,32,16],[0,0,16])
        cells : 32 x 32 x 16
        size : 0.0001 x 0.0001 x 5e-05 m³
        origin: 0.0 0.0 0.0 m
        origin: 0.0 0.0 5e-05 m
        # materials: 1

        """
@@ -1078,16 +1117,16 @@ class Grid:


    def substitute(self,
                   from_material: IntSequence,
                   to_material: IntSequence) -> 'Grid':
                   from_material: Union[int,IntSequence],
                   to_material: Union[int,IntSequence]) -> 'Grid':
        """
        Substitute material indices.

        Parameters
        ----------
        from_material : sequence of int
        from_material : int or sequence of int
            Material indices to be substituted.
        to_material : sequence of int
        to_material : int or sequence of int
            New material indices.

        Returns

@@ -1097,7 +1136,8 @@ class Grid:

        """
        material = self.material.copy()
        for f,t in zip(from_material,to_material):        # ToDo Python 3.10 has strict mode for zip
        for f,t in zip(from_material if isinstance(from_material,(Sequence,np.ndarray)) else [from_material],
                       to_material if isinstance(to_material,(Sequence,np.ndarray)) else [to_material]):   # ToDo Python 3.10 has strict mode for zip
            material[self.material==f] = t

        return Grid(material = material,
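A small usage sketch of the relaxed substitute() signature (IDs chosen for illustration, reusing the grid g from the canvas example above):

    g.substitute(0,10)           # a single ID pair is now accepted
    g.substitute([1,2],[3,4])    # sequences keep working as before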
@ -1130,28 +1170,31 @@ class Grid:
|
|||
|
||||
|
||||
def vicinity_offset(self,
|
||||
vicinity: int = 1,
|
||||
distance: float = np.sqrt(3),
|
||||
offset: int = None,
|
||||
trigger: IntSequence = [],
|
||||
selection: IntCollection = None,
|
||||
invert_selection: bool = False,
|
||||
periodic: bool = True) -> 'Grid':
|
||||
"""
|
||||
Offset material index of points in the vicinity of xxx.
|
||||
Offset material ID of points in the vicinity of selected (or just other) material IDs.
|
||||
|
||||
Different from themselves (or listed as triggers) within a given (cubic) vicinity,
|
||||
i.e. within the region close to a grain/phase boundary.
|
||||
ToDo: use include/exclude as in seeds.from_grid
|
||||
Trigger points are variations in material ID, i.e. grain/phase
|
||||
boundaries or explicitly given material IDs.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
vicinity : int, optional
|
||||
distance : float, optional
|
||||
Voxel distance checked for presence of other materials.
|
||||
Defaults to 1.
|
||||
Defaults to sqrt(3).
|
||||
offset : int, optional
|
||||
Offset (positive or negative) to tag material indices,
|
||||
defaults to material.max()+1.
|
||||
trigger : sequence of int, optional
|
||||
List of material indices that trigger a change.
|
||||
Defaults to [], meaning that any different neighbor triggers a change.
|
||||
Offset (positive or negative) to tag material IDs.
|
||||
Defaults to material.max()+1.
|
||||
selection : int or collection of int, optional
|
||||
Material IDs that trigger an offset.
|
||||
Defaults to any other than own material ID.
|
||||
invert_selection : bool, optional
|
||||
Consider all material IDs except those in selection.
|
||||
Defaults to False.
|
||||
periodic : bool, optional
|
||||
Assume grid to be periodic. Defaults to True.
|
||||
|
||||
|
@ -1161,17 +1204,24 @@ class Grid:
|
|||
Updated grid-based geometry.
|
||||
|
||||
"""
|
||||
def tainted_neighborhood(stencil: np.ndarray, trigger):
|
||||
me = stencil[stencil.shape[0]//2]
|
||||
return np.any(stencil != me if len(trigger) == 0 else
|
||||
np.in1d(stencil,np.array(list(set(trigger) - {me}))))
|
||||
def tainted_neighborhood(stencil: np.ndarray, selection: set):
|
||||
me = stencil[stencil.size//2]
|
||||
return np.any(stencil != me if not selection else
|
||||
np.in1d(stencil,np.array(list(selection - {me}))))
|
||||
|
||||
d = np.floor(distance).astype(int)
|
||||
ext = np.linspace(-d,d,1+2*d,dtype=float),
|
||||
xx,yy,zz = np.meshgrid(ext,ext,ext)
|
||||
footprint = xx**2+yy**2+zz**2 <= distance**2+distance*1e-8
|
||||
offset_ = np.nanmax(self.material)+1 if offset is None else offset
|
||||
selection_ = set(self.material.flatten()) - set(util.aslist(selection)) if invert_selection else \
|
||||
set(util.aslist(selection))
|
||||
mask = ndimage.filters.generic_filter(self.material,
|
||||
tainted_neighborhood,
|
||||
size=1+2*vicinity,
|
||||
footprint=footprint,
|
||||
mode='wrap' if periodic else 'nearest',
|
||||
extra_keywords={'trigger':trigger})
|
||||
extra_keywords=dict(selection=selection_),
|
||||
)
|
||||
|
||||
return Grid(material = np.where(mask, self.material + offset_,self.material),
|
||||
size = self.size,
|
||||
|
|
|
@ -824,7 +824,7 @@ class Orientation(Rotation,Crystal):
|
|||
|
||||
Parameters
|
||||
----------
|
||||
N_slip|N_twin : '*' or iterable of int
|
||||
N_slip|N_twin : '*' or sequence of int
|
||||
Number of deformation systems per family of the deformation system.
|
||||
Use '*' to select all.
|
||||
|
||||
|
@ -870,10 +870,36 @@ class Orientation(Rotation,Crystal):
|
|||
def related(self: MyType,
|
||||
model: str) -> MyType:
|
||||
"""
|
||||
Orientations derived from the given relationship.
|
||||
All orientations related to self by given relationship model.
|
||||
|
||||
One dimension (length according to number of related orientations)
|
||||
is added to the left of the Rotation array.
|
||||
Parameters
|
||||
----------
|
||||
model : str
|
||||
Orientation relationship model selected from self.orientation_relationships.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Orientations related to self following the selected
|
||||
model for the orientation relationship.
|
||||
|
||||
Examples
|
||||
--------
|
||||
Face-centered cubic orientations following from a
|
||||
body-centered cubic crystal in "Cube" orientation according
|
||||
to the Bain orientation relationship (cI -> cF).
|
||||
|
||||
>>> import numpy as np
|
||||
>>> import damask
|
||||
>>> np.set_printoptions(3,suppress=True,floatmode='fixed')
|
||||
>>> damask.Orientation(lattice='cI').related('Bain')
|
||||
Crystal family: cubic
|
||||
Bravais lattice: cF
|
||||
a=1 m, b=1 m, c=1 m
|
||||
α=90°, β=90°, γ=90°
|
||||
Quaternions of shape (3,)
|
||||
[[0.924 0.383 0.000 0.000]
|
||||
[0.924 0.000 0.383 0.000]
|
||||
[0.924 0.000 0.000 0.383]]
|
||||
|
||||
"""
|
||||
lattice,o = self.relation_operations(model)
|
||||
|
|
|
@ -4,7 +4,6 @@ import fnmatch
|
|||
import os
|
||||
import copy
|
||||
import datetime
|
||||
import warnings
|
||||
import xml.etree.ElementTree as ET # noqa
|
||||
import xml.dom.minidom
|
||||
from pathlib import Path
|
||||
|
@ -28,21 +27,6 @@ h5py3 = h5py.__version__[0] == '3'
|
|||
|
||||
chunk_size = 1024**2//8 # for compression in HDF5
|
||||
|
||||
def _view_transition(what,datasets,increments,times,phases,homogenizations,fields):
|
||||
if (datasets is not None and what is None) or (what is not None and datasets is None):
|
||||
raise ValueError('"what" and "datasets" need to be used as a pair')
|
||||
if datasets is not None or what is not None:
|
||||
warnings.warn('arguments "what" and "datasets" will be removed in DAMASK v3.0.0-alpha7', DeprecationWarning,2)
|
||||
return what,datasets
|
||||
if sum(1 for _ in filter(None.__ne__, [increments,times,phases,homogenizations,fields])) > 1:
|
||||
raise ValueError('only one out of "increments", "times", "phases", "homogenizations", and "fields" can be used')
|
||||
else:
|
||||
if increments is not None: return "increments", increments
|
||||
if times is not None: return "times", times
|
||||
if phases is not None: return "phases", phases
|
||||
if homogenizations is not None: return "homogenizations", homogenizations
|
||||
if fields is not None: return "fields", fields
|
||||
|
||||
def _read(dataset):
|
||||
"""Read a dataset and its metadata into a numpy.ndarray."""
|
||||
metadata = {k:(v.decode() if not h5py3 and type(v) is bytes else v) for k,v in dataset.attrs.items()}
|
||||
|
@ -185,7 +169,13 @@ class Result:
|
|||
return util.srepr([util.deemph(header)] + first + in_between + last)
|
||||
|
||||
|
||||
def _manage_view(self,action,what,datasets):
|
||||
def _manage_view(self,
|
||||
action,
|
||||
increments=None,
|
||||
times=None,
|
||||
phases=None,
|
||||
homogenizations=None,
|
||||
fields=None):
|
||||
"""
|
||||
Manages the visibility of the groups.
|
||||
|
||||
|
@ -193,11 +183,6 @@ class Result:
|
|||
----------
|
||||
action : str
|
||||
Select from 'set', 'add', and 'del'.
|
||||
what : str
|
||||
Attribute to change (must be from self.visible).
|
||||
datasets : (list of) int (for increments), (list of) float (for times), (list of) str, or bool
|
||||
Name of datasets; supports '?' and '*' wildcards.
|
||||
True is equivalent to '*', False is equivalent to [].
|
||||
|
||||
Returns
|
||||
-------
|
||||
|
@ -205,47 +190,52 @@ class Result:
|
|||
Modified or new view on the DADF5 file.
|
||||
|
||||
"""
|
||||
# allow True/False and string arguments
|
||||
if datasets is True:
|
||||
datasets = '*'
|
||||
elif datasets is False or datasets is None:
|
||||
datasets = []
|
||||
choice = list(datasets).copy() if hasattr(datasets,'__iter__') and not isinstance(datasets,str) else \
|
||||
[datasets]
|
||||
|
||||
what_ = what if what.endswith('s') else what+'s'
|
||||
|
||||
if what_ == 'increments':
|
||||
choice = [c if isinstance(c,str) and c.startswith('increment_') else
|
||||
self.increments[c] if isinstance(c,int) and c<0 else
|
||||
f'increment_{c}' for c in choice]
|
||||
elif what_ == 'times':
|
||||
what_ = 'increments'
|
||||
if choice == ['*']:
|
||||
choice = self.increments
|
||||
else:
|
||||
iterator = map(float,choice)
|
||||
choice = []
|
||||
for c in iterator:
|
||||
idx = np.searchsorted(self.times,c)
|
||||
if idx >= len(self.times): continue
|
||||
if np.isclose(c,self.times[idx]):
|
||||
choice.append(self.increments[idx])
|
||||
elif np.isclose(c,self.times[idx+1]):
|
||||
choice.append(self.increments[idx+1])
|
||||
|
||||
valid = _match(choice,getattr(self,what_))
|
||||
existing = set(self.visible[what_])
|
||||
if increments is not None and times is not None:
|
||||
raise ValueError('"increments" and "times" are mutually exclusive')
|
||||
|
||||
dup = self.copy()
|
||||
if action == 'set':
|
||||
dup.visible[what_] = sorted(set(valid), key=util.natural_sort)
|
||||
elif action == 'add':
|
||||
add = existing.union(valid)
|
||||
dup.visible[what_] = sorted(add, key=util.natural_sort)
|
||||
elif action == 'del':
|
||||
diff = existing.difference(valid)
|
||||
dup.visible[what_] = sorted(diff, key=util.natural_sort)
|
||||
for what,datasets in zip(['increments','times','phases','homogenizations','fields'],
|
||||
[ increments, times, phases, homogenizations, fields ]):
|
||||
if datasets is None:
|
||||
continue
|
||||
# allow True/False and string arguments
|
||||
elif datasets is True:
|
||||
datasets = '*'
|
||||
elif datasets is False:
|
||||
datasets = []
|
||||
choice = list(datasets).copy() if hasattr(datasets,'__iter__') and not isinstance(datasets,str) else \
|
||||
[datasets]
|
||||
|
||||
if what == 'increments':
|
||||
choice = [c if isinstance(c,str) and c.startswith('increment_') else
|
||||
self.increments[c] if isinstance(c,int) and c<0 else
|
||||
f'increment_{c}' for c in choice]
|
||||
elif what == 'times':
|
||||
what = 'increments'
|
||||
if choice == ['*']:
|
||||
choice = self.increments
|
||||
else:
|
||||
iterator = map(float,choice)
|
||||
choice = []
|
||||
for c in iterator:
|
||||
idx = np.searchsorted(self.times,c)
|
||||
if idx >= len(self.times): continue
|
||||
if np.isclose(c,self.times[idx]):
|
||||
choice.append(self.increments[idx])
|
||||
elif np.isclose(c,self.times[idx+1]):
|
||||
choice.append(self.increments[idx+1])
|
||||
|
||||
valid = _match(choice,getattr(self,what))
|
||||
existing = set(self.visible[what])
|
||||
|
||||
if action == 'set':
|
||||
dup.visible[what] = sorted(set(valid), key=util.natural_sort)
|
||||
elif action == 'add':
|
||||
add = existing.union(valid)
|
||||
dup.visible[what] = sorted(add, key=util.natural_sort)
|
||||
elif action == 'del':
|
||||
diff = existing.difference(valid)
|
||||
dup.visible[what] = sorted(diff, key=util.natural_sort)
|
||||
|
||||
return dup
|
||||
|
||||
|
@@ -298,13 +288,13 @@ class Result:
        return selected


    def view(self,what=None,datasets=None,*,
             increments=None,
             times=None,
             phases=None,
             homogenizations=None,
             fields=None,
             protected=None):
    def view(self,*,
             increments=None,
             times=None,
             phases=None,
             homogenizations=None,
             fields=None,
             protected=None):
        """
        Set view.
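A short usage sketch of the keyword-only view API (file name and labels are hypothetical):

    import damask
    r = damask.Result('my_job.hdf5')
    r_last = r.view(increments=-1)                       # last increment only
    r_mech = r.view(phases=True,fields=['mechanical'])   # all phases, a single field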
@ -313,11 +303,6 @@ class Result:
|
|||
|
||||
Parameters
|
||||
----------
|
||||
what : {'increments', 'times', 'phases', 'homogenizations', 'fields'}
|
||||
Attribute to change. DEPRECATED.
|
||||
datasets : (list of) int (for increments), (list of) float (for times), (list of) str, or bool
|
||||
Name of datasets; supports '?' and '*' wildcards. DEPRECATED.
|
||||
True is equivalent to '*', False is equivalent to [].
|
||||
increments: (list of) int, (list of) str, or bool, optional.
|
||||
Number(s) of increments to select.
|
||||
times: (list of) float, (list of) str, or bool, optional.
|
||||
|
@ -351,24 +336,16 @@ class Result:
|
|||
>>> r_t10to40 = r.view(times=r.times_in_range(10.0,40.0))
|
||||
|
||||
"""
|
||||
v = _view_transition(what,datasets,increments,times,phases,homogenizations,fields)
|
||||
dup = self._manage_view('set',increments,times,phases,homogenizations,fields)
|
||||
if protected is not None:
|
||||
if v is None:
|
||||
dup = self.copy()
|
||||
else:
|
||||
what_,datasets_ = v
|
||||
dup = self._manage_view('set',what_,datasets_)
|
||||
if not protected:
|
||||
print(util.warn('Warning: Modification of existing datasets allowed!'))
|
||||
dup._protected = protected
|
||||
else:
|
||||
what_,datasets_ = v
|
||||
dup = self._manage_view('set',what_,datasets_)
|
||||
|
||||
return dup
|
||||
|
||||
|
||||
def view_more(self,what=None,datasets=None,*,
|
||||
def view_more(self,*,
|
||||
increments=None,
|
||||
times=None,
|
||||
phases=None,
|
||||
|
@ -382,11 +359,6 @@ class Result:
|
|||
|
||||
Parameters
|
||||
----------
|
||||
what : {'increments', 'times', 'phases', 'homogenizations', 'fields'}
|
||||
Attribute to change. DEPRECATED.
|
||||
datasets : (list of) int (for increments), (list of) float (for times), (list of) str, or bool
|
||||
Name of datasets; supports '?' and '*' wildcards. DEPRECATED.
|
||||
True is equivalent to '*', False is equivalent to [].
|
||||
increments: (list of) int, (list of) str, or bool, optional.
|
||||
Number(s) of increments to select.
|
||||
times: (list of) float, (list of) str, or bool, optional.
|
||||
|
@ -413,11 +385,10 @@ class Result:
|
|||
>>> r_first_and_last = r.first.view_more(increments=-1)
|
||||
|
||||
"""
|
||||
what_, datasets_ = _view_transition(what,datasets,increments,times,phases,homogenizations,fields)
|
||||
return self._manage_view('add',what_,datasets_)
|
||||
return self._manage_view('add',increments,times,phases,homogenizations,fields)
|
||||
|
||||
|
||||
def view_less(self,what=None,datasets=None,*,
|
||||
def view_less(self,*,
|
||||
increments=None,
|
||||
times=None,
|
||||
phases=None,
|
||||
|
@ -431,11 +402,6 @@ class Result:
|
|||
|
||||
Parameters
|
||||
----------
|
||||
what : {'increments', 'times', 'phases', 'homogenizations', 'fields'}
|
||||
Attribute to change. DEPRECATED.
|
||||
datasets : (list of) int (for increments), (list of) float (for times), (list of) str, or bool
|
||||
Name of datasets; supports '?' and '*' wildcards. DEPRECATED.
|
||||
True is equivalent to '*', False is equivalent to [].
|
||||
increments: (list of) int, (list of) str, or bool, optional.
|
||||
Number(s) of increments to select.
|
||||
times: (list of) float, (list of) str, or bool, optional.
|
||||
|
@ -461,8 +427,7 @@ class Result:
|
|||
>>> r_deformed = r_all.view_less(increments=0)
|
||||
|
||||
"""
|
||||
what_, datasets_ = _view_transition(what,datasets,increments,times,phases,homogenizations,fields)
|
||||
return self._manage_view('del',what_,datasets_)
|
||||
return self._manage_view('del',increments,times,phases,homogenizations,fields)
|
||||
|
||||
|
||||
def rename(self,name_src,name_dst):
|
||||
|
@ -1839,9 +1804,9 @@ class Result:
|
|||
d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode()
|
||||
if not Path(name).exists() or overwrite:
|
||||
with open(name,'w') as f_out: f_out.write(obj[0].decode())
|
||||
print(f"Exported {d} to '{name}'.")
|
||||
print(f'Exported {d} to "{name}".')
|
||||
else:
|
||||
print(f"'{name}' exists, {d} not exported.")
|
||||
print(f'"{name}" exists, {d} not exported.')
|
||||
elif type(obj) == h5py.Group:
|
||||
os.makedirs(name, exist_ok=True)
|
||||
|
||||
|
|
|
@@ -1,6 +1,6 @@
"""Functionality for typehints."""

from typing import Sequence, Union, Literal, TextIO
from typing import Sequence, Union, Literal, TextIO, Collection
from pathlib import Path

import numpy as np

@@ -8,6 +8,7 @@ import numpy as np

FloatSequence = Union[np.ndarray,Sequence[float]]
IntSequence = Union[np.ndarray,Sequence[int]]
IntCollection = Union[np.ndarray,Collection[int]]
FileHandle = Union[TextIO, str, Path]
CrystalFamily = Union[None,Literal['triclinic', 'monoclinic', 'orthorhombic', 'tetragonal', 'hexagonal', 'cubic']]
CrystalLattice = Union[None,Literal['aP', 'mP', 'mS', 'oP', 'oS', 'oI', 'oF', 'tP', 'tI', 'hP', 'cP', 'cI', 'cF']]
@@ -6,7 +6,8 @@ from typing import Tuple as _Tuple
from scipy import spatial as _spatial
import numpy as _np

from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence, NumpyRngSeed as _NumpyRngSeed
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence, \
                        NumpyRngSeed as _NumpyRngSeed, IntCollection as _IntCollection
from . import util as _util
from . import grid_filters as _grid_filters

@@ -106,7 +107,7 @@ def from_Poisson_disc(size: _FloatSequence,


def from_grid(grid,
              selection: _IntSequence = None,
              selection: _IntCollection = None,
              invert_selection: bool = False,
              average: bool = False,
              periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:

@@ -117,7 +118,7 @@ def from_grid(grid,
    ----------
    grid : damask.Grid
        Grid from which the material IDs are used as seeds.
    selection : sequence of int, optional
    selection : int or collection of int, optional
        Material IDs to consider.
    invert_selection : bool, optional
        Consider all material IDs except those in selection. Defaults to False.

@@ -134,7 +135,7 @@ def from_grid(grid,
    """
    material = grid.material.reshape((-1,1),order='F')
    mask = _np.full(grid.cells.prod(),True,dtype=bool) if selection is None else \
           _np.isin(material,selection,invert=invert_selection).flatten()
           _np.isin(material,_util.aslist(selection),invert=invert_selection).flatten()
    coords = _grid_filters.coordinates0_point(grid.cells,grid.size).reshape(-1,3,order='F')

    if not average:
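A brief usage sketch of the relaxed selection argument (for a damask.Grid g as constructed above; the ID is illustrative):

    import damask
    coords, IDs = damask.seeds.from_grid(g, selection=1, invert_selection=True)   # seeds for all IDs except 1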
@@ -6,17 +6,18 @@ import os
import subprocess
import shlex
import re
import signal
import fractions
from collections import abc
from functools import reduce
from typing import Callable, Union, Iterable, Sequence, Dict, List, Tuple, Literal, Any
from functools import reduce, partial
from typing import Callable, Union, Iterable, Sequence, Dict, List, Tuple, Literal, Any, Collection
from pathlib import Path

import numpy as np
import h5py

from . import version
from ._typehints import FloatSequence, NumpyRngSeed
from ._typehints import FloatSequence, NumpyRngSeed, IntCollection

# limit visibility
__all__=[
@@ -174,21 +175,35 @@ def run(cmd: str,
        Output of the executed command.

    """
    def pass_signal(sig,_,proc,default):
        proc.send_signal(sig)
        signal.signal(sig,default)
        signal.raise_signal(sig)

    signals = [signal.SIGINT,signal.SIGTERM]

    print(f"running '{cmd}' in '{wd}'")
    process = subprocess.run(shlex.split(cmd),
    process = subprocess.Popen(shlex.split(cmd),
                               stdout = subprocess.PIPE,
                               stderr = subprocess.PIPE,
                               env = os.environ if env is None else env,
                               cwd = wd,
                               encoding = 'utf-8',
                               timeout = timeout)
                               encoding = 'utf-8')
    # ensure that process is terminated (https://stackoverflow.com/questions/22916783)
    sig_states = [signal.signal(sig,partial(pass_signal,proc=process,default=signal.getsignal(sig))) for sig in signals]

    try:
        stdout,stderr = process.communicate(timeout=timeout)
    finally:
        for sig,state in zip(signals,sig_states):
            signal.signal(sig,state)

    if process.returncode != 0:
        print(process.stdout)
        print(process.stderr)
        print(stdout)
        print(stderr)
        raise RuntimeError(f"'{cmd}' failed with returncode {process.returncode}")

    return process.stdout, process.stderr
    return stdout, stderr


execute = run
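A brief usage sketch of run() (command, working directory, and timeout are illustrative):

    import damask
    stdout, stderr = damask.util.run('ls -l', wd='.', timeout=60)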
@@ -722,7 +737,6 @@ def dict_flatten(d: Dict) -> Dict:
    return new


def tail_repack(extended: Union[str, Sequence[str]],
                existing: List[str] = []) -> List[str]:
    """
@@ -753,6 +767,24 @@ tail_repack(extended: Union[str, Sequence[str]],
            list(extended[len(existing):]))


def aslist(arg: Union[IntCollection,int,None]) -> List:
    """
    Transform argument to list.

    Parameters
    ----------
    arg : int or collection of int or None
        Entity to transform into list.

    Returns
    -------
    transformed : list
        Entity transformed into list.

    """
    return [] if arg is None else list(arg) if isinstance(arg,(np.ndarray,Collection)) else [arg]


####################################################################################################
# Classes
####################################################################################################
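A few illustrative calls of the new helper (values chosen for demonstration):

    import numpy as np
    import damask
    damask.util.aslist(None)           # []
    damask.util.aslist(3)              # [3]
    damask.util.aslist((1,2,3))        # [1, 2, 3]
    damask.util.aslist(np.arange(2))   # [0, 1]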
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
|
@ -11,7 +11,7 @@
|
|||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
||||
AQAAAACAAAAABQAAhAAAAA==eF7t0jcOAjEURVGGnHOOQ4b9b5CC8xtLNAwlbk5hPUuWblZ6n+zvV5ZZYZU11tlgky22E+PdPnsccsARx+ww9jPOuWDOFddM/3Pljfvk/sgTz7ww3d/54JPlv4X8VYexj466jP6ix+gvekw7nHDKTx0umXa44ZY7Rn8H5iza4QtoVQaf
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
|
@ -0,0 +1,19 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="40">
|
||||
AQAAAACAAAAABQAAkAAAAA==eF7t070OgkAQRWExKo0GDREMaNjCv/d/QgvPbU5CQw3Nl5nZZZPlUm3+T7W6yK3c4Un9rK/V9/youlN9wzO2ml/xobrHghfM+Xcc0edPWDTP/ie+5AHf+MEv+h5Wl+nvsVff+ZvLcfKUHHtdctegc9RK5zL7Cmb/IJPf5DLvT36Sx7n/MHnMPTh/8QfFJwU8
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
|
@ -0,0 +1,19 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||
AQAAAACAAAAABQAAZwAAAA==eF7t0rcOgmAAhVEgNmyo2AuoWN//BR04EwsJcfzvcvabL47qxcFOJg177HPAIUdMOeaEU844Z8YFl1wx55obbrnjngceeeKZFxYseeWNd1Z88MkX3/zwy+Z/wf8YOqzX1uEPlgwHCA==
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
|
@ -11,7 +11,7 @@
|
|||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||
AQAAAACAAAAABQAAGQAAAA==eF5jZIAAxlF6lB4AmmmUHqUHkAYA/M8A8Q==
|
||||
AQAAAACAAAAABQAAKwAAAA==eF5jZIAAxlGaLJoJjcYlTqw6Qvpx2U+pu0iVJ8QnJD5Kj9KDgQYAUc4BDA==
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
|
@ -3,7 +3,7 @@
|
|||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wcy/KTNFLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFSoEKQ=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
|
@ -11,7 +11,7 @@
|
|||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||
AQAAAACAAAAABQAAGwAAAA==eF5jZIAAxlF6lB4AmmmUpogeDUfKaAD7jwDw
|
||||
AQAAAACAAAAABQAAPwAAAA==eF5jZIAAxlGaLJqJAE2pPlx8atG4zCXkXmLFaeXuURqVJhQfhNIhLj4h9YTil1h7COkn5D9C+nGJAwBKngD7
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
|
@ -1,19 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||
AQAAAACAAAAABQAAagAAAA==eF7t0rkOglAARFExLrgCKuKuqLj8/w9acCoSY7B+05x+cqNOvSj4l92GPfY54JAxRxxzwilnnDNhyowLLrlizjULbrjljnseeOSJZ15Y8sob76z44JMvvtn8L9jObz2GDuv96vADk5QHBg==
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -1,19 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||
AQAAAACAAAAABQAAIgAAAA==eF5jZIAAxlGaLJoJjSakntr6hzqN7v9RepSmJw0AC04A9Q==
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -1,19 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||
AQAAAACAAAAABQAALwAAAA==eF5jZIAAxlGaLJoJjSakHpc+cvUTUkdrmlL3j9KU0dROF5TqH2iaVPcDAALOANU=
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -1,19 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||
AQAAAACAAAAABQAAcQAAAA==eF7t0rkOglAUBFAxKu6igvsKrv//gxYcm9fQGEPBNKe6yc1kolaZqPEndthljzH7HHDIEceccMoZE8654JIpM6645oZb7rjngUeeeOaFV+YseOOdDz754pthf+3Aqr7rdv9vw3+/NjssU7XDD0/8BuQ=
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -1,19 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="41">
|
||||
AQAAAACAAAAABQAAYQAAAA==eF7t0scVglAAAEHgqZgBA2ZExdR/gx6YCpDj38s0sEnUlgR7ccAhR0w55oRTzjjngktmzFlwxTU33LLkjnseeOSJZ15Y8cqaN975YMMnX3zzwy/j4F+GD9u6fvgD+gwHCA==
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -1,19 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="2" RangeMax="41">
|
||||
AQAAAACAAAAABQAAZAAAAA==eF7t0rcSglAARFEHE0bAgBkE8///oAWnF8b2bXP6nRv1mkXBv+xzwCFHHDPmhFPOOOeCSyZMmXHFNTfcMueOex545IlnXliw5JUVa95454NPvvjmh79+DXYzdNisbYdfSqMHMg==
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -1,19 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||
AQAAAACAAAAABQAAIAAAAA==eF5jZIAAxlF6lB4AmokAPdj1DzRNyP2jNH4aAMufANU=
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
@ -1,19 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<VTKFile type="ImageData" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
|
||||
<ImageData WholeExtent="0 8 0 5 0 4" Origin="0 0 0" Spacing="0.000001 0.0000010000000000000002 0.000001" Direction="1 0 0 0 1 0 0 0 1">
|
||||
<FieldData>
|
||||
<Array type="String" Name="comments" NumberOfTuples="1" format="binary">
|
||||
AQAAAACAAAA+AAAAQQAAAA==eF5LScxNLM7Wc0/Nz9VLzklNzFMos7TUAyNdSyDQLagsSS0uUdAwMjC01DU01DUwUjA0tDK1sDIw0GQAAFT8EKY=
|
||||
</Array>
|
||||
</FieldData>
|
||||
<Piece Extent="0 8 0 5 0 4">
|
||||
<PointData>
|
||||
</PointData>
|
||||
<CellData>
|
||||
<DataArray type="Int64" Name="material" format="binary" RangeMin="1" RangeMax="2">
|
||||
AQAAAACAAAAABQAAMAAAAA==eF5jYoAAJhw0IwEalz566aeUptT+oa6fUppS+4e6fkppSu0f6voppSm1HwBAngDh
|
||||
</DataArray>
|
||||
</CellData>
|
||||
</Piece>
|
||||
</ImageData>
|
||||
</VTKFile>
|
|
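Note: the base64 blocks above are zlib-compressed VTK ImageData (.vti) reference files holding one "material" ID per cell of an 8 x 5 x 4 grid; the grid tests further below regenerate such files via Grid.save and read them back via Grid.load. A minimal round-trip sketch, assuming the usual damask Python import path; the file name 'ref.vti' and the illustrative material values are placeholders, not part of the changes:

import numpy as np
from damask import Grid

# 8 x 5 x 4 cells, filled with material IDs 1..40 (values only for illustration)
material = np.tile(np.arange(1, 41), 4).reshape(8, 5, 4, order='F')
grid = Grid(material, [8e-6, 5e-6, 4e-6])       # physical size in m, as in the test fixture

grid.save('ref.vti')                            # writes an ImageData file like the blocks above
reloaded = Grid.load('ref.vti')
assert np.all(reloaded.material == grid.material)   # round trip preserves the material field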
@ -102,3 +102,11 @@ class TestCrystal:
|
|||
assert [len(s) for s in crystal.kinematics('twin')['direction']] == length
|
||||
assert [len(s) for s in crystal.kinematics('twin')['plane']] == length
|
||||
|
||||
@pytest.mark.parametrize('crystal', [Crystal(lattice='cF'),
|
||||
Crystal(lattice='cI'),
|
||||
Crystal(lattice='hP'),
|
||||
Crystal(lattice='tI',c=1.2)])
|
||||
def test_related(self,crystal):
|
||||
for r in crystal.orientation_relationships:
|
||||
crystal.relation_operations(r)
|
||||
|
||||
|
|
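Note: the new test_related above iterates over every orientation relationship a crystal defines and requests the corresponding operations. A short usage sketch based only on those two calls; the import path and variable names are assumptions, and nothing is assumed about the exact return type of relation_operations:

from damask import Crystal

c = Crystal(lattice='cF')                   # face-centered cubic, as in the parametrization above
for name in c.orientation_relationships:    # relationships this lattice takes part in
    ops = c.relation_operations(name)       # operations for this relationship (structure not shown by the test)
    print(name, ops)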
|
@ -26,6 +26,14 @@ def default():
|
|||
np.arange(1,41))).reshape(8,5,4,order='F')
|
||||
return Grid(x,[8e-6,5e-6,4e-6])
|
||||
|
||||
@pytest.fixture
|
||||
def random():
|
||||
"""Simple geometry."""
|
||||
size = (1+np.random.rand(3))*1e-5
|
||||
cells = np.random.randint(10,20,3)
|
||||
s = seeds.from_random(size,np.random.randint(5,25),cells)
|
||||
return Grid.from_Voronoi_tessellation(cells,size,s)
|
||||
|
||||
@pytest.fixture
|
||||
def ref_path(ref_path_base):
|
||||
"""Directory containing reference results."""
|
||||
|
@ -157,17 +165,26 @@ class TestGrid:
|
|||
default.flip(directions)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('stencil',[1,2,3,4])
|
||||
@pytest.mark.parametrize('selection',[None,[1],[1,2,3]])
|
||||
@pytest.mark.parametrize('distance',[1.,np.sqrt(3)])
|
||||
@pytest.mark.parametrize('selection',[None,1,[1],[1,2,3]])
|
||||
@pytest.mark.parametrize('periodic',[True,False])
|
||||
def test_clean(self,default,update,ref_path,stencil,selection,periodic):
|
||||
current = default.clean(stencil,selection,periodic)
|
||||
reference = ref_path/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}.vti'
|
||||
if update and stencil > 1:
|
||||
def test_clean_reference(self,default,update,ref_path,distance,selection,periodic):
|
||||
current = default.clean(distance,selection,periodic=periodic,rng_seed=0)
|
||||
reference = ref_path/f'clean_{distance}_{"+".join(map(str,util.aslist(selection)))}_{periodic}.vti'
|
||||
if update:
|
||||
current.save(reference)
|
||||
assert grid_equal(Grid.load(reference) if stencil > 1 else default,
|
||||
current
|
||||
)
|
||||
assert grid_equal(Grid.load(reference),current)
|
||||
|
||||
@pytest.mark.parametrize('selection',[list(np.random.randint(1,20,6)),set(np.random.randint(1,20,6)),np.random.randint(1,20,6)])
|
||||
@pytest.mark.parametrize('invert',[True,False])
|
||||
def test_clean_invert(self,default,selection,invert):
|
||||
selection_inverse = set(default.material.flatten()) - set(selection)
|
||||
assert default.clean(selection=selection,invert_selection=invert,rng_seed=0) == \
|
||||
default.clean(selection=selection_inverse,invert_selection=not invert,rng_seed=0)
|
||||
|
||||
def test_clean_selection_empty(self,random):
|
||||
assert random.clean(selection=None,invert_selection=True,rng_seed=0) == random.clean(rng_seed=0) and \
|
||||
random.clean(selection=None,invert_selection=False,rng_seed=0) == random.clean(rng_seed=0)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('cells',[
|
||||
|
@ -211,6 +228,11 @@ class TestGrid:
|
|||
modified.substitute(np.arange(default.material.max())+1+offset,
|
||||
np.arange(default.material.max())+1))
|
||||
|
||||
def test_substitute_integer_list(self,random):
|
||||
f = np.random.randint(30)
|
||||
t = np.random.randint(30)
|
||||
assert random.substitute(f,t) == random.substitute([f],[t])
|
||||
|
||||
def test_substitute_invariant(self,default):
|
||||
f = np.unique(default.material.flatten())[:np.random.randint(1,default.material.max())]
|
||||
t = np.random.permutation(f)
|
||||
|
@ -302,31 +324,42 @@ class TestGrid:
|
|||
assert grid_equal(G_1,G_2)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('trigger',[[1],[]])
|
||||
def test_vicinity_offset(self,trigger):
|
||||
@pytest.mark.parametrize('selection',[1,None])
|
||||
def test_vicinity_offset(self,selection):
|
||||
offset = np.random.randint(2,4)
|
||||
vicinity = np.random.randint(2,4)
|
||||
distance = np.random.randint(2,4)
|
||||
|
||||
g = np.random.randint(28,40,(3))
|
||||
m = np.ones(g,'i')
|
||||
x = (g*np.random.permutation(np.array([.5,1,1]))).astype('i')
|
||||
x = (g*np.random.permutation(np.array([.5,1,1]))).astype(int)
|
||||
m[slice(0,x[0]),slice(0,x[1]),slice(0,x[2])] = 2
|
||||
m2 = m.copy()
|
||||
for i in [0,1,2]:
|
||||
m2[(np.roll(m,+vicinity,i)-m)!=0] += offset
|
||||
m2[(np.roll(m,-vicinity,i)-m)!=0] += offset
|
||||
if len(trigger) > 0:
|
||||
m2[(np.roll(m,+distance,i)-m)!=0] += offset
|
||||
m2[(np.roll(m,-distance,i)-m)!=0] += offset
|
||||
if selection == 1:
|
||||
m2[m==1] = 1
|
||||
|
||||
grid = Grid(m,np.random.rand(3)).vicinity_offset(vicinity,offset,trigger=trigger)
|
||||
grid = Grid(m,np.random.rand(3)).vicinity_offset(distance,offset,selection=selection)
|
||||
|
||||
assert np.all(m2==grid.material)
|
||||
|
||||
@pytest.mark.parametrize('selection',[list(np.random.randint(1,20,6)),set(np.random.randint(1,20,6)),np.random.randint(1,20,6)])
|
||||
@pytest.mark.parametrize('invert',[True,False])
|
||||
def test_vicinity_offset_invert(self,random,selection,invert):
|
||||
selection_inverse = set(random.material.flatten()) - set(selection)
|
||||
assert random.vicinity_offset(selection=selection,invert_selection=invert) == \
|
||||
random.vicinity_offset(selection=selection_inverse,invert_selection=not invert)
|
||||
|
||||
def test_vicinity_offset_selection_empty(self,random):
|
||||
assert random.vicinity_offset(selection=None,invert_selection=False) == random.vicinity_offset() and \
|
||||
random.vicinity_offset(selection=None,invert_selection=True ) == random.vicinity_offset()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('periodic',[True,False])
|
||||
def test_vicinity_offset_invariant(self,default,periodic):
|
||||
offset = default.vicinity_offset(trigger=[default.material.max()+1,
|
||||
default.material.min()-1])
|
||||
offset = default.vicinity_offset(selection=[default.material.max()+1,
|
||||
default.material.min()-1])
|
||||
assert np.all(offset.material==default.material)
|
||||
|
||||
|
||||
|
|
|
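Note: read together, the grid test changes above imply a reworked selection API on Grid: clean now takes a distance instead of a stencil size and accepts selection, invert_selection, periodic and rng_seed; vicinity_offset replaces its trigger argument with the same selection/invert_selection pair; and substitute accepts scalars as well as lists. A usage sketch based only on the calls that appear in the tests; the concrete sizes, cell counts and material IDs are illustrative assumptions:

import numpy as np
from damask import Grid, seeds

size  = np.array([1e-5, 1e-5, 1e-5])
cells = np.array([16, 16, 16])
g = Grid.from_Voronoi_tessellation(cells, size,
                                   seeds.from_random(size, 20, cells))

smooth = g.clean(np.sqrt(3), [1, 2, 3], periodic=True, rng_seed=0)   # distance, selection
inv    = g.clean(selection=[1, 2, 3], invert_selection=True, rng_seed=0)
marked = g.vicinity_offset(2, 40, selection=[1])                     # distance, offset
renum  = g.substitute(1, 2)                                          # same as substitute([1], [2])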
@ -69,8 +69,8 @@ class TestResult:
|
|||
|
||||
@pytest.mark.parametrize('what',['increments','times','phases','fields']) # ToDo: discuss homogenizations
|
||||
def test_view_none(self,default,what):
|
||||
n0 = default.view(what,False)
|
||||
n1 = default.view(what,[])
|
||||
n0 = default.view(**{what:False})
|
||||
n1 = default.view(**{what:[]})
|
||||
|
||||
label = 'increments' if what == 'times' else what
|
||||
|
||||
|
@ -79,29 +79,25 @@ class TestResult:
|
|||
|
||||
@pytest.mark.parametrize('what',['increments','times','phases','fields']) # ToDo: discuss homogenizations
|
||||
def test_view_more(self,default,what):
|
||||
empty = default.view(what,False)
|
||||
empty = default.view(**{what:False})
|
||||
|
||||
a = empty.view_more(what,'*').get('F')
|
||||
b = empty.view_more(what,True).get('F')
|
||||
a = empty.view_more(**{what:'*'}).get('F')
|
||||
b = empty.view_more(**{what:True}).get('F')
|
||||
|
||||
assert dict_equal(a,b)
|
||||
|
||||
@pytest.mark.parametrize('what',['increments','times','phases','fields']) # ToDo: discuss homogenizations
|
||||
def test_view_less(self,default,what):
|
||||
full = default.view(what,True)
|
||||
full = default.view(**{what:True})
|
||||
|
||||
n0 = full.view_less(what,'*')
|
||||
n1 = full.view_less(what,True)
|
||||
n0 = full.view_less(**{what:'*'})
|
||||
n1 = full.view_less(**{what:True})
|
||||
|
||||
label = 'increments' if what == 'times' else what
|
||||
|
||||
assert n0.get('F') is n1.get('F') is None and \
|
||||
len(n0.visible[label]) == len(n1.visible[label]) == 0
|
||||
|
||||
def test_view_invalid(self,default):
|
||||
with pytest.raises(AttributeError):
|
||||
default.view('invalid',True)
|
||||
|
||||
def test_add_invalid(self,default):
|
||||
default.add_absolute('xxxx')
|
||||
|
||||
|
@ -469,7 +465,7 @@ class TestResult:
|
|||
def test_get(self,update,request,ref_path,view,output,flatten,prune):
|
||||
result = Result(ref_path/'4grains2x4x3_compressionY.hdf5')
|
||||
for key,value in view.items():
|
||||
result = result.view(key,value)
|
||||
result = result.view(**{key:value})
|
||||
|
||||
fname = request.node.name
|
||||
cur = result.get(output,flatten,prune)
|
||||
|
@ -494,7 +490,7 @@ class TestResult:
|
|||
def test_place(self,update,request,ref_path,view,output,flatten,prune,constituents):
|
||||
result = Result(ref_path/'4grains2x4x3_compressionY.hdf5')
|
||||
for key,value in view.items():
|
||||
result = result.view(key,value)
|
||||
result = result.view(**{key:value})
|
||||
|
||||
fname = request.node.name
|
||||
cur = result.place(output,flatten,prune,constituents)
|
||||
|
|
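Note: the Result tests above switch from the positional view(what, value) form to keyword arguments, with True/'*' selecting everything and False/[] selecting nothing. A minimal sketch of that pattern; the HDF5 file name is the reference file used by the tests and stands in for any DAMASK result, and the availability of 'F' in the chosen view is an assumption:

from damask import Result

r = Result('4grains2x4x3_compressionY.hdf5')   # any DAMASK result file

nothing = r.view(phases=False)                 # was: r.view('phases', False); [] behaves the same
back    = nothing.view_more(phases='*')        # '*' and True are equivalent here
fewer   = r.view_less(increments=True)
F       = back.get('F')                        # deformation gradient, as queried in the tests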
|
@ -117,6 +117,10 @@ class TestUtil:
|
|||
def test_decorate(self,style):
|
||||
assert 'DAMASK' in style('DAMASK')
|
||||
|
||||
@pytest.mark.parametrize('lst',[1,[1,2],set([1,2,3]),np.arange(4)])
|
||||
def test_aslist(self,lst):
|
||||
assert len(util.aslist(lst)) > 0
|
||||
|
||||
@pytest.mark.parametrize('complete',[True,False])
|
||||
def test_D3D_base_group(self,tmp_path,complete):
|
||||
base_group = ''.join(random.choices('DAMASK', k=10))
|
||||
|
|
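Note: the new test_aslist and the call util.aslist(selection) in test_clean_reference suggest a small helper that normalizes a scalar or any iterable into a list. The sketch below is not DAMASK's implementation, only a guess that satisfies the parametrized inputs above (an int, a list, a set, a NumPy array); how None is handled is not shown by the tests:

import numpy as np

def aslist(arg):
    """Normalize a scalar or iterable to a plain list (sketch, not the real helper)."""
    return [arg] if np.isscalar(arg) else list(arg)

assert aslist(1) == [1]
assert aslist([1, 2]) == [1, 2]
assert sorted(aslist({1, 2, 3})) == [1, 2, 3]
assert aslist(np.arange(4)) == [0, 1, 2, 3]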
|
@ -70,9 +70,8 @@ subroutine DAMASK_interface_init
|
|||
external :: &
|
||||
quit
|
||||
|
||||
print'(/,1x,a)', '<<<+- DAMASK_interface init -+>>>'
|
||||
|
||||
if(worldrank == 0) open(OUTPUT_UNIT, encoding='UTF-8') ! for special characters in output
|
||||
print'(/,1x,a)', '<<<+- DAMASK_interface init -+>>>'
|
||||
|
||||
! http://patorjk.com/software/taag/#p=display&f=Lean&t=DAMASK%203
|
||||
#ifdef DEBUG
|
||||
|
|
|
@ -56,9 +56,6 @@ program DAMASK_grid
|
|||
|
||||
!--------------------------------------------------------------------------------------------------
|
||||
! loop variables, convergence etc.
|
||||
real(pReal), dimension(3,3), parameter :: &
|
||||
ones = 1.0_pReal, &
|
||||
zeros = 0.0_pReal
|
||||
integer, parameter :: &
|
||||
subStepFactor = 2 !< for each substep, divide the last time increment by 2.0
|
||||
real(pReal) :: &
|
||||
|
|
|
@ -102,7 +102,7 @@ subroutine discretization_grid_init(restart)
|
|||
|
||||
print'(/,1x,a,i0,a,i0,a,i0)', 'cells: ', cells(1), ' × ', cells(2), ' × ', cells(3)
|
||||
print '(1x,a,es8.2,a,es8.2,a,es8.2,a)', 'size: ', geomSize(1), ' × ', geomSize(2), ' × ', geomSize(3), ' m³'
|
||||
print '(1x,a,es8.2,a,es8.2,a,es8.2,a)', 'origin: ', origin(1), ' ', origin(2), ' ', origin(3), ' m'
|
||||
print '(1x,a,es9.2,a,es9.2,a,es9.2,a)', 'origin: ', origin(1), ' ', origin(2), ' ', origin(3), ' m'
|
||||
|
||||
if (worldsize>cells(3)) call IO_error(894, ext_msg='number of processes exceeds cells(3)')
|
||||
|
||||
|
|
|
@ -256,7 +256,7 @@ subroutine spectral_utilities_init
|
|||
!--------------------------------------------------------------------------------------------------
|
||||
! general initialization of FFTW (see manual on fftw.org for more details)
|
||||
if (pReal /= C_DOUBLE .or. kind(1) /= C_INT) error stop 'C and Fortran datatypes do not match'
|
||||
call fftw_set_timelimit(num_grid%get_asFloat('fftw_timelimit',defaultVal=-1.0_pReal))
|
||||
call fftw_set_timelimit(num_grid%get_asFloat('fftw_timelimit',defaultVal=300.0_pReal))
|
||||
|
||||
print'(/,1x,a)', 'FFTW initialized'; flush(IO_STDOUT)
|
||||
|
||||
|
|
|
@ -21,6 +21,15 @@ module homogenization
|
|||
implicit none
|
||||
private
|
||||
|
||||
type :: tState
|
||||
integer :: &
|
||||
sizeState = 0 !< size of state
|
||||
! http://stackoverflow.com/questions/3948210
|
||||
real(pReal), pointer, dimension(:,:), contiguous :: & !< is basically an allocatable+target, but in a type needs to be pointer
|
||||
state0, &
|
||||
state
|
||||
end type
|
||||
|
||||
enum, bind(c); enumerator :: &
|
||||
THERMAL_UNDEFINED_ID, &
|
||||
THERMAL_PASS_ID, &
|
||||
|
|
|
@ -4,7 +4,8 @@
|
|||
!--------------------------------------------------------------------------------------------------
|
||||
module parallelization
|
||||
use, intrinsic :: ISO_fortran_env, only: &
|
||||
OUTPUT_UNIT
|
||||
OUTPUT_UNIT, &
|
||||
ERROR_UNIT
|
||||
|
||||
#ifdef PETSC
|
||||
#include <petsc/finclude/petscsys.h>
|
||||
|
@ -86,17 +87,28 @@ subroutine parallelization_init
|
|||
if (err_MPI /= 0_MPI_INTEGER_KIND) &
|
||||
error stop 'Could not determine worldrank'
|
||||
|
||||
if (worldrank == 0) then
|
||||
print'(/,1x,a)', '<<<+- parallelization init -+>>>'
|
||||
|
||||
call MPI_Get_library_version(MPI_library_version,devNull,err_MPI)
|
||||
print'(/,1x,a)', trim(MPI_library_version)
|
||||
call MPI_Get_version(version,subversion,err_MPI)
|
||||
print'(1x,a,i0,a,i0)', 'MPI standard: ',version,'.',subversion
|
||||
#ifdef _OPENMP
|
||||
print'(1x,a,i0)', 'OpenMP version: ',openmp_version
|
||||
#ifdef LOGFILE
|
||||
write(rank_str,'(i4.4)') worldrank
|
||||
open(OUTPUT_UNIT,file='out.'//rank_str,status='replace',encoding='UTF-8')
|
||||
open(ERROR_UNIT,file='error.'//rank_str,status='replace',encoding='UTF-8')
|
||||
#else
|
||||
if (worldrank /= 0) then
|
||||
close(OUTPUT_UNIT) ! disable output
|
||||
open(OUTPUT_UNIT,file='/dev/null',status='replace') ! close() alone will leave some temp files in cwd
|
||||
else
|
||||
open(OUTPUT_UNIT,encoding='UTF-8') ! for special characters in output
|
||||
endif
|
||||
#endif
|
||||
|
||||
print'(/,1x,a)', '<<<+- parallelization init -+>>>'
|
||||
|
||||
call MPI_Get_library_version(MPI_library_version,devNull,err_MPI)
|
||||
print'(/,1x,a)', trim(MPI_library_version)
|
||||
call MPI_Get_version(version,subversion,err_MPI)
|
||||
print'(1x,a,i0,a,i0)', 'MPI standard: ',version,'.',subversion
|
||||
#ifdef _OPENMP
|
||||
print'(1x,a,i0)', 'OpenMP version: ',openmp_version
|
||||
#endif
|
||||
end if
|
||||
|
||||
call MPI_Comm_size(MPI_COMM_WORLD,worldsize,err_MPI)
|
||||
if (err_MPI /= 0_MPI_INTEGER_KIND) &
|
||||
|
@ -121,12 +133,6 @@ subroutine parallelization_init
|
|||
if (typeSize*8_MPI_INTEGER_KIND /= int(storage_size(0.0_pReal),MPI_INTEGER_KIND)) &
|
||||
error stop 'Mismatch between MPI_DOUBLE and DAMASK pReal'
|
||||
|
||||
if (worldrank /= 0) then
|
||||
close(OUTPUT_UNIT) ! disable output
|
||||
write(rank_str,'(i4.4)') worldrank ! use for MPI debug filenames
|
||||
open(OUTPUT_UNIT,file='/dev/null',status='replace') ! close() alone will leave some temp files in cwd
|
||||
endif
|
||||
|
||||
!$ call get_environment_variable(name='OMP_NUM_THREADS',value=NumThreadsString,STATUS=got_env)
|
||||
!$ if(got_env /= 0) then
|
||||
!$ print'(1x,a)', 'Could not get $OMP_NUM_THREADS, using default'
|
||||
|
|
|
@ -22,6 +22,32 @@ module phase
|
|||
implicit none
|
||||
private
|
||||
|
||||
type :: tState
|
||||
integer :: &
|
||||
sizeState = 0, & !< size of state
|
||||
sizeDotState = 0, & !< size of dot state, i.e. state(1:sizeDot) follows time evolution by dotState rates
|
||||
offsetDeltaState = 0, & !< index offset of delta state
|
||||
sizeDeltaState = 0 !< size of delta state, i.e. state(offset+1:offset+sizeDelta) follows time evolution by deltaState increments
|
||||
real(pReal), allocatable, dimension(:) :: &
|
||||
atol
|
||||
! http://stackoverflow.com/questions/3948210
|
||||
real(pReal), pointer, dimension(:,:), contiguous :: & !< is basically an allocatable+target, but in a type needs to be pointer
|
||||
state0, &
|
||||
state, & !< state
|
||||
dotState, & !< rate of state change
|
||||
deltaState !< increment of state change
|
||||
real(pReal), pointer, dimension(:,:) :: &
|
||||
deltaState2
|
||||
end type
|
||||
|
||||
type, extends(tState) :: tPlasticState
|
||||
logical :: nonlocal = .false.
|
||||
end type
|
||||
|
||||
type :: tSourceState
|
||||
type(tState), dimension(:), allocatable :: p !< tState for each active source mechanism in a phase
|
||||
end type
|
||||
|
||||
|
||||
character(len=2), allocatable, dimension(:) :: phase_lattice
|
||||
real(pReal), allocatable, dimension(:) :: phase_cOverA
|
||||
|
|
|
@ -920,11 +920,11 @@ subroutine results(group,ph)
|
|||
call results_writeDataset(phase_mechanical_S(ph)%data,group//'/mechanical/','S', &
|
||||
'second Piola-Kirchhoff stress','Pa')
|
||||
case('O')
|
||||
call results_writeDataset(to_quaternion(phase_O(ph)%data),group//'/mechanical',output_mechanical(ph)%label(ou),&
|
||||
call results_writeDataset(to_quaternion(phase_O(ph)%data),group//'/mechanical','O', &
|
||||
'crystal orientation as quaternion','q_0 (q_1 q_2 q_3)')
|
||||
call results_addAttribute('lattice',phase_lattice(ph),group//'/mechanical/'//output_mechanical(ph)%label(ou))
|
||||
call results_addAttribute('lattice',phase_lattice(ph),group//'/mechanical/O')
|
||||
if (any(phase_lattice(ph) == ['hP', 'tI'])) &
|
||||
call results_addAttribute('c/a',phase_cOverA(ph),group//'/mechanical/'//output_mechanical(ph)%label(ou))
|
||||
call results_addAttribute('c/a',phase_cOverA(ph),group//'/mechanical/O')
|
||||
end select
|
||||
end do
|
||||
|
||||
|
|
25
src/prec.f90
25
src/prec.f90
|
@ -31,31 +31,6 @@ module prec
|
|||
|
||||
real(pReal), parameter :: tol_math_check = 1.0e-8_pReal !< tolerance for internal math self-checks (rotation)
|
||||
|
||||
type :: tState
|
||||
integer :: &
|
||||
sizeState = 0, & !< size of state
|
||||
sizeDotState = 0, & !< size of dot state, i.e. state(1:sizeDot) follows time evolution by dotState rates
|
||||
offsetDeltaState = 0, & !< index offset of delta state
|
||||
sizeDeltaState = 0 !< size of delta state, i.e. state(offset+1:offset+sizeDelta) follows time evolution by deltaState increments
|
||||
real(pReal), allocatable, dimension(:) :: &
|
||||
atol
|
||||
! http://stackoverflow.com/questions/3948210
|
||||
real(pReal), pointer, dimension(:,:), contiguous :: & !< is basically an allocatable+target, but in a type needs to be pointer
|
||||
state0, &
|
||||
state, & !< state
|
||||
dotState, & !< rate of state change
|
||||
deltaState !< increment of state change
|
||||
real(pReal), pointer, dimension(:,:) :: &
|
||||
deltaState2
|
||||
end type
|
||||
|
||||
type, extends(tState) :: tPlasticState
|
||||
logical :: nonlocal = .false.
|
||||
end type
|
||||
|
||||
type :: tSourceState
|
||||
type(tState), dimension(:), allocatable :: p !< tState for each active source mechanism in a phase
|
||||
end type
|
||||
|
||||
real(pReal), private, parameter :: PREAL_EPSILON = epsilon(0.0_pReal) !< minimum positive number such that 1.0 + EPSILON /= 1.0.
|
||||
real(pReal), private, parameter :: PREAL_MIN = tiny(0.0_pReal) !< smallest normalized floating point number
|
||||
|
|
|
@ -388,14 +388,11 @@ subroutine results_writeVectorDataset_real(dataset,group,label,description,SIuni
|
|||
integer(HID_T) :: groupHandle
|
||||
|
||||
|
||||
if (present(systems)) then
|
||||
if (size(systems)*size(dataset,2) == 0 ) return !ToDo: maybe also implement for other results_write (not sure about scalar)
|
||||
end if
|
||||
|
||||
groupHandle = results_openGroup(group)
|
||||
call HDF5_write(dataset,groupHandle,label)
|
||||
call executionStamp(group//'/'//label,description,SIunit)
|
||||
if (present(systems)) call HDF5_addAttribute(resultsFile,'systems',systems,group//'/'//label)
|
||||
if (present(systems) .and. HDF5_objectExists(groupHandle,label)) &
|
||||
call HDF5_addAttribute(resultsFile,'systems',systems,group//'/'//label)
|
||||
call HDF5_closeGroup(groupHandle)
|
||||
|
||||
end subroutine results_writeVectorDataset_real
|
||||
|
@ -454,14 +451,11 @@ subroutine results_writeVectorDataset_int(dataset,group,label,description,SIunit
|
|||
integer(HID_T) :: groupHandle
|
||||
|
||||
|
||||
if (present(systems)) then
|
||||
if (size(systems)*size(dataset,2) == 0 ) return !ToDo: maybe also implement for other results_write (not sure about scalar)
|
||||
end if
|
||||
|
||||
groupHandle = results_openGroup(group)
|
||||
call HDF5_write(dataset,groupHandle,label)
|
||||
call executionStamp(group//'/'//label,description,SIunit)
|
||||
if (present(systems)) call HDF5_addAttribute(resultsFile,'systems',systems,group//'/'//label)
|
||||
if (present(systems) .and. HDF5_objectExists(groupHandle,label)) &
|
||||
call HDF5_addAttribute(resultsFile,'systems',systems,group//'/'//label)
|
||||
call HDF5_closeGroup(groupHandle)
|
||||
|
||||
end subroutine results_writeVectorDataset_int
|
||||
|
|