Merge branch 'development' into new-gmsh-version

Sharan Roongta 2021-01-07 16:38:16 +01:00
commit 793e5d0d2b
153 changed files with 4006 additions and 5438 deletions


@@ -60,11 +60,11 @@ variables:
 MPI_Intel: "$IMPI2020Intel19_1"
 MPI_GNU: "$OMPI4_0GNU10"
 # ++++++++++++ PETSc ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-PETSc3_14_0IMPI2020Intel19_1: "Libraries/PETSc/3.14.0/Intel-19.1.2-IntelMPI-2019"
-PETSc3_14_0OMPI4_0GNU10: "Libraries/PETSc/3.14.0/GNU-10-OpenMPI-4.0.5"
+PETSc3_14_2IMPI2020Intel19_1: "Libraries/PETSc/3.14.2/Intel-19.1.2-IntelMPI-2019"
+PETSc3_14_2OMPI4_0GNU10: "Libraries/PETSc/3.14.2/GNU-10-OpenMPI-4.0.5"
 # ------------ Defaults ----------------------------------------------
-PETSc_Intel: "$PETSc3_14_0IMPI2020Intel19_1"
-PETSc_GNU: "$PETSc3_14_0OMPI4_0GNU10"
+PETSc_Intel: "$PETSc3_14_2IMPI2020Intel19_1"
+PETSc_GNU: "$PETSc3_14_2OMPI4_0GNU10"
 # ++++++++++++ commercial FEM ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 MSC2020: "FEM/MSC/2020"
 # ------------ Defaults ----------------------------------------------
@@ -262,20 +262,6 @@ Pytest_grid:
 - master
 - release
-Thermal:
-stage: grid
-script: Thermal/test.py
-except:
-- master
-- release
-Nonlocal_Damage_DetectChanges:
-stage: grid
-script: Nonlocal_Damage_DetectChanges/test.py
-except:
-- master
-- release
 Plasticity_DetectChanges:
 stage: grid
 script: Plasticity_DetectChanges/test.py
@@ -353,7 +339,6 @@ backupData:
 - mkdir $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}
 - mv $LOCAL_HOME/performance/time.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
 - mv $LOCAL_HOME/performance/memory.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
-- mv $DAMASKROOT/PRIVATE/documenting/DAMASK_* $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
 only:
 - development

.gitmodules

@@ -2,3 +2,4 @@
 path = PRIVATE
 url = ../PRIVATE.git
 branch = master
+shallow = true


@@ -1,6 +1,18 @@
-########################################################################################
-# Compiler options for building DAMASK
-cmake_minimum_required (VERSION 3.10.0 FATAL_ERROR)
+cmake_minimum_required (VERSION 3.10.0)
+include (FindPkgConfig REQUIRED)
+# Dummy project to determine compiler names and version
+project (Prerequisites LANGUAGES)
+set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig")
+pkg_check_modules (PETSC REQUIRED PETSc>=3.12.0 PETSc<3.15.0)
+pkg_get_variable (CMAKE_Fortran_COMPILER PETSc fcompiler)
+pkg_get_variable (CMAKE_C_COMPILER PETSc ccompiler)
+find_program (CAT_EXECUTABLE NAMES cat)
+execute_process (COMMAND ${CAT_EXECUTABLE} ${PROJECT_SOURCE_DIR}/VERSION
+RESULT_VARIABLE DAMASK_VERSION_RETURN
+OUTPUT_VARIABLE DAMASK_VERSION
+OUTPUT_STRIP_TRAILING_WHITESPACE)
 #---------------------------------------------------------------------------------------
 # Find PETSc from system environment
@@ -28,19 +40,10 @@ include ${petsc_conf_rules}
 include ${petsc_conf_variables}
 INCLUDE_DIRS := \${PETSC_FC_INCLUDES}
 LIBRARIES := \${PETSC_WITH_EXTERNAL_LIB}
-COMPILERF := \${FC}
-COMPILERC := \${CC}
-LINKERNAME := \${FLINKER}
 includes:
 \t@echo \${INCLUDE_DIRS}
 extlibs:
 \t@echo \${LIBRARIES}
-compilerf:
-\t@echo \${COMPILERF}
-compilerc:
-\t@echo \${COMPILERC}
-linker:
-\t@echo \${LINKERNAME}
 ")
 # CMake will execute each target in the ${petsc_config_makefile}
@@ -52,26 +55,10 @@ execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_conf
 OUTPUT_VARIABLE petsc_includes
 OUTPUT_STRIP_TRAILING_WHITESPACE)
 # Find the PETSc external linking directory settings
-# required for final linking, must be appended after the executable
 execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} "extlibs"
 RESULT_VARIABLE PETSC_EXTERNAL_LIB_RETURN
 OUTPUT_VARIABLE petsc_external_lib
 OUTPUT_STRIP_TRAILING_WHITESPACE)
-# PETSc specified fortran compiler
-execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} "compilerf"
-RESULT_VARIABLE PETSC_MPIFC_RETURN
-OUTPUT_VARIABLE PETSC_MPIFC
-OUTPUT_STRIP_TRAILING_WHITESPACE)
-# PETSc specified C compiler
-execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} "compilerc"
-RESULT_VARIABLE PETSC_MPICC_RETURN
-OUTPUT_VARIABLE PETSC_MPICC
-OUTPUT_STRIP_TRAILING_WHITESPACE)
-# PETSc specified linker (Fortran compiler + PETSc linking flags)
-execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} "linker"
-RESULT_VARIABLE PETSC_LINKER_RETURN
-OUTPUT_VARIABLE PETSC_LINKER
-OUTPUT_STRIP_TRAILING_WHITESPACE)
 # Remove temporary makefile, no need to keep it anymore.
 file (REMOVE_RECURSE ${TEMPDIR})
@@ -90,14 +77,6 @@ endforeach (exlib)
 message ("Found PETSC_DIR:\n${PETSC_DIR}\n" )
 message ("Found PETSC_INCLUDES:\n${PETSC_INCLUDES}\n" )
 message ("Found PETSC_EXTERNAL_LIB:\n${PETSC_EXTERNAL_LIB}\n")
-message ("Found PETSC_LINKER:\n${PETSC_LINKER}\n" )
-message ("Found MPI Fortran Compiler:\n${PETSC_MPIFC}\n" )
-message ("Found MPI C Compiler:\n${PETSC_MPICC}\n" )
-# set compiler commands to match PETSc (needs to be done before defining the project)
-# https://cmake.org/Wiki/CMake_FAQ#How_do_I_use_a_different_compiler.3F
-set (CMAKE_Fortran_COMPILER "${PETSC_MPIFC}")
-set (CMAKE_C_COMPILER "${PETSC_MPICC}")
 #---------------------------------------------------------------------------------------
 # Now start to care about DAMASK
@@ -105,17 +84,18 @@ set (CMAKE_C_COMPILER "${PETSC_MPICC}")
 # DAMASK solver defines project to build
 string(TOLOWER ${DAMASK_SOLVER} DAMASK_SOLVER)
 if (DAMASK_SOLVER STREQUAL "grid")
-project (damask-grid Fortran C)
+project (damask-grid HOMEPAGE_URL https://damask.mpie.de LANGUAGES Fortran C)
 add_definitions (-DGrid)
-message ("Building Grid Solver\n")
 elseif (DAMASK_SOLVER STREQUAL "mesh")
-project (damask-mesh Fortran C)
+project (damask-mesh HOMEPAGE_URL https://damask.mpie.de LANGUAGES Fortran C)
 add_definitions (-DMesh)
-message ("Building Mesh Solver\n")
 else ()
 message (FATAL_ERROR "Build target (DAMASK_SOLVER) is not defined")
 endif ()
-list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake)
+add_definitions (-DDAMASKVERSION="${DAMASK_VERSION}")
+add_definitions (-DPETSc)
+message ("\nBuilding ${CMAKE_PROJECT_NAME}\n")
 if (CMAKE_BUILD_TYPE STREQUAL "")
 set (CMAKE_BUILD_TYPE "RELEASE")
@@ -153,17 +133,8 @@ if (CMAKE_BUILD_TYPE STREQUAL "SYNTAXONLY")
 set (BUILDCMD_POST "${BUILDCMD_POST} -fsyntax-only")
 endif ()
-# Parse DAMASK version from VERSION file
-find_program (CAT_EXECUTABLE NAMES cat)
-execute_process (COMMAND ${CAT_EXECUTABLE} ${PROJECT_SOURCE_DIR}/VERSION
-RESULT_VARIABLE DAMASK_VERSION_RETURN
-OUTPUT_VARIABLE DAMASK_V
-OUTPUT_STRIP_TRAILING_WHITESPACE)
-add_definitions (-DDAMASKVERSION="${DAMASK_V}")
-# definition of other macros
-add_definitions (-DPETSc)
+list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake)
 if (CMAKE_Fortran_COMPILER_ID STREQUAL "Intel")
 include (Compiler-Intel)
 elseif(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
@@ -174,9 +145,8 @@ else ()
 message (FATAL_ERROR "Compiler type (CMAKE_Fortran_COMPILER_ID) not recognized")
 endif ()
 set (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${BUILDCMD_PRE} ${OPENMP_FLAGS} ${STANDARD_CHECK} ${OPTIMIZATION_FLAGS} ${COMPILE_FLAGS} ${PRECISION_FLAGS}")
-set (CMAKE_Fortran_LINK_EXECUTABLE "${BUILDCMD_PRE} ${PETSC_LINKER} ${OPENMP_FLAGS} ${OPTIMIZATION_FLAGS} ${LINKER_FLAGS}")
+set (CMAKE_Fortran_LINK_EXECUTABLE "${BUILDCMD_PRE} ${CMAKE_Fortran_COMPILER} ${OPENMP_FLAGS} ${OPTIMIZATION_FLAGS} ${LINKER_FLAGS}")
 if (CMAKE_BUILD_TYPE STREQUAL "DEBUG")
 set (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE}} ${DEBUG_FLAGS}")


@@ -2,27 +2,20 @@ SHELL = /bin/sh
 ########################################################################################
 # Makefile for the installation of DAMASK
 ########################################################################################
-DAMASK_ROOT = $(shell python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
 .PHONY: all
 all: grid mesh processing
 .PHONY: grid
-grid: build/grid
-@(cd build/grid;make -j${DAMASK_NUM_THREADS} all install;)
+grid:
+@cmake -B build/grid -DDAMASK_SOLVER=GRID -DCMAKE_INSTALL_PREFIX=${PWD} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP}
+@cmake --build build/grid --parallel ${DAMASK_NUM_THRADS}
+@cmake --install build/grid
 .PHONY: mesh
-mesh: build/mesh
-@(cd build/mesh; make -j${DAMASK_NUM_THREADS} all install;)
-.PHONY: build/grid
-build/grid:
-@mkdir -p build/grid
-@(cd build/grid; cmake -Wno-dev -DDAMASK_SOLVER=GRID -DCMAKE_INSTALL_PREFIX=${DAMASK_ROOT} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
-.PHONY: build/mesh
-build/mesh:
-@mkdir -p build/mesh
-@(cd build/mesh; cmake -Wno-dev -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${DAMASK_ROOT} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
+mesh:
+@cmake -B build/mesh -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${PWD} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP}
+@cmake --build build/mesh --parallel ${DAMASK_NUM_THRADS}
+@cmake --install build/mesh
 .PHONY: clean
 clean:


@@ -1 +1 @@
-v3.0.0-alpha-920-gccf1a849f
+v3.0.0-alpha2-153-gf8dd5df0c


@@ -20,7 +20,7 @@ endif ()
 # -assume std_mod_proc_name (included in -standard-semantics) causes problems if other modules
 # (PETSc, HDF5) are not compiled with this option (https://software.intel.com/en-us/forums/intel-fortran-compiler-for-linux-and-mac-os-x/topic/62172)
-set (STANDARD_CHECK "-stand f15 -standard-semantics -assume nostd_mod_proc_name")
+set (STANDARD_CHECK "-stand f18 -standard-semantics -assume nostd_mod_proc_name")
 set (LINKER_FLAGS "${LINKER_FLAGS} -shared-intel")
 # Link against shared Intel libraries instead of static ones


@@ -5,8 +5,8 @@ homogenization:
 phase:
 Aluminum:
-mechanics:
 lattice: cF
+mechanics:
 output: [F, P, F_e, F_p, L_p]
 elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke}
 plasticity:


@@ -33,12 +33,12 @@ for filename in options.filenames:
 results = damask.Result(filename)
 if not results.structured: continue
-coords = damask.grid_filters.cell_coord0(results.grid,results.size,results.origin).reshape(-1,3,order='F')
+coords = damask.grid_filters.coordinates0_point(results.cells,results.size,results.origin).reshape(-1,3,order='F')
 N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
 N_digits = 5 # hack to keep test intact
 for inc in damask.util.show_progress(results.iterate('increments'),len(results.increments)):
-table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})\
+table = damask.Table(np.ones(np.product(results.cells),dtype=int)*int(inc[3:]),{'inc':(1,)})\
 .add('pos',coords.reshape(-1,3))
 results.pick('homogenizations',False)
@@ -46,14 +46,14 @@ for filename in options.filenames:
 for label in options.con:
 x = results.get_dataset_location(label)
 if len(x) != 0:
-table = table.add(label,results.read_dataset(x,0,plain=True).reshape(results.grid.prod(),-1))
+table = table.add(label,results.read_dataset(x,0,plain=True).reshape(results.cells.prod(),-1))
 results.pick('phases',False)
 results.pick('homogenizations',True)
 for label in options.mat:
 x = results.get_dataset_location(label)
 if len(x) != 0:
-table = table.add(label,results.read_dataset(x,0,plain=True).reshape(results.grid.prod(),-1))
+table = table.add(label,results.read_dataset(x,0,plain=True).reshape(results.cells.prod(),-1))
 dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
 if not os.path.isdir(dirname):
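This and the following processing scripts pick up the renamed grid_filters API: the former cell_/node_ prefix becomes a _point/_node suffix (cell_coord0 becomes coordinates0_point, node_coord becomes coordinates_node, cell_coord0_gridSizeOrigin becomes cellsSizeOrigin_coordinates0_point), and Result.grid becomes Result.cells. A minimal sketch of the new names, assuming a DAMASK installation that already contains this change; the sizes used here are arbitrary:

    import numpy as np
    import damask

    cells  = np.array([4,4,4])   # number of cells per direction (formerly 'grid')
    size   = np.ones(3)          # physical edge lengths in meter
    origin = np.zeros(3)

    # cell-centered coordinates of the undeformed grid (formerly cell_coord0)
    coords = damask.grid_filters.coordinates0_point(cells,size,origin)

    # recover cells, size, and origin from a flat list of cell-center positions
    # (formerly cell_coord0_gridSizeOrigin)
    c,s,o = damask.grid_filters.cellsSizeOrigin_coordinates0_point(coords.reshape(-1,3,order='F'))
    assert (c == cells).all() and np.allclose(s,size) and np.allclose(o,origin)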


@@ -71,13 +71,13 @@ for name in filenames:
 damask.util.report(scriptName,name)
 table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
+grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
 F = table.get(options.defgrad).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3))
-nodes = damask.grid_filters.node_coord(size,F)
+nodes = damask.grid_filters.coordinates_node(size,F)
 if options.shape:
-centers = damask.grid_filters.cell_coord(size,F)
+centers = damask.grid_filters.coordinates_point(size,F)
 shapeMismatch = shapeMismatch(size,F,nodes,centers)
 table = table.add('shapeMismatch(({}))'.format(options.defgrad),
 shapeMismatch.reshape(-1,1,order='F'),


@@ -44,7 +44,7 @@ for name in filenames:
 damask.util.report(scriptName,name)
 table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
+grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
 for label in options.labels:
 field = table.get(label)


@@ -48,24 +48,24 @@ for name in filenames:
 damask.util.report(scriptName,name)
 table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
+grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
 F = table.get(options.f).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3))
 if options.nodal:
-damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'),
+damask.Table(damask.grid_filters.coordinates0_node(grid,size).reshape(-1,3,order='F'),
 {'pos':(3,)})\
 .add('avg({}).{}'.format(options.f,options.pos),
-damask.grid_filters.node_displacement_avg(size,F).reshape(-1,3,order='F'),
+damask.grid_filters.displacement_avg_node(size,F).reshape(-1,3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))\
 .add('fluct({}).{}'.format(options.f,options.pos),
-damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'),
+damask.grid_filters.displacement_fluct_node(size,F).reshape(-1,3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))\
 .save((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'))
 else:
 table.add('avg({}).{}'.format(options.f,options.pos),
-damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'),
+damask.grid_filters.displacement_avg_point(size,F).reshape(-1,3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))\
 .add('fluct({}).{}'.format(options.f,options.pos),
-damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'),
+damask.grid_filters.displacement_fluct_point(size,F).reshape(-1,3,order='F'),
 scriptID+' '+' '.join(sys.argv[1:]))\
 .save((sys.stdout if name is None else name))
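The displacement helpers follow the same convention: the cell_/node_ prefix turns into a _point/_node suffix. A short sketch under the same assumption of a DAMASK version containing this commit; F is an arbitrary homogeneous deformation gradient made up for illustration:

    import numpy as np
    import damask

    cells = np.array([2,2,2])
    size  = np.ones(3)
    F = np.broadcast_to(np.eye(3),tuple(cells)+(3,3)).copy()
    F[...,0,0] = 1.1                                    # 10 % stretch in x

    # formerly cell_displacement_avg / node_displacement_fluct
    u_avg_point  = damask.grid_filters.displacement_avg_point(size,F)
    u_fluct_node = damask.grid_filters.displacement_fluct_node(size,F)
    print(u_avg_point.shape,u_fluct_node.shape)         # (2,2,2,3) and (3,3,3,3)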


@@ -44,7 +44,7 @@ for name in filenames:
 damask.util.report(scriptName,name)
 table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
+grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
 for label in options.labels:
 field = table.get(label)


@@ -143,7 +143,7 @@ for name in filenames:
 damask.util.report(scriptName,name)
 table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
+grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
 neighborhood = neighborhoods[options.neighborhood]
 diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')


@@ -44,7 +44,7 @@ for name in filenames:
 damask.util.report(scriptName,name)
 table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
+grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
 for label in options.labels:
 field = table.get(label)


@@ -65,7 +65,7 @@ if filenames == []: parser.error('no input file specified.')
 for name in filenames:
 damask.util.report(scriptName,name)
-geom = damask.Geom.load_DREAM3D(name,options.basegroup,options.pointwise)
+geom = damask.Grid.load_DREAM3D(name,options.basegroup,options.pointwise)
 damask.util.croak(geom)
 geom.save_ASCII(os.path.splitext(name)[0]+'.geom')


@@ -133,7 +133,7 @@ for i in range(3,np.max(microstructure)):
 header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
 + config_header
-geom = damask.Geom(microstructure.reshape(grid),
+geom = damask.Grid(microstructure.reshape(grid),
 size,-size/2,
 comments=header)
 damask.util.croak(geom)


@@ -62,9 +62,9 @@ if filenames == []: filenames = [None]
 for name in filenames:
 damask.util.report(scriptName,name)
-geom = damask.Geom.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
-grid_original = geom.grid
+geom = damask.Grid.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
+grid_original = geom.cells
 damask.util.croak(geom)
 material = np.tile(geom.material,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1
 grid = np.array(material.shape)
@@ -169,7 +169,7 @@ for name in filenames:
 # undo any changes involving immutable materials
 material = np.where(immutable, material_original,material)
-damask.Geom(material = material[0:grid_original[0],0:grid_original[1],0:grid_original[2]],
+damask.Grid(material = material[0:grid_original[0],0:grid_original[1],0:grid_original[2]],
 size = geom.size,
 origin = geom.origin,
 comments = geom.comments + [scriptID + ' ' + ' '.join(sys.argv[1:])],


@@ -196,12 +196,12 @@ if filenames == []: filenames = [None]
 for name in filenames:
 damask.util.report(scriptName,name)
-geom = damask.Geom.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
+geom = damask.Grid.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
 material = geom.material.flatten(order='F')
 cmds = [\
 init(),
-mesh(geom.grid,geom.size),
+mesh(geom.cells,geom.size),
 materials(),
 geometry(),
 initial_conditions(material),


@@ -91,7 +91,7 @@ class myThread (threading.Thread):
 perturbedSeedsTable.set('pos',coords).save(perturbedSeedsVFile,legacy=True)
 #--- do tesselation with perturbed seed file ------------------------------------------------------
-perturbedGeom = damask.Geom.from_Voronoi_tessellation(options.grid,np.ones(3),coords)
+perturbedGeom = damask.Grid.from_Voronoi_tessellation(options.grid,np.ones(3),coords)
 #--- evaluate current seeds file ------------------------------------------------------------------
@@ -210,9 +210,9 @@ baseFile = os.path.splitext(os.path.basename(options.seedFile))[0]
 points = np.array(options.grid).prod().astype('float')
 # ----------- calculate target distribution and bin edges
-targetGeom = damask.Geom.load_ASCII(os.path.splitext(os.path.basename(options.target))[0]+'.geom')
+targetGeom = damask.Grid.load_ASCII(os.path.splitext(os.path.basename(options.target))[0]+'.geom')
 nMaterials = len(np.unique(targetGeom.material))
-targetVolFrac = np.bincount(targetGeom.material.flatten())/targetGeom.grid.prod().astype(np.float)
+targetVolFrac = np.bincount(targetGeom.material.flatten())/targetGeom.cells.prod().astype(np.float)
 target = []
 for i in range(1,nMaterials+1):
 targetHist,targetBins = np.histogram(targetVolFrac,bins=i) #bin boundaries
@@ -229,7 +229,7 @@ bestSeedsUpdate = time.time()
 # ----------- tessellate initial seed file to get and evaluate geom file
 bestSeedsVFile.seek(0)
-initialGeom = damask.Geom.from_Voronoi_tessellation(options.grid,np.ones(3),initial_seeds)
+initialGeom = damask.Grid.from_Voronoi_tessellation(options.grid,np.ones(3),initial_seeds)
 if len(np.unique(targetGeom.material)) != nMaterials:
 damask.util.croak('error. Material count mismatch')


@@ -52,15 +52,15 @@ options.box = np.array(options.box).reshape(3,2)
 for name in filenames:
 damask.util.report(scriptName,name)
-geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+geom = damask.Grid.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-offset =(np.amin(options.box, axis=1)*geom.grid/geom.size).astype(int)
+offset =(np.amin(options.box, axis=1)*geom.cells/geom.size).astype(int)
 box = np.amax(options.box, axis=1) \
 - np.amin(options.box, axis=1)
 Nx = int(options.N/np.sqrt(options.N*geom.size[1]*box[1]/geom.size[0]/box[0]))
 Ny = int(options.N/np.sqrt(options.N*geom.size[0]*box[0]/geom.size[1]/box[1]))
-Nz = int(box[2]*geom.grid[2])
+Nz = int(box[2]*geom.cells[2])
 damask.util.croak('poking {} x {} x {} in box {} {} {}...'.format(Nx,Ny,Nz,*box))
@@ -70,12 +70,12 @@ for name in filenames:
 n = 0
 for i in range(Nx):
 for j in range(Ny):
-g[0] = round((i+0.5)*box[0]*geom.grid[0]/Nx-0.5)+offset[0]
-g[1] = round((j+0.5)*box[1]*geom.grid[1]/Ny-0.5)+offset[1]
+g[0] = round((i+0.5)*box[0]*geom.cells[0]/Nx-0.5)+offset[0]
+g[1] = round((j+0.5)*box[1]*geom.cells[1]/Ny-0.5)+offset[1]
 for k in range(Nz):
 g[2] = k + offset[2]
-g %= geom.grid
-seeds[n,0:3] = (g+0.5)/geom.grid # normalize coordinates to box
+g %= geom.cells
+seeds[n,0:3] = (g+0.5)/geom.cells # normalize coordinates to box
 seeds[n, 3] = geom.material[g[0],g[1],g[2]]
 if options.x: g[0] += 1
 if options.y: g[1] += 1
@@ -85,7 +85,7 @@ for name in filenames:
 comments = geom.comments \
 + [scriptID + ' ' + ' '.join(sys.argv[1:]),
 'poking\ta {}\tb {}\tc {}'.format(Nx,Ny,Nz),
-'grid\ta {}\tb {}\tc {}'.format(*geom.grid),
+'grid\ta {}\tb {}\tc {}'.format(*geom.cells),
 'size\tx {}\ty {}\tz {}'.format(*geom.size),
 'origin\tx {}\ty {}\tz {}'.format(*geom.origin),
 ]


@@ -32,7 +32,7 @@ from ._vtk import VTK # noqa
 from ._colormap import Colormap # noqa
 from ._config import Config # noqa
 from ._configmaterial import ConfigMaterial # noqa
-from ._geom import Geom # noqa
+from ._grid import Grid # noqa
 from ._result import Result # noqa
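With the module rename, the public class is imported as damask.Grid instead of damask.Geom. A minimal usage sketch, assuming a DAMASK build that contains this commit; the material array and size are made up for illustration:

    import numpy as np
    import damask

    # formerly: damask.Geom(material,size)
    g = damask.Grid(material = np.zeros((8,8,8),dtype=int),  # one material index per cell
                    size     = [1.0e-5,1.0e-5,1.0e-5])       # physical size in meter
    print(g.cells,g.N_materials)                             # [8 8 8] 1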


@@ -57,7 +57,7 @@ class Colormap(mpl.colors.ListedColormap):
 ax1.imshow(np.linspace(0,1,self.N).reshape(1,-1),
 aspect='auto', cmap=self, interpolation='nearest')
 plt.show(block = False)
-return self.name
+return 'Colormap: '+self.name
 @staticmethod
@@ -225,7 +225,7 @@ class Colormap(mpl.colors.ListedColormap):
 def save_paraview(self,fname=None):
 """
-Write colormap to JSON file for Paraview.
+Save as JSON file for use in Paraview.
 Parameters
 ----------
@@ -260,7 +260,7 @@ class Colormap(mpl.colors.ListedColormap):
 def save_ASCII(self,fname=None):
 """
-Write colormap to ASCII table.
+Save as ASCII file.
 Parameters
 ----------
@@ -286,7 +286,7 @@ class Colormap(mpl.colors.ListedColormap):
 def save_GOM(self,fname=None):
 """
-Write colormap to GOM Aramis compatible format.
+Save as ASCII file for use in GOM Aramis.
 Parameters
 ----------
@@ -314,7 +314,7 @@ class Colormap(mpl.colors.ListedColormap):
 def save_gmsh(self,fname=None):
 """
-Write colormap to Gmsh compatible format.
+Save as ASCII file for use in gmsh.
 Parameters
 ----------


@@ -21,6 +21,9 @@ class NiceDumper(yaml.SafeDumper):
 return self.represent_data(dict(data)) if isinstance(data, dict) and type(data) != dict else \
 super().represent_data(data)
+def ignore_aliases(self, data):
+"""No references."""
+return True
 class Config(dict):
 """YAML-based configuration."""


@@ -9,6 +9,16 @@ from . import Orientation
 class ConfigMaterial(Config):
 """Material configuration."""
+_defaults = {'material': [],
+'homogenization': {},
+'phase': {}}
+def __init__(self,d={}):
+"""Initialize object with default dictionary keys."""
+super().__init__(d)
+for k,v in self._defaults.items():
+if k not in self: self[k] = v
 def save(self,fname='material.yaml',**kwargs):
 """
 Save to yaml file.
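With the added _defaults and __init__, an empty ConfigMaterial always carries the three top-level sections, so code reading the configuration can rely on the keys being present. A small sketch, assuming a DAMASK build with this commit:

    import damask

    m = damask.ConfigMaterial()                                  # no data given
    assert sorted(m.keys()) == ['homogenization','material','phase']

    m = damask.ConfigMaterial({'phase': {'Aluminum': {}}})       # partial input
    assert m['material'] == [] and m['homogenization'] == {}     # missing sections filled with defaults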
@@ -75,6 +85,8 @@ class ConfigMaterial(Config):
 fraction: 1.0
 phase: Steel
 homogenization: SX
+homogenization: {}
+phase: {}
 """
 constituents_ = {k:table.get(v) for k,v in constituents.items()}
@@ -261,6 +273,8 @@ class ConfigMaterial(Config):
 fraction: 1.0
 phase: Aluminum
 homogenization: SX
+homogenization: {}
+phase: {}
 """
 length = -1
@@ -274,6 +288,7 @@ class ConfigMaterial(Config):
 c = [{} for _ in range(length)] if constituents is None else \
 [{'constituents':u} for u in ConfigMaterial._constituents(**constituents)]
 if len(c) == 1: c = [copy.deepcopy(c[0]) for _ in range(length)]
 if length != 1 and length != len(c):


@@ -7,7 +7,8 @@ import warnings
 import numpy as np
 import pandas as pd
 import h5py
-from scipy import ndimage,spatial
+from scipy import ndimage, spatial
+from vtk.util.numpy_support import vtk_to_numpy as vtk_to_np
 from . import environment
 from . import VTK
@@ -16,21 +17,21 @@ from . import grid_filters
 from . import Rotation
-class Geom:
+class Grid:
 """Geometry definition for grid solvers."""
 def __init__(self,material,size,origin=[0.0,0.0,0.0],comments=[]):
 """
-New geometry definition from array of materials, size, and origin.
+New grid definition from array of materials, size, and origin.
 Parameters
 ----------
 material : numpy.ndarray
 Material index array (3D).
 size : list or numpy.ndarray
-Physical size of the geometry in meter.
+Physical size of the grid in meter.
 origin : list or numpy.ndarray, optional
-Physical origin of the geometry in meter.
+Physical origin of the grid in meter.
 comments : list of str, optional
 Comment lines.
@@ -42,23 +43,26 @@ class Geom:
 def __repr__(self):
-"""Basic information on geometry definition."""
+"""Basic information on grid definition."""
+mat_min = np.nanmin(self.material)
+mat_max = np.nanmax(self.material)
+mat_N = self.N_materials
 return util.srepr([
-f'grid a b c: {util.srepr(self.grid, " x ")}',
+f'cells a b c: {util.srepr(self.cells, " x ")}',
 f'size x y z: {util.srepr(self.size, " x ")}',
 f'origin x y z: {util.srepr(self.origin," ")}',
-f'# materials: {self.N_materials}',
-f'max material: {np.nanmax(self.material)}',
+f'# materials: {mat_N}' + ('' if mat_min == 0 and mat_max+1 == mat_N else
+f' (min: {mat_min}, max: {mat_max})')
 ])
 def __copy__(self):
-"""Copy geometry."""
+"""Copy grid."""
 return copy.deepcopy(self)
 def copy(self):
-"""Copy geometry."""
+"""Copy grid."""
 return self.__copy__()
@@ -68,14 +72,14 @@ class Geom:
 Parameters
 ----------
-other : Geom
-Geometry to compare self against.
+other : damask.Grid
+Grid to compare self against.
 """
 message = []
-if np.any(other.grid != self.grid):
-message.append(util.deemph(f'grid a b c: {util.srepr(other.grid," x ")}'))
-message.append(util.emph( f'grid a b c: {util.srepr( self.grid," x ")}'))
+if np.any(other.cells != self.cells):
+message.append(util.deemph(f'cells a b c: {util.srepr(other.cells," x ")}'))
+message.append(util.emph( f'cells a b c: {util.srepr( self.cells," x ")}'))
 if not np.allclose(other.size,self.size):
 message.append(util.deemph(f'size x y z: {util.srepr(other.size," x ")}'))
@@ -104,9 +108,9 @@ class Geom:
 @material.setter
 def material(self,material):
 if len(material.shape) != 3:
-raise ValueError(f'Invalid material shape {material.shape}.')
+raise ValueError(f'invalid material shape {material.shape}')
 elif material.dtype not in np.sctypes['float'] + np.sctypes['int']:
-raise TypeError(f'Invalid material data type {material.dtype}.')
+raise TypeError(f'invalid material data type {material.dtype}')
 else:
 self._material = np.copy(material)
@@ -117,31 +121,31 @@ class Geom:
 @property
 def size(self):
-"""Physical size of geometry in meter."""
+"""Physical size of grid in meter."""
 return self._size
 @size.setter
 def size(self,size):
 if len(size) != 3 or any(np.array(size) <= 0):
-raise ValueError(f'Invalid size {size}.')
+raise ValueError(f'invalid size {size}')
 else:
 self._size = np.array(size)
 @property
 def origin(self):
-"""Coordinates of geometry origin in meter."""
+"""Coordinates of grid origin in meter."""
 return self._origin
 @origin.setter
 def origin(self,origin):
 if len(origin) != 3:
-raise ValueError(f'Invalid origin {origin}.')
+raise ValueError(f'invalid origin {origin}')
 else:
 self._origin = np.array(origin)
 @property
 def comments(self):
-"""Comments/history of geometry."""
+"""Comments, e.g. history of operations."""
 return self._comments
 @comments.setter
@@ -150,35 +154,39 @@ class Geom:
 @property
-def grid(self):
-"""Grid dimension of geometry."""
+def cells(self):
+"""Number of cells in x,y,z direction."""
 return np.asarray(self.material.shape)
 @property
 def N_materials(self):
-"""Number of (unique) material indices within geometry."""
+"""Number of (unique) material indices within grid."""
 return np.unique(self.material).size
 @staticmethod
 def load(fname):
 """
-Read a VTK rectilinear grid.
+Load from VTK rectilinear grid file.
 Parameters
 ----------
 fname : str or or pathlib.Path
-Geometry file to read.
-Valid extension is .vtr, which will be appended if not given.
+Grid file to read. Valid extension is .vtr, which will be appended
+if not given.
 """
 v = VTK.load(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr')
 comments = v.get_comments()
-grid = np.array(v.vtk_data.GetDimensions())-1
+cells = np.array(v.vtk_data.GetDimensions())-1
 bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T
-return Geom(material = v.get('material').reshape(grid,order='F'),
+for i,c in enumerate([v.vtk_data.GetXCoordinates(),v.vtk_data.GetYCoordinates(),v.vtk_data.GetZCoordinates()]):
+if not np.allclose(vtk_to_np(c),np.linspace(bbox[0][i],bbox[1][i],cells[i]+1)):
+raise ValueError('regular grid spacing violated')
+return Grid(material = v.get('material').reshape(cells,order='F'),
 size = bbox[1] - bbox[0],
 origin = bbox[0],
 comments=comments)
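The new loop in load rejects VTK rectilinear grids whose coordinates are not equally spaced, since the grid solver assumes a regular grid. The core of that check, stripped of the VTK plumbing (the coordinate arrays here are made up for illustration):

    import numpy as np

    bbox  = np.array([[0.0,0.0,0.0],[1.0,1.0,1.0]])      # bounding box: [min, max] per axis
    cells = np.array([4,4,4])                            # number of cells per axis

    coords_ok  = np.array([0.0,0.25,0.5,0.75,1.0])       # regular spacing: accepted
    coords_bad = np.array([0.0,0.10,0.5,0.75,1.0])       # irregular spacing: rejected

    for c in (coords_ok,coords_bad):
        if not np.allclose(c,np.linspace(bbox[0][0],bbox[1][0],cells[0]+1)):
            print('regular grid spacing violated')       # load raises ValueError here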
@@ -187,7 +195,7 @@ class Geom:
 @staticmethod
 def load_ASCII(fname):
 """
-Read a geom file.
+Load from geom file.
 Storing geometry files in ASCII format is deprecated.
 This function will be removed in a future version of DAMASK.
@@ -211,7 +219,7 @@ class Geom:
 except ValueError:
 header_length,keyword = (-1, 'invalid')
 if not keyword.startswith('head') or header_length < 3:
-raise TypeError('Header length information missing or invalid')
+raise TypeError('header length information missing or invalid')
 comments = []
 content = f.readlines()
@@ -219,7 +227,7 @@ class Geom:
 items = line.split('#')[0].lower().strip().split()
 key = items[0] if items else ''
 if key == 'grid':
-grid = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
+cells = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
 elif key == 'size':
 size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
 elif key == 'origin':
@@ -227,7 +235,7 @@ class Geom:
 else:
 comments.append(line.strip())
-material = np.empty(grid.prod()) # initialize as flat array
+material = np.empty(cells.prod()) # initialize as flat array
 i = 0
 for line in content[header_length:]:
 items = line.split('#')[0].split()
@@ -242,19 +250,19 @@ class Geom:
 material[i:i+len(items)] = items
 i += len(items)
-if i != grid.prod():
-raise TypeError(f'Invalid file: expected {grid.prod()} entries, found {i}')
+if i != cells.prod():
+raise TypeError(f'invalid file: expected {cells.prod()} entries, found {i}')
 if not np.any(np.mod(material,1) != 0.0): # no float present
 material = material.astype('int') - (1 if material.min() > 0 else 0)
-return Geom(material.reshape(grid,order='F'),size,origin,comments)
+return Grid(material.reshape(cells,order='F'),size,origin,comments)
 @staticmethod
 def load_DREAM3D(fname,base_group,point_data=None,material='FeatureIds'):
 """
-Load a DREAM.3D file.
+Load from DREAM.3D file.
 Parameters
 ----------
@@ -274,21 +282,21 @@ class Geom:
 root_dir ='DataContainers'
 f = h5py.File(fname, 'r')
 g = path.join(root_dir,base_group,'_SIMPL_GEOMETRY')
-grid = f[path.join(g,'DIMENSIONS')][()]
-size = f[path.join(g,'SPACING')][()] * grid
+cells = f[path.join(g,'DIMENSIONS')][()]
+size = f[path.join(g,'SPACING')][()] * cells
 origin = f[path.join(g,'ORIGIN')][()]
-ma = np.arange(grid.prod(),dtype=int) \
+ma = np.arange(cells.prod(),dtype=int) \
 if point_data is None else \
-np.reshape(f[path.join(root_dir,base_group,point_data,material)],grid.prod())
+np.reshape(f[path.join(root_dir,base_group,point_data,material)],cells.prod())
-return Geom(ma.reshape(grid,order='F'),size,origin,util.execution_stamp('Geom','load_DREAM3D'))
+return Grid(ma.reshape(cells,order='F'),size,origin,util.execution_stamp('Grid','load_DREAM3D'))
 @staticmethod
 def from_table(table,coordinates,labels):
 """
-Derive geometry from an ASCII table.
+Generate grid from ASCII table.
 Parameters
 ----------
@@ -302,15 +310,15 @@ class Geom:
 Each unique combintation of values results in one material ID.
 """
-grid,size,origin = grid_filters.cell_coord0_gridSizeOrigin(table.get(coordinates))
+cells,size,origin = grid_filters.cellsSizeOrigin_coordinates0_point(table.get(coordinates))
 labels_ = [labels] if isinstance(labels,str) else labels
 unique,unique_inverse = np.unique(np.hstack([table.get(l) for l in labels_]),return_inverse=True,axis=0)
-ma = np.arange(grid.prod()) if len(unique) == grid.prod() else \
+ma = np.arange(cells.prod()) if len(unique) == cells.prod() else \
 np.arange(unique.size)[np.argsort(pd.unique(unique_inverse))][unique_inverse]
-return Geom(ma.reshape(grid,order='F'),size,origin,util.execution_stamp('Geom','from_table'))
+return Grid(ma.reshape(cells,order='F'),size,origin,util.execution_stamp('Grid','from_table'))
 @staticmethod
@@ -318,16 +326,16 @@ class Geom:
 return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
 @staticmethod
-def from_Laguerre_tessellation(grid,size,seeds,weights,material=None,periodic=True):
+def from_Laguerre_tessellation(cells,size,seeds,weights,material=None,periodic=True):
 """
-Generate geometry from Laguerre tessellation.
+Generate grid from Laguerre tessellation.
 Parameters
 ----------
-grid : int numpy.ndarray of shape (3)
-Number of grid points in x,y,z direction.
+cells : int numpy.ndarray of shape (3)
+Number of cells in x,y,z direction.
 size : list or numpy.ndarray of shape (3)
-Physical size of the geometry in meter.
+Physical size of the grid in meter.
 seeds : numpy.ndarray of shape (:,3)
 Position of the seed points in meter. All points need to lay within the box.
 weights : numpy.ndarray of shape (seeds.shape[0])
@@ -336,7 +344,7 @@ class Geom:
 Material ID of the seeds.
 Defaults to None, in which case materials are consecutively numbered.
 periodic : Boolean, optional
-Perform a periodic tessellation. Defaults to True.
+Assume grid to be periodic. Defaults to True.
 """
 if periodic:
@@ -344,57 +352,57 @@ class Geom:
 seeds_p = np.vstack((seeds -np.array([size[0],0.,0.]),seeds, seeds +np.array([size[0],0.,0.])))
 seeds_p = np.vstack((seeds_p-np.array([0.,size[1],0.]),seeds_p,seeds_p+np.array([0.,size[1],0.])))
 seeds_p = np.vstack((seeds_p-np.array([0.,0.,size[2]]),seeds_p,seeds_p+np.array([0.,0.,size[2]])))
-coords = grid_filters.cell_coord0(grid*3,size*3,-size).reshape(-1,3)
+coords = grid_filters.coordinates0_point(cells*3,size*3,-size).reshape(-1,3)
 else:
 weights_p = weights
 seeds_p = seeds
-coords = grid_filters.cell_coord0(grid,size).reshape(-1,3)
+coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3)
 pool = mp.Pool(processes = int(environment.options['DAMASK_NUM_THREADS']))
-result = pool.map_async(partial(Geom._find_closest_seed,seeds_p,weights_p), [coord for coord in coords])
+result = pool.map_async(partial(Grid._find_closest_seed,seeds_p,weights_p), [coord for coord in coords])
 pool.close()
 pool.join()
 material_ = np.array(result.get())
 if periodic:
-material_ = material_.reshape(grid*3)
-material_ = material_[grid[0]:grid[0]*2,grid[1]:grid[1]*2,grid[2]:grid[2]*2]%seeds.shape[0]
+material_ = material_.reshape(cells*3)
+material_ = material_[cells[0]:cells[0]*2,cells[1]:cells[1]*2,cells[2]:cells[2]*2]%seeds.shape[0]
 else:
-material_ = material_.reshape(grid)
+material_ = material_.reshape(cells)
-return Geom(material = material_ if material is None else material[material_],
+return Grid(material = material_ if material is None else material[material_],
 size = size,
-comments = util.execution_stamp('Geom','from_Laguerre_tessellation'),
+comments = util.execution_stamp('Grid','from_Laguerre_tessellation'),
 )
 @staticmethod
-def from_Voronoi_tessellation(grid,size,seeds,material=None,periodic=True):
+def from_Voronoi_tessellation(cells,size,seeds,material=None,periodic=True):
 """
-Generate geometry from Voronoi tessellation.
+Generate grid from Voronoi tessellation.
 Parameters
 ----------
-grid : int numpy.ndarray of shape (3)
-Number of grid points in x,y,z direction.
+cells : int numpy.ndarray of shape (3)
+Number of cells in x,y,z direction.
 size : list or numpy.ndarray of shape (3)
-Physical size of the geometry in meter.
+Physical size of the grid in meter.
 seeds : numpy.ndarray of shape (:,3)
 Position of the seed points in meter. All points need to lay within the box.
 material : numpy.ndarray of shape (seeds.shape[0]), optional
 Material ID of the seeds.
 Defaults to None, in which case materials are consecutively numbered.
 periodic : Boolean, optional
-Perform a periodic tessellation. Defaults to True.
+Assume grid to be periodic. Defaults to True.
 """
-coords = grid_filters.cell_coord0(grid,size).reshape(-1,3)
+coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3)
 KDTree = spatial.cKDTree(seeds,boxsize=size) if periodic else spatial.cKDTree(seeds)
 devNull,material_ = KDTree.query(coords)
-return Geom(material = (material_ if material is None else material[material_]).reshape(grid),
+return Grid(material = (material_ if material is None else material[material_]).reshape(cells),
 size = size,
-comments = util.execution_stamp('Geom','from_Voronoi_tessellation'),
+comments = util.execution_stamp('Grid','from_Voronoi_tessellation'),
 )
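Besides the class rename, the tessellation constructors only rename their first parameter (grid becomes cells); positional calls keep working, keyword calls need the new name. A minimal sketch, assuming a DAMASK build with this commit; the seed points are random values for illustration:

    import numpy as np
    import damask

    cells = np.array([16,16,16])
    size  = np.ones(3)
    rng   = np.random.default_rng(20210107)
    seeds = rng.random((20,3))*size                     # 20 seed points inside the box

    # formerly: damask.Geom.from_Voronoi_tessellation(grid=...,...)
    g = damask.Grid.from_Voronoi_tessellation(cells,size,seeds,periodic=True)
    print(g.cells,g.N_materials)                        # [16 16 16] and at most 20 materials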
@@ -441,16 +449,16 @@ class Geom:
 @staticmethod
-def from_minimal_surface(grid,size,surface,threshold=0.0,periods=1,materials=(0,1)):
+def from_minimal_surface(cells,size,surface,threshold=0.0,periods=1,materials=(0,1)):
 """
-Generate geometry from definition of triply periodic minimal surface.
+Generate grid from definition of triply periodic minimal surface.
 Parameters
 ----------
-grid : int numpy.ndarray of shape (3)
-Number of grid points in x,y,z direction.
+cells : int numpy.ndarray of shape (3)
+Number of cells in x,y,z direction.
 size : list or numpy.ndarray of shape (3)
-Physical size of the geometry in meter.
+Physical size of the grid in meter.
 surface : str
 Type of the minimal surface. See notes for details.
 threshold : float, optional.
@@ -493,19 +501,19 @@ class Geom:
 https://doi.org/10.1016/j.simpa.2020.100026
 """
-x,y,z = np.meshgrid(periods*2.0*np.pi*(np.arange(grid[0])+0.5)/grid[0],
-periods*2.0*np.pi*(np.arange(grid[1])+0.5)/grid[1],
-periods*2.0*np.pi*(np.arange(grid[2])+0.5)/grid[2],
+x,y,z = np.meshgrid(periods*2.0*np.pi*(np.arange(cells[0])+0.5)/cells[0],
+periods*2.0*np.pi*(np.arange(cells[1])+0.5)/cells[1],
+periods*2.0*np.pi*(np.arange(cells[2])+0.5)/cells[2],
 indexing='ij',sparse=True)
-return Geom(material = np.where(threshold < Geom._minimal_surface[surface](x,y,z),materials[1],materials[0]),
+return Grid(material = np.where(threshold < Grid._minimal_surface[surface](x,y,z),materials[1],materials[0]),
 size = size,
-comments = util.execution_stamp('Geom','from_minimal_surface'),
+comments = util.execution_stamp('Grid','from_minimal_surface'),
 )
 def save(self,fname,compress=True):
 """
-Store as VTK rectilinear grid.
+Save as VTK rectilinear grid file.
 Parameters
 ----------
@@ -515,7 +523,7 @@ class Geom:
 Compress with zlib algorithm. Defaults to True.
 """
-v = VTK.from_rectilinear_grid(self.grid,self.size,self.origin)
+v = VTK.from_rectilinear_grid(self.cells,self.size,self.origin)
 v.add(self.material.flatten(order='F'),'material')
 v.add_comments(self.comments)
@@ -524,7 +532,7 @@ class Geom:
 def save_ASCII(self,fname):
 """
-Write a geom file.
+Save as geom file.
 Storing geometry files in ASCII format is deprecated.
 This function will be removed in a future version of DAMASK.
@@ -539,7 +547,7 @@ class Geom:
 """
 warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0', DeprecationWarning)
 header = [f'{len(self.comments)+4} header'] + self.comments \
-+ ['grid a {} b {} c {}'.format(*self.grid),
++ ['grid a {} b {} c {}'.format(*self.cells),
 'size x {} y {} z {}'.format(*self.size),
 'origin x {} y {} z {}'.format(*self.origin),
 'homogenization 1',
@@ -548,13 +556,13 @@ class Geom:
 format_string = '%g' if self.material.dtype in np.sctypes['float'] else \
 '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.material)))))
 np.savetxt(fname,
-self.material.reshape([self.grid[0],np.prod(self.grid[1:])],order='F').T,
+self.material.reshape([self.cells[0],np.prod(self.cells[1:])],order='F').T,
 header='\n'.join(header), fmt=format_string, comments='')
 def show(self):
 """Show on screen."""
-VTK.from_rectilinear_grid(self.grid,self.size,self.origin).show()
+VTK.from_rectilinear_grid(self.cells,self.size,self.origin).show()
 def add_primitive(self,dimension,center,exponent,
@ -566,11 +574,10 @@ class Geom:
---------- ----------
dimension : int or float numpy.ndarray of shape (3) dimension : int or float numpy.ndarray of shape (3)
Dimension (diameter/side length) of the primitive. If given as Dimension (diameter/side length) of the primitive. If given as
integers, grid point locations (cell centers) are addressed. integers, cell centers are addressed.
If given as floats, coordinates are addressed. If given as floats, coordinates are addressed.
center : int or float numpy.ndarray of shape (3) center : int or float numpy.ndarray of shape (3)
Center of the primitive. If given as integers, grid point Center of the primitive. If given as integers, cell centers are addressed.
coordinates (cell centers) are addressed.
If given as floats, coordinates in space are addressed. If given as floats, coordinates in space are addressed.
exponent : numpy.ndarray of shape (3) or float exponent : numpy.ndarray of shape (3) or float
Exponents for the three axes. Exponents for the three axes.
@ -584,44 +591,44 @@ class Geom:
Retain original materials within primitive and fill outside. Retain original materials within primitive and fill outside.
Defaults to False. Defaults to False.
periodic : Boolean, optional periodic : Boolean, optional
Repeat primitive over boundaries. Defaults to True. Assume grid to be periodic. Defaults to True.
""" """
# radius and center # radius and center
r = np.array(dimension)/2.0*self.size/self.grid if np.array(dimension).dtype in np.sctypes['int'] else \ r = np.array(dimension)/2.0*self.size/self.cells if np.array(dimension).dtype in np.sctypes['int'] else \
np.array(dimension)/2.0 np.array(dimension)/2.0
c = (np.array(center) + .5)*self.size/self.grid if np.array(center).dtype in np.sctypes['int'] else \ c = (np.array(center) + .5)*self.size/self.cells if np.array(center).dtype in np.sctypes['int'] else \
(np.array(center) - self.origin) (np.array(center) - self.origin)
coords = grid_filters.cell_coord0(self.grid,self.size, coords = grid_filters.coordinates0_point(self.cells,self.size,
-(0.5*(self.size + (self.size/self.grid -(0.5*(self.size + (self.size/self.cells
if np.array(center).dtype in np.sctypes['int'] else if np.array(center).dtype in np.sctypes['int'] else
0)) if periodic else c)) 0)) if periodic else c))
coords_rot = R.broadcast_to(tuple(self.grid))@coords coords_rot = R.broadcast_to(tuple(self.cells))@coords
with np.errstate(all='ignore'): with np.errstate(all='ignore'):
mask = np.sum(np.power(coords_rot/r,2.0**np.array(exponent)),axis=-1) > 1.0 mask = np.sum(np.power(coords_rot/r,2.0**np.array(exponent)),axis=-1) > 1.0
if periodic: # translate back to center if periodic: # translate back to center
mask = np.roll(mask,((c/self.size-0.5)*self.grid).round().astype(int),(0,1,2)) mask = np.roll(mask,((c/self.size-0.5)*self.cells).round().astype(int),(0,1,2))
return Geom(material = np.where(np.logical_not(mask) if inverse else mask, return Grid(material = np.where(np.logical_not(mask) if inverse else mask,
self.material, self.material,
np.nanmax(self.material)+1 if fill is None else fill), np.nanmax(self.material)+1 if fill is None else fill),
size = self.size, size = self.size,
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','add_primitive')], comments = self.comments+[util.execution_stamp('Grid','add_primitive')],
) )
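A sketch of carving a primitive into an existing grid; the integer-vs-float convention follows the docstring above, the numbers are illustrative, and the rotation argument R is assumed to default to the identity:

    import numpy as np
    import damask

    grid = damask.Grid.load('polycrystal.vtr')            # hypothetical input file
    # integers address cell centers; exponent=1 means power 2**1, i.e. an ellipsoidal primitive
    with_ball = grid.add_primitive(np.array([16,16,16]),  # diameter of 16 cells
                                   np.array([32,32,32]),  # centered at cell (32,32,32)
                                   1)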
def mirror(self,directions,reflect=False): def mirror(self,directions,reflect=False):
""" """
Mirror geometry along given directions. Mirror grid along given directions.
Parameters Parameters
---------- ----------
directions : iterable containing str directions : iterable containing str
Direction(s) along which the geometry is mirrored. Direction(s) along which the grid is mirrored.
Valid entries are 'x', 'y', 'z'. Valid entries are 'x', 'y', 'z'.
reflect : bool, optional reflect : bool, optional
Reflect (include) outermost layers. Defaults to False. Reflect (include) outermost layers. Defaults to False.
@ -629,7 +636,7 @@ class Geom:
""" """
valid = ['x','y','z'] valid = ['x','y','z']
if not set(directions).issubset(valid): if not set(directions).issubset(valid):
raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.') raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
limits = [None,None] if reflect else [-2,0] limits = [None,None] if reflect else [-2,0]
mat = self.material.copy() mat = self.material.copy()
@ -641,52 +648,52 @@ class Geom:
if 'z' in directions: if 'z' in directions:
mat = np.concatenate([mat,mat[:,:,limits[0]:limits[1]:-1]],2) mat = np.concatenate([mat,mat[:,:,limits[0]:limits[1]:-1]],2)
return Geom(material = mat, return Grid(material = mat,
size = self.size/self.grid*np.asarray(mat.shape), size = self.size/self.cells*np.asarray(mat.shape),
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','mirror')], comments = self.comments+[util.execution_stamp('Grid','mirror')],
) )
def flip(self,directions): def flip(self,directions):
""" """
Flip geometry along given directions. Flip grid along given directions.
Parameters Parameters
---------- ----------
directions : iterable containing str directions : iterable containing str
Direction(s) along which the geometry is flipped. Direction(s) along which the grid is flipped.
Valid entries are 'x', 'y', 'z'. Valid entries are 'x', 'y', 'z'.
""" """
valid = ['x','y','z'] valid = ['x','y','z']
if not set(directions).issubset(valid): if not set(directions).issubset(valid):
raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.') raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
mat = np.flip(self.material, (valid.index(d) for d in directions if d in valid)) mat = np.flip(self.material, (valid.index(d) for d in directions if d in valid))
return Geom(material = mat, return Grid(material = mat,
size = self.size, size = self.size,
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','flip')], comments = self.comments+[util.execution_stamp('Grid','flip')],
) )
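The two symmetry operations renamed above, in use (input file hypothetical):

    import damask

    grid     = damask.Grid.load('polycrystal.vtr')
    mirrored = grid.mirror('xy', reflect=False)   # cell count grows along x and y
    flipped  = grid.flip('z')                     # cell count unchanged, order reversed along z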
def scale(self,grid,periodic=True): def scale(self,cells,periodic=True):
""" """
Scale geometry to new grid. Scale grid to new cells.
Parameters Parameters
---------- ----------
grid : numpy.ndarray of shape (3) cells : numpy.ndarray of shape (3)
Number of grid points in x,y,z direction. Number of cells in x,y,z direction.
periodic : Boolean, optional periodic : Boolean, optional
Assume geometry to be periodic. Defaults to True. Assume grid to be periodic. Defaults to True.
""" """
return Geom(material = ndimage.interpolation.zoom( return Grid(material = ndimage.interpolation.zoom(
self.material, self.material,
grid/self.grid, cells/self.cells,
output=self.material.dtype, output=self.material.dtype,
order=0, order=0,
mode=('wrap' if periodic else 'nearest'), mode=('wrap' if periodic else 'nearest'),
@ -694,13 +701,13 @@ class Geom:
), ),
size = self.size, size = self.size,
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','scale')], comments = self.comments+[util.execution_stamp('Grid','scale')],
) )
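Resampling with the renamed `cells` argument, as a sketch:

    import damask

    grid  = damask.Grid.load('polycrystal.vtr')   # hypothetical input file
    finer = grid.scale(grid.cells*2)              # double the resolution (nearest-neighbor, order=0)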
def clean(self,stencil=3,selection=None,periodic=True): def clean(self,stencil=3,selection=None,periodic=True):
""" """
Smooth geometry by selecting most frequent material index within given stencil at each location. Smooth grid by selecting most frequent material index within given stencil at each location.
Parameters Parameters
---------- ----------
@ -709,7 +716,7 @@ class Geom:
selection : list, optional selection : list, optional
Field values that can be altered. Defaults to all. Field values that can be altered. Defaults to all.
periodic : Boolean, optional periodic : Boolean, optional
Assume geometry to be periodic. Defaults to True. Assume grid to be periodic. Defaults to True.
""" """
def mostFrequent(arr,selection=None): def mostFrequent(arr,selection=None):
@ -720,7 +727,7 @@ class Geom:
else: else:
return me return me
return Geom(material = ndimage.filters.generic_filter( return Grid(material = ndimage.filters.generic_filter(
self.material, self.material,
mostFrequent, mostFrequent,
size=(stencil if selection is None else stencil//2*2+1,)*3, size=(stencil if selection is None else stencil//2*2+1,)*3,
@ -729,7 +736,7 @@ class Geom:
).astype(self.material.dtype), ).astype(self.material.dtype),
size = self.size, size = self.size,
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','clean')], comments = self.comments+[util.execution_stamp('Grid','clean')],
) )
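And the smoothing filter with its documented default stencil:

    import damask

    grid     = damask.Grid.load('polycrystal.vtr')    # hypothetical input file
    smoothed = grid.clean(stencil=3, periodic=True)   # most frequent material index in a 3x3x3 neighborhood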
@ -737,21 +744,21 @@ class Geom:
"""Renumber sorted material indices as 0,...,N-1.""" """Renumber sorted material indices as 0,...,N-1."""
_,renumbered = np.unique(self.material,return_inverse=True) _,renumbered = np.unique(self.material,return_inverse=True)
return Geom(material = renumbered.reshape(self.grid), return Grid(material = renumbered.reshape(self.cells),
size = self.size, size = self.size,
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','renumber')], comments = self.comments+[util.execution_stamp('Grid','renumber')],
) )
def rotate(self,R,fill=None): def rotate(self,R,fill=None):
""" """
Rotate geometry (pad if required). Rotate grid (pad if required).
Parameters Parameters
---------- ----------
R : damask.Rotation R : damask.Rotation
Rotation to apply to the geometry. Rotation to apply to the grid.
fill : int or float, optional fill : int or float, optional
Material index to fill the corners. Defaults to material.max() + 1. Material index to fill the corners. Defaults to material.max() + 1.
@ -773,25 +780,25 @@ class Geom:
else: else:
material_in = material_out material_in = material_out
origin = self.origin-(np.asarray(material_in.shape)-self.grid)*.5 * self.size/self.grid origin = self.origin-(np.asarray(material_in.shape)-self.cells)*.5 * self.size/self.cells
return Geom(material = material_in, return Grid(material = material_in,
size = self.size/self.grid*np.asarray(material_in.shape), size = self.size/self.cells*np.asarray(material_in.shape),
origin = origin, origin = origin,
comments = self.comments+[util.execution_stamp('Geom','rotate')], comments = self.comments+[util.execution_stamp('Grid','rotate')],
) )
def canvas(self,grid=None,offset=None,fill=None): def canvas(self,cells=None,offset=None,fill=None):
""" """
Crop or enlarge/pad geometry. Crop or enlarge/pad grid.
Parameters Parameters
---------- ----------
grid : numpy.ndarray of shape (3) cells : numpy.ndarray of shape (3)
Number of grid points in x,y,z direction. Number of cells in x,y,z direction.
offset : numpy.ndarray of shape (3) offset : numpy.ndarray of shape (3)
Offset (measured in grid points) from old to new geometry [0,0,0]. Offset (measured in cells) from old to new grid [0,0,0].
fill : int or float, optional fill : int or float, optional
Material index to fill the background. Defaults to material.max() + 1. Material index to fill the background. Defaults to material.max() + 1.
@ -800,19 +807,19 @@ class Geom:
if fill is None: fill = np.nanmax(self.material) + 1 if fill is None: fill = np.nanmax(self.material) + 1
dtype = float if int(fill) != fill or self.material.dtype in np.sctypes['float'] else int dtype = float if int(fill) != fill or self.material.dtype in np.sctypes['float'] else int
canvas = np.full(self.grid if grid is None else grid,fill,dtype) canvas = np.full(self.cells if cells is None else cells,fill,dtype)
LL = np.clip( offset, 0,np.minimum(self.grid, grid+offset)) LL = np.clip( offset, 0,np.minimum(self.cells, cells+offset))
UR = np.clip( offset+grid, 0,np.minimum(self.grid, grid+offset)) UR = np.clip( offset+cells, 0,np.minimum(self.cells, cells+offset))
ll = np.clip(-offset, 0,np.minimum( grid,self.grid-offset)) ll = np.clip(-offset, 0,np.minimum( cells,self.cells-offset))
ur = np.clip(-offset+self.grid,0,np.minimum( grid,self.grid-offset)) ur = np.clip(-offset+self.cells,0,np.minimum( cells,self.cells-offset))
canvas[ll[0]:ur[0],ll[1]:ur[1],ll[2]:ur[2]] = self.material[LL[0]:UR[0],LL[1]:UR[1],LL[2]:UR[2]] canvas[ll[0]:ur[0],ll[1]:ur[1],ll[2]:ur[2]] = self.material[LL[0]:UR[0],LL[1]:UR[1],LL[2]:UR[2]]
return Geom(material = canvas, return Grid(material = canvas,
size = self.size/self.grid*np.asarray(canvas.shape), size = self.size/self.cells*np.asarray(canvas.shape),
origin = self.origin+offset*self.size/self.grid, origin = self.origin+offset*self.size/self.cells,
comments = self.comments+[util.execution_stamp('Geom','canvas')], comments = self.comments+[util.execution_stamp('Grid','canvas')],
) )
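A sketch of padding via canvas with the renamed `cells` argument (numbers illustrative):

    import numpy as np
    import damask

    grid   = damask.Grid.load('polycrystal.vtr')      # hypothetical input file
    padded = grid.canvas(cells=grid.cells+4,
                         offset=np.array([-2,-2,-2])) # 2-cell border on each side, filled with material.max()+1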
@ -834,10 +841,10 @@ class Geom:
mp = np.vectorize(mp) mp = np.vectorize(mp)
mapper = dict(zip(from_material,to_material)) mapper = dict(zip(from_material,to_material))
return Geom(material = mp(self.material,mapper).reshape(self.grid), return Grid(material = mp(self.material,mapper).reshape(self.cells),
size = self.size, size = self.size,
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','substitute')], comments = self.comments+[util.execution_stamp('Grid','substitute')],
) )
@ -848,10 +855,10 @@ class Geom:
sort_idx = np.argsort(from_ma) sort_idx = np.argsort(from_ma)
ma = np.unique(a)[sort_idx][np.searchsorted(from_ma,a,sorter = sort_idx)] ma = np.unique(a)[sort_idx][np.searchsorted(from_ma,a,sorter = sort_idx)]
return Geom(material = ma.reshape(self.grid,order='F'), return Grid(material = ma.reshape(self.cells,order='F'),
size = self.size, size = self.size,
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','sort')], comments = self.comments+[util.execution_stamp('Grid','sort')],
) )
@ -861,7 +868,7 @@ class Geom:
Different from themselves (or listed as triggers) within a given (cubic) vicinity, Different from themselves (or listed as triggers) within a given (cubic) vicinity,
i.e. within the region close to a grain/phase boundary. i.e. within the region close to a grain/phase boundary.
ToDo: use include/exclude as in seeds.from_geom ToDo: use include/exclude as in seeds.from_grid
Parameters Parameters
---------- ----------
@ -875,7 +882,7 @@ class Geom:
List of material indices that trigger a change. List of material indices that trigger a change.
Defaults to [], meaning that any different neighbor triggers a change. Defaults to [], meaning that any different neighbor triggers a change.
periodic : Boolean, optional periodic : Boolean, optional
Assume geometry to be periodic. Defaults to True. Assume grid to be periodic. Defaults to True.
""" """
def tainted_neighborhood(stencil,trigger): def tainted_neighborhood(stencil,trigger):
@ -892,10 +899,10 @@ class Geom:
mode='wrap' if periodic else 'nearest', mode='wrap' if periodic else 'nearest',
extra_keywords={'trigger':trigger}) extra_keywords={'trigger':trigger})
return Geom(material = np.where(mask, self.material + offset_,self.material), return Grid(material = np.where(mask, self.material + offset_,self.material),
size = self.size, size = self.size,
origin = self.origin, origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','vicinity_offset')], comments = self.comments+[util.execution_stamp('Grid','vicinity_offset')],
) )
@ -905,20 +912,20 @@ class Geom:
Parameters Parameters
---------- ----------
periodic : bool, optional periodic : Boolean, optional
Show boundaries across periodicity. Defaults to True. Assume grid to be periodic. Defaults to True.
directions : iterable containing str, optional directions : iterable containing str, optional
Direction(s) along which the geometry is mirrored. Direction(s) along which the boundaries are determined.
Valid entries are 'x', 'y', 'z'. Defaults to 'xyz'. Valid entries are 'x', 'y', 'z'. Defaults to 'xyz'.
""" """
valid = ['x','y','z'] valid = ['x','y','z']
if not set(directions).issubset(valid): if not set(directions).issubset(valid):
raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.') raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
o = [[0, self.grid[0]+1, np.prod(self.grid[:2]+1)+self.grid[0]+1, np.prod(self.grid[:2]+1)], o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
[0, np.prod(self.grid[:2]+1), np.prod(self.grid[:2]+1)+1, 1], [0, np.prod(self.cells[:2]+1), np.prod(self.cells[:2]+1)+1, 1],
[0, 1, self.grid[0]+1+1, self.grid[0]+1]] # offset for connectivity [0, 1, self.cells[0]+1+1, self.cells[0]+1]] # offset for connectivity
connectivity = [] connectivity = []
for i,d in enumerate(['x','y','z']): for i,d in enumerate(['x','y','z']):
@ -933,5 +940,5 @@ class Geom:
base_nodes = np.argwhere(mask.flatten(order='F')).reshape(-1,1) base_nodes = np.argwhere(mask.flatten(order='F')).reshape(-1,1)
connectivity.append(np.block([base_nodes + o[i][k] for k in range(4)])) connectivity.append(np.block([base_nodes + o[i][k] for k in range(4)]))
coords = grid_filters.node_coord0(self.grid,self.size,self.origin).reshape(-1,3,order='F') coords = grid_filters.coordinates0_node(self.cells,self.size,self.origin).reshape(-1,3,order='F')
return VTK.from_unstructured_grid(coords,np.vstack(connectivity),'QUAD') return VTK.from_unstructured_grid(coords,np.vstack(connectivity),'QUAD')
View File
@ -226,9 +226,9 @@ class Orientation(Rotation):
""" """
return super().__eq__(other) \ return super().__eq__(other) \
and self.family == other.family \ and hasattr(other, 'family') and self.family == other.family \
and self.lattice == other.lattice \ and hasattr(other, 'lattice') and self.lattice == other.lattice \
and self.parameters == other.parameters and hasattr(other, 'parameters') and self.parameters == other.parameters
def __matmul__(self,other): def __matmul__(self,other):
View File
@ -46,13 +46,17 @@ class Result:
self.version_major = f.attrs['DADF5_version_major'] self.version_major = f.attrs['DADF5_version_major']
self.version_minor = f.attrs['DADF5_version_minor'] self.version_minor = f.attrs['DADF5_version_minor']
if self.version_major != 0 or not 7 <= self.version_minor <= 9: if self.version_major != 0 or not 7 <= self.version_minor <= 11:
raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}') raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
self.structured = 'grid' in f['geometry'].attrs.keys() self.structured = 'grid' in f['geometry'].attrs.keys() or \
'cells' in f['geometry'].attrs.keys()
if self.structured: if self.structured:
self.grid = f['geometry'].attrs['grid'] try:
self.cells = f['geometry'].attrs['cells']
except KeyError:
self.cells = f['geometry'].attrs['grid']
self.size = f['geometry'].attrs['size'] self.size = f['geometry'].attrs['size']
self.origin = f['geometry'].attrs['origin'] self.origin = f['geometry'].attrs['origin']
@ -558,19 +562,19 @@ class Result:
return dataset return dataset
@property @property
def cell_coordinates(self): def coordinates0_point(self):
"""Return initial coordinates of the cell centers.""" """Return initial coordinates of the cell centers."""
if self.structured: if self.structured:
return grid_filters.cell_coord0(self.grid,self.size,self.origin).reshape(-1,3,order='F') return grid_filters.coordinates0_point(self.cells,self.size,self.origin).reshape(-1,3,order='F')
else: else:
with h5py.File(self.fname,'r') as f: with h5py.File(self.fname,'r') as f:
return f['geometry/x_c'][()] return f['geometry/x_c'][()]
@property @property
def node_coordinates(self): def coordinates0_node(self):
"""Return initial coordinates of the cell centers.""" """Return initial coordinates of the cell centers."""
if self.structured: if self.structured:
return grid_filters.node_coord0(self.grid,self.size,self.origin).reshape(-1,3,order='F') return grid_filters.coordinates0_node(self.cells,self.size,self.origin).reshape(-1,3,order='F')
else: else:
with h5py.File(self.fname,'r') as f: with h5py.File(self.fname,'r') as f:
return f['geometry/x_n'][()] return f['geometry/x_n'][()]
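For scripts that consume results, the property renames translate to something like the following sketch (result file name hypothetical):

    import damask

    r  = damask.Result('geom_load.hdf5')
    xc = r.coordinates0_point   # initial cell-center positions, shape (N_points,3)
    xn = r.coordinates0_node    # initial nodal positions
    print(r.cells)              # replaces the former r.grid attribute of structured results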
@ -780,13 +784,16 @@ class Result:
@staticmethod @staticmethod
def _add_IPF_color(q,l): def _add_IPF_color(l,q):
m = util.scale_to_coprime(np.array(l)) m = util.scale_to_coprime(np.array(l))
try: try:
lattice = {'fcc':'cF','bcc':'cI','hex':'hP'}[q['meta']['Lattice']] lattice = {'fcc':'cF','bcc':'cI','hex':'hP'}[q['meta']['Lattice']]
except KeyError: except KeyError:
lattice = q['meta']['Lattice'] lattice = q['meta']['Lattice']
try:
o = Orientation(rotation = (rfn.structured_to_unstructured(q['data'])),lattice=lattice) o = Orientation(rotation = (rfn.structured_to_unstructured(q['data'])),lattice=lattice)
except ValueError:
o = Orientation(rotation = q['data'],lattice=lattice)
return { return {
'data': np.uint8(o.IPF_color(l)*255), 'data': np.uint8(o.IPF_color(l)*255),
@ -798,16 +805,17 @@ class Result:
'Creator': 'add_IPF_color' 'Creator': 'add_IPF_color'
} }
} }
def add_IPF_color(self,q,l): def add_IPF_color(self,l,q='O'):
""" """
Add RGB color tuple of inverse pole figure (IPF) color. Add RGB color tuple of inverse pole figure (IPF) color.
Parameters Parameters
---------- ----------
q : str
Label of the dataset containing the crystallographic orientation as quaternions.
l : numpy.array of shape (3) l : numpy.array of shape (3)
Lab frame direction for inverse pole figure. Lab frame direction for inverse pole figure.
q : str
Label of the dataset containing the crystallographic orientation as quaternions.
Defaults to 'O'.
""" """
self._add_generic_pointwise(self._add_IPF_color,{'q':q},{'l':l}) self._add_generic_pointwise(self._add_IPF_color,{'q':q},{'l':l})
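With the reordered arguments, the lab-frame direction comes first and the orientation label defaults to 'O', e.g.:

    import damask

    r = damask.Result('geom_load.hdf5')   # hypothetical result file
    r.add_IPF_color([0,0,1])              # IPF color w.r.t. lab z, using dataset 'O'
    r.add_IPF_color([0,0,1], q='O')       # equivalent, label given explicitly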
@ -1125,6 +1133,7 @@ class Result:
Arguments parsed to func. Arguments parsed to func.
""" """
chunk_size = 1024**2//8
num_threads = damask.environment.options['DAMASK_NUM_THREADS'] num_threads = damask.environment.options['DAMASK_NUM_THREADS']
pool = mp.Pool(int(num_threads) if num_threads is not None else None) pool = mp.Pool(int(num_threads) if num_threads is not None else None)
lock = mp.Manager().Lock() lock = mp.Manager().Lock()
@ -1147,6 +1156,14 @@ class Result:
dataset[...] = result[1]['data'] dataset[...] = result[1]['data']
dataset.attrs['Overwritten'] = 'Yes' if h5py3 else \ dataset.attrs['Overwritten'] = 'Yes' if h5py3 else \
'Yes'.encode() 'Yes'.encode()
else:
if result[1]['data'].size >= chunk_size*2:
shape = result[1]['data'].shape
chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
maxshape=shape, chunks=chunks,
compression='gzip', compression_opts=6,
shuffle=True,fletcher32=True)
else: else:
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data']) dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
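The new branch writes large datasets chunked and gzip-compressed, targeting roughly 1 MiB of float64 per chunk. A standalone sketch of the same arithmetic (shape is illustrative):

    import numpy as np

    chunk_size = 1024**2//8                        # 131072 doubles ~ 1 MiB
    shape  = (200000, 3, 3)                        # e.g. a tensor field
    chunks = (chunk_size//np.prod(shape[1:]),) + shape[1:]
    print(chunks)                                  # (14563, 3, 3)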
@ -1218,7 +1235,7 @@ class Result:
topology=ET.SubElement(grid, 'Topology') topology=ET.SubElement(grid, 'Topology')
topology.attrib={'TopologyType': '3DCoRectMesh', topology.attrib={'TopologyType': '3DCoRectMesh',
'Dimensions': '{} {} {}'.format(*self.grid+1)} 'Dimensions': '{} {} {}'.format(*self.cells+1)}
geometry=ET.SubElement(grid, 'Geometry') geometry=ET.SubElement(grid, 'Geometry')
geometry.attrib={'GeometryType':'Origin_DxDyDz'} geometry.attrib={'GeometryType':'Origin_DxDyDz'}
@ -1233,7 +1250,7 @@ class Result:
delta.attrib={'Format': 'XML', delta.attrib={'Format': 'XML',
'NumberType': 'Float', 'NumberType': 'Float',
'Dimensions': '3'} 'Dimensions': '3'}
delta.text="{} {} {}".format(*(self.size/self.grid)) delta.text="{} {} {}".format(*(self.size/self.cells))
with h5py.File(self.fname,'r') as f: with h5py.File(self.fname,'r') as f:
@ -1244,7 +1261,7 @@ class Result:
data_items.append(ET.SubElement(attributes[-1], 'DataItem')) data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
data_items[-1].attrib={'Format': 'HDF', data_items[-1].attrib={'Format': 'HDF',
'Precision': '8', 'Precision': '8',
'Dimensions': '{} {} {} 3'.format(*(self.grid+1))} 'Dimensions': '{} {} {} 3'.format(*(self.cells+1))}
data_items[-1].text=f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n' data_items[-1].text=f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n'
for o,p in zip(['phases','homogenizations'],['out_type_ph','out_type_ho']): for o,p in zip(['phases','homogenizations'],['out_type_ph','out_type_ho']):
@ -1267,7 +1284,7 @@ class Result:
data_items[-1].attrib={'Format': 'HDF', data_items[-1].attrib={'Format': 'HDF',
'NumberType': number_type_map(dtype), 'NumberType': number_type_map(dtype),
'Precision': f'{dtype.itemsize}', 'Precision': f'{dtype.itemsize}',
'Dimensions': '{} {} {} {}'.format(*self.grid,1 if shape == () else 'Dimensions': '{} {} {} {}'.format(*self.cells,1 if shape == () else
np.prod(shape))} np.prod(shape))}
data_items[-1].text=f'{os.path.split(self.fname)[1]}:{name}' data_items[-1].text=f'{os.path.split(self.fname)[1]}:{name}'
@ -1291,7 +1308,7 @@ class Result:
if mode.lower()=='cell': if mode.lower()=='cell':
if self.structured: if self.structured:
v = VTK.from_rectilinear_grid(self.grid,self.size,self.origin) v = VTK.from_rectilinear_grid(self.cells,self.size,self.origin)
else: else:
with h5py.File(self.fname,'r') as f: with h5py.File(self.fname,'r') as f:
v = VTK.from_unstructured_grid(f['/geometry/x_n'][()], v = VTK.from_unstructured_grid(f['/geometry/x_n'][()],
@ -1299,7 +1316,7 @@ class Result:
f['/geometry/T_c'].attrs['VTK_TYPE'] if h5py3 else \ f['/geometry/T_c'].attrs['VTK_TYPE'] if h5py3 else \
f['/geometry/T_c'].attrs['VTK_TYPE'].decode()) f['/geometry/T_c'].attrs['VTK_TYPE'].decode())
elif mode.lower()=='point': elif mode.lower()=='point':
v = VTK.from_poly_data(self.cell_coordinates) v = VTK.from_poly_data(self.coordinates0_point)
N_digits = int(np.floor(np.log10(max(1,int(self.increments[-1][3:])))))+1 N_digits = int(np.floor(np.log10(max(1,int(self.increments[-1][3:])))))+1
View File
@ -144,6 +144,11 @@ class Rotation:
return self.copy(rotation=Rotation(np.block([np.cos(pwr*phi),np.sin(pwr*phi)*p]))._standardize()) return self.copy(rotation=Rotation(np.block([np.cos(pwr*phi),np.sin(pwr*phi)*p]))._standardize())
def __mul__(self,other):
"""Standard multiplication is not implemented."""
raise NotImplementedError('Use "R@b", i.e. matmul, to apply rotation "R" to object "b"')
def __matmul__(self,other): def __matmul__(self,other):
""" """
Rotation of vector, second or fourth order tensor, or rotation object. Rotation of vector, second or fourth order tensor, or rotation object.
@ -199,8 +204,16 @@ class Rotation:
def append(self,other): def append(self,other):
"""Extend rotation array along first dimension with other array.""" """
return self.copy(rotation=np.vstack((self.quaternion,other.quaternion))) Extend rotation array along first dimension with other array(s).
Parameters
----------
other : Rotation or list of Rotations.
"""
return self.copy(rotation=np.vstack(tuple(map(lambda x:x.quaternion,
[self]+other if isinstance(other,list) else [self,other]))))
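The two Rotation changes above in use; Rotation.from_random is assumed to be available as a convenience constructor and is not part of this hunk:

    import numpy as np
    from damask import Rotation

    R = Rotation.from_random()               # assumed constructor, one random rotation
    w = R @ np.array([1.0, 0.0, 0.0])        # rotate a vector; 'R * v' now raises NotImplementedError
    pair  = R.append(Rotation.from_random())                            # a single Rotation ...
    stack = R.append([Rotation.from_random(), Rotation.from_random()])  # ... or a list of Rotations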
def flatten(self,order = 'C'): def flatten(self,order = 'C'):
@ -258,7 +271,7 @@ class Rotation:
"""Intermediate representation supporting quaternion averaging.""" """Intermediate representation supporting quaternion averaging."""
return np.einsum('...i,...j',quat,quat) return np.einsum('...i,...j',quat,quat)
if not weights: if weights is None:
weights = np.ones(self.shape,dtype=float) weights = np.ones(self.shape,dtype=float)
eig, vec = np.linalg.eig(np.sum(_M(self.quaternion) * weights[...,np.newaxis,np.newaxis],axis=-3) \ eig, vec = np.linalg.eig(np.sum(_M(self.quaternion) * weights[...,np.newaxis,np.newaxis],axis=-3) \
@ -763,7 +776,7 @@ class Rotation:
def _dg(eu,deg): def _dg(eu,deg):
"""Return infinitesimal Euler space volume of bin(s).""" """Return infinitesimal Euler space volume of bin(s)."""
phi_sorted = eu[np.lexsort((eu[:,0],eu[:,1],eu[:,2]))] phi_sorted = eu[np.lexsort((eu[:,0],eu[:,1],eu[:,2]))]
steps,size,_ = grid_filters.cell_coord0_gridSizeOrigin(phi_sorted) steps,size,_ = grid_filters.cellsSizeOrigin_coordinates0_point(phi_sorted)
delta = np.radians(size/steps) if deg else size/steps delta = np.radians(size/steps) if deg else size/steps
return delta[0]*2.0*np.sin(delta[1]/2.0)*delta[2] / 8.0 / np.pi**2 * np.sin(np.radians(eu[:,1]) if deg else eu[:,1]) return delta[0]*2.0*np.sin(delta[1]/2.0)*delta[2] / 8.0 / np.pi**2 * np.sin(np.radians(eu[:,1]) if deg else eu[:,1])
View File
@ -33,6 +33,10 @@ class Table:
"""Brief overview.""" """Brief overview."""
return '\n'.join(['# '+c for c in self.comments])+'\n'+self.data.__repr__() return '\n'.join(['# '+c for c in self.comments])+'\n'+self.data.__repr__()
def __getitem__(self,item):
"""Return slice according to item."""
return self.__class__(data=self.data[item],shapes=self.shapes,comments=self.comments)
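The new __getitem__ makes row slicing return a Table again, e.g. (file name hypothetical):

    from damask import Table

    tbl  = Table.load('orientations.txt')   # hypothetical ASCII table
    head = tbl[:5]                          # first five rows, still a Table (shapes/comments preserved)
    print(len(head))                        # 5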
def __len__(self): def __len__(self):
"""Number of rows.""" """Number of rows."""
return len(self.data) return len(self.data)
@ -73,7 +77,7 @@ class Table:
@staticmethod @staticmethod
def load(fname): def load(fname):
""" """
Load ASCII table file. Load from ASCII table file.
In legacy style, the first line indicates the number of In legacy style, the first line indicates the number of
subsequent header lines as "N header", with the last header line being subsequent header lines as "N header", with the last header line being
@ -131,7 +135,7 @@ class Table:
@staticmethod @staticmethod
def load_ang(fname): def load_ang(fname):
""" """
Load ang file. Load from ang file.
A valid TSL ang file needs to contain the following columns: A valid TSL ang file needs to contain the following columns:
* Euler angles (Bunge notation) in radians, 3 floats, label 'eu'. * Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
View File
@ -76,7 +76,8 @@ class VTK:
nodes : numpy.ndarray of shape (:,3) nodes : numpy.ndarray of shape (:,3)
Spatial position of the nodes. Spatial position of the nodes.
connectivity : numpy.ndarray of np.dtype = int connectivity : numpy.ndarray of np.dtype = int
Cell connectivity (0-based), first dimension determines #Cells, second dimension determines #Nodes/Cell. Cell connectivity (0-based), first dimension determines #Cells,
second dimension determines #Nodes/Cell.
cell_type : str cell_type : str
Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, TETRA, and HEXAHEDRON. Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, TETRA, and HEXAHEDRON.
@ -91,7 +92,9 @@ class VTK:
vtk_data = vtk.vtkUnstructuredGrid() vtk_data = vtk.vtkUnstructuredGrid()
vtk_data.SetPoints(vtk_nodes) vtk_data.SetPoints(vtk_nodes)
vtk_data.SetCells(eval(f'vtk.VTK_{cell_type.split("_",1)[-1].upper()}'),cells) cell_types = {'TRIANGLE':vtk.VTK_TRIANGLE, 'QUAD':vtk.VTK_QUAD,
'TETRA' :vtk.VTK_TETRA, 'HEXAHEDRON':vtk.VTK_HEXAHEDRON}
vtk_data.SetCells(cell_types[cell_type.split("_",1)[-1].upper()],cells)
return VTK(vtk_data) return VTK(vtk_data)
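The explicit lookup replaces the former eval() on the cell-type name; usage stays the same, e.g. with a single tetrahedron (arrays illustrative):

    import numpy as np
    from damask import VTK

    nodes        = np.array([[0,0,0],[1,0,0],[0,1,0],[0,0,1]], dtype=float)
    connectivity = np.array([[0,1,2,3]])    # 0-based node indices, one TETRA cell
    v = VTK.from_unstructured_grid(nodes, connectivity, 'TETRA')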
@ -128,7 +131,7 @@ class VTK:
@staticmethod @staticmethod
def load(fname,dataset_type=None): def load(fname,dataset_type=None):
""" """
Create VTK from file. Load from VTK file.
Parameters Parameters
---------- ----------
@ -181,7 +184,7 @@ class VTK:
writer.Write() writer.Write()
def save(self,fname,parallel=True,compress=True): def save(self,fname,parallel=True,compress=True):
""" """
Write to file. Save as VTK file.
Parameters Parameters
---------- ----------
View File
@ -4,38 +4,38 @@ Filters for operations on regular grids.
Notes Notes
----- -----
The grids are defined as (x,y,z,...) where x is fastest and z is slowest. The grids are defined as (x,y,z,...) where x is fastest and z is slowest.
This convention is consistent with the geom file format. This convention is consistent with the layout in grid vtr files.
When converting to/from a plain list (e.g. storage in ASCII table), When converting to/from a plain list (e.g. storage in ASCII table),
the following operations are required for tensorial data: the following operations are required for tensorial data:
D3 = D1.reshape(grid+(-1,),order='F').reshape(grid+(3,3)) D3 = D1.reshape(cells+(-1,),order='F').reshape(cells+(3,3))
D1 = D3.reshape(grid+(-1,)).reshape(-1,9,order='F') D1 = D3.reshape(cells+(-1,)).reshape(-1,9,order='F')
""" """
from scipy import spatial as _spatial from scipy import spatial as _spatial
import numpy as _np import numpy as _np
def _ks(size,grid,first_order=False): def _ks(size,cells,first_order=False):
""" """
Get wave numbers operator. Get wave numbers operator.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
grid : numpy.ndarray of shape (3) cells : numpy.ndarray of shape (3)
number of grid points. Number of cells.
first_order : bool, optional first_order : bool, optional
correction for first order derivatives, defaults to False. Correction for first order derivatives, defaults to False.
""" """
k_sk = _np.where(_np.arange(grid[0])>grid[0]//2,_np.arange(grid[0])-grid[0],_np.arange(grid[0]))/size[0] k_sk = _np.where(_np.arange(cells[0])>cells[0]//2,_np.arange(cells[0])-cells[0],_np.arange(cells[0]))/size[0]
if grid[0]%2 == 0 and first_order: k_sk[grid[0]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) if cells[0]%2 == 0 and first_order: k_sk[cells[0]//2] = 0 # Nyquist freq=0 for even cells (Johnson, MIT, 2011)
k_sj = _np.where(_np.arange(grid[1])>grid[1]//2,_np.arange(grid[1])-grid[1],_np.arange(grid[1]))/size[1] k_sj = _np.where(_np.arange(cells[1])>cells[1]//2,_np.arange(cells[1])-cells[1],_np.arange(cells[1]))/size[1]
if grid[1]%2 == 0 and first_order: k_sj[grid[1]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011) if cells[1]%2 == 0 and first_order: k_sj[cells[1]//2] = 0 # Nyquist freq=0 for even cells (Johnson, MIT, 2011)
k_si = _np.arange(grid[2]//2+1)/size[2] k_si = _np.arange(cells[2]//2+1)/size[2]
return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1) return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1)
@ -47,9 +47,9 @@ def curl(size,field):
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
field : numpy.ndarray of shape (:,:,:,3) or (:,:,:,3,3) field : numpy.ndarray of shape (:,:,:,3) or (:,:,:,3,3)
periodic field of which the curl is calculated. Periodic field of which the curl is calculated.
""" """
n = _np.prod(field.shape[3:]) n = _np.prod(field.shape[3:])
@ -73,9 +73,9 @@ def divergence(size,field):
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
field : numpy.ndarray of shape (:,:,:,3) or (:,:,:,3,3) field : numpy.ndarray of shape (:,:,:,3) or (:,:,:,3,3)
periodic field of which the divergence is calculated. Periodic field of which the divergence is calculated.
""" """
n = _np.prod(field.shape[3:]) n = _np.prod(field.shape[3:])
@ -95,9 +95,9 @@ def gradient(size,field):
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
field : numpy.ndarray of shape (:,:,:,1) or (:,:,:,3) field : numpy.ndarray of shape (:,:,:,1) or (:,:,:,3)
periodic field of which the gradient is calculated. Periodic field of which the gradient is calculated.
""" """
n = _np.prod(field.shape[3:]) n = _np.prod(field.shape[3:])
@ -110,39 +110,39 @@ def gradient(size,field):
return _np.fft.irfftn(grad_,axes=(0,1,2),s=field.shape[:3]) return _np.fft.irfftn(grad_,axes=(0,1,2),s=field.shape[:3])
def cell_coord0(grid,size,origin=_np.zeros(3)): def coordinates0_point(cells,size,origin=_np.zeros(3)):
""" """
Cell center positions (undeformed). Cell center positions (undeformed).
Parameters Parameters
---------- ----------
grid : numpy.ndarray of shape (3) cells : numpy.ndarray of shape (3)
number of grid points. Number of cells.
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
origin : numpy.ndarray, optional origin : numpy.ndarray, optional
physical origin of the periodic field. Defaults to [0.0,0.0,0.0]. Physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
""" """
start = origin + size/grid*.5 start = origin + size/cells*.5
end = origin + size - size/grid*.5 end = origin + size - size/cells*.5
return _np.stack(_np.meshgrid(_np.linspace(start[0],end[0],grid[0]), return _np.stack(_np.meshgrid(_np.linspace(start[0],end[0],cells[0]),
_np.linspace(start[1],end[1],grid[1]), _np.linspace(start[1],end[1],cells[1]),
_np.linspace(start[2],end[2],grid[2]),indexing = 'ij'), _np.linspace(start[2],end[2],cells[2]),indexing = 'ij'),
axis = -1) axis = -1)
def cell_displacement_fluct(size,F): def displacement_fluct_point(size,F):
""" """
Cell center displacement field from fluctuation part of the deformation gradient field. Cell center displacement field from fluctuation part of the deformation gradient field.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
F : numpy.ndarray F : numpy.ndarray
deformation gradient field. Deformation gradient field.
""" """
integrator = 0.5j*size/_np.pi integrator = 0.5j*size/_np.pi
@ -160,194 +160,196 @@ def cell_displacement_fluct(size,F):
return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3]) return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])
def cell_displacement_avg(size,F): def displacement_avg_point(size,F):
""" """
Cell center displacement field from average part of the deformation gradient field. Cell center displacement field from average part of the deformation gradient field.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
F : numpy.ndarray F : numpy.ndarray
deformation gradient field. Deformation gradient field.
""" """
F_avg = _np.average(F,axis=(0,1,2)) F_avg = _np.average(F,axis=(0,1,2))
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),cell_coord0(F.shape[:3],size)) return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_point(F.shape[:3],size))
def cell_displacement(size,F): def displacement_point(size,F):
""" """
Cell center displacement field from deformation gradient field. Cell center displacement field from deformation gradient field.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
F : numpy.ndarray F : numpy.ndarray
deformation gradient field. Deformation gradient field.
""" """
return cell_displacement_avg(size,F) + cell_displacement_fluct(size,F) return displacement_avg_point(size,F) + displacement_fluct_point(size,F)
def cell_coord(size,F,origin=_np.zeros(3)): def coordinates_point(size,F,origin=_np.zeros(3)):
""" """
Cell center positions. Cell center positions.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
F : numpy.ndarray F : numpy.ndarray
deformation gradient field. Deformation gradient field.
origin : numpy.ndarray of shape (3), optional origin : numpy.ndarray of shape (3), optional
physical origin of the periodic field. Defaults to [0.0,0.0,0.0]. Physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
""" """
return cell_coord0(F.shape[:3],size,origin) + cell_displacement(size,F) return coordinates0_point(F.shape[:3],size,origin) + displacement_point(size,F)
def cell_coord0_gridSizeOrigin(coord0,ordered=True): def cellsSizeOrigin_coordinates0_point(coordinates0,ordered=True):
""" """
Return grid 'DNA', i.e. grid, size, and origin from 1D array of cell positions. Return grid 'DNA', i.e. cells, size, and origin from 1D array of point positions.
Parameters Parameters
---------- ----------
coord0 : numpy.ndarray of shape (:,3) coordinates0 : numpy.ndarray of shape (:,3)
undeformed cell coordinates. Undeformed cell coordinates.
ordered : bool, optional ordered : bool, optional
expect coord0 data to be ordered (x fast, z slow). Expect coordinates0 data to be ordered (x fast, z slow).
Defaults to True.
""" """
coords = [_np.unique(coord0[:,i]) for i in range(3)] coords = [_np.unique(coordinates0[:,i]) for i in range(3)]
mincorner = _np.array(list(map(min,coords))) mincorner = _np.array(list(map(min,coords)))
maxcorner = _np.array(list(map(max,coords))) maxcorner = _np.array(list(map(max,coords)))
grid = _np.array(list(map(len,coords)),'i') cells = _np.array(list(map(len,coords)),'i')
size = grid/_np.maximum(grid-1,1) * (maxcorner-mincorner) size = cells/_np.maximum(cells-1,1) * (maxcorner-mincorner)
delta = size/grid delta = size/cells
origin = mincorner - delta*.5 origin = mincorner - delta*.5
# 1D/2D: size/origin combination undefined, set origin to 0.0 # 1D/2D: size/origin combination undefined, set origin to 0.0
size [_np.where(grid==1)] = origin[_np.where(grid==1)]*2. size [_np.where(cells==1)] = origin[_np.where(cells==1)]*2.
origin[_np.where(grid==1)] = 0.0 origin[_np.where(cells==1)] = 0.0
if grid.prod() != len(coord0): if cells.prod() != len(coordinates0):
raise ValueError('Data count {len(coord0)} does not match grid {grid}.') raise ValueError(f'Data count {len(coordinates0)} does not match cells {cells}.')
start = origin + delta*.5 start = origin + delta*.5
end = origin - delta*.5 + size end = origin - delta*.5 + size
atol = _np.max(size)*5e-2 atol = _np.max(size)*5e-2
if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],grid[0]),atol=atol) and \ if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],cells[0]),atol=atol) and \
_np.allclose(coords[1],_np.linspace(start[1],end[1],grid[1]),atol=atol) and \ _np.allclose(coords[1],_np.linspace(start[1],end[1],cells[1]),atol=atol) and \
_np.allclose(coords[2],_np.linspace(start[2],end[2],grid[2]),atol=atol)): _np.allclose(coords[2],_np.linspace(start[2],end[2],cells[2]),atol=atol)):
raise ValueError('Regular grid spacing violated.') raise ValueError('Regular cell spacing violated.')
if ordered and not _np.allclose(coord0.reshape(tuple(grid)+(3,),order='F'),cell_coord0(grid,size,origin),atol=atol): if ordered and not _np.allclose(coordinates0.reshape(tuple(cells)+(3,),order='F'),
coordinates0_point(cells,size,origin),atol=atol):
raise ValueError('Input data is not ordered (x fast, z slow).') raise ValueError('Input data is not ordered (x fast, z slow).')
return (grid,size,origin) return (cells,size,origin)
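A round-trip sketch of the renamed grid-'DNA' helpers:

    import numpy as np
    from damask import grid_filters

    cells  = np.array([4,3,2])
    size   = np.array([4e-6,3e-6,2e-6])
    coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3,order='F')
    c,s,o  = grid_filters.cellsSizeOrigin_coordinates0_point(coords)   # recovers cells, size, origin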
def coord0_check(coord0): def coordinates0_check(coordinates0):
""" """
Check whether coordinates lie on a regular grid. Check whether coordinates lie on a regular grid.
Parameters Parameters
---------- ----------
coord0 : numpy.ndarray coordinates0 : numpy.ndarray
array of undeformed cell coordinates. Array of undeformed cell coordinates.
""" """
cell_coord0_gridSizeOrigin(coord0,ordered=True) cellsSizeOrigin_coordinates0_point(coordinates0,ordered=True)
def node_coord0(grid,size,origin=_np.zeros(3)): def coordinates0_node(cells,size,origin=_np.zeros(3)):
""" """
Nodal positions (undeformed). Nodal positions (undeformed).
Parameters Parameters
---------- ----------
grid : numpy.ndarray of shape (3) cells : numpy.ndarray of shape (3)
number of grid points. Number of cells.
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
origin : numpy.ndarray of shape (3), optional origin : numpy.ndarray of shape (3), optional
physical origin of the periodic field. Defaults to [0.0,0.0,0.0]. Physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
""" """
return _np.stack(_np.meshgrid(_np.linspace(origin[0],size[0]+origin[0],grid[0]+1), return _np.stack(_np.meshgrid(_np.linspace(origin[0],size[0]+origin[0],cells[0]+1),
_np.linspace(origin[1],size[1]+origin[1],grid[1]+1), _np.linspace(origin[1],size[1]+origin[1],cells[1]+1),
_np.linspace(origin[2],size[2]+origin[2],grid[2]+1),indexing = 'ij'), _np.linspace(origin[2],size[2]+origin[2],cells[2]+1),indexing = 'ij'),
axis = -1) axis = -1)
def node_displacement_fluct(size,F): def displacement_fluct_node(size,F):
""" """
Nodal displacement field from fluctuation part of the deformation gradient field. Nodal displacement field from fluctuation part of the deformation gradient field.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
F : numpy.ndarray F : numpy.ndarray
deformation gradient field. Deformation gradient field.
""" """
return cell_2_node(cell_displacement_fluct(size,F)) return point_to_node(displacement_fluct_point(size,F))
def node_displacement_avg(size,F): def displacement_avg_node(size,F):
""" """
Nodal displacement field from average part of the deformation gradient field. Nodal displacement field from average part of the deformation gradient field.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
F : numpy.ndarray F : numpy.ndarray
deformation gradient field. Deformation gradient field.
""" """
F_avg = _np.average(F,axis=(0,1,2)) F_avg = _np.average(F,axis=(0,1,2))
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),node_coord0(F.shape[:3],size)) return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_node(F.shape[:3],size))
def node_displacement(size,F): def displacement_node(size,F):
""" """
Nodal displacement field from deformation gradient field. Nodal displacement field from deformation gradient field.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
F : numpy.ndarray F : numpy.ndarray
deformation gradient field. Deformation gradient field.
""" """
return node_displacement_avg(size,F) + node_displacement_fluct(size,F) return displacement_avg_node(size,F) + displacement_fluct_node(size,F)
def node_coord(size,F,origin=_np.zeros(3)): def coordinates_node(size,F,origin=_np.zeros(3)):
""" """
Nodal positions. Nodal positions.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size of the periodic field. Physical size of the periodic field.
F : numpy.ndarray F : numpy.ndarray
deformation gradient field. Deformation gradient field.
origin : numpy.ndarray of shape (3), optional origin : numpy.ndarray of shape (3), optional
physical origin of the periodic field. Defaults to [0.0,0.0,0.0]. Physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
""" """
return node_coord0(F.shape[:3],size,origin) + node_displacement(size,F) return coordinates0_node(F.shape[:3],size,origin) + displacement_node(size,F)
def cell_2_node(cell_data): def point_to_node(cell_data):
"""Interpolate periodic cell data to nodal data.""" """Interpolate periodic point data to nodal data."""
n = ( cell_data + _np.roll(cell_data,1,(0,1,2)) n = ( cell_data + _np.roll(cell_data,1,(0,1,2))
+ _np.roll(cell_data,1,(0,)) + _np.roll(cell_data,1,(1,)) + _np.roll(cell_data,1,(2,)) + _np.roll(cell_data,1,(0,)) + _np.roll(cell_data,1,(1,)) + _np.roll(cell_data,1,(2,))
+ _np.roll(cell_data,1,(0,1)) + _np.roll(cell_data,1,(1,2)) + _np.roll(cell_data,1,(2,0)))*0.125 + _np.roll(cell_data,1,(0,1)) + _np.roll(cell_data,1,(1,2)) + _np.roll(cell_data,1,(2,0)))*0.125
@ -355,8 +357,8 @@ def cell_2_node(cell_data):
return _np.pad(n,((0,1),(0,1),(0,1))+((0,0),)*len(cell_data.shape[3:]),mode='wrap') return _np.pad(n,((0,1),(0,1),(0,1))+((0,0),)*len(cell_data.shape[3:]),mode='wrap')
def node_2_cell(node_data): def node_2_point(node_data):
"""Interpolate periodic nodal data to cell data.""" """Interpolate periodic nodal data to point data."""
c = ( node_data + _np.roll(node_data,1,(0,1,2)) c = ( node_data + _np.roll(node_data,1,(0,1,2))
+ _np.roll(node_data,1,(0,)) + _np.roll(node_data,1,(1,)) + _np.roll(node_data,1,(2,)) + _np.roll(node_data,1,(0,)) + _np.roll(node_data,1,(1,)) + _np.roll(node_data,1,(2,))
+ _np.roll(node_data,1,(0,1)) + _np.roll(node_data,1,(1,2)) + _np.roll(node_data,1,(2,0)))*0.125 + _np.roll(node_data,1,(0,1)) + _np.roll(node_data,1,(1,2)) + _np.roll(node_data,1,(2,0)))*0.125
@ -364,57 +366,57 @@ def node_2_cell(node_data):
return c[1:,1:,1:] return c[1:,1:,1:]
def node_coord0_gridSizeOrigin(coord0,ordered=True): def cellsSizeOrigin_coordinates0_node(coordinates0,ordered=True):
""" """
Return grid 'DNA', i.e. grid, size, and origin from 1D array of nodal positions. Return grid 'DNA', i.e. cells, size, and origin from 1D array of nodal positions.
Parameters Parameters
---------- ----------
coord0 : numpy.ndarray of shape (:,3) coordinates0 : numpy.ndarray of shape (:,3)
undeformed nodal coordinates. Undeformed nodal coordinates.
ordered : bool, optional ordered : bool, optional
expect coord0 data to be ordered (x fast, z slow). Expect coordinates0 data to be ordered (x fast, z slow).
Defaults to True.
""" """
coords = [_np.unique(coord0[:,i]) for i in range(3)] coords = [_np.unique(coordinates0[:,i]) for i in range(3)]
mincorner = _np.array(list(map(min,coords))) mincorner = _np.array(list(map(min,coords)))
maxcorner = _np.array(list(map(max,coords))) maxcorner = _np.array(list(map(max,coords)))
grid = _np.array(list(map(len,coords)),'i') - 1 cells = _np.array(list(map(len,coords)),'i') - 1
size = maxcorner-mincorner size = maxcorner-mincorner
origin = mincorner origin = mincorner
if (grid+1).prod() != len(coord0): if (cells+1).prod() != len(coordinates0):
raise ValueError('Data count {len(coord0)} does not match grid {grid}.') raise ValueError(f'Data count {len(coordinates0)} does not match cells {cells}.')
atol = _np.max(size)*5e-2 atol = _np.max(size)*5e-2
if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \ if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],cells[0]+1),atol=atol) and \
_np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],grid[1]+1),atol=atol) and \ _np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],cells[1]+1),atol=atol) and \
_np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],grid[2]+1),atol=atol)): _np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],cells[2]+1),atol=atol)):
raise ValueError('Regular grid spacing violated.') raise ValueError('Regular cell spacing violated.')
if ordered and not _np.allclose(coord0.reshape(tuple(grid+1)+(3,),order='F'),node_coord0(grid,size,origin),atol=atol): if ordered and not _np.allclose(coordinates0.reshape(tuple(cells+1)+(3,),order='F'),
coordinates0_node(cells,size,origin),atol=atol):
raise ValueError('Input data is not ordered (x fast, z slow).') raise ValueError('Input data is not ordered (x fast, z slow).')
return (grid,size,origin) return (cells,size,origin)
def regrid(size,F,new_grid): def regrid(size,F,cells):
""" """
Return mapping from coordinates in deformed configuration to a regular grid. Return mapping from coordinates in deformed configuration to a regular grid.
Parameters Parameters
---------- ----------
size : numpy.ndarray of shape (3) size : numpy.ndarray of shape (3)
physical size Physical size.
F : numpy.ndarray of shape (:,:,:,3,3) F : numpy.ndarray of shape (:,:,:,3,3)
deformation gradient field Deformation gradient field.
new_grid : numpy.ndarray of shape (3) cells : numpy.ndarray of shape (3)
new grid for undeformed coordinates Cell count along x,y,z of remapping grid.
""" """
c = cell_coord0(F.shape[:3],size) \ c = coordinates_point(size,F)
+ cell_displacement_avg(size,F) \
+ cell_displacement_fluct(size,F)
outer = _np.dot(_np.average(F,axis=(0,1,2)),size) outer = _np.dot(_np.average(F,axis=(0,1,2)),size)
for d in range(3): for d in range(3):
@ -422,4 +424,4 @@ def regrid(size,F,new_grid):
c[_np.where(c[:,:,:,d]>outer[d])] -= outer[d] c[_np.where(c[:,:,:,d]>outer[d])] -= outer[d]
tree = _spatial.cKDTree(c.reshape(-1,3),boxsize=outer) tree = _spatial.cKDTree(c.reshape(-1,3),boxsize=outer)
return tree.query(cell_coord0(new_grid,outer))[1].flatten() return tree.query(coordinates0_point(cells,outer))[1].flatten()
View File
@ -7,7 +7,7 @@ from . import util
from . import grid_filters from . import grid_filters
def from_random(size,N_seeds,grid=None,rng_seed=None): def from_random(size,N_seeds,cells=None,rng_seed=None):
""" """
Random seeding in space. Random seeding in space.
@ -17,21 +17,21 @@ def from_random(size,N_seeds,grid=None,rng_seed=None):
Physical size of the seeding domain. Physical size of the seeding domain.
N_seeds : int N_seeds : int
Number of seeds. Number of seeds.
grid : numpy.ndarray of shape (3), optional. cells : numpy.ndarray of shape (3), optional.
If given, ensures that all seeds initiate one grain if using a If given, ensures that each seed results in a grain when a standard Voronoi
standard Voronoi tessellation. tessellation is performed using the given grid resolution (i.e. size/cells).
rng_seed : {None, int, array_like[ints], SeedSequence, BitGenerator, Generator}, optional rng_seed : {None, int, array_like[ints], SeedSequence, BitGenerator, Generator}, optional
A seed to initialize the BitGenerator. Defaults to None. A seed to initialize the BitGenerator. Defaults to None.
If None, then fresh, unpredictable entropy will be pulled from the OS. If None, then fresh, unpredictable entropy will be pulled from the OS.
""" """
rng = _np.random.default_rng(rng_seed) rng = _np.random.default_rng(rng_seed)
if grid is None: if cells is None:
coords = rng.random((N_seeds,3)) * size coords = rng.random((N_seeds,3)) * size
else: else:
grid_coords = grid_filters.cell_coord0(grid,size).reshape(-1,3,order='F') grid_coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3,order='F')
coords = grid_coords[rng.choice(_np.prod(grid),N_seeds, replace=False)] \ coords = grid_coords[rng.choice(_np.prod(cells),N_seeds, replace=False)] \
+ _np.broadcast_to(size/grid,(N_seeds,3))*(rng.random((N_seeds,3))*.5-.25) # wobble without leaving grid + _np.broadcast_to(size/cells,(N_seeds,3))*(rng.random((N_seeds,3))*.5-.25) # wobble without leaving cells
return coords return coords
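Seeding with the renamed `cells` argument, as a sketch:

    import numpy as np
    from damask import seeds

    size   = np.array([1e-3]*3)
    cells  = np.array([32,32,32])
    coords = seeds.from_random(size, N_seeds=20, cells=cells)   # snapped near cell centers, with a small wobble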
@ -77,14 +77,14 @@ def from_Poisson_disc(size,N_seeds,N_candidates,distance,periodic=True,rng_seed=
return coords return coords
def from_geom(geom,selection=None,invert=False,average=False,periodic=True): def from_grid(grid,selection=None,invert=False,average=False,periodic=True):
""" """
Create seed from existing geometry description. Create seeds from an existing grid description.
Parameters Parameters
---------- ----------
geom : damask.Geom grid : damask.Grid
Geometry, from which the material IDs are used as seeds. Grid, from which the material IDs are used as seeds.
selection : iterable of integers, optional selection : iterable of integers, optional
Material IDs to consider. Material IDs to consider.
invert : boolean, false invert : bool, optional
@ -95,10 +95,10 @@ def from_geom(geom,selection=None,invert=False,average=False,periodic=True):
Center of gravity with periodic boundaries. Center of gravity with periodic boundaries.
""" """
material = geom.material.reshape((-1,1),order='F') material = grid.material.reshape((-1,1),order='F')
mask = _np.full(geom.grid.prod(),True,dtype=bool) if selection is None else \ mask = _np.full(grid.cells.prod(),True,dtype=bool) if selection is None else \
_np.isin(material,selection,invert=invert).flatten() _np.isin(material,selection,invert=invert).flatten()
coords = grid_filters.cell_coord0(geom.grid,geom.size).reshape(-1,3,order='F') coords = grid_filters.coordinates0_point(grid.cells,grid.size).reshape(-1,3,order='F')
if not average: if not average:
return (coords[mask],material[mask]) return (coords[mask],material[mask])
@ -106,8 +106,8 @@ def from_geom(geom,selection=None,invert=False,average=False,periodic=True):
materials = _np.unique(material[mask]) materials = _np.unique(material[mask])
coords_ = _np.zeros((materials.size,3),dtype=float) coords_ = _np.zeros((materials.size,3),dtype=float)
for i,mat in enumerate(materials): for i,mat in enumerate(materials):
pc = (2*_np.pi*coords[material[:,0]==mat,:]-geom.origin)/geom.size pc = (2*_np.pi*coords[material[:,0]==mat,:]-grid.origin)/grid.size
coords_[i] = geom.origin + geom.size / 2 / _np.pi * (_np.pi + coords_[i] = grid.origin + grid.size / 2 / _np.pi * (_np.pi +
_np.arctan2(-_np.average(_np.sin(pc),axis=0), _np.arctan2(-_np.average(_np.sin(pc),axis=0),
-_np.average(_np.cos(pc),axis=0))) \ -_np.average(_np.cos(pc),axis=0))) \
if periodic else \ if periodic else \

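For the renamed seeds.from_grid above, a small sketch of extracting one seed point per material from an existing grid; this assumes the Grid(material, size=...) constructor used elsewhere in this diff and that from_grid returns a (coordinates, material IDs) pair in the average=True branch as well:

    import numpy as np
    from damask import Grid, seeds

    material = np.zeros((8, 8, 8), dtype=int)
    material[:, 4:, :] = 1                                  # simple two-material layering along y
    g = Grid(material, size=np.ones(3))
    coords, materials = seeds.from_grid(g, average=True)    # one (periodic) center of gravity per material ID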
View File

@ -1,10 +1,10 @@
homogenization: homogenization:
SX: SX:
N_constituents: 2 N_constituents: 1
mech: {type: none} mechanics: {type: none}
Taylor: Taylor:
N_constituents: 2 N_constituents: 2
mech: {type: isostrain} mechanics: {type: isostrain}
material: material:
- constituents: - constituents:
@ -34,11 +34,11 @@ material:
phase: phase:
Aluminum: Aluminum:
lattice: cF lattice: cF
mech: mechanics:
output: [F, P, F_e, F_p, L_p] output: [F, P, F_e, F_p, L_p]
elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke} elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke}
Steel: Steel:
lattice: cI lattice: cI
mech: mechanics:
output: [F, P, F_e, F_p, L_p] output: [F, P, F_e, F_p, L_p]
elasticity: {C_11: 233.3e9, C_12: 135.5e9, C_44: 118.0e9, type: hooke} elasticity: {C_11: 233.3e9, C_12: 135.5e9, C_44: 118.0e9, type: hooke}

View File

@ -1,8 +1,9 @@
import pytest import pytest
import numpy as np import numpy as np
from vtk.util.numpy_support import numpy_to_vtk as np_to_vtk
from damask import VTK from damask import VTK
from damask import Geom from damask import Grid
from damask import Table from damask import Table
from damask import Rotation from damask import Rotation
from damask import util from damask import util
@ -10,9 +11,9 @@ from damask import seeds
from damask import grid_filters from damask import grid_filters
def geom_equal(a,b): def grid_equal(a,b):
return np.all(a.material == b.material) and \ return np.all(a.material == b.material) and \
np.all(a.grid == b.grid) and \ np.all(a.cells == b.cells) and \
np.allclose(a.size, b.size) and \ np.allclose(a.size, b.size) and \
str(a.diff(b)) == str(b.diff(a)) str(a.diff(b)) == str(b.diff(a))
@ -23,15 +24,15 @@ def default():
np.arange(2,42), np.arange(2,42),
np.ones(40,dtype=int)*2, np.ones(40,dtype=int)*2,
np.arange(1,41))).reshape(8,5,4,order='F') np.arange(1,41))).reshape(8,5,4,order='F')
return Geom(x,[8e-6,5e-6,4e-6]) return Grid(x,[8e-6,5e-6,4e-6])
@pytest.fixture @pytest.fixture
def ref_path(ref_path_base): def ref_path(ref_path_base):
"""Directory containing reference results.""" """Directory containing reference results."""
return ref_path_base/'Geom' return ref_path_base/'Grid'
class TestGeom: class TestGrid:
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def _patch_execution_stamp(self, patch_execution_stamp): def _patch_execution_stamp(self, patch_execution_stamp):
@ -46,7 +47,7 @@ class TestGeom:
def test_diff_not_equal(self,default): def test_diff_not_equal(self,default):
new = Geom(default.material[1:,1:,1:]+1,default.size*.9,np.ones(3)-default.origin,comments=['modified']) new = Grid(default.material[1:,1:,1:]+1,default.size*.9,np.ones(3)-default.origin,comments=['modified'])
assert str(default.diff(new)) != '' assert str(default.diff(new)) != ''
def test_repr(self,default): def test_repr(self,default):
@ -54,36 +55,44 @@ class TestGeom:
def test_read_write_vtr(self,default,tmp_path): def test_read_write_vtr(self,default,tmp_path):
default.save(tmp_path/'default') default.save(tmp_path/'default')
new = Geom.load(tmp_path/'default.vtr') new = Grid.load(tmp_path/'default.vtr')
assert geom_equal(new,default) assert grid_equal(new,default)
def test_invalid_vtr(self,tmp_path): def test_invalid_no_material(self,tmp_path):
v = VTK.from_rectilinear_grid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0) v = VTK.from_rectilinear_grid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0)
v.save(tmp_path/'no_materialpoint.vtr',parallel=False) v.save(tmp_path/'no_materialpoint.vtr',parallel=False)
with pytest.raises(ValueError): with pytest.raises(ValueError):
Geom.load(tmp_path/'no_materialpoint.vtr') Grid.load(tmp_path/'no_materialpoint.vtr')
def test_invalid_material(self): def test_invalid_spacing(self,tmp_path,default):
default.save(tmp_path/'spacing_ok.vtr')
vtk = VTK.load(tmp_path/'spacing_ok.vtr')
vtk.vtk_data.SetXCoordinates(np_to_vtk(np.sort(np.random.random(default.cells[0]))))
vtk.save(tmp_path/'invalid_spacing.vtr',parallel=False)
with pytest.raises(ValueError):
Grid.load(tmp_path/'invalid_spacing.vtr')
def test_invalid_material_type(self):
with pytest.raises(TypeError): with pytest.raises(TypeError):
Geom(np.zeros((3,3,3),dtype='complex'),np.ones(3)) Grid(np.zeros((3,3,3),dtype='complex'),np.ones(3))
def test_cast_to_int(self): def test_cast_to_int(self):
g = Geom(np.zeros((3,3,3)),np.ones(3)) g = Grid(np.zeros((3,3,3)),np.ones(3))
assert g.material.dtype in np.sctypes['int'] assert g.material.dtype in np.sctypes['int']
def test_invalid_size(self,default): def test_invalid_size(self,default):
with pytest.raises(ValueError): with pytest.raises(ValueError):
Geom(default.material[1:,1:,1:], Grid(default.material[1:,1:,1:],
size=np.ones(2)) size=np.ones(2))
def test_save_load_ASCII(self,default,tmp_path): def test_save_load_ASCII(self,default,tmp_path):
default.save_ASCII(tmp_path/'ASCII') default.save_ASCII(tmp_path/'ASCII')
default.material -= 1 default.material -= 1
assert geom_equal(Geom.load_ASCII(tmp_path/'ASCII'),default) assert grid_equal(Grid.load_ASCII(tmp_path/'ASCII'),default)
def test_invalid_origin(self,default): def test_invalid_origin(self,default):
with pytest.raises(ValueError): with pytest.raises(ValueError):
Geom(default.material[1:,1:,1:], Grid(default.material[1:,1:,1:],
size=np.ones(3), size=np.ones(3),
origin=np.ones(4)) origin=np.ones(4))
@ -91,14 +100,14 @@ class TestGeom:
def test_invalid_materials_shape(self,default): def test_invalid_materials_shape(self,default):
material = np.ones((3,3)) material = np.ones((3,3))
with pytest.raises(ValueError): with pytest.raises(ValueError):
Geom(material, Grid(material,
size=np.ones(3)) size=np.ones(3))
def test_invalid_materials_type(self,default): def test_invalid_materials_type(self,default):
material = np.random.randint(1,300,(3,4,5))==1 material = np.random.randint(1,300,(3,4,5))==1
with pytest.raises(TypeError): with pytest.raises(TypeError):
Geom(material) Grid(material)
@pytest.mark.parametrize('directions,reflect',[ @pytest.mark.parametrize('directions,reflect',[
@ -113,7 +122,7 @@ class TestGeom:
tag = f'directions_{"-".join(directions)}+reflect_{reflect}' tag = f'directions_{"-".join(directions)}+reflect_{reflect}'
reference = ref_path/f'mirror_{tag}.vtr' reference = ref_path/f'mirror_{tag}.vtr'
if update: modified.save(reference) if update: modified.save(reference)
assert geom_equal(Geom.load(reference), assert grid_equal(Grid.load(reference),
modified) modified)
@ -135,17 +144,17 @@ class TestGeom:
tag = f'directions_{"-".join(directions)}' tag = f'directions_{"-".join(directions)}'
reference = ref_path/f'flip_{tag}.vtr' reference = ref_path/f'flip_{tag}.vtr'
if update: modified.save(reference) if update: modified.save(reference)
assert geom_equal(Geom.load(reference), assert grid_equal(Grid.load(reference),
modified) modified)
def test_flip_invariant(self,default): def test_flip_invariant(self,default):
assert geom_equal(default,default.flip([])) assert grid_equal(default,default.flip([]))
@pytest.mark.parametrize('direction',[['x'],['x','y']]) @pytest.mark.parametrize('direction',[['x'],['x','y']])
def test_flip_double(self,default,direction): def test_flip_double(self,default,direction):
assert geom_equal(default,default.flip(direction).flip(direction)) assert grid_equal(default,default.flip(direction).flip(direction))
@pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]]) @pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]])
@ -162,12 +171,12 @@ class TestGeom:
reference = ref_path/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}' reference = ref_path/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}'
if update and stencil > 1: if update and stencil > 1:
current.save(reference) current.save(reference)
assert geom_equal(Geom.load(reference) if stencil > 1 else default, assert grid_equal(Grid.load(reference) if stencil > 1 else default,
current current
) )
@pytest.mark.parametrize('grid',[ @pytest.mark.parametrize('cells',[
(10,11,10), (10,11,10),
[10,13,10], [10,13,10],
np.array((10,10,10)), np.array((10,10,10)),
@ -176,12 +185,12 @@ class TestGeom:
np.array((10,20,2)) np.array((10,20,2))
] ]
) )
def test_scale(self,default,update,ref_path,grid): def test_scale(self,default,update,ref_path,cells):
modified = default.scale(grid) modified = default.scale(cells)
tag = f'grid_{util.srepr(grid,"-")}' tag = f'grid_{util.srepr(cells,"-")}'
reference = ref_path/f'scale_{tag}.vtr' reference = ref_path/f'scale_{tag}.vtr'
if update: modified.save(reference) if update: modified.save(reference)
assert geom_equal(Geom.load(reference), assert grid_equal(Grid.load(reference),
modified) modified)
@ -190,21 +199,21 @@ class TestGeom:
for m in np.unique(material): for m in np.unique(material):
material[material==m] = material.max() + np.random.randint(1,30) material[material==m] = material.max() + np.random.randint(1,30)
default.material -= 1 default.material -= 1
modified = Geom(material, modified = Grid(material,
default.size, default.size,
default.origin) default.origin)
assert not geom_equal(modified,default) assert not grid_equal(modified,default)
assert geom_equal(default, assert grid_equal(default,
modified.renumber()) modified.renumber())
def test_substitute(self,default): def test_substitute(self,default):
offset = np.random.randint(1,500) offset = np.random.randint(1,500)
modified = Geom(default.material + offset, modified = Grid(default.material + offset,
default.size, default.size,
default.origin) default.origin)
assert not geom_equal(modified,default) assert not grid_equal(modified,default)
assert geom_equal(default, assert grid_equal(default,
modified.substitute(np.arange(default.material.max())+1+offset, modified.substitute(np.arange(default.material.max())+1+offset,
np.arange(default.material.max())+1)) np.arange(default.material.max())+1))
@ -212,12 +221,12 @@ class TestGeom:
f = np.unique(default.material.flatten())[:np.random.randint(1,default.material.max())] f = np.unique(default.material.flatten())[:np.random.randint(1,default.material.max())]
t = np.random.permutation(f) t = np.random.permutation(f)
modified = default.substitute(f,t) modified = default.substitute(f,t)
assert np.array_equiv(t,f) or (not geom_equal(modified,default)) assert np.array_equiv(t,f) or (not grid_equal(modified,default))
assert geom_equal(default, modified.substitute(t,f)) assert grid_equal(default, modified.substitute(t,f))
def test_sort(self): def test_sort(self):
grid = np.random.randint(5,20,3) cells = np.random.randint(5,20,3)
m = Geom(np.random.randint(1,20,grid)*3,np.ones(3)).sort().material.flatten(order='F') m = Grid(np.random.randint(1,20,cells)*3,np.ones(3)).sort().material.flatten(order='F')
for i,v in enumerate(m): for i,v in enumerate(m):
assert i==0 or v > m[:i].max() or v in m[:i] assert i==0 or v > m[:i].max() or v in m[:i]
@ -227,7 +236,7 @@ class TestGeom:
modified = default.copy() modified = default.copy()
for i in range(np.rint(360/axis_angle[3]).astype(int)): for i in range(np.rint(360/axis_angle[3]).astype(int)):
modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True)) modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True))
assert geom_equal(default,modified) assert grid_equal(default,modified)
@pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0], @pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0],
@ -237,15 +246,15 @@ class TestGeom:
tag = f'Eulers_{util.srepr(Eulers,"-")}' tag = f'Eulers_{util.srepr(Eulers,"-")}'
reference = ref_path/f'rotate_{tag}.vtr' reference = ref_path/f'rotate_{tag}.vtr'
if update: modified.save(reference) if update: modified.save(reference)
assert geom_equal(Geom.load(reference), assert grid_equal(Grid.load(reference),
modified) modified)
def test_canvas(self,default): def test_canvas(self,default):
grid = default.grid cells = default.cells
grid_add = np.random.randint(0,30,(3)) grid_add = np.random.randint(0,30,(3))
modified = default.canvas(grid + grid_add) modified = default.canvas(cells + grid_add)
assert np.all(modified.material[:grid[0],:grid[1],:grid[2]] == default.material) assert np.all(modified.material[:cells[0],:cells[1],:cells[2]] == default.material)
@pytest.mark.parametrize('center1,center2',[(np.random.random(3)*.5,np.random.random()*8), @pytest.mark.parametrize('center1,center2',[(np.random.random(3)*.5,np.random.random()*8),
@ -263,8 +272,8 @@ class TestGeom:
o = np.random.random(3)-.5 o = np.random.random(3)-.5
g = np.random.randint(8,32,(3)) g = np.random.randint(8,32,(3))
s = np.random.random(3)+.5 s = np.random.random(3)+.5
G_1 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center1,exponent) G_1 = Grid(np.ones(g,'i'),s,o).add_primitive(diameter,center1,exponent)
G_2 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center2,exponent) G_2 = Grid(np.ones(g,'i'),s,o).add_primitive(diameter,center2,exponent)
assert np.count_nonzero(G_1.material!=2) == np.count_nonzero(G_2.material!=2) assert np.count_nonzero(G_1.material!=2) == np.count_nonzero(G_2.material!=2)
@ -279,9 +288,9 @@ class TestGeom:
g = np.random.randint(8,32,(3)) g = np.random.randint(8,32,(3))
s = np.random.random(3)+.5 s = np.random.random(3)+.5
fill = np.random.randint(10)+2 fill = np.random.randint(10)+2
G_1 = Geom(np.ones(g,'i'),s).add_primitive(.3,center,1,fill,inverse=inverse,periodic=periodic) G_1 = Grid(np.ones(g,'i'),s).add_primitive(.3,center,1,fill,inverse=inverse,periodic=periodic)
G_2 = Geom(np.ones(g,'i'),s).add_primitive(.3,center,1,fill,Rotation.from_random(),inverse,periodic=periodic) G_2 = Grid(np.ones(g,'i'),s).add_primitive(.3,center,1,fill,Rotation.from_random(),inverse,periodic=periodic)
assert geom_equal(G_1,G_2) assert grid_equal(G_1,G_2)
@pytest.mark.parametrize('trigger',[[1],[]]) @pytest.mark.parametrize('trigger',[[1],[]])
@ -300,9 +309,9 @@ class TestGeom:
if len(trigger) > 0: if len(trigger) > 0:
m2[m==1] = 1 m2[m==1] = 1
geom = Geom(m,np.random.rand(3)).vicinity_offset(vicinity,offset,trigger=trigger) grid = Grid(m,np.random.rand(3)).vicinity_offset(vicinity,offset,trigger=trigger)
assert np.all(m2==geom.material) assert np.all(m2==grid.material)
@pytest.mark.parametrize('periodic',[True,False]) @pytest.mark.parametrize('periodic',[True,False])
@ -314,39 +323,39 @@ class TestGeom:
@pytest.mark.parametrize('periodic',[True,False]) @pytest.mark.parametrize('periodic',[True,False])
def test_tessellation_approaches(self,periodic): def test_tessellation_approaches(self,periodic):
grid = np.random.randint(10,20,3) cells = np.random.randint(10,20,3)
size = np.random.random(3) + 1.0 size = np.random.random(3) + 1.0
N_seeds= np.random.randint(10,30) N_seeds= np.random.randint(10,30)
seeds = np.random.rand(N_seeds,3) * np.broadcast_to(size,(N_seeds,3)) seeds = np.random.rand(N_seeds,3) * np.broadcast_to(size,(N_seeds,3))
Voronoi = Geom.from_Voronoi_tessellation( grid,size,seeds, np.arange(N_seeds)+5,periodic) Voronoi = Grid.from_Voronoi_tessellation( cells,size,seeds, np.arange(N_seeds)+5,periodic)
Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(N_seeds),np.arange(N_seeds)+5,periodic) Laguerre = Grid.from_Laguerre_tessellation(cells,size,seeds,np.ones(N_seeds),np.arange(N_seeds)+5,periodic)
assert geom_equal(Laguerre,Voronoi) assert grid_equal(Laguerre,Voronoi)
def test_Laguerre_weights(self): def test_Laguerre_weights(self):
grid = np.random.randint(10,20,3) cells = np.random.randint(10,20,3)
size = np.random.random(3) + 1.0 size = np.random.random(3) + 1.0
N_seeds= np.random.randint(10,30) N_seeds= np.random.randint(10,30)
seeds = np.random.rand(N_seeds,3) * np.broadcast_to(size,(N_seeds,3)) seeds = np.random.rand(N_seeds,3) * np.broadcast_to(size,(N_seeds,3))
weights= np.full((N_seeds),-np.inf) weights= np.full((N_seeds),-np.inf)
ms = np.random.randint(N_seeds) ms = np.random.randint(N_seeds)
weights[ms] = np.random.random() weights[ms] = np.random.random()
Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,weights,periodic=np.random.random()>0.5) Laguerre = Grid.from_Laguerre_tessellation(cells,size,seeds,weights,periodic=np.random.random()>0.5)
assert np.all(Laguerre.material == ms) assert np.all(Laguerre.material == ms)
@pytest.mark.parametrize('approach',['Laguerre','Voronoi']) @pytest.mark.parametrize('approach',['Laguerre','Voronoi'])
def test_tessellate_bicrystal(self,approach): def test_tessellate_bicrystal(self,approach):
grid = np.random.randint(5,10,3)*2 cells = np.random.randint(5,10,3)*2
size = grid.astype(np.float) size = cells.astype(np.float)
seeds = np.vstack((size*np.array([0.5,0.25,0.5]),size*np.array([0.5,0.75,0.5]))) seeds = np.vstack((size*np.array([0.5,0.25,0.5]),size*np.array([0.5,0.75,0.5])))
material = np.zeros(grid) material = np.zeros(cells)
material[:,grid[1]//2:,:] = 1 material[:,cells[1]//2:,:] = 1
if approach == 'Laguerre': if approach == 'Laguerre':
geom = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(2),periodic=np.random.random()>0.5) grid = Grid.from_Laguerre_tessellation(cells,size,seeds,np.ones(2),periodic=np.random.random()>0.5)
elif approach == 'Voronoi': elif approach == 'Voronoi':
geom = Geom.from_Voronoi_tessellation(grid,size,seeds, periodic=np.random.random()>0.5) grid = Grid.from_Voronoi_tessellation(cells,size,seeds, periodic=np.random.random()>0.5)
assert np.all(geom.material == material) assert np.all(grid.material == material)
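A sketch of the bicrystal construction exercised by test_tessellate_bicrystal above, using only the Voronoi path (values mirror the test):

    import numpy as np
    from damask import Grid

    cells = np.array([16, 16, 16])
    size  = cells.astype(float)
    two_seeds = np.vstack((size*np.array([0.5, 0.25, 0.5]),
                           size*np.array([0.5, 0.75, 0.5])))
    bicrystal = Grid.from_Voronoi_tessellation(cells, size, two_seeds)
    # lower half in y maps to material 0, upper half to material 1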
@pytest.mark.parametrize('surface',['Schwarz P', @pytest.mark.parametrize('surface',['Schwarz P',
@ -363,14 +372,14 @@ class TestGeom:
'Fisher-Koch S', 'Fisher-Koch S',
]) ])
def test_minimal_surface_basic_properties(self,surface): def test_minimal_surface_basic_properties(self,surface):
grid = np.random.randint(60,100,3) cells = np.random.randint(60,100,3)
size = np.ones(3)+np.random.rand(3) size = np.ones(3)+np.random.rand(3)
threshold = 2*np.random.rand()-1. threshold = 2*np.random.rand()-1.
periods = np.random.randint(2)+1 periods = np.random.randint(2)+1
materials = np.random.randint(0,40,2) materials = np.random.randint(0,40,2)
geom = Geom.from_minimal_surface(grid,size,surface,threshold,periods,materials) grid = Grid.from_minimal_surface(cells,size,surface,threshold,periods,materials)
assert set(geom.material.flatten()) | set(materials) == set(materials) \ assert set(grid.material.flatten()) | set(materials) == set(materials) \
and (geom.size == size).all() and (geom.grid == grid).all() and (grid.size == size).all() and (grid.cells == cells).all()
@pytest.mark.parametrize('surface,threshold',[('Schwarz P',0), @pytest.mark.parametrize('surface,threshold',[('Schwarz P',0),
('Double Primitive',-1./6.), ('Double Primitive',-1./6.),
@ -386,36 +395,36 @@ class TestGeom:
('Fisher-Koch S',0), ('Fisher-Koch S',0),
]) ])
def test_minimal_surface_volume(self,surface,threshold): def test_minimal_surface_volume(self,surface,threshold):
grid = np.ones(3,dtype=int)*64 cells = np.ones(3,dtype=int)*64
geom = Geom.from_minimal_surface(grid,np.ones(3),surface,threshold) grid = Grid.from_minimal_surface(cells,np.ones(3),surface,threshold)
assert np.isclose(np.count_nonzero(geom.material==1)/np.prod(geom.grid),.5,rtol=1e-3) assert np.isclose(np.count_nonzero(grid.material==1)/np.prod(grid.cells),.5,rtol=1e-3)
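The volume test above pins down the meaning of the threshold argument; a minimal sketch with one of the listed surfaces ('Schwarz P', threshold 0 as in the parametrization):

    import numpy as np
    from damask import Grid

    cells = np.ones(3, dtype=int)*64
    g = Grid.from_minimal_surface(cells, np.ones(3), 'Schwarz P', 0.0)
    # threshold 0 splits the box into two materials of roughly equal volume fraction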
def test_from_table(self): def test_from_table(self):
grid = np.random.randint(60,100,3) cells = np.random.randint(60,100,3)
size = np.ones(3)+np.random.rand(3) size = np.ones(3)+np.random.rand(3)
coords = grid_filters.cell_coord0(grid,size).reshape(-1,3,order='F') coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3,order='F')
z=np.ones(grid.prod()) z=np.ones(cells.prod())
z[grid[:2].prod()*int(grid[2]/2):]=0 z[cells[:2].prod()*int(cells[2]/2):]=0
t = Table(np.column_stack((coords,z)),{'coords':3,'z':1}) t = Table(np.column_stack((coords,z)),{'coords':3,'z':1})
g = Geom.from_table(t,'coords',['1_coords','z']) g = Grid.from_table(t,'coords',['1_coords','z'])
assert g.N_materials == g.grid[0]*2 and (g.material[:,:,-1]-g.material[:,:,0] == grid[0]).all() assert g.N_materials == g.cells[0]*2 and (g.material[:,:,-1]-g.material[:,:,0] == cells[0]).all()
def test_from_table_recover(self,tmp_path): def test_from_table_recover(self,tmp_path):
grid = np.random.randint(60,100,3) cells = np.random.randint(60,100,3)
size = np.ones(3)+np.random.rand(3) size = np.ones(3)+np.random.rand(3)
s = seeds.from_random(size,np.random.randint(60,100)) s = seeds.from_random(size,np.random.randint(60,100))
geom = Geom.from_Voronoi_tessellation(grid,size,s) grid = Grid.from_Voronoi_tessellation(cells,size,s)
coords = grid_filters.cell_coord0(grid,size) coords = grid_filters.coordinates0_point(cells,size)
t = Table(np.column_stack((coords.reshape(-1,3,order='F'),geom.material.flatten(order='F'))),{'c':3,'m':1}) t = Table(np.column_stack((coords.reshape(-1,3,order='F'),grid.material.flatten(order='F'))),{'c':3,'m':1})
assert geom_equal(geom.sort().renumber(),Geom.from_table(t,'c',['m'])) assert grid_equal(grid.sort().renumber(),Grid.from_table(t,'c',['m']))
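The round trip checked in test_from_table_recover can be written out as follows (a sketch mirroring the test; only the resolution and seed count are made up):

    import numpy as np
    from damask import Grid, Table, grid_filters, seeds

    cells  = np.array([8, 10, 12])
    size   = np.ones(3)
    g      = Grid.from_Voronoi_tessellation(cells, size, seeds.from_random(size, 20))

    coords = grid_filters.coordinates0_point(cells, size).reshape(-1, 3, order='F')
    t      = Table(np.column_stack((coords, g.material.flatten(order='F'))), {'c': 3, 'm': 1})
    recovered = Grid.from_table(t, 'c', ['m'])               # rebuild the microstructure from tabulated data
    assert np.array_equal(g.sort().renumber().material, recovered.material)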
@pytest.mark.parametrize('periodic',[True,False]) @pytest.mark.parametrize('periodic',[True,False])
@pytest.mark.parametrize('direction',['x','y','z',['x','y'],'zy','xz',['x','y','z']]) @pytest.mark.parametrize('direction',['x','y','z',['x','y'],'zy','xz',['x','y','z']])
def test_get_grain_boundaries(self,update,ref_path,periodic,direction): def test_get_grain_boundaries(self,update,ref_path,periodic,direction):
geom=Geom.load(ref_path/'get_grain_boundaries_8g12x15x20.vtr') grid=Grid.load(ref_path/'get_grain_boundaries_8g12x15x20.vtr')
current=geom.get_grain_boundaries(periodic,direction) current=grid.get_grain_boundaries(periodic,direction)
if update: if update:
current.save(ref_path/f'get_grain_boundaries_8g12x15x20_{direction}_{periodic}.vtu',parallel=False) current.save(ref_path/f'get_grain_boundaries_8g12x15x20_{direction}_{periodic}.vtu',parallel=False)
reference=VTK.load(ref_path/f'get_grain_boundaries_8g12x15x20_{"".join(direction)}_{periodic}.vtu') reference=VTK.load(ref_path/f'get_grain_boundaries_8g12x15x20_{"".join(direction)}_{periodic}.vtu')

View File

@ -169,7 +169,7 @@ class TestResult:
@pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]]) @pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]])
def test_add_IPF_color(self,default,d): def test_add_IPF_color(self,default,d):
default.add_IPF_color('O',np.array(d)) default.add_IPF_color(d,'O')
loc = {'O': default.get_dataset_location('O'), loc = {'O': default.get_dataset_location('O'),
'color': default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))} 'color': default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))}
qu = default.read_dataset(loc['O']).view(np.double).squeeze() qu = default.read_dataset(loc['O']).view(np.double).squeeze()
@ -356,11 +356,11 @@ class TestResult:
@pytest.mark.parametrize('mode',['cell','node']) @pytest.mark.parametrize('mode',['cell','node'])
def test_coordinates(self,default,mode): def test_coordinates(self,default,mode):
if mode == 'cell': if mode == 'cell':
a = grid_filters.cell_coord0(default.grid,default.size,default.origin) a = grid_filters.coordinates0_point(default.cells,default.size,default.origin)
b = default.cell_coordinates.reshape(tuple(default.grid)+(3,),order='F') b = default.coordinates0_point.reshape(tuple(default.cells)+(3,),order='F')
elif mode == 'node': elif mode == 'node':
a = grid_filters.node_coord0(default.grid,default.size,default.origin) a = grid_filters.coordinates0_node(default.cells,default.size,default.origin)
b = default.node_coordinates.reshape(tuple(default.grid+1)+(3,),order='F') b = default.coordinates0_node.reshape(tuple(default.cells+1)+(3,),order='F')
assert np.allclose(a,b) assert np.allclose(a,b)
@pytest.mark.parametrize('output',['F',[],['F','P']]) @pytest.mark.parametrize('output',['F',[],['F','P']])

View File

@ -800,6 +800,14 @@ class TestRotation:
print(f'append 2x {shape} --> {s.shape}') print(f'append 2x {shape} --> {s.shape}')
assert s[0,...] == r[0,...] and s[-1,...] == p[-1,...] assert s[0,...] == r[0,...] and s[-1,...] == p[-1,...]
@pytest.mark.parametrize('shape',[None,1,(1,),(4,2),(3,3,2)])
def test_append_list(self,shape):
r = Rotation.from_random(shape=shape)
p = Rotation.from_random(shape=shape)
s = r.append([r,p])
print(f'append 3x {shape} --> {s.shape}')
assert s[0,...] == r[0,...] and s[-1,...] == p[-1,...]
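The new test_append_list covers passing a list to Rotation.append; a hedged usage sketch (the resulting shape is assumed to be the concatenation of r with the appended rotations, as the test's 'append 3x' message suggests):

    from damask import Rotation

    r = Rotation.from_random(shape=4)
    p = Rotation.from_random(shape=4)
    s = r.append([r, p])            # r followed by the contents of the list
    print(s.shape)                  # presumably (12,)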
@pytest.mark.parametrize('quat,standardized',[ @pytest.mark.parametrize('quat,standardized',[
([-1,0,0,0],[1,0,0,0]), ([-1,0,0,0],[1,0,0,0]),
([-0.5,-0.5,-0.5,-0.5],[0.5,0.5,0.5,0.5]), ([-0.5,-0.5,-0.5,-0.5],[0.5,0.5,0.5,0.5]),
@ -1022,7 +1030,7 @@ class TestRotation:
rng = tuple(zip(np.zeros(3),limits)) rng = tuple(zip(np.zeros(3),limits))
weights = Table.load(ref_path/'ODF_experimental_cell.txt').get('intensity').flatten() weights = Table.load(ref_path/'ODF_experimental_cell.txt').get('intensity').flatten()
Eulers = grid_filters.cell_coord0(steps,limits) Eulers = grid_filters.coordinates0_point(steps,limits)
Eulers = np.radians(Eulers) if not degrees else Eulers Eulers = np.radians(Eulers) if not degrees else Eulers
Eulers_r = Rotation.from_ODF(weights,Eulers.reshape(-1,3,order='F'),N,degrees,fractions).as_Euler_angles(True) Eulers_r = Rotation.from_ODF(weights,Eulers.reshape(-1,3,order='F'),N,degrees,fractions).as_Euler_angles(True)
@ -1040,7 +1048,7 @@ class TestRotation:
weights = Table.load(ref_path/'ODF_experimental.txt').get('intensity') weights = Table.load(ref_path/'ODF_experimental.txt').get('intensity')
weights = weights.reshape(steps+1,order='F')[:-1,:-1,:-1].reshape(-1,order='F') weights = weights.reshape(steps+1,order='F')[:-1,:-1,:-1].reshape(-1,order='F')
Eulers = grid_filters.node_coord0(steps,limits)[:-1,:-1,:-1] Eulers = grid_filters.coordinates0_node(steps,limits)[:-1,:-1,:-1]
Eulers = np.radians(Eulers) if not degrees else Eulers Eulers = np.radians(Eulers) if not degrees else Eulers
Eulers_r = Rotation.from_ODF(weights,Eulers.reshape(-1,3,order='F'),N,degrees).as_Euler_angles(True) Eulers_r = Rotation.from_ODF(weights,Eulers.reshape(-1,3,order='F'),N,degrees).as_Euler_angles(True)

View File

@ -8,7 +8,7 @@ from damask import Table
def default(): def default():
"""Simple Table.""" """Simple Table."""
x = np.ones((5,13),dtype=float) x = np.ones((5,13),dtype=float)
return Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['test data','contains only ones']) return Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['test data','contains five rows of only ones'])
@pytest.fixture @pytest.fixture
def ref_path(ref_path_base): def ref_path(ref_path_base):
@ -20,8 +20,9 @@ class TestTable:
def test_repr(self,default): def test_repr(self,default):
print(default) print(default)
def test_len(self): @pytest.mark.parametrize('N',[10,40])
len(Table(np.random.rand(7,3),{'X':3})) == 7 def test_len(self,N):
assert len(Table(np.random.rand(N,3),{'X':3})) == N
def test_get_scalar(self,default): def test_get_scalar(self,default):
d = default.get('s') d = default.get('s')
@ -39,6 +40,10 @@ class TestTable:
d = default.get('5_F') d = default.get('5_F')
assert np.allclose(d,1.0) and d.shape[1:] == (1,) assert np.allclose(d,1.0) and d.shape[1:] == (1,)
@pytest.mark.parametrize('N',[10,40])
def test_getitem(self,N):
assert len(Table(np.random.rand(N,1),{'X':1})[:N//2]) == N//2
@pytest.mark.parametrize('mode',['str','path']) @pytest.mark.parametrize('mode',['str','path'])
def test_write_read(self,default,tmp_path,mode): def test_write_read(self,default,tmp_path,mode):
default.save(tmp_path/'default.txt') default.save(tmp_path/'default.txt')

View File

@ -16,9 +16,9 @@ def ref_path(ref_path_base):
@pytest.fixture @pytest.fixture
def default(): def default():
"""Simple VTK.""" """Simple VTK."""
grid = np.array([5,6,7],int) cells = np.array([5,6,7],int)
size = np.array([.6,1.,.5]) size = np.array([.6,1.,.5])
return VTK.from_rectilinear_grid(grid,size) return VTK.from_rectilinear_grid(cells,size)
class TestVTK: class TestVTK:
@ -27,10 +27,10 @@ class TestVTK:
print('patched damask.util.execution_stamp') print('patched damask.util.execution_stamp')
def test_rectilinearGrid(self,tmp_path): def test_rectilinearGrid(self,tmp_path):
grid = np.random.randint(5,10,3)*2 cells = np.random.randint(5,10,3)*2
size = np.random.random(3) + 1.0 size = np.random.random(3) + 1.0
origin = np.random.random(3) origin = np.random.random(3)
v = VTK.from_rectilinear_grid(grid,size,origin) v = VTK.from_rectilinear_grid(cells,size,origin)
string = v.__repr__() string = v.__repr__()
v.save(tmp_path/'rectilinearGrid',False) v.save(tmp_path/'rectilinearGrid',False)
vtr = VTK.load(tmp_path/'rectilinearGrid.vtr') vtr = VTK.load(tmp_path/'rectilinearGrid.vtr')
@ -152,11 +152,11 @@ class TestVTK:
np.allclose(polyData.get('coordinates'),points) np.allclose(polyData.get('coordinates'),points)
def test_compare_reference_rectilinearGrid(self,update,ref_path,tmp_path): def test_compare_reference_rectilinearGrid(self,update,ref_path,tmp_path):
grid = np.array([5,6,7],int) cells = np.array([5,6,7],int)
size = np.array([.6,1.,.5]) size = np.array([.6,1.,.5])
rectilinearGrid = VTK.from_rectilinear_grid(grid,size) rectilinearGrid = VTK.from_rectilinear_grid(cells,size)
c = grid_filters.cell_coord0(grid,size).reshape(-1,3,order='F') c = grid_filters.coordinates0_point(cells,size).reshape(-1,3,order='F')
n = grid_filters.node_coord0(grid,size).reshape(-1,3,order='F') n = grid_filters.coordinates0_node(cells,size).reshape(-1,3,order='F')
rectilinearGrid.add(c,'cell') rectilinearGrid.add(c,'cell')
rectilinearGrid.add(n,'node') rectilinearGrid.add(n,'node')
if update: if update:

View File

@ -5,124 +5,124 @@ from damask import grid_filters
class TestGridFilters: class TestGridFilters:
def test_cell_coord0(self): def test_coordinates0_point(self):
size = np.random.random(3) size = np.random.random(3)
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
coord = grid_filters.cell_coord0(grid,size) coord = grid_filters.coordinates0_point(cells,size)
assert np.allclose(coord[0,0,0],size/grid*.5) and coord.shape == tuple(grid) + (3,) assert np.allclose(coord[0,0,0],size/cells*.5) and coord.shape == tuple(cells) + (3,)
def test_node_coord0(self): def test_coordinates0_node(self):
size = np.random.random(3) size = np.random.random(3)
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
coord = grid_filters.node_coord0(grid,size) coord = grid_filters.coordinates0_node(cells,size)
assert np.allclose(coord[-1,-1,-1],size) and coord.shape == tuple(grid+1) + (3,) assert np.allclose(coord[-1,-1,-1],size) and coord.shape == tuple(cells+1) + (3,)
def test_coord0(self): def test_coord0(self):
size = np.random.random(3) size = np.random.random(3)
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
c = grid_filters.cell_coord0(grid+1,size+size/grid) c = grid_filters.coordinates0_point(cells+1,size+size/cells)
n = grid_filters.node_coord0(grid,size) + size/grid*.5 n = grid_filters.coordinates0_node(cells,size) + size/cells*.5
assert np.allclose(c,n) assert np.allclose(c,n)
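The three tests above fix the geometry conventions of the renamed helpers; a compact sketch, with shapes and corner values taken directly from the assertions:

    import numpy as np
    from damask import grid_filters

    cells = np.array([2, 2, 2])
    size  = np.ones(3)
    p = grid_filters.coordinates0_point(cells, size)    # cell centers, shape (2, 2, 2, 3)
    n = grid_filters.coordinates0_node(cells, size)     # cell corners, shape (3, 3, 3, 3)
    assert np.allclose(p[0, 0, 0], size/cells*0.5)      # first center is half a spacing from the origin
    assert np.allclose(n[-1, -1, -1], size)             # last node sits at the far corner of the box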
@pytest.mark.parametrize('mode',['cell','node']) @pytest.mark.parametrize('mode',['point','node'])
def test_grid_DNA(self,mode): def test_grid_DNA(self,mode):
"""Ensure that xx_coord0_gridSizeOrigin is the inverse of xx_coord0.""" """Ensure that cellsSizeOrigin_coordinates0_xx is the inverse of coordinates0_xx."""
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
size = np.random.random(3) size = np.random.random(3)
origin = np.random.random(3) origin = np.random.random(3)
coord0 = eval(f'grid_filters.{mode}_coord0(grid,size,origin)') # noqa coord0 = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)') # noqa
_grid,_size,_origin = eval(f'grid_filters.{mode}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))') _cells,_size,_origin = eval(f'grid_filters.cellsSizeOrigin_coordinates0_{mode}(coord0.reshape(-1,3,order="F"))')
assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin) assert np.allclose(cells,_cells) and np.allclose(size,_size) and np.allclose(origin,_origin)
def test_displacement_fluct_equivalence(self): def test_displacement_fluct_equivalence(self):
"""Ensure that fluctuations are periodic.""" """Ensure that fluctuations are periodic."""
size = np.random.random(3) size = np.random.random(3)
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3)) F = np.random.random(tuple(cells)+(3,3))
assert np.allclose(grid_filters.node_displacement_fluct(size,F), assert np.allclose(grid_filters.displacement_fluct_node(size,F),
grid_filters.cell_2_node(grid_filters.cell_displacement_fluct(size,F))) grid_filters.point_to_node(grid_filters.displacement_fluct_point(size,F)))
def test_interpolation_to_node(self): def test_interpolation_to_node(self):
size = np.random.random(3) size = np.random.random(3)
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3)) F = np.random.random(tuple(cells)+(3,3))
assert np.allclose(grid_filters.node_coord(size,F) [1:-1,1:-1,1:-1], assert np.allclose(grid_filters.coordinates_node(size,F) [1:-1,1:-1,1:-1],
grid_filters.cell_2_node(grid_filters.cell_coord(size,F))[1:-1,1:-1,1:-1]) grid_filters.point_to_node(grid_filters.coordinates_point(size,F))[1:-1,1:-1,1:-1])
def test_interpolation_to_cell(self): def test_interpolation_to_cell(self):
grid = np.random.randint(1,30,(3)) cells = np.random.randint(1,30,(3))
node_coord_x = np.linspace(0,np.pi*2,num=grid[0]+1) coordinates_node_x = np.linspace(0,np.pi*2,num=cells[0]+1)
node_field_x = np.cos(node_coord_x) node_field_x = np.cos(coordinates_node_x)
node_field = np.broadcast_to(node_field_x.reshape(-1,1,1),grid+1) node_field = np.broadcast_to(node_field_x.reshape(-1,1,1),cells+1)
cell_coord_x = node_coord_x[:-1]+node_coord_x[1]*.5 coordinates0_point_x = coordinates_node_x[:-1]+coordinates_node_x[1]*.5
cell_field_x = np.interp(cell_coord_x,node_coord_x,node_field_x,period=np.pi*2.) cell_field_x = np.interp(coordinates0_point_x,coordinates_node_x,node_field_x,period=np.pi*2.)
cell_field = np.broadcast_to(cell_field_x.reshape(-1,1,1),grid) cell_field = np.broadcast_to(cell_field_x.reshape(-1,1,1),cells)
assert np.allclose(cell_field,grid_filters.node_2_cell(node_field)) assert np.allclose(cell_field,grid_filters.node_2_point(node_field))
@pytest.mark.parametrize('mode',['cell','node']) @pytest.mark.parametrize('mode',['point','node'])
def test_coord0_origin(self,mode): def test_coordinates0_origin(self,mode):
origin= np.random.random(3) origin= np.random.random(3)
size = np.random.random(3) # noqa size = np.random.random(3) # noqa
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
shifted = eval(f'grid_filters.{mode}_coord0(grid,size,origin)') shifted = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)')
unshifted = eval(f'grid_filters.{mode}_coord0(grid,size)') unshifted = eval(f'grid_filters.coordinates0_{mode}(cells,size)')
if mode == 'cell': if mode == 'cell':
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid) +(3,))) assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(cells) +(3,)))
elif mode == 'node': elif mode == 'node':
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid+1)+(3,))) assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(cells+1)+(3,)))
@pytest.mark.parametrize('function',[grid_filters.cell_displacement_avg, @pytest.mark.parametrize('function',[grid_filters.displacement_avg_point,
grid_filters.node_displacement_avg]) grid_filters.displacement_avg_node])
def test_displacement_avg_vanishes(self,function): def test_displacement_avg_vanishes(self,function):
"""Ensure that random fluctuations in F do not result in average displacement.""" """Ensure that random fluctuations in F do not result in average displacement."""
size = np.random.random(3) size = np.random.random(3)
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3)) F = np.random.random(tuple(cells)+(3,3))
F += np.eye(3) - np.average(F,axis=(0,1,2)) F += np.eye(3) - np.average(F,axis=(0,1,2))
assert np.allclose(function(size,F),0.0) assert np.allclose(function(size,F),0.0)
@pytest.mark.parametrize('function',[grid_filters.cell_displacement_fluct, @pytest.mark.parametrize('function',[grid_filters.displacement_fluct_point,
grid_filters.node_displacement_fluct]) grid_filters.displacement_fluct_node])
def test_displacement_fluct_vanishes(self,function): def test_displacement_fluct_vanishes(self,function):
"""Ensure that constant F does not result in fluctuating displacement.""" """Ensure that constant F does not result in fluctuating displacement."""
size = np.random.random(3) size = np.random.random(3)
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
F = np.broadcast_to(np.random.random((3,3)), tuple(grid)+(3,3)) F = np.broadcast_to(np.random.random((3,3)), tuple(cells)+(3,3))
assert np.allclose(function(size,F),0.0) assert np.allclose(function(size,F),0.0)
@pytest.mark.parametrize('function',[grid_filters.coord0_check, @pytest.mark.parametrize('function',[grid_filters.coordinates0_check,
grid_filters.node_coord0_gridSizeOrigin, grid_filters.cellsSizeOrigin_coordinates0_node,
grid_filters.cell_coord0_gridSizeOrigin]) grid_filters.cellsSizeOrigin_coordinates0_point])
def test_invalid_coordinates(self,function): def test_invalid_coordinates(self,function):
invalid_coordinates = np.random.random((np.random.randint(12,52),3)) invalid_coordinates = np.random.random((np.random.randint(12,52),3))
with pytest.raises(ValueError): with pytest.raises(ValueError):
function(invalid_coordinates) function(invalid_coordinates)
@pytest.mark.parametrize('function',[grid_filters.node_coord0_gridSizeOrigin, @pytest.mark.parametrize('function',[grid_filters.cellsSizeOrigin_coordinates0_node,
grid_filters.cell_coord0_gridSizeOrigin]) grid_filters.cellsSizeOrigin_coordinates0_point])
def test_uneven_spaced_coordinates(self,function): def test_uneven_spaced_coordinates(self,function):
start = np.random.random(3) start = np.random.random(3)
end = np.random.random(3)*10. + start end = np.random.random(3)*10. + start
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
uneven = np.stack(np.meshgrid(np.logspace(start[0],end[0],grid[0]), uneven = np.stack(np.meshgrid(np.logspace(start[0],end[0],cells[0]),
np.logspace(start[1],end[1],grid[1]), np.logspace(start[1],end[1],cells[1]),
np.logspace(start[2],end[2],grid[2]),indexing = 'ij'), np.logspace(start[2],end[2],cells[2]),indexing = 'ij'),
axis = -1).reshape((grid.prod(),3),order='F') axis = -1).reshape((cells.prod(),3),order='F')
with pytest.raises(ValueError): with pytest.raises(ValueError):
function(uneven) function(uneven)
@pytest.mark.parametrize('mode',[True,False]) @pytest.mark.parametrize('mode',[True,False])
@pytest.mark.parametrize('function',[grid_filters.node_coord0_gridSizeOrigin, @pytest.mark.parametrize('function',[grid_filters.cellsSizeOrigin_coordinates0_node,
grid_filters.cell_coord0_gridSizeOrigin]) grid_filters.cellsSizeOrigin_coordinates0_point])
def test_unordered_coordinates(self,function,mode): def test_unordered_coordinates(self,function,mode):
origin = np.random.random(3) origin = np.random.random(3)
size = np.random.random(3)*10.+origin size = np.random.random(3)*10.+origin
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
unordered = grid_filters.node_coord0(grid,size,origin).reshape(-1,3) unordered = grid_filters.coordinates0_node(cells,size,origin).reshape(-1,3)
if mode: if mode:
with pytest.raises(ValueError): with pytest.raises(ValueError):
function(unordered,mode) function(unordered,mode)
@ -131,9 +131,9 @@ class TestGridFilters:
def test_regrid(self): def test_regrid(self):
size = np.random.random(3) size = np.random.random(3)
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
F = np.broadcast_to(np.eye(3), tuple(grid)+(3,3)) F = np.broadcast_to(np.eye(3), tuple(cells)+(3,3))
assert all(grid_filters.regrid(size,F,grid) == np.arange(grid.prod())) assert all(grid_filters.regrid(size,F,cells) == np.arange(cells.prod()))
@pytest.mark.parametrize('differential_operator',[grid_filters.curl, @pytest.mark.parametrize('differential_operator',[grid_filters.curl,
@ -141,14 +141,14 @@ class TestGridFilters:
grid_filters.gradient]) grid_filters.gradient])
def test_differential_operator_constant(self,differential_operator): def test_differential_operator_constant(self,differential_operator):
size = np.random.random(3)+1.0 size = np.random.random(3)+1.0
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
shapes = { shapes = {
grid_filters.curl: [(3,),(3,3)], grid_filters.curl: [(3,),(3,3)],
grid_filters.divergence:[(3,),(3,3)], grid_filters.divergence:[(3,),(3,3)],
grid_filters.gradient: [(1,),(3,)] grid_filters.gradient: [(1,),(3,)]
} }
for shape in shapes[differential_operator]: for shape in shapes[differential_operator]:
field = np.ones(tuple(grid)+shape)*np.random.random()*1.0e5 field = np.ones(tuple(cells)+shape)*np.random.random()*1.0e5
assert np.allclose(differential_operator(size,field),0.0) assert np.allclose(differential_operator(size,field),0.0)
@ -190,15 +190,15 @@ class TestGridFilters:
@pytest.mark.parametrize('field_def,grad_def',grad_test_data) @pytest.mark.parametrize('field_def,grad_def',grad_test_data)
def test_grad(self,field_def,grad_def): def test_grad(self,field_def,grad_def):
size = np.random.random(3)+1.0 size = np.random.random(3)+1.0
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
nodes = grid_filters.cell_coord0(grid,size) nodes = grid_filters.coordinates0_point(cells,size)
my_locals = locals() # needed for list comprehension my_locals = locals() # needed for list comprehension
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),grid) for f in field_def],axis=-1) field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),cells) for f in field_def],axis=-1)
field = field.reshape(tuple(grid) + ((3,) if len(field_def)==3 else (1,))) field = field.reshape(tuple(cells) + ((3,) if len(field_def)==3 else (1,)))
grad = np.stack([np.broadcast_to(eval(c,globals(),my_locals),grid) for c in grad_def], axis=-1) grad = np.stack([np.broadcast_to(eval(c,globals(),my_locals),cells) for c in grad_def], axis=-1)
grad = grad.reshape(tuple(grid) + ((3,3) if len(grad_def)==9 else (3,))) grad = grad.reshape(tuple(cells) + ((3,3) if len(grad_def)==9 else (3,)))
assert np.allclose(grad,grid_filters.gradient(size,field)) assert np.allclose(grad,grid_filters.gradient(size,field))
@ -250,15 +250,15 @@ class TestGridFilters:
@pytest.mark.parametrize('field_def,curl_def',curl_test_data) @pytest.mark.parametrize('field_def,curl_def',curl_test_data)
def test_curl(self,field_def,curl_def): def test_curl(self,field_def,curl_def):
size = np.random.random(3)+1.0 size = np.random.random(3)+1.0
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
nodes = grid_filters.cell_coord0(grid,size) nodes = grid_filters.coordinates0_point(cells,size)
my_locals = locals() # needed for list comprehension my_locals = locals() # needed for list comprehension
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),grid) for f in field_def],axis=-1) field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),cells) for f in field_def],axis=-1)
field = field.reshape(tuple(grid) + ((3,3) if len(field_def)==9 else (3,))) field = field.reshape(tuple(cells) + ((3,3) if len(field_def)==9 else (3,)))
curl = np.stack([np.broadcast_to(eval(c,globals(),my_locals),grid) for c in curl_def], axis=-1) curl = np.stack([np.broadcast_to(eval(c,globals(),my_locals),cells) for c in curl_def], axis=-1)
curl = curl.reshape(tuple(grid) + ((3,3) if len(curl_def)==9 else (3,))) curl = curl.reshape(tuple(cells) + ((3,3) if len(curl_def)==9 else (3,)))
assert np.allclose(curl,grid_filters.curl(size,field)) assert np.allclose(curl,grid_filters.curl(size,field))
@ -303,17 +303,17 @@ class TestGridFilters:
def test_div(self,field_def,div_def): def test_div(self,field_def,div_def):
size = np.random.random(3)+1.0 size = np.random.random(3)+1.0
grid = np.random.randint(8,32,(3)) cells = np.random.randint(8,32,(3))
nodes = grid_filters.cell_coord0(grid,size) nodes = grid_filters.coordinates0_point(cells,size)
my_locals = locals() # needed for list comprehension my_locals = locals() # needed for list comprehension
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),grid) for f in field_def],axis=-1) field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),cells) for f in field_def],axis=-1)
field = field.reshape(tuple(grid) + ((3,3) if len(field_def)==9 else (3,))) field = field.reshape(tuple(cells) + ((3,3) if len(field_def)==9 else (3,)))
div = np.stack([np.broadcast_to(eval(c,globals(),my_locals),grid) for c in div_def], axis=-1) div = np.stack([np.broadcast_to(eval(c,globals(),my_locals),cells) for c in div_def], axis=-1)
if len(div_def)==3: if len(div_def)==3:
div = div.reshape(tuple(grid) + ((3,))) div = div.reshape(tuple(cells) + ((3,)))
else: else:
div=div.reshape(tuple(grid)) div=div.reshape(tuple(cells))
assert np.allclose(div,grid_filters.divergence(size,field)) assert np.allclose(div,grid_filters.divergence(size,field))

Some files were not shown because too many files have changed in this diff.