Merge branch 'development' into new-gmsh-version

Sharan Roongta 2021-01-07 16:38:16 +01:00
commit 793e5d0d2b
153 changed files with 4006 additions and 5438 deletions

View File

@ -60,11 +60,11 @@ variables:
MPI_Intel: "$IMPI2020Intel19_1"
MPI_GNU: "$OMPI4_0GNU10"
# ++++++++++++ PETSc ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
PETSc3_14_0IMPI2020Intel19_1: "Libraries/PETSc/3.14.0/Intel-19.1.2-IntelMPI-2019"
PETSc3_14_0OMPI4_0GNU10: "Libraries/PETSc/3.14.0/GNU-10-OpenMPI-4.0.5"
PETSc3_14_2IMPI2020Intel19_1: "Libraries/PETSc/3.14.2/Intel-19.1.2-IntelMPI-2019"
PETSc3_14_2OMPI4_0GNU10: "Libraries/PETSc/3.14.2/GNU-10-OpenMPI-4.0.5"
# ------------ Defaults ----------------------------------------------
PETSc_Intel: "$PETSc3_14_0IMPI2020Intel19_1"
PETSc_GNU: "$PETSc3_14_0OMPI4_0GNU10"
PETSc_Intel: "$PETSc3_14_2IMPI2020Intel19_1"
PETSc_GNU: "$PETSc3_14_2OMPI4_0GNU10"
# ++++++++++++ commercial FEM ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
MSC2020: "FEM/MSC/2020"
# ------------ Defaults ----------------------------------------------
@ -262,20 +262,6 @@ Pytest_grid:
- master
- release
Thermal:
stage: grid
script: Thermal/test.py
except:
- master
- release
Nonlocal_Damage_DetectChanges:
stage: grid
script: Nonlocal_Damage_DetectChanges/test.py
except:
- master
- release
Plasticity_DetectChanges:
stage: grid
script: Plasticity_DetectChanges/test.py
@ -353,7 +339,6 @@ backupData:
- mkdir $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}
- mv $LOCAL_HOME/performance/time.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $LOCAL_HOME/performance/memory.png $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
- mv $DAMASKROOT/PRIVATE/documenting/DAMASK_* $BACKUP/${CI_PIPELINE_ID}_${CI_COMMIT_SHA}/
only:
- development

.gitmodules vendored
View File

@ -2,3 +2,4 @@
path = PRIVATE
url = ../PRIVATE.git
branch = master
shallow = true

View File

@ -1,6 +1,18 @@
########################################################################################
# Compiler options for building DAMASK
cmake_minimum_required (VERSION 3.10.0 FATAL_ERROR)
cmake_minimum_required (VERSION 3.10.0)
include (FindPkgConfig REQUIRED)
# Dummy project to determine compiler names and version
project (Prerequisites LANGUAGES)
set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig")
pkg_check_modules (PETSC REQUIRED PETSc>=3.12.0 PETSc<3.15.0)
pkg_get_variable (CMAKE_Fortran_COMPILER PETSc fcompiler)
pkg_get_variable (CMAKE_C_COMPILER PETSc ccompiler)
find_program (CAT_EXECUTABLE NAMES cat)
execute_process (COMMAND ${CAT_EXECUTABLE} ${PROJECT_SOURCE_DIR}/VERSION
RESULT_VARIABLE DAMASK_VERSION_RETURN
OUTPUT_VARIABLE DAMASK_VERSION
OUTPUT_STRIP_TRAILING_WHITESPACE)
#---------------------------------------------------------------------------------------
# Find PETSc from system environment
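The compiler names that CMake now takes from PETSc via pkg_get_variable can be cross-checked outside of CMake. A minimal Python sketch, assuming pkg-config is installed and PETSC_DIR/PETSC_ARCH point to the same PETSc installation whose PETSc.pc defines the fcompiler/ccompiler variables queried above:

import os
import subprocess

# assumption: PETSC_DIR (and optionally PETSC_ARCH) are set as for the CMake run above
env = dict(os.environ,
           PKG_CONFIG_PATH=os.path.join(os.environ['PETSC_DIR'],
                                        os.environ.get('PETSC_ARCH',''),
                                        'lib/pkgconfig'))

def petsc_pc(query):
    """Query the PETSc pkg-config file (PETSc.pc)."""
    return subprocess.run(['pkg-config']+query+['PETSc'],
                          capture_output=True, text=True, env=env,
                          check=True).stdout.strip()

print('version   :', petsc_pc(['--modversion']))          # must satisfy >=3.12.0, <3.15.0
print('fcompiler :', petsc_pc(['--variable=fcompiler']))  # becomes CMAKE_Fortran_COMPILER
print('ccompiler :', petsc_pc(['--variable=ccompiler']))  # becomes CMAKE_C_COMPILER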
@ -28,19 +40,10 @@ include ${petsc_conf_rules}
include ${petsc_conf_variables}
INCLUDE_DIRS := \${PETSC_FC_INCLUDES}
LIBRARIES := \${PETSC_WITH_EXTERNAL_LIB}
COMPILERF := \${FC}
COMPILERC := \${CC}
LINKERNAME := \${FLINKER}
includes:
\t@echo \${INCLUDE_DIRS}
extlibs:
\t@echo \${LIBRARIES}
compilerf:
\t@echo \${COMPILERF}
compilerc:
\t@echo \${COMPILERC}
linker:
\t@echo \${LINKERNAME}
")
# CMake will execute each target in the ${petsc_config_makefile}
@ -52,26 +55,10 @@ execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_conf
OUTPUT_VARIABLE petsc_includes
OUTPUT_STRIP_TRAILING_WHITESPACE)
# Find the PETSc external linking directory settings
# required for final linking, must be appended after the executable
execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} "extlibs"
RESULT_VARIABLE PETSC_EXTERNAL_LIB_RETURN
OUTPUT_VARIABLE petsc_external_lib
OUTPUT_STRIP_TRAILING_WHITESPACE)
# PETSc specified fortran compiler
execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} "compilerf"
RESULT_VARIABLE PETSC_MPIFC_RETURN
OUTPUT_VARIABLE PETSC_MPIFC
OUTPUT_STRIP_TRAILING_WHITESPACE)
# PETSc specified C compiler
execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} "compilerc"
RESULT_VARIABLE PETSC_MPICC_RETURN
OUTPUT_VARIABLE PETSC_MPICC
OUTPUT_STRIP_TRAILING_WHITESPACE)
# PETSc specified linker (Fortran compiler + PETSc linking flags)
execute_process (COMMAND ${MAKE_EXECUTABLE} --no-print-directory -f ${petsc_config_makefile} "linker"
RESULT_VARIABLE PETSC_LINKER_RETURN
OUTPUT_VARIABLE PETSC_LINKER
OUTPUT_STRIP_TRAILING_WHITESPACE)
# Remove temporary makefile, no need to keep it anymore.
file (REMOVE_RECURSE ${TEMPDIR})
@ -90,14 +77,6 @@ endforeach (exlib)
message ("Found PETSC_DIR:\n${PETSC_DIR}\n" )
message ("Found PETSC_INCLUDES:\n${PETSC_INCLUDES}\n" )
message ("Found PETSC_EXTERNAL_LIB:\n${PETSC_EXTERNAL_LIB}\n")
message ("Found PETSC_LINKER:\n${PETSC_LINKER}\n" )
message ("Found MPI Fortran Compiler:\n${PETSC_MPIFC}\n" )
message ("Found MPI C Compiler:\n${PETSC_MPICC}\n" )
# set compiler commands to match PETSc (needs to be done before defining the project)
# https://cmake.org/Wiki/CMake_FAQ#How_do_I_use_a_different_compiler.3F
set (CMAKE_Fortran_COMPILER "${PETSC_MPIFC}")
set (CMAKE_C_COMPILER "${PETSC_MPICC}")
#---------------------------------------------------------------------------------------
# Now start to care about DAMASK
@ -105,17 +84,18 @@ set (CMAKE_C_COMPILER "${PETSC_MPICC}")
# DAMASK solver defines project to build
string(TOLOWER ${DAMASK_SOLVER} DAMASK_SOLVER)
if (DAMASK_SOLVER STREQUAL "grid")
project (damask-grid Fortran C)
project (damask-grid HOMEPAGE_URL https://damask.mpie.de LANGUAGES Fortran C)
add_definitions (-DGrid)
message ("Building Grid Solver\n")
elseif (DAMASK_SOLVER STREQUAL "mesh")
project (damask-mesh Fortran C)
project (damask-mesh HOMEPAGE_URL https://damask.mpie.de LANGUAGES Fortran C)
add_definitions (-DMesh)
message ("Building Mesh Solver\n")
else ()
message (FATAL_ERROR "Build target (DAMASK_SOLVER) is not defined")
endif ()
list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake)
add_definitions (-DDAMASKVERSION="${DAMASK_VERSION}")
add_definitions (-DPETSc)
message ("\nBuilding ${CMAKE_PROJECT_NAME}\n")
if (CMAKE_BUILD_TYPE STREQUAL "")
set (CMAKE_BUILD_TYPE "RELEASE")
@ -153,17 +133,8 @@ if (CMAKE_BUILD_TYPE STREQUAL "SYNTAXONLY")
set (BUILDCMD_POST "${BUILDCMD_POST} -fsyntax-only")
endif ()
# Parse DAMASK version from VERSION file
find_program (CAT_EXECUTABLE NAMES cat)
execute_process (COMMAND ${CAT_EXECUTABLE} ${PROJECT_SOURCE_DIR}/VERSION
RESULT_VARIABLE DAMASK_VERSION_RETURN
OUTPUT_VARIABLE DAMASK_V
OUTPUT_STRIP_TRAILING_WHITESPACE)
add_definitions (-DDAMASKVERSION="${DAMASK_V}")
# definition of other macros
add_definitions (-DPETSc)
list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake)
if (CMAKE_Fortran_COMPILER_ID STREQUAL "Intel")
include (Compiler-Intel)
elseif(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU")
@ -174,9 +145,8 @@ else ()
message (FATAL_ERROR "Compiler type (CMAKE_Fortran_COMPILER_ID) not recognized")
endif ()
set (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${BUILDCMD_PRE} ${OPENMP_FLAGS} ${STANDARD_CHECK} ${OPTIMIZATION_FLAGS} ${COMPILE_FLAGS} ${PRECISION_FLAGS}")
set (CMAKE_Fortran_LINK_EXECUTABLE "${BUILDCMD_PRE} ${PETSC_LINKER} ${OPENMP_FLAGS} ${OPTIMIZATION_FLAGS} ${LINKER_FLAGS}")
set (CMAKE_Fortran_LINK_EXECUTABLE "${BUILDCMD_PRE} ${CMAKE_Fortran_COMPILER} ${OPENMP_FLAGS} ${OPTIMIZATION_FLAGS} ${LINKER_FLAGS}")
if (CMAKE_BUILD_TYPE STREQUAL "DEBUG")
set (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE} "${CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE}} ${DEBUG_FLAGS}")

View File

@ -2,27 +2,20 @@ SHELL = /bin/sh
########################################################################################
# Makefile for the installation of DAMASK
########################################################################################
DAMASK_ROOT = $(shell python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
.PHONY: all
all: grid mesh processing
.PHONY: grid
grid: build/grid
@(cd build/grid;make -j${DAMASK_NUM_THREADS} all install;)
grid:
@cmake -B build/grid -DDAMASK_SOLVER=GRID -DCMAKE_INSTALL_PREFIX=${PWD} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP}
@cmake --build build/grid --parallel ${DAMASK_NUM_THREADS}
@cmake --install build/grid
.PHONY: mesh
mesh: build/mesh
@(cd build/mesh; make -j${DAMASK_NUM_THREADS} all install;)
.PHONY: build/grid
build/grid:
@mkdir -p build/grid
@(cd build/grid; cmake -Wno-dev -DDAMASK_SOLVER=GRID -DCMAKE_INSTALL_PREFIX=${DAMASK_ROOT} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
.PHONY: build/mesh
build/mesh:
@mkdir -p build/mesh
@(cd build/mesh; cmake -Wno-dev -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${DAMASK_ROOT} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
mesh:
@cmake -B build/mesh -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${PWD} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP}
@cmake --build build/mesh --parallel ${DAMASK_NUM_THREADS}
@cmake --install build/mesh
.PHONY: clean
clean:

View File

@ -1 +1 @@
v3.0.0-alpha-920-gccf1a849f
v3.0.0-alpha2-153-gf8dd5df0c

View File

@ -20,7 +20,7 @@ endif ()
# -assume std_mod_proc_name (included in -standard-semantics) causes problems if other modules
# (PETSc, HDF5) are not compiled with this option (https://software.intel.com/en-us/forums/intel-fortran-compiler-for-linux-and-mac-os-x/topic/62172)
set (STANDARD_CHECK "-stand f15 -standard-semantics -assume nostd_mod_proc_name")
set (STANDARD_CHECK "-stand f18 -standard-semantics -assume nostd_mod_proc_name")
set (LINKER_FLAGS "${LINKER_FLAGS} -shared-intel")
# Link against shared Intel libraries instead of static ones

View File

@ -5,8 +5,8 @@ homogenization:
phase:
Aluminum:
lattice: cF
mechanics:
lattice: cF
output: [F, P, F_e, F_p, L_p]
elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke}
plasticity:

View File

@ -33,12 +33,12 @@ for filename in options.filenames:
results = damask.Result(filename)
if not results.structured: continue
coords = damask.grid_filters.cell_coord0(results.grid,results.size,results.origin).reshape(-1,3,order='F')
coords = damask.grid_filters.coordinates0_point(results.cells,results.size,results.origin).reshape(-1,3,order='F')
N_digits = int(np.floor(np.log10(int(results.increments[-1][3:]))))+1
N_digits = 5 # hack to keep test intact
for inc in damask.util.show_progress(results.iterate('increments'),len(results.increments)):
table = damask.Table(np.ones(np.product(results.grid),dtype=int)*int(inc[3:]),{'inc':(1,)})\
table = damask.Table(np.ones(np.product(results.cells),dtype=int)*int(inc[3:]),{'inc':(1,)})\
.add('pos',coords.reshape(-1,3))
results.pick('homogenizations',False)
@ -46,14 +46,14 @@ for filename in options.filenames:
for label in options.con:
x = results.get_dataset_location(label)
if len(x) != 0:
table = table.add(label,results.read_dataset(x,0,plain=True).reshape(results.grid.prod(),-1))
table = table.add(label,results.read_dataset(x,0,plain=True).reshape(results.cells.prod(),-1))
results.pick('phases',False)
results.pick('homogenizations',True)
for label in options.mat:
x = results.get_dataset_location(label)
if len(x) != 0:
table = table.add(label,results.read_dataset(x,0,plain=True).reshape(results.grid.prod(),-1))
table = table.add(label,results.read_dataset(x,0,plain=True).reshape(results.cells.prod(),-1))
dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
if not os.path.isdir(dirname):

View File

@ -71,13 +71,13 @@ for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
F = table.get(options.defgrad).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3))
nodes = damask.grid_filters.node_coord(size,F)
nodes = damask.grid_filters.coordinates_node(size,F)
if options.shape:
centers = damask.grid_filters.cell_coord(size,F)
centers = damask.grid_filters.coordinates_point(size,F)
shapeMismatch = shapeMismatch(size,F,nodes,centers)
table = table.add('shapeMismatch(({}))'.format(options.defgrad),
shapeMismatch.reshape(-1,1,order='F'),

View File

@ -44,7 +44,7 @@ for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
for label in options.labels:
field = table.get(label)

View File

@ -48,24 +48,24 @@ for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
F = table.get(options.f).reshape(tuple(grid)+(-1,),order='F').reshape(tuple(grid)+(3,3))
if options.nodal:
damask.Table(damask.grid_filters.node_coord0(grid,size).reshape(-1,3,order='F'),
damask.Table(damask.grid_filters.coordinates0_node(grid,size).reshape(-1,3,order='F'),
{'pos':(3,)})\
.add('avg({}).{}'.format(options.f,options.pos),
damask.grid_filters.node_displacement_avg(size,F).reshape(-1,3,order='F'),
damask.grid_filters.displacement_avg_node(size,F).reshape(-1,3,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))\
.add('fluct({}).{}'.format(options.f,options.pos),
damask.grid_filters.node_displacement_fluct(size,F).reshape(-1,3,order='F'),
damask.grid_filters.displacement_fluct_node(size,F).reshape(-1,3,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))\
.save((sys.stdout if name is None else os.path.splitext(name)[0]+'_nodal.txt'))
else:
table.add('avg({}).{}'.format(options.f,options.pos),
damask.grid_filters.cell_displacement_avg(size,F).reshape(-1,3,order='F'),
damask.grid_filters.displacement_avg_point(size,F).reshape(-1,3,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))\
.add('fluct({}).{}'.format(options.f,options.pos),
damask.grid_filters.cell_displacement_fluct(size,F).reshape(-1,3,order='F'),
damask.grid_filters.displacement_fluct_point(size,F).reshape(-1,3,order='F'),
scriptID+' '+' '.join(sys.argv[1:]))\
.save((sys.stdout if name is None else name))
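The changes to the pre-processing scripts above are a pure renaming of the damask.grid_filters helpers (plus Result.grid becoming Result.cells). A summary of the mapping as it appears in this diff, useful when porting other scripts:

# old -> new names of damask.grid_filters functions, as renamed in this commit
GRID_FILTERS_RENAMES = {
    'cell_coord0':                'coordinates0_point',
    'node_coord0':                'coordinates0_node',
    'cell_coord':                 'coordinates_point',
    'node_coord':                 'coordinates_node',
    'cell_coord0_gridSizeOrigin': 'cellsSizeOrigin_coordinates0_point',
    'cell_displacement_avg':      'displacement_avg_point',
    'cell_displacement_fluct':    'displacement_fluct_point',
    'node_displacement_avg':      'displacement_avg_node',
    'node_displacement_fluct':    'displacement_fluct_node',
}

# example: grid_filters.cell_coord0(grid,size) becomes
#          grid_filters.coordinates0_point(cells,size) with identical arguments
for old, new in sorted(GRID_FILTERS_RENAMES.items()):
    print(f'{old:30} -> {new}')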

View File

@ -44,7 +44,7 @@ for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
for label in options.labels:
field = table.get(label)

View File

@ -143,7 +143,7 @@ for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
neighborhood = neighborhoods[options.neighborhood]
diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')

View File

@ -44,7 +44,7 @@ for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid,size,origin = damask.grid_filters.cell_coord0_gridSizeOrigin(table.get(options.pos))
grid,size,origin = damask.grid_filters.cellsSizeOrigin_coordinates0_point(table.get(options.pos))
for label in options.labels:
field = table.get(label)

View File

@ -65,7 +65,7 @@ if filenames == []: parser.error('no input file specified.')
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.load_DREAM3D(name,options.basegroup,options.pointwise)
geom = damask.Grid.load_DREAM3D(name,options.basegroup,options.pointwise)
damask.util.croak(geom)
geom.save_ASCII(os.path.splitext(name)[0]+'.geom')

View File

@ -133,7 +133,7 @@ for i in range(3,np.max(microstructure)):
header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
+ config_header
geom = damask.Geom(microstructure.reshape(grid),
geom = damask.Grid(microstructure.reshape(grid),
size,-size/2,
comments=header)
damask.util.croak(geom)

View File

@ -62,9 +62,9 @@ if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
geom = damask.Grid.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid_original = geom.grid
grid_original = geom.cells
damask.util.croak(geom)
material = np.tile(geom.material,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1
grid = np.array(material.shape)
@ -169,7 +169,7 @@ for name in filenames:
# undo any changes involving immutable materials
material = np.where(immutable, material_original,material)
damask.Geom(material = material[0:grid_original[0],0:grid_original[1],0:grid_original[2]],
damask.Grid(material = material[0:grid_original[0],0:grid_original[1],0:grid_original[2]],
size = geom.size,
origin = geom.origin,
comments = geom.comments + [scriptID + ' ' + ' '.join(sys.argv[1:])],

View File

@ -196,12 +196,12 @@ if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
geom = damask.Grid.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
material = geom.material.flatten(order='F')
cmds = [\
init(),
mesh(geom.grid,geom.size),
mesh(geom.cells,geom.size),
materials(),
geometry(),
initial_conditions(material),

View File

@ -91,7 +91,7 @@ class myThread (threading.Thread):
perturbedSeedsTable.set('pos',coords).save(perturbedSeedsVFile,legacy=True)
#--- do tesselation with perturbed seed file ------------------------------------------------------
perturbedGeom = damask.Geom.from_Voronoi_tessellation(options.grid,np.ones(3),coords)
perturbedGeom = damask.Grid.from_Voronoi_tessellation(options.grid,np.ones(3),coords)
#--- evaluate current seeds file ------------------------------------------------------------------
@ -210,9 +210,9 @@ baseFile = os.path.splitext(os.path.basename(options.seedFile))[0]
points = np.array(options.grid).prod().astype('float')
# ----------- calculate target distribution and bin edges
targetGeom = damask.Geom.load_ASCII(os.path.splitext(os.path.basename(options.target))[0]+'.geom')
targetGeom = damask.Grid.load_ASCII(os.path.splitext(os.path.basename(options.target))[0]+'.geom')
nMaterials = len(np.unique(targetGeom.material))
targetVolFrac = np.bincount(targetGeom.material.flatten())/targetGeom.grid.prod().astype(np.float)
targetVolFrac = np.bincount(targetGeom.material.flatten())/targetGeom.cells.prod().astype(np.float)
target = []
for i in range(1,nMaterials+1):
targetHist,targetBins = np.histogram(targetVolFrac,bins=i) #bin boundaries
@ -229,7 +229,7 @@ bestSeedsUpdate = time.time()
# ----------- tessellate initial seed file to get and evaluate geom file
bestSeedsVFile.seek(0)
initialGeom = damask.Geom.from_Voronoi_tessellation(options.grid,np.ones(3),initial_seeds)
initialGeom = damask.Grid.from_Voronoi_tessellation(options.grid,np.ones(3),initial_seeds)
if len(np.unique(targetGeom.material)) != nMaterials:
damask.util.croak('error. Material count mismatch')

View File

@ -52,15 +52,15 @@ options.box = np.array(options.box).reshape(3,2)
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
geom = damask.Grid.load_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
offset =(np.amin(options.box, axis=1)*geom.grid/geom.size).astype(int)
offset =(np.amin(options.box, axis=1)*geom.cells/geom.size).astype(int)
box = np.amax(options.box, axis=1) \
- np.amin(options.box, axis=1)
Nx = int(options.N/np.sqrt(options.N*geom.size[1]*box[1]/geom.size[0]/box[0]))
Ny = int(options.N/np.sqrt(options.N*geom.size[0]*box[0]/geom.size[1]/box[1]))
Nz = int(box[2]*geom.grid[2])
Nz = int(box[2]*geom.cells[2])
damask.util.croak('poking {} x {} x {} in box {} {} {}...'.format(Nx,Ny,Nz,*box))
@ -70,12 +70,12 @@ for name in filenames:
n = 0
for i in range(Nx):
for j in range(Ny):
g[0] = round((i+0.5)*box[0]*geom.grid[0]/Nx-0.5)+offset[0]
g[1] = round((j+0.5)*box[1]*geom.grid[1]/Ny-0.5)+offset[1]
g[0] = round((i+0.5)*box[0]*geom.cells[0]/Nx-0.5)+offset[0]
g[1] = round((j+0.5)*box[1]*geom.cells[1]/Ny-0.5)+offset[1]
for k in range(Nz):
g[2] = k + offset[2]
g %= geom.grid
seeds[n,0:3] = (g+0.5)/geom.grid # normalize coordinates to box
g %= geom.cells
seeds[n,0:3] = (g+0.5)/geom.cells # normalize coordinates to box
seeds[n, 3] = geom.material[g[0],g[1],g[2]]
if options.x: g[0] += 1
if options.y: g[1] += 1
@ -85,7 +85,7 @@ for name in filenames:
comments = geom.comments \
+ [scriptID + ' ' + ' '.join(sys.argv[1:]),
'poking\ta {}\tb {}\tc {}'.format(Nx,Ny,Nz),
'grid\ta {}\tb {}\tc {}'.format(*geom.grid),
'grid\ta {}\tb {}\tc {}'.format(*geom.cells),
'size\tx {}\ty {}\tz {}'.format(*geom.size),
'origin\tx {}\ty {}\tz {}'.format(*geom.origin),
]

View File

@ -32,7 +32,7 @@ from ._vtk import VTK # noqa
from ._colormap import Colormap # noqa
from ._config import Config # noqa
from ._configmaterial import ConfigMaterial # noqa
from ._geom import Geom # noqa
from ._grid import Grid # noqa
from ._result import Result # noqa
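On the user side the rename is mechanical: damask.Geom becomes damask.Grid and the grid attribute becomes cells. A hedged before/after sketch ('example.vtr' is a placeholder for any geometry file):

import damask

# before this commit:
#   geom = damask.Geom.load('example.vtr')
#   geom.grid                              # number of grid points per direction
#
# after this commit (class and attribute renamed):
grid = damask.Grid.load('example.vtr')     # 'example.vtr' is a placeholder file name
print(grid.cells)                          # number of cells per direction
print(grid.size, grid.origin)              # unchanged attributes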

View File

@ -57,7 +57,7 @@ class Colormap(mpl.colors.ListedColormap):
ax1.imshow(np.linspace(0,1,self.N).reshape(1,-1),
aspect='auto', cmap=self, interpolation='nearest')
plt.show(block = False)
return self.name
return 'Colormap: '+self.name
@staticmethod
@ -225,7 +225,7 @@ class Colormap(mpl.colors.ListedColormap):
def save_paraview(self,fname=None):
"""
Write colormap to JSON file for Paraview.
Save as JSON file for use in Paraview.
Parameters
----------
@ -260,7 +260,7 @@ class Colormap(mpl.colors.ListedColormap):
def save_ASCII(self,fname=None):
"""
Write colormap to ASCII table.
Save as ASCII file.
Parameters
----------
@ -286,7 +286,7 @@ class Colormap(mpl.colors.ListedColormap):
def save_GOM(self,fname=None):
"""
Write colormap to GOM Aramis compatible format.
Save as ASCII file for use in GOM Aramis.
Parameters
----------
@ -314,7 +314,7 @@ class Colormap(mpl.colors.ListedColormap):
def save_gmsh(self,fname=None):
"""
Write colormap to Gmsh compatible format.
Save as ASCII file for use in gmsh.
Parameters
----------

View File

@ -21,6 +21,9 @@ class NiceDumper(yaml.SafeDumper):
return self.represent_data(dict(data)) if isinstance(data, dict) and type(data) != dict else \
super().represent_data(data)
def ignore_aliases(self, data):
"""No references."""
return True
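The new ignore_aliases hook makes the dumper repeat data instead of emitting YAML anchors/aliases when the same Python object occurs more than once. A standalone PyYAML sketch of that effect (NoAliasDumper is a toy stand-in; NiceDumper additionally tweaks how non-dict mappings are represented):

import yaml

class NoAliasDumper(yaml.SafeDumper):
    def ignore_aliases(self, data):
        """No references."""
        return True

shared = {'C_11': 106.75e9}
doc = {'phase_A': shared, 'phase_B': shared}      # same object referenced twice

print(yaml.dump(doc, Dumper=yaml.SafeDumper))     # emits &id001 / *id001 aliases
print(yaml.dump(doc, Dumper=NoAliasDumper))       # repeats the data, no aliases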
class Config(dict):
"""YAML-based configuration."""

View File

@ -9,6 +9,16 @@ from . import Orientation
class ConfigMaterial(Config):
"""Material configuration."""
_defaults = {'material': [],
'homogenization': {},
'phase': {}}
def __init__(self,d={}):
"""Initialize object with default dictionary keys."""
super().__init__(d)
for k,v in self._defaults.items():
if k not in self: self[k] = v
def save(self,fname='material.yaml',**kwargs):
"""
Save to yaml file.
@ -75,6 +85,8 @@ class ConfigMaterial(Config):
fraction: 1.0
phase: Steel
homogenization: SX
homogenization: {}
phase: {}
"""
constituents_ = {k:table.get(v) for k,v in constituents.items()}
@ -261,6 +273,8 @@ class ConfigMaterial(Config):
fraction: 1.0
phase: Aluminum
homogenization: SX
homogenization: {}
phase: {}
"""
length = -1
@ -274,6 +288,7 @@ class ConfigMaterial(Config):
c = [{} for _ in range(length)] if constituents is None else \
[{'constituents':u} for u in ConfigMaterial._constituents(**constituents)]
if len(c) == 1: c = [copy.deepcopy(c[0]) for _ in range(length)]
if length != 1 and length != len(c):
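With the _defaults handling above, an empty ConfigMaterial already carries the three top-level sections. A short usage sketch:

import damask

m = damask.ConfigMaterial()                  # no arguments needed
print(sorted(m.keys()))                      # ['homogenization', 'material', 'phase']

# keys from a passed-in dict are kept, missing ones are filled with the defaults
m = damask.ConfigMaterial({'phase': {'Aluminum': {}}})
print(m['material'], m['homogenization'])    # [] {}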

View File

@ -7,7 +7,8 @@ import warnings
import numpy as np
import pandas as pd
import h5py
from scipy import ndimage,spatial
from scipy import ndimage, spatial
from vtk.util.numpy_support import vtk_to_numpy as vtk_to_np
from . import environment
from . import VTK
@ -16,21 +17,21 @@ from . import grid_filters
from . import Rotation
class Geom:
class Grid:
"""Geometry definition for grid solvers."""
def __init__(self,material,size,origin=[0.0,0.0,0.0],comments=[]):
"""
New geometry definition from array of materials, size, and origin.
New grid definition from array of materials, size, and origin.
Parameters
----------
material : numpy.ndarray
Material index array (3D).
size : list or numpy.ndarray
Physical size of the geometry in meter.
Physical size of the grid in meter.
origin : list or numpy.ndarray, optional
Physical origin of the geometry in meter.
Physical origin of the grid in meter.
comments : list of str, optional
Comment lines.
@ -42,23 +43,26 @@ class Geom:
def __repr__(self):
"""Basic information on geometry definition."""
"""Basic information on grid definition."""
mat_min = np.nanmin(self.material)
mat_max = np.nanmax(self.material)
mat_N = self.N_materials
return util.srepr([
f'grid a b c: {util.srepr(self.grid, " x ")}',
f'size x y z: {util.srepr(self.size, " x ")}',
f'origin x y z: {util.srepr(self.origin," ")}',
f'# materials: {self.N_materials}',
f'max material: {np.nanmax(self.material)}',
f'cells a b c: {util.srepr(self.cells, " x ")}',
f'size x y z: {util.srepr(self.size, " x ")}',
f'origin x y z: {util.srepr(self.origin," ")}',
f'# materials: {mat_N}' + ('' if mat_min == 0 and mat_max+1 == mat_N else
f' (min: {mat_min}, max: {mat_max})')
])
def __copy__(self):
"""Copy geometry."""
"""Copy grid."""
return copy.deepcopy(self)
def copy(self):
"""Copy geometry."""
"""Copy grid."""
return self.__copy__()
@ -68,14 +72,14 @@ class Geom:
Parameters
----------
other : Geom
Geometry to compare self against.
other : damask.Grid
Grid to compare self against.
"""
message = []
if np.any(other.grid != self.grid):
message.append(util.deemph(f'grid a b c: {util.srepr(other.grid," x ")}'))
message.append(util.emph( f'grid a b c: {util.srepr( self.grid," x ")}'))
if np.any(other.cells != self.cells):
message.append(util.deemph(f'cells a b c: {util.srepr(other.cells," x ")}'))
message.append(util.emph( f'cells a b c: {util.srepr( self.cells," x ")}'))
if not np.allclose(other.size,self.size):
message.append(util.deemph(f'size x y z: {util.srepr(other.size," x ")}'))
@ -104,9 +108,9 @@ class Geom:
@material.setter
def material(self,material):
if len(material.shape) != 3:
raise ValueError(f'Invalid material shape {material.shape}.')
raise ValueError(f'invalid material shape {material.shape}')
elif material.dtype not in np.sctypes['float'] + np.sctypes['int']:
raise TypeError(f'Invalid material data type {material.dtype}.')
raise TypeError(f'invalid material data type {material.dtype}')
else:
self._material = np.copy(material)
@ -117,31 +121,31 @@ class Geom:
@property
def size(self):
"""Physical size of geometry in meter."""
"""Physical size of grid in meter."""
return self._size
@size.setter
def size(self,size):
if len(size) != 3 or any(np.array(size) <= 0):
raise ValueError(f'Invalid size {size}.')
raise ValueError(f'invalid size {size}')
else:
self._size = np.array(size)
@property
def origin(self):
"""Coordinates of geometry origin in meter."""
"""Coordinates of grid origin in meter."""
return self._origin
@origin.setter
def origin(self,origin):
if len(origin) != 3:
raise ValueError(f'Invalid origin {origin}.')
raise ValueError(f'invalid origin {origin}')
else:
self._origin = np.array(origin)
@property
def comments(self):
"""Comments/history of geometry."""
"""Comments, e.g. history of operations."""
return self._comments
@comments.setter
@ -150,35 +154,39 @@ class Geom:
@property
def grid(self):
"""Grid dimension of geometry."""
def cells(self):
"""Number of cells in x,y,z direction."""
return np.asarray(self.material.shape)
@property
def N_materials(self):
"""Number of (unique) material indices within geometry."""
"""Number of (unique) material indices within grid."""
return np.unique(self.material).size
@staticmethod
def load(fname):
"""
Read a VTK rectilinear grid.
Load from VTK rectilinear grid file.
Parameters
----------
fname : str or or pathlib.Path
Geometry file to read.
Valid extension is .vtr, which will be appended if not given.
Grid file to read. Valid extension is .vtr, which will be appended
if not given.
"""
v = VTK.load(fname if str(fname).endswith('.vtr') else str(fname)+'.vtr')
comments = v.get_comments()
grid = np.array(v.vtk_data.GetDimensions())-1
bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T
cells = np.array(v.vtk_data.GetDimensions())-1
bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T
return Geom(material = v.get('material').reshape(grid,order='F'),
for i,c in enumerate([v.vtk_data.GetXCoordinates(),v.vtk_data.GetYCoordinates(),v.vtk_data.GetZCoordinates()]):
if not np.allclose(vtk_to_np(c),np.linspace(bbox[0][i],bbox[1][i],cells[i]+1)):
raise ValueError('regular grid spacing violated')
return Grid(material = v.get('material').reshape(cells,order='F'),
size = bbox[1] - bbox[0],
origin = bbox[0],
comments=comments)
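The added loop rejects .vtr files whose coordinate arrays are not equally spaced, since only regular grids are supported. The same test in plain numpy, as a hedged sketch:

import numpy as np

def regular_spacing(coords, atol=1e-12):
    """True if the 1D coordinate array has uniform spacing (rectilinear -> regular grid)."""
    coords = np.asarray(coords, dtype=float)
    return np.allclose(coords, np.linspace(coords[0], coords[-1], len(coords)), atol=atol)

print(regular_spacing([0.0, 0.5, 1.0, 1.5]))   # True  -> accepted
print(regular_spacing([0.0, 0.4, 1.0, 1.5]))   # False -> 'regular grid spacing violated'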
@ -187,7 +195,7 @@ class Geom:
@staticmethod
def load_ASCII(fname):
"""
Read a geom file.
Load from geom file.
Storing geometry files in ASCII format is deprecated.
This function will be removed in a future version of DAMASK.
@ -211,7 +219,7 @@ class Geom:
except ValueError:
header_length,keyword = (-1, 'invalid')
if not keyword.startswith('head') or header_length < 3:
raise TypeError('Header length information missing or invalid')
raise TypeError('header length information missing or invalid')
comments = []
content = f.readlines()
@ -219,7 +227,7 @@ class Geom:
items = line.split('#')[0].lower().strip().split()
key = items[0] if items else ''
if key == 'grid':
grid = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
cells = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
elif key == 'size':
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
elif key == 'origin':
@ -227,7 +235,7 @@ class Geom:
else:
comments.append(line.strip())
material = np.empty(grid.prod()) # initialize as flat array
material = np.empty(cells.prod()) # initialize as flat array
i = 0
for line in content[header_length:]:
items = line.split('#')[0].split()
@ -242,19 +250,19 @@ class Geom:
material[i:i+len(items)] = items
i += len(items)
if i != grid.prod():
raise TypeError(f'Invalid file: expected {grid.prod()} entries, found {i}')
if i != cells.prod():
raise TypeError(f'invalid file: expected {cells.prod()} entries, found {i}')
if not np.any(np.mod(material,1) != 0.0): # no float present
material = material.astype('int') - (1 if material.min() > 0 else 0)
return Geom(material.reshape(grid,order='F'),size,origin,comments)
return Grid(material.reshape(cells,order='F'),size,origin,comments)
@staticmethod
def load_DREAM3D(fname,base_group,point_data=None,material='FeatureIds'):
"""
Load a DREAM.3D file.
Load from DREAM.3D file.
Parameters
----------
@ -274,21 +282,21 @@ class Geom:
root_dir ='DataContainers'
f = h5py.File(fname, 'r')
g = path.join(root_dir,base_group,'_SIMPL_GEOMETRY')
grid = f[path.join(g,'DIMENSIONS')][()]
size = f[path.join(g,'SPACING')][()] * grid
cells = f[path.join(g,'DIMENSIONS')][()]
size = f[path.join(g,'SPACING')][()] * cells
origin = f[path.join(g,'ORIGIN')][()]
ma = np.arange(grid.prod(),dtype=int) \
ma = np.arange(cells.prod(),dtype=int) \
if point_data is None else \
np.reshape(f[path.join(root_dir,base_group,point_data,material)],grid.prod())
np.reshape(f[path.join(root_dir,base_group,point_data,material)],cells.prod())
return Geom(ma.reshape(grid,order='F'),size,origin,util.execution_stamp('Geom','load_DREAM3D'))
return Grid(ma.reshape(cells,order='F'),size,origin,util.execution_stamp('Grid','load_DREAM3D'))
@staticmethod
def from_table(table,coordinates,labels):
"""
Derive geometry from an ASCII table.
Generate grid from ASCII table.
Parameters
----------
@ -302,15 +310,15 @@ class Geom:
Each unique combination of values results in one material ID.
"""
grid,size,origin = grid_filters.cell_coord0_gridSizeOrigin(table.get(coordinates))
cells,size,origin = grid_filters.cellsSizeOrigin_coordinates0_point(table.get(coordinates))
labels_ = [labels] if isinstance(labels,str) else labels
unique,unique_inverse = np.unique(np.hstack([table.get(l) for l in labels_]),return_inverse=True,axis=0)
ma = np.arange(grid.prod()) if len(unique) == grid.prod() else \
ma = np.arange(cells.prod()) if len(unique) == cells.prod() else \
np.arange(unique.size)[np.argsort(pd.unique(unique_inverse))][unique_inverse]
return Geom(ma.reshape(grid,order='F'),size,origin,util.execution_stamp('Geom','from_table'))
return Grid(ma.reshape(cells,order='F'),size,origin,util.execution_stamp('Grid','from_table'))
@staticmethod
@ -318,16 +326,16 @@ class Geom:
return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
@staticmethod
def from_Laguerre_tessellation(grid,size,seeds,weights,material=None,periodic=True):
def from_Laguerre_tessellation(cells,size,seeds,weights,material=None,periodic=True):
"""
Generate geometry from Laguerre tessellation.
Generate grid from Laguerre tessellation.
Parameters
----------
grid : int numpy.ndarray of shape (3)
Number of grid points in x,y,z direction.
cells : int numpy.ndarray of shape (3)
Number of cells in x,y,z direction.
size : list or numpy.ndarray of shape (3)
Physical size of the geometry in meter.
Physical size of the grid in meter.
seeds : numpy.ndarray of shape (:,3)
Position of the seed points in meter. All points need to lie within the box.
weights : numpy.ndarray of shape (seeds.shape[0])
@ -336,7 +344,7 @@ class Geom:
Material ID of the seeds.
Defaults to None, in which case materials are consecutively numbered.
periodic : Boolean, optional
Perform a periodic tessellation. Defaults to True.
Assume grid to be periodic. Defaults to True.
"""
if periodic:
@ -344,57 +352,57 @@ class Geom:
seeds_p = np.vstack((seeds -np.array([size[0],0.,0.]),seeds, seeds +np.array([size[0],0.,0.])))
seeds_p = np.vstack((seeds_p-np.array([0.,size[1],0.]),seeds_p,seeds_p+np.array([0.,size[1],0.])))
seeds_p = np.vstack((seeds_p-np.array([0.,0.,size[2]]),seeds_p,seeds_p+np.array([0.,0.,size[2]])))
coords = grid_filters.cell_coord0(grid*3,size*3,-size).reshape(-1,3)
coords = grid_filters.coordinates0_point(cells*3,size*3,-size).reshape(-1,3)
else:
weights_p = weights
seeds_p = seeds
coords = grid_filters.cell_coord0(grid,size).reshape(-1,3)
coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3)
pool = mp.Pool(processes = int(environment.options['DAMASK_NUM_THREADS']))
result = pool.map_async(partial(Geom._find_closest_seed,seeds_p,weights_p), [coord for coord in coords])
result = pool.map_async(partial(Grid._find_closest_seed,seeds_p,weights_p), [coord for coord in coords])
pool.close()
pool.join()
material_ = np.array(result.get())
if periodic:
material_ = material_.reshape(grid*3)
material_ = material_[grid[0]:grid[0]*2,grid[1]:grid[1]*2,grid[2]:grid[2]*2]%seeds.shape[0]
material_ = material_.reshape(cells*3)
material_ = material_[cells[0]:cells[0]*2,cells[1]:cells[1]*2,cells[2]:cells[2]*2]%seeds.shape[0]
else:
material_ = material_.reshape(grid)
material_ = material_.reshape(cells)
return Geom(material = material_ if material is None else material[material_],
return Grid(material = material_ if material is None else material[material_],
size = size,
comments = util.execution_stamp('Geom','from_Laguerre_tessellation'),
comments = util.execution_stamp('Grid','from_Laguerre_tessellation'),
)
@staticmethod
def from_Voronoi_tessellation(grid,size,seeds,material=None,periodic=True):
def from_Voronoi_tessellation(cells,size,seeds,material=None,periodic=True):
"""
Generate geometry from Voronoi tessellation.
Generate grid from Voronoi tessellation.
Parameters
----------
grid : int numpy.ndarray of shape (3)
Number of grid points in x,y,z direction.
cells : int numpy.ndarray of shape (3)
Number of cells in x,y,z direction.
size : list or numpy.ndarray of shape (3)
Physical size of the geometry in meter.
Physical size of the grid in meter.
seeds : numpy.ndarray of shape (:,3)
Position of the seed points in meter. All points need to lie within the box.
material : numpy.ndarray of shape (seeds.shape[0]), optional
Material ID of the seeds.
Defaults to None, in which case materials are consecutively numbered.
periodic : Boolean, optional
Perform a periodic tessellation. Defaults to True.
Assume grid to be periodic. Defaults to True.
"""
coords = grid_filters.cell_coord0(grid,size).reshape(-1,3)
coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3)
KDTree = spatial.cKDTree(seeds,boxsize=size) if periodic else spatial.cKDTree(seeds)
devNull,material_ = KDTree.query(coords)
return Geom(material = (material_ if material is None else material[material_]).reshape(grid),
return Grid(material = (material_ if material is None else material[material_]).reshape(cells),
size = size,
comments = util.execution_stamp('Geom','from_Voronoi_tessellation'),
comments = util.execution_stamp('Grid','from_Voronoi_tessellation'),
)
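The Voronoi path reduces to a nearest-seed query on the cell centers; with periodic=True the cKDTree receives the box size so distances wrap around. A minimal standalone scipy sketch of that logic (cell ordering may differ from grid_filters.coordinates0_point):

import numpy as np
from scipy import spatial

cells, size = np.array([8, 8, 8]), np.array([1.0, 1.0, 1.0])
rng = np.random.default_rng(0)
seeds = rng.random((5, 3)) * size                       # 5 seed points inside the box

# cell-center coordinates of a regular grid
coords = np.stack(np.meshgrid(*[(np.arange(c) + 0.5) / c * s for c, s in zip(cells, size)],
                              indexing='ij'), axis=-1).reshape(-1, 3)

tree = spatial.cKDTree(seeds, boxsize=size)             # boxsize -> periodic distances
_, material = tree.query(coords)                        # index of the closest seed per cell
print(material.reshape(cells).shape, np.unique(material))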
@ -441,16 +449,16 @@ class Geom:
@staticmethod
def from_minimal_surface(grid,size,surface,threshold=0.0,periods=1,materials=(0,1)):
def from_minimal_surface(cells,size,surface,threshold=0.0,periods=1,materials=(0,1)):
"""
Generate geometry from definition of triply periodic minimal surface.
Generate grid from definition of triply periodic minimal surface.
Parameters
----------
grid : int numpy.ndarray of shape (3)
Number of grid points in x,y,z direction.
cells : int numpy.ndarray of shape (3)
Number of cells in x,y,z direction.
size : list or numpy.ndarray of shape (3)
Physical size of the geometry in meter.
Physical size of the grid in meter.
surface : str
Type of the minimal surface. See notes for details.
threshold : float, optional.
@ -493,19 +501,19 @@ class Geom:
https://doi.org/10.1016/j.simpa.2020.100026
"""
x,y,z = np.meshgrid(periods*2.0*np.pi*(np.arange(grid[0])+0.5)/grid[0],
periods*2.0*np.pi*(np.arange(grid[1])+0.5)/grid[1],
periods*2.0*np.pi*(np.arange(grid[2])+0.5)/grid[2],
x,y,z = np.meshgrid(periods*2.0*np.pi*(np.arange(cells[0])+0.5)/cells[0],
periods*2.0*np.pi*(np.arange(cells[1])+0.5)/cells[1],
periods*2.0*np.pi*(np.arange(cells[2])+0.5)/cells[2],
indexing='ij',sparse=True)
return Geom(material = np.where(threshold < Geom._minimal_surface[surface](x,y,z),materials[1],materials[0]),
return Grid(material = np.where(threshold < Grid._minimal_surface[surface](x,y,z),materials[1],materials[0]),
size = size,
comments = util.execution_stamp('Geom','from_minimal_surface'),
comments = util.execution_stamp('Grid','from_minimal_surface'),
)
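from_minimal_surface evaluates one of the implicit surfaces stored in Grid._minimal_surface on the cell centers and thresholds the result. A hedged numpy sketch using the standard gyroid expression (the exact expressions used by DAMASK live in that lookup table):

import numpy as np

cells, periods, threshold = np.array([16, 16, 16]), 1, 0.0

x, y, z = np.meshgrid(periods*2.0*np.pi*(np.arange(cells[0])+0.5)/cells[0],
                      periods*2.0*np.pi*(np.arange(cells[1])+0.5)/cells[1],
                      periods*2.0*np.pi*(np.arange(cells[2])+0.5)/cells[2],
                      indexing='ij', sparse=True)

gyroid = np.sin(x)*np.cos(y) + np.sin(y)*np.cos(z) + np.sin(z)*np.cos(x)   # standard gyroid
material = np.where(threshold < gyroid, 1, 0)                              # materials=(0,1)
print(material.shape, np.bincount(material.flatten()))                     # roughly 50/50 split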
def save(self,fname,compress=True):
"""
Store as VTK rectilinear grid.
Save as VTK rectilinear grid file.
Parameters
----------
@ -515,7 +523,7 @@ class Geom:
Compress with zlib algorithm. Defaults to True.
"""
v = VTK.from_rectilinear_grid(self.grid,self.size,self.origin)
v = VTK.from_rectilinear_grid(self.cells,self.size,self.origin)
v.add(self.material.flatten(order='F'),'material')
v.add_comments(self.comments)
@ -524,7 +532,7 @@ class Geom:
def save_ASCII(self,fname):
"""
Write a geom file.
Save as geom file.
Storing geometry files in ASCII format is deprecated.
This function will be removed in a future version of DAMASK.
@ -539,7 +547,7 @@ class Geom:
"""
warnings.warn('Support for ASCII-based geom format will be removed in DAMASK 3.1.0', DeprecationWarning)
header = [f'{len(self.comments)+4} header'] + self.comments \
+ ['grid a {} b {} c {}'.format(*self.grid),
+ ['grid a {} b {} c {}'.format(*self.cells),
'size x {} y {} z {}'.format(*self.size),
'origin x {} y {} z {}'.format(*self.origin),
'homogenization 1',
@ -548,13 +556,13 @@ class Geom:
format_string = '%g' if self.material.dtype in np.sctypes['float'] else \
'%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.material)))))
np.savetxt(fname,
self.material.reshape([self.grid[0],np.prod(self.grid[1:])],order='F').T,
self.material.reshape([self.cells[0],np.prod(self.cells[1:])],order='F').T,
header='\n'.join(header), fmt=format_string, comments='')
def show(self):
"""Show on screen."""
VTK.from_rectilinear_grid(self.grid,self.size,self.origin).show()
VTK.from_rectilinear_grid(self.cells,self.size,self.origin).show()
def add_primitive(self,dimension,center,exponent,
@ -566,11 +574,10 @@ class Geom:
----------
dimension : int or float numpy.ndarray of shape (3)
Dimension (diameter/side length) of the primitive. If given as
integers, grid point locations (cell centers) are addressed.
integers, cell centers are addressed.
If given as floats, coordinates are addressed.
center : int or float numpy.ndarray of shape (3)
Center of the primitive. If given as integers, grid point
coordinates (cell centers) are addressed.
Center of the primitive. If given as integers, cell centers are addressed.
If given as floats, coordinates in space are addressed.
exponent : numpy.ndarray of shape (3) or float
Exponents for the three axes.
@ -584,44 +591,44 @@ class Geom:
Retain original materials within primitive and fill outside.
Defaults to False.
periodic : Boolean, optional
Repeat primitive over boundaries. Defaults to True.
Assume grid to be periodic. Defaults to True.
"""
# radius and center
r = np.array(dimension)/2.0*self.size/self.grid if np.array(dimension).dtype in np.sctypes['int'] else \
r = np.array(dimension)/2.0*self.size/self.cells if np.array(dimension).dtype in np.sctypes['int'] else \
np.array(dimension)/2.0
c = (np.array(center) + .5)*self.size/self.grid if np.array(center).dtype in np.sctypes['int'] else \
c = (np.array(center) + .5)*self.size/self.cells if np.array(center).dtype in np.sctypes['int'] else \
(np.array(center) - self.origin)
coords = grid_filters.cell_coord0(self.grid,self.size,
-(0.5*(self.size + (self.size/self.grid
coords = grid_filters.coordinates0_point(self.cells,self.size,
-(0.5*(self.size + (self.size/self.cells
if np.array(center).dtype in np.sctypes['int'] else
0)) if periodic else c))
coords_rot = R.broadcast_to(tuple(self.grid))@coords
coords_rot = R.broadcast_to(tuple(self.cells))@coords
with np.errstate(all='ignore'):
mask = np.sum(np.power(coords_rot/r,2.0**np.array(exponent)),axis=-1) > 1.0
if periodic: # translate back to center
mask = np.roll(mask,((c/self.size-0.5)*self.grid).round().astype(int),(0,1,2))
mask = np.roll(mask,((c/self.size-0.5)*self.cells).round().astype(int),(0,1,2))
return Geom(material = np.where(np.logical_not(mask) if inverse else mask,
return Grid(material = np.where(np.logical_not(mask) if inverse else mask,
self.material,
np.nanmax(self.material)+1 if fill is None else fill),
size = self.size,
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','add_primitive')],
comments = self.comments+[util.execution_stamp('Grid','add_primitive')],
)
def mirror(self,directions,reflect=False):
"""
Mirror geometry along given directions.
Mirror grid along given directions.
Parameters
----------
directions : iterable containing str
Direction(s) along which the geometry is mirrored.
Direction(s) along which the grid is mirrored.
Valid entries are 'x', 'y', 'z'.
reflect : bool, optional
Reflect (include) outermost layers. Defaults to False.
@ -629,7 +636,7 @@ class Geom:
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
limits = [None,None] if reflect else [-2,0]
mat = self.material.copy()
@ -641,52 +648,52 @@ class Geom:
if 'z' in directions:
mat = np.concatenate([mat,mat[:,:,limits[0]:limits[1]:-1]],2)
return Geom(material = mat,
size = self.size/self.grid*np.asarray(mat.shape),
return Grid(material = mat,
size = self.size/self.cells*np.asarray(mat.shape),
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','mirror')],
comments = self.comments+[util.execution_stamp('Grid','mirror')],
)
def flip(self,directions):
"""
Flip geometry along given directions.
Flip grid along given directions.
Parameters
----------
directions : iterable containing str
Direction(s) along which the geometry is flipped.
Direction(s) along which the grid is flipped.
Valid entries are 'x', 'y', 'z'.
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
mat = np.flip(self.material, (valid.index(d) for d in directions if d in valid))
return Geom(material = mat,
return Grid(material = mat,
size = self.size,
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','flip')],
comments = self.comments+[util.execution_stamp('Grid','flip')],
)
def scale(self,grid,periodic=True):
def scale(self,cells,periodic=True):
"""
Scale geometry to new grid.
Scale grid to new cells.
Parameters
----------
grid : numpy.ndarray of shape (3)
Number of grid points in x,y,z direction.
cells : numpy.ndarray of shape (3)
Number of cells in x,y,z direction.
periodic : Boolean, optional
Assume geometry to be periodic. Defaults to True.
Assume grid to be periodic. Defaults to True.
"""
return Geom(material = ndimage.interpolation.zoom(
return Grid(material = ndimage.interpolation.zoom(
self.material,
grid/self.grid,
cells/self.cells,
output=self.material.dtype,
order=0,
mode=('wrap' if periodic else 'nearest'),
@ -694,13 +701,13 @@ class Geom:
),
size = self.size,
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','scale')],
comments = self.comments+[util.execution_stamp('Grid','scale')],
)
def clean(self,stencil=3,selection=None,periodic=True):
"""
Smooth geometry by selecting most frequent material index within given stencil at each location.
Smooth grid by selecting most frequent material index within given stencil at each location.
Parameters
----------
@ -709,7 +716,7 @@ class Geom:
selection : list, optional
Field values that can be altered. Defaults to all.
periodic : Boolean, optional
Assume geometry to be periodic. Defaults to True.
Assume grid to be periodic. Defaults to True.
"""
def mostFrequent(arr,selection=None):
@ -720,7 +727,7 @@ class Geom:
else:
return me
return Geom(material = ndimage.filters.generic_filter(
return Grid(material = ndimage.filters.generic_filter(
self.material,
mostFrequent,
size=(stencil if selection is None else stencil//2*2+1,)*3,
@ -729,7 +736,7 @@ class Geom:
).astype(self.material.dtype),
size = self.size,
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','clean')],
comments = self.comments+[util.execution_stamp('Grid','clean')],
)
@ -737,21 +744,21 @@ class Geom:
"""Renumber sorted material indices as 0,...,N-1."""
_,renumbered = np.unique(self.material,return_inverse=True)
return Geom(material = renumbered.reshape(self.grid),
return Grid(material = renumbered.reshape(self.cells),
size = self.size,
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','renumber')],
comments = self.comments+[util.execution_stamp('Grid','renumber')],
)
def rotate(self,R,fill=None):
"""
Rotate geometry (pad if required).
Rotate grid (pad if required).
Parameters
----------
R : damask.Rotation
Rotation to apply to the geometry.
Rotation to apply to the grid.
fill : int or float, optional
Material index to fill the corners. Defaults to material.max() + 1.
@ -773,25 +780,25 @@ class Geom:
else:
material_in = material_out
origin = self.origin-(np.asarray(material_in.shape)-self.grid)*.5 * self.size/self.grid
origin = self.origin-(np.asarray(material_in.shape)-self.cells)*.5 * self.size/self.cells
return Geom(material = material_in,
size = self.size/self.grid*np.asarray(material_in.shape),
return Grid(material = material_in,
size = self.size/self.cells*np.asarray(material_in.shape),
origin = origin,
comments = self.comments+[util.execution_stamp('Geom','rotate')],
comments = self.comments+[util.execution_stamp('Grid','rotate')],
)
def canvas(self,grid=None,offset=None,fill=None):
def canvas(self,cells=None,offset=None,fill=None):
"""
Crop or enlarge/pad geometry.
Crop or enlarge/pad grid.
Parameters
----------
grid : numpy.ndarray of shape (3)
Number of grid points in x,y,z direction.
cells : numpy.ndarray of shape (3)
Number of cells in x,y,z direction.
offset : numpy.ndarray of shape (3)
Offset (measured in grid points) from old to new geometry [0,0,0].
Offset (measured in cells) from old to new grid [0,0,0].
fill : int or float, optional
Material index to fill the background. Defaults to material.max() + 1.
@ -800,19 +807,19 @@ class Geom:
if fill is None: fill = np.nanmax(self.material) + 1
dtype = float if int(fill) != fill or self.material.dtype in np.sctypes['float'] else int
canvas = np.full(self.grid if grid is None else grid,fill,dtype)
canvas = np.full(self.cells if cells is None else cells,fill,dtype)
LL = np.clip( offset, 0,np.minimum(self.grid, grid+offset))
UR = np.clip( offset+grid, 0,np.minimum(self.grid, grid+offset))
ll = np.clip(-offset, 0,np.minimum( grid,self.grid-offset))
ur = np.clip(-offset+self.grid,0,np.minimum( grid,self.grid-offset))
LL = np.clip( offset, 0,np.minimum(self.cells, cells+offset))
UR = np.clip( offset+cells, 0,np.minimum(self.cells, cells+offset))
ll = np.clip(-offset, 0,np.minimum( cells,self.cells-offset))
ur = np.clip(-offset+self.cells,0,np.minimum( cells,self.cells-offset))
canvas[ll[0]:ur[0],ll[1]:ur[1],ll[2]:ur[2]] = self.material[LL[0]:UR[0],LL[1]:UR[1],LL[2]:UR[2]]
return Geom(material = canvas,
size = self.size/self.grid*np.asarray(canvas.shape),
origin = self.origin+offset*self.size/self.grid,
comments = self.comments+[util.execution_stamp('Geom','canvas')],
return Grid(material = canvas,
size = self.size/self.cells*np.asarray(canvas.shape),
origin = self.origin+offset*self.size/self.cells,
comments = self.comments+[util.execution_stamp('Grid','canvas')],
)
@ -834,10 +841,10 @@ class Geom:
mp = np.vectorize(mp)
mapper = dict(zip(from_material,to_material))
return Geom(material = mp(self.material,mapper).reshape(self.grid),
return Grid(material = mp(self.material,mapper).reshape(self.cells),
size = self.size,
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','substitute')],
comments = self.comments+[util.execution_stamp('Grid','substitute')],
)
@ -848,10 +855,10 @@ class Geom:
sort_idx = np.argsort(from_ma)
ma = np.unique(a)[sort_idx][np.searchsorted(from_ma,a,sorter = sort_idx)]
return Geom(material = ma.reshape(self.grid,order='F'),
return Grid(material = ma.reshape(self.cells,order='F'),
size = self.size,
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','sort')],
comments = self.comments+[util.execution_stamp('Grid','sort')],
)
@ -861,7 +868,7 @@ class Geom:
Different from themselves (or listed as triggers) within a given (cubic) vicinity,
i.e. within the region close to a grain/phase boundary.
ToDo: use include/exclude as in seeds.from_geom
ToDo: use include/exclude as in seeds.from_grid
Parameters
----------
@ -875,7 +882,7 @@ class Geom:
List of material indices that trigger a change.
Defaults to [], meaning that any different neighbor triggers a change.
periodic : Boolean, optional
Assume geometry to be periodic. Defaults to True.
Assume grid to be periodic. Defaults to True.
"""
def tainted_neighborhood(stencil,trigger):
@ -892,10 +899,10 @@ class Geom:
mode='wrap' if periodic else 'nearest',
extra_keywords={'trigger':trigger})
return Geom(material = np.where(mask, self.material + offset_,self.material),
return Grid(material = np.where(mask, self.material + offset_,self.material),
size = self.size,
origin = self.origin,
comments = self.comments+[util.execution_stamp('Geom','vicinity_offset')],
comments = self.comments+[util.execution_stamp('Grid','vicinity_offset')],
)
@ -905,20 +912,20 @@ class Geom:
Parameters
----------
periodic : bool, optional
Show boundaries across periodicity. Defaults to True.
periodic : Boolean, optional
Assume grid to be periodic. Defaults to True.
directions : iterable containing str, optional
Direction(s) along which the geometry is mirrored.
Direction(s) along which the boundaries are determined.
Valid entries are 'x', 'y', 'z'. Defaults to 'xyz'.
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
o = [[0, self.grid[0]+1, np.prod(self.grid[:2]+1)+self.grid[0]+1, np.prod(self.grid[:2]+1)],
[0, np.prod(self.grid[:2]+1), np.prod(self.grid[:2]+1)+1, 1],
[0, 1, self.grid[0]+1+1, self.grid[0]+1]] # offset for connectivity
o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
[0, np.prod(self.cells[:2]+1), np.prod(self.cells[:2]+1)+1, 1],
[0, 1, self.cells[0]+1+1, self.cells[0]+1]] # offset for connectivity
connectivity = []
for i,d in enumerate(['x','y','z']):
@ -933,5 +940,5 @@ class Geom:
base_nodes = np.argwhere(mask.flatten(order='F')).reshape(-1,1)
connectivity.append(np.block([base_nodes + o[i][k] for k in range(4)]))
coords = grid_filters.node_coord0(self.grid,self.size,self.origin).reshape(-1,3,order='F')
coords = grid_filters.coordinates0_node(self.cells,self.size,self.origin).reshape(-1,3,order='F')
return VTK.from_unstructured_grid(coords,np.vstack(connectivity),'QUAD')

View File

@ -226,9 +226,9 @@ class Orientation(Rotation):
"""
return super().__eq__(other) \
and self.family == other.family \
and self.lattice == other.lattice \
and self.parameters == other.parameters
and hasattr(other, 'family') and self.family == other.family \
and hasattr(other, 'lattice') and self.lattice == other.lattice \
and hasattr(other, 'parameters') and self.parameters == other.parameters
def __matmul__(self,other):
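The added hasattr checks make the comparison return False instead of raising AttributeError when the other object is not an Orientation. A toy sketch of the guard pattern (not the damask class itself):

class Orientationish:
    """Toy stand-in illustrating the guarded comparison (hypothetical, for illustration only)."""
    def __init__(self, family):
        self.family = family
    def __eq__(self, other):
        # mirrors the change above: check attribute existence before comparing
        return hasattr(other, 'family') and self.family == other.family

print(Orientationish('cubic') == Orientationish('cubic'))  # True
print(Orientationish('cubic') == 'cubic')                  # False instead of AttributeError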

View File

@ -46,13 +46,17 @@ class Result:
self.version_major = f.attrs['DADF5_version_major']
self.version_minor = f.attrs['DADF5_version_minor']
if self.version_major != 0 or not 7 <= self.version_minor <= 9:
if self.version_major != 0 or not 7 <= self.version_minor <= 11:
raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
self.structured = 'grid' in f['geometry'].attrs.keys()
self.structured = 'grid' in f['geometry'].attrs.keys() or \
'cells' in f['geometry'].attrs.keys()
if self.structured:
self.grid = f['geometry'].attrs['grid']
try:
self.cells = f['geometry'].attrs['cells']
except KeyError:
self.cells = f['geometry'].attrs['grid']
self.size = f['geometry'].attrs['size']
self.origin = f['geometry'].attrs['origin']
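Older DADF5 files store the cell count in the attribute 'grid', newer ones in 'cells'; the try/except above picks whichever is present. The same fallback, read directly with h5py ('my_results.hdf5' is a placeholder name):

import h5py

with h5py.File('my_results.hdf5', 'r') as f:     # placeholder DADF5 file name
    geometry = f['geometry'].attrs
    try:
        cells = geometry['cells']                # newer files
    except KeyError:
        cells = geometry['grid']                 # older files
    print(cells, geometry['size'], geometry['origin'])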
@ -558,19 +562,19 @@ class Result:
return dataset
@property
def cell_coordinates(self):
def coordinates0_point(self):
"""Return initial coordinates of the cell centers."""
if self.structured:
return grid_filters.cell_coord0(self.grid,self.size,self.origin).reshape(-1,3,order='F')
return grid_filters.coordinates0_point(self.cells,self.size,self.origin).reshape(-1,3,order='F')
else:
with h5py.File(self.fname,'r') as f:
return f['geometry/x_c'][()]
@property
def node_coordinates(self):
def coordinates0_node(self):
"""Return initial coordinates of the cell centers."""
if self.structured:
return grid_filters.node_coord0(self.grid,self.size,self.origin).reshape(-1,3,order='F')
return grid_filters.coordinates0_node(self.cells,self.size,self.origin).reshape(-1,3,order='F')
else:
with h5py.File(self.fname,'r') as f:
return f['geometry/x_n'][()]
@ -780,13 +784,16 @@ class Result:
@staticmethod
def _add_IPF_color(q,l):
def _add_IPF_color(l,q):
m = util.scale_to_coprime(np.array(l))
try:
lattice = {'fcc':'cF','bcc':'cI','hex':'hP'}[q['meta']['Lattice']]
except KeyError:
lattice = q['meta']['Lattice']
o = Orientation(rotation = (rfn.structured_to_unstructured(q['data'])),lattice=lattice)
try:
o = Orientation(rotation = (rfn.structured_to_unstructured(q['data'])),lattice=lattice)
except ValueError:
o = Orientation(rotation = q['data'],lattice=lattice)
return {
'data': np.uint8(o.IPF_color(l)*255),
@ -798,16 +805,17 @@ class Result:
'Creator': 'add_IPF_color'
}
}
def add_IPF_color(self,q,l):
def add_IPF_color(self,l,q='O'):
"""
Add RGB color tuple of inverse pole figure (IPF) color.
Parameters
----------
q : str
Label of the dataset containing the crystallographic orientation as quaternions.
l : numpy.array of shape (3)
Lab frame direction for inverse pole figure.
q : str
Label of the dataset containing the crystallographic orientation as quaternions.
Defaults to 'O'.
"""
self._add_generic_pointwise(self._add_IPF_color,{'q':q},{'l':l})
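A usage sketch of the swapped argument order, assuming a Result instance opened from an existing DADF5 file (file name hypothetical) with orientations stored under the default label 'O':

import damask

r = damask.Result('simulation.hdf5')   # hypothetical file
r.add_IPF_color([0,0,1])               # lab-frame direction first, orientation label defaults to 'O'
r.add_IPF_color([1,1,0],'O')           # explicit label still possible as second argument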
@ -1125,6 +1133,7 @@ class Result:
Arguments parsed to func.
"""
chunk_size = 1024**2//8
num_threads = damask.environment.options['DAMASK_NUM_THREADS']
pool = mp.Pool(int(num_threads) if num_threads is not None else None)
lock = mp.Manager().Lock()
@ -1148,7 +1157,15 @@ class Result:
dataset.attrs['Overwritten'] = 'Yes' if h5py3 else \
'Yes'.encode()
else:
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
if result[1]['data'].size >= chunk_size*2:
shape = result[1]['data'].shape
chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
maxshape=shape, chunks=chunks,
compression='gzip', compression_opts=6,
shuffle=True,fletcher32=True)
else:
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
now = datetime.datetime.now().astimezone()
dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
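The chunking branch targets roughly 1 MiB of 8-byte values per chunk and splits only along the first array axis. A stand-alone sketch of the same layout outside of Result (file and dataset names hypothetical):

import numpy as np
import h5py

data = np.random.random((200000,3,3))                               # e.g. a tensor field per material point
chunk_size = 1024**2//8                                             # number of float64 values in 1 MiB
chunks = (chunk_size//np.prod(data.shape[1:]),)+data.shape[1:]      # (14563,3,3): rows per chunk
with h5py.File('sketch.hdf5','w') as f:
    f.create_dataset('F',data=data,maxshape=data.shape,chunks=chunks,
                     compression='gzip',compression_opts=6,shuffle=True,fletcher32=True)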
@ -1218,7 +1235,7 @@ class Result:
topology=ET.SubElement(grid, 'Topology')
topology.attrib={'TopologyType': '3DCoRectMesh',
'Dimensions': '{} {} {}'.format(*self.grid+1)}
'Dimensions': '{} {} {}'.format(*self.cells+1)}
geometry=ET.SubElement(grid, 'Geometry')
geometry.attrib={'GeometryType':'Origin_DxDyDz'}
@ -1233,7 +1250,7 @@ class Result:
delta.attrib={'Format': 'XML',
'NumberType': 'Float',
'Dimensions': '3'}
delta.text="{} {} {}".format(*(self.size/self.grid))
delta.text="{} {} {}".format(*(self.size/self.cells))
with h5py.File(self.fname,'r') as f:
@ -1244,7 +1261,7 @@ class Result:
data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
data_items[-1].attrib={'Format': 'HDF',
'Precision': '8',
'Dimensions': '{} {} {} 3'.format(*(self.grid+1))}
'Dimensions': '{} {} {} 3'.format(*(self.cells+1))}
data_items[-1].text=f'{os.path.split(self.fname)[1]}:/{inc}/geometry/u_n'
for o,p in zip(['phases','homogenizations'],['out_type_ph','out_type_ho']):
@ -1267,8 +1284,8 @@ class Result:
data_items[-1].attrib={'Format': 'HDF',
'NumberType': number_type_map(dtype),
'Precision': f'{dtype.itemsize}',
'Dimensions': '{} {} {} {}'.format(*self.grid,1 if shape == () else
np.prod(shape))}
'Dimensions': '{} {} {} {}'.format(*self.cells,1 if shape == () else
np.prod(shape))}
data_items[-1].text=f'{os.path.split(self.fname)[1]}:{name}'
with open(self.fname.with_suffix('.xdmf').name,'w') as f:
@ -1291,7 +1308,7 @@ class Result:
if mode.lower()=='cell':
if self.structured:
v = VTK.from_rectilinear_grid(self.grid,self.size,self.origin)
v = VTK.from_rectilinear_grid(self.cells,self.size,self.origin)
else:
with h5py.File(self.fname,'r') as f:
v = VTK.from_unstructured_grid(f['/geometry/x_n'][()],
@ -1299,7 +1316,7 @@ class Result:
f['/geometry/T_c'].attrs['VTK_TYPE'] if h5py3 else \
f['/geometry/T_c'].attrs['VTK_TYPE'].decode())
elif mode.lower()=='point':
v = VTK.from_poly_data(self.cell_coordinates)
v = VTK.from_poly_data(self.coordinates0_point)
N_digits = int(np.floor(np.log10(max(1,int(self.increments[-1][3:])))))+1


@ -144,6 +144,11 @@ class Rotation:
return self.copy(rotation=Rotation(np.block([np.cos(pwr*phi),np.sin(pwr*phi)*p]))._standardize())
def __mul__(self,other):
"""Standard multiplication is not implemented."""
raise NotImplementedError('Use "R@b", i.e. matmul, to apply rotation "R" to object "b"')
def __matmul__(self,other):
"""
Rotation of vector, second or fourth order tensor, or rotation object.
@ -199,8 +204,16 @@ class Rotation:
def append(self,other):
"""Extend rotation array along first dimension with other array."""
return self.copy(rotation=np.vstack((self.quaternion,other.quaternion)))
"""
Extend rotation array along first dimension with other array(s).
Parameters
----------
other : Rotation or list of Rotations.
"""
return self.copy(rotation=np.vstack(tuple(map(lambda x:x.quaternion,
[self]+other if isinstance(other,list) else [self,other]))))
def flatten(self,order = 'C'):
@ -258,7 +271,7 @@ class Rotation:
"""Intermediate representation supporting quaternion averaging."""
return np.einsum('...i,...j',quat,quat)
if not weights:
if weights is None:
weights = np.ones(self.shape,dtype=float)
eig, vec = np.linalg.eig(np.sum(_M(self.quaternion) * weights[...,np.newaxis,np.newaxis],axis=-3) \
@ -763,7 +776,7 @@ class Rotation:
def _dg(eu,deg):
"""Return infinitesimal Euler space volume of bin(s)."""
phi_sorted = eu[np.lexsort((eu[:,0],eu[:,1],eu[:,2]))]
steps,size,_ = grid_filters.cell_coord0_gridSizeOrigin(phi_sorted)
steps,size,_ = grid_filters.cellsSizeOrigin_coordinates0_point(phi_sorted)
delta = np.radians(size/steps) if deg else size/steps
return delta[0]*2.0*np.sin(delta[1]/2.0)*delta[2] / 8.0 / np.pi**2 * np.sin(np.radians(eu[:,1]) if deg else eu[:,1])


@ -33,6 +33,10 @@ class Table:
"""Brief overview."""
return '\n'.join(['# '+c for c in self.comments])+'\n'+self.data.__repr__()
def __getitem__(self,item):
"""Return slice according to item."""
return self.__class__(data=self.data[item],shapes=self.shapes,comments=self.comments)
def __len__(self):
"""Number of rows."""
return len(self.data)
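A small sketch of the new slicing support, building a Table directly from an array (labels arbitrary):

import numpy as np
from damask import Table

t = Table(np.random.rand(10,4),{'v':(3,),'s':(1,)})
assert len(t) == 10
assert len(t[:5]) == 5        # slicing returns a new Table with the same shapes and comments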
@ -73,7 +77,7 @@ class Table:
@staticmethod
def load(fname):
"""
Load ASCII table file.
Load from ASCII table file.
In legacy style, the first line indicates the number of
subsequent header lines as "N header", with the last header line being
@ -131,7 +135,7 @@ class Table:
@staticmethod
def load_ang(fname):
"""
Load ang file.
Load from ang file.
A valid TSL ang file needs to contain the following columns:
* Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.


@ -76,7 +76,8 @@ class VTK:
nodes : numpy.ndarray of shape (:,3)
Spatial position of the nodes.
connectivity : numpy.ndarray of np.dtype = int
Cell connectivity (0-based), first dimension determines #Cells, second dimension determines #Nodes/Cell.
Cell connectivity (0-based), first dimension determines #Cells,
second dimension determines #Nodes/Cell.
cell_type : str
Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, TETRA, and HEXAHEDRON.
@ -91,7 +92,9 @@ class VTK:
vtk_data = vtk.vtkUnstructuredGrid()
vtk_data.SetPoints(vtk_nodes)
vtk_data.SetCells(eval(f'vtk.VTK_{cell_type.split("_",1)[-1].upper()}'),cells)
cell_types = {'TRIANGLE':vtk.VTK_TRIANGLE, 'QUAD':vtk.VTK_QUAD,
'TETRA' :vtk.VTK_TETRA, 'HEXAHEDRON':vtk.VTK_HEXAHEDRON}
vtk_data.SetCells(cell_types[cell_type.split("_",1)[-1].upper()],cells)
return VTK(vtk_data)
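A minimal from_unstructured_grid call matching the docstring above, with a single quad (coordinates arbitrary):

import numpy as np
from damask import VTK

nodes        = np.array([[0.,0.,0.],[1.,0.,0.],[1.,1.,0.],[0.,1.,0.]])
connectivity = np.array([[0,1,2,3]])                                 # 0-based node indices, one cell
v = VTK.from_unstructured_grid(nodes,connectivity,'QUAD')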
@ -128,7 +131,7 @@ class VTK:
@staticmethod
def load(fname,dataset_type=None):
"""
Create VTK from file.
Load from VTK file.
Parameters
----------
@ -181,7 +184,7 @@ class VTK:
writer.Write()
def save(self,fname,parallel=True,compress=True):
"""
Write to file.
Save as VTK file.
Parameters
----------


@ -4,38 +4,38 @@ Filters for operations on regular grids.
Notes
-----
The grids are defined as (x,y,z,...) where x is fastest and z is slowest.
This convention is consistent with the geom file format.
This convention is consistent with the layout in grid vtr files.
When converting to/from a plain list (e.g. storage in ASCII table),
the following operations are required for tensorial data:
D3 = D1.reshape(grid+(-1,),order='F').reshape(grid+(3,3))
D1 = D3.reshape(grid+(-1,)).reshape(-1,9,order='F')
D3 = D1.reshape(cells+(-1,),order='F').reshape(cells+(3,3))
D1 = D3.reshape(cells+(-1,)).reshape(-1,9,order='F')
"""
from scipy import spatial as _spatial
import numpy as _np
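A self-contained round-trip check of the reshape rule quoted in the module docstring above (cell counts arbitrary):

import numpy as np

cells = (4,3,2)
D3 = np.random.random(cells+(3,3))
D1 = D3.reshape(cells+(-1,)).reshape(-1,9,order='F')
assert np.allclose(D3, D1.reshape(cells+(-1,),order='F').reshape(cells+(3,3)))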
def _ks(size,grid,first_order=False):
def _ks(size,cells,first_order=False):
"""
Get wave numbers operator.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
grid : numpy.ndarray of shape (3)
number of grid points.
Physical size of the periodic field.
cells : numpy.ndarray of shape (3)
Number of cells.
first_order : bool, optional
correction for first order derivatives, defaults to False.
Correction for first order derivatives, defaults to False.
"""
k_sk = _np.where(_np.arange(grid[0])>grid[0]//2,_np.arange(grid[0])-grid[0],_np.arange(grid[0]))/size[0]
if grid[0]%2 == 0 and first_order: k_sk[grid[0]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011)
k_sk = _np.where(_np.arange(cells[0])>cells[0]//2,_np.arange(cells[0])-cells[0],_np.arange(cells[0]))/size[0]
if cells[0]%2 == 0 and first_order: k_sk[cells[0]//2] = 0 # Nyquist freq=0 for even cells (Johnson, MIT, 2011)
k_sj = _np.where(_np.arange(grid[1])>grid[1]//2,_np.arange(grid[1])-grid[1],_np.arange(grid[1]))/size[1]
if grid[1]%2 == 0 and first_order: k_sj[grid[1]//2] = 0 # Nyquist freq=0 for even grid (Johnson, MIT, 2011)
k_sj = _np.where(_np.arange(cells[1])>cells[1]//2,_np.arange(cells[1])-cells[1],_np.arange(cells[1]))/size[1]
if cells[1]%2 == 0 and first_order: k_sj[cells[1]//2] = 0 # Nyquist freq=0 for even cells (Johnson, MIT, 2011)
k_si = _np.arange(grid[2]//2+1)/size[2]
k_si = _np.arange(cells[2]//2+1)/size[2]
return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1)
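A worked check of the x-direction wave numbers for an even cell count, reproducing the expression inline since _ks is module-private (numbers arbitrary):

import numpy as np

n, L = 4, 1.0
k = np.where(np.arange(n)>n//2,np.arange(n)-n,np.arange(n))/L
assert np.allclose(k,[0,1,2,-1])     # with first_order=True the Nyquist entry k[n//2] is zeroed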
@ -47,9 +47,9 @@ def curl(size,field):
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
field : numpy.ndarray of shape (:,:,:,3) or (:,:,:,3,3)
periodic field of which the curl is calculated.
Periodic field of which the curl is calculated.
"""
n = _np.prod(field.shape[3:])
@ -73,9 +73,9 @@ def divergence(size,field):
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
field : numpy.ndarray of shape (:,:,:,3) or (:,:,:,3,3)
periodic field of which the divergence is calculated.
Periodic field of which the divergence is calculated.
"""
n = _np.prod(field.shape[3:])
@ -95,9 +95,9 @@ def gradient(size,field):
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
field : numpy.ndarray of shape (:,:,:,1) or (:,:,:,3)
periodic field of which the gradient is calculated.
Periodic field of which the gradient is calculated.
"""
n = _np.prod(field.shape[3:])
@ -110,39 +110,39 @@ def gradient(size,field):
return _np.fft.irfftn(grad_,axes=(0,1,2),s=field.shape[:3])
def cell_coord0(grid,size,origin=_np.zeros(3)):
def coordinates0_point(cells,size,origin=_np.zeros(3)):
"""
Cell center positions (undeformed).
Parameters
----------
grid : numpy.ndarray of shape (3)
number of grid points.
cells : numpy.ndarray of shape (3)
Number of cells.
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
origin : numpy.ndarray, optional
physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
Physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
"""
start = origin + size/grid*.5
end = origin + size - size/grid*.5
start = origin + size/cells*.5
end = origin + size - size/cells*.5
return _np.stack(_np.meshgrid(_np.linspace(start[0],end[0],grid[0]),
_np.linspace(start[1],end[1],grid[1]),
_np.linspace(start[2],end[2],grid[2]),indexing = 'ij'),
return _np.stack(_np.meshgrid(_np.linspace(start[0],end[0],cells[0]),
_np.linspace(start[1],end[1],cells[1]),
_np.linspace(start[2],end[2],cells[2]),indexing = 'ij'),
axis = -1)
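A small numeric check of the renamed function for two cells of unit spacing along x:

import numpy as np
from damask import grid_filters

coords = grid_filters.coordinates0_point(np.array([2,1,1]),np.array([2.,1.,1.]))
assert coords.shape == (2,1,1,3)
assert np.allclose(coords[:,0,0,0],[0.5,1.5])   # cell centers sit half a spacing from the boundary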
def cell_displacement_fluct(size,F):
def displacement_fluct_point(size,F):
"""
Cell center displacement field from fluctuation part of the deformation gradient field.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
Deformation gradient field.
"""
integrator = 0.5j*size/_np.pi
@ -160,194 +160,196 @@ def cell_displacement_fluct(size,F):
return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])
def cell_displacement_avg(size,F):
def displacement_avg_point(size,F):
"""
Cell center displacement field from average part of the deformation gradient field.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
Deformation gradient field.
"""
F_avg = _np.average(F,axis=(0,1,2))
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),cell_coord0(F.shape[:3],size))
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_point(F.shape[:3],size))
def cell_displacement(size,F):
def displacement_point(size,F):
"""
Cell center displacement field from deformation gradient field.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
Deformation gradient field.
"""
return cell_displacement_avg(size,F) + cell_displacement_fluct(size,F)
return displacement_avg_point(size,F) + displacement_fluct_point(size,F)
def cell_coord(size,F,origin=_np.zeros(3)):
def coordinates_point(size,F,origin=_np.zeros(3)):
"""
Cell center positions.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
Deformation gradient field.
origin : numpy.ndarray of shape (3), optional
physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
Physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
"""
return cell_coord0(F.shape[:3],size,origin) + cell_displacement(size,F)
return coordinates0_point(F.shape[:3],size,origin) + displacement_point(size,F)
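A sketch tying the renamed point-based helpers together for a homogeneous deformation gradient (numbers arbitrary):

import numpy as np
from damask import grid_filters

cells, size = (4,4,4), np.ones(3)
F  = np.broadcast_to(np.diag([1.1,1.0,1.0]),cells+(3,3))     # uniform 10% stretch along x
assert np.allclose(grid_filters.displacement_fluct_point(size,F),0.0)
x  = grid_filters.coordinates_point(size,F)
x0 = grid_filters.coordinates0_point(cells,size)
assert np.allclose(x[...,0],1.1*x0[...,0])                   # x coordinates scale with F_11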
def cell_coord0_gridSizeOrigin(coord0,ordered=True):
def cellsSizeOrigin_coordinates0_point(coordinates0,ordered=True):
"""
Return grid 'DNA', i.e. grid, size, and origin from 1D array of cell positions.
Return grid 'DNA', i.e. cells, size, and origin from 1D array of point positions.
Parameters
----------
coord0 : numpy.ndarray of shape (:,3)
undeformed cell coordinates.
coordinates0 : numpy.ndarray of shape (:,3)
Undeformed cell coordinates.
ordered : bool, optional
expect coord0 data to be ordered (x fast, z slow).
Expect coordinates0 data to be ordered (x fast, z slow).
Defaults to True.
"""
coords = [_np.unique(coord0[:,i]) for i in range(3)]
coords = [_np.unique(coordinates0[:,i]) for i in range(3)]
mincorner = _np.array(list(map(min,coords)))
maxcorner = _np.array(list(map(max,coords)))
grid = _np.array(list(map(len,coords)),'i')
size = grid/_np.maximum(grid-1,1) * (maxcorner-mincorner)
delta = size/grid
cells = _np.array(list(map(len,coords)),'i')
size = cells/_np.maximum(cells-1,1) * (maxcorner-mincorner)
delta = size/cells
origin = mincorner - delta*.5
# 1D/2D: size/origin combination undefined, set origin to 0.0
size [_np.where(grid==1)] = origin[_np.where(grid==1)]*2.
origin[_np.where(grid==1)] = 0.0
size [_np.where(cells==1)] = origin[_np.where(cells==1)]*2.
origin[_np.where(cells==1)] = 0.0
if grid.prod() != len(coord0):
raise ValueError('Data count {len(coord0)} does not match grid {grid}.')
if cells.prod() != len(coordinates0):
raise ValueError(f'Data count {len(coordinates0)} does not match cells {cells}.')
start = origin + delta*.5
end = origin - delta*.5 + size
atol = _np.max(size)*5e-2
if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],grid[0]),atol=atol) and \
_np.allclose(coords[1],_np.linspace(start[1],end[1],grid[1]),atol=atol) and \
_np.allclose(coords[2],_np.linspace(start[2],end[2],grid[2]),atol=atol)):
raise ValueError('Regular grid spacing violated.')
if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],cells[0]),atol=atol) and \
_np.allclose(coords[1],_np.linspace(start[1],end[1],cells[1]),atol=atol) and \
_np.allclose(coords[2],_np.linspace(start[2],end[2],cells[2]),atol=atol)):
raise ValueError('Regular cell spacing violated.')
if ordered and not _np.allclose(coord0.reshape(tuple(grid)+(3,),order='F'),cell_coord0(grid,size,origin),atol=atol):
if ordered and not _np.allclose(coordinates0.reshape(tuple(cells)+(3,),order='F'),
coordinates0_point(cells,size,origin),atol=atol):
raise ValueError('Input data is not ordered (x fast, z slow).')
return (grid,size,origin)
return (cells,size,origin)
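The renamed function remains the inverse of coordinates0_point, mirroring the round trip exercised in the tests further below (values arbitrary):

import numpy as np
from damask import grid_filters

cells, size, origin = np.array([4,3,2]), np.array([1.,2.,4.]), np.array([0.,0.,1.])
coords0 = grid_filters.coordinates0_point(cells,size,origin).reshape(-1,3,order='F')
c,s,o = grid_filters.cellsSizeOrigin_coordinates0_point(coords0)
assert np.all(c == cells) and np.allclose(s,size) and np.allclose(o,origin)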
def coord0_check(coord0):
def coordinates0_check(coordinates0):
"""
Check whether coordinates lie on a regular grid.
Parameters
----------
coord0 : numpy.ndarray
array of undeformed cell coordinates.
coordinates0 : numpy.ndarray
Array of undeformed cell coordinates.
"""
cell_coord0_gridSizeOrigin(coord0,ordered=True)
cellsSizeOrigin_coordinates0_point(coordinates0,ordered=True)
def node_coord0(grid,size,origin=_np.zeros(3)):
def coordinates0_node(cells,size,origin=_np.zeros(3)):
"""
Nodal positions (undeformed).
Parameters
----------
grid : numpy.ndarray of shape (3)
number of grid points.
cells : numpy.ndarray of shape (3)
Number of cells.
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
origin : numpy.ndarray of shape (3), optional
physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
Physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
"""
return _np.stack(_np.meshgrid(_np.linspace(origin[0],size[0]+origin[0],grid[0]+1),
_np.linspace(origin[1],size[1]+origin[1],grid[1]+1),
_np.linspace(origin[2],size[2]+origin[2],grid[2]+1),indexing = 'ij'),
return _np.stack(_np.meshgrid(_np.linspace(origin[0],size[0]+origin[0],cells[0]+1),
_np.linspace(origin[1],size[1]+origin[1],cells[1]+1),
_np.linspace(origin[2],size[2]+origin[2],cells[2]+1),indexing = 'ij'),
axis = -1)
def node_displacement_fluct(size,F):
def displacement_fluct_node(size,F):
"""
Nodal displacement field from fluctuation part of the deformation gradient field.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
Deformation gradient field.
"""
return cell_2_node(cell_displacement_fluct(size,F))
return point_to_node(displacement_fluct_point(size,F))
def node_displacement_avg(size,F):
def displacement_avg_node(size,F):
"""
Nodal displacement field from average part of the deformation gradient field.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
Deformation gradient field.
"""
F_avg = _np.average(F,axis=(0,1,2))
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),node_coord0(F.shape[:3],size))
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_node(F.shape[:3],size))
def node_displacement(size,F):
def displacement_node(size,F):
"""
Nodal displacement field from deformation gradient field.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
Deformation gradient field.
"""
return node_displacement_avg(size,F) + node_displacement_fluct(size,F)
return displacement_avg_node(size,F) + displacement_fluct_node(size,F)
def node_coord(size,F,origin=_np.zeros(3)):
def coordinates_node(size,F,origin=_np.zeros(3)):
"""
Nodal positions.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size of the periodic field.
Physical size of the periodic field.
F : numpy.ndarray
deformation gradient field.
Deformation gradient field.
origin : numpy.ndarray of shape (3), optional
physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
Physical origin of the periodic field. Defaults to [0.0,0.0,0.0].
"""
return node_coord0(F.shape[:3],size,origin) + node_displacement(size,F)
return coordinates0_node(F.shape[:3],size,origin) + displacement_node(size,F)
def cell_2_node(cell_data):
"""Interpolate periodic cell data to nodal data."""
def point_to_node(cell_data):
"""Interpolate periodic point data to nodal data."""
n = ( cell_data + _np.roll(cell_data,1,(0,1,2))
+ _np.roll(cell_data,1,(0,)) + _np.roll(cell_data,1,(1,)) + _np.roll(cell_data,1,(2,))
+ _np.roll(cell_data,1,(0,1)) + _np.roll(cell_data,1,(1,2)) + _np.roll(cell_data,1,(2,0)))*0.125
@ -355,8 +357,8 @@ def cell_2_node(cell_data):
return _np.pad(n,((0,1),(0,1),(0,1))+((0,0),)*len(cell_data.shape[3:]),mode='wrap')
def node_2_cell(node_data):
"""Interpolate periodic nodal data to cell data."""
def node_2_point(node_data):
"""Interpolate periodic nodal data to point data."""
c = ( node_data + _np.roll(node_data,1,(0,1,2))
+ _np.roll(node_data,1,(0,)) + _np.roll(node_data,1,(1,)) + _np.roll(node_data,1,(2,))
+ _np.roll(node_data,1,(0,1)) + _np.roll(node_data,1,(1,2)) + _np.roll(node_data,1,(2,0)))*0.125
@ -364,57 +366,57 @@ def node_2_cell(node_data):
return c[1:,1:,1:]
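A quick shape check of the renamed interpolation helper for a scalar field (values arbitrary):

import numpy as np
from damask import grid_filters

p = np.ones((4,3,2))                               # one scalar per cell
n = grid_filters.point_to_node(p)
assert n.shape == (5,4,3) and np.allclose(n,1.0)   # one value per node, padded periodically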
def node_coord0_gridSizeOrigin(coord0,ordered=True):
def cellsSizeOrigin_coordinates0_node(coordinates0,ordered=True):
"""
Return grid 'DNA', i.e. grid, size, and origin from 1D array of nodal positions.
Return grid 'DNA', i.e. cells, size, and origin from 1D array of nodal positions.
Parameters
----------
coord0 : numpy.ndarray of shape (:,3)
undeformed nodal coordinates.
coordinates0 : numpy.ndarray of shape (:,3)
Undeformed nodal coordinates.
ordered : bool, optional
expect coord0 data to be ordered (x fast, z slow).
Expect coordinates0 data to be ordered (x fast, z slow).
Defaults to True.
"""
coords = [_np.unique(coord0[:,i]) for i in range(3)]
coords = [_np.unique(coordinates0[:,i]) for i in range(3)]
mincorner = _np.array(list(map(min,coords)))
maxcorner = _np.array(list(map(max,coords)))
grid = _np.array(list(map(len,coords)),'i') - 1
cells = _np.array(list(map(len,coords)),'i') - 1
size = maxcorner-mincorner
origin = mincorner
if (grid+1).prod() != len(coord0):
raise ValueError('Data count {len(coord0)} does not match grid {grid}.')
if (cells+1).prod() != len(coordinates0):
raise ValueError(f'Data count {len(coordinates0)} does not match cells {cells}.')
atol = _np.max(size)*5e-2
if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \
_np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],grid[1]+1),atol=atol) and \
_np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],grid[2]+1),atol=atol)):
raise ValueError('Regular grid spacing violated.')
if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],cells[0]+1),atol=atol) and \
_np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],cells[1]+1),atol=atol) and \
_np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],cells[2]+1),atol=atol)):
raise ValueError('Regular cell spacing violated.')
if ordered and not _np.allclose(coord0.reshape(tuple(grid+1)+(3,),order='F'),node_coord0(grid,size,origin),atol=atol):
if ordered and not _np.allclose(coordinates0.reshape(tuple(cells+1)+(3,),order='F'),
coordinates0_node(cells,size,origin),atol=atol):
raise ValueError('Input data is not ordered (x fast, z slow).')
return (grid,size,origin)
return (cells,size,origin)
def regrid(size,F,new_grid):
def regrid(size,F,cells):
"""
Return mapping from coordinates in deformed configuration to a regular grid.
Parameters
----------
size : numpy.ndarray of shape (3)
physical size
Physical size.
F : numpy.ndarray of shape (:,:,:,3,3)
deformation gradient field
new_grid : numpy.ndarray of shape (3)
new grid for undeformed coordinates
Deformation gradient field.
cells : numpy.ndarray of shape (3)
Cell count along x,y,z of remapping grid.
"""
c = cell_coord0(F.shape[:3],size) \
+ cell_displacement_avg(size,F) \
+ cell_displacement_fluct(size,F)
c = coordinates_point(size,F)
outer = _np.dot(_np.average(F,axis=(0,1,2)),size)
for d in range(3):
@ -422,4 +424,4 @@ def regrid(size,F,new_grid):
c[_np.where(c[:,:,:,d]>outer[d])] -= outer[d]
tree = _spatial.cKDTree(c.reshape(-1,3),boxsize=outer)
return tree.query(cell_coord0(new_grid,outer))[1].flatten()
return tree.query(coordinates0_point(cells,outer))[1].flatten()


@ -7,7 +7,7 @@ from . import util
from . import grid_filters
def from_random(size,N_seeds,grid=None,rng_seed=None):
def from_random(size,N_seeds,cells=None,rng_seed=None):
"""
Random seeding in space.
@ -17,21 +17,21 @@ def from_random(size,N_seeds,grid=None,rng_seed=None):
Physical size of the seeding domain.
N_seeds : int
Number of seeds.
grid : numpy.ndarray of shape (3), optional.
If given, ensures that all seeds initiate one grain if using a
standard Voronoi tessellation.
cells : numpy.ndarray of shape (3), optional.
If given, ensures that each seed results in a grain when a standard Voronoi
tessellation is performed using the given grid resolution (i.e. size/cells).
rng_seed : {None, int, array_like[ints], SeedSequence, BitGenerator, Generator}, optional
A seed to initialize the BitGenerator. Defaults to None.
If None, then fresh, unpredictable entropy will be pulled from the OS.
"""
rng = _np.random.default_rng(rng_seed)
if grid is None:
if cells is None:
coords = rng.random((N_seeds,3)) * size
else:
grid_coords = grid_filters.cell_coord0(grid,size).reshape(-1,3,order='F')
coords = grid_coords[rng.choice(_np.prod(grid),N_seeds, replace=False)] \
+ _np.broadcast_to(size/grid,(N_seeds,3))*(rng.random((N_seeds,3))*.5-.25) # wobble without leaving grid
grid_coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3,order='F')
coords = grid_coords[rng.choice(_np.prod(cells),N_seeds, replace=False)] \
+ _np.broadcast_to(size/cells,(N_seeds,3))*(rng.random((N_seeds,3))*.5-.25) # wobble without leaving cells
return coords
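A sketch of the renamed keyword in use, leaning on the documented guarantee that every seed ends up as a grain when tessellating at the matching resolution (numbers arbitrary):

import numpy as np
from damask import seeds, Grid

size  = np.ones(3)
cells = np.array([16,16,16])
s = seeds.from_random(size,20,cells,rng_seed=0)
g = Grid.from_Voronoi_tessellation(cells,size,s)
assert len(np.unique(g.material)) == 20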
@ -77,14 +77,14 @@ def from_Poisson_disc(size,N_seeds,N_candidates,distance,periodic=True,rng_seed=
return coords
def from_geom(geom,selection=None,invert=False,average=False,periodic=True):
def from_grid(grid,selection=None,invert=False,average=False,periodic=True):
"""
Create seed from existing geometry description.
Create seed from existing grid description.
Parameters
----------
geom : damask.Geom
Geometry, from which the material IDs are used as seeds.
grid : damask.Grid
Grid, from which the material IDs are used as seeds.
selection : iterable of integers, optional
Material IDs to consider.
invert : boolean, false
@ -95,10 +95,10 @@ def from_geom(geom,selection=None,invert=False,average=False,periodic=True):
Center of gravity with periodic boundaries.
"""
material = geom.material.reshape((-1,1),order='F')
mask = _np.full(geom.grid.prod(),True,dtype=bool) if selection is None else \
material = grid.material.reshape((-1,1),order='F')
mask = _np.full(grid.cells.prod(),True,dtype=bool) if selection is None else \
_np.isin(material,selection,invert=invert).flatten()
coords = grid_filters.cell_coord0(geom.grid,geom.size).reshape(-1,3,order='F')
coords = grid_filters.coordinates0_point(grid.cells,grid.size).reshape(-1,3,order='F')
if not average:
return (coords[mask],material[mask])
@ -106,8 +106,8 @@ def from_geom(geom,selection=None,invert=False,average=False,periodic=True):
materials = _np.unique(material[mask])
coords_ = _np.zeros((materials.size,3),dtype=float)
for i,mat in enumerate(materials):
pc = (2*_np.pi*coords[material[:,0]==mat,:]-geom.origin)/geom.size
coords_[i] = geom.origin + geom.size / 2 / _np.pi * (_np.pi +
pc = (2*_np.pi*coords[material[:,0]==mat,:]-grid.origin)/grid.size
coords_[i] = grid.origin + grid.size / 2 / _np.pi * (_np.pi +
_np.arctan2(-_np.average(_np.sin(pc),axis=0),
-_np.average(_np.cos(pc),axis=0))) \
if periodic else \

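The periodic branch computes each grain's center of gravity as a circular mean, mapping positions to angles and averaging on the unit circle. A self-contained sketch of that idea (hypothetical helper, origin taken as zero):

import numpy as np

def periodic_mean(x,length):
    phi = 2*np.pi*x/length
    return length/(2*np.pi)*(np.pi + np.arctan2(-np.mean(np.sin(phi)),-np.mean(np.cos(phi))))

assert np.isclose(periodic_mean(np.array([9.0,0.0]),10.0),9.5)   # not the naive arithmetic mean 4.5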

@ -1,10 +1,10 @@
homogenization:
SX:
N_constituents: 2
mech: {type: none}
N_constituents: 1
mechanics: {type: none}
Taylor:
N_constituents: 2
mech: {type: isostrain}
mechanics: {type: isostrain}
material:
- constituents:
@ -34,11 +34,11 @@ material:
phase:
Aluminum:
lattice: cF
mech:
mechanics:
output: [F, P, F_e, F_p, L_p]
elasticity: {C_11: 106.75e9, C_12: 60.41e9, C_44: 28.34e9, type: hooke}
Steel:
lattice: cI
mech:
mechanics:
output: [F, P, F_e, F_p, L_p]
elasticity: {C_11: 233.3e9, C_12: 135.5e9, C_44: 118.0e9, type: hooke}


@ -1,8 +1,9 @@
import pytest
import numpy as np
from vtk.util.numpy_support import numpy_to_vtk as np_to_vtk
from damask import VTK
from damask import Geom
from damask import Grid
from damask import Table
from damask import Rotation
from damask import util
@ -10,9 +11,9 @@ from damask import seeds
from damask import grid_filters
def geom_equal(a,b):
def grid_equal(a,b):
return np.all(a.material == b.material) and \
np.all(a.grid == b.grid) and \
np.all(a.cells == b.cells) and \
np.allclose(a.size, b.size) and \
str(a.diff(b)) == str(b.diff(a))
@ -23,15 +24,15 @@ def default():
np.arange(2,42),
np.ones(40,dtype=int)*2,
np.arange(1,41))).reshape(8,5,4,order='F')
return Geom(x,[8e-6,5e-6,4e-6])
return Grid(x,[8e-6,5e-6,4e-6])
@pytest.fixture
def ref_path(ref_path_base):
"""Directory containing reference results."""
return ref_path_base/'Geom'
return ref_path_base/'Grid'
class TestGeom:
class TestGrid:
@pytest.fixture(autouse=True)
def _patch_execution_stamp(self, patch_execution_stamp):
@ -46,7 +47,7 @@ class TestGeom:
def test_diff_not_equal(self,default):
new = Geom(default.material[1:,1:,1:]+1,default.size*.9,np.ones(3)-default.origin,comments=['modified'])
new = Grid(default.material[1:,1:,1:]+1,default.size*.9,np.ones(3)-default.origin,comments=['modified'])
assert str(default.diff(new)) != ''
def test_repr(self,default):
@ -54,36 +55,44 @@ class TestGeom:
def test_read_write_vtr(self,default,tmp_path):
default.save(tmp_path/'default')
new = Geom.load(tmp_path/'default.vtr')
assert geom_equal(new,default)
new = Grid.load(tmp_path/'default.vtr')
assert grid_equal(new,default)
def test_invalid_vtr(self,tmp_path):
def test_invalid_no_material(self,tmp_path):
v = VTK.from_rectilinear_grid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0)
v.save(tmp_path/'no_materialpoint.vtr',parallel=False)
with pytest.raises(ValueError):
Geom.load(tmp_path/'no_materialpoint.vtr')
Grid.load(tmp_path/'no_materialpoint.vtr')
def test_invalid_material(self):
def test_invalid_spacing(self,tmp_path,default):
default.save(tmp_path/'spacing_ok.vtr')
vtk = VTK.load(tmp_path/'spacing_ok.vtr')
vtk.vtk_data.SetXCoordinates(np_to_vtk(np.sort(np.random.random(default.cells[0]))))
vtk.save(tmp_path/'invalid_spacing.vtr',parallel=False)
with pytest.raises(ValueError):
Grid.load(tmp_path/'invalid_spacing.vtr')
def test_invalid_material_type(self):
with pytest.raises(TypeError):
Geom(np.zeros((3,3,3),dtype='complex'),np.ones(3))
Grid(np.zeros((3,3,3),dtype='complex'),np.ones(3))
def test_cast_to_int(self):
g = Geom(np.zeros((3,3,3)),np.ones(3))
g = Grid(np.zeros((3,3,3)),np.ones(3))
assert g.material.dtype in np.sctypes['int']
def test_invalid_size(self,default):
with pytest.raises(ValueError):
Geom(default.material[1:,1:,1:],
Grid(default.material[1:,1:,1:],
size=np.ones(2))
def test_save_load_ASCII(self,default,tmp_path):
default.save_ASCII(tmp_path/'ASCII')
default.material -= 1
assert geom_equal(Geom.load_ASCII(tmp_path/'ASCII'),default)
assert grid_equal(Grid.load_ASCII(tmp_path/'ASCII'),default)
def test_invalid_origin(self,default):
with pytest.raises(ValueError):
Geom(default.material[1:,1:,1:],
Grid(default.material[1:,1:,1:],
size=np.ones(3),
origin=np.ones(4))
@ -91,14 +100,14 @@ class TestGeom:
def test_invalid_materials_shape(self,default):
material = np.ones((3,3))
with pytest.raises(ValueError):
Geom(material,
Grid(material,
size=np.ones(3))
def test_invalid_materials_type(self,default):
material = np.random.randint(1,300,(3,4,5))==1
with pytest.raises(TypeError):
Geom(material)
Grid(material)
@pytest.mark.parametrize('directions,reflect',[
@ -113,7 +122,7 @@ class TestGeom:
tag = f'directions_{"-".join(directions)}+reflect_{reflect}'
reference = ref_path/f'mirror_{tag}.vtr'
if update: modified.save(reference)
assert geom_equal(Geom.load(reference),
assert grid_equal(Grid.load(reference),
modified)
@ -135,17 +144,17 @@ class TestGeom:
tag = f'directions_{"-".join(directions)}'
reference = ref_path/f'flip_{tag}.vtr'
if update: modified.save(reference)
assert geom_equal(Geom.load(reference),
assert grid_equal(Grid.load(reference),
modified)
def test_flip_invariant(self,default):
assert geom_equal(default,default.flip([]))
assert grid_equal(default,default.flip([]))
@pytest.mark.parametrize('direction',[['x'],['x','y']])
def test_flip_double(self,default,direction):
assert geom_equal(default,default.flip(direction).flip(direction))
assert grid_equal(default,default.flip(direction).flip(direction))
@pytest.mark.parametrize('directions',[(1,2,'y'),('a','b','x'),[1]])
@ -162,12 +171,12 @@ class TestGeom:
reference = ref_path/f'clean_{stencil}_{"+".join(map(str,[None] if selection is None else selection))}_{periodic}'
if update and stencil > 1:
current.save(reference)
assert geom_equal(Geom.load(reference) if stencil > 1 else default,
assert grid_equal(Grid.load(reference) if stencil > 1 else default,
current
)
@pytest.mark.parametrize('grid',[
@pytest.mark.parametrize('cells',[
(10,11,10),
[10,13,10],
np.array((10,10,10)),
@ -176,12 +185,12 @@ class TestGeom:
np.array((10,20,2))
]
)
def test_scale(self,default,update,ref_path,grid):
modified = default.scale(grid)
tag = f'grid_{util.srepr(grid,"-")}'
def test_scale(self,default,update,ref_path,cells):
modified = default.scale(cells)
tag = f'grid_{util.srepr(cells,"-")}'
reference = ref_path/f'scale_{tag}.vtr'
if update: modified.save(reference)
assert geom_equal(Geom.load(reference),
assert grid_equal(Grid.load(reference),
modified)
@ -190,21 +199,21 @@ class TestGeom:
for m in np.unique(material):
material[material==m] = material.max() + np.random.randint(1,30)
default.material -= 1
modified = Geom(material,
modified = Grid(material,
default.size,
default.origin)
assert not geom_equal(modified,default)
assert geom_equal(default,
assert not grid_equal(modified,default)
assert grid_equal(default,
modified.renumber())
def test_substitute(self,default):
offset = np.random.randint(1,500)
modified = Geom(default.material + offset,
modified = Grid(default.material + offset,
default.size,
default.origin)
assert not geom_equal(modified,default)
assert geom_equal(default,
assert not grid_equal(modified,default)
assert grid_equal(default,
modified.substitute(np.arange(default.material.max())+1+offset,
np.arange(default.material.max())+1))
@ -212,12 +221,12 @@ class TestGeom:
f = np.unique(default.material.flatten())[:np.random.randint(1,default.material.max())]
t = np.random.permutation(f)
modified = default.substitute(f,t)
assert np.array_equiv(t,f) or (not geom_equal(modified,default))
assert geom_equal(default, modified.substitute(t,f))
assert np.array_equiv(t,f) or (not grid_equal(modified,default))
assert grid_equal(default, modified.substitute(t,f))
def test_sort(self):
grid = np.random.randint(5,20,3)
m = Geom(np.random.randint(1,20,grid)*3,np.ones(3)).sort().material.flatten(order='F')
cells = np.random.randint(5,20,3)
m = Grid(np.random.randint(1,20,cells)*3,np.ones(3)).sort().material.flatten(order='F')
for i,v in enumerate(m):
assert i==0 or v > m[:i].max() or v in m[:i]
@ -227,7 +236,7 @@ class TestGeom:
modified = default.copy()
for i in range(np.rint(360/axis_angle[3]).astype(int)):
modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True))
assert geom_equal(default,modified)
assert grid_equal(default,modified)
@pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0],
@ -237,15 +246,15 @@ class TestGeom:
tag = f'Eulers_{util.srepr(Eulers,"-")}'
reference = ref_path/f'rotate_{tag}.vtr'
if update: modified.save(reference)
assert geom_equal(Geom.load(reference),
assert grid_equal(Grid.load(reference),
modified)
def test_canvas(self,default):
grid = default.grid
cells = default.cells
grid_add = np.random.randint(0,30,(3))
modified = default.canvas(grid + grid_add)
assert np.all(modified.material[:grid[0],:grid[1],:grid[2]] == default.material)
modified = default.canvas(cells + grid_add)
assert np.all(modified.material[:cells[0],:cells[1],:cells[2]] == default.material)
@pytest.mark.parametrize('center1,center2',[(np.random.random(3)*.5,np.random.random()*8),
@ -263,8 +272,8 @@ class TestGeom:
o = np.random.random(3)-.5
g = np.random.randint(8,32,(3))
s = np.random.random(3)+.5
G_1 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center1,exponent)
G_2 = Geom(np.ones(g,'i'),s,o).add_primitive(diameter,center2,exponent)
G_1 = Grid(np.ones(g,'i'),s,o).add_primitive(diameter,center1,exponent)
G_2 = Grid(np.ones(g,'i'),s,o).add_primitive(diameter,center2,exponent)
assert np.count_nonzero(G_1.material!=2) == np.count_nonzero(G_2.material!=2)
@ -279,9 +288,9 @@ class TestGeom:
g = np.random.randint(8,32,(3))
s = np.random.random(3)+.5
fill = np.random.randint(10)+2
G_1 = Geom(np.ones(g,'i'),s).add_primitive(.3,center,1,fill,inverse=inverse,periodic=periodic)
G_2 = Geom(np.ones(g,'i'),s).add_primitive(.3,center,1,fill,Rotation.from_random(),inverse,periodic=periodic)
assert geom_equal(G_1,G_2)
G_1 = Grid(np.ones(g,'i'),s).add_primitive(.3,center,1,fill,inverse=inverse,periodic=periodic)
G_2 = Grid(np.ones(g,'i'),s).add_primitive(.3,center,1,fill,Rotation.from_random(),inverse,periodic=periodic)
assert grid_equal(G_1,G_2)
@pytest.mark.parametrize('trigger',[[1],[]])
@ -300,9 +309,9 @@ class TestGeom:
if len(trigger) > 0:
m2[m==1] = 1
geom = Geom(m,np.random.rand(3)).vicinity_offset(vicinity,offset,trigger=trigger)
grid = Grid(m,np.random.rand(3)).vicinity_offset(vicinity,offset,trigger=trigger)
assert np.all(m2==geom.material)
assert np.all(m2==grid.material)
@pytest.mark.parametrize('periodic',[True,False])
@ -314,39 +323,39 @@ class TestGeom:
@pytest.mark.parametrize('periodic',[True,False])
def test_tessellation_approaches(self,periodic):
grid = np.random.randint(10,20,3)
cells = np.random.randint(10,20,3)
size = np.random.random(3) + 1.0
N_seeds= np.random.randint(10,30)
seeds = np.random.rand(N_seeds,3) * np.broadcast_to(size,(N_seeds,3))
Voronoi = Geom.from_Voronoi_tessellation( grid,size,seeds, np.arange(N_seeds)+5,periodic)
Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(N_seeds),np.arange(N_seeds)+5,periodic)
assert geom_equal(Laguerre,Voronoi)
Voronoi = Grid.from_Voronoi_tessellation( cells,size,seeds, np.arange(N_seeds)+5,periodic)
Laguerre = Grid.from_Laguerre_tessellation(cells,size,seeds,np.ones(N_seeds),np.arange(N_seeds)+5,periodic)
assert grid_equal(Laguerre,Voronoi)
def test_Laguerre_weights(self):
grid = np.random.randint(10,20,3)
cells = np.random.randint(10,20,3)
size = np.random.random(3) + 1.0
N_seeds= np.random.randint(10,30)
seeds = np.random.rand(N_seeds,3) * np.broadcast_to(size,(N_seeds,3))
weights= np.full((N_seeds),-np.inf)
ms = np.random.randint(N_seeds)
weights[ms] = np.random.random()
Laguerre = Geom.from_Laguerre_tessellation(grid,size,seeds,weights,periodic=np.random.random()>0.5)
Laguerre = Grid.from_Laguerre_tessellation(cells,size,seeds,weights,periodic=np.random.random()>0.5)
assert np.all(Laguerre.material == ms)
@pytest.mark.parametrize('approach',['Laguerre','Voronoi'])
def test_tessellate_bicrystal(self,approach):
grid = np.random.randint(5,10,3)*2
size = grid.astype(np.float)
cells = np.random.randint(5,10,3)*2
size = cells.astype(np.float)
seeds = np.vstack((size*np.array([0.5,0.25,0.5]),size*np.array([0.5,0.75,0.5])))
material = np.zeros(grid)
material[:,grid[1]//2:,:] = 1
material = np.zeros(cells)
material[:,cells[1]//2:,:] = 1
if approach == 'Laguerre':
geom = Geom.from_Laguerre_tessellation(grid,size,seeds,np.ones(2),periodic=np.random.random()>0.5)
grid = Grid.from_Laguerre_tessellation(cells,size,seeds,np.ones(2),periodic=np.random.random()>0.5)
elif approach == 'Voronoi':
geom = Geom.from_Voronoi_tessellation(grid,size,seeds, periodic=np.random.random()>0.5)
assert np.all(geom.material == material)
grid = Grid.from_Voronoi_tessellation(cells,size,seeds, periodic=np.random.random()>0.5)
assert np.all(grid.material == material)
@pytest.mark.parametrize('surface',['Schwarz P',
@ -363,14 +372,14 @@ class TestGeom:
'Fisher-Koch S',
])
def test_minimal_surface_basic_properties(self,surface):
grid = np.random.randint(60,100,3)
size = np.ones(3)+np.random.rand(3)
cells = np.random.randint(60,100,3)
size = np.ones(3)+np.random.rand(3)
threshold = 2*np.random.rand()-1.
periods = np.random.randint(2)+1
materials = np.random.randint(0,40,2)
geom = Geom.from_minimal_surface(grid,size,surface,threshold,periods,materials)
assert set(geom.material.flatten()) | set(materials) == set(materials) \
and (geom.size == size).all() and (geom.grid == grid).all()
grid = Grid.from_minimal_surface(cells,size,surface,threshold,periods,materials)
assert set(grid.material.flatten()) | set(materials) == set(materials) \
and (grid.size == size).all() and (grid.cells == cells).all()
@pytest.mark.parametrize('surface,threshold',[('Schwarz P',0),
('Double Primitive',-1./6.),
@ -386,36 +395,36 @@ class TestGeom:
('Fisher-Koch S',0),
])
def test_minimal_surface_volume(self,surface,threshold):
grid = np.ones(3,dtype=int)*64
geom = Geom.from_minimal_surface(grid,np.ones(3),surface,threshold)
assert np.isclose(np.count_nonzero(geom.material==1)/np.prod(geom.grid),.5,rtol=1e-3)
cells = np.ones(3,dtype=int)*64
grid = Grid.from_minimal_surface(cells,np.ones(3),surface,threshold)
assert np.isclose(np.count_nonzero(grid.material==1)/np.prod(grid.cells),.5,rtol=1e-3)
def test_from_table(self):
grid = np.random.randint(60,100,3)
cells = np.random.randint(60,100,3)
size = np.ones(3)+np.random.rand(3)
coords = grid_filters.cell_coord0(grid,size).reshape(-1,3,order='F')
z=np.ones(grid.prod())
z[grid[:2].prod()*int(grid[2]/2):]=0
coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3,order='F')
z=np.ones(cells.prod())
z[cells[:2].prod()*int(cells[2]/2):]=0
t = Table(np.column_stack((coords,z)),{'coords':3,'z':1})
g = Geom.from_table(t,'coords',['1_coords','z'])
assert g.N_materials == g.grid[0]*2 and (g.material[:,:,-1]-g.material[:,:,0] == grid[0]).all()
g = Grid.from_table(t,'coords',['1_coords','z'])
assert g.N_materials == g.cells[0]*2 and (g.material[:,:,-1]-g.material[:,:,0] == cells[0]).all()
def test_from_table_recover(self,tmp_path):
grid = np.random.randint(60,100,3)
cells = np.random.randint(60,100,3)
size = np.ones(3)+np.random.rand(3)
s = seeds.from_random(size,np.random.randint(60,100))
geom = Geom.from_Voronoi_tessellation(grid,size,s)
coords = grid_filters.cell_coord0(grid,size)
t = Table(np.column_stack((coords.reshape(-1,3,order='F'),geom.material.flatten(order='F'))),{'c':3,'m':1})
assert geom_equal(geom.sort().renumber(),Geom.from_table(t,'c',['m']))
grid = Grid.from_Voronoi_tessellation(cells,size,s)
coords = grid_filters.coordinates0_point(cells,size)
t = Table(np.column_stack((coords.reshape(-1,3,order='F'),grid.material.flatten(order='F'))),{'c':3,'m':1})
assert grid_equal(grid.sort().renumber(),Grid.from_table(t,'c',['m']))
@pytest.mark.parametrize('periodic',[True,False])
@pytest.mark.parametrize('direction',['x','y','z',['x','y'],'zy','xz',['x','y','z']])
def test_get_grain_boundaries(self,update,ref_path,periodic,direction):
geom=Geom.load(ref_path/'get_grain_boundaries_8g12x15x20.vtr')
current=geom.get_grain_boundaries(periodic,direction)
grid=Grid.load(ref_path/'get_grain_boundaries_8g12x15x20.vtr')
current=grid.get_grain_boundaries(periodic,direction)
if update:
current.save(ref_path/f'get_grain_boundaries_8g12x15x20_{direction}_{periodic}.vtu',parallel=False)
reference=VTK.load(ref_path/f'get_grain_boundaries_8g12x15x20_{"".join(direction)}_{periodic}.vtu')


@ -169,7 +169,7 @@ class TestResult:
@pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]])
def test_add_IPF_color(self,default,d):
default.add_IPF_color('O',np.array(d))
default.add_IPF_color(d,'O')
loc = {'O': default.get_dataset_location('O'),
'color': default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))}
qu = default.read_dataset(loc['O']).view(np.double).squeeze()
@ -356,11 +356,11 @@ class TestResult:
@pytest.mark.parametrize('mode',['cell','node'])
def test_coordinates(self,default,mode):
if mode == 'cell':
a = grid_filters.cell_coord0(default.grid,default.size,default.origin)
b = default.cell_coordinates.reshape(tuple(default.grid)+(3,),order='F')
a = grid_filters.coordinates0_point(default.cells,default.size,default.origin)
b = default.coordinates0_point.reshape(tuple(default.cells)+(3,),order='F')
elif mode == 'node':
a = grid_filters.node_coord0(default.grid,default.size,default.origin)
b = default.node_coordinates.reshape(tuple(default.grid+1)+(3,),order='F')
a = grid_filters.coordinates0_node(default.cells,default.size,default.origin)
b = default.coordinates0_node.reshape(tuple(default.cells+1)+(3,),order='F')
assert np.allclose(a,b)
@pytest.mark.parametrize('output',['F',[],['F','P']])


@ -800,6 +800,14 @@ class TestRotation:
print(f'append 2x {shape} --> {s.shape}')
assert s[0,...] == r[0,...] and s[-1,...] == p[-1,...]
@pytest.mark.parametrize('shape',[None,1,(1,),(4,2),(3,3,2)])
def test_append_list(self,shape):
r = Rotation.from_random(shape=shape)
p = Rotation.from_random(shape=shape)
s = r.append([r,p])
print(f'append 3x {shape} --> {s.shape}')
assert s[0,...] == r[0,...] and s[-1,...] == p[-1,...]
@pytest.mark.parametrize('quat,standardized',[
([-1,0,0,0],[1,0,0,0]),
([-0.5,-0.5,-0.5,-0.5],[0.5,0.5,0.5,0.5]),
@ -1022,7 +1030,7 @@ class TestRotation:
rng = tuple(zip(np.zeros(3),limits))
weights = Table.load(ref_path/'ODF_experimental_cell.txt').get('intensity').flatten()
Eulers = grid_filters.cell_coord0(steps,limits)
Eulers = grid_filters.coordinates0_point(steps,limits)
Eulers = np.radians(Eulers) if not degrees else Eulers
Eulers_r = Rotation.from_ODF(weights,Eulers.reshape(-1,3,order='F'),N,degrees,fractions).as_Euler_angles(True)
@ -1040,7 +1048,7 @@ class TestRotation:
weights = Table.load(ref_path/'ODF_experimental.txt').get('intensity')
weights = weights.reshape(steps+1,order='F')[:-1,:-1,:-1].reshape(-1,order='F')
Eulers = grid_filters.node_coord0(steps,limits)[:-1,:-1,:-1]
Eulers = grid_filters.coordinates0_node(steps,limits)[:-1,:-1,:-1]
Eulers = np.radians(Eulers) if not degrees else Eulers
Eulers_r = Rotation.from_ODF(weights,Eulers.reshape(-1,3,order='F'),N,degrees).as_Euler_angles(True)


@ -8,7 +8,7 @@ from damask import Table
def default():
"""Simple Table."""
x = np.ones((5,13),dtype=float)
return Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['test data','contains only ones'])
return Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['test data','contains five rows of only ones'])
@pytest.fixture
def ref_path(ref_path_base):
@ -20,8 +20,9 @@ class TestTable:
def test_repr(self,default):
print(default)
def test_len(self):
len(Table(np.random.rand(7,3),{'X':3})) == 7
@pytest.mark.parametrize('N',[10,40])
def test_len(self,N):
assert len(Table(np.random.rand(N,3),{'X':3})) == N
def test_get_scalar(self,default):
d = default.get('s')
@ -39,6 +40,10 @@ class TestTable:
d = default.get('5_F')
assert np.allclose(d,1.0) and d.shape[1:] == (1,)
@pytest.mark.parametrize('N',[10,40])
def test_getitem(self,N):
assert len(Table(np.random.rand(N,1),{'X':1})[:N//2]) == N//2
@pytest.mark.parametrize('mode',['str','path'])
def test_write_read(self,default,tmp_path,mode):
default.save(tmp_path/'default.txt')


@ -16,9 +16,9 @@ def ref_path(ref_path_base):
@pytest.fixture
def default():
"""Simple VTK."""
grid = np.array([5,6,7],int)
size = np.array([.6,1.,.5])
return VTK.from_rectilinear_grid(grid,size)
cells = np.array([5,6,7],int)
size = np.array([.6,1.,.5])
return VTK.from_rectilinear_grid(cells,size)
class TestVTK:
@ -27,10 +27,10 @@ class TestVTK:
print('patched damask.util.execution_stamp')
def test_rectilinearGrid(self,tmp_path):
grid = np.random.randint(5,10,3)*2
cells = np.random.randint(5,10,3)*2
size = np.random.random(3) + 1.0
origin = np.random.random(3)
v = VTK.from_rectilinear_grid(grid,size,origin)
v = VTK.from_rectilinear_grid(cells,size,origin)
string = v.__repr__()
v.save(tmp_path/'rectilinearGrid',False)
vtr = VTK.load(tmp_path/'rectilinearGrid.vtr')
@ -152,11 +152,11 @@ class TestVTK:
np.allclose(polyData.get('coordinates'),points)
def test_compare_reference_rectilinearGrid(self,update,ref_path,tmp_path):
grid = np.array([5,6,7],int)
size = np.array([.6,1.,.5])
rectilinearGrid = VTK.from_rectilinear_grid(grid,size)
c = grid_filters.cell_coord0(grid,size).reshape(-1,3,order='F')
n = grid_filters.node_coord0(grid,size).reshape(-1,3,order='F')
cells = np.array([5,6,7],int)
size = np.array([.6,1.,.5])
rectilinearGrid = VTK.from_rectilinear_grid(cells,size)
c = grid_filters.coordinates0_point(cells,size).reshape(-1,3,order='F')
n = grid_filters.coordinates0_node(cells,size).reshape(-1,3,order='F')
rectilinearGrid.add(c,'cell')
rectilinearGrid.add(n,'node')
if update:


@ -5,124 +5,124 @@ from damask import grid_filters
class TestGridFilters:
def test_cell_coord0(self):
def test_coordinates0_point(self):
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
coord = grid_filters.cell_coord0(grid,size)
assert np.allclose(coord[0,0,0],size/grid*.5) and coord.shape == tuple(grid) + (3,)
cells = np.random.randint(8,32,(3))
coord = grid_filters.coordinates0_point(cells,size)
assert np.allclose(coord[0,0,0],size/cells*.5) and coord.shape == tuple(cells) + (3,)
def test_node_coord0(self):
def test_coordinates0_node(self):
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
coord = grid_filters.node_coord0(grid,size)
assert np.allclose(coord[-1,-1,-1],size) and coord.shape == tuple(grid+1) + (3,)
cells = np.random.randint(8,32,(3))
coord = grid_filters.coordinates0_node(cells,size)
assert np.allclose(coord[-1,-1,-1],size) and coord.shape == tuple(cells+1) + (3,)
def test_coord0(self):
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
c = grid_filters.cell_coord0(grid+1,size+size/grid)
n = grid_filters.node_coord0(grid,size) + size/grid*.5
cells = np.random.randint(8,32,(3))
c = grid_filters.coordinates0_point(cells+1,size+size/cells)
n = grid_filters.coordinates0_node(cells,size) + size/cells*.5
assert np.allclose(c,n)
@pytest.mark.parametrize('mode',['cell','node'])
@pytest.mark.parametrize('mode',['point','node'])
def test_grid_DNA(self,mode):
"""Ensure that xx_coord0_gridSizeOrigin is the inverse of xx_coord0."""
grid = np.random.randint(8,32,(3))
"""Ensure that cellsSizeOrigin_coordinates0_xx is the inverse of coordinates0_xx."""
cells = np.random.randint(8,32,(3))
size = np.random.random(3)
origin = np.random.random(3)
coord0 = eval(f'grid_filters.{mode}_coord0(grid,size,origin)') # noqa
_grid,_size,_origin = eval(f'grid_filters.{mode}_coord0_gridSizeOrigin(coord0.reshape(-1,3,order="F"))')
assert np.allclose(grid,_grid) and np.allclose(size,_size) and np.allclose(origin,_origin)
coord0 = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)') # noqa
_cells,_size,_origin = eval(f'grid_filters.cellsSizeOrigin_coordinates0_{mode}(coord0.reshape(-1,3,order="F"))')
assert np.allclose(cells,_cells) and np.allclose(size,_size) and np.allclose(origin,_origin)
def test_displacement_fluct_equivalence(self):
"""Ensure that fluctuations are periodic."""
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3))
assert np.allclose(grid_filters.node_displacement_fluct(size,F),
grid_filters.cell_2_node(grid_filters.cell_displacement_fluct(size,F)))
cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(cells)+(3,3))
assert np.allclose(grid_filters.displacement_fluct_node(size,F),
grid_filters.point_to_node(grid_filters.displacement_fluct_point(size,F)))
def test_interpolation_to_node(self):
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3))
assert np.allclose(grid_filters.node_coord(size,F) [1:-1,1:-1,1:-1],
grid_filters.cell_2_node(grid_filters.cell_coord(size,F))[1:-1,1:-1,1:-1])
cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(cells)+(3,3))
assert np.allclose(grid_filters.coordinates_node(size,F) [1:-1,1:-1,1:-1],
grid_filters.point_to_node(grid_filters.coordinates_point(size,F))[1:-1,1:-1,1:-1])
def test_interpolation_to_cell(self):
grid = np.random.randint(1,30,(3))
cells = np.random.randint(1,30,(3))
node_coord_x = np.linspace(0,np.pi*2,num=grid[0]+1)
node_field_x = np.cos(node_coord_x)
node_field = np.broadcast_to(node_field_x.reshape(-1,1,1),grid+1)
coordinates_node_x = np.linspace(0,np.pi*2,num=cells[0]+1)
node_field_x = np.cos(coordinates_node_x)
node_field = np.broadcast_to(node_field_x.reshape(-1,1,1),cells+1)
cell_coord_x = node_coord_x[:-1]+node_coord_x[1]*.5
cell_field_x = np.interp(cell_coord_x,node_coord_x,node_field_x,period=np.pi*2.)
cell_field = np.broadcast_to(cell_field_x.reshape(-1,1,1),grid)
coordinates0_point_x = coordinates_node_x[:-1]+coordinates_node_x[1]*.5
cell_field_x = np.interp(coordinates0_point_x,coordinates_node_x,node_field_x,period=np.pi*2.)
cell_field = np.broadcast_to(cell_field_x.reshape(-1,1,1),cells)
assert np.allclose(cell_field,grid_filters.node_2_cell(node_field))
assert np.allclose(cell_field,grid_filters.node_2_point(node_field))
@pytest.mark.parametrize('mode',['cell','node'])
def test_coord0_origin(self,mode):
@pytest.mark.parametrize('mode',['point','node'])
def test_coordinates0_origin(self,mode):
origin= np.random.random(3)
size = np.random.random(3) # noqa
grid = np.random.randint(8,32,(3))
shifted = eval(f'grid_filters.{mode}_coord0(grid,size,origin)')
unshifted = eval(f'grid_filters.{mode}_coord0(grid,size)')
cells = np.random.randint(8,32,(3))
shifted = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)')
unshifted = eval(f'grid_filters.coordinates0_{mode}(cells,size)')
if mode == 'point':
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid) +(3,)))
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(cells) +(3,)))
elif mode == 'node':
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(grid+1)+(3,)))
assert np.allclose(shifted,unshifted+np.broadcast_to(origin,tuple(cells+1)+(3,)))
@pytest.mark.parametrize('function',[grid_filters.cell_displacement_avg,
grid_filters.node_displacement_avg])
@pytest.mark.parametrize('function',[grid_filters.displacement_avg_point,
grid_filters.displacement_avg_node])
def test_displacement_avg_vanishes(self,function):
"""Ensure that random fluctuations in F do not result in average displacement."""
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3))
cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(cells)+(3,3))
F += np.eye(3) - np.average(F,axis=(0,1,2))
assert np.allclose(function(size,F),0.0)
@pytest.mark.parametrize('function',[grid_filters.cell_displacement_fluct,
grid_filters.node_displacement_fluct])
@pytest.mark.parametrize('function',[grid_filters.displacement_fluct_point,
grid_filters.displacement_fluct_node])
def test_displacement_fluct_vanishes(self,function):
"""Ensure that constant F does not result in fluctuating displacement."""
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
F = np.broadcast_to(np.random.random((3,3)), tuple(grid)+(3,3))
cells = np.random.randint(8,32,(3))
F = np.broadcast_to(np.random.random((3,3)), tuple(cells)+(3,3))
assert np.allclose(function(size,F),0.0)
@pytest.mark.parametrize('function',[grid_filters.coord0_check,
grid_filters.node_coord0_gridSizeOrigin,
grid_filters.cell_coord0_gridSizeOrigin])
@pytest.mark.parametrize('function',[grid_filters.coordinates0_check,
grid_filters.cellsSizeOrigin_coordinates0_node,
grid_filters.cellsSizeOrigin_coordinates0_point])
def test_invalid_coordinates(self,function):
invalid_coordinates = np.random.random((np.random.randint(12,52),3))
with pytest.raises(ValueError):
function(invalid_coordinates)
@pytest.mark.parametrize('function',[grid_filters.node_coord0_gridSizeOrigin,
grid_filters.cell_coord0_gridSizeOrigin])
@pytest.mark.parametrize('function',[grid_filters.cellsSizeOrigin_coordinates0_node,
grid_filters.cellsSizeOrigin_coordinates0_point])
def test_uneven_spaced_coordinates(self,function):
start = np.random.random(3)
end = np.random.random(3)*10. + start
grid = np.random.randint(8,32,(3))
uneven = np.stack(np.meshgrid(np.logspace(start[0],end[0],grid[0]),
np.logspace(start[1],end[1],grid[1]),
np.logspace(start[2],end[2],grid[2]),indexing = 'ij'),
axis = -1).reshape((grid.prod(),3),order='F')
cells = np.random.randint(8,32,(3))
uneven = np.stack(np.meshgrid(np.logspace(start[0],end[0],cells[0]),
np.logspace(start[1],end[1],cells[1]),
np.logspace(start[2],end[2],cells[2]),indexing = 'ij'),
axis = -1).reshape((cells.prod(),3),order='F')
with pytest.raises(ValueError):
function(uneven)
@pytest.mark.parametrize('mode',[True,False])
@pytest.mark.parametrize('function',[grid_filters.node_coord0_gridSizeOrigin,
grid_filters.cell_coord0_gridSizeOrigin])
@pytest.mark.parametrize('function',[grid_filters.cellsSizeOrigin_coordinates0_node,
grid_filters.cellsSizeOrigin_coordinates0_point])
def test_unordered_coordinates(self,function,mode):
origin = np.random.random(3)
size = np.random.random(3)*10.+origin
grid = np.random.randint(8,32,(3))
unordered = grid_filters.node_coord0(grid,size,origin).reshape(-1,3)
cells = np.random.randint(8,32,(3))
unordered = grid_filters.coordinates0_node(cells,size,origin).reshape(-1,3)
if mode:
with pytest.raises(ValueError):
function(unordered,mode)
@ -131,9 +131,9 @@ class TestGridFilters:
def test_regrid(self):
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
F = np.broadcast_to(np.eye(3), tuple(grid)+(3,3))
assert all(grid_filters.regrid(size,F,grid) == np.arange(grid.prod()))
cells = np.random.randint(8,32,(3))
F = np.broadcast_to(np.eye(3), tuple(cells)+(3,3))
assert all(grid_filters.regrid(size,F,cells) == np.arange(cells.prod()))
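Not part of the diff: `regrid(size,F,cells)` maps cell indices between the undeformed and the F-deformed grid, so with F = I the two grids coincide and the mapping is the identity permutation, which is exactly the assertion above. Standalone sketch under the assumed import:

import numpy as np
from damask import grid_filters                           # assumed import path

cells = np.array([6,7,8])
size  = np.ones(3)
F = np.broadcast_to(np.eye(3),tuple(cells)+(3,3))         # identity: no deformation
assert (grid_filters.regrid(size,F,cells) == np.arange(cells.prod())).all()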
@pytest.mark.parametrize('differential_operator',[grid_filters.curl,
@ -141,14 +141,14 @@ class TestGridFilters:
grid_filters.gradient])
def test_differential_operator_constant(self,differential_operator):
size = np.random.random(3)+1.0
grid = np.random.randint(8,32,(3))
cells = np.random.randint(8,32,(3))
shapes = {
grid_filters.curl: [(3,),(3,3)],
grid_filters.divergence:[(3,),(3,3)],
grid_filters.gradient: [(1,),(3,)]
}
for shape in shapes[differential_operator]:
field = np.ones(tuple(grid)+shape)*np.random.random()*1.0e5
field = np.ones(tuple(cells)+shape)*np.random.random()*1.0e5
assert np.allclose(differential_operator(size,field),0.0)
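Sketch, not in the commit, spelled out for one admissible shape each (taken from the `shapes` dictionary above): a spatially constant field has vanishing curl, divergence, and gradient. Import path assumed.

import numpy as np
from damask import grid_filters                           # assumed import path

cells, size = (8,9,10), np.array([1.0,2.0,3.0])
assert np.allclose(grid_filters.curl      (size,np.ones(cells+(3,3))),0.0)   # tensor field
assert np.allclose(grid_filters.divergence(size,np.ones(cells+(3,)) ),0.0)   # vector field
assert np.allclose(grid_filters.gradient  (size,np.ones(cells+(1,)) ),0.0)   # scalar field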
@ -190,15 +190,15 @@ class TestGridFilters:
@pytest.mark.parametrize('field_def,grad_def',grad_test_data)
def test_grad(self,field_def,grad_def):
size = np.random.random(3)+1.0
grid = np.random.randint(8,32,(3))
cells = np.random.randint(8,32,(3))
nodes = grid_filters.cell_coord0(grid,size)
nodes = grid_filters.coordinates0_point(cells,size)
my_locals = locals() # needed for list comprehension
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),grid) for f in field_def],axis=-1)
field = field.reshape(tuple(grid) + ((3,) if len(field_def)==3 else (1,)))
grad = np.stack([np.broadcast_to(eval(c,globals(),my_locals),grid) for c in grad_def], axis=-1)
grad = grad.reshape(tuple(grid) + ((3,3) if len(grad_def)==9 else (3,)))
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),cells) for f in field_def],axis=-1)
field = field.reshape(tuple(cells) + ((3,) if len(field_def)==3 else (1,)))
grad = np.stack([np.broadcast_to(eval(c,globals(),my_locals),cells) for c in grad_def], axis=-1)
grad = grad.reshape(tuple(cells) + ((3,3) if len(grad_def)==9 else (3,)))
assert np.allclose(grad,grid_filters.gradient(size,field))
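Illustrative only, not part of the commit: the kind of analytic case carried by `grad_test_data`, written out without `eval`. It assumes the operator reproduces the closed-form gradient of a single periodic Fourier mode, which is what the parametrized comparisons check; the specific field and resolution below are hypothetical choices, and the import path is assumed.

import numpy as np
from damask import grid_filters                           # assumed import path

cells, size = np.array([16,16,16]), np.array([1.0,1.0,1.0])
x = grid_filters.coordinates0_point(cells,size)[...,0]           # cell-centre x coordinates
field = np.sin(2.0*np.pi*x/size[0]).reshape(tuple(cells)+(1,))   # periodic scalar field
analytic = np.zeros(tuple(cells)+(3,))
analytic[...,0] = 2.0*np.pi/size[0]*np.cos(2.0*np.pi*x/size[0])
assert np.allclose(grid_filters.gradient(size,field),analytic)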
@ -250,15 +250,15 @@ class TestGridFilters:
@pytest.mark.parametrize('field_def,curl_def',curl_test_data)
def test_curl(self,field_def,curl_def):
size = np.random.random(3)+1.0
grid = np.random.randint(8,32,(3))
cells = np.random.randint(8,32,(3))
nodes = grid_filters.cell_coord0(grid,size)
nodes = grid_filters.coordinates0_point(cells,size)
my_locals = locals() # needed for list comprehension
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),grid) for f in field_def],axis=-1)
field = field.reshape(tuple(grid) + ((3,3) if len(field_def)==9 else (3,)))
curl = np.stack([np.broadcast_to(eval(c,globals(),my_locals),grid) for c in curl_def], axis=-1)
curl = curl.reshape(tuple(grid) + ((3,3) if len(curl_def)==9 else (3,)))
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),cells) for f in field_def],axis=-1)
field = field.reshape(tuple(cells) + ((3,3) if len(field_def)==9 else (3,)))
curl = np.stack([np.broadcast_to(eval(c,globals(),my_locals),cells) for c in curl_def], axis=-1)
curl = curl.reshape(tuple(cells) + ((3,3) if len(curl_def)==9 else (3,)))
assert np.allclose(curl,grid_filters.curl(size,field))
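Sketch, not in the commit: a composite identity check beyond the tabulated cases, namely that the curl of a gradient field vanishes. Built only from calls already used in this file; import path assumed.

import numpy as np
from damask import grid_filters                           # assumed import path

cells, size = np.array([16,16,16]), np.ones(3)
x = grid_filters.coordinates0_point(cells,size)[...,0]
f = np.sin(2.0*np.pi*x/size[0]).reshape(tuple(cells)+(1,))       # periodic scalar potential
assert np.allclose(grid_filters.curl(size,grid_filters.gradient(size,f)),0.0)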
@ -303,17 +303,17 @@ class TestGridFilters:
def test_div(self,field_def,div_def):
size = np.random.random(3)+1.0
grid = np.random.randint(8,32,(3))
cells = np.random.randint(8,32,(3))
nodes = grid_filters.cell_coord0(grid,size)
nodes = grid_filters.coordinates0_point(cells,size)
my_locals = locals() # needed for list comprehension
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),grid) for f in field_def],axis=-1)
field = field.reshape(tuple(grid) + ((3,3) if len(field_def)==9 else (3,)))
div = np.stack([np.broadcast_to(eval(c,globals(),my_locals),grid) for c in div_def], axis=-1)
field = np.stack([np.broadcast_to(eval(f,globals(),my_locals),cells) for f in field_def],axis=-1)
field = field.reshape(tuple(cells) + ((3,3) if len(field_def)==9 else (3,)))
div = np.stack([np.broadcast_to(eval(c,globals(),my_locals),cells) for c in div_def], axis=-1)
if len(div_def)==3:
div = div.reshape(tuple(grid) + ((3,)))
div = div.reshape(tuple(cells) + ((3,)))
else:
div=div.reshape(tuple(grid))
div=div.reshape(tuple(cells))
assert np.allclose(div,grid_filters.divergence(size,field))
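Illustrative sketch, not part of the diff, mirroring the closed-form style of `div_test_data`: for v = (sin(2πx/Lx),0,0) the divergence is (2π/Lx)·cos(2πx/Lx). The field choice is hypothetical and the import path is assumed.

import numpy as np
from damask import grid_filters                           # assumed import path

cells, size = np.array([16,16,16]), np.array([2.0,1.0,1.0])
x = grid_filters.coordinates0_point(cells,size)[...,0]
field = np.zeros(tuple(cells)+(3,))
field[...,0] = np.sin(2.0*np.pi*x/size[0])                       # v = (sin(2*pi*x/Lx),0,0)
assert np.allclose(grid_filters.divergence(size,field),
                   2.0*np.pi/size[0]*np.cos(2.0*np.pi*x/size[0]))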

Some files were not shown because too many files have changed in this diff.