Merge remote-tracking branch 'origin/development' into 145-AGPL
commit e8cfbf5d2e
@@ -46,7 +46,7 @@ variables:
   # ++++++++++++ PETSc ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
   PETSC_GNU: "Libraries/PETSc/3.16.1/GNU-10-OpenMPI-4.1.1"
   PETSC_INTELLLVM: "Libraries/PETSc/3.16.3/oneAPI-2022.0.1-IntelMPI-2021.5.0"
-  PETSC_INTEL: "Libraries/PETSc/3.16.2/Intel-2022.0.1-IntelMPI-2021.5.0"
+  PETSC_INTEL: "Libraries/PETSc/3.16.3/Intel-2022.0.1-IntelMPI-2021.5.0"
   # ++++++++++++ MSC Marc +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
   MSC: "FEM/MSC/2021.3.1"
   IntelMarc: "Compiler/Intel/19.1.2 Libraries/IMKL/2020"
@@ -79,49 +79,69 @@ mypy:


 ###################################################################################################
-test_grid_GNU:
+grid_GNU:
   stage: compile
   script:
     - module load ${COMPILER_GNU} ${MPI_GNU} ${PETSC_GNU}
     - cd PRIVATE/testing/pytest
     - pytest -k 'compile and grid' --basetemp ${TESTROOT}/compile_grid_GNU

-test_mesh_GNU:
+mesh_GNU:
   stage: compile
   script:
     - module load ${COMPILER_GNU} ${MPI_GNU} ${PETSC_GNU}
     - cd PRIVATE/testing/pytest
     - pytest -k 'compile and mesh' --basetemp ${TESTROOT}/compile_mesh_GNU

-test_mesh_IntelLLVM:
+grid_GNU-64bit:
+  stage: compile
+  script:
+    - module load Compiler/GNU/10 Libraries/PETSc/3.16.2/64bit
+    - cd PRIVATE/testing/pytest
+    - pytest -k 'compile and grid' --basetemp ${TESTROOT}/compile_grid_GNU-64bit
+
+mesh_GNU-64bit:
+  stage: compile
+  script:
+    - module load Compiler/GNU/10 Libraries/PETSc/3.16.2/64bit
+    - cd PRIVATE/testing/pytest
+    - pytest -k 'compile and mesh' --basetemp ${TESTROOT}/compile_mesh_GNU-64bit
+
+grid_IntelLLVM:
+  stage: compile
+  script:
+    - module load ${COMPILER_INTELLLVM} ${MPI_INTELLLVM} ${PETSC_INTELLLVM}
+    - cd PRIVATE/testing/pytest
+    - pytest -k 'compile and grid' --basetemp ${TESTROOT}/compile_grid_IntelLLVM
+
+mesh_IntelLLVM:
   stage: compile
   script:
     - module load ${COMPILER_INTELLLVM} ${MPI_INTELLLVM} ${PETSC_INTELLLVM}
     - cd PRIVATE/testing/pytest
     - pytest -k 'compile and mesh' --basetemp ${TESTROOT}/compile_mesh_IntelLLVM

-test_grid_Intel:
+grid_Intel:
   stage: compile
   script:
     - module load ${COMPILER_INTEL} ${MPI_INTEL} ${PETSC_INTEL}
     - cd PRIVATE/testing/pytest
     - pytest -k 'compile and grid' --basetemp ${TESTROOT}/compile_grid_Intel

-test_mesh_Intel:
+mesh_Intel:
   stage: compile
   script:
     - module load ${COMPILER_INTEL} ${MPI_INTEL} ${PETSC_INTEL}
     - cd PRIVATE/testing/pytest
     - pytest -k 'compile and mesh' --basetemp ${TESTROOT}/compile_mesh_Intel

-test_Marc:
+Marc_Intel:
   stage: compile
   script:
     - module load $IntelMarc $HDF5Marc $MSC
     - cd PRIVATE/testing/pytest
     - pytest -k 'compile and Marc' --basetemp ${TESTROOT}/compile_Marc


 setup_grid:
   stage: compile
   script:
@@ -138,7 +158,7 @@ setup_mesh:
     - cmake -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${TESTROOT} ${CI_PROJECT_DIR}
     - make -j2 all install

-compile_Marc:
+setup_Marc:
   stage: compile
   script:
     - module load $IntelMarc $HDF5Marc $MSC
@@ -172,6 +192,10 @@ Marc:

 ###################################################################################################
 grid_runtime:
   stage: performance
+  before_script:
+    - ${LOCAL_HOME}/bin/queue ${CI_JOB_ID} --blocking
+    - source env/DAMASK.sh
+    - echo Job start:" $(date)"
   script:
     - module load ${COMPILER_INTEL} ${MPI_INTEL} ${PETSC_INTEL}
     - cd $(mktemp -d)
@@ -186,10 +210,6 @@ grid_runtime:
         --output_dir ./
         --tag ${CI_COMMIT_SHA}
     - if [ ${CI_COMMIT_BRANCH} == development ]; then git commit -am ${CI_PIPELINE_ID}_${CI_COMMIT_SHA}; git push; fi
-  before_script:
-    - ${LOCAL_HOME}/bin/queue ${CI_JOB_ID} --blocking
-    - source env/DAMASK.sh
-    - echo Job start:" $(date)"


 ###################################################################################################
@@ -209,19 +229,10 @@ update_revision:
   script:
     - cd $(mktemp -d)
     - git clone -q git@git.damask.mpie.de:damask/DAMASK.git .
-    - git checkout ${CI_COMMIT_SHA}
-    - export VERSION=$(git describe)
+    - git pull
+    - export VERSION=$(git describe ${CI_COMMIT_SHA})
     - echo ${VERSION} > python/damask/VERSION
-    - git add python/damask/VERSION
-    - git commit -m "[skip ci] updated version information after successful test of $VERSION"
-    - export UPDATEDREV=$(git describe) # tested state + 1 commit
-    - git checkout master
-    - git pull
-    - git merge $UPDATEDREV -s recursive -X ours # conflicts occur only for inconsistent state
-    - git push
-    - git checkout development
-    - git pull
-    - git merge master -s recursive -X ours -m "[skip ci] Merge branch 'master' into development" # only possible conflict is in VERSION file
-    - git push origin development # development is unchanged (as master is based on it) but has updated VERSION file
+    - git commit python/damask/VERSION -m "[skip ci] updated version information after successful test of $VERSION"
+    - if [ ${CI_COMMIT_SHA} == $(git rev-parse HEAD^) ]; then git push origin HEAD:master HEAD:development; fi
   only:
     - development

PRIVATE (submodule)
@@ -1 +1 @@
-Subproject commit b898a8b5552bd9d1c555edc3d8134564dd32fe53
+Subproject commit d570b4a1fac5b9b99d026d3ff9bf593615e22ce5

@@ -6,7 +6,7 @@ references:

 output: [xi_sl, xi_tw]

-N_sl: [3, 3, 6, 0, 6] # basal, prism, -, 1. pyr<a>, -, 2. pyr<c+a>
+N_sl: [3, 3, 6, 0, 6] # basal, prism, 1. pyr<a>, -, 2. pyr<c+a>
 N_tw: [6, 0, 6] # tension, -, compression

 xi_0_sl: [10.e+6, 55.e+6, 60.e+6, 0., 60.e+6]
@@ -23,9 +23,13 @@ f_sat_sl-tw: 10.0
 h_0_sl-sl: 500.0e+6
 h_0_tw-tw: 50.0e+6
 h_0_tw-sl: 150.0e+6
-h_sl-sl: [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
-          1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0,
-          1.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, +1.0, 1.0] # unused entries are indicated by -1.0
+h_sl-sl: [+1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+          +1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0,
+          -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0,
+          -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0,
+          -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, 1.0, 1.0, 1.0, 1.0,
+          +1.0, 1.0, -1.0, -1.0, -1.0, -1.0, 1.0, 1.0, 1.0, 1.0,
+          -1.0, -1.0, -1.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0] # unused entries are indicated by -1.0
 h_tw-tw: [+1.0, 1.0, -1.0, -1.0, -1.0, -1.0, 1.0, -1.0, 1.0, 1.0,
           -1.0, 1.0] # unused entries are indicated by -1.0
 h_tw-sl: [1.0, -1.0, 1.0, -1.0, 1.0, -1.0, 1.0, -1.0, 1.0, -1.0,
@@ -8,7 +8,7 @@ references:
   https://doi.org/10.1016/j.actamat.2017.05.015
 output: [gamma_sl]

-N_sl: [3, 3, 0, 12] # basal, 1. prism, -, 1. pyr<c+a>
+N_sl: [3, 3, 0, 12] # basal, prism, -, 1. pyr<c+a>
 n_sl: 20
 a_sl: 2.0
 dot_gamma_0_sl: 0.001
@@ -20,5 +20,8 @@ xi_inf_sl: [568.e+6, 150.e+7, 0.0, 3420.e+6]
 # L. Wang et al. :
 # xi_0_sl: [127.e+6, 96.e+6, 0.0, 240.e+6]

-h_sl-sl: [+1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0,
-          -1.0, -1.0, +1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0] # unused entries are indicated by -1.0
+h_sl-sl: [+1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0,
+          -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, 1.0,
+          +1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+          +1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+          +1.0, 1.0, 1.0, 1.0, 1.0, 1.0] # unused entries are indicated by -1.0
@@ -12,17 +12,15 @@
 # AEM
 if test "$MARCDLLOUTDIR" = ""; then
-@@ -390,8 +395,8 @@
+@@ -390,7 +395,7 @@
 I8DEFINES=
 I8CDEFINES=
 else
 - I8FFLAGS="-i8"
-- I8DEFINES="-DI64"
 + I8FFLAGS="-i8 -integer-size 64"
-+ I8DEFINES="-DI64 -DINT=8"
+ I8DEFINES="-DI64"
 I8CDEFINES="-U_DOUBLE -D_SINGLE"
 fi

 @@ -498,7 +503,7 @@
 PROFILE=" $PROFILE -pg"
 fi
@@ -12,17 +12,15 @@
 # AEM
 if test "$MARCDLLOUTDIR" = ""; then
-@@ -439,8 +444,8 @@
+@@ -439,7 +444,7 @@
 I8DEFINES=
 I8CDEFINES=
 else
 - I8FFLAGS="-i8"
-- I8DEFINES="-DI64"
 + I8FFLAGS="-i8 -integer-size 64"
-+ I8DEFINES="-DI64 -DINT=8"
+ I8DEFINES="-DI64"
 I8CDEFINES="-U_DOUBLE -D_SINGLE"
 fi

 @@ -556,7 +561,7 @@
 PROFILE=" $PROFILE -pg"
 fi
@@ -12,17 +12,15 @@
 # AEM
 if test "$MARCDLLOUTDIR" = ""; then
 DLLOUTDIR="$MARC_LIB"
-@@ -439,8 +444,8 @@ if test "$MARC_INTEGER_SIZE" = "i4" ; then
+@@ -439,7 +444,7 @@ if test "$MARC_INTEGER_SIZE" = "i4" ; then
 I8DEFINES=
 I8CDEFINES=
 else
 - I8FFLAGS="-i8"
-- I8DEFINES="-DI64"
 + I8FFLAGS="-i8 -integer-size 64"
-+ I8DEFINES="-DI64 -DINT=8"
+ I8DEFINES="-DI64"
 I8CDEFINES="-U_DOUBLE -D_SINGLE"
 fi

 @@ -556,7 +561,7 @@ then
 PROFILE=" $PROFILE -pg"
 fi
@@ -1 +1 @@
-v3.0.0-alpha5-389-ga000e477c
+v3.0.0-alpha5-460-gfc7c165af
@@ -21,8 +21,8 @@ from ._rotation import Rotation # noqa
 from ._crystal import Crystal # noqa
 from ._orientation import Orientation # noqa
 from ._table import Table # noqa
-from ._vtk import VTK # noqa
 from ._colormap import Colormap # noqa
+from ._vtk import VTK # noqa
 from ._config import Config # noqa
 from ._configmaterial import ConfigMaterial # noqa
 from ._grid import Grid # noqa
@@ -250,7 +250,7 @@ class Colormap(mpl.colors.ListedColormap):
                               np.logical_or (np.isnan(field), field == gap)) # mask NaN (and gap if present)

         l,r = (field[mask].min(),field[mask].max()) if bounds is None else \
-              np.array(bounds,float)[:2]
+              (bounds[0],bounds[1])

         delta,avg = r-l,0.5*abs(r+l)
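As context for the bounds handling above, here is a minimal standalone sketch of the auto-scaling logic (plain NumPy with toy values; variable names are illustrative, not the DAMASK implementation):

    import numpy as np

    field  = np.array([[0.0, 1.0],
                       [np.nan, 4.0]])
    bounds = None                                               # or an explicit pair such as (0.0, 3.0)

    mask = np.logical_not(np.isnan(field))                      # ignore NaN when auto-scaling
    l,r  = (field[mask].min(),field[mask].max()) if bounds is None else \
           (bounds[0],bounds[1])
    delta,avg = r-l, 0.5*abs(r+l)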
@@ -428,7 +428,7 @@ class Grid:
             seeds_p = np.vstack((seeds_p-np.array([0.,size[1],0.]),seeds_p,seeds_p+np.array([0.,size[1],0.])))
             seeds_p = np.vstack((seeds_p-np.array([0.,0.,size[2]]),seeds_p,seeds_p+np.array([0.,0.,size[2]])))
         else:
-            weights_p = weights
+            weights_p = np.array(weights,float)
             seeds_p = seeds

         coords = grid_filters.coordinates0_point(cells,size).reshape(-1,3)
@@ -674,7 +674,7 @@ class Grid:

     def show(self) -> None:
         """Show on screen."""
-        VTK.from_rectilinear_grid(self.cells,self.size,self.origin).show()
+        VTK.from_image_data(self.cells,self.size,self.origin).show()


     def add_primitive(self,
@@ -841,7 +841,8 @@ class Grid:
         if not set(directions).issubset(valid):
             raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')

-        mat = np.flip(self.material, (valid.index(d) for d in directions if d in valid))
+        mat = np.flip(self.material, [valid.index(d) for d in directions if d in valid])

         return Grid(material = mat,
                     size     = self.size,
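A note on the np.flip change above: the axes argument is now built with a list comprehension instead of a generator expression, i.e. as a concrete sequence of ints. A minimal sketch with toy values (not DAMASK code):

    import numpy as np

    valid      = ['x','y','z']
    directions = ['x','z']
    m = np.arange(8).reshape(2,2,2)

    axes = [valid.index(d) for d in directions if d in valid]   # a concrete list of axis indices, here [0, 2]
    flipped = np.flip(m, axes)                                  # flip along x and z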
@@ -973,14 +974,13 @@ class Grid:
         Updated grid-based geometry.

         """
-        if fill is None: fill = np.nanmax(self.material) + 1
-        dtype = float if isinstance(fill,float) or self.material.dtype in np.sctypes['float'] else int

         material = self.material
         # These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
         # see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
         for angle,axes in zip(R.as_Euler_angles(degrees=True)[::-1], [(0,1),(1,2),(0,1)]):
-            material_temp = ndimage.rotate(material,angle,axes,order=0,prefilter=False,output=dtype,cval=fill)
+            material_temp = ndimage.rotate(material,angle,axes,order=0,prefilter=False,
+                                           output=self.material.dtype,
+                                           cval=np.nanmax(self.material) + 1 if fill is None else fill)
             # avoid scipy interpolation errors for rotations close to multiples of 90°
             material = material_temp if np.prod(material_temp.shape) != np.prod(material.shape) else \
                        np.rot90(material,k=np.rint(angle/90.).astype(int),axes=axes)
@@ -1031,10 +1031,8 @@ class Grid:
         """
         offset_ = np.array(offset,int) if offset is not None else np.zeros(3,int)
         cells_ = np.array(cells,int) if cells is not None else self.cells
-        if fill is None: fill = np.nanmax(self.material) + 1
-        dtype = float if int(fill) != fill or self.material.dtype in np.sctypes['float'] else int

-        canvas = np.full(cells_,fill,dtype)
+        canvas = np.full(cells_,np.nanmax(self.material) + 1 if fill is None else fill,self.material.dtype)

         LL = np.clip( offset_, 0,np.minimum(self.cells, cells_+offset_))
         UR = np.clip( offset_+cells_, 0,np.minimum(self.cells, cells_+offset_))
@@ -1067,13 +1065,11 @@ class Grid:
         Updated grid-based geometry.

         """
-        def mp(entry, mapper):
-            return mapper[entry] if entry in mapper else entry
-
-        mp = np.vectorize(mp)
-        mapper = dict(zip(from_material,to_material))
-
-        return Grid(material = mp(self.material,mapper).reshape(self.cells),
+        material = self.material.copy()
+        for f,t in zip(from_material,to_material): # ToDo Python 3.10 has strict mode for zip
+            material[self.material==f] = t
+
+        return Grid(material = material,
                     size     = self.size,
                     origin   = self.origin,
                     comments = self.comments+[util.execution_stamp('Grid','substitute')],
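The substitute() rewrite above replaces the np.vectorize mapping with an explicit masked assignment. A self-contained sketch of that pattern (toy arrays, not the full Grid class):

    import numpy as np

    material = np.array([[0, 1],
                         [2, 1]])
    from_material, to_material = [1, 2], [5, 6]

    substituted = material.copy()
    for f,t in zip(from_material,to_material):
        substituted[material == f] = t      # masks refer to the original array, so replacements cannot cascade

    # substituted is now [[0, 5],
    #                     [6, 5]]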
@@ -981,7 +981,7 @@ class Result:
                 t = 'tensor'
                 if o is None: o = 'fro'
             else:
-                raise ValueError(f'invalid norm order {ord}')
+                raise ValueError(f'invalid shape of {x["label"]}')

             return {
                     'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
@@ -812,8 +812,7 @@ class Rotation:
         return Rotation.from_basis(R)

     @staticmethod
-    def from_parallel(a,b,
-                      **kwargs):
+    def from_parallel(a,b):
         """
         Initialize from pairs of two orthogonal lattice basis vectors.

@@ -963,8 +962,7 @@ class Rotation:
                  N = 500,
                  degrees = True,
                  fractions = True,
-                 rng_seed = None,
-                 **kwargs):
+                 rng_seed = None):
         """
         Sample discrete values from a binned orientation distribution function (ODF).

@@ -442,22 +442,24 @@ class VTK:

         mapper = vtk.vtkDataSetMapper()
         mapper.SetInputData(self.vtk_data)

         actor = vtk.vtkActor()
         actor.SetMapper(mapper)
+        actor.GetProperty().SetColor(230/255,150/255,68/255)

         ren = vtk.vtkRenderer()
+        ren.AddActor(actor)
+        ren.SetBackground(67/255,128/255,208/255)

         window = vtk.vtkRenderWindow()
         window.AddRenderer(ren)
-
-        ren.AddActor(actor)
-        ren.SetBackground(0.2,0.2,0.2)
-
         window.SetSize(width,height)
+        window.SetWindowName(util.execution_stamp('VTK','show'))

         iren = vtk.vtkRenderWindowInteractor()
         iren.SetRenderWindow(window)
-        iren.Initialize()
-        window.Render()
-        iren.Start()
+        if os.name == 'posix' and 'DISPLAY' not in os.environ:
+            print('Found no rendering device')
+        else:
+            window.Render()
+            iren.Start()
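The new guard in show() skips interactive rendering on a headless POSIX machine. A minimal sketch of the same check in isolation (the helper name has_display is assumed for illustration, it is not part of damask):

    import os

    def has_display() -> bool:
        """True if an interactive window can be opened (non-POSIX OS or X display available)."""
        return not (os.name == 'posix' and 'DISPLAY' not in os.environ)

    if has_display():
        ...                                  # open the render window here
    else:
        print('Found no rendering device')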
@@ -81,6 +81,7 @@ class TestColormap:
         assert Colormap.from_predefined('strain') == Colormap.from_predefined('strain')
         assert Colormap.from_predefined('strain') != Colormap.from_predefined('stress')
         assert Colormap.from_predefined('strain',N=128) != Colormap.from_predefined('strain',N=64)
+        assert not Colormap.from_predefined('strain',N=128) == 1

     @pytest.mark.parametrize('low,high',[((0,0,0),(1,1,1)),
                                          ([0,0,0],[1,1,1])])
@@ -40,6 +40,9 @@ class TestCrystal:
                     alpha=alpha,beta=beta,gamma=gamma)
         assert np.allclose(np.eye(3),np.einsum('ik,jk',c.basis_real,c.basis_reciprocal))

+    def test_basis_invalid(self):
+        with pytest.raises(KeyError):
+            Crystal(family='cubic').basis_real

     @pytest.mark.parametrize('keyFrame,keyLattice',[('uvw','direction'),('hkl','plane'),])
     @pytest.mark.parametrize('vector',np.array([
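A generic sketch of the pytest.raises pattern the new invalid-input tests use (standalone toy example, not the DAMASK test suite):

    import pytest

    def lookup(table, key):
        return table[key]                    # raises KeyError for unknown keys

    def test_lookup_invalid():
        with pytest.raises(KeyError):        # the test passes only if the call raises KeyError
            lookup({'a': 1}, 'b')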
@@ -44,6 +44,7 @@ class TestGrid:

     def test_equal(self,default):
         assert default == default
+        assert not default == 42

     def test_repr(self,default):
         print(default)
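Comparing a Grid against an unrelated object (here 42) should evaluate to False rather than raise. One way an __eq__ can support that is sketched below (illustrative only; the actual Grid.__eq__ may differ):

    import numpy as np

    class Toy:
        def __init__(self, material):
            self.material = np.asarray(material)

        def __eq__(self, other):
            if not isinstance(other, Toy):
                return NotImplemented        # Python then falls back to identity, so `toy == 42` is False
            return bool(np.array_equal(self.material, other.material))

    assert Toy([1, 2]) == Toy([1, 2])
    assert not Toy([1, 2]) == 42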
@@ -364,6 +364,11 @@ class TestOrientation:
         table.save(reference)
         assert np.allclose(P,Table.load(reference).get('Schmid'))

+    def test_Schmid_invalid(self):
+        with pytest.raises(KeyError):
+            Orientation(lattice='fcc').Schmid()
+

     ### vectorization tests ###

     @pytest.mark.parametrize('lattice',['hP','cI','cF']) # tI not included yet
@@ -505,3 +510,7 @@ class TestOrientation:
         for loc in np.random.randint(0,blend,(10,len(blend))):
             assert np.allclose(o[tuple(loc[:len(o.shape)])].to_pole(uvw=v[tuple(loc[-len(v.shape[:-1]):])]),
                                o.to_pole(uvw=v)[tuple(loc)])
+
+    def test_mul_invalid(self):
+        with pytest.raises(TypeError):
+            Orientation.from_random(lattice='cF')*np.ones(3)
@@ -102,6 +102,9 @@ class TestResult:
         with pytest.raises(AttributeError):
             default.view('invalid',True)

+    def test_add_invalid(self,default):
+        default.add_absolute('xxxx')
+
     def test_add_absolute(self,default):
         default.add_absolute('F_e')
         in_memory = np.abs(default.place('F_e'))
@@ -792,6 +792,11 @@ class TestRotation:
         R = Rotation.from_random(shape,rng_seed=1)
         assert R == R if shape is None else (R == R).all()

+    @pytest.mark.parametrize('shape',[None,5,(4,6)])
+    def test_allclose(self,shape):
+        R = Rotation.from_random(shape,rng_seed=1)
+        assert R.allclose(R)
+
     @pytest.mark.parametrize('shape',[None,5,(4,6)])
     def test_unequal(self,shape):
         R = Rotation.from_random(shape,rng_seed=1)
@@ -1124,3 +1129,7 @@ class TestRotation:
         weights_r = np.histogramdd(Eulers_r,steps,rng)[0].flatten(order='F')/N * np.sum(weights)

         assert np.sqrt(((weights_r - weights) ** 2).mean()) < 5
+
+    def test_mul_invalid(self):
+        with pytest.raises(TypeError):
+            Rotation.from_random()*np.ones(3)
@@ -28,6 +28,10 @@ class TestVTK:
     def _patch_execution_stamp(self, patch_execution_stamp):
         print('patched damask.util.execution_stamp')

+    def test_show(sef,default,monkeypatch):
+        monkeypatch.delenv('DISPLAY',raising=False)
+        default.show()
+
     def test_rectilinearGrid(self,tmp_path):
         cells = np.random.randint(5,10,3)*2
         size = np.random.random(3) + 1.0
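The new test_show removes DISPLAY through pytest's monkeypatch fixture so the headless branch of show() is exercised. A standalone sketch of that fixture usage (toy function, not the DAMASK code):

    import os

    def describe_display():
        return os.environ.get('DISPLAY', 'no display')

    def test_headless(monkeypatch):
        monkeypatch.delenv('DISPLAY', raising=False)   # pretend there is no X server; undone after the test
        assert describe_display() == 'no display'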
@@ -32,14 +32,14 @@ module CPFEM
 real(pReal), dimension (:,:,:,:), allocatable, private :: &
 CPFEM_dcsdE_knownGood !< known good tangent

-integer(pInt), public :: &
-cycleCounter = 0_pInt !< needs description
+integer, public :: &
+cycleCounter = 0 !< needs description

-integer(pInt), parameter, public :: &
-CPFEM_CALCRESULTS = 2_pInt**0_pInt, &
-CPFEM_AGERESULTS = 2_pInt**1_pInt, &
-CPFEM_BACKUPJACOBIAN = 2_pInt**2_pInt, &
-CPFEM_RESTOREJACOBIAN = 2_pInt**3_pInt
+integer, parameter, public :: &
+CPFEM_CALCRESULTS = 2**0, &
+CPFEM_AGERESULTS = 2**1, &
+CPFEM_BACKUPJACOBIAN = 2**2, &
+CPFEM_RESTOREJACOBIAN = 2**3

 type, private :: tNumerics
 integer :: &
@@ -134,12 +134,12 @@ end subroutine CPFEM_init
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyStress, jacobian)

-integer(pInt), intent(in) :: elFE, & !< FE element number
+integer, intent(in) :: elFE, & !< FE element number
 ip !< integration point number
 real(pReal), intent(in) :: dt !< time increment
 real(pReal), dimension (3,3), intent(in) :: ffn, & !< deformation gradient for t=t0
 ffn1 !< deformation gradient for t=t1
-integer(pInt), intent(in) :: mode !< computation mode 1: regular computation plus aging of results
+integer, intent(in) :: mode !< computation mode 1: regular computation plus aging of results
 real(pReal), intent(in) :: temperature_inp !< temperature
 real(pReal), dimension(6), intent(out) :: cauchyStress !< stress as 6 vector
 real(pReal), dimension(6,6), intent(out) :: jacobian !< jacobian as 66 tensor (Consistent tangent dcs/dE)
@@ -150,7 +150,7 @@ subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyS
 real(pReal), dimension (3,3,3,3) :: H_sym, &
 H

-integer(pInt) elCP, & ! crystal plasticity element number
+integer elCP, & ! crystal plasticity element number
 i, j, k, l, m, n, ph, homog, mySource,ce

 real(pReal), parameter :: ODD_STRESS = 1e15_pReal, & !< return value for stress if terminallyIll
@@ -171,17 +171,17 @@ subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyS
 print'(a,/)', '#############################################'; flush (6)
 endif

-if (iand(mode, CPFEM_BACKUPJACOBIAN) /= 0_pInt) &
+if (iand(mode, CPFEM_BACKUPJACOBIAN) /= 0) &
 CPFEM_dcsde_knownGood = CPFEM_dcsde
-if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) &
+if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0) &
 CPFEM_dcsde = CPFEM_dcsde_knownGood

-if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) call CPFEM_forward
+if (iand(mode, CPFEM_AGERESULTS) /= 0) call CPFEM_forward

 homogenization_F0(1:3,1:3,ce) = ffn
 homogenization_F(1:3,1:3,ce) = ffn1

-if (iand(mode, CPFEM_CALCRESULTS) /= 0_pInt) then
+if (iand(mode, CPFEM_CALCRESULTS) /= 0) then

 validCalculation: if (terminallyIll) then
 call random_number(rnd)
@@ -264,7 +264,7 @@ end subroutine CPFEM_forward
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_results(inc,time)

-integer(pInt), intent(in) :: inc
+integer, intent(in) :: inc
 real(pReal), intent(in) :: time

 call results_openJobFile
@@ -223,9 +223,9 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
 integer :: computationMode, i, node, CPnodeID
 integer(pI32) :: defaultNumThreadsInt !< default value set by Marc

-integer(pInt), save :: &
-theInc = -1_pInt, & !< needs description
-lastLovl = 0_pInt !< lovl in previous call to marc hypela2
+integer, save :: &
+theInc = -1, & !< needs description
+lastLovl = 0 !< lovl in previous call to marc hypela2
 real(pReal), save :: &
 theTime = 0.0_pReal, & !< needs description
 theDelta = 0.0_pReal
@@ -1862,8 +1862,8 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_

 integer, dimension(worldsize) :: &
 readSize !< contribution of all processes
-integer :: ierr
 integer :: hdferr
+integer(MPI_INTEGER_KIND) :: err_MPI

 !-------------------------------------------------------------------------------------------------
 ! creating a property list for transfer properties (is collective for MPI)
@@ -1877,8 +1877,8 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
 if (parallel) then
 call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
 if(hdferr < 0) error stop 'HDF5 error'
-call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
-if (ierr /= 0) error stop 'MPI error'
+call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,err_MPI) ! get total output size over each process
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 end if
 #endif
 myStart = int(0,HSIZE_T)
@@ -1956,7 +1956,8 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &

 integer, dimension(worldsize) :: writeSize !< contribution of all processes
 integer(HID_T) :: dcpl
-integer :: ierr, hdferr
+integer :: hdferr
+integer(MPI_INTEGER_KIND) :: err_MPI
 integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T

@@ -1977,8 +1978,8 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
 writeSize(worldrank+1) = int(myShape(ubound(myShape,1)))
 #ifdef PETSC
 if (parallel) then
-call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
-if (ierr /= 0) error stop 'MPI error'
+call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,err_MPI) ! get total output size over each process
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 end if
 #endif
 myStart = int(0,HSIZE_T)
@@ -62,6 +62,8 @@ module YAML_types
 tNode_get_byKey_as1dString => tNode_get_byKey_as1dString
 procedure :: &
 getKey => tNode_get_byIndex_asKey
+procedure :: &
+Keys => tNode_getKeys
 procedure :: &
 getIndex => tNode_get_byKey_asIndex
 procedure :: &
@@ -117,7 +119,7 @@ module YAML_types

 type, extends(tNode), public :: tList

-class(tItem), pointer :: first => null()
+class(tItem), pointer :: first => NULL()

 contains
 procedure :: asFormattedString => tList_asFormattedString
@@ -144,8 +146,8 @@ module YAML_types

 type :: tItem
 character(len=:), allocatable :: key
-class(tNode), pointer :: node => null()
-class(tItem), pointer :: next => null()
+class(tNode), pointer :: node => NULL()
+class(tItem), pointer :: next => NULL()

 contains
 final :: tItem_finalize
@@ -221,22 +223,22 @@ subroutine selfTest
 select type(s1)
 class is(tScalar)
 s1 = '2'
-endselect
+end select

 select type(s2)
 class is(tScalar)
 s2 = '3'
-endselect
+end select

 select type(s3)
 class is(tScalar)
 s3 = '4'
-endselect
+end select

 select type(s4)
 class is(tScalar)
 s4 = '5'
-endselect
+end select


 allocate(tList::l1)
@@ -249,14 +251,14 @@ subroutine selfTest
 if (any(dNeq(l1%as1dFloat(),[2.0_pReal,3.0_pReal]))) error stop 'tList_as1dFloat'
 if (n%get_asInt(1) /= 2) error stop 'byIndex_asInt'
 if (dNeq(n%get_asFloat(2),3.0_pReal)) error stop 'byIndex_asFloat'
-endselect
+end select

 allocate(tList::l3)
 select type(l3)
 class is(tList)
 call l3%append(s3)
 call l3%append(s4)
-endselect
+end select

 allocate(tList::l2)
 select type(l2)
@@ -332,9 +334,12 @@ function tNode_asScalar(self) result(scalar)
 class(tNode), intent(in), target :: self
 class(tScalar), pointer :: scalar


 select type(self)
 class is(tScalar)
 scalar => self
+class default
+nullify(scalar)
 end select

 end function tNode_asScalar
@@ -348,9 +353,12 @@ function tNode_asList(self) result(list)
 class(tNode), intent(in), target :: self
 class(tList), pointer :: list


 select type(self)
 class is(tList)
 list => self
+class default
+nullify(list)
 end select

 end function tNode_asList
@@ -364,9 +372,12 @@ function tNode_asDict(self) result(dict)
 class(tNode), intent(in), target :: self
 class(tDict), pointer :: dict


 select type(self)
 class is(tDict)
 dict => self
+class default
+nullify(dict)
 end select

 end function tNode_asDict
@@ -385,12 +396,13 @@ function tNode_get_byIndex(self,i) result(node)
 class(tItem), pointer :: item
 integer :: j


 select type(self)
 class is(tList)
 self_ => self%asList()
 class default
 call IO_error(706,ext_msg='Expected list')
-endselect
+end select

 item => self_%first

@@ -409,15 +421,14 @@ end function tNode_get_byIndex
 !--------------------------------------------------------------------------------------------------
 function tNode_get_byIndex_asFloat(self,i) result(nodeAsFloat)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
 integer, intent(in) :: i
 real(pReal) :: nodeAsFloat

-class(tNode), pointer :: node
 type(tScalar), pointer :: scalar

-node => self%get(i)
-select type(node)
+select type(node => self%get(i))
 class is(tScalar)
 scalar => node%asScalar()
 nodeAsFloat = scalar%asFloat()
@@ -433,15 +444,15 @@ end function tNode_get_byIndex_asFloat
 !--------------------------------------------------------------------------------------------------
 function tNode_get_byIndex_asInt(self,i) result(nodeAsInt)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
 integer, intent(in) :: i
 integer :: nodeAsInt

 class(tNode), pointer :: node
 type(tScalar), pointer :: scalar

-node => self%get(i)
-select type(node)
+select type(node => self%get(i))
 class is(tScalar)
 scalar => node%asScalar()
 nodeAsInt = scalar%asInt()
@@ -457,21 +468,20 @@ end function tNode_get_byIndex_asInt
 !--------------------------------------------------------------------------------------------------
 function tNode_get_byIndex_asBool(self,i) result(nodeAsBool)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
 integer, intent(in) :: i
 logical :: nodeAsBool

-class(tNode), pointer :: node
 type(tScalar), pointer :: scalar

-node => self%get(i)
-select type(node)
+select type(node => self%get(i))
 class is(tScalar)
 scalar => node%asScalar()
 nodeAsBool = scalar%asBool()
 class default
 call IO_error(706,ext_msg='Expected scalar Boolean')
-endselect
+end select

 end function tNode_get_byIndex_asBool
@@ -481,21 +491,20 @@ end function tNode_get_byIndex_asBool
 !--------------------------------------------------------------------------------------------------
 function tNode_get_byIndex_asString(self,i) result(nodeAsString)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
 integer, intent(in) :: i
 character(len=:), allocatable :: nodeAsString

-class(tNode), pointer :: node
 type(tScalar), pointer :: scalar

-node => self%get(i)
-select type(node)
+select type(node => self%get(i))
 class is(tScalar)
 scalar => node%asScalar()
 nodeAsString = scalar%asString()
 class default
 call IO_error(706,ext_msg='Expected scalar string')
-endselect
+end select

 end function tNode_get_byIndex_asString
@@ -505,21 +514,20 @@ end function tNode_get_byIndex_asString
 !--------------------------------------------------------------------------------------------------
 function tNode_get_byIndex_as1dFloat(self,i) result(nodeAs1dFloat)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
 integer, intent(in) :: i
 real(pReal), dimension(:), allocatable :: nodeAs1dFloat

-class(tNode), pointer :: node
 class(tList), pointer :: list

-node => self%get(i)
-select type(node)
+select type(node => self%get(i))
 class is(tList)
 list => node%asList()
 nodeAs1dFloat = list%as1dFloat()
 class default
 call IO_error(706,ext_msg='Expected list of floats')
-endselect
+end select

 end function tNode_get_byIndex_as1dFloat
@@ -529,21 +537,20 @@ end function tNode_get_byIndex_as1dFloat
 !--------------------------------------------------------------------------------------------------
 function tNode_get_byIndex_as1dInt(self,i) result(nodeAs1dInt)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
 integer, intent(in) :: i
 integer, dimension(:), allocatable :: nodeAs1dInt

-class(tNode), pointer :: node
 class(tList), pointer :: list

-node => self%get(i)
-select type(node)
+select type(node => self%get(i))
 class is(tList)
 list => node%asList()
 nodeAs1dInt = list%as1dInt()
 class default
 call IO_error(706,ext_msg='Expected list of integers')
-endselect
+end select

 end function tNode_get_byIndex_as1dInt
@@ -553,21 +560,20 @@ end function tNode_get_byIndex_as1dInt
 !--------------------------------------------------------------------------------------------------
 function tNode_get_byIndex_as1dBool(self,i) result(nodeAs1dBool)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
 integer, intent(in) :: i
 logical, dimension(:), allocatable :: nodeAs1dBool

-class(tNode), pointer :: node
 class(tList), pointer :: list

-node => self%get(i)
-select type(node)
+select type(node => self%get(i))
 class is(tList)
 list => node%asList()
 nodeAs1dBool = list%as1dBool()
 class default
 call IO_error(706,ext_msg='Expected list of Booleans')
-endselect
+end select

 end function tNode_get_byIndex_as1dBool
@@ -577,21 +583,20 @@ end function tNode_get_byIndex_as1dBool
 !--------------------------------------------------------------------------------------------------
 function tNode_get_byIndex_as1dString(self,i) result(nodeAs1dString)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
 integer, intent(in) :: i
 character(len=:), allocatable, dimension(:) :: nodeAs1dString

-class(tNode), pointer :: node
 type(tList), pointer :: list

-node => self%get(i)
-select type(node)
+select type(node => self%get(i))
 class is(tList)
 list => node%asList()
 nodeAs1dString = list%as1dString()
 class default
 call IO_error(706,ext_msg='Expected list of strings')
-endselect
+end select

 end function tNode_get_byIndex_as1dString
@@ -609,22 +614,50 @@ function tNode_get_byIndex_asKey(self,i) result(key)
 type(tDict), pointer :: dict
 type(tItem), pointer :: item


 select type(self)
 class is(tDict)
 dict => self%asDict()
 item => dict%first
 do j = 1, min(i,dict%length)-1
 item => item%next
-enddo
+end do
 class default
 call IO_error(706,ext_msg='Expected dict')
-endselect
+end select

 key = item%key

 end function tNode_get_byIndex_asKey


+!--------------------------------------------------------------------------------------------------
+!> @brief Get all keys from a dictionary
+!--------------------------------------------------------------------------------------------------
+function tNode_getKeys(self) result(keys)
+
+class(tNode), intent(in) :: self
+character(len=:), dimension(:), allocatable :: keys
+
+character(len=pStringLen), dimension(:), allocatable :: temp
+integer :: j, l
+
+
+allocate(temp(self%length))
+l = 0
+do j = 1, self%length
+temp(j) = self%getKey(j)
+l = max(len_trim(temp(j)),l)
+end do
+
+allocate(character(l)::keys(self%length))
+do j = 1, self%length
+keys(j) = trim(temp(j))
+end do
+
+end function tNode_getKeys
+
+
 !-------------------------------------------------------------------------------------------------
 !> @brief Checks if a given key/item is present in the dict/list
 !-------------------------------------------------------------------------------------------------
@ -658,7 +691,7 @@ function tNode_contains(self,k) result(exists)
|
||||||
enddo
|
enddo
|
||||||
class default
|
class default
|
||||||
call IO_error(706,ext_msg='Expected list or dict')
|
call IO_error(706,ext_msg='Expected list or dict')
|
||||||
endselect
|
end select
|
||||||
|
|
||||||
end function tNode_contains
|
end function tNode_contains
|
||||||
|
|
||||||
|
@ -686,7 +719,7 @@ function tNode_get_byKey(self,k,defaultVal) result(node)
|
||||||
self_ => self%asDict()
|
self_ => self%asDict()
|
||||||
class default
|
class default
|
||||||
call IO_error(706,ext_msg='Expected dict for key '//k)
|
call IO_error(706,ext_msg='Expected dict for key '//k)
|
||||||
endselect
|
end select
|
||||||
|
|
||||||
j = 1
|
j = 1
|
||||||
item => self_%first
|
item => self_%first
|
||||||
|
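The tNode_getKeys function added above collects every key of a dict into one allocatable character array, padded to the longest key. A minimal usage sketch, assuming the YAML_types module and an already parsed dict node named material (the variable names are illustrative only, not part of the change):

! sketch: list all keys of a parsed dict (assumes YAML_types and a dict node 'material')
character(len=:), dimension(:), allocatable :: keys
integer :: j

keys = material%getKeys()
do j = 1, size(keys)
  print '(a)', trim(keys(j))        ! trim removes the padding to the longest key
end do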
@@ -713,23 +746,22 @@ end function tNode_get_byKey
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_asFloat(self,k,defaultVal) result(nodeAsFloat)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
-real(pReal), intent(in),optional :: defaultVal
+real(pReal), intent(in), optional :: defaultVal
real(pReal) :: nodeAsFloat

-class(tNode), pointer :: node
type(tScalar), pointer :: scalar


if (self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tScalar)
scalar => node%asScalar()
nodeAsFloat = scalar%asFloat()
class default
call IO_error(706,ext_msg='Expected scalar float for key '//k)
-endselect
+end select
elseif (present(defaultVal)) then
nodeAsFloat = defaultVal
else
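This hunk and the following ones replace the temporary class(tNode) pointer with the associate-name form of select type, so the result of self%get(k) is typed and scoped inside the construct and the target attribute on self is no longer needed. A reduced sketch of that Fortran construct, reusing the names from the diff:

! sketch: associate-name SELECT TYPE; 'node' exists only inside the construct
select type(node => self%get(k))
  class is(tScalar)
    scalar => node%asScalar()
    nodeAsFloat = scalar%asFloat()
  class default
    call IO_error(706,ext_msg='Expected scalar float for key '//k)
end select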
@@ -744,23 +776,22 @@ end function tNode_get_byKey_asFloat
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_asInt(self,k,defaultVal) result(nodeAsInt)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
-integer, intent(in),optional :: defaultVal
+integer, intent(in), optional :: defaultVal
integer :: nodeAsInt

-class(tNode), pointer :: node
type(tScalar), pointer :: scalar


if (self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tScalar)
scalar => node%asScalar()
nodeAsInt = scalar%asInt()
class default
call IO_error(706,ext_msg='Expected scalar integer for key '//k)
-endselect
+end select
elseif (present(defaultVal)) then
nodeAsInt = defaultVal
else
@@ -775,23 +806,22 @@ end function tNode_get_byKey_asInt
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_asBool(self,k,defaultVal) result(nodeAsBool)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
-logical, intent(in),optional :: defaultVal
+logical, intent(in), optional :: defaultVal
logical :: nodeAsBool

-class(tNode), pointer :: node
type(tScalar), pointer :: scalar


if (self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tScalar)
scalar => node%asScalar()
nodeAsBool = scalar%asBool()
class default
call IO_error(706,ext_msg='Expected scalar Boolean for key '//k)
-endselect
+end select
elseif (present(defaultVal)) then
nodeAsBool = defaultVal
else
@@ -806,23 +836,22 @@ end function tNode_get_byKey_asBool
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_asString(self,k,defaultVal) result(nodeAsString)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
-character(len=*), intent(in),optional :: defaultVal
+character(len=*), intent(in), optional :: defaultVal
character(len=:), allocatable :: nodeAsString

-class(tNode), pointer :: node
type(tScalar), pointer :: scalar


if (self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tScalar)
scalar => node%asScalar()
nodeAsString = scalar%asString()
class default
call IO_error(706,ext_msg='Expected scalar string for key '//k)
-endselect
+end select
elseif (present(defaultVal)) then
nodeAsString = defaultVal
else
@@ -837,25 +866,24 @@ end function tNode_get_byKey_asString
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_as1dFloat(self,k,defaultVal,requiredSize) result(nodeAs1dFloat)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
real(pReal), intent(in), dimension(:), optional :: defaultVal
integer, intent(in), optional :: requiredSize

real(pReal), dimension(:), allocatable :: nodeAs1dFloat

-class(tNode), pointer :: node
type(tList), pointer :: list


if (self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tList)
list => node%asList()
nodeAs1dFloat = list%as1dFloat()
class default
call IO_error(706,ext_msg='Expected 1D float array for key '//k)
-endselect
+end select
elseif (present(defaultVal)) then
nodeAs1dFloat = defaultVal
else
@@ -874,25 +902,24 @@ end function tNode_get_byKey_as1dFloat
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_as2dFloat(self,k,defaultVal,requiredShape) result(nodeAs2dFloat)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
real(pReal), intent(in), dimension(:,:), optional :: defaultVal
integer, intent(in), dimension(2), optional :: requiredShape

real(pReal), dimension(:,:), allocatable :: nodeAs2dFloat

-class(tNode), pointer :: node
type(tList), pointer :: rows


if(self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tList)
rows => node%asList()
nodeAs2dFloat = rows%as2dFloat()
class default
call IO_error(706,ext_msg='Expected 2D float array for key '//k)
-endselect
+end select
elseif(present(defaultVal)) then
nodeAs2dFloat = defaultVal
else
@@ -911,24 +938,22 @@ end function tNode_get_byKey_as2dFloat
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_as1dInt(self,k,defaultVal,requiredSize) result(nodeAs1dInt)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
integer, dimension(:), intent(in), optional :: defaultVal
integer, intent(in), optional :: requiredSize
integer, dimension(:), allocatable :: nodeAs1dInt

-class(tNode), pointer :: node
type(tList), pointer :: list

if (self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tList)
list => node%asList()
nodeAs1dInt = list%as1dInt()
class default
call IO_error(706,ext_msg='Expected 1D integer array for key '//k)
-endselect
+end select
elseif (present(defaultVal)) then
nodeAs1dInt = defaultVal
else
@@ -947,23 +972,22 @@ end function tNode_get_byKey_as1dInt
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_as1dBool(self,k,defaultVal) result(nodeAs1dBool)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
logical, dimension(:), intent(in), optional :: defaultVal
logical, dimension(:), allocatable :: nodeAs1dBool

-class(tNode), pointer :: node
type(tList), pointer :: list


if (self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tList)
list => node%asList()
nodeAs1dBool = list%as1dBool()
class default
call IO_error(706,ext_msg='Expected 1D Boolean array for key '//k)
-endselect
+end select
elseif (present(defaultVal)) then
nodeAs1dBool = defaultVal
else
@@ -978,23 +1002,22 @@ end function tNode_get_byKey_as1dBool
!--------------------------------------------------------------------------------------------------
function tNode_get_byKey_as1dString(self,k,defaultVal) result(nodeAs1dString)

-class(tNode), intent(in), target :: self
+class(tNode), intent(in) :: self
character(len=*), intent(in) :: k
character(len=*), intent(in), dimension(:), optional :: defaultVal
character(len=:), allocatable, dimension(:) :: nodeAs1dString

-class(tNode), pointer :: node
type(tList), pointer :: list


if (self%contains(k)) then
-node => self%get(k)
-select type(node)
+select type(node => self%get(k))
class is(tList)
list => node%asList()
nodeAs1dString = list%as1dString()
class default
call IO_error(706,ext_msg='Expected 1D string array for key '//k)
-endselect
+end select
elseif (present(defaultVal)) then
nodeAs1dString = defaultVal
else
@@ -1019,7 +1042,7 @@ function output_as1dString(self) result(output) !ToDo: SR: Re
allocate(output(output_list%length))
do o = 1, output_list%length
output(o) = output_list%get_asString(o)
-enddo
+end do

end function output_as1dString

@@ -1042,7 +1065,7 @@ function tNode_get_byKey_asIndex(self,key) result(keyIndex)
do while (associated(item%next) .and. item%key /= key)
item => item%next
keyIndex = keyIndex+1
-enddo
+end do

if (item%key /= key) call IO_error(140,ext_msg=key)

@@ -1087,7 +1110,7 @@ recursive function tList_asFormattedString(self,indent) result(str)
if (i /= 1) str = str//repeat(' ',indent_)
str = str//'- '//item%node%asFormattedString(indent_+2)
item => item%next
-enddo
+end do

end function tList_asFormattedString

@@ -1119,9 +1142,9 @@ recursive function tDict_asFormattedString(self,indent) result(str)
str = str//trim(item%key)//': '//item%node%asFormattedString(indent_+len_trim(item%key)+2)
class default
str = str//trim(item%key)//':'//IO_EOL//repeat(' ',indent_+2)//item%node%asFormattedString(indent_+2)
-endselect
+end select
item => item%next
-enddo
+end do

end function tDict_asFormattedString

@@ -1190,13 +1213,14 @@ function tList_as1dFloat(self)
type(tItem), pointer :: item
type(tScalar), pointer :: scalar


allocate(tList_as1dFloat(self%length))
item => self%first
do i = 1, self%length
scalar => item%node%asScalar()
tList_as1dFloat(i) = scalar%asFloat()
item => item%next
-enddo
+end do

end function tList_as1dFloat

@@ -1213,6 +1237,7 @@ function tList_as2dFloat(self)
class(tNode), pointer :: row
type(tList), pointer :: row_data


row => self%get(1)
row_data => row%asList()
allocate(tList_as2dFloat(self%length,row_data%length))
@@ -1220,9 +1245,9 @@ function tList_as2dFloat(self)
do i=1,self%length
row => self%get(i)
row_data => row%asList()
-if(row_data%length /= size(tList_as2dFloat,2)) call IO_error(709,ext_msg='Varying number of columns')
+if (row_data%length /= size(tList_as2dFloat,2)) call IO_error(709,ext_msg='Varying number of columns')
tList_as2dFloat(i,:) = self%get_as1dFloat(i)
-enddo
+end do

end function tList_as2dFloat

@@ -1239,13 +1264,14 @@ function tList_as1dInt(self)
type(tItem), pointer :: item
type(tScalar), pointer :: scalar


allocate(tList_as1dInt(self%length))
item => self%first
do i = 1, self%length
scalar => item%node%asScalar()
tList_as1dInt(i) = scalar%asInt()
item => item%next
-enddo
+end do

end function tList_as1dInt

@@ -1262,13 +1288,14 @@ function tList_as1dBool(self)
type(tItem), pointer :: item
type(tScalar), pointer :: scalar


allocate(tList_as1dBool(self%length))
item => self%first
do i = 1, self%length
scalar => item%node%asScalar()
tList_as1dBool(i) = scalar%asBool()
item => item%next
-enddo
+end do

end function tList_as1dBool

@@ -1285,13 +1312,14 @@ function tList_as1dString(self)
type(tItem), pointer :: item
type(tScalar), pointer :: scalar


len_max = 0
item => self%first
do i = 1, self%length
scalar => item%node%asScalar()
len_max = max(len_max, len_trim(scalar%asString()))
item => item%next
-enddo
+end do

allocate(character(len=len_max) :: tList_as1dString(self%length))
item => self%first
@@ -151,7 +151,7 @@ pure logical function validBase64(base64_str)
l = len(base64_str,pI64)
validBase64 = .true.

-if(mod(l,4_pI64)/=0_pI64 .or. l < 4_pInt) validBase64 = .false.
+if(mod(l,4_pI64)/=0_pI64 .or. l < 4_pI64) validBase64 = .false.
if(verify(base64_str(:l-2_pI64),base64_encoding, kind=pI64) /= 0_pI64) validBase64 = .false.
if(verify(base64_str(l-1_pI64:),base64_encoding//'=',kind=pI64) /= 0_pI64) validBase64 = .false.

@@ -75,7 +75,6 @@ program DAMASK_grid
integer :: &
i, j, m, field, &
errorID = 0, &
-ierr,&
cutBackLevel = 0, & !< cut back level \f$ t = \frac{t_{inc}}{2^l} \f$
stepFraction = 0, & !< fraction of current time interval
l = 0, & !< current load case
@@ -86,6 +85,7 @@ program DAMASK_grid
nActiveFields = 0, &
maxCutBack, & !< max number of cut backs
stagItMax !< max number of field level staggered iterations
+integer(MPI_INTEGER_KIND) :: err_MPI
character(len=pStringLen) :: &
incInfo

@@ -455,23 +455,23 @@ program DAMASK_grid
print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' NOT converged'
endif; flush(IO_STDOUT)

-call MPI_Allreduce(interface_SIGUSR1,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(interface_SIGUSR1,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
-if (ierr /= 0) error stop 'MPI error'
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (mod(inc,loadCases(l)%f_out) == 0 .or. signal) then
print'(/,1x,a)', '... writing results to file ...............................................'
flush(IO_STDOUT)
call CPFEM_results(totalIncsCounter,t)
endif
if (signal) call interface_setSIGUSR1(.false.)
-call MPI_Allreduce(interface_SIGUSR2,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(interface_SIGUSR2,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
-if (ierr /= 0) error stop 'MPI error'
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (mod(inc,loadCases(l)%f_restart) == 0 .or. signal) then
call mechanical_restartWrite
call CPFEM_restartWrite
endif
if (signal) call interface_setSIGUSR2(.false.)
-call MPI_Allreduce(interface_SIGTERM,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(interface_SIGTERM,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
-if (ierr /= 0) error stop 'MPI error'
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (signal) exit loadCaseLooping
endif skipping

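Throughout the grid solver sources, plain integer ierr arguments are replaced by an err_MPI variable of kind MPI_INTEGER_KIND, and count/root arguments become literals of that kind, so every call matches the integer kind the MPI library expects. Condensed from the hunks above, the recurring pattern is (a sketch, not a new API):

! pattern used for every MPI call in these files
integer(MPI_INTEGER_KIND) :: err_MPI

call MPI_Bcast(geomSize,3_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'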
@@ -62,8 +62,8 @@ subroutine discretization_grid_init(restart)

integer :: &
j, &
-debug_element, debug_ip, &
+debug_element, debug_ip
-ierr
+integer(MPI_INTEGER_KIND) :: err_MPI
integer(C_INTPTR_T) :: &
devNull, z, z_offset
integer, dimension(worldsize) :: &
@@ -88,13 +88,13 @@ subroutine discretization_grid_init(restart)
end if


-call MPI_Bcast(grid,3,MPI_INTEGER,0,MPI_COMM_WORLD, ierr)
+call MPI_Bcast(grid,3_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
-if (ierr /= 0) error stop 'MPI error'
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (grid(1) < 2) call IO_error(844, ext_msg='cells(1) must be larger than 1')
-call MPI_Bcast(geomSize,3,MPI_DOUBLE,0,MPI_COMM_WORLD, ierr)
+call MPI_Bcast(geomSize,3_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
-if (ierr /= 0) error stop 'MPI error'
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-call MPI_Bcast(origin,3,MPI_DOUBLE,0,MPI_COMM_WORLD, ierr)
+call MPI_Bcast(origin,3_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
-if (ierr /= 0) error stop 'MPI error'
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

print'(/,1x,a,3(i12,1x))', 'cells a b c: ', grid
print '(1x,a,3(es12.5,1x))', 'size x y z: ', geomSize
@@ -118,14 +118,17 @@ subroutine discretization_grid_init(restart)
myGrid = [grid(1:2),grid3]
mySize = [geomSize(1:2),size3]

-call MPI_Gather(product(grid(1:2))*grid3Offset,1,MPI_INTEGER,displs, 1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
-call MPI_Gather(product(myGrid), 1,MPI_INTEGER,sendcounts,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Gather(product(grid(1:2))*grid3Offset, 1_MPI_INTEGER_KIND,MPI_INTEGER,displs,&
+1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+call MPI_Gather(product(myGrid), 1_MPI_INTEGER_KIND,MPI_INTEGER,sendcounts,&
+1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

allocate(materialAt(product(myGrid)))
-call MPI_Scatterv(materialAt_global,sendcounts,displs,MPI_INTEGER,materialAt,size(materialAt),MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Scatterv(materialAt_global,sendcounts,displs,MPI_INTEGER,materialAt,size(materialAt),&
+MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

call discretization_init(materialAt, &
IPcoordinates0(myGrid,mySize,grid3Offset), &
@@ -307,7 +310,7 @@ subroutine readVTI(grid,geomSize,origin,material, &
case('Float64')
as_Int = int(prec_bytesToC_DOUBLE (asBytes(base64_str,headerType,compressed)))
case default
-call IO_error(844_pInt,ext_msg='unknown data type: '//trim(dataType))
+call IO_error(844,ext_msg='unknown data type: '//trim(dataType))
end select

end function as_Int

@@ -335,7 +338,7 @@ subroutine readVTI(grid,geomSize,origin,material, &
case('Float64')
as_pReal = real(prec_bytesToC_DOUBLE (asBytes(base64_str,headerType,compressed)),pReal)
case default
-call IO_error(844_pInt,ext_msg='unknown data type: '//trim(dataType))
+call IO_error(844,ext_msg='unknown data type: '//trim(dataType))
end select

end function as_pReal

@@ -69,7 +69,8 @@ subroutine grid_damage_spectral_init()
PetscInt, dimension(0:worldsize-1) :: localK
DM :: damage_grid
Vec :: uBound, lBound
-PetscErrorCode :: ierr
+integer(MPI_INTEGER_KIND) :: err_MPI
+PetscErrorCode :: err_PETSc
class(tNode), pointer :: &
num_grid, &
num_generic
@@ -99,50 +100,51 @@ subroutine grid_damage_spectral_init()
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-damage_snes_type newtonls -damage_snes_mf &
-&-damage_snes_ksp_ew -damage_ksp_type fgmres',ierr)
+&-damage_snes_ksp_ew -damage_ksp_type fgmres',err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
+call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)

!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
-call SNESCreate(PETSC_COMM_WORLD,damage_snes,ierr); CHKERRQ(ierr)
+call SNESCreate(PETSC_COMM_WORLD,damage_snes,err_PETSc); CHKERRQ(err_PETSc)
-call SNESSetOptionsPrefix(damage_snes,'damage_',ierr);CHKERRQ(ierr)
+call SNESSetOptionsPrefix(damage_snes,'damage_',err_PETSc);CHKERRQ(err_PETSc)
-localK = 0
+localK = 0_pPetscInt
-localK(worldrank) = grid3
+localK(worldrank) = int(grid3,pPetscInt)
-call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3D(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
-grid(1),grid(2),grid(3), & ! global grid
+int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
-1, 1, worldsize, &
+1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
-1, 0, & ! #dof (damage phase field), ghost boundary width (domain overlap)
+1_pPetscInt, 0_pPetscInt, & ! #dof (phi, scalar), ghost boundary width (domain overlap)
-[grid(1)],[grid(2)],localK, & ! local grid
+[int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
-damage_grid,ierr) ! handle, error
+damage_grid,err_PETSc) ! handle, error
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call SNESSetDM(damage_snes,damage_grid,ierr); CHKERRQ(ierr) ! connect snes to da
+call SNESSetDM(damage_snes,damage_grid,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
-call DMsetFromOptions(damage_grid,ierr); CHKERRQ(ierr)
+call DMsetFromOptions(damage_grid,err_PETSc); CHKERRQ(err_PETSc)
-call DMsetUp(damage_grid,ierr); CHKERRQ(ierr)
+call DMsetUp(damage_grid,err_PETSc); CHKERRQ(err_PETSc)
-call DMCreateGlobalVector(damage_grid,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 1, i.e. every def grad tensor)
+call DMCreateGlobalVector(damage_grid,solution_vec,err_PETSc); CHKERRQ(err_PETSc) ! global solution vector (grid x 1, i.e. every def grad tensor)
-call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
+call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call SNESSetFromOptions(damage_snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
+call SNESSetFromOptions(damage_snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
-call SNESGetType(damage_snes,snes_type,ierr); CHKERRQ(ierr)
+call SNESGetType(damage_snes,snes_type,err_PETSc); CHKERRQ(err_PETSc)
if (trim(snes_type) == 'vinewtonrsls' .or. &
trim(snes_type) == 'vinewtonssls') then
-call DMGetGlobalVector(damage_grid,lBound,ierr); CHKERRQ(ierr)
+call DMGetGlobalVector(damage_grid,lBound,err_PETSc); CHKERRQ(err_PETSc)
-call DMGetGlobalVector(damage_grid,uBound,ierr); CHKERRQ(ierr)
+call DMGetGlobalVector(damage_grid,uBound,err_PETSc); CHKERRQ(err_PETSc)
-call VecSet(lBound,0.0_pReal,ierr); CHKERRQ(ierr)
+call VecSet(lBound,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
-call VecSet(uBound,1.0_pReal,ierr); CHKERRQ(ierr)
+call VecSet(uBound,1.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
-call SNESVISetVariableBounds(damage_snes,lBound,uBound,ierr) ! variable bounds for variational inequalities like contact mechanics, damage etc.
+call SNESVISetVariableBounds(damage_snes,lBound,uBound,err_PETSc) ! variable bounds for variational inequalities like contact mechanics, damage etc.
-call DMRestoreGlobalVector(damage_grid,lBound,ierr); CHKERRQ(ierr)
+call DMRestoreGlobalVector(damage_grid,lBound,err_PETSc); CHKERRQ(err_PETSc)
-call DMRestoreGlobalVector(damage_grid,uBound,ierr); CHKERRQ(ierr)
+call DMRestoreGlobalVector(damage_grid,uBound,err_PETSc); CHKERRQ(err_PETSc)
end if

!--------------------------------------------------------------------------------------------------
! init fields
-call DMDAGetCorners(damage_grid,xstart,ystart,zstart,xend,yend,zend,ierr)
+call DMDAGetCorners(damage_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
xend = xstart + xend - 1
yend = ystart + yend - 1
zend = zstart + zend - 1
@@ -150,7 +152,7 @@ subroutine grid_damage_spectral_init()
allocate(phi_lastInc(grid(1),grid(2),grid3), source=1.0_pReal)
allocate(phi_stagInc(grid(1),grid(2),grid3), source=1.0_pReal)

-call VecSet(solution_vec,1.0_pReal,ierr); CHKERRQ(ierr)
+call VecSet(solution_vec,1.0_pReal,err_PETSc); CHKERRQ(err_PETSc)

call updateReference

@@ -169,7 +171,8 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
PetscInt :: devNull
PetscReal :: phi_min, phi_max, stagNorm

-PetscErrorCode :: ierr
+integer(MPI_INTEGER_KIND) :: err_MPI
+PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason

solution%converged =.false.
@@ -178,8 +181,10 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
! set module wide availabe data
params%Delta_t = Delta_t

-call SNESSolve(damage_snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
-call SNESGetConvergedReason(damage_snes,reason,ierr); CHKERRQ(ierr)
+call SNESSolve(damage_snes,PETSC_NULL_VEC,solution_vec,err_PETSc)
+CHKERRQ(err_PETSc)
+call SNESGetConvergedReason(damage_snes,reason,err_PETSc)
+CHKERRQ(err_PETSc)

if (reason < 1) then
solution%converged = .false.
@@ -189,9 +194,11 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(phi_current - phi_stagInc))
-call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi_current))
-call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
phi_stagInc = phi_current

!--------------------------------------------------------------------------------------------------
@@ -202,8 +209,8 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
call homogenization_set_phi(phi_current(i,j,k),ce)
end do; end do; end do

-call VecMin(solution_vec,devNull,phi_min,ierr); CHKERRQ(ierr)
+call VecMin(solution_vec,devNull,phi_min,err_PETSc); CHKERRQ(err_PETSc)
-call VecMax(solution_vec,devNull,phi_max,ierr); CHKERRQ(ierr)
+call VecMax(solution_vec,devNull,phi_max,err_PETSc); CHKERRQ(err_PETSc)
if (solution%converged) &
print'(/,1x,a)', '... nonlocal damage converged .....................................'
print'(/,1x,a,f8.6,2x,f8.6,2x,e11.4)', 'Minimum|Maximum|Delta Damage = ', phi_min, phi_max, stagNorm
@@ -222,17 +229,19 @@ subroutine grid_damage_spectral_forward(cutBack)
integer :: i, j, k, ce
DM :: dm_local
PetscScalar, dimension(:,:,:), pointer :: x_scal
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc

if (cutBack) then
phi_current = phi_lastInc
phi_stagInc = phi_lastInc
!--------------------------------------------------------------------------------------------------
! reverting damage field state
-call SNESGetDM(damage_snes,dm_local,ierr); CHKERRQ(ierr)
+call SNESGetDM(damage_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc)
-call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr) !< get the data out of PETSc to work with
+call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,err_PETSc) !< get the data out of PETSc to work with
+CHKERRQ(err_PETSc)
x_scal(xstart:xend,ystart:yend,zstart:zend) = phi_current
-call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr)
+call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,err_PETSc)
+CHKERRQ(err_PETSc)
ce = 0
do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
ce = ce + 1
@@ -249,7 +258,7 @@ end subroutine grid_damage_spectral_forward
!--------------------------------------------------------------------------------------------------
!> @brief forms the spectral damage residual vector
!--------------------------------------------------------------------------------------------------
-subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
+subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)

DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
in
@@ -260,7 +269,7 @@ subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
f_scal
PetscObject :: dummy
-PetscErrorCode :: ierr
+PetscErrorCode :: dummy_err
integer :: i, j, k, ce


@@ -311,7 +320,8 @@ end subroutine formResidual
!--------------------------------------------------------------------------------------------------
subroutine updateReference()

-integer :: ce,ierr
+integer :: ce
+integer(MPI_INTEGER_KIND) :: err_MPI


K_ref = 0.0_pReal
@@ -322,9 +332,11 @@ subroutine updateReference()
end do

K_ref = K_ref*wgt
-call MPI_Allreduce(MPI_IN_PLACE,K_ref,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
mu_ref = mu_ref*wgt
-call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

end subroutine updateReference

@@ -107,7 +107,8 @@ subroutine grid_mechanical_FEM_init
1.0_pReal,-1.0_pReal,-1.0_pReal,-1.0_pReal, &
1.0_pReal, 1.0_pReal, 1.0_pReal, 1.0_pReal], [4,8])
real(pReal), dimension(3,3,3,3) :: devNull
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
+integer(MPI_INTEGER_KIND) :: err_MPI
PetscScalar, pointer, dimension(:,:,:,:) :: &
u_current,u_lastInc
PetscInt, dimension(0:worldsize-1) :: localK
@@ -145,12 +146,11 @@ subroutine grid_mechanical_FEM_init
! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS, &
'-mechanical_snes_type newtonls -mechanical_ksp_type fgmres &
-&-mechanical_ksp_max_it 25 -mechanical_pc_type ml &
-&-mechanical_mg_levels_ksp_type chebyshev', &
-ierr)
-CHKERRQ(ierr)
-call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
-CHKERRQ(ierr)
+&-mechanical_ksp_max_it 25 -mechanical_mg_levels_ksp_type chebyshev', &
+err_PETSc)
+CHKERRQ(err_PETSc)
+call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
+CHKERRQ(err_PETSc)

!--------------------------------------------------------------------------------------------------
! allocate global fields
@@ -160,59 +160,60 @@ subroutine grid_mechanical_FEM_init

!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
-call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,ierr)
+call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',ierr)
+call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-localK = 0
+localK = 0_pPetscInt
-localK(worldrank) = grid3
+localK(worldrank) = int(grid3,pPetscInt)
-call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3d(PETSC_COMM_WORLD, &
DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, &
DMDA_STENCIL_BOX, &
-grid(1),grid(2),grid(3), &
+int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
-1, 1, worldsize, &
+1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
-3, 1, &
+3_pPetscInt, 1_pPetscInt, & ! #dof (u, vector), ghost boundary width (domain overlap)
-[grid(1)],[grid(2)],localK, &
+[int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
-mechanical_grid,ierr)
+mechanical_grid,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call SNESSetDM(mechanical_snes,mechanical_grid,ierr)
+call SNESSetDM(mechanical_snes,mechanical_grid,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMsetFromOptions(mechanical_grid,ierr)
+call DMsetFromOptions(mechanical_grid,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMsetUp(mechanical_grid,ierr)
+call DMsetUp(mechanical_grid,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMDASetUniformCoordinates(mechanical_grid,0.0_pReal,geomSize(1),0.0_pReal,geomSize(2),0.0_pReal,geomSize(3),ierr)
+call DMDASetUniformCoordinates(mechanical_grid,0.0_pReal,geomSize(1),0.0_pReal,geomSize(2),0.0_pReal,geomSize(3),err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMCreateGlobalVector(mechanical_grid,solution_current,ierr)
+call DMCreateGlobalVector(mechanical_grid,solution_current,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMCreateGlobalVector(mechanical_grid,solution_lastInc,ierr)
+call DMCreateGlobalVector(mechanical_grid,solution_lastInc,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMCreateGlobalVector(mechanical_grid,solution_rate ,ierr)
+call DMCreateGlobalVector(mechanical_grid,solution_rate ,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMSNESSetFunctionLocal(mechanical_grid,formResidual,PETSC_NULL_SNES,ierr)
+call DMSNESSetFunctionLocal(mechanical_grid,formResidual,PETSC_NULL_SNES,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMSNESSetJacobianLocal(mechanical_grid,formJacobian,PETSC_NULL_SNES,ierr)
+call DMSNESSetJacobianLocal(mechanical_grid,formJacobian,PETSC_NULL_SNES,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call SNESSetConvergenceTest(mechanical_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "_converged"
+call SNESSetConvergenceTest(mechanical_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "_converged"
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1), ierr) ! ignore linear solve failures
+call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1_pPetscInt), err_PETSc) ! ignore linear solve failures
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call SNESSetFromOptions(mechanical_snes,ierr) ! pull it all together with additional cli arguments
+call SNESSetFromOptions(mechanical_snes,err_PETSc) ! pull it all together with additional cli arguments
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)

!--------------------------------------------------------------------------------------------------
! init fields
-call VecSet(solution_current,0.0_pReal,ierr);CHKERRQ(ierr)
+call VecSet(solution_current,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
-call VecSet(solution_lastInc,0.0_pReal,ierr);CHKERRQ(ierr)
+call VecSet(solution_lastInc,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
-call VecSet(solution_rate ,0.0_pReal,ierr);CHKERRQ(ierr)
+call VecSet(solution_rate ,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
-call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
+call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
+call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)

-call DMDAGetCorners(mechanical_grid,xstart,ystart,zstart,xend,yend,zend,ierr) ! local grid extent
+call DMDAGetCorners(mechanical_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc) ! local grid extent
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
xend = xstart+xend-1
yend = ystart+yend-1
zend = zstart+zend-1
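The PETSc side gets the analogous treatment: return codes go into a dedicated PetscErrorCode err_PETSc checked with CHKERRQ, and integer arguments are cast to pPetscInt so they stay valid when PETSc is built with 64-bit indices. A condensed sketch of the call pattern used throughout the hunk above:

! pattern used for every PETSc call in these files
PetscErrorCode :: err_PETSc

call DMsetUp(mechanical_grid,err_PETSc)
CHKERRQ(err_PETSc)
localK(worldrank) = int(grid3,pPetscInt)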
@@ -240,17 +241,17 @@ subroutine grid_mechanical_FEM_init
 groupHandle = HDF5_openGroup(fileHandle,'solver')

 call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
- call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
- if(ierr /=0) error stop 'MPI error'
+ call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+ if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
- call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
- if(ierr /=0) error stop 'MPI error'
+ call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+ if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
- call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
- if(ierr /=0) error stop 'MPI error'
+ call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+ if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
- call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
- if(ierr /=0) error stop 'MPI error'
+ call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+ if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F,groupHandle,'F')
 call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
 call HDF5_read(u_current,groupHandle,'u')
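Besides the rename to err_MPI, every MPI call above now passes count and root with an explicit MPI integer kind and is followed by a check. A hedged, self-contained sketch of that pattern (buffer and routine names are placeholders; MPI_INTEGER_KIND is assumed to be provided by the MPI library's Fortran interface, as the patch relies on, and MPI_DOUBLE_PRECISION is used here where the patch uses MPI_DOUBLE):

  ! Sketch only: the MPI error-checking idiom introduced by this patch.
  subroutine broadcast_settings(P_aim)
    use MPI
    implicit none
    real(8), dimension(3,3), intent(inout) :: P_aim
    integer(MPI_INTEGER_KIND) :: err_MPI     ! error flag with the MPI integer kind

    call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE_PRECISION, &
                   0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  end subroutine broadcast_settings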
@@ -266,19 +267,19 @@ subroutine grid_mechanical_FEM_init
 call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, &  ! stress field, stress avg, global average of stiffness and (min+max)/2
                                     F, &                                ! target F
                                     0.0_pReal)                          ! time increment
- call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
- CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+ CHKERRQ(err_PETSc)
- call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
- CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+ CHKERRQ(err_PETSc)

 restartRead2: if (interface_restartInc > 0) then
   print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
   call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
-  call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-  if(ierr /=0) error stop 'MPI error'
+  call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
-  call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-  if(ierr /=0) error stop 'MPI error'
+  call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)

@@ -301,7 +302,7 @@ function grid_mechanical_FEM_solution(incInfoIn) result(solution)
   solution
 !--------------------------------------------------------------------------------------------------
 ! PETSc Data
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 SNESConvergedReason :: reason

 incInfo = incInfoIn

@@ -312,13 +313,13 @@ function grid_mechanical_FEM_solution(incInfoIn) result(solution)
 !--------------------------------------------------------------------------------------------------
 ! solve BVP
- call SNESsolve(mechanical_snes,PETSC_NULL_VEC,solution_current,ierr)
- CHKERRQ(ierr)
+ call SNESsolve(mechanical_snes,PETSC_NULL_VEC,solution_current,err_PETSc)
+ CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! check convergence
- call SNESGetConvergedReason(mechanical_snes,reason,ierr)
- CHKERRQ(ierr)
+ call SNESGetConvergedReason(mechanical_snes,reason,err_PETSc)
+ CHKERRQ(err_PETSc)

 solution%converged = reason > 0
 solution%iterationsNeeded = totalIter

@@ -348,15 +349,15 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
   deformation_BC
 type(rotation), intent(in) :: &
   rotation_BC
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 PetscScalar, pointer, dimension(:,:,:,:) :: &
   u_current,u_lastInc

- call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
- CHKERRQ(ierr)
- call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
- CHKERRQ(ierr)
+ call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+ CHKERRQ(err_PETSc)

 if (cutBack) then
   C_volAvg = C_volAvgLastInc

@@ -380,13 +381,14 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
 endif

 if (guess) then
-  call VecWAXPY(solution_rate,-1.0_pReal,solution_lastInc,solution_current,ierr)
-  CHKERRQ(ierr)
-  call VecScale(solution_rate,1.0_pReal/Delta_t_old,ierr); CHKERRQ(ierr)
+  call VecWAXPY(solution_rate,-1.0_pReal,solution_lastInc,solution_current,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call VecScale(solution_rate,1.0_pReal/Delta_t_old,err_PETSc)
+  CHKERRQ(err_PETSc)
 else
-  call VecSet(solution_rate,0.0_pReal,ierr); CHKERRQ(ierr)
+  call VecSet(solution_rate,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
 endif
- call VecCopy(solution_current,solution_lastInc,ierr); CHKERRQ(ierr)
+ call VecCopy(solution_current,solution_lastInc,err_PETSc); CHKERRQ(err_PETSc)

 F_lastInc = F

@@ -401,12 +403,12 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
 if (stress_BC%myType=='dot_P') P_aim = P_aim &
                                      + merge(.0_pReal,stress_BC%values,stress_BC%mask)*Delta_t

- call VecAXPY(solution_current,Delta_t,solution_rate,ierr); CHKERRQ(ierr)
+ call VecAXPY(solution_current,Delta_t,solution_rate,err_PETSc)
+ CHKERRQ(err_PETSc)
- call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
- CHKERRQ(ierr)
- call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
- CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+ CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! set module wide available data

@@ -432,15 +434,15 @@ end subroutine grid_mechanical_FEM_updateCoords
 !--------------------------------------------------------------------------------------------------
 subroutine grid_mechanical_FEM_restartWrite

- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 integer(HID_T) :: fileHandle, groupHandle
 PetscScalar, dimension(:,:,:,:), pointer :: u_current,u_lastInc

- call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
- CHKERRQ(ierr)
- call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
- CHKERRQ(ierr)
+ call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+ CHKERRQ(err_PETSc)

 print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT)

@@ -466,10 +468,10 @@ subroutine grid_mechanical_FEM_restartWrite
   call HDF5_closeFile(fileHandle)
 endif

- call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
- CHKERRQ(ierr)
- call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
- CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+ CHKERRQ(err_PETSc)

 end subroutine grid_mechanical_FEM_restartWrite

@@ -477,7 +479,7 @@ end subroutine grid_mechanical_FEM_restartWrite
 !--------------------------------------------------------------------------------------------------
 !> @brief convergence check
 !--------------------------------------------------------------------------------------------------
- subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,ierr)
+ subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,err_PETSc)

 SNES :: snes_local
 PetscInt, intent(in) :: PETScIter

@@ -487,7 +489,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,ierr)
   fnorm
 SNESConvergedReason :: reason
 PetscObject :: dummy
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 real(pReal) :: &
   err_div, &
   divTol, &

@@ -521,7 +523,7 @@ end subroutine converged
 !> @brief forms the residual vector
 !--------------------------------------------------------------------------------------------------
 subroutine formResidual(da_local,x_local, &
-                        f_local,dummy,ierr)
+                        f_local,dummy,err_PETSc)

 DM :: da_local
 Vec :: x_local, f_local

@@ -532,13 +534,14 @@ subroutine formResidual(da_local,x_local, &
   PETScIter, &
   nfuncs
 PetscObject :: dummy
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
+ integer(MPI_INTEGER_KIND) :: err_MPI
 real(pReal), dimension(3,3,3,3) :: devNull

- call SNESGetNumberFunctionEvals(mechanical_snes,nfuncs,ierr)
- CHKERRQ(ierr)
- call SNESGetIterationNumber(mechanical_snes,PETScIter,ierr)
- CHKERRQ(ierr)
+ call SNESGetNumberFunctionEvals(mechanical_snes,nfuncs,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call SNESGetIterationNumber(mechanical_snes,PETScIter,err_PETSc)
+ CHKERRQ(err_PETSc)

 if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1  ! new increment

@@ -556,7 +559,7 @@ subroutine formResidual(da_local,x_local, &
 !--------------------------------------------------------------------------------------------------
 ! get deformation gradient
- call DMDAVecGetArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+ call DMDAVecGetArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
 do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
   ctr = 0
   do kk = 0, 1; do jj = 0, 1; do ii = 0, 1

@@ -566,14 +569,15 @@ subroutine formResidual(da_local,x_local, &
   ii = i-xstart+1; jj = j-ystart+1; kk = k-zstart+1
   F(1:3,1:3,ii,jj,kk) = params%rotation_BC%rotate(F_aim,active=.true.) + transpose(matmul(BMat,x_elem))
 enddo; enddo; enddo
- call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! evaluate constitutive response
 call utilities_constitutiveResponse(P_current,&
                                     P_av,C_volAvg,devNull, &
                                     F,params%Delta_t,params%rotation_BC)
- call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
+ call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+ if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

 !--------------------------------------------------------------------------------------------------
 ! stress BC handling

@@ -582,9 +586,9 @@ subroutine formResidual(da_local,x_local, &
 !--------------------------------------------------------------------------------------------------
 ! constructing residual
- call VecSet(f_local,0.0_pReal,ierr);CHKERRQ(ierr)
- call DMDAVecGetArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
- call DMDAVecGetArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+ call VecSet(f_local,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+ call DMDAVecGetArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
+ call DMDAVecGetArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
 ele = 0
 do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
   ctr = 0

@@ -604,12 +608,12 @@ subroutine formResidual(da_local,x_local, &
     f_scal(0:2,i+ii,j+jj,k+kk) = f_scal(0:2,i+ii,j+jj,k+kk) + f_elem(ctr,1:3)
   enddo; enddo; enddo
 enddo; enddo; enddo
- call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
- call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
+ call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! applying boundary conditions
- call DMDAVecGetArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+ call DMDAVecGetArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
 if (zstart == 0) then
   f_scal(0:2,xstart,ystart,zstart) = 0.0
   f_scal(0:2,xend+1,ystart,zstart) = 0.0

@@ -622,7 +626,7 @@ subroutine formResidual(da_local,x_local, &
   f_scal(0:2,xstart,yend+1,zend+1) = 0.0
   f_scal(0:2,xend+1,yend+1,zend+1) = 0.0
 endif
- call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)

 end subroutine formResidual

@@ -630,7 +634,7 @@ end subroutine formResidual
 !--------------------------------------------------------------------------------------------------
 !> @brief forms the FEM stiffness matrix
 !--------------------------------------------------------------------------------------------------
- subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
+ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,err_PETSc)

 DM :: da_local
 Vec :: x_local, coordinates

@@ -644,15 +648,17 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
 PetscScalar :: diag
 PetscObject :: dummy
 MatNullSpace :: matnull
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc

 BMatFull = 0.0
 BMatFull(1:3,1 :8 ) = BMat
 BMatFull(4:6,9 :16) = BMat
 BMatFull(7:9,17:24) = BMat
- call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,ierr); CHKERRQ(ierr)
- call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,ierr); CHKERRQ(ierr)
- call MatZeroEntries(Jac,ierr); CHKERRQ(ierr)
+ call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call MatZeroEntries(Jac,err_PETSc); CHKERRQ(err_PETSc)
 ele = 0
 do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
   ctr = 0

@@ -687,34 +693,42 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
            matmul(transpose(BMatFull), &
                   matmul(reshape(reshape(homogenization_dPdF(1:3,1:3,1:3,1:3,ele), &
                                          shape=[3,3,3,3], order=[2,1,4,3]),shape=[9,9]),BMatFull))*detJ
-   call MatSetValuesStencil(Jac,24,row,24,col,K_ele,ADD_VALUES,ierr)
-   CHKERRQ(ierr)
+   call MatSetValuesStencil(Jac,24_pPETScInt,row,24_pPetscInt,col,K_ele,ADD_VALUES,err_PETSc)
+   CHKERRQ(err_PETSc)
 enddo; enddo; enddo
- call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
- call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
- call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
- call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
+ call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
+ call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
+ call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
+ call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! applying boundary conditions
 diag = (C_volAvg(1,1,1,1)/delta(1)**2 + &
         C_volAvg(2,2,2,2)/delta(2)**2 + &
         C_volAvg(3,3,3,3)/delta(3)**2)*detJ
- call MatZeroRowsColumns(Jac,size(rows),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)
- CHKERRQ(ierr)
+ call MatZeroRowsColumns(Jac,size(rows,kind=pPetscInt),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,err_PETSc)
+ CHKERRQ(err_PETSc)
- call DMGetGlobalVector(da_local,coordinates,ierr); CHKERRQ(ierr)
- call DMDAVecGetArrayF90(da_local,coordinates,x_scal,ierr); CHKERRQ(ierr)
+ call DMGetGlobalVector(da_local,coordinates,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call DMDAVecGetArrayF90(da_local,coordinates,x_scal,err_PETSc)
+ CHKERRQ(err_PETSc)
 ele = 0
 do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
   ele = ele + 1
   x_scal(0:2,i,j,k) = discretization_IPcoords(1:3,ele)
 enddo; enddo; enddo
- call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,ierr); CHKERRQ(ierr)  ! initialize to undeformed coordinates (ToDo: use ip coordinates)
- call MatNullSpaceCreateRigidBody(coordinates,matnull,ierr); CHKERRQ(ierr)     ! get rigid body deformation modes
- call DMRestoreGlobalVector(da_local,coordinates,ierr); CHKERRQ(ierr)
- call MatSetNullSpace(Jac,matnull,ierr); CHKERRQ(ierr)
- call MatSetNearNullSpace(Jac,matnull,ierr); CHKERRQ(ierr)
- call MatNullSpaceDestroy(matnull,ierr); CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,err_PETSc)
+ CHKERRQ(err_PETSc)  ! initialize to undeformed coordinates (ToDo: use ip coordinates)
+ call MatNullSpaceCreateRigidBody(coordinates,matnull,err_PETSc)
+ CHKERRQ(err_PETSc)  ! get rigid body deformation modes
+ call DMRestoreGlobalVector(da_local,coordinates,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call MatSetNullSpace(Jac,matnull,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call MatSetNearNullSpace(Jac,matnull,err_PETSc)
+ CHKERRQ(err_PETSc)
+ call MatNullSpaceDestroy(matnull,err_PETSc)
+ CHKERRQ(err_PETSc)

 end subroutine formJacobian
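The formJacobian hunk above also switches every count handed to PETSc to PETSc's integer kind, via the codebase's pPetscInt parameter (e.g. size(rows,kind=pPetscInt)). A hedged sketch of the same idea that avoids the codebase-specific parameter by taking the kind from the array itself:

  ! Sketch only: counts passed to PETSc carry PETSc's integer kind.
  ! kind(rows) stands in here for the codebase's pPetscInt parameter.
  subroutine zero_bc_rows(Jac,rows,diag)
  #include <petsc/finclude/petscmat.h>
    use petscmat
    implicit none
    Mat :: Jac
    PetscInt, dimension(:) :: rows
    PetscScalar :: diag
    PetscErrorCode :: err_PETSc

    call MatZeroRowsColumns(Jac,size(rows,kind=kind(rows)),rows,diag, &
                            PETSC_NULL_VEC,PETSC_NULL_VEC,err_PETSc)
    CHKERRQ(err_PETSc)
  end subroutine zero_bc_rows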
@@ -97,7 +97,8 @@ contains
 subroutine grid_mechanical_spectral_basic_init

 real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
+ integer(MPI_INTEGER_KIND) :: err_MPI
 PetscScalar, pointer, dimension(:,:,:,:) :: &
   F  ! pointer to solution data
 PetscInt, dimension(0:worldsize-1) :: localK

@@ -145,10 +146,10 @@ subroutine grid_mechanical_spectral_basic_init
 !--------------------------------------------------------------------------------------------------
 ! set default and user defined options for PETSc
- call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',ierr)
- CHKERRQ(ierr)
- call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
- CHKERRQ(ierr)
+ call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',err_PETSc)
+ CHKERRQ(err_PETSc)
+ call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
+ CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! allocate global fields

@@ -157,33 +158,34 @@ subroutine grid_mechanical_spectral_basic_init
 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
- call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
- call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr)
- localK            = 0
- localK(worldrank) = grid3
- call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
+ call SNESCreate(PETSC_COMM_WORLD,snes,err_PETSc); CHKERRQ(err_PETSc)
+ call SNESSetOptionsPrefix(snes,'mechanical_',err_PETSc);CHKERRQ(err_PETSc)
+ localK            = 0_pPetscInt
+ localK(worldrank) = int(grid3,pPetscInt)
+ call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+ if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call DMDACreate3d(PETSC_COMM_WORLD, &
        DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &  ! cut off stencil at boundary
        DMDA_STENCIL_BOX, &                                      ! Moore (26) neighborhood around central point
-       grid(1),grid(2),grid(3), &                               ! global grid
-       1 , 1, worldsize, &
-       9, 0, &                                                  ! #dof (F tensor), ghost boundary width (domain overlap)
-       [grid(1)],[grid(2)],localK, &                            ! local grid
-       da,ierr)                                                 ! handle, error
- CHKERRQ(ierr)
- call SNESSetDM(snes,da,ierr); CHKERRQ(ierr)                    ! connect snes to da
- call DMsetFromOptions(da,ierr); CHKERRQ(ierr)
- call DMsetUp(da,ierr); CHKERRQ(ierr)
- call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 9, i.e. every def grad tensor)
- call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr)  ! residual vector of same shape as solution vector
- CHKERRQ(ierr)
- call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr)  ! specify custom convergence check function "converged"
- CHKERRQ(ierr)
- call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr)              ! pull it all together with additional CLI arguments
+       int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &  ! global grid
+       1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+       9_pPetscInt, 0_pPetscInt, &                              ! #dof (F, tensor), ghost boundary width (domain overlap)
+       [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &  ! local grid
+       da,err_PETSc)                                            ! handle, error
+ CHKERRQ(err_PETSc)
+ call SNESSetDM(snes,da,err_PETSc); CHKERRQ(err_PETSc)          ! connect snes to da
+ call DMsetFromOptions(da,err_PETSc); CHKERRQ(err_PETSc)
+ call DMsetUp(da,err_PETSc); CHKERRQ(err_PETSc)
+ call DMcreateGlobalVector(da,solution_vec,err_PETSc); CHKERRQ(err_PETSc)  ! global solution vector (grid x 9, i.e. every def grad tensor)
+ call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc)  ! residual vector of same shape as solution vector
+ CHKERRQ(err_PETSc)
+ call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc)  ! specify custom convergence check function "converged"
+ CHKERRQ(err_PETSc)
+ call SNESsetFromOptions(snes,err_PETSc); CHKERRQ(err_PETSc)    ! pull it all together with additional CLI arguments

 !--------------------------------------------------------------------------------------------------
 ! init fields
- call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)  ! places pointer on PETSc data
+ call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)  ! places pointer on PETSc data

 restartRead: if (interface_restartInc > 0) then
   print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
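The DMDACreate3d hunk above replaces every default-kind integer argument with an explicit conversion to the PETSc integer kind. A small sketch of that conversion step alone (names follow the patch; the kind is obtained from the array instead of the codebase's pPetscInt parameter):

  ! Sketch only: convert default-kind integers explicitly before they enter PETSc.
  subroutine distribute_layers(grid3,worldrank,worldsize,localK)
  #include <petsc/finclude/petscsys.h>
    use petscsys
    implicit none
    integer, intent(in) :: grid3, worldrank, worldsize
    PetscInt, dimension(0:worldsize-1), intent(out) :: localK

    localK = int(0,kind(localK))                 ! the patch spells this 0_pPetscInt
    localK(worldrank) = int(grid3,kind(localK))  ! explicit instead of implicit kind conversion
  end subroutine distribute_layers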
@@ -192,17 +194,17 @@ subroutine grid_mechanical_spectral_basic_init
 groupHandle = HDF5_openGroup(fileHandle,'solver')

 call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
- call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
- if (ierr /=0) error stop 'MPI error'
+ call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+ if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
- call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
- if (ierr /=0) error stop 'MPI error'
+ call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+ if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
- call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
- if (ierr /=0) error stop 'MPI error'
+ call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+ if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
- call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
- if (ierr /=0) error stop 'MPI error'
+ call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+ if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F,groupHandle,'F')
 call HDF5_read(F_lastInc,groupHandle,'F_lastInc')

@@ -216,24 +218,27 @@ subroutine grid_mechanical_spectral_basic_init
 call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, &  ! stress field, stress avg, global average of stiffness and (min+max)/2
                                     reshape(F,shape(F_lastInc)), &  ! target F
                                     0.0_pReal)                      ! time increment
- call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)  ! deassociate pointer
+ call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)  ! deassociate pointer

 restartRead2: if (interface_restartInc > 0) then
   print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
   call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
-  call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-  if (ierr /=0) error stop 'MPI error'
+  call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
-  call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-  if (ierr /=0) error stop 'MPI error'
+  call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)

   call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
-                     MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,ierr)
-  call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,ierr)
-  call MPI_File_close(fileUnit,ierr)
+                     MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+  call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+  call MPI_File_close(fileUnit,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 end if restartRead2

 call utilities_updateGamma(C_minMaxAvg)
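The restart block above now checks err_MPI after each of the three MPI-IO calls instead of ignoring the return codes. A self-contained sketch of that sequence (the file name is a placeholder, and MPI_DOUBLE_PRECISION is used where the patch uses MPI_DOUBLE; MPI_INTEGER_KIND is assumed to come from the MPI module):

  ! Sketch only: read a reference stiffness with a check after every MPI call.
  subroutine read_C_ref(C_minMaxAvg)
    use MPI
    implicit none
    real(8), dimension(3,3,3,3), intent(out) :: C_minMaxAvg
    integer :: fileUnit
    integer(MPI_INTEGER_KIND) :: err_MPI

    call MPI_File_open(MPI_COMM_WORLD,'material.C_ref',MPI_MODE_RDONLY, &
                       MPI_INFO_NULL,fileUnit,err_MPI)
    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
    call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE_PRECISION, &
                       MPI_STATUS_IGNORE,err_MPI)
    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
    call MPI_File_close(fileUnit,err_MPI)
    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  end subroutine read_C_ref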
@@ -255,7 +260,7 @@ function grid_mechanical_spectral_basic_solution(incInfoIn) result(solution)
   solution
 !--------------------------------------------------------------------------------------------------
 ! PETSc Data
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 SNESConvergedReason :: reason

 incInfo = incInfoIn

@@ -267,11 +272,11 @@ function grid_mechanical_spectral_basic_solution(incInfoIn) result(solution)
 !--------------------------------------------------------------------------------------------------
 ! solve BVP
- call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
+ call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,err_PETSc); CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! check convergence
- call SNESGetConvergedReason(snes,reason,ierr); CHKERRQ(ierr)
+ call SNESGetConvergedReason(snes,reason,err_PETSc); CHKERRQ(err_PETSc)

 solution%converged = reason > 0
 solution%iterationsNeeded = totalIter

@@ -301,11 +306,11 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
   deformation_BC
 type(rotation), intent(in) :: &
   rotation_BC
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 PetscScalar, pointer, dimension(:,:,:,:) :: F

- call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)
+ call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)

 if (cutBack) then
   C_volAvg = C_volAvgLastInc

@@ -348,7 +353,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
 F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, &  ! estimate of F at end of time+Delta_t that matches rotated F_aim on average
             rotation_BC%rotate(F_aim,active=.true.)),[9,grid(1),grid(2),grid3])
- call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! set module wide available data

@@ -364,12 +369,12 @@ end subroutine grid_mechanical_spectral_basic_forward
 !--------------------------------------------------------------------------------------------------
 subroutine grid_mechanical_spectral_basic_updateCoords

- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 PetscScalar, dimension(:,:,:,:), pointer :: F

- call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)
+ call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
 call utilities_updateCoords(F)
- call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)

 end subroutine grid_mechanical_spectral_basic_updateCoords

@@ -379,11 +384,11 @@ end subroutine grid_mechanical_spectral_basic_updateCoords
 !--------------------------------------------------------------------------------------------------
 subroutine grid_mechanical_spectral_basic_restartWrite

- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 integer(HID_T) :: fileHandle, groupHandle
 PetscScalar, dimension(:,:,:,:), pointer :: F

- call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)
+ call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)

 print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT)

@@ -410,7 +415,7 @@ subroutine grid_mechanical_spectral_basic_restartWrite
 if (num%update_gamma) call utilities_saveReferenceStiffness

- call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr)
+ call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)

 end subroutine grid_mechanical_spectral_basic_restartWrite

@@ -418,7 +423,7 @@ end subroutine grid_mechanical_spectral_basic_restartWrite
 !--------------------------------------------------------------------------------------------------
 !> @brief convergence check
 !--------------------------------------------------------------------------------------------------
- subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,ierr)
+ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,err_PETSc)

 SNES :: snes_local
 PetscInt, intent(in) :: PETScIter

@@ -428,7 +433,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,ierr)
   devNull3
 SNESConvergedReason :: reason
 PetscObject :: dummy
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
 real(pReal) :: &
   divTol, &
   BCTol

@@ -460,7 +465,7 @@ end subroutine converged
 !> @brief forms the residual vector
 !--------------------------------------------------------------------------------------------------
 subroutine formResidual(in, F, &
-                        residuum, dummy, ierr)
+                        residuum, dummy, err_PETSc)

 DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in  !< DMDA info (needs to be named "in" for macros like XRANGE to work)
 PetscScalar, dimension(3,3,XG_RANGE,YG_RANGE,ZG_RANGE), &

@@ -473,10 +478,11 @@ subroutine formResidual(in, F, &
   PETScIter, &
   nfuncs
 PetscObject :: dummy
- PetscErrorCode :: ierr
+ PetscErrorCode :: err_PETSc
+ integer(MPI_INTEGER_KIND) :: err_MPI

- call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr)
- call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr)
+ call SNESGetNumberFunctionEvals(snes,nfuncs,err_PETSc); CHKERRQ(err_PETSc)
+ call SNESGetIterationNumber(snes,PETScIter,err_PETSc); CHKERRQ(err_PETSc)

 if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1  ! new increment

@@ -497,7 +503,8 @@ subroutine formResidual(in, F, &
 call utilities_constitutiveResponse(residuum, &  ! "residuum" gets field of first PK stress (to save memory)
                                     P_av,C_volAvg,C_minMaxAvg, &
                                     F,params%Delta_t,params%rotation_BC)
- call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
+ call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+ if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

 !--------------------------------------------------------------------------------------------------
 ! stress BC handling
@ -108,7 +108,8 @@ contains
|
||||||
subroutine grid_mechanical_spectral_polarisation_init
|
subroutine grid_mechanical_spectral_polarisation_init
|
||||||
|
|
||||||
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P
|
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
PetscScalar, pointer, dimension(:,:,:,:) :: &
|
PetscScalar, pointer, dimension(:,:,:,:) :: &
|
||||||
FandF_tau, & ! overall pointer to solution data
|
FandF_tau, & ! overall pointer to solution data
|
||||||
F, & ! specific (sub)pointer
|
F, & ! specific (sub)pointer
|
||||||
|
@ -163,10 +164,10 @@ subroutine grid_mechanical_spectral_polarisation_init
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! set default and user defined options for PETSc
|
! set default and user defined options for PETSc
|
||||||
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',ierr)
|
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
|
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! allocate global fields
|
! allocate global fields
|
||||||
|
@ -177,33 +178,34 @@ subroutine grid_mechanical_spectral_polarisation_init
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! initialize solver specific parts of PETSc
|
! initialize solver specific parts of PETSc
|
||||||
call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
|
call SNESCreate(PETSC_COMM_WORLD,snes,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr)
|
call SNESSetOptionsPrefix(snes,'mechanical_',err_PETSc);CHKERRQ(err_PETSc)
|
||||||
localK = 0
|
localK = 0_pPetscInt
|
||||||
localK(worldrank) = grid3
|
localK(worldrank) = int(grid3,pPetscInt)
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call DMDACreate3d(PETSC_COMM_WORLD, &
|
call DMDACreate3d(PETSC_COMM_WORLD, &
|
||||||
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
|
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
|
||||||
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
|
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
|
||||||
grid(1),grid(2),grid(3), & ! global grid
|
int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
|
||||||
1 , 1, worldsize, &
|
1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
|
||||||
18, 0, & ! #dof (F tensor), ghost boundary width (domain overlap)
|
18_pPetscInt, 0_pPetscInt, & ! #dof (2xtensor), ghost boundary width (domain overlap)
|
||||||
[grid(1)],[grid(2)],localK, & ! local grid
|
[int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
|
||||||
da,ierr) ! handle, error
|
da,err_PETSc) ! handle, error
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetDM(snes,da,ierr); CHKERRQ(ierr) ! connect snes to da
|
call SNESSetDM(snes,da,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
|
||||||
call DMsetFromOptions(da,ierr); CHKERRQ(ierr)
|
call DMsetFromOptions(da,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call DMsetUp(da,ierr); CHKERRQ(ierr)
|
call DMsetUp(da,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 18, i.e. every def grad tensor)
|
call DMcreateGlobalVector(da,solution_vec,err_PETSc); CHKERRQ(err_PETSc) ! global solution vector (grid x 18, i.e. every def grad tensor)
|
||||||
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
|
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "converged"
|
call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
|
call SNESsetFromOptions(snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! init fields
|
! init fields
|
||||||
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! places pointer on PETSc data
|
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc) ! places pointer on PETSc data
|
||||||
F => FandF_tau(0: 8,:,:,:)
|
F => FandF_tau(0: 8,:,:,:)
|
||||||
F_tau => FandF_tau(9:17,:,:,:)
|
F_tau => FandF_tau(9:17,:,:,:)
|
||||||
|
|
||||||
|
@ -214,17 +216,17 @@ subroutine grid_mechanical_spectral_polarisation_init
|
||||||
groupHandle = HDF5_openGroup(fileHandle,'solver')
|
groupHandle = HDF5_openGroup(fileHandle,'solver')
|
||||||
|
|
||||||
call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
|
call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
|
||||||
call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
|
call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
|
||||||
call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
|
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
|
||||||
call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
|
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
|
||||||
call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call HDF5_read(F,groupHandle,'F')
|
call HDF5_read(F,groupHandle,'F')
|
||||||
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
|
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
|
||||||
call HDF5_read(F_tau,groupHandle,'F_tau')
|
call HDF5_read(F_tau,groupHandle,'F_tau')
|
||||||
|
@ -242,24 +244,28 @@ subroutine grid_mechanical_spectral_polarisation_init
|
||||||
call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
|
call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
|
||||||
reshape(F,shape(F_lastInc)), & ! target F
|
reshape(F,shape(F_lastInc)), & ! target F
|
||||||
0.0_pReal) ! time increment
|
0.0_pReal) ! time increment
|
||||||
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! deassociate pointer
|
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc) ! deassociate pointer
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
restartRead2: if (interface_restartInc > 0) then
|
restartRead2: if (interface_restartInc > 0) then
|
||||||
print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
|
print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
|
||||||
call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
|
call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
|
||||||
call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
|
call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
|
||||||
call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
|
||||||
call HDF5_closeGroup(groupHandle)
|
call HDF5_closeGroup(groupHandle)
|
||||||
call HDF5_closeFile(fileHandle)
|
call HDF5_closeFile(fileHandle)
|
||||||
|
|
||||||
call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
|
call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
|
||||||
MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,ierr)
|
MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
|
||||||
call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,ierr)
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call MPI_File_close(fileUnit,ierr)
|
call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
call MPI_File_close(fileUnit,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
end if restartRead2
|
end if restartRead2
|
||||||
|
|
||||||
call utilities_updateGamma(C_minMaxAvg)
|
call utilities_updateGamma(C_minMaxAvg)
|
||||||
|
@@ -283,7 +289,7 @@ function grid_mechanical_spectral_polarisation_solution(incInfoIn) result(soluti
    solution
 !--------------------------------------------------------------------------------------------------
 ! PETSc Data
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
   SNESConvergedReason :: reason

   incInfo = incInfoIn
|
@@ -299,11 +305,11 @@ function grid_mechanical_spectral_polarisation_solution(incInfoIn) result(soluti

 !--------------------------------------------------------------------------------------------------
 ! solve BVP
-  call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
+  call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,err_PETSc); CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! check convergence
-  call SNESGetConvergedReason(snes,reason,ierr); CHKERRQ(ierr)
+  call SNESGetConvergedReason(snes,reason,err_PETSc); CHKERRQ(err_PETSc)

   solution%converged = reason > 0
   solution%iterationsNeeded = totalIter
|
@ -333,13 +339,13 @@ subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,D
|
||||||
deformation_BC
|
deformation_BC
|
||||||
type(rotation), intent(in) :: &
|
type(rotation), intent(in) :: &
|
||||||
rotation_BC
|
rotation_BC
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
PetscScalar, pointer, dimension(:,:,:,:) :: FandF_tau, F, F_tau
|
PetscScalar, pointer, dimension(:,:,:,:) :: FandF_tau, F, F_tau
|
||||||
integer :: i, j, k
|
integer :: i, j, k
|
||||||
real(pReal), dimension(3,3) :: F_lambda33
|
real(pReal), dimension(3,3) :: F_lambda33
|
||||||
|
|
||||||
|
|
||||||
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
|
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
F => FandF_tau(0: 8,:,:,:)
|
F => FandF_tau(0: 8,:,:,:)
|
||||||
F_tau => FandF_tau(9:17,:,:,:)
|
F_tau => FandF_tau(9:17,:,:,:)
|
||||||
|
|
||||||
|
@ -402,7 +408,8 @@ subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,D
|
||||||
end do; end do; end do
|
end do; end do; end do
|
||||||
end if
|
end if
|
||||||
|
|
||||||
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
|
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! set module wide available data
|
! set module wide available data
|
||||||
|
@ -418,12 +425,14 @@ end subroutine grid_mechanical_spectral_polarisation_forward
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine grid_mechanical_spectral_polarisation_updateCoords
|
subroutine grid_mechanical_spectral_polarisation_updateCoords
|
||||||
|
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau
|
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau
|
||||||
|
|
||||||
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
|
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
call utilities_updateCoords(FandF_tau(0:8,:,:,:))
|
call utilities_updateCoords(FandF_tau(0:8,:,:,:))
|
||||||
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
|
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
end subroutine grid_mechanical_spectral_polarisation_updateCoords
|
end subroutine grid_mechanical_spectral_polarisation_updateCoords
|
||||||
|
|
||||||
|
@ -433,11 +442,11 @@ end subroutine grid_mechanical_spectral_polarisation_updateCoords
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine grid_mechanical_spectral_polarisation_restartWrite
|
subroutine grid_mechanical_spectral_polarisation_restartWrite
|
||||||
|
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
integer(HID_T) :: fileHandle, groupHandle
|
integer(HID_T) :: fileHandle, groupHandle
|
||||||
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau
|
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau
|
||||||
|
|
||||||
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
|
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
F => FandF_tau(0: 8,:,:,:)
|
F => FandF_tau(0: 8,:,:,:)
|
||||||
F_tau => FandF_tau(9:17,:,:,:)
|
F_tau => FandF_tau(9:17,:,:,:)
|
||||||
|
|
||||||
|
@ -467,7 +476,8 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite
|
||||||
|
|
||||||
if (num%update_gamma) call utilities_saveReferenceStiffness
|
if (num%update_gamma) call utilities_saveReferenceStiffness
|
||||||
|
|
||||||
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
|
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
end subroutine grid_mechanical_spectral_polarisation_restartWrite
|
end subroutine grid_mechanical_spectral_polarisation_restartWrite
|
||||||
|
|
||||||
|
@ -475,7 +485,7 @@ end subroutine grid_mechanical_spectral_polarisation_restartWrite
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
!> @brief convergence check
|
!> @brief convergence check
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,ierr)
|
subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,err_PETSc)
|
||||||
|
|
||||||
SNES :: snes_local
|
SNES :: snes_local
|
||||||
PetscInt, intent(in) :: PETScIter
|
PetscInt, intent(in) :: PETScIter
|
||||||
|
@ -485,7 +495,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm
|
||||||
devNull3
|
devNull3
|
||||||
SNESConvergedReason :: reason
|
SNESConvergedReason :: reason
|
||||||
PetscObject :: dummy
|
PetscObject :: dummy
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
real(pReal) :: &
|
real(pReal) :: &
|
||||||
curlTol, &
|
curlTol, &
|
||||||
divTol, &
|
divTol, &
|
||||||
|
@ -521,7 +531,7 @@ end subroutine converged
|
||||||
!> @brief forms the residual vector
|
!> @brief forms the residual vector
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine formResidual(in, FandF_tau, &
|
subroutine formResidual(in, FandF_tau, &
|
||||||
residuum, dummy,ierr)
|
residuum, dummy,err_PETSc)
|
||||||
|
|
||||||
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
|
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
|
||||||
PetscScalar, dimension(3,3,2,XG_RANGE,YG_RANGE,ZG_RANGE), &
|
PetscScalar, dimension(3,3,2,XG_RANGE,YG_RANGE,ZG_RANGE), &
|
||||||
|
@ -537,7 +547,8 @@ subroutine formResidual(in, FandF_tau, &
|
||||||
PETScIter, &
|
PETScIter, &
|
||||||
nfuncs
|
nfuncs
|
||||||
PetscObject :: dummy
|
PetscObject :: dummy
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
integer :: &
|
integer :: &
|
||||||
i, j, k, e
|
i, j, k, e
|
||||||
|
|
||||||
|
@ -553,10 +564,11 @@ subroutine formResidual(in, FandF_tau, &
|
||||||
X_RANGE, Y_RANGE, Z_RANGE)
|
X_RANGE, Y_RANGE, Z_RANGE)
|
||||||
|
|
||||||
F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt
|
F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,F_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,F_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
|
||||||
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr)
|
call SNESGetNumberFunctionEvals(snes,nfuncs,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr)
|
call SNESGetIterationNumber(snes,PETScIter,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
|
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
|
||||||
|
|
||||||
|
@ -597,7 +609,7 @@ subroutine formResidual(in, FandF_tau, &
|
||||||
call utilities_constitutiveResponse(residual_F, & ! "residuum" gets field of first PK stress (to save memory)
|
call utilities_constitutiveResponse(residual_F, & ! "residuum" gets field of first PK stress (to save memory)
|
||||||
P_av,C_volAvg,C_minMaxAvg, &
|
P_av,C_volAvg,C_minMaxAvg, &
|
||||||
F - residual_F_tau/num%beta,params%Delta_t,params%rotation_BC)
|
F - residual_F_tau/num%beta,params%Delta_t,params%rotation_BC)
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! stress BC handling
|
! stress BC handling
|
||||||
|
|
|
@@ -71,7 +71,8 @@ subroutine grid_thermal_spectral_init(T_0)
   integer :: i, j, k, ce
   DM :: thermal_grid
   PetscScalar, dimension(:,:,:), pointer :: T_PETSc
-  PetscErrorCode :: ierr
+  integer(MPI_INTEGER_KIND) :: err_MPI
+  PetscErrorCode :: err_PETSc
   class(tNode), pointer :: &
     num_grid

@@ -94,39 +95,41 @@ subroutine grid_thermal_spectral_init(T_0)
 !--------------------------------------------------------------------------------------------------
 ! set default and user defined options for PETSc
   call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type newtonls -thermal_snes_mf &
-                               &-thermal_snes_ksp_ew -thermal_ksp_type fgmres',ierr)
-  CHKERRQ(ierr)
-  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
-  CHKERRQ(ierr)
+                               &-thermal_snes_ksp_ew -thermal_ksp_type fgmres',err_PETSc)
+  CHKERRQ(err_PETSc)
+  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
+  CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
-  call SNESCreate(PETSC_COMM_WORLD,thermal_snes,ierr); CHKERRQ(ierr)
-  call SNESSetOptionsPrefix(thermal_snes,'thermal_',ierr);CHKERRQ(ierr)
-  localK            = 0
-  localK(worldrank) = grid3
-  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
+  call SNESCreate(PETSC_COMM_WORLD,thermal_snes,err_PETSc); CHKERRQ(err_PETSc)
+  call SNESSetOptionsPrefix(thermal_snes,'thermal_',err_PETSc);CHKERRQ(err_PETSc)
+  localK            = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
+  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call DMDACreate3D(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                                   ! cut off stencil at boundary
          DMDA_STENCIL_BOX, &                                                                       ! Moore (26) neighborhood around central point
-         grid(1),grid(2),grid(3), &                                                                ! global grid
-         1, 1, worldsize, &
-         1, 0, &                                                                                   ! #dof (T field), ghost boundary width (domain overlap)
-         [grid(1)],[grid(2)],localK, &                                                             ! local grid
-         thermal_grid,ierr)                                                                        ! handle, error
-  CHKERRQ(ierr)
-  call SNESSetDM(thermal_snes,thermal_grid,ierr); CHKERRQ(ierr)                                    ! connect snes to da
-  call DMsetFromOptions(thermal_grid,ierr); CHKERRQ(ierr)
-  call DMsetUp(thermal_grid,ierr); CHKERRQ(ierr)
-  call DMCreateGlobalVector(thermal_grid,solution_vec,ierr); CHKERRQ(ierr)                         ! global solution vector (grid x 1, i.e. every def grad tensor)
-  call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr)      ! residual vector of same shape as solution vector
-  CHKERRQ(ierr)
-  call SNESSetFromOptions(thermal_snes,ierr); CHKERRQ(ierr)                                        ! pull it all together with additional CLI arguments
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                   ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         1_pPetscInt, 0_pPetscInt, &                                                               ! #dof (T, scalar), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                               ! local grid
+         thermal_grid,err_PETSc)                                                                   ! handle, error
+  CHKERRQ(err_PETSc)
+  call SNESSetDM(thermal_snes,thermal_grid,err_PETSc); CHKERRQ(err_PETSc)                          ! connect snes to da
+  call DMsetFromOptions(thermal_grid,err_PETSc); CHKERRQ(err_PETSc)
+  call DMsetUp(thermal_grid,err_PETSc); CHKERRQ(err_PETSc)
+  call DMCreateGlobalVector(thermal_grid,solution_vec,err_PETSc)                                   ! global solution vector (grid x 1, i.e. every def grad tensor)
+  CHKERRQ(err_PETSc)
+  call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
+  CHKERRQ(err_PETSc)
+  call SNESSetFromOptions(thermal_snes,err_PETSc); CHKERRQ(err_PETSc)                              ! pull it all together with additional CLI arguments
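
Editor's note: the hunk above also converts every default-kind integer handed to PETSc into the PETSc integer kind (here `pPetscInt`), either as a kind-suffixed literal or via an explicit `int(...,pPetscInt)` conversion, so the code also builds against a PETSc configured with 64-bit indices. A standalone sketch of the idea, not DAMASK/PETSc code; `int64` stands in for a 64-bit `PetscInt`:

program kind_cast_sketch
  use, intrinsic :: iso_fortran_env, only: int64
  implicit none
  integer, parameter :: pPetscInt = int64        ! assumption: mirrors a 64-bit PetscInt
  integer            :: grid3 = 16               ! default-kind value from the grid setup
  integer(pPetscInt) :: localK(0:3)

  localK    = 0_pPetscInt                        ! literals carry the target kind
  localK(1) = int(grid3,pPetscInt)               ! explicit conversion instead of implicit kind mixing
  print '(a,4(1x,i0))', 'localK =', localK
end program kind_cast_sketch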
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! init fields
|
! init fields
|
||||||
call DMDAGetCorners(thermal_grid,xstart,ystart,zstart,xend,yend,zend,ierr)
|
call DMDAGetCorners(thermal_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
xend = xstart + xend - 1
|
xend = xstart + xend - 1
|
||||||
yend = ystart + yend - 1
|
yend = ystart + yend - 1
|
||||||
zend = zstart + zend - 1
|
zend = zstart + zend - 1
|
||||||
|
@ -143,9 +146,11 @@ subroutine grid_thermal_spectral_init(T_0)
|
||||||
call homogenization_thermal_setField(T_0,0.0_pReal,ce)
|
call homogenization_thermal_setField(T_0,0.0_pReal,ce)
|
||||||
end do; end do; end do
|
end do; end do; end do
|
||||||
|
|
||||||
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,ierr); CHKERRQ(ierr)
|
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
T_PETSc(xstart:xend,ystart:yend,zstart:zend) = T_current
|
T_PETSc(xstart:xend,ystart:yend,zstart:zend) = T_current
|
||||||
call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,ierr); CHKERRQ(ierr)
|
call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
call updateReference
|
call updateReference
|
||||||
|
|
||||||
|
@ -164,7 +169,8 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
|
||||||
PetscInt :: devNull
|
PetscInt :: devNull
|
||||||
PetscReal :: T_min, T_max, stagNorm
|
PetscReal :: T_min, T_max, stagNorm
|
||||||
|
|
||||||
PetscErrorCode :: ierr
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
|
PetscErrorCode :: err_PETSc
|
||||||
SNESConvergedReason :: reason
|
SNESConvergedReason :: reason
|
||||||
|
|
||||||
solution%converged =.false.
|
solution%converged =.false.
|
||||||
|
@ -173,8 +179,10 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
|
||||||
! set module wide availabe data
|
! set module wide availabe data
|
||||||
params%Delta_t = Delta_t
|
params%Delta_t = Delta_t
|
||||||
|
|
||||||
call SNESSolve(thermal_snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
|
call SNESSolve(thermal_snes,PETSC_NULL_VEC,solution_vec,err_PETSc)
|
||||||
call SNESGetConvergedReason(thermal_snes,reason,ierr); CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
|
call SNESGetConvergedReason(thermal_snes,reason,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
if (reason < 1) then
|
if (reason < 1) then
|
||||||
solution%converged = .false.
|
solution%converged = .false.
|
||||||
|
@ -184,9 +192,11 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
|
||||||
solution%iterationsNeeded = totalIter
|
solution%iterationsNeeded = totalIter
|
||||||
end if
|
end if
|
||||||
stagNorm = maxval(abs(T_current - T_stagInc))
|
stagNorm = maxval(abs(T_current - T_stagInc))
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
|
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
T_stagInc = T_current
|
T_stagInc = T_current
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
@ -197,8 +207,8 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
|
||||||
call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
|
call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
|
||||||
end do; end do; end do
|
end do; end do; end do
|
||||||
|
|
||||||
call VecMin(solution_vec,devNull,T_min,ierr); CHKERRQ(ierr)
|
call VecMin(solution_vec,devNull,T_min,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call VecMax(solution_vec,devNull,T_max,ierr); CHKERRQ(ierr)
|
call VecMax(solution_vec,devNull,T_max,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
if (solution%converged) &
|
if (solution%converged) &
|
||||||
print'(/,1x,a)', '... thermal conduction converged ..................................'
|
print'(/,1x,a)', '... thermal conduction converged ..................................'
|
||||||
print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm
|
print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm
|
||||||
|
@ -217,7 +227,7 @@ subroutine grid_thermal_spectral_forward(cutBack)
|
||||||
integer :: i, j, k, ce
|
integer :: i, j, k, ce
|
||||||
DM :: dm_local
|
DM :: dm_local
|
||||||
PetscScalar, dimension(:,:,:), pointer :: x_scal
|
PetscScalar, dimension(:,:,:), pointer :: x_scal
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
|
|
||||||
if (cutBack) then
|
if (cutBack) then
|
||||||
T_current = T_lastInc
|
T_current = T_lastInc
|
||||||
|
@ -225,10 +235,13 @@ subroutine grid_thermal_spectral_forward(cutBack)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! reverting thermal field state
|
! reverting thermal field state
|
||||||
call SNESGetDM(thermal_snes,dm_local,ierr); CHKERRQ(ierr)
|
call SNESGetDM(thermal_snes,dm_local,err_PETSc)
|
||||||
call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr) !< get the data out of PETSc to work with
|
CHKERRQ(err_PETSc)
|
||||||
|
call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,err_PETSc) !< get the data out of PETSc to work with
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
x_scal(xstart:xend,ystart:yend,zstart:zend) = T_current
|
x_scal(xstart:xend,ystart:yend,zstart:zend) = T_current
|
||||||
call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr)
|
call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
ce = 0
|
ce = 0
|
||||||
do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
|
do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
|
||||||
ce = ce + 1
|
ce = ce + 1
|
||||||
|
@ -245,7 +258,7 @@ end subroutine grid_thermal_spectral_forward
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
!> @brief forms the spectral thermal residual vector
|
!> @brief forms the spectral thermal residual vector
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
|
subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)
|
||||||
|
|
||||||
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
|
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
|
||||||
in
|
in
|
||||||
|
@ -256,7 +269,7 @@ subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
|
||||||
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
|
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
|
||||||
f_scal
|
f_scal
|
||||||
PetscObject :: dummy
|
PetscObject :: dummy
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: dummy_err
|
||||||
integer :: i, j, k, ce
|
integer :: i, j, k, ce
|
||||||
|
|
||||||
T_current = x_scal
|
T_current = x_scal
|
||||||
|
@ -301,7 +314,8 @@ end subroutine formResidual
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine updateReference()
|
subroutine updateReference()
|
||||||
|
|
||||||
integer :: ce,ierr
|
integer :: ce
|
||||||
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
|
|
||||||
|
|
||||||
K_ref = 0.0_pReal
|
K_ref = 0.0_pReal
|
||||||
|
@ -312,9 +326,11 @@ subroutine updateReference()
|
||||||
end do
|
end do
|
||||||
|
|
||||||
K_ref = K_ref*wgt
|
K_ref = K_ref*wgt
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
mu_ref = mu_ref*wgt
|
mu_ref = mu_ref*wgt
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
|
||||||
end subroutine updateReference
|
end subroutine updateReference
|
||||||
|
|
||||||
|
|
|
@ -144,7 +144,7 @@ contains
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine spectral_utilities_init
|
subroutine spectral_utilities_init
|
||||||
|
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
integer :: i, j, k, &
|
integer :: i, j, k, &
|
||||||
FFTW_planner_flag
|
FFTW_planner_flag
|
||||||
integer, dimension(3) :: k_s
|
integer, dimension(3) :: k_s
|
||||||
|
@ -156,7 +156,7 @@ subroutine spectral_utilities_init
|
||||||
integer(C_INTPTR_T) :: alloc_local, local_K, local_K_offset
|
integer(C_INTPTR_T) :: alloc_local, local_K, local_K_offset
|
||||||
integer(C_INTPTR_T), parameter :: &
|
integer(C_INTPTR_T), parameter :: &
|
||||||
scalarSize = 1_C_INTPTR_T, &
|
scalarSize = 1_C_INTPTR_T, &
|
||||||
vecSize = 3_C_INTPTR_T, &
|
vectorSize = 3_C_INTPTR_T, &
|
||||||
tensorSize = 9_C_INTPTR_T
|
tensorSize = 9_C_INTPTR_T
|
||||||
character(len=*), parameter :: &
|
character(len=*), parameter :: &
|
||||||
PETSCDEBUG = ' -snes_view -snes_monitor '
|
PETSCDEBUG = ' -snes_view -snes_monitor '
|
||||||
|
@ -193,13 +193,13 @@ subroutine spectral_utilities_init
|
||||||
'add more using the "PETSc_options" keyword in numerics.yaml'
|
'add more using the "PETSc_options" keyword in numerics.yaml'
|
||||||
flush(IO_STDOUT)
|
flush(IO_STDOUT)
|
||||||
|
|
||||||
call PetscOptionsClear(PETSC_NULL_OPTIONS,ierr)
|
call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
if (debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr)
|
if (debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,&
|
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,&
|
||||||
num_grid%get_asString('PETSc_options',defaultVal=''),ierr)
|
num_grid%get_asString('PETSc_options',defaultVal=''),err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
grid1Red = grid(1)/2 + 1
|
grid1Red = grid(1)/2 + 1
|
||||||
wgt = 1.0/real(product(grid),pReal)
|
wgt = 1.0/real(product(grid),pReal)
|
||||||
|
@ -274,7 +274,7 @@ subroutine spectral_utilities_init
|
||||||
call c_f_pointer(tensorField, tensorField_fourier, [3_C_INTPTR_T,3_C_INTPTR_T, &
|
call c_f_pointer(tensorField, tensorField_fourier, [3_C_INTPTR_T,3_C_INTPTR_T, &
|
||||||
gridFFTW(1)/2_C_INTPTR_T + 1_C_INTPTR_T , gridFFTW(2),local_K]) ! place a pointer for a fourier tensor representation
|
gridFFTW(1)/2_C_INTPTR_T + 1_C_INTPTR_T , gridFFTW(2),local_K]) ! place a pointer for a fourier tensor representation
|
||||||
|
|
||||||
vectorField = fftw_alloc_complex(vecSize*alloc_local)
|
vectorField = fftw_alloc_complex(vectorSize*alloc_local)
|
||||||
call c_f_pointer(vectorField, vectorField_real, [3_C_INTPTR_T,&
|
call c_f_pointer(vectorField, vectorField_real, [3_C_INTPTR_T,&
|
||||||
2_C_INTPTR_T*(gridFFTW(1)/2_C_INTPTR_T + 1_C_INTPTR_T),gridFFTW(2),local_K]) ! place a pointer for a real vector representation
|
2_C_INTPTR_T*(gridFFTW(1)/2_C_INTPTR_T + 1_C_INTPTR_T),gridFFTW(2),local_K]) ! place a pointer for a real vector representation
|
||||||
call c_f_pointer(vectorField, vectorField_fourier,[3_C_INTPTR_T,&
|
call c_f_pointer(vectorField, vectorField_fourier,[3_C_INTPTR_T,&
|
||||||
|
@ -288,42 +288,42 @@ subroutine spectral_utilities_init
|
||||||
|
|
||||||
 !--------------------------------------------------------------------------------------------------
 ! tensor MPI fftw plans
-  planTensorForth = fftw_mpi_plan_many_dft_r2c(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], &         ! dimension, logical length in each dimension in reversed order
-                                               tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, & ! no. of transforms, default iblock and oblock
-                                               tensorField_real, tensorField_fourier, &            ! input data, output data
-                                               PETSC_COMM_WORLD, FFTW_planner_flag)                ! use all processors, planer precision
-  if (.not. C_ASSOCIATED(planTensorForth)) error stop 'FFTW error'
-  planTensorBack  = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], &         ! dimension, logical length in each dimension in reversed order
-                                               tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, & ! no. of transforms, default iblock and oblock
-                                               tensorField_fourier,tensorField_real, &             ! input data, output data
-                                               PETSC_COMM_WORLD, FFTW_planner_flag)                ! all processors, planer precision
-  if (.not. C_ASSOCIATED(planTensorBack)) error stop 'FFTW error'
+  planTensorForth = fftw_mpi_plan_many_dft_r2c(3,gridFFTW(3:1:-1),tensorSize, &
+                                               FFTW_MPI_DEFAULT_BLOCK,FFTW_MPI_DEFAULT_BLOCK, &
+                                               tensorField_real,tensorField_fourier, &
+                                               PETSC_COMM_WORLD,FFTW_planner_flag)
+  if (.not. c_associated(planTensorForth)) error stop 'FFTW error'
+  planTensorBack  = fftw_mpi_plan_many_dft_c2r(3,gridFFTW(3:1:-1),tensorSize, &
+                                               FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &
+                                               tensorField_fourier,tensorField_real, &
+                                               PETSC_COMM_WORLD, FFTW_planner_flag)
+  if (.not. c_associated(planTensorBack)) error stop 'FFTW error'

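
Editor's note: the rewritten planner calls pass the logical transform dimensions as `gridFFTW(3:1:-1)`, i.e. slowest-varying first, because FFTW expects dimensions in row-major order while Fortran arrays are column-major; the vector and scalar plans below follow the same convention. A standalone serial sketch of that ordering, not DAMASK code; it assumes the FFTW3 Fortran 2003 interface from `fftw3.f03` and linking against libfftw3:

program fftw_order_sketch
  use, intrinsic :: iso_c_binding
  implicit none
  include 'fftw3.f03'
  integer(C_INT), parameter :: n1 = 4, n2 = 6, n3 = 8
  real(C_DOUBLE),            dimension(n1,n2,n3)     :: field_real
  complex(C_DOUBLE_COMPLEX), dimension(n1/2+1,n2,n3) :: field_fourier
  type(C_PTR) :: plan

  ! dimensions are passed as n3,n2,n1, reversed with respect to the Fortran array shape
  plan = fftw_plan_dft_r2c_3d(n3,n2,n1,field_real,field_fourier,FFTW_ESTIMATE)
  if (.not. c_associated(plan)) error stop 'FFTW error'

  call random_number(field_real)
  call fftw_execute_dft_r2c(plan,field_real,field_fourier)
  call fftw_destroy_plan(plan)
end program fftw_order_sketch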
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! vector MPI fftw plans
|
! vector MPI fftw plans
|
||||||
planVectorForth = fftw_mpi_plan_many_dft_r2c(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order
|
planVectorForth = fftw_mpi_plan_many_dft_r2c(3,gridFFTW(3:1:-1),vectorSize, &
|
||||||
vecSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK,&! no. of transforms, default iblock and oblock
|
FFTW_MPI_DEFAULT_BLOCK,FFTW_MPI_DEFAULT_BLOCK, &
|
||||||
vectorField_real, vectorField_fourier, & ! input data, output data
|
vectorField_real,vectorField_fourier, &
|
||||||
PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planer precision
|
PETSC_COMM_WORLD,FFTW_planner_flag)
|
||||||
if (.not. C_ASSOCIATED(planVectorForth)) error stop 'FFTW error'
|
if (.not. c_associated(planVectorForth)) error stop 'FFTW error'
|
||||||
planVectorBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order
|
planVectorBack = fftw_mpi_plan_many_dft_c2r(3,gridFFTW(3:1:-1),vectorSize, &
|
||||||
vecSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, & ! no. of transforms, default iblock and oblock
|
FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &
|
||||||
vectorField_fourier,vectorField_real, & ! input data, output data
|
vectorField_fourier,vectorField_real, &
|
||||||
PETSC_COMM_WORLD, FFTW_planner_flag) ! all processors, planer precision
|
PETSC_COMM_WORLD, FFTW_planner_flag)
|
||||||
if (.not. C_ASSOCIATED(planVectorBack)) error stop 'FFTW error'
|
if (.not. c_associated(planVectorBack)) error stop 'FFTW error'
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! scalar MPI fftw plans
|
! scalar MPI fftw plans
|
||||||
planScalarForth = fftw_mpi_plan_many_dft_r2c(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order
|
planScalarForth = fftw_mpi_plan_many_dft_r2c(3,gridFFTW(3:1:-1),scalarSize, &
|
||||||
scalarSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
|
FFTW_MPI_DEFAULT_BLOCK,FFTW_MPI_DEFAULT_BLOCK, &
|
||||||
scalarField_real, scalarField_fourier, & ! input data, output data
|
scalarField_real,scalarField_fourier, &
|
||||||
PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planer precision
|
PETSC_COMM_WORLD,FFTW_planner_flag)
|
||||||
if (.not. C_ASSOCIATED(planScalarForth)) error stop 'FFTW error'
|
if (.not. c_associated(planScalarForth)) error stop 'FFTW error'
|
||||||
planScalarBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order, no. of transforms
|
planScalarBack = fftw_mpi_plan_many_dft_c2r(3,gridFFTW(3:1:-1),scalarSize, &
|
||||||
scalarSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
|
FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &
|
||||||
scalarField_fourier,scalarField_real, & ! input data, output data
|
scalarField_fourier,scalarField_real, &
|
||||||
PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planer precision
|
PETSC_COMM_WORLD, FFTW_planner_flag)
|
||||||
if (.not. C_ASSOCIATED(planScalarBack)) error stop 'FFTW error'
|
if (.not. c_associated(planScalarBack)) error stop 'FFTW error'
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! calculation of discrete angular frequencies, ordered as in FFTW (wrap around)
|
! calculation of discrete angular frequencies, ordered as in FFTW (wrap around)
|
||||||
|
@ -559,7 +559,8 @@ end subroutine utilities_fourierGreenConvolution
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
real(pReal) function utilities_divergenceRMS()
|
real(pReal) function utilities_divergenceRMS()
|
||||||
|
|
||||||
integer :: i, j, k, ierr
|
integer :: i, j, k
|
||||||
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
complex(pReal), dimension(3) :: rescaledGeom
|
complex(pReal), dimension(3) :: rescaledGeom
|
||||||
|
|
||||||
print'(/,1x,a)', '... calculating divergence ................................................'
|
print'(/,1x,a)', '... calculating divergence ................................................'
|
||||||
|
@ -589,8 +590,8 @@ real(pReal) function utilities_divergenceRMS()
|
||||||
conjg(-xi1st(1:3,grid1Red,j,k))*rescaledGeom))**2)
|
conjg(-xi1st(1:3,grid1Red,j,k))*rescaledGeom))**2)
|
||||||
enddo; enddo
|
enddo; enddo
|
||||||
if (grid(1) == 1) utilities_divergenceRMS = utilities_divergenceRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
|
if (grid(1) == 1) utilities_divergenceRMS = utilities_divergenceRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,utilities_divergenceRMS,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,utilities_divergenceRMS,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
utilities_divergenceRMS = sqrt(utilities_divergenceRMS) * wgt ! RMS in real space calculated with Parsevals theorem from Fourier space
|
utilities_divergenceRMS = sqrt(utilities_divergenceRMS) * wgt ! RMS in real space calculated with Parsevals theorem from Fourier space
|
||||||
|
|
||||||
end function utilities_divergenceRMS
|
end function utilities_divergenceRMS
|
||||||
|
@ -601,7 +602,8 @@ end function utilities_divergenceRMS
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
real(pReal) function utilities_curlRMS()
|
real(pReal) function utilities_curlRMS()
|
||||||
|
|
||||||
integer :: i, j, k, l, ierr
|
integer :: i, j, k, l
|
||||||
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
complex(pReal), dimension(3,3) :: curl_fourier
|
complex(pReal), dimension(3,3) :: curl_fourier
|
||||||
complex(pReal), dimension(3) :: rescaledGeom
|
complex(pReal), dimension(3) :: rescaledGeom
|
||||||
|
|
||||||
|
@ -649,8 +651,8 @@ real(pReal) function utilities_curlRMS()
|
||||||
+ sum(curl_fourier%re**2 + curl_fourier%im**2) ! this layer (Nyquist) does not have a conjugate complex counterpart (if grid(1) /= 1)
|
+ sum(curl_fourier%re**2 + curl_fourier%im**2) ! this layer (Nyquist) does not have a conjugate complex counterpart (if grid(1) /= 1)
|
||||||
enddo; enddo
|
enddo; enddo
|
||||||
|
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,utilities_curlRMS,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,utilities_curlRMS,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
utilities_curlRMS = sqrt(utilities_curlRMS) * wgt
|
utilities_curlRMS = sqrt(utilities_curlRMS) * wgt
|
||||||
if (grid(1) == 1) utilities_curlRMS = utilities_curlRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
|
if (grid(1) == 1) utilities_curlRMS = utilities_curlRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
|
||||||
|
|
||||||
|
@ -799,8 +801,8 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
|
||||||
type(rotation), intent(in), optional :: rotation_BC !< rotation of load frame
|
type(rotation), intent(in), optional :: rotation_BC !< rotation of load frame
|
||||||
|
|
||||||
|
|
||||||
-  integer :: &
-    i,ierr
+  integer :: i
+  integer(MPI_INTEGER_KIND) :: err_MPI
   real(pReal), dimension(3,3,3,3) :: dPdF_max, dPdF_min
   real(pReal) :: dPdF_norm_max, dPdF_norm_min
   real(pReal), dimension(2) :: valueAndRank                                                        !< pair of min/max norm of dPdF to synchronize min/max of dPdF

@@ -818,7 +820,8 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
   P = reshape(homogenization_P, [3,3,grid(1),grid(2),grid3])
   P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt
-  call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
+  call MPI_Allreduce(MPI_IN_PLACE,P_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   if (debugRotation) print'(/,1x,a,/,2(3(2x,f12.4,1x)/),3(2x,f12.4,1x))', &
     'Piola--Kirchhoff stress (lab) / MPa =', transpose(P_av)*1.e-6_pReal
   if (present(rotation_BC)) P_av = rotation_BC%rotate(P_av)
@@ -842,22 +845,22 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
   enddo

   valueAndRank = [dPdF_norm_max,real(worldrank,pReal)]
-  call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1, MPI_2DOUBLE_PRECISION, MPI_MAXLOC, MPI_COMM_WORLD, ierr)
-  if (ierr /= 0) error stop 'MPI error'
-  call MPI_Bcast(dPdF_max,81,MPI_DOUBLE,int(valueAndRank(2)),MPI_COMM_WORLD, ierr)
-  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND,MPI_2DOUBLE_PRECISION,MPI_MAXLOC,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+  call MPI_Bcast(dPdF_max,81_MPI_INTEGER_KIND,MPI_DOUBLE,int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

   valueAndRank = [dPdF_norm_min,real(worldrank,pReal)]
-  call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1, MPI_2DOUBLE_PRECISION, MPI_MINLOC, MPI_COMM_WORLD, ierr)
-  if (ierr /= 0) error stop 'MPI error'
-  call MPI_Bcast(dPdF_min,81,MPI_DOUBLE,int(valueAndRank(2)),MPI_COMM_WORLD, ierr)
-  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND,MPI_2DOUBLE_PRECISION,MPI_MINLOC,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+  call MPI_Bcast(dPdF_min,81_MPI_INTEGER_KIND,MPI_DOUBLE,int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

   C_minmaxAvg = 0.5_pReal*(dPdF_max + dPdF_min)

   C_volAvg = sum(homogenization_dPdF,dim=5)
-  call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
-  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   C_volAvg = C_volAvg * wgt
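
Editor's note: the `MPI_MAXLOC`/`MPI_MINLOC` reductions above operate on a (value, rank) pair so that, after the reduction, every process knows which rank holds the extreme norm and can receive the corresponding full `dPdF` tensor from it. A standalone sketch of that pattern, not DAMASK code; again assuming an MPI-3 `mpi_f08` module and `MPI_DOUBLE_PRECISION`:

program maxloc_sketch
  use mpi_f08
  implicit none
  integer(MPI_INTEGER_KIND) :: err_MPI, worldrank
  double precision :: valueAndRank(2), payload(3,3)

  call MPI_Init(err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

  payload = real(worldrank+1,kind(payload))                        ! stand-in for this rank's dPdF
  valueAndRank = [norm2(payload), real(worldrank,kind(payload))]   ! pair of (norm, owning rank)

  ! find the global maximum norm and the rank that owns it
  call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND, &
                     MPI_2DOUBLE_PRECISION,MPI_MAXLOC,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

  ! let that rank broadcast its full tensor to everybody
  call MPI_Bcast(payload,9_MPI_INTEGER_KIND,MPI_DOUBLE_PRECISION, &
                 int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

  call MPI_Finalize(err_MPI)
end program maxloc_sketch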
|
|
||||||
|
|
||||||
|
@ -906,12 +909,13 @@ function utilities_forwardField(Delta_t,field_lastInc,rate,aim)
|
||||||
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: &
|
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: &
|
||||||
utilities_forwardField
|
utilities_forwardField
|
||||||
real(pReal), dimension(3,3) :: fieldDiff !< <a + adot*t> - aim
|
real(pReal), dimension(3,3) :: fieldDiff !< <a + adot*t> - aim
|
||||||
PetscErrorCode :: ierr
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
|
|
||||||
utilities_forwardField = field_lastInc + rate*Delta_t
|
utilities_forwardField = field_lastInc + rate*Delta_t
|
||||||
if (present(aim)) then !< correct to match average
|
if (present(aim)) then !< correct to match average
|
||||||
fieldDiff = sum(sum(sum(utilities_forwardField,dim=5),dim=4),dim=3)*wgt
|
fieldDiff = sum(sum(sum(utilities_forwardField,dim=5),dim=4),dim=3)*wgt
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,fieldDiff,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
|
call MPI_Allreduce(MPI_IN_PLACE,fieldDiff,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
fieldDiff = fieldDiff - aim
|
fieldDiff = fieldDiff - aim
|
||||||
utilities_forwardField = utilities_forwardField - &
|
utilities_forwardField = utilities_forwardField - &
|
||||||
spread(spread(spread(fieldDiff,3,grid(1)),4,grid(2)),5,grid3)
|
spread(spread(spread(fieldDiff,3,grid(1)),4,grid(2)),5,grid3)
|
||||||
|
@ -981,9 +985,10 @@ subroutine utilities_updateCoords(F)
|
||||||
real(pReal), dimension(3, grid(1)+1,grid(2)+1,grid3+1) :: nodeCoords
|
real(pReal), dimension(3, grid(1)+1,grid(2)+1,grid3+1) :: nodeCoords
|
||||||
integer :: &
|
integer :: &
|
||||||
i,j,k,n, &
|
i,j,k,n, &
|
||||||
rank_t, rank_b, &
|
c
|
||||||
c, &
|
integer(MPI_INTEGER_KIND) :: &
|
||||||
ierr
|
rank_t, rank_b
|
||||||
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
|
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
|
||||||
type(MPI_Request), dimension(4) :: request
|
type(MPI_Request), dimension(4) :: request
|
||||||
type(MPI_Status), dimension(4) :: status
|
type(MPI_Status), dimension(4) :: status
|
||||||
|
@ -1025,30 +1030,30 @@ subroutine utilities_updateCoords(F)
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! average F
|
! average F
|
||||||
if (grid3Offset == 0) Favg = real(tensorField_fourier(1:3,1:3,1,1,1),pReal)*wgt
|
if (grid3Offset == 0) Favg = real(tensorField_fourier(1:3,1:3,1,1,1),pReal)*wgt
|
||||||
call MPI_Bcast(Favg,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(Favg,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! pad cell center fluctuations along z-direction (needed when running MPI simulation)
|
! pad cell center fluctuations along z-direction (needed when running MPI simulation)
|
||||||
IPfluct_padded(1:3,1:grid(1),1:grid(2),2:grid3+1) = vectorField_real(1:3,1:grid(1),1:grid(2),1:grid3)
|
IPfluct_padded(1:3,1:grid(1),1:grid(2),2:grid3+1) = vectorField_real(1:3,1:grid(1),1:grid(2),1:grid3)
|
||||||
c = product(shape(IPfluct_padded(:,:,:,1))) !< amount of data to transfer
|
c = product(shape(IPfluct_padded(:,:,:,1))) !< amount of data to transfer
|
||||||
rank_t = modulo(worldrank+1,worldsize)
|
rank_t = modulo(worldrank+1_MPI_INTEGER_KIND,worldsize)
|
||||||
rank_b = modulo(worldrank-1,worldsize)
|
rank_b = modulo(worldrank-1_MPI_INTEGER_KIND,worldsize)
|
||||||
|
|
||||||
! send bottom layer to process below
|
! send bottom layer to process below
|
||||||
call MPI_Isend(IPfluct_padded(:,:,:,2), c,MPI_DOUBLE,rank_b,0,MPI_COMM_WORLD,request(1),ierr)
|
call MPI_Isend(IPfluct_padded(:,:,:,2), c,MPI_DOUBLE,rank_b,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(1),err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call MPI_Irecv(IPfluct_padded(:,:,:,grid3+2),c,MPI_DOUBLE,rank_t,0,MPI_COMM_WORLD,request(2),ierr)
|
call MPI_Irecv(IPfluct_padded(:,:,:,grid3+2),c,MPI_DOUBLE,rank_t,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(2),err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
|
||||||
! send top layer to process above
|
! send top layer to process above
|
||||||
call MPI_Isend(IPfluct_padded(:,:,:,grid3+1),c,MPI_DOUBLE,rank_t,1,MPI_COMM_WORLD,request(3),ierr)
|
call MPI_Isend(IPfluct_padded(:,:,:,grid3+1),c,MPI_DOUBLE,rank_t,1_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(3),err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call MPI_Irecv(IPfluct_padded(:,:,:,1), c,MPI_DOUBLE,rank_b,1,MPI_COMM_WORLD,request(4),ierr)
|
call MPI_Irecv(IPfluct_padded(:,:,:,1), c,MPI_DOUBLE,rank_b,1_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(4),err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
|
||||||
call MPI_Waitall(4,request,status,ierr)
|
call MPI_Waitall(4,request,status,err_MPI)
|
||||||
if (ierr /=0) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
|
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
|
||||||
! ToDo
|
! ToDo
|
||||||
#else
|
#else
|
||||||
|
|
src/lattice.f90
@@ -587,8 +587,8 @@ function lattice_C66_trans(Ntrans,C_parent66,lattice_target, &

 !--------------------------------------------------------------------------------------------------
 !> @brief Non-schmid projections for bcc with up to 6 coefficients
-! Koester et al. 2012, Acta Materialia 60 (2012) 3894–3901, eq. (17)
-! Gröger et al. 2008, Acta Materialia 56 (2008) 5412–5425, table 1
+! https://doi.org/10.1016/j.actamat.2012.03.053, eq. (17)
+! https://doi.org/10.1016/j.actamat.2008.07.037, table 1
 !--------------------------------------------------------------------------------------------------
 function lattice_nonSchmidMatrix(Nslip,nonSchmidCoefficients,sense) result(nonSchmidMatrix)

|
||||||
|
@ -602,6 +602,7 @@ function lattice_nonSchmidMatrix(Nslip,nonSchmidCoefficients,sense) result(nonSc
|
||||||
type(rotation) :: R
|
type(rotation) :: R
|
||||||
integer :: i
|
integer :: i
|
||||||
|
|
||||||
|
|
||||||
if (abs(sense) /= 1) error stop 'Sense in lattice_nonSchmidMatrix'
|
if (abs(sense) /= 1) error stop 'Sense in lattice_nonSchmidMatrix'
|
||||||
|
|
||||||
coordinateSystem = buildCoordinateSystem(Nslip,BCC_NSLIPSYSTEM,BCC_SYSTEMSLIP,'cI',0.0_pReal)
|
coordinateSystem = buildCoordinateSystem(Nslip,BCC_NSLIPSYSTEM,BCC_SYSTEMSLIP,'cI',0.0_pReal)
|
||||||
|
@ -634,7 +635,9 @@ end function lattice_nonSchmidMatrix
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
!> @brief Slip-slip interaction matrix
|
!> @brief Slip-slip interaction matrix
|
||||||
!> details only active slip systems are considered
|
!> @details only active slip systems are considered
|
||||||
|
!> @details https://doi.org/10.1016/j.actamat.2016.12.040 (fcc: Tab S4-1, bcc: Tab S5-1)
|
||||||
|
!> @details https://doi.org/10.1016/j.ijplas.2014.06.010 (hex: Tab 3b)
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
function lattice_interaction_SlipBySlip(Nslip,interactionValues,lattice) result(interactionMatrix)
|
function lattice_interaction_SlipBySlip(Nslip,interactionValues,lattice) result(interactionMatrix)
|
||||||
|
|
||||||
|
@ -646,6 +649,7 @@ function lattice_interaction_SlipBySlip(Nslip,interactionValues,lattice) result(
|
||||||
integer, dimension(:), allocatable :: NslipMax
|
integer, dimension(:), allocatable :: NslipMax
|
||||||
integer, dimension(:,:), allocatable :: interactionTypes
|
integer, dimension(:,:), allocatable :: interactionTypes
|
||||||
|
|
||||||
|
|
||||||
integer, dimension(FCC_NSLIP,FCC_NSLIP), parameter :: &
|
integer, dimension(FCC_NSLIP,FCC_NSLIP), parameter :: &
|
||||||
FCC_INTERACTIONSLIPSLIP = reshape( [&
|
FCC_INTERACTIONSLIPSLIP = reshape( [&
|
||||||
1, 2, 2, 4, 7, 5, 3, 5, 5, 4, 6, 7, 10,11,10,11,12,13, & ! -----> acting (forest)
|
1, 2, 2, 4, 7, 5, 3, 5, 5, 4, 6, 7, 10,11,10,11,12,13, & ! -----> acting (forest)
|
||||||
|
@ -750,41 +754,113 @@ function lattice_interaction_SlipBySlip(Nslip,interactionValues,lattice) result(
|
||||||
integer, dimension(HEX_NSLIP,HEX_NSLIP), parameter :: &
|
integer, dimension(HEX_NSLIP,HEX_NSLIP), parameter :: &
|
||||||
HEX_INTERACTIONSLIPSLIP = reshape( [&
|
HEX_INTERACTIONSLIPSLIP = reshape( [&
|
||||||
! basal prism 1. pyr<a> 1. pyr<c+a> 2. pyr<c+a>
|
! basal prism 1. pyr<a> 1. pyr<c+a> 2. pyr<c+a>
|
||||||
1, 2, 2, 3, 3, 3, 7, 7, 7, 7, 7, 7, 13,13,13,13,13,13,13,13,13,13,13,13, 21,21,21,21,21,21, & ! -----> acting (forest)
|
1, 2, 2, 3, 4, 4, 9,10, 9, 9,10, 9, 20,21,22,22,21,20,20,21,22,22,21,20, 47,47,48,47,47,48, & ! -----> acting (forest)
|
||||||
2, 1, 2, 3, 3, 3, 7, 7, 7, 7, 7, 7, 13,13,13,13,13,13,13,13,13,13,13,13, 21,21,21,21,21,21, & ! | basal
|
2, 1, 2, 4, 3, 4, 10, 9, 9,10, 9, 9, 22,22,21,20,20,21,22,22,21,20,20,21, 47,48,47,47,48,47, & ! | basal
|
||||||
2, 2, 1, 3, 3, 3, 7, 7, 7, 7, 7, 7, 13,13,13,13,13,13,13,13,13,13,13,13, 21,21,21,21,21,21, & ! |
|
2, 2, 1, 4, 4, 3, 9, 9,10, 9, 9,10, 21,20,20,21,22,22,21,20,20,21,22,22, 48,47,47,48,47,47, & ! |
|
||||||
! v
|
! v
|
||||||
6, 6, 6, 4, 5, 5, 8, 8, 8, 8, 8, 8, 14,14,14,14,14,14,14,14,14,14,14,14, 22,22,22,22,22,22, & ! reacting (primary)
7, 8, 8, 5, 6, 6, 11,12,11,11,12,11, 23,24,25,25,24,23,23,24,25,25,24,23, 49,49,50,49,49,50, & ! reacting (primary)
6, 6, 6, 5, 4, 5, 8, 8, 8, 8, 8, 8, 14,14,14,14,14,14,14,14,14,14,14,14, 22,22,22,22,22,22, & ! prism
8, 7, 8, 6, 5, 6, 12,11,11,12,11,11, 25,25,24,23,23,24,25,25,24,23,23,24, 49,50,49,49,50,49, & ! prism
6, 6, 6, 5, 5, 4, 8, 8, 8, 8, 8, 8, 14,14,14,14,14,14,14,14,14,14,14,14, 22,22,22,22,22,22, &
8, 8, 7, 6, 6, 5, 11,11,12,11,11,12, 24,23,23,24,25,25,24,23,23,24,25,25, 50,49,49,50,49,49, &

12,12,12, 11,11,11, 9,10,10,10,10,10, 15,15,15,15,15,15,15,15,15,15,15,15, 23,23,23,23,23,23, &
18,19,18, 16,17,16, 13,14,14,15,14,14, 26,26,27,28,28,27,29,29,27,28,28,27, 51,52,51,51,52,51, &
12,12,12, 11,11,11, 10, 9,10,10,10,10, 15,15,15,15,15,15,15,15,15,15,15,15, 23,23,23,23,23,23, &
19,18,18, 17,16,16, 14,13,14,14,15,14, 28,27,26,26,27,28,28,27,29,29,27,28, 51,51,52,51,51,52, &
12,12,12, 11,11,11, 10,10, 9,10,10,10, 15,15,15,15,15,15,15,15,15,15,15,15, 23,23,23,23,23,23, &
18,18,19, 16,16,17, 14,14,13,14,14,15, 27,28,28,27,26,26,27,28,28,27,29,29, 52,51,51,52,51,51, &
12,12,12, 11,11,11, 10,10,10, 9,10,10, 15,15,15,15,15,15,15,15,15,15,15,15, 23,23,23,23,23,23, & ! 1. pyr<a>
18,19,18, 16,17,16, 15,14,14,13,14,14, 29,29,27,28,28,27,26,26,27,28,28,27, 51,52,51,51,52,51, & ! 1. pyr<a>
12,12,12, 11,11,11, 10,10,10,10, 9,10, 15,15,15,15,15,15,15,15,15,15,15,15, 23,23,23,23,23,23, &
19,18,18, 17,16,16, 14,15,14,14,13,14, 28,27,29,29,27,28,28,27,26,26,27,28, 51,51,52,51,51,52, &
12,12,12, 11,11,11, 10,10,10,10,10, 9, 15,15,15,15,15,15,15,15,15,15,15,15, 23,23,23,23,23,23, &
18,18,19, 16,16,17, 14,14,15,14,14,13, 27,28,28,27,29,29,27,28,28,27,26,26, 52,51,51,52,51,51, &

20,20,20, 19,19,19, 18,18,18,18,18,18, 16,17,17,17,17,17,17,17,17,17,17,17, 24,24,24,24,24,24, &
44,45,46, 41,42,43, 37,38,39,40,38,39, 30,31,32,32,32,33,34,35,32,32,32,36, 53,54,55,53,54,56, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,16,17,17,17,17,17,17,17,17,17,17, 24,24,24,24,24,24, &
46,45,44, 43,42,41, 37,39,38,40,39,38, 31,30,36,32,32,32,35,34,33,32,32,32, 56,54,53,55,54,53, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,16,17,17,17,17,17,17,17,17,17, 24,24,24,24,24,24, &
45,46,44, 42,43,41, 39,37,38,39,40,38, 32,36,30,31,32,32,32,33,34,35,32,32, 56,53,54,55,53,54, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,16,17,17,17,17,17,17,17,17, 24,24,24,24,24,24, &
45,44,46, 42,41,43, 38,37,39,38,40,39, 32,32,31,30,36,32,32,32,35,34,33,32, 53,56,54,53,55,54, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,17,16,17,17,17,17,17,17,17, 24,24,24,24,24,24, &
46,44,45, 43,41,42, 38,39,37,38,39,40, 32,32,32,36,30,31,32,32,32,33,34,35, 54,56,53,54,55,53, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,17,17,16,17,17,17,17,17,17, 24,24,24,24,24,24, &
44,46,45, 41,43,42, 39,38,37,39,38,40, 33,32,32,32,31,30,36,32,32,32,35,34, 54,53,56,54,53,55, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,17,17,17,16,17,17,17,17,17, 24,24,24,24,24,24, & ! 1. pyr<c+a>
44,45,46, 41,42,43, 40,38,39,37,38,39, 34,35,32,32,32,36,30,31,32,32,32,33, 53,54,56,53,54,55, & ! 1. pyr<c+a>
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,17,17,17,17,16,17,17,17,17, 24,24,24,24,24,24, &
46,45,44, 43,42,41, 40,39,38,37,39,38, 35,34,33,32,32,32,31,30,36,32,32,32, 55,54,53,56,54,53, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,17,17,17,17,17,16,17,17,17, 24,24,24,24,24,24, &
45,46,44, 42,43,41, 39,40,38,39,37,38, 32,33,34,35,32,32,32,36,30,31,32,32, 55,53,54,56,53,54, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,17,17,17,17,17,17,16,17,17, 24,24,24,24,24,24, &
45,44,46, 42,41,43, 38,40,39,38,37,39, 32,32,35,34,33,32,32,32,31,30,36,32, 53,55,54,53,56,54, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,17,17,17,17,17,17,17,16,17, 24,24,24,24,24,24, &
46,44,45, 43,41,42, 38,39,40,38,39,37, 32,32,32,33,34,35,32,32,32,36,30,31, 54,55,53,54,56,53, &
20,20,20, 19,19,19, 18,18,18,18,18,18, 17,17,17,17,17,17,17,17,17,17,17,16, 24,24,24,24,24,24, &
44,46,45, 41,43,42, 39,38,40,39,38,37, 36,32,32,32,35,34,33,32,32,32,31,30, 54,53,55,54,53,56, &

30,30,30, 29,29,29, 28,28,28,28,28,28, 27,27,27,27,27,27,27,27,27,27,27,27, 25,26,26,26,26,26, &
68,68,69, 66,66,67, 64,64,65,64,65,65, 60,61,61,60,62,62,60,63,63,60,62,62, 57,58,58,59,58,58, &
30,30,30, 29,29,29, 28,28,28,28,28,28, 27,27,27,27,27,27,27,27,27,27,27,27, 26,25,26,26,26,26, &
68,69,68, 66,67,66, 65,64,64,65,64,64, 62,62,60,61,61,60,62,62,60,63,63,60, 58,57,58,58,59,58, &
30,30,30, 29,29,29, 28,28,28,28,28,28, 27,27,27,27,27,27,27,27,27,27,27,27, 26,26,25,26,26,26, &
69,68,68, 67,66,66, 64,65,64,64,65,64, 63,60,62,62,60,61,61,60,62,62,60,63, 58,58,57,58,58,59, &
30,30,30, 29,29,29, 28,28,28,28,28,28, 27,27,27,27,27,27,27,27,27,27,27,27, 26,26,26,25,26,26, & ! 2. pyr<c+a>
68,68,69, 66,66,67, 64,64,65,64,64,65, 60,63,63,60,62,62,60,61,61,60,62,62, 59,58,58,57,58,58, & ! 2. pyr<c+a>
30,30,30, 29,29,29, 28,28,28,28,28,28, 27,27,27,27,27,27,27,27,27,27,27,27, 26,26,26,26,25,26, &
68,69,68, 66,67,66, 65,64,64,65,64,64, 62,62,60,63,63,60,62,62,60,61,61,60, 58,59,58,58,57,58, &
30,30,30, 29,29,29, 28,28,28,28,28,28, 27,27,27,27,27,27,27,27,27,27,27,27, 26,26,26,26,26,25 &
69,68,68, 67,66,66, 64,65,64,64,65,64, 61,60,62,62,60,63,63,60,62,62,60,61, 58,58,59,58,58,57 &
],shape(HEX_INTERACTIONSLIPSLIP)) !< Slip-slip interaction types for hex (onion peel naming scheme)
!< 10.1016/j.ijplas.2014.06.010 table 3
!< 10.1080/14786435.2012.699689 table 2 and 3
!< index & label & description
!< 1 & S1 & basal self-interaction
!< 2 & 1 & basal/basal coplanar
!< 3 & 3 & basal/prismatic collinear
!< 4 & 4 & basal/prismatic non-collinear
!< 5 & S2 & prismatic self-interaction
!< 6 & 2 & prismatic/prismatic
!< 7 & 5 & prismatic/basal collinear
!< 8 & 6 & prismatic/basal non-collinear
!< 9 & - & basal/pyramidal <a> non-collinear
!< 10 & - & basal/pyramidal <a> collinear
!< 11 & - & prismatic/pyramidal <a> non-collinear
!< 12 & - & prismatic/pyramidal <a> collinear
!< 13 & - & pyramidal <a> self-interaction
!< 14 & - & pyramidal <a> non-collinear
!< 15 & - & pyramidal <a> collinear
!< 16 & - & pyramidal <a>/prismatic non-collinear
!< 17 & - & pyramidal <a>/prismatic collinear
!< 18 & - & pyramidal <a>/basal non-collinear
!< 19 & - & pyramidal <a>/basal collinear
!< 20 & - & basal/1. order pyramidal <c+a> semi-collinear
!< 21 & - & basal/1. order pyramidal <c+a>
!< 22 & - & basal/1. order pyramidal <c+a>
!< 23 & - & prismatic/1. order pyramidal <c+a> semi-collinear
!< 24 & - & prismatic/1. order pyramidal <c+a>
!< 25 & - & prismatic/1. order pyramidal <c+a> semi-coplanar?
!< 26 & - & pyramidal <a>/1. order pyramidal <c+a> coplanar
!< 27 & - & pyramidal <a>/1. order pyramidal <c+a>
!< 28 & - & pyramidal <a>/1. order pyramidal <c+a> semi-collinear
!< 29 & - & pyramidal <a>/1. order pyramidal <c+a> semi-coplanar
!< 30 & - & 1. order pyramidal <c+a> self-interaction
!< 31 & - & 1. order pyramidal <c+a> coplanar
!< 32 & - & 1. order pyramidal <c+a>
!< 33 & - & 1. order pyramidal <c+a>
!< 34 & - & 1. order pyramidal <c+a> semi-coplanar
!< 35 & - & 1. order pyramidal <c+a> semi-coplanar
!< 36 & - & 1. order pyramidal <c+a> collinear
!< 37 & - & 1. order pyramidal <c+a>/pyramidal <a> coplanar
!< 38 & - & 1. order pyramidal <c+a>/pyramidal <a> semi-collinear
!< 39 & - & 1. order pyramidal <c+a>/pyramidal <a>
!< 40 & - & 1. order pyramidal <c+a>/pyramidal <a> semi-coplanar
!< 41 & - & 1. order pyramidal <c+a>/prismatic semi-collinear
!< 42 & - & 1. order pyramidal <c+a>/prismatic semi-coplanar
!< 43 & - & 1. order pyramidal <c+a>/prismatic
!< 44 & - & 1. order pyramidal <c+a>/basal semi-collinear
!< 45 & - & 1. order pyramidal <c+a>/basal
!< 46 & - & 1. order pyramidal <c+a>/basal
!< 47 & 8 & basal/2. order pyramidal <c+a> non-collinear
!< 48 & 7 & basal/2. order pyramidal <c+a> semi-collinear
!< 49 & 10 & prismatic/2. order pyramidal <c+a>
!< 50 & 9 & prismatic/2. order pyramidal <c+a> semi-collinear
!< 51 & - & pyramidal <a>/2. order pyramidal <c+a>
!< 52 & - & pyramidal <a>/2. order pyramidal <c+a> semi collinear
!< 53 & - & 1. order pyramidal <c+a>/2. order pyramidal <c+a>
!< 54 & - & 1. order pyramidal <c+a>/2. order pyramidal <c+a>
!< 55 & - & 1. order pyramidal <c+a>/2. order pyramidal <c+a>
!< 56 & - & 1. order pyramidal <c+a>/2. order pyramidal <c+a> collinear
!< 57 & S3 & 2. order pyramidal <c+a> self-interaction
!< 58 & 16 & 2. order pyramidal <c+a> non-collinear
!< 59 & 15 & 2. order pyramidal <c+a> semi-collinear
!< 60 & - & 2. order pyramidal <c+a>/1. order pyramidal <c+a>
!< 61 & - & 2. order pyramidal <c+a>/1. order pyramidal <c+a> collinear
!< 62 & - & 2. order pyramidal <c+a>/1. order pyramidal <c+a>
!< 63 & - & 2. order pyramidal <c+a>/1. order pyramidal <c+a>
!< 64 & - & 2. order pyramidal <c+a>/pyramidal <a> non-collinear
!< 65 & - & 2. order pyramidal <c+a>/pyramidal <a> semi-collinear
!< 66 & 14 & 2. order pyramidal <c+a>/prismatic non-collinear
!< 67 & 13 & 2. order pyramidal <c+a>/prismatic semi-collinear
!< 68 & 12 & 2. order pyramidal <c+a>/basal non-collinear
!< 69 & 11 & 2. order pyramidal <c+a>/basal semi-collinear

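The flat list above is turned into a square lookup matrix with reshape, so that entry (i,j) gives the interaction type of slip system j acting on system i. The following minimal, self-contained sketch is not taken from the DAMASK sources; it uses a made-up 3-system example purely to illustrate the mechanism:

  program interaction_demo
    implicit none
    ! hypothetical labels: 1 = self-interaction, 2 = coplanar, 3 = collinear
    ! reshape fills column-major; a symmetric literal keeps the example unambiguous
    integer, dimension(3,3), parameter :: INTERACTION = reshape( [ &
      1, 2, 3, &
      2, 1, 2, &
      3, 2, 1  ], shape(INTERACTION))

    print '(a,i0)', 'interaction type of system 3 acting on system 1: ', INTERACTION(1,3)
  end program interaction_demo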
integer, dimension(BCT_NSLIP,BCT_NSLIP), parameter :: &
BCT_INTERACTIONSLIPSLIP = reshape( [&
@ -108,8 +108,14 @@ subroutine parse()

homogenizations => config_material%get('homogenization')

call sanityCheck(materials, homogenizations)

#if defined (__GFORTRAN__)
material_name_phase = getKeys(phases)
material_name_homogenization = getKeys(homogenizations)
#else
material_name_phase = phases%Keys()
material_name_homogenization = homogenizations%Keys()
#endif

allocate(homogenization_Nconstituents(homogenizations%length))
do h=1, homogenizations%length
@ -203,9 +209,9 @@ subroutine sanityCheck(materials,homogenizations)

end subroutine sanityCheck


#if defined (__GFORTRAN__)
!--------------------------------------------------------------------------------------------------
!> @brief Get all keys from a dictionary
!> @brief %keys() is broken on gfortran
!--------------------------------------------------------------------------------------------------
function getKeys(dict)

@ -228,5 +234,6 @@ function getKeys(dict)
end do

end function getKeys
#endif

end module material
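The getKeys() helper exists only because the type-bound %Keys() of the dictionary type misbehaves with gfortran, so the two code paths are switched at compile time. A minimal sketch of that compile-time switch (standalone, not DAMASK code; the source file needs preprocessing, e.g. a .F90 suffix or the -cpp flag):

  program compiler_switch
    implicit none
  #if defined (__GFORTRAN__)
    print *, 'gfortran build: falling back to the getKeys() helper'
  #else
    print *, 'other compiler: using the type-bound %Keys() directly'
  #endif
  end program compiler_switch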
src/math.f90
@ -84,38 +84,34 @@ contains
subroutine math_init

real(pReal), dimension(4) :: randTest
integer :: &
integer :: randSize
randSize, &
integer, dimension(:), allocatable :: seed
randomSeed !< fixed seeding for pseudo-random number generator, Default 0: use random seed
integer, dimension(:), allocatable :: randInit
class(tNode), pointer :: &
num_generic


print'(/,1x,a)', '<<<+- math init -+>>>'; flush(IO_STDOUT)

num_generic => config_numerics%get('generic',defaultVal=emptyDict)
randomSeed = num_generic%get_asInt('random_seed', defaultVal = 0)

call random_seed(size=randSize)
allocate(randInit(randSize))
allocate(seed(randSize))
if (randomSeed > 0) then
randInit = randomSeed
if (num_generic%contains('random_seed')) then
seed = num_generic%get_as1dInt('random_seed',requiredSize=randSize)
else
call random_seed()
call random_seed(get = randInit)
call random_seed(get = seed)
randInit(2:randSize) = randInit(1)
end if
endif

call random_seed(put = randInit)
call random_seed(put = seed)
call random_number(randTest)

print'(/,a,i2)', ' size of random seed: ', randSize
print'( a,i0)', ' value of random seed: ', randInit(1)
print*, 'value of random seed: ', seed
print'( a,4(/,26x,f17.14),/)', ' start of random sequence: ', randTest
print'( a,4(/,26x,f17.14))', ' start of random sequence: ', randTest

call random_seed(put = randInit)
call selfTest()

call selfTest

end subroutine math_init

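The reworked seeding above either takes a full seed array from the configuration (get_as1dInt with requiredSize) or falls back to the runtime-chosen seed; the scalar randomSeed and the manual broadcast randInit(2:randSize) = randInit(1) are gone. A minimal standard-Fortran sketch of the same query/get/put pattern (standalone, not DAMASK code):

  program seeding_demo
    implicit none
    integer :: randSize
    integer, dimension(:), allocatable :: seed
    real, dimension(4) :: randTest

    call random_seed(size=randSize)   ! how many integers the generator expects
    allocate(seed(randSize))
    call random_seed()                ! let the runtime choose a seed
    call random_seed(get=seed)        ! retrieve it so it can be reported and reused
    call random_seed(put=seed)        ! reseeding with the same values reproduces the sequence
    call random_number(randTest)

    print *, 'size of random seed:      ', randSize
    print *, 'value of random seed:     ', seed
    print *, 'start of random sequence: ', randTest
  end program seeding_demo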
@ -78,7 +78,7 @@ program DAMASK_mesh
type(tLoadCase), allocatable, dimension(:) :: loadCases !< array of all load cases
type(tSolutionState), allocatable, dimension(:) :: solres
PetscInt :: faceSet, currentFaceSet, dimPlex
PetscErrorCode :: ierr
PetscErrorCode :: err_PETSc
integer(kind(COMPONENT_UNDEFINED_ID)) :: ID
external :: &
quit

@ -98,8 +98,8 @@ program DAMASK_mesh
if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')

! reading basic information from load case file and allocate data structure containing load cases
call DMGetDimension(geomMesh,dimPlex,ierr) !< dimension of mesh (2D or 3D)
call DMGetDimension(geomMesh,dimPlex,err_PETSc) !< dimension of mesh (2D or 3D)
CHKERRA(ierr)
CHKERRA(err_PETSc)
allocate(solres(1))

!--------------------------------------------------------------------------------------------------

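From here on the diff systematically renames the generic ierr into err_PETSc (and err_MPI where MPI is called) so that the origin of a failing call is obvious. A hedged, minimal sketch of the convention as used in these files (assumes a working PETSc installation; the subroutine name is made up):

  #include <petsc/finclude/petscsys.h>
  subroutine apply_debug_options()
    use petscsys
    implicit none

    PetscErrorCode :: err_PETSc   ! every PETSc call reports into this variable

    call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-snes_view -snes_monitor',err_PETSc)
    CHKERRQ(err_PETSc)            ! abort with a traceback if the call failed
  end subroutine apply_debug_options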
@ -50,7 +50,7 @@ module FEM_utilities
type, public :: tSolutionState !< return type of solution from FEM solver variants
logical :: converged = .true.
logical :: stagConverged = .true.
integer :: iterationsNeeded = 0
PetscInt :: iterationsNeeded = 0_pPETSCINT
end type tSolutionState

type, public :: tComponentBC

@ -92,7 +92,7 @@ subroutine FEM_utilities_init
p_i !< integration order (quadrature rule)
character(len=*), parameter :: &
PETSCDEBUG = ' -snes_view -snes_monitor '
PetscErrorCode :: ierr
PetscErrorCode :: err_PETSc
logical :: debugPETSc !< use some in debug defined options for more verbose PETSc solution


@ -103,9 +103,9 @@ subroutine FEM_utilities_init
p_s = num_mesh%get_asInt('p_s',defaultVal = 2)
p_i = num_mesh%get_asInt('p_i',defaultVal = p_s)

if (p_s < 1_pInt .or. p_s > size(FEM_nQuadrature,2)) &
if (p_s < 1 .or. p_s > size(FEM_nQuadrature,2)) &
call IO_error(821,ext_msg='shape function order (p_s) out of bounds')
if (p_i < max(1_pInt,p_s-1_pInt) .or. p_i > p_s) &
if (p_i < max(1,p_s-1) .or. p_i > p_s) &
call IO_error(821,ext_msg='integration order (p_i) out of bounds')

debug_mesh => config_debug%get('mesh',defaultVal=emptyList)

@ -116,20 +116,20 @@ subroutine FEM_utilities_init
trim(PETScDebug), &
'add more using the "PETSc_options" keyword in numerics.yaml'
flush(IO_STDOUT)
call PetscOptionsClear(PETSC_NULL_OPTIONS,ierr)
call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)
CHKERRQ(ierr)
CHKERRQ(err_PETSc)
if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr)
if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),err_PETSc)
CHKERRQ(ierr)
CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type newtonls &
&-mechanical_snes_linesearch_type cp -mechanical_snes_ksp_ew &
&-mechanical_snes_ksp_ew_rtol0 0.01 -mechanical_snes_ksp_ew_rtolmax 0.01 &
&-mechanical_ksp_type fgmres -mechanical_ksp_max_it 25', ierr)
&-mechanical_ksp_type fgmres -mechanical_ksp_max_it 25', err_PETSc)
CHKERRQ(ierr)
CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_mesh%get_asString('PETSc_options',defaultVal=''),ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_mesh%get_asString('PETSc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr)
CHKERRQ(err_PETSc)
write(petsc_optionsOrder,'(a,i0)') '-mechFE_petscspace_degree ', p_s
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_optionsOrder),ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_optionsOrder),err_PETSc)
CHKERRQ(ierr)
CHKERRQ(err_PETSc)

wgt = 1.0/real(mesh_maxNips*mesh_NcpElemsGlobal,pReal)

@ -144,10 +144,9 @@ subroutine utilities_constitutiveResponse(timeinc,P_av,forwardData)

real(pReal), intent(in) :: timeinc !< loading time
logical, intent(in) :: forwardData !< age results

real(pReal),intent(out), dimension(3,3) :: P_av !< average PK stress

PetscErrorCode :: ierr
integer(MPI_INTEGER_KIND) :: err_MPI

print'(/,1x,a)', '... evaluating constitutive response ......................................'

@ -157,7 +156,9 @@ subroutine utilities_constitutiveResponse(timeinc,P_av,forwardData)
cutBack = .false.

P_av = sum(homogenization_P,dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,P_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

end subroutine utilities_constitutiveResponse

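The averaged stress is summed across ranks and, new in this version, the MPI return code is checked explicitly instead of being ignored. A minimal sketch of that fail-fast pattern with mpi_f08 (standalone, not DAMASK code; DAMASK itself passes count and error arguments with the kind constant MPI_INTEGER_KIND):

  program allreduce_check
    use mpi_f08
    implicit none
    integer :: err_MPI
    real :: total

    call MPI_Init(err_MPI)
    total = 1.0
    call MPI_Allreduce(MPI_IN_PLACE,total,1,MPI_REAL,MPI_SUM,MPI_COMM_WORLD,err_MPI)
    if (err_MPI /= 0) error stop 'MPI error'   ! stop immediately instead of continuing with bad data
    call MPI_Finalize(err_MPI)
  end program allreduce_check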
@ -174,26 +175,29 @@ subroutine utilities_projectBCValues(localVec,section,field,comp,bcPointsIS,BCVa

PetscInt, pointer :: bcPoints(:)
PetscScalar, pointer :: localArray(:)
PetscScalar :: BCValue,BCDotValue,timeinc
PetscErrorCode :: ierr
PetscErrorCode :: err_PETSc


call PetscSectionGetFieldComponents(section,field,numComp,ierr); CHKERRQ(ierr)
call PetscSectionGetFieldComponents(section,field,numComp,err_PETSc)
call ISGetSize(bcPointsIS,nBcPoints,ierr); CHKERRQ(ierr)
CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISGetIndicesF90(bcPointsIS,bcPoints,ierr)
call ISGetSize(bcPointsIS,nBcPoints,err_PETSc)
call VecGetArrayF90(localVec,localArray,ierr); CHKERRQ(ierr)
CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISGetIndicesF90(bcPointsIS,bcPoints,err_PETSc)
call VecGetArrayF90(localVec,localArray,err_PETSc); CHKERRQ(err_PETSc)
do point = 1, nBcPoints
call PetscSectionGetFieldDof(section,bcPoints(point),field,numDof,ierr)
call PetscSectionGetFieldDof(section,bcPoints(point),field,numDof,err_PETSc)
CHKERRQ(ierr)
CHKERRQ(err_PETSc)
call PetscSectionGetFieldOffset(section,bcPoints(point),field,offset,ierr)
call PetscSectionGetFieldOffset(section,bcPoints(point),field,offset,err_PETSc)
CHKERRQ(ierr)
CHKERRQ(err_PETSc)
do dof = offset+comp+1, offset+numDof, numComp
localArray(dof) = localArray(dof) + BCValue + BCDotValue*timeinc
end do
end do
call VecRestoreArrayF90(localVec,localArray,ierr); CHKERRQ(ierr)
call VecRestoreArrayF90(localVec,localArray,err_PETSc); CHKERRQ(err_PETSc)
call VecAssemblyBegin(localVec, ierr); CHKERRQ(ierr)
call VecAssemblyBegin(localVec, err_PETSc); CHKERRQ(err_PETSc)
call VecAssemblyEnd (localVec, ierr); CHKERRQ(ierr)
call VecAssemblyEnd (localVec, err_PETSc); CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISRestoreIndicesF90(bcPointsIS,bcPoints,ierr)
if (nBcPoints > 0) call ISRestoreIndicesF90(bcPointsIS,bcPoints,err_PETSc)
CHKERRQ(err_PETSc)

end subroutine utilities_projectBCValues

@ -71,21 +71,22 @@ subroutine discretization_mesh_init(restart)
|
||||||
|
|
||||||
logical, intent(in) :: restart
|
logical, intent(in) :: restart
|
||||||
|
|
||||||
integer :: dimPlex, &
|
PetscInt :: dimPlex, &
|
||||||
mesh_Nnodes, & !< total number of nodes in mesh
|
mesh_Nnodes, & !< total number of nodes in mesh
|
||||||
j, &
|
j, &
|
||||||
debug_element, debug_ip
|
debug_element, debug_ip
|
||||||
PetscSF :: sf
|
PetscSF :: sf
|
||||||
DM :: globalMesh
|
DM :: globalMesh
|
||||||
PetscInt :: nFaceSets
|
PetscInt :: nFaceSets, Nboundaries, NelemsGlobal, Nelems
|
||||||
PetscInt, pointer, dimension(:) :: pFaceSets
|
PetscInt, pointer, dimension(:) :: pFaceSets
|
||||||
IS :: faceSetIS
|
IS :: faceSetIS
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
integer, dimension(:), allocatable :: &
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
|
PetscInt, dimension(:), allocatable :: &
|
||||||
materialAt
|
materialAt
|
||||||
class(tNode), pointer :: &
|
class(tNode), pointer :: &
|
||||||
num_mesh
|
num_mesh
|
||||||
integer :: p_i !< integration order (quadrature rule)
|
integer :: p_i, dim !< integration order (quadrature rule)
|
||||||
type(tvec) :: coords_node0
|
type(tvec) :: coords_node0
|
||||||
|
|
||||||
print'(/,1x,a)', '<<<+- discretization_mesh init -+>>>'
|
print'(/,1x,a)', '<<<+- discretization_mesh init -+>>>'
|
||||||
|
@ -100,56 +101,64 @@ subroutine discretization_mesh_init(restart)
|
||||||
debug_element = config_debug%get_asInt('element',defaultVal=1)
|
debug_element = config_debug%get_asInt('element',defaultVal=1)
|
||||||
debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1)
|
debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1)
|
||||||
|
|
||||||
call DMPlexCreateFromFile(PETSC_COMM_WORLD,interface_geomFile,PETSC_TRUE,globalMesh,ierr)
|
call DMPlexCreateFromFile(PETSC_COMM_WORLD,interface_geomFile,PETSC_TRUE,globalMesh,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call DMGetDimension(globalMesh,dimPlex,ierr)
|
call DMGetDimension(globalMesh,dimPlex,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call DMGetStratumSize(globalMesh,'depth',dimPlex,mesh_NcpElemsGlobal,ierr)
|
call DMGetStratumSize(globalMesh,'depth',dimPlex,NelemsGlobal,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
print'()'
|
mesh_NcpElemsGlobal = int(NelemsGlobal)
|
||||||
call DMView(globalMesh, PETSC_VIEWER_STDOUT_WORLD,ierr)
|
call DMView(globalMesh, PETSC_VIEWER_STDOUT_WORLD,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
! get number of IDs in face sets (for boundary conditions?)
|
! get number of IDs in face sets (for boundary conditions?)
|
||||||
call DMGetLabelSize(globalMesh,'Face Sets',mesh_Nboundaries,ierr)
|
call DMGetLabelSize(globalMesh,'Face Sets',Nboundaries,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call MPI_Bcast(mesh_Nboundaries,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
|
mesh_Nboundaries = int(Nboundaries)
|
||||||
call MPI_Bcast(mesh_NcpElemsGlobal,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(mesh_Nboundaries,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
call MPI_Bcast(dimPlex,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
call MPI_Bcast(mesh_NcpElemsGlobal,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
dim = int(dimPlex)
|
||||||
|
call MPI_Bcast(dim,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
|
dimPlex = int(dim,pPETSCINT)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
|
||||||
if (worldrank == 0) then
|
if (worldrank == 0) then
|
||||||
call DMClone(globalMesh,geomMesh,ierr)
|
call DMClone(globalMesh,geomMesh,err_PETSc)
|
||||||
else
|
else
|
||||||
call DMPlexDistribute(globalMesh,0,sf,geomMesh,ierr)
|
call DMPlexDistribute(globalMesh,0_pPETSCINT,sf,geomMesh,err_PETSc)
|
||||||
endif
|
endif
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
allocate(mesh_boundaries(mesh_Nboundaries), source = 0)
|
allocate(mesh_boundaries(mesh_Nboundaries), source = 0_pPETSCINT)
|
||||||
call DMGetLabelSize(globalMesh,'Face Sets',nFaceSets,ierr)
|
call DMGetLabelSize(globalMesh,'Face Sets',nFaceSets,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call DMGetLabelIdIS(globalMesh,'Face Sets',faceSetIS,ierr)
|
call DMGetLabelIdIS(globalMesh,'Face Sets',faceSetIS,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
if (nFaceSets > 0) then
|
if (nFaceSets > 0) then
|
||||||
call ISGetIndicesF90(faceSetIS,pFaceSets,ierr)
|
call ISGetIndicesF90(faceSetIS,pFaceSets,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
mesh_boundaries(1:nFaceSets) = pFaceSets
|
mesh_boundaries(1:nFaceSets) = pFaceSets
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call ISRestoreIndicesF90(faceSetIS,pFaceSets,ierr)
|
call ISRestoreIndicesF90(faceSetIS,pFaceSets,err_PETSc)
|
||||||
endif
|
endif
|
||||||
call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
|
call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
|
||||||
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
|
|
||||||
call DMDestroy(globalMesh,ierr); CHKERRQ(ierr)
|
call DMDestroy(globalMesh,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
call DMGetStratumSize(geomMesh,'depth',dimPlex,mesh_NcpElems,ierr)
|
call DMGetStratumSize(geomMesh,'depth',dimPlex,Nelems,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call DMGetStratumSize(geomMesh,'depth',0,mesh_Nnodes,ierr)
|
mesh_NcpElems = int(Nelems)
|
||||||
CHKERRQ(ierr)
|
call DMGetStratumSize(geomMesh,'depth',0_pPETSCINT,mesh_Nnodes,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
! Get initial nodal coordinates
|
! Get initial nodal coordinates
|
||||||
call DMGetCoordinates(geomMesh,coords_node0,ierr)
|
call DMGetCoordinates(geomMesh,coords_node0,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call VecGetArrayF90(coords_node0, mesh_node0_temp,ierr)
|
call VecGetArrayF90(coords_node0, mesh_node0_temp,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
mesh_maxNips = FEM_nQuadrature(dimPlex,p_i)
|
mesh_maxNips = FEM_nQuadrature(dimPlex,p_i)
|
||||||
|
|
||||||
|
@ -158,10 +167,10 @@ subroutine discretization_mesh_init(restart)
|
||||||
|
|
||||||
allocate(materialAt(mesh_NcpElems))
|
allocate(materialAt(mesh_NcpElems))
|
||||||
do j = 1, mesh_NcpElems
|
do j = 1, mesh_NcpElems
|
||||||
call DMGetLabelValue(geomMesh,'Cell Sets',j-1,materialAt(j),ierr)
|
call DMGetLabelValue(geomMesh,'Cell Sets',j-1,materialAt(j),err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
enddo
|
enddo
|
||||||
materialAt = materialAt + 1
|
materialAt = materialAt + 1_pPETSCINT
|
||||||
|
|
||||||
if (debug_element < 1 .or. debug_element > mesh_NcpElems) call IO_error(602,ext_msg='element')
|
if (debug_element < 1 .or. debug_element > mesh_NcpElems) call IO_error(602,ext_msg='element')
|
||||||
if (debug_ip < 1 .or. debug_ip > mesh_maxNips) call IO_error(602,ext_msg='IP')
|
if (debug_ip < 1 .or. debug_ip > mesh_maxNips) call IO_error(602,ext_msg='IP')
|
||||||
|
@ -170,7 +179,7 @@ subroutine discretization_mesh_init(restart)
|
||||||
mesh_node0(1:dimPlex,:) = reshape(mesh_node0_temp,[dimPlex,mesh_Nnodes])
|
mesh_node0(1:dimPlex,:) = reshape(mesh_node0_temp,[dimPlex,mesh_Nnodes])
|
||||||
|
|
||||||
|
|
||||||
call discretization_init(materialAt,&
|
call discretization_init(int(materialAt),&
|
||||||
reshape(mesh_ipCoordinates,[3,mesh_maxNips*mesh_NcpElems]), &
|
reshape(mesh_ipCoordinates,[3,mesh_maxNips*mesh_NcpElems]), &
|
||||||
mesh_node0)
|
mesh_node0)
|
||||||
|
|
||||||
|
@ -188,16 +197,17 @@ subroutine mesh_FEM_build_ipVolumes(dimPlex)
|
||||||
PetscReal :: vol
|
PetscReal :: vol
|
||||||
PetscReal, pointer,dimension(:) :: pCent, pNorm
|
PetscReal, pointer,dimension(:) :: pCent, pNorm
|
||||||
PetscInt :: cellStart, cellEnd, cell
|
PetscInt :: cellStart, cellEnd, cell
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
|
|
||||||
allocate(mesh_ipVolume(mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
|
allocate(mesh_ipVolume(mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
|
||||||
|
|
||||||
call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,ierr); CHKERRQ(ierr)
|
call DMPlexGetHeightStratum(geomMesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
allocate(pCent(dimPlex))
|
allocate(pCent(dimPlex))
|
||||||
allocate(pNorm(dimPlex))
|
allocate(pNorm(dimPlex))
|
||||||
do cell = cellStart, cellEnd-1
|
do cell = cellStart, cellEnd-1
|
||||||
call DMPlexComputeCellGeometryFVM(geomMesh,cell,vol,pCent,pNorm,ierr)
|
call DMPlexComputeCellGeometryFVM(geomMesh,cell,vol,pCent,pNorm,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
mesh_ipVolume(:,cell+1) = vol/real(mesh_maxNips,pReal)
|
mesh_ipVolume(:,cell+1) = vol/real(mesh_maxNips,pReal)
|
||||||
enddo
|
enddo
|
||||||
|
|
||||||
|
@ -215,7 +225,7 @@ subroutine mesh_FEM_build_ipCoordinates(dimPlex,qPoints)
|
||||||
PetscReal, pointer,dimension(:) :: pV0, pCellJ, pInvcellJ
|
PetscReal, pointer,dimension(:) :: pV0, pCellJ, pInvcellJ
|
||||||
PetscReal :: detJ
|
PetscReal :: detJ
|
||||||
PetscInt :: cellStart, cellEnd, cell, qPt, dirI, dirJ, qOffset
|
PetscInt :: cellStart, cellEnd, cell, qPt, dirI, dirJ, qOffset
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
|
|
||||||
|
|
||||||
allocate(mesh_ipCoordinates(3,mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
|
allocate(mesh_ipCoordinates(3,mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
|
||||||
|
@ -223,10 +233,11 @@ subroutine mesh_FEM_build_ipCoordinates(dimPlex,qPoints)
|
||||||
allocate(pV0(dimPlex))
|
allocate(pV0(dimPlex))
|
||||||
allocatE(pCellJ(dimPlex**2))
|
allocatE(pCellJ(dimPlex**2))
|
||||||
allocatE(pinvCellJ(dimPlex**2))
|
allocatE(pinvCellJ(dimPlex**2))
|
||||||
call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,ierr); CHKERRQ(ierr)
|
call DMPlexGetHeightStratum(geomMesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
do cell = cellStart, cellEnd-1 !< loop over all elements
|
do cell = cellStart, cellEnd-1 !< loop over all elements
|
||||||
call DMPlexComputeCellGeometryAffineFEM(geomMesh,cell,pV0,pCellJ,pInvcellJ,detJ,ierr)
|
call DMPlexComputeCellGeometryAffineFEM(geomMesh,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
qOffset = 0
|
qOffset = 0
|
||||||
do qPt = 1, mesh_maxNips
|
do qPt = 1, mesh_maxNips
|
||||||
do dirI = 1, dimPlex
|
do dirI = 1, dimPlex
|
||||||
|
|
|
@ -40,7 +40,7 @@ module mesh_mechanical_FEM
|
||||||
type(tSolutionParams) :: params
|
type(tSolutionParams) :: params
|
||||||
|
|
||||||
type, private :: tNumerics
|
type, private :: tNumerics
|
||||||
integer :: &
|
PetscInt :: &
|
||||||
p_i, & !< integration order (quadrature rule)
|
p_i, & !< integration order (quadrature rule)
|
||||||
itmax
|
itmax
|
||||||
logical :: &
|
logical :: &
|
||||||
|
@ -55,7 +55,8 @@ module mesh_mechanical_FEM
|
||||||
! PETSc data
|
! PETSc data
|
||||||
SNES :: mechanical_snes
|
SNES :: mechanical_snes
|
||||||
Vec :: solution, solution_rate, solution_local
|
Vec :: solution, solution_rate, solution_local
|
||||||
PetscInt :: dimPlex, cellDof, nQuadrature, nBasis
|
PetscInt :: dimPlex, cellDof, nBasis
|
||||||
|
integer :: nQuadrature
|
||||||
PetscReal, allocatable, target :: qPoints(:), qWeights(:)
|
PetscReal, allocatable, target :: qPoints(:), qWeights(:)
|
||||||
MatNullSpace :: matnull
|
MatNullSpace :: matnull
|
||||||
|
|
||||||
|
@ -104,11 +105,11 @@ subroutine FEM_mechanical_init(fieldBC)
|
||||||
PetscReal :: detJ
|
PetscReal :: detJ
|
||||||
PetscReal, allocatable, target :: cellJMat(:,:)
|
PetscReal, allocatable, target :: cellJMat(:,:)
|
||||||
|
|
||||||
PetscScalar, pointer :: px_scal(:)
|
PetscScalar, pointer, dimension(:) :: px_scal
|
||||||
PetscScalar, allocatable, target :: x_scal(:)
|
PetscScalar, allocatable, target, dimension(:) :: x_scal
|
||||||
|
|
||||||
character(len=*), parameter :: prefix = 'mechFE_'
|
character(len=*), parameter :: prefix = 'mechFE_'
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
real(pReal), dimension(3,3) :: devNull
|
real(pReal), dimension(3,3) :: devNull
|
||||||
class(tNode), pointer :: &
|
class(tNode), pointer :: &
|
||||||
num_mesh
|
num_mesh
|
||||||
|
@ -118,8 +119,8 @@ subroutine FEM_mechanical_init(fieldBC)
|
||||||
!-----------------------------------------------------------------------------
|
!-----------------------------------------------------------------------------
|
||||||
! read numerical parametes and do sanity checks
|
! read numerical parametes and do sanity checks
|
||||||
num_mesh => config_numerics%get('mesh',defaultVal=emptyDict)
|
num_mesh => config_numerics%get('mesh',defaultVal=emptyDict)
|
||||||
num%p_i = num_mesh%get_asInt('p_i',defaultVal = 2)
|
num%p_i = int(num_mesh%get_asInt('p_i',defaultVal = 2),pPETSCINT)
|
||||||
num%itmax = num_mesh%get_asInt('itmax',defaultVal=250)
|
num%itmax = int(num_mesh%get_asInt('itmax',defaultVal=250),pPETSCINT)
|
||||||
num%BBarStabilisation = num_mesh%get_asBool('bbarstabilisation',defaultVal = .false.)
|
num%BBarStabilisation = num_mesh%get_asBool('bbarstabilisation',defaultVal = .false.)
|
||||||
num%eps_struct_atol = num_mesh%get_asFloat('eps_struct_atol', defaultVal = 1.0e-10_pReal)
|
num%eps_struct_atol = num_mesh%get_asFloat('eps_struct_atol', defaultVal = 1.0e-10_pReal)
|
||||||
num%eps_struct_rtol = num_mesh%get_asFloat('eps_struct_rtol', defaultVal = 1.0e-4_pReal)
|
num%eps_struct_rtol = num_mesh%get_asFloat('eps_struct_rtol', defaultVal = 1.0e-4_pReal)
|
||||||
|
@ -130,8 +131,8 @@ subroutine FEM_mechanical_init(fieldBC)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! Setup FEM mech mesh
|
! Setup FEM mech mesh
|
||||||
call DMClone(geomMesh,mechanical_mesh,ierr); CHKERRQ(ierr)
|
call DMClone(geomMesh,mechanical_mesh,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call DMGetDimension(mechanical_mesh,dimPlex,ierr); CHKERRQ(ierr)
|
call DMGetDimension(mechanical_mesh,dimPlex,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! Setup FEM mech discretization
|
! Setup FEM mech discretization
|
||||||
|
@ -140,96 +141,104 @@ subroutine FEM_mechanical_init(fieldBC)
|
||||||
nQuadrature = FEM_nQuadrature( dimPlex,num%p_i)
|
nQuadrature = FEM_nQuadrature( dimPlex,num%p_i)
|
||||||
qPointsP => qPoints
|
qPointsP => qPoints
|
||||||
qWeightsP => qWeights
|
qWeightsP => qWeights
|
||||||
call PetscQuadratureCreate(PETSC_COMM_SELF,mechQuad,ierr); CHKERRQ(ierr)
|
call PetscQuadratureCreate(PETSC_COMM_SELF,mechQuad,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
nc = dimPlex
|
nc = dimPlex
|
||||||
call PetscQuadratureSetData(mechQuad,dimPlex,nc,nQuadrature,qPointsP,qWeightsP,ierr)
|
call PetscQuadratureSetData(mechQuad,dimPlex,nc,int(nQuadrature,pPETSCINT),qPointsP,qWeightsP,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call PetscFECreateDefault(PETSC_COMM_SELF,dimPlex,nc,PETSC_TRUE,prefix, &
|
call PetscFECreateDefault(PETSC_COMM_SELF,dimPlex,nc,PETSC_TRUE,prefix, &
|
||||||
num%p_i,mechFE,ierr); CHKERRQ(ierr)
|
num%p_i,mechFE,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call PetscFESetQuadrature(mechFE,mechQuad,ierr); CHKERRQ(ierr)
|
call PetscFESetQuadrature(mechFE,mechQuad,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call PetscFEGetDimension(mechFE,nBasis,ierr); CHKERRQ(ierr)
|
call PetscFEGetDimension(mechFE,nBasis,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
nBasis = nBasis/nc
|
nBasis = nBasis/nc
|
||||||
call DMAddField(mechanical_mesh,PETSC_NULL_DMLABEL,mechFE,ierr); CHKERRQ(ierr)
|
call DMAddField(mechanical_mesh,PETSC_NULL_DMLABEL,mechFE,err_PETSc)
|
||||||
call DMCreateDS(mechanical_mesh,ierr); CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call DMGetDS(mechanical_mesh,mechDS,ierr); CHKERRQ(ierr)
|
call DMCreateDS(mechanical_mesh,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call PetscDSGetTotalDimension(mechDS,cellDof,ierr); CHKERRQ(ierr)
|
call DMGetDS(mechanical_mesh,mechDS,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call PetscFEDestroy(mechFE,ierr); CHKERRQ(ierr)
|
call PetscDSGetTotalDimension(mechDS,cellDof,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call PetscQuadratureDestroy(mechQuad,ierr); CHKERRQ(ierr)
|
call PetscFEDestroy(mechFE,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
|
call PetscQuadratureDestroy(mechQuad,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! Setup FEM mech boundary conditions
|
! Setup FEM mech boundary conditions
|
||||||
call DMGetLabel(mechanical_mesh,'Face Sets',BCLabel,ierr); CHKERRQ(ierr)
|
call DMGetLabel(mechanical_mesh,'Face Sets',BCLabel,err_PETSc)
|
||||||
call DMPlexLabelComplete(mechanical_mesh,BCLabel,ierr); CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call DMGetLocalSection(mechanical_mesh,section,ierr); CHKERRQ(ierr)
|
call DMPlexLabelComplete(mechanical_mesh,BCLabel,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
|
call DMGetLocalSection(mechanical_mesh,section,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
allocate(pnumComp(1), source=dimPlex)
|
allocate(pnumComp(1), source=dimPlex)
|
||||||
allocate(pnumDof(0:dimPlex), source = 0)
|
allocate(pnumDof(0:dimPlex), source = 0_pPETSCINT)
|
||||||
do topologDim = 0, dimPlex
|
do topologDim = 0, dimPlex
|
||||||
call DMPlexGetDepthStratum(mechanical_mesh,topologDim,cellStart,cellEnd,ierr)
|
call DMPlexGetDepthStratum(mechanical_mesh,topologDim,cellStart,cellEnd,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call PetscSectionGetDof(section,cellStart,pnumDof(topologDim),ierr)
|
call PetscSectionGetDof(section,cellStart,pnumDof(topologDim),err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
enddo
|
enddo
|
||||||
numBC = 0
|
numBC = 0
|
||||||
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
|
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
|
||||||
if (fieldBC%componentBC(field)%Mask(faceSet)) numBC = numBC + 1
|
if (fieldBC%componentBC(field)%Mask(faceSet)) numBC = numBC + 1
|
||||||
enddo; enddo
|
enddo; enddo
|
||||||
allocate(pbcField(numBC), source=0)
|
allocate(pbcField(numBC), source=0_pPETSCINT)
|
||||||
allocate(pbcComps(numBC))
|
allocate(pbcComps(numBC))
|
||||||
allocate(pbcPoints(numBC))
|
allocate(pbcPoints(numBC))
|
||||||
numBC = 0
|
numBC = 0
|
||||||
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
|
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
|
||||||
if (fieldBC%componentBC(field)%Mask(faceSet)) then
|
if (fieldBC%componentBC(field)%Mask(faceSet)) then
|
||||||
numBC = numBC + 1
|
numBC = numBC + 1
|
||||||
call ISCreateGeneral(PETSC_COMM_WORLD,1,[field-1],PETSC_COPY_VALUES,pbcComps(numBC),ierr)
|
call ISCreateGeneral(PETSC_COMM_WORLD,1_pPETSCINT,[field-1],PETSC_COPY_VALUES,pbcComps(numBC),err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call DMGetStratumSize(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcSize,ierr)
|
call DMGetStratumSize(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcSize,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
if (bcSize > 0) then
|
if (bcSize > 0) then
|
||||||
call DMGetStratumIS(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcPoint,ierr)
|
call DMGetStratumIS(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcPoint,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call ISGetIndicesF90(bcPoint,pBcPoint,ierr); CHKERRQ(ierr)
|
call ISGetIndicesF90(bcPoint,pBcPoint,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call ISCreateGeneral(PETSC_COMM_WORLD,bcSize,pBcPoint,PETSC_COPY_VALUES,pbcPoints(numBC),ierr)
|
call ISCreateGeneral(PETSC_COMM_WORLD,bcSize,pBcPoint,PETSC_COPY_VALUES,pbcPoints(numBC),err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call ISRestoreIndicesF90(bcPoint,pBcPoint,ierr); CHKERRQ(ierr)
|
call ISRestoreIndicesF90(bcPoint,pBcPoint,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call ISDestroy(bcPoint,ierr); CHKERRQ(ierr)
|
call ISDestroy(bcPoint,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
else
|
else
|
||||||
call ISCreateGeneral(PETSC_COMM_WORLD,0,[0],PETSC_COPY_VALUES,pbcPoints(numBC),ierr)
|
call ISCreateGeneral(PETSC_COMM_WORLD,0_pPETSCINT,[0_pPETSCINT],PETSC_COPY_VALUES,pbcPoints(numBC),err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
endif
|
endif
|
||||||
endif
|
endif
|
||||||
enddo; enddo
|
enddo; enddo
|
||||||
call DMPlexCreateSection(mechanical_mesh,nolabel,pNumComp,pNumDof, &
|
call DMPlexCreateSection(mechanical_mesh,nolabel,pNumComp,pNumDof, &
|
||||||
numBC,pBcField,pBcComps,pBcPoints,PETSC_NULL_IS,section,ierr)
|
numBC,pBcField,pBcComps,pBcPoints,PETSC_NULL_IS,section,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call DMSetSection(mechanical_mesh,section,ierr); CHKERRQ(ierr)
|
call DMSetSection(mechanical_mesh,section,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
do faceSet = 1, numBC
|
do faceSet = 1, numBC
|
||||||
call ISDestroy(pbcPoints(faceSet),ierr); CHKERRQ(ierr)
|
call ISDestroy(pbcPoints(faceSet),err_PETSc); CHKERRQ(err_PETSc)
|
||||||
enddo
|
enddo
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! initialize solver specific parts of PETSc
|
! initialize solver specific parts of PETSc
|
||||||
call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,ierr);CHKERRQ(ierr)
|
call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,err_PETSc);CHKERRQ(err_PETSc)
|
||||||
call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',ierr);CHKERRQ(ierr)
|
call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',err_PETSc)
|
||||||
call SNESSetDM(mechanical_snes,mechanical_mesh,ierr); CHKERRQ(ierr) !< set the mesh for non-linear solver
|
CHKERRQ(err_PETSc)
|
||||||
call DMCreateGlobalVector(mechanical_mesh,solution ,ierr); CHKERRQ(ierr) !< locally owned displacement Dofs
|
call SNESSetDM(mechanical_snes,mechanical_mesh,err_PETSc) ! set the mesh for non-linear solver
|
||||||
call DMCreateGlobalVector(mechanical_mesh,solution_rate ,ierr); CHKERRQ(ierr) !< locally owned velocity Dofs to guess solution at next load step
|
CHKERRQ(err_PETSc)
|
||||||
call DMCreateLocalVector (mechanical_mesh,solution_local ,ierr); CHKERRQ(ierr) !< locally owned velocity Dofs to guess solution at next load step
|
call DMCreateGlobalVector(mechanical_mesh,solution, err_PETSc) ! locally owned displacement Dofs
|
||||||
call DMSNESSetFunctionLocal(mechanical_mesh,FEM_mechanical_formResidual,PETSC_NULL_VEC,ierr) !< function to evaluate residual forces
|
CHKERRQ(err_PETSc)
|
||||||
CHKERRQ(ierr)
|
call DMCreateGlobalVector(mechanical_mesh,solution_rate, err_PETSc) ! locally owned velocity Dofs to guess solution at next load step
|
||||||
call DMSNESSetJacobianLocal(mechanical_mesh,FEM_mechanical_formJacobian,PETSC_NULL_VEC,ierr) !< function to evaluate stiffness matrix
|
CHKERRQ(err_PETSc)
|
||||||
CHKERRQ(ierr)
|
call DMCreateLocalVector (mechanical_mesh,solution_local,err_PETSc) ! locally owned velocity Dofs to guess solution at next load step
|
||||||
call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1), ierr); CHKERRQ(ierr) !< ignore linear solve failures
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetConvergenceTest(mechanical_snes,FEM_mechanical_converged,PETSC_NULL_VEC,PETSC_NULL_FUNCTION,ierr)
|
call DMSNESSetFunctionLocal(mechanical_mesh,FEM_mechanical_formResidual,PETSC_NULL_VEC,err_PETSc) ! function to evaluate residual forces
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetTolerances(mechanical_snes,1.0,0.0,0.0,num%itmax,num%itmax,ierr)
|
call DMSNESSetJacobianLocal(mechanical_mesh,FEM_mechanical_formJacobian,PETSC_NULL_VEC,err_PETSc) ! function to evaluate stiffness matrix
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetFromOptions(mechanical_snes,ierr); CHKERRQ(ierr)
|
call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1_pPETSCINT), err_PETSc) ! ignore linear solve failures
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
call SNESSetConvergenceTest(mechanical_snes,FEM_mechanical_converged,PETSC_NULL_VEC,PETSC_NULL_FUNCTION,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
call SNESSetTolerances(mechanical_snes,1.0,0.0,0.0,num%itmax,num%itmax,err_PETSc)
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
call SNESSetFromOptions(mechanical_snes,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! init fields
|
! init fields
|
||||||
call VecSet(solution ,0.0,ierr); CHKERRQ(ierr)
|
call VecSet(solution ,0.0,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call VecSet(solution_rate ,0.0,ierr); CHKERRQ(ierr)
|
call VecSet(solution_rate ,0.0,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
allocate(x_scal(cellDof))
|
allocate(x_scal(cellDof))
|
||||||
allocate(nodalWeightsP(1))
|
allocate(nodalWeightsP(1))
|
||||||
allocate(nodalPointsP(dimPlex))
|
allocate(nodalPointsP(dimPlex))
|
||||||
|
@ -237,26 +246,26 @@ subroutine FEM_mechanical_init(fieldBC)
|
||||||
allocate(pcellJ(dimPlex**2))
|
allocate(pcellJ(dimPlex**2))
|
||||||
allocate(pinvcellJ(dimPlex**2))
|
allocate(pinvcellJ(dimPlex**2))
|
||||||
allocate(cellJMat(dimPlex,dimPlex))
|
allocate(cellJMat(dimPlex,dimPlex))
|
||||||
call PetscDSGetDiscretization(mechDS,0,mechFE,ierr)
|
call PetscDSGetDiscretization(mechDS,0_pPETSCINT,mechFE,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call PetscFEGetDualSpace(mechFE,mechDualSpace,ierr); CHKERRQ(ierr)
|
call PetscFEGetDualSpace(mechFE,mechDualSpace,err_PETSc); CHKERRQ(err_PETSc)
|
||||||
call DMPlexGetHeightStratum(mechanical_mesh,0,cellStart,cellEnd,ierr)
|
call DMPlexGetHeightStratum(mechanical_mesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
do cell = cellStart, cellEnd-1 !< loop over all elements
|
do cell = cellStart, cellEnd-1 !< loop over all elements
|
||||||
x_scal = 0.0_pReal
|
x_scal = 0.0_pReal
|
||||||
call DMPlexComputeCellGeometryAffineFEM(mechanical_mesh,cell,pV0,pCellJ,pInvcellJ,detJ,ierr)
|
call DMPlexComputeCellGeometryAffineFEM(mechanical_mesh,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
cellJMat = reshape(pCellJ,shape=[dimPlex,dimPlex])
|
cellJMat = reshape(pCellJ,shape=[dimPlex,dimPlex])
|
||||||
do basis = 0, nBasis*dimPlex-1, dimPlex
|
do basis = 0, nBasis*dimPlex-1, dimPlex
|
||||||
call PetscDualSpaceGetFunctional(mechDualSpace,basis,functional,ierr)
|
call PetscDualSpaceGetFunctional(mechDualSpace,basis,functional,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
call PetscQuadratureGetData(functional,dimPlex,nc,nNodalPoints,nodalPointsP,nodalWeightsP,ierr)
|
call PetscQuadratureGetData(functional,dimPlex,nc,nNodalPoints,nodalPointsP,nodalWeightsP,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
x_scal(basis+1:basis+dimPlex) = pV0 + matmul(transpose(cellJMat),nodalPointsP + 1.0_pReal)
|
x_scal(basis+1:basis+dimPlex) = pV0 + matmul(transpose(cellJMat),nodalPointsP + 1.0_pReal)
|
||||||
enddo
|
enddo
|
||||||
px_scal => x_scal
|
px_scal => x_scal
|
||||||
call DMPlexVecSetClosure(mechanical_mesh,section,solution_local,cell,px_scal,5,ierr)
|
call DMPlexVecSetClosure(mechanical_mesh,section,solution_local,cell,px_scal,5,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
enddo
|
enddo
|
||||||
call utilities_constitutiveResponse(0.0_pReal,devNull,.true.)
|
call utilities_constitutiveResponse(0.0_pReal,devNull,.true.)
|
||||||
|
|
||||||
|
@ -279,7 +288,7 @@ type(tSolutionState) function FEM_mechanical_solution( &
|
||||||
character(len=*), intent(in) :: &
|
character(len=*), intent(in) :: &
|
||||||
incInfoIn
|
incInfoIn
|
||||||
|
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
SNESConvergedReason :: reason
|
SNESConvergedReason :: reason
|
||||||
|
|
||||||
incInfo = incInfoIn
|
incInfo = incInfoIn
|
||||||
|
@ -289,8 +298,10 @@ type(tSolutionState) function FEM_mechanical_solution( &
|
||||||
params%timeinc = timeinc
|
params%timeinc = timeinc
|
||||||
params%fieldBC = fieldBC
|
params%fieldBC = fieldBC
|
||||||
|
|
||||||
call SNESSolve(mechanical_snes,PETSC_NULL_VEC,solution,ierr); CHKERRQ(ierr) ! solve mechanical_snes based on solution guess (result in solution)
|
call SNESSolve(mechanical_snes,PETSC_NULL_VEC,solution,err_PETSc) ! solve mechanical_snes based on solution guess (result in solution)
|
||||||
call SNESGetConvergedReason(mechanical_snes,reason,ierr); CHKERRQ(ierr) ! solution converged?
|
CHKERRQ(err_PETSc)
|
||||||
|
call SNESGetConvergedReason(mechanical_snes,reason,err_PETSc) ! solution converged?
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
terminallyIll = .false.
|
terminallyIll = .false.
|
||||||
|
|
||||||
if (reason < 1) then ! 0: still iterating (will not occur), negative -> convergence error
|
if (reason < 1) then ! 0: still iterating (will not occur), negative -> convergence error
|
||||||
|
@ -298,8 +309,8 @@ type(tSolutionState) function FEM_mechanical_solution( &
|
||||||
FEM_mechanical_solution%iterationsNeeded = num%itmax
|
FEM_mechanical_solution%iterationsNeeded = num%itmax
|
||||||
else ! >= 1 proper convergence (or terminally ill)
|
else ! >= 1 proper convergence (or terminally ill)
|
||||||
FEM_mechanical_solution%converged = .true.
|
FEM_mechanical_solution%converged = .true.
|
||||||
call SNESGetIterationNumber(mechanical_snes,FEM_mechanical_solution%iterationsNeeded,ierr)
|
call SNESGetIterationNumber(mechanical_snes,FEM_mechanical_solution%iterationsNeeded,err_PETSc)
|
||||||
CHKERRQ(ierr)
|
CHKERRQ(err_PETSc)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
print'(/,1x,a)', '==========================================================================='
|
print'(/,1x,a)', '==========================================================================='
|
||||||
|
@ -311,11 +322,12 @@ end function FEM_mechanical_solution
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
!> @brief forms the FEM residual vector
|
!> @brief forms the FEM residual vector
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
|
subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,err_PETSc)
|
||||||
|
|
||||||
DM :: dm_local
|
DM :: dm_local
|
||||||
PetscObject,intent(in) :: dummy
|
PetscObject,intent(in) :: dummy
|
||||||
PetscErrorCode :: ierr
|
PetscErrorCode :: err_PETSc
|
||||||
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
|
|
||||||
PetscDS :: prob
|
PetscDS :: prob
|
||||||
Vec :: x_local, f_local, xx_local
|
Vec :: x_local, f_local, xx_local
|
||||||
|
@ -339,22 +351,25 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
|
||||||
allocate(pinvcellJ(dimPlex**2))
|
allocate(pinvcellJ(dimPlex**2))
|
||||||
allocate(x_scal(cellDof))
|
allocate(x_scal(cellDof))
|
||||||
|
|
-call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr)
+call DMGetLocalSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
-call DMGetDS(dm_local,prob,ierr); CHKERRQ(ierr)
+call DMGetDS(dm_local,prob,err_PETSc); CHKERRQ(err_PETSc)
-call PetscDSGetTabulation(prob,0,basisField,basisFieldDer,ierr)
+call PetscDSGetTabulation(prob,0_pPETSCINT,basisField,basisFieldDer,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr)
+call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
+CHKERRQ(err_PETSc)
-call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr)
+call DMGetLocalVector(dm_local,x_local,err_PETSc)
+CHKERRQ(err_PETSc)
-call VecWAXPY(x_local,1.0,xx_local,solution_local,ierr); CHKERRQ(ierr)
+call VecWAXPY(x_local,1.0,xx_local,solution_local,err_PETSc)
+CHKERRQ(err_PETSc)
 do field = 1, dimPlex; do face = 1, mesh_Nboundaries
 if (params%fieldBC%componentBC(field)%Mask(face)) then
-call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr)
+call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
 if (bcSize > 0) then
-call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr)
+call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call utilities_projectBCValues(x_local,section,0,field-1,bcPoints, &
+call utilities_projectBCValues(x_local,section,0_pPETSCINT,field-1,bcPoints, &
 0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc)
-call ISDestroy(bcPoints,ierr); CHKERRQ(ierr)
+call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
 endif
 endif
 enddo; enddo

@@ -363,12 +378,12 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
 ! evaluate field derivatives
 do cell = cellStart, cellEnd-1 !< loop over all elements

-call PetscSectionGetNumFields(section,numFields,ierr)
+call PetscSectionGetNumFields(section,numFields,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element
+call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr)
+call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
 IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex])
 do qPt = 0, nQuadrature-1
 m = cell*nQuadrature + qPt+1

@@ -392,23 +407,24 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
 enddo
 endif
-call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr)
+call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
 enddo

 !--------------------------------------------------------------------------------------------------
 ! evaluate constitutive response
 call utilities_constitutiveResponse(params%timeinc,P_av,ForwardData)
-call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 ForwardData = .false.

 !--------------------------------------------------------------------------------------------------
 ! integrating residual
 do cell = cellStart, cellEnd-1 !< loop over all elements
-call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element
+call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr)
+call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
 IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex])
 f_scal = 0.0
 do qPt = 0, nQuadrature-1

@@ -429,12 +445,12 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
 enddo
 f_scal = f_scal*abs(detJ)
 pf_scal => f_scal
-call DMPlexVecSetClosure(dm_local,section,f_local,cell,pf_scal,ADD_VALUES,ierr)
+call DMPlexVecSetClosure(dm_local,section,f_local,cell,pf_scal,ADD_VALUES,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr)
+call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
 enddo
-call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr)
+call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)

 end subroutine FEM_mechanical_formResidual
@@ -442,13 +458,13 @@ end subroutine FEM_mechanical_formResidual
 !--------------------------------------------------------------------------------------------------
 !> @brief forms the FEM stiffness matrix
 !--------------------------------------------------------------------------------------------------
-subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
+subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,err_PETSc)

 DM :: dm_local
 Mat :: Jac_pre, Jac
 PetscObject, intent(in) :: dummy
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc

 PetscDS :: prob
 Vec :: x_local, xx_local

@@ -478,34 +494,43 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
 allocate(pcellJ(dimPlex**2))
 allocate(pinvcellJ(dimPlex**2))

-call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,ierr); CHKERRQ(ierr)
+call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,err_PETSc)
+CHKERRQ(err_PETSc)
-call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,ierr); CHKERRQ(ierr)
+call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,err_PETSc)
+CHKERRQ(err_PETSc)
-call MatZeroEntries(Jac,ierr); CHKERRQ(ierr)
+call MatZeroEntries(Jac,err_PETSc)
+CHKERRQ(err_PETSc)
-call DMGetDS(dm_local,prob,ierr); CHKERRQ(ierr)
+call DMGetDS(dm_local,prob,err_PETSc)
+CHKERRQ(err_PETSc)
-call PetscDSGetTabulation(prob,0,basisField,basisFieldDer,ierr)
+call PetscDSGetTabulation(prob,0_pPETSCINT,basisField,basisFieldDer,err_PETSc)
-call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr)
+call DMGetLocalSection(dm_local,section,err_PETSc)
+CHKERRQ(err_PETSc)
-call DMGetGlobalSection(dm_local,gSection,ierr); CHKERRQ(ierr)
+call DMGetGlobalSection(dm_local,gSection,err_PETSc)
+CHKERRQ(err_PETSc)

-call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr)
+call DMGetLocalVector(dm_local,x_local,err_PETSc)
+CHKERRQ(err_PETSc)
-call VecWAXPY(x_local,1.0_pReal,xx_local,solution_local,ierr); CHKERRQ(ierr)
+call VecWAXPY(x_local,1.0_pReal,xx_local,solution_local,err_PETSc)
+CHKERRQ(err_PETSc)
 do field = 1, dimPlex; do face = 1, mesh_Nboundaries
 if (params%fieldBC%componentBC(field)%Mask(face)) then
-call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr)
+call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
 if (bcSize > 0) then
-call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr)
+call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call utilities_projectBCValues(x_local,section,0,field-1,bcPoints, &
+call utilities_projectBCValues(x_local,section,0_pPETSCINT,field-1,bcPoints, &
 0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc)
-call ISDestroy(bcPoints,ierr); CHKERRQ(ierr)
+call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
 endif
 endif
 enddo; enddo
-call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr)
+call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
+CHKERRQ(err_PETSc)
 do cell = cellStart, cellEnd-1 !< loop over all elements
-call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element
+call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr)
+call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
 K_eA = 0.0
 K_eB = 0.0
 MatB = 0.0

@@ -531,11 +556,11 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
 FInv = math_inv33(F)
 K_eA = K_eA + matmul(transpose(BMat),MatA)*math_det33(FInv)**(1.0/real(dimPlex))
 K_eB = K_eB - &
-matmul(transpose(matmul(reshape(homogenization_F(1:dimPlex,1:dimPlex,m),shape=[dimPlex*dimPlex,1]), &
+matmul(transpose(matmul(reshape(homogenization_F(1:dimPlex,1:dimPlex,m),shape=[dimPlex**2,1_pPETSCINT]), &
 matmul(reshape(FInv(1:dimPlex,1:dimPlex), &
-shape=[1,dimPlex*dimPlex],order=[2,1]),BMat))),MatA)
+shape=[1_pPETSCINT,dimPlex**2],order=[2,1]),BMat))),MatA)
 MatB = MatB &
-+ matmul(reshape(homogenization_F(1:dimPlex,1:dimPlex,m),shape=[1,dimPlex*dimPlex]),MatA)
++ matmul(reshape(homogenization_F(1:dimPlex,1:dimPlex,m),shape=[1_pPETSCINT,dimPlex**2]),MatA)
 FAvg = FAvg + F
 BMatAvg = BMatAvg + BMat
 else

@@ -546,39 +571,40 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
 FInv = math_inv33(FAvg)
 K_e = K_eA*math_det33(FAvg/real(nQuadrature))**(1.0/real(dimPlex)) + &
 (matmul(matmul(transpose(BMatAvg), &
-reshape(FInv(1:dimPlex,1:dimPlex),shape=[dimPlex*dimPlex,1],order=[2,1])),MatB) + &
+reshape(FInv(1:dimPlex,1:dimPlex),shape=[dimPlex**2,1_pPETSCINT],order=[2,1])),MatB) + &
 K_eB)/real(dimPlex)
 else
 K_e = K_eA
 endif
-K_e = (K_e + eps*math_eye(cellDof)) * abs(detJ)
+K_e = (K_e + eps*math_eye(int(cellDof))) * abs(detJ)
 #ifndef __INTEL_COMPILER
 pK_e(1:cellDOF**2) => K_e
 #else
 ! https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/782230 (bug)
 allocate(pK_e(cellDOF**2),source = reshape(K_e,[cellDOF**2]))
 #endif
-call DMPlexMatSetClosure(dm_local,section,gSection,Jac,cell,pK_e,ADD_VALUES,ierr)
+call DMPlexMatSetClosure(dm_local,section,gSection,Jac,cell,pK_e,ADD_VALUES,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr)
+call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
 enddo
-call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
+call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
-call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
+call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
-call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
+call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
-call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
+call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
-call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr)
+call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! apply boundary conditions
 #if (PETSC_VERSION_MINOR < 14)
-call DMPlexCreateRigidBody(dm_local,matnull,ierr); CHKERRQ(ierr)
+call DMPlexCreateRigidBody(dm_local,matnull,err_PETSc); CHKERRQ(err_PETSc)
 #else
-call DMPlexCreateRigidBody(dm_local,0,matnull,ierr); CHKERRQ(ierr)
+call DMPlexCreateRigidBody(dm_local,0_pPETSCINT,matnull,err_PETSc)
+CHKERRQ(err_PETSc)
 #endif
-call MatSetNullSpace(Jac,matnull,ierr); CHKERRQ(ierr)
+call MatSetNullSpace(Jac,matnull,err_PETSc); CHKERRQ(err_PETSc)
-call MatSetNearNullSpace(Jac,matnull,ierr); CHKERRQ(ierr)
+call MatSetNearNullSpace(Jac,matnull,err_PETSc); CHKERRQ(err_PETSc)
-call MatNullSpaceDestroy(matnull,ierr); CHKERRQ(ierr)
+call MatNullSpaceDestroy(matnull,err_PETSc); CHKERRQ(err_PETSc)

 end subroutine FEM_mechanical_formJacobian
@@ -601,43 +627,43 @@ subroutine FEM_mechanical_forward(guess,timeinc,timeinc_old,fieldBC)
 Vec :: x_local
 PetscSection :: section
 IS :: bcPoints
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc

 !--------------------------------------------------------------------------------------------------
 ! forward last inc
 if (guess .and. .not. cutBack) then
 ForwardData = .True.
 homogenization_F0 = homogenization_F
-call SNESGetDM(mechanical_snes,dm_local,ierr); CHKERRQ(ierr) !< retrieve mesh info from mechanical_snes into dm_local
+call SNESGetDM(mechanical_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc) !< retrieve mesh info from mechanical_snes into dm_local
-call DMGetSection(dm_local,section,ierr); CHKERRQ(ierr)
+call DMGetSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
-call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr)
+call DMGetLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
-call VecSet(x_local,0.0_pReal,ierr); CHKERRQ(ierr)
+call VecSet(x_local,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
-call DMGlobalToLocalBegin(dm_local,solution,INSERT_VALUES,x_local,ierr) !< retrieve my partition of global solution vector
+call DMGlobalToLocalBegin(dm_local,solution,INSERT_VALUES,x_local,err_PETSc) !< retrieve my partition of global solution vector
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call DMGlobalToLocalEnd(dm_local,solution,INSERT_VALUES,x_local,ierr)
+call DMGlobalToLocalEnd(dm_local,solution,INSERT_VALUES,x_local,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call VecAXPY(solution_local,1.0,x_local,ierr); CHKERRQ(ierr)
+call VecAXPY(solution_local,1.0,x_local,err_PETSc); CHKERRQ(err_PETSc)
 do field = 1, dimPlex; do face = 1, mesh_Nboundaries
 if (fieldBC%componentBC(field)%Mask(face)) then
-call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr)
+call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
 if (bcSize > 0) then
-call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr)
+call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
-call utilities_projectBCValues(solution_local,section,0,field-1,bcPoints, &
+call utilities_projectBCValues(solution_local,section,0_pPETSCINT,field-1,bcPoints, &
 0.0_pReal,fieldBC%componentBC(field)%Value(face),timeinc_old)
-call ISDestroy(bcPoints,ierr); CHKERRQ(ierr)
+call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
 endif
 endif
 enddo; enddo
-call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr)
+call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! update rate and forward last inc
-call VecCopy(solution,solution_rate,ierr); CHKERRQ(ierr)
+call VecCopy(solution,solution_rate,err_PETSc); CHKERRQ(err_PETSc)
-call VecScale(solution_rate,1.0/timeinc_old,ierr); CHKERRQ(ierr)
+call VecScale(solution_rate,1.0/timeinc_old,err_PETSc); CHKERRQ(err_PETSc)
 endif
-call VecCopy(solution_rate,solution,ierr); CHKERRQ(ierr)
+call VecCopy(solution_rate,solution,err_PETSc); CHKERRQ(err_PETSc)
-call VecScale(solution,timeinc,ierr); CHKERRQ(ierr)
+call VecScale(solution,timeinc,err_PETSc); CHKERRQ(err_PETSc)

 end subroutine FEM_mechanical_forward
@@ -645,24 +671,24 @@ end subroutine FEM_mechanical_forward
 !--------------------------------------------------------------------------------------------------
 !> @brief reporting
 !--------------------------------------------------------------------------------------------------
-subroutine FEM_mechanical_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr)
+subroutine FEM_mechanical_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,err_PETSc)

 SNES :: snes_local
 PetscInt :: PETScIter
 PetscReal :: xnorm,snorm,fnorm,divTol
 SNESConvergedReason :: reason
 PetscObject :: dummy
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc

 !--------------------------------------------------------------------------------------------------
 ! report
 divTol = max(maxval(abs(P_av(1:dimPlex,1:dimPlex)))*num%eps_struct_rtol,num%eps_struct_atol)
-call SNESConvergedDefault(snes_local,PETScIter,xnorm,snorm,fnorm/divTol,reason,dummy,ierr)
+call SNESConvergedDefault(snes_local,PETScIter,xnorm,snorm,fnorm/divTol,reason,dummy,err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)
 if (terminallyIll) reason = SNES_DIVERGED_FUNCTION_DOMAIN
 print'(/,1x,a,a,i0,a,i0,f0.3)', trim(incInfo), &
 ' @ Iteration ',PETScIter,' mechanical residual norm = ', &
-int(fnorm/divTol),fnorm/divTol-int(fnorm/divTol)
+int(fnorm/divTol),fnorm/divTol-int(fnorm/divTol) ! ToDo: int casting?
 print'(/,1x,a,/,2(3(2x,f12.4,1x)/),3(2x,f12.4,1x))', &
 'Piola--Kirchhoff stress / MPa =',transpose(P_av)*1.e-6_pReal
 flush(IO_STDOUT)
@@ -675,9 +701,7 @@ end subroutine FEM_mechanical_converged
 !--------------------------------------------------------------------------------------------------
 subroutine FEM_mechanical_updateCoords()

-real(pReal), pointer, dimension(:) :: &
-nodeCoords_linear !< nodal coordinates (dimPlex*Nnodes)
-real(pReal), pointer, dimension(:,:) :: &
+PetscReal, pointer, dimension(:,:) :: &
 nodeCoords !< nodal coordinates (3,Nnodes)
 real(pReal), pointer, dimension(:,:,:) :: &
 ipCoords !< ip coordinates (3,nQuadrature,mesh_NcpElems)

@@ -690,39 +714,44 @@ subroutine FEM_mechanical_updateCoords()

 DM :: dm_local
 Vec :: x_local
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
 PetscInt :: pStart, pEnd, p, s, e, q, &
 cellStart, cellEnd, c, n
 PetscSection :: section
 PetscQuadrature :: mechQuad
-PetscReal, dimension(:), pointer :: basisField, basisFieldDer
+PetscReal, dimension(:), pointer :: basisField, basisFieldDer, &
+nodeCoords_linear !< nodal coordinates (dimPlex*Nnodes)
 PetscScalar, dimension(:), pointer :: x_scal

-call SNESGetDM(mechanical_snes,dm_local,ierr); CHKERRQ(ierr)
+call SNESGetDM(mechanical_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc)
-call DMGetDS(dm_local,mechQuad,ierr); CHKERRQ(ierr)
+call DMGetDS(dm_local,mechQuad,err_PETSc); CHKERRQ(err_PETSc)
-call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr)
+call DMGetLocalSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
-call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr)
+call DMGetLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
-call DMGetDimension(dm_local,dimPlex,ierr); CHKERRQ(ierr)
+call DMGetDimension(dm_local,dimPlex,err_PETSc); CHKERRQ(err_PETSc)

 ! write cell vertex displacements
-call DMPlexGetDepthStratum(dm_local,0,pStart,pEnd,ierr); CHKERRQ(ierr)
+call DMPlexGetDepthStratum(dm_local,0_pPETSCINT,pStart,pEnd,err_PETSc)
+CHKERRQ(err_PETSc)
 allocate(nodeCoords(3,pStart:pEnd-1),source=0.0_pReal)
-call VecGetArrayF90(x_local,nodeCoords_linear,ierr); CHKERRQ(ierr)
+call VecGetArrayF90(x_local,nodeCoords_linear,err_PETSc); CHKERRQ(err_PETSc)
 do p=pStart, pEnd-1
-call DMPlexGetPointLocal(dm_local, p, s, e, ierr); CHKERRQ(ierr)
+call DMPlexGetPointLocal(dm_local, p, s, e, err_PETSc); CHKERRQ(err_PETSc)
 nodeCoords(1:dimPlex,p)=nodeCoords_linear(s+1:e)
 enddo

 call discretization_setNodeCoords(nodeCoords)
-call VecRestoreArrayF90(x_local,nodeCoords_linear,ierr); CHKERRQ(ierr)
+call VecRestoreArrayF90(x_local,nodeCoords_linear,err_PETSc); CHKERRQ(err_PETSc)

 ! write ip displacements
-call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr)
+call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
+CHKERRQ(err_PETSc)
-call PetscDSGetTabulation(mechQuad,0,basisField,basisFieldDer,ierr); CHKERRQ(ierr)
+call PetscDSGetTabulation(mechQuad,0_pPETSCINT,basisField,basisFieldDer,err_PETSc)
+CHKERRQ(err_PETSc)
 allocate(ipCoords(3,nQuadrature,mesh_NcpElems),source=0.0_pReal)
 do c=cellStart,cellEnd-1
 qOffset=0
-call DMPlexVecGetClosure(dm_local,section,x_local,c,x_scal,ierr); CHKERRQ(ierr) !< get nodal coordinates of each element
+call DMPlexVecGetClosure(dm_local,section,x_local,c,x_scal,err_PETSc) !< get nodal coordinates of each element
+CHKERRQ(err_PETSc)
 do qPt=0,nQuadrature-1
 qOffset= qPt * (size(basisField)/nQuadrature)
 do comp=0,dimPlex-1 !< loop over components

@@ -737,10 +766,11 @@ subroutine FEM_mechanical_updateCoords()
 enddo
 enddo
 enddo
-call DMPlexVecRestoreClosure(dm_local,section,x_local,c,x_scal,ierr); CHKERRQ(ierr)
+call DMPlexVecRestoreClosure(dm_local,section,x_local,c,x_scal,err_PETSc)
+CHKERRQ(err_PETSc)
 end do
 call discretization_setIPcoords(reshape(ipCoords,[3,mesh_NcpElems*nQuadrature]))
-call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr)
+call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)

 end subroutine FEM_mechanical_updateCoords
@@ -20,9 +20,17 @@ module parallelization
 implicit none
 private

-integer, protected, public :: &
-worldrank = 0, & !< MPI worldrank (/=0 for MPI simulations only)
-worldsize = 1 !< MPI worldsize (/=1 for MPI simulations only)
+#ifndef PETSC
+integer, parameter, public :: &
+MPI_INTEGER_KIND = pI64
+integer(MPI_INTEGER_KIND), parameter, public :: &
+worldrank = 0_MPI_INTEGER_KIND, & !< MPI dummy worldrank
+worldsize = 1_MPI_INTEGER_KIND !< MPI dummy worldsize
+#else
+integer(MPI_INTEGER_KIND), protected, public :: &
+worldrank = 0_MPI_INTEGER_KIND, & !< MPI worldrank (/=0 for MPI simulations only)
+worldsize = 1_MPI_INTEGER_KIND !< MPI worldsize (/=1 for MPI simulations only)
+#endif

 #ifndef PETSC
 public :: parallelization_bcast_str

@@ -44,51 +52,63 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine parallelization_init

-integer :: err, typeSize
+integer(MPI_INTEGER_KIND) :: err_MPI, typeSize
 !$ integer :: got_env, threadLevel
 !$ integer(pI32) :: OMP_NUM_THREADS
 !$ character(len=6) NumThreadsString

-PetscErrorCode :: petsc_err
+PetscErrorCode :: err_PETSc
 #ifdef _OPENMP
 ! If openMP is enabled, check if the MPI libary supports it and initialize accordingly.
 ! Otherwise, the first call to PETSc will do the initialization.
-call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err)
+call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err_MPI)
-if (err /= 0) error stop 'MPI init failed'
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI init failed'
 if (threadLevel<MPI_THREAD_FUNNELED) error stop 'MPI library does not support OpenMP'
 #endif

 #if defined(DEBUG)
-call PetscInitialize(PETSC_NULL_CHARACTER,petsc_err)
+call PetscInitialize(PETSC_NULL_CHARACTER,err_PETSc)
 #else
-call PetscInitializeNoArguments(petsc_err)
+call PetscInitializeNoArguments(err_PETSc)
 #endif
-CHKERRQ(petsc_err)
+CHKERRQ(err_PETSc)

 #if defined(DEBUG) && defined(__INTEL_COMPILER)
-call PetscSetFPTrap(PETSC_FP_TRAP_ON,petsc_err)
+call PetscSetFPTrap(PETSC_FP_TRAP_ON,err_PETSc)
 #else
-call PetscSetFPTrap(PETSC_FP_TRAP_OFF,petsc_err)
+call PetscSetFPTrap(PETSC_FP_TRAP_OFF,err_PETSc)
 #endif
-CHKERRQ(petsc_err)
+CHKERRQ(err_PETSc)

-call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err)
+call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err_MPI)
-if (err /= 0) error stop 'Could not determine worldrank'
+if (err_MPI /= 0_MPI_INTEGER_KIND) &
+error stop 'Could not determine worldrank'

 if (worldrank == 0) print'(/,1x,a)', '<<<+- parallelization init -+>>>'

-call MPI_Comm_size(MPI_COMM_WORLD,worldsize,err)
+call MPI_Comm_size(MPI_COMM_WORLD,worldsize,err_MPI)
-if (err /= 0) error stop 'Could not determine worldsize'
+if (err_MPI /= 0_MPI_INTEGER_KIND) &
+error stop 'Could not determine worldsize'
 if (worldrank == 0) print'(/,1x,a,i3)', 'MPI processes: ',worldsize

-call MPI_Type_size(MPI_INTEGER,typeSize,err)
+call MPI_Type_size(MPI_INTEGER,typeSize,err_MPI)
-if (err /= 0) error stop 'Could not determine MPI integer size'
+if (err_MPI /= 0_MPI_INTEGER_KIND) &
+error stop 'Could not determine size of MPI_INTEGER'
-if (typeSize*8 /= bit_size(0)) error stop 'Mismatch between MPI and DAMASK integer'
+if (typeSize*8_MPI_INTEGER_KIND /= int(bit_size(0),MPI_INTEGER_KIND)) &
+error stop 'Mismatch between MPI_INTEGER and DAMASK default integer'

-call MPI_Type_size(MPI_DOUBLE,typeSize,err)
+call MPI_Type_size(MPI_INTEGER8,typeSize,err_MPI)
-if (err /= 0) error stop 'Could not determine MPI real size'
+if (err_MPI /= 0) &
+error stop 'Could not determine size of MPI_INTEGER8'
-if (typeSize*8 /= storage_size(0.0_pReal)) error stop 'Mismatch between MPI and DAMASK real'
+if (typeSize*8_MPI_INTEGER_KIND /= int(bit_size(0_pI64),MPI_INTEGER_KIND)) &
+error stop 'Mismatch between MPI_INTEGER8 and DAMASK pI64'
+
+call MPI_Type_size(MPI_DOUBLE,typeSize,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) &
+error stop 'Could not determine size of MPI_DOUBLE'
+if (typeSize*8_MPI_INTEGER_KIND /= int(storage_size(0.0_pReal),MPI_INTEGER_KIND)) &
+error stop 'Mismatch between MPI_DOUBLE and DAMASK pReal'

 if (worldrank /= 0) then
 close(OUTPUT_UNIT) ! disable output

@@ -119,14 +139,14 @@ subroutine parallelization_bcast_str(string)

 character(len=:), allocatable, intent(inout) :: string

-integer :: strlen, ierr ! pI64 for strlen not supported by MPI
+integer(MPI_INTEGER_KIND) :: strlen, err_MPI


-if (worldrank == 0) strlen = len(string)
+if (worldrank == 0) strlen = len(string,MPI_INTEGER_KIND)
-call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD, ierr)
+call MPI_Bcast(strlen,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
 if (worldrank /= 0) allocate(character(len=strlen)::string)

-call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD, ierr)
+call MPI_Bcast(string,strlen,MPI_CHARACTER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)

 end subroutine parallelization_bcast_str
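Side note (minimal sketch, not part of the patch): the datatype-size checks added to parallelization_init follow a generic pattern that can be exercised stand-alone. The program below assumes only a standard MPI library exposing the mpi module and uses MPI_DOUBLE_PRECISION in place of the C-style MPI_DOUBLE handle queried above; the kind definition mirrors DAMASK's pReal.

! Stand-alone sketch: verify that MPI datatypes match the Fortran kinds in use.
program check_mpi_kinds
  use mpi
  implicit none
  integer, parameter :: pReal = selected_real_kind(15,307)   ! same definition as pReal
  integer :: typeSize, err

  call MPI_Init(err)
  call MPI_Type_size(MPI_INTEGER, typeSize, err)
  if (typeSize*8 /= bit_size(0)) error stop 'MPI_INTEGER does not match default integer'
  call MPI_Type_size(MPI_DOUBLE_PRECISION, typeSize, err)
  if (typeSize*8 /= storage_size(0.0_pReal)) error stop 'MPI_DOUBLE_PRECISION does not match pReal'
  call MPI_Finalize(err)
end program check_mpi_kinds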
@@ -17,17 +17,17 @@ submodule(phase:plastic) dislotwin
 p_sb = 1.0_pReal, & !< p-exponent in shear band velocity
 q_sb = 1.0_pReal, & !< q-exponent in shear band velocity
 i_tw = 1.0_pReal, & !< adjustment parameter to calculate MFP for twinning
-L_tw = 1.0_pReal, & !< Length of twin nuclei in Burgers vectors
+L_tw = 1.0_pReal, & !< length of twin nuclei in Burgers vectors: TODO unit should be meters
-L_tr = 1.0_pReal, & !< Length of trans nuclei in Burgers vectors
+L_tr = 1.0_pReal, & !< length of trans nuclei in Burgers vectors: TODO unit should be meters
 x_c_tw = 1.0_pReal, & !< critical distance for formation of twin nucleus
 x_c_tr = 1.0_pReal, & !< critical distance for formation of trans nucleus
 V_cs = 1.0_pReal, & !< cross slip volume
 xi_sb = 1.0_pReal, & !< value for shearband resistance
 v_sb = 1.0_pReal, & !< value for shearband velocity_0
 E_sb = 1.0_pReal, & !< activation energy for shear bands
-delta_G = 1.0_pReal, & !< Free energy difference between austensite and martensite
+delta_G = 1.0_pReal, & !< free energy difference between austensite and martensite
 i_tr = 1.0_pReal, & !< adjustment parameter to calculate MFP for transformation
-h = 1.0_pReal, & !< Stack height of hex nucleus
+h = 1.0_pReal, & !< stack height of hex nucleus
 T_ref = T_ROOM, &
 a_cI = 1.0_pReal, &
 a_cF = 1.0_pReal

@@ -40,14 +40,13 @@ submodule(phase:plastic) dislotwin
 Q_sl,& !< activation energy for glide [J] for each slip system
 v_0, & !< dislocation velocity prefactor [m/s] for each slip system
 dot_N_0_tw, & !< twin nucleation rate [1/m³s] for each twin system
-dot_N_0_tr, & !< trans nucleation rate [1/m³s] for each trans system
 t_tw, & !< twin thickness [m] for each twin system
 i_sl, & !< Adj. parameter for distance between 2 forest dislocations for each slip system
 t_tr, & !< martensite lamellar thickness [m] for each trans system
 p, & !< p-exponent in glide velocity
 q, & !< q-exponent in glide velocity
-r, & !< r-exponent in twin nucleation rate
+r, & !< exponent in twin nucleation rate
-s, & !< s-exponent in trans nucleation rate
+s, & !< exponent in trans nucleation rate
 tau_0, & !< strength due to elements in solid solution
 gamma_char, & !< characteristic shear for twins
 B, & !< drag coefficient

@@ -102,11 +101,7 @@ submodule(phase:plastic) dislotwin
 Lambda_tr, & !< mean free path between 2 obstacles seen by a growing martensite
 tau_pass, & !< threshold stress for slip
 tau_hat_tw, & !< threshold stress for twinning
-tau_hat_tr, & !< threshold stress for transformation
+tau_hat_tr !< threshold stress for transformation
-V_tw, & !< volume of a new twin
-V_tr, & !< volume of a new martensite disc
-tau_r_tw, & !< stress to bring partials close together (twin)
-tau_r_tr !< stress to bring partials close together (trans)
 end type tDislotwinDependentState

 !--------------------------------------------------------------------------------------------------
@@ -153,10 +148,10 @@ module function plastic_dislotwin_init() result(myPlasticity)
 print'(/,a,i0)', ' # phases: ',count(myPlasticity); flush(IO_STDOUT)

 print'(/,1x,a)', 'A. Ma and F. Roters, Acta Materialia 52(12):3603–3612, 2004'
-print'( 1x,a)', 'https://doi.org/10.1016/j.actamat.2004.04.012'//IO_EOL
+print'( 1x,a)', 'https://doi.org/10.1016/j.actamat.2004.04.012'

 print'(/,1x,a)', 'F. Roters et al., Computational Materials Science 39:91–95, 2007'
-print'( 1x,a)', 'https://doi.org/10.1016/j.commatsci.2006.04.014'//IO_EOL
+print'( 1x,a)', 'https://doi.org/10.1016/j.commatsci.2006.04.014'

 print'(/,1x,a)', 'S.L. Wong et al., Acta Materialia 118:140–151, 2016'
 print'( 1x,a)', 'https://doi.org/10.1016/j.actamat.2016.07.032'

@@ -306,10 +301,10 @@ module function plastic_dislotwin_init() result(myPlasticity)
 prm%b_tr = pl%get_as1dFloat('b_tr')
 prm%b_tr = math_expand(prm%b_tr,prm%N_tr)

-prm%h = pl%get_asFloat('h', defaultVal=0.0_pReal) ! ToDo: How to handle that???
+prm%h = pl%get_asFloat('h', defaultVal=0.0_pReal) ! ToDo: This is not optional!
-prm%i_tr = pl%get_asFloat('i_tr', defaultVal=0.0_pReal) ! ToDo: How to handle that???
+prm%i_tr = pl%get_asFloat('i_tr', defaultVal=0.0_pReal) ! ToDo: This is not optional!
 prm%delta_G = pl%get_asFloat('delta_G')
-prm%x_c_tr = pl%get_asFloat('x_c_tr', defaultVal=0.0_pReal) ! ToDo: How to handle that???
+prm%x_c_tr = pl%get_asFloat('x_c_tr', defaultVal=0.0_pReal) ! ToDo: This is not optional!
 prm%L_tr = pl%get_asFloat('L_tr')
 prm%a_cI = pl%get_asFloat('a_cI', defaultVal=0.0_pReal)
 prm%a_cF = pl%get_asFloat('a_cF', defaultVal=0.0_pReal)

@@ -324,10 +319,6 @@ module function plastic_dislotwin_init() result(myPlasticity)
 prm%a_cI, &
 prm%a_cF)

-if (phase_lattice(ph) /= 'cF') then
-prm%dot_N_0_tr = pl%get_as1dFloat('dot_N_0_tr')
-prm%dot_N_0_tr = math_expand(prm%dot_N_0_tr,prm%N_tr)
-endif
 prm%t_tr = pl%get_as1dFloat('t_tr')
 prm%t_tr = math_expand(prm%t_tr,prm%N_tr)
 prm%s = pl%get_as1dFloat('p_tr',defaultVal=[0.0_pReal])

@@ -339,11 +330,8 @@ module function plastic_dislotwin_init() result(myPlasticity)
 if ( prm%i_tr < 0.0_pReal) extmsg = trim(extmsg)//' i_tr'
 if (any(prm%t_tr < 0.0_pReal)) extmsg = trim(extmsg)//' t_tr'
 if (any(prm%s < 0.0_pReal)) extmsg = trim(extmsg)//' p_tr'
-if (phase_lattice(ph) /= 'cF') then
-if (any(prm%dot_N_0_tr < 0.0_pReal)) extmsg = trim(extmsg)//' dot_N_0_tr'
-end if
 else transActive
-allocate(prm%s,prm%b_tr,prm%t_tr,prm%dot_N_0_tr,source=emptyRealArray)
+allocate(prm%s,prm%b_tr,prm%t_tr,source=emptyRealArray)
 allocate(prm%h_tr_tr(0,0))
 end if transActive

@@ -443,13 +431,9 @@ module function plastic_dislotwin_init() result(myPlasticity)

 allocate(dst%Lambda_tw (prm%sum_N_tw,Nmembers),source=0.0_pReal)
 allocate(dst%tau_hat_tw (prm%sum_N_tw,Nmembers),source=0.0_pReal)
-allocate(dst%tau_r_tw (prm%sum_N_tw,Nmembers),source=0.0_pReal)
-allocate(dst%V_tw (prm%sum_N_tw,Nmembers),source=0.0_pReal)

 allocate(dst%Lambda_tr (prm%sum_N_tr,Nmembers),source=0.0_pReal)
 allocate(dst%tau_hat_tr (prm%sum_N_tr,Nmembers),source=0.0_pReal)
-allocate(dst%tau_r_tr (prm%sum_N_tr,Nmembers),source=0.0_pReal)
-allocate(dst%V_tr (prm%sum_N_tr,Nmembers),source=0.0_pReal)

 end associate
@@ -656,12 +640,14 @@ module subroutine dislotwin_dotState(Mp,T,ph,en)
 dot_gamma_tr
 real(pReal) :: &
 mu, &
-nu
+nu, &
+Gamma

 associate(prm => param(ph), stt => state(ph), dot => dotState(ph), dst => dependentState(ph))

 mu = elastic_mu(ph,en)
 nu = elastic_nu(ph,en)
+Gamma = prm%Gamma_sf(1) + prm%Gamma_sf(2) * (T-prm%T_ref)

 f_matrix = 1.0_pReal &
 - sum(stt%f_tw(1:prm%sum_N_tw,en)) &

@@ -689,8 +675,7 @@ module subroutine dislotwin_dotState(Mp,T,ph,en)
 else
 ! Argon & Moffat, Acta Metallurgica, Vol. 29, pg 293 to 299, 1981
 sigma_cl = dot_product(prm%n0_sl(1:3,i),matmul(Mp,prm%n0_sl(1:3,i)))
-b_d = merge(24.0_pReal*PI*(1.0_pReal - nu)/(2.0_pReal + nu) &
-* (prm%Gamma_sf(1) + prm%Gamma_sf(2) * T) / (mu*prm%b_sl(i)), &
+b_d = merge(24.0_pReal*PI*(1.0_pReal - nu)/(2.0_pReal + nu) * Gamma / (mu*prm%b_sl(i)), &
 1.0_pReal, &
 prm%ExtendedDislocations)
 v_cl = 2.0_pReal*prm%omega*b_d**2*exp(-prm%Q_cl/(K_B*T)) &

@@ -742,8 +727,6 @@ module subroutine dislotwin_dependentState(T,ph,en)
 real(pReal), dimension(param(ph)%sum_N_tr) :: &
 inv_lambda_tr_tr, & !< 1/mean free distance between 2 martensite stacks from different systems seen by a growing martensite
 f_over_t_tr
-real(pReal), dimension(:), allocatable :: &
-x0
 real(pReal) :: &
 mu, &
 nu

@@ -756,7 +739,7 @@ module subroutine dislotwin_dependentState(T,ph,en)
 sumf_tw = sum(stt%f_tw(1:prm%sum_N_tw,en))
 sumf_tr = sum(stt%f_tr(1:prm%sum_N_tr,en))

-Gamma = prm%Gamma_sf(1) + prm%Gamma_sf(2) * T
+Gamma = prm%Gamma_sf(1) + prm%Gamma_sf(2) * (T-prm%T_ref)

 !* rescaled volume fraction for topology
 f_over_t_tw = stt%f_tw(1:prm%sum_N_tw,en)/prm%t_tw ! this is per system ...

@@ -786,16 +769,6 @@ module subroutine dislotwin_dependentState(T,ph,en)
 + 3.0_pReal*prm%b_tr*mu/(prm%L_tr*prm%b_tr) &
 + prm%h*prm%delta_G/(3.0_pReal*prm%b_tr)

-dst%V_tw(:,en) = (PI/4.0_pReal)*prm%t_tw*dst%Lambda_tw(:,en)**2
-dst%V_tr(:,en) = (PI/4.0_pReal)*prm%t_tr*dst%Lambda_tr(:,en)**2

-x0 = mu*prm%b_tw**2/(Gamma*8.0_pReal*PI)*(2.0_pReal+nu)/(1.0_pReal-nu) ! ToDo: In the paper, this is the Burgers vector for slip
-dst%tau_r_tw(:,en) = mu*prm%b_tw/(2.0_pReal*PI)*(1.0_pReal/(x0+prm%x_c_tw)+cos(pi/3.0_pReal)/x0)

-x0 = mu*prm%b_tr**2/(Gamma*8.0_pReal*PI)*(2.0_pReal+nu)/(1.0_pReal-nu) ! ToDo: In the paper, this is the Burgers vector for slip
-dst%tau_r_tr(:,en) = mu*prm%b_tr/(2.0_pReal*PI)*(1.0_pReal/(x0+prm%x_c_tr)+cos(pi/3.0_pReal)/x0)

 end associate

 end subroutine dislotwin_dependentState
@@ -959,48 +932,68 @@ pure subroutine kinetics_tw(Mp,T,dot_gamma_sl,ph,en,&
 real(pReal), dimension(param(ph)%sum_N_tw), optional, intent(out) :: &
 ddot_gamma_dtau_tw

-real, dimension(param(ph)%sum_N_tw) :: &
+real :: &
-tau, &
+tau, tau_r, &
-Ndot0, &
+dot_N_0, &
-stressRatio_r, &
+x0, V, &
-ddot_gamma_dtau
+Gamma, &
+mu, nu, &
-integer :: i,s1,s2
+P_ncs, dP_ncs_dtau, &
+P, dP_dtau
+integer, dimension(2) :: &
+s
+integer :: i


 associate(prm => param(ph), stt => state(ph), dst => dependentState(ph))

-do i = 1, prm%sum_N_tw
-tau(i) = math_tensordot(Mp,prm%P_tw(1:3,1:3,i))
 isFCC: if (prm%fccTwinTransNucleation) then
-s1=prm%fcc_twinNucleationSlipPair(1,i)
+mu = elastic_mu(ph,en)
-s2=prm%fcc_twinNucleationSlipPair(2,i)
+nu = elastic_nu(ph,en)
-if (tau(i) < dst%tau_r_tw(i,en)) then ! ToDo: correct?
+Gamma = prm%Gamma_sf(1) + prm%Gamma_sf(2) * (T-prm%T_ref)
-Ndot0=(abs(dot_gamma_sl(s1))*(stt%rho_mob(s2,en)+stt%rho_dip(s2,en))+&
-abs(dot_gamma_sl(s2))*(stt%rho_mob(s1,en)+stt%rho_dip(s1,en)))/&
-(prm%L_tw*prm%b_sl(i))*&
-(1.0_pReal-exp(-prm%V_cs/(K_B*T)*(dst%tau_r_tw(i,en)-tau(i))))
+do i = 1, prm%sum_N_tw
+tau = math_tensordot(Mp,prm%P_tw(1:3,1:3,i))
+x0 = mu*prm%b_tw(i)**2*(2.0_pReal+nu)/(Gamma*8.0_pReal*PI*(1.0_pReal-nu)) ! ToDo: In the paper, the Burgers vector for slip is used
+tau_r = mu*prm%b_tw(i)/(2.0_pReal*PI)*(1.0_pReal/(x0+prm%x_c_tw)+cos(PI/3.0_pReal)/x0) ! ToDo: In the paper, the Burgers vector for slip is used
+
+if (tau > tol_math_check .and. tau < tau_r) then
+P = exp(-(dst%tau_hat_tw(i,en)/tau)**prm%r(i))
+dP_dTau = prm%r(i) * (dst%tau_hat_tw(i,en)/tau)**prm%r(i)/tau * P
+
+s = prm%fcc_twinNucleationSlipPair(1:2,i)
+dot_N_0 = sum(abs(dot_gamma_sl(s(2:1:-1)))*(stt%rho_mob(s,en)+stt%rho_dip(s,en))) &
+/ (prm%L_tw*prm%b_sl(i))
+
+P_ncs = 1.0_pReal-exp(-prm%V_cs/(K_B*T)*(tau_r-tau))
+dP_ncs_dtau = prm%V_cs / (K_B * T) * (P_ncs - 1.0_pReal)
+
+V = PI/4.0_pReal*dst%Lambda_tw(i,en)**2*prm%t_tw(i)
+dot_gamma_tw(i) = V*dot_N_0*P_ncs*P
+if (present(ddot_gamma_dtau_tw)) &
+ddot_gamma_dtau_tw(i) = V*dot_N_0*(P*dP_ncs_dtau + P_ncs*dP_dtau)
 else
-Ndot0=0.0_pReal
+dot_gamma_tw(i) = 0.0_pReal
+if (present(ddot_gamma_dtau_tw)) ddot_gamma_dtau_tw(i) = 0.0_pReal
 end if
-else isFCC
-Ndot0=prm%dot_N_0_tw(i)
-end if isFCC
 end do

-significantStress: where(tau > tol_math_check)
-StressRatio_r = (dst%tau_hat_tw(:,en)/tau)**prm%r
-dot_gamma_tw = prm%gamma_char * dst%V_tw(:,en) * Ndot0*exp(-StressRatio_r)
-ddot_gamma_dtau = (dot_gamma_tw*prm%r/tau)*StressRatio_r
-else where significantStress
-dot_gamma_tw = 0.0_pReal
-ddot_gamma_dtau = 0.0_pReal
-end where significantStress
+else isFCC
+do i = 1, prm%sum_N_tw
+error stop 'not implemented'
+tau = math_tensordot(Mp,prm%P_tw(1:3,1:3,i))
+if (tau > tol_math_check) then
+dot_gamma_tw(i) = 0.0_pReal
+if (present(ddot_gamma_dtau_tw)) ddot_gamma_dtau_tw(i) = 0.0_pReal
+else
+dot_gamma_tw(i) = 0.0_pReal
+if (present(ddot_gamma_dtau_tw)) ddot_gamma_dtau_tw(i) = 0.0_pReal
+end if
+end do
+end if isFCC

 end associate

-if (present(ddot_gamma_dtau_tw)) ddot_gamma_dtau_tw = ddot_gamma_dtau

 end subroutine kinetics_tw
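Side note on the reworked nucleation kinetics above (a short check, not part of the patch): with P = exp[-(tau_hat/tau)^r] and P_ncs = 1 - exp[-V_cs (tau_r - tau)/(k_B T)], the derivatives coded as dP_dTau and dP_ncs_dtau follow from

\[
\frac{\mathrm{d}P}{\mathrm{d}\tau} = \frac{r}{\tau}\left(\frac{\hat{\tau}}{\tau}\right)^{r} P,
\qquad
\frac{\mathrm{d}P_\mathrm{ncs}}{\mathrm{d}\tau} = \frac{V_\mathrm{cs}}{k_\mathrm{B}T}\left(P_\mathrm{ncs}-1\right),
\]

so that \(\dot\gamma = V\,\dot N_0\,P_\mathrm{ncs}\,P\) yields \(\mathrm{d}\dot\gamma/\mathrm{d}\tau = V\,\dot N_0\,(P\,\mathrm{d}P_\mathrm{ncs}/\mathrm{d}\tau + P_\mathrm{ncs}\,\mathrm{d}P/\mathrm{d}\tau)\), which is the expression assigned to ddot_gamma_dtau_tw, and analogously (with exponent s) to ddot_gamma_dtau_tr in the next hunk.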
@@ -1029,47 +1022,53 @@ pure subroutine kinetics_tr(Mp,T,dot_gamma_sl,ph,en,&
 real(pReal), dimension(param(ph)%sum_N_tr), optional, intent(out) :: &
 ddot_gamma_dtau_tr

-real, dimension(param(ph)%sum_N_tr) :: &
+real :: &
-tau, &
+tau, tau_r, &
-Ndot0, &
+dot_N_0, &
-stressRatio_s, &
+x0, V, &
-ddot_gamma_dtau
+Gamma, &
-integer :: i,s1,s2
+mu, nu, &
+P_ncs, dP_ncs_dtau, &
+P, dP_dtau
+integer, dimension(2) :: &
+s
+integer :: i


 associate(prm => param(ph), stt => state(ph), dst => dependentState(ph))

+mu = elastic_mu(ph,en)
+nu = elastic_nu(ph,en)
+Gamma = prm%Gamma_sf(1) + prm%Gamma_sf(2) * (T-prm%T_ref)
+
 do i = 1, prm%sum_N_tr
-tau(i) = math_tensordot(Mp,prm%P_tr(1:3,1:3,i))
+tau = math_tensordot(Mp,prm%P_tr(1:3,1:3,i))
-isFCC: if (prm%fccTwinTransNucleation) then
+x0 = mu*prm%b_tr(i)**2*(2.0_pReal+nu)/(Gamma*8.0_pReal*PI*(1.0_pReal-nu)) ! ToDo: In the paper, the Burgers vector for slip is used
-s1=prm%fcc_twinNucleationSlipPair(1,i)
+tau_r = mu*prm%b_tr(i)/(2.0_pReal*PI)*(1.0_pReal/(x0+prm%x_c_tr)+cos(PI/3.0_pReal)/x0) ! ToDo: In the paper, the Burgers vector for slip is used
-s2=prm%fcc_twinNucleationSlipPair(2,i)
-if (tau(i) < dst%tau_r_tr(i,en)) then ! ToDo: correct?
+if (tau > tol_math_check .and. tau < tau_r) then
-Ndot0=(abs(dot_gamma_sl(s1))*(stt%rho_mob(s2,en)+stt%rho_dip(s2,en))+&
+P = exp(-(dst%tau_hat_tr(i,en)/tau)**prm%s(i))
-abs(dot_gamma_sl(s2))*(stt%rho_mob(s1,en)+stt%rho_dip(s1,en)))/&
+dP_dTau = prm%s(i) * (dst%tau_hat_tr(i,en)/tau)**prm%s(i)/tau * P
-(prm%L_tr*prm%b_sl(i))*&
-(1.0_pReal-exp(-prm%V_cs/(K_B*T)*(dst%tau_r_tr(i,en)-tau(i))))
+s = prm%fcc_twinNucleationSlipPair(1:2,i)
+dot_N_0 = sum(abs(dot_gamma_sl(s(2:1:-1)))*(stt%rho_mob(s,en)+stt%rho_dip(s,en))) &
+/ (prm%L_tr*prm%b_sl(i))
+
+P_ncs = 1.0_pReal-exp(-prm%V_cs/(K_B*T)*(tau_r-tau))
+dP_ncs_dtau = prm%V_cs / (K_B * T) * (P_ncs - 1.0_pReal)
+
+V = PI/4.0_pReal*dst%Lambda_tr(i,en)**2*prm%t_tr(i)
+dot_gamma_tr(i) = V*dot_N_0*P_ncs*P
+if (present(ddot_gamma_dtau_tr)) &
+ddot_gamma_dtau_tr(i) = V*dot_N_0*(P*dP_ncs_dtau + P_ncs*dP_dtau)
 else
-Ndot0=0.0_pReal
+dot_gamma_tr(i) = 0.0_pReal
+if (present(ddot_gamma_dtau_tr)) ddot_gamma_dtau_tr(i) = 0.0_pReal
 end if
-else isFCC
-Ndot0=prm%dot_N_0_tr(i)
-end if isFCC
 end do

-significantStress: where(tau > tol_math_check)
-StressRatio_s = (dst%tau_hat_tr(:,en)/tau)**prm%s
-dot_gamma_tr = dst%V_tr(:,en) * Ndot0*exp(-StressRatio_s)
-ddot_gamma_dtau = (dot_gamma_tr*prm%s/tau)*StressRatio_s
-else where significantStress
-dot_gamma_tr = 0.0_pReal
-ddot_gamma_dtau = 0.0_pReal
-end where significantStress

 end associate

-if (present(ddot_gamma_dtau_tr)) ddot_gamma_dtau_tr = ddot_gamma_dtau

 end subroutine kinetics_tr

 end submodule dislotwin
@ -203,7 +203,7 @@ module function plastic_nonlocal_init() result(myPlasticity)
|
||||||
print'(/,a,i0)', ' # phases: ',Ninstances; flush(IO_STDOUT)
|
print'(/,a,i0)', ' # phases: ',Ninstances; flush(IO_STDOUT)
|
||||||
|
|
||||||
print'(/,1x,a)', 'C. Reuber et al., Acta Materialia 71:333–348, 2014'
|
print'(/,1x,a)', 'C. Reuber et al., Acta Materialia 71:333–348, 2014'
|
||||||
print'( 1x,a)', 'https://doi.org/10.1016/j.actamat.2014.03.012'//IO_EOL
|
print'( 1x,a)', 'https://doi.org/10.1016/j.actamat.2014.03.012'
|
||||||
|
|
||||||
print'(/,1x,a)', 'C. Kords, Dissertation RWTH Aachen, 2014'
|
print'(/,1x,a)', 'C. Kords, Dissertation RWTH Aachen, 2014'
|
||||||
print'( 1x,a)', 'http://publications.rwth-aachen.de/record/229993'
|
print'( 1x,a)', 'http://publications.rwth-aachen.de/record/229993'
|
||||||
|
@ -1570,7 +1570,6 @@ subroutine stateInit(ini,phase,Nentries)
|
||||||
upto, &
|
upto, &
|
||||||
s
|
s
|
||||||
real(pReal), dimension(2) :: &
|
real(pReal), dimension(2) :: &
|
||||||
noise, &
|
|
||||||
rnd
|
rnd
|
||||||
real(pReal) :: &
|
real(pReal) :: &
|
||||||
meanDensity, &
|
meanDensity, &
|
||||||
|
|
src/prec.f90 (26 lines changed)

@ -10,6 +10,11 @@ module prec
 use, intrinsic :: IEEE_arithmetic
 use, intrinsic :: ISO_C_binding
+#ifdef PETSC
+#include <petsc/finclude/petscsys.h>
+  use PETScSys
+#endif

 implicit none
 public

@ -17,13 +22,12 @@ module prec
   integer, parameter :: pReal = IEEE_selected_real_kind(15,307)   !< number with 15 significant digits, up to 1e+-307 (typically 64 bit)
   integer, parameter :: pI32  = selected_int_kind(9)              !< number with at least up to +-1e9 (typically 32 bit)
   integer, parameter :: pI64  = selected_int_kind(18)             !< number with at least up to +-1e18 (typically 64 bit)
-#if(INT==8)
-  integer, parameter :: pInt = pI64
-#else
-  integer, parameter :: pInt = pI32
-#endif
+#ifdef PETSC
+  PetscInt, private :: dummy
+  integer, parameter :: pPETSCINT = kind(dummy)
+#endif
-  integer, parameter :: pStringLen = 256                          !< default string length
+  integer, parameter :: pSTRINGLEN = 256                          !< default string length
-  integer, parameter :: pPathLen = 4096                           !< maximum length of a path name on linux
+  integer, parameter :: pPATHLEN = 4096                           !< maximum length of a path name on linux

   real(pReal), parameter :: tol_math_check = 1.0e-8_pReal         !< tolerance for internal math self-checks (rotation)

@ -268,7 +272,7 @@ subroutine selfTest
   integer, allocatable, dimension(:) :: realloc_lhs_test
   real(pReal),   dimension(1) :: f
-  integer(pInt), dimension(1) :: i
+  integer(pI64), dimension(1) :: i
   real(pReal),   dimension(2) :: r

@ -289,11 +293,11 @@ subroutine selfTest
   f = real(prec_bytesToC_DOUBLE(int([0,0,0,-32,+119,+65,+115,65],C_SIGNED_CHAR)),pReal)
   if (dNeq(f(1),20191102.0_pReal,0.0_pReal)) error stop 'prec_bytesToC_DOUBLE'

-  i = int(prec_bytesToC_INT32_T(int([+126,+23,+52,+1],C_SIGNED_CHAR)),pInt)
+  i = int(prec_bytesToC_INT32_T(int([+126,+23,+52,+1],C_SIGNED_CHAR)),pI64)
-  if (i(1) /= 20191102_pInt) error stop 'prec_bytesToC_INT32_T'
+  if (i(1) /= 20191102_pI64) error stop 'prec_bytesToC_INT32_T'

-  i = int(prec_bytesToC_INT64_T(int([+126,+23,+52,+1,0,0,0,0],C_SIGNED_CHAR)),pInt)
+  i = int(prec_bytesToC_INT64_T(int([+126,+23,+52,+1,0,0,0,0],C_SIGNED_CHAR)),pI64)
-  if (i(1) /= 20191102_pInt) error stop 'prec_bytesToC_INT64_T'
+  if (i(1) /= 20191102_pI64) error stop 'prec_bytesToC_INT64_T'

 end subroutine selfTest
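The pPETSCINT change above relies on a plain Fortran rule: an inquiry into the kind of a previously declared variable is a constant expression, so a kind parameter can be derived from whatever integer kind PETSc was configured with. Below is a minimal standalone sketch of the same pattern; it is not part of the commit, pDEMOINT is a hypothetical name, and ISO_fortran_env's int64 stands in for PetscInt so the sketch needs no PETSc installation.

program kind_from_dummy
  use, intrinsic :: ISO_fortran_env, only: int64
  implicit none

  integer(int64) :: dummy                               ! stand-in for the PetscInt dummy used under #ifdef PETSC
  integer, parameter :: pDEMOINT = kind(dummy)          ! same trick as pPETSCINT = kind(dummy)
  integer, parameter :: pI32 = selected_int_kind(9)     ! at least +-1e9  (typically 32 bit)
  integer, parameter :: pI64 = selected_int_kind(18)    ! at least +-1e18 (typically 64 bit)

  print '(a,i0)', 'pDEMOINT = ', pDEMOINT               ! follows the kind of dummy, here int64
  print '(a,i0)', 'pI32     = ', pI32
  print '(a,i0)', 'pI64     = ', pI64
end program kind_from_dummy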
src/quit.f90 (28 lines changed)

@ -15,20 +15,23 @@ subroutine quit(stop_id)
 implicit none
 integer, intent(in) :: stop_id
 integer, dimension(8) :: dateAndTime
-integer :: error
+integer :: err_HDF5
-PetscErrorCode :: ierr = 0
+integer(MPI_INTEGER_KIND) :: err_MPI
+PetscErrorCode :: err_PETSc

-call h5open_f(error)
+call h5open_f(err_HDF5)
-if (error /= 0) write(6,'(a,i5)') ' Error in h5open_f ',error                               ! prevents error if not opened yet
+if (err_HDF5 /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in h5open_f ',err_HDF5        ! prevents error if not opened yet
-call h5close_f(error)
+call h5close_f(err_HDF5)
-if (error /= 0) write(6,'(a,i5)') ' Error in h5close_f ',error
+if (err_HDF5 /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in h5close_f ',err_HDF5

-call PetscFinalize(ierr)
+call PetscFinalize(err_PETSc)
-CHKERRQ(ierr)
+CHKERRQ(err_PETSc)

 #ifdef _OPENMP
-call MPI_finalize(error)
+call MPI_finalize(err_MPI)
-if (error /= 0) write(6,'(a,i5)') ' Error in MPI_finalize',error
+if (err_MPI /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in MPI_finalize',err_MPI
+#else
+err_MPI = 0_MPI_INTEGER_KIND
 #endif

 call date_and_time(values = dateAndTime)

@ -40,7 +43,10 @@ subroutine quit(stop_id)
   dateAndTime(6),':',&
   dateAndTime(7)

-if (stop_id == 0 .and. ierr == 0 .and. error == 0) stop 0                                   ! normal termination
+if (stop_id == 0 .and. &
+    err_HDF5 == 0 .and. &
+    err_MPI == 0_MPI_INTEGER_KIND .and. &
+    err_PETSC == 0) stop 0                                                                  ! normal termination
 stop 1                                                                                      ! error (message from IO_error)

 end subroutine quit
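The quit() rework above replaces the shared error/ierr pair with one status variable per library and only terminates with exit code 0 when every one of them is clean. A minimal, dependency-free sketch of that termination logic (the err_* values are hard-coded here instead of coming from HDF5, MPI, and PETSc calls):

program quit_logic_demo
  implicit none

  integer :: stop_id, err_HDF5, err_MPI, err_PETSc

  stop_id   = 0      ! 0 = regular termination requested
  err_HDF5  = 0      ! in quit(): return code of h5open_f/h5close_f
  err_MPI   = 0      ! in quit(): return code of MPI_finalize (forced to 0 without _OPENMP)
  err_PETSc = 0      ! in quit(): return code of PetscFinalize

  if (stop_id == 0 .and. &
      err_HDF5 == 0 .and. &
      err_MPI == 0 .and. &
      err_PETSc == 0) then
    stop 0                                           ! normal termination
  else
    stop 1                                           ! any nonzero status ends with an error code
  end if
end program quit_logic_demo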
@ -497,9 +497,9 @@ subroutine results_mapping_phase(ID,entry,label)
   integer,          dimension(:,:), intent(in) :: entry                                     !< phase entry at (co,ce)
   character(len=*), dimension(:),   intent(in) :: label                                     !< label of each phase section

-  integer, dimension(size(entry,1),size(entry,2)) :: &
+  integer(pI64), dimension(size(entry,1),size(entry,2)) :: &
     entryGlobal
-  integer, dimension(size(label),0:worldsize-1) :: entryOffset                              !< offset in entry counting per process
+  integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset                        !< offset in entry counting per process
   integer, dimension(0:worldsize-1) :: writeSize                                            !< amount of data written per process
   integer(HSIZE_T), dimension(2) :: &
     myShape, &                                                                              !< shape of the dataset (this process)

@ -507,6 +507,7 @@ subroutine results_mapping_phase(ID,entry,label)
     totalShape                                                                              !< shape of the dataset (all processes)

   integer(HID_T) :: &
+    pI64_t, &                                                                               !< HDF5 type for pI64 (8 bit integer)
     loc_id, &                                                                               !< identifier of group in file
     dtype_id, &                                                                             !< identifier of compound data type
     label_id, &                                                                             !< identifier of label (string) in compound data type

@ -518,7 +519,8 @@ subroutine results_mapping_phase(ID,entry,label)
     dt_id

   integer(SIZE_T) :: type_size_string, type_size_int
-  integer :: hdferr, ierr, ce, co
+  integer :: hdferr, ce, co
+  integer(MPI_INTEGER_KIND) :: err_MPI


   writeSize = 0

@ -528,28 +530,28 @@ subroutine results_mapping_phase(ID,entry,label)
   if(hdferr < 0) error stop 'HDF5 error'

 #ifndef PETSC
-  entryGlobal = entry -1                                                                    ! 0-based
+  entryGlobal = int(entry -1,pI64)                                                          ! 0-based
 #else
 !--------------------------------------------------------------------------------------------------
 ! MPI settings and communication
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)               ! get output at each process
+  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)        ! get output at each process
-  if(ierr /= 0) error stop 'MPI error'
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

-  entryOffset = 0
+  entryOffset = 0_pI64
   do co = 1, size(ID,1)
     do ce = 1, size(ID,2)
-      entryOffset(ID(co,ce),worldrank) = entryOffset(ID(co,ce),worldrank) +1
+      entryOffset(ID(co,ce),worldrank) = entryOffset(ID(co,ce),worldrank) +1_pI64
     end do
   end do
-  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)     ! get offset at each process
+  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INTEGER8,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get offset at each process
-  if(ierr /= 0) error stop 'MPI error'
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
   do co = 1, size(ID,1)
     do ce = 1, size(ID,2)
-      entryGlobal(co,ce) = entry(co,ce) -1 + entryOffset(ID(co,ce),worldrank)
+      entryGlobal(co,ce) = int(entry(co,ce),pI64) -1_pI64 + entryOffset(ID(co,ce),worldrank)
     end do
   end do
 #endif

@ -567,14 +569,15 @@ subroutine results_mapping_phase(ID,entry,label)
   call h5tget_size_f(dt_id, type_size_string, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-  call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, hdferr)
+  pI64_t = h5kind_to_type(kind(entryGlobal),H5_INTEGER_KIND)
+  call h5tget_size_f(pI64_t, type_size_int, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

   call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
   call h5tinsert_f(dtype_id, 'label', 0_SIZE_T, dt_id,hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-  call h5tinsert_f(dtype_id, 'entry', type_size_string, H5T_NATIVE_INTEGER, hdferr)
+  call h5tinsert_f(dtype_id, 'entry', type_size_string, pI64_t, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

 !--------------------------------------------------------------------------------------------------

@ -586,7 +589,7 @@ subroutine results_mapping_phase(ID,entry,label)
   call h5tcreate_f(H5T_COMPOUND_F, type_size_int, entry_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-  call h5tinsert_f(entry_id, 'entry', 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
+  call h5tinsert_f(entry_id, 'entry', 0_SIZE_T, pI64_t, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

   call h5tclose_f(dt_id, hdferr)
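The #else branch above builds a 0-based global entry index per label: every rank counts its own entries, an MPI_Allreduce makes all counts known everywhere, and the offset of a rank is the sum of the counts of all lower ranks. A serial sketch of that arithmetic follows; it is illustrative only, the Allreduce is replaced by a hard-coded count for a fictitious rank 0, and int64 stands in for pI64.

program entry_offset_demo
  use, intrinsic :: ISO_fortran_env, only: int64
  implicit none

  integer, parameter :: nLabel = 2, worldsize = 2, worldrank = 1   ! pretend to be rank 1 of 2
  integer(int64) :: entryOffset(nLabel,0:worldsize-1), entryGlobal(3)
  integer :: ID(3), entry(3), ce

  ID    = [1, 2, 1]                           ! label of each local entry on this rank
  entry = [1, 1, 2]                           ! 1-based entry number within its label on this rank

  entryOffset = 0_int64
  do ce = 1, size(ID)                         ! count own entries per label (same loop as above)
    entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) + 1_int64
  end do
  entryOffset(:,0) = int([2,1],int64)         ! counts of rank 0, hard-coded here instead of MPI_Allreduce

  entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)   ! offset = entries of all lower ranks
  do ce = 1, size(ID)
    entryGlobal(ce) = int(entry(ce),int64) - 1_int64 + entryOffset(ID(ce),worldrank)   ! 0-based global index
  end do

  print '(a,3(1x,i0))', 'entryGlobal:', entryGlobal               ! prints "entryGlobal: 2 1 3"
end program entry_offset_demo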
@ -650,9 +653,9 @@ subroutine results_mapping_homogenization(ID,entry,label)
   integer,          dimension(:), intent(in) :: entry                                       !< homogenization entry at (ce)
   character(len=*), dimension(:), intent(in) :: label                                       !< label of each homogenization section

-  integer, dimension(size(entry,1)) :: &
+  integer(pI64), dimension(size(entry,1)) :: &
     entryGlobal
-  integer, dimension(size(label),0:worldsize-1) :: entryOffset                              !< offset in entry counting per process
+  integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset                        !< offset in entry counting per process
   integer, dimension(0:worldsize-1) :: writeSize                                            !< amount of data written per process
   integer(HSIZE_T), dimension(1) :: &
     myShape, &                                                                              !< shape of the dataset (this process)

@ -660,6 +663,7 @@ subroutine results_mapping_homogenization(ID,entry,label)
     totalShape                                                                              !< shape of the dataset (all processes)

   integer(HID_T) :: &
+    pI64_t, &                                                                               !< HDF5 type for pI64 (8 bit integer)
     loc_id, &                                                                               !< identifier of group in file
     dtype_id, &                                                                             !< identifier of compound data type
     label_id, &                                                                             !< identifier of label (string) in compound data type

@ -671,7 +675,8 @@ subroutine results_mapping_homogenization(ID,entry,label)
     dt_id

   integer(SIZE_T) :: type_size_string, type_size_int
-  integer :: hdferr, ierr, ce
+  integer :: hdferr, ce
+  integer(MPI_INTEGER_KIND) :: err_MPI


   writeSize = 0

@ -681,25 +686,25 @@ subroutine results_mapping_homogenization(ID,entry,label)
   if(hdferr < 0) error stop 'HDF5 error'

 #ifndef PETSC
-  entryGlobal = entry -1                                                                    ! 0-based
+  entryGlobal = int(entry -1,pI64)                                                          ! 0-based
 #else
 !--------------------------------------------------------------------------------------------------
 ! MPI settings and communication
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)               ! get output at each process
+  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)        ! get output at each process
-  if(ierr /= 0) error stop 'MPI error'
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

-  entryOffset = 0
+  entryOffset = 0_pI64
   do ce = 1, size(ID,1)
-    entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1
+    entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1_pI64
   end do
-  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)     ! get offset at each process
+  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INTEGER8,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get offset at each process
-  if(ierr /= 0) error stop 'MPI error'
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
   do ce = 1, size(ID,1)
-    entryGlobal(ce) = entry(ce) -1 + entryOffset(ID(ce),worldrank)
+    entryGlobal(ce) = int(entry(ce),pI64) -1_pI64 + entryOffset(ID(ce),worldrank)
   end do
 #endif

@ -716,14 +721,15 @@ subroutine results_mapping_homogenization(ID,entry,label)
   call h5tget_size_f(dt_id, type_size_string, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-  call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, hdferr)
+  pI64_t = h5kind_to_type(kind(entryGlobal),H5_INTEGER_KIND)
+  call h5tget_size_f(pI64_t, type_size_int, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

   call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
   call h5tinsert_f(dtype_id, 'label', 0_SIZE_T, dt_id,hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-  call h5tinsert_f(dtype_id, 'entry', type_size_string, H5T_NATIVE_INTEGER, hdferr)
+  call h5tinsert_f(dtype_id, 'entry', type_size_string, pI64_t, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

 !--------------------------------------------------------------------------------------------------

@ -735,7 +741,7 @@ subroutine results_mapping_homogenization(ID,entry,label)
   call h5tcreate_f(H5T_COMPOUND_F, type_size_int, entry_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-  call h5tinsert_f(entry_id, 'entry', 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
+  call h5tinsert_f(entry_id, 'entry', 0_SIZE_T, pI64_t, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

   call h5tclose_f(dt_id, hdferr)
@ -270,7 +270,7 @@ pure elemental subroutine standardize(self)
   class(rotation), intent(inout) :: self

-  if (self%q(1) < 0.0_pReal) self%q = - self%q
+  if (sign(1.0_pReal,self%q(1)) < 0.0_pReal) self%q = - self%q

 end subroutine standardize

@ -450,7 +450,7 @@ pure function qu2om(qu) result(om)
   om(3,2) = 2.0_pReal*(qu(4)*qu(3)+qu(1)*qu(2))
   om(1,3) = 2.0_pReal*(qu(2)*qu(4)+qu(1)*qu(3))

-  if (P < 0.0_pReal) om = transpose(om)
+  if (sign(1.0_pReal,P) < 0.0_pReal) om = transpose(om)

 end function qu2om

@ -480,7 +480,7 @@ pure function qu2eu(qu) result(eu)
           atan2( 2.0_pReal*chi, q03-q12 ), &
           atan2(( P*qu(1)*qu(3)+qu(2)*qu(4))*chi, (-P*qu(1)*qu(2)+qu(3)*qu(4))*chi )]
   endif degenerated
-  where(eu<0.0_pReal) eu = mod(eu+2.0_pReal*PI,[2.0_pReal*PI,PI,2.0_pReal*PI])
+  where(sign(1.0_pReal,eu)<0.0_pReal) eu = mod(eu+2.0_pReal*PI,[2.0_pReal*PI,PI,2.0_pReal*PI])

 end function qu2eu

@ -602,7 +602,7 @@ pure function om2qu(om) result(qu)
       qu = [ (om(2,1) - om(1,2)) /s,(om(1,3) + om(3,1)) / s,(om(2,3) + om(3,2)) / s,0.25_pReal * s]
     endif
   endif
-  if(qu(1)<0._pReal) qu =-1.0_pReal * qu
+  if(sign(1.0_pReal,qu(1))<0.0_pReal) qu =-1.0_pReal * qu
   qu = qu*[1.0_pReal,P,P,P]

 end function om2qu

@ -628,7 +628,7 @@ pure function om2eu(om) result(eu)
     eu = [atan2(om(1,2),om(1,1)), 0.5_pReal*PI*(1.0_pReal-om(3,3)),0.0_pReal ]
   end if
   where(abs(eu) < 1.e-8_pReal) eu = 0.0_pReal
-  where(eu<0.0_pReal) eu = mod(eu+2.0_pReal*PI,[2.0_pReal*PI,PI,2.0_pReal*PI])
+  where(sign(1.0_pReal,eu)<0.0_pReal) eu = mod(eu+2.0_pReal*PI,[2.0_pReal*PI,PI,2.0_pReal*PI])

 end function om2eu

@ -735,7 +735,7 @@ pure function eu2qu(eu) result(qu)
         -P*sPhi*cos(ee(1)-ee(3)), &
         -P*sPhi*sin(ee(1)-ee(3)), &
         -P*cPhi*sin(ee(1)+ee(3))]
-  if(qu(1) < 0.0_pReal) qu = qu * (-1.0_pReal)
+  if(sign(1.0_pReal,qu(1)) < 0.0_pReal) qu = qu * (-1.0_pReal)

 end function eu2qu

@ -792,7 +792,7 @@ pure function eu2ax(eu) result(ax)
   else
     ax(1:3) = -P/tau * [ t*cos(delta), t*sin(delta), sin(sigma) ]                           ! passive axis-angle pair so a minus sign in front
     ax(4) = alpha
-    if (alpha < 0.0_pReal) ax = -ax                                                         ! ensure alpha is positive
+    if (sign(1.0_pReal,alpha) < 0.0_pReal) ax = -ax                                         ! ensure alpha is positive
   end if

 end function eu2ax
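All of the rotation changes above swap a plain x < 0.0_pReal test for sign(1.0_pReal,x) < 0.0_pReal, which also fires for IEEE negative zero, whereas the plain comparison does not. A standalone sketch of the difference, presumably the reason for the change (not part of the commit):

program negative_zero_demo
  implicit none
  integer, parameter :: pReal = selected_real_kind(15,307)   ! same 64-bit real kind as prec.f90
  real(pReal) :: q1

  q1 = -0.0_pReal                                            ! IEEE negative zero, e.g. the result of -1.0*0.0

  print '(a,l2)', 'q1 < 0.0           :', q1 < 0.0_pReal                     ! F: -0.0 compares equal to 0.0
  print '(a,l2)', 'sign(1.0,q1) < 0.0 :', sign(1.0_pReal,q1) < 0.0_pReal     ! T: sign() picks up the sign bit
end program negative_zero_demo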