Merge branch 'misc-improvements' into development
commit a38aca9351
@@ -108,9 +108,9 @@ if (DAMASK_SOLVER STREQUAL "grid")
   project (damask-grid Fortran C)
   add_definitions (-DGrid)
   message ("Building Grid Solver\n")
-elseif (DAMASK_SOLVER STREQUAL "fem" OR DAMASK_SOLVER STREQUAL "mesh")
+elseif (DAMASK_SOLVER STREQUAL "mesh")
   project (damask-mesh Fortran C)
-  add_definitions (-DFEM)
+  add_definitions (-DMesh)
   message ("Building Mesh Solver\n")
 else ()
   message (FATAL_ERROR "Build target (DAMASK_SOLVER) is not defined")
Makefile (6 changed lines)
@@ -2,19 +2,23 @@ SHELL = /bin/sh
 ########################################################################################
 # Makefile for the installation of DAMASK
 ########################################################################################
-DAMASK_ROOT = $(shell python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
+DAMASK_ROOT = $(shell python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
 .PHONY: all
 all: grid mesh processing
 
 .PHONY: grid
 grid: build/grid
 	@(cd build/grid;make -j${DAMASK_NUM_THREADS} all install;)
+	@rm -f ${DAMASK_ROOT}/bin/DAMASK_spectral > /dev/null || true
+	@ln -s ${DAMASK_ROOT}/bin/DAMASK_grid ${DAMASK_ROOT}/bin/DAMASK_spectral || true
 .PHONY: spectral
 spectral: grid
 
 .PHONY: mesh
 mesh: build/mesh
 	@(cd build/mesh; make -j${DAMASK_NUM_THREADS} all install;)
+	@rm -f ${DAMASK_ROOT}/bin/DAMASK_FEM > /dev/null || true
+	@ln -s ${DAMASK_ROOT}/bin/DAMASK_mesh ${DAMASK_ROOT}/bin/DAMASK_FEM || true
 .PHONY: FEM
 FEM: mesh
 
PRIVATE (2 changed lines)
@@ -1 +1 @@
-Subproject commit c595994cd8880acadf50b5dedb79156d04d35b91
+Subproject commit 038af521a1ef70ed77b132c426bc1a4880db01ef
@@ -25,6 +25,9 @@ set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
 set (COMPILE_FLAGS "${COMPILE_FLAGS} -xf95-cpp-input")
 # preprocessor
 
+set (COMPILE_FLAGS "${COMPILE_FLAGS} -fPIC -fPIE")
+# position independent code
+
 set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffree-line-length-132")
 # restrict line length to the standard 132 characters (lattice.f90 require more characters)
 
@@ -3,7 +3,7 @@
 
 set CALLED=($_)
 set ENV_ROOT=`dirname $CALLED[2]`
-set DAMASK_ROOT=`python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" $ENV_ROOT"/../"`
+set DAMASK_ROOT=`python3 -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" $ENV_ROOT"/../"`
 
 source $ENV_ROOT/CONFIG
 
@@ -2,7 +2,7 @@
 # usage: source DAMASK.sh
 
 function canonicalPath {
-  python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
+  python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
 }
 
 function blink {
@@ -2,7 +2,7 @@
 # usage: source DAMASK.zsh
 
 function canonicalPath {
-  python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
+  python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
 }
 
 function blink {
@@ -1,3 +0,0 @@
-hydrogenflux cahnhilliard
-initialHydrogenConc 0.0
-(output) hydrogenconc
@@ -2,6 +2,8 @@ import multiprocessing
 import re
 import glob
 import os
+import xml.etree.ElementTree as ET
+import xml.dom.minidom
 from functools import partial
 
 import h5py
@@ -65,6 +67,10 @@ class Result:
       self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])]
       self.constituents   = [c.decode() for c in np.unique(f['mapping/cellResults/constituent']  ['Name'])]
 
+      # faster, but does not work with (deprecated) DADF5_postResults
+      #self.materialpoints = [m for m in f['inc0/materialpoint']]
+      #self.constituents   = [c for c in f['inc0/constituent']]
+
       self.con_physics = []
       for c in self.constituents:
         self.con_physics += f['/'.join([self.increments[0],'constituent',c])].keys()
@@ -80,7 +86,7 @@ class Result:
                       'con_physics': self.con_physics, 'mat_physics': self.mat_physics
                      }
 
-    self.fname = fname
+    self.fname = os.path.abspath(fname)
 
 
  def __repr__(self):
@@ -1035,6 +1041,102 @@ class Result:
       pool.join()
 
 
+  def write_XMDF(self):
+    """
+    Write XDMF file to directly visualize data in DADF5 file.
+
+    This works only for scalar, 3-vector and 3x3-tensor data.
+    Selection is not taken into account.
+    """
+    if len(self.constituents) != 1 or not self.structured:
+      raise NotImplementedError
+
+    xdmf=ET.Element('Xdmf')
+    xdmf.attrib={'Version':  '2.0',
+                 'xmlns:xi': 'http://www.w3.org/2001/XInclude'}
+
+    domain=ET.SubElement(xdmf, 'Domain')
+
+    collection = ET.SubElement(domain, 'Grid')
+    collection.attrib={'GridType':       'Collection',
+                       'CollectionType': 'Temporal'}
+
+    time = ET.SubElement(collection, 'Time')
+    time.attrib={'TimeType': 'List'}
+
+    time_data = ET.SubElement(time, 'DataItem')
+    time_data.attrib={'Format':     'XML',
+                      'NumberType': 'Float',
+                      'Dimensions': '{}'.format(len(self.times))}
+    time_data.text = ' '.join(map(str,self.times))
+
+    attributes = []
+    data_items = []
+
+    for inc in self.increments:
+
+      grid=ET.SubElement(collection,'Grid')
+      grid.attrib = {'GridType': 'Uniform',
+                     'Name':     inc}
+
+      topology=ET.SubElement(grid, 'Topology')
+      topology.attrib={'TopologyType': '3DCoRectMesh',
+                       'Dimensions':   '{} {} {}'.format(*self.grid+1)}
+
+      geometry=ET.SubElement(grid, 'Geometry')
+      geometry.attrib={'GeometryType':'Origin_DxDyDz'}
+
+      origin=ET.SubElement(geometry, 'DataItem')
+      origin.attrib={'Format':     'XML',
+                     'NumberType': 'Float',
+                     'Dimensions': '3'}
+      origin.text="{} {} {}".format(*self.origin)
+
+      delta=ET.SubElement(geometry, 'DataItem')
+      delta.attrib={'Format':     'XML',
+                    'NumberType': 'Float',
+                    'Dimensions': '3'}
+      delta.text="{} {} {}".format(*(self.size/self.grid))
+
+
+      with h5py.File(self.fname,'r') as f:
+        attributes.append(ET.SubElement(grid, 'Attribute'))
+        attributes[-1].attrib={'Name':          'u',
+                               'Center':        'Node',
+                               'AttributeType': 'Vector'}
+        data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
+        data_items[-1].attrib={'Format':     'HDF',
+                               'Precision':  '8',
+                               'Dimensions': '{} {} {} 3'.format(*(self.grid+1))}
+        data_items[-1].text='{}:/{}/geometry/u_n'.format(os.path.split(self.fname)[1],inc)
+
+        for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+          for oo in getattr(self,o):
+            for pp in getattr(self,p):
+              g = '/'.join([inc,o[:-1],oo,pp])
+              for l in f[g]:
+                name = '/'.join([g,l])
+                shape = f[name].shape[1:]
+                dtype = f[name].dtype
+                prec  = f[name].dtype.itemsize
+
+                if (shape not in [(1,), (3,), (3,3)]) or dtype != np.float64: continue
+
+                attributes.append(ET.SubElement(grid, 'Attribute'))
+                attributes[-1].attrib={'Name':          '{}'.format(name.split('/',2)[2]),
+                                       'Center':        'Cell',
+                                       'AttributeType': 'Tensor'}
+                data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
+                data_items[-1].attrib={'Format':     'HDF',
+                                       'NumberType': 'Float',
+                                       'Precision':  '{}'.format(prec),
+                                       'Dimensions': '{} {} {} {}'.format(*self.grid,np.prod(shape))}
+                data_items[-1].text='{}:{}'.format(os.path.split(self.fname)[1],name)
+
+    with open(os.path.splitext(self.fname)[0]+'.xdmf','w') as f:
+      f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())
+
+
   def to_vtk(self,labels=[],mode='cell'):
     """
     Export to vtk cell/point data.
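A minimal usage sketch for the XDMF export added above; the DADF5 file name 'geom_load.hdf5' and the assumption that the class is exposed as damask.Result are illustrative, only the method name write_XMDF comes from the diff:

    import damask

    # open an existing DADF5 results file (hypothetical name) and write a
    # companion .xdmf file that references the HDF5 datasets, so ParaView
    # can display the cell data without a separate VTK conversion step
    result = damask.Result('geom_load.hdf5')
    result.write_XMDF()   # covers scalar, 3-vector and 3x3-tensor float data only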
@@ -238,12 +238,13 @@ def cell_coord0_gridSizeOrigin(coord0,ordered=True):
   start = origin + delta*.5
   end   = origin - delta*.5 + size
 
-  if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],grid[0])) and \
-          _np.allclose(coords[1],_np.linspace(start[1],end[1],grid[1])) and \
-          _np.allclose(coords[2],_np.linspace(start[2],end[2],grid[2]))):
+  atol = _np.max(size)*5e-2
+  if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],grid[0]),atol=atol) and \
+          _np.allclose(coords[1],_np.linspace(start[1],end[1],grid[1]),atol=atol) and \
+          _np.allclose(coords[2],_np.linspace(start[2],end[2],grid[2]),atol=atol)):
     raise ValueError('Regular grid spacing violated.')
 
-  if ordered and not _np.allclose(coord0.reshape(tuple(grid)+(3,),order='F'),cell_coord0(grid,size,origin)):
+  if ordered and not _np.allclose(coord0.reshape(tuple(grid)+(3,),order='F'),cell_coord0(grid,size,origin),atol=atol):
     raise ValueError('Input data is not ordered (x fast, z slow).')
 
   return (grid,size,origin)
@@ -385,12 +386,13 @@ def node_coord0_gridSizeOrigin(coord0,ordered=True):
   if (grid+1).prod() != len(coord0):
     raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid))
 
-  if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1)) and \
-          _np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],grid[1]+1)) and \
-          _np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],grid[2]+1))):
+  atol = _np.max(size)*5e-2
+  if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \
+          _np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],grid[1]+1),atol=atol) and \
+          _np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],grid[2]+1),atol=atol)):
     raise ValueError('Regular grid spacing violated.')
 
-  if ordered and not _np.allclose(coord0.reshape(tuple(grid+1)+(3,),order='F'),node_coord0(grid,size,origin)):
+  if ordered and not _np.allclose(coord0.reshape(tuple(grid+1)+(3,),order='F'),node_coord0(grid,size,origin),atol=atol):
     raise ValueError('Input data is not ordered (x fast, z slow).')
 
   return (grid,size,origin)
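The relaxed regularity test above can be exercised in isolation; this standalone sketch mirrors the atol pattern from the diff with illustrative names that are not part of the DAMASK API:

    import numpy as np

    def axis_is_regular(coords, start, end, n, size):
        # accept an axis as regularly spaced if it matches the ideal linspace
        # within 5% of the largest box dimension (the atol chosen in the diff)
        atol = np.max(size)*5e-2
        return np.allclose(coords, np.linspace(start, end, n), atol=atol)

    size   = np.array([1.0, 1.0, 1.0])
    coords = np.linspace(0.05, 0.95, 10) + 1e-3           # slightly perturbed positions
    print(axis_is_regular(coords, 0.05, 0.95, 10, size))  # True: within the 5e-2 tolerance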
@@ -112,8 +112,8 @@ def execute(cmd,
 
   """
   initialPath = os.getcwd()
-  os.chdir(wd)
   myEnv = os.environ if env is None else env
+  os.chdir(wd)
   process = subprocess.Popen(shlex.split(cmd),
                              stdout = subprocess.PIPE,
                              stderr = subprocess.PIPE,
@@ -121,9 +121,9 @@ def execute(cmd,
                              env = myEnv)
   out,error = [i for i in (process.communicate() if streamIn is None
                       else process.communicate(streamIn.read().encode('utf-8')))]
-  os.chdir(initialPath)
   out   = out.decode('utf-8').replace('\x08','')
   error = error.decode('utf-8').replace('\x08','')
+  os.chdir(initialPath)
   if process.returncode != 0:
     raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode))
   return out,error
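For reference, a call pattern that relies on the chdir ordering fixed above; it assumes the helper keeps the parameters visible in the hunk (wd, env, streamIn) and is importable as damask.util, which is not shown in the diff:

    from damask import util

    # run a command in a given working directory; after the change the
    # original cwd is restored only once stdout/stderr have been decoded,
    # and (out, error) are returned as strings
    out, error = util.execute('ls', wd='/tmp')
    print(out)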
@@ -17,10 +17,10 @@ if (PROJECT_NAME STREQUAL "damask-grid")
   file(GLOB grid-sources grid/*.f90)
 
   if(NOT CMAKE_BUILD_TYPE STREQUAL "SYNTAXONLY")
-    add_executable(DAMASK_spectral ${damask-sources} ${grid-sources})
-    install (TARGETS DAMASK_spectral RUNTIME DESTINATION bin)
+    add_executable(DAMASK_grid ${damask-sources} ${grid-sources})
+    install (TARGETS DAMASK_grid RUNTIME DESTINATION bin)
   else()
-    add_library(DAMASK_spectral OBJECT ${damask-sources} ${grid-sources})
+    add_library(DAMASK_grid OBJECT ${damask-sources} ${grid-sources})
     exec_program (mktemp OUTPUT_VARIABLE nothing)
     exec_program (mktemp ARGS -d OUTPUT_VARIABLE black_hole)
     install (PROGRAMS ${nothing} DESTINATION ${black_hole})
@@ -30,7 +30,7 @@ elseif (PROJECT_NAME STREQUAL "damask-mesh")
 
   file(GLOB mesh-sources mesh/*.f90)
 
-  add_executable(DAMASK_FEM ${damask-sources} ${mesh-sources})
-  install (TARGETS DAMASK_FEM RUNTIME DESTINATION bin)
+  add_executable(DAMASK_mesh ${damask-sources} ${mesh-sources})
+  install (TARGETS DAMASK_mesh RUNTIME DESTINATION bin)
 
 endif()
@@ -70,7 +70,7 @@ contains
 !> @brief call (thread safe) all module initializations
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_initAll(el,ip)
 
   integer(pInt), intent(in) :: el, & !< FE el number
                                ip    !< FE integration point number
 
@@ -85,10 +85,10 @@ subroutine CPFEM_initAll(el,ip)
   call rotations_init
   call YAML_types_init
   call HDF5_utilities_init
-  call results_init
+  call results_init(.false.)
   call discretization_marc_init(ip, el)
   call lattice_init
-  call material_init
+  call material_init(.false.)
   call constitutive_init
   call crystallite_init
   call homogenization_init
@@ -134,7 +134,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
   logical, intent(in) :: parallelExecution !< flag indicating parallel computation of requested IPs
   real(pReal), dimension(6), intent(out) :: cauchyStress !< stress as 6 vector
   real(pReal), dimension(6,6), intent(out) :: jacobian !< jacobian as 66 tensor (Consistent tangent dcs/dE)
 
   real(pReal) J_inverse, & ! inverse of Jacobian
               rnd
   real(pReal), dimension (3,3) :: Kirchhoff, & ! Piola-Kirchhoff stress
@@ -142,16 +142,16 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
   real(pReal), dimension (3,3,3,3) :: H_sym, &
                                       H, &
                                       jacobian3333 ! jacobian in Matrix notation
 
   integer(pInt) elCP, & ! crystal plasticity element number
                 i, j, k, l, m, n, ph, homog, mySource
   logical updateJaco ! flag indicating if Jacobian has to be updated
 
   real(pReal), parameter :: ODD_STRESS = 1e15_pReal, &   !< return value for stress in case of ping pong dummy cycle
                             ODD_JACOBIAN = 1e50_pReal    !< return value for jacobian in case of ping pong dummy cycle
 
   elCP = mesh_FEM2DAMASK_elem(elFE)
 
   if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt &
       .and. elCP == debug_e .and. ip == debug_i) then
     write(6,'(/,a)') '#############################################'
@@ -166,16 +166,16 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
     write(6,'(a,/)') '# --- terminallyIll --- #'
     write(6,'(a,/)') '#############################################'; flush (6)
   endif
 
   if (iand(mode, CPFEM_BACKUPJACOBIAN) /= 0_pInt) &
     CPFEM_dcsde_knownGood = CPFEM_dcsde
   if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) &
     CPFEM_dcsde = CPFEM_dcsde_knownGood
 
   !*** age results
   if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) call CPFEM_forward
 
 
   !*** collection of FEM input with returning of randomize odd stress and jacobian
   !* If no parallel execution is required, there is no need to collect FEM input
   if (.not. parallelExecution) then
@@ -186,7 +186,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
     end select chosenThermal1
     materialpoint_F0(1:3,1:3,ip,elCP) = ffn
     materialpoint_F(1:3,1:3,ip,elCP) = ffn1
 
   elseif (iand(mode, CPFEM_COLLECT) /= 0_pInt) then
     call random_number(rnd)
     if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
@@ -201,11 +201,11 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
     materialpoint_F(1:3,1:3,ip,elCP) = ffn1
     CPFEM_calc_done = .false.
   endif
 
 
   !*** calculation of stress and jacobian
   if (iand(mode, CPFEM_CALCRESULTS) /= 0_pInt) then
 
     !*** deformation gradient outdated or any actual deformation gradient differs more than relevantStrain from the stored one
     validCalculation: if (terminallyIll &
                           .or. outdatedFFN1 &
@@ -223,7 +223,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
       if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
       CPFEM_cs(1:6,ip,elCP) = ODD_STRESS * rnd
       CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6)
 
     !*** deformation gradient is not outdated
     else validCalculation
       updateJaco = mod(cycleCounter,iJacoStiffness) == 0
@@ -234,7 +234,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
       if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
         write(6,'(a,i8,1x,i2)') '<< CPFEM >> calculation for elFE ip ',elFE,ip
       call materialpoint_stressAndItsTangent(updateJaco, dt)
 
     !* parallel computation and calulation not yet done
     elseif (.not. CPFEM_calc_done) then
       if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
@@ -243,22 +243,22 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
       call materialpoint_stressAndItsTangent(updateJaco, dt)
       CPFEM_calc_done = .true.
     endif
 
     !* map stress and stiffness (or return odd values if terminally ill)
     terminalIllness: if (terminallyIll) then
 
       call random_number(rnd)
       if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
       CPFEM_cs(1:6,ip,elCP) = ODD_STRESS * rnd
       CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6)
 
     else terminalIllness
 
       ! translate from P to CS
       Kirchhoff = matmul(materialpoint_P(1:3,1:3,ip,elCP), transpose(materialpoint_F(1:3,1:3,ip,elCP)))
       J_inverse = 1.0_pReal / math_det33(materialpoint_F(1:3,1:3,ip,elCP))
       CPFEM_cs(1:6,ip,elCP) = math_sym33to6(J_inverse * Kirchhoff,weighted=.false.)
 
       ! translate from dP/dF to dCS/dE
       H = 0.0_pReal
       do i=1,3; do j=1,3; do k=1,3; do l=1,3; do m=1,3; do n=1,3
@@ -269,15 +269,15 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
                  + 0.5_pReal * ( Kirchhoff(j,l)*math_delta(i,k) + Kirchhoff(i,k)*math_delta(j,l) &
                                + Kirchhoff(j,k)*math_delta(i,l) + Kirchhoff(i,l)*math_delta(j,k))
       enddo; enddo; enddo; enddo; enddo; enddo
 
       forall(i=1:3, j=1:3,k=1:3,l=1:3) &
         H_sym(i,j,k,l) = 0.25_pReal * (H(i,j,k,l) + H(j,i,k,l) + H(i,j,l,k) + H(j,i,l,k))
 
       CPFEM_dcsde(1:6,1:6,ip,elCP) = math_sym3333to66(J_inverse * H_sym,weighted=.false.)
 
     endif terminalIllness
   endif validCalculation
 
   !* report stress and stiffness
   if ((iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
        .and. ((debug_e == elCP .and. debug_i == ip) &
@@ -288,17 +288,17 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
       '<< CPFEM >> Jacobian/GPa at elFE ip ', elFE, ip, transpose(CPFEM_dcsdE(1:6,1:6,ip,elCP))*1.0e-9_pReal
     flush(6)
   endif
 
   endif
 
   !*** warn if stiffness close to zero
   if (all(abs(CPFEM_dcsdE(1:6,1:6,ip,elCP)) < 1e-10_pReal)) call IO_warning(601,elCP,ip)
 
   !*** copy to output if using commercial FEM solver
   cauchyStress = CPFEM_cs (1:6, ip,elCP)
   jacobian = CPFEM_dcsdE(1:6,1:6,ip,elCP)
 
 
   !*** remember extreme values of stress ...
   cauchyStress33 = math_6toSym33(CPFEM_cs(1:6,ip,elCP),weighted=.false.)
   if (maxval(cauchyStress33) > debug_stressMax) then
@@ -22,7 +22,7 @@ module CPFEM2
   use homogenization
   use constitutive
   use crystallite
-#if defined(FEM)
+#if defined(Mesh)
   use FEM_quadrature
   use discretization_mesh
 #elif defined(Grid)
@@ -43,7 +43,7 @@ subroutine CPFEM_initAll
   call DAMASK_interface_init ! Spectral and FEM interface to commandline
   call prec_init
   call IO_init
-#ifdef FEM
+#ifdef Mesh
   call FEM_quadrature_init
 #endif
   call numerics_init
@@ -54,13 +54,13 @@ subroutine CPFEM_initAll
   call YAML_types_init
   call lattice_init
   call HDF5_utilities_init
-  call results_init
-#if defined(FEM)
-  call discretization_mesh_init
+  call results_init(restart=interface_restartInc>0)
+#if defined(Mesh)
+  call discretization_mesh_init(restart=interface_restartInc>0)
 #elif defined(Grid)
-  call discretization_grid_init
+  call discretization_grid_init(restart=interface_restartInc>0)
 #endif
-  call material_init
+  call material_init(restart=interface_restartInc>0)
   call constitutive_init
   call crystallite_init
   call homogenization_init
@@ -206,7 +206,7 @@ subroutine DAMASK_interface_init
   write(6,'(a,/)')' Valid command line switches:'
   write(6,'(a)') ' --geom (-g, --geometry)'
   write(6,'(a)') ' --load (-l, --loadcase)'
-  write(6,'(a)') ' --workingdir (-w, --wd, --workingdirectory, -d, --directory)'
+  write(6,'(a)') ' --workingdir (-w, --wd, --workingdirectory)'
   write(6,'(a)') ' --restart (-r, --rs)'
   write(6,'(a)') ' --help (-h)'
   write(6,'(/,a)')' -----------------------------------------------------------------------'
@@ -223,12 +223,12 @@ subroutine DAMASK_interface_init
   write(6,'(a)') ' directory.'
   write(6,'(a)') ' For further configuration place "numerics.config"'
   write(6,'(a)')' and "debug.config" in that directory.'
-  write(6,'(/,a)')' --restart XX'
-  write(6,'(a)') ' Reads in increment XX and continues with calculating'
-  write(6,'(a)') ' increment XX+1 based on this.'
+  write(6,'(/,a)')' --restart N'
+  write(6,'(a)') ' Reads in increment N and continues with calculating'
+  write(6,'(a)') ' increment N+1 based on this.'
   write(6,'(a)') ' Appends to existing results file'
-  write(6,'(a)') ' "NameOfGeom_NameOfLoadFile".'
-  write(6,'(a)') ' Works only if the restart information for increment XX'
+  write(6,'(a)') ' "NameOfGeom_NameOfLoadFile.hdf5".'
+  write(6,'(a)') ' Works only if the restart information for increment N'
   write(6,'(a)') ' is available in the working directory.'
   write(6,'(/,a)')' -----------------------------------------------------------------------'
   write(6,'(a)') ' Help:'
@@ -239,7 +239,7 @@ subroutine DAMASK_interface_init
         call get_command_argument(i+1,loadCaseArg)
       case ('-g', '--geom', '--geometry')
         call get_command_argument(i+1,geometryArg)
-      case ('-w', '-d', '--wd', '--directory', '--workingdir', '--workingdirectory')
+      case ('-w', '--wd', '--workingdir', '--workingdirectory')
        call get_command_argument(i+1,workingDirArg)
       case ('-r', '--rs', '--restart')
        call get_command_argument(i+1,arg)
@@ -81,7 +81,7 @@ contains
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief open libary and do sanity checks
+!> @brief initialize HDF5 libary and do sanity checks
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_utilities_init
 
@@ -160,7 +160,7 @@ module subroutine plastic_phenopowerlaw_init
                                              config%getFloats('interaction_twintwin'), &
                                              config%getString('lattice_structure'))
     prm%gamma_twin_char = lattice_characteristicShear_twin(N_tw,config%getString('lattice_structure'),&
-                                                           config%getFloat('c/a'))
+                                                           config%getFloat('c/a',defaultVal=0.0_pReal))
 
     xi_twin_0 = config%getFloats('tau0_twin',requiredSize=size(N_tw))
 
@@ -6,7 +6,7 @@
 !> @details doing cutbacking, forwarding in case of restart, reporting statistics, writing
 !> results
 !--------------------------------------------------------------------------------------------------
-program DAMASK_spectral
+program DAMASK_grid
 #include <petsc/finclude/petscsys.h>
   use PETScsys
   use prec
@@ -495,4 +495,4 @@ program DAMASK_spectral
 
   call quit(0) ! no complains ;)
 
-end program DAMASK_spectral
+end program DAMASK_grid
@@ -42,7 +42,9 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief reads the geometry file to obtain information on discretization
 !--------------------------------------------------------------------------------------------------
-subroutine discretization_grid_init
+subroutine discretization_grid_init(restart)
+
+  logical, intent(in) :: restart
 
   include 'fftw3-mpi.f03'
   real(pReal), dimension(3) :: &
@@ -100,13 +102,14 @@ subroutine discretization_grid_init(restart)
 
 !--------------------------------------------------------------------------------------------------
 ! store geometry information for post processing
-  call results_openJobFile
-  call results_closeGroup(results_addGroup('geometry'))
-  call results_addAttribute('grid', grid, 'geometry')
-  call results_addAttribute('size', geomSize,'geometry')
-  call results_addAttribute('origin',origin, 'geometry')
-  call results_closeJobFile
-
+  if(.not. restart) then
+    call results_openJobFile
+    call results_closeGroup(results_addGroup('geometry'))
+    call results_addAttribute('grid', grid, 'geometry')
+    call results_addAttribute('size', geomSize,'geometry')
+    call results_addAttribute('origin',origin, 'geometry')
+    call results_closeJobFile
+  endif
 
 !--------------------------------------------------------------------------------------------------
 ! geometry information required by the nonlocal CP model
   call geometry_plastic_nonlocal_setIPvolume(reshape([(product(mySize/real(myGrid,pReal)),j=1,product(myGrid))], &
@@ -2304,7 +2304,6 @@ subroutine unitTest
 
   system = reshape([1.0_pReal+r(1),0.0_pReal,0.0_pReal, 0.0_pReal,1.0_pReal+r(2),0.0_pReal],[6,1])
   CoSy = buildCoordinateSystem([1],[1],system,'fcc',0.0_pReal)
-
   if(any(dNeq(CoSy(1:3,1:3,1),math_I3))) &
     call IO_error(0)
 
@@ -207,7 +207,9 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief parses material configuration file
 !--------------------------------------------------------------------------------------------------
-subroutine material_init
+subroutine material_init(restart)
+
+  logical, intent(in) :: restart
 
   integer :: i,e,m,c,h, myDebug, myPhase, myHomog, myMicro
   integer, dimension(:), allocatable :: &
@@ -339,11 +341,12 @@ subroutine material_init
   call config_deallocate('material.config/microstructure')
   call config_deallocate('material.config/texture')
 
-  call results_openJobFile
-  call results_mapping_constituent(material_phaseAt,material_phaseMemberAt,config_name_phase)
-  call results_mapping_materialpoint(material_homogenizationAt,material_homogenizationMemberAt,config_name_homogenization)
-  call results_closeJobFile
-
+  if (.not. restart) then
+    call results_openJobFile
+    call results_mapping_constituent(material_phaseAt,material_phaseMemberAt,config_name_phase)
+    call results_mapping_materialpoint(material_homogenizationAt,material_homogenizationMemberAt,config_name_homogenization)
+    call results_closeJobFile
+  endif
 
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 ! BEGIN DEPRECATED
@@ -6,7 +6,7 @@
 !> @details doing cutbacking, reporting statistics, writing
 !> results
 !--------------------------------------------------------------------------------------------------
-program DAMASK_FEM
+program DAMASK_mesh
 #include <petsc/finclude/petscsys.h>
   use PetscDM
   use prec
@@ -367,4 +367,4 @@ program DAMASK_FEM
 
   call quit(0) ! no complains ;)
 
-end program DAMASK_FEM
+end program DAMASK_mesh
@@ -63,7 +63,9 @@ contains
 !> @brief initializes the mesh by calling all necessary private routines the mesh module
 !! Order and routines strongly depend on type of solver
 !--------------------------------------------------------------------------------------------------
-subroutine discretization_mesh_init
+subroutine discretization_mesh_init(restart)
+
+  logical, intent(in) :: restart
 
   integer, dimension(1), parameter:: FE_geomtype = [1] !< geometry type of particular element type
   integer, dimension(1) :: FE_Nips                     !< number of IPs in a specific type of element
@@ -63,8 +63,8 @@ module numerics
 #endif
 
 !--------------------------------------------------------------------------------------------------
-! FEM parameters:
-#ifdef FEM
+! Mesh parameters:
+#ifdef Mesh
   integer, protected, public :: &
     integrationOrder = 2, & !< order of quadrature rule required
    structOrder = 2          !< order of displacement shape functions
@@ -200,8 +200,8 @@ subroutine numerics_init
 #endif
 
 !--------------------------------------------------------------------------------------------------
-! FEM parameters
-#ifdef FEM
+! Mesh parameters
+#ifdef Mesh
       case ('integrationorder')
         integrationorder = IO_intValue(line,chunkPos,2)
       case ('structorder')
@@ -267,7 +267,7 @@ subroutine numerics_init
 
 !--------------------------------------------------------------------------------------------------
 ! spectral parameters
-#ifdef FEM
+#ifdef Mesh
   write(6,'(a24,1x,i8)') ' integrationOrder: ',integrationOrder
   write(6,'(a24,1x,i8)') ' structOrder: ',structOrder
   write(6,'(a24,1x,L8)') ' B-Bar stabilisation: ',BBarStabilisation
src/results.f90 (174 changed lines)
@@ -15,31 +15,31 @@ module results
   implicit none
   private
 
   integer(HID_T) :: resultsFile
 
   interface results_writeDataset
 
     module procedure results_writeTensorDataset_real
     module procedure results_writeVectorDataset_real
     module procedure results_writeScalarDataset_real
 
     module procedure results_writeTensorDataset_int
     module procedure results_writeVectorDataset_int
 
     module procedure results_writeScalarDataset_rotation
 
   end interface results_writeDataset
 
   interface results_addAttribute
 
     module procedure results_addAttribute_real
     module procedure results_addAttribute_int
     module procedure results_addAttribute_str
 
     module procedure results_addAttribute_int_array
     module procedure results_addAttribute_real_array
 
   end interface results_addAttribute
 
   public :: &
@@ -59,7 +59,9 @@ module results
     results_mapping_materialpoint
 contains
 
-subroutine results_init
+subroutine results_init(restart)
+
+  logical, intent(in) :: restart
 
   character(len=pStringLen) :: commandLine
 
@@ -68,15 +70,17 @@ subroutine results_init
   write(6,'(/,a)') ' Diehl et al., Integrating Materials and Manufacturing Innovation 6(1):83–91, 2017'
   write(6,'(a)')   ' https://doi.org/10.1007/s40192-017-0084-5'
 
-  resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
-  call results_addAttribute('DADF5_version_major',0)
-  call results_addAttribute('DADF5_version_minor',6)
-  call results_addAttribute('DAMASK_version',DAMASKVERSION)
-  call get_command(commandLine)
-  call results_addAttribute('call',trim(commandLine))
-  call results_closeGroup(results_addGroup('mapping'))
-  call results_closeGroup(results_addGroup('mapping/cellResults'))
-  call results_closeJobFile
+  if(.not. restart) then
+    resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
+    call results_addAttribute('DADF5_version_major',0)
+    call results_addAttribute('DADF5_version_minor',6)
+    call results_addAttribute('DAMASK_version',DAMASKVERSION)
+    call get_command(commandLine)
+    call results_addAttribute('call',trim(commandLine))
+    call results_closeGroup(results_addGroup('mapping'))
+    call results_closeGroup(results_addGroup('mapping/cellResults'))
+    call results_closeJobFile
+  endif
 
 end subroutine results_init
 
@@ -87,7 +91,7 @@ end subroutine results_init
 subroutine results_openJobFile
 
   resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','a',.true.)
 
 end subroutine results_openJobFile
 
@@ -105,7 +109,7 @@ end subroutine results_closeJobFile
 !> @brief creates the group of increment and adds time as attribute to the file
 !--------------------------------------------------------------------------------------------------
 subroutine results_addIncrement(inc,time)
 
   integer, intent(in) :: inc
   real(pReal), intent(in) :: time
   character(len=pStringLen) :: incChar
@@ -137,7 +141,7 @@ end subroutine results_finalizeIncrement
 integer(HID_T) function results_openGroup(groupName)
 
   character(len=*), intent(in) :: groupName
 
   results_openGroup = HDF5_openGroup(resultsFile,groupName)
 
 end function results_openGroup
@@ -149,7 +153,7 @@ end function results_openGroup
 integer(HID_T) function results_addGroup(groupName)
 
   character(len=*), intent(in) :: groupName
 
   results_addGroup = HDF5_addGroup(resultsFile,groupName)
 
 end function results_addGroup
@@ -161,7 +165,7 @@ end function results_addGroup
 subroutine results_closeGroup(group_id)
 
   integer(HID_T), intent(in) :: group_id
 
   call HDF5_closeGroup(group_id)
 
 end subroutine results_closeGroup
@@ -290,17 +294,17 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIuni
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   real(pReal), intent(inout), dimension(:) :: dataset
 
   integer(HID_T) :: groupHandle
 
   groupHandle = results_openGroup(group)
 
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
 
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -319,17 +323,17 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIuni
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   real(pReal), intent(inout), dimension(:,:) :: dataset
 
   integer(HID_T) :: groupHandle
 
   groupHandle = results_openGroup(group)
 
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
 
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -350,13 +354,13 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
   character(len=*), intent(in), optional :: SIunit
   logical, intent(in), optional :: transposed
   real(pReal), intent(in), dimension(:,:,:) :: dataset
 
   integer :: i
   logical :: transposed_
   integer(HID_T) :: groupHandle
   real(pReal), dimension(:,:,:), allocatable :: dataset_transposed
 
 
   if(present(transposed)) then
     transposed_ = transposed
   else
@@ -374,13 +378,13 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
   endif
 
   groupHandle = results_openGroup(group)
 
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset_transposed,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset_transposed,label,.false.)
 #endif
 
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -400,17 +404,17 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   integer, intent(inout), dimension(:,:) :: dataset
 
   integer(HID_T) :: groupHandle
 
   groupHandle = results_openGroup(group)
 
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
 
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -430,17 +434,17 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   integer, intent(inout), dimension(:,:,:) :: dataset
 
   integer(HID_T) :: groupHandle
 
   groupHandle = results_openGroup(group)
 
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
 
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -460,17 +464,17 @@ subroutine results_writeScalarDataset_rotation(group,dataset,label,description,l
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: lattice_structure
   type(rotation), intent(inout), dimension(:) :: dataset
 
   integer(HID_T) :: groupHandle
 
   groupHandle = results_openGroup(group)
 
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
 
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(lattice_structure)) &
@@ -486,11 +490,11 @@ end subroutine results_writeScalarDataset_rotation
 !> @brief adds the unique mapping from spatial position and constituent ID to results
 !--------------------------------------------------------------------------------------------------
 subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
 
   integer, dimension(:,:), intent(in) :: phaseAt                       !< phase section at (constituent,element)
   integer, dimension(:,:,:), intent(in) :: memberAtLocal               !< phase member at (constituent,IP,element)
   character(len=pStringLen), dimension(:), intent(in) :: label         !< label of each phase section
 
   integer, dimension(size(memberAtLocal,1),size(memberAtLocal,2),size(memberAtLocal,3)) :: &
     phaseAtMaterialpoint, &
     memberAtGlobal
@@ -500,7 +504,7 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
     myShape, &    !< shape of the dataset (this process)
     myOffset, &
     totalShape    !< shape of the dataset (all processes)
 
   integer(HID_T) :: &
     loc_id, &     !< identifier of group in file
     dtype_id, &   !< identifier of compound data type
@@ -512,30 +516,30 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
     plist_id, &
     dt_id
 
   integer(SIZE_T) :: type_size_string, type_size_int
   integer :: ierr, i
 
 !---------------------------------------------------------------------------------------------------
 ! compound type: name of phase section + position/index within results array
   call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, ierr)
   call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), ierr)
   call h5tget_size_f(dt_id, type_size_string, ierr)
 
   call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, ierr)
 
   call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, ierr)
   call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,ierr)
   call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, ierr)
 
 !--------------------------------------------------------------------------------------------------
 ! create memory types for each component of the compound type
   call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, ierr)
   call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, ierr)
 
   call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, ierr)
   call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, ierr)
 
   call h5tclose_f(dt_id, ierr)
 
 !--------------------------------------------------------------------------------------------------
@@ -553,10 +557,10 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
 #ifdef PETSc
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5pset_dxpl_mpio_f')
 
   call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)           ! get output at each process
   if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_constituent: MPI_allreduce/writeSize')
 
   call MPI_allreduce(MPI_IN_PLACE,memberOffset,size(memberOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
   if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_constituent: MPI_allreduce/memberOffset')
 #endif
@@ -564,15 +568,15 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
   myShape    = int([size(phaseAt,1),writeSize(worldrank)], HSIZE_T)
   myOffset   = int([0,sum(writeSize(0:worldrank-1))], HSIZE_T)
   totalShape = int([size(phaseAt,1),sum(writeSize)], HSIZE_T)
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in memory (local shape = hyperslab) and in file (global shape)
   call h5screate_simple_f(2,myShape,memspace_id,ierr,myShape)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5screate_simple_f/memspace_id')
 
   call h5screate_simple_f(2,totalShape,filespace_id,ierr,totalShape)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5screate_simple_f/filespace_id')
 
   call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5sselect_hyperslab_f')
 
@@ -581,7 +585,7 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
   do i = 1, size(phaseAtMaterialpoint,2)
     phaseAtMaterialpoint(:,i,:) = phaseAt
   enddo
 
 !---------------------------------------------------------------------------------------------------
 ! renumber member from my process to all processes
   do i = 1, size(label)
@@ -591,11 +595,11 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
 !--------------------------------------------------------------------------------------------------
 ! write the components of the compound type individually
   call h5pset_preserve_f(plist_id, .TRUE., ierr)
 
   loc_id = results_openGroup('/mapping/cellResults')
   call h5dcreate_f(loc_id, 'constituent', dtype_id, filespace_id, dset_id, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dcreate_f')
 
   call h5dwrite_f(dset_id, name_id, reshape(label(pack(phaseAtMaterialpoint,.true.)),myShape), &
                   myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dwrite_f/name_id')
@@ -621,11 +625,11 @@ end subroutine results_mapping_constituent
 !> @brief adds the unique mapping from spatial position and constituent ID to results
 !--------------------------------------------------------------------------------------------------
 subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
 
   integer, dimension(:), intent(in) :: homogenizationAt                !< homogenization section at (element)
   integer, dimension(:,:), intent(in) :: memberAtLocal                 !< homogenization member at (IP,element)
   character(len=pStringLen), dimension(:), intent(in) :: label         !< label of each homogenization section
 
   integer, dimension(size(memberAtLocal,1),size(memberAtLocal,2)) :: &
     homogenizationAtMaterialpoint, &
     memberAtGlobal
@@ -635,7 +639,7 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
     myShape, &    !< shape of the dataset (this process)
     myOffset, &
     totalShape    !< shape of the dataset (all processes)
 
   integer(HID_T) :: &
     loc_id, &     !< identifier of group in file
     dtype_id, &   !< identifier of compound data type
@@ -647,30 +651,30 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
     plist_id, &
     dt_id
 
   integer(SIZE_T) :: type_size_string, type_size_int
   integer :: ierr, i
 
 !---------------------------------------------------------------------------------------------------
 ! compound type: name of phase section + position/index within results array
   call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, ierr)
   call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), ierr)
   call h5tget_size_f(dt_id, type_size_string, ierr)
 
   call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, ierr)
 
   call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, ierr)
   call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,ierr)
   call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, ierr)
 
 !--------------------------------------------------------------------------------------------------
 ! create memory types for each component of the compound type
   call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, ierr)
   call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, ierr)
 
   call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, ierr)
   call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, ierr)
 
   call h5tclose_f(dt_id, ierr)
 
 !--------------------------------------------------------------------------------------------------
@@ -688,10 +692,10 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
 #ifdef PETSc
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5pset_dxpl_mpio_f')
 
   call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)           ! get output at each process
   if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_materialpoint: MPI_allreduce/writeSize')
 
   call MPI_allreduce(MPI_IN_PLACE,memberOffset,size(memberOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
   if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_materialpoint: MPI_allreduce/memberOffset')
 #endif
@@ -699,15 +703,15 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
   myShape    = int([writeSize(worldrank)], HSIZE_T)
   myOffset   = int([sum(writeSize(0:worldrank-1))], HSIZE_T)
   totalShape = int([sum(writeSize)], HSIZE_T)
 
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in memory (local shape = hyperslab) and in file (global shape)
   call h5screate_simple_f(1,myShape,memspace_id,ierr,myShape)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5screate_simple_f/memspace_id')
 
   call h5screate_simple_f(1,totalShape,filespace_id,ierr,totalShape)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5screate_simple_f/filespace_id')
 
   call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5sselect_hyperslab_f')
 
@@ -716,7 +720,7 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
   do i = 1, size(homogenizationAtMaterialpoint,1)
     homogenizationAtMaterialpoint(i,:) = homogenizationAt
   enddo
 
 !---------------------------------------------------------------------------------------------------
 ! renumber member from my process to all processes
   do i = 1, size(label)
@@ -726,11 +730,11 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
 !--------------------------------------------------------------------------------------------------
 ! write the components of the compound type individually
   call h5pset_preserve_f(plist_id, .TRUE., ierr)
 
   loc_id = results_openGroup('/mapping/cellResults')
   call h5dcreate_f(loc_id, 'materialpoint', dtype_id, filespace_id, dset_id, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5dcreate_f')
 
   call h5dwrite_f(dset_id, name_id, reshape(label(pack(homogenizationAtMaterialpoint,.true.)),myShape), &
                   myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5dwrite_f/name_id')