Merge branch 'Marc-No-nonlocal' into 'development'

Marc no nonlocal

See merge request damask/DAMASK!98
Franz Roters 2019-10-15 10:35:37 +02:00
commit 91691744c6
14 changed files with 520 additions and 719 deletions

View File

@@ -75,7 +75,7 @@ variables:
   MSC: "$MSC2019"
   IntelMarc: "$IntelCompiler17_8"
   IntelAbaqus: "$IntelCompiler16_4"
-  HDF5Marc: "HDF5/1.10.4/Intel-17.8"
+  HDF5Marc: "HDF5/1.10.5/Intel-17.8"
   # ++++++++++++ Documentation ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
   Doxygen1_8_15: "Documentation/Doxygen/1.8.15"
   # ------------ Defaults ----------------------------------------------

View File

@@ -63,6 +63,7 @@ else
   INTEGER_PATH=/$MARC_INTEGER_SIZE
 fi
 
+FCOMP=ifort
 INTELPATH="/opt/intel/compilers_and_libraries_2017/linux"
 # find the root directory of the compiler installation:
@@ -103,9 +104,6 @@ if test "$DAMASK_HDF5" = "ON";then
   H5FC="$(h5fc -shlib -show)"
   HDF5_LIB=${H5FC//ifort/}
   FCOMP="$H5FC -DDAMASK_HDF5"
-  echo $FCOMP
-else
-  FCOMP=ifort
 fi
 
 # AEM
@@ -531,7 +529,7 @@ else
   FORT_OPT=" $FORT_OPT -save -zero"
 fi
 if test "$MARCHDF" = "HDF"; then
-  FORT_OPT="$FORT_OPT -DMARCHDF=$MARCHDF $HDF_INCLUDE"
+  FORT_OPT="$FORT_OPT -DMARCHDF=$MARCHDF"
 fi
 
 FORTLOW="$FCOMP $FORT_OPT $PROFILE -O0 $I8FFLAGS -I$MARC_SOURCE/common \
@@ -757,7 +755,7 @@ SECLIBS="-L$MARC_LIB -llapi"
 SOLVERLIBS="${BCSSOLVERLIBS} ${VKISOLVERLIBS} ${CASISOLVERLIBS} ${MF2SOLVERLIBS} \
             $MKLLIB -L$MARC_MKL -liomp5 \
-            $MARC_LIB/blas_src.a ${ACSI_LIB}/ACSI_MarcLib.a $KDTREE2_LIB/kdtree2.a $HDF_LIBS $HDF_LIB"
+            $MARC_LIB/blas_src.a ${ACSI_LIB}/ACSI_MarcLib.a $KDTREE2_LIB/kdtree2.a $HDF5_LIB"
 
 SOLVERLIBS_DLL=${SOLVERLIBS}
 
 if test "$AEM_DLL" -eq 1

View File

@@ -3,6 +3,7 @@
 import os
 import argparse
 
+import h5py
 import numpy as np
 import vtk
 from vtk.util import numpy_support
@@ -41,20 +42,29 @@ for filename in options.filenames:
   results = damask.DADF5(filename)
 
   if results.structured:                                                    # for grid solvers use rectilinear grid
-    rGrid = vtk.vtkRectilinearGrid()
+    grid = vtk.vtkRectilinearGrid()
     coordArray = [vtk.vtkDoubleArray(),
                   vtk.vtkDoubleArray(),
                   vtk.vtkDoubleArray(),
                  ]
 
-    rGrid.SetDimensions(*(results.grid+1))
+    grid.SetDimensions(*(results.grid+1))
     for dim in [0,1,2]:
       for c in np.linspace(0,results.size[dim],1+results.grid[dim]):
         coordArray[dim].InsertNextValue(c)
 
-    rGrid.SetXCoordinates(coordArray[0])
-    rGrid.SetYCoordinates(coordArray[1])
-    rGrid.SetZCoordinates(coordArray[2])
+    grid.SetXCoordinates(coordArray[0])
+    grid.SetYCoordinates(coordArray[1])
+    grid.SetZCoordinates(coordArray[2])
+  else:
+    nodes = vtk.vtkPoints()
+    with h5py.File(filename) as f:
+      nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True))
+      grid = vtk.vtkUnstructuredGrid()
+      grid.SetPoints(nodes)
+      grid.Allocate(f['/geometry/T_c'].shape[0])
+      for i in f['/geometry/T_c']:
+        grid.InsertNextCell(vtk.VTK_HEXAHEDRON,8,i-1)
 
   for i,inc in enumerate(results.iter_visible('increments')):
@@ -75,7 +85,7 @@ for filename in options.filenames:
             shape = [array.shape[0],np.product(array.shape[1:])]
             vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
             vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
-            rGrid.GetCellData().AddArray(vtk_data[-1])
+            grid.GetCellData().AddArray(vtk_data[-1])
        else:
          x = results.get_dataset_location(label)
          if len(x) == 0:
@@ -84,7 +94,7 @@ for filename in options.filenames:
          shape = [array.shape[0],np.product(array.shape[1:])]
          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
          vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
-          rGrid.GetCellData().AddArray(vtk_data[-1])
+          grid.GetCellData().AddArray(vtk_data[-1])
 
    results.set_visible('constituents', False)
    results.set_visible('materialpoints',True)
@@ -99,7 +109,7 @@ for filename in options.filenames:
            shape = [array.shape[0],np.product(array.shape[1:])]
            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
-            rGrid.GetCellData().AddArray(vtk_data[-1])
+            grid.GetCellData().AddArray(vtk_data[-1])
        else:
          x = results.get_dataset_location(label)
          if len(x) == 0:
@@ -108,10 +118,10 @@ for filename in options.filenames:
          shape = [array.shape[0],np.product(array.shape[1:])]
          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
          vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
-          rGrid.GetCellData().AddArray(vtk_data[-1])
+          grid.GetCellData().AddArray(vtk_data[-1])
 
-    if results.structured:
-      writer = vtk.vtkXMLRectilinearGridWriter()
+    writer = vtk.vtkXMLRectilinearGridWriter() if results.structured else \
+             vtk.vtkXMLUnstructuredGridWriter()
 
    dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
@@ -122,7 +132,6 @@ for filename in options.filenames:
    writer.SetCompressorTypeToZLib()
    writer.SetDataModeToBinary()
    writer.SetFileName(os.path.join(dirname,file_out))
-    if results.structured:
-      writer.SetInputData(rGrid)
+    writer.SetInputData(grid)
 
    writer.Write()
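
The new unstructured branch follows the usual VTK recipe for hexahedral meshes: the nodal positions in '/geometry/x_n' become a vtkPoints object and every row of the 1-based connectivity table '/geometry/T_c' becomes one VTK_HEXAHEDRON cell. A minimal self-contained sketch of that recipe, using a single made-up unit-cube element instead of data read from a DADF5 file:

import numpy as np
import vtk
from vtk.util import numpy_support

# stand-ins for f['/geometry/x_n'] (node positions) and f['/geometry/T_c'] (1-based connectivity)
x_n = np.array([[0.,0.,0.],[1.,0.,0.],[1.,1.,0.],[0.,1.,0.],
                [0.,0.,1.],[1.,0.,1.],[1.,1.,1.],[0.,1.,1.]])
T_c = np.array([[1,2,3,4,5,6,7,8]])

nodes = vtk.vtkPoints()
nodes.SetData(numpy_support.numpy_to_vtk(x_n,deep=True))             # expects an Nx3 array

grid = vtk.vtkUnstructuredGrid()
grid.SetPoints(nodes)
grid.Allocate(T_c.shape[0])
for cell in T_c:
  grid.InsertNextCell(vtk.VTK_HEXAHEDRON,8,cell-1)                   # VTK point ids are 0-based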

View File

@@ -0,0 +1,121 @@
+#!/usr/bin/env python3
+
+import os
+import argparse
+
+import numpy as np
+import vtk
+from vtk.util import numpy_support
+
+import damask
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID   = ' '.join([scriptName,damask.version])
+
+# --------------------------------------------------------------------
+#                                MAIN
+# --------------------------------------------------------------------
+parser = argparse.ArgumentParser()
+
+#ToDo:  We need to decide on a way of handling arguments of variable length
+#https://stackoverflow.com/questions/15459997/passing-integer-lists-to-python
+
+#parser.add_argument('--version', action='version', version='%(prog)s {}'.format(scriptID))
+parser.add_argument('filenames', nargs='+',
+                    help='DADF5 files')
+parser.add_argument('-d','--dir', dest='dir',default='postProc',metavar='string',
+                    help='name of subdirectory relative to the location of the DADF5 file to hold output')
+parser.add_argument('--mat', nargs='+',
+                    help='labels for materialpoint',dest='mat')
+parser.add_argument('--con', nargs='+',
+                    help='labels for constituent',dest='con')
+
+options = parser.parse_args()
+
+if options.mat is None: options.mat=[]
+if options.con is None: options.con=[]
+
+# --- loop over input files ------------------------------------------------------------------------
+
+for filename in options.filenames:
+  results = damask.DADF5(filename)
+
+  Points   = vtk.vtkPoints()
+  Vertices = vtk.vtkCellArray()
+  for c in results.cell_coordinates():
+    pointID = Points.InsertNextPoint(c)
+    Vertices.InsertNextCell(1)
+    Vertices.InsertCellPoint(pointID)
+
+  Polydata = vtk.vtkPolyData()
+  Polydata.SetPoints(Points)
+  Polydata.SetVerts(Vertices)
+  Polydata.Modified()
+
+  for i,inc in enumerate(results.iter_visible('increments')):
+    print('Output step {}/{}'.format(i+1,len(results.increments)))
+    vtk_data = []
+
+    results.set_visible('materialpoints',False)
+    results.set_visible('constituents', True)
+    for label in options.con:
+      for p in results.iter_visible('con_physics'):
+        if p != 'generic':
+          for c in results.iter_visible('constituents'):
+            x = results.get_dataset_location(label)
+            if len(x) == 0:
+              continue
+            array = results.read_dataset(x,0)
+            shape = [array.shape[0],np.product(array.shape[1:])]
+            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
+            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
+            Polydata.GetCellData().AddArray(vtk_data[-1])
+        else:
+          x = results.get_dataset_location(label)
+          if len(x) == 0:
+            continue
+          array = results.read_dataset(x,0)
+          shape = [array.shape[0],np.product(array.shape[1:])]
+          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
+          vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
+          Polydata.GetCellData().AddArray(vtk_data[-1])
+
+    results.set_visible('constituents', False)
+    results.set_visible('materialpoints',True)
+    for label in options.mat:
+      for p in results.iter_visible('mat_physics'):
+        if p != 'generic':
+          for m in results.iter_visible('materialpoints'):
+            x = results.get_dataset_location(label)
+            if len(x) == 0:
+              continue
+            array = results.read_dataset(x,0)
+            shape = [array.shape[0],np.product(array.shape[1:])]
+            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
+            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
+            Polydata.GetCellData().AddArray(vtk_data[-1])
+        else:
+          x = results.get_dataset_location(label)
+          if len(x) == 0:
+            continue
+          array = results.read_dataset(x,0)
+          shape = [array.shape[0],np.product(array.shape[1:])]
+          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
+          vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
+          Polydata.GetCellData().AddArray(vtk_data[-1])
+
+    writer = vtk.vtkXMLPolyDataWriter()
+
+    dirname  = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
+    if not os.path.isdir(dirname):
+      os.mkdir(dirname,0o755)
+    file_out = '{}_{}.{}'.format(os.path.splitext(os.path.split(filename)[-1])[0],inc,writer.GetDefaultFileExtension())
+
+    writer.SetCompressorTypeToZLib()
+    writer.SetDataModeToBinary()
+    writer.SetFileName(os.path.join(dirname,file_out))
+    writer.SetInputData(Polydata)
+
+    writer.Write()
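
Both export scripts rely on the same conversion idiom: every per-point dataset, whatever its tensorial shape, is flattened to N x prod(shape) before being wrapped as a VTK array and attached to the cell data. A minimal sketch of that idiom with a made-up 3x3 tensor field (the dataset label is purely illustrative):

import numpy as np
import vtk
from vtk.util import numpy_support

array = np.random.rand(10,3,3)                                       # e.g. one 3x3 tensor per point
shape = [array.shape[0],np.product(array.shape[1:])]                 # here [10, 9]
vtk_data = numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type=vtk.VTK_DOUBLE)
vtk_data.SetName('1_constituent/1_plastic/F')                        # hypothetical dataset location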

View File

@@ -348,6 +348,20 @@ class DADF5():
 
     return dataset
 
+  def cell_coordinates(self):
+    """Initial coordinates of the cell centers."""
+    if self.structured:
+      delta = self.size/self.grid*0.5
+      z, y, x = np.meshgrid(np.linspace(delta[2],self.size[2]-delta[2],self.grid[2]),
+                            np.linspace(delta[1],self.size[1]-delta[1],self.grid[1]),
+                            np.linspace(delta[0],self.size[0]-delta[0],self.grid[0]),
+                           )
+      return np.concatenate((x[:,:,:,None],y[:,:,:,None],z[:,:,:,None]),axis = 3).reshape([np.product(self.grid),3])
+    else:
+      with h5py.File(self.filename,'r') as f:
+        return f['geometry/x_c'][()]
+
+
   def add_Cauchy(self,P='P',F='F'):
     """
     Adds Cauchy stress calculated from 1st Piola-Kirchhoff stress and deformation gradient.
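
In formulas: for a structured grid of edge length L and N cells per direction, cell_coordinates places the cell centers at

x_i = \left(i + \tfrac{1}{2}\right)\,\frac{L}{N}, \qquad i = 0,\dots,N-1,

which is exactly np.linspace(delta, L-delta, N) with delta = L/(2N). The add_Cauchy routine shown in the trailing context relies on the standard relation between the Cauchy stress, the 1st Piola-Kirchhoff stress P, and the deformation gradient F,

\sigma = \frac{1}{\det F}\, P\, F^{\mathsf{T}},

so the two labels named in its signature are all it needs.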

View File

@@ -381,6 +381,7 @@ subroutine CPFEM_results(inc,time)
   call results_addIncrement(inc,time)
   call constitutive_results
   call crystallite_results
+  call homogenization_results
   call results_removeLink('current')                                       ! ToDo: put this into closeJobFile
   call results_closeJobFile
 #endif

View File

@@ -313,8 +313,7 @@ subroutine UMAT(STRESS,STATEV,DDSDDE,SSE,SPD,SCD,&
   call CPFEM_general(computationMode,usePingPong,dfgrd0,dfgrd1,temperature,dtime,noel,npt,stress_h,ddsdde_h)
 
-!     Mandel:              11, 22, 33, SQRT(2)*12, SQRT(2)*23, SQRT(2)*13
-!     straight:            11, 22, 33, 12, 23, 13
+!     DAMASK:              11, 22, 33, 12, 23, 13
 !     ABAQUS explicit:     11, 22, 33, 12, 23, 13
 !     ABAQUS implicit:     11, 22, 33, 12, 13, 23
 !     ABAQUS implicit:     11, 22, 33, 12
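
Background on the replaced comment: in Mandel notation a symmetric stress tensor is stored as the 6-vector

\left(\sigma_{11},\ \sigma_{22},\ \sigma_{33},\ \sqrt{2}\,\sigma_{12},\ \sqrt{2}\,\sigma_{23},\ \sqrt{2}\,\sigma_{13}\right),

whereas, as the updated comment states, the values handed back by CPFEM_general are plain components in the order 11, 22, 33, 12, 23, 13; the old 'Mandel'/'straight' lines are therefore collapsed into a single 'DAMASK' line.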

View File

@@ -265,8 +265,8 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
       call debug_reset()                                                   ! resets debugging
       outdatedFFN1  = .false.
       cycleCounter  = cycleCounter + 1
-      mesh_cellnode = mesh_build_cellnodes()                               ! update cell node coordinates
-      call mesh_build_ipCoordinates()                                      ! update ip coordinates
+      !mesh_cellnode = mesh_build_cellnodes()                              ! update cell node coordinates
+      !call mesh_build_ipCoordinates()                                     ! update ip coordinates
     endif
     if (outdatedByNewInc) then
       computationMode = ior(computationMode,CPFEM_AGERESULTS)             ! calc and age results
@@ -315,9 +315,6 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
   lastLovl = lovl                                                          ! record lovl
   call CPFEM_general(computationMode,usePingPong,ffn,ffn1,t(1),timinc,m(1),nn,stress,ddsdde)
 
-!     Mandel:              11, 22, 33, SQRT(2)*12, SQRT(2)*23, SQRT(2)*13
-!     Marc:                11, 22, 33, 12, 23, 13
-!     Marc:                11, 22, 33, 12
   d = ddsdde(1:ngens,1:ngens)
   s = stress(1:ndi+nshear)

View File

@@ -112,14 +112,10 @@ subroutine HDF5_utilities_init
   call h5open_f(hdferr)
   if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5open_f')
 
-#ifndef Marc4DAMASK
-! This test should ensure that integer size matches. For some reasons, the HDF5 libraries
-! that come with MSC.Marc>=2019 seem to be of 4byte even though it is a 8byte Marc version
   call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr)
   if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5tget_size_f (int)')
   if (int(bit_size(0),SIZE_T)/=typeSize*8) &
     call IO_error(0,ext_msg='Default integer size does not match H5T_NATIVE_INTEGER')
-#endif
 
   call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr)
   if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5tget_size_f (double)')

View File

@@ -221,9 +221,6 @@ subroutine mesh_init
   call theMesh%init(dimplex,integrationOrder,mesh_node0)
   call theMesh%setNelems(mesh_NcpElems)
 
-  theMesh%homogenizationAt = mesh_element(3,:)
-  theMesh%microstructureAt = mesh_element(4,:)
-
   call discretization_init(mesh_element(3,:),mesh_element(4,:),&
                            reshape(mesh_ipCoordinates,[3,mesh_maxNips*mesh_NcpElems]), &
                            mesh_node0)

View File

@@ -1,4 +1,3 @@
 !--------------------------------------------------------------------------------------------------
 !> @author Franz Roters, Max-Planck-Institut für Eisenforschung GmbH
 !> @author Philip Eisenlohr, Max-Planck-Institut für Eisenforschung GmbH
@@ -8,14 +7,13 @@
 !--------------------------------------------------------------------------------------------------
 module mesh_base
 
- use, intrinsic :: iso_c_binding
  use prec
  use element
 
  implicit none
 
 !---------------------------------------------------------------------------------------------------
-!> Properties of a the whole mesh (consisting of one type of elements)
+!> Properties of a whole mesh (consisting of one type of elements)
 !---------------------------------------------------------------------------------------------------
  type, public :: tMesh
 
   type(tElement) :: &
@@ -33,11 +31,7 @@ module mesh_base
     elemType, &
     Ncells, &
     nIPneighbors, &
-    NcellNodes, &
-    maxElemsPerNode
-  integer(pInt), dimension(:), allocatable, public :: &
-    homogenizationAt, &
-    microstructureAt
+    NcellNodes
   integer(pInt), dimension(:,:), allocatable, public :: &
     connectivity
  contains
@@ -47,6 +41,7 @@ module mesh_base
 end type tMesh
 
 contains
 
 subroutine tMesh_base_init(self,meshType,elemType,nodes)
 
  class(tMesh) :: self

File diff suppressed because it is too large

View File

@@ -434,7 +434,7 @@ end function plastic_isotropic_postResults
 !> @brief writes results to HDF5 output file
 !--------------------------------------------------------------------------------------------------
 subroutine plastic_isotropic_results(instance,group)
-#if defined(PETSc) || defined(DAMASKHDF5)
+#if defined(PETSc) || defined(DAMASK_HDF5)
 
   integer, intent(in) :: instance
   character(len=*), intent(in) :: group