This commit is contained in:
Martin Diehl 2019-06-11 09:48:07 +02:00
parent 9dfe71aa06
commit b2409d6998
13 changed files with 1587 additions and 1700 deletions

View File

@ -40,12 +40,6 @@ module DAMASK_interface
setSIGTERM, &
setSIGUSR1, &
setSIGUSR2
private :: &
setWorkingDirectory, &
getGeometryFile, &
getLoadCaseFile, &
rectifyPath, &
makeRelativePath
contains

View File

@ -9,9 +9,7 @@ module HDF5_utilities
use IO
use rotations
use numerics
#if defined(PETSc) || defined(DAMASK_HDF5)
use HDF5
#endif
#ifdef PETSc
use PETSC
#endif
@ -21,7 +19,7 @@ module HDF5_utilities
#if defined(PETSc) || defined(DAMASK_HDF5)
!--------------------------------------------------------------------------------------------------
!> @brief reads pInt or pReal data of defined shape from file ! ToDo: order of arguments wrong
!> @brief reads integer or real data of defined shape from file ! ToDo: order of arguments wrong
!> @details for parallel IO, all dimensions except the last need to match
!--------------------------------------------------------------------------------------------------
interface HDF5_read
@ -44,7 +42,7 @@ module HDF5_utilities
end interface HDF5_read
!--------------------------------------------------------------------------------------------------
!> @brief writes pInt or pReal data of defined shape to file ! ToDo: order of arguments wrong
!> @brief writes integer or real data of defined shape to file ! ToDo: order of arguments wrong
!> @details for parallel IO, all dimensions except the last need to match
!--------------------------------------------------------------------------------------------------
interface HDF5_write
@ -69,7 +67,7 @@ module HDF5_utilities
end interface HDF5_write
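The two generic interfaces above dispatch on the rank and type of the dataset argument. As a minimal usage sketch (not part of this commit, assuming a build where DAMASK_HDF5 or PETSc is defined so the interfaces are compiled; handles and dataset names are made up for illustration):

subroutine demo_read_write(fileHandle)
  use prec,           only: pReal
  use HDF5,           only: HID_T
  use HDF5_utilities
  implicit none
  integer(HID_T), intent(in) :: fileHandle                          ! e.g. obtained from HDF5_openFile
  real(pReal), dimension(3,3,100) :: F                              ! hypothetical per-point tensors
  integer,     dimension(100)     :: phase                          ! hypothetical per-point integers

  F = 0.0_pReal; phase = 1
  call HDF5_write(fileHandle,F,'F')                                 ! resolves to HDF5_write_real3
  call HDF5_write(fileHandle,phase,'phase')                         ! resolves to HDF5_write_int1
  call HDF5_read (fileHandle,F,'F')                                 ! resolves to HDF5_read_real3
end subroutine demo_read_write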
!--------------------------------------------------------------------------------------------------
!> @brief attached attributes of type char,pInt or pReal to a file/dataset/group
!> @brief attaches attributes of type char, integer or real to a file/dataset/group
!--------------------------------------------------------------------------------------------------
interface HDF5_addAttribute
module procedure HDF5_addAttribute_str
@ -114,7 +112,7 @@ subroutine HDF5_utilities_init
call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5tget_size_f (int)')
if (int(bit_size(0),SIZE_T)/=typeSize*8) &
call IO_error(0_pInt,ext_msg='Default integer size does not match H5T_NATIVE_INTEGER')
call IO_error(0,ext_msg='Default integer size does not match H5T_NATIVE_INTEGER')
call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5tget_size_f (double)')
@ -144,30 +142,30 @@ integer(HID_T) function HDF5_openFile(fileName,mode,parallel) ! ToDo: simply "op
endif
call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pcreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5pcreate_f')
#ifdef PETSc
if (present(parallel)) then; if (parallel) then
call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pset_fapl_mpio_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5pset_fapl_mpio_f')
endif; endif
#endif
if (m == 'w') then
call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr,access_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fcreate_f (w)')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5fcreate_f (w)')
elseif(m == 'a') then
call h5fopen_f(fileName,H5F_ACC_RDWR_F,HDF5_openFile,hdferr,access_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (a)')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5fopen_f (a)')
elseif(m == 'r') then
call h5fopen_f(fileName,H5F_ACC_RDONLY_F,HDF5_openFile,hdferr,access_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f (r)')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5fopen_f (r)')
else
call IO_error(1_pInt,ext_msg='HDF5_openFile: h5fopen_f unknown access mode: '//trim(m))
call IO_error(1,ext_msg='HDF5_openFile: h5fopen_f unknown access mode: '//trim(m))
endif
call h5pclose_f(plist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_openFile: h5pclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5pclose_f')
end function HDF5_openFile
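A short sketch of the three access modes handled above: 'w' creates or truncates, 'a' opens read-write, 'r' opens read-only, and anything else ends in IO_error(1). The file name is made up, and the optional parallel flag only has an effect in PETSc builds.

program demo_openFile
  use HDF5,           only: HID_T
  use HDF5_utilities
  implicit none
  integer(HID_T) :: f

  call HDF5_utilities_init                                          ! checks type sizes / sets up HDF5
  f = HDF5_openFile('job.hdf5','w')                                 ! create or truncate
  call HDF5_closeFile(f)
  f = HDF5_openFile('job.hdf5','a',parallel=.true.)                 ! read-write; MPI-IO only with PETSc
  call HDF5_closeFile(f)
  f = HDF5_openFile('job.hdf5','r')                                 ! read-only
  call HDF5_closeFile(f)
end program demo_openFile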
@ -182,7 +180,7 @@ subroutine HDF5_closeFile(fileHandle)
integer :: hdferr
call h5fclose_f(fileHandle,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_closeFile: h5fclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_closeFile: h5fclose_f')
end subroutine HDF5_closeFile
@ -201,19 +199,19 @@ integer(HID_T) function HDF5_addGroup(fileHandle,groupName)
!-------------------------------------------------------------------------------------------------
! creating a property list for data access properties
call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5pcreate_f ('//trim(groupName)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_addGroup: h5pcreate_f ('//trim(groupName)//')')
!-------------------------------------------------------------------------------------------------
! setting I/O mode to collective
#ifdef PETSc
call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_addGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
#endif
!-------------------------------------------------------------------------------------------------
! Create group
call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup, hdferr, OBJECT_NAMELEN_DEFAULT_F,gapl_id = aplist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')')
call h5pclose_f(aplist_id,hdferr)
@ -237,19 +235,19 @@ integer(HID_T) function HDF5_openGroup(fileHandle,groupName)
!-------------------------------------------------------------------------------------------------
! creating a property list for data access properties
call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5pcreate_f ('//trim(groupName)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_openGroup: h5pcreate_f ('//trim(groupName)//')')
!-------------------------------------------------------------------------------------------------
! setting I/O mode to collective
#ifdef PETSc
call h5pget_all_coll_metadata_ops_f(aplist_id, is_collective, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_openGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
#endif
!-------------------------------------------------------------------------------------------------
! opening the group
call h5gopen_f(fileHandle, trim(groupName), HDF5_openGroup, hdferr, gapl_id = aplist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(groupName)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(groupName)//')')
call h5pclose_f(aplist_id,hdferr)
@ -265,7 +263,7 @@ subroutine HDF5_closeGroup(group_id)
integer :: hdferr
call h5gclose_f(group_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_closeGroup: h5gclose_f (el is ID)', el = int(group_id,pInt))
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_closeGroup: h5gclose_f (el is ID)', el = int(group_id))
end subroutine HDF5_closeGroup
@ -288,11 +286,11 @@ logical function HDF5_objectExists(loc_id,path)
endif
call h5lexists_f(loc_id, p, HDF5_objectExists, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_objectExists: h5oexists_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_objectExists: h5oexists_by_name_f')
if(HDF5_objectExists) then
call h5oexists_by_name_f(loc_id, p, HDF5_objectExists, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_objectExists: h5oexists_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_objectExists: h5oexists_by_name_f')
endif
end function HDF5_objectExists
@ -319,27 +317,27 @@ subroutine HDF5_addAttribute_str(loc_id,attrLabel,attrValue,path)
endif
call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5screate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5screate_f')
call h5tcopy_f(H5T_NATIVE_CHARACTER, type_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5tcopy_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5tcopy_f')
call h5tset_size_f(type_id, int(len(trim(attrValue)),HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5tset_size_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5tset_size_f')
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5aexists_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5aexists_by_name_f')
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5adelete_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5adelete_by_name_f')
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),type_id,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5acreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5acreate_f')
call h5awrite_f(attr_id, type_id, trim(attrValue), int([1],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5awrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5awrite_f')
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5aclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5aclose_f')
call h5tclose_f(type_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5tclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5tclose_f')
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_str: h5sclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5sclose_f')
end subroutine HDF5_addAttribute_str
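The specific routines above are reached through the generic HDF5_addAttribute name, which replaces an attribute if it already exists. A minimal sketch (not part of this commit; group name, labels, and values are made up, and the generic interface is assumed to list all specific routines of this file):

subroutine demo_attributes(fileHandle)
  use prec,           only: pReal
  use HDF5,           only: HID_T
  use HDF5_utilities
  implicit none
  integer(HID_T), intent(in) :: fileHandle
  integer(HID_T) :: grp

  grp = HDF5_addGroup(fileHandle,'mapping')
  call HDF5_addAttribute(grp,'created','2019-06-11')                ! string attribute
  call HDF5_addAttribute(grp,'Nconstituents',2)                     ! scalar integer attribute
  call HDF5_addAttribute(grp,'lattice_a',3.52e-10_pReal)            ! scalar real attribute
  call HDF5_addAttribute(grp,'grid',[16,16,16])                     ! integer array attribute
  call HDF5_closeGroup(grp)
end subroutine demo_attributes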
@ -351,7 +349,7 @@ subroutine HDF5_addAttribute_int(loc_id,attrLabel,attrValue,path)
integer(HID_T), intent(in) :: loc_id
character(len=*), intent(in) :: attrLabel
integer(pInt), intent(in) :: attrValue
integer, intent(in) :: attrValue
character(len=*), intent(in), optional :: path
integer :: hdferr
@ -366,21 +364,21 @@ subroutine HDF5_addAttribute_int(loc_id,attrLabel,attrValue,path)
endif
call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5screate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5screate_f')
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5aexists_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5aexists_by_name_f')
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5adelete_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5adelete_by_name_f')
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_INTEGER,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5acreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5acreate_f')
call h5awrite_f(attr_id, H5T_NATIVE_INTEGER, attrValue, int([1],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5awrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5awrite_f')
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5tclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5tclose_f')
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5sclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5sclose_f')
end subroutine HDF5_addAttribute_int
@ -407,21 +405,21 @@ subroutine HDF5_addAttribute_real(loc_id,attrLabel,attrValue,path)
endif
call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5screate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5screate_f')
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5aexists_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5aexists_by_name_f')
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5adelete_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5adelete_by_name_f')
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_DOUBLE,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5acreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5acreate_f')
call h5awrite_f(attr_id, H5T_NATIVE_DOUBLE, attrValue, int([1],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5awrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5awrite_f')
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5tclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5tclose_f')
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5sclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5sclose_f')
end subroutine HDF5_addAttribute_real
@ -433,7 +431,7 @@ subroutine HDF5_addAttribute_int_array(loc_id,attrLabel,attrValue,path)
integer(HID_T), intent(in) :: loc_id
character(len=*), intent(in) :: attrLabel
integer(pInt), intent(in), dimension(:) :: attrValue
integer, intent(in), dimension(:) :: attrValue
character(len=*), intent(in), optional :: path
integer :: hdferr
@ -451,21 +449,21 @@ subroutine HDF5_addAttribute_int_array(loc_id,attrLabel,attrValue,path)
array_size = size(attrValue,kind=HSIZE_T)
call h5screate_simple_f(1, array_size, space_id, hdferr, array_size)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5screate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5screate_f')
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5adelete_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5adelete_by_name_f')
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_INTEGER,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5acreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5acreate_f')
call h5awrite_f(attr_id, H5T_NATIVE_INTEGER, attrValue, array_size, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5awrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5awrite_f')
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5tclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5tclose_f')
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5sclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5sclose_f')
end subroutine HDF5_addAttribute_int_array
@ -495,21 +493,21 @@ subroutine HDF5_addAttribute_real_array(loc_id,attrLabel,attrValue,path)
array_size = size(attrValue,kind=HSIZE_T)
call h5screate_simple_f(1, array_size, space_id, hdferr, array_size)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5screate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real_array: h5screate_f')
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real_array: h5aexists_by_name_f')
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5adelete_by_name_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real_array: h5adelete_by_name_f')
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_DOUBLE,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5acreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real_array: h5acreate_f')
call h5awrite_f(attr_id, H5T_NATIVE_DOUBLE, attrValue, array_size, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5awrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real_array: h5awrite_f')
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5tclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real_array: h5tclose_f')
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5sclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real_array: h5sclose_f')
end subroutine HDF5_addAttribute_real_array
@ -525,19 +523,19 @@ subroutine HDF5_setLink(loc_id,target_name,link_name)
logical :: linkExists
call h5lexists_f(loc_id, link_name,linkExists, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link_name)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link_name)//')')
if (linkExists) then
call h5ldelete_f(loc_id,link_name, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5ldelete_soft_f ('//trim(link_name)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_setLink: h5ldelete_soft_f ('//trim(link_name)//')')
endif
call h5lcreate_soft_f(target_name, loc_id, link_name, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_setLink: h5lcreate_soft_f ('//trim(target_name)//' '//trim(link_name)//')')
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_setLink: h5lcreate_soft_f ('//trim(target_name)//' '//trim(link_name)//')')
end subroutine HDF5_setLink
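HDF5_setLink first deletes an existing link of the same name and then creates a soft link, so it can maintain a stable alias to the most recently written group. A one-call sketch with made-up names:

subroutine demo_setLink(fileHandle)
  use HDF5,           only: HID_T
  use HDF5_utilities
  implicit none
  integer(HID_T), intent(in) :: fileHandle

  call HDF5_setLink(fileHandle,'inc00042','current')                ! 'current' now resolves to 'inc00042'
end subroutine demo_setLink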
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pReal with 1 dimension
!> @brief read dataset of type real with 1 dimension
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_real1(loc_id,dataset,datasetName,parallel)
@ -570,14 +568,14 @@ subroutine HDF5_read_real1(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_real1: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real1: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_real1
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pReal with 2 dimensions
!> @brief read dataset of type real with 2 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_real2(loc_id,dataset,datasetName,parallel)
@ -610,14 +608,14 @@ subroutine HDF5_read_real2(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_real2: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real2: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_real2
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pReal with 2 dimensions
!> @brief read dataset of type real with 3 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_real3(loc_id,dataset,datasetName,parallel)
@ -650,14 +648,14 @@ subroutine HDF5_read_real3(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_real3: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real3: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_real3
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pReal with 4 dimensions
!> @brief read dataset of type real with 4 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_real4(loc_id,dataset,datasetName,parallel)
@ -690,14 +688,14 @@ subroutine HDF5_read_real4(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_real4: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real4: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_real4
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pReal with 5 dimensions
!> @brief read dataset of type real with 5 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_real5(loc_id,dataset,datasetName,parallel)
@ -730,14 +728,14 @@ subroutine HDF5_read_real5(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_real5: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real5: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_real5
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pReal with 6 dimensions
!> @brief read dataset of type real with 6 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_real6(loc_id,dataset,datasetName,parallel)
@ -770,14 +768,14 @@ subroutine HDF5_read_real6(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_real6: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real6: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_real6
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pReal with 7 dimensions
!> @brief read dataset of type real with 7 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_real7(loc_id,dataset,datasetName,parallel)
@ -810,7 +808,7 @@ subroutine HDF5_read_real7(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_real7: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real7: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -818,7 +816,7 @@ end subroutine HDF5_read_real7
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pInt with 1 dimension
!> @brief read dataset of type integer with 1 dimension
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_int1(loc_id,dataset,datasetName,parallel)
@ -851,14 +849,14 @@ subroutine HDF5_read_int1(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_int1: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int1: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_int1
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pInt with 2 dimensions
!> @brief read dataset of type integer with 2 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_int2(loc_id,dataset,datasetName,parallel)
@ -891,14 +889,14 @@ subroutine HDF5_read_int2(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_int2: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int2: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_int2
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pInt with 3 dimensions
!> @brief read dataset of type integer with 3 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_int3(loc_id,dataset,datasetName,parallel)
@ -931,14 +929,14 @@ subroutine HDF5_read_int3(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_int3: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int3: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_int3
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pInt withh 4 dimensions
!> @brief read dataset of type integer with 4 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_int4(loc_id,dataset,datasetName,parallel)
@ -971,14 +969,14 @@ subroutine HDF5_read_int4(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_int4: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int4: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_int4
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pInt with 5 dimensions
!> @brief read dataset of type integer with 5 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_int5(loc_id,dataset,datasetName,parallel)
@ -1011,14 +1009,14 @@ subroutine HDF5_read_int5(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_int5: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int5: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_int5
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pInt with 6 dimensions
!> @brief read dataset of type integer with 6 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_int6(loc_id,dataset,datasetName,parallel)
@ -1051,14 +1049,14 @@ subroutine HDF5_read_int6(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_int6: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int6: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
end subroutine HDF5_read_int6
!--------------------------------------------------------------------------------------------------
!> @brief read dataset of type pInt with 7 dimensions
!> @brief read dataset of type integer with 7 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_int7(loc_id,dataset,datasetName,parallel)
@ -1091,7 +1089,7 @@ subroutine HDF5_read_int7(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_int7: h5dread_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int7: h5dread_f')
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -1099,7 +1097,7 @@ end subroutine HDF5_read_int7
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pReal with 1 dimension
!> @brief write dataset of type real with 1 dimension
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real1(loc_id,dataset,datasetName,parallel)
@ -1132,7 +1130,7 @@ subroutine HDF5_write_real1(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_real1: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real1: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1140,7 +1138,7 @@ subroutine HDF5_write_real1(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_real1
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pReal with 2 dimensions
!> @brief write dataset of type real with 2 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real2(loc_id,dataset,datasetName,parallel)
@ -1173,7 +1171,7 @@ subroutine HDF5_write_real2(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_real2: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real2: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1181,7 +1179,7 @@ subroutine HDF5_write_real2(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_real2
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pReal with 3 dimensions
!> @brief write dataset of type real with 3 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real3(loc_id,dataset,datasetName,parallel)
@ -1214,7 +1212,7 @@ subroutine HDF5_write_real3(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_real3: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real3: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1222,7 +1220,7 @@ subroutine HDF5_write_real3(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_real3
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pReal with 4 dimensions
!> @brief write dataset of type real with 4 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real4(loc_id,dataset,datasetName,parallel)
@ -1255,7 +1253,7 @@ subroutine HDF5_write_real4(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_real4: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real4: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1264,7 +1262,7 @@ end subroutine HDF5_write_real4
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pReal with 5 dimensions
!> @brief write dataset of type real with 5 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real5(loc_id,dataset,datasetName,parallel)
@ -1297,7 +1295,7 @@ subroutine HDF5_write_real5(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_real5: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real5: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1305,7 +1303,7 @@ subroutine HDF5_write_real5(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_real5
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pReal with 6 dimensions
!> @brief write dataset of type real with 6 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real6(loc_id,dataset,datasetName,parallel)
@ -1338,7 +1336,7 @@ subroutine HDF5_write_real6(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_real6: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real6: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1346,7 +1344,7 @@ subroutine HDF5_write_real6(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_real6
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pReal with 7 dimensions
!> @brief write dataset of type real with 7 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_real7(loc_id,dataset,datasetName,parallel)
@ -1379,7 +1377,7 @@ subroutine HDF5_write_real7(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_real7: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real7: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1388,7 +1386,7 @@ end subroutine HDF5_write_real7
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pInt with 1 dimension
!> @brief write dataset of type integer with 1 dimension
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_int1(loc_id,dataset,datasetName,parallel)
@ -1421,7 +1419,7 @@ subroutine HDF5_write_int1(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_int1: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int1: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1429,7 +1427,7 @@ subroutine HDF5_write_int1(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_int1
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pInt with 2 dimensions
!> @brief write dataset of type integer with 2 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_int2(loc_id,dataset,datasetName,parallel)
@ -1462,7 +1460,7 @@ subroutine HDF5_write_int2(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_int2: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int2: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1470,7 +1468,7 @@ subroutine HDF5_write_int2(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_int2
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pInt with 3 dimensions
!> @brief write dataset of type integer with 3 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_int3(loc_id,dataset,datasetName,parallel)
@ -1503,7 +1501,7 @@ subroutine HDF5_write_int3(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_int3: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int3: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1511,7 +1509,7 @@ subroutine HDF5_write_int3(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_int3
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pInt with 4 dimensions
!> @brief write dataset of type integer with 4 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_int4(loc_id,dataset,datasetName,parallel)
@ -1544,7 +1542,7 @@ subroutine HDF5_write_int4(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_int4: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int4: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1552,7 +1550,7 @@ subroutine HDF5_write_int4(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_int4
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pInt with 5 dimensions
!> @brief write dataset of type integer with 5 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_int5(loc_id,dataset,datasetName,parallel)
@ -1585,7 +1583,7 @@ subroutine HDF5_write_int5(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_int5: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int5: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1593,7 +1591,7 @@ subroutine HDF5_write_int5(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_int5
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pInt with 6 dimensions
!> @brief write dataset of type integer with 6 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_int6(loc_id,dataset,datasetName,parallel)
@ -1626,7 +1624,7 @@ subroutine HDF5_write_int6(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_int6: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int6: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1634,7 +1632,7 @@ subroutine HDF5_write_int6(loc_id,dataset,datasetName,parallel)
end subroutine HDF5_write_int6
!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type pInt with 7 dimensions
!> @brief write dataset of type integer with 7 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_int7(loc_id,dataset,datasetName,parallel)
@ -1667,7 +1665,7 @@ subroutine HDF5_write_int7(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_int7: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int7: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1743,7 +1741,7 @@ subroutine HDF5_write_rotation(loc_id,dataset,datasetName,parallel)
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
call h5dwrite_f(dset_id, z_id,dataset_asArray(4,:),int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_rotation: h5dwrite_f')
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_rotation: h5dwrite_f')
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1768,7 +1766,7 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
globalShape !< shape of the dataset (all processes)
integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
integer(pInt), dimension(worldsize) :: &
integer, dimension(worldsize) :: &
readSize !< contribution of all processes
integer :: ierr
integer :: hdferr
@ -1776,17 +1774,17 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
!-------------------------------------------------------------------------------------------------
! creating a property list for transfer properties (is collective for MPI)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pcreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5pcreate_f')
!--------------------------------------------------------------------------------------------------
readSize = 0_pInt
readSize(worldrank+1) = int(localShape(ubound(localShape,1)),pInt)
readSize = 0
readSize(worldrank+1) = int(localShape(ubound(localShape,1)))
#ifdef PETSc
if (parallel) then
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pset_dxpl_mpio_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5pset_dxpl_mpio_f')
call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
if (ierr /= 0) call IO_error(894_pInt,ext_msg='initialize_read: MPI_allreduce')
if (ierr /= 0) call IO_error(894,ext_msg='initialize_read: MPI_allreduce')
endif
#endif
myStart = int(0,HSIZE_T)
@ -1796,28 +1794,28 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
!--------------------------------------------------------------------------------------------------
! create dataspace in memory (local shape)
call h5screate_simple_f(size(localShape), localShape, memspace_id, hdferr, localShape)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5screate_simple_f/memspace_id')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5screate_simple_f/memspace_id')
!--------------------------------------------------------------------------------------------------
! creating a property list for IO and set it to collective
call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pcreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5pcreate_f')
#ifdef PETSc
call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pset_all_coll_metadata_ops_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5pset_all_coll_metadata_ops_f')
#endif
!--------------------------------------------------------------------------------------------------
! open the dataset in the file and get the space ID
call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5dopen_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5dopen_f')
call h5dget_space_f(dset_id, filespace_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5dget_space_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5dget_space_f')
!--------------------------------------------------------------------------------------------------
! select a hyperslab (the portion of the current process) in the file
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, localShape, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5sselect_hyperslab_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5sselect_hyperslab_f')
end subroutine initialize_read
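A worked example of the layout set up in initialize_read, for two MPI ranks and illustrative numbers; the assignments of myStart and totalShape lie outside the hunk shown above, so the sketch assumes the usual running-sum layout along the last dimension:

program demo_layout
  use HDF5, only: HSIZE_T
  implicit none
  integer(HSIZE_T), dimension(2) :: localShape, myStart, totalShape
  integer,          dimension(2) :: readSize
  integer :: worldrank

  worldrank  = 1                                                    ! pretend this is the second process
  localShape = int([3,12],HSIZE_T)                                  ! it holds 12 entries along the last dimension
  readSize   = [10,12]                                              ! gathered by the MPI_allreduce above
  myStart    = 0_HSIZE_T
  myStart(2) = int(sum(readSize(1:worldrank)),HSIZE_T)              ! = 10, offset of this process in the file
  totalShape = localShape
  totalShape(2) = int(sum(readSize),HSIZE_T)                        ! = 22, global extent of the dataset
  print *, myStart, totalShape
end program demo_layout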
@ -1831,15 +1829,15 @@ subroutine finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id
integer :: hdferr
call h5pclose_f(plist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: plist_id')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: plist_id')
call h5pclose_f(aplist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: aplist_id')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: aplist_id')
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: h5dclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: h5dclose_f')
call h5sclose_f(filespace_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: h5sclose_f/filespace_id')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: h5sclose_f/filespace_id')
call h5sclose_f(memspace_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: h5sclose_f/memspace_id')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: h5sclose_f/memspace_id')
end subroutine finalize_read
@ -1870,22 +1868,22 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
!-------------------------------------------------------------------------------------------------
! creating a property list for transfer properties (is collective when writing in parallel)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5pcreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5pcreate_f')
#ifdef PETSc
if (parallel) then
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5pset_dxpl_mpio_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5pset_dxpl_mpio_f')
endif
#endif
!--------------------------------------------------------------------------------------------------
! determine the global data layout among all processes
writeSize = 0_pInt
writeSize(worldrank+1) = int(myShape(ubound(myShape,1)),pInt)
writeSize = 0
writeSize(worldrank+1) = int(myShape(ubound(myShape,1)))
#ifdef PETSc
if (parallel) then
call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
if (ierr /= 0) call IO_error(894_pInt,ext_msg='initialize_write: MPI_allreduce')
if (ierr /= 0) call IO_error(894,ext_msg='initialize_write: MPI_allreduce')
endif
#endif
myStart = int(0,HSIZE_T)
@ -1895,17 +1893,16 @@ if (parallel) then
!--------------------------------------------------------------------------------------------------
! create dataspace in memory (local shape) and in file (global shape)
call h5screate_simple_f(size(myShape), myShape, memspace_id, hdferr, myShape)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5dopen_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5dopen_f')
call h5screate_simple_f(size(totalShape), totalShape, filespace_id, hdferr, totalShape)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5dget_space_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5dget_space_f')
!--------------------------------------------------------------------------------------------------
! create dataset in the file and select a hyperslab from it (the portion of the current process)
call h5dcreate_f(loc_id, trim(datasetName), datatype, filespace_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5dcreate_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5dcreate_f')
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, myShape, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5sselect_hyperslab_f')
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5sselect_hyperslab_f')
end subroutine initialize_write
@ -1919,14 +1916,15 @@ subroutine finalize_write(plist_id, dset_id, filespace_id, memspace_id)
integer :: hdferr
call h5pclose_f(plist_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_write: plist_id')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: plist_id')
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_write: h5dclose_f')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: h5dclose_f')
call h5sclose_f(filespace_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_write: h5sclose_f/filespace_id')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: h5sclose_f/filespace_id')
call h5sclose_f(memspace_id, hdferr)
if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_write: h5sclose_f/memspace_id')
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: h5sclose_f/memspace_id')
end subroutine finalize_write
#endif
end module HDF5_Utilities

View File

@ -7,23 +7,27 @@ module damage_local
use material
use numerics
use config
use source_damage_isoBrittle
use source_damage_isoDuctile
use source_damage_anisoBrittle
use source_damage_anisoDuctile
implicit none
private
integer, dimension(:,:), allocatable, target, public :: &
damage_local_sizePostResult !< size of each post result output
damage_local_sizePostResult
character(len=64), dimension(:,:), allocatable, target, public :: &
damage_local_output !< name of each post result output
damage_local_output
integer, dimension(:), allocatable, target, public :: &
damage_local_Noutput !< number of outputs per instance of this damage
damage_local_Noutput
enum, bind(c)
enumerator :: undefined_ID, &
enumerator :: &
undefined_ID, &
damage_ID
end enum
integer(kind(undefined_ID)), dimension(:,:), allocatable :: &
damage_local_outputID !< ID of each post result output
@ -40,7 +44,6 @@ module damage_local
damage_local_updateState, &
damage_local_postResults
contains
!--------------------------------------------------------------------------------------------------
@ -116,9 +119,9 @@ subroutine damage_local_init
end associate
enddo
end subroutine damage_local_init
!--------------------------------------------------------------------------------------------------
!> @brief calculates local change in damage field
!--------------------------------------------------------------------------------------------------
@ -153,18 +156,11 @@ function damage_local_updateState(subdt, ip, el)
end function damage_local_updateState
!--------------------------------------------------------------------------------------------------
!> @brief calculates homogenized local damage driving forces
!--------------------------------------------------------------------------------------------------
subroutine damage_local_getSourceAndItsTangent(phiDot, dPhiDot_dPhi, phi, ip, el)
use source_damage_isoBrittle, only: &
source_damage_isobrittle_getRateAndItsTangent
use source_damage_isoDuctile, only: &
source_damage_isoductile_getRateAndItsTangent
use source_damage_anisoBrittle, only: &
source_damage_anisobrittle_getRateAndItsTangent
use source_damage_anisoDuctile, only: &
source_damage_anisoductile_getRateAndItsTangent
integer, intent(in) :: &
ip, & !< integration point number
@ -225,8 +221,7 @@ function damage_local_postResults(ip,el)
real(pReal), dimension(sum(damage_local_sizePostResult(:,damage_typeInstance(material_homogenizationAt(el))))) :: &
damage_local_postResults
integer :: &
instance, homog, offset, o, c
integer :: instance, homog, offset, o, c
homog = material_homogenizationAt(el)
offset = damageMapping(homog)%p(ip,el)
@ -244,6 +239,7 @@ function damage_local_postResults(ip,el)
enddo outputsLoop
end associate
end function damage_local_postResults
end module damage_local

View File

@ -20,16 +20,15 @@ module damage_nonlocal
private
integer, dimension(:,:), allocatable, target, public :: &
damage_nonlocal_sizePostResult !< size of each post result output
damage_nonlocal_sizePostResult
character(len=64), dimension(:,:), allocatable, target, public :: &
damage_nonlocal_output !< name of each post result output
damage_nonlocal_output
integer, dimension(:), allocatable, target, public :: &
damage_nonlocal_Noutput !< number of outputs per instance of this damage
damage_nonlocal_Noutput
enum, bind(c)
enumerator :: undefined_ID, &
enumerator :: &
undefined_ID, &
damage_ID
end enum

View File

@ -3,7 +3,11 @@
!> @brief New Fortran functions for compiler versions that do not support them
!--------------------------------------------------------------------------------------------------
module future
use prec
implicit none
public
contains
#if defined(__GFORTRAN__) || __INTEL_COMPILER < 1800
@ -11,6 +15,7 @@ contains
!> @brief substitute for the findloc intrinsic (only for integer, dimension(:) at the moment)
!--------------------------------------------------------------------------------------------------
function findloc(a,v)
integer, intent(in), dimension(:) :: a
integer, intent(in) :: v
integer :: i,j
@ -32,10 +37,7 @@ end function findloc
!> @brief substitute for the norm2 intrinsic (only for real, dimension(3) at the moment)
!--------------------------------------------------------------------------------------------------
real(pReal) pure function norm2(v)
use prec, only: &
pReal
implicit none
real(pReal), intent(in), dimension(3) :: v
norm2 = sqrt(sum(v**2))
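A quick sanity check for this fallback is a tiny driver; pReal is emulated here with selected_real_kind(15), which is only an assumption about what prec actually defines:

program norm2_fallback_check
 implicit none
 integer,     parameter               :: pReal = selected_real_kind(15)   ! assumed stand-in for prec's pReal
 real(pReal), dimension(3), parameter :: v = [3.0_pReal, 4.0_pReal, 0.0_pReal]

 write(6,*) sqrt(sum(v**2))                                               ! the substitute's formula: prints 5.0
 write(6,*) norm2(v)                                                      ! Fortran 2008 intrinsic, same value where available
end program norm2_fallback_check

Whether the substitutes are compiled in at all is decided by the preprocessor guards visible above; the driver only checks that the formula gives the expected value.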

View File

@ -23,15 +23,14 @@ module homogenization
use damage_none
use damage_local
use damage_nonlocal
#if defined(PETSc) || defined(DAMASK_HDF5)
use results
use HDF5_utilities
#endif
implicit none
private
!--------------------------------------------------------------------------------------------------
! General variables for the homogenization at a material point
implicit none
private
real(pReal), dimension(:,:,:,:), allocatable, public :: &
materialpoint_F0, & !< def grad of IP at start of FE increment
materialpoint_F, & !< def grad of IP to be reached at end of FE increment
@ -45,17 +44,17 @@ module homogenization
thermal_maxSizePostResults, &
damage_maxSizePostResults
real(pReal), dimension(:,:,:,:), allocatable, private :: &
real(pReal), dimension(:,:,:,:), allocatable :: &
materialpoint_subF0, & !< def grad of IP at beginning of homogenization increment
materialpoint_subF !< def grad of IP to be reached at end of homog inc
real(pReal), dimension(:,:), allocatable, private :: &
real(pReal), dimension(:,:), allocatable :: &
materialpoint_subFrac, &
materialpoint_subStep, &
materialpoint_subdt
logical, dimension(:,:), allocatable, private :: &
logical, dimension(:,:), allocatable :: &
materialpoint_requested, &
materialpoint_converged
logical, dimension(:,:,:), allocatable, private :: &
logical, dimension(:,:,:), allocatable :: &
materialpoint_doneAndHappy
interface

View File

@ -9,65 +9,31 @@
program DAMASK_FEM
#include <petsc/finclude/petscsys.h>
use PetscDM
use prec, only: &
pInt, &
pReal, &
tol_math_check
use DAMASK_interface, only: &
DAMASK_interface_init, &
loadCaseFile, &
getSolverJobName
use IO, only: &
IO_isBlank, &
IO_stringPos, &
IO_stringValue, &
IO_floatValue, &
IO_intValue, &
IO_error, &
IO_lc, &
IO_intOut, &
IO_warning
use math ! need to include the whole module for FFTW
use prec
use DAMASK_interface
use IO
use math
use CPFEM2
use FEsolving, only: &
restartWrite, &
restartInc
use numerics, only: &
worldrank, &
maxCutBack, &
stagItMax
use mesh, only: &
mesh_Nboundaries, &
mesh_boundaries, &
geomMesh
use FEM_Utilities, only: &
utilities_init, &
tSolutionState, &
tLoadCase, &
cutBack, &
maxFields, &
nActiveFields, &
FIELD_MECH_ID, &
COMPONENT_MECH_X_ID, &
COMPONENT_MECH_Y_ID, &
COMPONENT_MECH_Z_ID, &
FIELD_MECH_label
use FEsolving
use numerics
use mesh
use FEM_Utilities
use FEM_mech
implicit none
!--------------------------------------------------------------------------------------------------
! variables related to information from load case and geom file
integer(pInt), allocatable, dimension(:) :: chunkPos ! this is longer than needed for geometry parsing
integer(pInt) :: &
N_def = 0_pInt !< # of rate of deformation specifiers found in load case file
integer, allocatable, dimension(:) :: chunkPos ! this is longer than needed for geometry parsing
integer :: &
N_def = 0 !< # of rate of deformation specifiers found in load case file
character(len=65536) :: &
line
!--------------------------------------------------------------------------------------------------
! loop variables, convergence etc.
integer(pInt), parameter :: &
subStepFactor = 2_pInt !< for each substep, divide the last time increment by 2.0
integer, parameter :: &
subStepFactor = 2 !< for each substep, divide the last time increment by 2.0
real(pReal) :: &
time = 0.0_pReal, & !< elapsed time
time0 = 0.0_pReal, & !< begin of interval
@ -77,22 +43,22 @@ program DAMASK_FEM
logical :: &
guess, & !< guess along former trajectory
stagIterate
integer(pInt) :: &
integer :: &
i, &
errorID, &
cutBackLevel = 0_pInt, & !< cut back level \f$ t = \frac{t_{inc}}{2^l} \f$
stepFraction = 0_pInt !< fraction of current time interval
integer(pInt) :: &
currentLoadcase = 0_pInt, & !< current load case
currentFace = 0_pInt, &
cutBackLevel = 0, & !< cut back level \f$ t = \frac{t_{inc}}{2^l} \f$
stepFraction = 0 !< fraction of current time interval
integer :: &
currentLoadcase = 0, & !< current load case
currentFace = 0, &
inc, & !< current increment in current load case
totalIncsCounter = 0_pInt, & !< total # of increments
convergedCounter = 0_pInt, & !< # of converged increments
notConvergedCounter = 0_pInt, & !< # of non-converged increments
fileUnit = 0_pInt, & !< file unit for reading load case and writing results
totalIncsCounter = 0, & !< total # of increments
convergedCounter = 0, & !< # of converged increments
notConvergedCounter = 0, & !< # of non-converged increments
fileUnit = 0, & !< file unit for reading load case and writing results
myStat, &
statUnit = 0_pInt, & !< file unit for statistics output
lastRestartWritten = 0_pInt, & !< total increment No. at which last restart information was written
statUnit = 0, & !< file unit for statistics output
lastRestartWritten = 0, & !< total increment No. at which last restart information was written
stagIter, &
component
character(len=6) :: loadcase_string
@ -120,17 +86,17 @@ program DAMASK_FEM
!--------------------------------------------------------------------------------------------------
! read basic information from load case file and allocate data structure containing load cases
open(newunit=fileunit,iostat=myStat,file=trim(loadCaseFile),action='read')
if (myStat /= 0_pInt) call IO_error(100_pInt,el=myStat,ext_msg=trim(loadCaseFile))
if (myStat /= 0) call IO_error(100,el=myStat,ext_msg=trim(loadCaseFile))
do
read(fileUnit, '(A)', iostat=myStat) line
if ( myStat /= 0_pInt) exit
if ( myStat /= 0) exit
if (IO_isBlank(line)) cycle ! skip empty lines
chunkPos = IO_stringPos(line)
do i = 1_pInt, chunkPos(1) ! reading compulsory parameters for loadcase
do i = 1, chunkPos(1) ! reading compulsory parameters for loadcase
select case (IO_lc(IO_stringValue(line,chunkPos,i)))
case('$loadcase')
N_def = N_def + 1_pInt
N_def = N_def + 1
end select
enddo ! count all identifiers to allocate memory and do sanity check
enddo
@ -172,35 +138,35 @@ program DAMASK_FEM
rewind(fileUnit)
do
read(fileUnit, '(A)', iostat=myStat) line
if ( myStat /= 0_pInt) exit
if ( myStat /= 0) exit
if (IO_isBlank(line)) cycle ! skip empty lines
chunkPos = IO_stringPos(line)
do i = 1_pInt, chunkPos(1)
do i = 1, chunkPos(1)
select case (IO_lc(IO_stringValue(line,chunkPos,i)))
!--------------------------------------------------------------------------------------------------
! loadcase information
case('$loadcase')
currentLoadCase = IO_intValue(line,chunkPos,i+1_pInt)
currentLoadCase = IO_intValue(line,chunkPos,i+1)
case('face')
currentFace = IO_intValue(line,chunkPos,i+1_pInt)
currentFaceSet = -1_pInt
currentFace = IO_intValue(line,chunkPos,i+1)
currentFaceSet = -1
do faceSet = 1, mesh_Nboundaries
if (mesh_boundaries(faceSet) == currentFace) currentFaceSet = faceSet
enddo
if (currentFaceSet < 0_pInt) call IO_error(error_ID = errorID, ext_msg = 'invalid BC')
if (currentFaceSet < 0) call IO_error(error_ID = errorID, ext_msg = 'invalid BC')
case('t','time','delta') ! increment time
loadCases(currentLoadCase)%time = IO_floatValue(line,chunkPos,i+1_pInt)
loadCases(currentLoadCase)%time = IO_floatValue(line,chunkPos,i+1)
case('n','incs','increments','steps') ! number of increments
loadCases(currentLoadCase)%incs = IO_intValue(line,chunkPos,i+1_pInt)
loadCases(currentLoadCase)%incs = IO_intValue(line,chunkPos,i+1)
case('logincs','logincrements','logsteps') ! number of increments (switch to log time scaling)
loadCases(currentLoadCase)%incs = IO_intValue(line,chunkPos,i+1_pInt)
loadCases(currentLoadCase)%logscale = 1_pInt
loadCases(currentLoadCase)%incs = IO_intValue(line,chunkPos,i+1)
loadCases(currentLoadCase)%logscale = 1
case('freq','frequency','outputfreq') ! frequency of result writings
loadCases(currentLoadCase)%outputfrequency = IO_intValue(line,chunkPos,i+1_pInt)
loadCases(currentLoadCase)%outputfrequency = IO_intValue(line,chunkPos,i+1)
case('r','restart','restartwrite') ! frequency of writing restart information
loadCases(currentLoadCase)%restartfrequency = &
max(0_pInt,IO_intValue(line,chunkPos,i+1_pInt))
max(0,IO_intValue(line,chunkPos,i+1))
case('guessreset','dropguessing')
loadCases(currentLoadCase)%followFormerTrajectory = .false. ! do not continue to predict deformation along former trajectory
@ -214,7 +180,7 @@ program DAMASK_FEM
loadCases(currentLoadCase)%fieldBC(field)%componentBC(component)%Mask (currentFaceSet) = &
.true.
loadCases(currentLoadCase)%fieldBC(field)%componentBC(component)%Value(currentFaceSet) = &
IO_floatValue(line,chunkPos,i+1_pInt)
IO_floatValue(line,chunkPos,i+1)
endif
enddo
endif
@ -227,7 +193,7 @@ program DAMASK_FEM
loadCases(currentLoadCase)%fieldBC(field)%componentBC(component)%Mask (currentFaceSet) = &
.true.
loadCases(currentLoadCase)%fieldBC(field)%componentBC(component)%Value(currentFaceSet) = &
IO_floatValue(line,chunkPos,i+1_pInt)
IO_floatValue(line,chunkPos,i+1)
endif
enddo
endif
@ -240,7 +206,7 @@ program DAMASK_FEM
loadCases(currentLoadCase)%fieldBC(field)%componentBC(component)%Mask (currentFaceSet) = &
.true.
loadCases(currentLoadCase)%fieldBC(field)%componentBC(component)%Value(currentFaceSet) = &
IO_floatValue(line,chunkPos,i+1_pInt)
IO_floatValue(line,chunkPos,i+1)
endif
enddo
endif
@ -252,20 +218,20 @@ program DAMASK_FEM
!--------------------------------------------------------------------------------------------------
! consistency checks and output of load case
loadCases(1)%followFormerTrajectory = .false. ! cannot guess along trajectory for first inc of first currentLoadCase
errorID = 0_pInt
checkLoadcases: do currentLoadCase = 1_pInt, size(loadCases)
errorID = 0
checkLoadcases: do currentLoadCase = 1, size(loadCases)
write (loadcase_string, '(i6)' ) currentLoadCase
write(6,'(1x,a,i6)') 'load case: ', currentLoadCase
if (.not. loadCases(currentLoadCase)%followFormerTrajectory) &
write(6,'(2x,a)') 'drop guessing along trajectory'
do field = 1_pInt, nActiveFields
do field = 1, nActiveFields
select case (loadCases(currentLoadCase)%fieldBC(field)%ID)
case(FIELD_MECH_ID)
write(6,'(2x,a)') 'Field '//trim(FIELD_MECH_label)
end select
do faceSet = 1_pInt, mesh_Nboundaries
do component = 1_pInt, loadCases(currentLoadCase)%fieldBC(field)%nComponents
do faceSet = 1, mesh_Nboundaries
do component = 1, loadCases(currentLoadCase)%fieldBC(field)%nComponents
if (loadCases(currentLoadCase)%fieldBC(field)%componentBC(component)%Mask(faceSet)) &
write(6,'(4x,a,i2,a,i2,a,f12.7)') 'Face ', mesh_boundaries(faceSet), &
' Component ', component, &
@ -275,19 +241,19 @@ program DAMASK_FEM
enddo
enddo
write(6,'(2x,a,f12.6)') 'time: ', loadCases(currentLoadCase)%time
if (loadCases(currentLoadCase)%incs < 1_pInt) errorID = 835_pInt ! non-positive incs count
if (loadCases(currentLoadCase)%incs < 1) errorID = 835 ! non-positive incs count
write(6,'(2x,a,i5)') 'increments: ', loadCases(currentLoadCase)%incs
if (loadCases(currentLoadCase)%outputfrequency < 1_pInt) errorID = 836_pInt ! non-positive result frequency
if (loadCases(currentLoadCase)%outputfrequency < 1) errorID = 836 ! non-positive result frequency
write(6,'(2x,a,i5)') 'output frequency: ', &
loadCases(currentLoadCase)%outputfrequency
write(6,'(2x,a,i5,/)') 'restart frequency: ', &
loadCases(currentLoadCase)%restartfrequency
if (errorID > 0_pInt) call IO_error(error_ID = errorID, ext_msg = loadcase_string) ! exit with error message
if (errorID > 0) call IO_error(error_ID = errorID, ext_msg = loadcase_string) ! exit with error message
enddo checkLoadcases
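For reference, a load case file that would satisfy the parsing loop above could look as follows (purely hypothetical numbers; the per-component boundary-condition keywords handled in the hunks cut from this view are omitted, and nothing here prescribes whether a load case must fit on a single line):

$loadcase 1 face 1 t 1.0 incs 20 freq 5
$loadcase 2 face 1 t 10.0 logincs 10 restart 5 guessreset

Keywords are matched case-insensitively through IO_lc, so t/time/delta and n/incs/increments/steps are interchangeable.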
!--------------------------------------------------------------------------------------------------
! doing initialization depending on active solvers
call Utilities_init()
call Utilities_init
do field = 1, nActiveFields
select case (loadCases(1)%fieldBC(field)%ID)
case(FIELD_MECH_ID)
@ -296,30 +262,30 @@ program DAMASK_FEM
enddo
loadCaseLooping: do currentLoadCase = 1_pInt, size(loadCases)
loadCaseLooping: do currentLoadCase = 1, size(loadCases)
time0 = time ! load case start time
guess = loadCases(currentLoadCase)%followFormerTrajectory ! change of load case? homogeneous guess for the first inc
incLooping: do inc = 1_pInt, loadCases(currentLoadCase)%incs
totalIncsCounter = totalIncsCounter + 1_pInt
incLooping: do inc = 1, loadCases(currentLoadCase)%incs
totalIncsCounter = totalIncsCounter + 1
!--------------------------------------------------------------------------------------------------
! forwarding time
timeIncOld = timeinc ! last timeinc that brought former inc to an end
if (loadCases(currentLoadCase)%logscale == 0_pInt) then ! linear scale
if (loadCases(currentLoadCase)%logscale == 0) then ! linear scale
timeinc = loadCases(currentLoadCase)%time/real(loadCases(currentLoadCase)%incs,pReal)
else
if (currentLoadCase == 1_pInt) then ! 1st load case of logarithmic scale
if (inc == 1_pInt) then ! 1st inc of 1st load case of logarithmic scale
timeinc = loadCases(1)%time*(2.0_pReal**real( 1_pInt-loadCases(1)%incs ,pReal)) ! assume 1st inc is equal to 2nd
if (currentLoadCase == 1) then ! 1st load case of logarithmic scale
if (inc == 1) then ! 1st inc of 1st load case of logarithmic scale
timeinc = loadCases(1)%time*(2.0_pReal**real( 1-loadCases(1)%incs ,pReal)) ! assume 1st inc is equal to 2nd
else ! not-1st inc of 1st load case of logarithmic scale
timeinc = loadCases(1)%time*(2.0_pReal**real(inc-1_pInt-loadCases(1)%incs ,pReal))
timeinc = loadCases(1)%time*(2.0_pReal**real(inc-1-loadCases(1)%incs ,pReal))
endif
else ! not-1st load case of logarithmic scale
timeinc = time0 * &
( (1.0_pReal + loadCases(currentLoadCase)%time/time0 )**(real( inc,pReal)/&
real(loadCases(currentLoadCase)%incs ,pReal))&
-(1.0_pReal + loadCases(currentLoadCase)%time/time0 )**(real( inc-1_pInt ,pReal)/&
-(1.0_pReal + loadCases(currentLoadCase)%time/time0 )**(real( inc-1 ,pReal)/&
real(loadCases(currentLoadCase)%incs ,pReal)))
endif
endif
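Written out, the increment lengths produced by this logarithmic branch are, with \(T\) the load-case time, \(N\) its number of increments, and \(t_0\) the time already elapsed when the case starts:

first load case:   \(\Delta t_1 = T\,2^{1-N},\qquad \Delta t_n = T\,2^{\,n-1-N}\ (n>1)\)
later load cases:  \(\Delta t_n = t_0\bigl[(1+T/t_0)^{n/N} - (1+T/t_0)^{(n-1)/N}\bigr]\)

In both variants the \(N\) increments sum to \(T\), and the first increment of the first case is deliberately set equal to the second, as noted in the comment above.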
@ -327,14 +293,14 @@ program DAMASK_FEM
skipping: if (totalIncsCounter <= restartInc) then ! not yet at restart inc?
time = time + timeinc ! just advance time, skip already performed calculation
guess = .true. ! QUESTION:why forced guessing instead of inheriting loadcase preference
guess = .true.
else skipping
stepFraction = 0_pInt ! fraction scaled by stepFactor**cutLevel
stepFraction = 0 ! fraction scaled by subStepFactor**cutBackLevel
subStepLooping: do while (stepFraction < subStepFactor**cutBackLevel)
remainingLoadCaseTime = loadCases(currentLoadCase)%time+time0 - time
time = time + timeinc ! forward target time
stepFraction = stepFraction + 1_pInt ! count step
stepFraction = stepFraction + 1 ! count step
!--------------------------------------------------------------------------------------------------
! report begin of new step
@ -369,7 +335,7 @@ program DAMASK_FEM
!--------------------------------------------------------------------------------------------------
! solve fields
stagIter = 0_pInt
stagIter = 0
stagIterate = .true.
do while (stagIterate)
do field = 1, nActiveFields
@ -383,7 +349,7 @@ program DAMASK_FEM
if(.not. solres(field)%converged) exit ! no solution found
enddo
stagIter = stagIter + 1_pInt
stagIter = stagIter + 1
stagIterate = stagIter < stagItMax &
.and. all(solres(:)%converged) &
.and. .not. all(solres(:)%stagConverged) ! stationary with respect to staggered iteration
@ -395,13 +361,13 @@ program DAMASK_FEM
if (cutBackLevel < maxCutBack) then ! do cut back
write(6,'(/,a)') ' cut back detected'
cutBack = .True.
stepFraction = (stepFraction - 1_pInt) * subStepFactor ! adjust to new denominator
cutBackLevel = cutBackLevel + 1_pInt
stepFraction = (stepFraction - 1) * subStepFactor ! adjust to new denominator
cutBackLevel = cutBackLevel + 1
time = time - timeinc ! rewind time
timeinc = timeinc/2.0_pReal
else ! default behavior, exit if spectral solver does not converge
call IO_warning(850_pInt)
call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written
call IO_warning(850)
call quit(-1*(lastRestartWritten+1)) ! quit and provide information about last restart inc written
endif
else
guess = .true. ! start guessing after first converged (sub)inc
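As a worked example of the cut-back bookkeeping above: with subStepFactor = 2, a failed first attempt rescales stepFraction to 0, raises cutBackLevel to 1 and halves timeinc, so the while condition stepFraction < subStepFactor**cutBackLevel now demands two converged half-steps before the increment counts as finished. The sketch below only tracks the counters (the pretend convergence flag and the printed trace are not part of the solver):

program substep_bookkeeping_sketch
 implicit none
 integer, parameter :: subStepFactor = 2
 integer            :: stepFraction = 0, cutBackLevel = 0, attempt = 0
 real               :: timeinc = 1.0
 logical            :: converged

 do while (stepFraction < subStepFactor**cutBackLevel)
   attempt      = attempt + 1
   stepFraction = stepFraction + 1                                   ! count the step, as in the solver loop
   converged    = attempt /= 1                                       ! pretend only the very first attempt fails
   if (.not. converged) then                                         ! cut back: same rescaling as above
     stepFraction = (stepFraction - 1) * subStepFactor
     cutBackLevel = cutBackLevel + 1
     timeinc      = timeinc/2.0
   endif
   write(6,'(a,i0,a,i0,a,i0,a,f5.2)') ' attempt ',attempt,': stepFraction=',stepFraction, &
                                      ' cutBackLevel=',cutBackLevel,' timeinc=',timeinc
 enddo
end program substep_bookkeeping_sketch

Three attempts are reported: the rejected full step followed by the two half-steps that replace it.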
@ -413,26 +379,26 @@ program DAMASK_FEM
endif
enddo subStepLooping
cutBackLevel = max(0_pInt, cutBackLevel - 1_pInt) ! try half number of subincs next inc
cutBackLevel = max(0, cutBackLevel - 1) ! try half number of subincs next inc
if (all(solres(:)%converged)) then
convergedCounter = convergedCounter + 1_pInt
convergedCounter = convergedCounter + 1
write(6,'(/,a,'//IO_intOut(totalIncsCounter)//',a)') & ! report converged inc
' increment ', totalIncsCounter, ' converged'
else
notConvergedCounter = notConvergedCounter + 1_pInt
notConvergedCounter = notConvergedCounter + 1
write(6,'(/,a,'//IO_intOut(totalIncsCounter)//',a)') & ! report non-converged inc
' increment ', totalIncsCounter, ' NOT converged'
endif; flush(6)
if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0_pInt) then ! at output frequency
if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0) then ! at output frequency
write(6,'(1/,a)') ' ... writing results to file ......................................'
call CPFEM_results(totalIncsCounter,time)
endif
if ( loadCases(currentLoadCase)%restartFrequency > 0_pInt & ! writing of restart info requested ...
.and. mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! ... and at frequency of writing restart information
if ( loadCases(currentLoadCase)%restartFrequency > 0 & ! writing of restart info requested ...
.and. mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0) then ! ... and at frequency of writing restart information
restartWrite = .true. ! set restart parameter for FEsolving
lastRestartWritten = inc ! QUESTION: first call to CPFEM_general will write?
lastRestartWritten = inc ! first call to CPFEM_general will write
endif
endif skipping
@ -453,7 +419,7 @@ program DAMASK_FEM
flush(6)
close(statUnit)
if (notConvergedCounter > 0_pInt) call quit(2_pInt) ! error if some are not converged
call quit(0_pInt) ! no complains ;)
if (notConvergedCounter > 0) call quit(2) ! error if some are not converged
call quit(0) ! no complaints ;)
end program DAMASK_FEM

View File

@ -13,15 +13,17 @@ module FEM_mech
use PETScDM
use PETScDMplex
use PETScDT
use prec, only: &
pReal
use FEM_utilities, only: &
tSolutionState, &
tFieldBC, &
tComponentBC
use mesh, only: &
mesh_Nboundaries, &
mesh_boundaries
use prec
use FEM_utilities
use mesh
use IO
use DAMASK_interface
use numerics
use FEM_Zoo
use FEsolving
use homogenization
use math
implicit none
private
@ -34,49 +36,36 @@ module FEM_mech
real(pReal) :: timeincOld
end type tSolutionParams
type(tSolutionParams), private :: params
type(tSolutionParams) :: params
!--------------------------------------------------------------------------------------------------
! PETSc data
SNES, private :: mech_snes
Vec, private :: solution, solution_rate, solution_local
PetscInt, private :: dimPlex, cellDof, nQuadrature, nBasis
PetscReal, allocatable, target, private :: qPoints(:), qWeights(:)
MatNullSpace, private :: matnull
SNES :: mech_snes
Vec :: solution, solution_rate, solution_local
PetscInt :: dimPlex, cellDof, nQuadrature, nBasis
PetscReal, allocatable, target :: qPoints(:), qWeights(:)
MatNullSpace :: matnull
!--------------------------------------------------------------------------------------------------
! stress, stiffness and compliance average etc.
character(len=1024), private :: incInfo
real(pReal), private, dimension(3,3) :: &
character(len=1024) :: incInfo
real(pReal), dimension(3,3) :: &
P_av = 0.0_pReal
logical, private :: ForwardData
real(pReal), parameter, private :: eps = 1.0e-18_pReal
logical :: ForwardData
real(pReal), parameter :: eps = 1.0e-18_pReal
public :: &
FEM_mech_init, &
FEM_mech_solution, &
FEM_mech_forward
contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data, potentially from restart info
!--------------------------------------------------------------------------------------------------
subroutine FEM_mech_init(fieldBC)
use IO, only: &
IO_error
use DAMASK_interface, only: &
getSolverJobName
use mesh, only: &
geomMesh
use numerics, only: &
itmax, &
integrationOrder
use FEM_Zoo, only: &
FEM_Zoo_nQuadrature, &
FEM_Zoo_QuadraturePoints, &
FEM_Zoo_QuadratureWeights
implicit none
type(tFieldBC), intent(in) :: fieldBC
DM :: mech_mesh
PetscFE :: mechFE
@ -99,7 +88,7 @@ subroutine FEM_mech_init(fieldBC)
PetscReal, allocatable, target :: cellJMat(:,:)
PetscReal, pointer :: pV0(:), pCellJ(:), pInvcellJ(:)
PetscInt :: cellStart, cellEnd, cell, basis
character(len=7) :: prefix = 'mechFE_'
character(len=7), parameter :: prefix = 'mechFE_'
PetscErrorCode :: ierr
write(6,'(/,a)') ' <<<+- FEM_mech init -+>>>'
@ -246,12 +235,7 @@ end subroutine FEM_mech_init
!--------------------------------------------------------------------------------------------------
type(tSolutionState) function FEM_mech_solution( &
incInfoIn,timeinc,timeinc_old,fieldBC)
use numerics, only: &
itmax
use FEsolving, only: &
terminallyIll
implicit none
!--------------------------------------------------------------------------------------------------
! input data for solution
real(pReal), intent(in) :: &
@ -263,7 +247,6 @@ type(tSolutionState) function FEM_mech_solution( &
incInfoIn
!--------------------------------------------------------------------------------------------------
!
PetscErrorCode :: ierr
SNESConvergedReason :: reason
@ -298,21 +281,7 @@ end function FEM_mech_solution
!> @brief forms the FEM residual vector
!--------------------------------------------------------------------------------------------------
subroutine FEM_mech_formResidual(dm_local,xx_local,f_local,dummy,ierr)
use numerics, only: &
BBarStabilisation
use FEM_utilities, only: &
utilities_projectBCValues, &
utilities_constitutiveResponse
use homogenization, only: &
materialpoint_F, &
materialpoint_P
use math, only: &
math_det33, &
math_inv33
use FEsolving, only: &
terminallyIll
implicit none
DM :: dm_local
PetscDS :: prob
Vec :: x_local, f_local, xx_local
@ -433,19 +402,6 @@ end subroutine FEM_mech_formResidual
!> @brief forms the FEM stiffness matrix
!--------------------------------------------------------------------------------------------------
subroutine FEM_mech_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
use numerics, only: &
BBarStabilisation
use homogenization, only: &
materialpoint_dPdF, &
materialpoint_F
use math, only: &
math_inv33, &
math_identity2nd, &
math_det33
use FEM_utilities, only: &
utilities_projectBCValues
implicit none
DM :: dm_local
PetscDS :: prob
@ -577,15 +533,7 @@ end subroutine FEM_mech_formJacobian
!> @brief forwarding routine
!--------------------------------------------------------------------------------------------------
subroutine FEM_mech_forward(guess,timeinc,timeinc_old,fieldBC)
use FEM_utilities, only: &
cutBack
use homogenization, only: &
materialpoint_F0, &
materialpoint_F
use FEM_utilities, only: &
utilities_projectBCValues
implicit none
type(tFieldBC), intent(in) :: &
fieldBC
real(pReal), intent(in) :: &
@ -644,15 +592,7 @@ end subroutine FEM_mech_forward
!> @brief reporting
!--------------------------------------------------------------------------------------------------
subroutine FEM_mech_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr)
use numerics, only: &
err_struct_tolAbs, &
err_struct_tolRel
use IO, only: &
IO_intOut
use FEsolving, only: &
terminallyIll
implicit none
SNES :: snes_local
PetscInt :: PETScIter
PetscReal :: xnorm,snorm,fnorm,divTol

View File

@ -6,19 +6,26 @@ module FEM_utilities
#include <petsc/finclude/petscdmplex.h>
#include <petsc/finclude/petscdmda.h>
#include <petsc/finclude/petscis.h>
use prec, only: pReal, pInt
use PETScdmplex
use PETScdmda
use PETScis
use prec
use FEsolving
use homogenization
use numerics
use debug
use math
use mesh
implicit none
private
!--------------------------------------------------------------------------------------------------
!
logical, public :: cutBack = .false. !< cut back of BVP solver in case convergence is not achieved or a material point is terminally ill
integer(pInt), public, parameter :: maxFields = 6_pInt
integer(pInt), public :: nActiveFields = 0_pInt
integer, public, parameter :: maxFields = 6
integer, public :: nActiveFields = 0
!--------------------------------------------------------------------------------------------------
! grid related information
@ -43,7 +50,7 @@ use PETScis
!--------------------------------------------------------------------------------------------------
! variables controlling debugging
logical, private :: &
logical :: &
debugPETSc !< use some in debug defined options for more verbose PETSc solution
!--------------------------------------------------------------------------------------------------
@ -51,7 +58,7 @@ use PETScis
type, public :: tSolutionState !< return type of solution from FEM solver variants
logical :: converged = .true.
logical :: stagConverged = .true.
integer(pInt) :: iterationsNeeded = 0_pInt
integer :: iterationsNeeded = 0
end type tSolutionState
type, public :: tComponentBC
@ -62,18 +69,18 @@ use PETScis
type, public :: tFieldBC
integer(kind(FIELD_UNDEFINED_ID)) :: ID
integer(pInt) :: nComponents = 0_pInt
integer :: nComponents = 0
type(tComponentBC), allocatable :: componentBC(:)
end type tFieldBC
type, public :: tLoadCase
real(pReal) :: time = 0.0_pReal !< length of increment
integer(pInt) :: incs = 0_pInt, & !< number of increments
outputfrequency = 1_pInt, & !< frequency of result writes
restartfrequency = 0_pInt, & !< frequency of restart writes
logscale = 0_pInt !< linear/logarithmic time inc flag
integer :: incs = 0, & !< number of increments
outputfrequency = 1, & !< frequency of result writes
restartfrequency = 0, & !< frequency of restart writes
logscale = 0 !< linear/logarithmic time inc flag
logical :: followFormerTrajectory = .true. !< follow trajectory of former loadcase
integer(pInt), allocatable :: faceID(:)
integer, allocatable :: faceID(:)
type(tFieldBC), allocatable :: fieldBC(:)
end type tLoadCase
@ -92,19 +99,6 @@ contains
!> @brief allocates all necessary fields, sets debug flags
!--------------------------------------------------------------------------------------------------
subroutine utilities_init
use numerics, only: &
structOrder, &
petsc_defaultOptions, &
petsc_options
use debug, only: &
debug_level, &
debug_SPECTRAL, &
debug_SPECTRALPETSC,&
PETSCDEBUG
use math ! must use the whole module for use of FFTW
use mesh, only: &
mesh_NcpElemsGlobal, &
mesh_maxNips
character(len=1024) :: petsc_optionsPhysics
PetscErrorCode :: ierr
@ -139,11 +133,6 @@ end subroutine utilities_init
!> @brief calculates constitutive response
!--------------------------------------------------------------------------------------------------
subroutine utilities_constitutiveResponse(timeinc,P_av,forwardData)
use FEsolving, only: &
restartWrite
use homogenization, only: &
materialpoint_P, &
materialpoint_stressAndItsTangent
real(pReal), intent(in) :: timeinc !< loading time
logical, intent(in) :: forwardData !< age results

View File

@ -3,11 +3,12 @@
!> @brief Interpolation data used by the FEM solver
!--------------------------------------------------------------------------------------------------
module FEM_Zoo
use prec, only: pReal, pInt, group_float
use prec
implicit none
private
integer(pInt), parameter, public:: &
integer, parameter, public:: &
maxOrder = 5 !< current max interpolation set at cubic (intended to be arbitrary)
real(pReal), dimension(2,3), private, parameter :: &
triangle = reshape([-1.0_pReal, -1.0_pReal, &
@ -18,7 +19,7 @@ module FEM_Zoo
1.0_pReal, -1.0_pReal, -1.0_pReal, &
-1.0_pReal, 1.0_pReal, -1.0_pReal, &
-1.0_pReal, -1.0_pReal, 1.0_pReal], shape=[3,4])
integer(pInt), dimension(3,maxOrder), public, protected :: &
integer, dimension(3,maxOrder), public, protected :: &
FEM_Zoo_nQuadrature !< number of quadrature points for a given spatial dimension(1-3) and interpolation order(1-maxOrder)
type(group_float), dimension(3,maxOrder), public, protected :: &
FEM_Zoo_QuadratureWeights, & !< quadrature weights for each quadrature rule
@ -34,7 +35,6 @@ contains
!> @brief initializes FEM interpolation data
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_init
implicit none
write(6,'(/,a)') ' <<<+- FEM_Zoo init -+>>>'
@ -183,87 +183,110 @@ subroutine FEM_Zoo_init
end subroutine FEM_Zoo_init
!--------------------------------------------------------------------------------------------------
!> @brief star 3 permutation of input
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_permutationStar3(point,qPt)
implicit none
real(pReal) :: point(1), qPt(2,1), temp(3,1)
real(pReal), dimension(1), intent(in) :: point
real(pReal), dimension(2,1), intent(out) :: qPt
real(pReal), dimension(3,1) :: temp
temp(:,1) = [point(1), point(1), point(1)]
qPt = matmul(triangle, temp)
end subroutine FEM_Zoo_permutationStar3
!--------------------------------------------------------------------------------------------------
!> @brief star 21 permutation of input
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_permutationStar21(point,qPt)
implicit none
real(pReal) :: point(1), qPt(2,3), temp(3,3)
real(pReal), dimension(1), intent(in) :: point
real(pReal), dimension(2,3), intent(out) :: qPt
real(pReal), dimension(3,3) :: temp
temp(:,1) = [point(1), point(1), 1.0_pReal - 2.0_pReal*point(1)]
temp(:,2) = [point(1), 1.0_pReal - 2.0_pReal*point(1), point(1)]
temp(:,3) = [1.0_pReal - 2.0_pReal*point(1), point(1), point(1)]
qPt = matmul(triangle, temp)
end subroutine FEM_Zoo_permutationStar21
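In other words: the three columns of temp are the cyclic permutations of the barycentric triple (a, a, 1-2a) with a = point(1); each column sums to one, and the multiplication with the 2x3 vertex matrix triangle maps every column onto the corresponding point of the reference triangle. In matrix form:

\(\mathrm{qPt} = \mathrm{triangle}\,\begin{pmatrix} a & a & 1-2a \\ a & 1-2a & a \\ 1-2a & a & a \end{pmatrix},\qquad a = \mathrm{point}(1)\)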
!--------------------------------------------------------------------------------------------------
!> @brief star 111 permutation of input
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_permutationStar111(point,qPt)
implicit none
real(pReal) :: point(2), qPt(2,6), temp(3,6)
real(pReal), dimension(2), intent(in) :: point
real(pReal), dimension(2,6), intent(out) :: qPt
real(pReal), dimension(3,6) :: temp
temp(:,1) = [point(1), point(2), 1.0_pReal - point(1) - point(2)]
temp(:,2) = [point(1), 1.0_pReal - point(1) - point(2), point(2)]
temp(:,4) = [point(2), 1.0_pReal - point(1) - point(2), point(1)]
temp(:,5) = [1.0_pReal - point(1) - point(2), point(2), point(1)]
temp(:,6) = [1.0_pReal - point(1) - point(2), point(1), point(2)]
qPt = matmul(triangle, temp)
end subroutine FEM_Zoo_permutationStar111
!--------------------------------------------------------------------------------------------------
!> @brief star 4 permutation of input
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_permutationStar4(point,qPt)
implicit none
real(pReal) :: point(1), qPt(3,1), temp(4,1)
real(pReal), dimension(1), intent(in) :: point
real(pReal), dimension(3,1), intent(out) :: qPt
real(pReal), dimension(4,1) :: temp
temp(:,1) = [point(1), point(1), point(1), point(1)]
qPt = matmul(tetrahedron, temp)
end subroutine FEM_Zoo_permutationStar4
!--------------------------------------------------------------------------------------------------
!> @brief star 31 permutation of input
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_permutationStar31(point,qPt)
implicit none
real(pReal) :: point(1), qPt(3,4), temp(4,4)
real(pReal), dimension(1), intent(in) :: point
real(pReal), dimension(3,4), intent(out) :: qPt
real(pReal), dimension(4,4) :: temp
temp(:,1) = [point(1), point(1), point(1), 1.0_pReal - 3.0_pReal*point(1)]
temp(:,2) = [point(1), point(1), 1.0_pReal - 3.0_pReal*point(1), point(1)]
temp(:,3) = [point(1), 1.0_pReal - 3.0_pReal*point(1), point(1), point(1)]
temp(:,4) = [1.0_pReal - 3.0_pReal*point(1), point(1), point(1), point(1)]
qPt = matmul(tetrahedron, temp)
end subroutine FEM_Zoo_permutationStar31
!--------------------------------------------------------------------------------------------------
!> @brief star 22 permutation of input
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_permutationStar22(point,qPt)
implicit none
real(pReal) :: point(1), qPt(3,6), temp(4,6)
real(pReal), dimension(1), intent(in) :: point
real(pReal), dimension(3,6), intent(out) :: qPt
real(pReal), dimension(4,6) :: temp
temp(:,1) = [point(1), point(1), 0.5_pReal - point(1), 0.5_pReal - point(1)]
temp(:,2) = [point(1), 0.5_pReal - point(1), point(1), 0.5_pReal - point(1)]
@ -271,17 +294,21 @@ subroutine FEM_Zoo_permutationStar22(point,qPt)
temp(:,4) = [0.5_pReal - point(1), point(1), 0.5_pReal - point(1), point(1)]
temp(:,5) = [0.5_pReal - point(1), 0.5_pReal - point(1), point(1), point(1)]
temp(:,6) = [point(1), 0.5_pReal - point(1), 0.5_pReal - point(1), point(1)]
qPt = matmul(tetrahedron, temp)
end subroutine FEM_Zoo_permutationStar22
!--------------------------------------------------------------------------------------------------
!> @brief star 211 permutation of input
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_permutationStar211(point,qPt)
implicit none
real(pReal) :: point(2), qPt(3,12), temp(4,12)
real(pReal), dimension(2), intent(in) :: point
real(pReal), dimension(3,12), intent(out) :: qPt
real(pReal), dimension(4,12) :: temp
temp(:,1 ) = [point(1), point(1), point(2), 1.0_pReal - 2.0_pReal*point(1) - point(2)]
temp(:,2 ) = [point(1), point(1), 1.0_pReal - 2.0_pReal*point(1) - point(2), point(2)]
@ -295,17 +322,21 @@ subroutine FEM_Zoo_permutationStar211(point,qPt)
temp(:,10) = [1.0_pReal - 2.0_pReal*point(1) - point(2), point(1), point(1), point(2)]
temp(:,11) = [1.0_pReal - 2.0_pReal*point(1) - point(2), point(1), point(2), point(1)]
temp(:,12) = [1.0_pReal - 2.0_pReal*point(1) - point(2), point(2), point(1), point(1)]
qPt = matmul(tetrahedron, temp)
end subroutine FEM_Zoo_permutationStar211
!--------------------------------------------------------------------------------------------------
!> @brief star 1111 permutation of input
!--------------------------------------------------------------------------------------------------
subroutine FEM_Zoo_permutationStar1111(point,qPt)
implicit none
real(pReal) :: point(3), qPt(3,24), temp(4,24)
real(pReal), dimension(3), intent(in) :: point
real(pReal), dimension(3,24), intent(out) :: qPt
real(pReal), dimension(4,24) :: temp
temp(:,1 ) = [point(1), point(2), point(3), 1.0_pReal - point(1) - point(2)- point(3)]
temp(:,2 ) = [point(1), point(2), 1.0_pReal - point(1) - point(2)- point(3), point(3)]
@ -331,9 +362,9 @@ subroutine FEM_Zoo_permutationStar1111(point,qPt)
temp(:,22) = [1.0_pReal - point(1) - point(2)- point(3), point(2), point(3), point(1)]
temp(:,23) = [1.0_pReal - point(1) - point(2)- point(3), point(3), point(1), point(2)]
temp(:,24) = [1.0_pReal - point(1) - point(2)- point(3), point(3), point(2), point(1)]
qPt = matmul(tetrahedron, temp)
end subroutine FEM_Zoo_permutationStar1111
end module FEM_Zoo

View File

@ -75,7 +75,6 @@ contains
subroutine tMesh_FEM_init(self,dimen,order,nodes)
implicit none
integer, intent(in) :: dimen
integer, intent(in) :: order
real(pReal), intent(in), dimension(:,:) :: nodes
@ -238,7 +237,6 @@ end subroutine mesh_init
!--------------------------------------------------------------------------------------------------
pure function mesh_cellCenterCoordinates(ip,el)
implicit none
integer, intent(in) :: el, & !< element number
ip !< integration point number
real(pReal), dimension(3) :: mesh_cellCenterCoordinates !< x,y,z coordinates of the cell center of the requested IP cell

View File

@ -107,7 +107,7 @@ end subroutine results_closeJobFile
!--------------------------------------------------------------------------------------------------
subroutine results_addIncrement(inc,time)
integer(pInt), intent(in) :: inc
integer, intent(in) :: inc
real(pReal), intent(in) :: time
character(len=pStringLen) :: incChar

View File

@ -6,9 +6,15 @@
!--------------------------------------------------------------------------------------------------
module source_damage_anisoDuctile
use prec
use debug
use IO
use math
use material
use config
implicit none
private
integer, dimension(:), allocatable, public, protected :: &
source_damage_anisoDuctile_offset, & !< which source is my current damage mechanism?
source_damage_anisoDuctile_instance !< instance of damage source mechanism
@ -57,26 +63,6 @@ contains
!> @details reads in material parameters, allocates arrays, and does sanity checks
!--------------------------------------------------------------------------------------------------
subroutine source_damage_anisoDuctile_init
use debug, only: &
debug_level,&
debug_constitutive,&
debug_levelBasic
use IO, only: &
IO_error
use math, only: &
math_expand
use material, only: &
material_allocateSourceState, &
phase_source, &
phase_Nsources, &
phase_Noutput, &
SOURCE_damage_anisoDuctile_label, &
SOURCE_damage_anisoDuctile_ID, &
material_phase, &
sourceState
use config, only: &
config_phase
integer :: Ninstance,phase,instance,source,sourceOffset
integer :: NofMyPhase,p ,i
@ -181,13 +167,6 @@ end subroutine source_damage_anisoDuctile_init
!> @brief calculates derived quantities from state
!--------------------------------------------------------------------------------------------------
subroutine source_damage_anisoDuctile_dotState(ipc, ip, el)
use material, only: &
phaseAt, phasememberAt, &
plasticState, &
sourceState, &
material_homogenizationAt, &
damage, &
damageMapping
integer, intent(in) :: &
ipc, & !< component-ID of integration point
@ -222,8 +201,6 @@ end subroutine source_damage_anisoDuctile_dotState
!> @brief returns local part of nonlocal damage driving force
!--------------------------------------------------------------------------------------------------
subroutine source_damage_anisoDuctile_getRateAndItsTangent(localphiDot, dLocalphiDot_dPhi, phi, phase, constituent)
use material, only: &
sourceState
integer, intent(in) :: &
phase, &
@ -249,8 +226,6 @@ end subroutine source_damage_anisoDuctile_getRateAndItsTangent
!> @brief return array of local damage results
!--------------------------------------------------------------------------------------------------
function source_damage_anisoDuctile_postResults(phase, constituent)
use material, only: &
sourceState
integer, intent(in) :: &
phase, &