Merge branch 'hdf5-error-stop' into development

Sharan Roongta 2020-11-13 11:47:58 +01:00
commit 1f59f63011
15 changed files with 293 additions and 592 deletions
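
The recurring change across these files swaps DAMASK's IO_error reporting for plain Fortran error stop statements after each HDF5, MPI, and FFTW call. A minimal before/after sketch of the pattern, taken from the h5open_f call in HDF5_utilities_init below (illustrative only, not an extra hunk of the commit):

    ! before: route the failure through IO_error
    call h5open_f(hdferr)
    if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5open_f')

    ! after: terminate immediately with a plain Fortran error stop
    call h5open_f(hdferr)
    if(hdferr < 0) error stop 'HDF5 error'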

View File

@ -12,11 +12,10 @@ module HDF5_utilities
use prec
use parallelization
use IO
use rotations
implicit none
public
implicit none
public
!--------------------------------------------------------------------------------------------------
!> @brief reads integer or float data of defined shape from file ! ToDo: order of arguments wrong
@ -93,15 +92,15 @@ subroutine HDF5_utilities_init
!--------------------------------------------------------------------------------------------------
!initialize HDF5 library and check if integer and float type size match
call h5open_f(hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5open_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5tget_size_f(H5T_NATIVE_INTEGER,typeSize, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5tget_size_f (int)')
if(hdferr < 0) error stop 'HDF5 error'
if (int(bit_size(0),SIZE_T)/=typeSize*8) &
error stop 'Default integer size does not match H5T_NATIVE_INTEGER'
call h5tget_size_f(H5T_NATIVE_DOUBLE,typeSize, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_Utilities_init: h5tget_size_f (double)')
if(hdferr < 0) error stop 'HDF5 error'
if (int(storage_size(0.0_pReal),SIZE_T)/=typeSize*8) &
error stop 'pReal does not match H5T_NATIVE_DOUBLE'
@ -128,30 +127,30 @@ integer(HID_T) function HDF5_openFile(fileName,mode,parallel)
endif
call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5pcreate_f')
if(hdferr < 0) error stop 'HDF5 error'
#ifdef PETSc
if (present(parallel)) then; if (parallel) then
call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5pset_fapl_mpio_f')
if(hdferr < 0) error stop 'HDF5 error'
endif; endif
#endif
if (m == 'w') then
call h5fcreate_f(fileName,H5F_ACC_TRUNC_F,HDF5_openFile,hdferr,access_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5fcreate_f (w)')
if(hdferr < 0) error stop 'HDF5 error'
elseif(m == 'a') then
call h5fopen_f(fileName,H5F_ACC_RDWR_F,HDF5_openFile,hdferr,access_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5fopen_f (a)')
if(hdferr < 0) error stop 'HDF5 error'
elseif(m == 'r') then
call h5fopen_f(fileName,H5F_ACC_RDONLY_F,HDF5_openFile,hdferr,access_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5fopen_f (r)')
if(hdferr < 0) error stop 'HDF5 error'
else
call IO_error(1,ext_msg='HDF5_openFile: h5fopen_f unknown access mode: '//trim(m))
error stop 'unknown access mode'
endif
call h5pclose_f(plist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_openFile: h5pclose_f')
if(hdferr < 0) error stop 'HDF5 error'
end function HDF5_openFile
@ -166,7 +165,7 @@ subroutine HDF5_closeFile(fileHandle)
integer :: hdferr
call h5fclose_f(fileHandle,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_closeFile: h5fclose_f')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_closeFile
@ -185,19 +184,19 @@ integer(HID_T) function HDF5_addGroup(fileHandle,groupName)
!-------------------------------------------------------------------------------------------------
! creating a property list for data access properties
call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_addGroup: h5pcreate_f ('//trim(groupName)//')')
if(hdferr < 0) error stop 'HDF5 error'
!-------------------------------------------------------------------------------------------------
! setting I/O mode to collective
#ifdef PETSc
call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_addGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
if(hdferr < 0) error stop 'HDF5 error'
#endif
!-------------------------------------------------------------------------------------------------
! Create group
call h5gcreate_f(fileHandle, trim(groupName), HDF5_addGroup, hdferr, OBJECT_NAMELEN_DEFAULT_F,gapl_id = aplist_id)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_addGroup: h5gcreate_f ('//trim(groupName)//')')
if(hdferr < 0) error stop 'HDF5 error'
call h5pclose_f(aplist_id,hdferr)
@ -221,19 +220,19 @@ integer(HID_T) function HDF5_openGroup(fileHandle,groupName)
!-------------------------------------------------------------------------------------------------
! creating a property list for data access properties
call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_openGroup: h5pcreate_f ('//trim(groupName)//')')
if(hdferr < 0) error stop 'HDF5 error'
!-------------------------------------------------------------------------------------------------
! setting I/O mode to collective
#ifdef PETSc
call h5pget_all_coll_metadata_ops_f(aplist_id, is_collective, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_openGroup: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
if(hdferr < 0) error stop 'HDF5 error'
#endif
!-------------------------------------------------------------------------------------------------
! opening the group
call h5gopen_f(fileHandle, trim(groupName), HDF5_openGroup, hdferr, gapl_id = aplist_id)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_openGroup: h5gopen_f ('//trim(groupName)//')')
if(hdferr < 0) error stop 'HDF5 error'
call h5pclose_f(aplist_id,hdferr)
@ -250,7 +249,7 @@ subroutine HDF5_closeGroup(group_id)
integer :: hdferr
call h5gclose_f(group_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_closeGroup: h5gclose_f (el is ID)', el = int(group_id))
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_closeGroup
@ -273,11 +272,11 @@ logical function HDF5_objectExists(loc_id,path)
endif
call h5lexists_f(loc_id, p, HDF5_objectExists, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_objectExists: h5oexists_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
if(HDF5_objectExists) then
call h5oexists_by_name_f(loc_id, p, HDF5_objectExists, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_objectExists: h5oexists_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
end function HDF5_objectExists
@ -309,25 +308,25 @@ subroutine HDF5_addAttribute_str(loc_id,attrLabel,attrValue,path)
ptr(1) = c_loc(attrValue_)
call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5screate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5tcopy_f(H5T_STRING, type_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5tcopy_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5aexists_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5adelete_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),type_id,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5acreate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5awrite_f(attr_id, type_id, c_loc(ptr(1)), hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5awrite_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5aclose_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(type_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5tclose_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_str: h5sclose_f')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_addAttribute_str
@ -354,21 +353,21 @@ subroutine HDF5_addAttribute_int(loc_id,attrLabel,attrValue,path)
endif
call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5screate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5aexists_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5adelete_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_INTEGER,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5acreate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5awrite_f(attr_id, H5T_NATIVE_INTEGER, attrValue, int([1],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5awrite_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5tclose_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int: h5sclose_f')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_addAttribute_int
@ -395,21 +394,21 @@ subroutine HDF5_addAttribute_real(loc_id,attrLabel,attrValue,path)
endif
call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5screate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5aexists_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5adelete_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_DOUBLE,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5acreate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5awrite_f(attr_id, H5T_NATIVE_DOUBLE, attrValue, int([1],HSIZE_T), hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5awrite_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5tclose_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_real: h5sclose_f')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_addAttribute_real
@ -439,21 +438,21 @@ subroutine HDF5_addAttribute_int_array(loc_id,attrLabel,attrValue,path)
array_size = size(attrValue,kind=HSIZE_T)
call h5screate_simple_f(1, array_size, space_id, hdferr, array_size)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5screate_simple_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5adelete_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_INTEGER,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5acreate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5awrite_f(attr_id, H5T_NATIVE_INTEGER, attrValue, array_size, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5awrite_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5tclose_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5sclose_f')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_addAttribute_int_array
@ -483,21 +482,21 @@ subroutine HDF5_addAttribute_real_array(loc_id,attrLabel,attrValue,path)
array_size = size(attrValue,kind=HSIZE_T)
call h5screate_simple_f(1, array_size, space_id, hdferr, array_size)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5screate_simple_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
if (attrExists) then
call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5adelete_by_name_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_DOUBLE,space_id,attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5acreate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5awrite_f(attr_id, H5T_NATIVE_DOUBLE, attrValue, array_size, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5awrite_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5aclose_f(attr_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5tclose_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(space_id,hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_addAttribute_int_array: h5sclose_f')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_addAttribute_real_array
@ -513,13 +512,13 @@ subroutine HDF5_setLink(loc_id,target_name,link_name)
logical :: linkExists
call h5lexists_f(loc_id, link_name,linkExists, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_setLink: h5lexists_soft_f ('//trim(link_name)//')')
if(hdferr < 0) error stop 'HDF5 error'
if (linkExists) then
call h5ldelete_f(loc_id,link_name, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_setLink: h5ldelete_soft_f ('//trim(link_name)//')')
if(hdferr < 0) error stop 'HDF5 error'
endif
call h5lcreate_soft_f(target_name, loc_id, link_name, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg = 'HDF5_setLink: h5lcreate_soft_f ('//trim(target_name)//' '//trim(link_name)//')')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine HDF5_setLink
@ -558,7 +557,7 @@ subroutine HDF5_read_real1(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real1: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -598,7 +597,7 @@ subroutine HDF5_read_real2(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real2: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -638,7 +637,7 @@ subroutine HDF5_read_real3(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real3: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -678,7 +677,7 @@ subroutine HDF5_read_real4(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real4: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -718,7 +717,7 @@ subroutine HDF5_read_real5(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real5: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -758,7 +757,7 @@ subroutine HDF5_read_real6(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real6: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -798,7 +797,7 @@ subroutine HDF5_read_real7(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_real7: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -840,7 +839,7 @@ subroutine HDF5_read_int1(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int1: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -880,7 +879,7 @@ subroutine HDF5_read_int2(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int2: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -920,7 +919,7 @@ subroutine HDF5_read_int3(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int3: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -960,7 +959,7 @@ subroutine HDF5_read_int4(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int4: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -1000,7 +999,7 @@ subroutine HDF5_read_int5(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int5: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -1040,7 +1039,7 @@ subroutine HDF5_read_int6(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int6: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -1080,7 +1079,7 @@ subroutine HDF5_read_int7(loc_id,dataset,datasetName,parallel)
call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,totalShape, hdferr,&
file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_read_int7: h5dread_f')
if(hdferr < 0) error stop 'HDF5 error'
call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
@ -1121,7 +1120,7 @@ subroutine HDF5_write_real1(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real1: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1162,7 +1161,7 @@ subroutine HDF5_write_real2(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real2: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1203,7 +1202,7 @@ subroutine HDF5_write_real3(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real3: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1244,7 +1243,7 @@ subroutine HDF5_write_real4(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real4: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1286,7 +1285,7 @@ subroutine HDF5_write_real5(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real5: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1327,7 +1326,7 @@ subroutine HDF5_write_real6(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real6: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1368,7 +1367,7 @@ subroutine HDF5_write_real7(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_real7: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1410,7 +1409,7 @@ subroutine HDF5_write_int1(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int1: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1451,7 +1450,7 @@ subroutine HDF5_write_int2(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int2: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1492,7 +1491,7 @@ subroutine HDF5_write_int3(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int3: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1533,7 +1532,7 @@ subroutine HDF5_write_int4(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int4: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1574,7 +1573,7 @@ subroutine HDF5_write_int5(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int5: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1615,7 +1614,7 @@ subroutine HDF5_write_int6(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int6: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1656,7 +1655,7 @@ subroutine HDF5_write_int7(loc_id,dataset,datasetName,parallel)
if (product(totalShape) /= 0) then
call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_int7: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1732,7 +1731,7 @@ subroutine HDF5_write_rotation(loc_id,dataset,datasetName,parallel)
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
call h5dwrite_f(dset_id, z_id,dataset_asArray(4,:),int(totalShape,HSIZE_T), hdferr,&
file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (hdferr < 0) call IO_error(1,ext_msg='HDF5_write_rotation: h5dwrite_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
@ -1765,7 +1764,7 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
!-------------------------------------------------------------------------------------------------
! creating a property list for transfer properties (is collective for MPI)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5pcreate_f')
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
readSize = 0
@ -1773,9 +1772,9 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
#ifdef PETSc
if (parallel) then
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5pset_dxpl_mpio_f')
if(hdferr < 0) error stop 'HDF5 error'
call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
if (ierr /= 0) call IO_error(894,ext_msg='initialize_read: MPI_allreduce')
if (ierr /= 0) error stop 'MPI error'
endif
#endif
myStart = int(0,HSIZE_T)
@ -1785,28 +1784,28 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
!--------------------------------------------------------------------------------------------------
! create dataspace in memory (local shape)
call h5screate_simple_f(size(localShape), localShape, memspace_id, hdferr, localShape)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5screate_simple_f/memspace_id')
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! creating a property list for IO and set it to collective
call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5pcreate_f')
if(hdferr < 0) error stop 'HDF5 error'
#ifdef PETSc
call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5pset_all_coll_metadata_ops_f')
if(hdferr < 0) error stop 'HDF5 error'
#endif
!--------------------------------------------------------------------------------------------------
! open the dataset in the file and get the space ID
call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5dopen_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5dget_space_f(dset_id, filespace_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5dget_space_f')
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! select a hyperslab (the portion of the current process) in the file
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, localShape, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_read: h5sselect_hyperslab_f')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine initialize_read
@ -1820,15 +1819,15 @@ subroutine finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id
integer :: hdferr
call h5pclose_f(plist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: plist_id')
if(hdferr < 0) error stop 'HDF5 error'
call h5pclose_f(aplist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: aplist_id')
if(hdferr < 0) error stop 'HDF5 error'
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: h5dclose_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(filespace_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: h5sclose_f/filespace_id')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(memspace_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_read: h5sclose_f/memspace_id')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine finalize_read
@ -1859,11 +1858,11 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
!-------------------------------------------------------------------------------------------------
! creating a property list for transfer properties (is collective when reading in parallel)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5pcreate_f')
if(hdferr < 0) error stop 'HDF5 error'
#ifdef PETSc
if (parallel) then
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5pset_dxpl_mpio_f')
if(hdferr < 0) error stop 'HDF5 error'
endif
#endif
@ -1874,7 +1873,7 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
#ifdef PETSc
if (parallel) then
call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
if (ierr /= 0) call IO_error(894,ext_msg='initialize_write: MPI_allreduce')
if (ierr /= 0) error stop 'MPI error'
endif
#endif
myStart = int(0,HSIZE_T)
@ -1884,16 +1883,16 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
!--------------------------------------------------------------------------------------------------
! create dataspace in memory (local shape) and in file (global shape)
call h5screate_simple_f(size(myShape), myShape, memspace_id, hdferr, myShape)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5dopen_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5screate_simple_f(size(totalShape), totalShape, filespace_id, hdferr, totalShape)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5dget_space_f')
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! create dataset in the file and select a hyperslab from it (the portion of the current process)
call h5dcreate_f(loc_id, trim(datasetName), datatype, filespace_id, dset_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5dcreate_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, myShape, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='initialize_write: h5sselect_hyperslab_f')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine initialize_write
@ -1907,13 +1906,13 @@ subroutine finalize_write(plist_id, dset_id, filespace_id, memspace_id)
integer :: hdferr
call h5pclose_f(plist_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: plist_id')
if(hdferr < 0) error stop 'HDF5 error'
call h5dclose_f(dset_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: h5dclose_f')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(filespace_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: h5sclose_f/filespace_id')
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(memspace_id, hdferr)
if (hdferr < 0) call IO_error(1,ext_msg='finalize_write: h5sclose_f/memspace_id')
if(hdferr < 0) error stop 'HDF5 error'
end subroutine finalize_write
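
For orientation, a hypothetical caller of the reworked module; with this commit, any negative hdferr inside these routines terminates the program via error stop 'HDF5 error' instead of raising IO_error. This is a sketch under stated assumptions: HID_T is taken from the HDF5 Fortran module, and the file name and access mode are made up.

    program demo
      use HDF5, only: HID_T           ! kind of HDF5 object identifiers
      use HDF5_utilities
      implicit none
      integer(HID_T) :: fileHandle

      call HDF5_utilities_init                         ! aborts with error stop 'HDF5 error' on failure
      fileHandle = HDF5_openFile('results.hdf5','w')   ! 'w' creates/truncates the file
      call HDF5_closeFile(fileHandle)
    end program demo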

View File

@ -367,12 +367,8 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
! file handling errors
case (100)
msg = 'could not open file:'
case (101)
msg = 'write error for file:'
case (102)
msg = 'could not read file:'
case (106)
msg = 'working directory does not exist:'
!--------------------------------------------------------------------------------------------------
! file parsing errors
@ -395,14 +391,10 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
msg = 'hex lattice structure with invalid c/a ratio'
case (132)
msg = 'trans_lattice_structure not possible'
case (133)
msg = 'transformed hex lattice structure with invalid c/a ratio'
case (134)
msg = 'negative lattice parameter'
case (135)
msg = 'zero entry on stiffness diagonal'
case (136)
msg = 'zero entry on stiffness diagonal for transformed phase'
case (137)
msg = 'not defined for lattice structure'
case (138)
@ -431,8 +423,6 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
! material error messages and related messages in mesh
case (150)
msg = 'index out of bounds'
case (151)
msg = 'material has no constituents'
case (153)
msg = 'sum of phase fractions differs from 1'
case (155)
@ -463,10 +453,6 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
!--------------------------------------------------------------------------------------------------
! math errors
case (400)
msg = 'matrix inversion error'
case (401)
msg = 'error in Eigenvalue calculation'
case (402)
msg = 'invalid orientation specified'
@ -499,10 +485,6 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
!-------------------------------------------------------------------------------------------------
! errors related to the grid solver
case (809)
msg = 'initializing FFTW'
case (810)
msg = 'FFTW plan creation'
case (831)
msg = 'mask consistency violated in grid load case'
case (832)
@ -587,45 +569,20 @@ subroutine IO_warning(warning_ID,el,ip,g,ext_msg)
character(len=pStringLen) :: formatString
select case (warning_ID)
case (1)
msg = 'unknown key'
case (34)
msg = 'invalid restart increment given'
case (35)
msg = 'could not get $DAMASK_NUM_THREADS'
case (40)
msg = 'found spectral solver parameter'
case (42)
msg = 'parameter has no effect'
case (43)
msg = 'main diagonal of C66 close to zero'
case (47)
msg = 'no valid parameter for FFTW, using FFTW_PATIENT'
case (50)
msg = 'not all available slip system families are defined'
case (51)
msg = 'not all available twin system families are defined'
case (52)
msg = 'not all available parameters are defined'
case (53)
msg = 'not all available transformation system families are defined'
case (101)
msg = 'crystallite debugging off'
case (201)
msg = 'position not found when parsing line'
case (207)
msg = 'line truncated'
case (600)
msg = 'crystallite responds elastically'
case (601)
msg = 'stiffness close to zero'
case (650)
msg = 'polar decomposition failed'
case (700)
msg = 'unknown crystal symmetry'
case (709)
msg = 'read only the first document'
case (850)
msg = 'max number of cut back exceeded, terminating'
case default

View File

@ -84,6 +84,8 @@ subroutine discretization_grid_init(restart)
PETSC_COMM_WORLD, &
z, & ! domain grid size along z
z_offset) ! domain grid offset along z
if(z==0_C_INTPTR_T) call IO_error(894, ext_msg='Cannot distribute MPI processes')
grid3 = int(z)
grid3Offset = int(z_offset)
size3 = geomSize(3)*real(grid3,pReal) /real(grid(3),pReal)

View File

@ -95,10 +95,10 @@ subroutine grid_damage_spectral_init
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-damage_snes_type newtonls -damage_snes_mf &
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-damage_snes_type newtonls -damage_snes_mf &
&-damage_snes_ksp_ew -damage_ksp_type fgmres',ierr)
CHKERRQ(ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------

View File

@ -13,6 +13,7 @@ module grid_mech_FEM
use prec
use parallelization
use DAMASK_interface
use IO
use HDF5_utilities
use math
use spectral_utilities
@ -141,10 +142,10 @@ subroutine grid_mech_FEM_init
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type newtonls -mech_ksp_type fgmres &
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type newtonls -mech_ksp_type fgmres &
&-mech_ksp_max_it 25 -mech_pc_type ml -mech_mg_levels_ksp_type chebyshev',ierr)
CHKERRQ(ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------

View File

@ -13,6 +13,7 @@ module grid_mech_spectral_basic
use prec
use parallelization
use DAMASK_interface
use IO
use HDF5_utilities
use math
use spectral_utilities
@ -140,9 +141,9 @@ subroutine grid_mech_spectral_basic_init
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type ngmres',ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type ngmres',ierr)
CHKERRQ(ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------

View File

@ -13,6 +13,7 @@ module grid_mech_spectral_polarisation
use prec
use parallelization
use DAMASK_interface
use IO
use HDF5_utilities
use math
use spectral_utilities
@ -158,9 +159,9 @@ subroutine grid_mech_spectral_polarisation_init
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type ngmres',ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type ngmres',ierr)
CHKERRQ(ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------

View File

@ -89,9 +89,9 @@ subroutine grid_thermal_spectral_init
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type ngmres',ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type ngmres',ierr)
CHKERRQ(ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------

View File

@ -214,11 +214,11 @@ subroutine spectral_utilities_init
num_grid => config_numerics%get('grid',defaultVal=emptyDict)
call PETScOptionsClear(PETSC_NULL_OPTIONS,ierr)
call PetscOptionsClear(PETSC_NULL_OPTIONS,ierr)
CHKERRQ(ierr)
if(debugPETSc) call PETScOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr)
if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr)
CHKERRQ(ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,&
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,&
num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)
@ -313,12 +313,12 @@ subroutine spectral_utilities_init
tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
tensorField_real, tensorField_fourier, & ! input data, output data
PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planer precision
if (.not. C_ASSOCIATED(planTensorForth)) call IO_error(810, ext_msg='planTensorForth')
if (.not. C_ASSOCIATED(planTensorForth)) error stop 'FFTW error'
planTensorBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order
tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
tensorField_fourier,tensorField_real, & ! input data, output data
PETSC_COMM_WORLD, FFTW_planner_flag) ! all processors, planer precision
if (.not. C_ASSOCIATED(planTensorBack)) call IO_error(810, ext_msg='planTensorBack')
if (.not. C_ASSOCIATED(planTensorBack)) error stop 'FFTW error'
!--------------------------------------------------------------------------------------------------
! vector MPI fftw plans
@ -326,12 +326,12 @@ subroutine spectral_utilities_init
vecSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK,&! no. of transforms, default iblock and oblock
vectorField_real, vectorField_fourier, & ! input data, output data
PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planer precision
if (.not. C_ASSOCIATED(planVectorForth)) call IO_error(810, ext_msg='planVectorForth')
if (.not. C_ASSOCIATED(planVectorForth)) error stop 'FFTW error'
planVectorBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order
vecSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, & ! no. of transforms, default iblock and oblock
vectorField_fourier,vectorField_real, & ! input data, output data
PETSC_COMM_WORLD, FFTW_planner_flag) ! all processors, planer precision
if (.not. C_ASSOCIATED(planVectorBack)) call IO_error(810, ext_msg='planVectorBack')
if (.not. C_ASSOCIATED(planVectorBack)) error stop 'FFTW error'
!--------------------------------------------------------------------------------------------------
! scalar MPI fftw plans
@ -339,12 +339,12 @@ subroutine spectral_utilities_init
scalarSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
scalarField_real, scalarField_fourier, & ! input data, output data
PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planer precision
if (.not. C_ASSOCIATED(planScalarForth)) call IO_error(810, ext_msg='planScalarForth')
if (.not. C_ASSOCIATED(planScalarForth)) error stop 'FFTW error'
planScalarBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order, no. of transforms
scalarSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
scalarField_fourier,scalarField_real, & ! input data, output data
PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planer precision
if (.not. C_ASSOCIATED(planScalarBack)) call IO_error(810, ext_msg='planScalarBack')
if (.not. C_ASSOCIATED(planScalarBack)) error stop 'FFTW error'
!--------------------------------------------------------------------------------------------------
! calculation of discrete angular frequencies, ordered as in FFTW (wrap around)
@ -603,10 +603,9 @@ real(pReal) function utilities_divergenceRMS()
enddo; enddo
if(grid(1) == 1) utilities_divergenceRMS = utilities_divergenceRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
call MPI_Allreduce(MPI_IN_PLACE,utilities_divergenceRMS,1,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
if(ierr /=0) call IO_error(894, ext_msg='utilities_divergenceRMS')
if(ierr /=0) error stop 'MPI error'
utilities_divergenceRMS = sqrt(utilities_divergenceRMS) * wgt ! RMS in real space calculated with Parsevals theorem from Fourier space
end function utilities_divergenceRMS
@ -664,7 +663,7 @@ real(pReal) function utilities_curlRMS()
enddo; enddo
call MPI_Allreduce(MPI_IN_PLACE,utilities_curlRMS,1,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
if(ierr /=0) call IO_error(894, ext_msg='utilities_curlRMS')
if(ierr /=0) error stop 'MPI error'
utilities_curlRMS = sqrt(utilities_curlRMS) * wgt
if(grid(1) == 1) utilities_curlRMS = utilities_curlRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
@ -713,7 +712,6 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
allocate(s_reduced,mold = c_reduced)
call math_invert(s_reduced, errmatinv, c_reduced) ! invert reduced stiffness
if (any(IEEE_is_NaN(s_reduced))) errmatinv = .true.
if (errmatinv) call IO_error(error_ID=400,ext_msg='utilities_maskedCompliance')
!--------------------------------------------------------------------------------------------------
! check if inversion was successful
@ -725,7 +723,7 @@ function utilities_maskedCompliance(rot_BC,mask_stress,C)
write(IO_STDOUT,trim(formatString),advance='no') ' C * S (load) ', &
transpose(matmul(c_reduced,s_reduced))
write(IO_STDOUT,trim(formatString),advance='no') ' S (load) ', transpose(s_reduced)
if(errmatinv) call IO_error(error_ID=400,ext_msg='utilities_maskedCompliance')
if(errmatinv) error stop 'matrix inversion error'
endif
temp99_real = reshape(unpack(reshape(s_reduced,[size_reduced**2]),reshape(mask,[81]),0.0_pReal),[9,9])
else
@ -857,20 +855,21 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
valueAndRank = [dPdF_norm_max,real(worldrank,pReal)]
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1, MPI_2DOUBLE_PRECISION, MPI_MAXLOC, PETSC_COMM_WORLD, ierr)
if (ierr /= 0) call IO_error(894, ext_msg='MPI_Allreduce max')
if (ierr /= 0) error stop 'MPI error'
call MPI_Bcast(dPdF_max,81,MPI_DOUBLE,int(valueAndRank(2)),PETSC_COMM_WORLD, ierr)
if (ierr /= 0) call IO_error(894, ext_msg='MPI_Bcast max')
if (ierr /= 0) error stop 'MPI error'
valueAndRank = [dPdF_norm_min,real(worldrank,pReal)]
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1, MPI_2DOUBLE_PRECISION, MPI_MINLOC, PETSC_COMM_WORLD, ierr)
if (ierr /= 0) call IO_error(894, ext_msg='MPI_Allreduce min')
if (ierr /= 0) error stop 'MPI error'
call MPI_Bcast(dPdF_min,81,MPI_DOUBLE,int(valueAndRank(2)),PETSC_COMM_WORLD, ierr)
if (ierr /= 0) call IO_error(894, ext_msg='MPI_Bcast min')
if (ierr /= 0) error stop 'MPI error'
C_minmaxAvg = 0.5_pReal*(dPdF_max + dPdF_min)
C_volAvg = sum(sum(homogenization_dPdF,dim=6),dim=5)
call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
if (ierr /= 0) error stop 'MPI error'
C_volAvg = C_volAvg * wgt
@ -1035,7 +1034,7 @@ subroutine utilities_updateCoords(F)
! average F
if (grid3Offset == 0) Favg = real(tensorField_fourier(1:3,1:3,1,1,1),pReal)*wgt
call MPI_Bcast(Favg,9,MPI_DOUBLE,0,PETSC_COMM_WORLD,ierr)
if(ierr /=0) call IO_error(894, ext_msg='update_IPcoords/MPI_Bcast')
if(ierr /=0) error stop 'MPI error'
!--------------------------------------------------------------------------------------------------
! pad cell center fluctuations along z-direction (needed when running MPI simulation)
@ -1046,19 +1045,19 @@ subroutine utilities_updateCoords(F)
! send bottom layer to process below
call MPI_Isend(IPfluct_padded(:,:,:,2), c,MPI_DOUBLE,rank_b,0,PETSC_COMM_WORLD,r,ierr)
if(ierr /=0) call IO_error(894, ext_msg='update_IPcoords/MPI_Isend')
if(ierr /=0) error stop 'MPI error'
call MPI_Irecv(IPfluct_padded(:,:,:,grid3+2),c,MPI_DOUBLE,rank_t,0,PETSC_COMM_WORLD,r,ierr)
if(ierr /=0) call IO_error(894, ext_msg='update_IPcoords/MPI_Irecv')
if(ierr /=0) error stop 'MPI error'
call MPI_Wait(r,s,ierr)
if(ierr /=0) call IO_error(894, ext_msg='update_IPcoords/MPI_Wait')
if(ierr /=0) error stop 'MPI error'
! send top layer to process above
call MPI_Isend(IPfluct_padded(:,:,:,grid3+1),c,MPI_DOUBLE,rank_t,0,PETSC_COMM_WORLD,r,ierr)
if(ierr /=0) call IO_error(894, ext_msg='update_IPcoords/MPI_Isend')
if(ierr /=0) error stop 'MPI error'
call MPI_Irecv(IPfluct_padded(:,:,:,1), c,MPI_DOUBLE,rank_b,0,PETSC_COMM_WORLD,r,ierr)
if(ierr /=0) call IO_error(894, ext_msg='update_IPcoords/MPI_Irecv')
if(ierr /=0) error stop 'MPI error'
call MPI_Wait(r,s,ierr)
if(ierr /=0) call IO_error(894, ext_msg='update_IPcoords/MPI_Wait')
if(ierr /=0) error stop 'MPI error'
!--------------------------------------------------------------------------------------------------
! calculate nodal displacements

View File

@ -588,9 +588,6 @@ function lattice_characteristicShear_Twin(Ntwin,structure,CoverA) result(charact
4 &
],[HEX_NTWIN]) ! indicator to formulas below
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_characteristicShear_Twin: '//trim(structure))
a = 0
myFamilies: do f = 1,size(Ntwin,1)
mySystems: do s = 1,Ntwin(f)
@ -636,9 +633,6 @@ function lattice_C66_twin(Ntwin,C66,structure,CoverA)
type(rotation) :: R
integer :: i
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_C66_twin: '//trim(structure))
select case(structure)
case('fcc')
coordinateSystem = buildCoordinateSystem(Ntwin,FCC_NSLIPSYSTEM,FCC_SYSTEMTWIN,&
@ -966,9 +960,6 @@ function lattice_interaction_SlipBySlip(Nslip,interactionValues,structure) resul
],shape(BCT_INTERACTIONSLIPSLIP))
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_interaction_SlipBySlip: '//trim(structure))
select case(structure)
case('fcc')
interactionTypes = FCC_INTERACTIONSLIPSLIP
@ -1070,9 +1061,6 @@ function lattice_interaction_TwinByTwin(Ntwin,interactionValues,structure) resul
20,20,20,20,20,20, 19,19,19,19,19,19, 18,18,18,18,18,18, 17,17,17,17,17,16 &
],shape(HEX_INTERACTIONTWINTWIN)) !< Twin-twin interaction types for hex
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_interaction_TwinByTwin: '//trim(structure))
select case(structure)
case('fcc')
interactionTypes = FCC_INTERACTIONTWINTWIN
@ -1122,9 +1110,6 @@ function lattice_interaction_TransByTrans(Ntrans,interactionValues,structure) re
2,2,2,2,2,2,2,2,2,1,1,1 &
],shape(FCC_INTERACTIONTRANSTRANS)) !< Trans-trans interaction types for fcc
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_interaction_TransByTrans: '//trim(structure))
if(structure == 'fcc') then
interactionTypes = FCC_INTERACTIONTRANSTRANS
NtransMax = FCC_NTRANSSYSTEM
@ -1252,9 +1237,6 @@ function lattice_interaction_SlipByTwin(Nslip,Ntwin,interactionValues,structure)
!
],shape(HEX_INTERACTIONSLIPTWIN)) !< Slip-twin interaction types for hex
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_interaction_SlipByTwin: '//trim(structure))
select case(structure)
case('fcc')
interactionTypes = FCC_INTERACTIONSLIPTWIN
@ -1316,9 +1298,6 @@ function lattice_interaction_SlipByTrans(Nslip,Ntrans,interactionValues,structur
4,4,4,4,4,4,4,4,4,4,4,4 &
],shape(FCC_INTERACTIONSLIPTRANS)) !< Slip-trans interaction types for fcc
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_interaction_SlipByTrans: '//trim(structure))
select case(structure)
case('fcc')
interactionTypes = FCC_INTERACTIONSLIPTRANS
@ -1386,9 +1365,6 @@ function lattice_interaction_TwinBySlip(Ntwin,Nslip,interactionValues,structure)
4, 4, 4, 8, 8, 8, 12,12,12, 16,16,16,16,16,16, 20,20,20,20,20,20,20,20,20,20,20,20, 24,24,24,24,24,24 &
],shape(HEX_INTERACTIONTWINSLIP)) !< Twin-slip interaction types for hex
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_interaction_TwinBySlip: '//trim(structure))
select case(structure)
case('fcc')
interactionTypes = FCC_INTERACTIONTWINSLIP
@ -1427,9 +1403,6 @@ function lattice_SchmidMatrix_slip(Nslip,structure,cOverA) result(SchmidMatrix)
integer, dimension(:), allocatable :: NslipMax
integer :: i
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_SchmidMatrix_slip: '//trim(structure))
select case(structure)
case('fcc')
NslipMax = FCC_NSLIPSYSTEM
@ -1480,9 +1453,6 @@ function lattice_SchmidMatrix_twin(Ntwin,structure,cOverA) result(SchmidMatrix)
integer, dimension(:), allocatable :: NtwinMax
integer :: i
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_SchmidMatrix_twin: '//trim(structure))
select case(structure)
case('fcc')
NtwinMax = FCC_NTWINSYSTEM
@ -1560,9 +1530,6 @@ function lattice_SchmidMatrix_cleavage(Ncleavage,structure,cOverA) result(Schmid
integer, dimension(:), allocatable :: NcleavageMax
integer :: i
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_SchmidMatrix_cleavage: '//trim(structure))
select case(structure)
case('ort')
NcleavageMax = ORT_NCLEAVAGESYSTEM
@ -1662,9 +1629,6 @@ function lattice_labels_slip(Nslip,structure) result(labels)
real(pReal), dimension(:,:), allocatable :: slipSystems
integer, dimension(:), allocatable :: NslipMax
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_labels_slip: '//trim(structure))
select case(structure)
case('fcc')
NslipMax = FCC_NSLIPSYSTEM
@ -1706,9 +1670,6 @@ function lattice_applyLatticeSymmetry33(T,structure) result(T_sym)
T_sym = 0.0_pReal
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_applyLatticeSymmetry33: '//trim(structure))
select case(structure)
case('iso','fcc','bcc')
do k=1,3
@ -1744,9 +1705,6 @@ function applyLatticeSymmetryC66(C66,structure) result(C66_sym)
C66_sym = 0.0_pReal
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='applyLatticeSymmetryC66: '//trim(structure))
select case(structure)
case ('iso')
do k=1,3
@ -1824,9 +1782,6 @@ function lattice_labels_twin(Ntwin,structure) result(labels)
real(pReal), dimension(:,:), allocatable :: twinSystems
integer, dimension(:), allocatable :: NtwinMax
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='lattice_labels_twin: '//trim(structure))
select case(structure)
case('fcc')
NtwinMax = FCC_NTWINSYSTEM
@ -1913,9 +1868,6 @@ function coordinateSystem_slip(Nslip,structure,cOverA) result(coordinateSystem)
real(pReal), dimension(:,:), allocatable :: slipSystems
integer, dimension(:), allocatable :: NslipMax
if (len_trim(structure) /= 3) &
call IO_error(137,ext_msg='coordinateSystem_slip: '//trim(structure))
select case(structure)
case('fcc')
NslipMax = FCC_NSLIPSYSTEM
@ -2257,11 +2209,11 @@ function equivalent_nu(C,assumption) result(nu)
/ 9.0_pReal
elseif(IO_lc(assumption) == 'reuss') then
call math_invert(S,error,C)
if(error) call IO_error(0)
if(error) error stop 'matrix inversion failed'
K = 1.0_pReal &
/ (S(1,1)+S(2,2)+S(3,3) +2.0_pReal*(S(1,2)+S(2,3)+S(1,3)))
else
call IO_error(0)
error stop 'invalid assumption'
K = 0.0_pReal
endif
@ -2289,11 +2241,11 @@ function equivalent_mu(C,assumption) result(mu)
/ 15.0_pReal
elseif(IO_lc(assumption) == 'reuss') then
call math_invert(S,error,C)
if(error) call IO_error(0)
if(error) error stop 'matrix inversion failed'
mu = 15.0_pReal &
/ (4.0_pReal*(S(1,1)+S(2,2)+S(3,3)) -4.0_pReal*(S(1,2)+S(2,3)+S(1,3)) +3.0_pReal*(S(4,4)+S(5,5)+S(6,6)))
else
call IO_error(0)
error stop 'invalid assumption'
mu = 0.0_pReal
endif
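Note on the two averages evaluated in equivalent_nu and equivalent_mu: K and mu follow either the Voigt bound (stiffness average) or the Reuss bound (compliance average). In index notation these are the standard relations; the final identity for nu is the usual isotropic conversion and is stated here only as an assumption, since that line lies outside this excerpt:

$$K_V = \tfrac{1}{9}\bigl(C_{11}+C_{22}+C_{33}+2(C_{12}+C_{23}+C_{13})\bigr), \qquad K_R = \bigl(S_{11}+S_{22}+S_{33}+2(S_{12}+S_{23}+S_{13})\bigr)^{-1}$$
$$\mu_V = \tfrac{1}{15}\bigl(C_{11}+C_{22}+C_{33}-(C_{12}+C_{23}+C_{13})+3(C_{44}+C_{55}+C_{66})\bigr)$$
$$\mu_R = 15\,\bigl(4(S_{11}+S_{22}+S_{33})-4(S_{12}+S_{23}+S_{13})+3(S_{44}+S_{55}+S_{66})\bigr)^{-1}$$
$$\nu = \frac{3K-2\mu}{2(3K+\mu)}$$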

View File

@ -499,7 +499,7 @@ function math_invSym3333(A)
call dgetrf(6,6,temp66,6,ipiv6,ierr_i)
call dgetri(6,temp66,6,ipiv6,work,size(work,1),ierr_f)
if (ierr_i /= 0 .or. ierr_f /= 0) then
call IO_error(400, ext_msg = 'math_invSym3333')
error stop 'matrix inversion error'
else
math_invSym3333 = math_66toSym3333(temp66)
endif
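Note: the dgetrf/dgetri pair factorizes and then inverts the 6x6 matrix form in place, and any nonzero INFO now aborts via error stop. The following standalone sketch shows the same call sequence under that convention; the program name and the diagonal test matrix are illustrative only and not part of the patch (link against LAPACK):

program invert66_demo
  implicit none
  integer, parameter :: pReal = selected_real_kind(15)   ! assumed double-precision working kind
  real(pReal), dimension(6,6) :: temp66
  real(pReal), dimension(6*6) :: work
  integer,     dimension(6)   :: ipiv6
  integer :: ierr_i, ierr_f, i

  temp66 = 0.0_pReal
  do i = 1, 6
    temp66(i,i) = real(i,pReal)                           ! simple invertible test matrix
  enddo

  call dgetrf(6,6,temp66,6,ipiv6,ierr_i)                  ! LU factorization
  call dgetri(6,temp66,6,ipiv6,work,size(work,1),ierr_f)  ! inversion from the LU factors
  if (ierr_i /= 0 .or. ierr_f /= 0) error stop 'matrix inversion error'

  print '(a,6(f6.3,1x))', ' diagonal of inverse: ', (temp66(i,i), i=1,6)
end program invert66_demo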
@ -1200,8 +1200,8 @@ subroutine selfTest
if(any(dNeq(math_exp33(math_I3,0),math_I3))) &
error stop 'math_exp33(math_I3,1)'
if(any(dNeq(math_exp33(math_I3,256),exp(1.0_pReal)*math_I3))) &
error stop 'math_exp33(math_I3,256)'
if(any(dNeq(math_exp33(math_I3,128),exp(1.0_pReal)*math_I3))) &
error stop 'math_exp33(math_I3,128)'
call random_number(v9)
if(any(dNeq(math_33to9(math_9to33(v9)),v9))) &

View File

@ -50,10 +50,17 @@ subroutine parallelization_init
if (threadLevel<MPI_THREAD_FUNNELED) error stop 'MPI library does not support OpenMP'
#endif
call PETScInitializeNoArguments(petsc_err) ! first line in the code according to PETSc manual
call PetscInitializeNoArguments(petsc_err) ! first line in the code according to PETSc manual
CHKERRQ(petsc_err)
call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,err)
#if defined(DEBUG) && defined(__INTEL_COMPILER)
call PetscSetFPTrap(PETSC_FP_TRAP_ON,petsc_err)
#else
call PetscSetFPTrap(PETSC_FP_TRAP_OFF,petsc_err)
#endif
CHKERRQ(petsc_err)
call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,err)
if (err /= 0) error stop 'Could not determine worldrank'
if (worldrank == 0) print'(/,a)', ' <<<+- parallelization init -+>>>'
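Note: the worldrank query above now fails hard if MPI reports a nonzero status. A minimal sketch of that convention outside the PETSc context follows; it assumes an MPI library providing the Fortran mpi module, and the program name and messages are illustrative:

program worldrank_demo
  use MPI
  implicit none
  integer :: err, worldrank

  call MPI_Init(err)
  if (err /= 0) error stop 'Could not initialize MPI'
  call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err)
  if (err /= 0) error stop 'Could not determine worldrank'
  if (worldrank == 0) print '(a)', ' MPI initialized'
  call MPI_Finalize(err)
  if (err /= 0) error stop 'Could not finalize MPI'
end program worldrank_demo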

View File

@ -23,7 +23,7 @@ subroutine quit(stop_id)
call h5close_f(error)
if (error /= 0) write(6,'(a,i5)') ' Error in h5close_f ',error
call PETScFinalize(ierr)
call PetscFinalize(ierr)
CHKERRQ(ierr)
#ifdef _OPENMP

View File

@ -7,6 +7,7 @@
module results
use DAMASK_interface
use parallelization
use IO
use rotations
use HDF5_utilities
#ifdef PETSc
@ -535,33 +536,46 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
integer(SIZE_T) :: type_size_string, type_size_int
integer :: ierr, i
integer :: hdferr, ierr, i
!---------------------------------------------------------------------------------------------------
! compound type: name of phase section + position/index within results array
call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, ierr)
call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), ierr)
call h5tget_size_f(dt_id, type_size_string, ierr)
call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tget_size_f(dt_id, type_size_string, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, ierr)
call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, ierr)
call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,ierr)
call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! create memory types for each component of the compound type
call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, ierr)
call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, ierr)
call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(dt_id, ierr)
call h5tclose_f(dt_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! prepare MPI communication (transparent for non-MPI runs)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
memberOffset = 0
do i=1, size(label)
memberOffset(i,worldrank) = count(phaseAt == i)*size(memberAtLocal,2) ! number of points/instance of this process
@ -572,14 +586,14 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
!--------------------------------------------------------------------------------------------------
! MPI settings and communication
#ifdef PETSc
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5pset_dxpl_mpio_f')
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_constituent: MPI_allreduce/writeSize')
if(ierr /= 0) error stop 'MPI error'
call MPI_allreduce(MPI_IN_PLACE,memberOffset,size(memberOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_constituent: MPI_allreduce/memberOffset')
if(ierr /= 0) error stop 'MPI error'
#endif
myShape = int([size(phaseAt,1),writeSize(worldrank)], HSIZE_T)
@ -588,14 +602,14 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
!--------------------------------------------------------------------------------------------------
! create dataspace in memory (local shape = hyperslab) and in file (global shape)
call h5screate_simple_f(2,myShape,memspace_id,ierr,myShape)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5screate_simple_f/memspace_id')
call h5screate_simple_f(2,myShape,memspace_id,hdferr,myShape)
if(hdferr < 0) error stop 'HDF5 error'
call h5screate_simple_f(2,totalShape,filespace_id,ierr,totalShape)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5screate_simple_f/filespace_id')
call h5screate_simple_f(2,totalShape,filespace_id,hdferr,totalShape)
if(hdferr < 0) error stop 'HDF5 error'
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5sselect_hyperslab_f')
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
!---------------------------------------------------------------------------------------------------
! expand phaseAt to consider IPs (is not stored per IP)
@ -611,29 +625,36 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
!--------------------------------------------------------------------------------------------------
! write the components of the compound type individually
call h5pset_preserve_f(plist_id, .TRUE., ierr)
call h5pset_preserve_f(plist_id, .TRUE., hdferr)
if(hdferr < 0) error stop 'HDF5 error'
loc_id = results_openGroup('/mapping')
call h5dcreate_f(loc_id, 'phase', dtype_id, filespace_id, dset_id, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dcreate_f')
call h5dcreate_f(loc_id, 'phase', dtype_id, filespace_id, dset_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5dwrite_f(dset_id, name_id, reshape(label(pack(phaseAtMaterialpoint,.true.)),myShape), &
myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dwrite_f/name_id')
myShape, hdferr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if(hdferr < 0) error stop 'HDF5 error'
call h5dwrite_f(dset_id, position_id, reshape(pack(memberAtGlobal,.true.),myShape), &
myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dwrite_f/position_id')
myShape, hdferr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! close all
call HDF5_closeGroup(loc_id)
call h5pclose_f(plist_id, ierr)
call h5sclose_f(filespace_id, ierr)
call h5sclose_f(memspace_id, ierr)
call h5dclose_f(dset_id, ierr)
call h5tclose_f(dtype_id, ierr)
call h5tclose_f(name_id, ierr)
call h5tclose_f(position_id, ierr)
call h5pclose_f(plist_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(filespace_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(memspace_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5dclose_f(dset_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(dtype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(name_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(position_id, hdferr)
! for backward compatibility
call results_setLink('/mapping/phase','/mapping/cellResults/constituent')
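Note: the compound "Name"/"Position" datatype above is assembled call by call, with every hdferr checked. The standalone sketch below condenses the same HDF5 Fortran calls into a runnable unit; the file name, string length, and dataset name are illustrative assumptions, and no data is written:

program compound_demo
  use HDF5
  implicit none
  integer(HID_T)  :: file_id, dt_id, dtype_id, space_id, dset_id
  integer(SIZE_T) :: type_size_string, type_size_int
  integer(HSIZE_T), dimension(1) :: dims = [1_HSIZE_T]
  integer :: hdferr

  call h5open_f(hdferr)
  if(hdferr < 0) error stop 'HDF5 error'
  call h5fcreate_f('demo.h5',H5F_ACC_TRUNC_F,file_id,hdferr)
  if(hdferr < 0) error stop 'HDF5 error'

! fixed-length string member ("Name")
  call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'
  call h5tset_size_f(dt_id, 64_SIZE_T, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'
  call h5tget_size_f(dt_id, type_size_string, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'
  call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'

! compound type: "Name" followed by "Position"
  call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'
  call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'
  call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'

! one-element dataset of the compound type (no data written in this sketch)
  call h5screate_simple_f(1, dims, space_id, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'
  call h5dcreate_f(file_id, 'mapping_demo', dtype_id, space_id, dset_id, hdferr)
  if(hdferr < 0) error stop 'HDF5 error'

  call h5dclose_f(dset_id, hdferr)
  call h5sclose_f(space_id, hdferr)
  call h5tclose_f(dtype_id, hdferr)
  call h5tclose_f(dt_id, hdferr)
  call h5fclose_f(file_id, hdferr)
  call h5close_f(hdferr)
end program compound_demo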
@ -673,51 +694,64 @@ subroutine results_mapping_homogenization(homogenizationAt,memberAtLocal,label)
integer(SIZE_T) :: type_size_string, type_size_int
integer :: ierr, i
integer :: hdferr, ierr, i
!---------------------------------------------------------------------------------------------------
! compound type: name of homogenization section + position/index within results array
call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, ierr)
call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), ierr)
call h5tget_size_f(dt_id, type_size_string, ierr)
call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tget_size_f(dt_id, type_size_string, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, ierr)
call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, ierr)
call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,ierr)
call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! create memory types for each component of the compound type
call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, ierr)
call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, ierr)
call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(dt_id, ierr)
call h5tclose_f(dt_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! prepare MPI communication (transparent for non-MPI runs)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, ierr)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
memberOffset = 0
do i=1, size(label)
memberOffset(i,worldrank) = count(homogenizationAt == i)*size(memberAtLocal,1) ! number of points/instance of this process
memberOffset(i,worldrank) = count(homogenizationAt == i)*size(memberAtLocal,1) ! number of points/instance of this process
enddo
writeSize = 0
writeSize(worldrank) = size(memberAtLocal) ! total number of points by this process
writeSize(worldrank) = size(memberAtLocal) ! total number of points by this process
!--------------------------------------------------------------------------------------------------
! MPI settings and communication
#ifdef PETSc
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_homogenization: h5pset_dxpl_mpio_f')
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_homogenization: MPI_allreduce/writeSize')
if(ierr /= 0) error stop 'MPI error'
call MPI_allreduce(MPI_IN_PLACE,memberOffset,size(memberOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_homogenization: MPI_allreduce/memberOffset')
if(ierr /= 0) error stop 'MPI error'
#endif
myShape = int([writeSize(worldrank)], HSIZE_T)
@ -726,14 +760,14 @@ subroutine results_mapping_homogenization(homogenizationAt,memberAtLocal,label)
!--------------------------------------------------------------------------------------------------
! create dataspace in memory (local shape = hyperslab) and in file (global shape)
call h5screate_simple_f(1,myShape,memspace_id,ierr,myShape)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_homogenization: h5screate_simple_f/memspace_id')
call h5screate_simple_f(1,myShape,memspace_id,hdferr,myShape)
if(hdferr < 0) error stop 'HDF5 error'
call h5screate_simple_f(1,totalShape,filespace_id,ierr,totalShape)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_homogenization: h5screate_simple_f/filespace_id')
call h5screate_simple_f(1,totalShape,filespace_id,hdferr,totalShape)
if(hdferr < 0) error stop 'HDF5 error'
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_homogenization: h5sselect_hyperslab_f')
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
!---------------------------------------------------------------------------------------------------
! expand homogenizationAt to consider IPs (is not stored per IP)
@ -749,29 +783,36 @@ subroutine results_mapping_homogenization(homogenizationAt,memberAtLocal,label)
!--------------------------------------------------------------------------------------------------
! write the components of the compound type individually
call h5pset_preserve_f(plist_id, .TRUE., ierr)
call h5pset_preserve_f(plist_id, .TRUE., hdferr)
loc_id = results_openGroup('/mapping')
call h5dcreate_f(loc_id, 'homogenization', dtype_id, filespace_id, dset_id, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_homogenization: h5dcreate_f')
call h5dcreate_f(loc_id, 'homogenization', dtype_id, filespace_id, dset_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5dwrite_f(dset_id, name_id, reshape(label(pack(homogenizationAtMaterialpoint,.true.)),myShape), &
myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_homogenization: h5dwrite_f/name_id')
myShape, hdferr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if(hdferr < 0) error stop 'HDF5 error'
call h5dwrite_f(dset_id, position_id, reshape(pack(memberAtGlobal,.true.),myShape), &
myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_homogenization: h5dwrite_f/position_id')
myShape, hdferr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if(hdferr < 0) error stop 'HDF5 error'
!--------------------------------------------------------------------------------------------------
! close all
call HDF5_closeGroup(loc_id)
call h5pclose_f(plist_id, ierr)
call h5sclose_f(filespace_id, ierr)
call h5sclose_f(memspace_id, ierr)
call h5dclose_f(dset_id, ierr)
call h5tclose_f(dtype_id, ierr)
call h5tclose_f(name_id, ierr)
call h5tclose_f(position_id, ierr)
call h5pclose_f(plist_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(filespace_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(memspace_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5dclose_f(dset_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(dtype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(name_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(position_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
! for backward compatibility
call results_setLink('/mapping/homogenization','/mapping/cellResults/materialpoint')
@ -793,263 +834,4 @@ character(len=24) function now()
end function now
!!--------------------------------------------------------------------------------------------------
!!> @brief adds the backward mapping from spatial position and constituent ID to results
!!--------------------------------------------------------------------------------------------------
!subroutine HDF5_backwardMappingPhase(material_phase,phasememberat,phase_name,dataspace_size,mpiOffset,mpiOffset_phase)
! integer(pInt), intent(in), dimension(:,:,:) :: material_phase, phasememberat
! character(len=*), intent(in), dimension(:) :: phase_name
! integer(pInt), intent(in), dimension(:) :: dataspace_size, mpiOffset_phase
! integer(pInt), intent(in) :: mpiOffset
! integer(pInt) :: hdferr, NmatPoints, Nconstituents, i, j
! integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, position_id, plist_id, memspace
! integer(SIZE_T) :: type_size
! integer(pInt), dimension(:,:), allocatable :: arr
! integer(HSIZE_T), dimension(1) :: counter
! integer(HSSIZE_T), dimension(1) :: fileOffset
! character(len=64) :: phaseID
! Nconstituents = size(phasememberat,1)
! NmatPoints = count(material_phase /=0)/Nconstituents
! allocate(arr(2,NmatPoints*Nconstituents))
! do i=1, NmatPoints
! do j=Nconstituents-1, 0, -1
! arr(1,Nconstituents*i-j) = i-1
! enddo
! enddo
! arr(2,:) = pack(material_phase,material_phase/=0)
! do i=1, size(phase_name)
! write(phaseID, '(i0)') i
! mapping_ID = results_openGroup('/current/constitutive/'//trim(phaseID)//'_'//phase_name(i))
! NmatPoints = count(material_phase == i)
!!--------------------------------------------------------------------------------------------------
! ! create dataspace
! call h5screate_simple_f(1, int([dataspace_size(i)],HSIZE_T), space_id, hdferr, &
! int([dataspace_size(i)],HSIZE_T))
! if (hdferr < 0) call IO_error(1,ext_msg='HDF5_writeBackwardMapping')
!!--------------------------------------------------------------------------------------------------
! ! compound type
! call h5tget_size_f(H5T_STD_I32LE, type_size, hdferr)
! call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='HDF5_writeBackwardMapping: h5tcreate_f dtype_id')
! call h5tinsert_f(dtype_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5tinsert_f 0')
!!--------------------------------------------------------------------------------------------------
! ! create Dataset
! call h5dcreate_f(mapping_id, 'mapGeometry', dtype_id, space_id, dset_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase')
!!--------------------------------------------------------------------------------------------------
! ! Create memory types (one compound datatype for each member)
! call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5tcreate_f position_id')
! call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5tinsert_f position_id')
!!--------------------------------------------------------------------------------------------------
! ! Define and select hyperslabs
! counter = NmatPoints ! how big i am
! fileOffset = mpiOffset_phase(i) ! where i start to write my data
! call h5screate_simple_f(1, counter, memspace, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5screate_simple_f')
! call h5dget_space_f(dset_id, space_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5dget_space_f')
! call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5sselect_hyperslab_f')
!!--------------------------------------------------------------------------------------------------
! ! Create property list for collective dataset write
!#ifdef PETSc
! call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5pcreate_f')
! call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5pset_dxpl_mpio_f')
!#endif
!!--------------------------------------------------------------------------------------------------
! ! write data by fields in the datatype. Fields order is not important.
! call h5dwrite_f(dset_id, position_id, pack(arr(1,:),arr(2,:)==i)+mpiOffset, int([dataspace_size(i)],HSIZE_T),&
! hdferr, file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5dwrite_f instance_id')
!!--------------------------------------------------------------------------------------------------
! !close types, dataspaces
! call h5tclose_f(dtype_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5tclose_f dtype_id')
! call h5tclose_f(position_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5tclose_f position_id')
! call h5dclose_f(dset_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5dclose_f')
! call h5sclose_f(space_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5sclose_f space_id')
! call h5sclose_f(memspace, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5sclose_f memspace')
! call h5pclose_f(plist_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingPhase: h5pclose_f')
! call HDF5_closeGroup(mapping_ID)
! enddo
!end subroutine HDF5_backwardMappingPhase
!!--------------------------------------------------------------------------------------------------
!!> @brief adds the backward mapping from spatial position and constituent ID to results
!!--------------------------------------------------------------------------------------------------
!subroutine HDF5_backwardMappingHomog(material_homog,homogmemberat,homogenization_name,dataspace_size,mpiOffset,mpiOffset_homog)
! integer(pInt), intent(in), dimension(:,:) :: material_homog, homogmemberat
! character(len=*), intent(in), dimension(:) :: homogenization_name
! integer(pInt), intent(in), dimension(:) :: dataspace_size, mpiOffset_homog
! integer(pInt), intent(in) :: mpiOffset
! integer(pInt) :: hdferr, NmatPoints, i
! integer(HID_T) :: mapping_id, dtype_id, dset_id, space_id, position_id, plist_id, memspace
! integer(SIZE_T) :: type_size
! integer(pInt), dimension(:,:), allocatable :: arr
! integer(HSIZE_T), dimension(1) :: counter
! integer(HSSIZE_T), dimension(1) :: fileOffset
! character(len=64) :: homogID
! NmatPoints = count(material_homog /=0)
! allocate(arr(2,NmatPoints))
! arr(1,:) = (/(i, i=0,NmatPoints-1)/)
! arr(2,:) = pack(material_homog,material_homog/=0)
! do i=1, size(homogenization_name)
! write(homogID, '(i0)') i
! mapping_ID = results_openGroup('/current/homogenization/'//trim(homogID)//'_'//homogenization_name(i))
!!--------------------------------------------------------------------------------------------------
! ! create dataspace
! call h5screate_simple_f(1, int([dataspace_size(i)],HSIZE_T), space_id, hdferr, &
! int([dataspace_size(i)],HSIZE_T))
! if (hdferr < 0) call IO_error(1,ext_msg='HDF5_writeBackwardMapping')
!!--------------------------------------------------------------------------------------------------
! ! compound type
! call h5tget_size_f(H5T_STD_I32LE, type_size, hdferr)
! call h5tcreate_f(H5T_COMPOUND_F, type_size, dtype_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='HDF5_writeBackwardMapping: h5tcreate_f dtype_id')
! call h5tinsert_f(dtype_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5tinsert_f 0')
!!--------------------------------------------------------------------------------------------------
! ! create Dataset
! call h5dcreate_f(mapping_id, 'mapGeometry', dtype_id, space_id, dset_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog')
!!--------------------------------------------------------------------------------------------------
! ! Create memory types (one compound datatype for each member)
! call h5tcreate_f(H5T_COMPOUND_F, int(pInt,SIZE_T), position_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5tcreate_f position_id')
! call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_STD_I32LE, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5tinsert_f position_id')
!!--------------------------------------------------------------------------------------------------
! ! Define and select hyperslabs
! counter = NmatPoints ! how big i am
! fileOffset = mpiOffset_homog(i) ! where i start to write my data
! call h5screate_simple_f(1, counter, memspace, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5screate_simple_f')
! call h5dget_space_f(dset_id, space_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5dget_space_f')
! call h5sselect_hyperslab_f(space_id, H5S_SELECT_SET_F, fileOffset, counter, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5sselect_hyperslab_f')
!!--------------------------------------------------------------------------------------------------
! ! Create property list for collective dataset write
!#ifdef PETSc
! call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5pcreate_f')
! call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5pset_dxpl_mpio_f')
!#endif
!!--------------------------------------------------------------------------------------------------
! ! write data by fields in the datatype. Fields order is not important.
! call h5dwrite_f(dset_id, position_id, pack(arr(1,:),arr(2,:)==i)+mpiOffset,int([dataspace_size(i)],HSIZE_T),&
! hdferr, file_space_id = space_id, mem_space_id = memspace, xfer_prp = plist_id)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5dwrite_f instance_id')
!!--------------------------------------------------------------------------------------------------
! !close types, dataspaces
! call h5tclose_f(dtype_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5tclose_f dtype_id')
! call h5tclose_f(position_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5tclose_f position_id')
! call h5dclose_f(dset_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5dclose_f')
! call h5sclose_f(space_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5sclose_f space_id')
! call h5sclose_f(memspace, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5sclose_f memspace')
! call h5pclose_f(plist_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_backwardMappingHomog: h5pclose_f')
! call HDF5_closeGroup(mapping_ID)
! enddo
!end subroutine HDF5_backwardMappingHomog
!!--------------------------------------------------------------------------------------------------
!!> @brief adds the unique cell to node mapping
!!--------------------------------------------------------------------------------------------------
!subroutine HDF5_mappingCells(mapping)
! integer(pInt), intent(in), dimension(:) :: mapping
! integer :: hdferr, Nnodes
! integer(HID_T) :: mapping_id, dset_id, space_id
! Nnodes=size(mapping)
! mapping_ID = results_openGroup("mapping")
!!--------------------------------------------------------------------------------------------------
!! create dataspace
! call h5screate_simple_f(1, int([Nnodes],HSIZE_T), space_id, hdferr, &
! int([Nnodes],HSIZE_T))
! if (hdferr < 0) call IO_error(1,ext_msg='IO_mappingCells: h5screate_simple_f')
!!--------------------------------------------------------------------------------------------------
!! create Dataset
! call h5dcreate_f(mapping_id, "Cell",H5T_NATIVE_INTEGER, space_id, dset_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_mappingCells')
!!--------------------------------------------------------------------------------------------------
!! write data by fields in the datatype. Fields order is not important.
! call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER, mapping, int([Nnodes],HSIZE_T), hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_mappingCells: h5dwrite_f instance_id')
!!--------------------------------------------------------------------------------------------------
!!close types, dataspaces
! call h5dclose_f(dset_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_mappingConstitutive: h5dclose_f')
! call h5sclose_f(space_id, hdferr)
! if (hdferr < 0) call IO_error(1,ext_msg='IO_mappingConstitutive: h5sclose_f')
! call HDF5_closeGroup(mapping_ID)
!end subroutine HDF5_mappingCells
end module results

View File

@ -640,13 +640,13 @@ function om2ax(om) result(ax)
ax(1:3) = [ 0.0_pReal, 0.0_pReal, 1.0_pReal ]
else
call dgeev('N','V',3,om_,3,Wr,Wi,devNull,3,VR,3,work,size(work,1),ierr)
if (ierr /= 0) call IO_error(401,ext_msg='Error in om2ax: DGEEV return not zero')
if (ierr /= 0) error stop 'LAPACK error'
#if defined(__GFORTRAN__) && __GNUC__<9 || defined(__INTEL_COMPILER) && INTEL_COMPILER<1800 || defined(__PGI)
i = maxloc(merge(1,0,cEq(cmplx(Wr,Wi,pReal),cmplx(1.0_pReal,0.0_pReal,pReal),tol=1.0e-14_pReal)),dim=1)
#else
i = findloc(cEq(cmplx(Wr,Wi,pReal),cmplx(1.0_pReal,0.0_pReal,pReal),tol=1.0e-14_pReal),.true.,dim=1) !find eigenvalue (1,0)
#endif
if (i == 0) call IO_error(401,ext_msg='Error in om2ax Real: eigenvalue not found')
if (i == 0) error stop 'om2ax conversion failed'
ax(1:3) = VR(1:3,i)
where ( dNeq0([om(2,3)-om(3,2), om(3,1)-om(1,3), om(1,2)-om(2,1)])) &
ax(1:3) = sign(ax(1:3),-P *[om(2,3)-om(3,2), om(3,1)-om(1,3), om(1,2)-om(2,1)])
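Note on the DGEEV branch: the axis of a rotation matrix is its real eigenvector for the eigenvalue +1,

$$R\,\mathbf{a} = \mathbf{a},$$

which is why the code searches the returned eigenvalues for (1,0) within tolerance, takes the matching column of VR as the axis, and then fixes its sign against the off-diagonal differences of om (its skew-symmetric part).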