diff --git a/cmake/Compiler-GNU.cmake b/cmake/Compiler-GNU.cmake
index 589850af6..6fc669299 100644
--- a/cmake/Compiler-GNU.cmake
+++ b/cmake/Compiler-GNU.cmake
@@ -26,7 +26,7 @@ set (COMPILE_FLAGS "${COMPILE_FLAGS} -xf95-cpp-input")
 # preprocessor
 
 set (COMPILE_FLAGS "${COMPILE_FLAGS} -fPIC -fPIE")
-# position independent conde
+# position independent code
 
 set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffree-line-length-132")
 # restrict line length to the standard 132 characters (lattice.f90 require more characters)
diff --git a/src/CPFEM.f90 b/src/CPFEM.f90
index 5394fc90c..56b9c9233 100644
--- a/src/CPFEM.f90
+++ b/src/CPFEM.f90
@@ -69,7 +69,7 @@ contains
 !> @brief call (thread safe) all module initializations
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_initAll(el,ip)
- 
+
   integer(pInt), intent(in) :: el, &                                            !< FE el number
                                ip                                               !< FE integration point number
 
@@ -83,10 +83,10 @@ subroutine CPFEM_initAll(el,ip)
   call math_init
   call rotations_init
   call HDF5_utilities_init
-  call results_init
+  call results_init(.false.)
   call discretization_marc_init(ip, el)
   call lattice_init
-  call material_init
+  call material_init(.false.)
   call constitutive_init
   call crystallite_init
   call homogenization_init
@@ -132,7 +132,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
   logical, intent(in) ::                     parallelExecution                   !< flag indicating parallel computation of requested IPs
   real(pReal), dimension(6), intent(out) ::  cauchyStress                        !< stress as 6 vector
   real(pReal), dimension(6,6), intent(out) :: jacobian                           !< jacobian as 66 tensor (Consistent tangent dcs/dE)
- 
+
   real(pReal)                                J_inverse, &                        ! inverse of Jacobian
                                              rnd
   real(pReal), dimension (3,3) ::            Kirchhoff, &                        ! Piola-Kirchhoff stress
@@ -140,16 +140,16 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
   real(pReal), dimension (3,3,3,3) ::        H_sym, &
                                              H, &
                                              jacobian3333                        ! jacobian in Matrix notation
- 
+
   integer(pInt)                              elCP, &                             ! crystal plasticity element number
                                              i, j, k, l, m, n, ph, homog, mySource
   logical                                    updateJaco                          ! flag indicating if Jacobian has to be updated
- 
+
   real(pReal), parameter ::                  ODD_STRESS   = 1e15_pReal, &        !< return value for stress in case of ping pong dummy cycle
                                              ODD_JACOBIAN = 1e50_pReal           !< return value for jacobian in case of ping pong dummy cycle
- 
+
   elCP = mesh_FEM2DAMASK_elem(elFE)
- 
+
   if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt &
       .and. elCP == debug_e .and. ip == debug_i) then
     write(6,'(/,a)') '#############################################'
@@ -164,16 +164,16 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
     write(6,'(a,/)') '# --- terminallyIll --- #'
     write(6,'(a,/)') '#############################################'; flush (6)
   endif
- 
+
   if (iand(mode, CPFEM_BACKUPJACOBIAN) /= 0_pInt) &
     CPFEM_dcsde_knownGood = CPFEM_dcsde
   if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) &
     CPFEM_dcsde = CPFEM_dcsde_knownGood
- 
+
   !*** age results
   if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) call CPFEM_forward
- 
- 
+
+
   !*** collection of FEM input with returning of randomize odd stress and jacobian
   !* If no parallel execution is required, there is no need to collect FEM input
   if (.not. parallelExecution) then
@@ -184,7 +184,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
     end select chosenThermal1
     materialpoint_F0(1:3,1:3,ip,elCP) = ffn
     materialpoint_F(1:3,1:3,ip,elCP) = ffn1
- 
+
   elseif (iand(mode, CPFEM_COLLECT) /= 0_pInt) then
     call random_number(rnd)
     if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
@@ -199,11 +199,11 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
     materialpoint_F(1:3,1:3,ip,elCP) = ffn1
     CPFEM_calc_done = .false.
   endif
- 
- 
+
+
   !*** calculation of stress and jacobian
   if (iand(mode, CPFEM_CALCRESULTS) /= 0_pInt) then
- 
+
     !*** deformation gradient outdated or any actual deformation gradient differs more than relevantStrain from the stored one
     validCalculation: if (terminallyIll &
                      .or. outdatedFFN1 &
@@ -221,7 +221,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
       if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
       CPFEM_cs(1:6,ip,elCP)        = ODD_STRESS * rnd
       CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6)
- 
+
     !*** deformation gradient is not outdated
     else validCalculation
       updateJaco = mod(cycleCounter,iJacoStiffness) == 0
@@ -232,7 +232,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
         if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
           write(6,'(a,i8,1x,i2)') '<< CPFEM >> calculation for elFE ip ',elFE,ip
         call materialpoint_stressAndItsTangent(updateJaco, dt)
- 
+
       !* parallel computation and calulation not yet done
       elseif (.not. CPFEM_calc_done) then
         if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
@@ -241,22 +241,22 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
         call materialpoint_stressAndItsTangent(updateJaco, dt)
         CPFEM_calc_done = .true.
       endif
- 
+
       !* map stress and stiffness (or return odd values if terminally ill)
       terminalIllness: if (terminallyIll) then
- 
+
         call random_number(rnd)
         if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
         CPFEM_cs(1:6,ip,elCP)        = ODD_STRESS * rnd
        CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6)
- 
+
       else terminalIllness
- 
+
         ! translate from P to CS
         Kirchhoff = matmul(materialpoint_P(1:3,1:3,ip,elCP), transpose(materialpoint_F(1:3,1:3,ip,elCP)))
         J_inverse = 1.0_pReal / math_det33(materialpoint_F(1:3,1:3,ip,elCP))
         CPFEM_cs(1:6,ip,elCP) = math_sym33to6(J_inverse * Kirchhoff,weighted=.false.)
- 
+
         ! translate from dP/dF to dCS/dE
         H = 0.0_pReal
         do i=1,3; do j=1,3; do k=1,3; do l=1,3; do m=1,3; do n=1,3
@@ -267,15 +267,15 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
                  + 0.5_pReal * ( Kirchhoff(j,l)*math_delta(i,k) + Kirchhoff(i,k)*math_delta(j,l) &
                                + Kirchhoff(j,k)*math_delta(i,l) + Kirchhoff(i,l)*math_delta(j,k))
         enddo; enddo; enddo; enddo; enddo; enddo
- 
+
         forall(i=1:3, j=1:3,k=1:3,l=1:3) &
           H_sym(i,j,k,l) = 0.25_pReal * (H(i,j,k,l) + H(j,i,k,l) + H(i,j,l,k) + H(j,i,l,k))
- 
+
         CPFEM_dcsde(1:6,1:6,ip,elCP) = math_sym3333to66(J_inverse * H_sym,weighted=.false.)
- 
+
       endif terminalIllness
     endif validCalculation
- 
+
     !* report stress and stiffness
     if ((iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
          .and. ((debug_e == elCP .and. debug_i == ip) &
@@ -286,17 +286,17 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
           '<< CPFEM >> Jacobian/GPa at elFE ip ', elFE, ip, transpose(CPFEM_dcsdE(1:6,1:6,ip,elCP))*1.0e-9_pReal
       flush(6)
     endif
- 
+
   endif
- 
+
   !*** warn if stiffness close to zero
   if (all(abs(CPFEM_dcsdE(1:6,1:6,ip,elCP)) < 1e-10_pReal)) call IO_warning(601,elCP,ip)
- 
+
   !*** copy to output if using commercial FEM solver
   cauchyStress = CPFEM_cs   (1:6,     ip,elCP)
   jacobian     = CPFEM_dcsdE(1:6,1:6,ip,elCP)
- 
- 
+
+
   !*** remember extreme values of stress ...
   cauchyStress33 = math_6toSym33(CPFEM_cs(1:6,ip,elCP),weighted=.false.)
   if (maxval(cauchyStress33) > debug_stressMax) then
diff --git a/src/CPFEM2.f90 b/src/CPFEM2.f90
index dea500900..703d539ca 100644
--- a/src/CPFEM2.f90
+++ b/src/CPFEM2.f90
@@ -52,13 +52,13 @@ subroutine CPFEM_initAll
   call rotations_init
   call lattice_init
   call HDF5_utilities_init
-  call results_init
+  call results_init(restart=interface_restartInc>0)
 #if defined(Mesh)
-  call discretization_mesh_init
+  call discretization_mesh_init(restart=interface_restartInc>0)
 #elif defined(Grid)
-  call discretization_grid_init
+  call discretization_grid_init(restart=interface_restartInc>0)
 #endif
-  call material_init
+  call material_init(restart=interface_restartInc>0)
   call constitutive_init
   call crystallite_init
   call homogenization_init
diff --git a/src/grid/discretization_grid.f90 b/src/grid/discretization_grid.f90
index 8413fc5e8..5cea99550 100644
--- a/src/grid/discretization_grid.f90
+++ b/src/grid/discretization_grid.f90
@@ -42,7 +42,9 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief reads the geometry file to obtain information on discretization
 !--------------------------------------------------------------------------------------------------
-subroutine discretization_grid_init
+subroutine discretization_grid_init(restart)
+
+  logical, intent(in) :: restart
 
   include 'fftw3-mpi.f03'
   real(pReal), dimension(3) :: &
@@ -100,13 +102,14 @@ subroutine discretization_grid_init
 
 !--------------------------------------------------------------------------------------------------
 ! store geometry information for post processing
-  call results_openJobFile
-  call results_closeGroup(results_addGroup('geometry'))
-  call results_addAttribute('grid',  grid,    'geometry')
-  call results_addAttribute('size',  geomSize,'geometry')
-  call results_addAttribute('origin',origin,  'geometry')
-  call results_closeJobFile
- 
+  if(.not. restart) then
+    call results_openJobFile
+    call results_closeGroup(results_addGroup('geometry'))
+    call results_addAttribute('grid',  grid,    'geometry')
+    call results_addAttribute('size',  geomSize,'geometry')
+    call results_addAttribute('origin',origin,  'geometry')
+    call results_closeJobFile
+  endif
 
 !--------------------------------------------------------------------------------------------------
 ! geometry information required by the nonlocal CP model
   call geometry_plastic_nonlocal_setIPvolume(reshape([(product(mySize/real(myGrid,pReal)),j=1,product(myGrid))], &
diff --git a/src/material.f90 b/src/material.f90
index 749c9a3d8..90f2d9b16 100644
--- a/src/material.f90
+++ b/src/material.f90
@@ -207,7 +207,9 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief parses material configuration file
 !--------------------------------------------------------------------------------------------------
-subroutine material_init
+subroutine material_init(restart)
+
+  logical, intent(in) :: restart
 
   integer :: i,e,m,c,h, myDebug, myPhase, myHomog, myMicro
   integer, dimension(:), allocatable :: &
@@ -339,11 +341,12 @@ subroutine material_init
   call config_deallocate('material.config/microstructure')
   call config_deallocate('material.config/texture')
 
-  call results_openJobFile
-  call results_mapping_constituent(material_phaseAt,material_phaseMemberAt,config_name_phase)
-  call results_mapping_materialpoint(material_homogenizationAt,material_homogenizationMemberAt,config_name_homogenization)
-  call results_closeJobFile
- 
+  if (.not. restart) then
+    call results_openJobFile
+    call results_mapping_constituent(material_phaseAt,material_phaseMemberAt,config_name_phase)
+    call results_mapping_materialpoint(material_homogenizationAt,material_homogenizationMemberAt,config_name_homogenization)
+    call results_closeJobFile
+  endif
 
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 ! BEGIN DEPRECATED
diff --git a/src/mesh/discretization_mesh.f90 b/src/mesh/discretization_mesh.f90
index 072f3b4ec..b98cbbfad 100644
--- a/src/mesh/discretization_mesh.f90
+++ b/src/mesh/discretization_mesh.f90
@@ -63,7 +63,9 @@ contains
 !> @brief initializes the mesh by calling all necessary private routines the mesh module
 !! Order and routines strongly depend on type of solver
 !--------------------------------------------------------------------------------------------------
-subroutine discretization_mesh_init
+subroutine discretization_mesh_init(restart)
+
+  logical, intent(in) :: restart
 
   integer, dimension(1), parameter:: FE_geomtype = [1]                           !< geometry type of particular element type
   integer, dimension(1) :: FE_Nips                                               !< number of IPs in a specific type of element
diff --git a/src/results.f90 b/src/results.f90
index cfd495a71..cee6aedd9 100644
--- a/src/results.f90
+++ b/src/results.f90
@@ -15,31 +15,31 @@ module results
 
  implicit none
  private
- 
+
  integer(HID_T) :: resultsFile
 
  interface results_writeDataset
- 
+
    module procedure results_writeTensorDataset_real
    module procedure results_writeVectorDataset_real
    module procedure results_writeScalarDataset_real
- 
+
    module procedure results_writeTensorDataset_int
    module procedure results_writeVectorDataset_int
- 
+
    module procedure results_writeScalarDataset_rotation
- 
+
  end interface results_writeDataset
- 
+
  interface results_addAttribute
- 
+
    module procedure results_addAttribute_real
    module procedure results_addAttribute_int
    module procedure results_addAttribute_str
- 
+
    module procedure results_addAttribute_int_array
    module procedure results_addAttribute_real_array
- 
+
  end interface results_addAttribute
 
  public :: &
@@ -59,7 +59,9 @@ module results
    results_mapping_materialpoint
 
 contains
 
-subroutine results_init
+subroutine results_init(restart)
+
+  logical, intent(in) :: restart
 
   character(len=pStringLen) :: commandLine
 
@@ -68,15 +70,17 @@ subroutine results_init
   write(6,'(/,a)') ' Diehl et al., Integrating Materials and Manufacturing Innovation 6(1):83–91, 2017'
   write(6,'(a)')   ' https://doi.org/10.1007/s40192-017-0084-5'
 
-  resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
-  call results_addAttribute('DADF5_version_major',0)
-  call results_addAttribute('DADF5_version_minor',6)
-  call results_addAttribute('DAMASK_version',DAMASKVERSION)
-  call get_command(commandLine)
-  call results_addAttribute('call',trim(commandLine))
-  call results_closeGroup(results_addGroup('mapping'))
-  call results_closeGroup(results_addGroup('mapping/cellResults'))
-  call results_closeJobFile
+  if(.not. restart) then
+    resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
+    call results_addAttribute('DADF5_version_major',0)
+    call results_addAttribute('DADF5_version_minor',6)
+    call results_addAttribute('DAMASK_version',DAMASKVERSION)
+    call get_command(commandLine)
+    call results_addAttribute('call',trim(commandLine))
+    call results_closeGroup(results_addGroup('mapping'))
+    call results_closeGroup(results_addGroup('mapping/cellResults'))
+    call results_closeJobFile
+  endif
 
 end subroutine results_init
 
@@ -87,7 +91,7 @@ end subroutine results_init
 subroutine results_openJobFile
 
   resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','a',.true.)
- 
+
 end subroutine results_openJobFile
 
@@ -105,7 +109,7 @@ end subroutine results_closeJobFile
 !> @brief creates the group of increment and adds time as attribute to the file
 !--------------------------------------------------------------------------------------------------
 subroutine results_addIncrement(inc,time)
- 
+
   integer, intent(in) :: inc
   real(pReal), intent(in) :: time
   character(len=pStringLen) :: incChar
@@ -137,7 +141,7 @@ end subroutine results_finalizeIncrement
 integer(HID_T) function results_openGroup(groupName)
 
   character(len=*), intent(in) :: groupName
- 
+
   results_openGroup = HDF5_openGroup(resultsFile,groupName)
 
 end function results_openGroup
@@ -149,7 +153,7 @@ end function results_openGroup
 integer(HID_T) function results_addGroup(groupName)
 
   character(len=*), intent(in) :: groupName
- 
+
   results_addGroup = HDF5_addGroup(resultsFile,groupName)
 
 end function results_addGroup
@@ -161,7 +165,7 @@ end function results_addGroup
 subroutine results_closeGroup(group_id)
 
   integer(HID_T), intent(in) :: group_id
- 
+
   call HDF5_closeGroup(group_id)
 
 end subroutine results_closeGroup
@@ -290,17 +294,17 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIuni
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   real(pReal), intent(inout), dimension(:) :: dataset
- 
+
   integer(HID_T) :: groupHandle
- 
+
   groupHandle = results_openGroup(group)
- 
+
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
- 
+
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -319,17 +323,17 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIuni
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   real(pReal), intent(inout), dimension(:,:) :: dataset
- 
+
   integer(HID_T) :: groupHandle
- 
+
   groupHandle = results_openGroup(group)
- 
+
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
- 
+
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -350,13 +354,13 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   logical, intent(in), optional :: transposed
   real(pReal), intent(in), dimension(:,:,:) :: dataset
- 
-  integer :: i 
+
+  integer :: i
   logical :: transposed_
   integer(HID_T) :: groupHandle
   real(pReal), dimension(:,:,:), allocatable :: dataset_transposed
- 
+
   if(present(transposed)) then
     transposed_ = transposed
   else
@@ -374,13 +378,13 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
   endif
 
   groupHandle = results_openGroup(group)
- 
+
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset_transposed,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset_transposed,label,.false.)
 #endif
- 
+
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -400,17 +404,17 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   integer, intent(inout), dimension(:,:) :: dataset
- 
+
   integer(HID_T) :: groupHandle
- 
+
   groupHandle = results_openGroup(group)
- 
+
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
- 
+
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -430,17 +434,17 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: SIunit
   integer, intent(inout), dimension(:,:,:) :: dataset
- 
+
   integer(HID_T) :: groupHandle
- 
+
   groupHandle = results_openGroup(group)
- 
+
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
- 
+
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
@@ -460,17 +464,17 @@ subroutine results_writeScalarDataset_rotation(group,dataset,label,description,l
   character(len=*), intent(in) :: label,group,description
   character(len=*), intent(in), optional :: lattice_structure
   type(rotation), intent(inout), dimension(:) :: dataset
- 
+
   integer(HID_T) :: groupHandle
- 
+
   groupHandle = results_openGroup(group)
- 
+
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
 #else
   call HDF5_write(groupHandle,dataset,label,.false.)
 #endif
- 
+
   if (HDF5_objectExists(groupHandle,label)) &
     call HDF5_addAttribute(groupHandle,'Description',description,label)
   if (HDF5_objectExists(groupHandle,label) .and. present(lattice_structure)) &
@@ -486,11 +490,11 @@ end subroutine results_writeScalarDataset_rotation
 !> @brief adds the unique mapping from spatial position and constituent ID to results
 !--------------------------------------------------------------------------------------------------
 subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
- 
+
   integer, dimension(:,:), intent(in) :: phaseAt                                 !< phase section at (constituent,element)
   integer, dimension(:,:,:), intent(in) :: memberAtLocal                         !< phase member at (constituent,IP,element)
   character(len=pStringLen), dimension(:), intent(in) :: label                   !< label of each phase section
- 
+
   integer, dimension(size(memberAtLocal,1),size(memberAtLocal,2),size(memberAtLocal,3)) :: &
     phaseAtMaterialpoint, &
     memberAtGlobal
@@ -500,7 +504,7 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
     myShape, &                                                                   !< shape of the dataset (this process)
     myOffset, &
     totalShape                                                                   !< shape of the dataset (all processes)
- 
+
   integer(HID_T) :: &
     loc_id, &                                                                    !< identifier of group in file
     dtype_id, &                                                                  !< identifier of compound data type
@@ -512,30 +516,30 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
     plist_id, &
     dt_id
 
- 
+
   integer(SIZE_T) :: type_size_string, type_size_int
   integer :: ierr, i
- 
+
 !---------------------------------------------------------------------------------------------------
 ! compound type: name of phase section + position/index within results array
   call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, ierr)
   call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), ierr)
   call h5tget_size_f(dt_id, type_size_string, ierr)
- 
+
   call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, ierr)
- 
+
   call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, ierr)
   call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,ierr)
   call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, ierr)
- 
+
 !--------------------------------------------------------------------------------------------------
 ! create memory types for each component of the compound type
   call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, ierr)
   call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, ierr)
- 
+
   call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, ierr)
   call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, ierr)
- 
+
   call h5tclose_f(dt_id, ierr)
 
 !--------------------------------------------------------------------------------------------------
@@ -553,10 +557,10 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
 #ifdef PETSc
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5pset_dxpl_mpio_f')
- 
+
   call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)         ! get output at each process
   if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_constituent: MPI_allreduce/writeSize')
- 
+
   call MPI_allreduce(MPI_IN_PLACE,memberOffset,size(memberOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
   if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_constituent: MPI_allreduce/memberOffset')
 #endif
@@ -564,15 +568,15 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
   myShape    = int([size(phaseAt,1),writeSize(worldrank)], HSIZE_T)
   myOffset   = int([0,sum(writeSize(0:worldrank-1))], HSIZE_T)
   totalShape = int([size(phaseAt,1),sum(writeSize)], HSIZE_T)
- 
+
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in memory (local shape = hyperslab) and in file (global shape)
   call h5screate_simple_f(2,myShape,memspace_id,ierr,myShape)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5screate_simple_f/memspace_id')
- 
+
   call h5screate_simple_f(2,totalShape,filespace_id,ierr,totalShape)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5screate_simple_f/filespace_id')
- 
+
   call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5sselect_hyperslab_f')
 
@@ -581,7 +585,7 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
   do i = 1, size(phaseAtMaterialpoint,2)
     phaseAtMaterialpoint(:,i,:) = phaseAt
   enddo
- 
+
 !---------------------------------------------------------------------------------------------------
 ! renumber member from my process to all processes
   do i = 1, size(label)
@@ -591,11 +595,11 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
 !--------------------------------------------------------------------------------------------------
 ! write the components of the compound type individually
   call h5pset_preserve_f(plist_id, .TRUE., ierr)
- 
+
   loc_id = results_openGroup('/mapping/cellResults')
   call h5dcreate_f(loc_id, 'constituent', dtype_id, filespace_id, dset_id, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dcreate_f')
- 
+
   call h5dwrite_f(dset_id, name_id, reshape(label(pack(phaseAtMaterialpoint,.true.)),myShape), &
                   myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dwrite_f/name_id')
@@ -621,11 +625,11 @@ end subroutine results_mapping_constituent
 !--------------------------------------------------------------------------------------------------
 !> @brief adds the unique mapping from spatial position and constituent ID to results
 !--------------------------------------------------------------------------------------------------
 subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
- 
+
   integer, dimension(:), intent(in) :: homogenizationAt                          !< homogenization section at (element)
   integer, dimension(:,:), intent(in) :: memberAtLocal                           !< homogenization member at (IP,element)
   character(len=pStringLen), dimension(:), intent(in) :: label                   !< label of each homogenization section
- 
+
   integer, dimension(size(memberAtLocal,1),size(memberAtLocal,2)) :: &
     homogenizationAtMaterialpoint, &
     memberAtGlobal
@@ -635,7 +639,7 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
     myShape, &                                                                   !< shape of the dataset (this process)
     myOffset, &
     totalShape                                                                   !< shape of the dataset (all processes)
- 
+
   integer(HID_T) :: &
     loc_id, &                                                                    !< identifier of group in file
     dtype_id, &                                                                  !< identifier of compound data type
@@ -647,30 +651,30 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
     plist_id, &
     dt_id
 
- 
+
   integer(SIZE_T) :: type_size_string, type_size_int
   integer :: ierr, i
- 
+
 !---------------------------------------------------------------------------------------------------
 ! compound type: name of phase section + position/index within results array
   call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, ierr)
   call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), ierr)
   call h5tget_size_f(dt_id, type_size_string, ierr)
- 
+
   call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, ierr)
- 
+
   call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, ierr)
   call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,ierr)
   call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, ierr)
- 
+
 !--------------------------------------------------------------------------------------------------
 ! create memory types for each component of the compound type
   call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, ierr)
   call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, ierr)
- 
+
   call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, ierr)
   call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, ierr)
- 
+
   call h5tclose_f(dt_id, ierr)
 
 !--------------------------------------------------------------------------------------------------
@@ -688,10 +692,10 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
 #ifdef PETSc
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5pset_dxpl_mpio_f')
- 
+
   call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)         ! get output at each process
   if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_materialpoint: MPI_allreduce/writeSize')
- 
+
   call MPI_allreduce(MPI_IN_PLACE,memberOffset,size(memberOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
   if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_materialpoint: MPI_allreduce/memberOffset')
 #endif
@@ -699,15 +703,15 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
   myShape    = int([writeSize(worldrank)], HSIZE_T)
   myOffset   = int([sum(writeSize(0:worldrank-1))], HSIZE_T)
   totalShape = int([sum(writeSize)], HSIZE_T)
- 
+
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in memory (local shape = hyperslab) and in file (global shape)
   call h5screate_simple_f(1,myShape,memspace_id,ierr,myShape)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5screate_simple_f/memspace_id')
- 
+
   call h5screate_simple_f(1,totalShape,filespace_id,ierr,totalShape)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5screate_simple_f/filespace_id')
- 
+
   call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5sselect_hyperslab_f')
 
@@ -716,7 +720,7 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
   do i = 1, size(homogenizationAtMaterialpoint,1)
     homogenizationAtMaterialpoint(i,:) = homogenizationAt
   enddo
- 
+
 !---------------------------------------------------------------------------------------------------
 ! renumber member from my process to all processes
   do i = 1, size(label)
@@ -726,11 +730,11 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
 !--------------------------------------------------------------------------------------------------
 ! write the components of the compound type individually
   call h5pset_preserve_f(plist_id, .TRUE., ierr)
- 
+
   loc_id = results_openGroup('/mapping/cellResults')
   call h5dcreate_f(loc_id, 'materialpoint', dtype_id, filespace_id, dset_id, ierr)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5dcreate_f')
- 
+
   call h5dwrite_f(dset_id, name_id, reshape(label(pack(homogenizationAtMaterialpoint,.true.)),myShape), &
                   myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
   if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5dwrite_f/name_id')
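
Editor's note (not part of the patch): every hunk above follows one pattern. A logical restart flag, computed once from interface_restartInc>0, is threaded into the initialization routines so that the one-time creation of the results file ('<job>.hdf5', opened with HDF5 mode 'w') and of its header attributes, geometry group, and mapping datasets is skipped on a restarted run; the writers then reopen the existing file in append mode ('a'). Below is a minimal, self-contained Fortran sketch of that pattern; the names results_sketch, results_like_init, and restartInc are illustrative stand-ins, not DAMASK API.

module results_sketch
  implicit none
  private
  public :: results_like_init

contains

  !> On a fresh run the results file and its one-time metadata are created;
  !> on a restart the existing file is left untouched and is only reopened
  !> later (in append mode) by the routines that write new increments.
  subroutine results_like_init(restart)
    logical, intent(in) :: restart

    if (.not. restart) then
      write(6,'(a)') 'would create <job>.hdf5 (mode w) and write header/mapping groups'
    else
      write(6,'(a)') 'restart: keeping existing <job>.hdf5, to be reopened in mode a'
    endif

  end subroutine results_like_init

end module results_sketch

program demo
  use results_sketch
  implicit none
  integer :: restartInc                             ! hypothetical stand-in for interface_restartInc

  restartInc = 0                                    ! 0 = fresh run; >0 = restarted run
  call results_like_init(restart = restartInc > 0)  ! mirrors the call sites in CPFEM2.f90
end program demo

Passing .false. at the Marc entry point (CPFEM.f90) keeps the old behavior there, since that code path does not support restarting from an existing results file.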