diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f1af6259f..25616cc99 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -7,9 +7,9 @@ stages: - compilePETScGNU - prepareSpectral - spectral - - compileMarc2018_1 + - compileMarc - marc - - compileAbaqus2017 + - compileAbaqus - example - performance - createPackage @@ -51,39 +51,37 @@ variables: # Names of module files to load # =============================================================================================== # ++++++++++++ Compiler ++++++++++++++++++++++++++++++++++++++++++++++ - IntelCompiler16_0: "Compiler/Intel/16.0 Libraries/IMKL/2016" - IntelCompiler16_4: "Compiler/Intel/16.4 Libraries/IMKL/2016-4" - IntelCompiler17_0: "Compiler/Intel/17.0 Libraries/IMKL/2017" - IntelCompiler18_1: "Compiler/Intel/18.1 Libraries/IMKL/2018" - GNUCompiler7_3: "Compiler/GNU/7.3" + IntelCompiler16_4: "Compiler/Intel/16.4 Libraries/IMKL/2016" + IntelCompiler17_8: "Compiler/Intel/17.8 Libraries/IMKL/2017" + IntelCompiler18_4: "Compiler/Intel/18.4 Libraries/IMKL/2018" + GNUCompiler8_2: "Compiler/GNU/8.2" # ------------ Defaults ---------------------------------------------- - IntelCompiler: "$IntelCompiler18_1" - GNUCompiler: "$GNUCompiler7_3" + IntelCompiler: "$IntelCompiler18_4" + GNUCompiler: "$GNUCompiler8_2" # ++++++++++++ MPI +++++++++++++++++++++++++++++++++++++++++++++++++++ - MPICH3_2Intel18_1: "MPI/Intel/18.1/MPICH/3.2.1" - MPICH3_2GNU7_3: "MPI/GNU/7.3/MPICH/3.2.1" + IMPI2018Intel18_4: "MPI/Intel/18.4/IntelMPI/2018" + MPICH3_3GNU8_2: "MPI/GNU/8.2/MPICH/3.3" # ------------ Defaults ---------------------------------------------- - MPICH_Intel: "$MPICH3_2Intel18_1" - MPICH_GNU: "$MPICH3_2GNU7_3" + MPICH_Intel: "$IMPI2018Intel18_4" + MPICH_GNU: "$MPICH3_3GNU8_2" # ++++++++++++ PETSc +++++++++++++++++++++++++++++++++++++++++++++++++ - PETSc3_10_0MPICH3_2Intel18_1: "Libraries/PETSc/3.10.0/Intel-18.1-MPICH-3.2.1" - PETSc3_10_0MPICH3_2GNU7_3: "Libraries/PETSc/3.10.0/GNU-7.3-MPICH-3.2.1" + PETSc3_10_3IMPI2018Intel18_4: "Libraries/PETSc/3.10.3/Intel-18.4-IntelMPI-2018" + PETSc3_10_3MPICH3_3GNU8_2: "Libraries/PETSc/3.10.3/GNU-8.2-MPICH-3.3" # ------------ Defaults ---------------------------------------------- - PETSc_MPICH_Intel: "$PETSc3_10_0MPICH3_2Intel18_1" - PETSc_MPICH_GNU: "$PETSc3_10_0MPICH3_2GNU7_3" + PETSc_MPICH_Intel: "$PETSc3_10_3IMPI2018Intel18_4" + PETSc_MPICH_GNU: "$PETSc3_10_3MPICH3_3GNU8_2" # ++++++++++++ FEM +++++++++++++++++++++++++++++++++++++++++++++++++++ - Abaqus2017: "FEM/Abaqus/2017" + Abaqus2019: "FEM/Abaqus/2019" MSC2018_1: "FEM/MSC/2018.1" - MSC2017: "FEM/MSC/2017" # ------------ Defaults ---------------------------------------------- - Abaqus: "$Abaqus2017" + Abaqus: "$Abaqus2019" MSC: "$MSC2018_1" - IntelMarc: "$IntelCompiler17_0" + IntelMarc: "$IntelCompiler17_8" IntelAbaqus: "$IntelCompiler16_4" # ++++++++++++ Documentation +++++++++++++++++++++++++++++++++++++++++ - Doxygen1_8_13: "Documentation/Doxygen/1.8.13" + Doxygen1_8_15: "Documentation/Doxygen/1.8.15" # ------------ Defaults ---------------------------------------------- - Doxygen: "$Doxygen1_8_13" + Doxygen: "$Doxygen1_8_15" ################################################################################################### @@ -158,6 +156,13 @@ Post_AverageDown: - master - release +Post_ASCIItable: + stage: postprocessing + script: ASCIItable/test.py + except: + - master + - release + Post_General: stage: postprocessing script: PostProcessing/test.py @@ -383,9 +388,9 @@ TextureComponents: 
################################################################################################### Marc_compileIfort2018_1: - stage: compileMarc2018_1 + stage: compileMarc script: - - module load $IntelCompiler17_0 $MSC2018_1 + - module load $IntelMarc $MSC - Marc_compileIfort/test.py -m 2018.1 except: - master @@ -430,11 +435,11 @@ J2_plasticBehavior: - release ################################################################################################### -Abaqus_compile2017: - stage: compileAbaqus2017 +Abaqus_compile: + stage: compileAbaqus script: - - module load $IntelCompiler16_4 $Abaqus2017 - - Abaqus_compileIfort/test.py -a 2017 + - module load $IntelAbaqus $Abaqus + - Abaqus_compileIfort/test.py except: - master - release diff --git a/CMakeLists.txt b/CMakeLists.txt index 3aa49cd7a..6096c8824 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -445,6 +445,33 @@ elseif(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU") # Additional options # -fdefault-integer-8: Use it to set precision to 8 bytes for integer, don't use it for the standard case of pInt=4 (there is no -fdefault-integer-4) + + +################################################################################################### +# PGI Compiler +################################################################################################### +elseif(CMAKE_Fortran_COMPILER_ID STREQUAL "PGI") + + if (OPTIMIZATION STREQUAL "OFF") + set (OPTIMIZATION_FLAGS "-O0" ) + elseif (OPTIMIZATION STREQUAL "DEFENSIVE") + set (OPTIMIZATION_FLAGS "-O2") + elseif (OPTIMIZATION STREQUAL "AGGRESSIVE") + set (OPTIMIZATION_FLAGS "-O3") + endif () + + +#------------------------------------------------------------------------------------------------ +# Fine tuning compilation options + set (COMPILE_FLAGS "${COMPILE_FLAGS} -Mpreprocess") + # preprocessor + + set (STANDARD_CHECK "-Mallocatable=03") + +#------------------------------------------------------------------------------------------------ +# Runtime debugging + set (DEBUG_FLAGS "${DEBUG_FLAGS} -g") + # Includes debugging information in the object module; sets the optimization level to zero unless a -O option is present on the command line else () message (FATAL_ERROR "Compiler type (CMAKE_Fortran_COMPILER_ID) not recognized") endif () diff --git a/CONFIG b/CONFIG index 13b75a768..31a9c34c8 100644 --- a/CONFIG +++ b/CONFIG @@ -8,6 +8,6 @@ set DAMASK_NUM_THREADS = 4 set MSC_ROOT = /opt/msc set MARC_VERSION = 2018.1 -set ABAQUS_VERSION = 2017 +set ABAQUS_VERSION = 2019 set DAMASK_HDF5 = OFF diff --git a/PRIVATE b/PRIVATE index beb9682ff..18ba1ba6a 160000 --- a/PRIVATE +++ b/PRIVATE @@ -1 +1 @@ -Subproject commit beb9682fff7d4d6c65aba12ffd04c7441dc6ba6b +Subproject commit 18ba1ba6a5e9ba446dc9311acf2acf2781614db1 diff --git a/README b/README index 5c5d976b6..7fc372881 100644 --- a/README +++ b/README @@ -10,3 +10,4 @@ Germany Email: DAMASK@mpie.de https://damask.mpie.de +https://magit1.mpie.de diff --git a/VERSION b/VERSION index 543d23432..6e1ce244f 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.0.2-1674-g683dee82 +v2.0.2-1689-g1a471bcd diff --git a/processing/post/addCumulative.py b/processing/post/addCumulative.py index 4588d915c..dfa8059dc 100755 --- a/processing/post/addCumulative.py +++ b/processing/post/addCumulative.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys diff --git a/processing/post/addDerivative.py b/processing/post/addDerivative.py index dc97c09ea..35ca7130b 100755 --- a/processing/post/addDerivative.py +++
b/processing/post/addDerivative.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys diff --git a/processing/post/addLinked.py b/processing/post/addLinked.py index d60307bc2..097f8f2c5 100755 --- a/processing/post/addLinked.py +++ b/processing/post/addLinked.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys @@ -21,7 +21,7 @@ Add data of selected column(s) from (first) row of linked ASCIItable that shares parser.add_option('--link', dest = 'link', nargs = 2, type = 'string', metavar = 'string string', - help = 'column labels containing linked values') + help = 'column labels of table and linked table containing linking values') parser.add_option('-l','--label', dest = 'label', action = 'extend', metavar = '', @@ -105,7 +105,8 @@ for name in filenames: outputAlive = True while outputAlive and table.data_read(): # read next data line of ASCII table try: - table.data_append(data[np.argwhere(np.all((map(float,table.data[myLink:myLink+myLinkDim]) - index)==0,axis=1))[0]]) # add data of first matching line + table.data_append(data[np.argwhere(np.all((list(map(float,table.data[myLink:myLink+myLinkDim])) - index)==0, + axis=1))[0]]) # add data of first matching line except IndexError: table.data_append(np.nan*np.ones_like(data[0])) # or add NaNs outputAlive = table.data_write() # output processed line diff --git a/processing/post/addTable.py b/processing/post/addTable.py index 82799b4f5..126db6f65 100755 --- a/processing/post/addTable.py +++ b/processing/post/addTable.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys diff --git a/processing/post/blowUp.py b/processing/post/blowUp.py index 5a0d631e0..22de70d5b 100755 --- a/processing/post/blowUp.py +++ b/processing/post/blowUp.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys diff --git a/processing/post/vtk_addGridData.py b/processing/post/vtk_addGridData.py index e0c274dc7..315071a4b 100755 --- a/processing/post/vtk_addGridData.py +++ b/processing/post/vtk_addGridData.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,vtk diff --git a/processing/post/vtk_addPointcloudData.py b/processing/post/vtk_addPointcloudData.py index 3937413c6..d75eb97b4 100755 --- a/processing/post/vtk_addPointcloudData.py +++ b/processing/post/vtk_addPointcloudData.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,vtk diff --git a/processing/post/vtk_addRectilinearGridData.py b/processing/post/vtk_addRectilinearGridData.py index 9ec384e4d..83a1451a0 100755 --- a/processing/post/vtk_addRectilinearGridData.py +++ b/processing/post/vtk_addRectilinearGridData.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,vtk diff --git a/processing/post/vtk_pointcloud.py b/processing/post/vtk_pointcloud.py index 54f02d300..a9ce1f81f 100755 --- a/processing/post/vtk_pointcloud.py +++ b/processing/post/vtk_pointcloud.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys,vtk diff --git a/processing/post/vtk_rectilinearGrid.py b/processing/post/vtk_rectilinearGrid.py index d01d118cb..c94f44228 100755 --- a/processing/post/vtk_rectilinearGrid.py +++ b/processing/post/vtk_rectilinearGrid.py @@ -1,4 +1,4 @@ 
-#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys,vtk diff --git a/processing/pre/geom_grainGrowth.py b/processing/pre/geom_grainGrowth.py index f1394cb5f..1afb02715 100755 --- a/processing/pre/geom_grainGrowth.py +++ b/processing/pre/geom_grainGrowth.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys,math @@ -49,7 +49,7 @@ parser.set_defaults(d = 1, (options, filenames) = parser.parse_args() -options.immutable = map(int,options.immutable) +options.immutable = list(map(int,options.immutable)) getInterfaceEnergy = lambda A,B: np.float32((A*B != 0)*(A != B)*1.0) # 1.0 if A & B are distinct & nonzero, 0.0 otherwise struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood @@ -70,9 +70,9 @@ for name in filenames: table.head_read() info,extra_header = table.head_getGeom() - damask.util.croak(['grid a b c: {}'.format(' x '.join(map(str,info['grid']))), - 'size x y z: {}'.format(' x '.join(map(str,info['size']))), - 'origin x y z: {}'.format(' : '.join(map(str,info['origin']))), + damask.util.croak(['grid a b c: {}'.format(' x '.join(list(map(str,info['grid'])))), + 'size x y z: {}'.format(' x '.join(list(map(str,info['size'])))), + 'origin x y z: {}'.format(' : '.join(list(map(str,info['origin'])))), 'homogenization: {}'.format(info['homogenization']), 'microstructures: {}'.format(info['microstructures']), ]) @@ -102,9 +102,9 @@ for name in filenames: gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d),dtype=np.float32) \ /np.power(2.0*np.pi*options.d*options.d,(3.0 - np.count_nonzero(info['grid'] == 1))/2.,dtype=np.float32) - gauss[:,:,:grid[2]/2:-1] = gauss[:,:,1:(grid[2]+1)/2] # trying to cope with uneven (odd) grid size - gauss[:,:grid[1]/2:-1,:] = gauss[:,1:(grid[1]+1)/2,:] - gauss[:grid[0]/2:-1,:,:] = gauss[1:(grid[0]+1)/2,:,:] + gauss[:,:,:grid[2]//2:-1] = gauss[:,:,1:(grid[2]+1)//2] # trying to cope with uneven (odd) grid size + gauss[:,:grid[1]//2:-1,:] = gauss[:,1:(grid[1]+1)//2,:] + gauss[:grid[0]//2:-1,:,:] = gauss[1:(grid[0]+1)//2,:,:] gauss = np.fft.rfftn(gauss).astype(np.complex64) for smoothIter in range(options.N): @@ -119,9 +119,9 @@ for name in filenames: microstructure,i,axis=0), j,axis=1), k,axis=2))) # periodically extend interfacial energy array by half a grid size in positive and negative directions - periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]/2:-grid[0]/2, - grid[1]/2:-grid[1]/2, - grid[2]/2:-grid[2]/2] + periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]//2:-grid[0]//2, + grid[1]//2:-grid[1]//2, + grid[2]//2:-grid[2]//2] # transform bulk volume (i.e. where interfacial energy remained zero), store index of closest boundary voxel index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0., @@ -148,15 +148,15 @@ for name in filenames: ndimage.morphology.binary_dilation(interfaceEnergy > 0., structure = struc, iterations = int(round(options.d*2.))-1),# fat boundary - periodic_bulkEnergy[grid[0]/2:-grid[0]/2, # retain filled energy on fat boundary... - grid[1]/2:-grid[1]/2, - grid[2]/2:-grid[2]/2], # ...and zero everywhere else + periodic_bulkEnergy[grid[0]//2:-grid[0]//2, # retain filled energy on fat boundary... 
+ grid[1]//2:-grid[1]//2, + grid[2]//2:-grid[2]//2], # ...and zero everywhere else 0.)).astype(np.complex64) * gauss).astype(np.float32) - periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]/2:-grid[0]/2, - grid[1]/2:-grid[1]/2, - grid[2]/2:-grid[2]/2] # periodically extend the smoothed bulk energy + periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]//2:-grid[0]//2, + grid[1]//2:-grid[1]//2, + grid[2]//2:-grid[2]//2] # periodically extend the smoothed bulk energy # transform voxels close to interface region @@ -164,15 +164,15 @@ return_distances = False, return_indices = True) # want index of closest bulk grain - periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]/2:-grid[0]/2, - grid[1]/2:-grid[1]/2, - grid[2]/2:-grid[2]/2] # periodically extend the microstructure + periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]//2:-grid[0]//2, + grid[1]//2:-grid[1]//2, + grid[2]//2:-grid[2]//2] # periodically extend the microstructure microstructure = periodic_microstructure[index[0], index[1], - index[2]].reshape(2*grid)[grid[0]/2:-grid[0]/2, - grid[1]/2:-grid[1]/2, - grid[2]/2:-grid[2]/2] # extent grains into interface region + index[2]].reshape(2*grid)[grid[0]//2:-grid[0]//2, + grid[1]//2:-grid[1]//2, + grid[2]//2:-grid[2]//2] # extend grains into interface region # replace immutable microstructures with closest mutable ones index = ndimage.morphology.distance_transform_edt(np.in1d(microstructure,options.immutable).reshape(grid), @@ -236,3 +236,4 @@ for name in filenames: # --- output finalization -------------------------------------------------------------------------- table.close() + \ No newline at end of file diff --git a/processing/pre/geom_toTable.py b/processing/pre/geom_toTable.py index eb6bdde61..a29ef7afb 100755 --- a/processing/pre/geom_toTable.py +++ b/processing/pre/geom_toTable.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import os,sys @@ -48,11 +48,11 @@ for name in filenames: table.head_read() info,extra_header = table.head_getGeom() - damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))), - 'size x y z: %s'%(' x '.join(map(str,info['size']))), - 'origin x y z: %s'%(' : '.join(map(str,info['origin']))), - 'homogenization: %i'%info['homogenization'], - 'microstructures: %i'%info['microstructures'], + damask.util.croak(['grid a b c: {}'.format(' x '.join(list(map(str,info['grid'])))), + 'size x y z: {}'.format(' x '.join(list(map(str,info['size'])))), + 'origin x y z: {}'.format(' : '.join(list(map(str,info['origin'])))), + 'homogenization: {}'.format(info['homogenization']), + 'microstructures: {}'.format(info['microstructures']), ]) errors = [] diff --git a/processing/pre/seeds_fromDistribution.py b/processing/pre/seeds_fromDistribution.py index 3b9005032..2e8936f27 100755 --- a/processing/pre/seeds_fromDistribution.py +++ b/processing/pre/seeds_fromDistribution.py @@ -1,10 +1,11 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python3 # -*- coding: UTF-8 no BOM -*- import threading,time,os,sys,random import numpy as np from optparse import OptionParser -from cStringIO import StringIO +from io import StringIO +import binascii import damask scriptName = os.path.splitext(os.path.basename(__file__))[0] @@ -96,7 +97,7 @@ class myThread (threading.Thread): perturbedGeomVFile = StringIO() perturbedSeedsVFile.reset() perturbedGeomVFile.write(damask.util.execute('geom_fromVoronoiTessellation '+ - ' -g '+' '.join(map(str,
options.grid)),streamIn=perturbedSeedsVFile)[0]) + ' -g '+' '.join(list(map(str, options.grid))),streamIn=perturbedSeedsVFile)[0]) perturbedGeomVFile.reset() #--- evaluate current seeds file ---------------------------------------------------------------------- @@ -214,7 +215,7 @@ options = parser.parse_args()[0] damask.util.report(scriptName,options.seedFile) if options.randomSeed is None: - options.randomSeed = int(os.urandom(4).encode('hex'), 16) + options.randomSeed = int(binascii.hexlify(os.urandom(4)),16) damask.util.croak(options.randomSeed) delta = (options.scale/options.grid[0],options.scale/options.grid[1],options.scale/options.grid[2]) baseFile=os.path.splitext(os.path.basename(options.seedFile))[0] @@ -240,17 +241,17 @@ if os.path.isfile(os.path.splitext(options.seedFile)[0]+'.seeds'): for line in initialSeedFile: bestSeedsVFile.write(line) else: bestSeedsVFile.write(damask.util.execute('seeds_fromRandom'+\ - ' -g '+' '.join(map(str, options.grid))+\ + ' -g '+' '.join(list(map(str, options.grid)))+\ ' -r {:d}'.format(options.randomSeed)+\ ' -N '+str(nMicrostructures))[0]) bestSeedsUpdate = time.time() # ----------- tessellate initial seed file to get and evaluate geom file -bestSeedsVFile.reset() +bestSeedsVFile.seek(0) initialGeomVFile = StringIO() initialGeomVFile.write(damask.util.execute('geom_fromVoronoiTessellation '+ - ' -g '+' '.join(map(str, options.grid)),bestSeedsVFile)[0]) -initialGeomVFile.reset() + ' -g '+' '.join(list(map(str, options.grid))),bestSeedsVFile)[0]) +initialGeomVFile.seek(0) initialGeomTable = damask.ASCIItable(initialGeomVFile,None,labeled=False,readonly=True) initialGeomTable.head_read() info,devNull = initialGeomTable.head_getGeom() diff --git a/python/damask/solver/abaqus.py b/python/damask/solver/abaqus.py index bf8691533..22dbab045 100644 --- a/python/damask/solver/abaqus.py +++ b/python/damask/solver/abaqus.py @@ -2,7 +2,7 @@ from .solver import Solver import damask -import subprocess,re +import subprocess class Abaqus(Solver): @@ -15,14 +15,13 @@ class Abaqus(Solver): def return_run_command(self,model): env=damask.Environment() - shortVersion = re.sub('[\.,-]', '',self.version) try: - cmd='abq'+shortVersion - subprocess.check_output(['abq'+shortVersion,'information=release']) + cmd='abq'+self.version + subprocess.check_output([cmd,'information=release']) except OSError: # link to abqXXX not existing cmd='abaqus' process = subprocess.Popen(['abaqus','information=release'],stdout = subprocess.PIPE,stderr = subprocess.PIPE) - detectedVersion = process.stdout.readlines()[1].split()[1] + detectedVersion = process.stdout.readlines()[1].split()[1].decode('utf-8') if self.version != detectedVersion: - raise Exception('found Abaqus version %s, but requested %s'%(detectedVersion,self.version)) - return '%s -job %s -user %s/src/DAMASK_abaqus interactive'%(cmd,model,env.rootDir()) + raise Exception('found Abaqus version {}, but requested {}'.format(detectedVersion,self.version)) + return '{} -job {} -user {}/src/DAMASK_abaqus interactive'.format(cmd,model,env.rootDir()) diff --git a/src/C_routines.c b/src/C_routines.c index e3891765a..3dccb7644 100644 --- a/src/C_routines.c +++ b/src/C_routines.c @@ -6,9 +6,11 @@ #include #include #include +#include <signal.h> /* http://stackoverflow.com/questions/30279228/is-there-an-alternative-to-getcwd-in-fortran-2003-2008 */ + int isdirectory_c(const char *dir){ struct stat statbuf; if(stat(dir, &statbuf) != 0) /* error */ @@ -44,3 +46,11 @@ void gethostname_c(char hostname[], int *stat){ int chdir_c(const char *dir){ return
chdir(dir); } + +void signalusr1_c(void (*handler)(int)){ + signal(SIGUSR1, handler); +} + +void signalusr2_c(void (*handler)(int)){ + signal(SIGUSR2, handler); +} \ No newline at end of file diff --git a/src/DAMASK_abaqus.f b/src/DAMASK_abaqus.f index 9072de95d..f17b5bb25 100644 --- a/src/DAMASK_abaqus.f +++ b/src/DAMASK_abaqus.f @@ -40,16 +40,25 @@ subroutine DAMASK_interface_init character(len=256) :: wd call date_and_time(values = dateAndTime) - write(6,'(/,a)') ' <<<+- DAMASK_abaqus_std -+>>>' - write(6,'(/,a)') ' Roters et al., Computational Materials Science, 2018' - write(6,'(/,a)') ' Version: '//DAMASKVERSION - write(6,'(a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',& - dateAndTime(2),'/',& - dateAndTime(1) - write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':',& - dateAndTime(6),':',& - dateAndTime(7) - write(6,'(/,a)') ' <<<+- DAMASK_interface init -+>>>' + write(6,'(/,a)') ' <<<+- DAMASK_abaqus -+>>>' + write(6,'(/,a)') ' Roters et al., Computational Materials Science 158, 2018, 420-478' + write(6,'(a,/)') ' https://doi.org/10.1016/j.commatsci.2018.04.030' + + write(6,'(a,/)') ' Version: '//DAMASKVERSION + +! https://github.com/jeffhammond/HPCInfo/blob/master/docs/Preprocessor-Macros.md +#if __INTEL_COMPILER >= 1800 + write(6,*) 'Compiled with: ', compiler_version() + write(6,*) 'Compiler options: ', compiler_options() +#else + write(6,'(a,i4.4,a,i8.8)') ' Compiled with Intel fortran version :', __INTEL_COMPILER,& + ', build date :', __INTEL_COMPILER_BUILD_DATE +#endif + + write(6,*) 'Compiled on ', __DATE__,' at ',__TIME__ + + write(6,'(a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',dateAndTime(2),'/', dateAndTime(1) + write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':', dateAndTime(6),':', dateAndTime(7) call getoutdir(wd, lenOutDir) ierr = CHDIR(wd) diff --git a/src/DAMASK_interface.f90 b/src/DAMASK_interface.f90 index a2b4f53f2..630b5b921 100644 --- a/src/DAMASK_interface.f90 +++ b/src/DAMASK_interface.f90 @@ -12,9 +12,9 @@ module DAMASK_interface use prec, only: & pInt - implicit none private + logical, public, protected :: SIGUSR1,SIGUSR2 integer(pInt), public, protected :: & interface_restartInc = 0_pInt !< Increment at which calculation starts character(len=1024), public, protected :: & @@ -42,6 +42,8 @@ contains subroutine DAMASK_interface_init() use, intrinsic :: & iso_fortran_env + use :: & + iso_c_binding #include #if defined(__GFORTRAN__) && __GNUC__ < 5 @@ -81,6 +83,8 @@ subroutine DAMASK_interface_init() use PETScSys use system_routines, only: & + signalusr1_C, & + signalusr2_C, & getHostName, & getCWD @@ -139,16 +143,27 @@ subroutine DAMASK_interface_init() call date_and_time(values = dateAndTime) write(6,'(/,a)') ' <<<+- DAMASK_interface init -+>>>' - write(6,'(a,/)') ' Roters et al., Computational Materials Science, 2018' - write(6,'(/,a)') ' Version: '//DAMASKVERSION - write(6,'(a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',& - dateAndTime(2),'/',& - dateAndTime(1) - write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':',& - dateAndTime(6),':',& - dateAndTime(7) - write(6,'(/,a,i4.1)') ' MPI processes: ',worldsize -#include "compilation_info.f90" + write(6,'(/,a)') ' Roters et al., Computational Materials Science 158, 2018, 420-478' + write(6,'(a,/)') ' https://doi.org/10.1016/j.commatsci.2018.04.030' + + write(6,'(a,/)') ' Version: '//DAMASKVERSION + +!
https://github.com/jeffhammond/HPCInfo/blob/master/docs/Preprocessor-Macros.md +#if defined(__GFORTRAN__) || __INTEL_COMPILER >= 1800 + write(6,*) 'Compiled with: ', compiler_version() + write(6,*) 'Compiler options: ', compiler_options() +#elif defined(__INTEL_COMPILER) + write(6,'(a,i4.4,a,i8.8)') ' Compiled with Intel fortran version :', __INTEL_COMPILER,& + ', build date :', __INTEL_COMPILER_BUILD_DATE +#elif defined(__PGI) + write(6,'(a,i4.4,a,i8.8)') ' Compiled with PGI fortran version :', __PGIC__,& + '.', __PGIC_MINOR__ +#endif + + write(6,*) 'Compiled on ', __DATE__,' at ',__TIME__ + + write(6,'(a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',dateAndTime(2),'/', dateAndTime(1) + write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':', dateAndTime(6),':', dateAndTime(7) call get_command(commandLine) chunkPos = IIO_stringPos(commandLine) @@ -215,9 +230,11 @@ subroutine DAMASK_interface_init() call get_environment_variable('USER',userName) ! ToDo: https://stackoverflow.com/questions/8953424/how-to-get-the-username-in-c-c-in-linux - write(6,'(a,a)') ' Host name: ', trim(getHostName()) - write(6,'(a,a)') ' User name: ', trim(userName) - write(6,'(a,a)') ' Command line call: ', trim(commandLine) + write(6,'(/,a,i4.1)') ' MPI processes: ',worldsize + write(6,'(a,a)') ' Host name: ', trim(getHostName()) + write(6,'(a,a)') ' User name: ', trim(userName) + + write(6,'(/a,a)') ' Command line call: ', trim(commandLine) if (len(trim(workingDirArg)) > 0) & write(6,'(a,a)') ' Working dir argument: ', trim(workingDirArg) write(6,'(a,a)') ' Geometry argument: ', trim(geometryArg) @@ -229,6 +246,12 @@ subroutine DAMASK_interface_init() if (interface_restartInc > 0_pInt) & write(6,'(a,i6.6)') ' Restart from increment: ', interface_restartInc + call signalusr1_c(c_funloc(setSIGUSR1)) + call signalusr2_c(c_funloc(setSIGUSR2)) + SIGUSR1 = .false. + SIGUSR2 = .false. + + end subroutine DAMASK_interface_init @@ -412,6 +435,35 @@ character(len=1024) function makeRelativePath(a,b) end function makeRelativePath +!-------------------------------------------------------------------------------------------------- +!> @brief sets global variable SIGUSR1 to .true. if program receives SIGUSR1 +!-------------------------------------------------------------------------------------------------- +subroutine setSIGUSR1(signal) bind(C) + use :: iso_c_binding + + implicit none + integer(C_INT), value :: signal + SIGUSR1 = .true. + + write(6,*) 'received signal ',signal, 'set SIGUSR1' + +end subroutine setSIGUSR1 + + +!-------------------------------------------------------------------------------------------------- +!> @brief sets global variable SIGUSR2 to .true. if program receives SIGUSR2 +!-------------------------------------------------------------------------------------------------- +subroutine setSIGUSR2(signal) bind(C) + use :: iso_c_binding + + implicit none + integer(C_INT), value :: signal + SIGUSR2 = .true. 
+ + write(6,*) 'received signal ',signal, 'set SIGUSR2' + +end subroutine setSIGUSR2 + !-------------------------------------------------------------------------------------------------- !> @brief taken from IO, check IO_stringValue for documentation !-------------------------------------------------------------------------------------------------- @@ -469,7 +521,6 @@ pure function IIO_stringPos(string) do while (verify(string(right+1:),SEP)>0) left = right + verify(string(right+1:),SEP) right = left + scan(string(left:),SEP) - 2 - if ( string(left:left) == '#' ) exit IIO_stringPos = [IIO_stringPos,int(left, pInt), int(right, pInt)] IIO_stringPos(1) = IIO_stringPos(1)+1_pInt enddo diff --git a/src/DAMASK_marc.f90 b/src/DAMASK_marc.f90 index d33cdd4cc..9b1427d78 100644 --- a/src/DAMASK_marc.f90 +++ b/src/DAMASK_marc.f90 @@ -53,17 +53,26 @@ subroutine DAMASK_interface_init character(len=1024) :: wd call date_and_time(values = dateAndTime) - write(6,'(/,a)') ' <<<+- DAMASK_Marc -+>>>' - write(6,'(/,a)') ' Roters et al., Computational Materials Science, 2018' - write(6,'(/,a)') ' Version: '//DAMASKVERSION - write(6,'(a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',& - dateAndTime(2),'/',& - dateAndTime(1) - write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':',& - dateAndTime(6),':',& - dateAndTime(7) - write(6,'(/,a)') ' <<<+- DAMASK_interface init -+>>>' -#include "compilation_info.f90" + write(6,'(/,a)') ' <<<+- DAMASK_Marc -+>>>' + write(6,'(/,a)') ' Roters et al., Computational Materials Science 158, 2018, 420-478' + write(6,'(a,/)') ' https://doi.org/10.1016/j.commatsci.2018.04.030' + + write(6,'(a,/)') ' Version: '//DAMASKVERSION + +! https://github.com/jeffhammond/HPCInfo/blob/master/docs/Preprocessor-Macros.md +#if __INTEL_COMPILER >= 1800 + write(6,*) 'Compiled with: ', compiler_version() + write(6,*) 'Compiler options: ', compiler_options() +#else + write(6,'(a,i4.4,a,i8.8)') ' Compiled with Intel fortran version :', __INTEL_COMPILER,& + ', build date :', __INTEL_COMPILER_BUILD_DATE +#endif + + write(6,*) 'Compiled on ', __DATE__,' at ',__TIME__ + + write(6,'(a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',dateAndTime(2),'/', dateAndTime(1) + write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':', dateAndTime(6),':', dateAndTime(7) + inquire(5, name=wd) !
determine input file wd = wd(1:scan(wd,'/',back=.true.)) ierr = CHDIR(wd) diff --git a/src/FEM_utilities.f90 b/src/FEM_utilities.f90 index bf5e62851..2087054bd 100644 --- a/src/FEM_utilities.f90 +++ b/src/FEM_utilities.f90 @@ -162,7 +162,6 @@ subroutine utilities_init() character(len=1024) :: petsc_optionsPhysics integer(pInt) :: dimPlex - integer(pInt) :: headerID = 205_pInt PetscInt, allocatable :: nEntities(:), nOutputCells(:), nOutputNodes(:) PetscInt :: dim PetscErrorCode :: ierr @@ -213,13 +212,6 @@ subroutine utilities_init() nOutputCells(worldrank+1) = count(material_homog > 0_pInt) call MPI_Allreduce(MPI_IN_PLACE,nOutputNodes,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,nOutputCells,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) - if (worldrank == 0_pInt) then - open(unit=headerID, file=trim(getSolverJobName())//'.header', & - form='FORMATTED', status='REPLACE') - write(headerID, '(a,i0)') 'dimension : ', dimPlex - write(headerID, '(a,i0)') 'number of nodes : ', sum(nOutputNodes) - write(headerID, '(a,i0)') 'number of cells : ', sum(nOutputCells) - endif end subroutine utilities_init diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 2a05f101c..0582318ce 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -18,7 +18,8 @@ module HDF5_utilities HDF5_ERR_TYPE = 4_pInt !< kind of the integer return in the HDF5 library !-------------------------------------------------------------------------------------------------- -!> @brief reads pInt or pReal data of defined shape from file +!> @brief reads pInt or pReal data of defined shape from file ! ToDo: order of arguments wrong +!> @details for parallel IO, all dimensions except the last need to match !-------------------------------------------------------------------------------------------------- interface HDF5_read module procedure HDF5_read_pReal1 @@ -40,7 +41,8 @@ module HDF5_utilities end interface HDF5_read !-------------------------------------------------------------------------------------------------- -!> @brief writes pInt or pReal data of defined shape to file +!> @brief writes pInt or pReal data of defined shape to file !
ToDo: order of arguments wrong +!> @details for parallel IO, all dimensions except the last need to match !-------------------------------------------------------------------------------------------------- interface HDF5_write module procedure HDF5_write_pReal1 @@ -445,1356 +447,587 @@ subroutine HDF5_setLink(loc_id,target_name,link_name) end subroutine HDF5_setLink + !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pReal with 1 dimensions +!> @brief read dataset of type pReal with 1 dimension !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pReal1(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - real(pReal), intent(inout), dimension(:) :: dataset + real(pReal), intent(inout), dimension(:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(1) :: myStart - -!------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return - -!------------------------------------------------------------------------------------------------- ! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - -!------------------------------------------------------------------------------------------------- ! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(1) -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal1: MPI_allreduce') - endif; endif -#endif - myStart = int([sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:0),sum(readSize)] - - -!-------------------------------------------------------------------------------------------------- ! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- -!
set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_all_coll_metadata_ops_f') -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! read - call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dread_f') - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/memspace_id') - -end subroutine HDF5_read_pReal1 - - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pReal with 2 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal2(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize - - implicit none - real(pReal), intent(inout), dimension(:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - logical, intent(in), optional :: parallel - - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr + globalShape !< shape of the dataset (all processes) integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(2) :: myStart -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! 
determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) + endif -!------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(2) -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal2: MPI_allreduce') - endif; endif -#endif - myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:1),sum(readSize)] - - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_all_coll_metadata_ops_f') -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! 
select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! read - call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dread_f') + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) + +end subroutine HDF5_read_pReal1 + !-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sclose_f/memspace_id') +!> @brief read dataset of type pReal with 2 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal2(loc_id,dataset,datasetName,parallel) + + implicit none + real(pReal), intent(inout), dimension(:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel + + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr + +!--------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) + +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif + + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,globalShape, hdferr,& + file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dread_f') + + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) end subroutine HDF5_read_pReal2 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pReal with 3 dimensions +!> @brief read dataset of type pReal with 3 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pReal3(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - real(pReal), intent(inout), dimension(:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(3) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.)
create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_all_coll_metadata_ops_f') -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! read - call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif + + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dread_f') -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sclose_f/memspace_id') + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) end subroutine HDF5_read_pReal3 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pReal with 4 dimensions +!> @brief read dataset of type pReal with 4 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pReal4(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - real(pReal), intent(inout), dimension(:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(4) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif -!------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(4) -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal4: MPI_allreduce') - endif; endif -#endif - myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:3),sum(readSize)] - - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_all_coll_metadata_ops_f') -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! 
read - call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dread_f') -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sclose_f/memspace_id') + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) end subroutine HDF5_read_pReal4 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pReal with 5 dimensions +!> @brief read dataset of type pReal with 5 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pReal5(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - real(pReal), intent(inout), dimension(:,:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(5) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of type pReal with 5 dimensions
+!> @brief read dataset of type pReal with 5 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_read_pReal5(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
-   worldrank, &
-   worldsize

 implicit none
- real(pReal),      intent(inout), dimension(:,:,:,:,:) :: dataset
+ real(pReal),      intent(inout), dimension(:,:,:,:,:) :: dataset
 integer(HID_T),   intent(in)  :: loc_id                              !< file or group handle
 character(len=*), intent(in)  :: datasetName                         !< name of the dataset in the file
 logical, intent(in), optional :: parallel
-
- integer(pInt), dimension(:), allocatable :: &
-   globalShape, &                                                    !< shape of the dataset (all processes)
-   localShape, &                                                     !< shape of the dataset (this process)
-   readSize                                                          !< contribution of all processes
- integer :: ierr
- integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T)   :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
- integer(HSIZE_T), dimension(5) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+   myStart, &
+   localShape, &                                                     !< shape of the dataset (this process)
+   globalShape                                                       !< shape of the dataset (all processes)
+ integer(HDF5_ERR_TYPE) :: hdferr

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return              !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+!---------------------------------------------------------------------------------------------------
+! initialize HDF5 data structures
+ if (present(parallel)) then
+   call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
+                        myStart, globalShape, loc_id,localShape,datasetName,parallel)
+ else
+   call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
+                        myStart, globalShape, loc_id,localShape,datasetName,.false.)
+ endif

-!-------------------------------------------------------------------------------------------------
-! creating a property list for data access properties
- call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
-!--------------------------------------------------------------------------------------------------
- allocate(readSize(worldsize), source = 0_pInt)
- readSize(worldrank+1) = localShape(5)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
-   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
-   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_dxpl_mpio_f')
-   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal5: MPI_allreduce')
- endif; endif
-#endif
- myStart     = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:4),sum(readSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
-                         int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id')
-!--------------------------------------------------------------------------------------------------
-! set I/O mode for read operations to collective
- call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_all_coll_metadata_ops_f')
-!--------------------------------------------------------------------------------------------------
-! open the dataset in the file
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! get the space_id of dataset in the file
- call h5dget_space_f(dset_id, filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! read
- call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
+ call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,globalShape, hdferr,&
                file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dread_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sclose_f/memspace_id')
+ call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)

 end subroutine HDF5_read_pReal5
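Note the behavioural change hidden in the new empty-dataset guard: the old code returned as soon as any extent was zero, while the new guard ignores the last (process-split) extent. A rank that holds no share of the data therefore still reaches the collective h5dread_f call instead of silently dropping out of it, which matters for MPI-collective I/O where every rank must participate. A sketch of the case this enables (array shape, group handle, and dataset name are illustrative only):

 real(pReal), dimension(3,3,0) :: noData                              ! this rank owns zero entries of the split dimension
 call HDF5_read_pReal3(group_id,noData,'F',parallel=.true.)           ! still takes part in the collective read;
                                                                      ! the old guard would have returned before h5dread_f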
-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of type pReal with 6 dimensions
+!> @brief read dataset of type pReal with 6 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_read_pReal6(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
-   worldrank, &
-   worldsize

 implicit none
- real(pReal),      intent(inout), dimension(:,:,:,:,:,:) :: dataset
+ real(pReal),      intent(inout), dimension(:,:,:,:,:,:) :: dataset
 integer(HID_T),   intent(in)  :: loc_id                              !< file or group handle
 character(len=*), intent(in)  :: datasetName                         !< name of the dataset in the file
 logical, intent(in), optional :: parallel
-
- integer(pInt), dimension(:), allocatable :: &
-   globalShape, &                                                    !< shape of the dataset (all processes)
-   localShape, &                                                     !< shape of the dataset (this process)
-   readSize                                                          !< contribution of all processes
- integer :: ierr
- integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T)   :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
- integer(HSIZE_T), dimension(6) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+   myStart, &
+   localShape, &                                                     !< shape of the dataset (this process)
+   globalShape                                                       !< shape of the dataset (all processes)
+ integer(HDF5_ERR_TYPE) :: hdferr

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return              !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+!---------------------------------------------------------------------------------------------------
+! initialize HDF5 data structures
+ if (present(parallel)) then
+   call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
+                        myStart, globalShape, loc_id,localShape,datasetName,parallel)
+ else
+   call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
+                        myStart, globalShape, loc_id,localShape,datasetName,.false.)
+ endif

-!-------------------------------------------------------------------------------------------------
-! creating a property list for data access properties
- call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
-!--------------------------------------------------------------------------------------------------
- allocate(readSize(worldsize), source = 0_pInt)
- readSize(worldrank+1) = localShape(6)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
-   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
-   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_dxpl_mpio_f')
-   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal6: MPI_allreduce')
- endif; endif
-#endif
- myStart     = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:5),sum(readSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
-                         int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id')
-!--------------------------------------------------------------------------------------------------
-! set I/O mode for read operations to collective
- call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_all_coll_metadata_ops_f')
-!--------------------------------------------------------------------------------------------------
-! open the dataset in the file
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! get the space_id of dataset in the file
- call h5dget_space_f(dset_id, filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! read
- call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
+ call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,globalShape, hdferr,&
                file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dread_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sclose_f/memspace_id')
+ call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)

 end subroutine HDF5_read_pReal6

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of type pReal with 7 dimensions
+!> @brief read dataset of type pReal with 7 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_read_pReal7(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
-   worldrank, &
-   worldsize

 implicit none
- real(pReal),      intent(inout), dimension(:,:,:,:,:,:,:) :: dataset
+ real(pReal),      intent(inout), dimension(:,:,:,:,:,:,:) :: dataset
 integer(HID_T),   intent(in)  :: loc_id                              !< file or group handle
 character(len=*), intent(in)  :: datasetName                         !< name of the dataset in the file
 logical, intent(in), optional :: parallel
-
- integer(pInt), dimension(:), allocatable :: &
-   globalShape, &                                                    !< shape of the dataset (all processes)
-   localShape, &                                                     !< shape of the dataset (this process)
-   readSize                                                          !< contribution of all processes
- integer :: ierr
- integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T)   :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
- integer(HSIZE_T), dimension(7) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+   myStart, &
+   localShape, &                                                     !< shape of the dataset (this process)
+   globalShape                                                       !< shape of the dataset (all processes)
+ integer(HDF5_ERR_TYPE) :: hdferr

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return              !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+!---------------------------------------------------------------------------------------------------
+! initialize HDF5 data structures
+ if (present(parallel)) then
+   call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
+                        myStart, globalShape, loc_id,localShape,datasetName,parallel)
+ else
+   call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
+                        myStart, globalShape, loc_id,localShape,datasetName,.false.)
+ endif

-!-------------------------------------------------------------------------------------------------
-! creating a property list for data access properties
- call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
-!--------------------------------------------------------------------------------------------------
- allocate(readSize(worldsize), source = 0_pInt)
- readSize(worldrank+1) = localShape(7)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
-   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
-   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_dxpl_mpio_f')
-   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal7: MPI_allreduce')
- endif; endif
-#endif
- myStart     = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:6),sum(readSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
-                         int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal{}: h5screate_simple_f/memspace_id')
-!--------------------------------------------------------------------------------------------------
-! set I/O mode for read operations to collective
- call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_all_coll_metadata_ops_f')
-!--------------------------------------------------------------------------------------------------
-! open the dataset in the file
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! get the space_id of dataset in the file
- call h5dget_space_f(dset_id, filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! read
- call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
+ call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,globalShape, hdferr,&
                file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dread_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sclose_f/memspace_id')
+ call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)

 end subroutine HDF5_read_pReal7
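initialize_read, the setup counterpart that all readers now call, is likewise defined outside these hunks. Reassembling the removed per-rank code (transfer and access property lists, the readSize all-reduce, memory dataspace, and hyperslab selection), its body is presumably close to the following sketch; the argument declarations and the omission of per-call error checks are assumptions:

 !--------------------------------------------------------------------------------------------------
 !> @brief sketch of initialize_read (actual routine defined outside these hunks)
 !--------------------------------------------------------------------------------------------------
 subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
                            myStart, globalShape, loc_id, localShape, datasetName, parallel)
  use numerics, only: &
    worldrank, &
    worldsize

  implicit none
  integer(HID_T),   intent(out) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
  integer(HSIZE_T), intent(out), dimension(:) :: myStart, globalShape
  integer(HID_T),   intent(in)  :: loc_id
  integer(HSIZE_T), intent(in),  dimension(:) :: localShape
  character(len=*), intent(in)  :: datasetName
  logical,          intent(in)  :: parallel

  integer(pInt), dimension(worldsize) :: readSize                     !< contribution of each process
  integer :: ierr
  integer(HDF5_ERR_TYPE) :: hdferr

  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)              ! transfer property list
  readSize = 0_pInt
  readSize(worldrank+1) = int(localShape(ubound(localShape,1)),pInt)  ! last dimension is split over the processes
#ifdef PETSc
  if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! total size over all processes
    ! (the removed hunks disagree on the datatype: MPI_INT for pReal, MPI_LONG for pInt;
    !  whichever matches the kind of pInt is the correct choice)
  endif
#endif
  myStart                     = int(0,HSIZE_T)
  myStart(ubound(myStart,1))  = int(sum(readSize(1:worldrank)),HSIZE_T)
  globalShape                 = [localShape(1:ubound(localShape,1)-1),int(sum(readSize),HSIZE_T)]

  call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)           ! access property list
  call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)      ! collective metadata operations
  call h5screate_simple_f(size(localShape), localShape, memspace_id, hdferr, localShape)
  call h5dopen_f(loc_id, datasetName, dset_id, hdferr, dapl_id = aplist_id)
  call h5dget_space_f(dset_id, filespace_id, hdferr)
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, localShape, hdferr)

 end subroutine initialize_read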
+ endif -!-------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(1) - -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt1: MPI_allreduce') - endif; endif -#endif - - myStart = int([sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:0),sum(readSize)] - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_all_coll_metadata_ops_f') - -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! 
read - call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dread_f') -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sclose_f/memspace_id') - + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) + end subroutine HDF5_read_pInt1 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pInt with 2 dimensions +!> @brief read dataset of type pInt with 2 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pInt2(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - integer(pInt), intent(inout), dimension(:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(2) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif -!-------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(2) - -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt2: MPI_allreduce') - endif; endif -#endif - - myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:1),sum(readSize)] - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_all_coll_metadata_ops_f') - -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! 
read - call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dread_f') - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sclose_f/memspace_id') + + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) end subroutine HDF5_read_pInt2 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pInt with 3 dimensions +!> @brief read dataset of type pInt with 3 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pInt3(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - integer(pInt), intent(inout), dimension(:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(3) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif -!-------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(3) - -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt3: MPI_allreduce') - endif; endif -#endif - - myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:2),sum(readSize)] - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_all_coll_metadata_ops_f') - -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! 
read - call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dread_f') - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sclose_f/memspace_id') + + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) end subroutine HDF5_read_pInt3 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pInt with 4 dimensions +!> @brief read dataset of type pInt withh 4 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pInt4(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - integer(pInt), intent(inout), dimension(:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(4) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif -!-------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(4) - -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt4: MPI_allreduce') - endif; endif -#endif - - myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:3),sum(readSize)] - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_all_coll_metadata_ops_f') - -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! 
read - call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dread_f') - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sclose_f/memspace_id') + + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) end subroutine HDF5_read_pInt4 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pInt with 5 dimensions +!> @brief read dataset of type pInt with 5 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pInt5(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - integer(pInt), intent(inout), dimension(:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(5) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif -!-------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(5) - -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt5: MPI_allreduce') - endif; endif -#endif - - myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:4),sum(readSize)] - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_all_coll_metadata_ops_f') - -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! 
read - call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dread_f') - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sclose_f/memspace_id') + + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) end subroutine HDF5_read_pInt5 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pInt with 6 dimensions +!> @brief read dataset of type pInt with 6 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pInt6(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - integer(pInt), intent(inout), dimension(:,:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(6) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif -!-------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(6) - -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt6: MPI_allreduce') - endif; endif -#endif - - myStart = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:5),sum(readSize)] - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_all_coll_metadata_ops_f') - -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! 
read - call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, & + call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,globalShape, hdferr,& file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dread_f') - -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sclose_f/memspace_id') + + call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id) end subroutine HDF5_read_pInt6 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of type pInt with 7 dimensions +!> @brief read dataset of type pInt with 7 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_read_pInt7(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none - integer(pInt), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset + integer(pInt), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file logical, intent(in), optional :: parallel - - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - readSize !< contribution of all processes - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id - integer(HSIZE_T), dimension(7) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) + integer(HDF5_ERR_TYPE) :: hdferr -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) +!--------------------------------------------------------------------------------------------------- +! initialize HDF5 data structures + if (present(parallel)) then + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,parallel) + else + call initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, & + myStart, globalShape, loc_id,localShape,datasetName,.false.) 
+ endif -!-------------------------------------------------------------------------------------------------- -! creating a property list for data access properties - call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) -!-------------------------------------------------------------------------------------------------- - allocate(readSize(worldsize), source = 0_pInt) - readSize(worldrank+1) = localShape(7) - -#ifdef PETSc - if (present(parallel)) then; if (parallel) then - call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt7: MPI_allreduce') - endif; endif -#endif - - myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) - globalShape = [localShape(1:6),sum(readSize)] - -!-------------------------------------------------------------------------------------------------- -! create dataspace in memory (local shape) - call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & - int(localShape,HSIZE_T)) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt{}: h5screate_simple_f/memspace_id') -!-------------------------------------------------------------------------------------------------- -! set I/O mode for read operations to collective - call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_all_coll_metadata_ops_f') - -!-------------------------------------------------------------------------------------------------- -! open the dataset in the file - call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dopen_f') - -!-------------------------------------------------------------------------------------------------- -! get the space_id of dataset in the file - call h5dget_space_f(dset_id, filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dget_space_f') - -!-------------------------------------------------------------------------------------------------- -! select a hyperslab (the portion of the current process) in the file - call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sselect_hyperslab_f') - -!-------------------------------------------------------------------------------------------------- -! 
- call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,globalShape, hdferr,&
                 file_space_id = filespace_id, xfer_prp = plist_id, mem_space_id = memspace_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dread_f')
-
-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sclose_f/memspace_id')
+
+ call finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)

 end subroutine HDF5_read_pInt7
+
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pReal with 1 dimensions
+!> @brief write dataset of type pReal with 1 dimension
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pReal1(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 real(pReal), intent(inout), dimension(:) :: dataset
@@ -1803,89 +1036,38 @@ subroutine HDF5_write_pReal1(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(1) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape,loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape,loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(1)
-
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal1: MPI_allreduce')
- endif; endif
-#endif
-
- myStart = int([sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:0),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
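+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write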
 call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pReal1

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pReal with 2 dimensions
+!> @brief write dataset of type pReal with 2 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pReal2(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 real(pReal), intent(inout), dimension(:,:) :: dataset
@@ -1894,89 +1076,38 @@ subroutine HDF5_write_pReal2(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(2) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(2)
-
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal2: MPI_allreduce')
- endif; endif
-#endif
-
- myStart = int([0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:1),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
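+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write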
 call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pReal2

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pReal with 3 dimensions
+!> @brief write dataset of type pReal with 3 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pReal3(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 real(pReal), intent(inout), dimension(:,:,:) :: dataset
@@ -1985,89 +1116,38 @@ subroutine HDF5_write_pReal3(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(3) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(3)
-
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal3: MPI_allreduce')
- endif; endif
-#endif
-
- myStart = int([0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:2),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
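+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write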
 call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pReal3

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pReal with 4 dimensions
+!> @brief write dataset of type pReal with 4 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pReal4(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 real(pReal), intent(inout), dimension(:,:,:,:) :: dataset
@@ -2076,89 +1156,39 @@ subroutine HDF5_write_pReal4(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(4) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(4)
-
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal4: MPI_allreduce')
- endif; endif
-#endif
-
- myStart = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:3),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
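+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write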
 call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pReal4

 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pReal with 5 dimensions
+!> @brief write dataset of type pReal with 5 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pReal5(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 real(pReal), intent(inout), dimension(:,:,:,:,:) :: dataset
@@ -2167,89 +1197,38 @@ subroutine HDF5_write_pReal5(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(5) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(5)
-
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal5: MPI_allreduce')
- endif; endif
-#endif
-
- myStart = int([0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:4),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
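+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write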
 call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pReal5

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pReal with 6 dimensions
+!> @brief write dataset of type pReal with 6 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pReal6(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 real(pReal), intent(inout), dimension(:,:,:,:,:,:) :: dataset
@@ -2258,89 +1237,38 @@ subroutine HDF5_write_pReal6(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(6) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(6)
-
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal6: MPI_allreduce')
- endif; endif
-#endif
-
- myStart = int([0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:5),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
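+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write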
 call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pReal6

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pReal with 7 dimensions
+!> @brief write dataset of type pReal with 7 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pReal7(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 real(pReal), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset
@@ -2349,91 +1277,39 @@ subroutine HDF5_write_pReal7(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(7) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_DOUBLE,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(7)
-
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal7: MPI_allreduce')
- endif; endif
-#endif
-
- myStart = int([0,0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:6),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_DOUBLE, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
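+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write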
 call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pReal7

-
-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pInt with 1 dimensions
+!> @brief write dataset of type pInt with 1 dimension
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pInt1(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 integer(pInt), intent(inout), dimension(:) :: dataset
@@ -2442,87 +1318,38 @@ subroutine HDF5_write_pInt1(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(1) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(1)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt1: MPI_allreduce')
- endif; endif
-#endif
- myStart = int([sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:0),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
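+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write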
 call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pInt1

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pInt with 2 dimensions
+!> @brief write dataset of type pInt with 2 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pInt2(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 integer(pInt), intent(inout), dimension(:,:) :: dataset
@@ -2531,87 +1358,38 @@ subroutine HDF5_write_pInt2(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(2) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(2)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt2: MPI_allreduce')
- endif; endif
-#endif
- myStart = int([0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:1),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
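+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write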
 call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5dwrite_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pInt2

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pInt with 3 dimensions
+!> @brief write dataset of type pInt with 3 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pInt3(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
- worldrank, &
- worldsize

 implicit none
 integer(pInt), intent(inout), dimension(:,:,:) :: dataset
@@ -2620,87 +1398,38 @@ subroutine HDF5_write_pInt3(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
- globalShape, & !< shape of the dataset (all processes)
- localShape, & !< shape of the dataset (this process)
- outputSize !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(3) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+ myStart, &
+ localShape, & !< shape of the dataset (this process)
+ globalShape !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,parallel)
+ else
+ call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+ myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(3)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
- call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_dxpl_mpio_f')
- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
- if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt3: MPI_allreduce')
- endif; endif
-#endif
- myStart = int([0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:2),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
- int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
- int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
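+! write the process-local data; dataspaces and transfer properties were prepared by initialize_write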
write call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,& file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5dread_f') -!-------------------------------------------------------------------------------------------------- -!close types, dataspaces - call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: plist_id') - call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dclose_f') - call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/filespace_id') - call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5sclose_f/memspace_id') + call finalize_write(plist_id, dset_id, filespace_id, memspace_id) end subroutine HDF5_write_pInt3 - !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for writing dataset of type pInt with 4 dimensions +!> @brief write dataset of type pInt with 4 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_write_pInt4(loc_id,dataset,datasetName,parallel) - use numerics, only: & - worldrank, & - worldsize implicit none integer(pInt), intent(inout), dimension(:,:,:,:) :: dataset @@ -2709,87 +1438,38 @@ subroutine HDF5_write_pInt4(loc_id,dataset,datasetName,parallel) logical, intent(in), optional :: parallel - integer(pInt), dimension(:), allocatable :: & - globalShape, & !< shape of the dataset (all processes) - localShape, & !< shape of the dataset (this process) - outputSize !< contribution of all processes - integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id - integer(HSIZE_T), dimension(4) :: myStart + integer(HSIZE_T), dimension(size(shape(dataset))) :: & + myStart, & + localShape, & !< shape of the dataset (this process) + globalShape !< shape of the dataset (all processes) -!------------------------------------------------------------------------------------------------- +!--------------------------------------------------------------------------------------------------- ! determine shape of dataset - localShape = shape(dataset) - if (any(localShape(1:size(localShape)) == 0)) return + localShape = int(shape(dataset),HSIZE_T) + if (any(localShape(1:size(localShape)-1) == 0)) return !< empty dataset (last dimension can be empty) -!------------------------------------------------------------------------------------------------- -! creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + if (present(parallel)) then + call initialize_write(dset_id, filespace_id, memspace_id, plist_id, & + myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,parallel) + else + call initialize_write(dset_id, filespace_id, memspace_id, plist_id, & + myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,.false.) 
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(4)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
-   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
-   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_dxpl_mpio_f')
-   call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)    ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt4: MPI_allreduce')
- endif; endif
-#endif
- myStart     = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:3),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
-                         int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
-                         int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
 call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5dread_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pInt4

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pInt with 5 dimensions
+!> @brief write dataset of type pInt with 5 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pInt5(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
-   worldrank, &
-   worldsize

 implicit none
 integer(pInt),    intent(inout), dimension(:,:,:,:,:) :: dataset
@@ -2798,87 +1478,38 @@ subroutine HDF5_write_pInt5(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
-   globalShape, &                                                     !< shape of the dataset (all processes)
-   localShape, &                                                      !< shape of the dataset (this process)
-   outputSize                                                         !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T)   :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(5) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+   myStart, &
+   localShape, &                                                      !< shape of the dataset (this process)
+   globalShape                                                        !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return               !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+   call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                         myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,parallel)
+ else
+   call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                         myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(5)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
-   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
-   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_dxpl_mpio_f')
-   call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)    ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt5: MPI_allreduce')
- endif; endif
-#endif
- myStart     = int([0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:4),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
-                         int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
-                         int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
 call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5dread_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pInt5

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pInt with 6 dimensions
+!> @brief write dataset of type pInt with 6 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pInt6(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
-   worldrank, &
-   worldsize

 implicit none
 integer(pInt),    intent(inout), dimension(:,:,:,:,:,:) :: dataset
@@ -2887,87 +1518,38 @@ subroutine HDF5_write_pInt6(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
-   globalShape, &                                                     !< shape of the dataset (all processes)
-   localShape, &                                                      !< shape of the dataset (this process)
-   outputSize                                                         !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T)   :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(6) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+   myStart, &
+   localShape, &                                                      !< shape of the dataset (this process)
+   globalShape                                                        !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return               !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+   call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                         myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,parallel)
+ else
+   call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                         myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(6)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
-   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
-   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_dxpl_mpio_f')
-   call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)    ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt6: MPI_allreduce')
- endif; endif
-#endif
- myStart     = int([0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:5),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
-                         int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
-                         int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
 call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5dread_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pInt6

-
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for writing dataset of type pInt with 7 dimensions
+!> @brief write dataset of type pInt with 7 dimensions
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_write_pInt7(loc_id,dataset,datasetName,parallel)
- use numerics, only: &
-   worldrank, &
-   worldsize

 implicit none
 integer(pInt),    intent(inout), dimension(:,:,:,:,:,:,:) :: dataset
@@ -2976,93 +1558,216 @@ subroutine HDF5_write_pInt7(loc_id,dataset,datasetName,parallel)
 logical, intent(in), optional :: parallel

- integer(pInt), dimension(:), allocatable :: &
-   globalShape, &                                                     !< shape of the dataset (all processes)
-   localShape, &                                                      !< shape of the dataset (this process)
-   outputSize                                                         !< contribution of all processes
- integer :: ierr
 integer(HDF5_ERR_TYPE) :: hdferr
 integer(HID_T)   :: dset_id, filespace_id, memspace_id, plist_id
- integer(HSIZE_T), dimension(7) :: myStart
+ integer(HSIZE_T), dimension(size(shape(dataset))) :: &
+   myStart, &
+   localShape, &                                                      !< shape of the dataset (this process)
+   globalShape                                                        !< shape of the dataset (all processes)

-!-------------------------------------------------------------------------------------------------
+!---------------------------------------------------------------------------------------------------
 ! determine shape of dataset
- localShape = shape(dataset)
- if (any(localShape(1:size(localShape)) == 0)) return
+ localShape = int(shape(dataset),HSIZE_T)
+ if (any(localShape(1:size(localShape)-1) == 0)) return               !< empty dataset (last dimension can be empty)

-!-------------------------------------------------------------------------------------------------
-! creating a property list for transfer properties
- call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (present(parallel)) then
+   call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                         myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,parallel)
+ else
+   call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                         myStart, globalShape, loc_id,localShape,datasetName,H5T_NATIVE_INTEGER,.false.)
+ endif

-!--------------------------------------------------------------------------------------------------
- allocate(outputSize(worldsize), source = 0_pInt)
- outputSize(worldrank+1) = localShape(7)
-#ifdef PETSc
- if (present(parallel)) then; if (parallel) then
-   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
-   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_dxpl_mpio_f')
-   call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)    ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt7: MPI_allreduce')
- endif; endif
-#endif
- myStart     = int([0,0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
- globalShape = [localShape(1:6),sum(outputSize)]
-
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in memory (local shape)
- call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
-                         int(localShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dopen_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataspace in file (global shape)
- call h5screate_simple_f(size(globalShape), int(globalShape,HSIZE_T), filespace_id, hdferr, &
-                         int(globalShape,HSIZE_T))
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dget_space_f')
-
-!--------------------------------------------------------------------------------------------------
-! create dataset
- call h5dcreate_f(loc_id, trim(datasetName), H5T_NATIVE_INTEGER, filespace_id, dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dcreate_f')
-!--------------------------------------------------------------------------------------------------
-! select a hyperslab (the portion of the current process) in the file
- call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sselect_hyperslab_f')
-
-!--------------------------------------------------------------------------------------------------
-! write
 call h5dwrite_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
 if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5dread_f')

-!--------------------------------------------------------------------------------------------------
-!close types, dataspaces
- call h5pclose_f(plist_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: plist_id')
- call h5dclose_f(dset_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dclose_f')
- call h5sclose_f(filespace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/filespace_id')
- call h5sclose_f(memspace_id, hdferr)
- if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5sclose_f/memspace_id')
+ call finalize_write(plist_id, dset_id, filespace_id, memspace_id)

 end subroutine HDF5_write_pInt7

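For orientation, a minimal caller-side sketch of the refactored writers (hypothetical, not part of the patch; group_id is assumed to be an already opened HDF5 file or group handle, and the typed HDF5_write_pInt* routines are assumed to be reachable, e.g. through the module's generic HDF5_write interface):

 integer(HID_T)                   :: group_id                         ! assumed: opened elsewhere
 integer(pInt), dimension(3,5,10) :: cellData                         ! last dimension is the MPI-decomposed one
 cellData = 0_pInt
 call HDF5_write_pInt3(group_id,cellData,'cellData',parallel=.true.)  ! collective write: every rank contributes its hyperslab
 call HDF5_write_pInt3(group_id,cellData,'cellDataLocal')             ! flag omitted: initialize_write is entered with .false.
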
+
+!--------------------------------------------------------------------------------------------------
+!> @brief initializes HDF5 handles, determines global shape and start for parallel read
+!--------------------------------------------------------------------------------------------------
+subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id, &
+                           myStart, globalShape, &
+                           loc_id,localShape,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
+
+ implicit none
+ integer(HID_T),   intent(in) :: loc_id                               !< file or group handle
+ character(len=*), intent(in) :: datasetName                          !< name of the dataset in the file
+ logical, intent(in) :: parallel
+ integer(HSIZE_T), intent(in), dimension(:) :: &
+   localShape
+ integer(HSIZE_T), intent(out), dimension(size(localShape,1)):: &
+   myStart, &
+   globalShape                                                        !< shape of the dataset (all processes)
+ integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+
+ integer(pInt), dimension(worldsize) :: &
+   readSize                                                           !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties (is collective for MPI)
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pcreate_f')
+
+!--------------------------------------------------------------------------------------------------
+ readSize = 0_pInt
+ readSize(worldrank+1) = int(localShape(ubound(localShape,1)),pInt)
+#ifdef PETSc
+ if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)      ! get total output size over all processes
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='initialize_read: MPI_allreduce')
+ endif
+#endif
+ myStart                  = int(0,HSIZE_T)
+ myStart(ubound(myStart)) = int(sum(readSize(1:worldrank)),HSIZE_T)
+ globalShape = [localShape(1:ubound(localShape,1)-1),int(sum(readSize),HSIZE_T)]
+
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+ call h5screate_simple_f(size(localShape), localShape, memspace_id, hdferr, localShape)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5screate_simple_f/memspace_id')
+
+!--------------------------------------------------------------------------------------------------
+! creating a property list for IO and setting it to collective
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pcreate_f')
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5pset_all_coll_metadata_ops_f')
+
+!--------------------------------------------------------------------------------------------------
+! open the dataset in the file and get the space ID
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5dopen_f')
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, localShape, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_read: h5sselect_hyperslab_f')
+
+end subroutine initialize_read
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief closes HDF5 handles
+!--------------------------------------------------------------------------------------------------
+subroutine finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
+
+ implicit none
+ integer(HID_T), intent(in) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HDF5_ERR_TYPE) :: hdferr
+
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: h5sclose_f/filespace_id')
+ call h5sclose_f(memspace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_read: h5sclose_f/memspace_id')
+
+end subroutine finalize_read
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief initializes HDF5 handles, determines global shape and start for parallel write
+!--------------------------------------------------------------------------------------------------
+subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
+                            myStart, globalShape, &
+                            loc_id,localShape,datasetName,datatype,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
+
+ implicit none
+ integer(HID_T),   intent(in) :: loc_id                               !< file or group handle
+ character(len=*), intent(in) :: datasetName                          !< name of the dataset in the file
+ logical, intent(in) :: parallel
+ integer(HID_T),   intent(in) :: datatype
+ integer(HSIZE_T), intent(in), dimension(:) :: &
+   localShape
+ integer(HSIZE_T), intent(out), dimension(size(localShape,1)):: &
+   myStart, &
+   globalShape                                                        !< shape of the dataset (all processes)
+ integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id
+
+ integer(pInt), dimension(worldsize) :: &
+   writeSize                                                          !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5pcreate_f')
+
+!--------------------------------------------------------------------------------------------------
+ writeSize              = 0_pInt
+ writeSize(worldrank+1) = int(localShape(ubound(localShape,1)),pInt)
+
+#ifdef PETSc
+ if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over all processes
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='initialize_write: MPI_allreduce')
+ endif
+#endif
+
+ myStart                  = int(0,HSIZE_T)
+ myStart(ubound(myStart)) = int(sum(writeSize(1:worldrank)),HSIZE_T)
+ globalShape = [localShape(1:ubound(localShape,1)-1),int(sum(writeSize),HSIZE_T)]
+
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape) and in file (global shape)
+ call h5screate_simple_f(size(localShape), localShape, memspace_id, hdferr, localShape)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5screate_simple_f/memspace_id')
+ call h5screate_simple_f(size(globalShape), globalShape, filespace_id, hdferr, globalShape)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5screate_simple_f/filespace_id')
+
+!--------------------------------------------------------------------------------------------------
+! create dataset
+ call h5dcreate_f(loc_id, trim(datasetName), datatype, filespace_id, dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5dcreate_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, localShape, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='initialize_write: h5sselect_hyperslab_f')
+
+end subroutine initialize_write
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief closes HDF5 handles
+!--------------------------------------------------------------------------------------------------
+subroutine finalize_write(plist_id, dset_id, filespace_id, memspace_id)
+
+ implicit none
+ integer(HID_T), intent(in) :: dset_id, filespace_id, memspace_id, plist_id
+ integer(HDF5_ERR_TYPE) :: hdferr
+
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_write: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_write: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_write: h5sclose_f/filespace_id')
+ call h5sclose_f(memspace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='finalize_write: h5sclose_f/memspace_id')
+
+end subroutine finalize_write
+
 end module HDF5_Utilities
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
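The offset arithmetic in initialize_read/initialize_write is easiest to check with numbers; a self-contained sketch (hypothetical per-rank sizes, not part of the patch):

program offsets
 implicit none
 integer, dimension(3) :: writeSize = [2,3,4]                         ! per-rank extent of the last dimension after MPI_allreduce
 integer :: worldrank
 do worldrank = 0, 2
   print '(a,i0,a,i0)', 'rank ', worldrank, ' starts at offset ', sum(writeSize(1:worldrank))
 enddo                                                                ! prints 0, 2 and 5; globalShape's last entry is sum(writeSize) = 9
end program offsets
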
diff --git a/src/compilation_info.f90 b/src/compilation_info.f90
index f0ca4d4cc..e69de29bb 100644
--- a/src/compilation_info.f90
+++ b/src/compilation_info.f90
@@ -1,10 +0,0 @@
-#if defined(__GFORTRAN__) || __INTEL_COMPILER >= 1800
- write(6,*) 'Compiled with ', compiler_version()
- write(6,*) 'With options ', compiler_options()
-#else
- write(6,'(a,i4.4,a,i8.8)') ' Compiled with Intel fortran version ', __INTEL_COMPILER,&
-                            ', build date ', __INTEL_COMPILER_BUILD_DATE
-#endif
-write(6,*) 'Compiled on ', __DATE__,' at ',__TIME__
-write(6,*)
-flush(6)
diff --git a/src/kinematics_thermal_expansion.f90 b/src/kinematics_thermal_expansion.f90
index 3d1de3d0a..3696593ad 100644
--- a/src/kinematics_thermal_expansion.f90
+++ b/src/kinematics_thermal_expansion.f90
@@ -24,10 +24,10 @@ module kinematics_thermal_expansion
 integer(pInt), dimension(:), allocatable, target, public :: &
   kinematics_thermal_expansion_Noutput                               !< number of outputs per instance of this damage

-! enum, bind(c)                                                      ! ToDo kinematics need state machinery to deal with sizePostResult
-!   enumerator :: undefined_ID, &                                    ! possible remedy is to decouple having state vars from having output
-!                 thermalexpansionrate_ID                            ! which means to separate user-defined types tState + tOutput...
-! end enum
+ enum, bind(c)                                                       ! ToDo kinematics need state machinery to deal with sizePostResult
+   enumerator :: undefined_ID, &                                     ! possible remedy is to decouple having state vars from having output
+                 thermalexpansionrate_ID                             ! which means to separate user-defined types tState + tOutput...
+ end enum

 public :: &
   kinematics_thermal_expansion_init, &
   kinematics_thermal_expansion_initialStrain, &
diff --git a/src/math.f90 b/src/math.f90
index 28c7175e3..e663103c8 100644
--- a/src/math.f90
+++ b/src/math.f90
@@ -118,6 +118,9 @@ module math
 !---------------------------------------------------------------------------------------------------
 public :: &
+#if defined(__PGI)
+  norm2, &
+#endif
   math_init, &
   math_qsort, &
   math_expand, &
@@ -351,20 +354,38 @@ end subroutine math_check

 !--------------------------------------------------------------------------------------------------
 !> @brief Quicksort algorithm for two-dimensional integer arrays
-! Sorting is done with respect to array(1,:)
-! and keeps array(2:N,:) linked to it.
+! Sorting is done with respect to array(sort,:) and keeps array(/=sort,:) linked to it.
+! default: sort=1
 !--------------------------------------------------------------------------------------------------
-recursive subroutine math_qsort(a, istart, iend)
+recursive subroutine math_qsort(a, istart, iend, sortDim)

 implicit none
 integer(pInt), dimension(:,:), intent(inout) :: a
- integer(pInt), intent(in) :: istart,iend
- integer(pInt) :: ipivot
-
- if (istart < iend) then
-   ipivot = qsort_partition(a,istart, iend)
-   call math_qsort(a, istart, ipivot-1_pInt)
-   call math_qsort(a, ipivot+1_pInt, iend)
+ integer(pInt), intent(in),optional :: istart,iend, sortDim
+ integer(pInt) :: ipivot,s,e,d
+
+ if(present(istart)) then
+   s = istart
+ else
+   s = lbound(a,2)
+ endif
+
+ if(present(iend)) then
+   e = iend
+ else
+   e = ubound(a,2)
+ endif
+
+ if(present(sortDim)) then
+   d = sortDim
+ else
+   d = 1
+ endif
+
+ if (s < e) then
+   ipivot = qsort_partition(a,s, e, d)
+   call math_qsort(a, s, ipivot-1_pInt, d)
+   call math_qsort(a, ipivot+1_pInt, e, d)
 endif

 !--------------------------------------------------------------------------------------------------
@@ -373,37 +394,34 @@ recursive subroutine math_qsort(a, istart, iend)
 !-------------------------------------------------------------------------------------------------
 !> @brief Partitioning required for quicksort
 !-------------------------------------------------------------------------------------------------
- integer(pInt) function qsort_partition(a, istart, iend)
+ integer(pInt) function qsort_partition(a, istart, iend, sort)

  implicit none
  integer(pInt), dimension(:,:), intent(inout) :: a
- integer(pInt), intent(in) :: istart,iend
- integer(pInt) :: i,j,k,tmp
+ integer(pInt), intent(in) :: istart,iend,sort
+ integer(pInt), dimension(size(a,1)) :: tmp
+ integer(pInt) :: i,j

  do
-  ! find the first element on the right side less than or equal to the pivot point
+   ! find the first element on the right side less than or equal to the pivot point
    do j = iend, istart, -1_pInt
-     if (a(1,j) <= a(1,istart)) exit
+     if (a(sort,j) <= a(sort,istart)) exit
    enddo
-  ! find the first element on the left side greater than the pivot point
+   ! find the first element on the left side greater than the pivot point
    do i = istart, iend
-     if (a(1,i) > a(1,istart)) exit
+     if (a(sort,i) > a(sort,istart)) exit
    enddo
-   if (i < j) then                                                   ! if the indexes do not cross, exchange values
-     do k = 1_pInt, int(size(a,1_pInt), pInt)
-       tmp    = a(k,i)
-       a(k,i) = a(k,j)
-       a(k,j) = tmp
-     enddo
-   else                                                              ! if they do cross, exchange left value with pivot and return with the partition index
-     do k = 1_pInt, int(size(a,1_pInt), pInt)
-       tmp         = a(k,istart)
-       a(k,istart) = a(k,j)
-       a(k,j)      = tmp
-     enddo
+   cross: if (i >= j) then                                           ! if the indices cross, exchange left value with pivot and return with the partition index
+     tmp         = a(:,istart)
+     a(:,istart) = a(:,j)
+     a(:,j)      = tmp
      qsort_partition = j
      return
-   endif
+   else cross                                                        ! if they do not cross, exchange values
+     tmp    = a(:,i)
+     a(:,i) = a(:,j)
+     a(:,j) = tmp
+   endif cross
  enddo

 end function qsort_partition
@@ -2707,4 +2725,19 @@ real(pReal) pure elemental function math_clip(a, left, right)

 end function math_clip

+
+#if defined(__PGI)
+!--------------------------------------------------------------------------------------------------
+!> @brief substitute for the norm2 intrinsic which is not available in PGI 18.10
+!--------------------------------------------------------------------------------------------------
+real(pReal) pure function norm2(v)
+
+ implicit none
+ real(pReal), intent(in), dimension(3) :: v
+
+ norm2 = sqrt(sum(v**2))
+
+end function norm2
+#endif
+
 end module math
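With all three bounds of math_qsort now optional, callers can sort a full array by any row via keyword arguments; a self-contained sketch (hypothetical data, not part of the patch; pInt taken from DAMASK's prec module):

program qsort_demo
 use prec, only: pInt
 use math, only: math_qsort
 implicit none
 integer(pInt), dimension(2,4) :: a
 a = reshape([3_pInt,30_pInt, 1_pInt,10_pInt, 2_pInt,20_pInt, 1_pInt,40_pInt],[2,4])
 call math_qsort(a)                                                   ! sorts the full range by row 1, row 2 stays linked
 call math_qsort(a, sortDim=2_pInt)                                   ! sorts by row 2 instead
 write(6,'(4(i0,1x))') a(1,:)                                         ! columns now ordered by their row-2 entries
end program qsort_demo
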
diff --git a/src/system_routines.f90 b/src/system_routines.f90
index bea777a3d..27f0cae34 100644
--- a/src/system_routines.f90
+++ b/src/system_routines.f90
@@ -3,11 +3,17 @@
 !> @brief provides wrappers to C routines
 !--------------------------------------------------------------------------------------------------
 module system_routines
-
+ use, intrinsic :: ISO_C_Binding, only: &
+   C_INT, &
+   C_CHAR, &
+   C_NULL_CHAR
+
 implicit none
 private
 public :: &
+  signalusr1_C, &
+  signalusr2_C, &
   isDirectory, &
   getCWD, &
   getHostName, &
@@ -27,7 +33,7 @@ interface
     use, intrinsic :: ISO_C_Binding, only: &
       C_INT, &
      C_CHAR
-    character(kind=C_CHAR), dimension(1024), intent(out) :: str      ! C string is an array
+    character(kind=C_CHAR), dimension(1024), intent(out) :: str                                   ! C string is an array
     integer(C_INT),intent(out) :: stat
   end subroutine getCurrentWorkDir_C

@@ -35,7 +41,7 @@ interface
     use, intrinsic :: ISO_C_Binding, only: &
      C_INT, &
      C_CHAR
-    character(kind=C_CHAR), dimension(1024), intent(out) :: str      ! C string is an array
+    character(kind=C_CHAR), dimension(1024), intent(out) :: str                                   ! C string is an array
     integer(C_INT),intent(out) :: stat
   end subroutine getHostName_C

@@ -46,31 +52,38 @@ interface
     integer(C_INT) :: chdir_C
     character(kind=C_CHAR), dimension(1024), intent(in) :: path      ! C string is an array
   end function chdir_C
+
+  subroutine signalusr1_C(handler) bind(C)
+    use, intrinsic :: ISO_C_Binding, only: &
+      C_FUNPTR
+    type(C_FUNPTR), intent(in), value :: handler
+  end subroutine signalusr1_C
+
+  subroutine signalusr2_C(handler) bind(C)
+    use, intrinsic :: ISO_C_Binding, only: &
+      C_FUNPTR
+    type(C_FUNPTR), intent(in), value :: handler
+  end subroutine signalusr2_C
+
 end interface
-
 contains

 !--------------------------------------------------------------------------------------------------
 !> @brief figures out if a given path is a directory (and not an ordinary file)
 !--------------------------------------------------------------------------------------------------
 logical function isDirectory(path)
-  use, intrinsic :: ISO_C_Binding, only: &
-    C_INT, &
-    C_CHAR, &
-    C_NULL_CHAR
-  implicit none
-  character(len=*), intent(in) :: path
-  character(kind=C_CHAR), dimension(1024) :: strFixedLength
-  integer :: i
+ implicit none
+ character(len=*), intent(in) :: path
+ character(kind=C_CHAR), dimension(1024) :: strFixedLength                                        ! C string as array
+ integer :: i

-  strFixedLength = repeat(C_NULL_CHAR,len(strFixedLength))
-  do i=1,len(path)                                                   ! copy array components
-    strFixedLength(i)=path(i:i)
-  enddo
-  isDirectory=merge(.True.,.False.,isDirectory_C(strFixedLength) /= 0_C_INT)
+ strFixedLength = repeat(C_NULL_CHAR,len(strFixedLength))
+ do i=1,len(path)                                                                                 ! copy array components
+   strFixedLength(i)=path(i:i)
+ enddo
+ isDirectory=merge(.True.,.False.,isDirectory_C(strFixedLength) /= 0_C_INT)

 end function isDirectory

@@ -79,29 +92,25 @@ end function isDirectory
 !--------------------------------------------------------------------------------------------------
 !> @brief gets the current working directory
 !--------------------------------------------------------------------------------------------------
 character(len=1024) function getCWD()
-  use, intrinsic :: ISO_C_Binding, only: &
-    C_INT, &
-    C_CHAR, &
-    C_NULL_CHAR
-  implicit none
-  character(kind=C_CHAR), dimension(1024) :: charArray               ! C string is an array
-  integer(C_INT) :: stat
-  integer :: i
+ implicit none
+ character(kind=C_CHAR), dimension(1024) :: charArray                                             ! C string is an array
+ integer(C_INT) :: stat
+ integer :: i

-  call getCurrentWorkDir_C(charArray,stat)
-  if (stat /= 0_C_INT) then
-    getCWD = 'Error occured when getting currend working directory'
-  else
-    getCWD = repeat('',len(getCWD))
-    arrayToString: do i=1,len(getCWD)
-      if (charArray(i) /= C_NULL_CHAR) then
-        getCWD(i:i)=charArray(i)
-      else
-        exit
-      endif
-    enddo arrayToString
-  endif
+ call getCurrentWorkDir_C(charArray,stat)
+ if (stat /= 0_C_INT) then
+   getCWD = 'Error occurred when getting current working directory'
+ else
+   getCWD = repeat('',len(getCWD))
+   arrayToString: do i=1,len(getCWD)
+     if (charArray(i) /= C_NULL_CHAR) then
+       getCWD(i:i)=charArray(i)
+     else
+       exit
+     endif
+   enddo arrayToString
+ endif

 end function getCWD

@@ -110,51 +119,42 @@ end function getCWD
 !--------------------------------------------------------------------------------------------------
 !> @brief gets the current host name
 !--------------------------------------------------------------------------------------------------
 character(len=1024) function getHostName()
-  use, intrinsic :: ISO_C_Binding, only: &
-    C_INT, &
-    C_CHAR, &
-    C_NULL_CHAR
+ implicit none
+ character(kind=C_CHAR), dimension(1024) :: charArray                                             ! C string is an array
+ integer(C_INT) :: stat
+ integer :: i

-  implicit none
-  character(kind=C_CHAR), dimension(1024) :: charArray               ! C string is an array
-  integer(C_INT) :: stat
-  integer :: i
-
-  call getHostName_C(charArray,stat)
-  if (stat /= 0_C_INT) then
-    getHostName = 'Error occured when getting host name'
-  else
-    getHostName = repeat('',len(getHostName))
-    arrayToString: do i=1,len(getHostName)
-      if (charArray(i) /= C_NULL_CHAR) then
-        getHostName(i:i)=charArray(i)
-      else
-        exit
-      endif
-    enddo arrayToString
-  endif
+ call getHostName_C(charArray,stat)
+ if (stat /= 0_C_INT) then
+   getHostName = 'Error occurred when getting host name'
+ else
+   getHostName = repeat('',len(getHostName))
+   arrayToString: do i=1,len(getHostName)
+     if (charArray(i) /= C_NULL_CHAR) then
+       getHostName(i:i)=charArray(i)
+     else
+       exit
+     endif
+   enddo arrayToString
+ endif

 end function getHostName

+
 !--------------------------------------------------------------------------------------------------
 !> @brief changes the current working directory
 !--------------------------------------------------------------------------------------------------
 logical function setCWD(path)
-  use, intrinsic :: ISO_C_Binding, only: &
-    C_INT, &
-    C_CHAR, &
-    C_NULL_CHAR
+ implicit none
+ character(len=*), intent(in) :: path
+ character(kind=C_CHAR), dimension(1024) :: strFixedLength                                        ! C string is an array
+ integer :: i

-  implicit none
-  character(len=*), intent(in) :: path
-  character(kind=C_CHAR), dimension(1024) :: strFixedLength          ! C string is an array
-  integer :: i
-
-  strFixedLength = repeat(C_NULL_CHAR,len(strFixedLength))
-  do i=1,len(path)                                                   ! copy array components
-    strFixedLength(i)=path(i:i)
-  enddo
-  setCWD=merge(.True.,.False.,chdir_C(strFixedLength) /= 0_C_INT)
+ strFixedLength = repeat(C_NULL_CHAR,len(strFixedLength))
+ do i=1,len(path)                                                                                 ! copy array components
+   strFixedLength(i)=path(i:i)
+ enddo
+ setCWD=merge(.True.,.False.,chdir_C(strFixedLength) /= 0_C_INT)

 end function setCWD
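
The new signal hooks take a C function pointer, so a Fortran handler has to be interoperable; a sketch of registering one (hypothetical, not part of the patch; the handler signature, an integer signal number passed by value, is an assumption about the C-side wrapper):

module demo_handlers
 use, intrinsic :: ISO_C_Binding, only: C_INT
 implicit none
contains
 subroutine catchUSR1(sig) bind(C)
   integer(C_INT), value :: sig                                       ! signal number as delivered by the C runtime
   write(6,*) 'caught SIGUSR1 (', sig, ')'
 end subroutine catchUSR1
end module demo_handlers

program signal_demo
 use, intrinsic :: ISO_C_Binding, only: c_funloc
 use system_routines, only: signalusr1_C
 use demo_handlers, only: catchUSR1
 implicit none
 call signalusr1_C(c_funloc(catchUSR1))                               ! install the handler; kill -USR1 <pid> now calls catchUSR1
 ! ... long-running work would go here ...
end program signal_demo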