From 000de75617e46c9bbf179ed0b67f0c4aabc413d7 Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Sun, 6 Dec 2020 08:32:45 +0100
Subject: [PATCH 1/6] write orientation as plain array, not derived type

---
 python/damask/_result.py |  7 +++++--
 src/crystallite.f90      | 15 ++++++++-------
 src/results.f90          |  2 +-
 3 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/python/damask/_result.py b/python/damask/_result.py
index afeea568f..32970e523 100644
--- a/python/damask/_result.py
+++ b/python/damask/_result.py
@@ -46,7 +46,7 @@ class Result:
           self.version_major = f.attrs['DADF5_version_major']
           self.version_minor = f.attrs['DADF5_version_minor']
-          if self.version_major != 0 or not 7 <= self.version_minor <= 10:
+          if self.version_major != 0 or not 7 <= self.version_minor <= 11:
             raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
           self.structured = 'grid' in f['geometry'].attrs.keys() or \
@@ -790,7 +790,10 @@ class Result:
                 lattice = {'fcc':'cF','bcc':'cI','hex':'hP'}[q['meta']['Lattice']]
             except KeyError:
                 lattice = q['meta']['Lattice']
-            o = Orientation(rotation = (rfn.structured_to_unstructured(q['data'])),lattice=lattice)
+            try:
+                o = Orientation(rotation = (rfn.structured_to_unstructured(q['data'])),lattice=lattice)
+            except ValueError:
+                o = Orientation(rotation = q['data'],lattice=lattice)
             return {
                     'data': np.uint8(o.IPF_color(l)*255),
diff --git a/src/crystallite.f90 b/src/crystallite.f90
index da349bbcd..273d5397b 100644
--- a/src/crystallite.f90
+++ b/src/crystallite.f90
@@ -734,9 +734,9 @@ end function crystallite_push33ToRef
 subroutine crystallite_results
   integer :: p,o
-  real(pReal), allocatable, dimension(:,:,:) :: selected_tensors
-  type(rotation), allocatable, dimension(:) :: selected_rotations
-  character(len=:), allocatable :: group,structureLabel
+  real(pReal), allocatable, dimension(:,:,:) :: selected_tensors
+  real(pReal), allocatable, dimension(:,:) :: selected_rotations
+  character(len=:), allocatable :: group,structureLabel
   do p=1,size(material_name_phase)
     group = trim('current/phase')//'/'//trim(material_name_phase(p))//'/mechanics'
@@ -794,7 +794,8 @@ subroutine crystallite_results
           end select
           selected_rotations = select_rotations(crystallite_orientation,p)
           call results_writeDataset(group,selected_rotations,output_constituent(p)%label(o),&
-                                    'crystal orientation as quaternion',structureLabel)
+                                    'crystal orientation as quaternion')
+          call results_addAttribute('Lattice',structureLabel,group//'/'//output_constituent(p)%label(o))
       end select
     enddo
   enddo
@@ -835,10 +836,10 @@ subroutine crystallite_results
     integer, intent(in) :: instance
     type(rotation), dimension(:,:,:), intent(in) :: dataset
-    type(rotation), allocatable, dimension(:) :: select_rotations
+    real(pReal), allocatable, dimension(:,:) :: select_rotations
    integer :: e,i,c,j
-    allocate(select_rotations(count(material_phaseAt==instance)*homogenization_maxNconstituents*discretization_nIPs))
+    allocate(select_rotations(4,count(material_phaseAt==instance)*homogenization_maxNconstituents*discretization_nIPs))
    j=0
    do e = 1, size(material_phaseAt,2)
      do i = 1, discretization_nIPs
        do c = 1, size(material_phaseAt,1)                                                          !ToDo: this needs to be changed for varying Ngrains
          if (material_phaseAt(c,e) == instance) then
            j = j + 1
-           select_rotations(j) = dataset(c,i,e)
+           select_rotations(1:4,j) = dataset(c,i,e)%asQuaternion()
          endif
        enddo
      enddo
diff --git a/src/results.f90 b/src/results.f90
index 524a65eaf..ab4e244ba 100644
--- a/src/results.f90
+++ b/src/results.f90
@@ -74,7 +74,7 @@ subroutine results_init(restart)
   if(.not. restart) then
     resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
     call results_addAttribute('DADF5_version_major',0)
-    call results_addAttribute('DADF5_version_minor',10)
+    call results_addAttribute('DADF5_version_minor',11)
     call results_addAttribute('DAMASK_version',DAMASKVERSION)
     call get_command(commandLine)
     call results_addAttribute('Call',trim(commandLine))
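For scripts that read DADF5 files directly, the practical effect of this patch is that the orientation dataset becomes a plain (N,4) float array instead of a compound type with named quaternion components; the try/except added to _result.py above keeps both layouts readable. A minimal reading sketch under that assumption (file name and dataset path are invented placeholders, not taken from the patch):

    import h5py
    import numpy as np
    import numpy.lib.recfunctions as rfn

    with h5py.File('geom_load.hdf5','r') as f:                                # placeholder file name
        q = f['increment_0/constituent/1_Aluminum/generic/orientation'][()]   # placeholder path

    if q.dtype.names is None:                        # new layout: plain (N,4) float array
        quats = q
    else:                                            # old layout: compound type with fields w,x,y,z
        quats = rfn.structured_to_unstructured(q)

    print(quats.shape)                               # (N, 4)
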
From 52e3fb50bc8e0a6b6b2bc0001248e6b587f1ff29 Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Sun, 6 Dec 2020 10:20:32 +0100
Subject: [PATCH 2/6] compress.

Datasets are chunked along the first dimension. Chunk size (1 MB for real
data) is probably not optimal.

---
 python/damask/_result.py | 10 ++++++++-
 src/HDF5_utilities.f90   | 47 ++++++++++++++++++++++++++++++++--------
 2 files changed, 47 insertions(+), 10 deletions(-)

diff --git a/python/damask/_result.py b/python/damask/_result.py
index 32970e523..6d83c8872 100644
--- a/python/damask/_result.py
+++ b/python/damask/_result.py
@@ -1132,6 +1132,7 @@ class Result:
             Arguments parsed to func.
         """
+        chunk_size = 1024**2//8
         num_threads = damask.environment.options['DAMASK_NUM_THREADS']
         pool = mp.Pool(int(num_threads) if num_threads is not None else None)
         lock = mp.Manager().Lock()
@@ -1155,7 +1156,14 @@ class Result:
                 dataset.attrs['Overwritten'] = 'Yes' if h5py3 else \
                                                'Yes'.encode()
             else:
-                dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
+                if result[1]['data'].size >= chunk_size*2:
+                    shape  = result[1]['data'].shape
+                    chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
+                    dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
+                                                          maxshape=shape,chunks=chunks,compression = 'gzip')
+                else:
+                    dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
+                                                          maxshape=result[1]['data'].shape)
             now = datetime.datetime.now().astimezone()
             dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index 47f4243e7..48b98812b 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -1789,7 +1789,7 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
 !--------------------------------------------------------------------------------------------------
 ! creating a property list for IO and set it to collective
   call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
-  if(hdferr < 0) error stop 'HDF5 error'
+  if(hdferr < 0) error stop 'HDF5 error'
 #ifdef PETSc
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
@@ -1815,7 +1815,7 @@ end subroutine initialize_read
 !--------------------------------------------------------------------------------------------------
 subroutine finalize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_id)
-  integer(HID_T), intent(in) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+  integer(HID_T), intent(in) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
   integer :: hdferr
   call h5pclose_f(plist_id, hdferr)
@@ -1836,8 +1836,8 @@ end subroutine finalize_read
 !> @brief initialize HDF5 handles, determines global shape and start for parallel write
 !--------------------------------------------------------------------------------------------------
 subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-                            myStart, totalShape, &
-                            loc_id,myShape,datasetName,datatype,parallel)
+                            myStart, totalShape, &
+                            loc_id,myShape,datasetName,datatype,parallel)
   integer(HID_T), intent(in) :: loc_id !< file or group handle
   character(len=*), intent(in) :: datasetName !< name of the dataset in the file
@@ -1850,10 +1850,10 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
     totalShape !< shape of the dataset (all processes)
   integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id
-  integer, dimension(worldsize) :: &
-    writeSize !< contribution of all processes
-  integer :: ierr
-  integer :: hdferr
+  integer, dimension(worldsize) :: writeSize !< contribution of all processes
+  integer(HID_T) :: dcpl
+  integer :: ierr, hdferr
+  integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T
 !-------------------------------------------------------------------------------------------------
 ! creating a property list for transfer properties (is collective when reading in parallel)
@@ -1880,6 +1880,17 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
   myStart(ubound(myStart)) = int(sum(writeSize(1:worldrank)),HSIZE_T)
   totalShape = [myShape(1:ubound(myShape,1)-1),int(sum(writeSize),HSIZE_T)]
+!--------------------------------------------------------------------------------------------------
+! compress (and chunk) larger datasets
+  call h5pcreate_f(H5P_DATASET_CREATE_F, dcpl, hdferr)
+  if(hdferr < 0) error stop 'HDF5 error'
+  if(product(totalShape) >= chunkSize*2_HSIZE_T) then
+    call h5pset_chunk_f(dcpl, size(totalShape), getChunks(totalShape,chunkSize), hdferr)
+    if(hdferr < 0) error stop 'HDF5 error'
+    call h5pset_deflate_f(dcpl, 6, hdferr)
+    if(hdferr < 0) error stop 'HDF5 error'
+  endif
+
 !--------------------------------------------------------------------------------------------------
 ! create dataspace in memory (local shape) and in file (global shape)
   call h5screate_simple_f(size(myShape), myShape, memspace_id, hdferr, myShape)
@@ -1889,11 +1900,14 @@
 !--------------------------------------------------------------------------------------------------
 ! create dataset in the file and select a hyperslab from it (the portion of the current process)
-  call h5dcreate_f(loc_id, trim(datasetName), datatype, filespace_id, dset_id, hdferr)
+  call h5dcreate_f(loc_id, trim(datasetName), datatype, filespace_id, dset_id, hdferr, dcpl)
   if(hdferr < 0) error stop 'HDF5 error'
   call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, myShape, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
+  call h5pclose_f(dcpl , hdferr)
+  if(hdferr < 0) error stop 'HDF5 error'
+
 end subroutine initialize_write
@@ -1916,4 +1930,19 @@ subroutine finalize_write(plist_id, dset_id, filespace_id, memspace_id)
 end subroutine finalize_write
+
+!--------------------------------------------------------------------------------------------------
+!> @brief determine chunk layout
+!--------------------------------------------------------------------------------------------------
+pure function getChunks(totalShape,chunkSize)
+
+  integer(HSIZE_T), dimension(:), intent(in) :: totalShape
+  integer(HSIZE_T), intent(in) :: chunkSize
+  integer(HSIZE_T), dimension(size(totalShape)) :: getChunks
+
+  getChunks = [totalShape(1:size(totalShape)-1),&
+               chunkSize/product(totalShape(1:size(totalShape)-1))]
+
+end function getChunks
+
 end module HDF5_Utilities
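The Python writer above mirrors the Fortran heuristic: a chunk holds roughly 1 MiB of 8-byte reals (1024**2//8 values), chunking is applied along the first (point) dimension only, and small datasets stay contiguous. A standalone sketch of the same idea, with made-up data and an invented dataset name:

    import h5py
    import numpy as np

    chunk_size = 1024**2//8                        # values per ~1 MiB chunk of 8-byte reals
    data = np.random.rand(200000,3,3)              # e.g. per-point tensors (made-up data)

    with h5py.File('example.hdf5','w') as f:       # placeholder file name
        if data.size >= chunk_size*2:              # only chunk/compress larger datasets
            chunks = (chunk_size//np.prod(data.shape[1:]),) + data.shape[1:]
            f.create_dataset('F',data=data,chunks=chunks,compression='gzip')
        else:
            f.create_dataset('F',data=data)
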
From bc4361c2aef4ead0f975c32b37e900ba6e50cd1e Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Sun, 6 Dec 2020 13:32:20 +0100
Subject: [PATCH 3/6] use variable string length for array, padding is needed to get same length

---
 src/HDF5_utilities.f90    |  2 +-
 src/constitutive_mech.f90 |  4 ++--
 src/material.f90          | 18 +++++++++++++-----
 src/results.f90           | 14 ++++++--------
 4 files changed, 22 insertions(+), 16 deletions(-)

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index 48b98812b..a2c023ef5 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -175,7 +175,7 @@ end subroutine HDF5_closeFile
 !--------------------------------------------------------------------------------------------------
 integer(HID_T) function HDF5_addGroup(fileHandle,groupName)
-  integer(HID_T), intent(in) :: fileHandle
+  integer(HID_T), intent(in) :: fileHandle
   character(len=*), intent(in) :: groupName
   integer :: hdferr
diff --git a/src/constitutive_mech.f90 b/src/constitutive_mech.f90
index 2a6bf97eb..dc3a935e3 100644
--- a/src/constitutive_mech.f90
+++ b/src/constitutive_mech.f90
@@ -454,10 +454,10 @@ end subroutine constitutive_plastic_LpAndItsTangents
 module subroutine plastic_results
   integer :: p
-  character(len=pStringLen) :: group
+  character(len=:), allocatable :: group
   plasticityLoop: do p=1,size(material_name_phase)
-    group = trim('current/phase')//'/'//trim(material_name_phase(p))
+    group = '/current/phase/'//trim(material_name_phase(p))
     call results_closeGroup(results_addGroup(group))
     group = trim(group)//'/plastic'
diff --git a/src/material.f90 b/src/material.f90
index 8679afdc4..94548e052 100644
--- a/src/material.f90
+++ b/src/material.f90
@@ -52,7 +52,7 @@ module material
     HOMOGENIZATION_RGC_ID
   end enum
-  character(len=pStringLen), public, protected, allocatable, dimension(:) :: &
+  character(len=:), public, protected, allocatable, dimension(:) :: &
    material_name_phase, &                                                                          !< name of each phase
    material_name_homogenization                                                                    !< name of each homogenization
@@ -392,13 +392,21 @@ end subroutine sanityCheck
 function getKeys(dict)
   class(tNode), intent(in) :: dict
-  character(len=pStringLen), dimension(:), allocatable :: getKeys
+  character(len=:), dimension(:), allocatable :: getKeys
+  character(len=pStringLen), dimension(:), allocatable :: temp
-  integer :: i
+  integer :: i,l
-  allocate(getKeys(dict%length))
+  allocate(temp(dict%length))
+  l = 0
   do i=1, dict%length
-    getKeys(i) = dict%getKey(i)
+    temp(i) = dict%getKey(i)
+    l = max(len_trim(temp(i)),l)
+  enddo
+
+  allocate(character(l)::getKeys(dict%length))
+  do i=1, dict%length
+    getKeys(i) = trim(temp(i))
   enddo
 end function getKeys
diff --git a/src/results.f90 b/src/results.f90
index ab4e244ba..8b097179b 100644
--- a/src/results.f90
+++ b/src/results.f90
@@ -503,8 +503,8 @@ end subroutine results_writeScalarDataset_rotation
 subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
   integer, dimension(:,:), intent(in) :: phaseAt                                                   !< phase section at (constituent,element)
-  integer, dimension(:,:,:), intent(in) :: memberAtLocal                                           !< phase member at (constituent,IP,element)
-  character(len=pStringLen), dimension(:), intent(in) :: label                                     !< label of each phase section
+  integer, dimension(:,:,:), intent(in) :: memberAtLocal                                           !< phase member at (constituent,IP,element)
+  character(len=*), dimension(:), intent(in) :: label                                              !< label of each phase section
   integer, dimension(size(memberAtLocal,1),size(memberAtLocal,2),size(memberAtLocal,3)) :: &
     phaseAtMaterialpoint, &
@@ -527,7 +527,6 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
     plist_id, &
     dt_id
-
   integer(SIZE_T) :: type_size_string, type_size_int
   integer :: hdferr, ierr, i
@@ -571,10 +570,10 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
   if(hdferr < 0) error stop 'HDF5 error'
   memberOffset = 0
   do i=1, size(label)
-    memberOffset(i,worldrank) = count(phaseAt == i)*size(memberAtLocal,2)                          ! number of points/instance of this process
+    memberOffset(i,worldrank) = count(phaseAt == i)*size(memberAtLocal,2)                          ! number of points/instance of this process
   enddo
   writeSize = 0
-  writeSize(worldrank) = size(memberAtLocal(1,:,:))                                                ! total number of points by this process
+  writeSize(worldrank) = size(memberAtLocal(1,:,:))                                                ! total number of points by this process
 !--------------------------------------------------------------------------------------------------
 ! MPI settings and communication
@@ -658,8 +657,8 @@ end subroutine results_mapping_constituent
 subroutine results_mapping_homogenization(homogenizationAt,memberAtLocal,label)
   integer, dimension(:), intent(in) :: homogenizationAt                                            !< homogenization section at (element)
-  integer, dimension(:,:), intent(in) :: memberAtLocal                                             !< homogenization member at (IP,element)
-  character(len=pStringLen), dimension(:), intent(in) :: label                                     !< label of each homogenization section
+  integer, dimension(:,:), intent(in) :: memberAtLocal                                             !< homogenization member at (IP,element)
+  character(len=*), dimension(:), intent(in) :: label                                              !< label of each homogenization section
   integer, dimension(size(memberAtLocal,1),size(memberAtLocal,2)) :: &
     homogenizationAtMaterialpoint, &
@@ -682,7 +681,6 @@ subroutine results_mapping_homogenization(homogenizationAt,memberAtLocal,label)
     plist_id, &
    dt_id
-
   integer(SIZE_T) :: type_size_string, type_size_int
   integer :: hdferr, ierr, i
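Context for the getKeys change above: a Fortran character array has one length for all elements, so the keys are first collected into fixed-length temporaries, the longest trimmed length is determined, and the result is allocated with exactly that length; shorter keys end up padded with trailing blanks. The same padding idea in a few lines of Python (purely illustrative, names invented):

    keys = ['Aluminum','Steel','Ni-superalloy']        # hypothetical section names
    l = max(len(k) for k in keys)                      # length of the longest key
    padded = [k.ljust(l) for k in keys]                # pad shorter keys with trailing blanks

    print([len(k) for k in padded])                    # [13, 13, 13]
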
From 429b84004d193e3216ab68f1fa77c712260beac8 Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Sun, 6 Dec 2020 19:57:00 +0100
Subject: [PATCH 4/6] more filters

- shuffle: significantly better compression
- Fletcher32: checksum to detect errors

computational overhead is very small

---
 python/damask/_result.py | 7 ++++---
 src/HDF5_utilities.f90   | 4 ++++
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/python/damask/_result.py b/python/damask/_result.py
index 6d83c8872..600469e79 100644
--- a/python/damask/_result.py
+++ b/python/damask/_result.py
@@ -1160,10 +1160,11 @@ class Result:
                     shape  = result[1]['data'].shape
                     chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
                     dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
-                                                          maxshape=shape,chunks=chunks,compression = 'gzip')
+                                                          maxshape=shape, chunks=chunks,
+                                                          compression='gzip', compression_opts=6,
+                                                          shuffle=True,fletcher32=True)
                 else:
-                    dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
-                                                          maxshape=result[1]['data'].shape)
+                    dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
             now = datetime.datetime.now().astimezone()
             dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index a2c023ef5..f976ad106 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -1887,8 +1887,12 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
   if(product(totalShape) >= chunkSize*2_HSIZE_T) then
     call h5pset_chunk_f(dcpl, size(totalShape), getChunks(totalShape,chunkSize), hdferr)
     if(hdferr < 0) error stop 'HDF5 error'
+    call h5pset_shuffle_f(dcpl, hdferr)
+    if(hdferr < 0) error stop 'HDF5 error'
     call h5pset_deflate_f(dcpl, 6, hdferr)
     if(hdferr < 0) error stop 'HDF5 error'
+    call h5pset_Fletcher32_f(dcpl,hdferr)
+    if(hdferr < 0) error stop 'HDF5 error'
   endif
 !--------------------------------------------------------------------------------------------------
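With this patch, both the Fortran and the Python writer request the shuffle filter, gzip level 6, and a Fletcher32 checksum for chunked datasets. Which filters actually ended up on a written dataset can be checked from h5py, for example (a sketch; file name and dataset path are placeholders):

    import h5py

    with h5py.File('geom_load.hdf5','r') as f:                    # placeholder file name
        d = f['increment_0/constituent/1_Aluminum/generic/F']     # placeholder dataset path
        print(d.chunks)                                           # e.g. (14563, 3, 3) for a chunked dataset
        print(d.compression, d.compression_opts)                  # 'gzip' 6
        print(d.shuffle, d.fletcher32)                            # True True
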
From 97ee7e6ee59e2a98548707f568284848600165fb Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Mon, 7 Dec 2020 17:26:50 +0100
Subject: [PATCH 5/6] not needed anymore

---
 src/HDF5_utilities.f90                       | 81 --------------------
 src/crystallite.f90                          |  2 +-
 src/grid/grid_mech_FEM.f90                   |  1 +
 src/grid/grid_mech_spectral_basic.f90        |  1 +
 src/grid/grid_mech_spectral_polarisation.f90 |  1 +
 src/results.f90                              | 39 ----------
 6 files changed, 4 insertions(+), 121 deletions(-)

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index f976ad106..6f45516e6 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -12,7 +12,6 @@ module HDF5_utilities
   use prec
   use parallelization
-  use rotations
   implicit none
   public
@@ -37,7 +36,6 @@ module HDF5_utilities
     module procedure HDF5_read_int5
     module procedure HDF5_read_int6
     module procedure HDF5_read_int7
-
   end interface HDF5_read
 !--------------------------------------------------------------------------------------------------
@@ -60,9 +58,6 @@ module HDF5_utilities
     module procedure HDF5_write_int5
     module procedure HDF5_write_int6
     module procedure HDF5_write_int7
-
-    module procedure HDF5_write_rotation
-
   end interface HDF5_write
 !--------------------------------------------------------------------------------------------------
@@ -1663,82 +1658,6 @@ subroutine HDF5_write_int7(loc_id,dataset,datasetName,parallel)
 end subroutine HDF5_write_int7
-!--------------------------------------------------------------------------------------------------
-!> @brief writes a scalar orientation dataset
-! ToDo: It might be possible to write the dataset as a whole
-! ToDo: We could optionally write out other representations (axis angle, euler, ...)
-!--------------------------------------------------------------------------------------------------
-subroutine HDF5_write_rotation(loc_id,dataset,datasetName,parallel)
-
-  type(rotation), intent(in), dimension(:) :: dataset !< data written to file
-  integer(HID_T), intent(in) :: loc_id !< file or group handle
-  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
-  logical, intent(in), optional :: parallel
-
-  integer :: hdferr
-  real(pReal), dimension(4,size(dataset)) :: dataset_asArray
-  integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id,dtype_id,w_id,x_id,y_id,z_id
-  integer(HSIZE_T), dimension(size(shape(dataset))) :: &
-    myStart, &
-    myShape, & !< shape of the dataset (this process)
-    totalShape !< shape of the dataset (all processes)
-  integer(SIZE_T) :: type_size_real
-  integer :: i
-
-  do i = 1, size(dataset)
-    dataset_asArray(1:4,i) = dataset(i)%asQuaternion()
-  enddo
-
-!---------------------------------------------------------------------------------------------------
-! determine shape of dataset
-  myShape = int(shape(dataset),HSIZE_T)
-
-!---------------------------------------------------------------------------------------------------
-! compound type: name of each quaternion component
-  call h5tget_size_f(H5T_NATIVE_DOUBLE, type_size_real, hdferr)
-
-  call h5tcreate_f(H5T_COMPOUND_F, type_size_real*4_SIZE_T, dtype_id, hdferr)
-  call h5tinsert_f(dtype_id, "w", type_size_real*0_SIZE_T, H5T_NATIVE_DOUBLE, hdferr)
-  call h5tinsert_f(dtype_id, "x", type_size_real*1_SIZE_T, H5T_NATIVE_DOUBLE, hdferr)
-  call h5tinsert_f(dtype_id, "y", type_size_real*2_SIZE_T, H5T_NATIVE_DOUBLE, hdferr)
-  call h5tinsert_f(dtype_id, "z", type_size_real*3_SIZE_T, H5T_NATIVE_DOUBLE, hdferr)
-
-  if (present(parallel)) then
-    call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-                          myStart, totalShape, loc_id,myShape,datasetName,dtype_id,parallel)
-  else
-    call initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
-                          myStart, totalShape, loc_id,myShape,datasetName,dtype_id,.false.)
-  endif
-
-  call h5pset_preserve_f(plist_id, .TRUE., hdferr)
-
-  if (product(totalShape) /= 0) then
-    call h5tcreate_f(H5T_COMPOUND_F, type_size_real, x_id, hdferr)
-    call h5tinsert_f(x_id, "x", 0_SIZE_T, H5T_NATIVE_DOUBLE, hdferr)
-    call h5tcreate_f(H5T_COMPOUND_F, type_size_real, w_id, hdferr)
-    call h5tinsert_f(w_id, "w", 0_SIZE_T, H5T_NATIVE_DOUBLE, hdferr)
-    call h5tcreate_f(H5T_COMPOUND_F, type_size_real, y_id, hdferr)
-    call h5tinsert_f(y_id, "y", 0_SIZE_T, H5T_NATIVE_DOUBLE, hdferr)
-    call h5tcreate_f(H5T_COMPOUND_F, type_size_real, z_id, hdferr)
-    call h5tinsert_f(z_id, "z", 0_SIZE_T, H5T_NATIVE_DOUBLE, hdferr)
-
-    call h5dwrite_f(dset_id, w_id,dataset_asArray(1,:),int(totalShape,HSIZE_T), hdferr,&
-                    file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
-    call h5dwrite_f(dset_id, x_id,dataset_asArray(2,:),int(totalShape,HSIZE_T), hdferr,&
-                    file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
-    call h5dwrite_f(dset_id, y_id,dataset_asArray(3,:),int(totalShape,HSIZE_T), hdferr,&
-                    file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
-    call h5dwrite_f(dset_id, z_id,dataset_asArray(4,:),int(totalShape,HSIZE_T), hdferr,&
-                    file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
-    if(hdferr < 0) error stop 'HDF5 error'
-  endif
-
-  call finalize_write(plist_id, dset_id, filespace_id, memspace_id)
-
-end subroutine HDF5_write_rotation
-
-
 !--------------------------------------------------------------------------------------------------
 !> @brief initialize HDF5 handles, determines global shape and start for parallel read
 !--------------------------------------------------------------------------------------------------
diff --git a/src/crystallite.f90 b/src/crystallite.f90
index 273d5397b..66c1df607 100644
--- a/src/crystallite.f90
+++ b/src/crystallite.f90
@@ -794,7 +794,7 @@ subroutine crystallite_results
           end select
           selected_rotations = select_rotations(crystallite_orientation,p)
           call results_writeDataset(group,selected_rotations,output_constituent(p)%label(o),&
-                                    'crystal orientation as quaternion')
+                                    'crystal orientation as quaternion','q_0 ')
           call results_addAttribute('Lattice',structureLabel,group//'/'//output_constituent(p)%label(o))
       end select
     enddo
diff --git a/src/grid/grid_mech_FEM.f90 b/src/grid/grid_mech_FEM.f90
index 7d0830f67..4394b6f81 100644
--- a/src/grid/grid_mech_FEM.f90
+++ b/src/grid/grid_mech_FEM.f90
@@ -16,6 +16,7 @@ module grid_mech_FEM
   use IO
   use HDF5_utilities
   use math
+  use rotations
   use spectral_utilities
   use FEsolving
   use config
diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90
index 8677a998f..563b25162 100644
--- a/src/grid/grid_mech_spectral_basic.f90
+++ b/src/grid/grid_mech_spectral_basic.f90
@@ -16,6 +16,7 @@ module grid_mech_spectral_basic
   use IO
   use HDF5_utilities
   use math
+  use rotations
   use spectral_utilities
   use FEsolving
   use config
diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90
index 053d958ad..03780f2e0 100644
--- a/src/grid/grid_mech_spectral_polarisation.f90
+++ b/src/grid/grid_mech_spectral_polarisation.f90
@@ -16,6 +16,7 @@ module grid_mech_spectral_polarisation
   use IO
   use HDF5_utilities
   use math
+  use rotations
   use spectral_utilities
   use FEsolving
   use config
diff --git a/src/results.f90 b/src/results.f90
index 8b097179b..f15ad4e4a 100644
--- a/src/results.f90
+++ b/src/results.f90
@@ -8,7 +8,6 @@ module results
   use DAMASK_interface
   use parallelization
   use IO
-  use rotations
   use HDF5_utilities
 #ifdef PETSc
   use PETSC
 #endif
@@ -20,27 +19,21 @@
   integer(HID_T) :: resultsFile
   interface results_writeDataset
-
     module procedure results_writeTensorDataset_real
     module procedure results_writeVectorDataset_real
     module procedure results_writeScalarDataset_real
     module procedure results_writeTensorDataset_int
     module procedure results_writeVectorDataset_int
-
-    module procedure results_writeScalarDataset_rotation
-
   end interface results_writeDataset
   interface results_addAttribute
-
     module procedure results_addAttribute_real
     module procedure results_addAttribute_int
     module procedure results_addAttribute_str
    module procedure results_addAttribute_int_array
    module procedure results_addAttribute_real_array
-
  end interface results_addAttribute
  public :: &
@@ -465,38 +458,6 @@ end subroutine results_writeTensorDataset_int
-!--------------------------------------------------------------------------------------------------
-!> @brief stores a scalar dataset in a group
-!--------------------------------------------------------------------------------------------------
-subroutine results_writeScalarDataset_rotation(group,dataset,label,description,lattice_structure)
-
-  character(len=*), intent(in) :: label,group,description
-  character(len=*), intent(in), optional :: lattice_structure
-  type(rotation), intent(inout), dimension(:) :: dataset
-
-  integer(HID_T) :: groupHandle
-
-  groupHandle = results_openGroup(group)
-
-#ifdef PETSc
-  call HDF5_write(groupHandle,dataset,label,.true.)
-#else
-  call HDF5_write(groupHandle,dataset,label,.false.)
-#endif
-
-  if (HDF5_objectExists(groupHandle,label)) &
-    call HDF5_addAttribute(groupHandle,'Description',description,label)
-  if (HDF5_objectExists(groupHandle,label) .and. present(lattice_structure)) &
-    call HDF5_addAttribute(groupHandle,'Lattice',lattice_structure,label)
-  if (HDF5_objectExists(groupHandle,label)) &
-    call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
-  if (HDF5_objectExists(groupHandle,label)) &
-    call HDF5_addAttribute(groupHandle,'Created',now(),label)
-  call HDF5_closeGroup(groupHandle)
-
-end subroutine results_writeScalarDataset_rotation
-
-
 !--------------------------------------------------------------------------------------------------
 !> @brief adds the unique mapping from spatial position and constituent ID to results
 !--------------------------------------------------------------------------------------------------

From 3ad741dbeb5285bf0346d795830e731b7f5a00c3 Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Thu, 10 Dec 2020 00:31:58 +0100
Subject: [PATCH 6/6] only used in one function

---
 src/HDF5_utilities.f90 | 29 +++++++++++++++--------------
 src/material.f90       |  2 +-
 2 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index 6f45516e6..88b8d960d 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -1831,6 +1831,21 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
   call h5pclose_f(dcpl , hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
+  contains
+  !------------------------------------------------------------------------------------------------
+  !> @brief determine chunk layout
+  !------------------------------------------------------------------------------------------------
+  pure function getChunks(totalShape,chunkSize)
+
+    integer(HSIZE_T), dimension(:), intent(in) :: totalShape
+    integer(HSIZE_T), intent(in) :: chunkSize
+    integer(HSIZE_T), dimension(size(totalShape)) :: getChunks
+
+    getChunks = [totalShape(1:size(totalShape)-1),&
+                 chunkSize/product(totalShape(1:size(totalShape)-1))]
+
+  end function getChunks
+
 end subroutine initialize_write
@@ -1854,18 +1869,4 @@ subroutine finalize_write(plist_id, dset_id, filespace_id, memspace_id)
 end subroutine finalize_write
-!--------------------------------------------------------------------------------------------------
-!> @brief determine chunk layout
-!--------------------------------------------------------------------------------------------------
-pure function getChunks(totalShape,chunkSize)
-
-  integer(HSIZE_T), dimension(:), intent(in) :: totalShape
-  integer(HSIZE_T), intent(in) :: chunkSize
-  integer(HSIZE_T), dimension(size(totalShape)) :: getChunks
-
-  getChunks = [totalShape(1:size(totalShape)-1),&
-               chunkSize/product(totalShape(1:size(totalShape)-1))]
-
-end function getChunks
-
 end module HDF5_Utilities
diff --git a/src/material.f90 b/src/material.f90
index 94548e052..223ea6ed8 100644
--- a/src/material.f90
+++ b/src/material.f90
@@ -403,7 +403,7 @@ function getKeys(dict)
     temp(i) = dict%getKey(i)
     l = max(len_trim(temp(i)),l)
   enddo
-
+
   allocate(character(l)::getKeys(dict%length))
   do i=1, dict%length
     getKeys(i) = trim(temp(i))