more filters

- shuffle: significantly better compression
- Fletcher32: checksum to detect errors; computational overhead is very small
commit 429b84004d
parent bc4361c2ae
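For reference, the filter pipeline this commit enables can be reproduced from Python with h5py. A minimal sketch, independent of the repository; the file name, dataset name, and test array are illustrative only:

```python
import numpy as np
import h5py

# Illustrative data; any chunked numeric dataset works the same way.
data = np.linspace(0.0, 1.0, 1_000_000).reshape(1000, 1000)

with h5py.File('example.h5', 'w') as f:                     # hypothetical file name
    f.create_dataset('field', data=data,                    # hypothetical dataset name
                     chunks=(100, 1000),                    # filters require chunked layout
                     compression='gzip', compression_opts=6,
                     shuffle=True,                          # byte-shuffle before gzip
                     fletcher32=True)                       # checksum each chunk
```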
@@ -1160,10 +1160,11 @@ class Result:
                         shape = result[1]['data'].shape
                         chunks = (chunk_size//np.prod(shape[1:]),)+shape[1:]
                         dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
-                                                              maxshape=shape,chunks=chunks,compression = 'gzip')
+                                                              maxshape=shape, chunks=chunks,
+                                                              compression='gzip', compression_opts=6,
+                                                              shuffle=True,fletcher32=True)
                     else:
                         dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'],
                                                               maxshape=result[1]['data'].shape)
-                        dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])

                     now = datetime.datetime.now().astimezone()
                     dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z') if h5py3 else \
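The shuffle filter groups the bytes of each element by significance, so the slowly varying high-order bytes of neighbouring values form long runs that gzip compresses well. A minimal sketch to measure the effect, assuming nothing beyond NumPy and h5py; the file name is hypothetical and the sizes printed will vary with the data:

```python
import numpy as np
import h5py

rng = np.random.default_rng(0)
# A random walk: values change slowly, so high-order bytes repeat.
data = np.cumsum(rng.normal(size=1_000_000))

with h5py.File('shuffle_test.h5', 'w') as f:                # hypothetical file name
    a = f.create_dataset('plain',    data=data, chunks=True,
                         compression='gzip', compression_opts=6)
    b = f.create_dataset('shuffled', data=data, chunks=True,
                         compression='gzip', compression_opts=6,
                         shuffle=True)
    print('gzip only   :', a.id.get_storage_size(), 'bytes')
    print('shuffle+gzip:', b.id.get_storage_size(), 'bytes')
```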
@@ -1887,8 +1887,12 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
   if(product(totalShape) >= chunkSize*2_HSIZE_T) then
     call h5pset_chunk_f(dcpl, size(totalShape), getChunks(totalShape,chunkSize), hdferr)
     if(hdferr < 0) error stop 'HDF5 error'
+    call h5pset_shuffle_f(dcpl, hdferr)
+    if(hdferr < 0) error stop 'HDF5 error'
     call h5pset_deflate_f(dcpl, 6, hdferr)
     if(hdferr < 0) error stop 'HDF5 error'
+    call h5pset_Fletcher32_f(dcpl,hdferr)
+    if(hdferr < 0) error stop 'HDF5 error'
   endif

!--------------------------------------------------------------------------------------------------
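In the Fortran path the order of the h5pset_* calls is significant: HDF5 applies filters in the order they are added to the dataset creation property list, so shuffle runs before deflate on write, and the Fletcher32 checksum is computed last, over the already compressed chunk. Whether a written file actually carries all three filters can be checked from h5py; the file and dataset names below are placeholders:

```python
import h5py

with h5py.File('results.hdf5', 'r') as f:   # placeholder file name
    dset = f['field']                       # placeholder dataset path
    print('chunks     :', dset.chunks)
    print('compression:', dset.compression, dset.compression_opts)
    print('shuffle    :', dset.shuffle)
    print('fletcher32 :', dset.fletcher32)
```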