polishing

parent 044a048944
commit e8312a49ed

@@ -1766,13 +1766,16 @@ class Result:
         Defaults to False.
 
         """
-        with h5py.File(self.fname,'r') as f_in:
-            for out in _match(output,f_in['setup'].keys()):
-                description = f_in['/'.join(('setup',out))].attrs['description']
-                if not h5py3: description = description.decode()
-                if not Path(out).exists() or overwrite:
-                    with open(out,'w') as f_out:
-                        f_out.write(f_in['/'.join(('setup',out))][()].decode())
-                    print(f"exported {description} to '{out}'")
-                else:
-                    print(f"'{out}' exists, {description} not exported")
+        def export(name,obj,output,overwrite):
+            if type(obj) == h5py.Dataset and _match(output,[name]):
+                d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode()
+                if not Path(name).exists() or overwrite:
+                    with open(name,'w') as f_out: f_out.write(obj[()].decode())
+                    print(f"Exported {d} to '{name}'.")
+                else:
+                    print(f"'{name}' exists, {d} not exported.")
+            elif type(obj) == h5py.Group:
+                os.makedirs(name, exist_ok=True)
+
+        with h5py.File(self.fname,'r') as f_in:
+            f_in['setup'].visititems(partial(export,output=output,overwrite=overwrite))
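
The reworked export walks the 'setup' group with h5py's visititems, which calls the callback once per member with a name relative to the group and the object itself; groups are mirrored as directories and string datasets are written out as files. A minimal, self-contained sketch of that pattern (the file name 'demo.hdf5', its contents, and the simplified callback are illustrative, not the DAMASK implementation):

    import os
    from functools import partial
    from pathlib import Path
    import h5py

    # build a tiny stand-in file with a 'setup' group (illustrative content only)
    with h5py.File('demo.hdf5','w') as f:
        f.create_group('setup').create_dataset('numerics.yaml',data='grid: {}\n')
        f['setup'].create_group('previous').create_dataset('numerics.yaml',data='grid: {}\n')

    def export(name,obj,overwrite):
        # called by visititems for every dataset/group below 'setup'
        if isinstance(obj,h5py.Dataset):
            if not Path(name).exists() or overwrite:
                data = obj[()]
                Path(name).write_text(data.decode() if isinstance(data,bytes) else data)
        elif isinstance(obj,h5py.Group):
            os.makedirs(name,exist_ok=True)      # nested groups become directories

    with h5py.File('demo.hdf5','r') as f_in:
        f_in['setup'].visititems(partial(export,overwrite=True))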

@@ -84,7 +84,7 @@ subroutine parse_numerics()
     print*, 'reading numerics.yaml'; flush(IO_STDOUT)
     fileContent = IO_read('numerics.yaml')
     call results_openJobFile(parallel=.false.)
-    call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration (optional)')
+    call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration')
     call results_closeJobFile
   endif
   call parallelization_bcast_str(fileContent)

@@ -114,7 +114,7 @@ subroutine parse_debug()
     print*, 'reading debug.yaml'; flush(IO_STDOUT)
     fileContent = IO_read('debug.yaml')
     call results_openJobFile(parallel=.false.)
-    call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration (optional)')
+    call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration')
     call results_closeJobFile
   endif
   call parallelization_bcast_str(fileContent)
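
Both parse routines store the raw YAML text as a string dataset below 'setup' in the result file; the only change here is the shorter 'description' attribute. A hedged sketch of reading one of them back with h5py (the file name 'my_job.hdf5' is hypothetical):

    import h5py

    with h5py.File('my_job.hdf5','r') as f:
        ds = f['setup/numerics.yaml']
        print(ds.attrs['description'])   # now 'numerics configuration', without '(optional)'
        print(ds[()].decode())           # verbatim numerics.yaml content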

@@ -94,10 +94,10 @@ subroutine results_init(restart)
     call get_command(commandLine)
     call results_addAttribute('call (restart at '//date//')',trim(commandLine))
     call h5gmove_f(resultsFile,'setup','tmp',hdferr)
-    call results_addAttribute('description','input data used to run the simulation (backup from restart at '//date//')','tmp')
+    call results_addAttribute('description','input data used to run the simulation up to restart at '//date,'tmp')
     call results_closeGroup(results_addGroup('setup'))
     call results_addAttribute('description','input data used to run the simulation','setup')
-    call h5gmove_f(resultsFile,'tmp','setup/backup',hdferr)
+    call h5gmove_f(resultsFile,'tmp','setup/previous',hdferr)
   endif
 
   call results_closeJobFile
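
On a restarted run the pre-restart 'setup' group is now kept as 'setup/previous' (formerly 'setup/backup'), with a reworded description. A hedged sketch of inspecting a restarted job's result file (the file name is hypothetical):

    import h5py

    with h5py.File('my_job_restarted.hdf5','r') as f:
        print(f['setup'].attrs['description'])            # 'input data used to run the simulation'
        print(f['setup/previous'].attrs['description'])   # '... up to restart at <date>'
        print(list(f['setup/previous'].keys()))           # the pre-restart input files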