Merge branch 'store-sim-setup' into 'development'
store information on slip and twin systems

See merge request damask/DAMASK!422
This commit is contained in:
commit 4a8c836117
|
|||
Subproject commit 72c58103860e127d37ccf3a06827331de29406ca
|
||||
Subproject commit 9699f20f21f8a5f532c735a1aa9daeba395da94d
|
|
@@ -99,8 +99,10 @@ class Result:
self.version_major = f.attrs['DADF5_version_major']
self.version_minor = f.attrs['DADF5_version_minor']

if self.version_major != 0 or not 12 <= self.version_minor <= 13:
if self.version_major != 0 or not 12 <= self.version_minor <= 14:
raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
if self.version_major == 0 and self.version_minor < 14:
self.export_setup = None

self.structured = 'cells' in f['geometry'].attrs.keys()
@@ -1395,7 +1397,7 @@ class Result:
def export_XDMF(self,output='*'):
"""
Write XDMF file to directly visualize data in DADF5 file.
Write XDMF file to directly visualize data from DADF5 file.

The XDMF format is only supported for structured grids
with single phase and single constituent.
@@ -1748,3 +1750,32 @@ class Result:
if flatten: r = util.dict_flatten(r)

return None if (type(r) == dict and r == {}) else r


def export_setup(self,output='*',overwrite=False):
"""
Export configuration files.

Parameters
----------
output : (list of) str, optional
Names of the datasets to export to the file.
Defaults to '*', in which case all datasets are exported.
overwrite : boolean, optional
Overwrite existing configuration files.
Defaults to False.

"""
def export(name,obj,output,overwrite):
if type(obj) == h5py.Dataset and _match(output,[name]):
d = obj.attrs['description'] if h5py3 else obj.attrs['description'].decode()
if not Path(name).exists() or overwrite:
with open(name,'w') as f_out: f_out.write(obj[()].decode())
print(f"Exported {d} to '{name}'.")
else:
print(f"'{name}' exists, {d} not exported.")
elif type(obj) == h5py.Group:
os.makedirs(name, exist_ok=True)

with h5py.File(self.fname,'r') as f_in:
f_in['setup'].visititems(partial(export,output=output,overwrite=overwrite))
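With this change a DADF5 v0.14 file carries the full simulation setup in its 'setup' group, and the new Result.export_setup() writes those datasets back to disk. A minimal usage sketch; the file name is assumed, not part of the commit:

import damask

r = damask.Result('geom_load.hdf5')              # any DADF5 v0.14 result file (name assumed)
r.export_setup()                                 # re-create material.yaml, numerics.yaml, load case, ...
r.export_setup(output='material.yaml',
               overwrite=True)                   # export a single dataset, replacing the copy on disk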
@@ -4,22 +4,23 @@
!> @brief CPFEM engine
!--------------------------------------------------------------------------------------------------
module CPFEM
use DAMASK_interface
use prec
use math
use rotations
use IO
use YAML_types
use YAML_parse
use discretization_marc
use material
use config
use homogenization
use IO
use discretization
use DAMASK_interface
use HDF5_utilities
use results
use config
use math
use rotations
use lattice
use material
use phase
use homogenization

use discretization
use discretization_marc

implicit none
private
@@ -68,7 +69,7 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief call all module initializations
!> @brief Initialize all modules.
!--------------------------------------------------------------------------------------------------
subroutine CPFEM_initAll
@@ -77,13 +78,13 @@ subroutine CPFEM_initAll
call IO_init
call YAML_types_init
call YAML_parse_init
call HDF5_utilities_init
call results_init(.false.)
call config_init
call math_init
call rotations_init
call HDF5_utilities_init
call results_init(.false.)
call discretization_marc_init
call lattice_init
call discretization_marc_init
call material_init(.false.)
call phase_init
call homogenization_init
@@ -4,28 +4,29 @@
!> @brief needs a good name and description
!--------------------------------------------------------------------------------------------------
module CPFEM2
use prec
use parallelization
use config
use math
use rotations
use DAMASK_interface
use prec
use IO
use YAML_types
use YAML_parse
use material
use lattice
use IO
use base64
use DAMASK_interface
use discretization
use HDF5
use HDF5_utilities
use results
use homogenization
use config
use math
use rotations
use lattice
use material
use phase
use homogenization

use discretization
#if defined(MESH)
use FEM_quadrature
use discretization_mesh
#elif defined(GRID)
use base64
use discretization_grid
#endif
@@ -36,7 +37,7 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief call all module initializations
!> @brief Initialize all modules.
!--------------------------------------------------------------------------------------------------
subroutine CPFEM_initAll
@@ -44,18 +45,19 @@ subroutine CPFEM_initAll
call DAMASK_interface_init ! Spectral and FEM interface to commandline
call prec_init
call IO_init
call base64_init
#ifdef MESH
#if defined(MESH)
call FEM_quadrature_init
#elif defined(GRID)
call base64_init
#endif
call YAML_types_init
call YAML_parse_init
call HDF5_utilities_init
call results_init(restart=interface_restartInc>0)
call config_init
call math_init
call rotations_init
call lattice_init
call HDF5_utilities_init
call results_init(restart=interface_restartInc>0)
#if defined(MESH)
call discretization_mesh_init(restart=interface_restartInc>0)
#elif defined(GRID)
@@ -85,6 +85,7 @@ module HDF5_utilities
HDF5_utilities_init, &
HDF5_read, &
HDF5_write, &
HDF5_write_str, &
HDF5_addAttribute, &
HDF5_addGroup, &
HDF5_openGroup, &
@@ -128,10 +129,11 @@ end subroutine HDF5_utilities_init
!--------------------------------------------------------------------------------------------------
!> @brief open and initializes HDF5 output file
!--------------------------------------------------------------------------------------------------
integer(HID_T) function HDF5_openFile(fileName,mode)
integer(HID_T) function HDF5_openFile(fileName,mode,parallel)

character(len=*), intent(in) :: fileName
character, intent(in), optional :: mode
logical, intent(in), optional :: parallel

character :: m
integer(HID_T) :: plist_id
@@ -148,7 +150,11 @@ integer(HID_T) function HDF5_openFile(fileName,mode)
if(hdferr < 0) error stop 'HDF5 error'

#ifdef PETSC
if (present(parallel)) then
if (parallel) call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
else
call h5pset_fapl_mpio_f(plist_id, PETSC_COMM_WORLD, MPI_INFO_NULL, hdferr)
endif
if(hdferr < 0) error stop 'HDF5 error'
#endif
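The new optional `parallel` argument lets a caller open the file without the MPI-IO file-access property, so a single process can append to it (as results_openJobFile(parallel=.false.) does further down). Roughly the same distinction in h5py, assuming a parallel-enabled h5py build and an example file name:

import h5py
from mpi4py import MPI

f_par = h5py.File('geom_load.hdf5', 'a', driver='mpio', comm=MPI.COMM_WORLD)  # collective access
f_par.close()
f_ser = h5py.File('geom_load.hdf5', 'a')                                      # one process only
f_ser.close()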
@@ -1467,6 +1473,48 @@ subroutine HDF5_write_real7(dataset,loc_id,datasetName,parallel)
end subroutine HDF5_write_real7


!--------------------------------------------------------------------------------------------------
!> @brief Write dataset of type string (scalar).
!> @details Not collective, must be called by one process at a time.
!--------------------------------------------------------------------------------------------------
subroutine HDF5_write_str(dataset,loc_id,datasetName)

character(len=*), intent(in) :: dataset
integer(HID_T), intent(in) :: loc_id
character(len=*), intent(in) :: datasetName !< name of the dataset in the file

integer(HID_T) :: filetype_id, space_id, dataset_id
integer :: hdferr
character(len=len_trim(dataset)+1,kind=C_CHAR), dimension(1), target :: dataset_
type(C_PTR), target, dimension(1) :: ptr


dataset_(1) = trim(dataset)//C_NULL_CHAR
ptr(1) = c_loc(dataset_(1))

call h5tcopy_f(H5T_STRING, filetype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tset_size_f(filetype_id, int(len(dataset_),HSIZE_T), hdferr)
if(hdferr < 0) error stop 'HDF5 error'

call h5screate_f(H5S_SCALAR_F, space_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5dcreate_f(loc_id, datasetName, H5T_STRING, space_id, dataset_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'

call h5dwrite_f(dataset_id, H5T_STRING, c_loc(ptr), hdferr)
if(hdferr < 0) error stop 'HDF5 error'

call h5dclose_f(dataset_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5sclose_f(space_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(filetype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'

end subroutine HDF5_write_str


!--------------------------------------------------------------------------------------------------
!> @brief write dataset of type integer with 1 dimension
!--------------------------------------------------------------------------------------------------
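HDF5_write_str stores the text as a scalar, fixed-length string dataset; the 'description' string is passed to executionStamp and read back by Result.export_setup() as a dataset attribute. A rough h5py analog; file and dataset names are only illustrative:

import h5py

text = open('material.yaml').read()
with h5py.File('geom_load.hdf5', 'a') as f:
    dset = f.create_dataset('setup/material.yaml', data=text.encode())  # scalar |S<n> dataset
    dset.attrs['description'] = 'main configuration'

with h5py.File('geom_load.hdf5', 'r') as f:
    assert f['setup/material.yaml'][()].decode() == text                # round-trips the text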
@@ -1872,7 +1920,7 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T

!-------------------------------------------------------------------------------------------------
! creating a property list for transfer properties (is collective when reading in parallel)
! creating a property list for transfer properties (is collective when writing in parallel)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
#ifdef PETSC
@@ -119,9 +119,10 @@ function IO_read(fileName) result(fileContent)
character(len=:), allocatable :: fileContent

integer :: &
fileLength, &
fileUnit, &
myStat
integer(pI64) :: &
fileLength

inquire(file = fileName, size=fileLength)
@@ -216,7 +216,13 @@ subroutine inputRead(elem,node0_elem,connectivity_elem,materialAt)
mapElemSet !< list of elements in elementSet


inputFile = IO_readlines(trim(getSolverJobName())//trim(InputFileExtension))
call results_openJobFile
call results_writeDataset_str(IO_read(trim(getSolverJobName())//InputFileExtension), 'setup', &
trim(getSolverJobName())//InputFileExtension, &
'MSC.Marc input deck')
call results_closeJobFile

inputFile = IO_readlines(trim(getSolverJobName())//InputFileExtension)
call inputRead_fileFormat(fileFormatVersion, &
inputFile)
call inputRead_tableStyles(initialcondTableStyle,hypoelasticTableStyle, &
@@ -14,7 +14,7 @@ module YAML_parse

public :: &
YAML_parse_init, &
YAML_parse_file
YAML_parse_str

contains
@@ -29,16 +29,16 @@ end subroutine YAML_parse_init

!--------------------------------------------------------------------------------------------------
!> @brief Parse a YAML file into a structure of nodes.
!> @brief Parse a YAML string into a structure of nodes.
!--------------------------------------------------------------------------------------------------
function YAML_parse_file(fname) result(node)
function YAML_parse_str(str) result(node)

character(len=*), intent(in) :: fname
character(len=*), intent(in) :: str
class (tNode), pointer :: node

node => parse_flow(to_flow(IO_read(fname)))
node => parse_flow(to_flow(str))

end function YAML_parse_file
end function YAML_parse_str


!--------------------------------------------------------------------------------------------------
@@ -7,18 +7,18 @@
#include "IO.f90"
#include "YAML_types.f90"
#include "YAML_parse.f90"
#include "HDF5_utilities.f90"
#include "results.f90"
#include "config.f90"
#include "LAPACK_interface.f90"
#include "math.f90"
#include "rotations.f90"
#include "lattice.f90"
#include "element.f90"
#include "HDF5_utilities.f90"
#include "results.f90"
#include "geometry_plastic_nonlocal.f90"
#include "discretization.f90"
#include "Marc/discretization_Marc.f90"
#include "material.f90"
#include "lattice.f90"
#include "phase.f90"
#include "phase_mechanical.f90"
#include "phase_mechanical_elastic.f90"
@@ -8,7 +8,8 @@ module config
use IO
use YAML_parse
use YAML_types

use results
use parallelization

implicit none
private
@@ -31,6 +32,7 @@ subroutine config_init

print'(/,a)', ' <<<+- config init -+>>>'; flush(IO_STDOUT)


call parse_material
call parse_numerics
call parse_debug
@@ -41,15 +43,25 @@ end subroutine config_init
!--------------------------------------------------------------------------------------------------
!> @brief Read material.yaml or <jobname>.yaml.
!--------------------------------------------------------------------------------------------------
subroutine parse_material
subroutine parse_material()

logical :: fileExists
character(len=:), allocatable :: fileContent


inquire(file='material.yaml',exist=fileExists)
if(.not. fileExists) call IO_error(100,ext_msg='material.yaml')

if (worldrank == 0) then
print*, 'reading material.yaml'; flush(IO_STDOUT)
config_material => YAML_parse_file('material.yaml')
fileContent = IO_read('material.yaml')
call results_openJobFile(parallel=.false.)
call results_writeDataset_str(fileContent,'setup','material.yaml','main configuration')
call results_closeJobFile
endif
call parallelization_bcast_str(fileContent)

config_material => YAML_parse_str(fileContent)

end subroutine parse_material
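parse_material now reads material.yaml only on rank 0, archives it in the results file, and broadcasts the string so every rank parses the same content with YAML_parse_str. The same pattern in Python with mpi4py and PyYAML, purely as an illustration (not DAMASK code):

from mpi4py import MPI
import yaml

comm = MPI.COMM_WORLD
text = None
if comm.Get_rank() == 0:
    with open('material.yaml') as f:
        text = f.read()                # rank 0 is the only reader (and would archive the string)
text = comm.bcast(text, root=0)        # every rank now holds the identical string
config = yaml.safe_load(text)          # parse the string, not the file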
@@ -57,16 +69,28 @@ end subroutine parse_material
!--------------------------------------------------------------------------------------------------
!> @brief Read numerics.yaml.
!--------------------------------------------------------------------------------------------------
subroutine parse_numerics
subroutine parse_numerics()

logical :: fexist
logical :: fileExists
character(len=:), allocatable :: fileContent


config_numerics => emptyDict
inquire(file='numerics.yaml', exist=fexist)
if (fexist) then

inquire(file='numerics.yaml', exist=fileExists)
if (fileExists) then

if (worldrank == 0) then
print*, 'reading numerics.yaml'; flush(IO_STDOUT)
config_numerics => YAML_parse_file('numerics.yaml')
fileContent = IO_read('numerics.yaml')
call results_openJobFile(parallel=.false.)
call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration')
call results_closeJobFile
endif
call parallelization_bcast_str(fileContent)

config_numerics => YAML_parse_str(fileContent)

endif

end subroutine parse_numerics
@@ -75,17 +99,29 @@ end subroutine parse_numerics
!--------------------------------------------------------------------------------------------------
!> @brief Read debug.yaml.
!--------------------------------------------------------------------------------------------------
subroutine parse_debug
subroutine parse_debug()

logical :: fexist
logical :: fileExists
character(len=:), allocatable :: fileContent


config_debug => emptyDict
inquire(file='debug.yaml', exist=fexist)
fileExists: if (fexist) then

inquire(file='debug.yaml', exist=fileExists)
if (fileExists) then

if (worldrank == 0) then
print*, 'reading debug.yaml'; flush(IO_STDOUT)
config_debug => YAML_parse_file('debug.yaml')
endif fileExists
fileContent = IO_read('debug.yaml')
call results_openJobFile(parallel=.false.)
call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration')
call results_closeJobFile
endif
call parallelization_bcast_str(fileContent)

config_debug => YAML_parse_str(fileContent)

endif

end subroutine parse_debug
@@ -107,6 +107,8 @@ program DAMASK_grid
step_bc, &
step_mech, &
step_discretization
character(len=:), allocatable :: &
fileContent, fname

!--------------------------------------------------------------------------------------------------
! init DAMASK (all modules)
@@ -127,7 +129,17 @@ program DAMASK_grid
if (stagItMax < 0) call IO_error(301,ext_msg='maxStaggeredIter')
if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')

config_load => YAML_parse_file(trim(interface_loadFile))
if (worldrank == 0) then
fileContent = IO_read(interface_loadFile)
fname = interface_loadFile
if (scan(fname,'/') /= 0) fname = fname(scan(fname,'/',.true.)+1:)
call results_openJobFile(parallel=.false.)
call results_writeDataset_str(fileContent,'setup',fname,'load case definition (grid solver)')
call results_closeJobFile
endif

call parallelization_bcast_str(fileContent)
config_load => YAML_parse_str(fileContent)
solver => config_load%get('solver')

!--------------------------------------------------------------------------------------------------
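The scan(fname,'/',.true.) call keeps only the part after the last '/' so the dataset in 'setup' is labeled with the bare file name rather than the full path given on the command line. In Python terms (example path assumed):

import os

fname = os.path.basename('runs/tensionX.yaml')   # 'tensionX.yaml'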
@@ -68,11 +68,21 @@ subroutine discretization_grid_init(restart)
devNull, z, z_offset
integer, dimension(worldsize) :: &
displs, sendcounts
character(len=:), allocatable :: &
fileContent, fname


print'(/,a)', ' <<<+- discretization_grid init -+>>>'; flush(IO_STDOUT)


if(worldrank == 0) then
call readVTI(grid,geomSize,origin,materialAt_global)
fileContent = IO_read(interface_geomFile)
call readVTI(grid,geomSize,origin,materialAt_global,fileContent)
fname = interface_geomFile
if (scan(fname,'/') /= 0) fname = fname(scan(fname,'/',.true.)+1:)
call results_openJobFile(parallel=.false.)
call results_writeDataset_str(fileContent,'setup',fname,'geometry definition (grid solver)')
call results_closeJobFile
else
allocate(materialAt_global(0)) ! needed for IntelMPI
endif
@@ -157,7 +167,8 @@ end subroutine discretization_grid_init
!> @brief Parse vtk image data (.vti)
!> @details https://vtk.org/Wiki/VTK_XML_Formats
!--------------------------------------------------------------------------------------------------
subroutine readVTI(grid,geomSize,origin,material)
subroutine readVTI(grid,geomSize,origin,material, &
fileContent)

integer, dimension(3), intent(out) :: &
grid ! grid (across all processes!)
@@ -166,28 +177,19 @@ subroutine readVTI(grid,geomSize,origin,material)
origin ! origin (across all processes!)
integer, dimension(:), intent(out), allocatable :: &
material
character(len=*), intent(in) :: &
fileContent

character(len=:), allocatable :: fileContent, dataType, headerType
character(len=:), allocatable :: dataType, headerType
logical :: inFile,inImage,gotCellData,compressed
integer :: fileUnit, myStat
integer(pI64) :: &
fileLength, & !< length of the geom file (in characters)
startPos, endPos, &
s


grid = -1
geomSize = -1.0_pReal

!--------------------------------------------------------------------------------------------------
! read raw data as stream
inquire(file = trim(interface_geomFile), size=fileLength)
open(newunit=fileUnit, file=trim(interface_geomFile), access='stream',&
status='old', position='rewind', action='read',iostat=myStat)
if(myStat /= 0) call IO_error(100,ext_msg=trim(interface_geomFile))
allocate(character(len=fileLength)::fileContent)
read(fileUnit) fileContent
close(fileUnit)

inFile = .false.
inImage = .false.
gotCelldata = .false.
@@ -1258,7 +1258,7 @@ subroutine selfTest
error stop 'math_sym33to6/math_6toSym33'

call random_number(t66)
if(any(dNeq(math_sym3333to66(math_66toSym3333(t66)),t66))) &
if(any(dNeq(math_sym3333to66(math_66toSym3333(t66)),t66,1.0e-15_pReal))) &
error stop 'math_sym3333to66/math_66toSym3333'

call random_number(v6)
@@ -24,9 +24,18 @@ module parallelization
worldrank = 0, & !< MPI worldrank (/=0 for MPI simulations only)
worldsize = 1 !< MPI worldsize (/=1 for MPI simulations only)

#ifdef PETSC
#ifndef PETSC
public :: parallelization_bcast_str

contains
subroutine parallelization_bcast_str(string)
character(len=:), allocatable, intent(inout) :: string
end subroutine parallelization_bcast_str

#else
public :: &
parallelization_init
parallelization_init, &
parallelization_bcast_str

contains
@@ -101,6 +110,27 @@ subroutine parallelization_init
!$ call omp_set_num_threads(OMP_NUM_THREADS)

end subroutine parallelization_init


!--------------------------------------------------------------------------------------------------
!> @brief Broadcast a string from process 0.
!--------------------------------------------------------------------------------------------------
subroutine parallelization_bcast_str(string)

character(len=:), allocatable, intent(inout) :: string

integer :: strlen, ierr ! pI64 for strlen not supported by MPI


if (worldrank == 0) strlen = len(string)
call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD, ierr)
if (worldrank /= 0) allocate(character(len=strlen)::string)

call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD, ierr)


end subroutine parallelization_bcast_str

#endif

end module parallelization
@@ -89,6 +89,7 @@ end subroutine prec_init
! replaces "==" but for certain (relative) tolerance. Counterpart to dNeq
! https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/
! AlmostEqualRelative
! ToDo: Use 'spacing': https://gcc.gnu.org/onlinedocs/gfortran/SPACING.html#SPACING
!--------------------------------------------------------------------------------------------------
logical elemental pure function dEq(a,b,tol)
@@ -52,6 +52,7 @@ module results
results_openGroup, &
results_closeGroup, &
results_writeDataset, &
results_writeDataset_str, &
results_setLink, &
results_addAttribute, &
results_removeLink, &
@@ -64,6 +65,10 @@ subroutine results_init(restart)
logical, intent(in) :: restart

character(len=pPathLen) :: commandLine
integer :: hdferr
integer(HID_T) :: group_id
character(len=:), allocatable :: date


print'(/,a)', ' <<<+- results init -+>>>'; flush(IO_STDOUT)
@@ -73,7 +78,7 @@ subroutine results_init(restart)
if (.not. restart) then
resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','w')
call results_addAttribute('DADF5_version_major',0)
call results_addAttribute('DADF5_version_minor',13)
call results_addAttribute('DADF5_version_minor',14)
call get_command_argument(0,commandLine)
call results_addAttribute('creator',trim(commandLine)//' '//DAMASKVERSION)
call results_addAttribute('created',now())
@@ -81,18 +86,34 @@ subroutine results_init(restart)
call results_addAttribute('call',trim(commandLine))
call results_closeGroup(results_addGroup('cell_to'))
call results_addAttribute('description','mappings to place data in space','cell_to')
call results_closeJobFile
call results_closeGroup(results_addGroup('setup'))
call results_addAttribute('description','input data used to run the simulation','setup')
else
date = now()
call results_openJobFile
call get_command(commandLine)
call results_addAttribute('call (restart at '//date//')',trim(commandLine))
call h5gmove_f(resultsFile,'setup','tmp',hdferr)
call results_addAttribute('description','input data used to run the simulation up to restart at '//date,'tmp')
call results_closeGroup(results_addGroup('setup'))
call results_addAttribute('description','input data used to run the simulation','setup')
call h5gmove_f(resultsFile,'tmp','setup/previous',hdferr)
endif

call results_closeJobFile

end subroutine results_init


!--------------------------------------------------------------------------------------------------
!> @brief opens the results file to append data
!--------------------------------------------------------------------------------------------------
subroutine results_openJobFile
subroutine results_openJobFile(parallel)

resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','a')
logical, intent(in), optional :: parallel


resultsFile = HDF5_openFile(getSolverJobName()//'.hdf5','a',parallel)

end subroutine results_openJobFile
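Together with the version bump to 0.14, every new result file now carries a 'setup' group holding the input files, and on restart the earlier inputs are moved to 'setup/previous'. A quick inspection sketch with h5py; the file name is assumed:

import h5py

with h5py.File('geom_load.hdf5', 'r') as f:
    print(f.attrs['DADF5_version_major'], f.attrs['DADF5_version_minor'])   # 0 14
    for name, item in f['setup'].items():            # e.g. material.yaml, numerics.yaml, ...
        print(name, '-', item.attrs.get('description', ''))
    # after a restart, the inputs of the earlier run sit under 'setup/previous'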
@@ -297,6 +318,25 @@ subroutine results_removeLink(link)
end subroutine results_removeLink


!--------------------------------------------------------------------------------------------------
!> @brief Store string dataset.
!> @details Not collective, must be called by one process at a time.
!--------------------------------------------------------------------------------------------------
subroutine results_writeDataset_str(dataset,group,label,description)

character(len=*), intent(in) :: label,group,description,dataset

integer(HID_T) :: groupHandle


groupHandle = results_openGroup(group)
call HDF5_write_str(dataset,groupHandle,label)
call executionStamp(group//'/'//label,description)
call HDF5_closeGroup(groupHandle)

end subroutine results_writeDataset_str


!--------------------------------------------------------------------------------------------------
!> @brief Store real scalar dataset with associated metadata.
!--------------------------------------------------------------------------------------------------