Merge branch 'MiscImprovements' into 'development'

Misc improvements

See merge request damask/DAMASK!76
Author: Vitesh
Date:   2019-05-09 08:38:08 +02:00
Commit: 0e77be2590

10 changed files with 203 additions and 56 deletions

View File

@@ -0,0 +1,83 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os
import argparse

import numpy as np

import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------
parser = argparse.ArgumentParser()

#ToDo: We need to decide on a way of handling arguments of variable length
#https://stackoverflow.com/questions/15459997/passing-integer-lists-to-python

#parser.add_argument('--version', action='version', version='%(prog)s {}'.format(scriptID))
parser.add_argument('filenames', nargs='+',
                    help='DADF5 files')
parser.add_argument('-d','--dir', dest='dir', default='postProc', metavar='string',
                    help='name of subdirectory to hold output')

options = parser.parse_args()

options.labels = ['Fe','Fp','xi_sl']

# --- loop over input files ------------------------------------------------------------------------

for filename in options.filenames:
  results = damask.DADF5(filename)

  if not results.structured: continue
  delta = results.size/results.grid*0.5
  x, y, z = np.meshgrid(np.linspace(delta[2],results.size[2]-delta[2],results.grid[2]),
                        np.linspace(delta[1],results.size[1]-delta[1],results.grid[1]),
                        np.linspace(delta[0],results.size[0]-delta[0],results.grid[0]),
                        indexing = 'ij')

  coords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)

  for i,inc in enumerate(results.increments):
    print('Output step {}/{}'.format(i+1,len(results.increments)))
    header = '1 header\n'

    data = np.array([inc['inc'] for j in range(np.product(results.grid))]).reshape([np.product(results.grid),1])
    header += 'inc'

    data = np.concatenate((data,np.array([j+1 for j in range(np.product(results.grid))]).reshape([np.product(results.grid),1])),1)
    header += ' node'

    coords = coords.reshape([np.product(results.grid),3])
    data = np.concatenate((data,coords),1)
    header += ' 1_pos 2_pos 3_pos'

    results.active['increments'] = [inc]
    for label in options.labels:
      for o in results.c_output_types:
        results.active['c_output_types'] = [o]
        for c in results.constituents:
          results.active['constituents'] = [c]
          x = results.get_dataset_location(label)
          if len(x) == 0:
            continue
          label = x[0].split('/')[-1]
          array = results.read_dataset(x,0)
          d = np.product(np.shape(array)[1:])
          array = np.reshape(array,[np.product(results.grid),d])
          data = np.concatenate((data,array),1)
          header += ''.join([' {}_{}'.format(j+1,label) for j in range(d)])

    dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
    try:
      os.mkdir(dirname)
    except FileExistsError:
      pass
    file_out = '{}_inc{:04d}.txt'.format(filename.split('.')[0],inc['inc'])
    np.savetxt(os.path.join(dirname,file_out),data,header=header,comments='')
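
The script thus emits one ASCII table per increment into the chosen subdirectory: a two-line header (line count, then column labels) followed by one row per grid point. A minimal sketch of reading such a table back; the file name postProc/job_inc0001.txt is hypothetical, following the '{}_inc{:04d}.txt' pattern above:

    import numpy as np

    table = 'postProc/job_inc0001.txt'            # hypothetical output of the script above
    with open(table) as f:
        f.readline()                              # '1 header' line count
        columns = f.readline().split()            # inc node 1_pos 2_pos 3_pos ...
    data = np.loadtxt(table, skiprows=2)          # one row per grid point
    print(dict(zip(columns, data[0])))            # first grid point, labelled by column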

View File

@@ -21,6 +21,8 @@ parser = argparse.ArgumentParser()
 #parser.add_argument('--version', action='version', version='%(prog)s {}'.format(scriptID))
 parser.add_argument('filenames', nargs='+',
                     help='DADF5 files')
+parser.add_argument('-d','--dir', dest='dir', default='postProc', metavar='string',
+                    help='name of subdirectory to hold output')

 options = parser.parse_args()

@@ -80,12 +82,17 @@ for filename in options.filenames:
     if results.structured:
       writer = vtk.vtkXMLRectilinearGridWriter()

+    dirname = os.path.abspath(os.path.join(os.path.dirname(filename),options.dir))
+    try:
+      os.mkdir(dirname)
+    except FileExistsError:
+      pass
+    file_out = '{}_inc{:04d}.{}'.format(filename.split('.')[0],inc['inc'],writer.GetDefaultFileExtension())
+
     writer.SetCompressorTypeToZLib()
     writer.SetDataModeToBinary()
-    writer.SetFileName(os.path.join(os.path.split(filename)[0],
-                                    os.path.splitext(os.path.split(filename)[1])[0] +
-                                    '_inc{:04d}'.format(i) +                                        # ToDo: adjust to length of increments
-                                    '.' + writer.GetDefaultFileExtension()))
+    writer.SetFileName(os.path.join(dirname,file_out))

     if results.structured:
       writer.SetInputData(rGrid)
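
Both scripts now share the same create-if-missing handling of the output directory. As a side note, os.makedirs with exist_ok=True is an equivalent one-liner for the try/except pattern; a sketch, not part of the diff:

    import os

    def ensure_dir(path):
        """Return the absolute path, creating the directory first if needed."""
        os.makedirs(path, exist_ok=True)          # same effect as try: os.mkdir(...) except FileExistsError
        return os.path.abspath(path)

    dirname = ensure_dir(os.path.join(os.path.dirname('job.hdf5'), 'postProc'))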

View File

@@ -20,14 +20,14 @@ class DADF5():
     with h5py.File(filename,'r') as f:

-      if f.attrs['DADF5-major'] != 0 or f.attrs['DADF5-minor'] != 1:
+      if f.attrs['DADF5-major'] != 0 or f.attrs['DADF5-minor'] != 2:
         raise TypeError('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version']))

-      self.structured = 'grid' in f['mapping'].attrs.keys()
+      self.structured = 'grid' in f['geometry'].attrs.keys()

       if self.structured:
-        self.grid = f['mapping'].attrs['grid']
-        self.size = f['mapping'].attrs['size']
+        self.grid = f['geometry'].attrs['grid']
+        self.size = f['geometry'].attrs['size']

       r=re.compile('inc[0-9]+')
       self.increments = [{'inc': int(u[3:]),
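
The move from 'mapping' to 'geometry' matches the writer-side change in DAMASK_spectral further down. A minimal h5py sketch of the layout the class now expects; the file name is hypothetical:

    import h5py

    with h5py.File('job.hdf5','r') as f:          # hypothetical DADF5 file
        if f.attrs['DADF5-major'] != 0 or f.attrs['DADF5-minor'] != 2:
            raise TypeError('Unsupported DADF5 version {}'.format(f.attrs['DADF5-version']))
        if 'grid' in f['geometry'].attrs:         # structured (grid) results only
            print('grid:', f['geometry'].attrs['grid'],
                  'size:', f['geometry'].attrs['size'])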

View File

@@ -47,7 +47,8 @@ module CPFEM
   public :: &
     CPFEM_general, &
-    CPFEM_initAll
+    CPFEM_initAll, &
+    CPFEM_results

 contains

@@ -633,4 +634,35 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
 end subroutine CPFEM_general

+
+!--------------------------------------------------------------------------------------------------
+!> @brief triggers writing of the results
+!--------------------------------------------------------------------------------------------------
+subroutine CPFEM_results(inc,time)
+  use prec, only: &
+    pInt
+#ifdef DAMASK_HDF5
+  use results
+  use HDF5_utilities
+#endif
+  use constitutive, only: &
+    constitutive_results
+  use crystallite, only: &
+    crystallite_results
+
+  implicit none
+  integer(pInt), intent(in) :: inc
+  real(pReal),   intent(in) :: time
+
+#ifdef DAMASK_HDF5
+  call results_openJobFile
+  call results_addIncrement(inc,time)
+  call constitutive_results
+  call crystallite_results
+  call results_removeLink('current')                                                                ! ToDo: put this into closeJobFile
+  call results_closeJobFile
+#endif
+
+end subroutine CPFEM_results
+
 end module CPFEM
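
Each CPFEM_results call therefore adds one increment group to the job file, named after the 'inc[0-9]+' pattern that DADF5.py matches above, with the time recorded by results_addIncrement. A sketch of listing those groups; the file name is hypothetical and the attribute names are assumed:

    import re
    import h5py

    with h5py.File('job.hdf5','r') as f:          # hypothetical DADF5 file
        for name in f:
            if re.match('inc[0-9]+', name):       # one group per CPFEM_results call
                print(name, dict(f[name].attrs))  # attrs hold the time passed to results_addIncrement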

View File

@@ -290,6 +290,7 @@ subroutine CPFEM_age()
 end subroutine CPFEM_age

+
 !--------------------------------------------------------------------------------------------------
 !> @brief triggers writing of the results
 !--------------------------------------------------------------------------------------------------

View File

@@ -45,7 +45,7 @@ contains
 !> @brief initializes the solver by interpreting the command line arguments. Also writes
 !! information on computation to screen
 !--------------------------------------------------------------------------------------------------
-subroutine DAMASK_interface_init()
+subroutine DAMASK_interface_init
   use, intrinsic :: &
     iso_fortran_env
   use, intrinsic :: &

@@ -130,11 +130,11 @@ subroutine DAMASK_interface_init()
   call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,mpi_err)
   if (mpi_err /= 0) call quit(1)
   if (threadLevel<MPI_THREAD_FUNNELED) then
-    write(6,'(a)') ' MPI library does not support OpenMP'
+    write(6,'(/,a)') ' ERROR: MPI library does not support OpenMP'
     call quit(1)
   endif
 #endif
-  call PETScInitialize(PETSC_NULL_CHARACTER,petsc_err)                                              ! according to PETSc manual, that should be the first line in the code
+  call PETScInitializeNoArguments(petsc_err)                                                        ! according to PETSc manual, that should be the first line in the code
   CHKERRQ(petsc_err)                                                                                ! this is a macro definition, it is case sensitive
   call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,mpi_err)

@@ -144,11 +144,11 @@ subroutine DAMASK_interface_init()
   mainProcess: if (worldrank == 0) then
     if (output_unit /= 6) then
-      write(output_unit,'(a)') ' STDOUT != 6'
+      write(output_unit,'(/,a)') ' ERROR: STDOUT != 6'
       call quit(1)
     endif
     if (error_unit /= 0) then
-      write(output_unit,'(a)') ' STDERR != 0'
+      write(output_unit,'(/,a)') ' ERROR: STDERR != 0'
       call quit(1)
     endif
   else mainProcess

@@ -254,14 +254,14 @@ subroutine DAMASK_interface_init()
         call get_command_argument(i+1,arg)
         read(arg,*,iostat=stat) interface_restartInc
         if (interface_restartInc < 0 .or. stat /= 0) then
-          write(6,'(a)') ' Could not parse restart increment: '//trim(arg)
+          write(6,'(/,a)') ' ERROR: Could not parse restart increment: '//trim(arg)
           call quit(1)
         endif
     end select
   enddo

   if (len_trim(loadcaseArg) == 0 .or. len_trim(geometryArg) == 0) then
-    write(6,'(a)') ' Please specify geometry AND load case (-h for help)'
+    write(6,'(/,a)') ' ERROR: Please specify geometry AND load case (-h for help)'
     call quit(1)
   endif

@@ -324,7 +324,7 @@ subroutine setWorkingDirectory(workingDirectoryArg)
   workingDirectory = trim(rectifyPath(workingDirectory))
   error = setCWD(trim(workingDirectory))
   if(error) then
-    write(6,'(a20,a,a16)') ' Working directory "',trim(workingDirectory),'" does not exist'
+    write(6,'(/,a)') ' ERROR: Working directory "'//trim(workingDirectory)//'" does not exist'
     call quit(1)
   endif

@@ -374,7 +374,7 @@ character(len=1024) function getGeometryFile(geometryParameter)
   inquire(file=trim(getGeometryFile), exist=file_exists)
   if (.not. file_exists) then
-    write(6,'(a)') ' Geometry file does not exists ('//trim(getGeometryFile)//')'
+    write(6,'(/,a)') ' ERROR: Geometry file does not exist ('//trim(getGeometryFile)//')'
     call quit(1)
   endif

@@ -399,7 +399,7 @@ character(len=1024) function getLoadCaseFile(loadCaseParameter)
   inquire(file=trim(getLoadCaseFile), exist=file_exists)
   if (.not. file_exists) then
-    write(6,'(a)') ' Load case file does not exists ('//trim(getLoadCaseFile)//')'
+    write(6,'(/,a)') ' ERROR: Load case file does not exist ('//trim(getLoadCaseFile)//')'
     call quit(1)
   endif

View File

@@ -16,6 +16,7 @@
 !> @details Marc subroutines used:
 !> @details   - hypela2
 !> @details   - plotv
+!> @details   - uedinc
 !> @details   - flux
 !> @details   - quit
 !> @details Marc common blocks included:

@@ -273,26 +274,20 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
       outdatedByNewInc = .false.                                                                    ! no aging of state
       calcMode = .false.                                                                            ! pretend last step was collection
       lastLovl = lovl                                                                               ! pretend that this is NOT the first after a lovl change
-      !$OMP CRITICAL (write2out)
       write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> start of analysis..! ',m(1),nn
       flush(6)
-      !$OMP END CRITICAL (write2out)
     else if (inc - theInc > 1) then                                                                 ! >> restart of broken analysis <<
       lastIncConverged = .false.                                                                    ! no Jacobian backup
       outdatedByNewInc = .false.                                                                    ! no aging of state
       calcMode = .true.                                                                             ! pretend last step was calculation
-      !$OMP CRITICAL (write2out)
       write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> restart of analysis..! ',m(1),nn
       flush(6)
-      !$OMP END CRITICAL (write2out)
     else                                                                                            ! >> just the next inc <<
       lastIncConverged = .true.                                                                     ! request Jacobian backup
       outdatedByNewInc = .true.                                                                     ! request aging of state
       calcMode = .true.                                                                             ! assure last step was calculation
-      !$OMP CRITICAL (write2out)
       write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> new increment..! ',m(1),nn
       flush(6)
-      !$OMP END CRITICAL (write2out)
     endif
   else if ( timinc < theDelta ) then                                                               ! >> cutBack <<
     lastIncConverged = .false.                                                                     ! no Jacobian backup

@@ -300,10 +295,8 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
     terminallyIll = .false.
     cycleCounter = -1                                                                               ! first calc step increments this to cycle = 0
     calcMode = .true.                                                                               ! pretend last step was calculation
-    !$OMP CRITICAL (write2out)
     write(6,'(a,i6,1x,i2)') '<< HYPELA2 >> cutback detected..! ',m(1),nn
     flush(6)
-    !$OMP END CRITICAL (write2out)
   endif                                                                                             ! convergence treatment end

@@ -365,7 +358,6 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
   lastLovl = lovl                                                                                   ! record lovl
   call CPFEM_general(computationMode,usePingPong,ffn,ffn1,t(1),timinc,m(1),nn,stress,ddsdde)
-
 ! Mandel: 11, 22, 33, SQRT(2)*12, SQRT(2)*23, SQRT(2)*13
 ! Marc:   11, 22, 33, 12, 23, 13
 ! Marc:   11, 22, 33, 12

@@ -407,6 +399,26 @@ subroutine flux(f,ts,n,time)
 end subroutine flux

+
+!--------------------------------------------------------------------------------------------------
+!> @brief trigger writing of results at the end of each increment (Marc user subroutine)
+!--------------------------------------------------------------------------------------------------
+subroutine uedinc(inc,incsub)
+  use prec, only: &
+    pReal, &
+    pInt
+  use CPFEM, only: &
+    CPFEM_results
+
+  implicit none
+  integer, intent(in) :: inc, incsub
+#include QUOTE(PASTE(./MarcInclude/creeps,Marc4DAMASK))                                             ! creeps is needed for timinc (time increment)
+
+  call CPFEM_results(inc,cptim)
+
+end subroutine uedinc
+
+
 !--------------------------------------------------------------------------------------------------
 !> @brief sets user defined output variables for Marc
 !> @details select a variable contour plotting (user subroutine).
View File

@@ -77,6 +77,7 @@ program DAMASK_spectral
   use grid_mech_FEM
   use grid_damage_spectral
   use grid_thermal_spectral
+  use HDF5_utilities
   use results

   implicit none

@@ -155,8 +156,6 @@ program DAMASK_spectral
 write(6,'(/,a)') ' Shanthraj et al., Handbook of Mechanics of Materials, 2019'
 write(6,'(a)')   ' https://doi.org/10.1007/978-981-10-6855-3_80'

-call results_openJobFile()
-call results_closeJobFile()

 !--------------------------------------------------------------------------------------------------
 ! initialize field solver information
 nActiveFields = 1

@@ -301,7 +300,7 @@ program DAMASK_spectral
   reportAndCheck: if (worldrank == 0) then
     write (loadcase_string, '(i6)' ) currentLoadCase
-    write(6,'(1x,a,i6)') 'load case: ', currentLoadCase
+    write(6,'(/,1x,a,i6)') 'load case: ', currentLoadCase
     if (.not. newLoadCase%followFormerTrajectory) write(6,'(2x,a)') 'drop guessing along trajectory'
     if (newLoadCase%deformation%myType == 'l') then
       do j = 1_pInt, 3_pInt

@@ -348,10 +347,10 @@ program DAMASK_spectral
     if (newLoadCase%time < 0.0_pReal) errorID = 834_pInt                                            ! negative time increment
     write(6,'(2x,a,f12.6)') 'time: ', newLoadCase%time
     if (newLoadCase%incs < 1_pInt) errorID = 835_pInt                                               ! non-positive incs count
     write(6,'(2x,a,i5)') 'increments: ', newLoadCase%incs
     if (newLoadCase%outputfrequency < 1_pInt) errorID = 836_pInt                                    ! non-positive result frequency
     write(6,'(2x,a,i5)') 'output frequency: ', newLoadCase%outputfrequency
-    write(6,'(2x,a,i5,/)') 'restart frequency: ', newLoadCase%restartfrequency
+    write(6,'(2x,a,i5)') 'restart frequency: ', newLoadCase%restartfrequency

     if (errorID > 0_pInt) call IO_error(error_ID = errorID, ext_msg = loadcase_string)              ! exit with error message
   endif reportAndCheck
   loadCases = [loadCases,newLoadCase]                                                               ! load case is ok, append it

@@ -359,8 +358,9 @@ program DAMASK_spectral
 close(fileUnit)

 call results_openJobFile
-call results_addAttribute('grid',grid,'mapping')
-call results_addAttribute('size',geomSize,'mapping')
+call HDF5_closeGroup(results_addGroup('geometry'))
+call results_addAttribute('grid',grid,'geometry')
+call results_addAttribute('size',geomSize,'geometry')
 call results_closeJobFile

View File

@@ -34,13 +34,13 @@ subroutine quit(stop_id)
 #endif

   call date_and_time(values = dateAndTime)
-  write(6,'(/,a)') 'DAMASK terminated on:'
-  write(6,'(a,2(i2.2,a),i4.4)') 'Date: ',dateAndTime(3),'/',&
+  write(6,'(/,a)') ' DAMASK terminated on:'
+  write(6,'(a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',&
                                           dateAndTime(2),'/',&
                                           dateAndTime(1)
-  write(6,'(a,2(i2.2,a),i2.2)') 'Time: ',dateAndTime(5),':',&
+  write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':',&
                                           dateAndTime(6),':',&
                                           dateAndTime(7)

   if (stop_id == 0 .and. ierr == 0 .and. error == 0) stop 0                                         ! normal termination
   if (stop_id == 2 .and. ierr == 0 .and. error == 0) stop 2                                         ! not all incs converged

View File

@@ -66,9 +66,9 @@ subroutine results_init
 write(6,'(a)') ' https://doi.org/10.1007/s40192-018-0118-7'

 resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
-call HDF5_addAttribute(resultsFile,'DADF5-version',0.1)
+call HDF5_addAttribute(resultsFile,'DADF5-version',0.2)
 call HDF5_addAttribute(resultsFile,'DADF5-major',0)
-call HDF5_addAttribute(resultsFile,'DADF5-minor',1)
+call HDF5_addAttribute(resultsFile,'DADF5-minor',2)
 call HDF5_addAttribute(resultsFile,'DAMASK',DAMASKVERSION)
 call get_command(commandLine)
 call HDF5_addAttribute(resultsFile,'call',trim(commandLine))

@@ -103,7 +103,7 @@ end subroutine results_closeJobFile

 !--------------------------------------------------------------------------------------------------
-!> @brief closes the results file
+!> @brief creates the group of increment and adds time as attribute to the file
 !--------------------------------------------------------------------------------------------------
 subroutine results_addIncrement(inc,time)

@@ -250,6 +250,8 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIunit)
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
+#else
+  call HDF5_write(groupHandle,dataset,label,.false.)
 #endif

 if (HDF5_objectExists(groupHandle,label)) &

@@ -277,6 +279,8 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIunit)
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
+#else
+  call HDF5_write(groupHandle,dataset,label,.false.)
 #endif

 if (HDF5_objectExists(groupHandle,label)) &

@@ -305,6 +309,8 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIunit)
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
+#else
+  call HDF5_write(groupHandle,dataset,label,.false.)
 #endif

 if (HDF5_objectExists(groupHandle,label)) &

@@ -333,6 +339,8 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit)
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
+#else
+  call HDF5_write(groupHandle,dataset,label,.false.)
 #endif

 if (HDF5_objectExists(groupHandle,label)) &

@@ -347,7 +355,7 @@ end subroutine results_writeVectorDataset_int

 !--------------------------------------------------------------------------------------------------
-!> @brief stores a vector dataset in a group
+!> @brief stores a tensor dataset in a group
 !--------------------------------------------------------------------------------------------------
 subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit)

@@ -361,6 +369,8 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit)
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
+#else
+  call HDF5_write(groupHandle,dataset,label,.false.)
 #endif

 if (HDF5_objectExists(groupHandle,label)) &

@@ -375,7 +385,7 @@ end subroutine results_writeTensorDataset_int

 !--------------------------------------------------------------------------------------------------
-!> @brief stores a vector dataset in a group
+!> @brief stores a scalar dataset in a group
 !--------------------------------------------------------------------------------------------------
 subroutine results_writeScalarDataset_rotation(group,dataset,label,description,lattice_structure)
   use rotations, only: &

@@ -391,6 +401,8 @@ subroutine results_writeScalarDataset_rotation(group,dataset,label,description,lattice_structure)
 #ifdef PETSc
   call HDF5_write(groupHandle,dataset,label,.true.)
+#else
+  call HDF5_write(groupHandle,dataset,label,.false.)
 #endif

 if (HDF5_objectExists(groupHandle,label)) &