Merge branch 'development' into MiscImprovements
commit b8bf329a74

@@ -498,7 +498,7 @@ Processing:
 - rm abq_addUserOutput.py marc_addUserOutput.py
 - $DAMASKROOT/PRIVATE/documenting/scriptHelpToWiki.py --debug *.py
 - cd $DAMASKROOT/processing/post
-- rm marc_to_vtk.py vtk2ang.py
+- rm marc_to_vtk.py vtk2ang.py DAD*.py
 - $DAMASKROOT/PRIVATE/documenting/scriptHelpToWiki.py --debug *.py
 except:
 - master

@@ -30,11 +30,20 @@ plasticity phenopowerlaw
 (output) resistance_slip
 (output) shearrate_slip
 (output) resolvedstress_slip
-(output) totalshear
+(output) accumulatedshear_slip
 (output) resistance_twin
 (output) shearrate_twin
 (output) resolvedstress_twin
-(output) totalvolfrac
+(output) accumulatedshear_twin
 
+# only for HDF5 out
+(output) orientation    # quaternion
+(output) f              # deformation gradient tensor; synonyms: "defgrad"
+(output) fe             # elastic deformation gradient tensor
+(output) fp              # plastic deformation gradient tensor
+(output) p              # first Piola-Kichhoff stress tensor; synonyms: "firstpiola", "1stpiola"
+(output) lp             # plastic velocity gradient tensor
 
 lattice_structure fcc
 Nslip 12   # per family

@@ -12,7 +12,10 @@ patch -p1 < installation/patch/nameOfPatch
 ## Available patches
 
 * **disable_HDF5** disables all HDF5 output.
-HDF5 output is an experimental feature. Also, some routines not present in HDF5 1.8.x are remove to allow compilation of DAMASK with HDF5 < 1.10.x
+HDF5 output is an experimental feature. Also, some routines not present in HDF5 1.8.x are removed to allow compilation of DAMASK with HDF5 < 1.10.x
 
+* **disable_old_output** disables all non-HDF5 output.
+Saves some memory when using only HDF5 output
+
 ## Create patch
 commit your changes

@@ -0,0 +1,178 @@
+From 6dbd904a4cfc28add3c39bb2a4ec9e2dbb2442b6 Mon Sep 17 00:00:00 2001
+From: Martin Diehl <m.diehl@mpie.de>
+Date: Thu, 18 Apr 2019 18:25:32 +0200
+Subject: [PATCH] to create patch
+
+---
+ src/DAMASK_grid.f90    | 81 +-----------------------------------------
+ src/homogenization.f90 |  2 ++
+ 2 files changed, 3 insertions(+), 80 deletions(-)
+
+diff --git a/src/DAMASK_grid.f90 b/src/DAMASK_grid.f90
+index f2f52bb2..a7543f4d 100644
+--- a/src/DAMASK_grid.f90
++++ b/src/DAMASK_grid.f90
+@@ -18,7 +18,6 @@ program DAMASK_spectral
+ use DAMASK_interface, only: &
+ DAMASK_interface_init, &
+ loadCaseFile, &
+- geometryFile, &
+ getSolverJobName, &
+ interface_restartInc
+ use IO, only: &
+@@ -49,14 +48,9 @@ program DAMASK_spectral
+ restartInc
+ use numerics, only: &
+ worldrank, &
+- worldsize, &
+ stagItMax, &
+ maxCutBack, &
+ continueCalculation
+- use homogenization, only: &
+- materialpoint_sizeResults, &
+- materialpoint_results, &
+- materialpoint_postResults
+ use material, only: &
+ thermal_type, &
+ damage_type, &
+@@ -131,12 +125,6 @@ program DAMASK_spectral
+ type(tLoadCase), allocatable, dimension(:) :: loadCases !< array of all load cases
+ type(tLoadCase) :: newLoadCase
+ type(tSolutionState), allocatable, dimension(:) :: solres
+- integer(MPI_OFFSET_KIND) :: fileOffset
+- integer(MPI_OFFSET_KIND), dimension(:), allocatable :: outputSize
+- integer(pInt), parameter :: maxByteOut = 2147483647-4096 !< limit of one file output write https://trac.mpich.org/projects/mpich/ticket/1742
+- integer(pInt), parameter :: maxRealOut = maxByteOut/pReal
+- integer(pLongInt), dimension(2) :: outputIndex
+- PetscErrorCode :: ierr
+ procedure(grid_mech_spectral_basic_init), pointer :: &
+ mech_init
+ procedure(grid_mech_spectral_basic_forward), pointer :: &
+@@ -384,22 +372,6 @@ program DAMASK_spectral
+ ! write header of output file
+ if (worldrank == 0) then
+ writeHeader: if (interface_restartInc < 1_pInt) then
+- open(newunit=fileUnit,file=trim(getSolverJobName())//&
+- '.spectralOut',form='UNFORMATTED',status='REPLACE')
+- write(fileUnit) 'load:', trim(loadCaseFile) ! ... and write header
+- write(fileUnit) 'workingdir:', 'n/a'
+- write(fileUnit) 'geometry:', trim(geometryFile)
+- write(fileUnit) 'grid:', grid
+- write(fileUnit) 'size:', geomSize
+- write(fileUnit) 'materialpoint_sizeResults:', materialpoint_sizeResults
+- write(fileUnit) 'loadcases:', size(loadCases)
+- write(fileUnit) 'frequencies:', loadCases%outputfrequency ! one entry per LoadCase
+- write(fileUnit) 'times:', loadCases%time ! one entry per LoadCase
+- write(fileUnit) 'logscales:', loadCases%logscale
+- write(fileUnit) 'increments:', loadCases%incs ! one entry per LoadCase
+- write(fileUnit) 'startingIncrement:', restartInc ! start with writing out the previous inc
+- write(fileUnit) 'eoh'
+- close(fileUnit) ! end of header
+ open(newunit=statUnit,file=trim(getSolverJobName())//&
+ '.sta',form='FORMATTED',status='REPLACE')
+ write(statUnit,'(a)') 'Increment Time CutbackLevel Converged IterationsNeeded' ! statistics file
+@@ -412,39 +384,6 @@ program DAMASK_spectral
+ endif writeHeader
+ endif
+
+-!--------------------------------------------------------------------------------------------------
+-! prepare MPI parallel out (including opening of file)
+- allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
+- outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
+- call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_allreduce')
+- call MPI_file_open(PETSC_COMM_WORLD, trim(getSolverJobName())//'.spectralOut', &
+- MPI_MODE_WRONLY + MPI_MODE_APPEND, &
+- MPI_INFO_NULL, &
+- fileUnit, &
+- ierr)
+- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_open')
+- call MPI_file_get_position(fileUnit,fileOffset,ierr) ! get offset from header
+- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_get_position')
+- fileOffset = fileOffset + sum(outputSize(1:worldrank)) ! offset of my process in file (header + processes before me)
+- call MPI_file_seek (fileUnit,fileOffset,MPI_SEEK_SET,ierr)
+- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_seek')
+-
+- writeUndeformed: if (interface_restartInc < 1_pInt) then
+- write(6,'(1/,a)') ' ... writing initial configuration to file ........................'
+- call CPFEM_results(0_pInt,0.0_pReal)
+- do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
+- outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, & ! QUESTION: why not starting i at 0 instead of murky 1?
+- min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
+- call MPI_file_write(fileUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), &
+- [(outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)]), &
+- int((outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)), &
+- MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
+- if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write')
+- enddo
+- fileOffset = fileOffset + sum(outputSize) ! forward to current file position
+- endif writeUndeformed
+-
+
+ loadCaseLooping: do currentLoadCase = 1_pInt, size(loadCases)
+ time0 = time ! load case start time
+@@ -574,7 +513,6 @@ program DAMASK_spectral
+ write(6,'(/,a)') ' cutting back '
+ else ! no more options to continue
+ call IO_warning(850_pInt)
+- call MPI_file_close(fileUnit,ierr)
+ close(statUnit)
+ call quit(-1_pInt*(lastRestartWritten+1_pInt)) ! quit and provide information about last restart inc written
+ endif
+@@ -593,24 +531,8 @@ program DAMASK_spectral
+ ' increment ', totalIncsCounter, ' NOT converged'
+ endif; flush(6)
+
+- if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0_pInt) then ! at output frequency
+- write(6,'(1/,a)') ' ... writing results to file ......................................'
+- flush(6)
+- call materialpoint_postResults()
+- call MPI_file_seek (fileUnit,fileOffset,MPI_SEEK_SET,ierr)
+- if (ierr /= 0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
+- do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
+- outputIndex=int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, &
+- min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
+- call MPI_file_write(fileUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
+- [(outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)]), &
+- int((outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)),&
+- MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
+- if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
+- enddo
+- fileOffset = fileOffset + sum(outputSize) ! forward to current file position
++ if (mod(inc,loadCases(currentLoadCase)%outputFrequency) == 0_pInt) & ! at output frequency
+ call CPFEM_results(totalIncsCounter,time)
+- endif
+ if ( loadCases(currentLoadCase)%restartFrequency > 0_pInt & ! writing of restart info requested ...
+ .and. mod(inc,loadCases(currentLoadCase)%restartFrequency) == 0_pInt) then ! ... and at frequency of writing restart information
+ restartWrite = .true. ! set restart parameter for FEsolving
+@@ -633,7 +555,6 @@ program DAMASK_spectral
+ real(convergedCounter, pReal)/&
+ real(notConvergedCounter + convergedCounter,pReal)*100.0_pReal, ' %) increments converged!'
+ flush(6)
+- call MPI_file_close(fileUnit,ierr)
+ close(statUnit)
+
+ if (notConvergedCounter > 0_pInt) call quit(2_pInt) ! error if some are not converged
+diff --git a/src/homogenization.f90 b/src/homogenization.f90
+index 06da6ab2..0743d545 100644
+--- a/src/homogenization.f90
++++ b/src/homogenization.f90
+@@ -269,6 +269,7 @@ subroutine homogenization_init
+ + homogenization_maxNgrains * (1 + crystallite_maxSizePostResults & ! crystallite size & crystallite results
+ + 1 + constitutive_plasticity_maxSizePostResults & ! constitutive size & constitutive results
+ + constitutive_source_maxSizePostResults)
++ materialpoint_sizeResults = 0
+ allocate(materialpoint_results(materialpoint_sizeResults,theMesh%elem%nIPs,theMesh%nElems))
+
+ write(6,'(/,a)') ' <<<+- homogenization init -+>>>'
+@@ -682,6 +683,7 @@ subroutine materialpoint_postResults
+ i, & !< integration point number
+ e !< element number
+
++ return
+ !$OMP PARALLEL DO PRIVATE(myNgrains,myCrystallite,thePos,theSize)
+ elementLooping: do e = FEsolving_execElem(1),FEsolving_execElem(2)
+ myNgrains = homogenization_Ngrains(mesh_element(3,e))
+--
+2.21.0
+

@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+# -*- coding: UTF-8 no BOM -*-
+
+import os,vtk
+import numpy as np
+import argparse
+import damask
+from vtk.util import numpy_support
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID   = ' '.join([scriptName,damask.version])
+
+# --------------------------------------------------------------------
+# MAIN
+# --------------------------------------------------------------------
+parser = argparse.ArgumentParser()
+
+#ToDo:  We need to decide on a way of handling arguments of variable lentght
+#https://stackoverflow.com/questions/15459997/passing-integer-lists-to-python
+
+#parser.add_argument('--version', action='version', version='%(prog)s {}'.format(scriptID))
+parser.add_argument('filenames', nargs='+',
+                    help='DADF5 files')
+
+options = parser.parse_args()
+
+options.labels = ['Fe','Fp','xi_sl']
+
+# --- loop over input files ------------------------------------------------------------------------
+
+for filename in options.filenames:
+  results = damask.DADF5(filename)
+
+  if results.structured:       # for grid solvers use rectilinear grid
+    rGrid = vtk.vtkRectilinearGrid()
+    coordArray = [vtk.vtkDoubleArray(),
+                  vtk.vtkDoubleArray(),
+                  vtk.vtkDoubleArray(),
+                 ]
+
+    rGrid.SetDimensions(*(results.grid+1))
+    for dim in [0,1,2]:
+      for c in np.linspace(0,results.size[dim],1+results.grid[dim]):
+        coordArray[dim].InsertNextValue(c)
+
+    rGrid.SetXCoordinates(coordArray[0])
+    rGrid.SetYCoordinates(coordArray[1])
+    rGrid.SetZCoordinates(coordArray[2])
+
+
+  for i,inc in enumerate(results.increments):
+    print('Output step {}/{}'.format(i+1,len(results.increments)))
+    vtk_data = []
+    results.active['increments'] = [inc]
+    for label in options.labels:
+      for o in results.c_output_types:
+        results.active['c_output_types'] = [o]
+        if o != 'generic':
+          for c in results.constituents:
+            results.active['constituents'] = [c]
+            x = results.get_dataset_location(label)
+            if len(x) == 0:
+              continue
+            array = results.read_dataset(x,0)
+            shape = [array.shape[0],np.product(array.shape[1:])]
+            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
+            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
+            rGrid.GetCellData().AddArray(vtk_data[-1])
+        else:
+          results.active['constituents'] = results.constituents
+          x = results.get_dataset_location(label)
+          if len(x) == 0:
+            continue
+          array = results.read_dataset(x,0)
+          shape = [array.shape[0],np.product(array.shape[1:])]
+          vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True,array_type= vtk.VTK_DOUBLE))
+          vtk_data[-1].SetName('1_'+x[0].split('/')[1]+'/generic/'+label)
+          rGrid.GetCellData().AddArray(vtk_data[-1])
+
+    if results.structured:
+      writer = vtk.vtkXMLRectilinearGridWriter()
+
+    writer.SetCompressorTypeToZLib()
+    writer.SetDataModeToBinary()
+    writer.SetFileName(os.path.join(os.path.split(filename)[0],
+                                    os.path.splitext(os.path.split(filename)[1])[0] +
+                                    '_inc{:04d}'.format(i) +          # ToDo: adjust to length of increments
+                                    '.' + writer.GetDefaultFileExtension()))
+    if results.structured:
+      writer.SetInputData(rGrid)
+
+    writer.Write()

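The script above flattens every per-cell tensor into one row of components before handing it to VTK. A minimal standalone sketch of that step (not part of the commit; the 8-cell 3x3 array and the dataset name are made up for illustration):

import numpy as np
import vtk
from vtk.util import numpy_support

array = np.random.rand(8, 3, 3)                        # e.g. one 3x3 deformation gradient per cell
shape = [array.shape[0], np.prod(array.shape[1:])]     # (8, 9): one flat component row per cell
vtk_array = numpy_support.numpy_to_vtk(num_array=array.reshape(shape),
                                       deep=True, array_type=vtk.VTK_DOUBLE)
vtk_array.SetName('1_constituent/plastic/Fp')          # hypothetical dataset path used as array name
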
@@ -14,6 +14,7 @@ from .asciitable import ASCIItable # noqa
 from .config import Material # noqa
 from .colormaps import Colormap, Color # noqa
 from .orientation import Symmetry, Lattice, Rotation, Orientation # noqa
+from .dadf5 import DADF5 # noqa
 
 #from .block import Block # only one class
 from .result import Result # noqa

@@ -0,0 +1,112 @@
+# -*- coding: UTF-8 no BOM -*-
+import h5py
+import re
+import numpy as np
+
+# ------------------------------------------------------------------
+class DADF5():
+  """Read and write to DADF5 files"""
+
+# ------------------------------------------------------------------
+  def __init__(self,
+               filename,
+               mode = 'r',
+              ):
+
+    if mode not in ['a','r']:
+      print('Invalid file access mode')
+    with h5py.File(filename,mode):
+      pass
+
+    with h5py.File(filename,'r') as f:
+
+      if f.attrs['DADF5-major'] != 0 or f.attrs['DADF5-minor'] != 1:
+        raise TypeError('Unsupported DADF5 version {} '.format(f.attrs['DADF5-version']))
+
+      self.structured = 'grid' in f['mapping'].attrs.keys()
+
+      if self.structured:
+        self.grid = f['mapping'].attrs['grid']
+        self.size = f['mapping'].attrs['size']
+
+      r=re.compile('inc[0-9]+')
+      self.increments = [{'inc':  int(u[3:]),
+                          'time': round(f[u].attrs['time/s'],12),
+                          } for u in f.keys() if r.match(u)]
+
+      self.constituents   = np.unique(f['mapping/cellResults/constituent']['Name']).tolist()       # ToDo: I am not to happy with the name
+      self.constituents   = [c.decode() for c in self.constituents]
+
+      self.materialpoints = np.unique(f['mapping/cellResults/materialpoint']['Name']).tolist()     # ToDo: I am not to happy with the name
+      self.materialpoints = [m.decode() for m in self.materialpoints]
+
+      self.Nconstituents   = [i for i in range(np.shape(f['mapping/cellResults/constituent'])[1])]
+      self.Nmaterialpoints = np.shape(f['mapping/cellResults/constituent'])[0]
+
+      self.c_output_types = []
+      for c in self.constituents:
+        for o in f['inc{:05}/constituent/{}'.format(self.increments[0]['inc'],c)].keys():
+          self.c_output_types.append(o)
+      self.c_output_types = list(set(self.c_output_types))                                          # make unique
+
+    self.active= {'increments':     self.increments,
+                  'constituents':   self.constituents,
+                  'materialpoints': self.materialpoints,
+                  'constituent':    self.Nconstituents,
+                  'c_output_types': self.c_output_types}
+
+    self.filename = filename
+    self.mode     = mode
+
+
+  def list_data(self):
+    """Shows information on all datasets in the file"""
+    with h5py.File(self.filename,'r') as f:
+      group_inc = 'inc{:05}'.format(self.active['increments'][0]['inc'])
+      for c in self.active['constituents']:
+        print('\n'+c)
+        group_constituent = group_inc+'/constituent/'+c
+        for t in self.active['c_output_types']:
+          print('  {}'.format(t))
+          group_output_types = group_constituent+'/'+t
+          try:
+            for x in f[group_output_types].keys():
+              print('    {} ({})'.format(x,f[group_output_types+'/'+x].attrs['Description'].decode()))
+          except:
+            pass
+
+
+  def get_dataset_location(self,label):
+    """Returns the location of all active datasets with given label"""
+    path = []
+    with h5py.File(self.filename,'r') as f:
+      for i in self.active['increments']:
+        group_inc = 'inc{:05}'.format(i['inc'])
+        for c in self.active['constituents']:
+          group_constituent = group_inc+'/constituent/'+c
+          for t in self.active['c_output_types']:
+            try:
+              f[group_constituent+'/'+t+'/'+label]
+              path.append(group_constituent+'/'+t+'/'+label)
+            except:
+              pass
+    return path
+
+
+  def read_dataset(self,path,c):
+    """
+    Dataset for all points/cells
+
+    If more than one path is given, the dataset is composed of the individual contributions
+    """
+    with h5py.File(self.filename,'r') as f:
+      shape = (self.Nmaterialpoints,) + np.shape(f[path[0]])[1:]
+      dataset = np.full(shape,np.nan)
+      for pa in path:
+        label = pa.split('/')[2]
+        p = np.where(f['mapping/cellResults/constituent'][:,c]['Name'] == str.encode(label))[0]
+        u = (f['mapping/cellResults/constituent'][p,c]['Position'])
+        dataset[p,:] = f[pa][u,:]
+
+    return dataset

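A minimal usage sketch of the DADF5 class added above (not part of the commit). It assumes the damask Python package from this branch is importable and that a result file named my_job.hdf5 exists; the label 'xi_sl' is taken from the labels used in DADF5_vtk_cells.py:

import damask

results = damask.DADF5('my_job.hdf5')
results.list_data()                                        # print available datasets per constituent

results.active['increments'] = [results.increments[-1]]    # restrict queries to the last increment
locations = results.get_dataset_location('xi_sl')          # paths of all matching active datasets
if locations:
  xi = results.read_dataset(locations, 0)                  # data of constituent 0, one row per cell
  print(xi.shape)
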
@@ -70,6 +70,8 @@ module HDF5_utilities
 module procedure HDF5_addAttribute_str
 module procedure HDF5_addAttribute_int
 module procedure HDF5_addAttribute_real
+module procedure HDF5_addAttribute_int_array
+module procedure HDF5_addAttribute_real_array
 end interface HDF5_addAttribute
 
 

@@ -270,6 +272,7 @@ logical function HDF5_objectExists(loc_id,path)
 
 integer(HID_T), intent(in) :: loc_id
 character(len=*), intent(in), optional :: path
 
 integer :: hdferr
 character(len=256) :: p

@@ -298,6 +301,7 @@ subroutine HDF5_addAttribute_str(loc_id,attrLabel,attrValue,path)
 integer(HID_T), intent(in) :: loc_id
 character(len=*), intent(in) :: attrLabel, attrValue
 character(len=*), intent(in), optional :: path
 
 integer :: hdferr
 integer(HID_T) :: attr_id, space_id, type_id
 logical :: attrExists

@@ -344,8 +348,9 @@ subroutine HDF5_addAttribute_int(loc_id,attrLabel,attrValue,path)
 character(len=*), intent(in) :: attrLabel
 integer(pInt), intent(in) :: attrValue
 character(len=*), intent(in), optional :: path
 
 integer :: hdferr
-integer(HID_T) :: attr_id, space_id, type_id
+integer(HID_T) :: attr_id, space_id
 logical :: attrExists
 character(len=256) :: p
 

@@ -356,27 +361,21 @@ subroutine HDF5_addAttribute_int(loc_id,attrLabel,attrValue,path)
 endif
 
 call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5screate_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5screate_f')
-call h5tcopy_f(H5T_NATIVE_INTEGER, type_id, hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5tcopy_f')
-call h5tset_size_f(type_id, 1_HSIZE_T, hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5tset_size_f')
 call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5aexists_by_name_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5aexists_by_name_f')
 if (attrExists) then
 call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5adelete_by_name_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5adelete_by_name_f')
 endif
-call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),type_id,space_id,attr_id,hdferr)
+call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_INTEGER,space_id,attr_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5acreate_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5acreate_f')
-call h5awrite_f(attr_id, type_id, attrValue, int([1],HSIZE_T), hdferr)
+call h5awrite_f(attr_id, H5T_NATIVE_INTEGER, attrValue, int([1],HSIZE_T), hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5awrite_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5awrite_f')
 call h5aclose_f(attr_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5aclose_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5tclose_f')
-call h5tclose_f(type_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5tclose_f')
 call h5sclose_f(space_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pInt: h5sclose_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int: h5sclose_f')
 
 end subroutine HDF5_addAttribute_int
 

@@ -390,8 +389,9 @@ subroutine HDF5_addAttribute_real(loc_id,attrLabel,attrValue,path)
 character(len=*), intent(in) :: attrLabel
 real(pReal), intent(in) :: attrValue
 character(len=*), intent(in), optional :: path
 
 integer :: hdferr
-integer(HID_T) :: attr_id, space_id, type_id
+integer(HID_T) :: attr_id, space_id
 logical :: attrExists
 character(len=256) :: p
 

@@ -402,31 +402,113 @@ subroutine HDF5_addAttribute_real(loc_id,attrLabel,attrValue,path)
 endif
 
 call h5screate_f(H5S_SCALAR_F,space_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5screate_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5screate_f')
-call h5tcopy_f(H5T_NATIVE_DOUBLE, type_id, hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5tcopy_f')
-call h5tset_size_f(type_id, 8_HSIZE_T, hdferr) ! ToDo
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5tset_size_f')
 call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5aexists_by_name_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5aexists_by_name_f')
 if (attrExists) then
 call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5adelete_by_name_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5adelete_by_name_f')
 endif
-call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),type_id,space_id,attr_id,hdferr)
+call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_DOUBLE,space_id,attr_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5acreate_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5acreate_f')
-call h5awrite_f(attr_id, type_id, attrValue, int([1],HSIZE_T), hdferr)
+call h5awrite_f(attr_id, H5T_NATIVE_DOUBLE, attrValue, int([1],HSIZE_T), hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5awrite_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5awrite_f')
 call h5aclose_f(attr_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5aclose_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5tclose_f')
-call h5tclose_f(type_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5tclose_f')
 call h5sclose_f(space_id,hdferr)
-if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_pReal: h5sclose_f')
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_real: h5sclose_f')
 
 end subroutine HDF5_addAttribute_real
 
 
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a integer attribute to the path given relative to the location
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_addAttribute_int_array(loc_id,attrLabel,attrValue,path)
+
+integer(HID_T), intent(in) :: loc_id
+character(len=*), intent(in) :: attrLabel
+integer(pInt), intent(in), dimension(:) :: attrValue
+character(len=*), intent(in), optional :: path
+
+integer :: hdferr
+integer(HID_T) :: attr_id, space_id
+integer(HSIZE_T),dimension(1) :: array_size
+logical :: attrExists
+character(len=256) :: p
+
+if (present(path)) then
+p = trim(path)
+else
+p = '.'
+endif
+
+array_size = size(attrValue,kind=HSIZE_T)
+
+call h5screate_simple_f(1, array_size, space_id, hdferr, array_size)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5screate_f')
+call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
+if (attrExists) then
+call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5adelete_by_name_f')
+endif
+call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_INTEGER,space_id,attr_id,hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5acreate_f')
+call h5awrite_f(attr_id, H5T_NATIVE_INTEGER, attrValue, array_size, hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5awrite_f')
+call h5aclose_f(attr_id,hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5tclose_f')
+call h5sclose_f(space_id,hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5sclose_f')
+
+end subroutine HDF5_addAttribute_int_array
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a real attribute to the path given relative to the location
+!--------------------------------------------------------------------------------------------------
+subroutine HDF5_addAttribute_real_array(loc_id,attrLabel,attrValue,path)
+
+integer(HID_T), intent(in) :: loc_id
+character(len=*), intent(in) :: attrLabel
+real(pReal), intent(in), dimension(:) :: attrValue
+character(len=*), intent(in), optional :: path
+
+integer :: hdferr
+integer(HID_T) :: attr_id, space_id
+integer(HSIZE_T),dimension(1) :: array_size
+logical :: attrExists
+character(len=256) :: p
+
+if (present(path)) then
+p = trim(path)
+else
+p = '.'
+endif
+
+array_size = size(attrValue,kind=HSIZE_T)
+
+call h5screate_simple_f(1, array_size, space_id, hdferr, array_size)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5screate_f')
+call h5aexists_by_name_f(loc_id,trim(p),attrLabel,attrExists,hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5aexists_by_name_f')
+if (attrExists) then
+call h5adelete_by_name_f(loc_id, trim(p), attrLabel, hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5adelete_by_name_f')
+endif
+call h5acreate_by_name_f(loc_id,trim(p),trim(attrLabel),H5T_NATIVE_DOUBLE,space_id,attr_id,hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5acreate_f')
+call h5awrite_f(attr_id, H5T_NATIVE_DOUBLE, attrValue, array_size, hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5awrite_f')
+call h5aclose_f(attr_id,hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5tclose_f')
+call h5sclose_f(space_id,hdferr)
+if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_addAttribute_int_array: h5sclose_f')
+
+end subroutine HDF5_addAttribute_real_array
+
+
 !--------------------------------------------------------------------------------------------------
 !> @brief set link to object in results file
 !--------------------------------------------------------------------------------------------------

@@ -1108,13 +1108,11 @@ subroutine constitutive_results
 integer :: p
 character(len=256) :: group
 
-call HDF5_closeGroup(results_addGroup('current/constitutive'))
 
 do p=1,size(config_name_phase)
-group = trim('current/constitutive')//'/'//trim(config_name_phase(p))
+group = trim('current/constituent')//'/'//trim(config_name_phase(p))
 call HDF5_closeGroup(results_addGroup(group))
 
-group = trim(group)//'/'//'plastic'
+group = trim(group)//'/plastic'
 
 call HDF5_closeGroup(results_addGroup(group))
 select case(material_phase_plasticity_type(p))

@@ -1077,7 +1077,7 @@ end function crystallite_postResults
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief writes constitutive results to HDF5 output file
+!> @brief writes crystallite results to HDF5 output file
 !--------------------------------------------------------------------------------------------------
 subroutine crystallite_results
 #if defined(PETSc) || defined(DAMASK_HDF5)

@@ -1097,11 +1097,11 @@ subroutine crystallite_results
 type(rotation), allocatable, dimension(:) :: selected_rotations
 character(len=256) :: group,lattice_label
 
-call HDF5_closeGroup(results_addGroup('current/constituent'))
 
 do p=1,size(config_name_phase)
-group = trim('current/constituent')//'/'//trim(config_name_phase(p))
+group = trim('current/constituent')//'/'//trim(config_name_phase(p))//'/generic'
 
 call HDF5_closeGroup(results_addGroup(group))
 
 do o = 1, size(output_constituent(p)%label)
 select case (output_constituent(p)%label(o))
 case('f')

@@ -358,6 +358,11 @@ program DAMASK_spectral
 enddo
 close(fileUnit)
 
+call results_openJobFile
+call results_addAttribute('grid',grid,'mapping')
+call results_addAttribute('size',geomSize,'mapping')
+call results_closeJobFile
+
 !--------------------------------------------------------------------------------------------------
 ! doing initialization depending on active solvers
 call Utilities_init()

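The hunk above writes the grid dimensions and physical size as attributes of the 'mapping' group in the DADF5 result file; the DADF5 Python class reads exactly these attributes. A sketch (not part of the commit) of how they can be inspected directly with h5py, assuming a result file named my_job.hdf5:

import h5py

with h5py.File('my_job.hdf5', 'r') as f:
  grid = f['mapping'].attrs['grid']   # integer array written via results_addAttribute('grid',...)
  size = f['mapping'].attrs['size']   # real array written via results_addAttribute('size',...)
  print(grid, size)
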
@@ -28,6 +28,17 @@ module results
 
 end interface results_writeDataset
 
+interface results_addAttribute
+
+module procedure results_addAttribute_real
+module procedure results_addAttribute_int
+module procedure results_addAttribute_str
+
+module procedure results_addAttribute_int_array
+module procedure results_addAttribute_real_array
+
+end interface results_addAttribute
+
 public :: &
 results_init, &
 results_openJobFile, &

@@ -105,6 +116,9 @@ subroutine results_addIncrement(inc,time)
 call results_setLink(trim('inc'//trim(adjustl(incChar))),'current')
 call HDF5_addAttribute(resultsFile,'time/s',time,trim('inc'//trim(adjustl(incChar))))
 
+call HDF5_closeGroup(results_addGroup('current/constituent'))
+call HDF5_closeGroup(results_addGroup('current/materialpoint'))
+
 end subroutine results_addIncrement
 
 !--------------------------------------------------------------------------------------------------

@@ -144,15 +158,67 @@ end subroutine results_setLink
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief adds an attribute to an object
+!> @brief adds a string attribute to an object in the results file
 !--------------------------------------------------------------------------------------------------
-subroutine results_addAttribute(attrLabel,attrValue,path)
+subroutine results_addAttribute_str(attrLabel,attrValue,path)
 
 character(len=*), intent(in) :: attrLabel, attrValue, path
 
-call HDF5_addAttribute_str(resultsFile,attrLabel, attrValue, path)
+call HDF5_addAttribute(resultsFile,attrLabel, attrValue, path)
 
-end subroutine results_addAttribute
+end subroutine results_addAttribute_str
 
 
+!--------------------------------------------------------------------------------------------------
+!> @brief adds an integer attribute an object in the results file
+!--------------------------------------------------------------------------------------------------
+subroutine results_addAttribute_int(attrLabel,attrValue,path)
+
+character(len=*), intent(in) :: attrLabel, path
+integer, intent(in) :: attrValue
+
+call HDF5_addAttribute(resultsFile,attrLabel, attrValue, path)
+
+end subroutine results_addAttribute_int
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a real attribute an object in the results file
+!--------------------------------------------------------------------------------------------------
+subroutine results_addAttribute_real(attrLabel,attrValue,path)
+
+character(len=*), intent(in) :: attrLabel, path
+real(pReal), intent(in) :: attrValue
+
+call HDF5_addAttribute(resultsFile,attrLabel, attrValue, path)
+
+end subroutine results_addAttribute_real
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds an integer array attribute an object in the results file
+!--------------------------------------------------------------------------------------------------
+subroutine results_addAttribute_int_array(attrLabel,attrValue,path)
+
+character(len=*), intent(in) :: attrLabel, path
+integer, intent(in), dimension(:) :: attrValue
+
+call HDF5_addAttribute(resultsFile,attrLabel, attrValue, path)
+
+end subroutine results_addAttribute_int_array
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief adds a real array attribute an object in the results file
+!--------------------------------------------------------------------------------------------------
+subroutine results_addAttribute_real_array(attrLabel,attrValue,path)
+
+character(len=*), intent(in) :: attrLabel, path
+real(pReal), intent(in), dimension(:) :: attrValue
+
+call HDF5_addAttribute(resultsFile,attrLabel, attrValue, path)
+
+end subroutine results_addAttribute_real_array
+
+
 !--------------------------------------------------------------------------------------------------

@@ -190,6 +256,8 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIunit)
 call HDF5_addAttribute(groupHandle,'Description',description,label)
 if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
+if (HDF5_objectExists(groupHandle,label)) &
+call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeScalarDataset_real

@@ -215,6 +283,8 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIunit)
 call HDF5_addAttribute(groupHandle,'Description',description,label)
 if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
+if (HDF5_objectExists(groupHandle,label)) &
+call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeVectorDataset_real

@@ -241,6 +311,8 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIunit)
 call HDF5_addAttribute(groupHandle,'Description',description,label)
 if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
+if (HDF5_objectExists(groupHandle,label)) &
+call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeTensorDataset_real

@@ -267,6 +339,8 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit)
 call HDF5_addAttribute(groupHandle,'Description',description,label)
 if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
+if (HDF5_objectExists(groupHandle,label)) &
+call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeVectorDataset_int

@@ -293,6 +367,8 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit)
 call HDF5_addAttribute(groupHandle,'Description',description,label)
 if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
+if (HDF5_objectExists(groupHandle,label)) &
+call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeTensorDataset_int

@@ -321,6 +397,8 @@ subroutine results_writeScalarDataset_rotation(group,dataset,label,description,lattice_structure)
 call HDF5_addAttribute(groupHandle,'Description',description,label)
 if (HDF5_objectExists(groupHandle,label) .and. present(lattice_structure)) &
 call HDF5_addAttribute(groupHandle,'Lattice',lattice_structure,label)
+if (HDF5_objectExists(groupHandle,label)) &
+call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeScalarDataset_rotation

@@ -432,7 +510,7 @@ subroutine results_mapping_constituent(phaseAt,memberAt,label)
 !---------------------------------------------------------------------------------------------------
 ! renumber member from my process to all processes
 do i = 1, size(label)
-where(phaseAt_perIP == i) memberAt_total = memberAt + sum(memberOffset(i,0:worldrank-1))
+where(phaseAt_perIP == i) memberAt_total = memberAt + sum(memberOffset(i,0:worldrank-1)) -1 ! convert to 0-based
 enddo
 
 !--------------------------------------------------------------------------------------------------

|
||||||
!---------------------------------------------------------------------------------------------------
|
!---------------------------------------------------------------------------------------------------
|
||||||
! renumber member from my process to all processes
|
! renumber member from my process to all processes
|
||||||
do i = 1, size(label)
|
do i = 1, size(label)
|
||||||
where(homogenizationAt_perIP == i) memberAt_total = memberAt + sum(memberOffset(i,0:worldrank-1))
|
where(homogenizationAt_perIP == i) memberAt_total = memberAt + sum(memberOffset(i,0:worldrank-1)) - 1 ! convert to 0-based
|
||||||
enddo
|
enddo
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
|