Merge branch 'misc-improvements' into 'development'

Misc improvements

See merge request damask/DAMASK!300

commit ca71b73799

Makefile
@@ -2,27 +2,20 @@ SHELL = /bin/sh
 ########################################################################################
 # Makefile for the installation of DAMASK
 ########################################################################################
-DAMASK_ROOT = $(shell python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
 .PHONY: all
 all: grid mesh processing
 
 .PHONY: grid
-grid: build/grid
-	@(cd build/grid;make -j${DAMASK_NUM_THREADS} all install;)
+grid:
+	@cmake -B build/grid -DDAMASK_SOLVER=GRID -DCMAKE_INSTALL_PREFIX=${PWD} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP}
+	@cmake --build build/grid --parallel ${DAMASK_NUM_THREADS}
+	@cmake --install build/grid
 
 .PHONY: mesh
-mesh: build/mesh
-	@(cd build/mesh; make -j${DAMASK_NUM_THREADS} all install;)
-
-.PHONY: build/grid
-build/grid:
-	@mkdir -p build/grid
-	@(cd build/grid; cmake -Wno-dev -DDAMASK_SOLVER=GRID -DCMAKE_INSTALL_PREFIX=${DAMASK_ROOT} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
-
-.PHONY: build/mesh
-build/mesh:
-	@mkdir -p build/mesh
-	@(cd build/mesh; cmake -Wno-dev -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${DAMASK_ROOT} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
+mesh:
+	@cmake -B build/mesh -DDAMASK_SOLVER=MESH -DCMAKE_INSTALL_PREFIX=${PWD} -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP}
+	@cmake --build build/mesh --parallel ${DAMASK_NUM_THREADS}
+	@cmake --install build/mesh
 
 .PHONY: clean
 clean:

PRIVATE
@@ -1 +1 @@
-Subproject commit fd99d76d1eaa42fd36971ff2f79a59d98534fc27
+Subproject commit 08f8aea465a1b5e476b584bcae7927d113919b1d

@@ -57,7 +57,7 @@ class Colormap(mpl.colors.ListedColormap):
         ax1.imshow(np.linspace(0,1,self.N).reshape(1,-1),
                    aspect='auto', cmap=self, interpolation='nearest')
         plt.show(block = False)
-        return self.name
+        return 'Colormap: '+self.name
 
 
     @staticmethod

@@ -7,7 +7,8 @@ import warnings
 import numpy as np
 import pandas as pd
 import h5py
-from scipy import ndimage,spatial
+from scipy import ndimage, spatial
+from vtk.util.numpy_support import vtk_to_numpy as vtk_to_np
 
 from . import environment
 from . import VTK
@@ -43,12 +44,15 @@ class Grid:
 
     def __repr__(self):
         """Basic information on grid definition."""
+        mat_min = np.nanmin(self.material)
+        mat_max = np.nanmax(self.material)
+        mat_N = self.N_materials
         return util.srepr([
               f'cells a b c: {util.srepr(self.cells, " x ")}',
               f'size x y z: {util.srepr(self.size, " x ")}',
               f'origin x y z: {util.srepr(self.origin," ")}',
-              f'# materials: {self.N_materials}',
-              f'max material: {np.nanmax(self.material)}',
+              f'# materials: {mat_N}' + ('' if mat_min == 0 and mat_max+1 == mat_N else
+                                         f' (min: {mat_min}, max: {mat_max})')
              ])
 
 
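
For illustration, a minimal standalone sketch of the reporting rule introduced above: the material ID range is only shown when the IDs are not a contiguous, zero-based sequence. The helper name and sample arrays are hypothetical, not part of the diff.

    import numpy as np

    def material_summary(material):
        # Mirror the new __repr__ logic: report min/max only for non-contiguous IDs.
        mat_min = np.nanmin(material)
        mat_max = np.nanmax(material)
        mat_N = len(np.unique(material))        # stands in for Grid.N_materials
        return f'# materials: {mat_N}' + ('' if mat_min == 0 and mat_max+1 == mat_N else
                                          f' (min: {mat_min}, max: {mat_max})')

    print(material_summary(np.array([0,1,2,2])))   # '# materials: 3'
    print(material_summary(np.array([1,5,5,9])))   # '# materials: 3 (min: 1, max: 9)'
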
@@ -104,9 +108,9 @@ class Grid:
     @material.setter
     def material(self,material):
         if len(material.shape) != 3:
-            raise ValueError(f'Invalid material shape {material.shape}.')
+            raise ValueError(f'invalid material shape {material.shape}')
         elif material.dtype not in np.sctypes['float'] + np.sctypes['int']:
-            raise TypeError(f'Invalid material data type {material.dtype}.')
+            raise TypeError(f'invalid material data type {material.dtype}')
         else:
             self._material = np.copy(material)
 
@@ -123,7 +127,7 @@ class Grid:
     @size.setter
     def size(self,size):
         if len(size) != 3 or any(np.array(size) <= 0):
-            raise ValueError(f'Invalid size {size}.')
+            raise ValueError(f'invalid size {size}')
         else:
             self._size = np.array(size)
 
@@ -135,7 +139,7 @@ class Grid:
     @origin.setter
     def origin(self,origin):
         if len(origin) != 3:
-            raise ValueError(f'Invalid origin {origin}.')
+            raise ValueError(f'invalid origin {origin}')
         else:
             self._origin = np.array(origin)
 
@@ -178,6 +182,10 @@ class Grid:
         cells = np.array(v.vtk_data.GetDimensions())-1
         bbox  = np.array(v.vtk_data.GetBounds()).reshape(3,2).T
 
+        for i,c in enumerate([v.vtk_data.GetXCoordinates(),v.vtk_data.GetYCoordinates(),v.vtk_data.GetZCoordinates()]):
+            if not np.allclose(vtk_to_np(c),np.linspace(bbox[0][i],bbox[1][i],cells[i]+1)):
+                raise ValueError('regular grid spacing violated')
+
         return Grid(material = v.get('material').reshape(cells,order='F'),
                     size     = bbox[1] - bbox[0],
                     origin   = bbox[0],
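
The loop added above enforces that every coordinate axis of the loaded .vtr file is evenly spaced between its bounding-box limits. A standalone sketch of that check with plain NumPy arrays (illustrative values, no VTK objects):

    import numpy as np

    def is_regular(coords, lower, upper, n_cells):
        # A rectilinear axis is acceptable only if it matches an evenly spaced grid.
        return np.allclose(coords, np.linspace(lower, upper, n_cells+1))

    print(is_regular(np.linspace(0.0, 1.0, 9), 0.0, 1.0, 8))       # True
    print(is_regular(np.sort(np.random.random(9)), 0.0, 1.0, 8))   # False (almost surely)
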
@@ -211,7 +219,7 @@ class Grid:
         except ValueError:
             header_length,keyword = (-1, 'invalid')
         if not keyword.startswith('head') or header_length < 3:
-            raise TypeError('Header length information missing or invalid')
+            raise TypeError('header length information missing or invalid')
 
         comments = []
         content = f.readlines()
@@ -243,7 +251,7 @@ class Grid:
             i += len(items)
 
         if i != cells.prod():
-            raise TypeError(f'Invalid file: expected {cells.prod()} entries, found {i}')
+            raise TypeError(f'invalid file: expected {cells.prod()} entries, found {i}')
 
         if not np.any(np.mod(material,1) != 0.0):                       # no float present
             material = material.astype('int') - (1 if material.min() > 0 else 0)
@@ -628,7 +636,7 @@ class Grid:
         """
         valid = ['x','y','z']
         if not set(directions).issubset(valid):
-            raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')
+            raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
 
         limits = [None,None] if reflect else [-2,0]
         mat = self.material.copy()
@@ -660,7 +668,7 @@ class Grid:
         """
         valid = ['x','y','z']
         if not set(directions).issubset(valid):
-            raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')
+            raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
 
         mat = np.flip(self.material, (valid.index(d) for d in directions if d in valid))
 
@@ -913,7 +921,7 @@ class Grid:
         """
         valid = ['x','y','z']
         if not set(directions).issubset(valid):
-            raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified.')
+            raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
 
         o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
              [0, np.prod(self.cells[:2]+1), np.prod(self.cells[:2]+1)+1, 1],

@@ -784,7 +784,7 @@ class Result:
 
 
     @staticmethod
-    def _add_IPF_color(q,l):
+    def _add_IPF_color(l,q):
         m = util.scale_to_coprime(np.array(l))
         try:
             lattice = {'fcc':'cF','bcc':'cI','hex':'hP'}[q['meta']['Lattice']]
@@ -805,16 +805,17 @@ class Result:
                      'Creator': 'add_IPF_color'
                      }
                 }
-    def add_IPF_color(self,q,l):
+    def add_IPF_color(self,l,q='O'):
         """
         Add RGB color tuple of inverse pole figure (IPF) color.
 
         Parameters
         ----------
-        q : str
-            Label of the dataset containing the crystallographic orientation as quaternions.
         l : numpy.array of shape (3)
             Lab frame direction for inverse pole figure.
+        q : str
+            Label of the dataset containing the crystallographic orientation as quaternions.
+            Defaults to 'O'.
 
         """
         self._add_generic_pointwise(self._add_IPF_color,{'q':q},{'l':l})
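
With the reordered signature above, the lab-frame direction is now the first argument and the orientation dataset label defaults to 'O'. A hedged usage sketch; the result file name is hypothetical:

    import numpy as np
    import damask

    r = damask.Result('my_simulation.hdf5')      # hypothetical DAMASK result file
    # before this change: r.add_IPF_color('O', np.array([0,0,1]))
    r.add_IPF_color(np.array([0,0,1]))           # direction first, q defaults to 'O'
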

@@ -76,7 +76,8 @@ class VTK:
         nodes : numpy.ndarray of shape (:,3)
             Spatial position of the nodes.
         connectivity : numpy.ndarray of np.dtype = int
-            Cell connectivity (0-based), first dimension determines #Cells, second dimension determines #Nodes/Cell.
+            Cell connectivity (0-based), first dimension determines #Cells,
+            second dimension determines #Nodes/Cell.
         cell_type : str
             Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, TETRA, and HEXAHEDRON.
 
@@ -91,7 +92,9 @@ class VTK:
 
         vtk_data = vtk.vtkUnstructuredGrid()
         vtk_data.SetPoints(vtk_nodes)
-        vtk_data.SetCells(eval(f'vtk.VTK_{cell_type.split("_",1)[-1].upper()}'),cells)
+        cell_types = {'TRIANGLE':vtk.VTK_TRIANGLE, 'QUAD':vtk.VTK_QUAD,
+                      'TETRA'   :vtk.VTK_TETRA,    'HEXAHEDRON':vtk.VTK_HEXAHEDRON}
+        vtk_data.SetCells(cell_types[cell_type.split("_",1)[-1].upper()],cells)
 
         return VTK(vtk_data)
 
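
The explicit cell_types dictionary above replaces the string eval, so an unsupported cell type now fails with a clear KeyError instead of an arbitrary attribute lookup. A minimal standalone sketch of the same mapping (the helper function name is hypothetical):

    import vtk

    cell_types = {'TRIANGLE':   vtk.VTK_TRIANGLE,
                  'QUAD':       vtk.VTK_QUAD,
                  'TETRA':      vtk.VTK_TETRA,
                  'HEXAHEDRON': vtk.VTK_HEXAHEDRON}

    def vtk_cell_type(cell_type):
        # Accepts e.g. 'hexahedron' or 'vtk_HEXAHEDRON', as in the diff's split/upper logic.
        return cell_types[cell_type.split('_',1)[-1].upper()]

    print(vtk_cell_type('hexahedron') == vtk.VTK_HEXAHEDRON)   # True
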

@@ -1,5 +1,6 @@
 import pytest
 import numpy as np
+from vtk.util.numpy_support import numpy_to_vtk as np_to_vtk
 
 from damask import VTK
 from damask import Grid
@@ -57,13 +58,21 @@ class TestGrid:
         new = Grid.load(tmp_path/'default.vtr')
         assert grid_equal(new,default)
 
-    def test_invalid_vtr(self,tmp_path):
+    def test_invalid_no_material(self,tmp_path):
         v = VTK.from_rectilinear_grid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0)
         v.save(tmp_path/'no_materialpoint.vtr',parallel=False)
         with pytest.raises(ValueError):
             Grid.load(tmp_path/'no_materialpoint.vtr')
 
-    def test_invalid_material(self):
+    def test_invalid_spacing(self,tmp_path,default):
+        default.save(tmp_path/'spacing_ok.vtr')
+        vtk = VTK.load(tmp_path/'spacing_ok.vtr')
+        vtk.vtk_data.SetXCoordinates(np_to_vtk(np.sort(np.random.random(default.cells[0]))))
+        vtk.save(tmp_path/'invalid_spacing.vtr',parallel=False)
+        with pytest.raises(ValueError):
+            Grid.load(tmp_path/'invalid_spacing.vtr')
+
+    def test_invalid_material_type(self):
         with pytest.raises(TypeError):
             Grid(np.zeros((3,3,3),dtype='complex'),np.ones(3))
 

@@ -169,7 +169,7 @@ class TestResult:
 
     @pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]])
     def test_add_IPF_color(self,default,d):
-        default.add_IPF_color('O',np.array(d))
+        default.add_IPF_color(d,'O')
         loc = {'O':     default.get_dataset_location('O'),
                'color': default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))}
         qu = default.read_dataset(loc['O']).view(np.double).squeeze()

@@ -149,168 +149,6 @@ subroutine discretization_grid_init(restart)
 end subroutine discretization_grid_init
 
 
-!--------------------------------------------------------------------------------------------------
-!> @brief Parses geometry file
-!> @details important variables have an implicit "save" attribute. Therefore, this function is
-!          supposed to be called only once!
-!--------------------------------------------------------------------------------------------------
-subroutine readGeom(grid,geomSize,origin,material)
-
-  integer, dimension(3), intent(out) :: &
-    grid                                            ! grid (across all processes!)
-  real(pReal), dimension(3), intent(out) :: &
-    geomSize, &                                     ! size (across all processes!)
-    origin                                          ! origin (across all processes!)
-  integer, dimension(:), intent(out), allocatable :: &
-    material
-
-  character(len=:), allocatable :: rawData
-  character(len=65536) :: line
-  integer, allocatable, dimension(:) :: chunkPos
-  integer :: &
-    headerLength = -1, &                            !< length of header (in lines)
-    fileLength, &                                   !< length of the geom file (in characters)
-    fileUnit, &
-    startPos, endPos, &
-    myStat, &
-    l, &                                            !< line counter
-    c, &                                            !< counter for # materials in line
-    o, &                                            !< order of "to" packing
-    e, &                                            !< "element", i.e. spectral collocation point
-    i, j
-
-  grid = -1
-  geomSize = -1.0_pReal
-
-!--------------------------------------------------------------------------------------------------
-! read raw data as stream
-  inquire(file = trim(interface_geomFile), size=fileLength)
-  open(newunit=fileUnit, file=trim(interface_geomFile), access='stream',&
-       status='old', position='rewind', action='read',iostat=myStat)
-  if(myStat /= 0) call IO_error(100,ext_msg=trim(interface_geomFile))
-  allocate(character(len=fileLength)::rawData)
-  read(fileUnit) rawData
-  close(fileUnit)
-
-!--------------------------------------------------------------------------------------------------
-! get header length
-  endPos = index(rawData,IO_EOL)
-  if(endPos <= index(rawData,'head')) then          ! ToDo: Should be 'header'
-    startPos = len(rawData)
-    call IO_error(error_ID=841, ext_msg='readGeom')
-  else
-    chunkPos = IO_stringPos(rawData(1:endPos))
-    if (chunkPos(1) < 2) call IO_error(error_ID=841, ext_msg='readGeom')
-    headerLength = IO_intValue(rawData(1:endPos),chunkPos,1)
-    startPos = endPos + 1
-  endif
-
-!--------------------------------------------------------------------------------------------------
-! read and interpret header
-  origin = 0.0_pReal
-  l = 0
-  do while (l < headerLength .and. startPos < len(rawData))
-    endPos = startPos + index(rawData(startPos:),IO_EOL) - 1
-    if (endPos < startPos) endPos = len(rawData)    ! end of file without new line
-    line = rawData(startPos:endPos)
-    startPos = endPos + 1
-    l = l + 1
-
-    chunkPos = IO_stringPos(trim(line))
-    if (chunkPos(1) < 2) cycle                      ! need at least one keyword value pair
-
-    select case (IO_lc(IO_StringValue(trim(line),chunkPos,1)) )
-      case ('grid')
-        if (chunkPos(1) > 6) then
-          do j = 2,6,2
-            select case (IO_lc(IO_stringValue(line,chunkPos,j)))
-              case('a')
-                grid(1) = IO_intValue(line,chunkPos,j+1)
-              case('b')
-                grid(2) = IO_intValue(line,chunkPos,j+1)
-              case('c')
-                grid(3) = IO_intValue(line,chunkPos,j+1)
-            end select
-          enddo
-        endif
-
-      case ('size')
-        if (chunkPos(1) > 6) then
-          do j = 2,6,2
-            select case (IO_lc(IO_stringValue(line,chunkPos,j)))
-              case('x')
-                geomSize(1) = IO_floatValue(line,chunkPos,j+1)
-              case('y')
-                geomSize(2) = IO_floatValue(line,chunkPos,j+1)
-              case('z')
-                geomSize(3) = IO_floatValue(line,chunkPos,j+1)
-            end select
-          enddo
-        endif
-
-      case ('origin')
-        if (chunkPos(1) > 6) then
-          do j = 2,6,2
-            select case (IO_lc(IO_stringValue(line,chunkPos,j)))
-              case('x')
-                origin(1) = IO_floatValue(line,chunkPos,j+1)
-              case('y')
-                origin(2) = IO_floatValue(line,chunkPos,j+1)
-              case('z')
-                origin(3) = IO_floatValue(line,chunkPos,j+1)
-            end select
-          enddo
-        endif
-
-    end select
-
-  enddo
-
-!--------------------------------------------------------------------------------------------------
-! sanity checks
-  if(any(grid < 1)) &
-    call IO_error(error_ID = 842, ext_msg='grid (readGeom)')
-  if(any(geomSize < 0.0_pReal)) &
-    call IO_error(error_ID = 842, ext_msg='size (readGeom)')
-
-  allocate(material(product(grid)), source = -1)    ! too large in case of MPI (shrink later, not very elegant)
-
-!--------------------------------------------------------------------------------------------------
-! read and interpret content
-  e = 1
-  do while (startPos < len(rawData))
-    endPos = startPos + index(rawData(startPos:),IO_EOL) - 1
-    if (endPos < startPos) endPos = len(rawData)    ! end of file without new line
-    line = rawData(startPos:endPos)
-    startPos = endPos + 1
-    l = l + 1
-    chunkPos = IO_stringPos(trim(line))
-
-    noCompression: if (chunkPos(1) /= 3) then
-      c = chunkPos(1)
-      material(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)]
-    else noCompression
-      compression: if (IO_lc(IO_stringValue(line,chunkPos,2)) == 'of') then
-        c = IO_intValue(line,chunkPos,1)
-        material(e:e+c-1) = [(IO_intValue(line,chunkPos,3),i = 1,IO_intValue(line,chunkPos,1))]
-      else if (IO_lc(IO_stringValue(line,chunkPos,2)) == 'to') then compression
-        c = abs(IO_intValue(line,chunkPos,3) - IO_intValue(line,chunkPos,1)) + 1
-        o = merge(+1, -1, IO_intValue(line,chunkPos,3) > IO_intValue(line,chunkPos,1))
-        material(e:e+c-1) = [(i, i = IO_intValue(line,chunkPos,1),IO_intValue(line,chunkPos,3),o)]
-      else compression
-        c = chunkPos(1)
-        material(e:e+c-1) = [(IO_intValue(line,chunkPos,i+1), i=0, c-1)]
-      endif compression
-    endif noCompression
-
-    e = e+c
-  end do
-
-  if (e-1 /= product(grid)) call IO_error(error_ID = 843, el=e)
-
-end subroutine readGeom
-
-
 !--------------------------------------------------------------------------------------------------
 !> @brief Parse vtk rectilinear grid (.vtr)
 !> @details https://vtk.org/Wiki/VTK_XML_Formats
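
The removed readGeom subroutine parsed the legacy geom format, including its run-length notation: 'N of M' stands for the ID M repeated N times, and 'A to B' expands to the inclusive range from A to B (ascending or descending). A small Python sketch of that expansion rule, for reference only (not part of the commit):

    def expand(tokens):
        # One line of legacy geom content: 'N of M', 'A to B', or a plain list of IDs.
        if len(tokens) == 3 and tokens[1].lower() == 'of':
            return [int(tokens[2])] * int(tokens[0])
        if len(tokens) == 3 and tokens[1].lower() == 'to':
            a, b = int(tokens[0]), int(tokens[2])
            step = 1 if b > a else -1
            return list(range(a, b+step, step))
        return [int(t) for t in tokens]

    print(expand('4 of 2'.split()))     # [2, 2, 2, 2]
    print(expand('1 to 5'.split()))     # [1, 2, 3, 4, 5]
    print(expand('3 3 7 7'.split()))    # [3, 3, 7, 7]
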
@@ -566,12 +404,12 @@ subroutine readVTR(grid,geomSize,origin,material)
       size_deflated = temp(4:)
       bytes_inflated = base64_to_bytes(base64_str(base64_nChar(headerLen)+1_pI64:))
 
-      allocate(bytes(0))
+      allocate(bytes(sum(size_inflated)))
       e = 0_pI64
       do b = 1, nBlock
         s = e + 1_pI64
         e = s + size_deflated(b) - 1_pI64
-        bytes = [bytes,zlib_inflate(bytes_inflated(s:e),size_inflated(b))]
+        bytes(sum(size_inflated(:b-1))+1_pI64:sum(size_inflated(:b))) = zlib_inflate(bytes_inflated(s:e),size_inflated(b))
       enddo
 
   end function asBytes_compressed
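
The change above allocates the inflated buffer once, to the sum of the expected block sizes, and writes each inflated block into its slice instead of growing the array by repeated concatenation. A small NumPy sketch of the same preallocate-and-fill pattern (Python stand-in, not the Fortran code):

    import numpy as np

    def concat_blocks(blocks, sizes):
        # Preallocate the full buffer, then copy each block into its slice.
        out = np.empty(sum(sizes), dtype=np.uint8)
        offsets = np.concatenate(([0], np.cumsum(sizes)))
        for b, block in enumerate(blocks):
            out[offsets[b]:offsets[b+1]] = block
        return out

    blocks = [np.arange(3, dtype=np.uint8), np.arange(5, dtype=np.uint8)]
    print(concat_blocks(blocks, [3, 5]))    # [0 1 2 0 1 2 3 4]
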