Merge branch 'development' into MiscImprovements
Commit d28df815f0
@@ -9,20 +9,23 @@ with open(os.path.join(os.path.dirname(__file__),'VERSION')) as f:
 # classes
 from .environment import Environment # noqa
 from .table import Table # noqa
-from .asciitable import ASCIItable # noqa
+from .vtk import VTK # noqa
 
-from .config import Material # noqa
 from .colormaps import Colormap, Color # noqa
 from .rotation import Rotation # noqa
 from .lattice import Symmetry, Lattice # noqa
 from .orientation import Orientation # noqa
 from .result import Result # noqa
-from .result import Result as DADF5 # noqa
 
 from .geom import Geom # noqa
 from .solver import Solver # noqa
-from .test import Test # noqa
+
+# compatibility hack
+from .result import Result as DADF5 # noqa
+
+# deprecated
+from .asciitable import ASCIItable # noqa
 from .util import extendableOption # noqa
+from .config import Material # noqa
+from .test import Test # noqa
 
 # functions in modules
 from . import mechanics # noqa
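Note: the reordered import block above keeps DADF5 as a plain alias of Result under the "compatibility hack" comment. A quick check (assuming the package is importable as damask):

    import damask

    assert damask.DADF5 is damask.Result   # old name still resolves to the same class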
@@ -1,24 +1,35 @@
 import os
 
 class Environment():
-    __slots__ = [ \
-                  'options',
-                ]
 
     def __init__(self):
         """Read and provide values of DAMASK configuration."""
-        self.options = {}
-        self.__get_options()
+        self.options = self._get_options()
+        try:
+            import tkinter
+            tk = tkinter.Tk()
+            self.screen_width  = tk.winfo_screenwidth()
+            self.screen_height = tk.winfo_screenheight()
+        except Exception:
+            self.screen_width  = 1024
+            self.screen_height = 768
 
     def relPath(self,relative = '.'):
+        """Return absolute path from path relative to DAMASK root."""
         return os.path.join(self.rootDir(),relative)
 
 
     def rootDir(self):
+        """Return DAMASK root path."""
         return os.path.normpath(os.path.join(os.path.realpath(__file__),'../../../'))
 
-    def __get_options(self):
+    def _get_options(self):
+        options = {}
         for item in ['DAMASK_NUM_THREADS',
                      'MSC_ROOT',
                      'MARC_VERSION',
                     ]:
-            self.options[item] = os.environ[item] if item in os.environ else None
+            options[item] = os.environ[item] if item in os.environ else None
 
+        return options
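Note: Environment now also probes the screen size via tkinter and falls back to 1024x768 when no display is available. A minimal usage sketch (options not exported in the shell come back as None):

    import damask

    env = damask.Environment()
    print(env.options['DAMASK_NUM_THREADS'])     # e.g. '4', or None if unset
    print(env.screen_width, env.screen_height)   # tkinter values, or the 1024/768 fallback
    print(env.rootDir())                         # DAMASK root derived from the module location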
@@ -1,13 +1,11 @@
-import os
+import sys
 from io import StringIO
 
 import numpy as np
 from scipy import ndimage
-import vtk
-from vtk.util import numpy_support
 
+from . import VTK
 from . import util
-from . import version
 
 
 class Geom:
@@ -403,56 +401,13 @@ class Geom:
             vtk file to write. If no file is given, a string is returned.
 
         """
-        grid = self.get_grid() + np.ones(3,dtype=int)
-        size = self.get_size()
-        origin = self.get_origin()
-
-        coords = [
-            np.linspace(0,size[0],grid[0]) + origin[0],
-            np.linspace(0,size[1],grid[1]) + origin[1],
-            np.linspace(0,size[2],grid[2]) + origin[2]
-            ]
-
-        rGrid = vtk.vtkRectilinearGrid()
-        coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
-
-        rGrid.SetDimensions(*grid)
-        for d,coord in enumerate(coords):
-            for c in coord:
-                coordArray[d].InsertNextValue(c)
-
-        rGrid.SetXCoordinates(coordArray[0])
-        rGrid.SetYCoordinates(coordArray[1])
-        rGrid.SetZCoordinates(coordArray[2])
-
-        ms = numpy_support.numpy_to_vtk(num_array=self.microstructure.flatten(order='F'),
-                                        array_type=vtk.VTK_INT if self.microstructure.dtype == int else vtk.VTK_FLOAT)
-        ms.SetName('microstructure')
-        rGrid.GetCellData().AddArray(ms)
-
-
-        if not fname:
-            writer = vtk.vtkDataSetWriter()
-            writer.SetHeader('damask.Geom '+version)
-            writer.WriteToOutputStringOn()
+        v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
+        v.add(self.microstructure.flatten(order='F'),'microstructure')
+
+        if fname:
+            v.write(fname)
         else:
-            writer = vtk.vtkXMLRectilinearGridWriter()
-            writer.SetCompressorTypeToZLib()
-            writer.SetDataModeToBinary()
-
-            ext = os.path.splitext(fname)[1]
-            if ext == '':
-                name = fname + '.' + writer.GetDefaultFileExtension()
-            elif ext[1:] == writer.GetDefaultFileExtension():
-                name = fname
-            else:
-                raise ValueError("unknown extension {}".format(ext))
-            writer.SetFileName(name)
-
-        writer.SetInputData(rGrid)
-        writer.Write()
-
-        if not fname: return writer.GetOutputString()
+            sys.stdout.write(v.__repr__())
 
 
     def show(self):
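Note: the method now delegates all VTK handling to the new VTK wrapper. A sketch of the equivalent direct calls (assuming geom is a damask.Geom exposing the grid, size, origin, and microstructure attributes used above):

    from damask import VTK

    v = VTK.from_rectilinearGrid(geom.grid, geom.size, geom.origin)
    v.add(geom.microstructure.flatten(order='F'), 'microstructure')   # one value per cell
    v.write('my_geom')                                                # -> my_geom.vtr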
@@ -0,0 +1,245 @@
+import os
+
+import pandas as pd
+import numpy as np
+import vtk
+from vtk.util.numpy_support import numpy_to_vtk as np_to_vtk
+
+from . import Table
+from . import Environment
+from . import version
+
+
+class VTK:
+    """
+    Spatial visualization (and potentially manipulation).
+
+    High-level interface to VTK.
+    """
+
+    def __init__(self,geom):
+        """
+        Set geometry and topology.
+
+        Parameters
+        ----------
+        geom : subclass of vtk.vtkDataSet
+            Description of geometry and topology. Valid types are vtk.vtkRectilinearGrid,
+            vtk.vtkUnstructuredGrid, or vtk.vtkPolyData.
+
+        """
+        self.geom = geom
+
+
+    @staticmethod
+    def from_rectilinearGrid(grid,size,origin=np.zeros(3)):
+        """
+        Create VTK of type vtk.vtkRectilinearGrid.
+
+        This is the common type for results from the grid solver.
+
+        Parameters
+        ----------
+        grid : numpy.ndarray of shape (3) of np.dtype = int
+            Number of cells.
+        size : numpy.ndarray of shape (3)
+            Physical length.
+        origin : numpy.ndarray of shape (3), optional
+            Spatial origin.
+
+        """
+        coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
+        for dim in [0,1,2]:
+            coords = np.linspace(origin[dim],origin[dim]+size[dim],grid[dim]+1)
+            coordArray[dim].SetArray(np_to_vtk(coords),grid[dim]+1,1)
+
+        geom = vtk.vtkRectilinearGrid()
+        geom.SetDimensions(*(grid+1))
+        geom.SetXCoordinates(coordArray[0])
+        geom.SetYCoordinates(coordArray[1])
+        geom.SetZCoordinates(coordArray[2])
+
+        return VTK(geom)
+
+
+    @staticmethod
+    def from_unstructuredGrid(nodes,connectivity,cell_type):
+        """
+        Create VTK of type vtk.vtkUnstructuredGrid.
+
+        This is the common type for results from FEM solvers.
+
+        Parameters
+        ----------
+        nodes : numpy.ndarray of shape (:,3)
+            Spatial position of the nodes.
+        connectivity : numpy.ndarray of np.dtype = int
+            Cell connectivity (0-based), first dimension determines #Cells, second dimension determines #Nodes/Cell.
+        cell_type : str
+            Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, and HEXAHEDRON.
+
+        """
+        vtk_nodes = vtk.vtkPoints()
+        vtk_nodes.SetData(np_to_vtk(nodes))
+        cells = vtk.vtkCellArray()
+        cells.SetNumberOfCells(connectivity.shape[0])
+        T = np.concatenate((np.ones((connectivity.shape[0],1),dtype=np.int64)*connectivity.shape[1],
+                            connectivity),axis=1).ravel()
+        cells.SetCells(connectivity.shape[0],np_to_vtk(T, deep=True, array_type=vtk.VTK_ID_TYPE))
+
+        geom = vtk.vtkUnstructuredGrid()
+        geom.SetPoints(vtk_nodes)
+        geom.SetCells(eval('vtk.VTK_{}'.format(cell_type.split('_',1)[-1].upper())),cells)
+
+        return VTK(geom)
+
+
+    @staticmethod
+    def from_polyData(points):
+        """
+        Create VTK of type vtk.polyData.
+
+        This is the common type for point-wise data.
+
+        Parameters
+        ----------
+        points : numpy.ndarray of shape (:,3)
+            Spatial position of the points.
+
+        """
+        vtk_points = vtk.vtkPoints()
+        vtk_points.SetData(np_to_vtk(points))
+
+        geom = vtk.vtkPolyData()
+        geom.SetPoints(vtk_points)
+
+        return VTK(geom)
+
+
+    @staticmethod
+    def from_file(fname,dataset_type=None):
+        """
+        Create VTK from file.
+
+        Parameters
+        ----------
+        fname : str
+            Filename for reading. Valid extensions are .vtk, .vtr, .vtu, and .vtp.
+        dataset_type : str, optional
+            Name of the vtk.vtkDataSet subclass when opening a .vtk file. Valid types are vtkRectilinearGrid,
+            vtkUnstructuredGrid, and vtkPolyData.
+
+        """
+        ext = os.path.splitext(fname)[1]
+        if ext == '.vtk':
+            reader = vtk.vtkGenericDataObjectReader()
+            reader.SetFileName(fname)
+            reader.Update()
+            if 'rectilineargrid' in dataset_type.lower():
+                geom = reader.GetRectilinearGridOutput()
+            elif 'unstructuredgrid' in dataset_type.lower():
+                geom = reader.GetUnstructuredGridOutput()
+            elif 'polydata' in dataset_type.lower():
+                geom = reader.GetPolyDataOutput()
+            else:
+                raise TypeError('Unknown dataset type for vtk file {}'.format(dataset_type))
+        else:
+            if ext == '.vtr':
+                reader = vtk.vtkXMLRectilinearGridReader()
+            elif ext == '.vtu':
+                reader = vtk.vtkXMLUnstructuredGridReader()
+            elif ext == '.vtp':
+                reader = vtk.vtkXMLPolyDataReader()
+            else:
+                raise TypeError('Unknown file extension {}'.format(ext))
+
+            reader.SetFileName(fname)
+            reader.Update()
+            geom = reader.GetOutput()
+
+        return VTK(geom)
+
+
+    # ToDo: If extension is given, check for consistency.
+    def write(self,fname):
+        """
+        Write to file.
+
+        Parameters
+        ----------
+        fname : str
+            Filename for writing.
+
+        """
+        if   (isinstance(self.geom,vtk.vtkRectilinearGrid)):
+            writer = vtk.vtkXMLRectilinearGridWriter()
+        elif (isinstance(self.geom,vtk.vtkUnstructuredGrid)):
+            writer = vtk.vtkXMLUnstructuredGridWriter()
+        elif (isinstance(self.geom,vtk.vtkPolyData)):
+            writer = vtk.vtkXMLPolyDataWriter()
+
+        writer.SetFileName('{}.{}'.format(os.path.splitext(fname)[0],
+                                          writer.GetDefaultFileExtension()))
+        writer.SetCompressorTypeToZLib()
+        writer.SetDataModeToBinary()
+        writer.SetInputData(self.geom)
+
+        writer.Write()
+
+
+    # Check https://blog.kitware.com/ghost-and-blanking-visibility-changes/ for missing data
+    # Needs support for pd.DataFrame and/or table
+    def add(self,data,label=None):
+        """Add data to either cells or points."""
+        N_points = self.geom.GetNumberOfPoints()
+        N_cells  = self.geom.GetNumberOfCells()
+
+        if   isinstance(data,np.ndarray):
+            d = np_to_vtk(num_array=data.reshape(data.shape[0],-1),deep=True)
+            d.SetName(label)
+            if   data.shape[0] == N_cells:
+                self.geom.GetCellData().AddArray(d)
+            elif data.shape[0] == N_points:
+                self.geom.GetPointData().AddArray(d)
+        elif isinstance(data,pd.DataFrame):
+            pass
+        elif isinstance(data,Table):
+            pass
+
+
+    def __repr__(self):
+        """ASCII representation of the VTK data."""
+        writer = vtk.vtkDataSetWriter()
+        writer.SetHeader('# DAMASK.VTK v{}'.format(version))
+        writer.WriteToOutputStringOn()
+        writer.SetInputData(self.geom)
+        writer.Write()
+        return writer.GetOutputString()
+
+
+    def show(self):
+        """
+        Render.
+
+        See http://compilatrix.com/article/vtk-1 for further ideas.
+
+        """
+        mapper = vtk.vtkDataSetMapper()
+        mapper.SetInputData(self.geom)
+        actor = vtk.vtkActor()
+        actor.SetMapper(mapper)
+
+        ren = vtk.vtkRenderer()
+
+        renWin = vtk.vtkRenderWindow()
+        renWin.AddRenderer(ren)
+
+        ren.AddActor(actor)
+        ren.SetBackground(0.2,0.2,0.2)
+
+        renWin.SetSize(Environment().screen_width,Environment().screen_height)
+
+        iren = vtk.vtkRenderWindowInteractor()
+        iren.SetRenderWindow(renWin)
+
+        iren.Initialize()
+        renWin.Render()
+        iren.Start()
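Note: a minimal round trip through the new class, using only the methods defined above (array size chosen so the data attaches to cells):

    import numpy as np
    from damask import VTK

    grid = np.array([4,5,6])
    size = np.array([1.0,1.0,1.0])

    v = VTK.from_rectilinearGrid(grid, size)       # 4x5x6 cells, origin defaults to (0,0,0)
    v.add(np.arange(grid.prod()), 'cell_id')       # shape[0] == #cells -> stored as cell data
    v.write('demo')                                # -> demo.vtr (extension from the writer)

    w = VTK.from_file('demo.vtr')                  # XML reader picked from the extension
    print(w)                                       # __repr__ dumps the legacy ASCII form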
@@ -4,18 +4,19 @@ import glob
 import os
 from functools import partial
 
-import vtk
-from vtk.util import numpy_support
 import h5py
 import numpy as np
 
-from . import util
-from . import version
-from . import mechanics
+from . import VTK
+from . import Table
 from . import Rotation
 from . import Orientation
 from . import Environment
 from . import grid_filters
+from . import mechanics
+from . import util
+from . import version
 
 
 class Result:
     """
@@ -26,7 +27,7 @@ class Result:
 
     def __init__(self,fname):
         """
-        Opens an existing DADF5 file.
+        Open an existing DADF5 file.
 
         Parameters
         ----------
@@ -75,14 +76,18 @@ class Result:
         self.mat_physics = list(set(self.mat_physics)) # make unique
 
         self.selection= {'increments': self.increments,
-                         'constituents': self.constituents,
-                         'materialpoints': self.materialpoints,
-                         'con_physics': self.con_physics,
-                         'mat_physics': self.mat_physics}
+                         'constituents': self.constituents,'materialpoints': self.materialpoints,
+                         'con_physics': self.con_physics, 'mat_physics': self.mat_physics
+                        }
 
         self.fname = fname
 
 
+    def __repr__(self):
+        """Show selected data."""
+        return util.srepr(self.list_data())
+
+
     def _manage_selection(self,action,what,datasets):
         """
         Manages the visibility of the groups.
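Note: with the added __repr__, printing a Result gives the same overview as list_data(). Sketch (the file name is just an example):

    import damask

    r = damask.Result('my_job.hdf5')
    print(r)        # same text as r.list_data(), joined line by line via util.srepr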
@@ -151,12 +156,11 @@ class Result:
         selected = []
         for i,time in enumerate(self.times):
             if start <= time <= end:
-                selected.append(self.increments[i])
+                selected.append(self.times[i])
         return selected
 
 
-    def iter_selection(self,what):
+    def iterate(self,what):
         """
         Iterate over selection items by setting each one selected.
 
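Note: iter_selection is renamed to iterate (the old names stay as deprecated aliases further down). Usage sketch, assuming an open Result r:

    for inc in r.iterate('increments'):   # temporarily narrows the selection to one increment
        print(inc)                        # increment labels of the form 'inc...' (cf. the N_digits handling below)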
@@ -225,6 +229,74 @@ class Result:
         """
         self._manage_selection('del',what,datasets)
 
+    # def datamerger(regular expression to filter groups into one copy)
+
+
+    def place(self,datasets,component=0,tagged=False,split=True):
+        """
+        Distribute datasets onto geometry and return Table or (split) dictionary of Tables.
+
+        Must not mix nodal and cell data.
+
+        Only data within
+          - inc?????/constituent/*_*/*
+          - inc?????/materialpoint/*_*/*
+          - inc?????/geometry/*
+        are considered.
+
+        Parameters
+        ----------
+        datasets : iterable or str
+        component : int
+            homogenization component to consider for constituent data
+        tagged : Boolean
+            tag Table.column name with '#component'
+            defaults to False
+        split : Boolean
+            split Table by increment and return dictionary of Tables
+            defaults to True
+
+        """
+        sets = datasets if hasattr(datasets,'__iter__') and not isinstance(datasets,str) \
+               else [datasets]
+        tag = f'#{component}' if tagged else ''
+        tbl = {} if split else None
+        inGeom = {}
+        inData = {}
+        with h5py.File(self.fname,'r') as f:
+            for dataset in sets:
+                for group in self.groups_with_datasets(dataset):
+                    path = os.path.join(group,dataset)
+                    inc,prop,name,cat,item = (path.split('/') + ['']*5)[:5]
+                    key = '/'.join([prop,name+tag])
+                    if key not in inGeom:
+                        if prop == 'geometry':
+                            inGeom[key] = inData[key] = np.arange(self.Nmaterialpoints)
+                        elif prop == 'constituent':
+                            inGeom[key] = np.where(f['mapping/cellResults/constituent'][:,component]['Name'] == str.encode(name))[0]
+                            inData[key] = f['mapping/cellResults/constituent'][inGeom[key],component]['Position']
+                        else:
+                            inGeom[key] = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(name))[0]
+                            inData[key] = f['mapping/cellResults/materialpoint'][inGeom[key].tolist()]['Position']
+                    shape = np.shape(f[path])
+                    data = np.full((self.Nmaterialpoints,) + (shape[1:] if len(shape)>1 else (1,)),
+                                   np.nan,
+                                   dtype=np.dtype(f[path]))
+                    data[inGeom[key]] = (f[path] if len(shape)>1 else np.expand_dims(f[path],1))[inData[key]]
+                    path = (os.path.join(*([prop,name]+([cat] if cat else [])+([item] if item else []))) if split else path)+tag
+                    if split:
+                        try:
+                            tbl[inc].add(path,data)
+                        except KeyError:
+                            tbl[inc] = Table(data.reshape(self.Nmaterialpoints,-1),{path:data.shape[1:]})
+                    else:
+                        try:
+                            tbl.add(path,data)
+                        except AttributeError:
+                            tbl = Table(data.reshape(self.Nmaterialpoints,-1),{path:data.shape[1:]})
+
+        return tbl
 
 
     def groups_with_datasets(self,datasets):
         """
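Note: hedged usage sketch for the new place() (the file name and dataset labels are examples only):

    import damask

    r = damask.Result('my_job.hdf5')

    per_inc = r.place('sigma')                         # split=True: dict of Tables keyed by increment
    for inc,tbl in per_inc.items():
        print(inc, tbl.data.shape)                     # one row per material point, NaN where unmapped

    flat = r.place(['sigma','epsilon'], split=False)   # single Table; column paths keep the 'inc...' prefix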
@@ -262,10 +334,10 @@ class Result:
         groups = []
 
         with h5py.File(self.fname,'r') as f:
-            for i in self.iter_selection('increments'):
+            for i in self.iterate('increments'):
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
-                    for oo in self.iter_selection(o):
-                        for pp in self.iter_selection(p):
+                    for oo in self.iterate(o):
+                        for pp in self.iterate(p):
                             group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue
                             if sets is True:
                                 groups.append(group)
@@ -279,12 +351,12 @@ class Result:
         """Return information on all active datasets in the file."""
         message = ''
         with h5py.File(self.fname,'r') as f:
-            for i in self.iter_selection('increments'):
+            for i in self.iterate('increments'):
                 message+='\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
-                    for oo in self.iter_selection(o):
+                    for oo in self.iterate(o):
                         message+='  {}\n'.format(oo)
-                        for pp in self.iter_selection(p):
+                        for pp in self.iterate(p):
                             message+='    {}\n'.format(pp)
                             group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue
                             for d in f[group].keys():
@@ -301,7 +373,7 @@ class Result:
         """Return the location of all active datasets with given label."""
         path = []
         with h5py.File(self.fname,'r') as f:
-            for i in self.iter_selection('increments'):
+            for i in self.iterate('increments'):
                 k = '/'.join([i,'geometry',label])
                 try:
                     f[k]
@@ -309,8 +381,8 @@ class Result:
                 except KeyError as e:
                     pass
                 for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
-                    for oo in self.iter_selection(o):
-                        for pp in self.iter_selection(p):
+                    for oo in self.iterate(o):
+                        for pp in self.iterate(p):
                             k = '/'.join([i,o[:-1],oo,pp,label])
                             try:
                                 f[k]
@@ -375,7 +447,7 @@ class Result:
     def cell_coordinates(self):
         """Return initial coordinates of the cell centers."""
         if self.structured:
-            return grid_filters.cell_coord0(self.grid,self.size,self.origin)
+            return grid_filters.cell_coord0(self.grid,self.size,self.origin).reshape(-1,3)
         else:
             with h5py.File(self.fname,'r') as f:
                 return f['geometry/x_c'][()]
@@ -958,143 +1030,71 @@ class Result:
         if mode.lower()=='cell':
 
             if self.structured:
-                coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
-                for dim in [0,1,2]:
-                    for c in np.linspace(0,self.size[dim],1+self.grid[dim]):
-                        coordArray[dim].InsertNextValue(c)
-
-                vtk_geom = vtk.vtkRectilinearGrid()
-                vtk_geom.SetDimensions(*(self.grid+1))
-                vtk_geom.SetXCoordinates(coordArray[0])
-                vtk_geom.SetYCoordinates(coordArray[1])
-                vtk_geom.SetZCoordinates(coordArray[2])
+                v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
             else:
-                nodes = vtk.vtkPoints()
                 with h5py.File(self.fname,'r') as f:
-                    nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True))
-
-                    vtk_geom = vtk.vtkUnstructuredGrid()
-                    vtk_geom.SetPoints(nodes)
-                    vtk_geom.Allocate(f['/geometry/T_c'].shape[0])
-
-                    if self.version_major == 0 and self.version_minor <= 5:
-                        vtk_type = vtk.VTK_HEXAHEDRON
-                        n_nodes = 8
-                    else:
-                        if f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TRIANGLE':
-                            vtk_type = vtk.VTK_TRIANGLE
-                            n_nodes = 3
-                        elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'QUAD':
-                            vtk_type = vtk.VTK_QUAD
-                            n_nodes = 4
-                        elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TETRA': # not tested
-                            vtk_type = vtk.VTK_TETRA
-                            n_nodes = 4
-                        elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'HEXAHEDRON':
-                            vtk_type = vtk.VTK_HEXAHEDRON
-                            n_nodes = 8
-
-                    for i in f['/geometry/T_c']:
-                        vtk_geom.InsertNextCell(vtk_type,n_nodes,i-1)
+                    v = VTK.from_unstructuredGrid(f['/geometry/x_n'][()],
+                                                  f['/geometry/T_c'][()]-1,
+                                                  f['/geometry/T_c'].attrs['VTK_TYPE'].decode())
 
         elif mode.lower()=='point':
-            Points = vtk.vtkPoints()
-            Vertices = vtk.vtkCellArray()
-            for c in self.cell_coordinates():
-                pointID = Points.InsertNextPoint(c)
-                Vertices.InsertNextCell(1)
-                Vertices.InsertCellPoint(pointID)
-
-            vtk_geom = vtk.vtkPolyData()
-            vtk_geom.SetPoints(Points)
-            vtk_geom.SetVerts(Vertices)
-            vtk_geom.Modified()
-
-        N_digits = int(np.floor(np.log10(int(self.increments[-1][3:]))))+1
-
-        for i,inc in enumerate(self.iter_selection('increments')):
-            vtk_data = []
+            v = VTK.from_polyData(self.cell_coordinates())
+
+        N_digits = int(np.floor(np.log10(min(int(self.increments[-1][3:]),1))))+1
+
+        for i,inc in enumerate(util.show_progress(self.iterate('increments'),len(self.selection['increments']))):
 
             materialpoints_backup = self.selection['materialpoints'].copy()
             self.pick('materialpoints',False)
             for label in (labels if isinstance(labels,list) else [labels]):
-                for p in self.iter_selection('con_physics'):
+                for p in self.iterate('con_physics'):
                     if p != 'generic':
-                        for c in self.iter_selection('constituents'):
+                        for c in self.iterate('constituents'):
                             x = self.get_dataset_location(label)
                             if len(x) == 0:
                                 continue
                             array = self.read_dataset(x,0)
-                            shape = [array.shape[0],np.product(array.shape[1:])]
-                            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True))
-                            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: hard coded 1!
-                            vtk_geom.GetCellData().AddArray(vtk_data[-1])
+                            v.add(array,'1_'+x[0].split('/',1)[1]) #ToDo: hard coded 1!
                     else:
                         x = self.get_dataset_location(label)
                         if len(x) == 0:
                             continue
                         array = self.read_dataset(x,0)
-                        shape = [array.shape[0],np.product(array.shape[1:])]
-                        vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True))
                         ph_name = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))') # identify phase name
                         dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1]) # removing phase name
-                        vtk_data[-1].SetName(dset_name)
-                        vtk_geom.GetCellData().AddArray(vtk_data[-1])
+                        v.add(array,dset_name)
 
             self.pick('materialpoints',materialpoints_backup)
 
             constituents_backup = self.selection['constituents'].copy()
             self.pick('constituents',False)
             for label in (labels if isinstance(labels,list) else [labels]):
-                for p in self.iter_selection('mat_physics'):
+                for p in self.iterate('mat_physics'):
                     if p != 'generic':
-                        for m in self.iter_selection('materialpoints'):
+                        for m in self.iterate('materialpoints'):
                             x = self.get_dataset_location(label)
                             if len(x) == 0:
                                 continue
                             array = self.read_dataset(x,0)
-                            shape = [array.shape[0],np.product(array.shape[1:])]
-                            vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True))
-                            vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: why 1_?
-                            vtk_geom.GetCellData().AddArray(vtk_data[-1])
+                            v.add(array,'1_'+x[0].split('/',1)[1]) #ToDo: why 1_?
                     else:
                         x = self.get_dataset_location(label)
                         if len(x) == 0:
                             continue
                         array = self.read_dataset(x,0)
-                        shape = [array.shape[0],np.product(array.shape[1:])]
-                        vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True))
-                        vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
-                        vtk_geom.GetCellData().AddArray(vtk_data[-1])
+                        v.add(array,'1_'+x[0].split('/',1)[1])
             self.pick('constituents',constituents_backup)
 
-            if mode.lower()=='cell':
-                writer = vtk.vtkXMLRectilinearGridWriter() if self.structured else \
-                         vtk.vtkXMLUnstructuredGridWriter()
-                x = self.get_dataset_location('u_n')
-                vtk_data.append(numpy_support.numpy_to_vtk(num_array=self.read_dataset(x,0),deep=True))
-                vtk_data[-1].SetName('u')
-                vtk_geom.GetPointData().AddArray(vtk_data[-1])
-            elif mode.lower()=='point':
-                writer = vtk.vtkXMLPolyDataWriter()
-
-            file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
-                                            inc[3:].zfill(N_digits),
-                                            writer.GetDefaultFileExtension())
-
-            writer.SetCompressorTypeToZLib()
-            writer.SetDataModeToBinary()
-            writer.SetFileName(file_out)
-            writer.SetInputData(vtk_geom)
-
-            writer.Write()
+            u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
+            v.add(u,'u')
+
+            file_out = '{}_inc{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
+                                         inc[3:].zfill(N_digits))
+
+            v.write(file_out)
 
 
 ###################################################################################################
 # BEGIN DEPRECATED
-    iter_visible = iter_selection
+    iter_visible = iterate
+    iter_selection = iterate
 
 
     def _time_to_inc(self,start,end):
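Note: the rewritten export loop builds one VTK object and writes it per selected increment via v.write(file_out). A hedged call sketch (the method name to_vtk and the label 'sigma' are assumptions based on the surrounding context, since the method signature is outside this hunk):

    import damask

    r = damask.Result('my_job.hdf5')
    r.to_vtk(['sigma'])              # writes one .vtr/.vtu (cell mode) or .vtp (point mode) per increment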
@@ -348,4 +348,4 @@ class Table:
             f = fname
 
         for line in header + [' '.join(labels)]: f.write(line+'\n')
-        self.data.to_csv(f,sep=' ',index=False,header=False)
+        self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
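Note: na_rep='nan' matters for Tables coming out of the new Result.place, which pads unmapped points with NaN. A small pandas-only illustration of the difference (not DAMASK-specific):

    import io
    import numpy as np
    import pandas as pd

    df = pd.DataFrame({'x':[1.0,np.nan]})
    buf = io.StringIO()
    df.to_csv(buf,sep=' ',na_rep='nan',index=False,header=False)
    print(buf.getvalue())   # '1.0\nnan\n' -- the NaN row was an empty field before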