Merge branch 'development' into MiscImprovements

Martin Diehl 2020-03-19 07:30:03 +01:00
commit d28df815f0
6 changed files with 426 additions and 212 deletions

python/damask/__init__.py

@ -9,20 +9,23 @@ with open(os.path.join(os.path.dirname(__file__),'VERSION')) as f:
# classes
from .environment import Environment # noqa
from .table import Table # noqa
from .asciitable import ASCIItable # noqa
from .config import Material # noqa
from .ktv import VTK # noqa
from .colormaps import Colormap, Color # noqa
from .rotation import Rotation # noqa
from .lattice import Symmetry, Lattice# noqa
from .orientation import Orientation # noqa
from .result import Result # noqa
from .result import Result as DADF5 # noqa
from .geom import Geom # noqa
from .solver import Solver # noqa
from .test import Test # noqa
# compatibility hack
from .result import Result as DADF5 # noqa
# deprecated
from .asciitable import ASCIItable # noqa
from .util import extendableOption # noqa
from .config import Material # noqa
from .test import Test # noqa
# functions in modules
from . import mechanics # noqa
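Editor's note (not part of the diff): after this reorganization the public name is Result; DADF5 survives only as a compatibility alias, so existing scripts keep working. A minimal check, assuming an installed damask package:

import damask
assert damask.DADF5 is damask.Result  # compatibility hack: two names, one class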

python/damask/environment.py

@ -1,24 +1,35 @@
import os
class Environment():
__slots__ = [ \
'options',
]
def __init__(self):
"""Read and provide values of DAMASK configuration."""
self.options = {}
self.__get_options()
self.options = self._get_options()
try:
import tkinter
tk = tkinter.Tk()
self.screen_width = tk.winfo_screenwidth()
self.screen_height = tk.winfo_screenheight()
except Exception:
self.screen_width = 1024
self.screen_height = 768
def relPath(self,relative = '.'):
"""Return absolute path from path relative to DAMASK root."""
return os.path.join(self.rootDir(),relative)
def rootDir(self):
"""Return DAMASK root path."""
return os.path.normpath(os.path.join(os.path.realpath(__file__),'../../../'))
def __get_options(self):
def _get_options(self):
options = {}
for item in ['DAMASK_NUM_THREADS',
'MSC_ROOT',
'MARC_VERSION',
]:
self.options[item] = os.environ[item] if item in os.environ else None
options[item] = os.environ[item] if item in os.environ else None
return options
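Editor's sketch of the refactored behaviour (values invented): _get_options() now returns the dictionary instead of mutating self.options as a side effect, unset variables map to None, and the screen size falls back to 1024x768 when tkinter or a display is unavailable.

import os
from damask import Environment

os.environ['DAMASK_NUM_THREADS'] = '4'      # illustrative value
env = Environment()
print(env.options['DAMASK_NUM_THREADS'])    # '4'
print(env.options['MSC_ROOT'])              # None unless set in the shell
print(env.screen_width,env.screen_height)   # 1024 768 without a display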

python/damask/geom.py

@ -1,13 +1,11 @@
import os
import sys
from io import StringIO
import numpy as np
from scipy import ndimage
import vtk
from vtk.util import numpy_support
from . import VTK
from . import util
from . import version
class Geom:
@ -36,7 +34,7 @@ class Geom:
self.set_origin(origin)
self.set_homogenization(homogenization)
self.set_comments(comments)
def __repr__(self):
"""Basic information on geometry definition."""
@ -71,7 +69,7 @@ class Geom:
origin_old = self.get_origin()
unique_old = len(np.unique(self.microstructure))
max_old = np.nanmax(self.microstructure)
if size is not None and rescale:
raise ValueError('Either set size explicitly or rescale automatically')
@ -109,7 +107,7 @@ class Geom:
if max_old != np.nanmax(self.microstructure):
message[-1] = util.delete(message[-1])
message.append(util.emph('max microstructure: {}'.format(np.nanmax(self.microstructure))))
return util.return_message(message)
@ -125,7 +123,7 @@ class Geom:
"""
self.comments = []
self.add_comments(comments)
def add_comments(self,comments):
"""
@ -261,7 +259,7 @@ class Geom:
header.append('origin x {} y {} z {}'.format(*self.get_origin()))
header.append('homogenization {}'.format(self.get_homogenization()))
return header
@staticmethod
def from_file(fname):
@ -287,7 +285,7 @@ class Geom:
if not keyword.startswith('head') or header_length < 3:
raise TypeError('Header length information missing or invalid')
comments = []
comments = []
for i,line in enumerate(content[:header_length]):
items = line.lower().strip().split()
key = items[0] if items else ''
@ -316,14 +314,14 @@ class Geom:
else: items = list(map(float,items))
microstructure[i:i+len(items)] = items
i += len(items)
if i != grid.prod():
raise TypeError('Invalid file: expected {} entries, found {}'.format(grid.prod(),i))
microstructure = microstructure.reshape(grid,order='F')
if not np.any(np.mod(microstructure.flatten(),1) != 0.0): # no float present
microstructure = microstructure.astype('int')
return Geom(microstructure.reshape(grid),size,origin,homogenization,comments)
@ -341,7 +339,7 @@ class Geom:
"""
header = self.get_header()
grid = self.get_grid()
if pack is None:
plain = grid.prod()/np.unique(self.microstructure).size < 250
else:
@ -392,7 +390,7 @@ class Geom:
elif compressType == 'of':
f.write('{} of {}\n'.format(reps,former))
def to_vtk(self,fname=None):
"""
Generates vtk file.
@ -403,58 +401,15 @@ class Geom:
vtk file to write. If no file is given, a string is returned.
"""
grid = self.get_grid() + np.ones(3,dtype=int)
size = self.get_size()
origin = self.get_origin()
v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
v.add(self.microstructure.flatten(order='F'),'microstructure')
coords = [
np.linspace(0,size[0],grid[0]) + origin[0],
np.linspace(0,size[1],grid[1]) + origin[1],
np.linspace(0,size[2],grid[2]) + origin[2]
]
rGrid = vtk.vtkRectilinearGrid()
coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
rGrid.SetDimensions(*grid)
for d,coord in enumerate(coords):
for c in coord:
coordArray[d].InsertNextValue(c)
rGrid.SetXCoordinates(coordArray[0])
rGrid.SetYCoordinates(coordArray[1])
rGrid.SetZCoordinates(coordArray[2])
ms = numpy_support.numpy_to_vtk(num_array=self.microstructure.flatten(order='F'),
array_type=vtk.VTK_INT if self.microstructure.dtype == int else vtk.VTK_FLOAT)
ms.SetName('microstructure')
rGrid.GetCellData().AddArray(ms)
if not fname:
writer = vtk.vtkDataSetWriter()
writer.SetHeader('damask.Geom '+version)
writer.WriteToOutputStringOn()
if fname:
v.write(fname)
else:
writer = vtk.vtkXMLRectilinearGridWriter()
writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
ext = os.path.splitext(fname)[1]
if ext == '':
name = fname + '.' + writer.GetDefaultFileExtension()
elif ext[1:] == writer.GetDefaultFileExtension():
name = fname
else:
raise ValueError("unknown extension {}".format(ext))
writer.SetFileName(name)
writer.SetInputData(rGrid)
writer.Write()
if not fname: return writer.GetOutputString()
sys.stdout.write(v.__repr__())
def show(self):
"""Show raw content (as in file)."""
f=StringIO()
@ -490,7 +445,7 @@ class Geom:
ms = np.concatenate([ms,ms[:,limits[0]:limits[1]:-1,:]],1)
if 'x' in directions:
ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)
#self.add_comments('geom.py:mirror v{}'.format(version)
return self.update(ms,rescale=True)
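Editor's sketch of the intended use of the slimmed-down to_vtk() above (the geometry file name is hypothetical): the VTK plumbing now lives in the new damask.VTK wrapper.

from damask import Geom

geom = Geom.from_file('20grains16x16x16.geom')   # any valid geom file
geom.to_vtk('20grains16x16x16')                  # VTK.write() appends the .vtr extension
geom.to_vtk()                                    # no fname: ASCII representation to stdout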

python/damask/ktv.py (new file, 245 lines)

@ -0,0 +1,245 @@
import os
import pandas as pd
import numpy as np
import vtk
from vtk.util.numpy_support import numpy_to_vtk as np_to_vtk
from . import Table
from . import Environment
from . import version
class VTK:
"""
Spatial visualization (and potentially manipulation).
High-level interface to VTK.
"""
def __init__(self,geom):
"""
Set geometry and topology.
Parameters
----------
geom : subclass of vtk.vtkDataSet
Description of geometry and topology. Valid types are vtk.vtkRectilinearGrid,
vtk.vtkUnstructuredGrid, or vtk.vtkPolyData.
"""
self.geom = geom
@staticmethod
def from_rectilinearGrid(grid,size,origin=np.zeros(3)):
"""
Create VTK of type vtk.vtkRectilinearGrid.
This is the common type for results from the grid solver.
Parameters
----------
grid : numpy.ndarray of shape (3) of np.dtype = int
Number of cells.
size : numpy.ndarray of shape (3)
Physical length.
origin : numpy.ndarray of shape (3), optional
Spatial origin.
"""
coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
for dim in [0,1,2]:
coords = np.linspace(origin[dim],origin[dim]+size[dim],grid[dim]+1)
coordArray[dim].SetArray(np_to_vtk(coords),grid[dim]+1,1)
geom = vtk.vtkRectilinearGrid()
geom.SetDimensions(*(grid+1))
geom.SetXCoordinates(coordArray[0])
geom.SetYCoordinates(coordArray[1])
geom.SetZCoordinates(coordArray[2])
return VTK(geom)
@staticmethod
def from_unstructuredGrid(nodes,connectivity,cell_type):
"""
Create VTK of type vtk.vtkUnstructuredGrid.
This is the common type for results from FEM solvers.
Parameters
----------
nodes : numpy.ndarray of shape (:,3)
Spatial position of the nodes.
connectivity : numpy.ndarray of np.dtype = int
Cell connectivity (0-based), first dimension determines #Cells, second dimension determines #Nodes/Cell.
cell_type : str
Name of the vtk.vtkCell subclass. Tested for TRIANGLE, QUAD, and HEXAHEDRON.
"""
vtk_nodes = vtk.vtkPoints()
vtk_nodes.SetData(np_to_vtk(nodes))
cells = vtk.vtkCellArray()
cells.SetNumberOfCells(connectivity.shape[0])
T = np.concatenate((np.ones((connectivity.shape[0],1),dtype=np.int64)*connectivity.shape[1],
connectivity),axis=1).ravel()
cells.SetCells(connectivity.shape[0],np_to_vtk(T, deep=True, array_type=vtk.VTK_ID_TYPE))
geom = vtk.vtkUnstructuredGrid()
geom.SetPoints(vtk_nodes)
geom.SetCells(eval('vtk.VTK_{}'.format(cell_type.split('_',1)[-1].upper())),cells)
return VTK(geom)
@staticmethod
def from_polyData(points):
"""
Create VTK of type vtk.vtkPolyData.
This is the common type for point-wise data.
Parameters
----------
points : numpy.ndarray of shape (:,3)
Spatial position of the points.
"""
vtk_points= vtk.vtkPoints()
vtk_points.SetData(np_to_vtk(points))
geom = vtk.vtkPolyData()
geom.SetPoints(vtk_points)
return VTK(geom)
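# --- Editor's sketch (not part of ktv.py): building the two non-grid geometries.
# --- Node coordinates and connectivity describe a single hexahedron and are made up.
import numpy as np
from damask import VTK

nodes = np.array([[0,0,0],[1,0,0],[1,1,0],[0,1,0],
                  [0,0,1],[1,0,1],[1,1,1],[0,1,1]],dtype=float)
connectivity = np.array([[0,1,2,3,4,5,6,7]],dtype=np.int64)   # one cell, 0-based node IDs
hexa = VTK.from_unstructuredGrid(nodes,connectivity,'HEXAHEDRON')
cloud = VTK.from_polyData(np.random.rand(10,3))                # ten arbitrary points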
@staticmethod
def from_file(fname,dataset_type=None):
"""
Create VTK from file.
Parameters
----------
fname : str
Filename for reading. Valid extensions are .vtk, .vtr, .vtu, and .vtp.
dataset_type : str, optional
Name of the vtk.vtkDataSet subclass when opening a .vtk file. Valid types are vtkRectilinearGrid,
vtkUnstructuredGrid, and vtkPolyData.
"""
ext = os.path.splitext(fname)[1]
if ext == '.vtk':
reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(fname)
reader.Update()
if 'rectilineargrid' in dataset_type.lower():
geom = reader.GetRectilinearGridOutput()
elif 'unstructuredgrid' in dataset_type.lower():
geom = reader.GetUnstructuredGridOutput()
elif 'polydata' in dataset_type.lower():
geom = reader.GetPolyDataOutput()
else:
raise TypeError('Unknown dataset type for vtk file {}'.format(dataset_type))
else:
if ext == '.vtr':
reader = vtk.vtkXMLRectilinearGridReader()
elif ext == '.vtu':
reader = vtk.vtkXMLUnstructuredGridReader()
elif ext == '.vtp':
reader = vtk.vtkXMLPolyDataReader()
else:
raise TypeError('Unknown file extension {}'.format(ext))
reader.SetFileName(fname)
reader.Update()
geom = reader.GetOutput()
return VTK(geom)
# ToDo: If extension is given, check for consistency.
def write(self,fname):
"""
Write to file.
Parameters
----------
fname : str
Filename for writing.
"""
if (isinstance(self.geom,vtk.vtkRectilinearGrid)):
writer = vtk.vtkXMLRectilinearGridWriter()
elif(isinstance(self.geom,vtk.vtkUnstructuredGrid)):
writer = vtk.vtkXMLUnstructuredGridWriter()
elif(isinstance(self.geom,vtk.vtkPolyData)):
writer = vtk.vtkXMLPolyDataWriter()
writer.SetFileName('{}.{}'.format(os.path.splitext(fname)[0],
writer.GetDefaultFileExtension()))
writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
writer.SetInputData(self.geom)
writer.Write()
# Check https://blog.kitware.com/ghost-and-blanking-visibility-changes/ for missing data
# Needs support for pd.DataFrame and/or table
def add(self,data,label=None):
"""Add data to either cells or points."""
N_points = self.geom.GetNumberOfPoints()
N_cells = self.geom.GetNumberOfCells()
if isinstance(data,np.ndarray):
d = np_to_vtk(num_array=data.reshape(data.shape[0],-1),deep=True)
d.SetName(label)
if data.shape[0] == N_cells:
self.geom.GetCellData().AddArray(d)
elif data.shape[0] == N_points:
self.geom.GetPointData().AddArray(d)
elif isinstance(data,pd.DataFrame):
pass
elif isinstance(data,Table):
pass
def __repr__(self):
"""ASCII representation of the VTK data."""
writer = vtk.vtkDataSetWriter()
writer.SetHeader('# DAMASK.VTK v{}'.format(version))
writer.WriteToOutputStringOn()
writer.SetInputData(self.geom)
writer.Write()
return writer.GetOutputString()
def show(self):
"""
Render.
See http://compilatrix.com/article/vtk-1 for further ideas.
"""
mapper = vtk.vtkDataSetMapper()
mapper.SetInputData(self.geom)
actor = vtk.vtkActor()
actor.SetMapper(mapper)
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
ren.AddActor(actor)
ren.SetBackground(0.2,0.2,0.2)
renWin.SetSize(Environment().screen_width,Environment().screen_height)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
iren.Initialize()
renWin.Render()
iren.Start()
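End-to-end sketch of the new wrapper (editor's example, values invented): build a rectilinear grid, attach one scalar per cell, write it, and read it back.

import numpy as np
from damask import VTK

grid = np.array([4,5,6])                    # number of cells per direction
size = np.array([1.0,2.0,3.0])              # physical edge lengths
v = VTK.from_rectilinearGrid(grid,size)     # origin defaults to (0,0,0)
v.add(np.arange(grid.prod()),'cell_id')     # length equals the cell count, so it becomes cell data
v.write('example')                          # the .vtr extension is appended automatically
w = VTK.from_file('example.vtr')            # reader chosen from the extension
print(w)                                    # __repr__ gives the legacy ASCII form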

python/damask/result.py

@ -4,18 +4,19 @@ import glob
import os
from functools import partial
import vtk
from vtk.util import numpy_support
import h5py
import numpy as np
from . import util
from . import version
from . import mechanics
from . import VTK
from . import Table
from . import Rotation
from . import Orientation
from . import Environment
from . import grid_filters
from . import mechanics
from . import util
from . import version
class Result:
"""
@ -26,12 +27,12 @@ class Result:
def __init__(self,fname):
"""
Opens an existing DADF5 file.
Open an existing DADF5 file.
Parameters
----------
fname : str
name of the DADF5 file to be opened.
name of the DADF5 file to be opened.
"""
with h5py.File(fname,'r') as f:
@ -75,14 +76,18 @@ class Result:
self.mat_physics = list(set(self.mat_physics)) # make unique
self.selection= {'increments': self.increments,
'constituents': self.constituents,
'materialpoints': self.materialpoints,
'con_physics': self.con_physics,
'mat_physics': self.mat_physics}
'constituents': self.constituents,'materialpoints': self.materialpoints,
'con_physics': self.con_physics, 'mat_physics': self.mat_physics
}
self.fname = fname
def __repr__(self):
"""Show selected data."""
return util.srepr(self.list_data())
def _manage_selection(self,action,what,datasets):
"""
Manages the visibility of the groups.
@ -151,12 +156,11 @@ class Result:
selected = []
for i,time in enumerate(self.times):
if start <= time <= end:
selected.append(self.increments[i])
selected.append(self.times[i])
return selected
def iter_selection(self,what):
def iterate(self,what):
"""
Iterate over selection items by setting each one selected.
@ -225,6 +229,74 @@ class Result:
"""
self._manage_selection('del',what,datasets)
# def datamerger(regular expression to filter groups into one copy)
def place(self,datasets,component=0,tagged=False,split=True):
"""
Distribute datasets onto geometry and return Table or (split) dictionary of Tables.
Must not mix nodal and cell data.
Only data within
- inc?????/constituent/*_*/*
- inc?????/materialpoint/*_*/*
- inc?????/geometry/*
are considered.
Parameters
----------
datasets : iterable or str
component : int
homogenization component to consider for constituent data
tagged : Boolean
tag Table.column name with '#component'
defaults to False
split : Boolean
split Table by increment and return dictionary of Tables
defaults to True
"""
sets = datasets if hasattr(datasets,'__iter__') and not isinstance(datasets,str) \
else [datasets]
tag = f'#{component}' if tagged else ''
tbl = {} if split else None
inGeom = {}
inData = {}
with h5py.File(self.fname,'r') as f:
for dataset in sets:
for group in self.groups_with_datasets(dataset):
path = os.path.join(group,dataset)
inc,prop,name,cat,item = (path.split('/') + ['']*5)[:5]
key = '/'.join([prop,name+tag])
if key not in inGeom:
if prop == 'geometry':
inGeom[key] = inData[key] = np.arange(self.Nmaterialpoints)
elif prop == 'constituent':
inGeom[key] = np.where(f['mapping/cellResults/constituent'][:,component]['Name'] == str.encode(name))[0]
inData[key] = f['mapping/cellResults/constituent'][inGeom[key],component]['Position']
else:
inGeom[key] = np.where(f['mapping/cellResults/materialpoint']['Name'] == str.encode(name))[0]
inData[key] = f['mapping/cellResults/materialpoint'][inGeom[key].tolist()]['Position']
shape = np.shape(f[path])
data = np.full((self.Nmaterialpoints,) + (shape[1:] if len(shape)>1 else (1,)),
np.nan,
dtype=np.dtype(f[path]))
data[inGeom[key]] = (f[path] if len(shape)>1 else np.expand_dims(f[path],1))[inData[key]]
path = (os.path.join(*([prop,name]+([cat] if cat else [])+([item] if item else []))) if split else path)+tag
if split:
try:
tbl[inc].add(path,data)
except KeyError:
tbl[inc] = Table(data.reshape(self.Nmaterialpoints,-1),{path:data.shape[1:]})
else:
try:
tbl.add(path,data)
except AttributeError:
tbl = Table(data.reshape(self.Nmaterialpoints,-1),{path:data.shape[1:]})
return tbl
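# --- Editor's usage sketch for place() (not part of the diff); the file name and
# --- the dataset label 'sigma' are assumptions made for illustration.
from damask import Result

tables = Result('my_job.hdf5').place('sigma')   # split=True (default): one Table per increment
for inc,tbl in tables.items():                  # keyed by increment group name
    print(inc,tbl.data.shape)                   # Nmaterialpoints rows; NaN where 'sigma' is absent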
def groups_with_datasets(self,datasets):
"""
@ -262,10 +334,10 @@ class Result:
groups = []
with h5py.File(self.fname,'r') as f:
for i in self.iter_selection('increments'):
for i in self.iterate('increments'):
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_selection(o):
for pp in self.iter_selection(p):
for oo in self.iterate(o):
for pp in self.iterate(p):
group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue
if sets is True:
groups.append(group)
@ -279,12 +351,12 @@ class Result:
"""Return information on all active datasets in the file."""
message = ''
with h5py.File(self.fname,'r') as f:
for i in self.iter_selection('increments'):
for i in self.iterate('increments'):
message+='\n{} ({}s)\n'.format(i,self.times[self.increments.index(i)])
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_selection(o):
for oo in self.iterate(o):
message+=' {}\n'.format(oo)
for pp in self.iter_selection(p):
for pp in self.iterate(p):
message+=' {}\n'.format(pp)
group = '/'.join([i,o[:-1],oo,pp]) # o[:-1]: plural/singular issue
for d in f[group].keys():
@ -301,7 +373,7 @@ class Result:
"""Return the location of all active datasets with given label."""
path = []
with h5py.File(self.fname,'r') as f:
for i in self.iter_selection('increments'):
for i in self.iterate('increments'):
k = '/'.join([i,'geometry',label])
try:
f[k]
@ -309,8 +381,8 @@ class Result:
except KeyError as e:
pass
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in self.iter_selection(o):
for pp in self.iter_selection(p):
for oo in self.iterate(o):
for pp in self.iterate(p):
k = '/'.join([i,o[:-1],oo,pp,label])
try:
f[k]
@ -375,7 +447,7 @@ class Result:
def cell_coordinates(self):
"""Return initial coordinates of the cell centers."""
if self.structured:
return grid_filters.cell_coord0(self.grid,self.size,self.origin)
return grid_filters.cell_coord0(self.grid,self.size,self.origin).reshape(-1,3)
else:
with h5py.File(self.fname,'r') as f:
return f['geometry/x_c'][()]
@ -892,11 +964,11 @@ class Result:
datasets_in = {}
lock.acquire()
with h5py.File(self.fname,'r') as f:
for arg,label in datasets.items():
loc = f[group+'/'+label]
datasets_in[arg]={'data' :loc[()],
'label':label,
'meta': {k:v.decode() for k,v in loc.attrs.items()}}
for arg,label in datasets.items():
loc = f[group+'/'+label]
datasets_in[arg]={'data' :loc[()],
'label':label,
'meta': {k:v.decode() for k,v in loc.attrs.items()}}
lock.release()
r = func(**datasets_in,**args)
return [group,r]
@ -958,143 +1030,71 @@ class Result:
if mode.lower()=='cell':
if self.structured:
coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
for dim in [0,1,2]:
for c in np.linspace(0,self.size[dim],1+self.grid[dim]):
coordArray[dim].InsertNextValue(c)
vtk_geom = vtk.vtkRectilinearGrid()
vtk_geom.SetDimensions(*(self.grid+1))
vtk_geom.SetXCoordinates(coordArray[0])
vtk_geom.SetYCoordinates(coordArray[1])
vtk_geom.SetZCoordinates(coordArray[2])
v = VTK.from_rectilinearGrid(self.grid,self.size,self.origin)
else:
nodes = vtk.vtkPoints()
with h5py.File(self.fname,'r') as f:
nodes.SetData(numpy_support.numpy_to_vtk(f['/geometry/x_n'][()],deep=True))
vtk_geom = vtk.vtkUnstructuredGrid()
vtk_geom.SetPoints(nodes)
vtk_geom.Allocate(f['/geometry/T_c'].shape[0])
if self.version_major == 0 and self.version_minor <= 5:
vtk_type = vtk.VTK_HEXAHEDRON
n_nodes = 8
else:
if f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TRIANGLE':
vtk_type = vtk.VTK_TRIANGLE
n_nodes = 3
elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'QUAD':
vtk_type = vtk.VTK_QUAD
n_nodes = 4
elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'TETRA': # not tested
vtk_type = vtk.VTK_TETRA
n_nodes = 4
elif f['/geometry/T_c'].attrs['VTK_TYPE'] == b'HEXAHEDRON':
vtk_type = vtk.VTK_HEXAHEDRON
n_nodes = 8
for i in f['/geometry/T_c']:
vtk_geom.InsertNextCell(vtk_type,n_nodes,i-1)
v = VTK.from_unstructuredGrid(f['/geometry/x_n'][()],
f['/geometry/T_c'][()]-1,
f['/geometry/T_c'].attrs['VTK_TYPE'].decode())
elif mode.lower()=='point':
Points = vtk.vtkPoints()
Vertices = vtk.vtkCellArray()
for c in self.cell_coordinates():
pointID = Points.InsertNextPoint(c)
Vertices.InsertNextCell(1)
Vertices.InsertCellPoint(pointID)
v = VTK.from_polyData(self.cell_coordinates())
vtk_geom = vtk.vtkPolyData()
vtk_geom.SetPoints(Points)
vtk_geom.SetVerts(Vertices)
vtk_geom.Modified()
N_digits = int(np.floor(np.log10(min(int(self.increments[-1][3:]),1))))+1
N_digits = int(np.floor(np.log10(int(self.increments[-1][3:]))))+1
for i,inc in enumerate(self.iter_selection('increments')):
vtk_data = []
for i,inc in enumerate(util.show_progress(self.iterate('increments'),len(self.selection['increments']))):
materialpoints_backup = self.selection['materialpoints'].copy()
self.pick('materialpoints',False)
for label in (labels if isinstance(labels,list) else [labels]):
for p in self.iter_selection('con_physics'):
if p != 'generic':
for c in self.iter_selection('constituents'):
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: hard coded 1!
vtk_geom.GetCellData().AddArray(vtk_data[-1])
for p in self.iterate('con_physics'):
if p != 'generic':
for c in self.iterate('constituents'):
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
v.add(array,'1_'+x[0].split('/',1)[1]) #ToDo: hard coded 1!
else:
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True))
ph_name = re.compile(r'(?<=(constituent\/))(.*?)(?=(generic))') # identify phase name
dset_name = '1_' + re.sub(ph_name,r'',x[0].split('/',1)[1]) # removing phase name
vtk_data[-1].SetName(dset_name)
vtk_geom.GetCellData().AddArray(vtk_data[-1])
v.add(array,dset_name)
self.pick('materialpoints',materialpoints_backup)
constituents_backup = self.selection['constituents'].copy()
self.pick('constituents',False)
for label in (labels if isinstance(labels,list) else [labels]):
for p in self.iter_selection('mat_physics'):
if p != 'generic':
for m in self.iter_selection('materialpoints'):
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1]) #ToDo: why 1_?
vtk_geom.GetCellData().AddArray(vtk_data[-1])
for p in self.iterate('mat_physics'):
if p != 'generic':
for m in self.iterate('materialpoints'):
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
v.add(array,'1_'+x[0].split('/',1)[1]) #ToDo: why 1_?
else:
x = self.get_dataset_location(label)
if len(x) == 0:
continue
array = self.read_dataset(x,0)
shape = [array.shape[0],np.product(array.shape[1:])]
vtk_data.append(numpy_support.numpy_to_vtk(num_array=array.reshape(shape),deep=True))
vtk_data[-1].SetName('1_'+x[0].split('/',1)[1])
vtk_geom.GetCellData().AddArray(vtk_data[-1])
v.add(array,'1_'+x[0].split('/',1)[1])
self.pick('constituents',constituents_backup)
if mode.lower()=='cell':
writer = vtk.vtkXMLRectilinearGridWriter() if self.structured else \
vtk.vtkXMLUnstructuredGridWriter()
x = self.get_dataset_location('u_n')
vtk_data.append(numpy_support.numpy_to_vtk(num_array=self.read_dataset(x,0),deep=True))
vtk_data[-1].SetName('u')
vtk_geom.GetPointData().AddArray(vtk_data[-1])
elif mode.lower()=='point':
writer = vtk.vtkXMLPolyDataWriter()
u = self.read_dataset(self.get_dataset_location('u_n' if mode.lower() == 'cell' else 'u_p'))
v.add(u,'u')
file_out = '{}_inc{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
inc[3:].zfill(N_digits))
file_out = '{}_inc{}.{}'.format(os.path.splitext(os.path.basename(self.fname))[0],
inc[3:].zfill(N_digits),
writer.GetDefaultFileExtension())
writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
writer.SetFileName(file_out)
writer.SetInputData(vtk_geom)
writer.Write()
v.write(file_out)
###################################################################################################
# BEGIN DEPRECATED
iter_visible = iter_selection
iter_visible = iterate
iter_selection = iterate
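# --- Editor's sketch of the renamed selection API (not part of the diff); the result
# --- file and the labels 'F'/'P' are assumptions, and to_vtk() is assumed to keep its
# --- signature to_vtk(labels,mode='cell').
from damask import Result

r = Result('my_job.hdf5')
for inc in r.iterate('increments'):             # new name; iter_visible/iter_selection remain as aliases
    print(inc)
r.to_vtk(['F','P'])                             # one VTK file per selected increment, written via damask.VTK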
def _time_to_inc(self,start,end):

python/damask/table.py

@ -348,4 +348,4 @@ class Table:
f = fname
for line in header + [' '.join(labels)]: f.write(line+'\n')
self.data.to_csv(f,sep=' ',index=False,header=False)
self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
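Editor's note on the one-line change above: Result.place() pads unmapped points with np.nan, and without na_rep those rows would be written as empty fields. A pure-pandas illustration of the difference:

import numpy as np
import pandas as pd

df = pd.DataFrame({'x':[1.0,np.nan]})
print(df.to_csv(sep=' ',index=False,header=False))               # '1.0\n\n'  (empty field for NaN)
print(df.to_csv(sep=' ',na_rep='nan',index=False,header=False))  # '1.0\nnan\n'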