Merge remote-tracking branch 'origin/development' into separate-vtk

Martin Diehl 2022-03-01 22:52:36 +01:00
commit 802957f61e
24 changed files with 480 additions and 284 deletions

View File

@ -1 +1 @@
v3.0.0-alpha6-27-gf4a15f579
v3.0.0-alpha6-71-g3bc1a5eda

View File

@ -328,7 +328,7 @@ class Colormap(mpl.colors.ListedColormap):
if fname is None:
return open(self.name.replace(' ','_')+suffix, 'w', newline='\n')
elif isinstance(fname, (str, Path)):
return open(fname, 'w', newline='\n')
return open(Path(fname).expanduser(), 'w', newline='\n')
else:
return fname
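A minimal sketch of the effect of the Colormap change above: user-supplied file names are now routed through Path.expanduser(), so '~' resolves to the home directory. The target path is hypothetical and save_paraview() is assumed as one of the existing export helpers that use _get_file_handle():

    from damask import Colormap

    c = Colormap.from_predefined('cividis')
    c.save_paraview('~/colormaps/cividis.json')   # '~' is expanded before the file is opened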

View File

@ -133,7 +133,7 @@ class Crystal():
def __repr__(self):
"""Represent."""
"""Give short human-readable summary."""
family = f'Crystal family: {self.family}'
return family if self.lattice is None else \
util.srepr([family,

View File

@ -17,6 +17,7 @@ from . import util
from . import grid_filters
from . import Rotation
from . import Table
from . import Colormap
from ._typehints import FloatSequence, IntSequence
class Grid:
@ -57,7 +58,7 @@ class Grid:
def __repr__(self) -> str:
"""Basic information on grid definition."""
"""Give short human-readable summary."""
mat_min = np.nanmin(self.material)
mat_max = np.nanmax(self.material)
mat_N = self.N_materials
@ -173,8 +174,8 @@ class Grid:
Parameters
----------
fname : str or pathlib.Path
Grid file to read. Valid extension is .vti, which will be appended
if not given.
Grid file to read.
Valid extension is .vti, which will be appended if not given.
Returns
-------
@ -183,14 +184,14 @@ class Grid:
"""
v = VTK.load(fname if str(fname).endswith('.vti') else str(fname)+'.vti')
comments = v.get_comments()
cells = np.array(v.vtk_data.GetDimensions())-1
bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T
comments = v.comments
return Grid(material = v.get('material').reshape(cells,order='F'),
size = bbox[1] - bbox[0],
origin = bbox[0],
comments=comments)
comments = comments)
@typing.no_type_check
@ -643,9 +644,9 @@ class Grid:
Compress with zlib algorithm. Defaults to True.
"""
v = VTK.from_image_data(self.cells,self.size,self.origin)
v.add(self.material.flatten(order='F'),'material')
v.add_comments(self.comments)
v = VTK.from_image_data(self.cells,self.size,self.origin)\
.add(self.material.flatten(order='F'),'material')
v.comments += self.comments
v.save(fname,parallel=False,compress=compress)
@ -681,9 +682,20 @@ class Grid:
header='\n'.join(header), fmt=format_string, comments='')
def show(self) -> None:
"""Show on screen."""
VTK.from_image_data(self.cells,self.size,self.origin).show()
def show(self,
colormap: Colormap = Colormap.from_predefined('cividis')) -> None:
"""
Show on screen.
Parameters
----------
colormap : damask.Colormap, optional
Colors used to map material IDs. Defaults to 'cividis'.
"""
VTK.from_image_data(self.cells,self.size,self.origin) \
.add(self.material.flatten('F'),'material') \
.show('material',colormap)
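A usage sketch of the extended Grid.show() signature introduced above, assuming a grid file on disk (file name hypothetical):

    import damask

    g = damask.Grid.load('polycrystal.vti')
    g.show()                                             # material IDs with the default 'cividis' colormap
    g.show(damask.Colormap.from_predefined('viridis'))   # any other damask.Colormap can be passed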
def add_primitive(self,

View File

@ -120,9 +120,10 @@ class Orientation(Rotation,Crystal):
def __repr__(self) -> str:
"""Represent."""
return '\n'.join([Crystal.__repr__(self),
Rotation.__repr__(self)])
"""Give short human-readable summary."""
return util.srepr([Crystal.__repr__(self),
Rotation.__repr__(self)])
def __copy__(self: MyType,
rotation: Union[FloatSequence, Rotation] = None) -> MyType:

View File

@ -154,7 +154,7 @@ class Result:
'fields': self.fields,
}
self.fname = Path(fname).absolute()
self.fname = Path(fname).expanduser().absolute()
self._protected = True
@ -167,7 +167,7 @@ class Result:
def __repr__(self):
"""Show summary of file content."""
"""Give short human-readable summary."""
with h5py.File(self.fname,'r') as f:
header = [f'Created by {f.attrs["creator"]}',
f' on {f.attrs["created"]}',
@ -1635,7 +1635,7 @@ class Result:
else:
raise ValueError(f'invalid mode "{mode}"')
v.set_comments(util.execution_stamp('Result','export_VTK'))
v.comments = util.execution_stamp('Result','export_VTK')
N_digits = int(np.floor(np.log10(max(1,int(self.increments[-1][10:])))))+1
@ -1651,12 +1651,12 @@ class Result:
if self.version_minor >= 13:
creator = f.attrs['creator'] if h5py3 else f.attrs['creator'].decode()
created = f.attrs['created'] if h5py3 else f.attrs['created'].decode()
v.add_comments(f'{creator} ({created})')
v.comments += f'{creator} ({created})'
for inc in util.show_progress(self.visible['increments']):
u = _read(f['/'.join([inc,'geometry','u_n' if mode.lower() == 'cell' else 'u_p'])])
v.add(u,'u')
v = v.add(u,'u')
for ty in ['phase','homogenization']:
for field in self.visible['fields']:
@ -1683,7 +1683,7 @@ class Result:
outs[out][at_cell_ho[label]] = data[in_data_ho[label]]
for label,dataset in outs.items():
v.add(dataset,' / '.join(['/'.join([ty,field,label]),dataset.dtype.metadata['unit']]))
v = v.add(dataset,' / '.join(['/'.join([ty,field,label]),dataset.dtype.metadata['unit']]))
v.save(f'{self.fname.stem}_inc{inc[10:].zfill(N_digits)}',parallel=parallel)
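The Result changes above switch internally to the VTK.comments property and the copy-returning add(); for user code the visible difference is that '~' is now accepted in the file name. A rough sketch (file name hypothetical):

    import damask

    r = damask.Result('~/simulations/tensionX.hdf5')   # '~' is expanded
    r.export_VTK(parallel=False)                       # writes one VTK file per visible increment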

View File

@ -88,7 +88,7 @@ class Rotation:
def __repr__(self) -> str:
"""Represent rotation as unit quaternion(s)."""
"""Give short human-readable summary."""
return f'Quaternion{" " if self.quaternion.shape == (4,) else "s of shape "+str(self.quaternion.shape[:-1])+chr(10)}'\
+ str(self.quaternion)

View File

@ -37,7 +37,7 @@ class Table:
def __repr__(self) -> str:
"""Brief overview."""
"""Give short human-readable summary."""
self._relabel('shapes')
data_repr = self.data.__repr__()
self._relabel('uniform')
@ -259,7 +259,7 @@ class Table:
Table data from file.
"""
f = open(fname) if isinstance(fname, (str, Path)) else fname
f = open(Path(fname).expanduser()) if isinstance(fname, (str, Path)) else fname
f.seek(0)
comments = []
@ -333,7 +333,7 @@ class Table:
@property
def labels(self) -> List[Tuple[int, ...]]:
def labels(self) -> List[str]:
return list(self.shapes)
@ -589,7 +589,7 @@ class Table:
labels += [f'{util.srepr(self.shapes[l],"x")}:{i+1}_{l}' \
for i in range(np.prod(self.shapes[l]))]
f = open(fname,'w',newline='\n') if isinstance(fname, (str, Path)) else fname
f = open(Path(fname).expanduser(),'w',newline='\n') if isinstance(fname, (str, Path)) else fname
f.write('\n'.join([f'# {c}' for c in self.comments] + [' '.join(labels)])+'\n')
self.data.to_csv(f,sep=' ',na_rep='nan',index=False,header=False)
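A short sketch of the Table changes above: labels is now a plain list of column names, and '~' in file names is expanded on load and save (file names hypothetical):

    import damask

    t = damask.Table.load('~/postProc/results.txt')
    print(t.labels)                                  # e.g. ['F', 'P'], a List[str]
    t.save('~/postProc/results_copy.txt')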

View File

@ -1,8 +1,7 @@
import os
import warnings
import multiprocessing as mp
from pathlib import Path
from typing import Union, Literal, List
from typing import Union, Literal, List, Sequence
import numpy as np
import vtk
@ -13,6 +12,7 @@ from vtk.util.numpy_support import vtk_to_numpy as vtk_to_np
from ._typehints import FloatSequence, IntSequence
from . import util
from . import Table
from . import Colormap
class VTK:
@ -38,6 +38,110 @@ class VTK:
self.vtk_data = vtk_data
def __repr__(self) -> str:
"""Give short human-readable summary."""
info = [self.vtk_data.__vtkname__]
for data in ['Cell Data', 'Point Data']:
if data == 'Cell Data': info.append(f'\n# cells: {self.N_cells}')
if data == 'Point Data': info.append(f'\n# points: {self.N_points}')
if data in self.labels:
info += [f' - {l}' for l in self.labels[data]]
return util.srepr(info)
def __eq__(self,
other: object) -> bool:
"""
Equal to other.
Parameters
----------
other : damask.VTK
VTK to check for equality.
"""
if not isinstance(other, VTK):
return NotImplemented
return self.as_ASCII() == other.as_ASCII()
def copy(self):
if isinstance(self.vtk_data,vtk.vtkImageData):
dup = vtk.vtkImageData()
elif isinstance(self.vtk_data,vtk.vtkUnstructuredGrid):
dup = vtk.vtkUnstructuredGrid()
elif isinstance(self.vtk_data,vtk.vtkPolyData):
dup = vtk.vtkPolyData()
elif isinstance(self.vtk_data,vtk.vtkRectilinearGrid):
dup = vtk.vtkRectilinearGrid()
else:
raise TypeError
dup.DeepCopy(self.vtk_data)
return VTK(dup)
@property
def comments(self) -> List[str]:
"""Return the comments."""
fielddata = self.vtk_data.GetFieldData()
for a in range(fielddata.GetNumberOfArrays()):
if fielddata.GetArrayName(a) == 'comments':
comments = fielddata.GetAbstractArray(a)
return [comments.GetValue(i) for i in range(comments.GetNumberOfValues())]
return []
@comments.setter
def comments(self,
comments: Union[str, Sequence[str]]):
"""
Set comments.
Parameters
----------
comments : str or list of str
Comments.
"""
s = vtk.vtkStringArray()
s.SetName('comments')
for c in util.tail_repack(comments,self.comments):
s.InsertNextValue(c)
self.vtk_data.GetFieldData().AddArray(s)
@property
def N_points(self) -> int:
"""Number of points in vtkdata."""
return self.vtk_data.GetNumberOfPoints()
@property
def N_cells(self) -> int:
"""Number of cells in vtkdata."""
return self.vtk_data.GetNumberOfCells()
@property
def labels(self):
"""Labels of datasets."""
labels = {}
cell_data = self.vtk_data.GetCellData()
if c := [cell_data.GetArrayName(a) for a in range(cell_data.GetNumberOfArrays())]:
labels['Cell Data'] = c
point_data = self.vtk_data.GetPointData()
if p := [point_data.GetArrayName(a) for a in range(point_data.GetNumberOfArrays())]:
labels['Point Data'] = p
return labels
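A minimal sketch of the property-based interface added above (comments, N_points, N_cells, labels); the data are random and only for illustration:

    import numpy as np
    import damask

    v = damask.VTK.from_poly_data(np.random.rand(10,3))
    v.comments = 'creation note'            # replaces set_comments()
    v.comments += ['a second comment']      # replaces add_comments()
    print(v.N_points, v.N_cells, v.labels, v.comments)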
@staticmethod
def from_image_data(cells: IntSequence,
size: FloatSequence,
@ -70,40 +174,6 @@ class VTK:
return VTK(vtk_data)
@staticmethod
def from_rectilinear_grid(grid: np.ndarray,
size: FloatSequence,
origin: FloatSequence = np.zeros(3)) -> 'VTK':
"""
Create VTK of type vtk.vtkRectilinearGrid.
Parameters
----------
grid : iterable of int, len (3)
Number of cells along each dimension.
size : iterable of float, len (3)
Physical length along each dimension.
origin : iterable of float, len (3), optional
Coordinates of grid origin.
Returns
-------
new : damask.VTK
VTK-based geometry without nodal or cell data.
"""
warnings.warn('Support for vtr files will be removed in DAMASK 3.1.0', DeprecationWarning,2)
vtk_data = vtk.vtkRectilinearGrid()
vtk_data.SetDimensions(*(np.array(grid)+1))
coord = [np_to_vtk(np.linspace(origin[i],origin[i]+size[i],grid[i]+1),deep=True) for i in [0,1,2]]
[coord[i].SetName(n) for i,n in enumerate(['x','y','z'])]
vtk_data.SetXCoordinates(coord[0])
vtk_data.SetYCoordinates(coord[1])
vtk_data.SetZCoordinates(coord[2])
return VTK(vtk_data)
@staticmethod
def from_unstructured_grid(nodes: np.ndarray,
connectivity: np.ndarray,
@ -115,7 +185,7 @@ class VTK:
Parameters
----------
nodes : numpy.ndarray of shape (:,3)
nodes : numpy.ndarray, shape (:,3)
Spatial position of the nodes.
connectivity : numpy.ndarray of np.dtype = int
Cell connectivity (0-based), first dimension determines #Cells,
@ -155,7 +225,7 @@ class VTK:
Parameters
----------
points : numpy.ndarray of shape (:,3)
points : numpy.ndarray, shape (:,3)
Spatial position of the points.
Returns
@ -180,9 +250,36 @@ class VTK:
return VTK(vtk_data)
@staticmethod
def from_rectilinear_grid(grid: FloatSequence) -> 'VTK':
"""
Create VTK of type vtk.vtkRectilinearGrid.
Parameters
----------
grid : iterables of floats, len (3)
Grid coordinates along x, y, and z directions.
Returns
-------
new : damask.VTK
VTK-based geometry without nodal or cell data.
"""
vtk_data = vtk.vtkRectilinearGrid()
vtk_data.SetDimensions(*map(len,grid))
coord = [np_to_vtk(np.array(grid[i]),deep=True) for i in [0,1,2]]
[coord[i].SetName(n) for i,n in enumerate(['x','y','z'])]
vtk_data.SetXCoordinates(coord[0])
vtk_data.SetYCoordinates(coord[1])
vtk_data.SetZCoordinates(coord[2])
return VTK(vtk_data)
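The reworked from_rectilinear_grid above takes the grid point coordinates directly instead of the former (cells, size, origin) arguments; a sketch with arbitrary coordinates:

    import numpy as np
    import damask

    grid = [np.linspace(0.0,1.0,5), np.linspace(0.0,2.0,7), np.linspace(0.0,0.5,3)]
    v = damask.VTK.from_rectilinear_grid(grid)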
@staticmethod
def load(fname: Union[str, Path],
dataset_type: Literal['ImageData', 'UnstructuredGrid', 'PolyData'] = None) -> 'VTK':
dataset_type: Literal['ImageData', 'UnstructuredGrid', 'PolyData', 'RectilinearGrid'] = None) -> 'VTK':
"""
Load from VTK file.
@ -190,8 +287,8 @@ class VTK:
----------
fname : str or pathlib.Path
Filename for reading.
Valid extensions are .vti, .vtr, .vtu, .vtp, and .vtk.
dataset_type : {'ImageData', 'UnstructuredGrid', 'PolyData'}, optional
Valid extensions are .vti, .vtu, .vtp, .vtr, and .vtk.
dataset_type : {'ImageData', 'UnstructuredGrid', 'PolyData', 'RectilinearGrid'}, optional
Name of the vtk.vtkDataSet subclass when opening a .vtk file.
Returns
@ -200,40 +297,40 @@ class VTK:
VTK-based geometry from file.
"""
if not os.path.isfile(fname): # vtk has a strange error handling
if not Path(fname).expanduser().is_file(): # vtk has a strange error handling
raise FileNotFoundError(f'file "{fname}" not found')
if (ext := Path(fname).suffix) == '.vtk' or dataset_type is not None:
reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(str(fname))
reader.SetFileName(str(Path(fname).expanduser()))
if dataset_type is None:
raise TypeError('dataset type for *.vtk file not given')
elif dataset_type.lower().endswith(('imagedata','image_data')):
reader.Update()
vtk_data = reader.GetStructuredPointsOutput()
elif dataset_type.lower().endswith(('rectilineargrid','rectilinear_grid')):
reader.Update()
vtk_data = reader.GetRectilinearGridOutput()
elif dataset_type.lower().endswith(('unstructuredgrid','unstructured_grid')):
reader.Update()
vtk_data = reader.GetUnstructuredGridOutput()
elif dataset_type.lower().endswith(('polydata','poly_data')):
reader.Update()
vtk_data = reader.GetPolyDataOutput()
elif dataset_type.lower().endswith(('rectilineargrid','rectilinear_grid')):
reader.Update()
vtk_data = reader.GetRectilinearGridOutput()
else:
raise TypeError(f'unknown dataset type "{dataset_type}" for vtk file')
else:
if ext == '.vti':
reader = vtk.vtkXMLImageDataReader()
elif ext == '.vtr':
reader = vtk.vtkXMLRectilinearGridReader()
elif ext == '.vtu':
reader = vtk.vtkXMLUnstructuredGridReader()
elif ext == '.vtp':
reader = vtk.vtkXMLPolyDataReader()
elif ext == '.vtr':
reader = vtk.vtkXMLRectilinearGridReader()
else:
raise TypeError(f'unknown file extension "{ext}"')
reader.SetFileName(str(fname))
reader.SetFileName(str(Path(fname).expanduser()))
reader.Update()
vtk_data = reader.GetOutput()
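A usage sketch of the extended load() above: '~' is expanded, .vtr files are read again, and legacy .vtk files now also accept 'RectilinearGrid' as dataset type (file names hypothetical):

    import damask

    v1 = damask.VTK.load('~/geom/20grains.vti')
    v2 = damask.VTK.load('legacy.vtk','RectilinearGrid')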
@ -245,6 +342,17 @@ class VTK:
"""Wrapper for parallel writing."""
writer.Write()
def as_ASCII(self) -> str:
"""ASCII representation of the VTK data."""
writer = vtk.vtkDataSetWriter()
writer.SetHeader(f'# {util.execution_stamp("VTK")}')
writer.WriteToOutputStringOn()
writer.SetInputData(self.vtk_data)
writer.Write()
return writer.GetOutputString()
def save(self,
fname: Union[str, Path],
parallel: bool = True,
@ -264,16 +372,16 @@ class VTK:
"""
if isinstance(self.vtk_data,vtk.vtkImageData):
writer = vtk.vtkXMLImageDataWriter()
elif isinstance(self.vtk_data,vtk.vtkRectilinearGrid):
writer = vtk.vtkXMLRectilinearGridWriter()
elif isinstance(self.vtk_data,vtk.vtkUnstructuredGrid):
writer = vtk.vtkXMLUnstructuredGridWriter()
elif isinstance(self.vtk_data,vtk.vtkPolyData):
writer = vtk.vtkXMLPolyDataWriter()
elif isinstance(self.vtk_data,vtk.vtkRectilinearGrid):
writer = vtk.vtkXMLRectilinearGridWriter()
default_ext = '.'+writer.GetDefaultFileExtension()
ext = Path(fname).suffix
writer.SetFileName(str(fname)+(default_ext if default_ext != ext else ''))
writer.SetFileName(str(Path(fname).expanduser())+(default_ext if default_ext != ext else ''))
if compress:
writer.SetCompressorTypeToZLib()
@ -293,36 +401,31 @@ class VTK:
# Check https://blog.kitware.com/ghost-and-blanking-visibility-changes/ for missing data
# Needs support for damask.Table
def add(self,
data: Union[np.ndarray, np.ma.MaskedArray],
data: Union[np.ndarray, np.ma.MaskedArray, 'Table'],
label: str = None):
"""
Add data to either cells or points.
Parameters
----------
data : numpy.ndarray or numpy.ma.MaskedArray
data : numpy.ndarray, numpy.ma.MaskedArray, or damask.Table
Data to add. First dimension needs to match either
number of cells or number of points.
label : str
label : str, optional if data is damask.Table
Data label.
"""
N_points = self.vtk_data.GetNumberOfPoints()
N_cells = self.vtk_data.GetNumberOfCells()
if isinstance(data,np.ndarray):
if label is None:
raise ValueError('no label defined for numpy.ndarray')
def _add_array(vtk_data,
data: np.ndarray,
label: str):
N_data = data.shape[0]
data_ = (data if not isinstance(data,np.ma.MaskedArray) else
np.where(data.mask,data.fill_value,data)).reshape(N_data,-1)
data_ = data.reshape(N_data,-1) \
.astype(np.single if data.dtype in [np.double,np.longdouble] else data.dtype)
if data_.dtype in [np.double,np.longdouble]:
d = np_to_vtk(data_.astype(np.single),deep=True) # avoid large files
elif data_.dtype.type is np.str_:
if data.dtype.type is np.str_:
d = vtk.vtkStringArray()
for s in np.squeeze(data_):
d.InsertNextValue(s)
@ -331,17 +434,29 @@ class VTK:
d.SetName(label)
if N_data == N_points:
self.vtk_data.GetPointData().AddArray(d)
elif N_data == N_cells:
self.vtk_data.GetCellData().AddArray(d)
if N_data == vtk_data.GetNumberOfPoints():
vtk_data.GetPointData().AddArray(d)
elif N_data == vtk_data.GetNumberOfCells():
vtk_data.GetCellData().AddArray(d)
else:
raise ValueError(f'cell / point count ({N_cells} / {N_points}) differs from data ({N_data})')
raise ValueError(f'data count mismatch ({N_data} ≠ {self.N_points} & {self.N_cells})')
dup = self.copy()
if isinstance(data,np.ndarray):
if label is not None:
_add_array(dup.vtk_data,
np.where(data.mask,data.fill_value,data) if isinstance(data,np.ma.MaskedArray) else data,
label)
else:
raise ValueError('no label defined for numpy.ndarray')
elif isinstance(data,Table):
raise NotImplementedError('damask.Table')
for l in data.labels:
_add_array(dup.vtk_data,data.get(l),l)
else:
raise TypeError
return dup
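add() above now returns a modified copy instead of mutating in place, so calls chain naturally, and a damask.Table can be added without a label; a rough sketch with random data sized to the cell count (Table argument order follows the test below):

    import numpy as np
    import damask

    cells, size = np.array([4,5,6]), np.ones(3)
    N = cells.prod()
    v = damask.VTK.from_image_data(cells,size) \
              .add(np.random.rand(N,3),'random vectors') \
              .add(damask.Table(np.random.rand(N,1),{'scalar':(1,)}))   # labels taken from the Table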
def get(self,
label: str) -> np.ndarray:
@ -386,59 +501,9 @@ class VTK:
raise ValueError(f'array "{label}" not found')
def get_comments(self) -> List[str]:
"""Return the comments."""
fielddata = self.vtk_data.GetFieldData()
for a in range(fielddata.GetNumberOfArrays()):
if fielddata.GetArrayName(a) == 'comments':
comments = fielddata.GetAbstractArray(a)
return [comments.GetValue(i) for i in range(comments.GetNumberOfValues())]
return []
def set_comments(self,
comments: Union[str, List[str]]):
"""
Set comments.
Parameters
----------
comments : str or list of str
Comments.
"""
s = vtk.vtkStringArray()
s.SetName('comments')
for c in [comments] if isinstance(comments,str) else comments:
s.InsertNextValue(c)
self.vtk_data.GetFieldData().AddArray(s)
def add_comments(self,
comments: Union[str, List[str]]):
"""
Add comments.
Parameters
----------
comments : str or list of str
Comments to add.
"""
self.set_comments(self.get_comments() + ([comments] if isinstance(comments,str) else comments))
def __repr__(self) -> str:
"""ASCII representation of the VTK data."""
writer = vtk.vtkDataSetWriter()
writer.SetHeader(f'# {util.execution_stamp("VTK")}')
writer.WriteToOutputStringOn()
writer.SetInputData(self.vtk_data)
writer.Write()
return writer.GetOutputString()
def show(self):
def show(self,
label: str = None,
colormap: Colormap = Colormap.from_predefined('cividis')):
"""
Render.
@ -459,8 +524,17 @@ class VTK:
width = 1024
height = 768
lut = vtk.vtkLookupTable()
lut.SetNumberOfTableValues(len(colormap.colors))
for i,c in enumerate(colormap.colors):
lut.SetTableValue(i,c if len(c)==4 else np.append(c,1.0))
lut.Build()
self.vtk_data.GetCellData().SetActiveScalars(label)
mapper = vtk.vtkDataSetMapper()
mapper.SetInputData(self.vtk_data)
mapper.SetLookupTable(lut)
mapper.SetScalarRange(self.vtk_data.GetScalarRange())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
@ -468,7 +542,15 @@ class VTK:
ren = vtk.vtkRenderer()
ren.AddActor(actor)
ren.SetBackground(67/255,128/255,208/255)
if label is None:
ren.SetBackground(67/255,128/255,208/255)
else:
colormap = vtk.vtkScalarBarActor()
colormap.SetLookupTable(lut)
colormap.SetTitle(label)
colormap.SetMaximumWidthInPixels(width//100)
ren.AddActor2D(colormap)
ren.SetBackground(0.3,0.3,0.3)
window = vtk.vtkRenderWindow()
window.AddRenderer(ren)
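With the changes above, show() can color a named cell or point dataset through a damask.Colormap and adds a scalar bar; a sketch with random data (requires a display):

    import numpy as np
    import damask

    v = damask.VTK.from_image_data([8,8,8],[1.,1.,1.]) \
              .add(np.random.rand(8**3,1),'noise')
    v.show('noise', damask.Colormap.from_predefined('viridis'))
    v.show()                                    # geometry only, blue background as before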

View File

@ -9,7 +9,7 @@ import re
import fractions
from collections import abc
from functools import reduce
from typing import Union, Tuple, Iterable, Callable, Dict, List, Any, Literal
from typing import Callable, Union, Iterable, Sequence, Dict, List, Tuple, Literal, Any
from pathlib import Path
import numpy as np
@ -33,7 +33,8 @@ __all__=[
'extend_docstring', 'extended_docstring',
'Bravais_to_Miller', 'Miller_to_Bravais',
'DREAM3D_base_group', 'DREAM3D_cell_data_group',
'dict_prune', 'dict_flatten'
'dict_prune', 'dict_flatten',
'tail_repack',
]
# https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py
@ -722,6 +723,36 @@ def dict_flatten(d: Dict) -> Dict:
def tail_repack(extended: Union[str, Sequence[str]],
existing: List[str] = []) -> List[str]:
"""
Repack tailing characters into single string if all are new.
Parameters
----------
extended : str or list of str
Extended string list with potentially autosplitted tailing string relative to `existing`.
existing : list of str
Base string list.
Returns
-------
repacked : list of str
Repacked version of `extended`.
Examples
--------
>>> tail_repack(['a','new','e','n','t','r','y'],['a','new'])
['a','new','entry']
>>> tail_repack(['a','new','shiny','e','n','t','r','y'],['a','new'])
['a','new','shiny','e','n','t','r','y']
"""
return [extended] if isinstance(extended,str) else existing + \
([''.join(extended[len(existing):])] if np.prod([len(i) for i in extended[len(existing):]]) == 1 else
list(extended[len(existing):]))
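tail_repack above exists to undo the character-wise extension that happens when a plain string is appended to a list with '+=' (as the VTK.comments setter permits); a small illustration:

    from damask import util

    comments = ['first']
    comments += 'second'                          # list += str splits into single characters
    print(util.tail_repack(comments, ['first']))  # ['first', 'second']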
####################################################################################################
# Classes
####################################################################################################

View File

@ -1,42 +1,42 @@
<?xml version="1.0"?>
<VTKFile type="RectilinearGrid" version="0.1" byte_order="LittleEndian" header_type="UInt32" compressor="vtkZLibDataCompressor">
<RectilinearGrid WholeExtent="0 5 0 6 0 7">
<Piece Extent="0 5 0 6 0 7">
<RectilinearGrid WholeExtent="0 3 0 4 0 5">
<Piece Extent="0 3 0 4 0 5">
<PointData>
<DataArray type="Float32" Name="node" NumberOfComponents="3" format="binary" RangeMin="0" RangeMax="1.268857765318962">
AQAAAACAAADADwAAFQMAAA==eF5Vl7GtFEEQBTeG8wgAhxxux8bEOIsYwDu/M0A6D0JofMAmjU0BF5+d7anXjzP+L6NV4l3pj8S29efL77/35ucO//nwS3zeiL99fTM2+3zPd/u6uTc/d3h67EY8PXB50jxpnjRPmifNk/Kcn4Gn+do18NiNeO3StvPfJk/ztUseuxGvXfI8Hg95mp87PD12I54euD5hu0IeuHbpRly74r9mb9+/7nQvru6T6b5uxHSfPH/PdniaqzseuxHTvT1pnjRPmifNk+ZJec7PsF3Ddg3bxY2Y7rZLnubqbrvkgemOZ7bD01zd8diNmO69K2xXyAPTvXeF7QrzzHa3vbtPpvtt7+7Xjbi73/b1/ex4muleHrsRd3c8aZ40T5onzZPmSXm2q512Dds1bBc34u6uXfI001275IG7e3mqXXma6V4euxF3d3aF7Qp54O7OrrBdYZ5t+/npo7oXV/fJdF83YrpPXt/Pjqe5uuOxGzHd25PmSfOkedI8aZ6U5/wM2zVs17Bd3Ijpbrvkaa7utksemO54Zjs8zdUdj92I6d67wnaFPDDde1fYrjDP9Vare7Heeft7f6n7ZHvn7e/9pe54YHvn1R0PXJ40T5onzZPmSfOkPFu91epuu4bt4kZs77z9vWuXPLC98+puu+RZb7W644HtnVd3PHDNCtsV8sD2zqt77wrzbNvn44e6F1f3yXRfN2K6T17fz46nubrjsRsx3duT5knzpHnSPGmelOf8DNs1bNewXdyI6W675Gmu7rZLHpjueGY7PM3VHY/diOneu8J2hTww3XtX2K4wz3yrD3Uv5p0/7J0/1H1yv/OHuuNp5p0/7J0/1B0PXJ40T5onzZPmSfOkPNv1VmvXsF3DdnEj7ndeu+Rp5p3XLnngfucPdcfTzDt/2Dt/qDseuGaF7Qp54H7n2RW2K8xT3xHdi5/67ui+bsR0nzx/zHZ4mqs7HrsR0709aZ40T5onzZPmSXnWb3YNPDDd8cB0t13yNFd32yUPTHc8sx2eZv0/btAdD0z33hW2K+SB6d67wnYV/wMyiXC6
<DataArray type="Float32" Name="node" NumberOfComponents="3" format="binary" RangeMin="0" RangeMax="31.016124838541646">
AQAAAACAAACgBQAA+QAAAA==eF5tkoENwjAMBDsCG/XZzON1LJSIf58Dlaoc4mr7k1wXn7rBan69hxZvrWE1L/9fre2b1bx9te9+yw+rea2cqeAX/IJf8OMyh5rZn+541ey8514MT83Oe+7XqKVm5019+AW/4Bd8zhRWM/d4r0c+zs65zrMcLt7k/frMz//Myev68At+wS/46cH9RW/eo13/OMNzPp7D9gVf8DVnZ5+fs4TLd9eHH1az89r3N+N+aNYy75X3hnvKfjd8wTdrzmRej+cbd1FzbnPmRc/Uh2/283z9sJqd93H9u/2wmp03vuALvuCr/fXbfljNzpv68MNqdt7n/QEgMrwB
<InformationKey name="L2_NORM_RANGE" location="vtkDataArray" length="2">
<Value index="0">
0
</Value>
<Value index="1">
1.2688577653
31.016124839
</Value>
</InformationKey>
</DataArray>
</PointData>
<CellData>
<DataArray type="Float32" Name="cell" NumberOfComponents="3" format="binary" RangeMin="0.10871961651677305" RangeMax="1.1607924233069467">
AQAAAACAAADYCQAA8QEAAA==eF5N0CGOFlEQAOHVK0nmBnAFgnn/PLsSsZd4CSQIPDfYZN2iUbMePI4DEMR/BbgDNJ2ve1yZSir18P3jeD6O8eruxfj99s0Ff356Kh63v4o/jNsdP/xzb24+Xbg4XBwuDheHe3//s1wcLg4Xh4vT3fZ2k9NNTjc53eRsnuXibJ7l4mye5T4fq1ycr1a5OF+tk3uMb++u9TnY52Cfg30O9pmLfeZin7nxjYt95mKf2932dpN9bjfZ526e5WKfu3mWi33uV6tc7HO/Wif3GO+vry8+B/sc7HOwz8E+c7HPXOwzN75xsc9c7HO7295uss/tJvvczbNc7HM3z3Kxz/1qlYt97lfr5B7/f/kc7HOwz8E+B/vMxT5zsc/c+MbFPnOxz+1ue7vJPreb7HM3z3Kxz908y8U+96tVLva5X62Te4y7xy/1OdjnYJ+DfQ72mYt95mKfufGNi33mYp/b3fZ2k31uN9nnbp7lYp+7eZaLfe5Xq1zsc79aJ/cYjy9/1Odgn4N9DvY52Gcu9pmLfebGNy72mYt9bnfb2032ud1kn7t5lot97uZZLva5X61ysc/9ap3cY1y//qnPwT4H+xzsc7DPXOwzF/vMjW9c7DMX+9zutreb7HO7yT538ywX+9zNs1zsc79a5WKf+1XyX6K10A4=
<DataArray type="Float32" Name="cell" NumberOfComponents="3" format="binary" RangeMin="0" RangeMax="18.788294228055936">
AQAAAACAAADQAgAAigAAAA==eF5tkQEOgDAIA32CP7L+bE83oMBVJVnWhGMt2bax1gEta3Uv7tb6n0mmtPyt/RymtUZXmQ/fe+Wwo/981pPnDt9iWmt0FWe403tn89NozlkfJyp9hTxCnodpjjtgPsr2Ob7vJiMw8OB/xF1Ma42uWsizkGchTzGtNToZgREYDZP+yFPMrS+Grlfp
<InformationKey name="L2_NORM_RANGE" location="vtkDataArray" length="2">
<Value index="0">
0.10871961652
0
</Value>
<Value index="1">
1.1607924233
18.788294228
</Value>
</InformationKey>
</DataArray>
</CellData>
<Coordinates>
<DataArray type="Float64" Name="x" format="binary" RangeMin="0" RangeMax="0.6">
AQAAAACAAAAwAAAAJwAAAA==eF5jYICAHXKtrwN37LOH0Ofsua4vLrDlug7l37M3BoPH9gCdQxK6
<DataArray type="Float64" Name="x" format="binary" RangeMin="0" RangeMax="9">
AQAAAACAAAAgAAAAFQAAAA==eF5jYEAGH+whtIADhFZyAAAYkwHi
</DataArray>
<DataArray type="Float64" Name="y" format="binary" RangeMin="0" RangeMax="1">
AQAAAACAAAA4AAAAIgAAAA==eF5jYICAUDA4ag+hr9pDRB9A+U/tV4HBK6j4B3sAk7wQqg==
<DataArray type="Float64" Name="y" format="binary" RangeMin="0" RangeMax="16">
AQAAAACAAAAoAAAAFwAAAA==eF5jYEAGH+whtIADhFaC0gYOAChDAlI=
</DataArray>
<DataArray type="Float64" Name="z" format="binary" RangeMin="0" RangeMax="0.5">
AQAAAACAAABAAAAALAAAAA==eF5jYICASSqeQLTJHkIfsr+9LReITkP5l+zB3NvXoOK37SG6HtgDANusGUo=
<DataArray type="Float64" Name="z" format="binary" RangeMin="0" RangeMax="25">
AQAAAACAAAAwAAAAGgAAAA==eF5jYEAGH+whtIADhFaC0gZQ2tIBADuFAss=
</DataArray>
</Coordinates>
</Piece>

View File

@ -42,6 +42,10 @@ class TestGrid:
print('patched datetime.datetime.now')
def test_show(sef,default,monkeypatch):
monkeypatch.delenv('DISPLAY',raising=False)
default.show()
def test_equal(self,default):
assert default == default
assert not default == 42

View File

@ -385,7 +385,7 @@ class TestResult:
result.export_VTK(output,parallel=False)
fname = fname.split('.')[0]+f'_inc{(inc if type(inc) == int else inc[0]):0>2}.vti'
v = VTK.load(tmp_path/fname)
v.set_comments('n/a')
v.comments = 'n/a'
v.save(tmp_path/fname,parallel=False)
with open(fname) as f:
cur = hashlib.md5(f.read().encode()).hexdigest()

View File

@ -1,6 +1,7 @@
import os
import filecmp
import time
import string
import pytest
import numpy as np
@ -8,7 +9,7 @@ import numpy.ma as ma
import vtk
from damask import VTK
from damask import grid_filters
from damask import Table
@pytest.fixture
def ref_path(ref_path_base):
@ -32,29 +33,44 @@ class TestVTK:
monkeypatch.delenv('DISPLAY',raising=False)
default.show()
def test_imageData(self,tmp_path):
cells = np.random.randint(5,10,3)
size = np.random.random(3) + 0.1
origin = np.random.random(3) - 0.5
v = VTK.from_image_data(cells,size,origin)
string = str(v)
string = v.as_ASCII()
v.save(tmp_path/'imageData',False)
vtr = VTK.load(tmp_path/'imageData.vti')
with open(tmp_path/'imageData.vtk','w') as f:
f.write(string)
vtk = VTK.load(tmp_path/'imageData.vtk','VTK_imageData')
assert (string == vtr.as_ASCII() == vtk.as_ASCII())
def test_rectilinearGrid(self,tmp_path):
cells = np.random.randint(5,10,3)*2
size = np.random.random(3) + 1.0
origin = np.random.random(3)
v = VTK.from_rectilinear_grid(cells,size,origin)
string = v.__repr__()
grid = np.sort(np.random.random((3,10)))
v = VTK.from_rectilinear_grid(grid)
string = str(v)
string = v.as_ASCII()
v.save(tmp_path/'rectilinearGrid',False)
vtr = VTK.load(tmp_path/'rectilinearGrid.vtr')
with open(tmp_path/'rectilinearGrid.vtk','w') as f:
f.write(string)
vtk = VTK.load(tmp_path/'rectilinearGrid.vtk','VTK_rectilinearGrid')
assert(string == vtr.__repr__() == vtk.__repr__())
assert (string == vtr.as_ASCII() == vtk.as_ASCII())
def test_polyData(self,tmp_path):
points = np.random.rand(100,3)
v = VTK.from_poly_data(points)
string = v.__repr__()
string = str(v)
string = v.as_ASCII()
v.save(tmp_path/'polyData',False)
vtp = VTK.load(tmp_path/'polyData.vtp')
with open(tmp_path/'polyData.vtk','w') as f:
f.write(string)
vtk = VTK.load(tmp_path/'polyData.vtk','polyData')
assert(string == vtp.__repr__() == vtk.__repr__())
assert(string == vtp.as_ASCII() == vtk.as_ASCII())
@pytest.mark.parametrize('cell_type,n',[
('VTK_hexahedron',8),
@ -67,13 +83,14 @@ class TestVTK:
nodes = np.random.rand(n,3)
connectivity = np.random.choice(np.arange(n),n,False).reshape(-1,n)
v = VTK.from_unstructured_grid(nodes,connectivity,cell_type)
string = v.__repr__()
string = str(v)
string = v.as_ASCII()
v.save(tmp_path/'unstructuredGrid',False)
vtu = VTK.load(tmp_path/'unstructuredGrid.vtu')
with open(tmp_path/'unstructuredGrid.vtk','w') as f:
f.write(string)
vtk = VTK.load(tmp_path/'unstructuredGrid.vtk','unstructuredgrid')
assert(string == vtu.__repr__() == vtk.__repr__())
assert(string == vtu.as_ASCII() == vtk.as_ASCII())
def test_parallel_out(self,tmp_path):
@ -97,7 +114,7 @@ class TestVTK:
fname_p = tmp_path/'plain.vtp'
v.save(fname_c,parallel=False,compress=False)
v.save(fname_p,parallel=False,compress=True)
assert(VTK.load(fname_c).__repr__() == VTK.load(fname_p).__repr__())
assert(VTK.load(fname_c).as_ASCII() == VTK.load(fname_p).as_ASCII())
@pytest.mark.parametrize('fname',['a','a.vtp','a.b','a.b.vtp'])
@ -148,49 +165,79 @@ class TestVTK:
@pytest.mark.parametrize('N_values',[5*6*7,6*7*8])
def test_add_get(self,default,data_type,shape,N_values):
data = np.squeeze(np.random.randint(0,100,(N_values,)+shape)).astype(data_type)
default.add(data,'data')
assert (np.squeeze(data.reshape(N_values,-1)) == default.get('data')).all()
new = default.add(data,'data')
assert (np.squeeze(data.reshape(N_values,-1)) == new.get('data')).all()
@pytest.mark.parametrize('shapes',[{'scalar':(1,),'vector':(3,),'tensor':(3,3)},
{'vector':(6,),'tensor':(3,3)},
{'tensor':(3,3),'scalar':(1,)}])
def test_add_table(self,default,shapes):
N = np.random.choice([default.N_points,default.N_cells])
d = dict()
for k,s in shapes.items():
d[k] = dict(shape = s,
data = np.random.random(N*np.prod(s)).reshape((N,-1)))
new = default.add(Table(np.column_stack([d[k]['data'] for k in shapes.keys()]),shapes))
for k,s in shapes.items():
assert np.allclose(np.squeeze(d[k]['data']),new.get(k),rtol=1e-7)
def test_add_masked(self,default):
data = np.random.rand(5*6*7,3)
masked = ma.MaskedArray(data,mask=data<.4,fill_value=42.)
default.add(masked,'D')
result_masked = str(default)
default.add(np.where(masked.mask,masked.fill_value,masked),'D')
assert result_masked == str(default)
mask_auto = default.add(masked,'D')
mask_manual = default.add(np.where(masked.mask,masked.fill_value,masked),'D')
assert mask_manual == mask_auto
@pytest.mark.parametrize('data_type,shape',[(float,(3,)),
(float,(3,3)),
(float,(1,)),
(int,(4,)),
(str,(1,))])
@pytest.mark.parametrize('N_values',[5*6*7,6*7*8])
def test_labels(self,default,data_type,shape,N_values):
data = np.squeeze(np.random.randint(0,100,(N_values,)+shape)).astype(data_type)
ALPHABET = np.array(list(string.ascii_lowercase + ' '))
label = ''.join(np.random.choice(ALPHABET, size=10))
new = default.add(data,label)
if N_values == default.N_points: assert label in new.labels['Point Data']
if N_values == default.N_cells: assert label in new.labels['Cell Data']
def test_comments(self,tmp_path,default):
default.add_comments(['this is a comment'])
default.comments += 'this is a comment'
default.save(tmp_path/'with_comments',parallel=False)
new = VTK.load(tmp_path/'with_comments.vti')
assert new.get_comments() == ['this is a comment']
assert new.comments == ['this is a comment']
@pytest.mark.xfail(int(vtk.vtkVersion.GetVTKVersion().split('.')[0])<8, reason='missing METADATA')
def test_compare_reference_polyData(self,update,ref_path,tmp_path):
points=np.dstack((np.linspace(0.,1.,10),np.linspace(0.,2.,10),np.linspace(-1.,1.,10))).squeeze()
polyData = VTK.from_poly_data(points)
polyData.add(points,'coordinates')
polyData = VTK.from_poly_data(points).add(points,'coordinates')
if update:
polyData.save(ref_path/'polyData')
else:
reference = VTK.load(ref_path/'polyData.vtp')
assert polyData.__repr__() == reference.__repr__() and \
assert polyData.as_ASCII() == reference.as_ASCII() and \
np.allclose(polyData.get('coordinates'),points)
@pytest.mark.xfail(int(vtk.vtkVersion.GetVTKVersion().split('.')[0])<8, reason='missing METADATA')
def test_compare_reference_rectilinearGrid(self,update,ref_path,tmp_path):
cells = np.array([5,6,7],int)
size = np.array([.6,1.,.5])
rectilinearGrid = VTK.from_rectilinear_grid(cells,size)
c = grid_filters.coordinates0_point(cells,size).reshape(-1,3,order='F')
n = grid_filters.coordinates0_node(cells,size).reshape(-1,3,order='F')
rectilinearGrid.add(np.ascontiguousarray(c),'cell')
rectilinearGrid.add(np.ascontiguousarray(n),'node')
grid = [np.arange(4)**2.,
np.arange(5)**2.,
np.arange(6)**2.] # ParaView renders tetrahedral meshing unless using float coordinates!
coords = np.stack(np.meshgrid(*grid,indexing='ij'),axis=-1)
c = coords[:-1,:-1,:-1,:].reshape(-1,3,order='F')
n = coords[:,:,:,:].reshape(-1,3,order='F')
rectilinearGrid = VTK.from_rectilinear_grid(grid) \
.add(np.ascontiguousarray(c),'cell') \
.add(np.ascontiguousarray(n),'node')
if update:
rectilinearGrid.save(ref_path/'rectilinearGrid')
else:
reference = VTK.load(ref_path/'rectilinearGrid.vtr')
assert rectilinearGrid.__repr__() == reference.__repr__() and \
assert rectilinearGrid.as_ASCII() == reference.as_ASCII() and \
np.allclose(rectilinearGrid.get('cell'),c)

View File

@ -302,21 +302,21 @@ program DAMASK_grid
!--------------------------------------------------------------------------------------------------
! doing initialization depending on active solvers
call spectral_Utilities_init
do field = 1, nActiveFields
do field = 2, nActiveFields
select case (ID(field))
case(FIELD_MECH_ID)
call mechanical_init
case(FIELD_THERMAL_ID)
case (FIELD_THERMAL_ID)
initial_conditions => config_load%get('initial_conditions',defaultVal=emptyDict)
thermal => initial_conditions%get('thermal',defaultVal=emptyDict)
call grid_thermal_spectral_init(thermal%get_asFloat('T'))
case(FIELD_DAMAGE_ID)
call grid_damage_spectral_init
case (FIELD_DAMAGE_ID)
call grid_damage_spectral_init()
end select
enddo
end do
call mechanical_init()
!--------------------------------------------------------------------------------------------------
! write header of output file

View File

@ -21,26 +21,21 @@ module homogenization
implicit none
private
enum, bind(c); enumerator :: &
THERMAL_ISOTHERMAL_ID, &
THERMAL_CONDUCTION_ID, &
DAMAGE_NONE_ID, &
DAMAGE_NONLOCAL_ID, &
HOMOGENIZATION_UNDEFINED_ID, &
HOMOGENIZATION_NONE_ID, &
HOMOGENIZATION_ISOSTRAIN_ID, &
HOMOGENIZATION_RGC_ID
THERMAL_UNDEFINED_ID, &
THERMAL_PASS_ID, &
THERMAL_ISOTEMPERATURE_ID
end enum
integer(kind(THERMAL_UNDEFINED_ID)), dimension(:), allocatable :: &
thermal_type !< type of each homogenization
type(tState), allocatable, dimension(:), public :: &
homogState, &
damageState_h
integer(kind(THERMAL_isothermal_ID)), dimension(:), allocatable :: &
thermal_type !< thermal transport model
integer(kind(DAMAGE_none_ID)), dimension(:), allocatable :: &
damage_type !< nonlocal damage model
logical, allocatable, dimension(:) :: &
thermal_active, &
damage_active
logical, public :: &
terminallyIll = .false. !< at least one material point is terminally ill
@ -182,9 +177,7 @@ module homogenization
homogenization_forward, &
homogenization_results, &
homogenization_restartRead, &
homogenization_restartWrite, &
THERMAL_CONDUCTION_ID, &
DAMAGE_NONLOCAL_ID
homogenization_restartWrite
contains
@ -292,7 +285,6 @@ subroutine homogenization_thermal_response(Delta_t,cell_start,cell_end)
do ce = cell_start, cell_end
if (terminallyIll) continue
ho = material_homogenizationID(ce)
call thermal_partition(ce)
do co = 1, homogenization_Nconstituents(ho)
if (.not. phase_thermal_constitutive(Delta_t,material_phaseID(co,ce),material_phaseEntry(co,ce))) then
if (.not. terminallyIll) print*, ' Cell ', ce, ' terminally ill'
@ -352,19 +344,17 @@ subroutine homogenization_results
call mechanical_results(group_base,ho)
select case(damage_type(ho))
case(DAMAGE_NONLOCAL_ID)
group = trim(group_base)//'/damage'
call results_closeGroup(results_addGroup(group))
call damage_results(ho,group)
end select
if (damage_active(ho)) then
group = trim(group_base)//'/damage'
call results_closeGroup(results_addGroup(group))
call damage_results(ho,group)
end if
select case(thermal_type(ho))
case(THERMAL_CONDUCTION_ID)
group = trim(group_base)//'/thermal'
call results_closeGroup(results_addGroup(group))
call thermal_results(ho,group)
end select
if (thermal_active(ho)) then
group = trim(group_base)//'/thermal'
call results_closeGroup(results_addGroup(group))
call thermal_results(ho,group)
end if
enddo
@ -458,8 +448,9 @@ subroutine parseHomogenization
material_homogenization => config_material%get('homogenization')
allocate(thermal_type(size(material_name_homogenization)),source=THERMAL_isothermal_ID)
allocate(damage_type (size(material_name_homogenization)),source=DAMAGE_none_ID)
allocate(thermal_type(size(material_name_homogenization)),source=THERMAL_UNDEFINED_ID)
allocate(thermal_active(size(material_name_homogenization)),source=.false.)
allocate(damage_active(size(material_name_homogenization)),source=.false.)
do h=1, size(material_name_homogenization)
homog => material_homogenization%get(h)
@ -467,8 +458,12 @@ subroutine parseHomogenization
if (homog%contains('thermal')) then
homogThermal => homog%get('thermal')
select case (homogThermal%get_asString('type'))
case('pass','isotemperature')
thermal_type(h) = THERMAL_conduction_ID
case('pass')
thermal_type(h) = THERMAL_PASS_ID
thermal_active(h) = .true.
case('isotemperature')
thermal_type(h) = THERMAL_ISOTEMPERATURE_ID
thermal_active(h) = .true.
case default
call IO_error(500,ext_msg=homogThermal%get_asString('type'))
end select
@ -478,7 +473,7 @@ subroutine parseHomogenization
homogDamage => homog%get('damage')
select case (homogDamage%get_asString('type'))
case('pass')
damage_type(h) = DAMAGE_nonlocal_ID
damage_active(h) = .true.
case default
call IO_error(500,ext_msg=homogDamage%get_asString('type'))
end select

View File

@ -7,9 +7,20 @@ submodule(homogenization:damage) damage_pass
contains
module subroutine pass_init()
integer :: &
ho
print'(/,1x,a)', '<<<+- homogenization:damage:pass init -+>>>'
do ho = 1, size(damage_active)
if (.not. damage_active(ho)) cycle
if (homogenization_Nconstituents(ho) /= 1) &
call IO_error(211,ext_msg='(pass) with N_constituents !=1')
end do
end subroutine pass_init
end submodule damage_pass

View File

@ -56,8 +56,14 @@ submodule(homogenization) mechanical
end type tOutput
type(tOutput), allocatable, dimension(:) :: output_mechanical
integer(kind(HOMOGENIZATION_undefined_ID)), dimension(:), allocatable :: &
homogenization_type !< type of each homogenization
enum, bind(c); enumerator :: &
MECHANICAL_UNDEFINED_ID, &
MECHANICAL_PASS_ID, &
MECHANICAL_ISOSTRAIN_ID, &
MECHANICAL_RGC_ID
end enum
integer(kind(MECHANICAL_UNDEFINED_ID)), dimension(:), allocatable :: &
mechanical_type !< type of each homogenization
contains
@ -75,9 +81,9 @@ module subroutine mechanical_init()
homogenization_F = homogenization_F0
allocate(homogenization_P(3,3,discretization_Ncells),source=0.0_pReal)
if (any(homogenization_type == HOMOGENIZATION_NONE_ID)) call pass_init()
if (any(homogenization_type == HOMOGENIZATION_ISOSTRAIN_ID)) call isostrain_init()
if (any(homogenization_type == HOMOGENIZATION_RGC_ID)) call RGC_init()
if (any(mechanical_type == MECHANICAL_PASS_ID)) call pass_init()
if (any(mechanical_type == MECHANICAL_ISOSTRAIN_ID)) call isostrain_init()
if (any(mechanical_type == MECHANICAL_RGC_ID)) call RGC_init()
end subroutine mechanical_init
@ -96,15 +102,15 @@ module subroutine mechanical_partition(subF,ce)
real(pReal), dimension (3,3,homogenization_Nconstituents(material_homogenizationID(ce))) :: Fs
chosenHomogenization: select case(homogenization_type(material_homogenizationID(ce)))
chosenHomogenization: select case(mechanical_type(material_homogenizationID(ce)))
case (HOMOGENIZATION_NONE_ID) chosenHomogenization
case (MECHANICAL_PASS_ID) chosenHomogenization
Fs(1:3,1:3,1) = subF
case (HOMOGENIZATION_ISOSTRAIN_ID) chosenHomogenization
case (MECHANICAL_ISOSTRAIN_ID) chosenHomogenization
call isostrain_partitionDeformation(Fs,subF)
case (HOMOGENIZATION_RGC_ID) chosenHomogenization
case (MECHANICAL_RGC_ID) chosenHomogenization
call RGC_partitionDeformation(Fs,subF,ce)
end select chosenHomogenization
@ -160,7 +166,7 @@ module function mechanical_updateState(subdt,subF,ce) result(doneAndHappy)
real(pReal) :: Ps(3,3,homogenization_Nconstituents(material_homogenizationID(ce)))
if (homogenization_type(material_homogenizationID(ce)) == HOMOGENIZATION_RGC_ID) then
if (mechanical_type(material_homogenizationID(ce)) == MECHANICAL_RGC_ID) then
do co = 1, homogenization_Nconstituents(material_homogenizationID(ce))
dPdFs(:,:,:,:,co) = phase_mechanical_dPdF(subdt,co,ce)
Fs(:,:,co) = phase_F(co,ce)
@ -189,9 +195,9 @@ module subroutine mechanical_results(group_base,ho)
group = trim(group_base)//'/mechanical'
call results_closeGroup(results_addGroup(group))
select case(homogenization_type(ho))
select case(mechanical_type(ho))
case(HOMOGENIZATION_rgc_ID)
case(MECHANICAL_RGC_ID)
call RGC_results(ho,group)
end select
@ -204,7 +210,7 @@ module subroutine mechanical_results(group_base,ho)
'deformation gradient','1')
case('P')
call results_writeDataset(reshape(homogenization_P,[3,3,discretization_nCells]),group,'P', &
'deformation gradient','1')
'first Piola-Kirchhoff stress','Pa')
end select
end do
@ -226,7 +232,7 @@ subroutine parseMechanical()
material_homogenization => config_material%get('homogenization')
allocate(homogenization_type(size(material_name_homogenization)), source=HOMOGENIZATION_undefined_ID)
allocate(mechanical_type(size(material_name_homogenization)), source=MECHANICAL_UNDEFINED_ID)
allocate(output_mechanical(size(material_name_homogenization)))
do ho=1, size(material_name_homogenization)
@ -239,11 +245,11 @@ subroutine parseMechanical()
#endif
select case (mechanical%get_asString('type'))
case('pass')
homogenization_type(ho) = HOMOGENIZATION_NONE_ID
mechanical_type(ho) = MECHANICAL_PASS_ID
case('isostrain')
homogenization_type(ho) = HOMOGENIZATION_ISOSTRAIN_ID
mechanical_type(ho) = MECHANICAL_ISOSTRAIN_ID
case('RGC')
homogenization_type(ho) = HOMOGENIZATION_RGC_ID
mechanical_type(ho) = MECHANICAL_RGC_ID
case default
call IO_error(500,ext_msg=mechanical%get_asString('type'))
end select

View File

@ -88,7 +88,7 @@ module subroutine RGC_init()
print'(/,1x,a)', '<<<+- homogenization:mechanical:RGC init -+>>>'
print'(/,a,i0)', ' # homogenizations: ',count(homogenization_type == HOMOGENIZATION_RGC_ID)
print'(/,a,i0)', ' # homogenizations: ',count(mechanical_type == MECHANICAL_RGC_ID)
flush(IO_STDOUT)
print'(/,1x,a)', 'D.D. Tjahjanto et al., International Journal of Material Forming 2(1):939–942, 2009'
@ -137,8 +137,8 @@ module subroutine RGC_init()
if (num%volDiscrPow <= 0.0_pReal) call IO_error(301,ext_msg='volDiscrPw_RGC')
do ho = 1, size(homogenization_type)
if (homogenization_type(ho) /= HOMOGENIZATION_RGC_ID) cycle
do ho = 1, size(mechanical_type)
if (mechanical_type(ho) /= MECHANICAL_RGC_ID) cycle
homog => material_homogenization%get(ho)
homogMech => homog%get('mechanical')
associate(prm => param(ho), &

View File

@ -19,11 +19,11 @@ module subroutine isostrain_init
print'(/,1x,a)', '<<<+- homogenization:mechanical:isostrain init -+>>>'
print'(/,a,i0)', ' # homogenizations: ',count(homogenization_type == HOMOGENIZATION_ISOSTRAIN_ID)
print'(/,a,i0)', ' # homogenizations: ',count(mechanical_type == MECHANICAL_ISOSTRAIN_ID)
flush(IO_STDOUT)
do ho = 1, size(homogenization_type)
if (homogenization_type(ho) /= HOMOGENIZATION_ISOSTRAIN_ID) cycle
do ho = 1, size(mechanical_type)
if (mechanical_type(ho) /= MECHANICAL_ISOSTRAIN_ID) cycle
Nmembers = count(material_homogenizationID == ho)
homogState(ho)%sizeState = 0

View File

@ -11,7 +11,7 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields, reads information from material configuration file
!--------------------------------------------------------------------------------------------------
module subroutine pass_init
module subroutine pass_init()
integer :: &
ho, &
@ -19,14 +19,14 @@ module subroutine pass_init
print'(/,1x,a)', '<<<+- homogenization:mechanical:pass init -+>>>'
print'(/,a,i0)', ' # homogenizations: ',count(homogenization_type == HOMOGENIZATION_NONE_ID)
print'(/,a,i0)', ' # homogenizations: ',count(mechanical_type == MECHANICAL_PASS_ID)
flush(IO_STDOUT)
do ho = 1, size(homogenization_type)
if (homogenization_type(ho) /= HOMOGENIZATION_NONE_ID) cycle
do ho = 1, size(mechanical_type)
if (mechanical_type(ho) /= MECHANICAL_PASS_ID) cycle
if (homogenization_Nconstituents(ho) /= 1) &
call IO_error(211,ext_msg='N_constituents (pass)')
call IO_error(211,ext_msg='(pass) with N_constituents !=1')
Nmembers = count(material_homogenizationID == ho)
homogState(ho)%sizeState = 0

View File

@ -69,7 +69,7 @@ module subroutine thermal_init()
case ('pass')
call pass_init()
case ('isothermal')
case ('isotemperature')
call isotemperature_init()
end select
@ -172,7 +172,7 @@ module subroutine homogenization_thermal_setField(T,dot_T, ce)
current(material_homogenizationID(ce))%T(material_homogenizationEntry(ce)) = T
current(material_homogenizationID(ce))%dot_T(material_homogenizationEntry(ce)) = dot_T
call thermal_partition(ce)
end subroutine homogenization_thermal_setField

View File

@ -8,10 +8,19 @@ contains
module subroutine pass_init()
integer :: &
ho
print'(/,1x,a)', '<<<+- homogenization:thermal:pass init -+>>>'
if (homogenization_Nconstituents(1) /= 1) &
call IO_error(211,ext_msg='N_constituents (pass)')
do ho = 1, size(thermal_type)
if (thermal_type(ho) /= THERMAL_PASS_ID) cycle
if (homogenization_Nconstituents(ho) /= 1) &
call IO_error(211,ext_msg='(pass) with N_constituents !=1')
end do
end subroutine pass_init

View File

@ -388,7 +388,7 @@ end function thermal_active
!----------------------------------------------------------------------------------------------
!< @brief writes damage sources results to HDF5 output file
!< @brief writes thermal sources results to HDF5 output file
!----------------------------------------------------------------------------------------------
module subroutine thermal_results(group,ph)
@ -398,18 +398,16 @@ module subroutine thermal_results(group,ph)
integer :: ou
if (allocated(param(ph)%output)) then
call results_closeGroup(results_addGroup(group//'thermal'))
else
return
endif
if (.not. allocated(param(ph)%output)) return
call results_closeGroup(results_addGroup(group//'thermal'))
do ou = 1, size(param(ph)%output)
select case(trim(param(ph)%output(ou)))
case ('T')
call results_writeDataset(current(ph)%T,group//'thermal','T', 'temperature','T')
call results_writeDataset(current(ph)%T,group//'thermal','T', 'temperature','K')
end select