Merge branch 'PythonImprovements' into no-crystallite

Martin Diehl 2019-11-24 09:13:16 +01:00
commit b937ed594b
11 changed files with 468 additions and 448 deletions

View File

@@ -65,7 +65,7 @@ for filename in options.filenames:
    x = results.get_dataset_location(label)
    if len(x) == 0:
        continue
-   array = results.read_dataset(x,0)
+   array = results.read_dataset(x,0,plain=True)
    d = int(np.product(np.shape(array)[1:]))
    data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)
@@ -80,7 +80,7 @@ for filename in options.filenames:
    x = results.get_dataset_location(label)
    if len(x) == 0:
        continue
-   array = results.read_dataset(x,0)
+   array = results.read_dataset(x,0,plain=True)
    d = int(np.product(np.shape(array)[1:]))
    data = np.concatenate((data,np.reshape(array,[np.product(results.grid),d])),1)

View File

@@ -5,9 +5,6 @@ import sys
from io import StringIO
from optparse import OptionParser
-from scipy import ndimage
-import numpy as np
import damask
@@ -15,11 +12,6 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])
-def mostFrequent(arr):
-    unique, inverse = np.unique(arr, return_inverse=True)
-    return unique[np.argmax(np.bincount(inverse))]
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
@@ -46,9 +38,8 @@ for name in filenames:
    geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-   damask.util.croak(geom.update(ndimage.filters.generic_filter(
-                                 geom.microstructure,mostFrequent,
-                                 size=(options.stencil,)*3).astype(geom.microstructure.dtype)))
+   damask.util.croak(geom.clean(options.stencil))
    geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
    if name is None:

View File

@@ -5,8 +5,6 @@ import sys
from io import StringIO
from optparse import OptionParser
-import numpy as np
import damask
@@ -38,16 +36,6 @@ parser.set_defaults(reflect = False)
(options, filenames) = parser.parse_args()
-if options.directions is None:
-    parser.error('no direction given.')
-if not set(options.directions).issubset(validDirections):
-    invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)]
-    parser.error('invalid directions {}. '.format(*invalidDirections))
-limits = [None,None] if options.reflect else [-2,0]
if filenames == []: filenames = [None]
for name in filenames:
@@ -55,15 +43,7 @@ for name in filenames:
    geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
-   microstructure = geom.get_microstructure()
-   if 'z' in options.directions:
-       microstructure = np.concatenate([microstructure,microstructure[:,:,limits[0]:limits[1]:-1]],2)
-   if 'y' in options.directions:
-       microstructure = np.concatenate([microstructure,microstructure[:,limits[0]:limits[1]:-1,:]],1)
-   if 'x' in options.directions:
-       microstructure = np.concatenate([microstructure,microstructure[limits[0]:limits[1]:-1,:,:]],0)
-   damask.util.croak(geom.update(microstructure,rescale=True))
+   damask.util.croak(geom.mirror(options.directions,options.reflect))
    geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
    if name is None:
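The slicing removed here is what the new Geom.mirror method (added to the Geom class further down in this commit) now does internally. As a reminder of what the two limits settings mean, a minimal 1-D sketch with invented values:

import numpy as np

a = np.array([1,2,3,4])
np.concatenate([a, a[::-1]])       # reflect=True:  1 2 3 4 4 3 2 1  (outermost layers repeated)
np.concatenate([a, a[-2:0:-1]])    # reflect=False: 1 2 3 4 3 2      (outermost layers dropped, so the mirrored block tiles periodically)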

View File

@@ -2,7 +2,7 @@
import os
with open(os.path.join(os.path.dirname(__file__),'VERSION')) as f:
    version = f.readline()[1:-1]
name = 'damask'

View File

@@ -2,7 +2,7 @@ import os
import sys
import re
import shlex
-from collections import Iterable
+from collections.abc import Iterable
import numpy as np
@@ -15,7 +15,7 @@ except NameError:
# ------------------------------------------------------------------
class ASCIItable():
-    """Read and write to ASCII tables"""
+    """Read and write to ASCII tables."""
    tmpext = '_tmp'                       # filename extension for in-place access
@@ -27,6 +27,7 @@ class ASCIItable():
                 labeled  = True,         # assume table has labels
                 readonly = False,        # no reading from file
                ):
+       """Read and write to ASCII tables."""
        self.__IO__ = {'output': [],
                       'buffered': buffered,
                       'labeled':  labeled,      # header contains labels
@@ -72,7 +73,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def _removeCRLF(self,
                    string):
-       """Delete any carriage return and line feed from string"""
+       """Delete any carriage return and line feed from string."""
        try:
            return string.replace('\n','').replace('\r','')
        except AttributeError:
@@ -82,7 +83,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def _quote(self,
               what):
-       """Quote empty or white space-containing output"""
+       """Quote empty or white space-containing output."""
        return '{quote}{content}{quote}'.format(
                quote   = ('"' if str(what)=='' or re.search(r"\s",str(what)) else ''),
                content = what)
@@ -103,7 +104,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def output_write(self,
                     what):
-       """Aggregate a single row (string) or list of (possibly containing further lists of) rows into output"""
+       """Aggregate a single row (string) or list of (possibly containing further lists of) rows into output."""
        if isinstance(what, (str, unicode)):
            self.__IO__['output'] += [what]
        else:
@@ -143,7 +144,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def head_read(self):
        """
-       Get column labels
+       Get column labels.
        by either reading the first row or,
        if keyword "head[*]" is present, the last line of the header
@@ -154,7 +155,7 @@ class ASCIItable():
            pass
        firstline = self.__IO__['in'].readline().strip()
-       m = re.search('(\d+)\s+head', firstline.lower())      # search for "head" keyword
+       m = re.search(r'(\d+)\s+head', firstline.lower())     # search for "head" keyword
        if m:                                                 # proper ASCIItable format
@@ -194,7 +195,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def head_write(self,
                   header = True):
-       """Write current header information (info + labels)"""
+       """Write current header information (info + labels)."""
        head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
        head.append(self.info)
        if self.__IO__['labeled']:
@@ -205,7 +206,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def head_getGeom(self):
-       """Interpret geom header"""
+       """Interpret geom header."""
        identifiers = {
                'grid':    ['a','b','c'],
                'size':    ['x','y','z'],
@@ -247,7 +248,7 @@ class ASCIItable():
    def labels_append(self,
                      what,
                      reset = False):
-       """Add item or list to existing set of labels (and switch on labeling)"""
+       """Add item or list to existing set of labels (and switch on labeling)."""
        if isinstance(what, (str, unicode)):
            self.tags += [self._removeCRLF(what)]
        else:
@@ -261,7 +262,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def labels_clear(self):
-       """Delete existing labels and switch to no labeling"""
+       """Delete existing labels and switch to no labeling."""
        self.tags = []
        self.__IO__['labeled'] = False
@@ -392,7 +393,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def info_append(self,
                    what):
-       """Add item or list to existing set of infos"""
+       """Add item or list to existing set of infos."""
        if isinstance(what, (str, unicode)):
            self.info += [self._removeCRLF(what)]
        else:
@@ -403,7 +404,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def info_clear(self):
-       """Delete any info block"""
+       """Delete any info block."""
        self.info = []
# ------------------------------------------------------------------
@@ -416,7 +417,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def data_skipLines(self,
                       count):
-       """Wind forward by count number of lines"""
+       """Wind forward by count number of lines."""
        for i in range(count):
            alive = self.data_read()
@@ -426,7 +427,7 @@ class ASCIItable():
    def data_read(self,
                  advance = True,
                  respectLabels = True):
-       """Read next line (possibly buffered) and parse it into data array"""
+       """Read next line (possibly buffered) and parse it into data array."""
        self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
               else self.__IO__['in'].readline().strip()      # take buffered content or get next data row from file
@@ -446,9 +447,11 @@ class ASCIItable():
# ------------------------------------------------------------------
    def data_readArray(self,
                       labels = []):
-       """Read whole data of all (given) labels as numpy array"""
-       try:    self.data_rewind()                            # try to wind back to start of data
-       except: pass                                          # assume/hope we are at data start already...
+       """Read whole data of all (given) labels as numpy array."""
+       try:
+           self.data_rewind()                                # try to wind back to start of data
+       except IOError:
+           pass                                              # assume/hope we are at data start already...
        if labels is None or labels == []:
            use = None                                        # use all columns (and keep labels intact)
@@ -480,7 +483,7 @@ class ASCIItable():
# ------------------------------------------------------------------
    def data_write(self,
                   delimiter = '\t'):
-       """Write current data array and report alive output back"""
+       """Write current data array and report alive output back."""
        if len(self.data) == 0: return True
        if isinstance(self.data[0],list):
@@ -492,16 +495,16 @@ class ASCIItable():
    def data_writeArray(self,
                        fmt = None,
                        delimiter = '\t'):
-       """Write whole numpy array data"""
+       """Write whole numpy array data."""
        for row in self.data:
            try:
                output = [fmt % value for value in row] if fmt else list(map(repr,row))
-           except:
+           except Exception:
                output = [fmt % row] if fmt else [repr(row)]
            try:
                self.__IO__['out'].write(delimiter.join(output) + '\n')
-           except:
+           except Exception:
                pass
# ------------------------------------------------------------------
@@ -545,7 +548,7 @@ class ASCIItable():
                           grid,
                           type = 'i',
                           strict = False):
-       """Read microstructure data (from .geom format)"""
+       """Read microstructure data (from .geom format)."""
        def datatype(item):
            return int(item) if type.lower() == 'i' else float(item)
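The switch to collections.abc is more than a style fix: importing the ABCs straight from collections has been deprecated since Python 3.3 and was removed in Python 3.10, so the old import eventually fails outright. A short, illustrative sketch of the intended use:

from collections.abc import Iterable

isinstance([1, 2, 3], Iterable)   # True
isinstance('abc', Iterable)       # also True, which is why callers that accept lists of names
                                  # usually add an explicit isinstance(x, str) guard as well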

View File

@@ -369,7 +369,7 @@ class DADF5():
        return f[self.get_dataset_location('orientation')[0]].attrs['Lattice'].astype('str')   # np.bytes_ to string
-   def read_dataset(self,path,c):
+   def read_dataset(self,path,c=0,plain=False):
        """
        Dataset for all points/cells.
@@ -402,7 +402,7 @@ class DADF5():
                a=a.reshape([a.shape[0],1])
            dataset[p,:] = a[u,:]
-       return dataset
+       return dataset if not plain else dataset.view(('float64',len(dataset.dtype.names)))
    def cell_coordinates(self):
@@ -620,7 +620,7 @@ class DADF5():
            raise ValueError
        return {
-              'data':  mechanics.deviator(x['data']),
+              'data':  mechanics.deviatoric_part(x['data']),
               'label': 's_{}'.format(x['label']),
               'meta':  {
                 'Unit':        x['meta']['Unit'],
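With the new signature, read_dataset can hand back either the structured array it assembles or, with plain=True, an unstructured float64 view that reshapes cleanly, which is what the adapted post-processing scripts above rely on. A rough usage sketch; the result file name and dataset label below are invented:

import damask

results = damask.DADF5('geom_load.hdf5')              # hypothetical result file
loc     = results.get_dataset_location('sigma')       # hypothetical dataset label
if loc:
    data  = results.read_dataset(loc, 0)              # structured array with named components
    plain = results.read_dataset(loc, 0, plain=True)  # plain float64 view, ready for np.reshape / np.concatenate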

View File

@@ -2,6 +2,7 @@ import os
from io import StringIO
import numpy as np
+from scipy import ndimage
import vtk
from vtk.util import numpy_support
@@ -10,380 +11,429 @@ from . import version
class Geom():
    """Geometry definition for grid solvers."""

    def __init__(self,microstructure,size,origin=[0.0,0.0,0.0],homogenization=1,comments=[]):
        """
        New geometry definition from array of microstructures and size.

        Parameters
        ----------
        microstructure : numpy.ndarray
            microstructure array (3D)
        size : list or numpy.ndarray
            physical size of the microstructure in meter.
        origin : list or numpy.ndarray, optional
            physical origin of the microstructure in meter.
        homogenization : integer, optional
            homogenization index.
        comments : list of str, optional
            comments lines.

        """
-       self.__transforms__ = \
        self.set_microstructure(microstructure)
        self.set_size(size)
        self.set_origin(origin)
        self.set_homogenization(homogenization)
        self.set_comments(comments)

    def __repr__(self):
        """Basic information on geometry definition."""
        return util.srepr([
            'grid a b c: {}'.format(' x '.join(map(str,self.get_grid ()))),
            'size x y z: {}'.format(' x '.join(map(str,self.get_size ()))),
            'origin x y z: {}'.format(' '.join(map(str,self.get_origin()))),
            'homogenization: {}'.format(self.get_homogenization()),
            '# microstructures: {}'.format(len(np.unique(self.microstructure))),
            'max microstructure: {}'.format(np.nanmax(self.microstructure)),
        ])
    def update(self,microstructure=None,size=None,origin=None,rescale=False):
        """
        Updates microstructure and size.

        Parameters
        ----------
        microstructure : numpy.ndarray, optional
            microstructure array (3D).
        size : list or numpy.ndarray, optional
            physical size of the microstructure in meter.
        origin : list or numpy.ndarray, optional
            physical origin of the microstructure in meter.
        rescale : bool, optional
            ignore size parameter and rescale according to change of grid points.

        """
        grid_old   = self.get_grid()
        size_old   = self.get_size()
        origin_old = self.get_origin()
        unique_old = len(np.unique(self.microstructure))
        max_old    = np.nanmax(self.microstructure)

        if size is not None and rescale:
            raise ValueError('Either set size explicitly or rescale automatically')

        self.set_microstructure(microstructure)
        self.set_origin(origin)

        if size is not None:
            self.set_size(size)
        elif rescale:
            self.set_size(self.get_grid()/grid_old*self.size)

        message = ['grid a b c: {}'.format(' x '.join(map(str,grid_old)))]
        if np.any(grid_old != self.get_grid()):
            message[-1] = util.delete(message[-1])
            message.append(util.emph('grid a b c: {}'.format(' x '.join(map(str,self.get_grid())))))

        message.append('size x y z: {}'.format(' x '.join(map(str,size_old))))
        if np.any(size_old != self.get_size()):
            message[-1] = util.delete(message[-1])
            message.append(util.emph('size x y z: {}'.format(' x '.join(map(str,self.get_size())))))

        message.append('origin x y z: {}'.format(' '.join(map(str,origin_old))))
        if np.any(origin_old != self.get_origin()):
            message[-1] = util.delete(message[-1])
            message.append(util.emph('origin x y z: {}'.format(' '.join(map(str,self.get_origin())))))

        message.append('homogenization: {}'.format(self.get_homogenization()))

        message.append('# microstructures: {}'.format(unique_old))
        if unique_old != len(np.unique(self.microstructure)):
            message[-1] = util.delete(message[-1])
            message.append(util.emph('# microstructures: {}'.format(len(np.unique(self.microstructure)))))

        message.append('max microstructure: {}'.format(max_old))
        if max_old != np.nanmax(self.microstructure):
            message[-1] = util.delete(message[-1])
            message.append(util.emph('max microstructure: {}'.format(np.nanmax(self.microstructure))))

        return util.return_message(message)
    def set_comments(self,comments):
        """
        Replaces all existing comments.

        Parameters
        ----------
        comments : list of str
            new comments.

        """
        self.comments = []
        self.add_comments(comments)

    def add_comments(self,comments):
        """
        Appends comments to existing comments.

        Parameters
        ----------
        comments : list of str
            new comments.

        """
        self.comments += [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
    def set_microstructure(self,microstructure):
        """
        Replaces the existing microstructure representation.

        Parameters
        ----------
        microstructure : numpy.ndarray
            microstructure array (3D).

        """
        if microstructure is not None:
            if len(microstructure.shape) != 3:
                raise ValueError('Invalid microstructure shape {}'.format(*microstructure.shape))
            elif microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']:
                raise TypeError('Invalid data type {} for microstructure'.format(microstructure.dtype))
            else:
                self.microstructure = np.copy(microstructure)

    def set_size(self,size):
        """
        Replaces the existing size information.

        Parameters
        ----------
        size : list or numpy.ndarray
            physical size of the microstructure in meter.

        """
        if size is None:
            grid = np.asarray(self.microstructure.shape)
            self.size = grid/np.max(grid)
        else:
            if len(size) != 3 or any(np.array(size)<=0):
                raise ValueError('Invalid size {}'.format(*size))
            else:
                self.size = np.array(size)
    def set_origin(self,origin):
        """
        Replaces the existing origin information.

        Parameters
        ----------
        origin : list or numpy.ndarray
            physical origin of the microstructure in meter

        """
        if origin is not None:
            if len(origin) != 3:
                raise ValueError('Invalid origin {}'.format(*origin))
            else:
                self.origin = np.array(origin)

    def set_homogenization(self,homogenization):
        """
        Replaces the existing homogenization index.

        Parameters
        ----------
        homogenization : integer
            homogenization index

        """
        if homogenization is not None:
            if not isinstance(homogenization,int) or homogenization < 1:
                raise TypeError('Invalid homogenization {}'.format(homogenization))
            else:
                self.homogenization = homogenization
    def get_microstructure(self):
        """Return the microstructure representation."""
        return np.copy(self.microstructure)

    def get_size(self):
        """Return the physical size in meter."""
        return np.copy(self.size)

    def get_origin(self):
        """Return the origin in meter."""
        return np.copy(self.origin)

    def get_grid(self):
        """Return the grid discretization."""
        return np.array(self.microstructure.shape)

    def get_homogenization(self):
        """Return the homogenization index."""
        return self.homogenization

    def get_comments(self):
        """Return the comments."""
        return self.comments[:]

    def get_header(self):
        """Return the full header (grid, size, origin, homogenization, comments)."""
        header = ['{} header'.format(len(self.comments)+4)] + self.comments
        header.append('grid a {} b {} c {}'.format(*self.get_grid()))
        header.append('size x {} y {} z {}'.format(*self.get_size()))
        header.append('origin x {} y {} z {}'.format(*self.get_origin()))
        header.append('homogenization {}'.format(self.get_homogenization()))
        return header
    @classmethod
    def from_file(cls,fname):
        """
        Reads a geom file.

        Parameters
        ----------
        fname : str or file handle
            geometry file to read.

        """
        with (open(fname) if isinstance(fname,str) else fname) as f:
            f.seek(0)
            header_length,keyword = f.readline().split()[:2]
            header_length = int(header_length)
            content = f.readlines()

        if not keyword.startswith('head') or header_length < 3:
            raise TypeError('Header length information missing or invalid')

        comments = []
        for i,line in enumerate(content[:header_length]):
            items = line.lower().strip().split()
            key = items[0] if len(items) > 0 else ''
            if   key == 'grid':
                grid   = np.array([  int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
            elif key == 'size':
                size   = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
            elif key == 'origin':
                origin = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
            elif key == 'homogenization':
                homogenization = int(items[1])
            else:
                comments.append(line.strip())

        microstructure = np.empty(grid.prod())                    # initialize as flat array
        i = 0
        for line in content[header_length:]:
            items = line.split()
            if len(items) == 3:
                if   items[1].lower() == 'of':
                    items = np.ones(int(items[0]))*float(items[2])
                elif items[1].lower() == 'to':
                    items = np.linspace(int(items[0]),int(items[2]),
                                        abs(int(items[2])-int(items[0]))+1,dtype=float)
                else: items = list(map(float,items))
            else:     items = list(map(float,items))
            microstructure[i:i+len(items)] = items
            i += len(items)

        if i != grid.prod():
            raise TypeError('Invalid file: expected {} entries,found {}'.format(grid.prod(),i))

        microstructure = microstructure.reshape(grid,order='F')
        if not np.any(np.mod(microstructure.flatten(),1) != 0.0):  # no float present
            microstructure = microstructure.astype('int')

        return cls(microstructure.reshape(grid),size,origin,homogenization,comments)
    def to_file(self,fname):
        """
        Writes a geom file.

        Parameters
        ----------
        fname : str or file handle
            geometry file to write.

        """
        header = self.get_header()
        grid   = self.get_grid()
-       format_string = '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure))))) if self.microstructure.dtype == int \
-                       else '%g'
+       format_string = '%g' if self.microstructure in np.sctypes['float'] else \
+                       '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure)))))
        np.savetxt(fname,
                   self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
                   header='\n'.join(header), fmt=format_string, comments='')
    def to_vtk(self,fname=None):
        """
        Generates vtk file.

        Parameters
        ----------
        fname : str, optional
            vtk file to write. If no file is given, a string is returned.

        """
        grid = self.get_grid() + np.ones(3,dtype=int)
        size = self.get_size()
        origin = self.get_origin()

        coords = [
            np.linspace(0,size[0],grid[0]) + origin[0],
            np.linspace(0,size[1],grid[1]) + origin[1],
            np.linspace(0,size[2],grid[2]) + origin[2]
        ]

        rGrid = vtk.vtkRectilinearGrid()
        coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]

        rGrid.SetDimensions(*grid)
        for d,coord in enumerate(coords):
            for c in coord:
                coordArray[d].InsertNextValue(c)

        rGrid.SetXCoordinates(coordArray[0])
        rGrid.SetYCoordinates(coordArray[1])
        rGrid.SetZCoordinates(coordArray[2])

        ms = numpy_support.numpy_to_vtk(num_array=self.microstructure.flatten(order='F'),
                                        array_type=vtk.VTK_INT if self.microstructure.dtype == int else vtk.VTK_FLOAT)
        ms.SetName('microstructure')
        rGrid.GetCellData().AddArray(ms)

        if fname is None:
            writer = vtk.vtkDataSetWriter()
            writer.SetHeader('damask.Geom '+version)
            writer.WriteToOutputStringOn()
        else:
            writer = vtk.vtkXMLRectilinearGridWriter()
            writer.SetCompressorTypeToZLib()
            writer.SetDataModeToBinary()

            ext = os.path.splitext(fname)[1]
            if ext == '':
                name = fname + '.' + writer.GetDefaultFileExtension()
            elif ext == writer.GetDefaultFileExtension():
                name = fname
            else:
                raise ValueError("unknown extension {}".format(ext))
            writer.SetFileName(name)

        writer.SetInputData(rGrid)
        writer.Write()

        if fname is None: return writer.GetOutputString()

    def show(self):
        """Show raw content (as in file)."""
        f=StringIO()
        self.to_file(f)
        f.seek(0)
        return ''.join(f.readlines())

+   def mirror(self,directions,reflect=False):
+       """
+       Mirror microstructure along given directions.
+
+       Parameters
+       ----------
+       directions : iterable containing str
+           direction(s) along which the microstructure is mirrored. Valid entries are 'x', 'y', 'z'.
+       reflect : bool, optional
+           reflect (include) outermost layers.
+
+       """
+       valid = {'x','y','z'}
+       if not all(isinstance(d, str) for d in directions):
+           raise TypeError('Directions are not of type str.')
+       elif not set(directions).issubset(valid):
+           raise ValueError('Invalid direction specified {}'.format(*set(directions).difference(valid)))
+
+       limits = [None,None] if reflect else [-2,0]
+       ms = self.get_microstructure()
+
+       if 'z' in directions:
+           ms = np.concatenate([ms,ms[:,:,limits[0]:limits[1]:-1]],2)
+       if 'y' in directions:
+           ms = np.concatenate([ms,ms[:,limits[0]:limits[1]:-1,:]],1)
+       if 'x' in directions:
+           ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)
+
+       return self.update(ms,rescale=True)
+       #self.add_comments('tbd')
+
+   def clean(self,stencil=3):
+       """
+       Smooth microstructure by selecting most frequent index within given stencil at each location.
+
+       Parameters
+       ----------
+       stencil : int, optional
+           size of smoothing stencil.
+
+       """
+       def mostFrequent(arr):
+           unique, inverse = np.unique(arr, return_inverse=True)
+           return unique[np.argmax(np.bincount(inverse))]
+
+       return self.update(ndimage.filters.generic_filter(self.microstructure,
+                                                         mostFrequent,
+                                                         size=(stencil,)*3).astype(self.microstructure.dtype))
+       #self.add_comments('tbd')
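Geom.mirror and Geom.clean absorb what the pre-processing scripts above previously did by hand; both modify the geometry in place and return a report that the scripts pass to damask.util.croak. A rough usage sketch (geometry file names invented):

import damask

geom = damask.Geom.from_file('20grains_16x16x16.geom')   # hypothetical file
print(geom.mirror(['x','z'], reflect=False))             # mirror along x and z, dropping the outermost layers
print(geom.clean(stencil=3))                             # most-frequent-index filter over a 3x3x3 neighbourhood
geom.to_file('20grains_mirrored_clean.geom')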

View File

@@ -48,10 +48,10 @@ def strain_tensor(F,t,m):
    if   m > 0.0:
        eps = 1.0/(2.0*abs(m)) * (+ np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
-                                 - np.broadcast_to(np.ones(3),[F_.shape[0],3]))
+                                 - np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
    elif m < 0.0:
        eps = 1.0/(2.0*abs(m)) * (- np.matmul(n,np.einsum('ij,ikj->ijk',w**m,n))
-                                 + np.broadcast_to(np.ones(3),[F_.shape[0],3]))
+                                 + np.broadcast_to(np.eye(3),[F_.shape[0],3,3]))
    else:
        eps = np.matmul(n,np.einsum('ij,ikj->ijk',0.5*np.log(w),n))
@@ -190,7 +190,7 @@ def rotational_part(x):
        Tensor of which the rotational part is computed.
    """
-   return __polar_decomposition(x,'R')
+   return __polar_decomposition(x,'R')[0]
def left_stretch(x):
@@ -203,7 +203,7 @@ def left_stretch(x):
        Tensor of which the left stretch is computed.
    """
-   return __polar_decomposition(x,'V')
+   return __polar_decomposition(x,'V')[0]
def right_stretch(x):
@@ -216,7 +216,7 @@ def right_stretch(x):
        Tensor of which the right stretch is computed.
    """
-   return __polar_decomposition(x,'U')
+   return __polar_decomposition(x,'U')[0]
def __polar_decomposition(x,requested):
@@ -227,7 +227,7 @@ def __polar_decomposition(x,requested):
    ----------
    x : numpy.array of shape (:,3,3) or (3,3)
        Tensor of which the singular values are computed.
-   requested : list of str
+   requested : iterable of str
        Requested outputs: R for the rotation tensor,
        V for left stretch tensor and U for right stretch tensor.
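The np.ones(3) to np.eye(3) correction matters because the Seth-Hill strain family subtracts the identity tensor (for instance eps = (U^(2m) - I)/(2m) for the material strain measures), so the term broadcast against the eigenprojection sum has to be a stack of 3x3 identity matrices of shape (N,3,3), not a length-3 vector of ones. A minimal check of the broadcasting alone, independent of DAMASK:

import numpy as np

N = 5
identity = np.broadcast_to(np.eye(3),  (N,3,3))   # what is subtracted now: N copies of the 3x3 identity
previous = np.broadcast_to(np.ones(3), (N,3))     # what was subtracted before: wrong shape and wrong values
print(identity.shape, previous.shape)             # (5, 3, 3) (5, 3)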

View File

@@ -79,9 +79,9 @@ class Marc(Solver):
        exitnumber = -1
        fid_out = open(outFile,'r')
        for line in fid_out:
-           if (string.find(line,'tress iteration') is not -1):
+           if (string.find(line,'tress iteration') != -1):
                print(line)
-           elif (string.find(line,'Exit number') is not -1):
+           elif (string.find(line,'Exit number') != -1):
                substr = line[string.find(line,'Exit number'):len(line)]
                exitnumber = int(substr[12:16])
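Replacing "is not -1" with "!= -1" is a real fix, not style: "is" tests object identity, and whether an integer literal such as -1 is interned is an implementation detail, so the old test is not guaranteed to behave like a value comparison (CPython 3.8 and later also emits a SyntaxWarning for identity checks against literals). A made-up Marc output line, using the plain str method for brevity:

line = '  Exit number  3004'
line.find('Exit number') != -1       # True: substring found
line.find('tress iteration') != -1   # False: find() returned -1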

View File

@@ -1,10 +1,8 @@
-# -*- coding: UTF-8 no BOM -*-
import os,sys,shutil
import logging,logging.config
import damask
import numpy as np
-from collections import Iterable
+from collections.abc import Iterable
from optparse import OptionParser

class Test():
@@ -17,7 +15,7 @@ class Test():
    variants = []

    def __init__(self, **kwargs):
+       """New test."""
        defaults = {'description': '',
                    'keep':        False,
                    'accept':      False,
@@ -120,22 +118,22 @@ class Test():
        """Delete directory tree containing current results."""
        try:
            shutil.rmtree(self.dirCurrent())
-       except:
+       except FileNotFoundError:
            logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
        try:
            os.mkdir(self.dirCurrent())
            return True
-       except:
+       except FileExistsError:
            logging.critical('creation of directory "{}" failed.'.format(self.dirCurrent()))
            return False

    def prepareAll(self):
-       """Do all necessary preparations for the whole test"""
+       """Do all necessary preparations for the whole test."""
        return True

    def prepare(self,variant):
-       """Do all necessary preparations for the run of each test variant"""
+       """Do all necessary preparations for the run of each test variant."""
        return True
@@ -207,9 +205,9 @@ class Test():
        for source,target in zip(list(map(mapA,A)),list(map(mapB,B))):
            try:
                shutil.copy2(source,target)
-           except:
+           except FileNotFoundError:
                logging.critical('error copying {} to {}'.format(source,target))
-               raise
+               raise FileNotFoundError

    def copy_Reference2Current(self,sourcefiles=[],targetfiles=[]):
@@ -218,9 +216,9 @@ class Test():
        for i,f in enumerate(sourcefiles):
            try:
                shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
-           except:
+           except FileNotFoundError:
                logging.critical('Reference2Current: Unable to copy file "{}"'.format(f))
-               raise
+               raise FileNotFoundError

    def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):
@@ -230,10 +228,10 @@ class Test():
        for i,f in enumerate(sourcefiles):
            try:
                shutil.copy2(os.path.join(source,f),self.fileInCurrent(targetfiles[i]))
-           except:
+           except FileNotFoundError:
                logging.error(os.path.join(source,f))
                logging.critical('Base2Current: Unable to copy file "{}"'.format(f))
-               raise
+               raise FileNotFoundError

    def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]):
@@ -242,9 +240,9 @@ class Test():
        for i,f in enumerate(sourcefiles):
            try:
                shutil.copy2(self.fileInCurrent(f),self.fileInReference(targetfiles[i]))
-           except:
+           except FileNotFoundError:
                logging.critical('Current2Reference: Unable to copy file "{}"'.format(f))
-               raise
+               raise FileNotFoundError

    def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]):
@@ -253,9 +251,9 @@ class Test():
        for i,f in enumerate(sourcefiles):
            try:
                shutil.copy2(self.fileInProof(f),self.fileInCurrent(targetfiles[i]))
-           except:
+           except FileNotFoundError:
                logging.critical('Proof2Current: Unable to copy file "{}"'.format(f))
-               raise
+               raise FileNotFoundError

    def copy_Current2Current(self,sourcefiles=[],targetfiles=[]):
@@ -263,9 +261,10 @@ class Test():
        for i,f in enumerate(sourcefiles):
            try:
                shutil.copy2(self.fileInReference(f),self.fileInCurrent(targetfiles[i]))
-           except:
+           except FileNotFoundError:
                logging.critical('Current2Current: Unable to copy file "{}"'.format(f))
-               raise
+               raise FileNotFoundError

    def execute_inCurrentDir(self,cmd,streamIn=None,env=None):
@@ -439,7 +438,7 @@ class Test():
                        stdTol    = 1.0e-6,
                        preFilter = 1.0e-9):
        """
-       Calculate statistics of tables
+       Calculate statistics of tables.
        threshold can be used to ignore small values (a negative number disables this feature)
        """
@@ -492,7 +491,7 @@ class Test():
                           rtol = 1e-5,
                           atol = 1e-8,
                           debug = False):
-       """Compare multiple tables with np.allclose"""
+       """Compare multiple tables with np.allclose."""
        if not (isinstance(files, Iterable) and not isinstance(files, str)):   # check whether list of files is requested
            files = [str(files)]
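Narrowing the bare except clauses above to FileNotFoundError / FileExistsError keeps unrelated failures (for example permission errors or typos in the code) from being caught and mislabeled; only the anticipated case is handled and everything else propagates. The pattern in isolation, with made-up file names:

import shutil

try:
    shutil.copy2('reference.txt', 'current.txt')   # hypothetical source/target
except FileNotFoundError:
    print('missing input file')                    # handle the one expected failure mode
    raise                                          # and still let the caller see it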

View File

@@ -1,3 +0,0 @@
-"""Test functionality."""
-from .test import Test # noqa