commit 52ba6e19a0
Merge branch 'development' of magit1.mpie.de:damask/DAMASK into development
@@ -5,10 +5,10 @@ set LOCATION=%~dp0
 set DAMASK_ROOT=%LOCATION%\DAMASK
 set DAMASK_NUM_THREADS=2
 chcp 1252
 Title Düsseldorf Advanced Materials Simulation Kit - DAMASK, MPIE Düsseldorf
 echo.
 echo Düsseldorf Advanced Materials Simulation Kit - DAMASK
 echo Max-Planck-Institut für Eisenforschung, Düsseldorf
 echo http://damask.mpie.de
 echo.
 echo Preparing environment ...
@@ -53,7 +53,8 @@ if [ ! -z "$PS1" ]; then
 [[ "x$SOLVER" != "x" ]] && echo "Spectral Solver $SOLVER"
 [[ "x$PROCESSING" != "x" ]] && echo "Post Processing $PROCESSING"
 echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
-[[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR"
+[[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR" && \
+  [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
 [[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
 echo "MSC.Marc/Mentat $MSC_ROOT"
 echo
@@ -51,7 +51,8 @@ if [ ! -z "$PS1" ]; then
 [[ "x$SOLVER" != "x" ]] && echo "Spectral Solver $SOLVER"
 [[ "x$PROCESSING" != "x" ]] && echo "Post Processing $PROCESSING"
 echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
-[[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR"
+[[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR" && \
+  [[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
 [[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
 echo "MSC.Marc/Mentat $MSC_ROOT"
 echo
LICENSE (2 changed lines)
@@ -1,4 +1,4 @@
-Copyright 2011-15 Max-Planck-Institut für Eisenforschung GmbH
+Copyright 2011-16 Max-Planck-Institut für Eisenforschung GmbH
 
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
README (4 changed lines)
@@ -2,9 +2,9 @@ visit damask.mpie.de for installation and usage instructions
 
 CONTACT INFORMATION
 
 Max-Planck-Institut für Eisenforschung GmbH
 Max-Planck-Str. 1
 40237 Düsseldorf
 Germany
 
 Email: DAMASK@mpie.de
@@ -329,7 +329,7 @@ program DAMASK_spectral
 errorID = 838_pInt                                                     ! no rotation is allowed by stress BC
 write(6,'(2x,a)') 'stress / GPa:'
 do i = 1_pInt, 3_pInt; do j = 1_pInt, 3_pInt
-  if(loadCases(currentLoadCase)%deformation%maskLogical(i,j)) then
+  if(loadCases(currentLoadCase)%P%maskLogical(i,j)) then
     write(6,'(2x,f12.7)',advance='no') loadCases(currentLoadCase)%P%values(i,j)*1e-9_pReal
   else
     write(6,'(2x,12a)',advance='no') ' * '
@@ -1237,8 +1237,7 @@ character(len=300) pure function IO_extractValue(pair,key)
 IO_extractValue = ''
 
 myChunk = scan(pair,SEP)
-if (myChunk > 0 .and. pair(:myChunk-1) == key(:myChunk-1)) &
-  IO_extractValue = pair(myChunk+1:)                                   ! extract value if key matches
+if (myChunk > 0 .and. pair(:myChunk-1) == key) IO_extractValue = pair(myChunk+1:) ! extract value if key matches
 
 end function IO_extractValue
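The change above tightens the key comparison: previously the stored key was compared against the requested key truncated to the same length, so a requested key that merely shares a prefix with the stored one could match. A minimal Python sketch of the difference (illustrative only; the original routine is Fortran):

def extract_value(pair, key, sep='='):
    """Return the value of a 'key=value' pair if the key matches exactly, else ''."""
    head, _, tail = pair.partition(sep)
    # old behaviour: head == key[:len(head)]  -> 'grid=16' would also satisfy key='gridpoints'
    # new behaviour: the full requested key has to match
    return tail if head == key else ''

assert extract_value('grid=16', 'grid') == '16'
assert extract_value('grid=16', 'gridpoints') == ''   # a shared prefix is no longer enough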
@@ -184,14 +184,13 @@ subroutine plastic_isotropic_init(fileUnit)
 if (IO_getTag(line,'[',']') /= '') then                                ! next section
   phase = phase + 1_pInt                                               ! advance section counter
   if (phase_plasticity(phase) == PLASTICITY_ISOTROPIC_ID) then
-    instance = phase_plasticityInstance(phase)
-
+    instance = phase_plasticityInstance(phase)                         ! count instances of my constitutive law
+    allocate(param(instance)%outputID(phase_Noutput(phase)))           ! allocate space for IDs of every requested output
   endif
   cycle                                                                ! skip to next line
 endif
 if (phase > 0_pInt) then; if (phase_plasticity(phase) == PLASTICITY_ISOTROPIC_ID) then ! one of my phases. Do not short-circuit here (.and. between if-statements), it's not safe in Fortran
   instance = phase_plasticityInstance(phase)                           ! which instance of my plasticity is present phase
-  allocate(param(instance)%outputID(phase_Noutput(phase)))             ! allocate space for IDs of every requested output
   chunkPos = IO_stringPos(line)
   tag = IO_lc(IO_stringValue(line,chunkPos,1_pInt))                    ! extract key
   extmsg = trim(tag)//' ('//PLASTICITY_ISOTROPIC_label//')'            ! prepare error message identifier
@@ -54,12 +54,12 @@ discrepancyPower_RGC 5.0
 fixed_seed            0        # put any number larger than zero, integer, if you want to have a pseudo random distribution
 
 ## spectral parameters ##
-err_div_tolAbs        1.0e-3   # relative tolerance for fulfillment of stress equilibrium
-err_div_tolRel        5.0e-4   # absolute tolerance for fulfillment of stress equilibrium
-err_curl_tolAbs       1.0e-12  # relative tolerance for fulfillment of strain compatibility
-err_curl_tolRel       5.0e-4   # absolute tolerance for fulfillment of strain compatibility
-err_stress_tolrel     0.01     # relative tolerance for fulfillment of stress BC
-err_stress_tolabs     1.0e3    # absolute tolerance for fulfillment of stress BC
+err_div_tolAbs        1.0e-3   # absolute tolerance for fulfillment of stress equilibrium
+err_div_tolRel        5.0e-4   # relative tolerance for fulfillment of stress equilibrium
+err_curl_tolAbs       1.0e-12  # absolute tolerance for fulfillment of strain compatibility
+err_curl_tolRel       5.0e-4   # relative tolerance for fulfillment of strain compatibility
+err_stress_tolAbs     1.0e3    # absolute tolerance for fulfillment of stress BC
+err_stress_tolRel     0.01     # relative tolerance for fulfillment of stress BC
 fftw_timelimit       -1.0      # timelimit of plan creation for FFTW, see manual on www.fftw.org, Default -1.0: disable timelimit
 rotation_tol          1.0e-12  # tolerance of rotation specified in loadcase, Default 1.0e-12: first guess
 fftw_plan_mode        FFTW_PATIENT # reads the planing-rigor flag, see manual on www.fftw.org, Default FFTW_PATIENT: use patient planner flag
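The corrected comments above pair each *_tolAbs key with an absolute criterion and each *_tolRel key with a relative one. As a rough illustration of how such a pair is typically combined into one convergence test (the function and variable names are mine, not the spectral solver's internals):

def converged(residual, reference, tol_abs, tol_rel):
    """Accept the residual if it meets either the absolute or the relative criterion.
    Purely illustrative -- not the DAMASK solver's actual convergence logic."""
    return residual <= max(tol_abs, tol_rel * abs(reference))

# e.g. a divergence residual checked against the defaults listed above
print(converged(residual=2.0e-4, reference=1.0, tol_abs=1.0e-3, tol_rel=5.0e-4))  # True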
@@ -1,27 +1,27 @@
 # -*- coding: UTF-8 no BOM -*-
 
 # $Id$
 """Main aggregator"""
 import sys, os
 
 with open(os.path.join(os.path.dirname(__file__),'../../VERSION')) as f:
   version = f.readline()[:-1]
 
-from .environment import Environment      # only one class
-from .asciitable  import ASCIItable       # only one class
-from .config      import Material         # will be extended to debug and numerics
-from .colormaps   import Colormap, Color
-from .orientation import Quaternion, Rodrigues, Symmetry, Orientation
+from .environment import Environment      # noqa
+from .asciitable  import ASCIItable       # noqa
+from .config      import Material         # noqa
+from .colormaps   import Colormap, Color  # noqa
+from .orientation import Quaternion, Rodrigues, Symmetry, Orientation # noqa
 # try:
 #   from .corientation import Quaternion, Rodrigues, Symmetry, Orientation
 #   print "Import Cython version of Orientation module"
 # except:
 #   from .orientation import Quaternion, Rodrigues, Symmetry, Orientation
 #from .block import Block                 # only one class
-from .result      import Result           # only one class
-from .geometry    import Geometry         # one class with subclasses
-from .solver      import Solver           # one class with subclasses
-from .test        import Test
-from .util        import extendableOption
+from .result      import Result           # noqa
+from .geometry    import Geometry         # noqa
+from .solver      import Solver           # noqa
+from .test        import Test             # noqa
+from .util        import extendableOption # noqa
 
 try:
   from . import core
@@ -4,12 +4,9 @@
 import os,sys
 import numpy as np
-import util
 
 class ASCIItable():
-  '''
-     There should be a doc string here :)
-  '''
+  """Read and write to ASCII tables"""
 
   __slots__ = ['__IO__',
                'info',
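The __slots__ declaration that closes this hunk (and the one in the Color class further down) fixes the set of instance attributes and drops the per-instance __dict__. A small, self-contained sketch of the effect (the Point class is illustrative, not DAMASK code):

class Point:
    __slots__ = ['x', 'y']          # only these attributes may be set; no per-instance __dict__
    def __init__(self, x, y):
        self.x, self.y = x, y

p = Point(1, 2)
try:
    p.z = 3                          # rejected because 'z' is not listed in __slots__
except AttributeError as err:
    print(err)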
@@ -58,8 +55,8 @@ class ASCIItable():
     self.data = []
     self.line = ''
 
-    if self.__IO__['in'] == None \
-    or self.__IO__['out'] == None: raise IOError                       # complain if any required file access not possible
+    if self.__IO__['in'] is None \
+    or self.__IO__['out'] is None: raise IOError                       # complain if any required file access not possible
 
 # ------------------------------------------------------------------
   def _transliterateToFloat(self,
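The `== None` to `is None` substitutions here (and in the hunks that follow) are more than style: `is None` is an identity test that cannot be overridden, whereas `== None` is routed through `__eq__` and misbehaves for objects such as numpy arrays. A small illustrative sketch (not DAMASK code):

import numpy as np

a = np.arange(3)
print(a == None)   # element-wise comparison: [False False False]
print(a is None)   # plain identity test: False
# In a condition, 'if a == None:' raises "truth value of an array is ambiguous",
# while 'if a is None:' always works.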
@ -86,9 +83,7 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def output_write(self,
|
||||
what):
|
||||
'''
|
||||
aggregate a single row (string) or list of (possibly containing further lists of) rows into output
|
||||
'''
|
||||
"""aggregate a single row (string) or list of (possibly containing further lists of) rows into output"""
|
||||
if not isinstance(what, (str, unicode)):
|
||||
try:
|
||||
for item in what: self.output_write(item)
|
||||
|
@ -104,7 +99,7 @@ class ASCIItable():
|
|||
clear = True):
|
||||
try:
|
||||
self.__IO__['output'] == [] or self.__IO__['out'].write('\n'.join(self.__IO__['output']) + '\n')
|
||||
except IOError as e:
|
||||
except IOError:
|
||||
return False
|
||||
if clear: self.output_clear()
|
||||
return True
|
||||
|
@ -127,11 +122,12 @@ class ASCIItable():
|
|||
|
||||
# ------------------------------------------------------------------
|
||||
def head_read(self):
|
||||
'''
|
||||
"""
|
||||
get column labels by either reading
|
||||
|
||||
the first row or, if keyword "head[*]" is present,
|
||||
the last line of the header
|
||||
'''
|
||||
"""
|
||||
import re
|
||||
|
||||
try:
|
||||
|
@ -180,10 +176,7 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def head_write(self,
|
||||
header = True):
|
||||
'''
|
||||
write current header information (info + labels)
|
||||
'''
|
||||
|
||||
"""write current header information (info + labels)"""
|
||||
head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
|
||||
head.append(self.info)
|
||||
if self.__IO__['labeled']: head.append('\t'.join(self.labels))
|
||||
|
@ -192,9 +185,7 @@ class ASCIItable():
|
|||
|
||||
# ------------------------------------------------------------------
|
||||
def head_getGeom(self):
|
||||
'''
|
||||
interpret geom header
|
||||
'''
|
||||
"""interpret geom header"""
|
||||
identifiers = {
|
||||
'grid': ['a','b','c'],
|
||||
'size': ['x','y','z'],
|
||||
|
@ -234,9 +225,7 @@ class ASCIItable():
|
|||
|
||||
# ------------------------------------------------------------------
|
||||
def head_putGeom(self,info):
|
||||
'''
|
||||
translate geometry description to header
|
||||
'''
|
||||
"""translate geometry description to header"""
|
||||
self.info_append([
|
||||
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
|
||||
"size\tx {}\ty {}\tz {}".format(*info['size']),
|
||||
|
@ -249,9 +238,7 @@ class ASCIItable():
|
|||
def labels_append(self,
|
||||
what,
|
||||
reset = False):
|
||||
'''
|
||||
add item or list to existing set of labels (and switch on labeling)
|
||||
'''
|
||||
"""add item or list to existing set of labels (and switch on labeling)"""
|
||||
if not isinstance(what, (str, unicode)):
|
||||
try:
|
||||
for item in what: self.labels_append(item)
|
||||
|
@ -265,26 +252,25 @@ class ASCIItable():
|
|||
|
||||
# ------------------------------------------------------------------
|
||||
def labels_clear(self):
|
||||
'''
|
||||
delete existing labels and switch to no labeling
|
||||
'''
|
||||
"""delete existing labels and switch to no labeling"""
|
||||
self.labels = []
|
||||
self.__IO__['labeled'] = False
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
def label_index(self,
|
||||
labels):
|
||||
'''
|
||||
"""
|
||||
tell index of column label(s).
|
||||
|
||||
return numpy array if asked for list of labels.
|
||||
transparently deals with label positions implicitly given as numbers or their headings given as strings.
|
||||
'''
|
||||
"""
|
||||
from collections import Iterable
|
||||
|
||||
if isinstance(labels, Iterable) and not isinstance(labels, str): # check whether list of labels is requested
|
||||
idx = []
|
||||
for label in labels:
|
||||
if label != None:
|
||||
if label is not None:
|
||||
try:
|
||||
idx.append(int(label)) # column given as integer number?
|
||||
except ValueError:
|
||||
|
@ -305,25 +291,25 @@ class ASCIItable():
|
|||
try:
|
||||
idx = self.labels.index('1_'+labels) # locate '1_'+string in label list
|
||||
except ValueError:
|
||||
idx = None if labels == None else -1
|
||||
idx = None if labels is None else -1
|
||||
|
||||
return np.array(idx) if isinstance(idx,list) else idx
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
def label_dimension(self,
|
||||
labels):
|
||||
'''
|
||||
"""
|
||||
tell dimension (length) of column label(s).
|
||||
|
||||
return numpy array if asked for list of labels.
|
||||
transparently deals with label positions implicitly given as numbers or their headings given as strings.
|
||||
'''
|
||||
|
||||
"""
|
||||
from collections import Iterable
|
||||
|
||||
if isinstance(labels, Iterable) and not isinstance(labels, str): # check whether list of labels is requested
|
||||
dim = []
|
||||
for label in labels:
|
||||
if label != None:
|
||||
if label is not None:
|
||||
myDim = -1
|
||||
try: # column given as number?
|
||||
idx = int(label)
|
||||
|
@ -364,12 +350,12 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def label_indexrange(self,
|
||||
labels):
|
||||
'''
|
||||
"""
|
||||
tell index range for given label(s).
|
||||
|
||||
return numpy array if asked for list of labels.
|
||||
transparently deals with label positions implicitly given as numbers or their headings given as strings.
|
||||
'''
|
||||
|
||||
"""
|
||||
from collections import Iterable
|
||||
|
||||
start = self.label_index(labels)
|
||||
|
@ -381,9 +367,7 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def info_append(self,
|
||||
what):
|
||||
'''
|
||||
add item or list to existing set of infos
|
||||
'''
|
||||
"""add item or list to existing set of infos"""
|
||||
if not isinstance(what, (str, unicode)):
|
||||
try:
|
||||
for item in what: self.info_append(item)
|
||||
|
@ -394,9 +378,7 @@ class ASCIItable():
|
|||
|
||||
# ------------------------------------------------------------------
|
||||
def info_clear(self):
|
||||
'''
|
||||
delete any info block
|
||||
'''
|
||||
"""delete any info block"""
|
||||
self.info = []
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
|
@ -409,9 +391,7 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def data_skipLines(self,
|
||||
count):
|
||||
'''
|
||||
wind forward by count number of lines
|
||||
'''
|
||||
"""wind forward by count number of lines"""
|
||||
for i in xrange(count):
|
||||
alive = self.data_read()
|
||||
|
||||
|
@ -421,9 +401,7 @@ class ASCIItable():
|
|||
def data_read(self,
|
||||
advance = True,
|
||||
respectLabels = True):
|
||||
'''
|
||||
read next line (possibly buffered) and parse it into data array
|
||||
'''
|
||||
"""read next line (possibly buffered) and parse it into data array"""
|
||||
self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
|
||||
else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file
|
||||
|
||||
|
@ -434,7 +412,7 @@ class ASCIItable():
|
|||
|
||||
if self.__IO__['labeled'] and respectLabels: # if table has labels
|
||||
items = self.line.split()[:len(self.__IO__['labels'])] # use up to label count (from original file info)
|
||||
self.data = items if len(items) == len(self.__IO__['labels']) else [] # take entries if correct number, i.e. not too few compared to label count
|
||||
self.data = items if len(items) == len(self.__IO__['labels']) else [] # take entries if label count matches
|
||||
else:
|
||||
self.data = self.line.split() # otherwise take all
|
||||
|
||||
|
@ -443,9 +421,7 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def data_readArray(self,
|
||||
labels = []):
|
||||
'''
|
||||
read whole data of all (given) labels as numpy array
|
||||
'''
|
||||
"""read whole data of all (given) labels as numpy array"""
|
||||
from collections import Iterable
|
||||
|
||||
try:
|
||||
|
@ -453,7 +429,7 @@ class ASCIItable():
|
|||
except:
|
||||
pass # assume/hope we are at data start already...
|
||||
|
||||
if labels == None or labels == []:
|
||||
if labels is None or labels == []:
|
||||
use = None # use all columns (and keep labels intact)
|
||||
labels_missing = []
|
||||
else:
|
||||
|
@ -467,9 +443,10 @@ class ASCIItable():
|
|||
|
||||
columns = []
|
||||
for i,(c,d) in enumerate(zip(indices[present],dimensions[present])): # for all valid labels ...
|
||||
# ... transparently add all components unless column referenced by number or with explicit dimension
|
||||
columns += range(c,c + \
|
||||
(d if str(c) != str(labels[present[i]]) else \
|
||||
1)) # ... transparently add all components unless column referenced by number or with explicit dimension
|
||||
1))
|
||||
use = np.array(columns)
|
||||
|
||||
self.labels = list(np.array(self.labels)[use]) # update labels with valid subset
|
||||
|
@ -481,9 +458,7 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def data_write(self,
|
||||
delimiter = '\t'):
|
||||
'''
|
||||
write current data array and report alive output back
|
||||
'''
|
||||
"""write current data array and report alive output back"""
|
||||
if len(self.data) == 0: return True
|
||||
|
||||
if isinstance(self.data[0],list):
|
||||
|
@ -495,9 +470,7 @@ class ASCIItable():
|
|||
def data_writeArray(self,
|
||||
fmt = None,
|
||||
delimiter = '\t'):
|
||||
'''
|
||||
write whole numpy array data
|
||||
'''
|
||||
"""write whole numpy array data"""
|
||||
for row in self.data:
|
||||
try:
|
||||
output = [fmt % value for value in row] if fmt else map(repr,row)
|
||||
|
@ -520,9 +493,7 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def data_set(self,
|
||||
what, where):
|
||||
'''
|
||||
update data entry in column "where". grows data array if needed.
|
||||
'''
|
||||
"""update data entry in column "where". grows data array if needed."""
|
||||
idx = -1
|
||||
try:
|
||||
idx = self.label_index(where)
|
||||
|
@ -547,10 +518,7 @@ class ASCIItable():
|
|||
# ------------------------------------------------------------------
|
||||
def microstructure_read(self,
|
||||
grid):
|
||||
'''
|
||||
read microstructure data (from .geom format)
|
||||
'''
|
||||
|
||||
"""read microstructure data (from .geom format)"""
|
||||
N = grid.prod() # expected number of microstructure indices in data
|
||||
microstructure = np.zeros(N,'i') # initialize as flat array
|
||||
|
||||
|
|
|
@ -5,11 +5,12 @@ import math,numpy as np
|
|||
### --- COLOR CLASS --------------------------------------------------
|
||||
|
||||
class Color():
|
||||
'''
|
||||
Conversion of colors between different color-spaces. Colors should be given in the form
|
||||
"""Conversion of colors between different color-spaces.
|
||||
|
||||
Colors should be given in the form
|
||||
Color('model',[vector]).To convert and copy color from one space to other, use the methods
|
||||
convertTo('model') and expressAs('model')spectively
|
||||
'''
|
||||
"""
|
||||
|
||||
__slots__ = [
|
||||
'model',
|
||||
|
@ -17,7 +18,7 @@ class Color():
|
|||
]
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def __init__(self,
|
||||
model = 'RGB',
|
||||
color = np.zeros(3,'d')):
|
||||
|
@ -45,17 +46,19 @@ class Color():
|
|||
self.color = np.array(color,'d')
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def __repr__(self):
|
||||
"""Color model and values"""
|
||||
return 'Model: %s Color: %s'%(self.model,str(self.color))
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def __str__(self):
|
||||
"""Color model and values"""
|
||||
return self.__repr__()
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def convertTo(self,toModel = 'RGB'):
|
||||
toModel = toModel.upper()
|
||||
if toModel not in self.__transforms__.keys(): return
|
||||
|
@ -73,17 +76,19 @@ class Color():
|
|||
return self
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def expressAs(self,asModel = 'RGB'):
|
||||
return self.__class__(self.model,self.color).convertTo(asModel)
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# convert H(ue) S(aturation) L(uminance) to R(red) G(reen) B(lue)
|
||||
# with S,L,H,R,G,B running from 0 to 1
|
||||
# from http://en.wikipedia.org/wiki/HSL_and_HSV
|
||||
def _HSL2RGB(self):
|
||||
|
||||
def _HSL2RGB(self):
|
||||
"""
|
||||
convert H(ue) S(aturation) L(uminance) to R(red) G(reen) B(lue)
|
||||
|
||||
with S,L,H,R,G,B running from 0 to 1
|
||||
from http://en.wikipedia.org/wiki/HSL_and_HSV
|
||||
"""
|
||||
if self.model != 'HSL': return
|
||||
|
||||
sextant = self.color[0]*6.0
|
||||
|
@ -103,12 +108,13 @@ class Color():
|
|||
self.color = converted.color
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# convert R(ed) G(reen) B(lue) to H(ue) S(aturation) L(uminance)
|
||||
# with S,L,H,R,G,B running from 0 to 1
|
||||
# from http://130.113.54.154/~monger/hsl-rgb.html
|
||||
def _RGB2HSL(self):
|
||||
"""
|
||||
convert R(ed) G(reen) B(lue) to H(ue) S(aturation) L(uminance)
|
||||
|
||||
with S,L,H,R,G,B running from 0 to 1
|
||||
from http://130.113.54.154/~monger/hsl-rgb.html
|
||||
"""
|
||||
if self.model != 'RGB': return
|
||||
|
||||
HSL = np.zeros(3,'d')
|
||||
|
@ -129,7 +135,7 @@ class Color():
|
|||
HSL[0] = 2.0 + (self.color[2] - self.color[0])/(maxcolor - mincolor)
|
||||
elif (maxcolor == self.color[2]):
|
||||
HSL[0] = 4.0 + (self.color[0] - self.color[1])/(maxcolor - mincolor)
|
||||
HSL[0] = HSL[0]*60.0 # is it necessary to scale to 360 hue values? might be dangerous for small values <1..!
|
||||
HSL[0] = HSL[0]*60.0 # scaling to 360 might be dangerous for small values
|
||||
if (HSL[0] < 0.0):
|
||||
HSL[0] = HSL[0] + 360.0
|
||||
for i in xrange(2):
|
||||
|
@ -141,12 +147,14 @@ class Color():
|
|||
self.color = converted.color
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# convert R(ed) G(reen) B(lue) to CIE XYZ
|
||||
# with all values in the range of 0 to 1
|
||||
# from http://www.cs.rit.edu/~ncs/color/t_convert.html
|
||||
def _RGB2XYZ(self):
|
||||
|
||||
def _RGB2XYZ(self):
|
||||
"""
|
||||
convert R(ed) G(reen) B(lue) to CIE XYZ
|
||||
|
||||
with all values in the range of 0 to 1
|
||||
from http://www.cs.rit.edu/~ncs/color/t_convert.html
|
||||
"""
|
||||
if self.model != 'RGB': return
|
||||
|
||||
XYZ = np.zeros(3,'d')
|
||||
|
@ -168,12 +176,14 @@ class Color():
|
|||
self.color = converted.color
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# convert CIE XYZ to R(ed) G(reen) B(lue)
|
||||
# with all values in the range of 0 to 1
|
||||
# from http://www.cs.rit.edu/~ncs/color/t_convert.html
|
||||
def _XYZ2RGB(self):
|
||||
|
||||
def _XYZ2RGB(self):
|
||||
"""
|
||||
convert CIE XYZ to R(ed) G(reen) B(lue)
|
||||
|
||||
with all values in the range of 0 to 1
|
||||
from http://www.cs.rit.edu/~ncs/color/t_convert.html
|
||||
"""
|
||||
if self.model != 'XYZ': return
|
||||
|
||||
convert = np.array([[ 3.240479,-1.537150,-0.498535],
|
||||
|
@ -197,12 +207,14 @@ class Color():
|
|||
self.color = converted.color
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# convert CIE Lab to CIE XYZ
|
||||
# with XYZ in the range of 0 to 1
|
||||
# from http://www.easyrgb.com/index.php?X=MATH&H=07#text7
|
||||
def _CIELAB2XYZ(self):
|
||||
|
||||
def _CIELAB2XYZ(self):
|
||||
"""
|
||||
convert CIE Lab to CIE XYZ
|
||||
|
||||
with XYZ in the range of 0 to 1
|
||||
from http://www.easyrgb.com/index.php?X=MATH&H=07#text7
|
||||
"""
|
||||
if self.model != 'CIELAB': return
|
||||
|
||||
ref_white = np.array([.95047, 1.00000, 1.08883],'d') # Observer = 2, Illuminant = D65
|
||||
|
@ -220,13 +232,13 @@ class Color():
|
|||
self.model = converted.model
|
||||
self.color = converted.color
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# convert CIE XYZ to CIE Lab
|
||||
# with XYZ in the range of 0 to 1
|
||||
# from http://en.wikipedia.org/wiki/Lab_color_space, http://www.cs.rit.edu/~ncs/color/t_convert.html
|
||||
def _XYZ2CIELAB(self):
|
||||
"""
|
||||
convert CIE XYZ to CIE Lab
|
||||
|
||||
with XYZ in the range of 0 to 1
|
||||
from http://en.wikipedia.org/wiki/Lab_color_space, http://www.cs.rit.edu/~ncs/color/t_convert.html
|
||||
"""
|
||||
if self.model != 'XYZ': return
|
||||
|
||||
ref_white = np.array([.95047, 1.00000, 1.08883],'d') # Observer = 2, Illuminant = D65
|
||||
|
@ -243,11 +255,12 @@ class Color():
|
|||
self.color = converted.color
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# convert CIE Lab to Msh colorspace
|
||||
# from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
|
||||
def _CIELAB2MSH(self):
|
||||
"""
|
||||
convert CIE Lab to Msh colorspace
|
||||
|
||||
from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
|
||||
"""
|
||||
if self.model != 'CIELAB': return
|
||||
|
||||
Msh = np.zeros(3,'d')
|
||||
|
@ -262,12 +275,13 @@ class Color():
|
|||
self.color = converted.color
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# convert Msh colorspace to CIE Lab
|
||||
# s,h in radians
|
||||
# from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
|
||||
def _MSH2CIELAB(self):
|
||||
"""
|
||||
convert Msh colorspace to CIE Lab
|
||||
|
||||
s,h in radians
|
||||
from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
|
||||
"""
|
||||
if self.model != 'MSH': return
|
||||
|
||||
Lab = np.zeros(3,'d')
|
||||
|
@ -280,13 +294,8 @@ class Color():
|
|||
self.color = converted.color
|
||||
|
||||
|
||||
|
||||
### --- COLORMAP CLASS -----------------------------------------------
|
||||
|
||||
class Colormap():
|
||||
'''
|
||||
perceptually uniform diverging or sequential colormaps.
|
||||
'''
|
||||
"""perceptually uniform diverging or sequential colormaps."""
|
||||
|
||||
__slots__ = [
|
||||
'left',
|
||||
|
@ -294,20 +303,40 @@ class Colormap():
|
|||
'interpolate',
|
||||
]
|
||||
__predefined__ = {
|
||||
'gray': {'left': Color('HSL',[0,1,1]), 'right': Color('HSL',[0,0,0.15]), 'interpolate': 'perceptualuniform'},
|
||||
'grey': {'left': Color('HSL',[0,1,1]), 'right': Color('HSL',[0,0,0.15]), 'interpolate': 'perceptualuniform'},
|
||||
'red': {'left': Color('HSL',[0,1,0.14]), 'right': Color('HSL',[0,0.35,0.91]), 'interpolate': 'perceptualuniform'},
|
||||
'green': {'left': Color('HSL',[0.33333,1,0.14]), 'right': Color('HSL',[0.33333,0.35,0.91]), 'interpolate': 'perceptualuniform'},
|
||||
'blue': {'left': Color('HSL',[0.66,1,0.14]), 'right': Color('HSL',[0.66,0.35,0.91]), 'interpolate': 'perceptualuniform'},
|
||||
'seaweed': {'left': Color('HSL',[0.78,1.0,0.1]), 'right': Color('HSL',[0.40000,0.1,0.9]), 'interpolate': 'perceptualuniform'},
|
||||
'bluebrown': {'left': Color('HSL',[0.65,0.53,0.49]), 'right': Color('HSL',[0.11,0.75,0.38]), 'interpolate': 'perceptualuniform'},
|
||||
'redgreen': {'left': Color('HSL',[0.97,0.96,0.36]), 'right': Color('HSL',[0.33333,1.0,0.14]), 'interpolate': 'perceptualuniform'},
|
||||
'bluered': {'left': Color('HSL',[0.65,0.53,0.49]), 'right': Color('HSL',[0.97,0.96,0.36]), 'interpolate': 'perceptualuniform'},
|
||||
'blueredrainbow':{'left': Color('HSL',[2.0/3.0,1,0.5]), 'right': Color('HSL',[0,1,0.5]), 'interpolate': 'linear' },
|
||||
'gray': {'left': Color('HSL',[0,1,1]),
|
||||
'right': Color('HSL',[0,0,0.15]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'grey': {'left': Color('HSL',[0,1,1]),
|
||||
'right': Color('HSL',[0,0,0.15]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'red': {'left': Color('HSL',[0,1,0.14]),
|
||||
'right': Color('HSL',[0,0.35,0.91]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'green': {'left': Color('HSL',[0.33333,1,0.14]),
|
||||
'right': Color('HSL',[0.33333,0.35,0.91]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'blue': {'left': Color('HSL',[0.66,1,0.14]),
|
||||
'right': Color('HSL',[0.66,0.35,0.91]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'seaweed': {'left': Color('HSL',[0.78,1.0,0.1]),
|
||||
'right': Color('HSL',[0.40000,0.1,0.9]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'bluebrown': {'left': Color('HSL',[0.65,0.53,0.49]),
|
||||
'right': Color('HSL',[0.11,0.75,0.38]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'redgreen': {'left': Color('HSL',[0.97,0.96,0.36]),
|
||||
'right': Color('HSL',[0.33333,1.0,0.14]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'bluered': {'left': Color('HSL',[0.65,0.53,0.49]),
|
||||
'right': Color('HSL',[0.97,0.96,0.36]),
|
||||
'interpolate': 'perceptualuniform'},
|
||||
'blueredrainbow':{'left': Color('HSL',[2.0/3.0,1,0.5]),
|
||||
'right': Color('HSL',[0,1,0.5]),
|
||||
'interpolate': 'linear' },
|
||||
}
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def __init__(self,
|
||||
left = Color('RGB',[1,1,1]),
|
||||
right = Color('RGB',[0,0,0]),
|
||||
|
@ -330,26 +359,27 @@ class Colormap():
|
|||
self.interpolate = interpolate
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def __repr__(self):
|
||||
"""left and right value of colormap"""
|
||||
return 'Left: %s Right: %s'%(self.left,self.right)
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def invert(self):
|
||||
(self.left, self.right) = (self.right, self.left)
|
||||
return self
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def color(self,fraction = 0.5):
|
||||
|
||||
def interpolate_Msh(lo, hi, frac):
|
||||
|
||||
def rad_diff(a,b):
|
||||
return abs(a[2]-b[2])
|
||||
|
||||
def adjust_hue(Msh_sat, Msh_unsat): # if saturation of one of the two colors is too less than the other, hue of the less
|
||||
# if saturation of one of the two colors is too less than the other, hue of the less
|
||||
def adjust_hue(Msh_sat, Msh_unsat):
|
||||
if Msh_sat[0] >= Msh_unsat[0]:
|
||||
return Msh_sat[2]
|
||||
else:
|
||||
|
@ -375,10 +405,11 @@ class Colormap():
|
|||
return Color('MSH',Msh)
|
||||
|
||||
def interpolate_linear(lo, hi, frac):
|
||||
'''
|
||||
linearly interpolate color at given fraction between lower and higher color in model of lower color
|
||||
'''
|
||||
"""
|
||||
linearly interpolate color at given fraction between lower and
|
||||
|
||||
higher color in model of lower color
|
||||
"""
|
||||
interpolation = (1.0 - frac) * np.array(lo.color[:]) \
|
||||
+ frac * np.array(hi.expressAs(lo.model).color[:])
|
||||
|
||||
|
@ -393,21 +424,21 @@ class Colormap():
|
|||
else:
|
||||
raise NameError('unknown color interpolation method')
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------
|
||||
def export(self,name = 'uniformPerceptualColorMap',\
|
||||
format = 'paraview',\
|
||||
steps = 2,\
|
||||
crop = [-1.0,1.0],
|
||||
model = 'RGB'):
|
||||
'''
|
||||
"""
|
||||
[RGB] colormap for use in paraview or gmsh, or as raw string, or array.
|
||||
|
||||
arguments: name, format, steps, crop.
|
||||
format is one of (paraview, gmsh, raw, list).
|
||||
crop selects a (sub)range in [-1.0,1.0].
|
||||
generates sequential map if one limiting color is either white or black,
|
||||
diverging map otherwise.
|
||||
'''
|
||||
|
||||
"""
|
||||
format = format.lower() # consistent comparison basis
|
||||
frac = 0.5*(np.array(crop) + 1.0) # rescale crop range to fractions
|
||||
colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in xrange(steps)]
|
||||
|
|
|
@@ -1,5 +1,5 @@
 # -*- coding: UTF-8 no BOM -*-
 
 # $Id$
 """Aggregator for configuration file handling"""
-from .material import Material
+from .material import Material # noqa
@ -100,20 +100,19 @@ class Texture(Section):
|
|||
|
||||
|
||||
class Material():
|
||||
"""Reads, manipulates and writes material.config files"""
|
||||
|
||||
'''
|
||||
Reads, manipulates and writes material.config files
|
||||
'''
|
||||
__slots__ = ['data']
|
||||
|
||||
def __init__(self,verbose=True):
|
||||
"""generates ordered list of parts"""
|
||||
self.parts = [
|
||||
'homogenization',
|
||||
'microstructure',
|
||||
'crystallite',
|
||||
'phase',
|
||||
'texture',
|
||||
] # ordered (!) list of parts
|
||||
]
|
||||
self.data = {\
|
||||
'homogenization': {'__order__': []},
|
||||
'microstructure': {'__order__': []},
|
||||
|
@ -124,6 +123,7 @@ class Material():
|
|||
self.verbose = verbose
|
||||
|
||||
def __repr__(self):
|
||||
"""returns current configuration to be used as material.config"""
|
||||
me = []
|
||||
for part in self.parts:
|
||||
if self.verbose: print('doing '+part)
|
||||
|
@ -144,7 +144,6 @@ class Material():
|
|||
re_sec = re.compile(r'^\[(.+)\]$') # pattern for section
|
||||
|
||||
name_section = ''
|
||||
idx_section = 0
|
||||
active = False
|
||||
|
||||
for line in content:
|
||||
|
@ -197,8 +196,7 @@ class Material():
|
|||
return saveFile
|
||||
|
||||
def add_section(self, part=None, section=None, initialData=None, merge = False):
|
||||
'''adding/updating'''
|
||||
|
||||
"""adding/updating"""
|
||||
part = part.lower()
|
||||
section = section.lower()
|
||||
if part not in self.parts: raise Exception('invalid part %s'%part)
|
||||
|
@ -227,10 +225,10 @@ class Material():
|
|||
def add_microstructure(self, section='',
|
||||
components={}, # dict of phase,texture, and fraction lists
|
||||
):
|
||||
''' Experimental! Needs expansion to multi-constituent microstructures...'''
|
||||
|
||||
"""Experimental! Needs expansion to multi-constituent microstructures..."""
|
||||
microstructure = Microstructure()
|
||||
components=dict((k.lower(), v) for k,v in components.iteritems()) # make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python)
|
||||
# make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python)
|
||||
components=dict((k.lower(), v) for k,v in components.iteritems())
|
||||
|
||||
for key in ['phase','texture','fraction','crystallite']:
|
||||
if type(components[key]) is not list:
|
||||
|
@ -245,7 +243,8 @@ class Material():
|
|||
except AttributeError:
|
||||
pass
|
||||
|
||||
for (phase,texture,fraction,crystallite) in zip(components['phase'],components['texture'],components['fraction'],components['crystallite']):
|
||||
for (phase,texture,fraction,crystallite) in zip(components['phase'],components['texture'],
|
||||
components['fraction'],components['crystallite']):
|
||||
microstructure.add_multiKey('constituent','phase %i\ttexture %i\tfraction %g\ncrystallite %i'%(
|
||||
self.data['phase']['__order__'].index(phase)+1,
|
||||
self.data['texture']['__order__'].index(texture)+1,
|
||||
|
@ -259,8 +258,8 @@ class Material():
|
|||
section=None,
|
||||
key=None,
|
||||
value=None):
|
||||
if type(value) is not type([]):
|
||||
if type(value) is not type('s'):
|
||||
if not isinstance(value,list):
|
||||
if not isinstance(value,str):
|
||||
value = '%s'%value
|
||||
value = [value]
|
||||
newlen = len(value)
|
||||
|
@ -271,17 +270,3 @@ class Material():
|
|||
if newlen is not oldlen:
|
||||
print('Length of value was changed from %i to %i!'%(oldlen,newlen))
|
||||
|
||||
|
||||
|
||||
def ex1():
|
||||
mat=Material()
|
||||
p=Phase({'constitution':'lump'})
|
||||
t=Texture()
|
||||
t.add_component('gauss',{'eulers':[1,2,3]})
|
||||
mat.add_section('phase','phase1',p)
|
||||
mat.add_section('texture','tex1',t)
|
||||
mat.add_microstructure('mustruct1',{'phase':['phase1']*2,'texture':['tex1']*2,'fraction':[0.2]*2})
|
||||
print(mat)
|
||||
mat.write(file='poop')
|
||||
mat.write(file='poop',overwrite=True)
|
||||
|
||||
|
|
|
@@ -2,7 +2,7 @@
 
 # $Id$
 
-import os,sys,string,re,subprocess,shlex
+import os,subprocess,shlex
 
 class Environment():
   __slots__ = [ \
@@ -1,7 +1,7 @@
 # -*- coding: UTF-8 no BOM -*-
 
 # $Id$
 """Aggregator for geometry handling"""
 
-from .geometry import Geometry       # only one class
-from .spectral import Spectral       # only one class
-from .marc     import Marc           # only one class
+from .geometry import Geometry       # noqa
+from .spectral import Spectral       # noqa
+from .marc     import Marc           # noqa
@@ -5,10 +5,11 @@
 import damask.geometry
 
 class Geometry():
-  '''
+  """
   General class for geometry parsing.
+
   Sub-classed by the individual solvers.
-  '''
+  """
 
   def __init__(self,solver=''):
     solverClass = {
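Geometry's constructor above maps a solver name onto a solver-specific subclass. A minimal sketch of that name-to-class dispatch pattern (the classes and names here are stand-ins, not the damask.geometry internals):

class SpectralGeometry: pass
class MarcGeometry: pass

def make_geometry(solver=''):
    solverClass = {'spectral': SpectralGeometry,   # map lower-case solver name to handler class
                   'marc':     MarcGeometry}
    try:
        return solverClass[solver.lower()]()
    except KeyError:
        raise ValueError('unknown solver: %r' % solver)

print(type(make_geometry('spectral')).__name__)   # SpectralGeometry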
@ -9,7 +9,6 @@ import numpy as np
|
|||
|
||||
# ******************************************************************************************
|
||||
class Rodrigues:
|
||||
# ******************************************************************************************
|
||||
|
||||
def __init__(self, vector = np.zeros(3)):
|
||||
self.vector = vector
|
||||
|
@ -28,20 +27,22 @@ class Rodrigues:
|
|||
|
||||
# ******************************************************************************************
|
||||
class Quaternion:
|
||||
# ******************************************************************************************
|
||||
# All methods and naming conventions based off
|
||||
# http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions
|
||||
"""
|
||||
Orientation represented as unit quaternion
|
||||
|
||||
# w is the real part, (x, y, z) are the imaginary parts
|
||||
All methods and naming conventions based on http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions
|
||||
|
||||
# Representation of rotation is in ACTIVE form!
|
||||
# (derived directly or through angleAxis, Euler angles, or active matrix)
|
||||
# vector "a" (defined in coordinate system "A") is actively rotated to new coordinates "b"
|
||||
# b = Q * a
|
||||
# b = np.dot(Q.asMatrix(),a)
|
||||
w is the real part, (x, y, z) are the imaginary parts
|
||||
Representation of rotation is in ACTIVE form!
|
||||
(derived directly or through angleAxis, Euler angles, or active matrix)
|
||||
vector "a" (defined in coordinate system "A") is actively rotated to new coordinates "b"
|
||||
b = Q * a
|
||||
b = np.dot(Q.asMatrix(),a)
|
||||
"""
|
||||
|
||||
def __init__(self,
|
||||
quatArray = [1.0,0.0,0.0,0.0]):
|
||||
"""initializes to identity if not given"""
|
||||
self.w, \
|
||||
self.x, \
|
||||
self.y, \
|
||||
|
@ -49,19 +50,23 @@ class Quaternion:
|
|||
self.homomorph()
|
||||
|
||||
def __iter__(self):
|
||||
"""components"""
|
||||
return iter([self.w,self.x,self.y,self.z])
|
||||
|
||||
def __copy__(self):
|
||||
"""create copy"""
|
||||
Q = Quaternion([self.w,self.x,self.y,self.z])
|
||||
return Q
|
||||
|
||||
copy = __copy__
|
||||
|
||||
def __repr__(self):
|
||||
"""readbable string"""
|
||||
return 'Quaternion(real=%+.6f, imag=<%+.6f, %+.6f, %+.6f>)' % \
|
||||
(self.w, self.x, self.y, self.z)
|
||||
|
||||
def __pow__(self, exponent):
|
||||
"""power"""
|
||||
omega = math.acos(self.w)
|
||||
vRescale = math.sin(exponent*omega)/math.sin(omega)
|
||||
Q = Quaternion()
|
||||
|
@ -72,6 +77,7 @@ class Quaternion:
|
|||
return Q
|
||||
|
||||
def __ipow__(self, exponent):
|
||||
"""in place power"""
|
||||
omega = math.acos(self.w)
|
||||
vRescale = math.sin(exponent*omega)/math.sin(omega)
|
||||
self.w = np.cos(exponent*omega)
|
||||
|
@ -81,6 +87,7 @@ class Quaternion:
|
|||
return self
|
||||
|
||||
def __mul__(self, other):
|
||||
"""multiplication"""
|
||||
try: # quaternion
|
||||
Aw = self.w
|
||||
Ax = self.x
|
||||
|
@ -128,6 +135,7 @@ class Quaternion:
|
|||
return self.copy()
|
||||
|
||||
def __imul__(self, other):
|
||||
"""in place multiplication"""
|
||||
try: # Quaternion
|
||||
Ax = self.x
|
||||
Ay = self.y
|
||||
|
@ -145,6 +153,7 @@ class Quaternion:
|
|||
return self
|
||||
|
||||
def __div__(self, other):
|
||||
"""division"""
|
||||
if isinstance(other, (int,float,long)):
|
||||
w = self.w / other
|
||||
x = self.x / other
|
||||
|
@ -155,6 +164,7 @@ class Quaternion:
|
|||
return NotImplemented
|
||||
|
||||
def __idiv__(self, other):
|
||||
"""in place division"""
|
||||
if isinstance(other, (int,float,long)):
|
||||
self.w /= other
|
||||
self.x /= other
|
||||
|
@ -163,6 +173,7 @@ class Quaternion:
|
|||
return self
|
||||
|
||||
def __add__(self, other):
|
||||
"""addition"""
|
||||
if isinstance(other, Quaternion):
|
||||
w = self.w + other.w
|
||||
x = self.x + other.x
|
||||
|
@ -173,6 +184,7 @@ class Quaternion:
|
|||
return NotImplemented
|
||||
|
||||
def __iadd__(self, other):
|
||||
"""in place division"""
|
||||
if isinstance(other, Quaternion):
|
||||
self.w += other.w
|
||||
self.x += other.x
|
||||
|
@ -181,6 +193,7 @@ class Quaternion:
|
|||
return self
|
||||
|
||||
def __sub__(self, other):
|
||||
"""subtraction"""
|
||||
if isinstance(other, Quaternion):
|
||||
Q = self.copy()
|
||||
Q.w -= other.w
|
||||
|
@ -192,6 +205,7 @@ class Quaternion:
|
|||
return self.copy()
|
||||
|
||||
def __isub__(self, other):
|
||||
"""in place subtraction"""
|
||||
if isinstance(other, Quaternion):
|
||||
self.w -= other.w
|
||||
self.x -= other.x
|
||||
|
@ -200,6 +214,7 @@ class Quaternion:
|
|||
return self
|
||||
|
||||
def __neg__(self):
|
||||
"""additive inverse"""
|
||||
self.w = -self.w
|
||||
self.x = -self.x
|
||||
self.y = -self.y
|
||||
|
@ -207,6 +222,7 @@ class Quaternion:
|
|||
return self
|
||||
|
||||
def __abs__(self):
|
||||
"""norm"""
|
||||
return math.sqrt(self.w ** 2 + \
|
||||
self.x ** 2 + \
|
||||
self.y ** 2 + \
|
||||
|
@ -215,6 +231,7 @@ class Quaternion:
|
|||
magnitude = __abs__
|
||||
|
||||
def __eq__(self,other):
|
||||
"""equal at e-8 precision"""
|
||||
return (abs(self.w-other.w) < 1e-8 and \
|
||||
abs(self.x-other.x) < 1e-8 and \
|
||||
abs(self.y-other.y) < 1e-8 and \
|
||||
|
@ -226,9 +243,11 @@ class Quaternion:
|
|||
abs(-self.z-other.z) < 1e-8)
|
||||
|
||||
def __ne__(self,other):
|
||||
"""not equal at e-8 precision"""
|
||||
return not self.__eq__(self,other)
|
||||
|
||||
def __cmp__(self,other):
|
||||
"""linear ordering"""
|
||||
return cmp(self.Rodrigues(),other.Rodrigues())
|
||||
|
||||
def magnitude_squared(self):
|
||||
|
@ -290,7 +309,8 @@ class Quaternion:
|
|||
return np.outer([i for i in self],[i for i in self])
|
||||
|
||||
def asMatrix(self):
|
||||
return np.array([[1.0-2.0*(self.y*self.y+self.z*self.z), 2.0*(self.x*self.y-self.z*self.w), 2.0*(self.x*self.z+self.y*self.w)],
|
||||
return np.array(
|
||||
[[1.0-2.0*(self.y*self.y+self.z*self.z), 2.0*(self.x*self.y-self.z*self.w), 2.0*(self.x*self.z+self.y*self.w)],
|
||||
[ 2.0*(self.x*self.y+self.z*self.w), 1.0-2.0*(self.x*self.x+self.z*self.z), 2.0*(self.y*self.z-self.x*self.w)],
|
||||
[ 2.0*(self.x*self.z-self.y*self.w), 2.0*(self.x*self.w+self.y*self.z), 1.0-2.0*(self.x*self.x+self.y*self.y)]])
|
||||
|
||||
|
@ -315,15 +335,17 @@ class Quaternion:
|
|||
return np.inf*np.ones(3) if self.w == 0.0 else np.array([self.x, self.y, self.z])/self.w
|
||||
|
||||
def asEulers(self,
|
||||
type = 'bunge',
|
||||
type = "bunge",
|
||||
degrees = False,
|
||||
standardRange = False):
|
||||
'''
|
||||
u"""
|
||||
Orientation as Bunge-Euler angles
|
||||
|
||||
conversion of ACTIVE rotation to Euler angles taken from:
|
||||
Melcher, A.; Unser, A.; Reichhardt, M.; Nestler, B.; Pötschke, M.; Selzer, M.
|
||||
Conversion of EBSD data by a quaternion based algorithm to be used for grain structure simulations
|
||||
Technische Mechanik 30 (2010) pp 401--413
|
||||
'''
|
||||
"""
|
||||
angles = [0.0,0.0,0.0]
|
||||
|
||||
if type.lower() == 'bunge' or type.lower() == 'zxz':
|
||||
|
@ -369,7 +391,7 @@ class Quaternion:
|
|||
|
||||
@classmethod
|
||||
def fromRandom(cls,randomSeed = None):
|
||||
if randomSeed == None:
|
||||
if randomSeed is None:
|
||||
randomSeed = int(os.urandom(4).encode('hex'), 16)
|
||||
np.random.seed(randomSeed)
|
||||
r = np.random.random(3)
|
||||
|
@ -420,7 +442,6 @@ class Quaternion:
|
|||
y = - c1 * s2 * s3 + s1 * s2 * c3
|
||||
z = c1 * c2 * s3 + s1 * c2 * c3
|
||||
else:
|
||||
# print 'unknown Euler convention'
|
||||
w = c1 * c2 * c3 - s1 * s2 * s3
|
||||
x = s1 * s2 * c3 + c1 * c2 * s3
|
||||
y = s1 * c2 * c3 + c1 * s2 * s3
|
||||
|
@ -428,7 +449,8 @@ class Quaternion:
|
|||
return cls([w,x,y,z])
|
||||
|
||||
|
||||
## Modified Method to calculate Quaternion from Orientation Matrix, Source: http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/
|
||||
# Modified Method to calculate Quaternion from Orientation Matrix,
|
||||
# Source: http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/
|
||||
|
||||
@classmethod
|
||||
def fromMatrix(cls, m):
|
||||
|
@ -482,8 +504,12 @@ class Quaternion:
|
|||
|
||||
@classmethod
|
||||
def new_interpolate(cls, q1, q2, t):
|
||||
# see http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/20070017872_2007014421.pdf for (another?) way to interpolate quaternions
|
||||
"""
|
||||
interpolation
|
||||
|
||||
see http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/20070017872_2007014421.pdf
|
||||
for (another?) way to interpolate quaternions
|
||||
"""
|
||||
assert isinstance(q1, Quaternion) and isinstance(q2, Quaternion)
|
||||
Q = cls()
|
||||
|
||||
|
@ -522,11 +548,11 @@ class Quaternion:
|
|||
|
||||
# ******************************************************************************************
|
||||
class Symmetry:
|
||||
# ******************************************************************************************
|
||||
|
||||
lattices = [None,'orthorhombic','tetragonal','hexagonal','cubic',]
|
||||
|
||||
def __init__(self, symmetry = None):
|
||||
"""lattice with given symmetry, defaults to None"""
|
||||
if isinstance(symmetry, basestring) and symmetry.lower() in Symmetry.lattices:
|
||||
self.lattice = symmetry.lower()
|
||||
else:
|
||||
|
@ -534,29 +560,31 @@ class Symmetry:
|
|||
|
||||
|
||||
def __copy__(self):
|
||||
"""copy"""
|
||||
return self.__class__(self.lattice)
|
||||
|
||||
copy = __copy__
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
"""readbable string"""
|
||||
return '%s' % (self.lattice)
|
||||
|
||||
|
||||
def __eq__(self, other):
|
||||
"""equal"""
|
||||
return self.lattice == other.lattice
|
||||
|
||||
|
||||
def __neq__(self, other):
|
||||
"""not equal"""
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __cmp__(self,other):
|
||||
"""linear ordering"""
|
||||
return cmp(Symmetry.lattices.index(self.lattice),Symmetry.lattices.index(other.lattice))
|
||||
|
||||
def symmetryQuats(self,who = []):
|
||||
'''
|
||||
List of symmetry operations as quaternions.
|
||||
'''
|
||||
"""List of symmetry operations as quaternions."""
|
||||
if self.lattice == 'cubic':
|
||||
symQuats = [
|
||||
[ 1.0, 0.0, 0.0, 0.0 ],
|
||||
|
@ -629,18 +657,15 @@ class Symmetry:
|
|||
def equivalentQuaternions(self,
|
||||
quaternion,
|
||||
who = []):
|
||||
'''
|
||||
List of symmetrically equivalent quaternions based on own symmetry.
|
||||
'''
|
||||
"""List of symmetrically equivalent quaternions based on own symmetry."""
|
||||
return [quaternion*q for q in self.symmetryQuats(who)]
|
||||
|
||||
|
||||
def inFZ(self,R):
|
||||
'''
|
||||
Check whether given Rodrigues vector falls into fundamental zone of own symmetry.
|
||||
'''
|
||||
"""Check whether given Rodrigues vector falls into fundamental zone of own symmetry."""
|
||||
if isinstance(R, Quaternion): R = R.asRodrigues() # translate accidentially passed quaternion
|
||||
R = abs(R) # fundamental zone in Rodrigues space is point symmetric around origin
|
||||
# fundamental zone in Rodrigues space is point symmetric around origin
|
||||
R = abs(R)
|
||||
if self.lattice == 'cubic':
|
||||
return math.sqrt(2.0)-1.0 >= R[0] \
|
||||
and math.sqrt(2.0)-1.0 >= R[1] \
|
||||
|
@ -662,12 +687,13 @@ class Symmetry:
|
|||
|
||||
|
||||
def inDisorientationSST(self,R):
|
||||
'''
|
||||
"""
|
||||
Check whether given Rodrigues vector (of misorientation) falls into standard stereographic triangle of own symmetry.
|
||||
|
||||
Determination of disorientations follow the work of A. Heinz and P. Neumann:
|
||||
Representation of Orientation and Disorientation Data for Cubic, Hexagonal, Tetragonal and Orthorhombic Crystals
|
||||
Acta Cryst. (1991). A47, 780-789
|
||||
'''
|
||||
"""
|
||||
if isinstance(R, Quaternion): R = R.asRodrigues() # translate accidentially passed quaternion
|
||||
|
||||
epsilon = 0.0
|
||||
|
@ -691,11 +717,12 @@ class Symmetry:
|
|||
vector,
|
||||
proper = False,
|
||||
color = False):
|
||||
'''
|
||||
"""
|
||||
Check whether given vector falls into standard stereographic triangle of own symmetry.
|
||||
|
||||
proper considers only vectors with z >= 0, hence uses two neighboring SSTs.
|
||||
Return inverse pole figure color if requested.
|
||||
'''
|
||||
"""
|
||||
# basis = {'cubic' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
|
||||
# [1.,0.,1.]/np.sqrt(2.), # direction of green
|
||||
# [1.,1.,1.]/np.sqrt(3.)]).transpose()), # direction of blue
|
||||
|
@ -759,8 +786,8 @@ class Symmetry:
|
|||
theComponents = np.dot(basis['proper'],v)
|
||||
inSST = np.all(theComponents >= 0.0)
|
||||
else:
|
||||
v[2] = abs(v[2]) # z component projects identical for positive and negative values
|
||||
theComponents = np.dot(basis['improper'],v)
|
||||
v[2] = abs(v[2]) # z component projects identical
|
||||
theComponents = np.dot(basis['improper'],v) # for positive and negative values
|
||||
inSST = np.all(theComponents >= 0.0)
|
||||
|
||||
if color: # have to return color array
|
||||
|
@ -781,7 +808,6 @@ class Symmetry:
|
|||
|
||||
# ******************************************************************************************
|
||||
class Orientation:
|
||||
# ******************************************************************************************
|
||||
|
||||
__slots__ = ['quaternion','symmetry']
|
||||
|
||||
|
@ -791,7 +817,7 @@ class Orientation:
|
|||
angleAxis = None,
|
||||
matrix = None,
|
||||
Eulers = None,
|
||||
random = False, # put any integer to have a fixed seed or True for real random
|
||||
random = False, # integer to have a fixed seed or True for real random
|
||||
symmetry = None,
|
||||
):
|
||||
if random: # produce random orientation
|
||||
|
@ -815,12 +841,14 @@ class Orientation:
|
|||
self.symmetry = Symmetry(symmetry)
|
||||
|
||||
def __copy__(self):
|
||||
"""copy"""
|
||||
return self.__class__(quaternion=self.quaternion,symmetry=self.symmetry.lattice)
|
||||
|
||||
copy = __copy__
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
"""value as all implemented representations"""
|
||||
return 'Symmetry: %s\n' % (self.symmetry) + \
|
||||
'Quaternion: %s\n' % (self.quaternion) + \
|
||||
'Matrix:\n%s\n' % ( '\n'.join(['\t'.join(map(str,self.asMatrix()[i,:])) for i in range(3)]) ) + \
@ -863,10 +891,7 @@ class Orientation:
self.equivalentQuaternions(who))

def reduced(self):
'''
Transform orientation to fall into fundamental zone according to symmetry
'''
"""Transform orientation to fall into fundamental zone according to symmetry"""
for me in self.symmetry.equivalentQuaternions(self.quaternion):
if self.symmetry.inFZ(me.asRodrigues()): break

@ -876,13 +901,13 @@ class Orientation:
def disorientation(self,
other,
SST = True):
'''
"""
Disorientation between myself and given other orientation.

Rotation axis falls into SST if SST == True.
(Currently requires same symmetry for both orientations.
Look into A. Heinz and P. Neumann 1991 for cases with differing sym.)
'''
"""
if self.symmetry != other.symmetry: raise TypeError('disorientation between different symmetry classes not supported yet.')

misQ = self.quaternion.conjugated()*other.quaternion

@ -900,18 +925,16 @@ class Orientation:
if breaker: break
if breaker: break

# disorientation, own sym, other sym, self-->other: True, self<--other: False
return (Orientation(quaternion = theQ,symmetry = self.symmetry.lattice),
i,j,k == 1) # disorientation, own sym, other sym, self-->other: True, self<--other: False
i,j,k == 1)

def inversePole(self,
axis,
proper = False,
SST = True):
'''
axis rotated according to orientation (using crystal symmetry to ensure location falls into SST)
'''
"""axis rotated according to orientation (using crystal symmetry to ensure location falls into SST)"""
if SST: # pole requested to be within SST
for i,q in enumerate(self.symmetry.equivalentQuaternions(self.quaternion)): # test all symmetric equivalent quaternions
pole = q.conjugated()*axis # align crystal direction to axis

@ -922,10 +945,7 @@ class Orientation:
return (pole,i if SST else 0)

def IPFcolor(self,axis):
'''
TSL color of inverse pole figure for given axis
'''
"""TSL color of inverse pole figure for given axis"""
color = np.zeros(3,'d')

for q in self.symmetry.equivalentQuaternions(self.quaternion):

@ -939,7 +959,9 @@ class Orientation:
def average(cls,
orientations,
multiplicity = []):
"""RETURN THE AVERAGE ORIENTATION
"""
average orientation

ref: F. Landis Markley, Yang Cheng, John Lucas Crassidis, and Yaakov Oshman.
Averaging Quaternions,
Journal of Guidance, Control, and Dynamics, Vol. 30, No. 4 (2007), pp. 1193-1197.

@ -949,7 +971,6 @@ class Orientation:
b = Orientation(Eulers=np.radians([20, 0, 0]), symmetry='hexagonal')
avg = Orientation.average([a,b])
"""
if not all(isinstance(item, Orientation) for item in orientations):
raise TypeError("Only instances of Orientation can be averaged.")

@ -960,8 +981,7 @@ class Orientation:
reference = orientations[0] # take first as reference
for i,(o,n) in enumerate(zip(orientations,multiplicity)):
closest = o.equivalentOrientations(reference.disorientation(o,SST = False)[2])[0] # select sym orientation with lowest misorientation
M = closest.quaternion.asM() * n if i == 0 else M + closest.quaternion.asM() * n # add (multiples) of this orientation to average
M = closest.quaternion.asM() * n if i == 0 else M + closest.quaternion.asM() * n # noqa add (multiples) of this orientation to average noqa
eig, vec = np.linalg.eig(M/N)

return Orientation(quaternion = Quaternion(quatArray = np.real(vec.T[eig.argmax()])),
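The average() hunk above follows the cited Markley et al. scheme: it accumulates weighted 4x4 matrices q q^T per symmetry-reduced quaternion and takes the eigenvector belonging to the largest eigenvalue. A minimal numpy sketch of just that averaging step, with the DAMASK Quaternion/Orientation classes and the symmetry handling stripped away (the function name and plain-array interface are illustrative, not the module's API):

import numpy as np

def average_quaternions(quaternions, weights=None):
    """Eigenvector-based quaternion average (Markley et al., 2007); a sketch."""
    quaternions = [np.asarray(q, dtype=float) for q in quaternions]
    if weights is None:
        weights = np.ones(len(quaternions))
    M = np.zeros((4, 4))
    for q, n in zip(quaternions, weights):
        M += n * np.outer(q, q)                  # add (multiples of) q q^T, as in the hunk above
    eigval, eigvec = np.linalg.eigh(M / np.sum(weights))
    avg = eigvec[:, eigval.argmax()]             # eigenvector of the largest eigenvalue
    return avg / np.linalg.norm(avg)             # unit quaternion; overall sign is arbitrary

The class method above additionally maps every input orientation to the symmetrically equivalent one closest to the first orientation before accumulating, which this sketch omits.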
@ -11,10 +11,11 @@ except (ImportError) as e:
sys.stderr.write('\nREMARK: h5py module not available \n\n')

class Result():
'''
"""
General class for result parsing.

Needs h5py to be installed
'''
"""

def __init__(self,resultsFile):
self.data=h5py.File(resultsFile,"r")
@ -1,6 +1,5 @@
# -*- coding: UTF-8 no BOM -*-

# $Id$
# This tool converts a msc.marc result file into the vtk format that
# can be viewed by Paraview software (Kitware), or MayaVi (needs xml-vtk, or ...
#

@ -8,13 +7,8 @@
# Some example vtk files: http://people.sc.fsu.edu/~jburkardt/data/vtk/vtk.html
# www.paraview.org
import os,sys,math,time,re
# python external
try:
import numpy as N
import numpy
except:
print('Could not import numpy.')
import os,sys,re
import numpy as np

import py_post # MSC closed source module to access marc result files

@ -27,7 +21,7 @@ class MARC_POST():
self.fpath=os.path.join(self.projdir,self.postname)
print('Trying to open ',self.fpath,' ...')
self.p=py_post.post_open(self.fpath)
if self.p==None:
if self.p is None:
print('Could not open %s.'%self.postname); #return 'err'#; sys.exit(1)
raise Exception('Could not open t16')
print('Postfile %s%s is open ...'%(self.projdir,self.postname))

@ -105,7 +99,6 @@ class MARC_POST():
def writeNodes2VTK(self, fobj):
self.points=[]
self.VTKcnt=200 # number of values per line in vtk file
ndCnt=1
fobj.write('POINTS %i'%self.p.nodes()+' float\n')
self.nodes_dict={} # store the node IDs in case of holes in the numbering
for iNd in self.nodes:

@ -126,8 +119,6 @@ class MARC_POST():
el=self.p.element(iEl)
cell_nodes=[] # for pyvtk
ndlist=el.items
#for k in [0, 1, 3, 2, 4, 5, 7, 6]: # FOR CELL TPYE VTK_VOXEL
#for k in [0, 4, 3, 1, 5, 7, 6, 2]:
for k in [0, 1, 2, 3, 4, 5, 6, 7]: # FOR CELL TYPE VTK_HEXAHEDRON
node=ndlist[k]-1
cell_nodes.append(self.nodes_dict[node])

@ -147,7 +138,6 @@ class MARC_POST():
fobj.write('\n');cnt=0
fobj.write('\n')
print('Elements written to VTK: %i'%self.p.elements())
#print('Nr of nodes: ',self.nodes)

def writeElScalars2NodesVTK(self,fobj):
fobj.write('\nPOINT_DATA %i\n'%self.p.nodes())

@ -157,7 +147,6 @@ class MARC_POST():
fobj.write('LOOKUP_TABLE default\n')
idxScal=self.nscal_list.index('Displacement Z')
for iNd in self.nodes:
#fobj.write('%f %f '%(self.p.node_scalar(iNd,idxScal), N.random.rand()))
fobj.write('%f '%(self.p.node_scalar(iNd,idxScal)))
for iEl in range(0,self.nel):
el=self.p.element(iEl)

@ -173,8 +162,6 @@ class MARC_POST():

def writeNodeScalars2VTK(self,fobj):
fobj.write('\nPOINT_DATA %i\n'%self.p.nodes())
nNdDat=self.nscals
nComponents=1+nNdDat
self.pointDataScalars=[]
for idxNdScal in range(-3,self.nscals): #now include node x,y,z
if idxNdScal>=0:

@ -209,8 +196,6 @@ class MARC_POST():
idx_sig_vMises=self.getLabelNr('Equivalent Von Mises Stress')
idx_sig33=self.getLabelNr('Comp 33 of Cauchy Stress')
fobj.write('\nCELL_DATA %i\n'%self.p.elements())
nElDat=self.elscals
nComponents=1+nElDat
for idxElScal in range(0,self.elscals):
datalabel=self.elscal_list[idxElScal]
datalabel=re.sub("\s",'_',datalabel)

@ -251,18 +236,15 @@ class MARC_POST():
return result

def writeUniaxiality2VTK(self,fobj):
#fobj.write('\nCELL_DATA %i\n'%self.p.elements())
datalabel='uniaxiality_sig_vMises_durch_sig33'
fobj.write('SCALARS %s float %i\n'%(datalabel,1))
fobj.write('LOOKUP_TABLE default\n')
cnt=0
for iEl in range(0,self.nel):
cnt=cnt+1
#if abs(self.sig33[iEl])<1e-5:
if abs(self.sig_vMises[iEl])<1e-5:
datum=0.
else:
#datum=self.sig_vMises[iEl]/self.sig33[iEl]
datum=self.sig33[iEl]/self.sig_vMises[iEl]
fobj.write('%E '%(datum))
if cnt>self.VTKcnt:

@ -303,10 +285,6 @@ class MARC_POST():
fobj.write('\n')

def calc_lode_parameter(self):
# [-1 ... +1] see e.g. Wippler & Boehlke triaxiality measures doi:10.1002/pamm.201010061
# +1 : uniax tensile?
# 0 : shear
# -1 : uniax compr ?
self.lode=[]
try:
self.stress

@ -328,10 +306,11 @@ class MARC_POST():
def princStress(self, stress):
"""
Function to compute 3D principal stresses and sort them.

from: http://geodynamics.org/svn/cig/short/3D/PyLith/trunk/playpen/postproc/vtkcff.py
"""
stressMat=N.array(stress)
(princStress, princAxes) = numpy.linalg.eigh(stressMat)
stressMat=np.array(stress)
(princStress, princAxes) = np.linalg.eigh(stressMat)
idx = princStress.argsort()
princStressOrdered = princStress[idx]
princAxesOrdered = princAxes[:,idx]
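The princStress hunk above only swaps the old N/numpy aliases for np, but it spells out the full recipe: eigendecompose the symmetric stress tensor and sort eigenvalues and eigenvectors together. A standalone sketch of that computation (the function name is illustrative, not part of the class):

import numpy as np

def principal_stresses(stress):
    """Principal stresses (ascending) and matching axes as columns; a sketch."""
    stressMat = np.array(stress, dtype=float).reshape(3, 3)
    princStress, princAxes = np.linalg.eigh(stressMat)  # symmetric tensor, real eigenpairs
    idx = princStress.argsort()                         # sort ascending, as the method above does
    return princStress[idx], princAxes[:, idx]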
@ -339,36 +318,28 @@ class MARC_POST():

def avg_elten(self,
idxElTen, mat=0, elID=None):
tensum=N.zeros((3,3));
T=N.zeros((3,3));
tensum=np.zeros((3,3));
T=np.zeros((3,3));
pts=0;
avg=N.zeros((3,3));
#print 'Element Scalars'
#print self.p.element_scalar_label(elscal2)
if elID==None:
avg=np.zeros((3,3));

if elID is None:
averaged_elements=range(0,self.nel)
else:
averaged_elements=[elID]
#for i in range (0,self.nel):
for i in averaged_elements:
if mat==0 or int(self.p.element_scalar(i,4)[0].value)==mat:
eldata=self.p.element(i)
T=self.p.element_tensor(i,idxElTen)
for k in range (0,8):
tensum[0][0] = tensum[0][0] + T[k].t11
tensum[0][1] = tensum[0][1] + T[k].t12
tensum[0][2] = tensum[0][2] + T[k].t13
#tensum1[1][0] = tensum1[1][0] + T1[k].t21
tensum[1][1] = tensum[1][1] + T[k].t22
tensum[1][2] = tensum[1][2] + T[k].t23
#tensum1[2][0] = tensum1[2][0] + T1[k].t31
#tensum1[2][1] = tensum1[2][1] + T1[k].t32
tensum[2][2] = tensum[2][2] + T[k].t33
pts=pts+1
avg=tensum/pts
#print avg
avg=self.fillComponents(avg)
#print avg
del [T]
return (avg,tensum,pts)

@ -384,7 +355,7 @@ class MARC_POST():
t=tensor33
s=(t[0,0]-t[1,1])**2+(t[1,1]-t[2,2])**2+(t[0,0]-t[2,2])**2+\
6*(t[0,1]**2+t[1,2]**2+t[2,0]**2)
vM=N.sqrt(s/2.)
vM=np.sqrt(s/2.)
return vM

def meanStress(self,tensor33):

@ -398,7 +369,6 @@ class MARC_POST():
I1=t[0,0]+t[1,1]+t[2,2]
I2=t[0,0]*t[1,1]+t[1,1]*t[2,2]+t[0,0]*t[2,2]-\
t[0,1]**2-t[1,2]**2-t[0,2]**2
# I3 = det(t)
I3=t[0,0]*t[1,1]*t[2,2]+\
2*t[0,1]*t[1,2]*t[2,0]-\
t[2,2]*t[0,1]**2-t[0,0]*t[1,2]**2-t[1,1]*t[0,2]**2
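The two hunks above again only replace N.sqrt with np.sqrt, but they carry the full equivalent-stress and invariant formulas for a symmetric 3x3 tensor. Written out as self-contained functions (the names are mine, the expressions are the ones from the diff):

import numpy as np

def von_mises(t):
    """Equivalent von Mises stress of a symmetric 3x3 tensor."""
    t = np.asarray(t, dtype=float)
    s = (t[0,0]-t[1,1])**2 + (t[1,1]-t[2,2])**2 + (t[0,0]-t[2,2])**2 \
      + 6.*(t[0,1]**2 + t[1,2]**2 + t[2,0]**2)
    return np.sqrt(s/2.)

def invariants(t):
    """First three invariants I1, I2, I3 of a symmetric 3x3 tensor."""
    t = np.asarray(t, dtype=float)
    I1 = t[0,0] + t[1,1] + t[2,2]
    I2 = t[0,0]*t[1,1] + t[1,1]*t[2,2] + t[0,0]*t[2,2] \
       - t[0,1]**2 - t[1,2]**2 - t[0,2]**2
    I3 = t[0,0]*t[1,1]*t[2,2] + 2.*t[0,1]*t[1,2]*t[2,0] \
       - t[2,2]*t[0,1]**2 - t[0,0]*t[1,2]**2 - t[1,1]*t[0,2]**2
    return I1, I2, I3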
@ -406,17 +376,18 @@ class MARC_POST():


class VTK_WRITER():
'''
"""
The resulting vtk-file can be imported in Paraview 3.12

Then use Filters: Cell Data to Point Data + Contour
to plot semi-transparent iso-surfaces.
'''
"""

import re
def __init__(self):
self.p=MARC_POST() # self.p

def openFile(self, filename='test.vtp'):
#if not self.f:#==None:
self.f=open(filename,'w+')
self.fname=filename

@ -427,7 +398,7 @@ class VTK_WRITER():
dformat='ASCII', # BINARY | [ASCII]
dtype='UNSTRUCTURED_GRID' # UNSTRUCTURED GRID
):
if vtkFile==None:
if vtkFile is None:
vtkFile=self.f
# First Line contains Data format version
self.versionVTK=version

@ -440,7 +411,6 @@ class VTK_WRITER():

def marc2vtkBatch(self):
for iori in range(1,63):
#self.p=msc_post.MSC_POST()
self.p.postname='indent_fric0.3_R2.70_cA146.0_h0.320_ori%03i_OST_h19d.t16'%(iori)
if os.path.exists(self.p.postname):
self.marc2vtk(mode='fast', batchMode=1)

@ -496,14 +466,14 @@ class VTK_WRITER():
def scaleBar(self, length=1.0, posXYZ=[0., 0., 0.]):
self.fsb=open('micronbar_l%.1f.vtp'%length,'w+')
self.writeFirstLines(self.fsb, comment='micronbar')
pts=N.array([])
pts=np.array([])
width=length*1.
height=length*1.
wVec=N.array([0., width, 0.])
lVec=N.array([length,0.,0.])
hVec=N.array([0.,0.,height])
wVec=np.array([0., width, 0.])
lVec=np.array([length,0.,0.])
hVec=np.array([0.,0.,height])
posXYZ=posXYZ-0.5*wVec-0.5*lVec#-0.5*hVec # CENTERING Y/N
posXYZ=N.array(posXYZ)
posXYZ=np.array(posXYZ)
pts=[posXYZ, posXYZ+lVec,
posXYZ+wVec,
posXYZ+wVec+lVec]

@ -514,34 +484,22 @@ class VTK_WRITER():
self.fsb.write('%f %f %f\n'%(pts[npts][0], pts[npts][1], pts[npts][2]))
if 1: #Triad
nCells=3
#nCells=1 #One Line
ptsPerCell=2 # Lines (Type=3)
#ptsPerCell=4 # Quads (Type=9)
#ptsPerCell=8 # Hexahedron (Type=12)
cellSize=(ptsPerCell+1)*nCells
self.fsb.write('CELLS %i %i\n'%(nCells,cellSize))
self.fsb.write('2 0 1\n') #X-Line
self.fsb.write('2 0 2\n') #Y-Line
self.fsb.write('2 0 4\n') #Z-Line
#self.fsb.write('4 0 1 3 2\n') #Quad
#self.fsb.write('%i 0 1 3 2 4 5 7 6\n'%ptsPerCell) #Hexahedron
self.fsb.write('CELL_TYPES %i\n'%(nCells))
self.fsb.write('3\n3\n3\n')#Line
#self.fsb.write('12\n')#Hexahedron
else: # Cube, change posXYZ
nCells=1
ptsPerCell=2 # Lines (Type=3)
#ptsPerCell=4 # Quads (Type=9)
#ptsPerCell=8 # Hexahedron (Type=12)
cellSize=(ptsPerCell+1)*nCells
self.fsb.write('CELLS %i %i\n'%(nCells,cellSize))
self.fsb.write('2 0 1\n') #Line
#self.fsb.write('4 0 1 3 2\n') #Quad
#self.fsb.write('%i 0 1 3 2 4 5 7 6\n'%ptsPerCell) #Hexahedron
self.fsb.write('CELL_TYPES %i\n'%(nCells))
self.fsb.write('3\n')#Line
#self.fsb.write('12\n')#Hexahedron

self.fsb.write('\n')
self.fsb.close()
@ -549,8 +507,7 @@ class VTK_WRITER():

def example_unstructured(self):
self.openFile(filename='example_unstructured_grid.vtk')
#self.writeFirstLines()
self.f.write('''
self.f.write("""
# vtk DataFile Version 2.0
example_unstruct_grid
ASCII

@ -590,61 +547,40 @@ LOOKUP_TABLE default
1.02
1.50
0.00
3 5 6 23423423423423423423.23423423''')
3 5 6 23423423423423423423.23423423""")
self.f.close()


def writeNodes2VTK(self, fobj):
self.VTKcnt=200 # how many numbers per line in vtk file
#self.VTKcnt=6
ndCnt=1
#self.nodes=range(0,10)
fobj.write('POINTS %i'%self.p.nodes()+' float\n')
for iNd in self.nodes:
nd=self.p.node(iNd)
disp=self.p.node_displacement(iNd)
#contact=self.p.node_scalar(iNd,contactNr)
#ndCnt=ndCnt+1
fobj.write('%f %f %f \n'%
#(nd.x, nd.y, nd.z))
(nd.x+disp[0], nd.y+disp[1], nd.z+disp[2]))

#if ndCnt>6:
# fobj.write('\n')
# ndCnt=1
fobj.write('\n')
print('Nodes written to VTK: %i'%self.p.nodes())
#print('Nr of nodes: ',self.nodes)

def writeElements2VTK(self, fobj):
fobj.write('\nCELLS %i %i'%(self.p.elements(),self.p.elements()*9)+'\n')
for iEl in range(0,self.nel):
el=self.p.element(iEl)
#disp=self.p.node_displacement(iNd)
#contact=self.p.node_scalar(iNd,contactNr)
#ndCnt=ndCnt+1
fobj.write('8 ')
ndlist=el.items
#for k in [0, 1, 3, 2, 4, 5, 7, 6]: # FOR CELL TPYE VTK_VOXEL
#for k in [0, 4, 3, 1, 5, 7, 6, 2]:
for k in [0, 1, 2, 3, 4, 5, 6, 7]: # FOR CELL TYPE VTK_HEXAHEDRON
fobj.write('%6i '%(ndlist[k]-1))
fobj.write('\n')
#if ndCnt>6:
# fobj.write('\n')
# ndCnt=1
fobj.write('\nCELL_TYPES %i'%self.p.elements()+'\n')
cnt=0
for iEl in range(0,self.nel):
cnt=cnt+1
#fobj.write('11\n') #VTK_VOXEL
fobj.write('12 ') #VTK_HEXAHEDRON
if cnt>self.VTKcnt:
fobj.write('\n');cnt=0
fobj.write('\n')
print('Elements written to VTK: %i'%self.p.elements())
#print('Nr of nodes: ',self.nodes)

def writeElScalars2NodesVTK(self,fobj):
fobj.write('\nPOINT_DATA %i\n'%self.p.nodes())
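The writeElements2VTK hunk above emits the legacy-VTK cell block for 8-node bricks: a CELLS header sized as elements*9 (one count plus eight node indices per element), one '8 n0 ... n7' line per element, and a CELL_TYPES block filled with 12 (VTK_HEXAHEDRON). A compact sketch of that output for a plain list of zero-based connectivities, without the py_post element objects used above:

def write_hexahedra(fobj, cells):
    """Write legacy-VTK CELLS/CELL_TYPES blocks for 8-node hexahedra (sketch)."""
    fobj.write('\nCELLS %i %i\n' % (len(cells), len(cells)*9))
    for nodes in cells:                                   # nodes: 8 zero-based point indices
        fobj.write('8 ' + ' '.join('%6i' % n for n in nodes) + '\n')
    fobj.write('\nCELL_TYPES %i\n' % len(cells))
    fobj.write('\n'.join('12' for _ in cells) + '\n')     # 12 = VTK_HEXAHEDRON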
@ -668,10 +604,7 @@ LOOKUP_TABLE default
fobj.write('\n')

def writeNodeScalars2VTK(self,fobj):
#print('writeElementData2VTK')
fobj.write('\nPOINT_DATA %i\n'%self.p.nodes())
nNdDat=self.nscals
nComponents=1+nNdDat
for idxNdScal in range(-3,self.nscals): # include node x,y,z
if idxNdScal>=0:
datalabel=self.nscal_list[idxNdScal]

@ -700,10 +633,7 @@ LOOKUP_TABLE default
fobj.write('\n')

def writeElementData2VTK(self,fobj):
#print('writeElementData2VTK')
fobj.write('\nCELL_DATA %i\n'%self.p.elements())
nElDat=self.elscals
nComponents=1+nElDat
for idxElScal in range(0,self.elscals):
datalabel=self.elscal_list[idxElScal]
datalabel=re.sub("\s",'_',datalabel)

@ -730,7 +660,7 @@ LOOKUP_TABLE default
def example1(self):
self.openFile()
self.writeFirstLines()
self.f.write('''DATASET POLYDATA
self.f.write("""DATASET POLYDATA
POINTS 8 float
0.0 0.0 0.0
1.0 0.0 0.0

@ -789,18 +719,20 @@ LOOKUP_TABLE my_table 8
0.0 0.0 1.0 1.0
1.0 0.0 1.0 1.0
0.0 1.0 1.0 1.0
1.0 1.0 1.0 1.0''')
1.0 1.0 1.0 1.0""")
self.f.close()


import pyvtk
class marc_to_vtk():
'''
"""
Anybody wants to implement it with pyvtk?

The advantage would be that pyvtk can also wirte the
<xml>-VTK format and binary.
These can be plotted with mayavi.
'''
"""

def __init__(self):
self.p=[]#MARC_POST() # self.p

@ -810,5 +742,4 @@ class marc_to_vtk():
hexahedron=self.p.cells),
'm2v output')
vtk.tofile('m2v_file')
#vtk.tofile('example3b','binary')
#VtkData('example3')
@ -1,5 +1,5 @@
# -*- coding: UTF-8 no BOM -*-

# $Id$
"""Test functionality"""

from .test import Test
from .test import Test # noqa
@ -2,17 +2,19 @@

# $Id$

import os, sys, shlex, inspect
import subprocess,shutil,string
import logging, logging.config
import os,sys,shutil
import logging,logging.config
import damask
import numpy as np
from collections import Iterable
from optparse import OptionParser

class Test():
'''
"""
General class for testing.

Is sub-classed by the individual tests.
'''
"""

variants = []

@ -52,9 +54,7 @@ class Test():
accept=False)

def execute(self):
'''
Run all variants and report first failure.
'''
"""Run all variants and report first failure."""
if self.options.debug:
for variant in xrange(len(self.variants)):
try:

@ -84,15 +84,11 @@ class Test():
return 0

def testPossible(self):
'''
Check if test is possible or not (e.g. no license available).
'''
"""Check if test is possible or not (e.g. no license available)."""
return True

def clean(self):
'''
Delete directory tree containing current results.
'''
"""Delete directory tree containing current results."""
status = True

try:
@ -110,103 +106,77 @@ class Test():
return status

def prepareAll(self):
'''
Do all necessary preparations for the whole test
'''
"""Do all necessary preparations for the whole test"""
return True

def prepare(self,variant):
'''
Do all necessary preparations for the run of each test variant
'''
"""Do all necessary preparations for the run of each test variant"""
return True


def run(self,variant):
'''
Execute the requested test variant.
'''
"""Execute the requested test variant."""
return True


def postprocess(self,variant):
'''
Perform post-processing of generated results for this test variant.
'''
"""Perform post-processing of generated results for this test variant."""
return True


def compare(self,variant):
'''
Compare reference to current results.
'''
"""Compare reference to current results."""
return True


def update(self,variant):
'''
Update reference with current results.
'''
"""Update reference with current results."""
logging.debug('Update not necessary')
return True


def dirReference(self):
'''
Directory containing reference results of the test.
'''
"""Directory containing reference results of the test."""
return os.path.normpath(os.path.join(self.dirBase,'reference/'))


def dirCurrent(self):
'''
Directory containing current results of the test.
'''
"""Directory containing current results of the test."""
return os.path.normpath(os.path.join(self.dirBase,'current/'))


def dirProof(self):
'''
Directory containing human readable proof of correctness for the test.
'''
"""Directory containing human readable proof of correctness for the test."""
return os.path.normpath(os.path.join(self.dirBase,'proof/'))


def fileInRoot(self,dir,file):
'''
Path to a file in the root directory of DAMASK.
'''
"""Path to a file in the root directory of DAMASK."""
return os.path.join(damask.Environment().rootDir(),dir,file)


def fileInReference(self,file):
'''
Path to a file in the refrence directory for the test.
'''
"""Path to a file in the refrence directory for the test."""
return os.path.join(self.dirReference(),file)


def fileInCurrent(self,file):
'''
Path to a file in the current results directory for the test.
'''
"""Path to a file in the current results directory for the test."""
return os.path.join(self.dirCurrent(),file)


def fileInProof(self,file):
'''
Path to a file in the proof directory for the test.
'''
"""Path to a file in the proof directory for the test."""
return os.path.join(self.dirProof(),file)


def copy(self, mapA, mapB,
A = [], B = []):
'''
"""
copy list of files from (mapped) source to target.
mapA/B is one of self.fileInX.
'''

mapA/B is one of self.fileInX.
"""
if not B or len(B) == 0: B = A

for source,target in zip(map(mapA,A),map(mapB,B)):
@ -328,7 +298,8 @@ class Test():
logging.info('comparing ASCII Tables\n %s \n %s'%(file0,file1))
if normHeadings == '': normHeadings = headings0

if len(headings0) == len(headings1) == len(normHeadings): #check if comparison is possible and determine lenght of columns
# check if comparison is possible and determine lenght of columns
if len(headings0) == len(headings1) == len(normHeadings):
dataLength = len(headings0)
length = [1 for i in xrange(dataLength)]
shape = [[] for i in xrange(dataLength)]
@ -431,15 +402,11 @@ class Test():
meanTol = 1.0e-4,
stdTol = 1.0e-6,
preFilter = 1.0e-9):

'''
"""
calculate statistics of tables

threshold can be used to ignore small values (a negative number disables this feature)
'''

import numpy as np
from collections import Iterable

"""
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
files = [str(files)]

@ -491,15 +458,11 @@ class Test():
preFilter = -1.0,
postFilter = -1.0,
debug = False):

'''
"""
compare tables with np.allclose

threshold can be used to ignore small values (a negative number disables this feature)
'''

import numpy as np
from collections import Iterable

"""
if not (isinstance(files, Iterable) and not isinstance(files, str)): # check whether list of files is requested
files = [str(files)]
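Both hunks above mainly hoist the numpy and Iterable imports to module level and convert the docstrings; the comparison they describe, np.allclose after discarding values below a threshold, can be sketched independently of the ASCIItable plumbing. This is a rough stand-in under my own simplifying assumptions, not the actual pre/post filter logic of compare_Tables:

import numpy as np

def tables_close(data0, data1, rtol=1.0e-5, atol=1.0e-8, threshold=-1.0):
    """np.allclose comparison of two equally shaped tables (sketch).

    A negative threshold disables the filter; otherwise entries that are
    small in both tables are masked out before comparing."""
    data0 = np.asarray(data0, dtype=float)
    data1 = np.asarray(data1, dtype=float)
    if threshold >= 0.0:
        mask = (np.abs(data0) > threshold) | (np.abs(data1) > threshold)
        data0, data1 = data0[mask], data1[mask]
    return np.allclose(data0, data1, rtol=rtol, atol=atol)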
@ -165,7 +165,7 @@ class backgroundMessage(threading.Thread):
def print_message(self):
length = len(self.symbols[self.counter] + self.gap + self.message)
sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length + \
self.symbols[self.counter] + self.gap + self.new_message) # delete former and print new message
self.symbols[self.counter].encode('utf-8') + self.gap + self.new_message) # delete former and print new message
sys.stderr.flush()
self.message = self.new_message
@ -442,9 +442,9 @@ def execute(cmd,streamIn=None,wd='./'):
os.chdir(wd)
process = subprocess.Popen(shlex.split(cmd),stdout=subprocess.PIPE,stderr = subprocess.PIPE,stdin=subprocess.PIPE)
if streamIn is not None:
out,error = process.communicate(streamIn.read())
out,error = [i.replace("\x08","") for i in process.communicate(streamIn.read())]
else:
out,error = process.communicate()
out,error =[i.replace("\x08","") for i in process.communicate()]
os.chdir(initialPath)
if process.returncode !=0: raise RuntimeError(cmd+' failed with returncode '+str(process.returncode))
return out,error
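The execute() hunk strips backspace characters ("\x08") from both output streams, which matters because print_message() above now overwrites its terminal spinner with chr(8) sequences. A trimmed-down sketch of the same pattern, as a generic runner rather than DAMASK's util.execute (universal_newlines is my addition so communicate() returns text):

import shlex
import subprocess

def run(cmd, stream_in=None):
    """Run cmd and return (stdout, stderr) with terminal backspaces removed (sketch)."""
    process = subprocess.Popen(shlex.split(cmd),
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE,
                               universal_newlines=True)
    streams = process.communicate(stream_in.read() if stream_in is not None else None)
    out, error = [i.replace("\x08", "") for i in streams]   # drop backspace characters
    if process.returncode != 0:
        raise RuntimeError('{} failed with returncode {}'.format(cmd, process.returncode))
    return out, error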
@ -110,7 +110,7 @@ for name in filenames:
remarks = []

if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
if not np.all(table.label_dimension(label) == dim): errors.append('input {} has wrong dimension {}.'.format(label,dim))
if not np.all(table.label_dimension(label) == dim): errors.append('input {} does not have dimension {}.'.format(label,dim))
else: column = table.label_index(label)

if remarks != []: damask.util.croak(remarks)

@ -99,7 +99,7 @@ for name in filenames:
# ------------------------------------------ sanity checks ----------------------------------------

if not np.all(table.label_dimension(label) == dim):
damask.util.croak('input {} has wrong dimension {}.'.format(label,dim))
damask.util.croak('input {} does not have dimension {}.'.format(label,dim))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue

@ -113,7 +113,7 @@ for name in filenames:
errors = []
remarks = []

if not np.all(table.label_dimension(label) == dim): errors.append('input {} has wrong dimension {}.'.format(label,dim))
if not np.all(table.label_dimension(label) == dim): errors.append('input {} does not have dimension {}.'.format(label,dim))
else: column = table.label_index(label)

if remarks != []: damask.util.croak(remarks)

@ -101,7 +101,7 @@ for name in filenames:
errors = []
remarks = []

if not np.all(table.label_dimension(label) == dim): errors.append('input {} has wrong dimension {}.'.format(label,dim))
if not np.all(table.label_dimension(label) == dim): errors.append('input {} does not have dimension {}.'.format(label,dim))
else: column = table.label_index(label)

if remarks != []: damask.util.croak(remarks)

@ -98,7 +98,7 @@ for name in filenames:
errors.append('column{} {} not found'.format('s' if len(missing_labels) > 1 else '',
', '.join(missing_labels)))
if table.label_dimension(options.label) != 3:
errors.append('column {} has wrong dimension'.format(options.label))
errors.append('column {} does not have dimension'.format(options.label))

if errors != []:
damask.util.croak(errors)
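The script hunks above all make the same wording change in an ASCIItable sanity check. The shared pattern, verify that the requested column exists with the expected dimension and croak otherwise, reduces to roughly this helper; only label_dimension, label_index and damask.util.croak come from the diff, the wrapper itself is hypothetical:

import numpy as np
import damask

def check_column(table, label, dim):
    """Return the starting column of 'label' if it has dimension 'dim', else report and return None."""
    if not np.all(table.label_dimension(label) == dim):
        damask.util.croak('input {} does not have dimension {}.'.format(label, dim))
        return None
    return table.label_index(label)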
@ -276,10 +276,10 @@ for name in filenames:
grid = np.vstack(meshgrid2(x, y, z)).reshape(3,-1).T
indices = laguerreTessellation(grid, coords, weights, grains, options.nonperiodic, options.cpus)

# --- write header ---------------------------------------------------------------------------------
# --- write header ------------------------------------------------------------------------

grainIDs = np.intersect1d(grainIDs,indices)
info['microstructures'] = len(grainIDs)
usedGrainIDs = np.intersect1d(grainIDs,indices)
info['microstructures'] = len(usedGrainIDs)

if info['homogenization'] == 0: info['homogenization'] = options.homogenization

@ -292,13 +292,13 @@ for name in filenames:
])

config_header = []
formatwidth = 1+int(math.log10(info['microstructures']))
formatwidth = 1+int(math.log10(NgrainIDs))

phase = options.phase * np.ones(info['microstructures'],'i')
phase = options.phase * np.ones(NgrainIDs,'i')
if int(options.secondphase*info['microstructures']) > 0:
phase[0:int(options.secondphase*info['microstructures'])] += 1
randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed is None \
else options.randomSeed # random seed for second phase
phase[0:int(options.secondphase*info['microstructures'])] += 1 # alter fraction 'options.secondphase' of used grainIDs
randomSeed = options.randomSeed if options.randomSeed \
else int(os.urandom(4).encode('hex'), 16) # random seed for second phase
np.random.seed(randomSeed)
np.random.shuffle(phase)
config_header += ['# random seed (phase shuffling): {}'.format(randomSeed)]
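The hunk above reorders the seeding logic so that an explicit randomSeed option wins and os.urandom only serves as the fallback, then shuffles the per-grain phase assignment with that seed. A compact sketch of that step (the function and its arguments are illustrative; the urandom/'hex' idiom is the script's own Python 2 style):

import os
import numpy as np

def assign_phases(NgrainIDs, primary_phase=1, second_fraction=0.0, random_seed=None):
    """Assign a phase to each grain and promote a random fraction to a second phase (sketch)."""
    phase = primary_phase * np.ones(NgrainIDs, 'i')
    n_second = int(second_fraction * NgrainIDs)
    if n_second > 0:
        phase[0:n_second] += 1                                   # mark the second-phase grains
        if random_seed is None:                                   # explicit seed wins,
            random_seed = int(os.urandom(4).encode('hex'), 16)    # urandom only as fallback (Python 2 idiom)
        np.random.seed(random_seed)
        np.random.shuffle(phase)                                  # distribute them randomly
    return phase, random_seed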
@ -120,7 +120,7 @@ for name in filenames:
np.where(
ndimage.morphology.binary_dilation(interfaceEnergy > 0.,
structure = struc,
terations = options.d/2 + 1), # fat boundary | PE: why 2d-1? I would argue for d/2 + 1
iterations = options.d/2 + 1), # fat boundary | PE: why 2d-1? I would argue for d/2 + 1
periodic_bulkEnergy[grid[0]/2:-grid[0]/2, # retain filled energy on fat boundary...
grid[1]/2:-grid[1]/2,
grid[2]/2:-grid[2]/2], # ...and zero everywhere else