Merge remote-tracking branch 'origin/development' into thermal-restart

commit d72347fe25
Author: Sharan
Date:   2022-02-02 23:07:32 +01:00

12 changed files with 55 additions and 242 deletions

View File

@@ -1,178 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
from scipy import ndimage
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
getInterfaceEnergy = lambda A,B: np.float32((A != B)*1.0) # 1.0 if A & B are distinct, 0.0 otherwise
struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [geomfile(s)]', description = """
Smoothen interface roughness by simulated curvature flow.
This is achieved by the diffusion of each initially sharply bounded grain volume within the periodic domain
up to a given distance 'd' voxels.
The final geometry is assembled by selecting at each voxel that grain index for which the concentration remains largest.
References 10.1073/pnas.1111557108 (10.1006/jcph.1994.1105)
""", version = scriptID)

parser.add_option('-d', '--distance',
                  dest = 'd',
                  type = 'float', metavar = 'float',
                  help = 'diffusion distance in voxels [%default]')
parser.add_option('-N', '--iterations',
                  dest = 'N',
                  type = 'int', metavar = 'int',
                  help = 'curvature flow iterations [%default]')
parser.add_option('-i', '--immutable',
                  action = 'extend', dest = 'immutable', metavar = '<int LIST>',
                  help = 'list of immutable material indices')
parser.add_option('--ndimage',
                  dest = 'ndimage', action='store_true',
                  help = 'use ndimage.gaussian_filter in lieu of explicit FFT')

parser.set_defaults(d = 1,
                    N = 1,
                    immutable = [],
                    ndimage = False,
                   )

(options, filenames) = parser.parse_args()
options.immutable = list(map(int,options.immutable))
if filenames == []: filenames = [None]
for name in filenames:
    damask.util.report(scriptName,name)

    geom = damask.Grid.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
    grid_original = geom.cells
    damask.util.croak(geom)
    material = np.tile(geom.material,np.where(grid_original == 1, 2,1))          # make one copy along dimensions with grid == 1
    grid = np.array(material.shape)

    # --- initialize support data ---------------------------------------------------------------------

    # store a copy of the initial material indices to find locations of immutable indices
    material_original = np.copy(material)
    if not options.ndimage:
        X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]

        # Calculates gaussian weights for simulating 3d diffusion
        gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d),dtype=np.float32) \
              / np.power(2.0*np.pi*options.d*options.d,(3.0 - np.count_nonzero(grid_original == 1))/2.,dtype=np.float32)

        gauss[:,:,:grid[2]//2:-1] = gauss[:,:,1:(grid[2]+1)//2]                  # trying to cope with uneven (odd) grid size
        gauss[:,:grid[1]//2:-1,:] = gauss[:,1:(grid[1]+1)//2,:]
        gauss[:grid[0]//2:-1,:,:] = gauss[1:(grid[0]+1)//2,:,:]
        gauss = np.fft.rfftn(gauss).astype(np.complex64)
    for smoothIter in range(options.N):

        interfaceEnergy = np.zeros(material.shape,dtype=np.float32)
        for i in (-1,0,1):
            for j in (-1,0,1):
                for k in (-1,0,1):
                    # assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
                    interfaceEnergy = np.maximum(interfaceEnergy,
                                                 getInterfaceEnergy(material,np.roll(np.roll(np.roll(
                                                                    material,i,axis=0), j,axis=1), k,axis=2)))

        # periodically extend interfacial energy array by half a grid size in positive and negative directions
        periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]//2:-grid[0]//2,
                                                                    grid[1]//2:-grid[1]//2,
                                                                    grid[2]//2:-grid[2]//2]

        # transform bulk volume (i.e. where interfacial energy remained zero), store index of closest boundary voxel
        index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0.,
                                                          return_distances = False,
                                                          return_indices = True)
        # want array index of nearest voxel on periodically extended boundary
        periodic_bulkEnergy = periodic_interfaceEnergy[index[0],
                                                       index[1],
                                                       index[2]].reshape(2*grid)  # fill bulk with energy of nearest interface
        if options.ndimage:
            periodic_diffusedEnergy = ndimage.gaussian_filter(
                                      np.where(ndimage.morphology.binary_dilation(periodic_interfaceEnergy > 0.,
                                                                                  structure = struc,
                                                                                  iterations = int(round(options.d*2.))-1, # fat boundary
                                                                                 ),
                                               periodic_bulkEnergy,               # ...and zero everywhere else
                                               0.),
                                      sigma = options.d)
        else:
            diffusedEnergy = np.fft.irfftn(np.fft.rfftn(
                             np.where(
                                 ndimage.morphology.binary_dilation(interfaceEnergy > 0.,
                                                                    structure = struc,
                                                                    iterations = int(round(options.d*2.))-1), # fat boundary
                                 periodic_bulkEnergy[grid[0]//2:-grid[0]//2,       # retain filled energy on fat boundary...
                                                     grid[1]//2:-grid[1]//2,
                                                     grid[2]//2:-grid[2]//2],      # ...and zero everywhere else
                                 0.)).astype(np.complex64) *
                             gauss).astype(np.float32)

            periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]//2:-grid[0]//2,
                                                                      grid[1]//2:-grid[1]//2,
                                                                      grid[2]//2:-grid[2]//2]   # periodically extend the smoothed bulk energy
        # transform voxels close to interface region
        index = ndimage.morphology.distance_transform_edt(periodic_diffusedEnergy >= 0.95*np.amax(periodic_diffusedEnergy),
                                                          return_distances = False,
                                                          return_indices = True)   # want index of closest bulk grain

        periodic_material = np.tile(material,(3,3,3))[grid[0]//2:-grid[0]//2,
                                                      grid[1]//2:-grid[1]//2,
                                                      grid[2]//2:-grid[2]//2]       # periodically extend the geometry

        material = periodic_material[index[0],
                                     index[1],
                                     index[2]].reshape(2*grid)[grid[0]//2:-grid[0]//2,
                                                               grid[1]//2:-grid[1]//2,
                                                               grid[2]//2:-grid[2]//2]   # extend grains into interface region

        # replace immutable materials with closest mutable ones
        index = ndimage.morphology.distance_transform_edt(np.in1d(material,options.immutable).reshape(grid),
                                                          return_distances = False,
                                                          return_indices = True)
        material = material[index[0],
                            index[1],
                            index[2]]
        immutable = np.zeros(material.shape, dtype=np.bool)
        # find locations where immutable materials have been in original structure
        for micro in options.immutable:
            immutable += material_original == micro

        # undo any changes involving immutable materials
        material = np.where(immutable, material_original,material)

    damask.Grid(material = material[0:grid_original[0],0:grid_original[1],0:grid_original[2]],
                size     = geom.size,
                origin   = geom.origin,
                comments = geom.comments + [scriptID + ' ' + ' '.join(sys.argv[1:])],
               )\
        .save(sys.stdout if name is None else name)
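
For context, the removed script implements the curvature-flow smoothing sketched in its own help text: diffuse each grain's indicator field over the periodic domain, then reassign every voxel to the grain whose concentration remains largest. A minimal sketch of that principle (illustration only, not the removed script; grid size, grain indices, and the width d are placeholders):

import numpy as np
from scipy import ndimage

def curvature_flow_step(material, d=1.0):
    """Diffuse each grain's indicator field and keep, per voxel, the grain
    with the largest remaining concentration (periodic domain via mode='wrap')."""
    ids = np.unique(material)
    concentration = np.stack([ndimage.gaussian_filter((material == i).astype(np.float32),
                                                      sigma=d, mode='wrap')
                              for i in ids])
    return ids[np.argmax(concentration, axis=0)]

rng = np.random.default_rng(0)
material = rng.integers(1, 4, size=(16, 16, 16))      # toy three-grain structure
smoothed = curvature_flow_step(material, d=1.5)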

View File

@@ -1 +1 @@
-v3.0.0-alpha5-545-gad74f5dbe
+v3.0.0-alpha5-552-ga6e78c5b6

View File

@@ -9,3 +9,4 @@ import numpy as np
 FloatSequence = Union[np.ndarray,Sequence[float]]
 IntSequence = Union[np.ndarray,Sequence[int]]
 FileHandle = Union[TextIO, str, Path]
+NumpyRngSeed = Union[int, IntSequence, np.random.SeedSequence, np.random.BitGenerator, np.random.Generator]
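
For reference, the new NumpyRngSeed alias covers exactly the values that numpy's np.random.default_rng accepts, so annotated functions can normalize any such value to a Generator. A short sketch (make_rng is an illustrative helper, not part of this commit):

import numpy as np

def make_rng(rng_seed=None) -> np.random.Generator:
    return np.random.default_rng(rng_seed)            # accepts every NumpyRngSeed variant

make_rng()                                            # fresh entropy
make_rng(20191102)                                    # int
make_rng([2019, 11, 2])                               # sequence of ints
make_rng(np.random.SeedSequence(20191102))            # SeedSequence
make_rng(np.random.default_rng(20191102))             # an existing Generator is passed through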

View File

@@ -1,3 +1,4 @@
 """Functionality for generation of seed points for Voronoi or Laguerre tessellation."""

 from typing import Tuple as _Tuple
@@ -5,7 +6,7 @@ from typing import Tuple as _Tuple
 from scipy import spatial as _spatial
 import numpy as _np
-from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence
+from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence, NumpyRngSeed as _NumpyRngSeed
 from . import util as _util
 from . import grid_filters as _grid_filters
@@ -13,7 +14,7 @@ from . import grid_filters as _grid_filters
 def from_random(size: _FloatSequence,
                 N_seeds: int,
                 cells: _IntSequence = None,
-                rng_seed=None) -> _np.ndarray:
+                rng_seed: _NumpyRngSeed = None) -> _np.ndarray:
     """
     Place seeds randomly in space.
@@ -53,7 +54,7 @@ def from_Poisson_disc(size: _FloatSequence,
                       N_candidates: int,
                       distance: float,
                       periodic: bool = True,
-                      rng_seed=None) -> _np.ndarray:
+                      rng_seed: _NumpyRngSeed = None) -> _np.ndarray:
     """
     Place seeds according to a Poisson disc distribution.
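
A usage sketch of the widened annotation (values are illustrative and assume the seed is consumed via np.random.default_rng internally): any NumpyRngSeed value can now be passed for reproducible seed placement.

import numpy as np
import damask

size = np.ones(3)                                               # edge lengths of the periodic box
s1 = damask.seeds.from_random(size, N_seeds=20, rng_seed=20191102)
s2 = damask.seeds.from_random(size, N_seeds=20,
                              rng_seed=np.random.default_rng(20191102))
assert np.allclose(s1, s2)                                      # same seed value, same points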

View File

@@ -16,7 +16,7 @@ import numpy as np
 import h5py

 from . import version
-from ._typehints import IntSequence, FloatSequence
+from ._typehints import FloatSequence, NumpyRngSeed

 # limit visibility
 __all__=[
@@ -396,7 +396,7 @@ def execution_stamp(class_name: str,
 def hybrid_IA(dist: np.ndarray,
               N: int,
-              rng_seed: Union[int, IntSequence] = None) -> np.ndarray:
+              rng_seed: NumpyRngSeed = None) -> np.ndarray:
     """
     Hybrid integer approximation.
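
A usage sketch of the re-annotated function, assuming hybrid_IA returns bin indices whose histogram approximates the requested distribution (behavior inferred from its name and typical use, not from this diff):

import numpy as np
import damask

dist = np.array([0.1, 0.2, 0.3, 0.4])                      # weights per bin
idx = damask.util.hybrid_IA(dist, 10000, rng_seed=0)        # any NumpyRngSeed value is accepted here
counts = np.bincount(idx, minlength=dist.size)
print(counts / counts.sum())                                # expected to track dist/dist.sum()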

View File

@@ -287,7 +287,7 @@ class TestOrientation:
     @pytest.mark.parametrize('family',crystal_families)
     @pytest.mark.parametrize('proper',[True,False])
     def test_in_SST(self,family,proper):
-        assert Orientation(family=family).in_SST(np.zeros(3),proper) # noqa
+        assert Orientation(family=family).in_SST(np.zeros(3),proper)

     @pytest.mark.parametrize('function',['in_SST','IPF_color'])
     def test_invalid_argument(self,function):

View File

@@ -367,13 +367,13 @@ class TestResult:
     @pytest.mark.parametrize('mode',['cell','node'])
     def test_coordinates(self,default,mode):
-        if mode == 'cell': # noqa
+        if mode == 'cell':
             a = grid_filters.coordinates0_point(default.cells,default.size,default.origin)
             b = default.coordinates0_point.reshape(tuple(default.cells)+(3,),order='F')
         elif mode == 'node':
             a = grid_filters.coordinates0_node(default.cells,default.size,default.origin)
             b = default.coordinates0_node.reshape(tuple(default.cells+1)+(3,),order='F')
         assert np.allclose(a,b)

     @pytest.mark.parametrize('output',['F','*',['P'],['P','F']],ids=range(4))
     @pytest.mark.parametrize('fname',['12grains6x7x8_tensionY.hdf5'],ids=range(1))
@@ -421,7 +421,7 @@ class TestResult:
     def test_XDMF_datatypes(self,tmp_path,single_phase,update,ref_path):
         for shape in [('scalar',()),('vector',(3,)),('tensor',(3,3)),('matrix',(12,))]:
             for dtype in ['f4','f8','i1','i2','i4','i8','u1','u2','u4','u8']:
-                single_phase.add_calculation(f"np.ones(np.shape(#F#)[0:1]+{shape[1]},'{dtype}')",f'{shape[0]}_{dtype}') # noqa
+                single_phase.add_calculation(f"np.ones(np.shape(#F#)[0:1]+{shape[1]},'{dtype}')",f'{shape[0]}_{dtype}')
         fname = os.path.splitext(os.path.basename(single_phase.fname))[0]+'.xdmf'
         os.chdir(tmp_path)
         single_phase.export_XDMF()

View File

@@ -1076,19 +1076,19 @@ class TestRotation:
     def test_from_fiber_component(self,N,sigma):
         p = []
         for run in range(5):
-            alpha = np.random.random()*2*np.pi,np.arccos(np.random.random()) # noqa
+            alpha = np.random.random()*2*np.pi,np.arccos(np.random.random())
             beta = np.random.random()*2*np.pi,np.arccos(np.random.random())

             f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])])
             f_in_S = np.array([np.sin(beta[0] )*np.cos(beta[1] ), np.sin(beta[0] )*np.sin(beta[1] ), np.cos(beta[0] )])
             ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S)))
             n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True)   # rotation to align fiber axis in crystal and sample system

             o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),N,False)
             angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,(N,3)),n@f_in_S),-1,1))
             dist = np.array(angles) * (np.random.randint(0,2,N)*2-1)

             p.append(stats.normaltest(dist)[1])

         sigma_out = np.degrees(np.std(dist))
         p = np.average(p)

View File

@@ -173,11 +173,11 @@ class TestVTK:
         polyData = VTK.from_poly_data(points)
         polyData.add(points,'coordinates')
         if update:
-            polyData.save(ref_path/'polyData') # noqa
+            polyData.save(ref_path/'polyData')
         else:
-            reference = VTK.load(ref_path/'polyData.vtp') # noqa
+            reference = VTK.load(ref_path/'polyData.vtp')
             assert polyData.__repr__() == reference.__repr__() and \
                    np.allclose(polyData.get('coordinates'),points)

     @pytest.mark.xfail(int(vtk.vtkVersion.GetVTKVersion().split('.')[0])<8, reason='missing METADATA')
     def test_compare_reference_rectilinearGrid(self,update,ref_path,tmp_path):
@@ -189,8 +189,8 @@ class TestVTK:
         rectilinearGrid.add(np.ascontiguousarray(c),'cell')
         rectilinearGrid.add(np.ascontiguousarray(n),'node')
         if update:
-            rectilinearGrid.save(ref_path/'rectilinearGrid') # noqa
+            rectilinearGrid.save(ref_path/'rectilinearGrid')
         else:
-            reference = VTK.load(ref_path/'rectilinearGrid.vtr') # noqa
+            reference = VTK.load(ref_path/'rectilinearGrid.vtr')
             assert rectilinearGrid.__repr__() == reference.__repr__() and \
                    np.allclose(rectilinearGrid.get('cell'),c)

View File

@@ -8,10 +8,9 @@ submodule(phase:eigen) thermalexpansion
   integer, dimension(:), allocatable :: kinematics_thermal_expansion_instance

   type :: tParameters
-    real(pReal) :: &
-      T_ref
-    real(pReal), dimension(3,3,3) :: &
-      A = 0.0_pReal
+    type(tPolynomial) :: &
+      A_11, &
+      A_33
   end type tParameters

   type(tParameters), dimension(:), allocatable :: param
@@ -34,7 +33,7 @@ module function thermalexpansion_init(kinematics_length) result(myKinematics)
     phase, &
     mech, &
     kinematics, &
-    kinematic_type
+    myConfig

   print'(/,1x,a)', '<<<+- phase:mechanical:eigen:thermalexpansion init -+>>>'
@@ -56,21 +55,13 @@ module function thermalexpansion_init(kinematics_length) result(myKinematics)
     do k = 1, kinematics%length
       if (myKinematics(k,p)) then
         associate(prm => param(kinematics_thermal_expansion_instance(p)))
-          kinematic_type => kinematics%get(k)
-
-          prm%T_ref = kinematic_type%get_asFloat('T_ref', defaultVal=T_ROOM)
-
-          prm%A(1,1,1) = kinematic_type%get_asFloat('A_11')
-          prm%A(1,1,2) = kinematic_type%get_asFloat('A_11,T', defaultVal=0.0_pReal)
-          prm%A(1,1,3) = kinematic_type%get_asFloat('A_11,T^2',defaultVal=0.0_pReal)
-          if (any(phase_lattice(p) == ['hP','tI'])) then
-            prm%A(3,3,1) = kinematic_type%get_asFloat('A_33')
-            prm%A(3,3,2) = kinematic_type%get_asFloat('A_33,T', defaultVal=0.0_pReal)
-            prm%A(3,3,3) = kinematic_type%get_asFloat('A_33,T^2',defaultVal=0.0_pReal)
-          end if
-          do i=1, size(prm%A,3)
-            prm%A(1:3,1:3,i) = lattice_symmetrize_33(prm%A(1:3,1:3,i),phase_lattice(p))
-          end do
+          myConfig => kinematics%get(k)
+
+          prm%A_11 = polynomial(myConfig%asDict(),'A_11','T')
+          if (any(phase_lattice(p) == ['hP','tI'])) &
+            prm%A_33 = polynomial(myConfig%asDict(),'A_33','T')
         end associate
       end if
     end do
@@ -91,22 +82,20 @@ module subroutine thermalexpansion_LiAndItsTangent(Li, dLi_dTstar, ph,me)
     dLi_dTstar                                          !< derivative of Li with respect to Tstar (4th-order tensor defined to be zero)

   real(pReal) :: T, dot_T
+  real(pReal), dimension(3,3) :: A

   T = thermal_T(ph,me)
   dot_T = thermal_dot_T(ph,me)

   associate(prm => param(kinematics_thermal_expansion_instance(ph)))
-    Li = dot_T * ( &
-              prm%A(1:3,1:3,1) &                        ! constant coefficient
-            + prm%A(1:3,1:3,2)*(T - prm%T_ref) &        ! linear coefficient
-            + prm%A(1:3,1:3,3)*(T - prm%T_ref)**2 &     ! quadratic coefficient
-              ) / &
-         (1.0_pReal &
-            + prm%A(1:3,1:3,1)*(T - prm%T_ref) / 1.0_pReal &
-            + prm%A(1:3,1:3,2)*(T - prm%T_ref)**2 / 2.0_pReal &
-            + prm%A(1:3,1:3,3)*(T - prm%T_ref)**3 / 3.0_pReal &
-          )
+    A = 0.0_pReal
+    A(1,1) = prm%A_11%at(T)
+    if (any(phase_lattice(ph) == ['hP','tI'])) A(3,3) = prm%A_33%at(T)
+    A = lattice_symmetrize_33(A,phase_lattice(ph))
+    Li = dot_T * A
   end associate

   dLi_dTstar = 0.0_pReal
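
The rewrite above replaces the hand-coded Taylor series in (T - T_ref) by tPolynomial objects evaluated at the current temperature: A_11(T) and, for hP/tI lattices, A_33(T) fill the diagonal of the expansion tensor, which is then lattice-symmetrized and scaled by dot_T. A sketch of that evaluation path in Python (coefficients, T_ref, and the hand-rolled symmetrization are illustrative, not values from the commit):

import numpy as np

T_ref = 293.15                                             # placeholder reference temperature

def A_11(T):                                               # polynomial coefficient, made-up numbers
    return 2.3e-5 + 1.0e-8*(T - T_ref) + 5.0e-12*(T - T_ref)**2

def A_33(T):
    return 2.8e-5 + 1.2e-8*(T - T_ref)

def Li_thermal(T, dot_T, lattice='hP'):
    A = np.zeros((3, 3))
    A[0, 0] = A_11(T)
    if lattice in ('hP', 'tI'):                            # independent A_33 only for these lattices
        A[2, 2] = A_33(T)
    # stand-in for lattice_symmetrize_33: copy A_11 onto the symmetry-equivalent axes
    A[1, 1] = A[0, 0]
    if lattice not in ('hP', 'tI'):
        A[2, 2] = A[0, 0]
    return dot_T * A                                       # Li = dot_T * A(T)

Li = Li_thermal(T=600.0, dot_T=2.0)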

View File

@@ -48,7 +48,7 @@ module subroutine elastic_init(phases)
         prm%C_11 = polynomial(elastic%asDict(),'C_11','T')
         prm%C_12 = polynomial(elastic%asDict(),'C_12','T')
         prm%C_44 = polynomial(elastic%asDict(),'C_44','T')

         if (any(phase_lattice(ph) == ['hP','tI'])) then
           prm%C_13 = polynomial(elastic%asDict(),'C_13','T')
           prm%C_33 = polynomial(elastic%asDict(),'C_33','T')
@@ -77,7 +77,7 @@ pure module function elastic_C66(ph,en) result(C66)
   associate(prm => param(ph))
     C66 = 0.0_pReal
     T = thermal_T(ph,en)

View File

@@ -163,7 +163,7 @@ subroutine selfTest
         'T_ref: '//trim(adjustl(x_ref_s))//IO_EOL

   Dict => YAML_parse_str(trim(YAML_s))
   p2 = polynomial(dict%asDict(),'C','T')
-  if (dNeq(p1%at(x),p2%at(x),1.0e-12_pReal)) error stop 'polynomials: init'
+  if (dNeq(p1%at(x),p2%at(x),1.0e-10_pReal)) error stop 'polynomials: init'

   p1 = polynomial(coef*[0.0_pReal,1.0_pReal,0.0_pReal],x_ref)
   if (dNeq(p1%at(x_ref+x),-p1%at(x_ref-x),1.0e-10_pReal)) error stop 'polynomials: eval(odd)'
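
The unchanged 'eval(odd)' check below the relaxed tolerance relies on a simple property: a polynomial containing only odd powers of (x - x_ref) is antisymmetric about x_ref, so p(x_ref+x) = -p(x_ref-x). A short numpy illustration of the same property (coef, x_ref, and x are arbitrary values, not taken from the test):

import numpy as np

coef, x_ref, x = 1.7, 300.0, 25.0                # arbitrary values
p = lambda y: coef*(y - x_ref)                   # counterpart of polynomial(coef*[0,1,0], x_ref)
assert np.isclose(p(x_ref + x), -p(x_ref - x), atol=1e-10)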