follow Python exception message convention of lower-case
parent aeb0e527ec
commit 47dcd4bd41
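For context, the convention applied throughout this commit is that exception and warning messages start lower-case and carry no trailing period, so they read naturally when embedded in a traceback or a longer report; several hunks additionally wrap interpolated values in quotes, e.g. f'invalid mode "{mode}"'. A minimal sketch of the style follows; it is not taken from the DAMASK sources, and the function name and check are hypothetical:

import numpy as np

def set_size(size):
    """Hypothetical setter illustrating the message style adopted in this commit."""
    size = np.asarray(size, dtype=float)
    if size.shape != (3,) or np.any(size <= 0):
        # lower-case message, offending value included for context
        raise ValueError(f'invalid size {size}')
    return size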
@@ -141,7 +141,7 @@ class Colormap(mpl.colors.ListedColormap):
 )

 if model.lower() not in toMsh:
-    raise ValueError(f'Invalid color model "{model}"')
+    raise ValueError(f'invalid color model "{model}"')

 low_high = np.vstack((low,high)).astype(float)
 out_of_bounds = np.bool_(False)
@@ -64,9 +64,9 @@ class Crystal():

 """
 if family is not None and family not in list(lattice_symmetries.values()):
-    raise KeyError(f'Invalid crystal family "{family}"')
+    raise KeyError(f'invalid crystal family "{family}"')
 if lattice is not None and family is not None and family != lattice_symmetries[lattice]:
-    raise KeyError(f'Incompatible family "{family}" for lattice "{lattice}"')
+    raise KeyError(f'incompatible family "{family}" for lattice "{lattice}"')

 self.family = lattice_symmetries[lattice] if family is None else family
 self.lattice = lattice
@@ -101,13 +101,13 @@ class Crystal():
 or (self.alpha is None or ('alpha' in self.immutable and self.alpha != self.immutable['alpha'])) \
 or (self.beta is None or ('beta' in self.immutable and self.beta != self.immutable['beta'])) \
 or (self.gamma is None or ('gamma' in self.immutable and self.gamma != self.immutable['gamma'])):
-    raise ValueError (f'Incompatible parameters {self.parameters} for crystal family {self.family}')
+    raise ValueError (f'incompatible parameters {self.parameters} for crystal family {self.family}')

 if np.any(np.array([self.alpha,self.beta,self.gamma]) <= 0):
-    raise ValueError ('Lattice angles must be positive')
+    raise ValueError ('lattice angles must be positive')
 if np.any([np.roll([self.alpha,self.beta,self.gamma],r)[0]
            >= np.sum(np.roll([self.alpha,self.beta,self.gamma],r)[1:]) for r in range(3)]):
-    raise ValueError ('Each lattice angle must be less than sum of others')
+    raise ValueError ('each lattice angle must be less than sum of others')


 def __repr__(self):
@@ -269,7 +269,7 @@ class Crystal():

 """
 if self.parameters is None:
-    raise KeyError('Missing crystal lattice parameters')
+    raise KeyError('missing crystal lattice parameters')
 return np.array([
     [1,0,0],
     [np.cos(self.gamma),np.sin(self.gamma),0],
@@ -309,7 +309,7 @@ class Crystal():
 ],
 }

-if self.lattice is None: raise KeyError('No lattice type specified')
+if self.lattice is None: raise KeyError('no lattice type specified')
 return np.array([[0,0,0]]
                 + _lattice_points.get(self.lattice if self.lattice == 'hP' else \
                                       self.lattice[-1],None),dtype=float)
@@ -333,7 +333,7 @@ class Crystal():

 """
 if (direction is not None) ^ (plane is None):
-    raise KeyError('Specify either "direction" or "plane"')
+    raise KeyError('specify either "direction" or "plane"')
 basis,axis = (self.basis_reciprocal,np.array(direction)) \
              if plane is None else \
              (self.basis_real,np.array(plane))
@@ -358,7 +358,7 @@ class Crystal():

 """
 if (uvw is not None) ^ (hkl is None):
-    raise KeyError('Specify either "uvw" or "hkl"')
+    raise KeyError('specify either "uvw" or "hkl"')
 basis,axis = (self.basis_real,np.array(uvw)) \
              if hkl is None else \
              (self.basis_reciprocal,np.array(hkl))
@@ -930,7 +930,7 @@ class Crystal():
 }
 orientation_relationships = {k:v for k,v in _orientation_relationships.items() if self.lattice in v}
 if model not in orientation_relationships:
-    raise KeyError(f'Unknown orientation relationship "{model}"')
+    raise KeyError(f'unknown orientation relationship "{model}"')
 r = orientation_relationships[model]

 sl = self.lattice
@@ -105,9 +105,9 @@ class Grid:
 def material(self,
              material: np.ndarray):
     if len(material.shape) != 3:
-        raise ValueError(f'Invalid material shape {material.shape}')
+        raise ValueError(f'invalid material shape {material.shape}')
     if material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
-        raise TypeError(f'Invalid material data type {material.dtype}')
+        raise TypeError(f'invalid material data type "{material.dtype}"')

     self._material = np.copy(material)

@@ -125,7 +125,7 @@ class Grid:
 def size(self,
          size: FloatSequence):
     if len(size) != 3 or any(np.array(size) < 0):
-        raise ValueError(f'Invalid size {size}')
+        raise ValueError(f'invalid size {size}')

     self._size = np.array(size)

@@ -138,7 +138,7 @@ class Grid:
 def origin(self,
            origin: FloatSequence):
     if len(origin) != 3:
-        raise ValueError(f'Invalid origin {origin}')
+        raise ValueError(f'invalid origin {origin}')

     self._origin = np.array(origin)

@@ -228,7 +228,7 @@ class Grid:
 except ValueError:
     header_length,keyword = (-1, 'invalid')
 if not keyword.startswith('head') or header_length < 3:
-    raise TypeError('Header length information missing or invalid')
+    raise TypeError('invalid or missing header length information')

 comments = []
 content = f.readlines()
@@ -258,7 +258,7 @@ class Grid:
 i += len(items)

 if i != cells.prod():
-    raise TypeError(f'Invalid file: expected {cells.prod()} entries, found {i}')
+    raise TypeError(f'mismatch between {cells.prod()} expected entries and {i} found')

 if not np.any(np.mod(material,1) != 0.0): # no float present
     material = material.astype('int') - (1 if material.min() > 0 else 0)
@@ -811,7 +811,7 @@ class Grid:

 """
 if not set(directions).issubset(valid := ['x', 'y', 'z']):
-    raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified')
+    raise ValueError(f'invalid direction "{set(directions).difference(valid)}" specified')

 limits: Sequence[Optional[int]] = [None,None] if reflect else [-2,0]
 mat = self.material.copy()
@@ -847,7 +847,7 @@ class Grid:

 """
 if not set(directions).issubset(valid := ['x', 'y', 'z']):
-    raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified')
+    raise ValueError(f'invalid direction "{set(directions).difference(valid)}" specified')


 mat = np.flip(self.material, [valid.index(d) for d in directions if d in valid])
@@ -1184,7 +1184,7 @@ class Grid:

 """
 if not set(directions).issubset(valid := ['x', 'y', 'z']):
-    raise ValueError(f'Invalid direction {set(directions).difference(valid)} specified')
+    raise ValueError(f'invalid direction "{set(directions).difference(valid)}" specified')

 o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
      [0, np.prod(self.cells[:2]+1), np.prod(self.cells[:2]+1)+1, 1],
@@ -247,7 +247,7 @@ class Orientation(Rotation,Crystal):
 if isinstance(other, (Orientation,Rotation)):
     return self.copy(Rotation(self.quaternion)*Rotation(other.quaternion))
 else:
-    raise TypeError('Use "O@b", i.e. matmul, to apply Orientation "O" to object "b"')
+    raise TypeError('use "O@b", i.e. matmul, to apply Orientation "O" to object "b"')


 @staticmethod
@@ -550,7 +550,7 @@ class Orientation(Rotation,Crystal):

 """
 if self.family != other.family:
-    raise NotImplementedError('Disorientation between different crystal families')
+    raise NotImplementedError('disorientation between different crystal families')

 blend = util.shapeblender(self.shape,other.shape)
 s = self.equivalent
@@ -649,7 +649,7 @@ class Orientation(Rotation,Crystal):
 """
 vector_ = np.array(vector,float)
 if vector_.shape[-1] != 3:
-    raise ValueError('Input is not a field of three-dimensional vectors')
+    raise ValueError('input is not a field of three-dimensional vectors')
 eq = self.equivalent
 blend = util.shapeblender(eq.shape,vector_.shape[:-1])
 poles = eq.broadcast_to(blend,mode='right') @ np.broadcast_to(vector_,blend+(3,))
@@ -686,7 +686,7 @@ class Orientation(Rotation,Crystal):
 """
 vector_ = np.array(vector,float)
 if vector_.shape[-1] != 3:
-    raise ValueError('Input is not a field of three-dimensional vectors')
+    raise ValueError('input is not a field of three-dimensional vectors')

 if self.standard_triangle is None: # direct exit for no symmetry
     return np.ones_like(vector_[...,0],bool)
@@ -744,7 +744,7 @@ class Orientation(Rotation,Crystal):

 """
 if np.array(vector).shape[-1] != 3:
-    raise ValueError('Input is not a field of three-dimensional vectors')
+    raise ValueError('input is not a field of three-dimensional vectors')

 vector_ = self.to_SST(vector,proper) if in_SST else \
           self @ np.broadcast_to(vector,self.shape+(3,))
@@ -918,7 +918,7 @@ class Orientation(Rotation,Crystal):

 """
 if (N_slip is not None) ^ (N_twin is None):
-    raise KeyError('Specify either "N_slip" or "N_twin"')
+    raise KeyError('specify either "N_slip" or "N_twin"')

 kinematics,active = (self.kinematics('slip'),N_slip) if N_twin is None else \
                     (self.kinematics('twin'),N_twin)
@@ -32,10 +32,10 @@ def _view_transition(what,datasets,increments,times,phases,homogenizations,field
 if (datasets is not None and what is None) or (what is not None and datasets is None):
     raise ValueError('"what" and "datasets" need to be used as a pair')
 if datasets is not None or what is not None:
-    warnings.warn('Arguments "what" and "datasets" will be removed in DAMASK v3.0.0-alpha7', DeprecationWarning,2)
+    warnings.warn('arguments "what" and "datasets" will be removed in DAMASK v3.0.0-alpha7', DeprecationWarning,2)
     return what,datasets
 if sum(1 for _ in filter(None.__ne__, [increments,times,phases,homogenizations,fields])) > 1:
-    raise ValueError('Only one out of "increments", "times", "phases", "homogenizations", and "fields" can be used')
+    raise ValueError('only one out of "increments", "times", "phases", "homogenizations", and "fields" can be used')
 else:
     if increments is not None: return "increments", increments
     if times is not None: return "times", times
@@ -115,7 +115,7 @@ class Result:
 self.version_minor = f.attrs['DADF5_version_minor']

 if self.version_major != 0 or not 12 <= self.version_minor <= 14:
-    raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
+    raise TypeError(f'unsupported DADF5 version "{self.version_major}.{self.version_minor}"')
 if self.version_major == 0 and self.version_minor < 14:
     self.export_setup = None

@@ -132,7 +132,7 @@ class Result:
 self.increments = sorted([i for i in f.keys() if r.match(i)],key=util.natural_sort)
 self.times = [round(f[i].attrs['t/s'],12) for i in self.increments]
 if len(self.increments) == 0:
-    raise ValueError('Incomplete DADF5 file')
+    raise ValueError('incomplete DADF5 file')

 self.N_materialpoints, self.N_constituents = np.shape(f['cell_to/phase'])

@@ -490,7 +490,7 @@ class Result:

 """
 if self._protected:
-    raise PermissionError('Renaming datasets not permitted')
+    raise PermissionError('rename datasets')

 with h5py.File(self.fname,'a') as f:
     for inc in self.visible['increments']:
@@ -529,7 +529,7 @@ class Result:

 """
 if self._protected:
-    raise PermissionError('Removing datasets not permitted')
+    raise PermissionError('delete datasets')

 with h5py.File(self.fname,'a') as f:
     for inc in self.visible['increments']:
@@ -639,7 +639,7 @@ class Result:
 data = eval(formula)

 if not hasattr(data,'shape') or data.shape[0] != kwargs[d]['data'].shape[0]:
-    raise ValueError("'{}' results in invalid shape".format(kwargs['formula']))
+    raise ValueError('"{}" results in invalid shape'.format(kwargs['formula']))

 return {
     'data': data,
@@ -939,7 +939,7 @@ class Result:
 elif T_sym['meta']['unit'] == 'Pa':
     k = 'stress'
 if k not in ['stress', 'strain']:
-    raise ValueError(f'Invalid von Mises kind {kind}')
+    raise ValueError(f'invalid von Mises kind "{kind}"')

 return {
     'data': (mechanics.equivalent_strain_Mises if k=='strain' else \
@@ -993,7 +993,7 @@ class Result:
     t = 'tensor'
     if o is None: o = 'fro'
 else:
-    raise ValueError(f'Invalid shape of {x["label"]}')
+    raise ValueError(f'invalid shape of {x["label"]}')

 return {
     'data': np.linalg.norm(x['data'],ord=o,axis=axis,keepdims=True),
@@ -1633,7 +1633,7 @@ class Result:
 elif mode.lower()=='point':
     v = VTK.from_poly_data(self.coordinates0_point)
 else:
-    raise ValueError(f'Invalid mode {mode}')
+    raise ValueError(f'invalid mode "{mode}"')

 v.set_comments(util.execution_stamp('Result','export_VTK'))
@@ -279,7 +279,7 @@ class Rotation:
     p = q_m*p_o + q_o*p_m + _P * np.cross(p_m,p_o)
     return self.copy(Rotation(np.block([q,p]))._standardize())
 else:
-    raise TypeError('Use "R@b", i.e. matmul, to apply rotation "R" to object "b"')
+    raise TypeError('use "R@b", i.e. matmul, to apply rotation "R" to object "b"')

 def __imul__(self: MyType,
              other: MyType) -> MyType:
@@ -314,7 +314,7 @@ class Rotation:
 if isinstance(other,Rotation):
     return self*~other
 else:
-    raise TypeError('Use "R@b", i.e. matmul, to apply rotation "R" to object "b"')
+    raise TypeError('use "R@b", i.e. matmul, to apply rotation "R" to object "b"')

 def __itruediv__(self: MyType,
                  other: MyType) -> MyType:
@@ -365,11 +365,11 @@ class Rotation:
         R = self.as_matrix()
         return np.einsum('...im,...jn,...ko,...lp,...mnop',R,R,R,R,other)
     else:
-        raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
+        raise ValueError('can only rotate vectors, 2nd order tensors, and 4th order tensors')
 elif isinstance(other, Rotation):
-    raise TypeError('Use "R1*R2", i.e. multiplication, to compose rotations "R1" and "R2"')
+    raise TypeError('use "R1*R2", i.e. multiplication, to compose rotations "R1" and "R2"')
 else:
-    raise TypeError(f'Cannot rotate {type(other)}')
+    raise TypeError(f'cannot rotate "{type(other)}"')

 apply = __matmul__

@@ -731,7 +731,7 @@ class Rotation:
 """
 qu = np.array(q,dtype=float)
 if qu.shape[:-2:-1] != (4,):
-    raise ValueError('Invalid shape')
+    raise ValueError('invalid shape')
 if abs(P) != 1:
     raise ValueError('P ∉ {-1,1}')

@@ -740,9 +740,9 @@ class Rotation:
     qu[qu[...,0] < 0.0] *= -1
 else:
     if np.any(qu[...,0] < 0.0):
-        raise ValueError('Quaternion with negative first (real) component')
+        raise ValueError('quaternion with negative first (real) component')
     if not np.all(np.isclose(np.linalg.norm(qu,axis=-1), 1.0,rtol=0.0)):
-        raise ValueError('Quaternion is not of unit length')
+        raise ValueError('quaternion is not of unit length')

 return Rotation(qu)

@@ -767,7 +767,7 @@ class Rotation:
 """
 eu = np.array(phi,dtype=float)
 if eu.shape[:-2:-1] != (3,):
-    raise ValueError('Invalid shape')
+    raise ValueError('invalid shape')

 eu = np.radians(eu) if degrees else eu
 if np.any(eu < 0.0) or np.any(eu > 2.0*np.pi) or np.any(eu[...,1] > np.pi): # ToDo: No separate check for PHI
@@ -798,7 +798,7 @@ class Rotation:
 """
 ax = np.array(axis_angle,dtype=float)
 if ax.shape[:-2:-1] != (4,):
-    raise ValueError('Invalid shape')
+    raise ValueError('invalid shape')
 if abs(P) != 1:
     raise ValueError('P ∉ {-1,1}')

@@ -806,10 +806,10 @@ class Rotation:
 if degrees: ax[..., 3] = np.radians(ax[...,3])
 if normalize: ax[...,0:3] /= np.linalg.norm(ax[...,0:3],axis=-1,keepdims=True)
 if np.any(ax[...,3] < 0.0) or np.any(ax[...,3] > np.pi):
-    raise ValueError('Axis–angle rotation angle outside of [0..π]')
+    raise ValueError('axis–angle rotation angle outside of [0..π]')
 if not np.all(np.isclose(np.linalg.norm(ax[...,0:3],axis=-1), 1.0)):
     print(np.linalg.norm(ax[...,0:3],axis=-1))
-    raise ValueError('Axis–angle rotation axis is not of unit length')
+    raise ValueError('axis–angle rotation axis is not of unit length')

 return Rotation(Rotation._ax2qu(ax))

@@ -832,7 +832,7 @@ class Rotation:
 """
 om = np.array(basis,dtype=float)
 if om.shape[-2:] != (3,3):
-    raise ValueError('Invalid shape')
+    raise ValueError('invalid shape')

 if reciprocal:
     om = np.linalg.inv(tensor.transpose(om)/np.pi) # transform reciprocal basis set
@@ -841,11 +841,11 @@ class Rotation:
     (U,S,Vh) = np.linalg.svd(om) # singular value decomposition
     om = np.einsum('...ij,...jl',U,Vh)
 if not np.all(np.isclose(np.linalg.det(om),1.0)):
-    raise ValueError('Orientation matrix has determinant ≠ 1')
+    raise ValueError('orientation matrix has determinant ≠ 1')
 if not np.all(np.isclose(np.einsum('...i,...i',om[...,0],om[...,1]), 0.0)) \
 or not np.all(np.isclose(np.einsum('...i,...i',om[...,1],om[...,2]), 0.0)) \
 or not np.all(np.isclose(np.einsum('...i,...i',om[...,2],om[...,0]), 0.0)):
-    raise ValueError('Orientation matrix is not orthogonal')
+    raise ValueError('orientation matrix is not orthogonal')

 return Rotation(Rotation._om2qu(om))

@@ -879,7 +879,7 @@ class Rotation:
 a_ = np.array(a)
 b_ = np.array(b)
 if a_.shape[-2:] != (2,3) or b_.shape[-2:] != (2,3) or a_.shape != b_.shape:
-    raise ValueError('Invalid shape')
+    raise ValueError('invalid shape')
 am = np.stack([ a_[...,0,:],
                 a_[...,1,:],
                 np.cross(a_[...,0,:],a_[...,1,:]) ],axis=-2)
@@ -910,7 +910,7 @@ class Rotation:
 """
 ro = np.array(rho,dtype=float)
 if ro.shape[:-2:-1] != (4,):
-    raise ValueError('Invalid shape')
+    raise ValueError('invalid shape')
 if abs(P) != 1:
     raise ValueError('P ∉ {-1,1}')

@@ -939,14 +939,14 @@ class Rotation:
 """
 ho = np.array(h,dtype=float)
 if ho.shape[:-2:-1] != (3,):
-    raise ValueError('Invalid shape')
+    raise ValueError('invalid shape')
 if abs(P) != 1:
     raise ValueError('P ∉ {-1,1}')

 ho *= -P

 if np.any(np.linalg.norm(ho,axis=-1) >_R1+1e-9):
-    raise ValueError('Homochoric coordinate outside of the sphere')
+    raise ValueError('homochoric coordinate outside of the sphere')

 return Rotation(Rotation._ho2qu(ho))

@@ -966,12 +966,12 @@ class Rotation:
 """
 cu = np.array(x,dtype=float)
 if cu.shape[:-2:-1] != (3,):
-    raise ValueError('Invalid shape')
+    raise ValueError('invalid shape')
 if abs(P) != 1:
     raise ValueError('P ∉ {-1,1}')

 if np.abs(np.max(cu)) > np.pi**(2./3.) * 0.5+1e-9:
-    raise ValueError('Cubochoric coordinate outside of the cube')
+    raise ValueError('cubochoric coordinate outside of the cube')

 ho = -P * Rotation._cu2ho(cu)
@@ -532,7 +532,7 @@ class Table:

 """
 if self.shapes != other.shapes or not self.data.columns.equals(other.data.columns):
-    raise KeyError('Labels or shapes or order do not match')
+    raise KeyError('mismatch of shapes or labels or their order')

 dup = self.copy()
 dup.data = dup.data.append(other.data,ignore_index=True)
@@ -558,7 +558,7 @@ class Table:

 """
 if set(self.shapes) & set(other.shapes) or self.data.shape[0] != other.data.shape[0]:
-    raise KeyError('Duplicated keys or row count mismatch')
+    raise KeyError('duplicated keys or row count mismatch')

 dup = self.copy()
 dup.data = dup.data.join(other.data)
@@ -201,12 +201,12 @@ class VTK:

 """
 if not os.path.isfile(fname): # vtk has a strange error handling
-    raise FileNotFoundError(f'No such file: {fname}')
+    raise FileNotFoundError(f'file "{fname}" not found')
 if (ext := Path(fname).suffix) == '.vtk' or dataset_type is not None:
     reader = vtk.vtkGenericDataObjectReader()
     reader.SetFileName(str(fname))
     if dataset_type is None:
-        raise TypeError('Dataset type for *.vtk file not given')
+        raise TypeError('dataset type for *.vtk file not given')
     elif dataset_type.lower().endswith(('imagedata','image_data')):
         reader.Update()
         vtk_data = reader.GetStructuredPointsOutput()
@@ -220,7 +220,7 @@ class VTK:
         reader.Update()
         vtk_data = reader.GetPolyDataOutput()
     else:
-        raise TypeError(f'Unknown dataset type "{dataset_type}" for vtk file')
+        raise TypeError(f'unknown dataset type "{dataset_type}" for vtk file')
 else:
     if ext == '.vti':
         reader = vtk.vtkXMLImageDataReader()
@@ -231,7 +231,7 @@ class VTK:
     elif ext == '.vtp':
         reader = vtk.vtkXMLPolyDataReader()
     else:
-        raise TypeError(f'Unknown file extension "{ext}"')
+        raise TypeError(f'unknown file extension "{ext}"')

     reader.SetFileName(str(fname))
     reader.Update()
@@ -314,7 +314,7 @@ class VTK:

 if isinstance(data,np.ndarray):
     if label is None:
-        raise ValueError('No label defined for numpy.ndarray')
+        raise ValueError('no label defined for numpy.ndarray')

     N_data = data.shape[0]
     data_ = (data if not isinstance(data,np.ma.MaskedArray) else
@@ -336,7 +336,7 @@ class VTK:
     elif N_data == N_cells:
         self.vtk_data.GetCellData().AddArray(d)
     else:
-        raise ValueError(f'Cell / point count ({N_cells} / {N_points}) differs from data ({N_data})')
+        raise ValueError(f'cell / point count ({N_cells} / {N_points}) differs from data ({N_data})')
 elif isinstance(data,Table):
     raise NotImplementedError('damask.Table')
 else:
@@ -383,7 +383,7 @@ class VTK:
         # string array
         return np.array([vtk_array.GetValue(i) for i in range(vtk_array.GetNumberOfValues())]).astype(str)
 except UnboundLocalError:
-    raise ValueError(f'Array "{label}" not found')
+    raise ValueError(f'array "{label}" not found')


 def get_comments(self) -> List[str]:
@@ -300,7 +300,7 @@ def cellsSizeOrigin_coordinates0_point(coordinates0: _np.ndarray,
 origin[_np.where(cells==1)] = 0.0

 if cells.prod() != len(coordinates0):
-    raise ValueError(f'Data count {len(coordinates0)} does not match cells {cells}')
+    raise ValueError(f'data count {len(coordinates0)} does not match cells {cells}')

 start = origin + delta*.5
 end = origin - delta*.5 + size
@@ -309,11 +309,11 @@ def cellsSizeOrigin_coordinates0_point(coordinates0: _np.ndarray,
 if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],cells[0]),atol=atol) and \
         _np.allclose(coords[1],_np.linspace(start[1],end[1],cells[1]),atol=atol) and \
         _np.allclose(coords[2],_np.linspace(start[2],end[2],cells[2]),atol=atol)):
-    raise ValueError('Non-uniform cell spacing')
+    raise ValueError('non-uniform cell spacing')

 if ordered and not _np.allclose(coordinates0.reshape(tuple(cells)+(3,),order='F'),
                                 coordinates0_point(list(cells),size,origin),atol=atol):
-    raise ValueError('Input data is not ordered (x fast, z slow)')
+    raise ValueError('input data is not ordered (x fast, z slow)')

 return (cells,size,origin)

@@ -460,17 +460,17 @@ def cellsSizeOrigin_coordinates0_node(coordinates0: _np.ndarray,
 origin = mincorner

 if (cells+1).prod() != len(coordinates0):
-    raise ValueError(f'Data count {len(coordinates0)} does not match cells {cells}')
+    raise ValueError(f'data count {len(coordinates0)} does not match cells {cells}')

 atol = _np.max(size)*5e-2
 if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],cells[0]+1),atol=atol) and \
         _np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],cells[1]+1),atol=atol) and \
         _np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],cells[2]+1),atol=atol)):
-    raise ValueError('Non-uniform cell spacing')
+    raise ValueError('non-uniform cell spacing')

 if ordered and not _np.allclose(coordinates0.reshape(tuple(cells+1)+(3,),order='F'),
                                 coordinates0_node(list(cells),size,origin),atol=atol):
-    raise ValueError('Input data is not ordered (x fast, z slow)')
+    raise ValueError('input data is not ordered (x fast, z slow)')

 return (cells,size,origin)
@@ -258,7 +258,7 @@ def _polar_decomposition(T: _np.ndarray,
     Tensor of which the singular values are computed.
 requested : sequence of {'R', 'U', 'V'}
     Requested outputs: ‘R’ for the rotation tensor,
-    ‘V’ for left stretch tensor and ‘U’ for right stretch tensor.
+    ‘V’ for left stretch tensor, and ‘U’ for right stretch tensor.

 """
 u, _, vh = _np.linalg.svd(T)
@@ -273,7 +273,7 @@ def _polar_decomposition(T: _np.ndarray,
 output+=[_np.einsum('...ji,...jk',R,T)]

 if len(output) == 0:
-    raise ValueError('Output not in {V, R, U}')
+    raise ValueError('output not in {V, R, U}')

 return tuple(output)
@@ -99,8 +99,8 @@ def from_Poisson_disc(size: _FloatSequence,
 s += 1
 progress.update(s)

-if i == 100:
-    raise ValueError('Seeding not possible')
+if i >= 100:
+    raise ValueError('seeding not possible')

 return coords
@@ -284,7 +284,7 @@ def scale_to_coprime(v: FloatSequence) -> np.ndarray:

 with np.errstate(invalid='ignore'):
     if not np.allclose(np.ma.masked_invalid(v_/m),v_[np.argmax(abs(v_))]/m[np.argmax(abs(v_))]):
-        raise ValueError(f'Invalid result {m} for input {v_}. Insufficient precision?')
+        raise ValueError(f'invalid result "{m}" for input "{v_}"')

 return m

@@ -482,7 +482,7 @@ def shapeshifter(fro: Tuple[int, ...],
     assert match
     grp = match.groups()
 except AssertionError:
-    raise ValueError(f'Shapes can not be shifted {fro} --> {to}')
+    raise ValueError(f'shapes cannot be shifted {fro} --> {to}')
 fill: Any = ()
 for g,d in zip(grp,fro+(None,)):
     fill += (1,)*g.count(',')+(d,)
@@ -575,7 +575,7 @@ def DREAM3D_base_group(fname: Union[str, Path]) -> str:
     base_group = f.visit(lambda path: path.rsplit('/',2)[0] if '_SIMPL_GEOMETRY/SPACING' in path else None)

 if base_group is None:
-    raise ValueError(f'Could not determine base group in file {fname}')
+    raise ValueError(f'could not determine base group in file "{fname}"')

 return base_group
@@ -606,7 +606,7 @@ def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
                            else None)

 if cell_data_group is None:
-    raise ValueError(f'Could not determine cell data group in file {fname}/{base_group}')
+    raise ValueError(f'could not determine cell-data group in file "{fname}/{base_group}"')

 return cell_data_group
@@ -629,7 +629,7 @@ def Bravais_to_Miller(*,

 """
 if (uvtw is not None) ^ (hkil is None):
-    raise KeyError('Specify either "uvtw" or "hkil"')
+    raise KeyError('specify either "uvtw" or "hkil"')
 axis,basis = (np.array(uvtw),np.array([[1,0,-1,0],
                                        [0,1,-1,0],
                                        [0,0, 0,1]])) \
@@ -658,7 +658,7 @@ def Miller_to_Bravais(*,

 """
 if (uvw is not None) ^ (hkl is None):
-    raise KeyError('Specify either "uvw" or "hkl"')
+    raise KeyError('specify either "uvw" or "hkl"')
 axis,basis = (np.array(uvw),np.array([[ 2,-1, 0],
                                       [-1, 2, 0],
                                       [-1,-1, 0],