Merge branch 'consistent-uppercase-in-exceptions' into 'development'

Consistent formatting of raised messages

See merge request damask/DAMASK!533
commit f4a15f5796
Sharan Roongta, 2022-02-23 13:55:14 +00:00

11 changed files with 66 additions and 66 deletions
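The diff applies one convention to every raised message: start lowercase, generally wrap interpolated values in double quotes, and drop the trailing period. A minimal before/after sketch of that formatting (the color-model check mirrors the first hunk; the value of `model` is illustrative, not taken from the diff):

    model = 'XYZ'                                    # illustrative value
    old_style = f'Invalid color model: {model}.'     # capitalized, colon, trailing period
    new_style = f'invalid color model "{model}"'     # lowercase, quoted value, no period
    print(old_style, '->', new_style)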


@@ -141,7 +141,7 @@ class Colormap(mpl.colors.ListedColormap):
 )
 if model.lower() not in toMsh:
-raise ValueError(f'Invalid color model: {model}.')
+raise ValueError(f'invalid color model "{model}"')
 low_high = np.vstack((low,high)).astype(float)
 out_of_bounds = np.bool_(False)
@@ -156,7 +156,7 @@ class Colormap(mpl.colors.ListedColormap):
 out_of_bounds = np.any(low_high[:,0]<0)
 if out_of_bounds:
-raise ValueError(f'{model.upper()} colors {low_high[0]} | {low_high[1]} are out of bounds.')
+raise ValueError(f'{model.upper()} colors {low_high[0]} | {low_high[1]} are out of bounds')
 low_,high_ = map(toMsh[model.lower()],low_high)
 msh = map(functools.partial(Colormap._interpolate_msh,low=low_,high=high_),np.linspace(0,1,N))


@@ -107,7 +107,7 @@ class Grid:
 if len(material.shape) != 3:
 raise ValueError(f'invalid material shape {material.shape}')
 if material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
-raise TypeError(f'invalid material data type {material.dtype}')
+raise TypeError(f'invalid material data type "{material.dtype}"')
 self._material = np.copy(material)
@@ -228,7 +228,7 @@ class Grid:
 except ValueError:
 header_length,keyword = (-1, 'invalid')
 if not keyword.startswith('head') or header_length < 3:
-raise TypeError('header length information missing or invalid')
+raise TypeError('invalid or missing header length information')
 comments = []
 content = f.readlines()
@@ -258,7 +258,7 @@ class Grid:
 i += len(items)
 if i != cells.prod():
-raise TypeError(f'invalid file: expected {cells.prod()} entries, found {i}')
+raise TypeError(f'mismatch between {cells.prod()} expected entries and {i} found')
 if not np.any(np.mod(material,1) != 0.0): # no float present
 material = material.astype('int') - (1 if material.min() > 0 else 0)
@@ -811,7 +811,7 @@ class Grid:
 """
 if not set(directions).issubset(valid := ['x', 'y', 'z']):
-raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
+raise ValueError(f'invalid direction "{set(directions).difference(valid)}" specified')
 limits: Sequence[Optional[int]] = [None,None] if reflect else [-2,0]
 mat = self.material.copy()
@@ -847,7 +847,7 @@ class Grid:
 """
 if not set(directions).issubset(valid := ['x', 'y', 'z']):
-raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
+raise ValueError(f'invalid direction "{set(directions).difference(valid)}" specified')
 mat = np.flip(self.material, [valid.index(d) for d in directions if d in valid])
@@ -1184,7 +1184,7 @@ class Grid:
 """
 if not set(directions).issubset(valid := ['x', 'y', 'z']):
-raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
+raise ValueError(f'invalid direction "{set(directions).difference(valid)}" specified')
 o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
 [0, np.prod(self.cells[:2]+1), np.prod(self.cells[:2]+1)+1, 1],


@@ -848,7 +848,7 @@ class Orientation(Rotation,Crystal):
 """
 if (N_slip is not None) ^ (N_twin is None):
-raise KeyError('Specify either "N_slip" or "N_twin"')
+raise KeyError('specify either "N_slip" or "N_twin"')
 kinematics,active = (self.kinematics('slip'),N_slip) if N_twin is None else \
 (self.kinematics('twin'),N_twin)


@@ -32,10 +32,10 @@ def _view_transition(what,datasets,increments,times,phases,homogenizations,field
 if (datasets is not None and what is None) or (what is not None and datasets is None):
 raise ValueError('"what" and "datasets" need to be used as a pair')
 if datasets is not None or what is not None:
-warnings.warn('Arguments "what" and "datasets" will be removed in DAMASK v3.0.0-alpha7', DeprecationWarning,2)
+warnings.warn('arguments "what" and "datasets" will be removed in DAMASK v3.0.0-alpha7', DeprecationWarning,2)
 return what,datasets
 if sum(1 for _ in filter(None.__ne__, [increments,times,phases,homogenizations,fields])) > 1:
-raise ValueError('Only one out of "increments", "times", "phases", "homogenizations", and "fields" can be used')
+raise ValueError('only one out of "increments", "times", "phases", "homogenizations", and "fields" can be used')
 else:
 if increments is not None: return "increments", increments
 if times is not None: return "times", times
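The hunk above enforces an at-most-one-selector rule for the keyword-based view interface. A standalone sketch of that rule, reimplemented here purely for illustration (this is not the DAMASK helper itself):

    def pick_selector(increments=None, times=None, phases=None,
                      homogenizations=None, fields=None):
        # mirror of the rule in _view_transition: at most one selector may be given
        given = {k: v for k, v in dict(increments=increments, times=times, phases=phases,
                                       homogenizations=homogenizations, fields=fields).items()
                 if v is not None}
        if len(given) > 1:
            raise ValueError('only one out of "increments", "times", "phases", '
                             '"homogenizations", and "fields" can be used')
        return next(iter(given.items()), (None, None))

    print(pick_selector(increments=[0, 1]))    # ('increments', [0, 1])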
@@ -115,7 +115,7 @@ class Result:
 self.version_minor = f.attrs['DADF5_version_minor']
 if self.version_major != 0 or not 12 <= self.version_minor <= 14:
-raise TypeError(f'Unsupported DADF5 version {self.version_major}.{self.version_minor}')
+raise TypeError(f'unsupported DADF5 version "{self.version_major}.{self.version_minor}"')
 if self.version_major == 0 and self.version_minor < 14:
 self.export_setup = None
@@ -490,7 +490,7 @@ class Result:
 """
 if self._protected:
-raise PermissionError('Renaming datasets not permitted')
+raise PermissionError('rename datasets')
 with h5py.File(self.fname,'a') as f:
 for inc in self.visible['increments']:
@@ -529,7 +529,7 @@ class Result:
 """
 if self._protected:
-raise PermissionError('Removing datasets not permitted')
+raise PermissionError('delete datasets')
 with h5py.File(self.fname,'a') as f:
 for inc in self.visible['increments']:
@@ -639,7 +639,7 @@ class Result:
 data = eval(formula)
 if not hasattr(data,'shape') or data.shape[0] != kwargs[d]['data'].shape[0]:
-raise ValueError("'{}' results in invalid shape".format(kwargs['formula']))
+raise ValueError('"{}" results in invalid shape'.format(kwargs['formula']))
 return {
 'data': data,
@@ -939,7 +939,7 @@ class Result:
 elif T_sym['meta']['unit'] == 'Pa':
 k = 'stress'
 if k not in ['stress', 'strain']:
-raise ValueError(f'invalid von Mises kind {kind}')
+raise ValueError(f'invalid von Mises kind "{kind}"')
 return {
 'data': (mechanics.equivalent_strain_Mises if k=='strain' else \
@@ -1633,7 +1633,7 @@ class Result:
 elif mode.lower()=='point':
 v = VTK.from_poly_data(self.coordinates0_point)
 else:
-raise ValueError(f'invalid mode {mode}')
+raise ValueError(f'invalid mode "{mode}"')
 v.set_comments(util.execution_stamp('Result','export_VTK'))


@@ -279,7 +279,7 @@ class Rotation:
 p = q_m*p_o + q_o*p_m + _P * np.cross(p_m,p_o)
 return self.copy(Rotation(np.block([q,p]))._standardize())
 else:
-raise TypeError('Use "R@b", i.e. matmul, to apply rotation "R" to object "b"')
+raise TypeError('use "R@b", i.e. matmul, to apply rotation "R" to object "b"')
 def __imul__(self: MyType,
 other: MyType) -> MyType:
@@ -314,7 +314,7 @@ class Rotation:
 if isinstance(other,Rotation):
 return self*~other
 else:
-raise TypeError('Use "R@b", i.e. matmul, to apply rotation "R" to object "b"')
+raise TypeError('use "R@b", i.e. matmul, to apply rotation "R" to object "b"')
 def __itruediv__(self: MyType,
 other: MyType) -> MyType:
@@ -365,11 +365,11 @@ class Rotation:
 R = self.as_matrix()
 return np.einsum('...im,...jn,...ko,...lp,...mnop',R,R,R,R,other)
 else:
-raise ValueError('Can only rotate vectors, 2nd order tensors, and 4th order tensors')
+raise ValueError('can only rotate vectors, 2nd order tensors, and 4th order tensors')
 elif isinstance(other, Rotation):
-raise TypeError('Use "R1*R2", i.e. multiplication, to compose rotations "R1" and "R2"')
+raise TypeError('use "R1*R2", i.e. multiplication, to compose rotations "R1" and "R2"')
 else:
-raise TypeError(f'Cannot rotate {type(other)}')
+raise TypeError(f'cannot rotate "{type(other)}"')
 apply = __matmul__
@@ -731,7 +731,7 @@ class Rotation:
 """
 qu = np.array(q,dtype=float)
 if qu.shape[:-2:-1] != (4,):
-raise ValueError('Invalid shape.')
+raise ValueError('invalid shape')
 if abs(P) != 1:
 raise ValueError('P ∉ {-1,1}')
@@ -740,9 +740,9 @@ class Rotation:
 qu[qu[...,0] < 0.0] *= -1
 else:
 if np.any(qu[...,0] < 0.0):
-raise ValueError('Quaternion with negative first (real) component.')
+raise ValueError('quaternion with negative first (real) component')
 if not np.all(np.isclose(np.linalg.norm(qu,axis=-1), 1.0,rtol=0.0)):
-raise ValueError('Quaternion is not of unit length.')
+raise ValueError('quaternion is not of unit length')
 return Rotation(qu)
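A short usage sketch of how the reworded validation reads at the call site, assuming the two hunks above sit in Rotation.from_quaternion (the method name is not visible in the hunk headers):

    import numpy as np
    import damask

    try:
        damask.Rotation.from_quaternion(np.ones(3))    # trailing dimension is 3, not 4
    except ValueError as err:
        print(err)                                     # expected: invalid shape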
@@ -767,11 +767,11 @@ class Rotation:
 """
 eu = np.array(phi,dtype=float)
 if eu.shape[:-2:-1] != (3,):
-raise ValueError('Invalid shape.')
+raise ValueError('invalid shape')
 eu = np.radians(eu) if degrees else eu
 if np.any(eu < 0.0) or np.any(eu > 2.0*np.pi) or np.any(eu[...,1] > np.pi): # ToDo: No separate check for PHI
-raise ValueError('Euler angles outside of [0..2π],[0..π],[0..2π].')
+raise ValueError('Euler angles outside of [0..2π],[0..π],[0..2π]')
 return Rotation(Rotation._eu2qu(eu))
@@ -798,7 +798,7 @@ class Rotation:
 """
 ax = np.array(axis_angle,dtype=float)
 if ax.shape[:-2:-1] != (4,):
-raise ValueError('Invalid shape.')
+raise ValueError('invalid shape')
 if abs(P) != 1:
 raise ValueError('P ∉ {-1,1}')
@@ -806,10 +806,10 @@ class Rotation:
 if degrees: ax[..., 3] = np.radians(ax[...,3])
 if normalize: ax[...,0:3] /= np.linalg.norm(ax[...,0:3],axis=-1,keepdims=True)
 if np.any(ax[...,3] < 0.0) or np.any(ax[...,3] > np.pi):
-raise ValueError('Axisangle rotation angle outside of [0..π].')
+raise ValueError('axisangle rotation angle outside of [0..π]')
 if not np.all(np.isclose(np.linalg.norm(ax[...,0:3],axis=-1), 1.0)):
 print(np.linalg.norm(ax[...,0:3],axis=-1))
-raise ValueError('Axisangle rotation axis is not of unit length.')
+raise ValueError('axisangle rotation axis is not of unit length')
 return Rotation(Rotation._ax2qu(ax))
@@ -832,7 +832,7 @@ class Rotation:
 """
 om = np.array(basis,dtype=float)
 if om.shape[-2:] != (3,3):
-raise ValueError('Invalid shape.')
+raise ValueError('invalid shape')
 if reciprocal:
 om = np.linalg.inv(tensor.transpose(om)/np.pi) # transform reciprocal basis set
@@ -841,11 +841,11 @@ class Rotation:
 (U,S,Vh) = np.linalg.svd(om) # singular value decomposition
 om = np.einsum('...ij,...jl',U,Vh)
 if not np.all(np.isclose(np.linalg.det(om),1.0)):
-raise ValueError('Orientation matrix has determinant ≠ 1.')
+raise ValueError('orientation matrix has determinant ≠ 1')
 if not np.all(np.isclose(np.einsum('...i,...i',om[...,0],om[...,1]), 0.0)) \
 or not np.all(np.isclose(np.einsum('...i,...i',om[...,1],om[...,2]), 0.0)) \
 or not np.all(np.isclose(np.einsum('...i,...i',om[...,2],om[...,0]), 0.0)):
-raise ValueError('Orientation matrix is not orthogonal.')
+raise ValueError('orientation matrix is not orthogonal')
 return Rotation(Rotation._om2qu(om))
@@ -879,7 +879,7 @@ class Rotation:
 a_ = np.array(a)
 b_ = np.array(b)
 if a_.shape[-2:] != (2,3) or b_.shape[-2:] != (2,3) or a_.shape != b_.shape:
-raise ValueError('Invalid shape.')
+raise ValueError('invalid shape')
 am = np.stack([ a_[...,0,:],
 a_[...,1,:],
 np.cross(a_[...,0,:],a_[...,1,:]) ],axis=-2)
@@ -910,16 +910,16 @@ class Rotation:
 """
 ro = np.array(rho,dtype=float)
 if ro.shape[:-2:-1] != (4,):
-raise ValueError('Invalid shape.')
+raise ValueError('invalid shape')
 if abs(P) != 1:
 raise ValueError('P ∉ {-1,1}')
 ro[...,0:3] *= -P
 if normalize: ro[...,0:3] /= np.linalg.norm(ro[...,0:3],axis=-1,keepdims=True)
 if np.any(ro[...,3] < 0.0):
-raise ValueError('Rodrigues vector rotation angle is negative.')
+raise ValueError('Rodrigues vector rotation angle is negative')
 if not np.all(np.isclose(np.linalg.norm(ro[...,0:3],axis=-1), 1.0)):
-raise ValueError('Rodrigues vector rotation axis is not of unit length.')
+raise ValueError('Rodrigues vector rotation axis is not of unit length')
 return Rotation(Rotation._ro2qu(ro))
@@ -939,14 +939,14 @@ class Rotation:
 """
 ho = np.array(h,dtype=float)
 if ho.shape[:-2:-1] != (3,):
-raise ValueError('Invalid shape.')
+raise ValueError('invalid shape')
 if abs(P) != 1:
 raise ValueError('P ∉ {-1,1}')
 ho *= -P
 if np.any(np.linalg.norm(ho,axis=-1) > _R1+1e-9):
-raise ValueError('Homochoric coordinate outside of the sphere.')
+raise ValueError('homochoric coordinate outside of the sphere')
 return Rotation(Rotation._ho2qu(ho))
@@ -966,12 +966,12 @@ class Rotation:
 """
 cu = np.array(x,dtype=float)
 if cu.shape[:-2:-1] != (3,):
-raise ValueError('Invalid shape.')
+raise ValueError('invalid shape')
 if abs(P) != 1:
 raise ValueError('P ∉ {-1,1}')
 if np.abs(np.max(cu)) > np.pi**(2./3.) * 0.5+1e-9:
-raise ValueError('Cubochoric coordinate outside of the cube.')
+raise ValueError('cubochoric coordinate outside of the cube')
 ho = -P * Rotation._cu2ho(cu)


@@ -532,7 +532,7 @@ class Table:
 """
 if self.shapes != other.shapes or not self.data.columns.equals(other.data.columns):
-raise KeyError('Labels or shapes or order do not match')
+raise KeyError('mismatch of shapes or labels or their order')
 dup = self.copy()
 dup.data = dup.data.append(other.data,ignore_index=True)
@@ -558,7 +558,7 @@ class Table:
 """
 if set(self.shapes) & set(other.shapes) or self.data.shape[0] != other.data.shape[0]:
-raise KeyError('Duplicated keys or row count mismatch')
+raise KeyError('duplicated keys or row count mismatch')
 dup = self.copy()
 dup.data = dup.data.join(other.data)


@@ -201,12 +201,12 @@ class VTK:
 """
 if not os.path.isfile(fname): # vtk has a strange error handling
-raise FileNotFoundError(f'No such file: {fname}')
+raise FileNotFoundError(f'file "{fname}" not found')
 if (ext := Path(fname).suffix) == '.vtk' or dataset_type is not None:
 reader = vtk.vtkGenericDataObjectReader()
 reader.SetFileName(str(fname))
 if dataset_type is None:
-raise TypeError('Dataset type for *.vtk file not given.')
+raise TypeError('dataset type for *.vtk file not given')
 elif dataset_type.lower().endswith(('imagedata','image_data')):
 reader.Update()
 vtk_data = reader.GetStructuredPointsOutput()
@@ -220,7 +220,7 @@ class VTK:
 reader.Update()
 vtk_data = reader.GetPolyDataOutput()
 else:
-raise TypeError(f'Unknown dataset type {dataset_type} for vtk file')
+raise TypeError(f'unknown dataset type "{dataset_type}" for vtk file')
 else:
 if ext == '.vti':
 reader = vtk.vtkXMLImageDataReader()
@@ -231,7 +231,7 @@ class VTK:
 elif ext == '.vtp':
 reader = vtk.vtkXMLPolyDataReader()
 else:
-raise TypeError(f'Unknown file extension {ext}')
+raise TypeError(f'unknown file extension "{ext}"')
 reader.SetFileName(str(fname))
 reader.Update()
@@ -314,7 +314,7 @@ class VTK:
 if isinstance(data,np.ndarray):
 if label is None:
-raise ValueError('No label defined for numpy.ndarray')
+raise ValueError('no label defined for numpy.ndarray')
 N_data = data.shape[0]
 data_ = (data if not isinstance(data,np.ma.MaskedArray) else
@@ -336,7 +336,7 @@ class VTK:
 elif N_data == N_cells:
 self.vtk_data.GetCellData().AddArray(d)
 else:
-raise ValueError(f'Cell / point count ({N_cells} / {N_points}) differs from data ({N_data}).')
+raise ValueError(f'cell / point count ({N_cells} / {N_points}) differs from data ({N_data})')
 elif isinstance(data,Table):
 raise NotImplementedError('damask.Table')
 else:
@@ -383,7 +383,7 @@ class VTK:
 # string array
 return np.array([vtk_array.GetValue(i) for i in range(vtk_array.GetNumberOfValues())]).astype(str)
 except UnboundLocalError:
-raise ValueError(f'Array "{label}" not found.')
+raise ValueError(f'array "{label}" not found')
 def get_comments(self) -> List[str]:


@@ -300,7 +300,7 @@ def cellsSizeOrigin_coordinates0_point(coordinates0: _np.ndarray,
 origin[_np.where(cells==1)] = 0.0
 if cells.prod() != len(coordinates0):
-raise ValueError(f'Data count {len(coordinates0)} does not match cells {cells}.')
+raise ValueError(f'data count {len(coordinates0)} does not match cells {cells}')
 start = origin + delta*.5
 end = origin - delta*.5 + size
@@ -309,11 +309,11 @@ def cellsSizeOrigin_coordinates0_point(coordinates0: _np.ndarray,
 if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],cells[0]),atol=atol) and \
 _np.allclose(coords[1],_np.linspace(start[1],end[1],cells[1]),atol=atol) and \
 _np.allclose(coords[2],_np.linspace(start[2],end[2],cells[2]),atol=atol)):
-raise ValueError('Regular cell spacing violated.')
+raise ValueError('non-uniform cell spacing')
 if ordered and not _np.allclose(coordinates0.reshape(tuple(cells)+(3,),order='F'),
 coordinates0_point(list(cells),size,origin),atol=atol):
-raise ValueError('Input data is not ordered (x fast, z slow).')
+raise ValueError('input data is not ordered (x fast, z slow)')
 return (cells,size,origin)
@@ -460,17 +460,17 @@ def cellsSizeOrigin_coordinates0_node(coordinates0: _np.ndarray,
 origin = mincorner
 if (cells+1).prod() != len(coordinates0):
-raise ValueError(f'Data count {len(coordinates0)} does not match cells {cells}.')
+raise ValueError(f'data count {len(coordinates0)} does not match cells {cells}')
 atol = _np.max(size)*5e-2
 if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],cells[0]+1),atol=atol) and \
 _np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],cells[1]+1),atol=atol) and \
 _np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],cells[2]+1),atol=atol)):
-raise ValueError('Regular cell spacing violated.')
+raise ValueError('non-uniform cell spacing')
 if ordered and not _np.allclose(coordinates0.reshape(tuple(cells+1)+(3,),order='F'),
 coordinates0_node(list(cells),size,origin),atol=atol):
-raise ValueError('Input data is not ordered (x fast, z slow).')
+raise ValueError('input data is not ordered (x fast, z slow)')
 return (cells,size,origin)


@@ -258,7 +258,7 @@ def _polar_decomposition(T: _np.ndarray,
 Tensor of which the singular values are computed.
 requested : sequence of {'R', 'U', 'V'}
 Requested outputs: R for the rotation tensor,
-V for left stretch tensor and U for right stretch tensor.
+V for left stretch tensor, and U for right stretch tensor.
 """
 u, _, vh = _np.linalg.svd(T)
@@ -273,7 +273,7 @@ def _polar_decomposition(T: _np.ndarray,
 output+=[_np.einsum('...ji,...jk',R,T)]
 if len(output) == 0:
-raise ValueError('output needs to be out of V, R, U')
+raise ValueError('output not in {V, R, U}')
 return tuple(output)


@@ -99,8 +99,8 @@ def from_Poisson_disc(size: _FloatSequence,
 s += 1
 progress.update(s)
-if i == 100:
-raise ValueError('Seeding not possible')
+if i >= 100:
+raise ValueError('seeding not possible')
 return coords
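Besides the message rewording, this hunk also hardens the give-up check from `==` to `>=`, so the guard still trips if the failure counter ever steps past the limit. A minimal sketch of the guard in isolation, assuming `i` counts consecutive failed placement attempts (the increment of `i` lies outside the lines shown):

    def check_give_up(i: int, limit: int = 100) -> None:
        # '>=' also catches a counter that overshoots the limit; '==' would not
        if i >= limit:
            raise ValueError('seeding not possible')

    check_give_up(42)       # fine
    # check_give_up(100)    # would raise: seeding not possible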


@@ -284,7 +284,7 @@ def scale_to_coprime(v: FloatSequence) -> np.ndarray:
 with np.errstate(invalid='ignore'):
 if not np.allclose(np.ma.masked_invalid(v_/m),v_[np.argmax(abs(v_))]/m[np.argmax(abs(v_))]):
-raise ValueError(f'Invalid result {m} for input {v_}. Insufficient precision?')
+raise ValueError(f'invalid result "{m}" for input "{v_}"')
 return m
@@ -482,7 +482,7 @@ def shapeshifter(fro: Tuple[int, ...],
 assert match
 grp = match.groups()
 except AssertionError:
-raise ValueError(f'Shapes can not be shifted {fro} --> {to}')
+raise ValueError(f'shapes cannot be shifted {fro} --> {to}')
 fill: Any = ()
 for g,d in zip(grp,fro+(None,)):
 fill += (1,)*g.count(',')+(d,)
@@ -575,7 +575,7 @@ def DREAM3D_base_group(fname: Union[str, Path]) -> str:
 base_group = f.visit(lambda path: path.rsplit('/',2)[0] if '_SIMPL_GEOMETRY/SPACING' in path else None)
 if base_group is None:
-raise ValueError(f'Could not determine base group in file {fname}.')
+raise ValueError(f'could not determine base group in file "{fname}"')
 return base_group
@@ -606,7 +606,7 @@ def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
 else None)
 if cell_data_group is None:
-raise ValueError(f'Could not determine cell data group in file {fname}/{base_group}.')
+raise ValueError(f'could not determine cell-data group in file "{fname}/{base_group}"')
 return cell_data_group
@@ -629,7 +629,7 @@ def Bravais_to_Miller(*,
 """
 if (uvtw is not None) ^ (hkil is None):
-raise KeyError('Specify either "uvtw" or "hkil"')
+raise KeyError('specify either "uvtw" or "hkil"')
 axis,basis = (np.array(uvtw),np.array([[1,0,-1,0],
 [0,1,-1,0],
 [0,0, 0,1]])) \
@@ -658,7 +658,7 @@ def Miller_to_Bravais(*,
 """
 if (uvw is not None) ^ (hkl is None):
-raise KeyError('Specify either "uvw" or "hkl"')
+raise KeyError('specify either "uvw" or "hkl"')
 axis,basis = (np.array(uvw),np.array([[ 2,-1, 0],
 [-1, 2, 0],
 [-1,-1, 0],