Merge remote-tracking branch 'origin/development' into spring-cleaning

commit 8a3af87e3e
@@ -27,20 +27,69 @@ class Table:
         self.comments = [] if comments_ is None else [c for c in comments_]
         self.data = pd.DataFrame(data=data)
         self.shapes = { k:(v,) if isinstance(v,(np.int64,np.int32,int)) else v for k,v in shapes.items() }
-        self._label_uniform()
+        self._relabel('uniform')


     def __repr__(self):
         """Brief overview."""
-        return '\n'.join(['# '+c for c in self.comments])+'\n'+self.data.__repr__()
+        self._relabel('shapes')
+        data_repr = self.data.__repr__()
+        self._relabel('uniform')
+        return '\n'.join(['# '+c for c in self.comments])+'\n'+data_repr


     def __getitem__(self,item):
-        """Return slice according to item."""
-        return self.__class__(data=self.data[item],shapes=self.shapes,comments=self.comments)
+        """
+        Slice the Table according to item.
+
+        Parameters
+        ----------
+        item : row and/or column indexer
+            Slice to select from Table.
+
+        Returns
+        -------
+        slice : Table
+            Sliced part of the Table.
+
+        Examples
+        --------
+        >>> import damask
+        >>> import numpy as np
+        >>> tbl = damask.Table(data=np.arange(12).reshape((4,3)),
+        ...                    shapes=dict(colA=(1,),colB=(1,),colC=(1,)))
+        >>> tbl['colA','colB']
+           colA  colB
+        0     0     1
+        1     3     4
+        2     6     7
+        3     9    10
+        >>> tbl[::2,['colB','colA']]
+           colB  colA
+        0     1     0
+        2     7     6
+        >>> tbl[1:2,'colB']
+           colB
+        1     4
+        2     7
+
+        """
+        item = (item,slice(None,None,None)) if isinstance(item,slice) else \
+               item if isinstance(item[0],slice) else \
+               (slice(None,None,None),item)
+        sliced = self.data.loc[item]
+        cols = np.array(sliced.columns if isinstance(sliced,pd.core.frame.DataFrame) else [item[1]])
+        _,idx = np.unique(cols,return_index=True)
+        return self.__class__(data=sliced,
+                              shapes = {k:self.shapes[k] for k in cols[np.sort(idx)]},
+                              comments=self.comments)


     def __len__(self):
         """Number of rows."""
         return len(self.data)


     def __copy__(self):
         """Create deep copy."""
         return copy.deepcopy(self)
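
Note on the new __getitem__: it accepts a plain row slice, a column selection, or a (rows, columns) pair and normalizes everything to a (rows, columns) tuple before delegating to pandas' label-based .loc indexer. A minimal standalone sketch of that normalization (the helper name and the toy DataFrame are illustrative only, not part of the change):

    import numpy as np
    import pandas as pd

    def normalize(item):
        # mirror of the normalization used above: always return (row indexer, column indexer)
        return (item, slice(None)) if isinstance(item, slice) else \
               item if isinstance(item[0], slice) else \
               (slice(None), item)

    df = pd.DataFrame(np.arange(12).reshape(4,3), columns=['colA','colB','colC'])
    print(df.loc[normalize(slice(0,2))])            # row slice only -> all columns, rows 0..2 (.loc is end-inclusive)
    print(df.loc[normalize(['colA','colB'])])       # column selection only -> all rows
    print(df.loc[normalize((slice(0,1),'colB'))])   # explicit (rows, column) pair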
@@ -48,21 +97,51 @@ class Table:
     copy = __copy__


-    def _label_discrete(self):
-        """Label data individually, e.g. v v v ==> 1_v 2_v 3_v."""
-        labels = []
-        for label,shape in self.shapes.items():
-            size = int(np.prod(shape))
-            labels += [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
-        self.data.columns = labels
-
-
-    def _label_uniform(self):
-        """Label data uniformly, e.g. 1_v 2_v 3_v ==> v v v."""
-        labels = []
-        for label,shape in self.shapes.items():
-            labels += [label] * int(np.prod(shape))
-        self.data.columns = labels
+    def _label(self,what,how):
+        """
+        Expand labels according to data shape.
+
+        Parameters
+        ----------
+        what : str or list
+            Labels to expand.
+        how : str
+            Mode of labeling.
+            'uniform' ==> v v v
+            'shapes'  ==> 3:v v v
+            'linear'  ==> 1_v 2_v 3_v
+
+        """
+        what = [what] if isinstance(what,str) else what
+        labels = []
+        for label in what:
+            shape = self.shapes[label]
+            size = np.prod(shape,dtype=int)
+            if how == 'uniform':
+                labels += [label] * size
+            elif how == 'shapes':
+                labels += [('' if size == 1 or i>0 else f'{util.srepr(shape,"x")}:')+label for i in range(size)]
+            elif how == 'linear':
+                labels += [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
+            else:
+                raise KeyError
+        return labels
+
+
+    def _relabel(self,how):
+        """
+        Modify labeling of data in-place.
+
+        Parameters
+        ----------
+        how : str
+            Mode of labeling.
+            'uniform' ==> v v v
+            'shapes'  ==> 3:v v v
+            'linear'  ==> 1_v 2_v 3_v
+
+        """
+        self.data.columns = self._label(self.shapes,how)


     def _add_comment(self,label,shape,info):
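
Note on _label/_relabel: the two former helpers _label_discrete and _label_uniform collapse into one routine that expands each label according to its shape in one of three modes, and _relabel writes the result back onto the DataFrame columns. A standalone sketch of the expansion for a single label (util.srepr is approximated here by a plain 'x'-join, which is an assumption about its behaviour):

    import numpy as np

    def expand(label, shape, how):
        # sketch of the per-label expansion performed by Table._label
        size = np.prod(shape, dtype=int)
        if how == 'uniform':   # v v v
            return [label]*size
        if how == 'shapes':    # 3x3:v v v ... -- shape prefix on the first entry only
            return [('' if size == 1 or i > 0 else 'x'.join(map(str,shape))+':')+label for i in range(size)]
        if how == 'linear':    # 1_v 2_v 3_v ...
            return [('' if size == 1 else f'{i+1}_')+label for i in range(size)]
        raise KeyError(how)

    print(expand('F', (3,3), 'uniform'))   # ['F', 'F', ..., 'F']      (9 entries)
    print(expand('F', (3,3), 'shapes'))    # ['3x3:F', 'F', ..., 'F']
    print(expand('F', (3,3), 'linear'))    # ['1_F', '2_F', ..., '9_F']
    print(expand('s', (1,),  'linear'))    # ['s'] -- scalar columns stay unnumbered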
@@ -72,6 +151,62 @@ class Table:
         self.comments.append(f'{specific} / {general}')


+    def isclose(self,other,rtol=1e-5,atol=1e-8,equal_nan=True):
+        """
+        Report where values are approximately equal to corresponding ones of other Table.
+
+        Parameters
+        ----------
+        other : Table
+            Table to compare against.
+        rtol : float, optional
+            Relative tolerance of equality.
+        atol : float, optional
+            Absolute tolerance of equality.
+        equal_nan : bool, optional
+            Consider matching NaN values as equal. Defaults to True.
+
+        Returns
+        -------
+        mask : numpy.ndarray bool
+            Mask indicating where corresponding table values are close.
+
+        """
+        return np.isclose( self.data.to_numpy(),
+                           other.data.to_numpy(),
+                           rtol=rtol,
+                           atol=atol,
+                           equal_nan=equal_nan)
+
+
+    def allclose(self,other,rtol=1e-5,atol=1e-8,equal_nan=True):
+        """
+        Test whether all values are approximately equal to corresponding ones of other Table.
+
+        Parameters
+        ----------
+        other : Table
+            Table to compare against.
+        rtol : float, optional
+            Relative tolerance of equality.
+        atol : float, optional
+            Absolute tolerance of equality.
+        equal_nan : bool, optional
+            Consider matching NaN values as equal. Defaults to True.
+
+        Returns
+        -------
+        answer : bool
+            Whether corresponding values are close between both tables.
+
+        """
+        return np.allclose( self.data.to_numpy(),
+                            other.data.to_numpy(),
+                            rtol=rtol,
+                            atol=atol,
+                            equal_nan=equal_nan)
+
+
     @staticmethod
     def load(fname):
         """
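
Note on isclose/allclose: both are thin wrappers around the corresponding NumPy routines applied to the two tables' underlying arrays; isclose yields an element-wise boolean mask, allclose a single bool. A quick illustration of the difference (toy data, not from the tests):

    import numpy as np

    a = np.array([[1.0, 2.0, np.nan],
                  [3.0, 4.0, 5.0   ]])
    b = a.copy()
    b[0,0] += 1e-9   # within the default rtol/atol
    b[1,2] += 1.0    # clearly different

    print(np.isclose(a, b, equal_nan=True))   # element-wise mask; NaN vs NaN counts as close
    print(np.allclose(a, b, equal_nan=True))  # False -- one entry is out of tolerance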
@@ -120,12 +255,13 @@ class Table:
         return Table(data,shapes,comments)


     @staticmethod
     def load_ang(fname):
         """
         Load from ang file.

-        A valid TSL ang file needs to contains the following columns:
+        A valid TSL ang file has to have the following columns:

         - Euler angles (Bunge notation) in radians, 3 floats, label 'eu'.
         - Spatial position in meters, 2 floats, label 'pos'.
@@ -216,10 +352,12 @@ class Table:
         """
         dup = self.copy()
         dup._add_comment(label,data.shape[1:],info)
-        if re.match(r'[0-9]*?_',label):
-            idx,key = label.split('_',1)
-            iloc = dup.data.columns.get_loc(key).tolist().index(True) + int(idx) -1
+        m = re.match(r'(.*)\[((\d+,)*(\d+))\]',label)
+        if m:
+            key = m.group(1)
+            idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(","))),
+                                       self.shapes[key])
+            iloc = dup.data.columns.get_loc(key).tolist().index(True) + idx
             dup.data.iloc[:,iloc] = data
         else:
             dup.data[label] = data.reshape(dup.data[label].shape)
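
Note on the new component addressing: set() (and sort_by() in the next hunk) now take a bracketed multi-index such as 'F[1,0]' instead of the old 1-based '4_F' counting scheme. The regex splits the label from the index, and np.ravel_multi_index turns the index into the flat column offset within that label's block. A small worked sketch:

    import re
    import numpy as np

    label  = 'F[1,0]'        # component (1,0) of a (3,3)-shaped column 'F'
    shapes = {'F': (3,3)}    # as stored in Table.shapes

    m   = re.match(r'(.*)\[((\d+,)*(\d+))\]', label)
    key = m.group(1)                                          # 'F'
    idx = np.ravel_multi_index(tuple(map(int, m.group(2).split(','))),
                               shapes[key])                   # row-major offset: 1*3 + 0 = 3
    print(key, idx)            # F 3
    print(f'{idx+1}_{key}')    # '4_F' -- the equivalent old-style label produced for sort_by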
@@ -322,10 +460,18 @@ class Table:
            Updated table.

         """
+        labels_ = [labels] if isinstance(labels,str) else labels.copy()
+        for i,l in enumerate(labels_):
+            m = re.match(r'(.*)\[((\d+,)*(\d+))\]',l)
+            if m:
+                idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),
+                                           self.shapes[m.group(1)])
+                labels_[i] = f'{1+idx}_{m.group(1)}'
+
         dup = self.copy()
-        dup._label_discrete()
-        dup.data.sort_values(labels,axis=0,inplace=True,ascending=ascending)
-        dup._label_uniform()
+        dup._relabel('linear')
+        dup.data.sort_values(labels_,axis=0,inplace=True,ascending=ascending)
+        dup._relabel('uniform')
         dup.comments.append(f'sorted {"ascending" if ascending else "descending"} by {labels}')
         return dup
@@ -86,9 +86,12 @@ class TestConfigMaterial:

     def test_from_table(self):
         N = np.random.randint(3,10)
-        a = np.vstack((np.hstack((np.arange(N),np.arange(N)[::-1])),np.ones(N*2),np.zeros(N*2),np.ones(N*2),np.ones(N*2))).T
-        t = Table(a,{'varying':1,'constant':4})
-        c = ConfigMaterial.from_table(t,**{'phase':'varying','O':'constant','homogenization':'4_constant'})
+        a = np.vstack((np.hstack((np.arange(N),np.arange(N)[::-1])),
+                       np.ones(N*2),np.zeros(N*2),np.ones(N*2),np.ones(N*2),
+                       np.ones(N*2),
+                      )).T
+        t = Table(a,{'varying':1,'constant':4,'ones':1})
+        c = ConfigMaterial.from_table(t,**{'phase':'varying','O':'constant','homogenization':'ones'})
         assert len(c['material']) == N
         for i,m in enumerate(c['material']):
             assert m['homogenization'] == 1 and (m['constituents'][0]['O'] == [1,0,1,1]).all()
@@ -407,7 +407,8 @@ class TestGrid:
         z=np.ones(cells.prod())
         z[cells[:2].prod()*int(cells[2]/2):]=0
         t = Table(np.column_stack((coords,z)),{'coords':3,'z':1})
-        g = Grid.from_table(t,'coords',['1_coords','z'])
+        t = t.add('indicator',t.get('coords')[:,0])
+        g = Grid.from_table(t,'coords',['indicator','z'])
         assert g.N_materials == g.cells[0]*2 and (g.material[:,:,-1]-g.material[:,:,0] == cells[0]).all()
@@ -36,13 +36,33 @@ class TestTable:
         d = default.get('F')
         assert np.allclose(d,1.0) and d.shape[1:] == (3,3)

-    def test_get_component(self,default):
-        d = default.get('5_F')
-        assert np.allclose(d,1.0) and d.shape[1:] == (1,)
+    def test_set(self,default):
+        d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F')
+        assert np.allclose(d,0.0) and d.shape[1:] == (3,3)

-    @pytest.mark.parametrize('N',[10,40])
-    def test_getitem(self,N):
-        assert len(Table(np.random.rand(N,1),{'X':1})[:N//2]) == N//2
+    def test_set_component(self,default):
+        d = default.set('F[0,0]',np.zeros((5)),'set to zero').get('F')
+        assert np.allclose(d[...,0,0],0.0) and d.shape[1:] == (3,3)
+
+    def test_labels(self,default):
+        assert default.labels == ['F','v','s']
+
+    def test_add(self,default):
+        d = np.random.random((5,9))
+        assert np.allclose(d,default.add('nine',d,'random data').get('nine'))
+
+    def test_isclose(self,default):
+        assert default.isclose(default).all()
+
+    def test_allclose(self,default):
+        assert default.allclose(default)
+
+    @pytest.mark.parametrize('N',[1,3,4])
+    def test_slice(self,default,N):
+        assert len(default[:N]) == 1+N
+        assert len(default[:N,['F','s']]) == 1+N
+        assert default[N:].get('F').shape == (len(default)-N,3,3)
+        assert (default[:N,['v','s']].data == default['v','s'][:N].data).all().all()

     @pytest.mark.parametrize('mode',['str','path'])
     def test_write_read(self,default,tmp_path,mode):
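
Note on the 1+N in test_slice: because __getitem__ routes row slices through DataFrame.loc, slicing is label-based and end-inclusive, so default[:N] keeps rows 0 through N. A minimal pandas illustration:

    import numpy as np
    import pandas as pd

    df = pd.DataFrame(np.arange(10), columns=['x'])
    print(len(df.loc[:4]))    # 5 -- .loc slices by label and includes the end point
    print(len(df.iloc[:4]))   # 4 -- .iloc slices by position and excludes it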
@@ -84,21 +104,6 @@ class TestTable:
         with open(ref_path/fname) as f:
             Table.load(f)

-    def test_set(self,default):
-        d = default.set('F',np.zeros((5,3,3)),'set to zero').get('F')
-        assert np.allclose(d,0.0) and d.shape[1:] == (3,3)
-
-    def test_set_component(self,default):
-        d = default.set('1_F',np.zeros((5)),'set to zero').get('F')
-        assert np.allclose(d[...,0,0],0.0) and d.shape[1:] == (3,3)
-
-    def test_labels(self,default):
-        assert default.labels == ['F','v','s']
-
-    def test_add(self,default):
-        d = np.random.random((5,9))
-        assert np.allclose(d,default.add('nine',d,'random data').get('nine'))
-
     def test_rename_equivalent(self):
         x = np.random.random((5,13))
         t = Table(x,{'F':(3,3),'v':(3,),'s':(1,)},['random test data'])
@@ -169,15 +174,15 @@ class TestTable:
     def test_sort_component(self):
         x = np.random.random((5,12))
         t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
-        unsort = t.get('4_F')
-        sort = t.sort_by('4_F').get('4_F')
+        unsort = t.get('F')[:,1,0]
+        sort = t.sort_by('F[1,0]').get('F')[:,1,0]
         assert np.all(np.sort(unsort,0)==sort)

     def test_sort_revert(self):
         x = np.random.random((5,12))
         t = Table(x,{'F':(3,3),'v':(3,)},['random test data'])
-        sort = t.sort_by('4_F',ascending=False).get('4_F')
-        assert np.all(np.sort(sort,0)==sort[::-1,:])
+        sort = t.sort_by('F[1,0]',ascending=False).get('F')[:,1,0]
+        assert np.all(np.sort(sort,0)==sort[::-1])

     def test_sort(self):
         t = Table(np.array([[0,1,],[2,1,]]),
@@ -185,4 +190,4 @@ class TestTable:
                   ['test data'])\
                  .add('s',np.array(['b','a']))\
                  .sort_by('s')
-        assert np.all(t.get('1_v') == np.array([2,0]).reshape(2,1))
+        assert np.all(t.get('v')[:,0] == np.array([2,0]))